# Source file: lib/mars_web/channels/event_timeline_channel.ex
# Repo: jchristopherinc/mars (MIT)
defmodule MarsWeb.EventTimelineChannel do
use MarsWeb, :channel
@moduledoc """
Realtime layer for updating events in Message - Event lifecycle page
"""
alias MarsWeb.TimeHelper
@doc """
Accept Socket connections that have topic `event_timeline:<event_id/message_id>`
"""
def join("event_timeline:" <> event_id, payload, socket) do
if authorized?(payload) do
{:ok, "event_timeline:#{event_id}", socket}
else
{:error, %{reason: "unauthorized"}}
end
end
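# Illustrative join from a channel test (topic id and payload are assumed):
#
#     {:ok, _topic, socket} =
#       Phoenix.ChannelTest.subscribe_and_join(socket, "event_timeline:some-message-id", %{})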
@doc """
Channels can be used in a request/response fashion
by sending replies to requests from the client
"""
def handle_in("ping", payload, socket) do
{:reply, {:ok, payload}, socket}
end
@doc """
It is also common to receive messages from the client and
broadcast to everyone in the current topic (event_timeline:*).
"""
def handle_in("shout", payload, socket) do
broadcast(socket, "shout", payload)
{:noreply, socket}
end
@doc """
To handle outgoing messages out of socket.
Generic enough to accept anyevent from Backend to be sent over to the Frontend
"""
def handle_out(event, payload, socket) do
push(socket, event, payload)
{:noreply, socket}
end
@doc """
Public method to broadcast message lifecycle events to UI
"""
def broadcast_events(message_id, events) do
# events is already a map
MarsWeb.Endpoint.broadcast("event_timeline:#{message_id}", "add_to_timeline", events)
end
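# Usage sketch (message id and event map are illustrative):
#
#     MarsWeb.EventTimelineChannel.broadcast_events("msg-123", %{"event" => "DELIVERED", "time" => "12:00:00"})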
@doc """
A test method to broadcast test events to any message_id
"""
def test_broadcast_events(message_id) do
time = TimeHelper.mars_formatted_time(Timex.now())
key = "Test Event for #{message_id}"
event_key =
key
|> String.upcase()
|> String.replace("_", " ")
payload = %{
"event" => event_key,
"time" => time
}
MarsWeb.Endpoint.broadcast("event_timeline:#{message_id}", "add_to_timeline", payload)
end
# Private method
# Add authorization logic here as required.
defp authorized?(_payload) do
true
end
end
# Source file: clients/billing_budgets/mix.exs
# Repo: yoshi-code-bot/elixir-google-api (Apache-2.0)
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BillingBudgets.Mixfile do
use Mix.Project
@version "0.13.7"
def project() do
[
app: :google_api_billing_budgets,
version: @version,
elixir: "~> 1.6",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
deps: deps(),
source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/billing_budgets"
]
end
def application() do
[extra_applications: [:logger]]
end
defp deps() do
[
{:google_gax, "~> 0.4"},
{:ex_doc, "~> 0.16", only: :dev}
]
end
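# A consumer would pull this client in as a dependency (version constraint illustrative):
#
#     {:google_api_billing_budgets, "~> 0.13"}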
defp description() do
"""
Cloud Billing Budget API client library. The Cloud Billing Budget API stores Cloud Billing budgets, which define a budget plan and the rules to execute as spend is tracked against that plan.
"""
end
defp package() do
[
files: ["lib", "mix.exs", "README*", "LICENSE"],
maintainers: ["Jeff Ching", "Daniel Azuma"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/billing_budgets",
"Homepage" => "https://cloud.google.com/billing/docs/how-to/budget-api-overview"
}
]
end
end
# Source file: lib/faker/team/en.ex
# Repo: pharosproduction/faker (MIT)
defmodule Faker.Team.En do
import Faker, only: [sampler: 2]
alias Faker.Address
@moduledoc """
Functions for generating team related data in English
"""
@doc """
Returns a string of the form [state] [creature]
"""
@spec name() :: String.t
def name, do: "#{Address.state} #{creature()}"
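# Example (output is random; the value shown is illustrative):
#
#     Faker.Team.En.name() #=> "Hawaii banshees"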
@doc """
Returns a random creature name
"""
@spec creature() :: String.t
sampler :creature, ["ants", "banshees", "bats", "bears", "bees", "birds", "black cats", "buffalo", "cats", "cattle", "chickens", "chimeras", "conspirators", "crows", "dogs", "dolphins", "dragons", "druids", "ducks", "dwarves", "elephants", "elves", "enchanters", "exorcists", "fishes", "foes", "foxes", "frogs", "geese", "ghosts", "giants", "gnomes", "goats", "goblins", "gooses", "griffins", "horses", "kangaroos", "lions", "lycanthropes", "monkeys", "nemesis", "ogres", "oracles", "owls", "oxen", "penguins", "people", "pigs", "prophets", "rabbits", "sheep", "sons", "sorcerors", "spiders", "spirits", "tigers", "vampires", "vixens", "warlocks", "werewolves", "whales", "witches", "wolves", "worshipers", "zebras", "zombies"]
end
# Source file: apps/core/priv/repo/migrations/20170715104052_credentials_recovery_requests.exs
# Repo: ehealth-ua/ehealth.api (Apache-2.0)
defmodule Core.Repo.Migrations.CredentialsRecoveryRequests do
use Ecto.Migration
def change do
create table(:credentials_recovery_requests, primary_key: false) do
add(:id, :uuid, primary_key: true)
add(:user_id, :uuid)
add(:is_active, :boolean, default: true)
timestamps(type: :utc_datetime_usec)
end
end
end
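# `create` is reversible: running this migration forward (e.g. `mix ecto.migrate`)
# creates the table, and rolling back (`mix ecto.rollback`) drops it.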
end
# Source file: lib/shadowsocks/blacklist.ex
# Repo: lingoer/shadowsocks-ex (BSD-3-Clause)
defmodule Shadowsocks.BlackList do
use GenServer
require Shadowsocks.Event
@tab :ss_blacklist
@cache_tab :ss_failed_ips
@check_block_time 600 * 1000
defmodule EventHandler do
@behaviour :gen_event
def init([pid]), do: {:ok, pid}
def handle_event({:bad_request, _, saddr}, pid) do
send pid, {:bad_request, saddr}
{:ok, pid}
end
def handle_event(_event, pid) do
{:ok, pid}
end
def handle_call(_, state), do: {:ok, :ok, state}
def handle_info(_, state), do: {:ok, state}
def terminate(_,_), do: :ok
def code_change(_old_vsn, state, _extra), do: {:ok, state}
end
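# This handler is attached in init/1 below via :gen_event.add_handler/3, so every
# {:bad_request, _, saddr} event on Shadowsocks.Event is forwarded to this server
# as a {:bad_request, saddr} message.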
@doc """
check ip in blacklist?
"""
@spec blocked?(tuple) :: boolean
def blocked?(addr) do
:ets.member(@tab, addr)
end
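# Usage sketch (address is illustrative):
#
#     Shadowsocks.BlackList.add({1, 2, 3, 4})
#     Shadowsocks.BlackList.blocked?({1, 2, 3, 4}) #=> true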
@doc """
add ip to blacklist's static rule
"""
@spec add(tuple) :: boolean
def add(addr) do
add(addr, :static)
end
@doc """
remove ip from blacklist
"""
@spec del(tuple) :: boolean
def del({a,b,c,d}) do
# translate ipv4 to ipv6
<<a1::16, a2::16>> = <<a::8, b::8, c::8, d::8>>
:ets.delete(@tab, {0,0,0,0,0,0xFFFF, a1, a2})
:ets.delete(@tab, {a,b,c,d})
end
def del(addr) do
:ets.delete(@tab, addr)
end
@doc """
clear blacklist
"""
@spec clear(:static | :dynamic | :all) :: boolean
def clear(:all) do
# delete_all_objects empties the table while keeping it alive for later lookups
:ets.delete_all_objects(@tab)
end
def clear(:static) do
:ets.match_delete(@tab, {:_, :static, :_})
end
def clear(:dynamic) do
:ets.match_delete(@tab, {:_, :dynamic, :_})
end
def clear(_), do: false
@doc """
list block rules
"""
@spec list() :: [{tuple, :static | :dynamic, integer}]
def list() do
:ets.tab2list(@tab)
end
def start_link, do: GenServer.start_link(__MODULE__, [], name: __MODULE__)
def init([]) do
@tab = :ets.new(@tab, [:set,
:named_table,
:public,
{:read_concurrency, true}])
:ets.new(@cache_tab, [:set, :protected, :named_table])
with args <- Application.get_env(:shadowsocks, :dynamic_blocklist),
true <- Keyword.keyword?(args),
true <- Keyword.get(args, :enable, false),
attack_times <- Keyword.get(args, :attack_times, 50),
attack_time <- Keyword.get(args, :collect_duration, 3600*1000),
block_time <- Keyword.get(args, :block_expire, 7 * 24 * 3600 * 1000) do
block_expire = min(block_time, @check_block_time)
:gen_event.add_handler(Shadowsocks.Event, EventHandler, [self()])
Process.send_after self(), :attack_check, attack_time
Process.send_after self(), :expire_check, block_expire
{:ok, %{block_time: block_time,
attack_time: attack_time,
attack_times: attack_times,
block_expire: block_expire}}
else
_ ->
{:ok, %{}}
end
end
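# Example config consumed by init/1 above (values illustrative):
#
#     config :shadowsocks, :dynamic_blocklist,
#       enable: true,
#       attack_times: 50,
#       collect_duration: 3_600_000,
#       block_expire: 7 * 24 * 3_600_000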
def handle_info({:bad_request, addr}, %{attack_times: attack_times}=state) do
case :ets.lookup(@cache_tab, addr) do
[{_, times}] when times >= attack_times ->
add(addr, :dynamic)
Shadowsocks.Event.dynamic_blocked(addr)
:ets.delete(@cache_tab, addr)
[{_, times}] ->
:ets.insert(@cache_tab, {addr, times+1})
[] ->
:ets.insert(@cache_tab, {addr, 1})
end
{:noreply, state}
end
def handle_info(:attack_check, %{attack_time: attack_time}=state) do
# reset the collected failure counts (delete_all_objects keeps the table itself alive)
:ets.delete_all_objects(@cache_tab)
Process.send_after self(), :attack_check, attack_time
{:noreply, state}
end
def handle_info(:expire_check, %{block_time: block_time, block_expire: check_time}=state) do
time = System.system_time(:milliseconds) - block_time
:ets.select_delete(@tab, [{{:_, :dynamic, :"$1"}, [{:"<", :"$1", time}], [true]}])
Process.send_after self(), :expire_check, check_time
{:noreply, state}
end
def handle_info(_, state) do
{:noreply, state}
end
def terminate(_, _) do
:gen_event.delete_handler(Shadowsocks.Event, EventHandler, [self()])
end
defp add({a,b,c,d}, type) do
# translate ipv4 to ipv6
<<a1::16, a2::16>> = <<a::8, b::8, c::8, d::8>>
:ets.insert(@tab, {{0,0,0,0,0,0xFFFF, a1, a2}, type, System.system_time(:milliseconds)})
:ets.insert(@tab, {{a,b,c,d}, type, System.system_time(:milliseconds)})
end
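# e.g. {1, 2, 3, 4} is additionally stored under its IPv4-mapped IPv6 form
# {0, 0, 0, 0, 0, 0xFFFF, 0x0102, 0x0304}, so either representation is blocked.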
defp add(addr, type) do
:ets.insert(@tab, {addr, type, System.system_time(:milliseconds)})
end
end
# Source file: apps/debugger/lib/debugger/server.ex
# Repo: tmepple/elixir-ls (Apache-2.0)
defmodule ElixirLS.Debugger.Server do
@moduledoc """
Implements the VS Code Debug Protocol
Refer to the protocol's [documentation](https://github.com/Microsoft/vscode/blob/master/src/vs/workbench/parts/debug/common/debugProtocol.d.ts)
for details.
The protocol specifies that we must assign unique IDs to "threads" (or processes), to stack
frames, and to any variables that can be expanded. We keep a counter with the next ID to use and
increment it any time we assign an ID.
"""
alias ElixirLS.Debugger.{Output, Stacktrace, Protocol, Variables}
use GenServer
use Protocol
@temp_beam_dir ".elixir_ls/temp_beams"
defstruct client_info: nil,
config: %{},
task_ref: nil,
threads: %{},
threads_inverse: %{},
paused_processes: %{},
next_id: 1,
output: Output,
breakpoints: %{}
defmodule PausedProcess do
defstruct stack: nil,
frames: %{},
frames_inverse: %{},
vars: %{},
vars_inverse: %{}
end
## Client API
def start_link(opts \\ []) do
name = opts[:name]
opts = Keyword.delete(opts, :name)
GenServer.start_link(__MODULE__, opts, name: name)
end
def receive_packet(server \\ __MODULE__, packet) do
GenServer.cast(server, {:receive_packet, packet})
end
def breakpoint_reached(pid, server) do
GenServer.cast(server, {:breakpoint_reached, pid})
end
## Server Callbacks
def init(opts) do
:int.start()
state = if opts[:output], do: %__MODULE__{output: opts[:output]}, else: %__MODULE__{}
{:ok, state}
end
def handle_cast({:receive_packet, request(_, _) = packet}, state) do
{response_body, state} = handle_request(packet, state)
Output.send_response(packet, response_body)
{:noreply, state}
end
def handle_cast({:breakpoint_reached, pid}, state) do
{state, thread_id} = ensure_thread_id(state, pid)
paused_process = %PausedProcess{stack: Stacktrace.get(pid)}
state = put_in(state.paused_processes[pid], paused_process)
body = %{"reason" => "breakpoint", "threadId" => thread_id, "allThreadsStopped" => false}
Output.send_event("stopped", body)
{:noreply, state}
end
def handle_info({:DOWN, ref, :process, _pid, reason}, %{task_ref: ref} = state) do
exit_code =
case reason do
:normal ->
0
_ ->
IO.puts(
:standard_error,
"(Debugger) Task failed because " <> Exception.format_exit(reason)
)
1
end
Output.send_event("exited", %{"exitCode" => exit_code})
Output.send_event("terminated", %{"restart" => false})
{:noreply, %{state | task_ref: nil}}
end
def handle_info({:EXIT, _, :normal}, state) do
{:noreply, state}
end
# If we get the disconnect request from the client, we send :disconnect to the server so it will
# die right after responding to the request
def handle_info(:disconnect, state) do
System.halt(0)
{:noreply, state}
end
def terminate(reason, _state) do
if reason != :normal do
IO.puts(:standard_error, "(Debugger) Terminating because #{Exception.format_exit(reason)}")
end
end
## Helpers
defp handle_request(initialize_req(_, client_info), state) do
check_erlang_version()
{capabilities(), %{state | client_info: client_info}}
end
defp handle_request(launch_req(_, config), state) do
{_, ref} = spawn_monitor(fn -> initialize(config) end)
receive do
{:DOWN, ^ref, :process, _pid, reason} ->
if reason != :normal do
IO.puts(
:standard_error,
"(Debugger) Initialization failed because " <> Exception.format_exit(reason)
)
Output.send_event("exited", %{"exitCode" => 1})
Output.send_event("terminated", %{"restart" => false})
end
end
{%{}, %{state | config: config}}
end
defp handle_request(set_breakpoints_req(_, %{"path" => path}, breakpoints), state) do
new_lines = for %{"line" => line} <- breakpoints, do: line
existing_bps = state.breakpoints[path] || []
existing_bp_lines = for {_module, line} <- existing_bps, do: line
removed_lines = existing_bp_lines -- new_lines
removed_bps = Enum.filter(existing_bps, fn {_, line} -> line in removed_lines end)
for {module, line} <- removed_bps do
:int.delete_break(module, line)
end
result = set_breakpoints(path, new_lines)
new_bps = for {:ok, module, line} <- result, do: {module, line}
state = put_in(state.breakpoints[path], new_bps)
breakpoints_json =
Enum.map(result, fn
{:ok, _, _} -> %{"verified" => true}
{:error, error} -> %{"verified" => false, "message" => error}
end)
{%{"breakpoints" => breakpoints_json}, state}
end
defp handle_request(set_exception_breakpoints_req(_), state) do
{%{}, state}
end
defp handle_request(configuration_done_req(_), state) do
server = :erlang.process_info(self())[:registered_name] || self()
:int.auto_attach([:break], {__MODULE__, :breakpoint_reached, [server]})
task = state.config["task"] || Mix.Project.config()[:default_task]
args = state.config["taskArgs"] || []
{_pid, task_ref} = spawn_monitor(fn -> launch_task(task, args) end)
{%{}, %{state | task_ref: task_ref}}
end
defp handle_request(threads_req(_), state) do
pids = :erlang.processes()
{state, thread_ids} = ensure_thread_ids(state, pids)
threads =
for {pid, thread_id} <- List.zip([pids, thread_ids]), (info = Process.info(pid)) != nil do
thread_info = Enum.into(info, %{})
name =
case Enum.into(thread_info, %{}) do
%{:registered_name => registered_name} ->
inspect(registered_name)
%{:initial_call => {mod, func, arity}} ->
"#{inspect(mod)}.#{to_string(func)}/#{arity}"
end
full_name = Enum.join([name, String.trim_leading(inspect(pid), "#PID")], " ")
%{"id" => thread_id, "name" => full_name}
end
threads = Enum.sort_by(threads, fn %{"name" => name} -> name end)
{%{"threads" => threads}, state}
end
defp handle_request(request(_, "stackTrace", %{"threadId" => thread_id} = args), state) do
pid = state.threads[thread_id]
paused_process = state.paused_processes[pid]
total_frames = Enum.count(paused_process.stack)
start_frame =
case args do
%{"startFrame" => start_frame} when is_integer(start_frame) -> start_frame
_ -> 0
end
end_frame =
case args do
%{"levels" => levels} when is_integer(levels) and levels > 0 -> start_frame + levels
_ -> -1
end
stack_frames = Enum.slice(paused_process.stack, start_frame..end_frame)
{state, frame_ids} = ensure_frame_ids(state, pid, stack_frames)
stack_frames_json =
for {stack_frame, frame_id} <- List.zip([stack_frames, frame_ids]) do
%{
"id" => frame_id,
"name" => Stacktrace.Frame.name(stack_frame),
"line" => stack_frame.line,
"column" => 0,
"source" => %{"path" => stack_frame.file}
}
end
{%{"stackFrames" => stack_frames_json, "totalFrames" => total_frames}, state}
end
defp handle_request(request(_, "scopes", %{"frameId" => frame_id}), state) do
{pid, frame} = find_frame(state.paused_processes, frame_id)
{state, args_id} = ensure_var_id(state, pid, frame.args)
{state, bindings_id} = ensure_var_id(state, pid, frame.bindings)
vars_scope = %{
"name" => "variables",
"variablesReference" => bindings_id,
"namedVariables" => Enum.count(frame.bindings),
"indexedVariables" => 0,
"expensive" => false
}
args_scope = %{
"name" => "arguments",
"variablesReference" => args_id,
"namedVariables" => 0,
"indexedVariables" => Enum.count(frame.args),
"expensive" => false
}
scopes = if Enum.count(frame.args) > 0, do: [vars_scope, args_scope], else: [vars_scope]
{%{"scopes" => scopes}, state}
end
defp handle_request(request(_, "variables", %{"variablesReference" => var_id} = args), state) do
{pid, var} = find_var(state.paused_processes, var_id)
{state, vars_json} = variables(state, pid, var, args["start"], args["count"], args["filter"])
{%{"variables" => vars_json}, state}
end
defp handle_request(request(_, "evaluate"), state) do
msg = "(Debugger) Expression evaluation in Elixir debugger is not supported (yet)."
{%{"result" => msg, "variablesReference" => 0}, state}
end
defp handle_request(request(_, "disconnect"), state) do
send(self(), :disconnect)
{%{}, state}
end
defp handle_request(continue_req(_, thread_id), state) do
pid = state.threads[thread_id]
state = remove_paused_process(state, pid)
:int.continue(pid)
{%{"allThreadsContinued" => false}, state}
end
defp handle_request(next_req(_, thread_id), state) do
pid = state.threads[thread_id]
state = remove_paused_process(state, pid)
:int.next(pid)
{%{}, state}
end
defp handle_request(step_in_req(_, thread_id), state) do
pid = state.threads[thread_id]
state = remove_paused_process(state, pid)
:int.step(pid)
{%{}, state}
end
defp handle_request(step_out_req(_, thread_id), state) do
pid = state.threads[thread_id]
state = remove_paused_process(state, pid)
:int.finish(pid)
{%{}, state}
end
defp remove_paused_process(state, pid) do
update_in(state.paused_processes, fn paused_processes ->
Map.delete(paused_processes, pid)
end)
end
defp variables(state, pid, var, start, count, filter) do
children =
if (filter == "named" and Variables.child_type(var) == :indexed) or
(filter == "indexed" and Variables.child_type(var) == :named) do
[]
else
Variables.children(var, start, count)
end
Enum.reduce(children, {state, []}, fn {name, value}, {state, result} ->
{state, var_id} =
if Variables.expandable?(value) do
ensure_var_id(state, pid, value)
else
{state, 0}
end
json = %{
"name" => to_string(name),
"value" => inspect(value),
"variablesReference" => var_id,
"type" => Variables.type(value)
}
json =
case Variables.child_type(value) do
:indexed -> Map.put(json, "indexedVariables", Variables.num_children(value))
:named -> Map.put(json, "namedVariables", Variables.num_children(value))
nil -> json
end
{state, result ++ [json]}
end)
end
defp find_var(paused_processes, var_id) do
Enum.find_value(paused_processes, fn {pid, paused_process} ->
if Map.has_key?(paused_process.vars, var_id) do
{pid, paused_process.vars[var_id]}
end
end)
end
defp find_frame(paused_processes, frame_id) do
Enum.find_value(paused_processes, fn {pid, paused_process} ->
if Map.has_key?(paused_process.frames, frame_id) do
{pid, paused_process.frames[frame_id]}
end
end)
end
defp ensure_thread_id(state, pid) do
if Map.has_key?(state.threads_inverse, pid) do
{state, state.threads_inverse[pid]}
else
id = state.next_id
state = put_in(state.threads[id], pid)
state = put_in(state.threads_inverse[pid], id)
state = put_in(state.next_id, id + 1)
{state, id}
end
end
defp ensure_thread_ids(state, pids) do
Enum.reduce(pids, {state, []}, fn pid, {state, ids} ->
{state, id} = ensure_thread_id(state, pid)
{state, ids ++ [id]}
end)
end
defp ensure_var_id(state, pid, var) do
if Map.has_key?(state.paused_processes[pid].vars_inverse, var) do
{state, state.paused_processes[pid].vars_inverse[var]}
else
id = state.next_id
state = put_in(state.paused_processes[pid].vars[id], var)
state = put_in(state.paused_processes[pid].vars_inverse[var], id)
state = put_in(state.next_id, id + 1)
{state, id}
end
end
defp ensure_frame_ids(state, pid, stack_frames) do
Enum.reduce(stack_frames, {state, []}, fn stack_frame, {state, ids} ->
{state, id} = ensure_frame_id(state, pid, stack_frame)
{state, ids ++ [id]}
end)
end
defp ensure_frame_id(state, pid, frame) do
if Map.has_key?(state.paused_processes[pid].frames_inverse, frame) do
{state, state.paused_processes[pid].frames_inverse[frame]}
else
id = state.next_id
state = put_in(state.paused_processes[pid].frames[id], frame)
state = put_in(state.paused_processes[pid].frames_inverse[frame], id)
state = put_in(state.next_id, id + 1)
{state, id}
end
end
defp initialize(%{"projectDir" => project_dir} = config) do
prev_env = Mix.env()
task = config["task"]
task_args = config["taskArgs"]
set_stack_trace_mode(config["stackTraceMode"])
set_env_vars(config["env"])
File.cd!(project_dir)
# Mixfile may already be loaded depending on cwd when launching debugger task
mixfile = Path.absname(System.get_env("MIX_EXS") || "mix.exs")
unless match?(%{file: ^mixfile}, Mix.ProjectStack.peek()) do
Code.load_file(System.get_env("MIX_EXS") || "mix.exs")
end
task = task || Mix.Project.config()[:default_task]
env = task_env(task)
if env != prev_env, do: change_env(env)
Mix.Task.run("loadconfig")
unless is_list(task_args) and "--no-compile" in task_args do
case Mix.Task.run("compile", ["--ignore-module-conflict"]) do
{:error, _} ->
IO.puts(:standard_error, "Aborting debugger due to compile errors")
:init.stop(1)
_ ->
:ok
end
end
# Some tasks (such as Phoenix tests) expect apps to already be running before the test files are
# required
if config["startApps"] do
Mix.Task.run("app.start", [])
end
exclude_modules =
config
|> Map.get("excludeModules", [])
|> Enum.map(&string_to_module/1)
interpret_modules_in(Mix.Project.build_path(), exclude_modules)
if required_files = config["requireFiles"], do: require_files(required_files)
ElixirLS.Debugger.Output.send_event("initialized", %{})
end
defp set_env_vars(env) when is_map(env) do
for {k, v} <- env, do: System.put_env(k, v)
:ok
end
defp set_env_vars(env) when is_nil(env), do: :ok
defp set_stack_trace_mode("all"), do: :int.stack_trace(:all)
defp set_stack_trace_mode("no_tail"), do: :int.stack_trace(:no_tail)
defp set_stack_trace_mode("false"), do: :int.stack_trace(false)
defp set_stack_trace_mode(nil), do: nil
defp set_stack_trace_mode(_) do
IO.warn(~S(stackTraceMode must be "all", "no_tail", or "false"))
end
defp capabilities do
%{
"supportsConfigurationDoneRequest" => true,
"supportsFunctionBreakpoints" => false,
"supportsConditionalBreakpoints" => false,
"supportsHitConditionalBreakpoints" => false,
"supportsEvaluateForHovers" => false,
"exceptionBreakpointFilters" => [],
"supportsStepBack" => false,
"supportsSetVariable" => false,
"supportsRestartFrame" => false,
"supportsGotoTargetsRequest" => false,
"supportsStepInTargetsRequest" => false,
"supportsCompletionsRequest" => false,
"supportsModulesRequest" => false,
"additionalModuleColumns" => [],
"supportedChecksumAlgorithms" => [],
"supportsRestartRequest" => false,
"supportsExceptionOptions" => false,
"supportsValueFormattingOptions" => false,
"supportsExceptionInfoRequest" => false,
"supportTerminateDebuggee" => false
}
end
defp interpret_modules_in(path, exclude_modules) do
path
|> Path.join("**/*.beam")
|> Path.wildcard()
|> Enum.map(&(Path.basename(&1, ".beam") |> String.to_atom()))
|> Enum.filter(&interpretable?(&1, exclude_modules))
|> Enum.map(&:int.ni(&1))
end
defp interpretable?(module, exclude_modules) do
:int.interpretable(module) == true and !:code.is_sticky(module) and module != __MODULE__ and
module not in exclude_modules
end
defp check_erlang_version do
version = String.to_integer(to_string(:erlang.system_info(:otp_release)))
if version < 20 do
IO.warn(
"Erlang version >= OTP 20 is required to debug Elixir. " <>
"(Current version: #{version})\n"
)
end
end
defp change_env(env) do
Mix.env(env)
if project = Mix.Project.pop() do
%{name: name, file: file} = project
:code.purge(name)
:code.delete(name)
Code.load_file(file)
end
end
defp task_env(task) do
if System.get_env("MIX_ENV") do
String.to_atom(System.get_env("MIX_ENV"))
else
task = String.to_atom(task)
Mix.Project.config()[:preferred_cli_env][task] || Mix.Task.preferred_cli_env(task) || :dev
end
end
defp launch_task(task, args) do
Mix.Task.run(task, args)
end
# Interpreting modules defined in .exs files requires that we first load the file and save any
# modules it defines to actual .beam files in the code path. The user must specify which .exs
# files to load via the launch configuration. They must be in the correct order (for example,
# test helpers before tests). We save the .beam files to a temporary folder which we add to the
# code path.
defp require_files(required_files) do
File.rm_rf(@temp_beam_dir)
File.mkdir_p(@temp_beam_dir)
Code.append_path(Path.expand(@temp_beam_dir))
for path <- required_files,
file <- Path.wildcard(path),
modules = Code.require_file(file),
is_list(modules),
{module, beam_bin} <- modules,
do: save_and_reload(module, beam_bin)
end
defp save_and_reload(module, beam_bin) do
File.write(Path.join(@temp_beam_dir, to_string(module) <> ".beam"), beam_bin)
:code.delete(module)
:int.ni(module)
end
defp set_breakpoints(path, lines) do
if Path.extname(path) == ".erl" do
module = String.to_atom(Path.basename(path, ".erl"))
for line <- lines, do: set_breakpoint(module, line)
else
try do
metadata = ElixirSense.Core.Parser.parse_file(path, false, false, nil)
for line <- lines do
env = ElixirSense.Core.Metadata.get_env(metadata, line)
if env.module == nil do
{:error, "Could not determine module at line"}
else
set_breakpoint(env.module, line)
end
end
rescue
error ->
for _line <- lines, do: {:error, Exception.format_exit(error)}
end
end
end
defp set_breakpoint(module, line) do
case :int.ni(module) do
{:module, _} ->
:int.break(module, line)
{:ok, module, line}
_ ->
{:error, "Cannot interpret module #{inspect(module)}"}
end
end
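# e.g. set_breakpoint(MyApp.Worker, 42), with MyApp.Worker standing in for a real
# module, interprets the module and returns {:ok, MyApp.Worker, 42} on success.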
defp string_to_module(str) when is_binary(str) do
case str do
":" <> name -> String.to_atom(name)
name -> String.to_atom("Elixir." <> name)
end
end
end
# Source file: config/dev.exs
# Repo: jonathanleang/phoenix_sample (Apache-2.0)
use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :phoenix_sample, PhoenixSampleWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :phoenix_sample, PhoenixSampleWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/phoenix_sample_web/(live|views)/.*(ex)$",
~r"lib/phoenix_sample_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# Source file: test/ex_pesa/Jenga/send_money/eft_test.exs
# Repo: r-coh/ex_pesa (AML, MIT)
defmodule ExPesa.Jenga.SendMoney.EFTTest do
@moduledoc false
use ExUnit.Case, async: true
import Tesla.Mock
doctest ExPesa.Jenga.SendMoney.EFT
alias ExPesa.Jenga.SendMoney.EFT
setup do
mock(fn
%{
url: "https://uat.jengahq.io/identity/v2/token",
method: :post
} ->
%Tesla.Env{
status: 200,
body: """
{
"access_token" : "SGWcJPtNtYNPGm6uSYR9yPYrAI3Bm",
"expires_in" : "3599"
}
"""
}
%{url: "https://uat.jengahq.io/transaction/v2/remittance#eft", method: :post} ->
%Tesla.Env{
status: 200,
body: %{
"transactionId" => "1452854",
"status" => "SUCCESS"
}
}
end)
:ok
end
describe "Send Money To Other Banks Via Electronic Funds Transfer" do
test "request/1 with correct params successfully sends money" do
request_body = %{
source: %{countryCode: "KE", name: "John Doe", accountNumber: "0770194201783"},
destination: %{
type: "bank",
countryCode: "KE",
name: "Tom Doe",
branchCode: "01",
bankCode: "112",
accountNumber: "0740161904311"
},
transfer: %{
type: "EFT",
amount: "10",
currencyCode: "KES",
reference: "639434645740",
date: "2020-12-02",
description: "some remarks here"
}
}
assert {:ok, result} = EFT.request(request_body)
assert result["status"] == "SUCCESS"
end
test "request/1 fails when invalid params are passed" do
assert {:error, message} = EFT.request("invalid params")
assert message == "Required Parameters missing, check your request body"
end
end
end
# Source file: lib/ecto_adapters_dynamodb.ex
# Repo: glebmikulko/ecto_adapters_dynamodb (Apache-2.0)
defmodule Ecto.Adapters.DynamoDB do
@moduledoc """
Ecto adapter for Amazon DynamoDB
Currently for a fairly limited subset of Ecto, enough for basic operations.
"""
#NOTE: in ecto, Repo.get[!] ends up calling:
#-> queryable.get
#-> queryable.one
#-> queryable.all
#-> queryable.execute
#-> adapter.execute (possibly prepare somewhere in there too? trace.)
@behaviour Ecto.Adapter
#@behaviour Ecto.Adapter.Storage
#@behaviour Ecto.Adapter.Migration
defmacro __before_compile__(_env) do
# Nothing to see here, yet...
end
use Bitwise, only_operators: true
alias ExAws.Dynamo
alias Ecto.Query.BooleanExpr
# I don't think this is necessary: Probably under child_spec and ensure_all_started
def start_link(repo, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.start_link", %{"#{inspect __MODULE__}.start_link-params" => %{repo: repo, opts: opts}})
Agent.start_link fn -> [] end
end
## Adapter behaviour - defined in lib/ecto/adapter.ex (in the ecto github repository)
@doc """
Returns the childspec that starts the adapter process.
"""
def child_spec(repo, opts) do
# TODO: need something here...
# * Pull dynamo db connection options from config
# * Start dynamo connector/aws libraries
# we'll return our own start_link for now, but I don't think we actually need
# an app here, we only need to ensure that our dependencies such as aws libs are started.
#
[:debug_requests, :access_key_id, :secret_access_key, :region, :dynamodb] |> Enum.map(fn key ->
if opts[key] != nil, do: Application.put_env(:ex_aws, key, opts[key])
end)
import Supervisor.Spec
child_spec = worker(__MODULE__, [repo, opts])
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.child_spec", %{"#{inspect __MODULE__}.child_spec-params" => %{repo: repo, child_spec: child_spec, opts: opts}})
child_spec
end
@doc """
Ensure all applications necessary to run the adapter are started.
"""
def ensure_all_started(repo, type) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.ensure_all_started", %{"#{inspect __MODULE__}.ensure_all_started-params" => %{type: type, repo: repo}})
with {:ok, _} = Application.ensure_all_started(:ecto_adapters_dynamodb)
do
{:ok, [repo]}
end
end
def supports_ddl_transaction?, do: false
def execute_ddl(repo, command, options) do
Ecto.Adapters.DynamoDB.Migration.execute_ddl(repo, command, options)
end
# moved to transaction.ex in ecto 2.1.4
# def in_transaction?(_repo), do: false
#
# def rollback(_repo, _value), do:
# raise BadFunctionError, message: "#{inspect __MODULE__} does not support transactions."
@doc """
Called to autogenerate a value for id/embed_id/binary_id.
Returns the autogenerated value, or nil if it must be
autogenerated inside the storage or raise if not supported.
For the Ecto type, `:id`, the adapter autogenerates a 128-bit integer
For the Ecto type, `:embed_id`, the adapter autogenerates a string, using `Ecto.UUID.generate()`
For the Ecto type, `:binary_id`, the adapter autogenerates a string, using `Ecto.UUID.generate()`
"""
@max_id ((1 <<< 128) - 1) # biggest possible int in 128 bits
def autogenerate(:id), do: Enum.random(1..@max_id)
def autogenerate(:embed_id), do: Ecto.UUID.generate()
def autogenerate(:binary_id), do: Ecto.UUID.generate()
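# Examples (results are random; the values shown are illustrative):
#
#     autogenerate(:id)        #=> 170141183460469231731687303715884105727
#     autogenerate(:binary_id) #=> "2e7f5b6c-13d8-4b9a-9f4e-3a5d2c1b0a99"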
@doc """
Returns the loaders for a given type.
Rather than use the Ecto adapter loaders callback, the adapter builds on ExAws' decoding functionality; please see ExAws's `ExAws.Dynamo.decode_item` and the private function, `custom_decode`, in this module, which at this time only loads :utc_datetime and :naive_datetime.
"""
def loaders(_primitive, type), do: [type]
@doc """
Returns the dumpers for a given type.
We rely on ExAws encoding functionality during insertion and update to properly format types for DynamoDB. Please see ExAws `ExAws.Dynamo.update_item` and `ExAws.Dynamo.put_item` for specifics. Currently, we only modify :utc_datetime and :naive_datetime, appending the UTC offset, "Z", to the datetime string before passing to ExAws.
"""
def dumpers(:utc_datetime, datetime), do: [datetime, &to_iso_string/1]
def dumpers(:naive_datetime, datetime), do: [datetime, &to_iso_string/1]
def dumpers(_primitive, type), do: [type]
# Add UTC offset
# We are adding the offset here also for the :naive_datetime, this
# assumes we are getting a UTC date (which does correspond with the
# timestamps() macro but not necessarily with :naive_datetime in general)
defp to_iso_string(datetime) do
{:ok, (datetime |> Ecto.DateTime.cast! |> Ecto.DateTime.to_iso8601) <> "Z"}
end
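# e.g. (illustrative; the UTC offset "Z" is appended before handing off to ExAws):
#
#     to_iso_string(~N[2017-05-01 12:00:00]) #=> {:ok, "2017-05-01T12:00:00Z"}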
@doc """
Commands invoked to prepare a query for `all`, `update_all` and `delete_all`.
The returned result is given to `execute/6`.
"""
#@callback prepare(atom :: :all | :update_all | :delete_all, query :: Ecto.Query.t) ::
# {:cache, prepared} | {:nocache, prepared}
def prepare(:all, query) do
# 'preparing' is more a SQL concept - Do we really need to do anything here or just pass the params through?
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.prepare: :all", %{"#{inspect __MODULE__}.prepare-params" => %{query: inspect(query, structs: false)}})
{:nocache, {:all, query}}
end
def prepare(:update_all, query) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.prepare: :update_all", %{"#{inspect __MODULE__}.prepare-params" => %{query: inspect(query, structs: false)}})
{:nocache, {:update_all, query}}
end
# do: {:cache, {System.unique_integer([:positive]), @conn.update_all(query)}}
def prepare(:delete_all, query) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.prepare: :delete_all", %{"#{inspect __MODULE__}.prepare-params" => %{query: inspect(query, structs: false)}})
{:nocache, {:delete_all, query}}
end
# do: {:cache, {System.unique_integer([:positive]), @conn.delete_all(query)}}
@doc """
Executes a previously prepared query.
It must return a tuple containing the number of entries and
the result set as a list of lists. The result set may also be
`nil` if a particular operation does not support them.
The `meta` field is a map containing some of the fields found
in the `Ecto.Query` struct.
It receives a process function that should be invoked for each
selected field in the query result in order to convert them to the
expected Ecto type. The `process` function will be nil if no
result set is expected from the query.
"""
#@callback execute(repo, query_meta, query, params :: list(), process | nil, options) :: result when
# result: {integer, [[term]] | nil} | no_return,
# query: {:nocache, prepared} |
# {:cached, (prepared -> :ok), cached} |
# {:cache, (cached -> :ok), prepared}
def execute(repo, meta, {:nocache, {func, prepared}}, params, process, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.execute", %{"#{inspect __MODULE__}.execute-params" => %{repo: repo, meta: meta, prepared: prepared, params: params, process: process, opts: opts}})
{table, model} = prepared.from
validate_where_clauses!(prepared)
lookup_fields = extract_lookup_fields(prepared.wheres, params, [])
limit_option = opts[:scan_limit]
scan_limit = if is_integer(limit_option), do: [limit: limit_option], else: []
# Ecto migration does not know to specify 'scan: true' to retrieve the persisted migration versions
# from line 34, file "deps/ecto/lib/ecto/migration/schema_migration.ex"
migration_source = Keyword.get(repo.config, :migration_source, "schema_migrations")
updated_opts = if table == migration_source do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.execute: table name corresponds with migration source: #{inspect migration_source}. Setting options for recursive scan.", %{})
Keyword.drop(opts, [:timeout, :log]) ++ [recursive: true]
else
Keyword.drop(opts, [:scan_limit, :limit]) ++ scan_limit
end
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.execute: local variables", %{"#{inspect __MODULE__}.execute-vars" => %{table: table, lookup_fields: lookup_fields, scan_limit: scan_limit}})
case func do
:delete_all ->
delete_all(table, lookup_fields, updated_opts)
:update_all ->
update_all(table, lookup_fields, updated_opts, prepared.updates, params)
:all ->
ecto_dynamo_log(:info, "#{inspect __MODULE__}.execute: :all", %{"#{inspect __MODULE__}.execute-all-vars" => %{table: table, lookup_fields: lookup_fields, updated_opts: updated_opts}})
result = Ecto.Adapters.DynamoDB.Query.get_item(table, lookup_fields, updated_opts)
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.execute: all: result", %{"#{inspect __MODULE__}.execute-all-result" => inspect result})
if opts[:query_info_key], do: Ecto.Adapters.DynamoDB.QueryInfo.put(opts[:query_info_key], extract_query_info(result))
if result == %{} do
# Empty map means "not found"
{0, []}
else
sources =
model.__schema__(:fields)
|> Enum.into(%{}, fn f ->
{model.__schema__(:field_source, f), f}
end)
cond do
!result["Count"] and !result["Responses"] ->
decoded = decode_item(result["Item"], model, sources, prepared.select)
{1, [decoded]}
true ->
# batch_get_item returns "Responses" rather than "Items"
results_to_decode = if result["Items"], do: result["Items"], else: result["Responses"][table]
decoded = Enum.map(results_to_decode, &(decode_item(&1, model, sources, prepared.select)))
{length(decoded), decoded}
end
end
end
end
# delete_all allows for the recursive option, scanning through multiple pages
defp delete_all(table, lookup_fields, opts) do
ecto_dynamo_log(:info, "#{inspect __MODULE__}.delete_all", %{"#{inspect __MODULE__}.delete_all-params" => %{table: table, lookup_fields: lookup_fields, opts: opts}})
# select only the key
{:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(table)
scan_or_query = Ecto.Adapters.DynamoDB.Query.scan_or_query?(table, lookup_fields)
recursive = Ecto.Adapters.DynamoDB.Query.parse_recursive_option(scan_or_query, opts)
updated_opts = prepare_recursive_opts(opts ++ [projection_expression: Enum.join(key_list, ", ")])
delete_all_recursive(table, lookup_fields, updated_opts, recursive, %{}, 0)
end
defp delete_all_recursive(table, lookup_fields, opts, recursive, query_info, total_processed) do
# query the table for which records to delete
fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(table, lookup_fields, opts)
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.delete_all_recursive: fetch_result", %{"#{inspect __MODULE__}.delete_all_recursive-fetch_result" => inspect fetch_result})
items = case fetch_result do
%{"Items" => fetch_items} -> fetch_items
%{"Item" => item} -> [item]
%{"Responses" => table_map} -> table_map[table]
_ -> []
end
prepared_data = for key_list <- Enum.map(items, &Map.to_list/1) do
key_map = for {key, val_map} <- key_list, into: %{}, do: {key, Dynamo.Decoder.decode(val_map)}
[delete_request: [key: key_map]]
end
unprocessed_items = if prepared_data != [] do
batch_delete(table, prepared_data)
else
%{}
end
num_processed =
length(prepared_data) - if !unprocessed_items[table], do: 0, else: length(unprocessed_items[table])
updated_query_info = Enum.reduce(fetch_result, query_info, fn({key, val}, acc) ->
case key do
"Count" -> Map.update(acc, key, val, fn x -> x + val end)
"ScannedCount" -> Map.update(acc, key, val, fn x -> x + val end)
"LastEvaluatedKey" -> Map.update(acc, key, val, fn _ -> fetch_result["LastEvaluatedKey"] end)
_ -> acc
end
end) |> Map.update("UnprocessedItems", unprocessed_items, fn map -> if map == %{}, do: %{}, else: %{table => map[table] ++ unprocessed_items[table]} end)
updated_recursive = Ecto.Adapters.DynamoDB.Query.update_recursive_option(recursive)
if fetch_result["LastEvaluatedKey"] != nil and updated_recursive.continue do
opts_with_offset = opts ++ [exclusive_start_key: fetch_result["LastEvaluatedKey"]]
delete_all_recursive(table, lookup_fields, opts_with_offset, updated_recursive.new_value, updated_query_info, total_processed + num_processed)
else
# We're not retrying unprocessed items yet, but we are providing the relevant info in the QueryInfo agent if :query_info_key is supplied
if opts[:query_info_key], do: Ecto.Adapters.DynamoDB.QueryInfo.put(opts[:query_info_key], updated_query_info)
{num_processed + total_processed, nil}
end
end
# returns unprocessed_items
defp batch_delete(table, prepared_data) do
batch_write_attempt = Dynamo.batch_write_item(%{table => prepared_data}) |> ExAws.request |> handle_error!(%{table: table, records: []})
batch_write_attempt["UnprocessedItems"]
end
defp update_all(table, lookup_fields, opts, updates, params) do
ecto_dynamo_log(:info, "#{inspect __MODULE__}.update_all", %{"#{inspect __MODULE__}.update_all-params" => %{table: table, lookup_fields: lookup_fields, opts: opts}})
scan_or_query = Ecto.Adapters.DynamoDB.Query.scan_or_query?(table, lookup_fields)
recursive = Ecto.Adapters.DynamoDB.Query.parse_recursive_option(scan_or_query, opts)
key_list = Ecto.Adapters.DynamoDB.Info.primary_key!(table)
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.update_all: key_list", %{"#{inspect __MODULE__}.update_all-key_list" => inspect key_list})
# The remove statement must be constructed after finding pull-indexes, but it
# also includes possibly removing nil fields, and since we have one handler for
# both set and remove, we call it during the batch update process
{update_expression, update_fields_sans_set_remove, set_remove_fields} = construct_update_expression(updates, params, opts)
ecto_dynamo_log(:info, "#{inspect __MODULE__}.update_all: update fields", %{"#{inspect __MODULE__}.update_all-update_fields" => %{update_fields_sans_set_remove: inspect(update_fields_sans_set_remove), set_remove_fields: inspect(set_remove_fields)}})
attribute_names = construct_expression_attribute_names(update_fields_sans_set_remove)
attribute_values = construct_expression_attribute_values(update_fields_sans_set_remove, opts)
base_update_options = [expression_attribute_names: attribute_names,
update_expression: update_expression,
return_values: :all_new]
updated_opts = prepare_recursive_opts(opts)
update_options = maybe_add_attribute_values(base_update_options, attribute_values)
pull_actions_without_index =
Keyword.keys(set_remove_fields[:pull])
|> Enum.any?(fn x -> !Enum.member?(Keyword.keys(maybe_list(opts[:pull_indexes])), x) end)
{new_update_options, new_set_remove_fields} =
if pull_actions_without_index do
{update_options, set_remove_fields}
else
merged_pull_indexes = Keyword.merge(set_remove_fields[:pull], maybe_list(opts[:pull_indexes]))
opts_with_pull_indexes = Keyword.update(opts, :pull_indexes, merged_pull_indexes, fn _ -> merged_pull_indexes end)
{update_batch_update_options(update_options, set_remove_fields, opts_with_pull_indexes), []}
end
update_all_recursive(table, lookup_fields, updated_opts, new_update_options, key_list, new_set_remove_fields, recursive, %{}, 0)
end
defp update_all_recursive(table, lookup_fields, opts, update_options, key_list, set_remove_fields, recursive, query_info, total_updated) do
fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(table, lookup_fields, opts)
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.update_all_recursive: fetch_result", %{"#{inspect __MODULE__}.update_all_recursive-fetch_result" => inspect(fetch_result)})
updated_query_info = case fetch_result do
%{"Count" => last_count, "ScannedCount" => last_scanned_count} ->
%{"Count" => last_count + Map.get(query_info, "Count", 0),
"ScannedCount" => last_scanned_count + Map.get(query_info, "ScannedCount", 0),
"LastEvaluatedKey" => Map.get(fetch_result, "LastEvaluatedKey")}
_ -> query_info
end
items = case fetch_result do
%{"Items" => fetch_items} -> fetch_items
%{"Item" => item} -> [item]
%{"Responses" => table_map} -> table_map[table]
_ -> []
end
num_updated = if items != [] do
batch_update(table, items, key_list, update_options, set_remove_fields, opts)
else
0
end
updated_recursive = Ecto.Adapters.DynamoDB.Query.update_recursive_option(recursive)
if fetch_result["LastEvaluatedKey"] != nil and updated_recursive.continue do
opts_with_offset = opts ++ [exclusive_start_key: fetch_result["LastEvaluatedKey"]]
update_all_recursive(table, lookup_fields, opts_with_offset, update_options, key_list, set_remove_fields, updated_recursive.new_value, updated_query_info, total_updated + num_updated)
else
if opts[:query_info_key], do: Ecto.Adapters.DynamoDB.QueryInfo.put(opts[:query_info_key], updated_query_info)
{total_updated + num_updated, []}
end
end
defp batch_update(table, items, key_list, update_options, set_remove_fields, opts) do
Enum.reduce(items, 0, fn(result_to_update, acc) ->
filters = get_key_values_dynamo_map(result_to_update, key_list)
# we only update this on a case-by-case basis if pull actions
# without specific indexes are specified
options_with_set_and_remove = case set_remove_fields do
[] -> update_options
_ ->
pull_fields_with_indexes =
Enum.map(set_remove_fields[:pull], fn {field_atom, val} ->
list = result_to_update[to_string(field_atom)]
{field_atom, find_all_indexes_in_dynamodb_list(list, val)}
end)
merged_pull_indexes = Keyword.merge(pull_fields_with_indexes, maybe_list(opts[:pull_indexes]))
opts_with_pull_indexes = Keyword.update(opts, :pull_indexes, merged_pull_indexes, fn _ -> merged_pull_indexes end)
update_batch_update_options(update_options, set_remove_fields, opts_with_pull_indexes)
end
# 'options_with_set_and_remove' might not have the key, ':expression_attribute_values',
# when there are only removal statements.
record = if options_with_set_and_remove[:expression_attribute_values],
do: [options_with_set_and_remove[:expression_attribute_values] |> Enum.into(%{})],
else: []
if options_with_set_and_remove[:update_expression] |> String.trim != "" do
Dynamo.update_item(table, filters, options_with_set_and_remove) |> ExAws.request |> handle_error!(%{table: table, records: record ++ []})
acc + 1
else
acc
end
end)
end
defp update_batch_update_options(update_options, set_remove_fields, opts) do
attribute_names = construct_expression_attribute_names(Keyword.values(set_remove_fields) |> List.flatten)
set_and_push_fields = maybe_list(set_remove_fields[:set]) ++ maybe_list(set_remove_fields[:push])
opts_with_push = opts ++ Keyword.take(set_remove_fields, [:push])
attribute_values = construct_expression_attribute_values(set_and_push_fields, opts_with_push)
set_statement = construct_set_statement(set_remove_fields[:set], opts_with_push)
opts_for_construct_remove = Keyword.take(set_remove_fields, [:pull]) ++ Keyword.take(opts, [:pull_indexes, :remove_nil_fields])
remove_statement = construct_remove_statement(set_remove_fields[:set], opts_for_construct_remove)
base_update_options =
[expression_attribute_names: Map.merge(attribute_names, update_options[:expression_attribute_names]),
update_expression: set_statement <> " " <> remove_statement <> " " <> update_options[:update_expression] |> String.trim,
return_values: :all_new]
maybe_add_attribute_values(base_update_options, attribute_values ++ maybe_list(update_options[:expression_attribute_values]))
end
# find indexes to remove for update :pull action
defp find_all_indexes_in_dynamodb_list(dynamodb_list, target) do
Dynamo.Decoder.decode(dynamodb_list)
|> Enum.with_index()
|> Enum.filter(fn {x, _} -> x == target end)
|> Enum.map(fn {_, i} -> i end)
end
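# e.g. for a DynamoDB list that decodes to ["a", "b", "a"] and a target of "a",
# this returns [0, 2].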
# During delete_all's and update_all's recursive
# procedure, we want to keep the recursion in
# the top-level, between actions, rather than
# load all the results into memory and then act;
# so we disable the recursion on get_item
defp prepare_recursive_opts(opts) do
opts |> Keyword.delete(:page_limit) |> Keyword.update(:recursive, false, fn _ -> false end)
end
@doc """
Inserts a single new struct in the data store.
## Autogenerate
The primary key will be automatically included in `returning` if the
field has type `:id` or `:binary_id` and no value was set by the
developer or none was autogenerated by the adapter.
"""
#@callback insert(repo, schema_meta, fields, on_conflict, returning, options) ::
# {:ok, fields} | {:invalid, constraints} | no_return
# def insert(_,_,_,_,_) do
def insert(repo, schema_meta, fields, on_conflict, returning, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.insert", %{"#{inspect __MODULE__}.insert-params" => %{repo: repo, schema_meta: schema_meta, fields: fields, on_conflict: on_conflict, returning: returning, opts: opts}})
insert_nil_field_option = Keyword.get(opts, :insert_nil_fields, true)
do_not_insert_nil_fields = insert_nil_field_option == false || Application.get_env(:ecto_adapters_dynamodb, :insert_nil_fields) == false
{_, table} = schema_meta.source
model = schema_meta.schema
fields_map = Enum.into(fields, %{})
record = if do_not_insert_nil_fields, do: fields_map, else: build_record_map(model, fields_map)
ecto_dynamo_log(:info, "#{inspect __MODULE__}.insert: local variables", %{"#{inspect __MODULE__}.insert-vars" => %{table: table, record: record}})
{:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(table)
hash_key = hd(key_list)
on_conflict_action = elem(on_conflict, 0)
options = case on_conflict_action do
:replace_all -> []
_ ->
attribute_names = for k <- key_list, into: %{}, do: {"##{k}", k}
conditions = for k <- key_list, do: "attribute_not_exists(##{k})"
condition_expression = Enum.join(conditions, " and ")
[expression_attribute_names: attribute_names,
condition_expression: condition_expression]
end
case Dynamo.put_item(table, record, options) |> ExAws.request |> handle_error!(%{table: table, records: [record]}) do
{:error, "ConditionalCheckFailedException"} ->
case on_conflict_action do
# Per discussion with Jose Valim (https://github.com/elixir-ecto/ecto/issues/2378)
# clarifying the adapter should return nothing if there is no `:returning` specified,
# and what we thought was to be returned as a `nil` id, is only for cases where
# "the field is autogenerated by the database" (https://hexdocs.pm/ecto/Ecto.Repo.html)
:nothing -> {:ok, []}
:raise ->
# This constraint name yields the correct behavior in the case the user
# has specified a unique constraint on the primary key in their schema:
constraint_name = "#{table}_#{hash_key}_index"
{:invalid, [unique: constraint_name]}
end
%{} ->
{:ok, []}
end
end
def insert_all(repo, schema_meta, field_list, fields, on_conflict, returning, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.insert_all", %{"#{inspect __MODULE__}.insert_all-params" => %{repo: repo, schema_meta: schema_meta, field_list: field_list, fields: fields, on_conflict: on_conflict, returning: returning, opts: opts}})
insert_nil_field_option = Keyword.get(opts, :insert_nil_fields, true)
do_not_insert_nil_fields = insert_nil_field_option == false || Application.get_env(:ecto_adapters_dynamodb, :insert_nil_fields) == false
{_, table} = schema_meta.source
model = schema_meta.schema
prepared_fields = Enum.map(fields, fn(field_set) ->
mapped_fields = Enum.into(field_set, %{})
record = if do_not_insert_nil_fields, do: mapped_fields, else: build_record_map(model, mapped_fields)
[put_request: [item: record]]
end)
ecto_dynamo_log(:info, "#{inspect __MODULE__}.insert_all: local variables", %{"#{inspect __MODULE__}.insert_all-vars" => %{table: table, records: get_records_from_fields(prepared_fields)}})
batch_write(table, prepared_fields, opts)
end
# DynamoDB will reject an entire batch of insert_all() records if there are more than 25 requests.
# https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
# batch_write/3 will break the list into chunks of 25 items and insert each separately.
defp batch_write(table, prepared_fields, opts) do
batch_write_limit = 25
response_element = "UnprocessedItems"
grouped_records = Enum.chunk_every(prepared_fields, batch_write_limit)
num_batches = length grouped_records
# Break the prepared_fields into chunks of at most 25 elements to be batch inserted, accumulating
# the total count of records and appropriate results as it loops through the reduce.
{total_processed, results} = grouped_records
|> Stream.with_index
|> Enum.reduce({0, []}, fn({field_group, i}, {running_total_processed, batch_write_results}) ->
{total_batch_processed, batch_write_attempt} = handle_batch_write(field_group, table, response_element)
# Log depth of 11 will capture the full data structure returned in any UnprocessedItems - https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.batch_write #{i + 1} of #{num_batches}: local variables", %{"#{inspect __MODULE__}.insert_all-batch_write" => %{table: table, field_group: field_group, results: batch_write_attempt}}, [depth: 11])
# We're not retrying unprocessed items yet, but we are providing the relevant info in the QueryInfo agent if :query_info_key is supplied
if opts[:query_info_key] do
query_info = extract_query_info(batch_write_attempt)
Ecto.Adapters.DynamoDB.QueryInfo.update(opts[:query_info_key], [query_info], fn(list) -> list ++ [query_info] end)
end
{running_total_processed + total_batch_processed, batch_write_results ++ [batch_write_attempt]}
end)
result_body_for_log = %{table => Enum.flat_map(results, fn(res) -> res[response_element][table] || [] end)}
ecto_dynamo_log(:info, "#{inspect __MODULE__}.batch_write: batch_write_attempt result", %{"#{inspect __MODULE__}.insert_all-batch_write" => inspect %{response_element => (if result_body_for_log[table] == [], do: %{}, else: result_body_for_log)}})
{total_processed, nil}
end
defp handle_batch_write(field_group, table, response_element) do
results = Dynamo.batch_write_item(%{table => field_group})
|> ExAws.request
|> handle_error!(%{table: table, records: get_records_from_fields(field_group)})
if results[response_element] == %{} do
{length(field_group), results}
else
{length(field_group) - length(results[response_element][table]), results}
end
end
defp get_records_from_fields(fields), do: Enum.map(fields, fn [put_request: [item: record]] -> record end)
defp build_record_map(model, fields_to_insert) do
# Ecto does not convert empty strings to nil before passing them
# to Repo.insert_all, and ExAws will remove empty strings (as well as empty lists)
# when building the insertion query but not nil values. We don't mind the removal
# of empty lists since those cannot be inserted to indexed fields, but we'd like to
# catch the removal of fields with empty strings by ExAws to support our option, :remove_nil_fields,
# so we convert these to nil.
fields = model.__schema__(:fields)
sources = fields |> Enum.into(%{}, fn f -> {f, model.__schema__(:field_source, f)} end)
empty_strings_to_nil = fields_to_insert
|> Enum.map(fn {field, val} -> {field, (if val == "", do: nil, else: val)} end)
|> Enum.into(%{})
model.__struct__
|> Map.delete(:__meta__)
|> Map.from_struct
|> Enum.reduce(%{}, fn {k, v}, acc ->
Map.put(acc, Map.get(sources, k), v)
end)
|> Map.merge(empty_strings_to_nil)
end
# In testing, 'filters' contained only the primary key and value
# TODO: handle cases of more than one tuple in 'filters'?
def delete(repo, schema_meta, filters, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.delete", %{"#{inspect __MODULE__}.delete-params" => %{repo: repo, schema_meta: schema_meta, filters: filters, opts: opts}})
{_, table} = schema_meta.source
# We offer the :range_key option for tables with composite primary key
# since Ecto will not provide the range_key value needed for the query.
# If :range_key is not provided, check if the table has a composite
# primary key and query for all the key values
updated_filters = case opts[:range_key] do
nil ->
{:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(table)
if (length key_list) > 1 do
updated_opts = opts ++ [projection_expression: Enum.join(key_list, ", ")]
filters_as_strings = for {field, val} <- filters, do: {Atom.to_string(field), {val, :==}}
fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(table, filters_as_strings, updated_opts)
items = case fetch_result do
%{"Items" => fetch_items} -> fetch_items
%{"Item" => item} -> [item]
_ -> []
end
if items == [], do: raise "__MODULE__.update error: no results found for record: #{inspect filters}"
if (length items) > 1, do: raise "__MODULE__.update error: more than one result found for record: #{inspect filters}"
for {field, key_map} <- Map.to_list(hd items) do
[{_field_type, val}] = Map.to_list(key_map)
{field, val}
end
else
filters
end
range_key ->
[range_key | filters]
end
attribute_names = construct_expression_attribute_names(keys_to_atoms(filters))
base_options = [expression_attribute_names: attribute_names]
condition_expression =
  filters
  |> Enum.map(fn(tuple) -> construct_condition_expression(tuple) end)
  |> Enum.join(" AND ")
options = base_options ++ [condition_expression: condition_expression]
# 'options' might not have the key, ':expression_attribute_values', when there are only removal statements
record = if options[:expression_attribute_values], do: [options[:expression_attribute_values] |> Enum.into(%{})], else: []
case Dynamo.delete_item(table, updated_filters, options) |> ExAws.request |> handle_error!(%{table: table, records: record ++ []}) do
%{} -> {:ok, []}
{:error, "ConditionalCheckFailedException"} -> {:error, :stale}
end
end
def update(repo, schema_meta, fields, filters, returning, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.update", %{"#{inspect __MODULE__}.update-params" => %{repo: repo, schema_meta: schema_meta, fields: fields, filters: filters, returning: returning, opts: opts}})
{_, table} = schema_meta.source
# We offer the :range_key option for tables with composite primary key
# since Ecto will not provide the range_key value needed for the query.
# If :range_key is not provided, check if the table has a composite
# primary key and query for all the key values
updated_filters = case opts[:range_key] do
nil ->
{:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(table)
if (length key_list) > 1 do
updated_opts = opts ++ [projection_expression: Enum.join(key_list, ", ")]
filters_as_strings = for {field, val} <- filters, do: {Atom.to_string(field), {val, :==}}
fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(table, filters_as_strings, updated_opts)
items = case fetch_result do
%{"Items" => fetch_items} -> fetch_items
%{"Item" => item} -> [item]
_ -> []
end
if items == [], do: raise "__MODULE__.update error: no results found for record: #{inspect filters}"
if (length items) > 1, do: raise "__MODULE__.update error: more than one result found for record: #{inspect filters}"
for {field, key_map} <- Map.to_list(hd items) do
{field, ExAws.Dynamo.Decoder.decode(key_map)}
end
else
filters
end
range_key ->
[range_key | filters]
end
update_expression = construct_update_expression(fields, opts)
# add updated_filters to attribute names and values for condition_expression
attribute_names = construct_expression_attribute_names(fields ++ keys_to_atoms(filters))
attribute_values = construct_expression_attribute_values(fields, opts)
base_options = [expression_attribute_names: attribute_names,
update_expression: update_expression]
condition_expression =
filters
|> Enum.map(fn(tuple) -> construct_condition_expression(tuple) end)
|> Enum.join(" AND ")
options =
  maybe_add_attribute_values(base_options, attribute_values) ++
    [condition_expression: condition_expression]
# 'options' might not have the key, ':expression_attribute_values', when there are only removal statements
record = if options[:expression_attribute_values], do: [options[:expression_attribute_values] |> Enum.into(%{})], else: []
case Dynamo.update_item(table, updated_filters, options) |> ExAws.request |> handle_error!(%{table: table, records: record ++ []}) do
%{} -> {:ok, []}
{:error, "ConditionalCheckFailedException"} -> {:error, :stale}
end
end
defp keys_to_atoms(list),
do: for {k, v} <- list, do: {maybe_string_to_atom(k), v}
defp maybe_string_to_atom(s),
do: if is_binary(s), do: String.to_atom(s), else: s
defp construct_condition_expression({field, _val} = _filters),
do: "attribute_exists(##{to_string(field)})"
defp extract_query_info(result), do: result |> Map.take(["Count", "ScannedCount", "LastEvaluatedKey", "UnprocessedItems", "UnprocessedKeys"])
# Used in update_all
defp extract_update_params([], _action_atom, _params), do: []
defp extract_update_params([%{expr: key_list}], action_atom, params) do
case key_list[action_atom] do
nil ->
[]
action_list ->
for s <- action_list do
{field_atom, {:^, _, [idx]}} = s
{field_atom, Enum.at(params,idx)}
end
end
end
defp extract_update_params([a], _action_atom, _params), do: error "#{inspect __MODULE__}.extract_update_params: Updates is either missing the :expr key or does not contain a struct or map: #{inspect a}"
defp extract_update_params(unsupported, _action_atom, _params), do: error "#{inspect __MODULE__}.extract_update_params: unsupported parameter construction. #{inspect unsupported}"
# Ecto does not support push pull for types other than array.
# Therefore, we enable add and delete via opts
defp extract_update_params(key_list, action_atom) do
case key_list[action_atom] do
nil -> []
action_list -> action_list
end
end
# used in :update_all
defp get_key_values_dynamo_map(dynamo_map, {:primary, keys}) do
for k <- keys, do: {String.to_atom(k), Dynamo.Decoder.decode(dynamo_map[k])}
end
defp construct_expression_attribute_names(fields) do
for {f, _} <- fields, into: %{}, do: {"##{Atom.to_string(f)}", Atom.to_string(f)}
end
defp construct_expression_attribute_values(fields, opts) do
remove_rather_than_set_to_null = opts[:remove_nil_fields] || Application.get_env(:ecto_adapters_dynamodb, :remove_nil_fields_on_update) == true
# If the value is nil and the :remove_nil_fields option is set,
# we're removing this attribute, not updating it, so filter out any such fields:
if remove_rather_than_set_to_null do
for {k, v} <- fields, !is_nil(v), do: {k, format_val(k, v, opts)}
else
for {k, v} <- fields, do: {k, format_nil(k, v, opts)}
end |> Enum.filter(fn {x, _} -> not Keyword.has_key?(maybe_list(opts[:pull]), x) end)
end
defp maybe_list(l) when is_list(l), do: l
defp maybe_list(_), do: []
defp format_nil(_k, v, _opts) when is_nil(v), do: %{"NULL" => "true"}
defp format_nil(k, v, opts), do: format_val(k, v, opts)
defp format_val(k, v, opts) do
case opts[:push][k] do
nil -> v
_ -> [v]
end
end
# DynamoDB throws an error if we pass in an empty list for attribute values,
# so we have to implement this stupid little helper function to avoid hurting
# its feelings:
defp maybe_add_attribute_values(options, []) do
options
end
defp maybe_add_attribute_values(options, attribute_values) do
[expression_attribute_values: attribute_values] ++ options
end
defp construct_update_expression(updates, params, opts) do
to_set = extract_update_params(updates, :set, params)
to_push = extract_update_params(updates, :push, params)
to_pull = extract_update_params(updates, :pull, params)
to_add = extract_update_params(opts, :add) ++ extract_update_params(updates, :inc, params)
to_delete = extract_update_params(opts, :delete)
{construct_add_statement(to_add, opts) <> " " <>
construct_delete_statement(to_delete, opts) |> String.trim(),
to_add ++ to_delete,
[set: to_set, push: to_push, pull: to_pull]}
end
# The update callback supplies fields in the parameters,
# whereas update_all includes a more complicated updates
# structure
defp construct_update_expression(fields, opts) do
set_statement = construct_set_statement(fields, opts)
rem_statement = construct_remove_statement(fields, opts)
String.trim("#{set_statement} #{rem_statement}")
end
# fields::[{:field, val}]
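# For illustration (with :remove_nil_fields_on_update unset), the call
# construct_set_statement([name: "Tom"], []) builds "SET #name=:name";
# the value itself travels separately in expression_attribute_values.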
defp construct_set_statement(fields, opts) do
remove_rather_than_set_to_null = opts[:remove_nil_fields] || Application.get_env(:ecto_adapters_dynamodb, :remove_nil_fields_on_update) == true
set_clauses = for {key, val} <- fields, not (is_nil(val) and remove_rather_than_set_to_null) do
key_str = Atom.to_string(key)
"##{key_str}=:#{key_str}"
end ++
  case opts[:push] do
nil -> []
push_list ->
for {key, _val} <- push_list do
key_str = Atom.to_string(key)
if Enum.member?(maybe_list(opts[:prepend_to_list]), key),
do: "##{key_str} = list_append(:#{key_str}, ##{key_str})",
else: "##{key_str} = list_append(##{key_str}, :#{key_str})"
end
end
case set_clauses do
[] ->
""
_ ->
"SET " <> Enum.join(set_clauses, ", ")
end
end
defp construct_remove_statement(fields, opts) do
remove_rather_than_set_to_null = opts[:remove_nil_fields] || Application.get_env(:ecto_adapters_dynamodb, :remove_nil_fields_on_update) == true
remove_clauses =
if remove_rather_than_set_to_null do
for {key, val} <- fields, is_nil(val), do: "##{Atom.to_string(key)}"
else
[]
end ++
  # Ecto :pull update can be emulated provided
  # we are given an index to remove in opts[:pull_indexes]
  cond do
!opts[:pull_indexes] or (Keyword.values(opts[:pull_indexes]) |> List.flatten) == [] ->
[]
opts[:pull] == nil ->
[]
true ->
for {key, _val} <- opts[:pull] do
key_str = Atom.to_string(key)
Enum.map(opts[:pull_indexes][key], fn index -> "##{key_str}[#{index}]" end) |> Enum.join(", ")
end
end
case remove_clauses do
[] ->
""
_ ->
"REMOVE " <> Enum.join(remove_clauses, ", ")
end
end
# fields::[{:field, val}]
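# For illustration, construct_add_statement([count: 1], []) builds
# "ADD #count :count"; the value travels in expression_attribute_values.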
defp construct_add_statement(fields, _opts) do
add_clauses = for {key, _val} <- fields do
key_str = Atom.to_string(key)
"##{key_str} :#{key_str}"
end
case add_clauses do
[] ->
""
_ ->
"ADD " <> Enum.join(add_clauses, ", ")
end
end
defp construct_delete_statement(fields, _opts) do
delete_clauses = for {key, _val} <- fields do
key_str = Atom.to_string(key)
"##{key_str} :#{key_str}"
end
case delete_clauses do
[] ->
""
_ ->
"DELETE " <> Enum.join(delete_clauses, ", ")
end
end
defp validate_where_clauses!(query) do
for w <- query.wheres do
validate_where_clause! w
end
end
defp validate_where_clause!(%BooleanExpr{expr: {op, _, _}}) when op in [:==, :<, :>, :<=, :>=, :in], do: :ok
defp validate_where_clause!(%BooleanExpr{expr: {logical_op, _, _}}) when logical_op in [:and, :or], do: :ok
defp validate_where_clause!(%BooleanExpr{expr: {:is_nil, _, _}}), do: :ok
defp validate_where_clause!(%BooleanExpr{expr: {:fragment, _, _}}), do: :ok
defp validate_where_clause!(unsupported), do: error "unsupported where clause: #{inspect unsupported}"
# We are parsing a nested, recursive structure of the general type:
# %{:logical_op, list_of_clauses} | %{:conditional_op, field_and_value}
defp extract_lookup_fields([], _params, lookup_fields), do: lookup_fields
defp extract_lookup_fields([query | queries], params, lookup_fields) do
# A logical operator tuple does not always have a parent 'expr' key.
maybe_extract_from_expr = case query do
%BooleanExpr{expr: expr} -> expr
# TODO: could there be other cases?
_ -> query
end
case maybe_extract_from_expr do
# A conditional operator points to a field and a value
{op, _, [left, right]} when op in [:==, :<, :>, :<=, :>=, :in] ->
{field, value} = get_op_clause(left, right, params)
updated_lookup_fields =
case List.keyfind(lookup_fields, field, 0) do
# we assume at most two ops can be applied to one field, otherwise this might throw an error
{field, {old_val, old_op}} ->
List.keyreplace(lookup_fields, field, 0, {field, {[value, old_val], [op, old_op]}})
_ -> [{field, {value, op}} | lookup_fields]
end
extract_lookup_fields(queries, params, updated_lookup_fields)
# Logical operator expressions have more than one op clause
# We are matching queries of the type: 'from(p in Person, where: p.email == "[email protected]" and p.first_name == "George")'
# But not of the type: 'from(p in Person, where: [email: "[email protected]", first_name: "George"])'
#
# A logical operator is a member of a list
{logical_op, _, clauses} when logical_op in [:and, :or] ->
deeper_lookup_fields = extract_lookup_fields(clauses, params, [])
extract_lookup_fields(queries, params, [{logical_op, deeper_lookup_fields} | lookup_fields])
{:fragment, _, raw_expr_mixed_list} ->
parsed_fragment = parse_raw_expr_mixed_list(raw_expr_mixed_list, params)
extract_lookup_fields(queries, params, [parsed_fragment | lookup_fields])
# We perform a post-query is_nil filter on indexed fields and have DynamoDB filter
# for nil non-indexed fields (although post-query nil-filters on (missing) indexed
# attributes could only find matches when the attributes are not the range part of
# a queried partition key (hash part) since those would not return the sought records).
{:is_nil, _, [arg]} ->
{{:., _, [_, field_name]}, _, _} = arg
# We give the nil value a string, "null", since it will be mapped as a DynamoDB attribute_expression_value
extract_lookup_fields(queries, params, [{to_string(field_name), {"null", :is_nil}} | lookup_fields])
_ -> extract_lookup_fields(queries, params, lookup_fields)
end
end
# Specific (as opposed to generalized) parsing for Ecto :fragment - the only use for it
# so far is 'between' which is the only way to query 'between' on an indexed field since
# those accept only single conditions.
#
# Example with values as strings: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between ", expr: "person:a", raw: " and ", expr: "person:f", raw: ""]
#
# Example with values as part of the string itself: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between person:a and person:f"]
#
# Example with values in params: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between ", expr: {:^, [], [0]}, raw: " and ", expr: {:^, [], [1]}, raw: ""]
#
defp parse_raw_expr_mixed_list(raw_expr_mixed_list, params) do
# group the expression into fields, values, and operators,
# only supporting the example with values in params
case raw_expr_mixed_list do
# between
[raw: _, expr: {{:., [], [{:&, [], [0]}, field_atom]}, [], []}, raw: between_str, expr: {:^, [], [idx1]}, raw: and_str, expr: {:^, [], [idx2]}, raw: _] ->
if not (Regex.match?(~r/^\s*between\s*and\s*$/i, between_str <> and_str)), do:
parse_raw_expr_mixed_list_error(raw_expr_mixed_list)
{to_string(field_atom), {[Enum.at(params, idx1), Enum.at(params, idx2)], :between}}
# begins_with
[raw: begins_with_str, expr: {{:., [], [{:&, [], [0]}, field_atom]}, [], []}, raw: comma_str, expr: {:^, [], [idx]}, raw: closing_parenthesis_str] ->
if not (Regex.match?(~r/^\s*begins_with\(\s*,\s*\)\s*$/i, begins_with_str <> comma_str <> closing_parenthesis_str)), do:
parse_raw_expr_mixed_list_error(raw_expr_mixed_list)
{to_string(field_atom), {Enum.at(params, idx), :begins_with}}
_ -> parse_raw_expr_mixed_list_error(raw_expr_mixed_list)
end
end
defp parse_raw_expr_mixed_list_error(raw_expr_mixed_list), do:
raise "#{inspect __MODULE__}.parse_raw_expr_mixed_list parse error. We currently only support the Ecto fragments of the form, 'where: fragment(\"? between ? and ?\", FIELD_AS_VARIABLE, VALUE_AS_VARIABLE, VALUE_AS_VARIABLE)'; and 'where: fragment(\"begins_with(?, ?)\", FIELD_AS_VARIABLE, VALUE_AS_VARIABLE)'. Received: #{inspect raw_expr_mixed_list}"
defp get_op_clause(left, right, params) do
field = left |> get_field |> Atom.to_string
value = get_value(right, params)
{field, value}
end
defp get_field({{:., _, [{:&, _, [0]}, field]}, _, []}), do: field
defp get_field(other_clause) do
error "Unsupported where clause, left hand side: #{other_clause}"
end
defp get_value({:^, _, [idx]}, params), do: Enum.at(params, idx)
# Handle queries with interpolated values
# ex. Repo.all from i in Item, where: i.id in ^item_ids
defp get_value({:^, _, _}, params), do: params
# Handle .all(query) QUERIES
defp get_value(other_clause, _params), do: other_clause
defp error(msg) do
raise ArgumentError, message: msg
end
defp extract_select_fields(%Ecto.Query.SelectExpr{expr: expr} = _) do
case expr do
{_, _, [0]} ->
[]
{{:., _, [{_, _, _}, field]}, _, _} ->
[field]
{:{}, _, clauses} ->
for {{_, _, [{_, _, _}, field]}, _, _} <- clauses, do: field
end
end
# Decodes maps and datetime, seemingly unhandled by ExAws Dynamo decoder
# (timestamps() corresponds with :naive_datetime)
defp custom_decode(item, model, select) do
selected_fields = extract_select_fields(select)
case selected_fields do
[] ->
[Enum.reduce(model.__schema__(:fields), item, fn (field, acc) ->
Map.update!(acc, field, fn val -> decode_type(model.__schema__(:type, field), val) end)
end)]
fields ->
for field <- fields, do: decode_type(model.__schema__(:type, field), Map.get(item, field))
end
end
defp decode_item(item, model, sources, select) do
item = Enum.reduce(item, %{}, fn {k, v}, acc ->
key = to_string(Map.get(sources, String.to_atom(k)))
Map.put(acc, key, v)
end)
%{"Item" => item}
|> Dynamo.decode_item(as: model)
|> custom_decode(model, select)
end
# This is used slightly differently
# when handling select in custom_decode/3
defp decode_type(type, val) do
if is_nil val do
val
else
case type do
:utc_datetime ->
{:ok, dt, _offset} = DateTime.from_iso8601(val)
dt
:naive_datetime ->
NaiveDateTime.from_iso8601!(val)
{:embed, _} ->
decode_embed(type, val)
t when t in [Ecto.Adapters.DynamoDB.DynamoDBSet, MapSet] ->
MapSet.new(val)
_ -> val
end
end
end
defp decode_embed(type, val) do
case Ecto.Adapters.SQL.load_embed(type, val) do
{:ok, decoded_value} ->
decoded_value
:error ->
ecto_dynamo_log(:info, "#{inspect __MODULE__}.decode_embed: failed to decode embedded value: #{inspect val}")
nil
end
end
# We found one instance where DynamoDB's error message could
# be more instructive - when trying to set an indexed field to something
# other than a string or number - so we're adding a more helpful message.
# The parameter, 'params', has the type %{table: :string, records: [:map]}
defp handle_error!(ex_aws_request_result, params) do
case ex_aws_request_result do
{:ok, result} -> result
{:error, {error_name, _} = error} ->
# Check for inappropriate insert into indexed field
indexed_fields = Ecto.Adapters.DynamoDB.Info.indexed_attributes(params.table)
# Repo.insert_all can present multiple records at once
forbidden_insert_on_indexed_field = Enum.reduce(params.records, false, fn (record, acc) ->
acc || Enum.any?(record, fn {field, val} ->
[type] = ExAws.Dynamo.Encoder.encode(val) |> Map.keys
# Ecto does not convert empty strings to nil before passing them to Repo.update_all or
# Repo.insert_all. DynamoDB provides an instructive message during an update (forwarded by ExAws),
# but less so for batch_write_item, so we catch the empty string as well.
# Dynamo does not allow insertion of empty strings in any case.
(Enum.member?(indexed_fields, to_string(field)) and type not in ["S", "N"]) || val == ""
end)
end)
cond do
# we use this error to check if an update or delete record does not exist
error_name == "ConditionalCheckFailedException" ->
{:error, error_name}
forbidden_insert_on_indexed_field ->
raise "The following request error could be related to attempting to insert an empty string or attempting to insert a type other than a string or number on an indexed field. Indexed fields: #{inspect indexed_fields}. Records: #{inspect params.records}.\n\nExAws Request Error! #{inspect error}"
true ->
raise ExAws.Error, message: "ExAws Request Error! #{inspect error}"
end
end
end
@doc """
Logs message to console and optionally to file. Log levels, colours and file path may be set in configuration (details in README.md).
"""
def ecto_dynamo_log(level, message, attributes \\ %{}, opts \\ []) do
depth = opts[:depth] || 4
colours = Application.get_env(:ecto_adapters_dynamodb, :log_colours)
d = DateTime.utc_now
formatted_message = "#{d.year}-#{d.month}-#{d.day} #{d.hour}:#{d.minute}:#{d.second} UTC [Ecto dynamo #{level}] #{message}"
{:ok, log_message} = Poison.encode(%{message: formatted_message, attributes: chisel(attributes, depth)})
log_path = Application.get_env(:ecto_adapters_dynamodb, :log_path)
log_levels = Application.get_env(:ecto_adapters_dynamodb, :log_levels) || [:info]
if level in log_levels do
if Application.get_env(:ecto_adapters_dynamodb, :log_in_colour) do
IO.ANSI.format([colours[level] || :normal, log_message], true) |> IO.puts
else
log_message |> IO.puts
end
if String.valid?(log_path) and Regex.match?(~r/\S/, log_path), do: log_pipe(log_path, log_message)
end
end
defp chisel(str, _depth) when is_binary(str), do: str
defp chisel(num, _depth) when is_number(num), do: num
defp chisel(any, _depth) when (not is_map(any) and not is_list(any)), do: inspect any
defp chisel(_, 0), do: "beyond_log_depth"
defp chisel(%{__struct__: _} = struct, _depth), do: inspect struct
defp chisel(map, depth) when is_map(map) do
for {k, v} <- map, into: %{}, do: {k, chisel(v, depth - 1)}
end
defp chisel(list, depth) when is_list(list) do
for e <- list, do: chisel(e, depth - 1)
#Stream.with_index(list) |> Enum.reduce(%{}, fn({v,k}, acc)-> Map.put(acc, k, chisel(v, depth - 1)) end)
end
defp log_pipe(path, str) do
{:ok, file} = File.open(path, [:append])
IO.binwrite(file, str)
File.close(file)
end
end
| 44.655087 | 354 | 0.673075 |
f7c7968193eff94be73383069022eef7ce2c2a91 | 3,750 | ex | Elixir | lib/exvault/kv1.ex | praekeltfoundation/exvault | 338b05855e40dd5547b105a608e6bac39f849e78 | [
"BSD-3-Clause"
] | null | null | null | lib/exvault/kv1.ex | praekeltfoundation/exvault | 338b05855e40dd5547b105a608e6bac39f849e78 | [
"BSD-3-Clause"
] | 61 | 2019-02-12T10:05:29.000Z | 2020-07-27T09:50:15.000Z | lib/exvault/kv1.ex | praekeltfoundation/exvault | 338b05855e40dd5547b105a608e6bac39f849e78 | [
"BSD-3-Clause"
] | 1 | 2019-10-27T10:01:43.000Z | 2019-10-27T10:01:43.000Z | defmodule ExVault.KV1 do
@moduledoc """
A very thin wrapper over the basic operations for working with KV v1 data.
Construct a *backend*--a client paired with the mount path for the `kv`
version 1 secrets engine it interacts with--using the `ExVault.KV1.new/2`
function.
Each of the operations in this module has a variant that operates on a client
and mount path, and another that operates on a backend.
See the [Vault documentation](https://www.vaultproject.io/docs/secrets/kv/kv-v1.html)
for the secrets engine.
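## Example

A minimal sketch; `client` is assumed to be an `ExVault.client()` built
elsewhere with this library, and the `{:ok, _}` shapes are illustrative:

    backend = ExVault.KV1.new(client, "secret")
    {:ok, _} = ExVault.KV1.write(backend, "myapp/config", %{"api_key" => "abc123"})
    {:ok, _} = ExVault.KV1.read(backend, "myapp/config")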
"""
defstruct [:client, :mount]
@type t :: %__MODULE__{
client: ExVault.client(),
mount: String.t()
}
@doc """
Create a new backend for the `kv` version 1 secrets engine.
Params:
* `client` the `ExVault` client.
* `mount` the mount path for the `kv` secrets engine.
"""
@spec new(ExVault.client(), String.t()) :: t()
def new(client, mount), do: %__MODULE__{client: client, mount: mount}
@doc """
Read the value of a key.
Params:
* `client` the `ExVault` client.
* `mount` the mount path for the `kv` secrets engine.
* `path` the path to the key in the secrets engine.
"""
@spec read(ExVault.client(), String.t(), String.t()) :: ExVault.response()
def read(client, mount, path), do: ExVault.read(client, "#{mount}/#{path}")
@doc """
Read the value of a key.
Params:
* `backend` the `ExVault.KV1` backend.
* `path` the path to the key in the secrets engine.
"""
@spec read(t(), String.t()) :: ExVault.response()
def read(backend, path), do: read(backend.client, backend.mount, path)
@doc """
Write the value of a key.
Params:
* `client` the `ExVault` client.
* `mount` the mount path for the `kv` secrets engine.
* `path` the path to the key in the secrets engine.
* `data` the data to write as a map of string keys to string values.
"""
@spec write(ExVault.client(), String.t(), String.t(), %{String.t() => String.t()}) ::
ExVault.response()
def write(client, mount, path, data), do: ExVault.write(client, "#{mount}/#{path}", data)
@doc """
Write the value of a key.
Params:
* `backend` the `ExVault.KV1` backend.
* `path` the path to the key in the secrets engine.
* `data` the data to write as a map of string keys to string values.
"""
@spec write(t(), String.t(), any()) :: ExVault.response()
def write(backend, path, data), do: write(backend.client, backend.mount, path, data)
@doc """
Delete a key.
Params:
* `client` the `ExVault` client.
* `mount` the mount path for the `kv` secrets engine.
* `path` the path to the key in the secrets engine.
"""
@spec delete(ExVault.client(), String.t(), String.t()) :: ExVault.response()
def delete(client, mount, path), do: ExVault.delete(client, "#{mount}/#{path}")
@doc """
Delete a key.
Params:
* `backend` the `ExVault.KV1` backend.
* `path` the path to the key in the secrets engine.
"""
@spec delete(t(), String.t()) :: ExVault.response()
def delete(backend, path), do: delete(backend.client, backend.mount, path)
@doc """
List the keys.
Params:
* `client` the ExVault client.
* `mount` the mount path for the `kv` secrets engine.
* `path` the path to the key or key prefix in the secrets engine.
"""
@spec list(ExVault.client(), String.t(), String.t()) :: ExVault.response()
def list(client, mount, path), do: ExVault.list(client, "#{mount}/#{path}")
@doc """
List the keys.
Params:
* `backend` the `ExVault.KV1` backend.
* `path` the path to the key or key prefix in the secrets engine.
"""
@spec list(t(), String.t()) :: ExVault.response()
def list(backend, path), do: list(backend.client, backend.mount, path)
end
| 31.25 | 91 | 0.642133 |
f7c798f4636659e59e74942b0f14fac3705c3ad4 | 118 | exs | Elixir | test/blackvue_test.exs | johnhamelink/blackvue | 4c36d18adf0d97d63fd212fc6b38cfc3ca856efe | [
"MIT"
] | 15 | 2018-03-06T16:39:23.000Z | 2021-05-27T06:11:11.000Z | test/blackvue_test.exs | johnhamelink/blackvue | 4c36d18adf0d97d63fd212fc6b38cfc3ca856efe | [
"MIT"
] | 1 | 2018-06-25T14:38:18.000Z | 2018-06-25T14:38:18.000Z | test/blackvue_test.exs | johnhamelink/blackvue | 4c36d18adf0d97d63fd212fc6b38cfc3ca856efe | [
"MIT"
] | 2 | 2019-10-07T22:07:23.000Z | 2020-01-23T03:18:44.000Z | defmodule BlackvueTest do
use ExUnit.Case
doctest Blackvue
test "the truth" do
assert 1 + 1 == 2
end
end
| 13.111111 | 25 | 0.677966 |
f7c79d75d7df505a9876f372a5b4f0d470a5902c | 2,559 | ex | Elixir | clients/people/lib/google_api/people/v1/model/phone_number.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/people/lib/google_api/people/v1/model/phone_number.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/people/lib/google_api/people/v1/model/phone_number.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.People.V1.Model.PhoneNumber do
@moduledoc """
A person's phone number.
## Attributes
- canonicalForm (String.t): The read-only canonicalized [ITU-T E.164](https://law.resource.org/pub/us/cfr/ibr/004/itu-t.E.164.1.2008.pdf) form of the phone number. Defaults to: `null`.
- formattedType (String.t): The read-only type of the phone number translated and formatted in the viewer's account locale or the `Accept-Language` HTTP header locale. Defaults to: `null`.
- metadata (FieldMetadata): Metadata about the phone number. Defaults to: `null`.
- type (String.t): The type of the phone number. The type can be custom or one of these predefined values: * `home` * `work` * `mobile` * `homeFax` * `workFax` * `otherFax` * `pager` * `workMobile` * `workPager` * `main` * `googleVoice` * `other` Defaults to: `null`.
- value (String.t): The phone number. Defaults to: `null`.
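## Example

An illustrative value (the number and label are made up):

    %GoogleApi.People.V1.Model.PhoneNumber{value: "+1 555 0100", type: "work"}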
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:canonicalForm => any(),
:formattedType => any(),
:metadata => GoogleApi.People.V1.Model.FieldMetadata.t(),
:type => any(),
:value => any()
}
field(:canonicalForm)
field(:formattedType)
field(:metadata, as: GoogleApi.People.V1.Model.FieldMetadata)
field(:type)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.People.V1.Model.PhoneNumber do
def decode(value, options) do
GoogleApi.People.V1.Model.PhoneNumber.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.People.V1.Model.PhoneNumber do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.65 | 390 | 0.704963 |
f7c7a284f21f24661d458afada10e7717329f832 | 1,920 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/volume.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/volume.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/volume.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1beta1.Model.Volume do
@moduledoc """
Volume describes a Docker container volume which is mounted into build steps in order to persist files across build step execution.
## Attributes
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the volume to mount. Volume names must be unique per build step and must be valid names for Docker volumes. Each named volume must be used by at least two build steps.
* `path` (*type:* `String.t`, *default:* `nil`) - Path at which to mount the volume. Paths must be absolute and cannot conflict with other volume paths on the same build step or with certain reserved volume paths.
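## Example

An illustrative value (the names are made up):

    %GoogleApi.ContainerAnalysis.V1beta1.Model.Volume{name: "build-cache", path: "/cache"}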
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:name => String.t() | nil,
:path => String.t() | nil
}
field(:name)
field(:path)
end
defimpl Poison.Decoder, for: GoogleApi.ContainerAnalysis.V1beta1.Model.Volume do
def decode(value, options) do
GoogleApi.ContainerAnalysis.V1beta1.Model.Volume.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ContainerAnalysis.V1beta1.Model.Volume do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.4 | 229 | 0.735417 |
f7c7b60e2e1d99eef1f9f4f8263d5615dca9f2e0 | 15,807 | ex | Elixir | lib/livebook/runtime/evaluator.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/evaluator.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/evaluator.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Runtime.Evaluator do
@moduledoc false
# A process responsible for evaluating notebook code.
#
# Evaluator receives an evaluation request and synchronously
# evaluates the given code within itself (rather than spawning
# a separate process). It stores the resulting binding and env
# in its state (under a specific reference).
#
# Storing the binding in the same process that evaluates the
# code is essential, because otherwise we would have to send it
# to another process, which means copying a potentially massive
# amount of data.
#
# Also, note that this process is intentionally not a GenServer,
# because during evaluation it may receive arbitrary messages
# and we want to keep them in the inbox, while a GenServer would
# always consume them.
require Logger
alias Livebook.Runtime.Evaluator
@type t :: %{pid: pid(), ref: reference()}
@type state :: %{
evaluator_ref: reference(),
formatter: module(),
io_proxy: pid(),
send_to: pid(),
runtime_broadcast_to: pid(),
object_tracker: pid(),
contexts: %{ref() => context()},
initial_context: context()
}
@typedoc """
An evaluation context.
Each evaluation produces a new context, which may be optionally
used by a later evaluation.
"""
@type context :: %{binding: Code.binding(), env: Macro.Env.t(), id: binary()}
@typedoc """
A term used to identify evaluation.
"""
@type ref :: term()
@typedoc """
An evaluation result, either the return value or an error if
raised.
"""
@type evaluation_result ::
{:ok, result :: any()}
| {:error, Exception.kind(), error :: any(), Exception.stacktrace()}
# We store evaluation envs in the process dictionary, so that we
# can build intellisense context without asking the evaluator
@env_key :evaluation_env
@initial_env_key :initial_env
@doc """
Starts an evaluator.
## Options
* `:send_to` - the process to send evaluation messages to. Required
* `:object_tracker` - a pid of `Livebook.Runtime.Evaluator.ObjectTracker`.
Required
* `:runtime_broadcast_to` - the process to send runtime broadcast
events to. Defaults to the value of `:send_to`
* `:formatter` - a module implementing the `Livebook.Runtime.Evaluator.Formatter`
behaviour, used for transforming evaluation result before sending
it to the client. Defaults to identity
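## Example

A hypothetical call; `object_tracker_pid` is assumed to be a started
`Livebook.Runtime.Evaluator.ObjectTracker` process:

    {:ok, _pid, evaluator} =
      Evaluator.start_link(send_to: self(), object_tracker: object_tracker_pid)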
"""
@spec start_link(keyword()) :: {:ok, pid(), t()} | {:error, term()}
def start_link(opts \\ []) do
case :proc_lib.start_link(__MODULE__, :init, [opts]) do
{:error, error} -> {:error, error}
evaluator -> {:ok, evaluator.pid, evaluator}
end
end
@doc """
Computes the memory usage for the current node.
"""
@spec memory() :: Livebook.Runtime.runtime_memory()
def memory() do
%{
total: total,
processes: processes,
atom: atom,
binary: binary,
code: code,
ets: ets
} = Map.new(:erlang.memory())
%{
total: total,
processes: processes,
atom: atom,
binary: binary,
code: code,
ets: ets,
other: total - processes - atom - binary - code - ets
}
end
@doc """
Asynchronously parses and evaluates the given code.
Any exceptions are captured and transformed into an error
result.
The resulting context (binding and env) is stored under `ref`.
Any subsequent calls may specify `base_ref` pointing to a
previous evaluation, in which case the corresponding context
is used as the entry point for evaluation.
The evaluation result is transformed with the configured
formatter and sent to the configured client (see `start_link/1`).
See `Livebook.Runtime.evaluate_code/5` for the messages format
and the list of available options.
## Options
* `:on_finish` - a function to run when the evaluation is
finished. The function receives `t:evaluation_result/0`
as an argument
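## Example

A sketch continuing from the `start_link/1` example; the refs are arbitrary
terms chosen by the caller, and responses arrive as
`{:runtime_evaluation_response, ref, output, metadata}` messages in the
`:send_to` process:

    Evaluator.evaluate_code(evaluator, "x = 1", :cell_1)
    Evaluator.evaluate_code(evaluator, "x + 1", :cell_2, :cell_1)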
"""
@spec evaluate_code(t(), String.t(), ref(), ref() | nil, keyword()) :: :ok
def evaluate_code(evaluator, code, ref, base_ref \\ nil, opts \\ []) when ref != nil do
cast(evaluator, {:evaluate_code, code, ref, base_ref, opts})
end
@doc """
Fetches the evaluation context (binding and env) for the given
evaluation reference.
## Options
* `:cached_id` - id of context that the sender may already have,
if it matches the fetched context, `{:error, :not_modified}`
is returned instead
"""
@spec fetch_evaluation_context(t(), ref(), keyword()) ::
{:ok, context()} | {:error, :not_modified}
def fetch_evaluation_context(evaluator, ref, opts \\ []) do
cached_id = opts[:cached_id]
call(evaluator, {:fetch_evaluation_context, ref, cached_id})
end
@doc """
Fetches an evaluation context from `source_evaluator` and configures
it as the initial context for `evaluator`.
The process dictionary is also copied to match `source_evaluator`.
"""
@spec initialize_from(t(), t(), ref()) :: :ok
def initialize_from(evaluator, source_evaluator, source_evaluation_ref) do
call(evaluator, {:initialize_from, source_evaluator, source_evaluation_ref})
end
@doc """
Removes the evaluation identified by `ref` from history.
The corresponding context is removed and garbage collected.
"""
@spec forget_evaluation(t(), ref()) :: :ok
def forget_evaluation(evaluator, ref) do
cast(evaluator, {:forget_evaluation, ref})
end
@doc """
Returns an empty intellisense context.
"""
@spec intellisense_context() :: Livebook.Intellisense.intellisense_context()
def intellisense_context() do
# TODO: Use Code.env_for_eval and eval_quoted_with_env on Elixir v1.14+
env = :elixir.env_for_eval([])
map_binding = fn fun -> fun.([]) end
%{env: env, map_binding: map_binding}
end
@doc """
Builds intellisense context from the given evaluation.
"""
@spec intellisense_context(t(), ref()) :: Livebook.Intellisense.intellisense_context()
def intellisense_context(evaluator, ref) do
{:dictionary, dictionary} = Process.info(evaluator.pid, :dictionary)
env =
find_in_dictionary(dictionary, {@env_key, ref}) ||
find_in_dictionary(dictionary, @initial_env_key)
map_binding = fn fun -> map_binding(evaluator, ref, fun) end
%{env: env, map_binding: map_binding}
end
defp find_in_dictionary(dictionary, key) do
Enum.find_value(dictionary, fn
{^key, value} -> value
_pair -> nil
end)
end
# Applies the given function to evaluation binding
defp map_binding(evaluator, ref, fun) do
call(evaluator, {:map_binding, ref, fun})
end
@doc """
Runs the given function with binding and env of the given evaluation.
This function runs within the evaluator process, so that no data
is copied between processes, unless explicitly sent.
"""
@spec peek_context(t(), ref(), (context() -> any())) :: :ok
def peek_context(evaluator, ref, fun) do
cast(evaluator, {:peek_context, ref, fun})
end
defp cast(evaluator, message) do
send(evaluator.pid, {:cast, evaluator.ref, message})
:ok
end
defp call(evaluator, message) do
call_ref = Process.monitor(evaluator.pid)
send(evaluator.pid, {:call, evaluator.ref, self(), call_ref, message})
receive do
{^call_ref, reply} ->
reply
{:DOWN, ^call_ref, _, _, reason} ->
exit({reason, {__MODULE__, :call, [evaluator, message]}})
end
end
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :worker,
restart: :temporary
}
end
def init(opts) do
send_to = Keyword.fetch!(opts, :send_to)
runtime_broadcast_to = Keyword.get(opts, :runtime_broadcast_to, send_to)
object_tracker = Keyword.fetch!(opts, :object_tracker)
formatter = Keyword.get(opts, :formatter, Evaluator.IdentityFormatter)
{:ok, io_proxy} =
Evaluator.IOProxy.start_link(self(), send_to, runtime_broadcast_to, object_tracker)
# Use the dedicated IO device as the group leader, so that
# it intercepts all :stdio requests and also handles Livebook
# specific ones
Process.group_leader(self(), io_proxy)
evaluator_ref = make_ref()
evaluator = %{pid: self(), ref: evaluator_ref}
context = initial_context()
Process.put(@initial_env_key, context.env)
state = %{
evaluator_ref: evaluator_ref,
formatter: formatter,
io_proxy: io_proxy,
send_to: send_to,
runtime_broadcast_to: runtime_broadcast_to,
object_tracker: object_tracker,
contexts: %{},
initial_context: context
}
:proc_lib.init_ack(evaluator)
loop(state)
end
defp loop(%{evaluator_ref: evaluator_ref} = state) do
receive do
{:call, ^evaluator_ref, pid, ref, message} ->
{:reply, reply, state} = handle_call(message, pid, state)
send(pid, {ref, reply})
loop(state)
{:cast, ^evaluator_ref, message} ->
{:noreply, state} = handle_cast(message, state)
loop(state)
end
end
defp initial_context() do
# TODO: Use Code.env_for_eval and eval_quoted_with_env on Elixir v1.14+
env = :elixir.env_for_eval([])
%{binding: [], env: env, id: random_id()}
end
defp handle_cast({:evaluate_code, code, ref, base_ref, opts}, state) do
Evaluator.IOProxy.configure(state.io_proxy, ref)
Evaluator.ObjectTracker.remove_reference(state.object_tracker, {self(), ref})
context = get_context(state, base_ref)
file = Keyword.get(opts, :file, "nofile")
context = put_in(context.env.file, file)
start_time = System.monotonic_time()
{result_context, result, code_error} =
case eval(code, context.binding, context.env) do
{:ok, value, binding, env} ->
result_context = %{binding: binding, env: env, id: random_id()}
result = {:ok, value}
{result_context, result, nil}
{:error, kind, error, stacktrace, code_error} ->
result = {:error, kind, error, stacktrace}
{context, result, code_error}
end
evaluation_time_ms = get_execution_time_delta(start_time)
state = put_context(state, ref, result_context)
Evaluator.IOProxy.flush(state.io_proxy)
Evaluator.IOProxy.clear_input_cache(state.io_proxy)
output = state.formatter.format_result(result)
metadata = %{
evaluation_time_ms: evaluation_time_ms,
memory_usage: memory(),
code_error: code_error
}
send(state.send_to, {:runtime_evaluation_response, ref, output, metadata})
if on_finish = opts[:on_finish] do
on_finish.(result)
end
:erlang.garbage_collect(self())
{:noreply, state}
end
defp handle_cast({:forget_evaluation, ref}, state) do
state = delete_context(state, ref)
Evaluator.ObjectTracker.remove_reference(state.object_tracker, {self(), ref})
:erlang.garbage_collect(self())
{:noreply, state}
end
defp handle_cast({:peek_context, ref, fun}, state) do
context = get_context(state, ref)
fun.(context)
{:noreply, state}
end
defp handle_call({:fetch_evaluation_context, ref, cached_id}, _from, state) do
context = get_context(state, ref)
reply =
if context.id == cached_id do
{:error, :not_modified}
else
{:ok, context}
end
{:reply, reply, state}
end
defp handle_call({:initialize_from, source_evaluator, source_evaluation_ref}, _from, state) do
state =
case Evaluator.fetch_evaluation_context(
source_evaluator,
source_evaluation_ref,
cached_id: state.initial_context.id
) do
{:ok, context} ->
# If the context changed, mirror the process dictionary again
copy_process_dictionary_from(source_evaluator)
Process.put(@initial_env_key, context.env)
put_in(state.initial_context, context)
{:error, :not_modified} ->
state
end
{:reply, :ok, state}
end
defp handle_call({:map_binding, ref, fun}, _from, state) do
context = get_context(state, ref)
result = fun.(context.binding)
{:reply, result, state}
end
defp put_context(state, ref, context) do
Process.put({@env_key, ref}, context.env)
put_in(state.contexts[ref], context)
end
defp delete_context(state, ref) do
Process.delete({@env_key, ref})
{_, state} = pop_in(state.contexts[ref])
state
end
defp get_context(state, ref) do
Map.get_lazy(state.contexts, ref, fn -> state.initial_context end)
end
defp eval(code, binding, env) do
try do
quoted = Code.string_to_quoted!(code, file: env.file)
# TODO: Use Code.eval_quoted_with_env/3 on Elixir v1.14
{value, binding, env} = :elixir.eval_quoted(quoted, binding, env)
# TODO: Remove this line on Elixir v1.14 as binding propagates to env correctly
{_, binding, env} = :elixir.eval_forms(:ok, binding, env)
{:ok, value, binding, env}
catch
kind, error ->
stacktrace = prune_stacktrace(__STACKTRACE__)
code_error =
if code_error?(error) and (error.file == env.file and error.file != "nofile") do
%{line: error.line, description: error.description}
else
nil
end
{:error, kind, error, stacktrace, code_error}
end
end
defp code_error?(%SyntaxError{}), do: true
defp code_error?(%TokenMissingError{}), do: true
defp code_error?(%CompileError{}), do: true
defp code_error?(_error), do: false
# Adapted from https://github.com/elixir-lang/elixir/blob/1c1654c88adfdbef38ff07fc30f6fbd34a542c07/lib/iex/lib/iex/evaluator.ex#L355-L372
# TODO: Remove else branch once we depend on the versions below
if System.otp_release() >= "25" and Version.match?(System.version(), "~> 1.14-dev") do
defp prune_stacktrace(stack) do
stack
|> Enum.reverse()
|> Enum.drop_while(&(elem(&1, 0) != :elixir_eval))
|> Enum.reverse()
end
else
@elixir_internals [:elixir, :elixir_expand, :elixir_compiler, :elixir_module] ++
[:elixir_clauses, :elixir_lexical, :elixir_def, :elixir_map] ++
[:elixir_erl, :elixir_erl_clauses, :elixir_erl_pass]
defp prune_stacktrace(stacktrace) do
# The order in which each drop_while is listed is important.
# For example, the user may call Code.eval_string/2 in their
# code and if there is an error we should not remove erl_eval
# and eval_bits information from the user stacktrace.
stacktrace
|> Enum.reverse()
|> Enum.drop_while(&(elem(&1, 0) == :proc_lib))
|> Enum.drop_while(&(elem(&1, 0) == :gen_server))
|> Enum.drop_while(&(elem(&1, 0) == __MODULE__))
|> Enum.drop_while(&(elem(&1, 0) == :elixir))
|> Enum.drop_while(&(elem(&1, 0) in [:erl_eval, :eval_bits]))
|> Enum.reverse()
|> Enum.reject(&(elem(&1, 0) in @elixir_internals))
end
end
defp random_id() do
:crypto.strong_rand_bytes(20) |> Base.encode32(case: :lower)
end
defp copy_process_dictionary_from(source_evaluator) do
{:dictionary, dictionary} = Process.info(source_evaluator.pid, :dictionary)
for {key, value} <- dictionary, not internal_dictionary_key?(key) do
Process.put(key, value)
end
end
defp internal_dictionary_key?("$" <> _), do: true
defp internal_dictionary_key?({@env_key, _ref}), do: true
defp internal_dictionary_key?(@initial_env_key), do: true
defp internal_dictionary_key?(_), do: false
defp get_execution_time_delta(started_at) do
System.monotonic_time()
|> Kernel.-(started_at)
|> System.convert_time_unit(:native, :millisecond)
end
end
| 30.933464 | 139 | 0.663567 |
f7c7c2e1591600a28642d8ddc481c90581cd10a3 | 4,083 | ex | Elixir | lib/docusign/model/envelopes.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 4 | 2020-12-21T12:50:13.000Z | 2022-01-12T16:50:43.000Z | lib/docusign/model/envelopes.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 12 | 2018-09-18T15:26:34.000Z | 2019-09-28T15:29:39.000Z | lib/docusign/model/envelopes.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 15 | 2020-04-29T21:50:16.000Z | 2022-02-11T18:01:51.000Z | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule DocuSign.Model.Envelopes do
@moduledoc """
Envelope creation and management.
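## Example

A minimal decoding sketch (`json` is an assumed API response body):

    envelope = Poison.decode!(json, as: %DocuSign.Model.Envelopes{})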
"""
@derive [Poison.Encoder]
defstruct [
:allowMarkup,
:allowReassign,
:allowViewHistory,
:asynchronous,
:attachmentsUri,
:authoritativeCopy,
:authoritativeCopyDefault,
:autoNavigation,
:brandId,
:brandLock,
:certificateUri,
:completedDateTime,
:createdDateTime,
:customFields,
:customFieldsUri,
:declinedDateTime,
:deletedDateTime,
:deliveredDateTime,
:documentsCombinedUri,
:documentsUri,
:emailBlurb,
:emailSettings,
:emailSubject,
:enableWetSign,
:enforceSignerVisibility,
:envelopeId,
:envelopeIdStamping,
:envelopeUri,
:initialSentDateTime,
:is21CFRPart11,
:isSignatureProviderEnvelope,
:lastModifiedDateTime,
:lockInformation,
:messageLock,
:notification,
:notificationUri,
:purgeState,
:recipients,
:recipientsLock,
:recipientsUri,
:sentDateTime,
:signerCanSignOnMobile,
:signingLocation,
:status,
:statusChangedDateTime,
:templatesUri,
:transactionId,
:useDisclosure,
:voidedDateTime,
:voidedReason
]
@type t :: %__MODULE__{
:allowMarkup => String.t(),
:allowReassign => String.t(),
:allowViewHistory => String.t(),
:asynchronous => String.t(),
:attachmentsUri => String.t(),
:authoritativeCopy => String.t(),
:authoritativeCopyDefault => String.t(),
:autoNavigation => String.t(),
:brandId => String.t(),
:brandLock => String.t(),
:certificateUri => String.t(),
:completedDateTime => String.t(),
:createdDateTime => String.t(),
:customFields => DocuSign.Model.AccountCustomFields.t(),
:customFieldsUri => String.t(),
:declinedDateTime => String.t(),
:deletedDateTime => String.t(),
:deliveredDateTime => String.t(),
:documentsCombinedUri => String.t(),
:documentsUri => String.t(),
:emailBlurb => String.t(),
:emailSettings => DocuSign.Model.EnvelopeEmailSettings.t(),
:emailSubject => String.t(),
:enableWetSign => String.t(),
:enforceSignerVisibility => String.t(),
:envelopeId => String.t(),
:envelopeIdStamping => String.t(),
:envelopeUri => String.t(),
:initialSentDateTime => String.t(),
:is21CFRPart11 => String.t(),
:isSignatureProviderEnvelope => String.t(),
:lastModifiedDateTime => String.t(),
:lockInformation => DocuSign.Model.EnvelopeLocks.t(),
:messageLock => String.t(),
:notification => DocuSign.Model.Notification.t(),
:notificationUri => String.t(),
:purgeState => String.t(),
:recipients => DocuSign.Model.EnvelopeRecipients.t(),
:recipientsLock => String.t(),
:recipientsUri => String.t(),
:sentDateTime => String.t(),
:signerCanSignOnMobile => String.t(),
:signingLocation => String.t(),
:status => String.t(),
:statusChangedDateTime => String.t(),
:templatesUri => String.t(),
:transactionId => String.t(),
:useDisclosure => String.t(),
:voidedDateTime => String.t(),
:voidedReason => String.t()
}
end
defimpl Poison.Decoder, for: DocuSign.Model.Envelopes do
import DocuSign.Deserializer
def decode(value, options) do
value
|> deserialize(:customFields, :struct, DocuSign.Model.AccountCustomFields, options)
|> deserialize(:emailSettings, :struct, DocuSign.Model.EnvelopeEmailSettings, options)
|> deserialize(:lockInformation, :struct, DocuSign.Model.EnvelopeLocks, options)
|> deserialize(:notification, :struct, DocuSign.Model.Notification, options)
|> deserialize(:recipients, :struct, DocuSign.Model.EnvelopeRecipients, options)
end
end
| 31.407692 | 90 | 0.614989 |
f7c7cfde2eec39ef3140656075496ec91b80cf76 | 667 | ex | Elixir | lib/stripe_mock/api/token.ex | whitepaperclip/stripe_mock | a8ba9101a04216f26d0650eb38448173d8e090a1 | [
"MIT"
] | 4 | 2019-06-04T20:35:21.000Z | 2021-09-02T04:04:21.000Z | lib/stripe_mock/api/token.ex | whitepaperclip/stripe_mock | a8ba9101a04216f26d0650eb38448173d8e090a1 | [
"MIT"
] | 2 | 2020-02-04T17:38:12.000Z | 2021-04-29T06:59:06.000Z | lib/stripe_mock/api/token.ex | whitepaperclip/stripe_mock | a8ba9101a04216f26d0650eb38448173d8e090a1 | [
"MIT"
] | null | null | null | defmodule StripeMock.API.Token do
use StripeMock.Schema
schema "tokens" do
field :client_ip, :string
field :type, :string
field :used, :boolean, default: false
belongs_to :card, API.Card
common_fields()
timestamps()
end
@doc false
def changeset(token, attrs) do
token
|> cast(attrs, [:client_ip, :type])
|> cast_assoc(:card, with: &API.Card.token_changeset/2)
|> set_type()
|> validate_required([:client_ip, :type])
|> put_common_fields()
end
defp set_type(changeset) do
case get_field(changeset, :card) do
nil -> changeset
_ -> put_change(changeset, :type, "card")
end
end
end
| 20.84375 | 59 | 0.644678 |
f7c7d6a06032c3dceb6844f6b106046ac51f4ef9 | 739 | exs | Elixir | rna-transcription/rna_transcription_test.exs | SLIB53/exercism-elixir-answers | 352e3b9b6c9e5f8025ccd462845f1682d115e1da | [
"MIT"
] | 2 | 2019-03-26T09:32:41.000Z | 2020-03-09T19:16:36.000Z | rna-transcription/rna_transcription_test.exs | SLIB53/exercism-elixir-answers | 352e3b9b6c9e5f8025ccd462845f1682d115e1da | [
"MIT"
] | null | null | null | rna-transcription/rna_transcription_test.exs | SLIB53/exercism-elixir-answers | 352e3b9b6c9e5f8025ccd462845f1682d115e1da | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("rna_transcription.exs", __DIR__)
end
ExUnit.start()
ExUnit.configure(trace: true)
defmodule RNATranscriptionTest do
use ExUnit.Case
test "transcribes guanine to cytosine" do
assert RNATranscription.to_rna('G') == 'C'
end
test "transcribes cytosine to guanine" do
assert RNATranscription.to_rna('C') == 'G'
end
test "transcribes thymidine to adenine" do
assert RNATranscription.to_rna('T') == 'A'
end
test "transcribes adenine to uracil" do
assert RNATranscription.to_rna('A') == 'U'
end
test "it transcribes all dna nucleotides to rna equivalents" do
assert RNATranscription.to_rna('ACGTGGTCTTAA') == 'UGCACCAGAAUU'
end
end
| 23.83871 | 68 | 0.728011 |
f7c837410f6f09ca679780e687b08d149e239398 | 5,464 | exs | Elixir | apps/omg_db/test/omg_db/db_test.exs | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | null | null | null | apps/omg_db/test/omg_db/db_test.exs | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | null | null | null | apps/omg_db/test/omg_db/db_test.exs | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | 1 | 2021-12-04T00:37:46.000Z | 2021-12-04T00:37:46.000Z | # Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.DBTest do
@moduledoc """
A smoke test of the LevelDB support. The intention here is to **only** test minimally that the pipes work.
For more detailed persistence tests, look for `...PersistenceTest` tests throughout the apps.
Note the excluded moduletag: this test requires an explicit `--include wrappers`.
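
For example:

    mix test --include wrappers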
"""
use ExUnitFixtures
use OMG.DB.LevelDBCase, async: false
alias OMG.DB
@moduletag :wrappers
@moduletag :common
@writes 10
test "handles object storage", %{db_dir: dir, db_pid: pid} do
:ok =
DB.multi_update(
[{:put, :block, %{hash: "xyz"}}, {:put, :block, %{hash: "vxyz"}}, {:put, :block, %{hash: "wvxyz"}}],
pid
)
assert {:ok, [%{hash: "wvxyz"}, %{hash: "xyz"}]} == DB.blocks(["wvxyz", "xyz"], pid)
:ok = DB.multi_update([{:delete, :block, "xyz"}], pid)
checks = fn pid ->
assert {:ok, [%{hash: "wvxyz"}, :not_found, %{hash: "vxyz"}]} == DB.blocks(["wvxyz", "xyz", "vxyz"], pid)
end
checks.(pid)
# check actual persistence
pid = restart(dir, pid)
checks.(pid)
end
test "handles single value storage", %{db_dir: dir, db_pid: pid} do
:ok = DB.multi_update([{:put, :last_exit_finalizer_eth_height, 12}], pid)
checks = fn pid ->
assert {:ok, 12} == DB.get_single_value(:last_exit_finalizer_eth_height, pid)
end
checks.(pid)
# check actual persistence
pid = restart(dir, pid)
checks.(pid)
end
test "block hashes return the correct range", %{db_dir: _dir, db_pid: pid} do
:ok =
DB.multi_update(
[
{:put, :block, %{hash: "xyz", number: 1}},
{:put, :block, %{hash: "vxyz", number: 2}},
{:put, :block, %{hash: "wvxyz", number: 3}}
],
pid
)
{:ok, ["xyz", "vxyz", "wvxyz"]} = OMG.DB.block_hashes([1, 2, 3], pid)
end
test "if multi reading exit infos returns writen results", %{db_dir: _dir, db_pid: pid} do
db_writes = create_write(:exit_info, pid)
{:ok, exits} = DB.exit_infos(pid)
# what we wrote and what we read must be equal
[] = exits -- db_writes
end
test "if multi reading utxos returns writen results", %{db_dir: _dir, db_pid: pid} do
db_writes = create_write(:utxo, pid)
{:ok, utxos} = DB.utxos(pid)
[] = utxos -- db_writes
end
test "if multi reading in flight exit infos returns writen results", %{db_dir: _dir, db_pid: pid} do
db_writes = create_write(:in_flight_exit_info, pid)
{:ok, in_flight_exits_infos} = DB.in_flight_exits_info(pid)
[] = in_flight_exits_infos -- db_writes
end
test "if multi reading competitor infos returns writen results", %{db_dir: _dir, db_pid: pid} do
db_writes = create_write(:competitor_info, pid)
{:ok, competitors_info} = DB.competitors_info(pid)
[] = competitors_info -- db_writes
end
test "if multi reading and writting does not pollute returned values", %{db_dir: _dir, db_pid: pid} do
db_writes = create_write(:exit_info, pid)
{:ok, exits} = DB.exit_infos(pid)
# what we wrote and what we read must be equal
[] = exits -- db_writes
db_writes = create_write(:utxo, pid)
{:ok, utxos} = DB.utxos(pid)
[] = utxos -- db_writes
db_writes = create_write(:in_flight_exit_info, pid)
{:ok, in_flight_exits_infos} = DB.in_flight_exits_info(pid)
[] = in_flight_exits_infos -- db_writes
db_writes = create_write(:competitor_info, pid)
{:ok, competitors_info} = DB.competitors_info(pid)
[] = competitors_info -- db_writes
end
defp create_write(:exit_info = type, pid) do
db_writes =
Enum.map(1..@writes, fn index -> {:put, type, {{index, index, index}, :crypto.strong_rand_bytes(index)}} end)
:ok = write(db_writes, pid)
get_raw_values(db_writes)
end
defp create_write(:utxo = type, pid) do
db_writes =
Enum.map(1..@writes, fn index ->
{:put, type, {{index, index, index}, %{test: :crypto.strong_rand_bytes(index)}}}
end)
:ok = write(db_writes, pid)
get_raw_values(db_writes)
end
defp create_write(:in_flight_exit_info = type, pid) do
db_writes = Enum.map(1..@writes, fn index -> {:put, type, {:crypto.strong_rand_bytes(index), index}} end)
:ok = write(db_writes, pid)
get_raw_values(db_writes)
end
defp create_write(:competitor_info = type, pid) do
db_writes = Enum.map(1..@writes, fn index -> {:put, type, {:crypto.strong_rand_bytes(index), index}} end)
:ok = write(db_writes, pid)
get_raw_values(db_writes)
end
defp write(db_writes, pid), do: OMG.DB.multi_update(db_writes, pid)
defp get_raw_values(db_writes), do: Enum.map(db_writes, &elem(&1, 2))
defp restart(dir, pid) do
:ok = GenServer.stop(pid)
name = :"TestDB_#{make_ref() |> inspect()}"
{:ok, pid} = start_supervised(OMG.DB.child_spec(db_path: dir, name: name), restart: :temporary)
pid
end
end
| 32.718563 | 115 | 0.652269 |
f7c8744a909920e5de782bae938a5a54af2da3fd | 6,972 | ex | Elixir | clients/memcache/lib/google_api/memcache/v1beta2/model/google_cloud_saasaccelerator_management_providers_v1_instance.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | ["Apache-2.0"] | null | null | null | clients/memcache/lib/google_api/memcache/v1beta2/model/google_cloud_saasaccelerator_management_providers_v1_instance.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/memcache/lib/google_api/memcache/v1beta2/model/google_cloud_saasaccelerator_management_providers_v1_instance.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1Instance do
@moduledoc """
## Attributes
* `consumerDefinedName` (*type:* `String.t`, *default:* `nil`) - consumer_defined_name is the name that is set by the consumer. On the other hand Name field represents system-assigned id of an instance so consumers are not necessarily aware of it. consumer_defined_name is used for notification/UI purposes for consumer to recognize their instances.
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Timestamp when the resource was created.
* `labels` (*type:* `map()`, *default:* `nil`) - Optional. Resource labels to represent user provided metadata. Each label is a key-value pair, where both the key and the value are arbitrary strings provided by the user.
* `maintenancePolicyNames` (*type:* `map()`, *default:* `nil`) - Deprecated. The MaintenancePolicies that have been attached to the instance. The key must be of the type name of the oneof policy name defined in MaintenancePolicy, and the referenced policy must define the same policy type. For complete details of MaintenancePolicy, please refer to go/cloud-saas-mw-ug.
* `maintenanceSchedules` (*type:* `%{optional(String.t) => GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSchedule.t}`, *default:* `nil`) - The MaintenanceSchedule contains the scheduling information of published maintenance schedule with same key as software_versions.
* `maintenanceSettings` (*type:* `GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSettings.t`, *default:* `nil`) - Optional. The MaintenanceSettings associated with instance.
* `name` (*type:* `String.t`, *default:* `nil`) - Unique name of the resource. It uses the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
* `producerMetadata` (*type:* `map()`, *default:* `nil`) - Output only. Custom string attributes used primarily to expose producer-specific information in monitoring dashboards. See go/get-instance-metadata.
* `provisionedResources` (*type:* `list(GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1ProvisionedResource.t)`, *default:* `nil`) - Output only. The list of data plane resources provisioned for this instance, e.g. compute VMs. See go/get-instance-metadata.
* `slmInstanceTemplate` (*type:* `String.t`, *default:* `nil`) - Link to the SLM instance template. Only populated when updating SLM instances via SSA's Actuation service adaptor. Service producers with custom control plane (e.g. Cloud SQL) doesn't need to populate this field. Instead they should use software_versions.
* `sloMetadata` (*type:* `GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1SloMetadata.t`, *default:* `nil`) - Output only. SLO metadata for instance classification in the Standardized dataplane SLO platform. See go/cloud-ssa-standard-slo for feature description.
* `softwareVersions` (*type:* `map()`, *default:* `nil`) - Software versions that are used to deploy this instance. This can be mutated by rollout services.
* `state` (*type:* `String.t`, *default:* `nil`) - Output only. Current lifecycle state of the resource (e.g. if it's being created or ready to use).
* `tenantProjectId` (*type:* `String.t`, *default:* `nil`) - Output only. ID of the associated GCP tenant project. See go/get-instance-metadata.
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. Timestamp when the resource was last modified.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:consumerDefinedName => String.t(),
:createTime => DateTime.t(),
:labels => map(),
:maintenancePolicyNames => map(),
:maintenanceSchedules => %{
optional(String.t()) =>
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSchedule.t()
},
:maintenanceSettings =>
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSettings.t(),
:name => String.t(),
:producerMetadata => map(),
:provisionedResources =>
list(
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1ProvisionedResource.t()
),
:slmInstanceTemplate => String.t(),
:sloMetadata =>
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1SloMetadata.t(),
:softwareVersions => map(),
:state => String.t(),
:tenantProjectId => String.t(),
:updateTime => DateTime.t()
}
field(:consumerDefinedName)
field(:createTime, as: DateTime)
field(:labels, type: :map)
field(:maintenancePolicyNames, type: :map)
field(:maintenanceSchedules,
as:
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSchedule,
type: :map
)
field(:maintenanceSettings,
as:
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSettings
)
field(:name)
field(:producerMetadata, type: :map)
field(:provisionedResources,
as:
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1ProvisionedResource,
type: :list
)
field(:slmInstanceTemplate)
field(:sloMetadata,
as:
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1SloMetadata
)
field(:softwareVersions, type: :map)
field(:state)
field(:tenantProjectId)
field(:updateTime, as: DateTime)
end
defimpl Poison.Decoder,
for: GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1Instance do
def decode(value, options) do
GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1Instance.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Memcache.V1beta2.Model.GoogleCloudSaasacceleratorManagementProvidersV1Instance do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 56.682927 | 373 | 0.737522 |
f7c88130694a3fc76b3225f0dba8093af8cfe125 | 18,814 | ex | Elixir | lib/ex_gdax.ex | aquamatt/ex_gdax | f3baf6aba40215b21e70712b89add30df7a873de | ["MIT"] | 11 | 2017-08-31T01:18:17.000Z | 2021-02-12T04:09:12.000Z | lib/ex_gdax.ex | aquamatt/ex_gdax | f3baf6aba40215b21e70712b89add30df7a873de | ["MIT"] | 5 | 2017-10-27T20:03:00.000Z | 2019-02-07T05:19:30.000Z | lib/ex_gdax.ex | aquamatt/ex_gdax | f3baf6aba40215b21e70712b89add30df7a873de | ["MIT"] | 9 | 2017-08-31T01:14:29.000Z | 2020-05-28T12:33:56.000Z |
defmodule ExGdax do
@moduledoc """
GDAX API client.
"""
@doc """
List known currencies.
## Examples
iex> ExGdax.list_currencies()
{:ok,
[%{"id" => "BTC", "min_size" => "0.00000001", "name" => "Bitcoin"},
%{"id" => "ETH", "min_size" => "0.00000001", "name" => "Ether"}, ...]}
"""
defdelegate list_currencies, to: ExGdax.Market, as: :list_currencies
@doc """
Get the API server time.
## Examples
iex> ExGdax.get_time()
{:ok, %{"epoch" => 1501141821.835, "iso" => "2017-07-27T07:50:21.835Z"}}
"""
defdelegate get_time, to: ExGdax.Market, as: :get_time
@doc """
Get a list of available currency pairs for trading.
## Examples
iex> ExGdax.list_products()
{:ok,
[%{"base_currency" => "ETH", "base_max_size" => "5000",
"base_min_size" => "0.01", "display_name" => "ETH/USD", "id" => "ETH-USD",
"margin_enabled" => false, "quote_currency" => "USD",
"quote_increment" => "0.01"}, ...]}
"""
defdelegate list_products, to: ExGdax.Market, as: :list_products
@doc """
Get a list of open orders for a product.
## Parameters
Name | Description
:------ | :----------
`level` | Response detail. Valid options are 1, 2, or 3.
## Examples
iex> ExGdax.get_order_book("ETH-USD")
{:ok,
%{"asks" => [["200.42", "28.447359", 4]],
"bids" => [["200.41", "11.35615248", 3]], "sequence" => 873754533}}
iex> ExGdax.get_order_book("ETH-USD", %{level: 2})
{:ok,
%{"asks" => [["200.49", "73.898254", 6], ["200.5", "1.017412", 2],
["200.51", "0.017366", 1], ["200.52", "0.017387", 1], ...],
"bids" => [["200.48", "0.7", 2], ["200.47", "0.01", 1],
["200.42", "0.76212582", 1], ["200.32", "0.2", 1], ...]}
"""
defdelegate get_order_book(product_id, params \\ %{}), to: ExGdax.Market, as: :get_order_book
@doc """
Snapshot information about the last trade (tick), best bid/ask and 24h volume.
## Examples
iex> ExGdax.get_ticker("ETH-USD")
{:ok,
%{"ask" => "200.47", "bid" => "200.46", "price" => "200.47000000",
"size" => "2.65064800", "time" => "2017-07-27T08:00:43.697000Z",
"trade_id" => 8430635, "volume" => "144080.88916080"}}
"""
defdelegate get_ticker(product_id), to: ExGdax.Market, as: :get_ticker
@doc """
List the latest trades for a product.
## Parameters
Name | Description
:------- | :----------
`before` | Request page before (newer) this pagination id.
`after` | Request page after (older) this pagination id.
`limit` | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_trades("ETH-USD")
{:ok,
[%{"price" => "200.65000000", "side" => "sell", "size" => "1.94831509",
"time" => "2017-07-27T08:01:54.347Z", "trade_id" => 8430778}, ...]
"""
defdelegate list_trades(product_id, params \\ %{}), to: ExGdax.Market, as: :list_trades
@doc """
Historic rates for a product.
## Parameters
Name | Description
:------------ | :----------
`start` | Start time in ISO 8601.
`end` | End time in ISO 8601.
`granularity` | Desired timeslice in seconds.
## Examples
iex> ExGdax.list_historic_rates("ETH-USD")
{:ok,
[[1501142880, 200.43, 200.43, 200.43, 200.43, 5.6956], ...]}
"""
defdelegate list_historic_rates(product_id, params \\ %{}),
to: ExGdax.Market,
as: :list_historic_rates
@doc """
Get 24 hr stats for a product.
## Examples
iex> ExGdax.get_stats("ETH-USD")
{:ok,
%{"high" => "205.80000000", "last" => "201.68000000", "low" => "194.42000000",
"open" => "197.97000000", "volume" => "143965.79255890",
"volume_30day" => "9270459.77394214"}}
"""
defdelegate get_stats(product_id), to: ExGdax.Market, as: :get_stats
@doc """
List accounts.
## Examples
iex> ExGdax.list_accounts()
{:ok,
[%{"available" => "0.0000000000000000", "balance" => "0.0000000000000000",
"currency" => "USD", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"profile_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}, ...]}
"""
defdelegate list_accounts(config \\ nil), to: ExGdax.Private, as: :list_accounts
@doc """
Get an account.
## Examples
iex> ExGdax.get_account(account["id"])
{:ok,
%{"available" => "0.0000000000000000", "balance" => "0.0000000000000000",
"currency" => "USD", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"profile_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}}
"""
defdelegate get_account(account_id, config \\ nil), to: ExGdax.Private, as: :get_account
@doc """
List activity for an account.
## Parameters
Name | Description
:------- | :----------
`before` | Request page before (newer) this pagination id.
`after` | Request page after (older) this pagination id.
`limit` | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_account_history(account["id"], %{limit: 5})
{:ok,
[%{"amount" => "0.0000000000000000", "balance" => "0.0000000000000000",
"created_at" => "2017-07-08T15:26:17.04917Z",
"details" => %{"transfer_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"transfer_type" => "withdraw"}, "id" => XXXXXXXX, "type" => "transfer"}, ...]}
"""
defdelegate list_account_history(account_id, params \\ %{}, config \\ nil),
to: ExGdax.Private,
as: :list_account_history
@doc """
Lists holds on an account for active orders or withdraw requests.
## Parameters
Name | Description
:------- | :----------
`before` | Request page before (newer) this pagination id.
`after` | Request page after (older) this pagination id.
`limit` | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_holds(account["id"])
{:ok, []}
"""
defdelegate list_holds(account_id, params \\ %{}, config \\ nil),
to: ExGdax.Private,
as: :list_holds
@doc """
Place a new order.
Refer to params listed in [GDAX API docs](https://docs.gdax.com/#place-a-new-order)
## Examples
iex> ExGdax.create_order(%{type: "limit", side: "buy", product_id: "ETH-USD", price: "0.50", size: "1.0"})
{:ok,
%{"created_at" => "2017-08-20T23:29:17.752637Z",
"executed_value" => "0.0000000000000000",
"fill_fees" => "0.0000000000000000", "filled_size" => "0.00000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "post_only" => false,
"price" => "0.50000000", "product_id" => "ETH-USD",
"settled" => false, "side" => "buy", "size" => "1.00000000",
"status" => "pending", "stp" => "dc", "time_in_force" => "GTC",
"type" => "limit"}}
"""
defdelegate create_order(params, config \\ nil), to: ExGdax.Private, as: :create_order
@doc """
Cancel all open orders.
## Examples
iex> ExGdax.cancel_orders()
{:ok, ["XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"]}
"""
defdelegate cancel_orders(config \\ nil), to: ExGdax.Private, as: :cancel_orders
@doc """
Cancel a previously placed order.
## Examples
iex> ExGdax.cancel_order("XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX")
{:ok, ["XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"]}
"""
defdelegate cancel_order(order_id, config \\ nil), to: ExGdax.Private, as: :cancel_order
@doc """
List open orders.
## Parameters
Name | Default | Description
:----------- | :---------------------- | :----------
`status` | [open, pending, active] | Limit list of orders to these statuses.
`product_id` | | Only list orders for a specific product.
`before` | | Request page before (newer) this pagination id.
`after` | | Request page after (older) this pagination id.
`limit` | | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_orders(%{status: "open"})
{:ok,
[%{"created_at" => "2017-08-20T23:31:49.235409Z",
"executed_value" => "0.0000000000000000",
"fill_fees" => "0.0000000000000000", "filled_size" => "0.00000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "post_only" => true,
"price" => "0.75000000", "product_id" => "ETH-USD",
"settled" => false, "side" => "buy", "size" => "1.00000000",
"status" => "open", "stp" => "dc", "time_in_force" => "GTC",
"type" => "limit"}]}
"""
defdelegate list_orders(params \\ %{}, config \\ nil), to: ExGdax.Private, as: :list_orders
@doc """
Get an order.
## Examples
iex> ExGdax.get_order(order["id"])
{:ok,
%{"created_at" => "2017-08-20T23:31:49.235409Z",
"executed_value" => "0.0000000000000000",
"fill_fees" => "0.0000000000000000", "filled_size" => "0.00000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "post_only" => true,
"price" => "0.75000000", "product_id" => "ETH-USD",
"settled" => false, "side" => "buy", "size" => "1.00000000",
"status" => "open", "stp" => "dc", "time_in_force" => "GTC",
"type" => "limit"}}
"""
defdelegate get_order(order_id, config \\ nil), to: ExGdax.Private, as: :get_order
@doc """
Get a list of recent fills.
## Parameters
Name | Description
:----------- | :----------
`order_id` | Limit list of fills to this order_id.
`product_id` | Limit list of fills to this product_id.
`before` | Request page before (newer) this pagination id.
`after` | Request page after (older) this pagination id.
`limit` | Number of results per request. Maximum 100. (default 100)
## Examples
iex> ExGdax.list_fills(%{product_id: "ETH-USD", limit: 1})
{:ok,
[%{"created_at" => "2017-08-12T21:25:43.453Z",
"fee" => "0.0000000000000000", "liquidity" => "M",
"order_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"price" => "305.00000000", "product_id" => "ETH-USD",
"profile_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"settled" => true, "side" => "buy", "size" => "0.29000000",
"trade_id" => XXXXXXX, "user_id" => "XXXXXXXXXXXXXXXXXXXXXXX"}]}
"""
defdelegate list_fills(params \\ %{}, config \\ nil), to: ExGdax.Private, as: :list_fills
@doc """
List funding records.
## Parameters
Name | Options | Description
:------- | :-------------------------------- | :----------
`status` | outstanding, settled, or rejected | Limit list of funding records to these statuses.
`before` | | Request page before (newer) this pagination id.
`after` | | Request page after (older) this pagination id.
`limit` | | Number of results per request. Maximum 100. (default 100)
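
## Examples

An illustrative call limiting results to outstanding records (the response
body depends on the GDAX API and is not shown here):

    ExGdax.list_funding(%{status: "outstanding", limit: 10})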
"""
defdelegate list_funding(params \\ %{}, config \\ nil), to: ExGdax.Private, as: :list_funding
@doc """
Repay funding. Repays the older funding records first.
## Parameters
Name | Description
:--------- | :----------
`amount` | Amount of currency to repay.
`currency` | The currency, example `USD`.
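
## Examples

An illustrative call; the amount is a placeholder and the response body
depends on the GDAX API:

    ExGdax.repay_funding(%{amount: "100.00", currency: "USD"})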
"""
defdelegate repay_funding(params, config \\ nil), to: ExGdax.Private, as: :repay_funding
@doc """
Transfer funds between your standard/default profile and a margin profile.
## Parameters
Name | Description
:------------------ | :----------
`margin_profile_id` | The id of the margin profile you’d like to deposit to or withdraw from.
`type` | `deposit` or `withdraw`.
`currency` | The currency to transfer, ex: `BTC` or `USD`.
`amount` | The amount to transfer between the default and margin profile.
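
## Examples

A sketch of a deposit into a margin profile; the profile id and amount are
placeholders and margin must be enabled on the account:

    ExGdax.margin_transfer(%{
      margin_profile_id: "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
      type: "deposit",
      currency: "USD",
      amount: "100.00"
    })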
"""
defdelegate margin_transfer(params, config \\ nil), to: ExGdax.Private, as: :margin_transfer
@doc """
An overview of your profile.
## Examples
iex> ExGdax.get_position()
{:ok,
%{"accounts" => %{"BTC" => %{"balance" => "0.0000000000000000",
"default_amount" => "0", "funded_amount" => "0",
"hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"},
"ETH" => %{"balance" => "0.0000000000000000", "default_amount" => "0",
"funded_amount" => "0", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"},
"LTC" => %{"balance" => "0.0000000000000000", "default_amount" => "0",
"funded_amount" => "0", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"},
"USD" => %{"balance" => "0.0000000000000000", "default_amount" => "0",
"funded_amount" => "0", "hold" => "0.0000000000000000",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}},
"profile_id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "status" => "active",
"user_id" => "XXXXXXXXXXXXXXXXXXXXXXXX"}}
"""
defdelegate get_position(config \\ nil), to: ExGdax.Private, as: :get_position
@doc """
Close your position.
## Parameters
Name | Description
:----------- | :----------
`repay_only` | Either `true` or `false`
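
## Examples

An illustrative call (response body depends on the GDAX API):

    ExGdax.close_position(%{repay_only: false})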
"""
defdelegate close_position(params, config \\ nil), to: ExGdax.Private, as: :close_position
@doc """
Deposit funds from a payment method.
## Parameters
Name | Description
:------------------ | :----------
`amount` | The amount to deposit.
`currency` | The type of currency.
`payment_method_id` | ID of the payment method.
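
## Examples

An illustrative USD deposit; the payment method id is a placeholder:

    ExGdax.deposit_from_payment_method(%{
      amount: "100.00",
      currency: "USD",
      payment_method_id: "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
    })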
"""
defdelegate deposit_from_payment_method(params, config \\ nil),
to: ExGdax.Private,
as: :deposit_from_payment_method
@doc """
Deposit funds from a coinbase account.
## Parameters
Name | Description
:-------------------- | :----------
`amount` | The amount to deposit.
`currency` | The type of currency.
`coinbase_account_id` | ID of the coinbase account.
## Examples
iex> ExGdax.deposit_from_coinbase(%{amount: "0.1", currency: "ETH", coinbase_account_id: "XXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"})
{:ok,
%{"amount" => "0.10000000", "currency" => "ETH",
"id" => "XXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}}
"""
defdelegate deposit_from_coinbase(params, config \\ nil),
to: ExGdax.Private,
as: :deposit_from_coinbase
@doc """
Withdraw funds to a payment method.
## Parameters
Name | Description
:------------------ | :----------
`amount` | The amount to withdraw.
`currency` | The type of currency.
`payment_method_id` | ID of the payment method.
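
## Examples

An illustrative withdrawal mirroring `deposit_from_payment_method/2`; the
payment method id is a placeholder:

    ExGdax.withdraw_to_payment_method(%{
      amount: "100.00",
      currency: "USD",
      payment_method_id: "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
    })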
"""
defdelegate withdraw_to_payment_method(params, config \\ nil),
to: ExGdax.Private,
as: :withdraw_to_payment_method
@doc """
Withdraw funds to a coinbase account.
## Parameters
Name | Description
:-------------------- | :----------
`amount` | The amount to withdraw.
`currency` | The type of currency.
`coinbase_account_id` | ID of the coinbase account.
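
## Examples

An illustrative call mirroring `deposit_from_coinbase/2`; the account id is a
placeholder:

    ExGdax.withdraw_to_coinbase(%{
      amount: "0.1",
      currency: "ETH",
      coinbase_account_id: "XXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
    })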
"""
defdelegate withdraw_to_coinbase(params, config \\ nil),
to: ExGdax.Private,
as: :withdraw_to_coinbase
@doc """
Withdraw funds to a crypto address.
## Parameters
Name | Description
:--------------- | :----------
`amount` | The amount to withdraw.
`currency` | The type of currency.
`crypto_address` | A crypto address of the recipient.
## Examples
iex> ExGdax.withdraw_to_crypto(%{amount: "0.01", currency: "ETH", crypto_address: "0x30a9f8b57e2dcb519a4e4982ed6379f9dd6a0bfc"})
{:ok,
%{"amount" => "0.01000000", "currency" => "ETH",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}}
"""
defdelegate withdraw_to_crypto(params, config \\ nil),
to: ExGdax.Private,
as: :withdraw_to_crypto
@doc """
List your payment methods.
## Examples
iex> ExGdax.list_payment_methods()
{:ok,
[%{"allow_buy" => false, "allow_deposit" => true, "allow_sell" => true,
"allow_withdraw" => false, "created_at" => "2015-11-03T00:32:02Z",
"currency" => "USD",
"fiat_account" => %{"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
"resource" => "account",
"resource_path" => "/v2/accounts/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"}, ...]}
"""
defdelegate list_payment_methods(config \\ nil), to: ExGdax.Private, as: :list_payment_methods
@doc """
List your coinbase accounts.
## Examples
iex> ExGdax.list_coinbase_accounts()
{:ok,
[%{"active" => true, "balance" => "0.00000000", "currency" => "ETH",
"id" => "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", "name" => "ETH Wallet",
"primary" => false, "type" => "wallet"}, ...]}
"""
defdelegate list_coinbase_accounts(config \\ nil),
to: ExGdax.Private,
as: :list_coinbase_accounts
@doc """
Create a report.
## Parameters
Name | Description
:----------- | :----------
`type` | `fills` or `account`.
`start_date` | Starting date for the report (inclusive).
`end_date` | Ending date for the report (inclusive).
`product_id` | ID of the product to generate a fills report for. E.g. BTC-USD. Required if `type` is `fills`.
`account_id` | ID of the account to generate an account report for. Required if `type` is `account`.
`format` | `pdf` or `csv` (default is `pdf`).
`email` | Email address to send the report to (optional).
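
## Examples

A sketch of a fills report request for BTC-USD (the dates are placeholders):

    ExGdax.create_report(%{
      type: "fills",
      start_date: "2018-01-01T00:00:00.000Z",
      end_date: "2018-02-01T00:00:00.000Z",
      product_id: "BTC-USD"
    })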
"""
defdelegate create_report(params, config \\ nil), to: ExGdax.Private, as: :create_report
@doc """
Get report status.
"""
defdelegate get_report(report_id, config \\ nil), to: ExGdax.Private, as: :get_report
@doc """
List your 30-day trailing volume for all products.
## Examples
iex> ExGdax.list_trailing_volume()
{:ok,
[%{"exchange_volume" => "8704829.60943332", "product_id" => "ETH-USD",
"recorded_at" => "2017-07-31T00:17:16.331884Z", "volume" => "1.00000000"}]}
"""
defdelegate list_trailing_volume(config \\ nil), to: ExGdax.Private, as: :list_trailing_volume
@doc """
Get your current fee rate based on the 30-day trailing USD volume.

## Examples

    iex> ExGdax.user_fee_rate()
    {:ok, %{"fee" => "0.0030", "usd_volume" => nil, "fee_usd_total" => nil}}
"""
defdelegate user_fee_rate(config \\ nil), to: ExGdax.Private, as: :user_fee_rate
end
| 33.777379 | 134 | 0.572127 |
f7c882592c2bfe00cadf286429b9a590b10b9ebe | 9,496 | exs | Elixir | lib/elixir/test/elixir/path_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | ["Apache-2.0"] | 1 | 2015-02-23T00:01:48.000Z | 2015-02-23T00:01:48.000Z | lib/elixir/test/elixir/path_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | ["Apache-2.0"] | null | null | null | lib/elixir/test/elixir/path_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | ["Apache-2.0"] | null | null | null |
Code.require_file "test_helper.exs", __DIR__
defmodule PathTest do
use ExUnit.Case, async: true
import PathHelpers
if :file.native_name_encoding == :utf8 do
test :wildcard_with_utf8 do
File.mkdir_p(tmp_path("héllò"))
assert Path.wildcard(tmp_path("héllò")) == [tmp_path("héllò")]
after
File.rm_rf tmp_path("héllò")
end
end
test :wildcard do
hello = tmp_path("wildcard/.hello")
world = tmp_path("wildcard/.hello/world")
File.mkdir_p(world)
assert Path.wildcard(tmp_path("wildcard/*/*")) == []
assert Path.wildcard(tmp_path("wildcard/**/*")) == []
assert Path.wildcard(tmp_path("wildcard/?hello/world")) == []
assert Path.wildcard(tmp_path("wildcard/*/*"), match_dot: true) == [world]
assert Path.wildcard(tmp_path("wildcard/**/*"), match_dot: true) == [hello, world]
assert Path.wildcard(tmp_path("wildcard/?hello/world"), match_dot: true) == [world]
after
File.rm_rf tmp_path("wildcard")
end
if is_win? do
test :relative_win do
assert Path.relative("C:/usr/local/bin") == "usr/local/bin"
assert Path.relative("C:\\usr\\local\\bin") == "usr\\local\\bin"
assert Path.relative("C:usr\\local\\bin") == "usr\\local\\bin"
assert Path.relative("/usr/local/bin") == "usr/local/bin"
assert Path.relative("usr/local/bin") == "usr/local/bin"
assert Path.relative("../usr/local/bin") == "../usr/local/bin"
assert Path.relative_to("D:/usr/local/foo", "D:/usr/") == "local/foo"
assert Path.relative_to("D:/usr/local/foo", "d:/usr/") == "local/foo"
assert Path.relative_to("d:/usr/local/foo", "D:/usr/") == "local/foo"
assert Path.relative_to("D:/usr/local/foo", "d:") == "usr/local/foo"
assert Path.relative_to("D:/usr/local/foo", "D:") == "usr/local/foo"
end
test :type_win do
assert Path.type("C:/usr/local/bin") == :absolute
assert Path.type('C:\\usr\\local\\bin') == :absolute
assert Path.type("C:usr\\local\\bin") == :volumerelative
assert Path.type("/usr/local/bin") == :volumerelative
assert Path.type('usr/local/bin') == :relative
assert Path.type("../usr/local/bin") == :relative
end
test :split_win do
assert Path.split("C:\\foo\\bar") == ["c:/", "foo", "bar"]
assert Path.split("C:/foo/bar") == ["c:/", "foo", "bar"]
end
else
test :relative_unix do
assert Path.relative("/usr/local/bin") == "usr/local/bin"
assert Path.relative("usr/local/bin") == "usr/local/bin"
assert Path.relative("../usr/local/bin") == "../usr/local/bin"
assert Path.relative(['/usr', ?/, "local/bin"]) == "usr/local/bin"
end
test :type_unix do
assert Path.type("/usr/local/bin") == :absolute
assert Path.type("usr/local/bin") == :relative
assert Path.type("../usr/local/bin") == :relative
assert Path.type('/usr/local/bin') == :absolute
assert Path.type('usr/local/bin') == :relative
assert Path.type('../usr/local/bin') == :relative
assert Path.type(['/usr/', 'local/bin']) == :absolute
assert Path.type(['usr/', 'local/bin']) == :relative
assert Path.type(['../usr', '/local/bin']) == :relative
end
end
test :relative_to_cwd do
assert Path.relative_to_cwd(__ENV__.file) ==
Path.relative_to(__ENV__.file, System.cwd!)
assert Path.relative_to_cwd(to_char_list(__ENV__.file)) ==
Path.relative_to(to_char_list(__ENV__.file), to_char_list(System.cwd!))
end
test :absname do
assert (Path.absname("/") |> strip_drive_letter_if_windows) == "/"
assert (Path.absname("/foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.absname("/./foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.absname("/foo/bar") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.absname("/foo/bar/") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.absname("/foo/bar/../bar") |> strip_drive_letter_if_windows) == "/foo/bar/../bar"
assert Path.absname("bar", "/foo") == "/foo/bar"
assert Path.absname("bar/", "/foo") == "/foo/bar"
assert Path.absname("bar/.", "/foo") == "/foo/bar/."
assert Path.absname("bar/../bar", "/foo") == "/foo/bar/../bar"
assert Path.absname("bar/../bar", "foo") == "foo/bar/../bar"
assert Path.absname(["bar/", ?., ?., ["/bar"]], "/foo") == "/foo/bar/../bar"
end
test :expand_path_with_user_home do
home = System.user_home!
assert home == Path.expand("~")
assert home == Path.expand('~')
assert is_binary Path.expand("~/foo")
assert is_binary Path.expand('~/foo')
assert Path.expand("~/file") == Path.join(home, "file")
assert Path.expand("~/file", "whatever") == Path.join(home, "file")
assert Path.expand("file", Path.expand("~")) == Path.expand("~/file")
assert Path.expand("file", "~") == Path.join(home, "file")
assert Path.expand("~file") == Path.join(System.cwd!, "file")
end
test :expand_path do
assert (Path.expand("/") |> strip_drive_letter_if_windows) == "/"
assert (Path.expand("/foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.expand("/./foo") |> strip_drive_letter_if_windows) == "/foo"
assert (Path.expand("/foo/bar") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("/foo/bar/") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("/foo/bar/.") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("/foo/bar/../bar") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("bar", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("bar/", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("bar/.", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("bar/../bar", "/foo") |> strip_drive_letter_if_windows) == "/foo/bar"
assert (Path.expand("../bar/../bar", "/foo/../foo/../foo") |> strip_drive_letter_if_windows) == "/bar"
assert (Path.expand(['..', ?/, "bar/../bar"], '/foo/../foo/../foo') |>
strip_drive_letter_if_windows) == "/bar"
assert (Path.expand("/..") |> strip_drive_letter_if_windows) == "/"
assert Path.expand("bar/../bar", "foo") == Path.expand("foo/bar")
end
test :relative_to do
assert Path.relative_to("/usr/local/foo", "/usr/local") == "foo"
assert Path.relative_to("/usr/local/foo", "/") == "usr/local/foo"
assert Path.relative_to("/usr/local/foo", "/etc") == "/usr/local/foo"
assert Path.relative_to("/usr/local/foo", "/usr/local/foo") == "/usr/local/foo"
assert Path.relative_to("usr/local/foo", "usr/local") == "foo"
assert Path.relative_to("usr/local/foo", "etc") == "usr/local/foo"
assert Path.relative_to('usr/local/foo', "etc") == "usr/local/foo"
assert Path.relative_to("usr/local/foo", "usr/local") == "foo"
assert Path.relative_to(["usr", ?/, 'local/foo'], 'usr/local') == "foo"
end
test :rootname do
assert Path.rootname("~/foo/bar.ex", ".ex") == "~/foo/bar"
assert Path.rootname("~/foo/bar.exs", ".ex") == "~/foo/bar.exs"
assert Path.rootname("~/foo/bar.old.ex", ".ex") == "~/foo/bar.old"
assert Path.rootname([?~, '/foo/bar', ".old.ex"], '.ex') == "~/foo/bar.old"
end
test :extname do
assert Path.extname("foo.erl") == ".erl"
assert Path.extname("~/foo/bar") == ""
assert Path.extname('foo.erl') == ".erl"
assert Path.extname('~/foo/bar') == ""
end
test :dirname do
assert Path.dirname("/foo/bar.ex") == "/foo"
assert Path.dirname("foo/bar.ex") == "foo"
assert Path.dirname("~/foo/bar.ex") == "~/foo"
assert Path.dirname("/foo/bar/baz/") == "/foo/bar/baz"
assert Path.dirname([?~, "/foo", '/bar.ex']) == "~/foo"
end
test :basename do
assert Path.basename("foo") == "foo"
assert Path.basename("/foo/bar") == "bar"
assert Path.basename("/") == ""
assert Path.basename("~/foo/bar.ex", ".ex") == "bar"
assert Path.basename("~/foo/bar.exs", ".ex") == "bar.exs"
assert Path.basename("~/for/bar.old.ex", ".ex") == "bar.old"
assert Path.basename([?~, "/for/bar", '.old.ex'], ".ex") == "bar.old"
end
test :join do
assert Path.join([""]) == ""
assert Path.join(["foo"]) == "foo"
assert Path.join(["/", "foo", "bar"]) == "/foo/bar"
assert Path.join(["~", "foo", "bar"]) == "~/foo/bar"
assert Path.join(['/foo/', "/bar/"]) == "/foo/bar"
end
test :join_two do
assert Path.join("/foo", "bar") == "/foo/bar"
assert Path.join("~", "foo") == "~/foo"
assert Path.join("", "bar") == "bar"
assert Path.join("bar", "") == "bar"
assert Path.join("", "/bar") == "bar"
assert Path.join("/bar", "") == "/bar"
assert Path.join("foo", "/bar") == "foo/bar"
assert Path.join("/foo", "/bar") == "/foo/bar"
assert Path.join("/foo", "/bar") == "/foo/bar"
assert Path.join("/foo", "./bar") == "/foo/./bar"
assert Path.join([?/, "foo"], "./bar") == "/foo/./bar"
end
test :split do
assert Path.split("") == []
assert Path.split("foo") == ["foo"]
assert Path.split("/foo/bar") == ["/", "foo", "bar"]
assert Path.split([?/, "foo/bar"]) == ["/", "foo", "bar"]
end
if is_win? do
defp strip_drive_letter_if_windows([_d,?:|rest]), do: rest
defp strip_drive_letter_if_windows(<<_d,?:,rest::binary>>), do: rest
else
defp strip_drive_letter_if_windows(path), do: path
end
end
| 40.067511 | 106 | 0.595619 |
f7c8b8bd52aae85dfe0f2908c00e2aec34af17bd | 1,946 | ex | Elixir | lib/guards.ex | agix/momento | cd42abe9322f27e65278b65fb35d223baa317423 | ["MIT"] | 37 | 2016-07-01T22:35:41.000Z | 2020-10-02T23:41:45.000Z | lib/guards.ex | agix/momento | cd42abe9322f27e65278b65fb35d223baa317423 | ["MIT"] | 5 | 2016-07-02T14:15:57.000Z | 2016-07-24T09:46:22.000Z | lib/guards.ex | agix/momento | cd42abe9322f27e65278b65fb35d223baa317423 | ["MIT"] | 5 | 2016-07-02T03:46:52.000Z | 2019-04-23T14:15:54.000Z |
defmodule Momento.Guards do
@moduledoc """
This module holds all the various custom guards `natural?/1`, `positive?/1`, `negative?/1` and `days_in_month/1`.
"""
@doc """
Is in the set of natural numbers?
## Examples
iex> Momento.Guards.natural?(5)
true
...> Momento.Guards.natural?(0)
true
...> Momento.Guards.natural?(-5)
false
"""
@spec natural?(integer) :: boolean
defmacro natural?(num), do: quote do: unquote(num) |> is_integer and unquote(num) >= 0
@doc """
Is in set of natural numbers not including 0?
## Examples
iex> Momento.Guards.positive?(5)
true
...> Momento.Guards.positive?(0)
false
...> Momento.Guards.positive?(-5)
false
"""
@spec positive?(integer) :: boolean
defmacro positive?(num), do: quote do: unquote(num) |> is_integer and unquote(num) > 0
@doc """
Is in the set of natural numbers below 0?
## Examples
iex> Momento.Guards.negative?(-5)
true
...> Momento.Guards.negative?(0)
false
...> Momento.Guards.negative?(5)
false
"""
@spec negative?(integer) :: boolean
defmacro negative?(num), do: quote do: unquote(num) |> is_integer and unquote(num) < 0
@doc """
Returns how many days are in a given month (natural number) and is rollover safe.
## Examples
iex> Momento.Guards.days_in_month(2)
28
...> Momento.Guards.days_in_month(14)
28
"""
@spec days_in_month(integer) :: integer
defmacro days_in_month(month) do
month = cond do
month > 12 -> quote do: unquote(month) |> rem(12)
month == 0 -> 12
# TODO: Make a negative number index from the end
# TODO: abs/1 doesn't seem to be working here
month < 0 -> quote do: unquote(month) |> abs |> rem(12)
true -> month
end
quote do: elem({31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, unquote(month) - 1)
end
end
| 23.445783 | 115 | 0.600206 |
f7c8bd82a9256d5f0c4f4b082a479d7e5c417601 | 27 | ex | Elixir | lib/ex_sweeper.ex | myrridin/exsweeper | 86ee16c5b356d2063b2bb56aa302039f3b4f4bb4 | ["MIT"] | null | null | null | lib/ex_sweeper.ex | myrridin/exsweeper | 86ee16c5b356d2063b2bb56aa302039f3b4f4bb4 | ["MIT"] | null | null | null | lib/ex_sweeper.ex | myrridin/exsweeper | 86ee16c5b356d2063b2bb56aa302039f3b4f4bb4 | ["MIT"] | null | null | null |
defmodule ExSweeper do
end
| 9 | 22 | 0.851852 |
f7c8c1540bffd60c72d63975a9ada4a11b88bfe2 | 3,547 | ex | Elixir | apps/ewallet/lib/ewallet/web/v1/serializers/transaction_request_serializer.ex | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | ["Apache-2.0"] | null | null | null | apps/ewallet/lib/ewallet/web/v1/serializers/transaction_request_serializer.ex | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | ["Apache-2.0"] | null | null | null | apps/ewallet/lib/ewallet/web/v1/serializers/transaction_request_serializer.ex | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | ["Apache-2.0"] | 1 | 2021-12-04T00:37:46.000Z | 2021-12-04T00:37:46.000Z |
# Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Web.V1.TransactionRequestSerializer do
@moduledoc """
Serializes transaction request data into V1 JSON response format.
"""
alias Ecto.Association.NotLoaded
alias EWallet.Web.V1.{
AccountSerializer,
PaginatorSerializer,
TokenSerializer,
UserSerializer,
WalletSerializer
}
alias EWallet.Web.Paginator
alias Utils.Helpers.{Assoc, DateFormatter}
alias EWalletDB.TransactionRequest
alias ActivityLogger.System
def serialize(%Paginator{} = paginator) do
PaginatorSerializer.serialize(paginator, &serialize/1)
end
def serialize(%TransactionRequest{} = transaction_request) do
transaction_request =
TransactionRequest.load_consumptions_count(transaction_request, %System{})
%{
object: "transaction_request",
id: transaction_request.id,
formatted_id: transaction_request.id,
socket_topic: "transaction_request:#{transaction_request.id}",
type: transaction_request.type,
amount: transaction_request.amount,
status: transaction_request.status,
correlation_id: transaction_request.correlation_id,
token_id: Assoc.get(transaction_request, [:token, :id]),
token: TokenSerializer.serialize(transaction_request.token),
address: transaction_request.wallet_address,
user_id: Assoc.get(transaction_request, [:user, :id]),
user: UserSerializer.serialize(transaction_request.user),
account_id: Assoc.get(transaction_request, [:account, :id]),
account: AccountSerializer.serialize(transaction_request.account),
exchange_account_id: Assoc.get(transaction_request, [:exchange_account, :id]),
exchange_account: AccountSerializer.serialize(transaction_request.exchange_account),
exchange_wallet_address: Assoc.get(transaction_request, [:exchange_wallet, :address]),
exchange_wallet:
WalletSerializer.serialize_without_balances(transaction_request.exchange_wallet),
require_confirmation: transaction_request.require_confirmation,
current_consumptions_count: transaction_request.consumptions_count,
max_consumptions: transaction_request.max_consumptions,
max_consumptions_per_user: transaction_request.max_consumptions_per_user,
consumption_lifetime: transaction_request.consumption_lifetime,
expiration_reason: transaction_request.expiration_reason,
allow_amount_override: transaction_request.allow_amount_override,
metadata: transaction_request.metadata || %{},
encrypted_metadata: transaction_request.encrypted_metadata || %{},
expiration_date: DateFormatter.to_iso8601(transaction_request.expiration_date),
expired_at: DateFormatter.to_iso8601(transaction_request.expired_at),
created_at: DateFormatter.to_iso8601(transaction_request.inserted_at),
updated_at: DateFormatter.to_iso8601(transaction_request.updated_at)
}
end
def serialize(%NotLoaded{}), do: nil
def serialize(nil), do: nil
end
| 43.256098 | 92 | 0.77192 |
f7c8c4d5fe695487a7d1975577567aa9bda11630 | 880 | ex | Elixir | clients/compute/lib/google_api/compute/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | ["Apache-2.0"] | null | null | null | clients/compute/lib/google_api/compute/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | ["Apache-2.0"] | null | null | null | clients/compute/lib/google_api/compute/v1/metadata.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | ["Apache-2.0"] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1 do
@moduledoc """
API client metadata for GoogleApi.Compute.V1.
"""
@discovery_revision "20220526"
def discovery_revision(), do: @discovery_revision
end
| 32.592593 | 74 | 0.757955 |
f7c8ee355c40716316f5237ac05b260a61e93f64 | 28,991 | ex | Elixir | lib/ash/query/query.ex | maartenvanvliet/ash | c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db | ["MIT"] | null | null | null | lib/ash/query/query.ex | maartenvanvliet/ash | c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db | ["MIT"] | null | null | null | lib/ash/query/query.ex | maartenvanvliet/ash | c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db | ["MIT"] | null | null | null |
defmodule Ash.Query do
@moduledoc """
Utilties around constructing/manipulating ash queries.
Ash queries are used for read actions and side loads, and ultimately
map to queries to a resource's data layer.
Queries are run by calling `read` on an API that contains the resource in question
Examples:
```elixir
MyApp.Post
|> Query.filter(likes > 10)
|> Query.sort([:title])
|> MyApp.Api.read!()
MyApp.Author
|> Query.aggregate(:published_post_count, :posts, filter: [published: true])
|> Query.sort(published_post_count: :desc)
|> Query.limit(10)
|> MyApp.Api.read!()
MyApp.Author
|> Query.load([:post_count, :comment_count])
|> Query.load(posts: [:comments])
|> MyApp.Api.read!()
```
"""
defstruct [
:api,
:resource,
:filter,
:data_layer_query,
aggregates: %{},
side_load: [],
calculations: %{},
context: %{},
sort: [],
limit: nil,
offset: 0,
errors: [],
valid?: true
]
@type t :: %__MODULE__{}
defimpl Inspect do
import Inspect.Algebra
def inspect(query, opts) do
sort? = query.sort != []
side_load? = query.side_load != []
aggregates? = query.aggregates != %{}
calculations? = query.calculations != %{}
limit? = not is_nil(query.limit)
offset? = not (is_nil(query.offset) || query.offset == 0)
filter? = not is_nil(query.filter)
errors? = not Enum.empty?(query.errors)
container_doc(
"#Ash.Query<",
[
concat("resource: ", inspect(query.resource)),
or_empty(concat("filter: ", to_doc(query.filter, opts)), filter?),
or_empty(concat("sort: ", to_doc(query.sort, opts)), sort?),
or_empty(concat("limit: ", to_doc(query.limit, opts)), limit?),
or_empty(concat("offset: ", to_doc(query.offset, opts)), offset?),
or_empty(concat("side_load: ", to_doc(query.side_load, opts)), side_load?),
or_empty(concat("aggregates: ", to_doc(query.aggregates, opts)), aggregates?),
or_empty(concat("calculations: ", to_doc(query.calculations, opts)), calculations?),
or_empty(concat("errors: ", to_doc(query.errors, opts)), errors?)
],
">",
opts,
fn str, _ -> str end
)
end
defp or_empty(value, true), do: value
defp or_empty(_, false), do: empty()
end
alias Ash.Actions.Sort
alias Ash.Error.Query.{AggregatesNotSupported, InvalidLimit, InvalidOffset}
alias Ash.Error.SideLoad.{InvalidQuery, NoSuchRelationship}
alias Ash.Query.{Aggregate, Calculation}
@doc """
Attach a filter statement to the query.
The filter is applied as an "and" to any filters currently on the query.
For more information on writing filters, see: `Ash.Filter`.
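
## Examples

Illustrative usage, assuming a hypothetical `MyApp.Post` resource with
`likes`, `title` and `published` attributes; both expressions and keyword
lists are accepted:

    Ash.Query.filter(MyApp.Post, likes > 10)

    MyApp.Post
    |> Ash.Query.filter(title == "best post")
    |> Ash.Query.filter(published: true)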
"""
defmacro filter(query, %Ash.Filter{} = filter) do
quote do
Ash.Query.do_filter(unquote(query), unquote(filter))
end
end
defmacro filter(query, nil), do: query
defmacro filter(query, true), do: query
defmacro filter(query, false) do
quote do
Ash.Query.do_filter(unquote(query), false)
end
end
defmacro filter(query, do: body) do
quote do
Ash.Query.do_filter(unquote(query), unquote(body))
end
end
defmacro filter(query, expression) do
if Keyword.keyword?(expression) do
quote do
Ash.Query.do_filter(unquote(query), unquote(expression))
end
else
expr = do_expr(expression)
quote do
Ash.Query.do_filter(unquote(query), List.wrap(unquote(expr)))
end
end
end
@doc "Create a new query"
def new(resource, api \\ nil)
def new(%__MODULE__{} = query, _), do: query
def new(resource, api) when is_atom(resource) do
query =
%__MODULE__{
api: api,
filter: nil,
resource: resource
}
|> set_data_layer_query()
case Ash.Resource.base_filter(resource) do
nil ->
query
filter ->
filter = Ash.Filter.parse!(resource, filter)
filter =
Ash.Filter.map(filter, fn
%{__predicate__?: true} = pred ->
%{pred | embedded?: true}
other ->
other
end)
do_filter(query, filter)
end
end
defmacro expr(do: body) do
quote do
Ash.Query.expr(unquote(body))
end
end
defmacro expr({var, _, context} = binding) when is_atom(var) and is_atom(context) do
quote do
unquote(binding)
end
end
defmacro expr(body) do
if Keyword.keyword?(body) do
quote do
unquote(body)
end
else
quote do
List.wrap(unquote(do_expr(body)))
end
end
end
defp do_expr({:^, _, [var]}), do: var
defp do_expr({:., _, [left, right]} = ref) when is_atom(right) do
case do_ref(left, right) do
%Ash.Query.Ref{} = ref ->
Macro.escape(ref)
:error ->
raise "Invalid reference! #{Macro.to_string(ref)}"
end
end
defp do_expr({op, _, nil}) when is_atom(op) do
Macro.escape(%Ash.Query.Ref{relationship_path: [], attribute: op})
end
defp do_expr({op, _, args}) when is_atom(op) and is_list(args) do
{op, Enum.map(args, &do_expr(&1))}
end
defp do_expr({left, _, _}) when is_tuple(left), do: do_expr(left)
defp do_expr(other), do: other
defp do_ref({_, _, list}, _right) when is_list(list) do
:error
end
defp do_ref({left, _, _}, right) when is_atom(left) and is_atom(right) do
%Ash.Query.Ref{relationship_path: [left], attribute: right}
end
defp do_ref({:., _, [left, right]}, far_right) do
case do_ref(left, right) do
%Ash.Query.Ref{relationship_path: path, attribute: attribute} = ref ->
%{ref | relationship_path: path ++ [attribute], attribute: far_right}
:error ->
:error
end
end
defp do_ref(_left, _right) do
:error
end
@doc """
Loads named calculations or aggregates on the resource.
Currently, loading attributes has no effect, as all attributes are returned.
Before long, we will have the default list to load as the attributes, but if you say
`load(query, [:attribute1])`, that will be the only field filled in. This will let
data layers make more intelligent "select" statements as well.
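
## Examples

Illustrative usage, assuming a hypothetical `MyApp.Author` resource with a
`posts` relationship and a `post_count` aggregate:

    MyApp.Author
    |> Ash.Query.load(:post_count)
    |> Ash.Query.load(posts: [:comments])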
"""
@spec load(t(), atom | list(atom) | Keyword.t()) :: t()
def load(query, fields) when not is_list(fields) do
load(query, List.wrap(fields))
end
def load(query, fields) do
query = to_query(query)
Enum.reduce(fields, query, fn
{field, %__MODULE__{} = nested}, query ->
side_load(query, [{field, nested}])
{field, rest}, query ->
cond do
rel = Ash.Resource.relationship(query.resource, field) ->
nested_query = load(rel.destination, rest)
side_load(query, [{field, nested_query}])
calculation = Ash.Resource.calculation(query.resource, field) ->
{module, opts} = module_and_opts(calculation.calculation)
with {:ok, args} <- validate_arguments(calculation, rest),
{:ok, calculation} <-
Calculation.new(
calculation.name,
module,
opts,
args
) do
calculation = %{calculation | load: field}
%{query | calculations: Map.put(query.calculations, field, calculation)}
end
true ->
add_error(query, :load, "Invalid load #{inspect(field)}")
end
field, query ->
do_load(query, field)
end)
end
defp module_and_opts({module, opts}), do: {module, opts}
defp module_and_opts(module), do: {module, []}
defp do_load(query, field) do
cond do
Ash.Resource.attribute(query.resource, field) ->
query
Ash.Resource.relationship(query.resource, field) ->
side_load(query, field)
aggregate = Ash.Resource.aggregate(query.resource, field) ->
related = Ash.Resource.related(query.resource, aggregate.relationship_path)
with %{valid?: true} = aggregate_query <-
build(related, filter: aggregate.filter),
{:ok, query_aggregate} <-
Aggregate.new(
query.resource,
aggregate.name,
aggregate.kind,
aggregate.relationship_path,
aggregate_query
) do
query_aggregate = %{query_aggregate | load: field}
new_aggregates = Map.put(query.aggregates, aggregate.name, query_aggregate)
%{query | aggregates: new_aggregates}
else
%{errors: errors} ->
add_error(query, :aggregates, Ash.Error.to_ash_error(errors))
{:error, error} ->
add_error(query, :aggregates, Ash.Error.to_ash_error(error))
end
calculation = Ash.Resource.calculation(query.resource, field) ->
{module, opts} =
case calculation.calculation do
{module, opts} -> {module, opts}
module -> {module, []}
end
with {:ok, args} <- validate_arguments(calculation, %{}),
{:ok, calculation} <-
Calculation.new(calculation.name, module, opts, args) do
calculation = %{calculation | load: field}
%{query | calculations: Map.put(query.calculations, field, calculation)}
else
{:error, error} ->
add_error(query, :load, error)
end
true ->
add_error(query, :load, "Could not load #{inspect(field)}")
end
end
defp validate_arguments(calculation, args) do
Enum.reduce_while(calculation.arguments, {:ok, %{}}, fn argument, {:ok, arg_values} ->
value = default(Map.get(args, argument.name), argument.default)
if is_nil(value) do
if argument.allow_nil? do
{:cont, {:ok, Map.put(arg_values, argument.name, nil)}}
else
{:halt, {:error, "Argument #{argument.name} is required"}}
end
else
with {:ok, casted} <- Ash.Type.cast_input(argument.type, value),
:ok <-
Ash.Type.apply_constraints(argument.type, casted, argument.constraints) do
{:cont, {:ok, Map.put(arg_values, argument.name, casted)}}
else
{:error, error} ->
{:halt, {:error, error}}
end
end
end)
end
defp default(nil, {module, function, args}), do: apply(module, function, args)
defp default(nil, value) when is_function(value, 0), do: value.()
defp default(nil, value), do: value
defp default(value, _), do: value
@doc """
Sets a specific context key to a specific value.
See `set_context/2` for more information.
"""
@spec put_context(t(), atom, term) :: t()
def put_context(query, key, value) do
%{query | context: Map.put(query.context, key, value)}
end
@doc """
Merge a map of values into the query context.
When both the existing and the new value for a key are maps, they are merged
one level deep; otherwise the new value replaces the old one. Few things use
this directly at present.
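
For example, a minimal sketch of the merge behavior (keys are hypothetical):

```elixir
query
|> Ash.Query.set_context(%{shared: %{a: 1}})
|> Ash.Query.set_context(%{shared: %{b: 2}, other: :value})
# query.context is now %{shared: %{a: 1, b: 2}, other: :value}
```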
"""
@spec set_context(t(), map) :: t()
def set_context(query, map) do
%{
query
| context:
Map.merge(query.context, map, fn _k, v1, v2 ->
if is_map(v1) and is_map(v2) do
Map.merge(v1, v2)
else
v2
end
end)
}
end
@doc "Removes a field from the list of fields to load"
@spec unload(t(), list(atom)) :: t()
def unload(query, fields) do
query = to_query(query)
Enum.reduce(fields, query, fn field, query ->
case field do
{field, rest} ->
new_side_loads = do_unload_side_load(query.side_load, {field, rest})
%{query | side_load: new_side_loads}
field ->
do_unload(query, field)
end
end)
end
defp do_unload(query, field) do
cond do
Ash.Resource.attribute(query.resource, field) ->
query
Ash.Resource.relationship(query.resource, field) ->
%{query | side_load: Keyword.delete(query.side_load, field)}
Ash.Resource.aggregate(query.resource, field) ->
new_aggregates =
Enum.reduce(query.aggregates, %{}, fn
{_field, %{load: ^field}}, acc ->
acc
{field, aggregate}, acc ->
Map.put(acc, field, aggregate)
end)
%{query | aggregates: new_aggregates}
end
end
defp do_unload_side_load(%__MODULE__{} = query, unload) do
%{query | side_load: do_unload_side_load(query.side_load, unload)}
end
defp do_unload_side_load(side_loads, {field, rest}) do
Enum.reduce(side_loads, [], fn
^field, acc ->
acc
{^field, value}, acc ->
new_value =
rest
|> List.wrap()
|> Enum.reduce(value, &do_unload_side_load(&2, &1))
[{field, new_value} | acc]
value, acc ->
[value | acc]
end)
|> Enum.reverse()
end
defp do_unload_side_load(side_loads, field) do
do_unload_side_load(side_loads, {field, []})
end
@doc """
Builds a query from a keyword list.
This is used by certain query constructs like aggregates. It can also be used to manipulate a data structure
before passing it to an ash query.
For example:
```elixir
Ash.Query.build(MyResource, filter: [name: "fred"], sort: [name: :asc], offset: 10)
```
"""
@spec build(Ash.resource(), Ash.api() | nil, Keyword.t()) :: t()
def build(resource, api \\ nil, keyword) do
Enum.reduce(keyword, new(resource, api), fn
{:filter, value}, query ->
do_filter(query, value)
{:sort, value}, query ->
sort(query, value)
{:limit, value}, query ->
limit(query, value)
{:offset, value}, query ->
offset(query, value)
{:side_load, value}, query ->
side_load(query, value)
{:aggregate, {name, type, relationship}}, query ->
aggregate(query, name, type, relationship)
{:aggregate, {name, type, relationship, agg_query}}, query ->
aggregate(query, name, type, relationship, agg_query)
{:calculate, {name, module_and_opts}}, query ->
calculate(query, name, module_and_opts)
{:calculate, {name, module_and_opts, context}}, query ->
calculate(query, name, module_and_opts, context)
end)
end
@doc "Set the query's api, and any side loaded query's api"
def set_api(query, api) do
query = to_query(query)
%{query | api: api, side_load: set_side_load_api(query.side_load, api)}
end
@doc """
Adds an aggregation to the query.
Aggregations are made available on the `aggregates` field of the records returned.
The only aggregate available currently is a `count` aggregate. The `filter` option accepts
either a filter or a keyword list of options used to build a limiting query for that aggregate.
However, currently only filters are accepted.
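
For example, a minimal sketch (the aggregate name and the `:posts`
relationship are hypothetical):

```elixir
Ash.Query.aggregate(query, :published_post_count, :count, :posts,
  filter: [published: true]
)
```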
"""
@spec aggregate(
t() | Ash.resource(),
atom(),
Ash.aggregate_kind(),
atom | list(atom),
Ash.query() | nil
) :: t()
def aggregate(query, name, type, relationship, agg_query \\ nil) do
query = to_query(query)
relationship = List.wrap(relationship)
if Ash.Resource.data_layer_can?(query.resource, {:aggregate, type}) do
agg_query =
case agg_query do
nil ->
nil
%__MODULE__{} = agg_query ->
agg_query
options when is_list(options) ->
build(Ash.Resource.related(query.resource, relationship), options)
end
case Aggregate.new(query.resource, name, type, relationship, agg_query) do
{:ok, aggregate} ->
new_aggregates = Map.put(query.aggregates, aggregate.name, aggregate)
set_data_layer_query(%{query | aggregates: new_aggregates})
{:error, error} ->
add_error(query, :aggregate, error)
end
else
add_error(
query,
:aggregate,
AggregatesNotSupported.exception(resource: query.resource, feature: "using")
)
end
end
@doc """
Adds a calculation to the query.
Calculations are made available on the `calculations` field of the records returned.
The `module_and_opts` argument accepts either a `module` or a `{module, opts}`. For more information
on what that module should look like, see `Ash.Calculation`.
More features for calculations, like passing anonymous functions, will be supported in the future.
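
For example, a minimal sketch (`FullName` is a hypothetical module
implementing `Ash.Calculation`):

```elixir
Ash.Query.calculate(query, :full_name, {FullName, separator: " "})
```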
"""
def calculate(query, name, module_and_opts, context \\ %{}) do
query = to_query(query)
{module, opts} =
case module_and_opts do
{module, opts} -> {module, opts}
module -> {module, []}
end
case Calculation.new(name, module, opts, context) do
{:ok, calculation} ->
%{query | calculations: Map.put(query.calculations, name, calculation)}
{:error, error} ->
add_error(query, :calculations, error)
end
end
@doc "Limit the results returned from the query"
@spec limit(t() | Ash.resource(), nil | integer()) :: t()
def limit(query, nil), do: to_query(query)
def limit(query, limit) when is_integer(limit) do
query = to_query(query)
if Ash.Resource.data_layer_can?(query.resource, :limit) do
query
|> Map.put(:limit, max(0, limit))
|> set_data_layer_query()
else
add_error(query, :limit, "Data layer does not support limits")
end
end
def limit(query, limit) do
add_error(query, :limit, InvalidLimit.exception(limit: limit))
end
@doc "Skip the first n records"
@spec offset(t() | Ash.resource(), nil | integer()) :: t()
def offset(query, nil), do: to_query(query)
def offset(query, offset) when is_integer(offset) do
query = to_query(query)
if Ash.Resource.data_layer_can?(query.resource, :offset) do
query
|> Map.put(:offset, max(0, offset))
|> set_data_layer_query()
else
add_error(query, :offset, "Data layer does not support offset")
end
end
def offset(query, offset) do
query
|> to_query()
|> add_error(:offset, InvalidOffset.exception(offset: offset))
end
defp side_load(query, statement) do
query = to_query(query)
with sanitized_statement <- List.wrap(sanitize_side_loads(statement)),
:ok <- validate_side_load(query.resource, sanitized_statement),
new_side_loads <- merge_side_load(query.side_load, sanitized_statement) do
%{query | side_load: new_side_loads}
else
{:error, errors} ->
Enum.reduce(errors, query, &add_error(&2, :side_load, &1))
end
end
@doc false
def validate_side_load(resource, side_loads, path \\ []) do
case do_validate_side_load(resource, side_loads, path) do
[] -> :ok
errors -> {:error, errors}
end
end
defp do_validate_side_load(_resource, %Ash.Query{} = query, path) do
if query.limit || (query.offset && query.offset != 0) do
[{:error, InvalidQuery.exception(query: query, side_load_path: Enum.reverse(path))}]
else
case query.errors do
[] ->
[]
_errors ->
[
{:error,
InvalidQuery.exception(
query: query,
side_load_path: Enum.reverse(path)
)}
]
end
end
end
defp do_validate_side_load(resource, {atom, _} = tuple, path) when is_atom(atom) do
do_validate_side_load(resource, [tuple], path)
end
defp do_validate_side_load(resource, side_loads, path) when is_list(side_loads) do
side_loads
|> List.wrap()
|> Enum.flat_map(fn
{_key, %Ash.Query{}} ->
[]
{key, value} ->
case Ash.Resource.relationship(resource, key) do
nil ->
[
{:error,
NoSuchRelationship.exception(
resource: resource,
relationship: key,
side_load_path: Enum.reverse(path)
)}
]
relationship ->
validate_matching_query_and_continue(value, resource, key, path, relationship)
end
end)
end
@doc false
def do_filter(query, %Ash.Filter{} = filter) do
query = to_query(query)
if Ash.Resource.data_layer_can?(query.resource, :filter) do
new_filter =
case query.filter do
nil ->
{:ok, filter}
existing_filter ->
Ash.Filter.add_to_filter(existing_filter, filter, :and, query.aggregates)
end
case new_filter do
{:ok, filter} ->
set_data_layer_query(%{query | filter: filter})
{:error, error} ->
add_error(query, :filter, error)
end
else
add_error(query, :filter, "Data layer does not support filtering")
end
end
def do_filter(query, statement) do
query = to_query(query)
if Ash.Resource.data_layer_can?(query.resource, :filter) do
filter =
if query.filter do
Ash.Filter.add_to_filter(query.filter, statement, :and, query.aggregates)
else
Ash.Filter.parse(query.resource, statement, query.aggregates)
end
case filter do
{:ok, filter} ->
query
|> Map.put(:filter, filter)
|> set_data_layer_query()
{:error, error} ->
add_error(query, :filter, error)
end
else
add_error(query, :filter, "Data layer does not support filtering")
end
end
@doc """
Sort the results based on attributes or aggregates (calculations are not yet supported).
Takes a list of fields to sort on, or a keyword list/mixed keyword list of fields and sort directions.
The default sort direction is `:asc`.
Examples:
```
Ash.Query.sort(query, [:foo, :bar])
Ash.Query.sort(query, [:foo, bar: :desc])
Ash.Query.sort(query, [foo: :desc, bar: :asc])
```
"""
@spec sort(t() | Ash.resource(), Ash.sort()) :: t()
def sort(query, sorts) do
query = to_query(query)
if Ash.Resource.data_layer_can?(query.resource, :sort) do
sorts
|> List.wrap()
|> Enum.reduce(query, fn
{sort, direction}, query ->
%{query | sort: query.sort ++ [{sort, direction}]}
sort, query ->
%{query | sort: query.sort ++ [{sort, :asc}]}
end)
|> validate_sort()
|> set_data_layer_query()
else
add_error(query, :sort, "Data layer does not support sorting")
end
end
@doc """
Reverse the sort order of a query.
If the query has no sort, an error is added indicating that.
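
For example (a minimal sketch):

```elixir
query
|> Ash.Query.sort(inserted_at: :desc)
|> Ash.Query.reverse()
# now sorted by inserted_at: :asc
```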
"""
@spec reverse(t()) :: t()
def reverse(%{sort: nil} = query) do
add_error(query, :sort, "Unreversable sort")
end
def reverse(query) do
query
|> Ash.Query.unset(:sort)
|> Ash.Query.sort(Ash.Actions.Sort.reverse(query.sort))
end
@spec unset(Ash.resource() | t(), atom | [atom]) :: t()
def unset(query, keys) when is_list(keys) do
query = to_query(query)
keys
|> Enum.reduce(query, fn key, query ->
if key in [:api, :resource] do
query
else
struct(query, [{key, Map.get(%__MODULE__{}, key)}])
end
end)
|> set_data_layer_query()
end
def unset(query, key) do
if key in [:api, :resource] do
to_query(query)
else
query
|> to_query()
|> struct([{key, Map.get(%__MODULE__{}, key)}])
|> set_data_layer_query()
end
end
@doc false
def data_layer_query(%{resource: resource} = ash_query, opts \\ []) do
if Ash.Resource.data_layer_can?(resource, :read) do
query = Ash.DataLayer.resource_to_query(resource)
filter_aggregates =
if ash_query.filter do
Ash.Filter.used_aggregates(ash_query.filter)
else
[]
end
sort_aggregates =
Enum.flat_map(ash_query.sort, fn {field, _} ->
case Map.fetch(ash_query.aggregates, field) do
:error ->
[]
{:ok, agg} ->
[agg]
end
end)
aggregates = Enum.uniq_by(filter_aggregates ++ sort_aggregates, & &1.name)
with {:ok, query} <-
add_aggregates(query, ash_query.resource, aggregates),
{:ok, query} <-
Ash.DataLayer.sort(query, ash_query.sort, resource),
{:ok, query} <- maybe_filter(query, ash_query, opts),
{:ok, query} <-
Ash.DataLayer.limit(query, ash_query.limit, resource),
{:ok, query} <-
Ash.DataLayer.offset(query, ash_query.offset, resource) do
{:ok, Ash.DataLayer.set_context(resource, query, ash_query.context)}
else
{:error, error} -> {:error, error}
end
else
{:error, "Resource does not support reading"}
end
end
defp add_aggregates(query, resource, aggregates) do
Enum.reduce_while(aggregates, {:ok, query}, fn aggregate, {:ok, query} ->
case Ash.DataLayer.add_aggregate(query, aggregate, resource) do
{:ok, query} -> {:cont, {:ok, query}}
{:error, error} -> {:halt, {:error, error}}
end
end)
end
defp validate_sort(%{resource: resource, sort: sort} = query) do
case Sort.process(resource, sort, query.aggregates) do
{:ok, new_sort} -> %{query | sort: new_sort}
{:error, error} -> add_error(query, :sort, error)
end
end
defp add_error(query, key, message) do
query = to_query(query)
message =
if is_binary(message) do
"#{key}: #{message}"
else
message
end
%{
query
| errors: [Map.put(Ash.Error.to_ash_error(message), :path, key) | query.errors],
valid?: false
}
end
defp set_data_layer_query(query) do
case data_layer_query(query) do
{:ok, data_layer_query} -> %{query | data_layer_query: data_layer_query}
{:error, error} -> add_error(query, :data_layer_query, error)
end
end
defp validate_matching_query_and_continue(value, resource, key, path, relationship) do
%{destination: relationship_resource} = relationship
case value do
%__MODULE__{resource: query_resource} = destination_query
when query_resource != relationship_resource ->
[
InvalidQuery.exception(
resource: resource,
relationship: key,
query: destination_query,
side_load_path: Enum.reverse(path)
)
]
other ->
do_validate_side_load(relationship.destination, other, [key | path])
end
end
defp maybe_filter(query, %{filter: nil}, _) do
{:ok, query}
end
defp maybe_filter(query, ash_query, opts) do
case Ash.DataLayer.filter(query, ash_query.filter, ash_query.resource) do
{:ok, filtered} ->
if Keyword.get(opts, :only_validate_filter?, true) do
{:ok, query}
else
{:ok, filtered}
end
{:error, error} ->
{:error, error}
end
end
defp set_side_load_api(nil, _), do: nil
defp set_side_load_api([], _), do: []
defp set_side_load_api(%__MODULE__{} = query, api) do
set_api(query, api)
end
defp set_side_load_api(side_loads, api) do
Enum.map(side_loads, fn {key, further} ->
{key, set_side_load_api(further, api)}
end)
end
defp to_query(%__MODULE__{} = query), do: query
defp to_query(resource) do
resource
|> new()
|> Ash.DataLayer.transform_query()
end
defp merge_side_load([], right), do: sanitize_side_loads(right)
defp merge_side_load(left, []), do: sanitize_side_loads(left)
defp merge_side_load(
%__MODULE__{side_load: left_side_loads},
%__MODULE__{side_load: right_side_loads} = query
) do
%{query | side_load: merge_side_load(left_side_loads, right_side_loads)}
end
defp merge_side_load(%__MODULE__{} = query, right) when is_list(right) do
side_load(query, right)
end
defp merge_side_load(left, %Ash.Query{} = query) when is_list(left) do
side_load(query, left)
end
defp merge_side_load(left, right) when is_atom(left), do: merge_side_load([{left, []}], right)
defp merge_side_load(left, right) when is_atom(right), do: merge_side_load(left, [{right, []}])
defp merge_side_load(left, right) when is_list(left) and is_list(right) do
right
|> sanitize_side_loads()
|> Enum.reduce(sanitize_side_loads(left), fn {rel, rest}, acc ->
Keyword.update(acc, rel, rest, &merge_side_load(&1, rest))
end)
end
defp sanitize_side_loads(side_load) when is_atom(side_load), do: {side_load, []}
defp sanitize_side_loads(%Ash.Query{} = query) do
Map.update!(query, :side_load, &sanitize_side_loads/1)
end
defp sanitize_side_loads(side_loads) do
side_loads
|> List.wrap()
|> Enum.map(fn
{key, value} ->
{key, sanitize_side_loads(value)}
side_load_part ->
cond do
is_atom(side_load_part) -> {side_load_part, []}
is_list(side_load_part) -> sanitize_side_loads(side_load_part)
true -> side_load_part
end
end)
end
end
| 28.146602 | 110 | 0.605395 |
f7c8f0f448900903ec908787807a3652d9360258 | 2,362 | exs | Elixir | apps/api_web/test/api_web/plugs/rate_limiter_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | ["MIT"] | 62 | 2019-01-17T12:34:39.000Z | 2022-03-20T21:49:47.000Z | apps/api_web/test/api_web/plugs/rate_limiter_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | ["MIT"] | 375 | 2019-02-13T15:30:50.000Z | 2022-03-30T18:50:41.000Z | apps/api_web/test/api_web/plugs/rate_limiter_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | ["MIT"] | 14 | 2019-01-16T19:35:57.000Z | 2022-02-26T18:55:54.000Z |
defmodule ApiWeb.Plugs.RateLimiterTest do
use ApiWeb.ConnCase, async: false
@url "/stops/"
defp simulate_max_anon_requests do
# Wait for clear if we're close to clearing the rate limit.
# This avoids spurious test failures due to the limit clearing in the middle of our requests.
interval_ms = ApiWeb.config(:rate_limiter, :clear_interval)
remaining_ms = rem(System.system_time(:millisecond), interval_ms)
clear_ms = interval_ms - remaining_ms
if clear_ms < interval_ms / 2 do
Process.sleep(clear_ms + div(interval_ms, 10))
end
for _ <- 1..ApiWeb.config(:rate_limiter, :max_anon_per_interval) do
assert get(build_conn(), @url).status == 200
end
end
test "opts" do
assert ApiWeb.Plugs.RateLimiter.init([]) == []
end
describe "requests with no api key" do
setup %{conn: conn} do
conn = assign(conn, :api_key, nil)
ApiWeb.RateLimiter.force_clear()
{:ok, conn: conn}
end
test "assigns anonymous user", %{conn: conn} do
conn = get(conn, @url)
assert %ApiWeb.User{type: :anon} = conn.assigns.api_user
end
test "rate limits anonymous requests", %{conn: conn} do
simulate_max_anon_requests()
assert get(conn, @url).status == 429
end
end
describe "requests with valid key" do
setup %{conn: conn} do
ApiWeb.RateLimiter.force_clear()
{:ok, conn: conn}
end
test "does not rate limit requests at anon rate", %{conn: conn} do
simulate_max_anon_requests()
assert get(conn, @url).status == 200
end
end
describe "requests with invalid key" do
setup %{conn: conn} do
conn =
conn
|> assign(:api_key, "invalid")
|> bypass_through(ApiWeb.Router, :api)
{:ok, conn: conn}
end
test "forbids access", %{conn: conn} do
conn = get(conn, @url)
assert json_response(conn, :forbidden)["errors"]
end
end
describe "requests" do
setup %{conn: conn} do
ApiWeb.RateLimiter.force_clear()
{:ok, conn: conn}
end
test "have rate limiting headers in response", %{conn: conn} do
conn = get(conn, @url)
refute [] == get_resp_header(conn, "x-ratelimit-limit")
refute [] == get_resp_header(conn, "x-ratelimit-remaining")
refute [] == get_resp_header(conn, "x-ratelimit-reset")
end
end
end
| 27.465116 | 97 | 0.642676 |
f7c9257efac599b341896dac4ee93e9bf6f0d5a6 | 274 | exs | Elixir | apps/db_store/priv/repo/migrations/20180611225156_create_mail_header_names.exs | shymega/dialoguex | 974bd195780aea952497913537b3386fb6875977 | ["Apache-2.0"] | 2 | 2018-03-20T17:28:58.000Z | 2018-05-07T14:13:21.000Z | apps/db_store/priv/repo/migrations/20180611225156_create_mail_header_names.exs | shymega/dialoguex | 974bd195780aea952497913537b3386fb6875977 | ["Apache-2.0"] | 1 | 2018-03-20T17:54:56.000Z | 2018-03-31T16:27:15.000Z | apps/db_store/priv/repo/migrations/20180611225156_create_mail_header_names.exs | shymega/dialoguex | 974bd195780aea952497913537b3386fb6875977 | ["Apache-2.0"] | null | null | null |
defmodule DBStore.DB.Repo.Migrations.CreateMailHeaderNames do
use Ecto.Migration
def change do
create table(:header_names) do
add :header_name, :string, null: false
timestamps()
end
create unique_index(:header_names, [:header_name])
end
end
| 19.571429 | 61 | 0.715328 |
f7c93f7e307cb8c8a88d717e089cdb7fba997b56 | 154 | ex | Elixir | lib/elixir_with_gleam.ex | midas-framework/elixir_with_gleam | eb08c9a176b6d4478315443cf594150ec5b930bf | ["Apache-2.0"] | 34 | 2020-05-22T11:14:33.000Z | 2021-06-05T07:46:30.000Z | lib/elixir_with_gleam.ex | midas-framework/elixir_with_gleam | eb08c9a176b6d4478315443cf594150ec5b930bf | ["Apache-2.0"] | 1 | 2020-06-01T03:23:24.000Z | 2020-06-01T03:23:24.000Z | lib/elixir_with_gleam.ex | midas-framework/elixir_with_gleam | eb08c9a176b6d4478315443cf594150ec5b930bf | ["Apache-2.0"] | 2 | 2020-06-08T17:13:36.000Z | 2020-06-23T16:19:50.000Z |
defmodule ElixirWithGleam do
@moduledoc """
Documentation for `ElixirWithGleam`.
"""
def hello do
:hello.switch(:hello.greeting())
end
end
| 15.4 | 38 | 0.688312 |
f7c96e5c5f002f39396a558a7cdc027a433b8926 | 4,503 | ex | Elixir | lib/stripe.ex | gitlunar/stripity-stripe | a45079ec3fcc2c80f686297614f78c52c41c6b80 | ["BSD-3-Clause"] | 1 | 2020-05-03T15:41:49.000Z | 2020-05-03T15:41:49.000Z | lib/stripe.ex | gitlunar/stripity-stripe | a45079ec3fcc2c80f686297614f78c52c41c6b80 | ["BSD-3-Clause"] | null | null | null | lib/stripe.ex | gitlunar/stripity-stripe | a45079ec3fcc2c80f686297614f78c52c41c6b80 | ["BSD-3-Clause"] | 2 | 2016-08-23T21:06:49.000Z | 2020-02-13T16:04:16.000Z |
defmodule Stripe do
@moduledoc """
A HTTP client for Stripe.
This module contains the Application that you can use to perform
transactions on stripe API.
### Configuring
By default the STRIPE_SECRET_KEY environment variable is used to find
your API key for Stripe. You can also manually set your API key by
configuring the :stripity_stripe application. You can see the default
configuration in the default_config/0 private function at the bottom of
this file. The value for platform client id is optional.
config :stripity_stripe, secret_key: YOUR_STRIPE_KEY
config :stripity_stripe, platform_client_id: STRIPE_PLATFORM_CLIENT_ID
"""
# Let's build on top of HTTPoison
use HTTPoison.Base
defmodule MissingSecretKeyError do
defexception message: """
The secret_key setting is required so that we can report the
correct environment instance to Stripe. Please configure
secret_key in your config.exs and environment specific config files
to have accurate reporting of errors.
config :stripity_stripe, secret_key: YOUR_SECRET_KEY
"""
end
@doc """
Grabs STRIPE_SECRET_KEY from system ENV
Returns binary
"""
def config_or_env_key do
require_stripe_key()
end
@doc """
Grabs STRIPE_PLATFORM_CLIENT_ID from system ENV
Returns binary
"""
def config_or_env_platform_client_id do
Application.get_env(:stripity_stripe, :platform_client_id) || System.get_env "STRIPE_PLATFORM_CLIENT_ID"
end
@doc """
Creates the URL for our endpoint.
Args:
* endpoint - part of the API we're hitting
Returns string
"""
def process_url(endpoint) do
"https://api.stripe.com/v1/" <> endpoint
end
@doc """
Set our request headers for every request.
"""
def req_headers(key) do
Map.new
|> Map.put("Authorization", "Bearer #{key}")
|> Map.put("User-Agent", "Stripe/v1 stripity-stripe/1.4.0")
|> Map.put("Content-Type", "application/x-www-form-urlencoded")
end
@doc """
Decodes the JSON response body.
Args:
* body - string binary response
Returns map (raises on invalid JSON)
"""
def process_response_body(body) do
Poison.decode! body
end
@doc """
Boilerplate code to make requests with a given key.
Args:
* method - request method
* endpoint - string requested API endpoint
* key - stripe key passed to the api
* body - request body
* headers - request headers
* options - request options
Returns the decoded response body
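
For example, a minimal sketch (the endpoint and key shown are hypothetical):
Stripe.make_request_with_key(:get, "customers", "sk_test_key")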
"""
def make_request_with_key(method, endpoint, key, body \\ %{}, headers \\ %{}, options \\ []) do
rb = Stripe.URI.encode_query(body)
rh = req_headers(key)
|> Map.merge(headers)
|> Map.to_list
options = Keyword.merge(httpoison_request_options(), options)
{:ok, response} = request(method, endpoint, rb, rh, options)
response.body
end
@doc """
Boilerplate code to make requests with the key read from config or env. See config_or_env_key/0.
Args:
* method - request method
* endpoint - string requested API endpoint
* body - request body
* headers - request headers
* options - request options
Returns the decoded response body
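
For example, a minimal sketch (the endpoint is hypothetical):
Stripe.make_request(:get, "customers")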
"""
def make_request(method, endpoint, body \\ %{}, headers \\ %{}, options \\ []) do
make_request_with_key(method, endpoint, config_or_env_key(), body, headers, options)
end
@doc """
"""
def make_oauth_token_callback_request(body) do
rb = Stripe.URI.encode_query(body)
rh = req_headers(Stripe.config_or_env_key)
|> Map.to_list
options = httpoison_request_options()
HTTPoison.request(:post, "#{Stripe.Connect.base_url}oauth/token", rb, rh, options)
end
@doc """
"""
def make_oauth_deauthorize_request(stripe_user_id) do
rb = Stripe.URI.encode_query([
stripe_user_id: stripe_user_id,
client_id: Stripe.config_or_env_platform_client_id])
rh = req_headers( Stripe.config_or_env_key)
|> Map.to_list
options = httpoison_request_options()
HTTPoison.request(:post, "#{Stripe.Connect.base_url}oauth/deauthorize", rb, rh, options)
end
defp require_stripe_key do
case Application.get_env(:stripity_stripe, :secret_key, System.get_env "STRIPE_SECRET_KEY") || :not_found do
:not_found ->
raise MissingSecretKeyError
value -> value
end
end
defp httpoison_request_options() do
Application.get_env(:stripity_stripe, :httpoison_options, [])
end
end
| 30.02 | 112 | 0.698867 |
f7c972661b1e17325c5be331dc0c5090ebb1082b | 753 | exs | Elixir | test/publisher_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | ["MIT"] | 84 | 2018-03-20T08:19:10.000Z | 2022-01-30T07:40:56.000Z | test/publisher_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | ["MIT"] | 23 | 2018-03-29T15:15:56.000Z | 2019-12-04T14:53:57.000Z | test/publisher_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | ["MIT"] | 8 | 2018-07-03T18:18:27.000Z | 2022-03-08T14:04:09.000Z |
defmodule KaufmannEx.PublisherTest do
use ExUnit.Case
import Mock
setup do
Application.put_env(:kaufmann_ex, :transcoder,
default: KaufmannEx.Transcoder.SevenAvro,
json: KaufmannEx.Transcoder.Json
)
bypass = Bypass.open()
Application.put_env(:kaufmann_ex, :schema_registry_uri, "http://localhost:#{bypass.port}")
TestHelper.mock_get_schema(bypass, "event.test")
end
describe "publish/1" do
test "publishes an event to kafka" do
with_mock KafkaEx, [],
produce: fn _, _ -> nil end,
metadata: fn _ -> %{topic_metadatas: [%{topic: "rapids", partition_metadatas: [%{}]}]} end do
assert KaufmannEx.Publisher.publish("event.test", %{message: "hello"})
end
end
end
end
| 27.888889 | 101 | 0.667995 |
f7c974fc22778cc91cee62ce1ca0e2750d1b7f73 | 70,226 | ex | Elixir | testData/org/elixir_lang/beam/decompiler/asn1ct.ex | osbre/intellij-elixir | 36ced8c5e4f4cc873b6e21b5c011d42783b54817 | ["Apache-2.0"] | null | null | null | testData/org/elixir_lang/beam/decompiler/asn1ct.ex | osbre/intellij-elixir | 36ced8c5e4f4cc873b6e21b5c011d42783b54817 | ["Apache-2.0"] | null | null | null | testData/org/elixir_lang/beam/decompiler/asn1ct.ex | osbre/intellij-elixir | 36ced8c5e4f4cc873b6e21b5c011d42783b54817 | ["Apache-2.0"] | null | null | null |
# Source code recreated from a .beam file by IntelliJ Elixir
defmodule :asn1ct do
# Functions
def add_generated_refed_func(data) do
case is_function_generated(data) do
true ->
:ok
_ ->
l = get_gen_state_field(:gen_refed_funcs)
update_gen_state(:gen_refed_funcs, [data | l])
end
end
def add_tobe_refed_func(data) do
{name, sI, pattern} = (fn {n, si, p, _} ->
{n, si, p}
d ->
d
end).(data)
newData = case sI do
i when is_integer(i) ->
(fn d ->
d
end).(data)
_ ->
(fn {n, _, p} ->
{n, 0, p}
{n, _, p, t} ->
{n, 0, p, t}
end).(data)
end
l = get_gen_state_field(:generated_functions)
case generated_functions_member(get(:currmod), name, l, pattern) do
true ->
:ok
_ ->
add_once_tobe_refed_func(newData)
maybe_rename_function(:tobe_refed, name, pattern)
end
end
def compile(file), do: compile(file, [])
def compile(file, options0) when is_list(options0) do
try do
translate_options(options0)
catch
{:throw, error, _} ->
error
else
options1 ->
options2 = includes(file, options1)
includes = strip_includes(options2)
in_process(fn ->
compile_proc(file, includes, options2)
end)
end
end
def compile(file, _OutFile, options) do
case compile(file, make_erl_options(options)) do
{:error, _Reason} ->
:error
:ok ->
:ok
parseRes when is_tuple(parseRes) ->
:io.format('~p~n', [parseRes])
:ok
scanRes when is_list(scanRes) ->
:io.format('~p~n', [scanRes])
:ok
end
end
def compile_asn(file, outFile, options), do: compile(:lists.concat([file, '.asn']), outFile, options)
def compile_asn1(file, outFile, options), do: compile(:lists.concat([file, '.asn1']), outFile, options)
def compile_py(file, outFile, options), do: compile(:lists.concat([file, '.py']), outFile, options)
def current_sindex(), do: get_gen_state_field(:current_suffix_index)
def error(format, args, s) do
case is_error(s) do
true ->
:io.format(format, args)
false ->
:ok
end
end
def format_error({:write_error, file, reason}), do: :io_lib.format('writing output file ~s failed: ~s', [file, :file.format_error(reason)])
def generated_refed_func(name) do
l = get_gen_state_field(:tobe_refed_funcs)
newL = :lists.keydelete(name, 1, l)
update_gen_state(:tobe_refed_funcs, newL)
l2 = get_gen_state_field(:gen_refed_funcs)
update_gen_state(:gen_refed_funcs, [name | l2])
end
def get_bit_string_format(), do: get(:bit_string_format)
def get_gen_state_field(field) do
case read_config_data(:gen_state) do
:undefined ->
:undefined
genState when is_record(genState, :gen_state) ->
get_gen_state_field(genState, field)
err ->
exit({:error, {:asn1, {'false configuration file info', err}}})
end
end
def get_name_of_def(typedef(name: name)), do: name
def get_name_of_def(classdef(name: name)), do: name
def get_name_of_def(valuedef(name: name)), do: name
def get_name_of_def(ptypedef(name: name)), do: name
def get_name_of_def(pvaluedef(name: name)), do: name
def get_name_of_def(pvaluesetdef(name: name)), do: name
def get_name_of_def(pobjectdef(name: name)), do: name
def get_name_of_def(pobjectsetdef(name: name)), do: name
def get_name_of_def(_), do: :undefined
def get_pos_of_def(typedef(pos: pos)), do: pos
def get_pos_of_def(classdef(pos: pos)), do: pos
def get_pos_of_def(valuedef(pos: pos)), do: pos
def get_pos_of_def(ptypedef(pos: pos)), do: pos
def get_pos_of_def(pvaluedef(pos: pos)), do: pos
def get_pos_of_def(pvaluesetdef(pos: pos)), do: pos
def get_pos_of_def(pobjectdef(pos: pos)), do: pos
def get_pos_of_def(pobjectsetdef(pos: pos)), do: pos
def get_pos_of_def(unquote(:"Externaltypereference")(pos: pos)), do: pos
def get_pos_of_def(unquote(:"Externalvaluereference")(pos: pos)), do: pos
def get_pos_of_def(_), do: :undefined
def get_tobe_refed_func(name) do
case get_gen_state_field(:tobe_refed_funcs) do
l when is_list(l) ->
case :lists.keysearch(name, 1, l) do
{_, element} ->
element
_ ->
:undefined
end
_ ->
:undefined
end
end
def is_function_generated(name) do
case get_gen_state_field(:gen_refed_funcs) do
l when is_list(l) ->
:lists.member(name, l)
_ ->
false
end
end
def maybe_rename_function(mode, name, pattern), do: ...
def maybe_saved_sindex(name, pattern) do
case get_gen_state_field(:generated_functions) do
[] ->
false
l ->
case generated_functions_member(get(:currmod), name, l) do
true ->
l2 = generated_functions_filter(get(:currmod), name, l)
case :lists.keysearch(pattern, 3, l2) do
{:value, {_, i, _}} ->
i
_ ->
length(l2)
end
_ ->
false
end
end
end
def module_info() do
# body not decompiled
end
def module_info(p0) do
# body not decompiled
end
def next_refed_func() do
case get_gen_state_field(:tobe_refed_funcs) do
[] ->
[]
[h | t] ->
update_gen_state(:tobe_refed_funcs, t)
h
end
end
def parse_and_save(module, s) do
options = state(s, :options)
sourceDir = state(s, :sourcedir)
includes = for {:i, i} <- options do
i
end
erule = state(s, :erule)
maps = :lists.member(:maps, options)
case get_input_file(module, [sourceDir | includes]) do
{:file, suffixedASN1source} ->
mtime = :filelib.last_modified(suffixedASN1source)
case :asn1_db.dbload(module, erule, maps, mtime) do
:ok ->
:ok
:error ->
parse_and_save1(s, suffixedASN1source, options)
end
err when not maps ->
case :asn1_db.dbload(module) do
:ok ->
warning('could not do a consistency check of the ~p file: no asn1 source file was found.~n', [:lists.concat([module, '.asn1db'])], options)
:error ->
:ok
end
{:error, {:asn1, :input_file_error, err}}
err ->
{:error, {:asn1, :input_file_error, err}}
end
end
def partial_inc_dec_toptype([t | _]) when is_atom(t), do: t
def partial_inc_dec_toptype([{t, _} | _]) when is_atom(t), do: t
def partial_inc_dec_toptype([l | _]) when is_list(l), do: partial_inc_dec_toptype(l)
def partial_inc_dec_toptype(_), do: throw({:error, {'no top type found for partial incomplete decode'}})
def read_config_data(key) do
case :asn1ct_table.exists(:asn1_general) do
false ->
:undefined
true ->
case :asn1ct_table.lookup(:asn1_general, {:asn1_config, key}) do
[{_, data}] ->
data
err ->
err
end
end
end
def reset_gen_state(), do: save_gen_state(gen_state())
def set_current_sindex(index), do: update_gen_state(:current_suffix_index, index)
def step_in_constructed() do
case get_gen_state_field(:namelist) do
[l] when is_list(l) ->
update_gen_state(:namelist, l)
_ ->
:ok
end
end
def test(module), do: test_module(module, [])
def test(module, [] = options), do: test_module(module, options)
def test(module, [{:i, _} | _] = options), do: test_module(module, options)
def test(module, type), do: test_type(module, type, [])
def test(module, type, [] = options), do: test_type(module, type, options)
def test(module, type, [{:i, _} | _] = options), do: test_type(module, type, options)
def test(module, type, value), do: test_value(module, type, value)
def unset_pos_mod(def) when is_record(def, :typedef), do: typedef(def, pos: :undefined)
def unset_pos_mod(def) when is_record(def, :classdef), do: classdef(def, pos: :undefined)
def unset_pos_mod(def) when is_record(def, :valuedef), do: valuedef(def, pos: :undefined, module: :undefined)
def unset_pos_mod(def) when is_record(def, :ptypedef), do: ptypedef(def, pos: :undefined)
def unset_pos_mod(def) when is_record(def, :pvaluedef), do: pvaluedef(def, pos: :undefined)
def unset_pos_mod(def) when is_record(def, :pvaluesetdef), do: pvaluesetdef(def, pos: :undefined)
def unset_pos_mod(def) when is_record(def, :pobjectdef), do: pobjectdef(def, pos: :undefined)
def unset_pos_mod(def) when is_record(def, :pobjectsetdef), do: pobjectsetdef(def, pos: :undefined)
def unset_pos_mod(unquote(:"ComponentType")() = def), do: unquote(:"ComponentType")(def, pos: :undefined)
def unset_pos_mod(def), do: def
def update_gen_state(field, data) do
case get_gen_state() do
state when is_record(state, :gen_state) ->
update_gen_state(field, state, data)
_ ->
exit({:error, {:asn1, {:internal, 'tried to update nonexistent gen_state', field, data}}})
end
end
def update_namelist(name) do
case get_gen_state_field(:namelist) do
[name, rest] ->
update_gen_state(:namelist, rest)
[name | rest] ->
update_gen_state(:namelist, rest)
[{name, list}] when is_list(list) ->
update_gen_state(:namelist, list)
[{name, atom} | rest] when is_atom(atom) ->
update_gen_state(:namelist, rest)
other ->
other
end
end
def use_legacy_types(), do: get(:use_legacy_erlang_types)
def value(module, type), do: value(module, type, [])
def value(module, type, includes) do
in_process(fn ->
start(strip_includes(includes))
case check(module, includes) do
{:ok, _NewTypes} ->
get_value(module, type)
error ->
error
end
end)
end
def verbose(format, args, s) do
case is_verbose(s) do
true ->
:io.format(format, args)
false ->
:ok
end
end
def vsn(), do: '5.0.15.1'
def warning(format, args, s) do
case is_warning(s) do
true ->
:io.format('Warning: ' ++ format, args)
false ->
:ok
end
end
def warning(format, args, s, reason) do
case {is_werr(s), is_error(s), is_warning(s)} do
{true, true, _} ->
:io.format(format, args)
throw({:error, reason})
{false, _, true} ->
:io.format(format, args)
_ ->
:ok
end
end
# Private Functions
defp unquote(:"-add_once_tobe_refed_func/1-fun-0-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-add_tobe_refed_func/1-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-add_tobe_refed_func/1-fun-1-")(p0) do
# body not decompiled
end
defp unquote(:"-add_tobe_refed_func/1-fun-2-")(p0) do
# body not decompiled
end
defp unquote(:"-clean_errors/1-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-common_exports/1-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-common_exports/1-fun-1-")(p0) do
# body not decompiled
end
defp unquote(:"-compile/2-fun-0-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-compile_set/3-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-create_partial_decode_gen_info/2-lc$^0/1-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-create_pdec_command/4-fun-0-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-delete_double_of_symbol1/2-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-delete_double_of_symbol1/2-fun-1-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-delete_double_of_symbol1/2-fun-2-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-discover_dupl_in_mods/5-fun-0-")(p0, p1, p2, p3, p4) do
# body not decompiled
end
defp unquote(:"-exit_if_nameduplicate2/2-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-export_all/1-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-export_all/1-fun-1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.abs_listing/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.check_pass/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.compile_pass/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.exit_if_nameduplicate/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.generate_pass/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.merge_pass/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.parse_listing/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.parse_pass/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.run_tc/3-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-fun.save_pass/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.scan_pass/1-")(p0) do
# body not decompiled
end
defp unquote(:"-fun.set_scan_parse_pass/1-")(p0) do
# body not decompiled
end
defp unquote(:"-generated_functions_filter/3-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-generated_functions_filter/3-fun-1-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-get_rule/1-lc$^0/1-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-get_rule/1-lc$^1/1-1-")(p0, p1, p2, p3) do
# body not decompiled
end
defp unquote(:"-in_process/1-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-include_append/2-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-include_prepend/2-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-make_erl_options/1-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-make_erl_options/1-fun-1-")(p0) do
# body not decompiled
end
defp unquote(:"-merge_modules/2-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-merge_modules/2-fun-1-")(p0) do
# body not decompiled
end
defp unquote(:"-merge_modules/2-fun-2-")(p0) do
# body not decompiled
end
defp unquote(:"-merge_symbols_from_module/2-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-merge_symbols_from_module/2-fun-1-")(p0) do
# body not decompiled
end
defp unquote(:"-parse_and_save/2-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-pretty2/2-fun-0-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-pretty2/2-fun-1-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-pretty2/2-fun-2-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-pretty2/2-fun-3-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-pretty2/2-fun-4-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-pretty2/2-fun-5-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-print_structured_errors/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-read_config_file/2-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-remove_asn_flags/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-run_passes/2-fun-0-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-save_automatic_tagged_types/1-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-save_imports/1-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-save_imports/1-fun-1-")(p0) do
# body not decompiled
end
defp unquote(:"-strip_includes/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-test_module/2-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-test_type/3-fun-0-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-test_value/3-fun-0-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-type_check/1-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-value/3-fun-0-")(p0, p1, p2) do
# body not decompiled
end
def abs_listing(st(code: {m, _}, outfile: outFile)) do
pretty2(module(m, :name), outFile ++ '.abs')
:done
end
def add_generated_function(data) do
l = get_gen_state_field(:generated_functions)
update_gen_state(:generated_functions, [data | l])
end
def add_once_tobe_refed_func(data) do
tRFL = get_gen_state_field(:tobe_refed_funcs)
{name, index} = {element(1, data), element(2, data)}
case :lists.filter(fn {n, i, _} when n == name and i == index ->
true
{n, i, _, _} when n == name and i == index ->
true
_ ->
false
end, tRFL) do
[] ->
update_gen_state(:tobe_refed_funcs, [data | tRFL])
_ ->
:ok
end
end
def anonymous_dec_command(:undec, :"OPTIONAL"), do: :opt_undec
def anonymous_dec_command(command, _), do: command
def check(module, includes) do
case :asn1_db.dbload(module) do
:error ->
{:error, :asn1db_missing_or_out_of_date}
:ok ->
m = :asn1_db.dbget(module, :"MODULE")
typeOrVal = module(m, :typeorval)
state = state(mname: module(m, :name), module: module(m, typeorval: []), options: includes)
case :asn1ct_check.check(state, typeOrVal) do
{:ok, {newTypes, _, _, _, _, _}, _} ->
{:ok, newTypes}
{:error, reason} ->
{:error, reason}
end
end
end
def check_maps_option(gen(pack: :map)) do
case get_bit_string_format() do
:bitstring ->
:ok
_ ->
message1 = 'The \'maps\' option must not be combined with \'compact_bit_string\' or \'legacy_bit_string\''
exit({:error, {:asn1, message1}})
end
case use_legacy_types() do
false ->
:ok
true ->
message2 = 'The \'maps\' option must not be combined with \'legacy_erlang_types\''
exit({:error, {:asn1, message2}})
end
end
def check_maps_option(gen()), do: :ok
def check_pass(st(code: m, file: file, includes: includes, erule: erule, dbfile: dbFile, opts: opts, inputmodules: inputModules) = st) do
start(includes)
case :asn1ct_check.storeindb(state(erule: erule, options: opts), m) do
:ok ->
module = :asn1_db.dbget(module(m, :name), :"MODULE")
state = state(mname: module(module, :name), module: module(module, typeorval: []), erule: erule, inputmodules: inputModules, options: opts, sourcedir: :filename.dirname(file))
case :asn1ct_check.check(state, module(module, :typeorval)) do
{:error, reason} ->
{:error, st(st, error: reason)}
{:ok, newTypeOrVal, genTypeOrVal} ->
newM = module(module, typeorval: newTypeOrVal)
:asn1_db.dbput(module(newM, :name), :"MODULE", newM)
:asn1_db.dbsave(dbFile, module(m, :name))
verbose('--~p--~n', [{:generated, dbFile}], opts)
{:ok, st(st, code: {m, genTypeOrVal})}
end
{:error, reason} ->
{:error, st(st, error: reason)}
end
end
def check_tagdefault(modList) do
case have_same_tagdefault(modList) do
{true, tagDefault} ->
tagDefault
{false, tagDefault} ->
:asn1ct_table.new(:automatic_tags)
save_automatic_tagged_types(modList)
tagDefault
end
end
def clean_errors(errors) when is_list(errors) do
f = fn {:structured_error, _, _, _} ->
true
_ ->
false
end
{structured0, adHoc} = :lists.partition(f, errors)
structured = :lists.sort(structured0)
{structured, structured ++ adHoc}
end
def clean_errors(adHoc), do: {[], adHoc}
def cleanup_bit_string_format(), do: erase(:bit_string_format)
def common_exports(moduleList) do
case :lists.filter(fn x ->
element(2, module(x, :exports)) != :all
end, moduleList) do
[] ->
{:exports, :all}
modsWithExpList ->
cExports1 = :lists.append(:lists.map(fn x ->
element(2, module(x, :exports))
end, modsWithExpList))
cExports2 = export_all(:lists.subtract(moduleList, modsWithExpList))
{:exports, cExports1 ++ cExports2}
end
end
def common_imports(iList, inputMNameL) do
setExternalImportsList = remove_in_set_imports(iList, inputMNameL, [])
{:imports, remove_import_doubles(setExternalImportsList)}
end
def common_passes(), do: [{:iff, :parse, {:pass, :parse_listing, &parse_listing/1}}, {:pass, :check, &check_pass/1}, {:iff, :abs, {:pass, :abs_listing, &abs_listing/1}}, {:pass, :generate, &generate_pass/1}, {:unless, :noobj, {:pass, :compile, &compile_pass/1}}]
def compare_defs(d1, d2), do: compare_defs2(unset_pos_mod(d1), unset_pos_mod(d2))
def compare_defs2(d, ^d), do: :equal
def compare_defs2(_, _), do: :not_equal
def compile1(file, st(opts: opts) = st0) do
compiler_verbose(file, opts)
passes = single_passes()
base = :filename.rootname(:filename.basename(file))
outFile = outfile(base, "", opts)
dbFile = outfile(base, 'asn1db', opts)
st1 = st(st0, file: file, outfile: outFile, dbfile: dbFile)
run_passes(passes, st1)
end
def compile_pass(st(outfile: outFile, opts: opts0) = st) do
:asn1_db.dbstop()
:asn1ct_table.delete([:renamed_defs, :original_imports, :automatic_tags])
opts = remove_asn_flags(opts0)
case :c.c(outFile, opts) do
{:ok, _Module} ->
{:ok, st}
_ ->
{:error, st}
end
end
def compile_proc(file, includes, options) do
erule = get_rule(options)
st = st(opts: options, includes: includes, erule: erule)
case input_file_type(file, includes) do
{:single_file, suffixedFile} ->
compile1(suffixedFile, st)
{:multiple_files_file, setBase, fileName} ->
case get_file_list(fileName, includes) do
fileList when is_list(fileList) ->
compile_set(setBase, fileList, st)
err ->
err
end
err = {:input_file_error, _Reason} ->
{:error, err}
end
end
def compile_set(setBase, files, st(opts: opts) = st0) do
compiler_verbose(files, opts)
outFile = outfile(setBase, "", opts)
dbFile = outfile(setBase, 'asn1db', opts)
inputModules = for f0 <- files do
(f1 = :filename.basename(f0); f = :filename.rootname(f1); list_to_atom(f))
end
st = st(st0, file: setBase, files: files, outfile: outFile, dbfile: dbFile, inputmodules: inputModules)
passes = set_passes()
run_passes(passes, st)
end
def compiler_verbose(what, opts) do
verbose('Erlang ASN.1 compiler ~s\n', ['5.0.15.1'], opts)
verbose('Compiling: ~p\n', [what], opts)
verbose('Options: ~p\n', [opts], opts)
end
def concat_sequential(l = [a, b], acc) when is_atom(a) and is_binary(b), do: [l | acc]
def concat_sequential(l, acc) when is_list(l), do: concat_sequential1(:lists.reverse(l), acc)
def concat_sequential(a, acc), do: [a | acc]
def concat_sequential1([], acc), do: acc
def concat_sequential1([[]], acc), do: acc
def concat_sequential1([el | restEl], acc) when is_list(el), do: concat_sequential1(restEl, [el | acc])
def concat_sequential1([:mandatory | restEl], acc), do: concat_sequential1(restEl, [:mandatory | acc])
def concat_sequential1(l, acc), do: [l | acc]
def concat_tags(ts, acc) do
case many_tags(ts) do
true when is_list(ts) ->
:lists.reverse(ts) ++ acc
true ->
[ts | acc]
false ->
[ts | acc]
end
end
def create_partial_decode_gen_info(modName, {^modName, typeLists}) do
for tL <- typeLists do
create_partial_decode_gen_info1(modName, tL)
end
end
def create_partial_decode_gen_info(_, []), do: []
def create_partial_decode_gen_info(_M1, {m2, _}), do: throw({:error, {'wrong module name in asn1 config file', m2}})
def create_partial_decode_gen_info1(modName, {funcName, typeList}) do
case typeList do
[topType | rest] ->
case :asn1_db.dbget(modName, topType) do
typedef(typespec: tS) ->
tagCommand = get_tag_command(tS, :choosen)
ret = create_pdec_command(modName, get_components(type(tS, :def)), rest, concat_tags(tagCommand, []))
{funcName, ret}
_ ->
throw({:error, {'wrong type list in asn1 config file', typeList}})
end
_ ->
[]
end
end
def create_partial_decode_gen_info1(_, _), do: :ok
def create_partial_inc_decode_gen_info(modName, {mod, [{name, l} | ls]}) when is_list(l) do
topTypeName = partial_inc_dec_toptype(l)
[{name, topTypeName, create_partial_inc_decode_gen_info1(modName, topTypeName, {mod, l})} | create_partial_inc_decode_gen_info(modName, {mod, ls})]
end
def create_partial_inc_decode_gen_info(_, {_, []}), do: []
def create_partial_inc_decode_gen_info(_, []), do: []
def create_partial_inc_decode_gen_info1(modName, topTypeName, {^modName, [_TopType | rest]}) do
case :asn1_db.dbget(modName, topTypeName) do
typedef(typespec: tS) ->
tagCommand = get_tag_command(tS, :mandatory, :mandatory)
create_pdec_inc_command(modName, get_components(type(tS, :def)), rest, [tagCommand])
_ ->
throw({:error, {'wrong type list in asn1 config file', topTypeName}})
end
end
def create_partial_inc_decode_gen_info1(m1, _, {m2, _}) when m1 != m2, do: throw({:error, {'wrong module name in asn1 config file', m2}})
def create_partial_inc_decode_gen_info1(_, _, tNL), do: throw({:error, {'wrong type list in asn1 config file', tNL}})
def create_pdec_command(_ModName, _, [], acc) do
remove_empty_lists = fn [[] | l], res, fun ->
fun.(l, res, fun)
[], res, _ ->
res
[h | l], res, fun ->
fun.(l, [h | res], fun)
end
remove_empty_lists.(acc, [], remove_empty_lists)
end
def create_pdec_command(modName, [unquote(:"ComponentType")(name: c1, typespec: tS) | _Comps], [^c1 | cs], acc) do
tagCommand = get_tag_command(tS, :choosen)
create_pdec_command(modName, get_components(type(tS, :def)), cs, concat_tags(tagCommand, acc))
end
def create_pdec_command(modName, [unquote(:"ComponentType")(typespec: tS, prop: prop) | comps], [c2 | cs], acc) do
tagCommand = case prop do
:mandatory ->
get_tag_command(tS, :skip)
_ ->
get_tag_command(tS, :skip_optional)
end
create_pdec_command(modName, comps, [c2 | cs], concat_tags(tagCommand, acc))
end
def create_pdec_command(modName, {:"CHOICE", [comp = unquote(:"ComponentType")(name: c1) | _]}, tNL = [c1 | _Cs], acc), do: create_pdec_command(modName, [comp], tNL, acc)
def create_pdec_command(modName, {:"CHOICE", [unquote(:"ComponentType")() | comps]}, tNL, acc), do: create_pdec_command(modName, {:"CHOICE", comps}, tNL, acc)
def create_pdec_command(modName, {:"CHOICE", {cs1, cs2}}, tNL, acc) when is_list(cs1) and is_list(cs2), do: create_pdec_command(modName, {:"CHOICE", cs1 ++ cs2}, tNL, acc)
def create_pdec_command(modName, unquote(:"Externaltypereference")(module: m, type: c1), typeNameList, acc) do
type(def: def) = get_referenced_type(m, c1)
create_pdec_command(modName, get_components(def), typeNameList, acc)
end
def create_pdec_command(modName, tS = type(def: def), [c1 | cs], acc) do
case c1 do
[1] ->
tagCommand = get_tag_command(tS, :choosen)
create_pdec_command(modName, def, cs, concat_tags(tagCommand, acc))
[n] when is_integer(n) ->
tagCommand = get_tag_command(tS, :skip)
create_pdec_command(modName, def, [[n - 1] | cs], concat_tags(tagCommand, acc))
err ->
throw({:error, {'unexpected error when creating partial decode command', err}})
end
end
def create_pdec_command(_, _, tNL, _), do: throw({:error, {'unexpected error when creating partial decode command', tNL}})
def create_pdec_inc_command(_ModName, _, [], acc), do: :lists.reverse(acc)
def create_pdec_inc_command(modName, {comps1, comps2}, tNL, acc) when is_list(comps1) and is_list(comps2), do: create_pdec_inc_command(modName, comps1 ++ comps2, tNL, acc)
def create_pdec_inc_command(modN, clist, [cL | _Rest], [[]]) when is_list(cL), do: create_pdec_inc_command(modN, clist, cL, [])
def create_pdec_inc_command(modN, clist, [cL | _Rest], acc) when is_list(cL) do
innerDirectives = create_pdec_inc_command(modN, clist, cL, [])
:lists.reverse([innerDirectives | acc])
end
def create_pdec_inc_command(modName, cList = [unquote(:"ComponentType")(name: name, typespec: tS, prop: prop) | comps], tNL = [c1 | cs], acc), do: ...
def create_pdec_inc_command(modName, {:"CHOICE", [unquote(:"ComponentType")(name: c1, typespec: tS, prop: prop) | comps]}, [{^c1, directive} | rest], acc), do: ...
def create_pdec_inc_command(modName, {:"CHOICE", [unquote(:"ComponentType")(typespec: tS, prop: prop) | comps]}, tNL, acc) do
tagCommand = get_tag_command(tS, :alt, prop)
create_pdec_inc_command(modName, {:"CHOICE", comps}, tNL, concat_sequential(tagCommand, acc))
end
def create_pdec_inc_command(m, {:"CHOICE", {cs1, cs2}}, tNL, acc) when is_list(cs1) and is_list(cs2), do: create_pdec_inc_command(m, {:"CHOICE", cs1 ++ cs2}, tNL, acc)
def create_pdec_inc_command(modName, unquote(:"Externaltypereference")(module: m, type: name), tNL, acc) do
type(def: def) = get_referenced_type(m, name)
create_pdec_inc_command(modName, get_components(def), tNL, acc)
end
def create_pdec_inc_command(_, _, tNL, _), do: throw({:error, {'unexpected error when creating partial decode command', tNL}})
def delete_double_of_symbol([i | is], acc) do
symL = unquote(:"SymbolsFromModule")(i, :symbols)
newSymL = delete_double_of_symbol1(symL, [])
delete_double_of_symbol(is, [unquote(:"SymbolsFromModule")(i, symbols: newSymL) | acc])
end
def delete_double_of_symbol([], acc), do: acc
def delete_double_of_symbol1([tRef = unquote(:"Externaltypereference")(type: trefName) | rest], acc) do
newRest = :lists.filter(fn s ->
case s do
unquote(:"Externaltypereference")(type: trefName) ->
false
_ ->
true
end
end, rest)
delete_double_of_symbol1(newRest, [tRef | acc])
end
def delete_double_of_symbol1([vRef = unquote(:"Externalvaluereference")(value: vName) | rest], acc) do
newRest = :lists.filter(fn s ->
case s do
unquote(:"Externalvaluereference")(value: vName) ->
false
_ ->
true
end
end, rest)
delete_double_of_symbol1(newRest, [vRef | acc])
end
def delete_double_of_symbol1([tRef = {unquote(:"Externaltypereference")(type: mRef), unquote(:"Externaltypereference")(type: tRef)} | rest], acc) do
newRest = :lists.filter(fn s ->
case s do
{unquote(:"Externaltypereference")(type: mRef), unquote(:"Externaltypereference")(type: tRef)} ->
false
_ ->
true
end
end, rest)
delete_double_of_symbol1(newRest, [tRef | acc])
end
def delete_double_of_symbol1([], acc), do: acc
def discover_dupl_in_mods(name, def, [m = module(name: n, typeorval: torV) | ms], acc, anyRenamed) do
fun = fn t, renamedOrDupl ->
case {get_name_of_def(t), compare_defs(def, t)} do
{name, :not_equal} ->
newT = set_name_of_def(n, name, t)
warn_renamed_def(n, get_name_of_def(newT), name)
:asn1ct_table.insert(:renamed_defs, {get_name_of_def(newT), name, n})
{newT, 1 ||| renamedOrDupl}
{name, :equal} ->
warn_deleted_def(n, name)
{[], 2 ||| renamedOrDupl}
_ ->
{t, renamedOrDupl}
end
end
{newTorV, newAnyRenamed} = :lists.mapfoldl(fun, anyRenamed, torV)
discover_dupl_in_mods(name, def, ms, [module(m, typeorval: :lists.flatten(newTorV)) | acc], newAnyRenamed)
end
def discover_dupl_in_mods(_, _, [], acc, anyRenamed), do: {acc, anyRenamed}
def ensure_ext(moduleName, ext) do
name = :filename.join([moduleName])
case :filename.extension(name) do
ext ->
name
_ ->
name ++ ext
end
end
def exit_if_nameduplicate(module(typeorval: torV)), do: exit_if_nameduplicate(torV)
def exit_if_nameduplicate([]), do: :ok
def exit_if_nameduplicate([def | rest]) do
name = get_name_of_def(def)
exit_if_nameduplicate2(name, rest)
exit_if_nameduplicate(rest)
end
def exit_if_nameduplicate2(name, rest) do
pred = fn def ->
case get_name_of_def(def) do
name ->
true
_ ->
false
end
end
case :lists.any(pred, rest) do
true ->
throw({:error, {'more than one definition with same name', name}})
_ ->
:ok
end
end
def export_all([]), do: []
def export_all(moduleList), do: ...
def finished_warn_prints(), do: put(:warn_duplicate_defs, :undefined)
def generate({m, codeTuple}, outFile, encodingRule, options) do
{types, values, ptypes, classes, objects, objectSets} = codeTuple
code = abst(name: module(m, :name), types: types, values: values, ptypes: ptypes, classes: classes, objects: objects, objsets: objectSets)
setup_bit_string_format(options)
setup_legacy_erlang_types(options)
:asn1ct_table.new(:check_functions)
gen = init_gen_record(encodingRule, options)
check_maps_option(gen)
try do
specialized_decode_prepare(gen, m)
catch
{:throw, {:error, reason}, _} ->
warning('Error in configuration file: ~n~p~n', [reason], options, 'Error in configuration file')
end
:asn1ct_gen.pgen(outFile, gen, code)
cleanup_bit_string_format()
erase(:tlv_format)
erase(:class_default_type)
:asn1ct_table.delete(:check_functions)
:ok
end
def generate_pass(st(code: code, outfile: outFile, erule: erule, opts: opts) = st0) do
st = st(st0, code: :undefined)
generate(code, outFile, erule, opts)
{:ok, st}
end
def generated_functions_filter(_, name, l) when is_atom(name) or is_list(name) do
:lists.filter(fn {n, _, _} when n == name ->
true
_ ->
false
end, l)
end
def generated_functions_filter(m, unquote(:"Externaltypereference")(module: ^m, type: name), l) do
removeTType = fn {n, i, [n, p]} when n == name ->
{n, i, p}
{unquote(:"Externaltypereference")(module: m1, type: n), i, p} when m1 == m ->
{n, i, p}
p ->
p
end
l2 = :lists.map(removeTType, l)
generated_functions_filter(m, name, l2)
end
def generated_functions_member(_M, name, [{^name, _, _} | _]), do: true
def generated_functions_member(m, unquote(:"Externaltypereference")(module: ^m, type: t), [{unquote(:"Externaltypereference")(module: ^m, type: ^t), _, _} | _]), do: true
def generated_functions_member(m, unquote(:"Externaltypereference")(module: ^m, type: name), [{^name, _, _} | _]), do: true
def generated_functions_member(m, name, [_ | t]), do: generated_functions_member(m, name, t)
def generated_functions_member(_, _, []), do: false
def generated_functions_member(m, name, l, pattern) do
case generated_functions_member(m, name, l) do
true ->
l2 = generated_functions_filter(m, name, l)
case :lists.keysearch(pattern, 3, l2) do
{:value, _} ->
true
_ ->
false
end
_ ->
false
end
end
def get_components(unquote(:"SEQUENCE")(components: {c1, c2})) when is_list(c1) and is_list(c2), do: c1 ++ c2
def get_components(unquote(:"SEQUENCE")(components: components)), do: components
def get_components(unquote(:"SET")(components: {c1, c2})) when is_list(c1) and is_list(c2), do: c1 ++ c2
def get_components(unquote(:"SET")(components: components)), do: components
def get_components({:"SEQUENCE OF", components}), do: components
def get_components({:"SET OF", components}), do: components
def get_components(definition), do: definition
def get_config_info(cfgList, infoType) do
case :lists.keysearch(infoType, 1, cfgList) do
    {:value, {^infoType, value}} ->
value
false ->
[]
end
end
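# Sketch of expected behaviour (config terms assumed for illustration):
#
#     get_config_info([{:selective_decode, conf}], :selective_decode)  #=> conf
#     get_config_info([], :selective_decode)                           #=> []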
def get_file_list(file, includes) do
case :file.open(file, [:read]) do
{:error, reason} ->
{:error, {file, :file.format_error(reason)}}
{:ok, stream} ->
get_file_list1(stream, :filename.dirname(file), includes, [])
end
end
def get_file_list1(stream, dir, includes, acc) do
ret = :io.get_line(stream, :"")
case ret do
:eof ->
:ok = :file.close(stream)
:lists.reverse(acc)
fileName ->
suffixedNameList = try do
input_file_type(:filename.join([dir, :lists.delete(?\n, fileName)]), includes)
catch
error -> error
end
|> case do
{:empty_name, []} ->
[]
{:single_file, name} ->
[name]
{:multiple_files_file, _, name} ->
get_file_list(name, includes)
_Err ->
[]
end
get_file_list1(stream, dir, includes, suffixedNameList ++ acc)
end
end
def get_gen_state(), do: read_config_data(:gen_state)
def get_gen_state_field(gen_state(active: active), :active), do: active
def get_gen_state_field(_, :active), do: false
def get_gen_state_field(gS, :prefix), do: gen_state(gS, :prefix)
def get_gen_state_field(gS, :inc_tag_pattern), do: gen_state(gS, :inc_tag_pattern)
def get_gen_state_field(gS, :tag_pattern), do: gen_state(gS, :tag_pattern)
def get_gen_state_field(gS, :inc_type_pattern), do: gen_state(gS, :inc_type_pattern)
def get_gen_state_field(gS, :type_pattern), do: gen_state(gS, :type_pattern)
def get_gen_state_field(gS, :func_name), do: gen_state(gS, :func_name)
def get_gen_state_field(gS, :namelist), do: gen_state(gS, :namelist)
def get_gen_state_field(gS, :tobe_refed_funcs), do: gen_state(gS, :tobe_refed_funcs)
def get_gen_state_field(gS, :gen_refed_funcs), do: gen_state(gS, :gen_refed_funcs)
def get_gen_state_field(gS, :generated_functions), do: gen_state(gS, :generated_functions)
def get_gen_state_field(gS, :suffix_index), do: gen_state(gS, :suffix_index)
def get_gen_state_field(gS, :current_suffix_index), do: gen_state(gS, :current_suffix_index)
def get_input_file(module, []), do: module
def get_input_file(module, [i | includes]) do
try do
input_file_type(:filename.join([i, module]))
catch
error -> error
end
|> case do
{:single_file, fileName} ->
{:file, fileName}
_ ->
get_input_file(module, includes)
end
end
def get_referenced_type(m, name) do
case :asn1_db.dbget(m, name) do
typedef(typespec: tS) ->
case tS do
type(def: unquote(:"Externaltypereference")(module: m2, type: name2)) ->
get_referenced_type(m2, name2)
type() ->
tS
_ ->
throw({:error, {'unexpected element when fetching referenced type', tS}})
end
t ->
throw({:error, {'unexpected element when fetching referenced type', t}})
end
end
def get_rule(options) do
for rule <- [:ber, :per, :uper, :jer], opt <- options, rule === opt do
rule
end
|> case do
[rule] ->
rule
[rule | _] ->
rule
[] ->
:ber
end
end
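# Selection sketch (option lists assumed): the first rule of
# [:ber, :per, :uper, :jer] present in the options wins; :ber is the fallback.
#
#     get_rule([:verbose, :uper])  #=> :uper
#     get_rule([:warnings])        #=> :ber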
def get_tag_command(type(tag: []), _), do: []
def get_tag_command(type(), :skip), do: :skip
def get_tag_command(type(tag: tags), :skip_optional) do
tag = hd(tags)
[:skip_optional, encode_tag_val(decode_class(tag(tag, :class)), tag(tag, :form), tag(tag, :number))]
end
def get_tag_command(type(tag: [tag]), command), do: [command, encode_tag_val(decode_class(tag(tag, :class)), tag(tag, :form), tag(tag, :number))]
def get_tag_command(t = type(tag: [tag | tags]), command) do
tC = get_tag_command(type(t, tag: [tag]), command)
tCs = get_tag_command(type(t, tag: tags), command)
case many_tags(tCs) do
true when is_list(tCs) ->
[tC | tCs]
_ ->
[tC, tCs]
end
end
def get_tag_command(type(tag: []), _, _), do: []
def get_tag_command(type(tag: [tag]), :mandatory, prop) do
case prop do
:mandatory ->
:mandatory
{:"DEFAULT", _} ->
[:default, encode_tag_val(decode_class(tag(tag, :class)), tag(tag, :form), tag(tag, :number))]
_ ->
[:opt, encode_tag_val(decode_class(tag(tag, :class)), tag(tag, :form), tag(tag, :number))]
end
end
def get_tag_command(type(tag: [tag]), command, prop), do: [anonymous_dec_command(command, prop), encode_tag_val(decode_class(tag(tag, :class)), tag(tag, :form), tag(tag, :number))]
def get_tag_command(type(tag: tag), command, prop) when is_record(tag, :tag), do: get_tag_command(type(tag: [tag]), command, prop)
def get_tag_command(t = type(tag: [tag | tags]), command, prop), do: [get_tag_command(type(t, tag: [tag]), command, prop), get_tag_command(type(t, tag: tags), command, prop)]
def get_value(module, type) do
case :asn1ct_value.from_type(module, type) do
{:error, reason} ->
{:error, reason}
result ->
{:ok, result}
end
end
def have_same_tagdefault([module(tagdefault: t) | ms]), do: have_same_tagdefault(ms, {true, t})
def have_same_tagdefault([], tagDefault), do: tagDefault
def have_same_tagdefault([module(tagdefault: t) | ms], {_, t} = tDefault), do: have_same_tagdefault(ms, tDefault)
def have_same_tagdefault([module(tagdefault: t1) | ms], {_, t2}), do: have_same_tagdefault(ms, {false, rank_tagdef([t1, t2])})
def in_process(fun) do
parent = self()
pid = spawn_link(fn ->
process(parent, fun)
end)
receive do
{^pid, result} ->
result
{^pid, class, reason, stack} ->
sT = try do
throw(:x)
catch
    :throw, :x ->
      __STACKTRACE__
end
:erlang.raise(class, reason, stack ++ sT)
end
end
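# Minimal sketch: the fun runs in a linked helper process; its result (or a
# re-raised exception with both stacktraces joined) surfaces in the caller.
#
#     in_process(fn -> 1 + 1 end)  #=> 2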
def include_append(dir, options) do
option_add({:i, dir}, options, fn opts ->
opts ++ [{:i, dir}]
end)
end
def include_prepend(dir, options) do
option_add({:i, dir}, options, fn opts ->
[{:i, dir} | opts]
end)
end
def includes(file, options) do
options2 = include_append('.', options)
options3 = include_append(:filename.dirname(file), options2)
case :proplists.get_value(:outdir, options) do
:undefined ->
options3
outDir ->
include_prepend(outDir, options3)
end
end
def init_gen_record(encodingRule, options) do
erule = case encodingRule do
:uper ->
:per
_ ->
encodingRule
end
der = :proplists.get_bool(:der, options)
jer = :proplists.get_bool(:jer, options) and encodingRule !== :jer
aligned = encodingRule === :per
recPrefix = :proplists.get_value(:record_name_prefix, options, "")
macroPrefix = :proplists.get_value(:macro_name_prefix, options, "")
pack = case :proplists.get_value(:maps, options, false) do
true ->
:map
false ->
:record
end
gen(erule: erule, der: der, jer: jer, aligned: aligned, rec_prefix: recPrefix, macro_prefix: macroPrefix, pack: pack, options: options)
end
def input_file_type([]), do: {:empty_name, []}
def input_file_type(file), do: ...
def input_file_type(name, i) do
case input_file_type(name) do
{:error, _} ->
input_file_type2(:filename.basename(name), i)
err = {:input_file_error, _} ->
err
res ->
res
end
end
def input_file_type2(name, [i | is]) do
case input_file_type(:filename.join([i, name])) do
{:error, _} ->
input_file_type2(name, is)
err = {:input_file_error, _} ->
err
res ->
res
end
end
def input_file_type2(name, []), do: input_file_type(name)
def is_asn1_flag(:asn1config), do: true
def is_asn1_flag(:ber), do: true
def is_asn1_flag(:compact_bit_string), do: true
def is_asn1_flag(:debug), do: true
def is_asn1_flag(:der), do: true
def is_asn1_flag(:legacy_bit_string), do: true
def is_asn1_flag({:macro_name_prefix, _}), do: true
def is_asn1_flag({:n2n, _}), do: true
def is_asn1_flag(:noobj), do: true
def is_asn1_flag(:no_ok_wrapper), do: true
def is_asn1_flag(:optimize), do: true
def is_asn1_flag(:per), do: true
def is_asn1_flag({:record_name_prefix, _}), do: true
def is_asn1_flag(:undec_rec), do: true
def is_asn1_flag(:uper), do: true
def is_asn1_flag(:verbose), do: true
def is_asn1_flag(_), do: false
def is_error(state(options: opts)), do: is_error(opts)
def is_error(gen(options: opts)), do: is_error(opts)
def is_error(o), do: :lists.member(:errors, o) or is_verbose(o)
def is_verbose(state(options: opts)), do: is_verbose(opts)
def is_verbose(gen(options: opts)), do: is_verbose(opts)
def is_verbose(o), do: :lists.member(:verbose, o)
def is_warning(s) when is_record(s, :state), do: is_warning(state(s, :options))
def is_warning(o), do: :lists.member(:warnings, o) or is_verbose(o)
def is_werr(s) when is_record(s, :state), do: is_werr(state(s, :options))
def is_werr(o), do: :lists.member(:warnings_as_errors, o)
def legacy_forced_info(opt), do: :io.format('Info: The option \'legacy_erlang_types\' is implied by the \'~s\' option.\n', [opt])
def make_erl_options(opts) do
includes = options(opts, :includes)
defines = options(opts, :defines)
outdir = options(opts, :outdir)
warning = options(opts, :warning)
verbose = options(opts, :verbose)
specific = options(opts, :specific)
optimize = options(opts, :optimize)
outputType = options(opts, :output_type)
cwd = options(opts, :cwd)
options = (case verbose do
true ->
[:verbose]
false ->
[]
end) ++ (case warning do
0 ->
[]
_ ->
[:warnings]
end) ++ [] ++ (case optimize do
1 ->
[:optimize]
999 ->
[]
_ ->
[{:optimize, optimize}]
end) ++ :lists.map(fn {name, value} ->
{:d, name, value}
name ->
{:d, name}
end, defines) ++ (case outputType do
:undefined ->
[:ber]
_ ->
[outputType]
end)
options ++ [:errors, {:cwd, cwd}, {:outdir, outdir} | :lists.map(fn dir ->
{:i, dir}
end, includes)] ++ specific
end
def make_suffix({_, {_, 0, _}}), do: ""
def make_suffix({_, {_, i, _}}), do: :lists.concat(['_', i])
def make_suffix(_), do: ""
def many_tags([:skip]), do: false
def many_tags([:skip_optional, _]), do: false
def many_tags([:choosen, _]), do: false
def many_tags(_), do: true
def maybe_first_warn_print() do
case get(:warn_duplicate_defs) do
:undefined ->
put(:warn_duplicate_defs, true)
:io.format('~nDue to multiple occurrences of a definition name in multi-file compiled files:~n')
_ ->
:ok
end
end
def maybe_rename_function2(:record, unquote(:"Externaltypereference")(type: name), suffix), do: :lists.concat([name, suffix])
def maybe_rename_function2(:list, list, suffix), do: :lists.concat([:asn1ct_gen.list2name(list), suffix])
def maybe_rename_function2(thing, name, suffix) when thing == :atom or thing == :integer or thing == :string, do: :lists.concat([name, suffix])
def merge_modules(moduleList, commonName) do
newModuleList = remove_name_collisions(moduleList)
case :asn1ct_table.size(:renamed_defs) do
0 ->
:asn1ct_table.delete(:renamed_defs)
_ ->
:ok
end
save_imports(newModuleList)
typeOrVal = :lists.append(:lists.map(fn x ->
module(x, :typeorval)
end, newModuleList))
inputMNameList = :lists.map(fn x ->
module(x, :name)
end, newModuleList)
cExports = common_exports(newModuleList)
importsModuleNameList = :lists.map(fn x ->
{module(x, :imports), module(x, :name)}
end, newModuleList)
cImports = common_imports(importsModuleNameList, inputMNameList)
tagDefault = check_tagdefault(newModuleList)
module(name: commonName, tagdefault: tagDefault, exports: cExports, imports: cImports, typeorval: typeOrVal)
end
def merge_pass(st(file: base, code: code) = st) do
m = merge_modules(code, base)
{:ok, st(st, code: m)}
end
def merge_symbols_from_module([imp | imps], acc) do
unquote(:"Externaltypereference")(type: modName) = unquote(:"SymbolsFromModule")(imp, :module)
ifromModName = :lists.filter(fn i ->
case unquote(:"SymbolsFromModule")(i, :module) do
      unquote(:"Externaltypereference")(type: ^modName) ->
        true
      unquote(:"Externalvaluereference")(value: ^modName) ->
true
_ ->
false
end
end, imps)
newImps = :lists.subtract(imps, ifromModName)
  newImp = unquote(:"SymbolsFromModule")(imp, symbols: :lists.append(:lists.map(fn sL ->
unquote(:"SymbolsFromModule")(sL, :symbols)
end, [imp | ifromModName])))
merge_symbols_from_module(newImps, [newImp | acc])
end
def merge_symbols_from_module([], acc), do: :lists.reverse(acc)
def option_add(option, options, fun) do
case :lists.member(option, options) do
true ->
options
false ->
fun.(options)
end
end
def outfile(base, ext, opts) do
obase = case :lists.keysearch(:outdir, 1, opts) do
{:value, {:outdir, odir}} ->
:filename.join(odir, base)
_NotFound ->
base
end
case ext do
[] ->
obase
_ ->
:lists.concat([obase, '.', ext])
end
end
def parse_and_save1(state(erule: erule), file, options) do
ext = :filename.extension(file)
base = :filename.basename(file, ext)
dbFile = outfile(base, 'asn1db', options)
st = st(file: file, dbfile: dbFile, erule: erule)
passes = parse_and_save_passes()
run_passes(passes, st)
end
def parse_and_save_passes(), do: [{:pass, :scan, &scan_pass/1}, {:pass, :parse, &parse_pass/1}, {:pass, :save, &save_pass/1}]
def parse_listing(st(code: code, outfile: outFile0) = st) do
outFile = outFile0 ++ '.parse'
case :file.write_file(outFile, :io_lib.format('~p\n', [code])) do
:ok ->
:done
{:error, reason} ->
error = {:write_error, outFile, reason}
{:error, st(st, error: [{:structured_error, {outFile0, :none}, :asn1ct, error}])}
end
end
def parse_pass(st(file: file, code: tokens) = st) do
case :asn1ct_parser2.parse(file, tokens) do
{:ok, m} ->
{:ok, st(st, code: m)}
{:error, errors} ->
{:error, st(st, error: errors)}
end
end
def prepare_bytes(bytes) when is_binary(bytes), do: bytes
def prepare_bytes(bytes), do: list_to_binary(bytes)
def pretty2(module, absFile), do: ...
def print_structured_errors([_ | _] = errors) do
_ = for {:structured_error, {f, l}, m, e} <- errors do
:io.format('~ts:~w: ~ts\n', [f, l, m.format_error(e)])
end
:ok
end
def print_structured_errors(_), do: :ok
def process(parent, fun) do
try do
send(parent, {self(), fun.()})
catch
    class, reason ->
      send(parent, {self(), class, reason, __STACKTRACE__})
end
end
def rank_tagdef(l) do
case :lists.member(:"EXPLICIT", l) do
true ->
:"EXPLICIT"
_ ->
:"IMPLICIT"
end
end
def read_config_file(gen(options: options), moduleName) do
name = ensure_ext(moduleName, '.asn1config')
includes = for {:i, i} <- options do
i
end
read_config_file0(name, ['.' | includes])
end
def read_config_file0(name, [d | dirs]) do
case :file.consult(:filename.join(d, name)) do
{:ok, cfgList} ->
cfgList
{:error, :enoent} ->
read_config_file0(name, dirs)
{:error, reason} ->
error = 'error reading asn1 config file: ' ++ :file.format_error(reason)
throw({:error, error})
end
end
def read_config_file0(_, []), do: :no_config_file
def read_config_file_info(moduleName, infoType) when is_atom(infoType) do
name = ensure_ext(moduleName, '.asn1config')
cfgList = read_config_file0(name, [])
get_config_info(cfgList, infoType)
end
def remove_asn_flags(options) do
  for x <- options, not is_asn1_flag(x) do
x
end
end
def remove_import_doubles([]), do: []
def remove_import_doubles(importList) do
mergedImportList = merge_symbols_from_module(importList, [])
delete_double_of_symbol(mergedImportList, [])
end
def remove_in_set_imports([{{:imports, impL}, _ModName} | rest], inputMNameL, acc) do
newImpL = remove_in_set_imports1(impL, inputMNameL, [])
remove_in_set_imports(rest, inputMNameL, newImpL ++ acc)
end
def remove_in_set_imports([], _, acc), do: :lists.reverse(acc)
def remove_in_set_imports1([i | is], inputMNameL, acc) do
case unquote(:"SymbolsFromModule")(i, :module) do
unquote(:"Externaltypereference")(type: mName) ->
case :lists.member(mName, inputMNameL) do
true ->
remove_in_set_imports1(is, inputMNameL, acc)
false ->
remove_in_set_imports1(is, inputMNameL, [i | acc])
end
_ ->
remove_in_set_imports1(is, inputMNameL, [i | acc])
end
end
def remove_in_set_imports1([], _, acc), do: :lists.reverse(acc)
def remove_name_collisions(modules) do
:asn1ct_table.new(:renamed_defs)
:lists.foreach(&exit_if_nameduplicate/1, modules)
remove_name_collisions2(modules, [])
end
def remove_name_collisions2([m | ms], acc) do
typeOrVal = module(m, :typeorval)
mName = module(m, :name)
{newM, newMs} = remove_name_collisions2(mName, typeOrVal, ms, [])
remove_name_collisions2(newMs, [module(m, typeorval: newM) | acc])
end
def remove_name_collisions2([], acc) do
finished_warn_prints()
acc
end
def remove_name_collisions2(modName, [t | ts], ms, acc) do
name = get_name_of_def(t)
case discover_dupl_in_mods(name, t, ms, [], 0) do
{_, 0} ->
remove_name_collisions2(modName, ts, ms, [t | acc])
{newMs, 1} ->
newT = set_name_of_def(modName, name, t)
warn_renamed_def(modName, get_name_of_def(newT), name)
:asn1ct_table.insert(:renamed_defs, {get_name_of_def(newT), name, modName})
remove_name_collisions2(modName, ts, newMs, [newT | acc])
{newMs, 2} ->
warn_kept_def(modName, name)
remove_name_collisions2(modName, ts, newMs, [t | acc])
    # 3 = 2 ||| 1 (operator calls are not allowed in patterns)
    {newMs, 3} ->
warn_kept_def(modName, name)
remove_name_collisions2(modName, ts, newMs, [t | acc])
end
end
def remove_name_collisions2(_, [], ms, acc), do: {acc, ms}
def run_passes(passes, st(opts: opts) = st) do
run = case :lists.member(:time, opts) do
false ->
fn _, pass, s ->
pass.(s)
end
true ->
&run_tc/3
end
run_passes_1(passes, st(st, run: run))
end
def run_passes_1([{:unless, opt, pass} | passes], st(opts: opts) = st) do
case :proplists.get_bool(opt, opts) do
false ->
run_passes_1([pass | passes], st)
true ->
run_passes_1(passes, st)
end
end
def run_passes_1([{:iff, opt, pass} | passes], st(opts: opts) = st) do
case :proplists.get_bool(opt, opts) do
true ->
run_passes_1([pass | passes], st)
false ->
run_passes_1(passes, st)
end
end
def run_passes_1([{:pass, name, pass} | passes], st(run: run) = st0) when is_function(pass, 1) do
try do
run.(name, pass, st0)
catch
    class, error ->
      :io.format('Internal error: ~p:~p\n~p\n', [class, error, __STACKTRACE__])
{:error, {:internal_error, {class, error}}}
else
{:ok, st} ->
run_passes_1(passes, st)
{:error, st(error: errors)} ->
{structured, allErrors} = clean_errors(errors)
print_structured_errors(structured)
{:error, allErrors}
:done ->
:ok
end
end
def run_passes_1([], _St), do: :ok
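# Passes are {:pass, name, fun} tuples, optionally wrapped in {:iff, opt, pass}
# or {:unless, opt, pass} to toggle them via options. Illustrative call (state
# record contents assumed):
#
#     run_passes([{:pass, :scan, &scan_pass/1}], st(opts: []))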
def run_tc(name, fun, st) do
before0 = statistics(:runtime)
val = try do
fun.(st)
catch
error -> error
end
after0 = statistics(:runtime)
{before_c, _} = before0
{after_c, _} = after0
  :io.format('~-31s: ~10.2f s\n', [name, (after_c - before_c) / 1000])
val
end
def save_automatic_tagged_types([]), do: :done
def save_automatic_tagged_types([module(tagdefault: :"AUTOMATIC", typeorval: torV) | ms]) do
fun = fn t ->
:asn1ct_table.insert(:automatic_tags, {get_name_of_def(t)})
end
:lists.foreach(fun, torV)
save_automatic_tagged_types(ms)
end
def save_automatic_tagged_types([_M | ms]), do: save_automatic_tagged_types(ms)
def save_config(key, info) do
:asn1ct_table.new_reuse(:asn1_general)
:asn1ct_table.insert(:asn1_general, {{:asn1_config, key}, info})
end
def save_gen_state(genState) when is_record(genState, :gen_state), do: save_config(:gen_state, genState)
def save_gen_state(:selective_decode, {_, type_component_name_list}) do
state = case get_gen_state() do
s when is_record(s, :gen_state) ->
s
_ ->
gen_state()
end
stateRec = gen_state(state, type_pattern: type_component_name_list)
save_config(:gen_state, stateRec)
end
def save_gen_state(:selective_decode, _), do: :ok
def save_gen_state(:exclusive_decode, {_, confList}, partIncTlvTagList) do
state = case get_gen_state() do
s when is_record(s, :gen_state) ->
s
_ ->
gen_state()
end
stateRec = gen_state(state, inc_tag_pattern: partIncTlvTagList, inc_type_pattern: confList)
save_config(:gen_state, stateRec)
end
def save_gen_state(_, _, _) do
case get_gen_state() do
s when is_record(s, :gen_state) ->
:ok
_ ->
save_config(:gen_state, gen_state())
end
end
def save_imports(moduleList) do
fun = fn m ->
case module(m, :imports) do
{_, []} ->
[]
{_, i} ->
{module(m, :name), i}
end
end
importsList = :lists.map(fun, moduleList)
case :lists.flatten(importsList) do
[] ->
:ok
importsList2 ->
:asn1ct_table.new(:original_imports)
:lists.foreach(fn x ->
:asn1ct_table.insert(:original_imports, x)
end, importsList2)
end
end
def save_pass(st(code: m, erule: erule, opts: opts) = st) do
:ok = :asn1ct_check.storeindb(state(erule: erule, options: opts), m)
{:ok, st}
end
def scan_pass(st(file: file) = st) do
case :asn1ct_tok.file(file) do
{:error, reason} ->
{:error, st(st, error: reason)}
tokens when is_list(tokens) ->
{:ok, st(st, code: tokens)}
end
end
def set_name_of_def(modName, name, oldDef) do
newName = list_to_atom(:lists.concat([name, modName]))
case oldDef do
typedef() ->
typedef(oldDef, name: newName)
classdef() ->
classdef(oldDef, name: newName)
valuedef() ->
valuedef(oldDef, name: newName)
ptypedef() ->
ptypedef(oldDef, name: newName)
pvaluedef() ->
pvaluedef(oldDef, name: newName)
pvaluesetdef() ->
pvaluesetdef(oldDef, name: newName)
pobjectdef() ->
pobjectdef(oldDef, name: newName)
pobjectsetdef() ->
pobjectsetdef(oldDef, name: newName)
end
end
def set_passes(), do: [{:pass, :scan_parse, &set_scan_parse_pass/1}, {:pass, :merge, &merge_pass/1} | common_passes()]
def set_scan_parse_pass(st(files: files) = st) do
try do
l = set_scan_parse_pass_1(files, st)
{:ok, st(st, code: l)}
catch
    :throw, error ->
{:error, st(st, error: error)}
end
end
def set_scan_parse_pass_1([f | fs], st(file: file) = st) do
case :asn1ct_tok.file(f) do
{:error, error} ->
throw(error)
tokens when is_list(tokens) ->
case :asn1ct_parser2.parse(file, tokens) do
{:ok, m} ->
[m | set_scan_parse_pass_1(fs, st)]
{:error, errors} ->
throw(errors)
end
end
end
def set_scan_parse_pass_1([], _), do: []
def setup_bit_string_format(opts) do
format = case {:lists.member(:compact_bit_string, opts), :lists.member(:legacy_bit_string, opts)} do
{false, false} ->
:bitstring
{true, false} ->
:compact
{false, true} ->
:legacy
{true, true} ->
message = 'Contradicting options given: compact_bit_string and legacy_bit_string'
exit({:error, {:asn1, message}})
end
put(:bit_string_format, format)
end
def setup_legacy_erlang_types(opts) do
f = case :lists.member(:legacy_erlang_types, opts) do
false ->
case get_bit_string_format() do
:bitstring ->
false
:compact ->
legacy_forced_info(:compact_bit_string)
true
:legacy ->
legacy_forced_info(:legacy_bit_string)
true
end
true ->
true
end
put(:use_legacy_erlang_types, f)
end
def single_passes(), do: [{:pass, :scan, &scan_pass/1}, {:pass, :parse, &parse_pass/1} | common_passes()]
def special_decode_prepare_1(gen(options: options) = gen, m) do
modName = case :lists.keyfind(:asn1config, 1, options) do
{_, mName} ->
mName
false ->
module(m, :name)
end
case read_config_file(gen, modName) do
:no_config_file ->
:ok
cfgList ->
selectedDecode = get_config_info(cfgList, :selective_decode)
exclusiveDecode = get_config_info(cfgList, :exclusive_decode)
commandList = create_partial_decode_gen_info(module(m, :name), selectedDecode)
save_config(:partial_decode, commandList)
save_gen_state(:selective_decode, selectedDecode)
commandList2 = create_partial_inc_decode_gen_info(module(m, :name), exclusiveDecode)
part_inc_tlv_tags = tlv_tags(commandList2)
save_config(:partial_incomplete_decode, part_inc_tlv_tags)
save_gen_state(:exclusive_decode, exclusiveDecode, part_inc_tlv_tags)
end
end
def specialized_decode_prepare(gen(erule: :ber, options: options) = gen, m) do
case :lists.member(:asn1config, options) do
true ->
special_decode_prepare_1(gen, m)
false ->
:ok
end
end
def specialized_decode_prepare(_, _), do: :ok
def start(includes) when is_list(includes), do: :asn1_db.dbstart(includes)
def strip_includes(includes) do
for {:i, i} <- includes do
i
end
end
def test_each(module, [type | rest]) do
case test_type(module, type) do
{:ok, _Result} ->
test_each(module, rest)
error ->
error
end
end
def test_each(_, []), do: :ok
def test_module(module, includes) do
in_process(fn ->
start(strip_includes(includes))
case check(module, includes) do
{:ok, newTypes} ->
test_each(module, newTypes)
error ->
error
end
end)
end
def test_type(module, type) do
case get_value(module, type) do
{:ok, val} ->
test_value(module, type, val)
{:error, reason} ->
{:error, {:asn1, {:value, reason}}}
end
end
def test_type(module, type, includes) do
in_process(fn ->
start(strip_includes(includes))
case check(module, includes) do
{:ok, _NewTypes} ->
test_type(module, type)
error ->
error
end
end)
end
def test_value(module, type, value) do
in_process(fn ->
try do
module.encode(type, value)
catch
error -> error
end
|> case do
{:ok, bytes} ->
test_value_decode(module, type, value, bytes)
bytes when is_binary(bytes) ->
test_value_decode(module, type, value, bytes)
error ->
{:error, {:asn1, {:encode, {{module, type, value}, error}}}}
end
end)
end
def test_value_decode(module, type, value, bytes) do
newBytes = prepare_bytes(bytes)
case module.decode(type, newBytes) do
    {:ok, ^value} ->
      {:ok, {module, type, value}}
    {:ok, ^value, <<>>} ->
      {:ok, {module, type, value}}
    ^value ->
      {:ok, {module, type, value}}
    {^value, <<>>} ->
      {:ok, {module, type, value}}
{:ok, res} ->
{:error, {:asn1, {:encode_decode_mismatch, {{module, type, value}, res}}}}
{:ok, res, rest} ->
{:error, {:asn1, {:encode_decode_mismatch, {{module, type, value}, {res, rest}}}}}
error ->
{:error, {:asn1, {{:decode, {module, type, value}, error}}}}
end
end
def tlv_tag(<<cl :: 2, _ :: 1, tagNo :: 5>>) when tagNo < 31, do: (cl <<< 16) + tagNo
def tlv_tag(<<cl :: 2, _ :: 1, 31 :: 5, 0 :: 1, tagNo :: 7>>), do: (cl <<< 16) + tagNo
def tlv_tag(<<cl :: 2, _ :: 1, 31 :: 5, buffer :: binary>>) do
tagNo = tlv_tag1(buffer, 0)
  (cl <<< 16) + tagNo
end
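# The composite tag packs the 2-bit class above bit 16 with the tag number in
# the low bits. Illustrative (input bits assumed for the example):
#
#     tlv_tag(<<2::2, 0::1, 3::5>>)  #=> (2 <<< 16) + 3 = 131075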
def tlv_tag1(<<0 :: 1, partialTag :: 7>>, acc), do: acc <<< 7 ||| partialTag
def tlv_tag1(<<1 :: 1, partialTag :: 7, buffer :: binary>>, acc), do: tlv_tag1(buffer, acc <<< 7 ||| partialTag)
def tlv_tags([]), do: []
def tlv_tags([:mandatory | rest]), do: [:mandatory | tlv_tags(rest)]
def tlv_tags([[command, tag] | rest]) when is_atom(command) and is_binary(tag), do: [[command, tlv_tag(tag)] | tlv_tags(rest)]
def tlv_tags([[command, directives] | rest]) when is_atom(command) and is_list(directives), do: [[command, tlv_tags(directives)] | tlv_tags(rest)]
def tlv_tags([[] | rest]), do: tlv_tags(rest)
def tlv_tags([{name, topType, l1} | rest]) when is_list(l1) and is_atom(topType), do: [{name, topType, tlv_tags(l1)} | tlv_tags(rest)]
def tlv_tags([[command, tag, l1] | rest]) when is_list(l1) and is_binary(tag), do: [[command, tlv_tag(tag), tlv_tags(l1)] | tlv_tags(rest)]
def tlv_tags([[:mandatory | rest]]), do: [[:mandatory | tlv_tags(rest)]]
def tlv_tags([l = [l1 | _] | rest]) when is_list(l1), do: [tlv_tags(l) | tlv_tags(rest)]
def translate_options([:ber_bin | t]) do
:io.format('Warning: The option \'ber_bin\' is now called \'ber\'.\n')
[:ber | translate_options(t)]
end
def translate_options([:per_bin | t]) do
:io.format('Warning: The option \'per_bin\' is now called \'per\'.\n')
[:per | translate_options(t)]
end
def translate_options([:uper_bin | t]) do
:io.format('Warning: The option \'uper_bin\' is now called \'uper\'.\n')
translate_options([:uper | t])
end
def translate_options([:nif | t]) do
:io.format('Warning: The option \'nif\' is no longer needed.\n')
translate_options(t)
end
def translate_options([:optimize | t]) do
:io.format('Warning: The option \'optimize\' is no longer needed.\n')
translate_options(t)
end
def translate_options([:inline | t]) do
:io.format('Warning: The option \'inline\' is no longer needed.\n')
translate_options(t)
end
def translate_options([{:inline, _} | _]) do
:io.format('ERROR: The option {inline,OutputFilename} is no longer supported.\n')
throw({:error, {:unsupported_option, :inline}})
end
def translate_options([h | t]), do: [h | translate_options(t)]
def translate_options([]), do: []
def type_check(a) when is_atom(a), do: :atom
def type_check(l) when is_list(l) do
pred = fn x when x <= 255 ->
false
_ ->
true
end
case :lists.filter(pred, l) do
[] ->
:string
_ ->
:list
end
end
def type_check(unquote(:"Externaltypereference")()), do: :record
def update_gen_state(:active, state, data), do: save_gen_state(gen_state(state, active: data))
def update_gen_state(:prefix, state, data), do: save_gen_state(gen_state(state, prefix: data))
def update_gen_state(:inc_tag_pattern, state, data), do: save_gen_state(gen_state(state, inc_tag_pattern: data))
def update_gen_state(:tag_pattern, state, data), do: save_gen_state(gen_state(state, tag_pattern: data))
def update_gen_state(:inc_type_pattern, state, data), do: save_gen_state(gen_state(state, inc_type_pattern: data))
def update_gen_state(:type_pattern, state, data), do: save_gen_state(gen_state(state, type_pattern: data))
def update_gen_state(:func_name, state, data), do: save_gen_state(gen_state(state, func_name: data))
def update_gen_state(:namelist, state, data), do: save_gen_state(gen_state(state, namelist: data))
def update_gen_state(:tobe_refed_funcs, state, data), do: save_gen_state(gen_state(state, tobe_refed_funcs: data))
def update_gen_state(:gen_refed_funcs, state, data), do: save_gen_state(gen_state(state, gen_refed_funcs: data))
def update_gen_state(:generated_functions, state, data), do: save_gen_state(gen_state(state, generated_functions: data))
def update_gen_state(:suffix_index, state, data), do: save_gen_state(gen_state(state, suffix_index: data))
def update_gen_state(:current_suffix_index, state, data), do: save_gen_state(gen_state(state, current_suffix_index: data))
def warn_deleted_def(modName, defName) do
maybe_first_warn_print()
:io.format('NOTICE: The ASN.1 definition in module ~p with name ~p has been deleted in generated module.~n', [modName, defName])
end
def warn_kept_def(modName, defName) do
maybe_first_warn_print()
:io.format('NOTICE: The ASN.1 definition in module ~p with name ~p has kept its name due to equal definition as duplicate.~n', [modName, defName])
end
def warn_renamed_def(modName, newName, oldName) do
maybe_first_warn_print()
:io.format('NOTICE: The ASN.1 definition in module ~p with name ~p has been renamed in generated module. New name is ~p.~n', [modName, oldName, newName])
end
end
| 30.08826 | 264 | 0.625025 |
f7c9d494a30a8104958b77b2e956338a7932b359 | 3,103 | exs | Elixir | apps/grapevine/mix.exs | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 107 | 2018-10-05T18:20:32.000Z | 2022-02-28T04:02:50.000Z | apps/grapevine/mix.exs | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 33 | 2018-10-05T14:11:18.000Z | 2022-02-10T22:19:18.000Z | apps/grapevine/mix.exs | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 18 | 2019-02-03T03:08:20.000Z | 2021-12-28T04:29:36.000Z | defmodule Grapevine.Mixfile do
use Mix.Project
def project do
[
app: :grapevine,
version: "2.3.0",
elixir: "~> 1.9",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
releases: releases()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Grapevine.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:bamboo, "~> 1.1"},
{:bamboo_smtp, "~> 2.0"},
{:cowboy, "~> 2.0"},
{:credo, "~> 1.0", only: [:dev, :test], runtime: false},
{:earmark, "~> 1.2"},
{:ex_aws, "~> 2.1"},
{:ex_aws_s3, "~> 2.0"},
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:gettext, "~> 0.11"},
{:grapevine_data, path: "../data"},
{:grapevine_socket, path: "../socket", runtime: false},
{:grapevine_telnet, path: "../telnet/", runtime: false},
{:hackney, "~> 1.9"},
{:html_sanitize_ex, "~> 1.3"},
{:jason, "~> 1.1"},
{:libcluster, "~> 3.0"},
{:logster, "~> 1.0"},
{:mojito, "~> 0.5.0"},
{:phoenix, "~> 1.4", override: true},
{:phoenix_ecto, "~> 4.0"},
{:phoenix_html, "~> 2.10"},
{:phoenix_live_view, github: "phoenixframework/phoenix_live_view"},
{:phoenix_pubsub, "~> 1.0"},
{:plug_cowboy, "~> 2.0", override: true},
{:poison, "~> 4.0"},
{:prometheus_ex, git: "https://github.com/deadtrickster/prometheus.ex.git", override: true},
{:prometheus_plugs, "~> 1.1.1"},
{:sentry, "~> 7.0"},
{:sweet_xml, "~> 0.6"},
{:telemetry, "~> 0.3"},
{:telemetry_poller, "~> 0.2"},
{:telnet, git: "https://github.com/oestrich/telnet-elixir.git"},
{:timber, "~> 3.0"},
{:timex, "~> 3.1"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"ecto.migrate.reset": ["ecto.drop", "ecto.create", "ecto.migrate"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
defp releases() do
[
grapevine: [
include_executables_for: [:unix],
applications: [
grapevine_socket: :none,
grapevine_telnet: :none,
runtime_tools: :permanent
],
config_providers: [{Grapevine.ConfigProvider, "/etc/grapevine/config.exs"}]
]
]
end
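
  # Illustrative build command (environment assumed):
  #
  #     MIX_ENV=prod mix release grapevine
  #
  # The config provider above then reads /etc/grapevine/config.exs at boot.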
end
| 30.126214 | 98 | 0.550435 |
f7c9ff20e3214153912f61b8b558b9a60d19a094 | 6,063 | exs | Elixir | test/phoenix_html/tag_test.exs | lukaszsamson/phoenix_html | 03c937e808bbda859488a1ac90ce8e297f8644b8 | [
"MIT"
] | 1 | 2020-04-28T12:28:52.000Z | 2020-04-28T12:28:52.000Z | test/phoenix_html/tag_test.exs | lukaszsamson/phoenix_html | 03c937e808bbda859488a1ac90ce8e297f8644b8 | [
"MIT"
] | null | null | null | test/phoenix_html/tag_test.exs | lukaszsamson/phoenix_html | 03c937e808bbda859488a1ac90ce8e297f8644b8 | [
"MIT"
] | null | null | null | defmodule Phoenix.HTML.TagTest do
use ExUnit.Case, async: true
import Phoenix.HTML
import Phoenix.HTML.Tag
doctest Phoenix.HTML.Tag
test "tag" do
assert tag(:br) |> safe_to_string() == ~s(<br>)
assert tag(:input, name: ~s("<3")) |> safe_to_string() == ~s(<input name=""<3"">)
assert tag(:input, name: raw("<3")) |> safe_to_string() == ~s(<input name="<3">)
assert tag(:input, name: :hello) |> safe_to_string() == ~s(<input name="hello">)
assert tag(:input, type: "text", name: "user_id") |> safe_to_string() ==
~s(<input name="user_id" type="text">)
assert tag(:input, data: [toggle: "dropdown"]) |> safe_to_string() ==
~s(<input data-toggle="dropdown">)
assert tag(:input, my_attr: "blah") |> safe_to_string() == ~s(<input my-attr="blah">)
assert tag(:input, data: [my_attr: "blah"]) |> safe_to_string() ==
~s(<input data-my-attr="blah">)
assert tag(:input, data: [toggle: [target: "#parent", attr: "blah"]]) |> safe_to_string() ==
~s(<input data-toggle-attr="blah" data-toggle-target="#parent">)
assert tag(:audio, autoplay: "autoplay") |> safe_to_string() ==
~s(<audio autoplay="autoplay">)
assert tag(:audio, autoplay: true) |> safe_to_string() == ~s(<audio autoplay>)
assert tag(:audio, autoplay: false) |> safe_to_string() == ~s(<audio>)
assert tag(:audio, autoplay: nil) |> safe_to_string() == ~s(<audio>)
end
test "content_tag" do
assert content_tag(:p, "Hello") |> safe_to_string() == "<p>Hello</p>"
assert content_tag(:p, "Hello", class: "dark") |> safe_to_string() ==
"<p class=\"dark\">Hello</p>"
assert content_tag(:p, [class: "dark"], do: "Hello") |> safe_to_string() ==
"<p class=\"dark\">Hello</p>"
assert content_tag(:p, "<Hello>") |> safe_to_string() == "<p><Hello></p>"
assert content_tag(:p, 13) |> safe_to_string() == "<p>13</p>"
assert content_tag(:p, [class: "dark"], do: "<Hello>") |> safe_to_string() ==
"<p class=\"dark\"><Hello></p>"
assert content_tag(:p, raw("<Hello>")) |> safe_to_string() == "<p><Hello></p>"
assert content_tag(:p, [class: "dark"], do: raw("<Hello>")) |> safe_to_string() ==
"<p class=\"dark\"><Hello></p>"
content =
content_tag :form, action: "/users", data: [remote: true] do
tag(:input, name: "user[name]")
end
assert safe_to_string(content) ==
~s(<form action="/users" data-remote="true">) <> ~s(<input name="user[name]"></form>)
assert content_tag(:p, do: "Hello") |> safe_to_string() == "<p>Hello</p>"
content =
content_tag :ul do
content_tag :li do
"Hello"
end
end
assert safe_to_string(content) == "<ul><li>Hello</li></ul>"
assert content_tag(:p, ["hello", ?\s, "world"]) |> safe_to_string() == "<p>hello world</p>"
assert content_tag(:div, [autoplay: "autoplay"], do: "") |> safe_to_string() ==
~s(<div autoplay="autoplay"></div>)
assert content_tag(:div, [autoplay: true], do: "") |> safe_to_string() ==
~s(<div autoplay></div>)
assert content_tag(:div, [autoplay: false], do: "") |> safe_to_string() == ~s(<div></div>)
assert content_tag(:div, [autoplay: nil], do: "") |> safe_to_string() == ~s(<div></div>)
end
test "img_tag" do
assert img_tag("user.png") |> safe_to_string() == ~s(<img src="user.png">)
assert img_tag("user.png", class: "big") |> safe_to_string() ==
~s(<img class="big" src="user.png">)
end
test "form_tag for get" do
assert safe_to_string(form_tag("/", method: :get)) ==
~s(<form accept-charset="UTF-8" action="/" method="get">) <>
~s(<input name="_utf8" type="hidden" value="✓">)
assert safe_to_string(form_tag("/", method: :get, enforce_utf8: false)) ==
~s(<form action="/" method="get">)
end
test "form_tag for post" do
csrf_token = Plug.CSRFProtection.get_csrf_token()
assert safe_to_string(form_tag("/")) ==
~s(<form accept-charset="UTF-8" action="/" method="post">) <>
~s(<input name="_csrf_token" type="hidden" value="#{csrf_token}">) <>
~s(<input name="_utf8" type="hidden" value="✓">)
assert safe_to_string(form_tag("/", method: :post, csrf_token: false, multipart: true)) ==
~s(<form accept-charset="UTF-8" action="/" enctype="multipart/form-data" method="post">) <>
~s(<input name="_utf8" type="hidden" value="✓">)
end
test "form_tag for other method" do
csrf_token = Plug.CSRFProtection.get_csrf_token()
assert safe_to_string(form_tag("/", method: :put)) ==
~s(<form accept-charset="UTF-8" action="/" method="post">) <>
~s(<input name="_method" type="hidden" value="put">) <>
~s(<input name="_csrf_token" type="hidden" value="#{csrf_token}">) <>
~s(<input name="_utf8" type="hidden" value="✓">)
end
test "form_tag with do block" do
csrf_token = Plug.CSRFProtection.get_csrf_token()
assert safe_to_string(
form_tag "/" do
"<>"
end
) ==
~s(<form accept-charset="UTF-8" action="/" method="post">) <>
~s(<input name="_csrf_token" type="hidden" value="#{csrf_token}">) <>
~s(<input name="_utf8" type="hidden" value="✓">) <> ~s(<>) <> ~s(</form>)
assert safe_to_string(
form_tag "/", method: :get do
"<>"
end
) ==
~s(<form accept-charset="UTF-8" action="/" method="get">) <>
~s(<input name="_utf8" type="hidden" value="✓">) <> ~s(<>) <> ~s(</form>)
end
test "csrf_meta_tag" do
csrf_token = Plug.CSRFProtection.get_csrf_token()
assert safe_to_string(csrf_meta_tag()) ==
~s(<meta charset="UTF-8" content="#{csrf_token}" csrf-param="_csrf_token" method-param="_method" name="csrf-token">)
end
end
| 38.617834 | 129 | 0.562263 |
f7ca12c07b7d542d58406121cb291c43cfac1a52 | 1,304 | exs | Elixir | config/dev.exs | zachary-kuhn/guardian_demo | f973cb5a665db1608543a31d3d2340caa2d11228 | [
"MIT"
] | 2 | 2017-09-10T12:44:31.000Z | 2017-11-05T14:15:55.000Z | config/dev.exs | zachary-kuhn/guardian_demo | f973cb5a665db1608543a31d3d2340caa2d11228 | [
"MIT"
] | null | null | null | config/dev.exs | zachary-kuhn/guardian_demo | f973cb5a665db1608543a31d3d2340caa2d11228 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :guardian_demo, GuardianDemoWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 34.315789 | 170 | 0.738497 |
f7ca1766008bfbd6d2aba4ecac9b1525158b792d | 2,937 | exs | Elixir | lib/elixir/test/elixir/string/chars_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/string/chars_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/string/chars_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule String.Chars.AtomTest do
use ExUnit.Case, async: true
test :basic do
assert to_string(:foo) == "foo"
end
test :empty do
assert to_string(:"") == ""
end
test :true_false_nil do
assert to_string(false) == "false"
assert to_string(true) == "true"
assert to_string(nil) == ""
end
test :with_uppercase do
assert to_string(:fOO) == "fOO"
assert to_string(:FOO) == "FOO"
end
test :alias_atom do
assert to_string(Foo.Bar) == "Elixir.Foo.Bar"
end
end
defmodule String.Chars.BitStringTest do
use ExUnit.Case, async: true
test :binary do
assert to_string("foo") == "foo"
assert to_string(<<?a, ?b, ?c>>) == "abc"
assert to_string("我今天要学习.") == "我今天要学习."
end
end
defmodule String.Chars.NumberTest do
use ExUnit.Case, async: true
test :integer do
assert to_string(100) == "100"
end
test :float do
assert to_string(1.0) == "1.0"
assert to_string(1.0e10) == "1.0e10"
end
end
defmodule String.Chars.ListTest do
use ExUnit.Case, async: true
test :basic do
assert to_string([ 1, "b", 3 ]) == <<1, 98, 3>>
end
test :printable do
assert to_string('abc') == "abc"
end
test :char_list do
assert to_string([0, 1, 2, 3, 255]) ==
<<0, 1, 2, 3, 195, 191>>
assert to_string([0, [1, "hello"], 2, [["bye"]]]) ==
<<0, 1, 104, 101, 108, 108, 111, 2, 98, 121, 101>>
end
test :empty do
assert to_string([]) == ""
end
end
defmodule String.Chars.ErrorsTest do
use ExUnit.Case, async: true
test :bitstring do
assert_raise Protocol.UndefinedError,
"protocol String.Chars not implemented for <<0, 1::size(4)>>, " <>
"cannot convert a bitstring to a string", fn ->
to_string(<<1 :: [size(12), integer, signed]>>)
end
end
test :tuple do
assert_raise Protocol.UndefinedError, "protocol String.Chars not implemented for {1, 2, 3}", fn ->
to_string({1, 2, 3})
end
end
test :record do
assert_raise Protocol.UndefinedError, "protocol String.Chars not implemented for ArgumentError[message: \"argument error\"]", fn ->
to_string(ArgumentError[])
end
end
test :pid do
assert_raise Protocol.UndefinedError, %r"^protocol String\.Chars not implemented for #PID<.+?>$", fn ->
to_string(self())
end
end
test :ref do
assert_raise Protocol.UndefinedError, %r"^protocol String\.Chars not implemented for #Reference<.+?>$", fn ->
to_string(make_ref()) == ""
end
end
test :function do
assert_raise Protocol.UndefinedError, %r"^protocol String\.Chars not implemented for #Function<.+?>$", fn ->
to_string(fn -> end)
end
end
test :port do
[port|_] = Port.list
assert_raise Protocol.UndefinedError, %r"^protocol String\.Chars not implemented for #Port<.+?>$", fn ->
to_string(port)
end
end
end
| 23.496 | 135 | 0.625809 |
f7ca29256346bc41f2ad1ad1894022b3e5027d79 | 32 | exs | Elixir | test/test_helper.exs | pkinney/simplify_ex | c4d5d1c65f5bd720c0a54a630bb9bf477afa212e | [
"MIT"
] | 16 | 2016-03-26T14:41:52.000Z | 2022-03-31T16:03:47.000Z | test/test_helper.exs | pkinney/simplify_ex | c4d5d1c65f5bd720c0a54a630bb9bf477afa212e | [
"MIT"
] | 2 | 2016-05-03T20:37:14.000Z | 2019-05-25T16:12:20.000Z | test/test_helper.exs | pkinney/simplify_ex | c4d5d1c65f5bd720c0a54a630bb9bf477afa212e | [
"MIT"
] | 5 | 2016-05-03T20:28:33.000Z | 2019-05-21T07:14:08.000Z | ExUnit.start(exclude: [:bench])
| 16 | 31 | 0.71875 |
f7ca35a810adba4cb7fb2a23dc08a898c8ac317f | 1,396 | ex | Elixir | test/support/data_case.ex | shawnonthenet/taskasync.com | 7ebd8ec1afc9fa5fa8708568b4ce9cf75968ae41 | [
"MIT"
] | null | null | null | test/support/data_case.ex | shawnonthenet/taskasync.com | 7ebd8ec1afc9fa5fa8708568b4ce9cf75968ae41 | [
"MIT"
] | null | null | null | test/support/data_case.ex | shawnonthenet/taskasync.com | 7ebd8ec1afc9fa5fa8708568b4ce9cf75968ae41 | [
"MIT"
] | null | null | null | defmodule Taskasync.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Taskasync.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Taskasync.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Taskasync.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Taskasync.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 25.851852 | 77 | 0.682665 |
f7ca47a3dda93315b2b37068657048a06928195e | 2,372 | ex | Elixir | lib/codes/codes_d02.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_d02.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_d02.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_D02 do
alias IcdCode.ICDCode
def _D020 do
%ICDCode{full_code: "D020",
category_code: "D02",
short_code: "0",
full_name: "Carcinoma in situ of larynx",
short_name: "Carcinoma in situ of larynx",
category_name: "Carcinoma in situ of larynx"
}
end
def _D021 do
%ICDCode{full_code: "D021",
category_code: "D02",
short_code: "1",
full_name: "Carcinoma in situ of trachea",
short_name: "Carcinoma in situ of trachea",
category_name: "Carcinoma in situ of trachea"
}
end
def _D0220 do
%ICDCode{full_code: "D0220",
category_code: "D02",
short_code: "20",
full_name: "Carcinoma in situ of unspecified bronchus and lung",
short_name: "Carcinoma in situ of unspecified bronchus and lung",
category_name: "Carcinoma in situ of unspecified bronchus and lung"
}
end
def _D0221 do
%ICDCode{full_code: "D0221",
category_code: "D02",
short_code: "21",
full_name: "Carcinoma in situ of right bronchus and lung",
short_name: "Carcinoma in situ of right bronchus and lung",
category_name: "Carcinoma in situ of right bronchus and lung"
}
end
def _D0222 do
%ICDCode{full_code: "D0222",
category_code: "D02",
short_code: "22",
full_name: "Carcinoma in situ of left bronchus and lung",
short_name: "Carcinoma in situ of left bronchus and lung",
category_name: "Carcinoma in situ of left bronchus and lung"
}
end
def _D023 do
%ICDCode{full_code: "D023",
category_code: "D02",
short_code: "3",
full_name: "Carcinoma in situ of other parts of respiratory system",
short_name: "Carcinoma in situ of other parts of respiratory system",
category_name: "Carcinoma in situ of other parts of respiratory system"
}
end
def _D024 do
%ICDCode{full_code: "D024",
category_code: "D02",
short_code: "4",
full_name: "Carcinoma in situ of respiratory system, unspecified",
short_name: "Carcinoma in situ of respiratory system, unspecified",
category_name: "Carcinoma in situ of respiratory system, unspecified"
}
end
end
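
  # Illustrative lookup on the returned %ICDCode{} struct:
  #
  #     IcdCode.ICDCode.Codes_D02._D020().full_name
  #     #=> "Carcinoma in situ of larynx"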
| 33.885714 | 81 | 0.620152 |
f7ca57d51e22f636b3beae0b9d7359d3d5116dfe | 1,087 | ex | Elixir | lib/atfirs_web/live/page_live.ex | iboss-ptk/Atfirs | 1361c8f1b86971317b33212f9269aaffa0f09110 | [
"MIT"
] | 2 | 2021-01-23T09:23:20.000Z | 2021-02-12T09:15:45.000Z | lib/atfirs_web/live/page_live.ex | iboss-ptk/Atfirs | 1361c8f1b86971317b33212f9269aaffa0f09110 | [
"MIT"
] | null | null | null | lib/atfirs_web/live/page_live.ex | iboss-ptk/Atfirs | 1361c8f1b86971317b33212f9269aaffa0f09110 | [
"MIT"
] | null | null | null | defmodule AtfirsWeb.PageLive do
use AtfirsWeb, :live_view
@impl true
def mount(_params, _session, socket) do
{:ok, assign(socket, query: "", results: %{})}
end
@impl true
def handle_event("suggest", %{"q" => query}, socket) do
{:noreply, assign(socket, results: search(query), query: query)}
end
@impl true
def handle_event("search", %{"q" => query}, socket) do
case search(query) do
%{^query => vsn} ->
{:noreply, redirect(socket, external: "https://hexdocs.pm/#{query}/#{vsn}")}
_ ->
{:noreply,
socket
|> put_flash(:error, "No dependencies found matching \"#{query}\"")
|> assign(results: %{}, query: query)}
end
end
defp search(query) do
if not AtfirsWeb.Endpoint.config(:code_reloader) do
raise "action disabled when not in development"
end
for {app, desc, vsn} <- Application.started_applications(),
app = to_string(app),
String.starts_with?(app, query) and not List.starts_with?(desc, ~c"ERTS"),
into: %{},
do: {app, vsn}
end
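
  # Illustrative result (depends on the apps started on this node; versions
  # are charlists from :application.started_applications/0):
  #
  #     search("phoe")  #=> %{"phoenix" => '1.5.9'}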
end
| 27.175 | 84 | 0.600736 |
f7ca5e46de454f18f322bee08f109e96498aa4c4 | 1,624 | ex | Elixir | lib/google/protobuf/struct.pb.ex | coingaming/grpc | 33b8b3ceec007ae530a33b49a7b302a1ee356b05 | [
"Apache-2.0"
] | null | null | null | lib/google/protobuf/struct.pb.ex | coingaming/grpc | 33b8b3ceec007ae530a33b49a7b302a1ee356b05 | [
"Apache-2.0"
] | null | null | null | lib/google/protobuf/struct.pb.ex | coingaming/grpc | 33b8b3ceec007ae530a33b49a7b302a1ee356b05 | [
"Apache-2.0"
] | 1 | 2022-01-19T22:21:04.000Z | 2022-01-19T22:21:04.000Z | defmodule Google.Protobuf.NullValue do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
@type t :: integer | :NULL_VALUE
field :NULL_VALUE, 0
end
defmodule Google.Protobuf.Struct.FieldsEntry do
@moduledoc false
use Protobuf, map: true, syntax: :proto3
@type t :: %__MODULE__{
key: String.t(),
value: Google.Protobuf.Value.t() | nil
}
defstruct [:key, :value]
field :key, 1, type: :string
field :value, 2, type: Google.Protobuf.Value
end
defmodule Google.Protobuf.Struct do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
fields: %{String.t() => Google.Protobuf.Value.t() | nil}
}
defstruct [:fields]
field :fields, 1, repeated: true, type: Google.Protobuf.Struct.FieldsEntry, map: true
end
defmodule Google.Protobuf.Value do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
kind: {atom, any}
}
defstruct [:kind]
oneof :kind, 0
field :null_value, 1, type: Google.Protobuf.NullValue, enum: true, oneof: 0
field :number_value, 2, type: :double, oneof: 0
field :string_value, 3, type: :string, oneof: 0
field :bool_value, 4, type: :bool, oneof: 0
field :struct_value, 5, type: Google.Protobuf.Struct, oneof: 0
field :list_value, 6, type: Google.Protobuf.ListValue, oneof: 0
end
defmodule Google.Protobuf.ListValue do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
values: [Google.Protobuf.Value.t()]
}
defstruct [:values]
field :values, 1, repeated: true, type: Google.Protobuf.Value
end
| 23.882353 | 87 | 0.666872 |
f7ca9c1dd239e0edcb6aed2f8557349d4e0d6b0c | 1,661 | ex | Elixir | lib/collector.ex | areski/excdr-pusher | 45389e29ab97d11585f22fe29fd7da54d9f31ef8 | [
"MIT"
] | 3 | 2019-04-18T15:36:50.000Z | 2021-01-19T04:38:48.000Z | lib/collector.ex | areski/excdr-pusher | 45389e29ab97d11585f22fe29fd7da54d9f31ef8 | [
"MIT"
] | null | null | null | lib/collector.ex | areski/excdr-pusher | 45389e29ab97d11585f22fe29fd7da54d9f31ef8 | [
"MIT"
] | 3 | 2017-08-15T10:43:40.000Z | 2018-11-06T09:12:49.000Z | defmodule Collector do
use GenServer
require Logger
alias ExCdrPusher.HSqlite
@moduledoc """
  This module implements the heartbeat that retrieves CDRs from the SQLite
  database and then pushes them to the GenServer in charge of sending the
  CDRs to PostgreSQL.
"""
def start_link(state, opts \\ []) do
GenServer.start_link(__MODULE__, state, opts)
end
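
  # Minimal supervision sketch (initial state assumed; adapt to your tree):
  #
  #     children = [{Collector, %{}}]
  #     Supervisor.start_link(children, strategy: :one_for_one)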
def init(state) do
Logger.info(
"[init] start collecting CDRs from " <> Application.fetch_env!(:excdr_pusher, :sqlite_db)
)
# 0.1 sec
Process.send_after(
self(),
:timeout_tick,
Application.fetch_env!(:excdr_pusher, :tick_frequency)
)
# 1 sec
Process.send_after(self(), :timeout_1sec, 1 * 1000)
{:ok, state}
end
def handle_info(:timeout_tick, state) do
# Reschedule once more
schedule_task()
{:noreply, state}
end
def handle_info(:timeout_1sec, state) do
Logger.warn("(check collector alive) 1s heartbeat")
# 1 sec
Process.send_after(self(), :timeout_1sec, 1 * 1000)
{:noreply, state}
end
defp schedule_task do
# 0.1 sec
Process.send_after(
self(),
:timeout_tick,
Application.fetch_env!(:excdr_pusher, :tick_frequency)
)
if File.regular?(Application.fetch_env!(:excdr_pusher, :sqlite_db)) do
start_import()
else
Logger.error(
"Sqlite database not found: " <> Application.fetch_env!(:excdr_pusher, :sqlite_db)
)
end
end
defp start_import do
# HSqlite.count_cdr()
{:ok, cdr_list} = HSqlite.fetch_cdr()
HSqlite.mark_cdr_imported(cdr_list)
# Send CDRs to PostgreSQL
PusherPG.sync_push(cdr_list)
end
end
| 23.394366 | 95 | 0.664058 |
f7ca9ee3ff0f6869531e6620df4ee4cd47aeb224 | 3,135 | ex | Elixir | apps/omg_utils/lib/omg_utils/http_rpc/response.ex | omgnetwork/omg-childchain-v1 | 1e2313029ece2282c22ce411edc078a17e6bba09 | [
"Apache-2.0"
] | 1 | 2020-10-06T03:07:47.000Z | 2020-10-06T03:07:47.000Z | apps/omg_utils/lib/omg_utils/http_rpc/response.ex | omgnetwork/omg-childchain-v1 | 1e2313029ece2282c22ce411edc078a17e6bba09 | [
"Apache-2.0"
] | 9 | 2020-09-16T15:31:17.000Z | 2021-03-17T07:12:35.000Z | apps/omg_utils/lib/omg_utils/http_rpc/response.ex | omgnetwork/omg-childchain-v1 | 1e2313029ece2282c22ce411edc078a17e6bba09 | [
"Apache-2.0"
] | 1 | 2020-09-30T17:17:27.000Z | 2020-09-30T17:17:27.000Z | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Utils.HttpRPC.Response do
@moduledoc """
  Serializes the response into the expected result/data format.
"""
alias OMG.Utils.HttpRPC.Encoding
@sha String.replace(elem(System.cmd("git", ["rev-parse", "--short=7", "HEAD"]), 0), "\n", "")
@type response_t :: %{version: binary(), success: boolean(), data: map()}
def serialize_page(data, data_paging) do
data
|> serialize()
|> Map.put(:data_paging, data_paging)
end
@doc """
  Appends the result of an operation to the response data, forming the standard API response structure.
"""
@spec serialize(any()) :: response_t()
def serialize(%{object: :error} = error) do
to_response(error, :error)
end
def serialize(data) do
data
|> sanitize()
|> to_response(:success)
end
@doc """
  Removes or encodes fields in the response that cannot be serialized to an API response.
  By default, it:
    * encodes all binary values to hex
    * removes metadata fields
  Provides a standard data structure for the API response.
"""
@spec sanitize(any()) :: any()
def sanitize(response)
# serialize all DateTimes to ISO8601 formatted strings
def sanitize(%DateTime{} = datetime) do
datetime |> DateTime.truncate(:second) |> DateTime.to_iso8601()
end
def sanitize(list) when is_list(list) do
Enum.map(list, &sanitize/1)
end
def sanitize(map_or_struct) when is_map(map_or_struct) do
map_or_struct
|> to_map()
|> sanitize_map()
end
def sanitize(bin) when is_binary(bin), do: Encoding.to_hex(bin)
def sanitize({:skip_hex_encode, bin}), do: bin
def sanitize({{key, value}, _}), do: Map.put_new(%{}, key, value)
def sanitize({key, value}), do: Map.put_new(%{}, key, value)
def sanitize(value), do: value
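
  # Illustrative behaviour (byte values assumed; `Encoding.to_hex/1` is
  # expected to return a "0x"-prefixed hex string):
  #
  #     sanitize(%{tx_hash: <<0xAB, 0xCD>>})
  #     #=> %{tx_hash: "0xabcd"}
  #
  #     sanitize(%{skip_hex_encode: [:name], name: "plain"})
  #     #=> %{name: "plain"}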
@doc """
Derive the running service's version for adding to a response.
"""
@spec version(Application.app()) :: String.t()
def version(app) do
{:ok, vsn} = :application.get_key(app, :vsn)
List.to_string(vsn) <> "+" <> @sha
end
  # Allows sanitization to be skipped for specific keys, provided as a list under the :skip_hex_encode key
defp sanitize_map(map) do
{skip_keys, map} = Map.pop(map, :skip_hex_encode, [])
skip_keys = MapSet.new(skip_keys)
map
|> Enum.map(fn {k, v} ->
case MapSet.member?(skip_keys, k) do
true -> {k, v}
false -> {k, sanitize(v)}
end
end)
|> Map.new()
end
defp to_map(struct), do: Map.drop(struct, [:__struct__, :__meta__])
defp to_response(data, result) do
%{
success: result == :success,
data: data
}
end
end
| 28.761468 | 95 | 0.671132 |
f7cab46f739dc2a7b9b1f0596f76104362025bc1 | 153 | ex | Elixir | src/proto_app/apps/gateway/lib/gateway/portal.ex | fortelabsinc/ethermint_prototype | c54f54ad9090070b79599067133edcd4949c1aff | [
"MIT"
] | null | null | null | src/proto_app/apps/gateway/lib/gateway/portal.ex | fortelabsinc/ethermint_prototype | c54f54ad9090070b79599067133edcd4949c1aff | [
"MIT"
] | null | null | null | src/proto_app/apps/gateway/lib/gateway/portal.ex | fortelabsinc/ethermint_prototype | c54f54ad9090070b79599067133edcd4949c1aff | [
"MIT"
] | 1 | 2020-01-22T19:18:34.000Z | 2020-01-22T19:18:34.000Z | defmodule Gateway.Portal do
@doc """
Simple function to test code completion setup
"""
@spec test :: :world
def test() do
:world
end
end
| 15.3 | 47 | 0.647059 |
f7cab84f1586958e7263996fb1bec4718e0195d1 | 278 | exs | Elixir | test/secret_santa_web/views/layout_view_test.exs | mariaclrd/secret_santa | 18e3ea38647370b45cca2df150b5a839842608f1 | [
"MIT"
] | null | null | null | test/secret_santa_web/views/layout_view_test.exs | mariaclrd/secret_santa | 18e3ea38647370b45cca2df150b5a839842608f1 | [
"MIT"
] | null | null | null | test/secret_santa_web/views/layout_view_test.exs | mariaclrd/secret_santa | 18e3ea38647370b45cca2df150b5a839842608f1 | [
"MIT"
] | null | null | null | defmodule SecretSantaWeb.LayoutViewTest do
use SecretSantaWeb.ConnCase, async: true
# When testing helpers, you may want to import Phoenix.HTML and
# use functions such as safe_to_string() to convert the helper
# result into an HTML string.
# import Phoenix.HTML
end
| 30.888889 | 65 | 0.773381 |
f7cabc50c4175a76bd8bbbf72ed83de9d231023f | 2,421 | ex | Elixir | clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__training_output.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__training_output.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__training_output.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1TrainingOutput do
@moduledoc """
Represents results of a training job. Output only.
## Attributes
- completedTrialCount (String.t): The number of hyperparameter tuning trials that completed successfully. Only set for hyperparameter tuning jobs. Defaults to: `null`.
- consumedMLUnits (float()): The amount of ML units consumed by the job. Defaults to: `null`.
- isHyperparameterTuningJob (boolean()): Whether this job is a hyperparameter tuning job. Defaults to: `null`.
- trials ([GoogleCloudMlV1HyperparameterOutput]): Results for individual Hyperparameter trials. Only set for hyperparameter tuning jobs. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:completedTrialCount => any(),
:consumedMLUnits => any(),
:isHyperparameterTuningJob => any(),
:trials =>
list(GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1HyperparameterOutput.t())
}
field(:completedTrialCount)
field(:consumedMLUnits)
field(:isHyperparameterTuningJob)
field(
:trials,
as: GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1HyperparameterOutput,
type: :list
)
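  # Illustrative sketch (response body assumed): a JSON payload decodes into
  # this struct through the Poison implementations below, e.g.
  #
  #     Poison.decode!(body,
  #       as: %GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1TrainingOutput{})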
end
defimpl Poison.Decoder, for: GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1TrainingOutput do
def decode(value, options) do
GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1TrainingOutput.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1TrainingOutput do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.428571 | 169 | 0.752995 |
f7caec82d27dd0e660dfcf0bc21b6c4b7086a225 | 329 | ex | Elixir | lib/online_editor_web/models/auth_user.ex | zzats/online-editor | 2532315b40c974fe766e960e0b0933773907906d | [
"MIT"
] | null | null | null | lib/online_editor_web/models/auth_user.ex | zzats/online-editor | 2532315b40c974fe766e960e0b0933773907906d | [
"MIT"
] | null | null | null | lib/online_editor_web/models/auth_user.ex | zzats/online-editor | 2532315b40c974fe766e960e0b0933773907906d | [
"MIT"
] | null | null | null | defmodule OnlineEditorWeb.Models.AuthUser do
alias Ueberauth.Auth
def basic_info(%Auth{} = auth) do
IO.puts("Auth info")
IO.inspect(auth)
{:ok,
%{
avatar: auth.info.image,
email: auth.info.email,
first_name: auth.info.first_name,
last_name: auth.info.last_name
}}
end
end
| 20.5625 | 44 | 0.629179 |
f7caee19488fad08f2cb7da1c31202fb5519f4a8 | 1,065 | ex | Elixir | lib/screens_web/channels/user_socket.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 3 | 2021-07-27T14:11:00.000Z | 2022-01-03T14:16:43.000Z | lib/screens_web/channels/user_socket.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 444 | 2021-03-10T20:57:17.000Z | 2022-03-31T16:00:35.000Z | lib/screens_web/channels/user_socket.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | null | null | null | defmodule ScreensWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", ScreensWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# ScreensWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31.323529 | 83 | 0.696714 |
f7cafcbab77e0760c1298f43836d4cabba135f0c | 622 | ex | Elixir | samples/client/petstore/elixir/lib/swagger_petstore/model/mixed_properties_and_additional_properties_class.ex | bruceadams/swagger-codegen-1 | 2e5289c4d74eafd48e3a324ccdd9e39323b5fb06 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/model/mixed_properties_and_additional_properties_class.ex | bruceadams/swagger-codegen-1 | 2e5289c4d74eafd48e3a324ccdd9e39323b5fb06 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/model/mixed_properties_and_additional_properties_class.ex | bruceadams/swagger-codegen-1 | 2e5289c4d74eafd48e3a324ccdd9e39323b5fb06 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule SwaggerPetstore.Model.MixedPropertiesAndAdditionalPropertiesClass do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"uuid",
:"dateTime",
:"map"
]
end
defimpl Poison.Decoder, for: SwaggerPetstore.Model.MixedPropertiesAndAdditionalPropertiesClass do
import SwaggerPetstore.Deserializer
def decode(value, options) do
value
|> deserialize(:"map", :map, SwaggerPetstore.Model.Animal, options)
end
end
| 23.923077 | 97 | 0.73955 |
f7cb13146d7aea4d31e8111a9e7d68a1ebff42ec | 76 | exs | Elixir | examples/.formatter.exs | kianmeng/benchee_markdown | 701ac80e0d862888e17ccea29cb784ab1e943de5 | [
"MIT"
] | 8 | 2019-03-15T08:52:52.000Z | 2021-07-02T16:59:16.000Z | examples/.formatter.exs | kianmeng/benchee_markdown | 701ac80e0d862888e17ccea29cb784ab1e943de5 | [
"MIT"
] | 6 | 2019-03-16T12:50:04.000Z | 2020-10-13T16:32:04.000Z | examples/.formatter.exs | kianmeng/benchee_markdown | 701ac80e0d862888e17ccea29cb784ab1e943de5 | [
"MIT"
] | 7 | 2019-03-15T08:53:13.000Z | 2020-10-22T11:38:22.000Z | [
inputs: ["{mix,.formatter}.exs", "bench/**/*.exs"],
line_length: 80
]
| 15.2 | 53 | 0.552632 |
f7cb66683777038a01b1d90fe1e485bdfd7eaba0 | 4,384 | exs | Elixir | test/absinthe/phase/document/validation/variables_of_correct_type_test.exs | pulkit110/absinthe | fa2060307a401d0943bde72d08267602e4027889 | [
"MIT"
] | null | null | null | test/absinthe/phase/document/validation/variables_of_correct_type_test.exs | pulkit110/absinthe | fa2060307a401d0943bde72d08267602e4027889 | [
"MIT"
] | null | null | null | test/absinthe/phase/document/validation/variables_of_correct_type_test.exs | pulkit110/absinthe | fa2060307a401d0943bde72d08267602e4027889 | [
"MIT"
] | null | null | null | defmodule Absinthe.Phase.Document.Validation.VariablesOfCorrectTypeTest do
@phase Absinthe.Phase.Document.Arguments.VariableTypesMatch
use Absinthe.ValidationPhaseCase, async: true, phase: @phase
defp error_message(op, variable_name, var_type, arg_type) do
var = %Absinthe.Blueprint.Input.Variable{name: variable_name}
@phase.error_message(op, var, var_type, arg_type)
end
test "types of variables match types of arguments" do
{:ok, %{errors: errors}} =
Absinthe.run(
"""
query test($intArg: Int!) {
complicatedArgs {
stringArgField(stringArg: $intArg)
}
}
""",
Absinthe.Fixtures.PetsSchema,
variables: %{"intArg" => 5}
)
expected_error_msg = error_message("test", "intArg", "Int!", "String")
assert expected_error_msg in (errors |> Enum.map(& &1.message))
end
test "variable type check handles non existent type" do
{:ok, %{errors: errors}} =
Absinthe.run(
"""
query test($intArg: DoesNotExist!) {
complicatedArgs {
stringArgField(stringArg: $intArg)
}
}
""",
Absinthe.Fixtures.PetsSchema,
variables: %{"intArg" => 5}
)
expected_error_msg = error_message("test", "intArg", "DoesNotExist!", "String")
assert expected_error_msg in (errors |> Enum.map(& &1.message))
end
test "types of variables match types of arguments even when the value is null" do
{:ok, %{errors: errors}} =
Absinthe.run(
"""
query test($intArg: Int) {
complicatedArgs {
stringArgField(stringArg: $intArg)
}
}
""",
Absinthe.Fixtures.PetsSchema,
variables: %{"intArg" => nil}
)
expected_error_msg = error_message("test", "intArg", "Int", "String")
assert expected_error_msg in (errors |> Enum.map(& &1.message))
end
test "types of variables match types of arguments in named fragments" do
{:ok, %{errors: errors}} =
Absinthe.run(
"""
query test($intArg: Int) {
complicatedArgs {
...Fragment
}
}
fragment Fragment on ComplicatedArgs {
stringArgField(stringArg: $intArg)
}
""",
Absinthe.Fixtures.PetsSchema,
variables: %{"intArg" => 5}
)
expected_error_msg = error_message("test", "intArg", "Int", "String")
assert expected_error_msg in (errors |> Enum.map(& &1.message))
end
test "non null types of variables match non null types of arguments" do
{:ok, %{errors: errors}} =
Absinthe.run(
"""
query test($intArg: Int) {
complicatedArgs {
nonNullIntArgField(nonNullIntArg: $intArg)
}
}
""",
Absinthe.Fixtures.PetsSchema,
variables: %{"intArg" => 5}
)
expected_error_msg = error_message("test", "intArg", "Int", "Int!")
assert expected_error_msg in (errors |> Enum.map(& &1.message))
end
test "list types of variables match list types of arguments" do
result =
Absinthe.run(
"""
query test($stringListArg: [String!]) {
complicatedArgs {
stringListArgField(stringListArg: $stringListArg)
}
}
""",
Absinthe.Fixtures.PetsSchema,
variables: %{"stringListArg" => ["a"]}
)
assert {:ok, %{data: %{"complicatedArgs" => nil}}} = result
end
test "variable can be nullable for non-nullable argument with default" do
result =
Absinthe.run(
"""
query booleanArgQueryWithDefault($booleanArg: Boolean) {
complicatedArgs {
optionalNonNullBooleanArgField(optionalBooleanArg: $booleanArg)
}
}
""",
Absinthe.Fixtures.PetsSchema
)
assert {:ok, %{data: %{"complicatedArgs" => nil}}} = result
end
test "variable with default can be nullable for non-nullable argument" do
result =
Absinthe.run(
"""
query booleanArgQueryWithDefault($booleanArg: Boolean = true) {
complicatedArgs {
nonNullBooleanArgField(nonNullBooleanArg: $booleanArg)
}
}
""",
Absinthe.Fixtures.PetsSchema
)
assert {:ok, %{data: %{"complicatedArgs" => nil}}} = result
end
end
| 28.283871 | 83 | 0.583714 |
f7cb8d34b84f25403108f3abf1b2f9b04bfd24c8 | 13,394 | ex | Elixir | lib/aws/generated/media_convert.ex | benmmari/aws-elixir | b97477498a9e8ba0d46a09255302d88c6a1c8573 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/media_convert.ex | benmmari/aws-elixir | b97477498a9e8ba0d46a09255302d88c6a1c8573 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/media_convert.ex | benmmari/aws-elixir | b97477498a9e8ba0d46a09255302d88c6a1c8573 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.MediaConvert do
@moduledoc """
AWS Elemental MediaConvert
"""
@doc """
Associates an AWS Certificate Manager (ACM) Amazon Resource Name (ARN) with
AWS Elemental MediaConvert.
"""
def associate_certificate(client, input, options \\ []) do
path_ = "/2017-08-29/certificates"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Permanently cancel a job. Once you have canceled a job, you can't start it
again.
"""
def cancel_job(client, id, input, options \\ []) do
path_ = "/2017-08-29/jobs/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 202)
end
@doc """
Create a new transcoding job. For information about jobs and job settings,
see the User Guide at
http://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
"""
def create_job(client, input, options \\ []) do
path_ = "/2017-08-29/jobs"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
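  # Illustrative usage sketch (client fields and job settings are assumptions,
  # not defined in this module):
  #
  #     client = %AWS.Client{access_key_id: "...", secret_access_key: "...",
  #                          region: "us-east-1"}
  #     {:ok, body, _response} =
  #       AWS.MediaConvert.create_job(client, %{"role" => role_arn, "settings" => settings})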
@doc """
Create a new job template. For information about job templates see the User
Guide at http://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
"""
def create_job_template(client, input, options \\ []) do
path_ = "/2017-08-29/jobTemplates"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Create a new preset. For information about job templates see the User Guide
at http://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
"""
def create_preset(client, input, options \\ []) do
path_ = "/2017-08-29/presets"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Create a new transcoding queue. For information about queues, see Working
With Queues in the User Guide at
https://docs.aws.amazon.com/mediaconvert/latest/ug/working-with-queues.html
"""
def create_queue(client, input, options \\ []) do
path_ = "/2017-08-29/queues"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Permanently delete a job template you have created.
"""
def delete_job_template(client, name, input, options \\ []) do
path_ = "/2017-08-29/jobTemplates/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 202)
end
@doc """
Permanently delete a preset you have created.
"""
def delete_preset(client, name, input, options \\ []) do
path_ = "/2017-08-29/presets/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 202)
end
@doc """
Permanently delete a queue you have created.
"""
def delete_queue(client, name, input, options \\ []) do
path_ = "/2017-08-29/queues/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 202)
end
@doc """
  Send a request with an empty body to the regional API endpoint to get your
account API endpoint.
"""
def describe_endpoints(client, input, options \\ []) do
path_ = "/2017-08-29/endpoints"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Removes an association between the Amazon Resource Name (ARN) of an AWS
Certificate Manager (ACM) certificate and an AWS Elemental MediaConvert
resource.
"""
def disassociate_certificate(client, arn, input, options \\ []) do
path_ = "/2017-08-29/certificates/#{URI.encode(arn)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 202)
end
@doc """
Retrieve the JSON for a specific completed transcoding job.
"""
def get_job(client, id, options \\ []) do
path_ = "/2017-08-29/jobs/#{URI.encode(id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieve the JSON for a specific job template.
"""
def get_job_template(client, name, options \\ []) do
path_ = "/2017-08-29/jobTemplates/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieve the JSON for a specific preset.
"""
def get_preset(client, name, options \\ []) do
path_ = "/2017-08-29/presets/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieve the JSON for a specific queue.
"""
def get_queue(client, name, options \\ []) do
path_ = "/2017-08-29/queues/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieve a JSON array of up to twenty of your job templates. This will
return the templates themselves, not just a list of them. To retrieve the
next twenty templates, use the nextToken string returned with the array
"""
def list_job_templates(client, category \\ nil, list_by \\ nil, max_results \\ nil, next_token \\ nil, order \\ nil, options \\ []) do
path_ = "/2017-08-29/jobTemplates"
headers = []
query_ = []
query_ = if !is_nil(order) do
[{"order", order} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(list_by) do
[{"listBy", list_by} | query_]
else
query_
end
query_ = if !is_nil(category) do
[{"category", category} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieve a JSON array of up to twenty of your most recently created jobs.
This array includes in-process, completed, and errored jobs. This will
return the jobs themselves, not just a list of the jobs. To retrieve the
twenty next most recent jobs, use the nextToken string returned with the
array.
"""
def list_jobs(client, max_results \\ nil, next_token \\ nil, order \\ nil, queue \\ nil, status \\ nil, options \\ []) do
path_ = "/2017-08-29/jobs"
headers = []
query_ = []
query_ = if !is_nil(status) do
[{"status", status} | query_]
else
query_
end
query_ = if !is_nil(queue) do
[{"queue", queue} | query_]
else
query_
end
query_ = if !is_nil(order) do
[{"order", order} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieve a JSON array of up to twenty of your presets. This will return the
presets themselves, not just a list of them. To retrieve the next twenty
presets, use the nextToken string returned with the array.
"""
def list_presets(client, category \\ nil, list_by \\ nil, max_results \\ nil, next_token \\ nil, order \\ nil, options \\ []) do
path_ = "/2017-08-29/presets"
headers = []
query_ = []
query_ = if !is_nil(order) do
[{"order", order} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(list_by) do
[{"listBy", list_by} | query_]
else
query_
end
query_ = if !is_nil(category) do
[{"category", category} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieve a JSON array of up to twenty of your queues. This will return the
queues themselves, not just a list of them. To retrieve the next twenty
queues, use the nextToken string returned with the array.
"""
def list_queues(client, list_by \\ nil, max_results \\ nil, next_token \\ nil, order \\ nil, options \\ []) do
path_ = "/2017-08-29/queues"
headers = []
query_ = []
query_ = if !is_nil(order) do
[{"order", order} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(list_by) do
[{"listBy", list_by} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Retrieve the tags for a MediaConvert resource.
"""
def list_tags_for_resource(client, arn, options \\ []) do
path_ = "/2017-08-29/tags/#{URI.encode(arn)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Add tags to a MediaConvert queue, preset, or job template. For information
about tagging, see the User Guide at
https://docs.aws.amazon.com/mediaconvert/latest/ug/tagging-resources.html
"""
def tag_resource(client, input, options \\ []) do
path_ = "/2017-08-29/tags"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Remove tags from a MediaConvert queue, preset, or job template. For
information about tagging, see the User Guide at
https://docs.aws.amazon.com/mediaconvert/latest/ug/tagging-resources.html
"""
def untag_resource(client, arn, input, options \\ []) do
path_ = "/2017-08-29/tags/#{URI.encode(arn)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Modify one of your existing job templates.
"""
def update_job_template(client, name, input, options \\ []) do
path_ = "/2017-08-29/jobTemplates/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Modify one of your existing presets.
"""
def update_preset(client, name, input, options \\ []) do
path_ = "/2017-08-29/presets/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Modify one of your existing queues.
"""
def update_queue(client, name, input, options \\ []) do
path_ = "/2017-08-29/queues/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "mediaconvert"}
host = build_host("mediaconvert", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
| 30.234763 | 136 | 0.635732 |
f7cbd7e19830531a1d6ffd546b7fa86ec0d13a2a | 4,532 | exs | Elixir | test/mix_generator_test.exs | mbramson/mix_generator | d0283fc43c3f873c370cdcd1939f18014da37528 | [
"Apache-2.0"
] | 123 | 2017-04-10T22:20:47.000Z | 2022-02-28T00:02:25.000Z | test/mix_generator_test.exs | grahamhay/mix_generator | cb5c60a94542fe0688f7fa10c8a2100c4d1cbf49 | [
"Apache-2.0"
] | 9 | 2017-04-20T06:57:47.000Z | 2021-01-21T19:49:08.000Z | test/mix_generator_test.exs | grahamhay/mix_generator | cb5c60a94542fe0688f7fa10c8a2100c4d1cbf49 | [
"Apache-2.0"
] | 17 | 2017-04-28T13:16:59.000Z | 2020-08-29T13:32:39.000Z | Code.require_file "./test_helper.exs", __DIR__
defmodule MixGeneratorTest do
use ExUnit.Case
@template Path.join(__DIR__, "../test_templates/project")
@child_template Path.join(__DIR__, "../test_templates/child")
@project_name "cecil"
@project_name_camel_case "Cecil"
test "basic project can be created" do
in_tmp(%{
setup: fn ->
Mix.Tasks.Gen.run([ @template, @project_name ])
end,
test: fn ->
~w{ .gitignore
README.md
mix.exs
config/config.exs
lib/#{@project_name}.ex
test/#{@project_name}_test.exs
test/test_helper.exs
}
|> Enum.each(&assert_file/1)
assert_file("mix.exs", ~r/@name\s+:#{@project_name}/)
assert_file("lib/#{@project_name}.ex", ~r/defmodule #{@project_name_camel_case}/)
end})
end
test "basic project can be created when name is capitalized" do
in_tmp(%{
setup: fn ->
Mix.Tasks.Gen.run([ @template, String.capitalize(@project_name) ])
end,
test: fn ->
~w{ .gitignore
README.md
mix.exs
config/config.exs
lib/#{@project_name}.ex
test/#{@project_name}_test.exs
test/test_helper.exs
}
|> Enum.each(&assert_file/1)
assert_file("mix.exs", ~r/@name\s+:#{@project_name}/)
assert_file("lib/#{@project_name}.ex", ~r/defmodule #{@project_name_camel_case}/)
end})
end
test "project with --sup can be created" do
in_tmp(%{
setup: fn ->
Mix.Tasks.Gen.run([ @template, @project_name, "--sup" ])
end,
test: fn ->
~w{ .gitignore
README.md
mix.exs
config/config.exs
lib/#{@project_name}.ex
lib/#{@project_name}/application.ex
test/#{@project_name}_test.exs
test/test_helper.exs
}
|> Enum.each(&assert_file/1)
%{
"mix.exs" =>
~r/@name\s+:#{@project_name}/,
"lib/#{@project_name}.ex" =>
~r/defmodule #{@project_name_camel_case}/,
"lib/#{@project_name}/application.ex" =>
~r/defmodule #{@project_name_camel_case}.Application/,
"lib/#{@project_name}/application.ex" =>
~r/#{@project_name_camel_case}.Supervisor/
}
|>
Enum.each(fn {file, content} ->
assert_file(file, content)
end)
end})
end
# the child project is like project, but adds a file lib/child.ex, and removes
# lib/#{project_name}.ex
test "template based on another can be created" do
in_tmp(%{
setup: fn ->
Mix.Tasks.Gen.run([ @child_template, @project_name,
"--name_of_child", "cedric" ])
end,
test: fn ->
~w{ .gitignore
README.md
mix.exs
config/config.exs
lib/child.ex
test/#{@project_name}_test.exs
test/test_helper.exs
}
|> Enum.each(&assert_file/1)
%{
"mix.exs" =>
~r/@name\s+:#{@project_name}/,
"lib/child.ex" =>
~r/Child is called cedric/,
}
|>
Enum.each(fn {file, content} ->
assert_file(file, content)
end)
assert !File.exists?("lib/#{@project_name}.ex")
end})
end
############################################################
# stolen from mix/test/tasks/new
defp assert_file(file) do
assert File.regular?(file), "Expected #{file} to exist, but does not"
end
defp assert_file(file, matcher) when is_function(matcher, 1) do
assert_file(file)
matcher.(File.read!(file))
end
defp assert_file(file, match) do
assert_file file, &(assert &1 =~ match)
end
def in_tmp(%{setup: setup, test: tests}) do
System.tmp_dir!
|> File.cd!(fn ->
File.rm_rf!(@project_name)
setup.()
assert File.dir?(@project_name)
File.cd!(@project_name, fn ->
tests.()
end)
File.rm_rf!(@project_name)
end)
end
end
| 29.428571 | 93 | 0.487864 |
f7cbe7de001cf863215c963a990b9babf6e7f346 | 1,280 | ex | Elixir | lib/hermit/Sink.ex | Tigger2014/hermit | ccfb4f24fa9f4d771f081c787dbbf6266d9a5ecd | [
"MIT"
] | 6 | 2017-07-24T00:02:01.000Z | 2021-11-26T11:49:13.000Z | lib/hermit/Sink.ex | Tigger2014/hermit | ccfb4f24fa9f4d771f081c787dbbf6266d9a5ecd | [
"MIT"
] | 1 | 2018-09-10T16:19:07.000Z | 2018-09-10T16:19:07.000Z | lib/hermit/Sink.ex | Tigger2014/hermit | ccfb4f24fa9f4d771f081c787dbbf6266d9a5ecd | [
"MIT"
] | 1 | 2018-09-10T14:01:19.000Z | 2018-09-10T14:01:19.000Z | defmodule Hermit.Sink do
require Logger
def listen(port) do
{:ok, addr} = Hermit.Config.sink_bind
|> String.to_charlist()
|> :inet_parse.address()
{:ok, socket} = :gen_tcp.listen(port,
[:binary, active: false, reuseaddr: true, ifaddr: addr])
Logger.info "Sink listening on #{:inet.ntoa addr}:#{port}"
listen_loop(socket)
end
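  # Illustrative usage sketch (port assumed): once the sink is listening,
  # piping any stream into it opens a pipe, e.g. from a shell:
  #
  #     $ echo "hello" | nc localhost 1337
  #     Your pipe is available at <base_url>/v/<pipe_id>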
defp listen_loop(socket) do
base_url = Hermit.Config.base_url
{:ok, client} = :gen_tcp.accept(socket)
pipe_id = Hermit.Plumber.new_pipe()
:gen_tcp.send(client, "Your pipe is available at #{base_url}/v/#{pipe_id}\n")
Logger.info("pipe opened: #{pipe_id}")
{:ok, pid} = Task.Supervisor.start_child(Hermit.TaskSupervisor, fn ->
serve(client, pipe_id)
end)
:ok = :gen_tcp.controlling_process(client, pid)
listen_loop(socket)
end
defp serve(socket, pipe_id) do
case :gen_tcp.recv(socket, 0) do
{:ok, chunk} ->
case Hermit.Plumber.pipe_input(pipe_id, chunk) do
:ok ->
serve(socket, pipe_id)
:file_too_large ->
:gen_tcp.send(socket, "max pipe size reached")
end
{:error, :closed} ->
Logger.info("pipe closed: #{pipe_id}")
Hermit.Plumber.close_pipe(pipe_id)
end
end
end
| 25.6 | 81 | 0.624219 |
f7cbf377d8a13f10663ff54c37471db2e24d24c6 | 1,242 | exs | Elixir | config/config.exs | agnaldo4j/prevayler-iex | 4ff587f954149a94ff10a22550198ab79e9fcfe0 | [
"BSD-3-Clause"
] | 1 | 2017-04-06T23:16:03.000Z | 2017-04-06T23:16:03.000Z | config/config.exs | agnaldo4j/prevayler-iex | 4ff587f954149a94ff10a22550198ab79e9fcfe0 | [
"BSD-3-Clause"
] | null | null | null | config/config.exs | agnaldo4j/prevayler-iex | 4ff587f954149a94ff10a22550198ab79e9fcfe0 | [
"BSD-3-Clause"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :prevayler_iex, Prevalent.Journaling,
snapshot_path: "db/snapshot",
commands_path: "db/commands"
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :prevalence, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:prevalence, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.485714 | 73 | 0.747987 |
f7cc041ed34e7884d9888f0fe32d19d6854c15b4 | 287 | ex | Elixir | apps/authenticator/lib/sessions/cache.ex | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 9 | 2020-10-13T14:11:37.000Z | 2021-08-12T18:40:08.000Z | apps/authenticator/lib/sessions/cache.ex | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 28 | 2020-10-04T14:43:48.000Z | 2021-12-07T16:54:22.000Z | apps/authenticator/lib/sessions/cache.ex | dcdourado/watcher_ex | ce80df81610a6e9b77612911aac2a6d6cf4de8d5 | [
"Apache-2.0"
] | 3 | 2020-11-25T20:59:47.000Z | 2021-08-30T10:36:58.000Z | defmodule Authenticator.Sessions.Cache do
@moduledoc """
Sessions generic cache.
This is important to avoid be going on database in any request and
to be faster in authentication requests.
"""
use Nebulex.Cache, otp_app: :authenticator, adapter: Nebulex.Adapters.Local
end
| 26.090909 | 77 | 0.763066 |
f7cc25aa2d55623d936fa4817d18a99b9b7be26d | 226 | ex | Elixir | test/support/project_case.ex | StabbyMcDuck/elixir_ravelry | c503fc019842bb82ad58673a1b6867e3214fc7e1 | [
"Apache-2.0"
] | 21 | 2017-08-27T13:59:18.000Z | 2020-10-07T03:35:22.000Z | test/support/project_case.ex | StabbyMcDuck/elixir_ravelry | c503fc019842bb82ad58673a1b6867e3214fc7e1 | [
"Apache-2.0"
] | 2 | 2017-09-02T15:49:47.000Z | 2017-09-02T17:43:29.000Z | test/support/project_case.ex | StabbyMcDuck/elixir_ravelry | c503fc019842bb82ad58673a1b6867e3214fc7e1 | [
"Apache-2.0"
] | 7 | 2017-09-02T15:11:49.000Z | 2019-08-20T19:00:48.000Z | defmodule ElixirRavelry.ProjectCase do
alias ElixirRavelry.Repo
alias ElixirRavelryWeb.Project
def create_project(bolt_sips_conn) do
Repo.Project.create(bolt_sips_conn, %Project{name: Faker.Name.name()})
end
end | 22.6 | 74 | 0.792035 |
f7cc35cd93c905613ddc97656d12c7dcc3ce24b8 | 1,803 | exs | Elixir | apps/webapp/spec/models/user_spec.exs | iporaitech/phoenix-webpack-react-docker | c454db0b851b9d00db868a64b96e567d4a0cc3d9 | [
"MIT"
] | 25 | 2016-08-09T15:04:37.000Z | 2021-11-15T12:20:27.000Z | apps/webapp/spec/models/user_spec.exs | iporaitech/phoenix-webpack-react-docker | c454db0b851b9d00db868a64b96e567d4a0cc3d9 | [
"MIT"
] | 62 | 2016-05-23T20:16:40.000Z | 2017-04-18T18:36:29.000Z | apps/webapp/spec/models/user_spec.exs | iporaitech/phoenix-webpack-react-docker | c454db0b851b9d00db868a64b96e567d4a0cc3d9 | [
"MIT"
] | 10 | 2016-08-17T15:29:21.000Z | 2017-02-28T07:58:30.000Z | defmodule Webapp.UserSpec do
use ESpec.Phoenix, model: User
alias Webapp.User
@valid_params %{
first_name: "J",
last_name: "R",
email: "[email protected]",
role: "admin",
password: "12341234"
}
describe "changeset/2" do
let :min, do: Keyword.fetch!(User.valid_name_length, :min)
let :max, do: Keyword.fetch!(User.valid_name_length, :max)
# Apparently is not possible to loop through a list to create examples (it).
# For example for f <- ~w(first_name last_name), do it "validates length of #{f}" end
# ** (FunctionClauseError) no function clause matching in ESpec.ExampleHelpers.it/1
#
it "validates length of first_name and last_name" do
expect User.changeset(%User{}, @valid_params).valid?
|> to(be_true)
for f <- [:first_name, :last_name] do
too_short = %{@valid_params | f => String.slice(@valid_params.first_name, 0, min-1)}
too_long = %{@valid_params | f => String.duplicate("a", max+1)}
for params <- [too_short, too_long] do
expect User.changeset(%User{}, params).valid?
|> to(be_false)
end
end
end
end # END changeset/2
describe "registration_changeset/2" do
it "calls changeset" do
allow User |> to(accept :changeset)
User.registration_changeset(%User{}, @valid_params)
expect User |> to(accepted :changeset, [%User{}, @valid_params])
end
it "validates inclusion of role in User.valid_roles" do
for r <- ["", "anything"] do
expect User.registration_changeset(%User{}, %{@valid_params | role: r}).valid?
|> to(be_false)
end
end
it "validates format and uniqueness of email"
it "validates length of password"
it "encrypts password hash"
end # END registration_changeset/2
end
| 31.086207 | 92 | 0.642263 |
f7cc6be0ce178de51961a41dbbd6c4ff05d02438 | 2,587 | ex | Elixir | clients/content/lib/google_api/content/v2/model/datafeedstatuses_custom_batch_request_entry.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/datafeedstatuses_custom_batch_request_entry.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/content/lib/google_api/content/v2/model/datafeedstatuses_custom_batch_request_entry.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequestEntry do
@moduledoc """
A batch entry encoding a single non-batch datafeedstatuses request.
## Attributes
* `batchId` (*type:* `integer()`, *default:* `nil`) - An entry ID, unique within the batch request.
* `country` (*type:* `String.t`, *default:* `nil`) - The country for which to get the datafeed status. If this parameter is provided then language must also be provided. Note that for multi-target datafeeds this parameter is required.
* `datafeedId` (*type:* `String.t`, *default:* `nil`) - The ID of the data feed to get.
* `language` (*type:* `String.t`, *default:* `nil`) - The language for which to get the datafeed status. If this parameter is provided then country must also be provided. Note that for multi-target datafeeds this parameter is required.
* `merchantId` (*type:* `String.t`, *default:* `nil`) - The ID of the managing account.
* `method` (*type:* `String.t`, *default:* `nil`) - The method of the batch entry. Acceptable values are: - "`get`"
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:batchId => integer(),
:country => String.t(),
:datafeedId => String.t(),
:language => String.t(),
:merchantId => String.t(),
:method => String.t()
}
field(:batchId)
field(:country)
field(:datafeedId)
field(:language)
field(:merchantId)
field(:method)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequestEntry do
def decode(value, options) do
GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequestEntry.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequestEntry do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.725806 | 239 | 0.708543 |
f7cc87d1ec7968394f5f71b3709636368c53be6d | 1,133 | exs | Elixir | apps/feedback/mix.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | apps/feedback/mix.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | apps/feedback/mix.exs | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | defmodule Feedback.Mixfile do
use Mix.Project
def project do
[
app: :feedback,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
test_coverage: [tool: ExCoveralls],
deps: deps()
]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[extra_applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:poison, "~> 2.2", override: true},
{:timex, ">= 2.0.0"},
{:briefly, "~> 0.3"},
{:plug, "~> 1.12"},
{:ex_aws, "~> 2.1.2"},
{:ex_aws_ses, "~> 2.1.1"},
{:mail, "~> 0.2"},
{:exvcr_helpers, in_umbrella: true, only: :test}
]
end
end
| 23.604167 | 77 | 0.540159 |
f7ccd9f49840d7eaa2c47ee5cb46e15ffc1c41b8 | 2,456 | ex | Elixir | lib/analyzer.ex | scdehmlow/dealer-reviews | cc106e90e4c460c0484991078e2b966b1b367889 | [
"MIT"
] | null | null | null | lib/analyzer.ex | scdehmlow/dealer-reviews | cc106e90e4c460c0484991078e2b966b1b367889 | [
"MIT"
] | null | null | null | lib/analyzer.ex | scdehmlow/dealer-reviews | cc106e90e4c460c0484991078e2b966b1b367889 | [
"MIT"
] | null | null | null | defmodule DealerReviews.Analyzer do
@moduledoc """
Contains functions to analyze the contents of a review and
score different properties for sorting.
"""
@doc """
Average ratings when four or more are provided.
"""
def score_ratings(%DealerReviews.Review{ratings: ratings}) do
score_ratings(ratings)
end
def score_ratings(ratings = %DealerReviews.Review.Ratings{}) do
%DealerReviews.Review.Ratings{
customer_service: customer_service,
friendliness: friendliness,
overall: overall,
pricing: pricing,
quality: quality,
recommend: recommend
} = ratings
# convert the recommend status to a numerical value
recommend_value =
case recommend do
# highest rating is a 5
true -> 5
# lowest rating is a 1
false -> 1
end
# ignore missing values
rating_values =
[customer_service, friendliness, overall, pricing, quality, recommend_value]
|> Enum.filter(fn r -> r != nil end)
rating_values_count = Enum.count(rating_values)
case rating_values do
v when rating_values_count > 3 -> Enum.sum(v) / rating_values_count
# three or less ratings returns a score of 1
_ -> 1
end
end
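  # Worked example (ratings assumed): with customer_service: 4, friendliness: 5,
  # overall: 4, pricing: 3, quality: 4 and recommend: true (counted as 5),
  # six values are present, so the score is (4 + 5 + 4 + 3 + 4 + 5) / 6 ≈ 4.17.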
@doc """
Ratings of employees combined with the total number of employees listed which is weighted at 2x.
"""
def score_employees(%DealerReviews.Review{employees: employees}) do
score_employees(employees)
end
def score_employees(employees) do
count_weight = 2
count = Enum.count(employees)
count_value =
case count do
# max score of 5, greater doesn't matter
c when c >= 5 -> 5
# lowest score of 1 when no employees
0 -> 1
c -> c
end
employees_rated = employees |> Enum.filter(fn e -> e.rating != nil end)
employee_ratings_total =
employees_rated
|> Enum.map(fn e -> e.rating end)
|> Enum.sum()
(employee_ratings_total + count_value * count_weight) /
(Enum.count(employees_rated) + count_weight)
end
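  # Worked example (employees assumed): two listed employees rated 5 and 4 give
  # count_value = 2, so the score is (5 + 4 + 2 * 2) / (2 + 2) = 13 / 4 = 3.25.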
@doc """
Number of `!` characters in the review body.
"""
def score_body(%DealerReviews.Review{body: body}) do
score_body(body)
end
def score_body(body) do
perfect = 10
exclaimations =
body
|> String.graphemes()
|> Enum.filter(fn b -> b == "!" end)
|> Enum.count()
# convert to a 1-5 scale
exclaimations / perfect * 4 + 1
end
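  # Worked example (body assumed): a review body containing five "!" characters
  # scores 5 / 10 * 4 + 1 = 3.0.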
end
| 25.061224 | 98 | 0.64373 |
f7cd03219412ae03607b5a49f4968144071283f0 | 906 | ex | Elixir | lib/obelisk/post.ex | FreedomBen/obelisk | f8f5ca8d73f619f26213c3b2442127c25dec45a2 | [
"MIT"
] | 406 | 2015-01-01T14:59:37.000Z | 2022-02-19T08:08:47.000Z | lib/obelisk/post.ex | FreedomBen/obelisk | f8f5ca8d73f619f26213c3b2442127c25dec45a2 | [
"MIT"
] | 38 | 2015-01-19T11:58:30.000Z | 2019-01-18T14:06:24.000Z | lib/obelisk/post.ex | FreedomBen/obelisk | f8f5ca8d73f619f26213c3b2442127c25dec45a2 | [
"MIT"
] | 64 | 2015-01-19T09:59:55.000Z | 2021-02-06T01:14:59.000Z | defmodule Obelisk.Post do
def compile(md_file) do
spawn_link fn ->
Obelisk.Document.compile "./posts/#{md_file}", Obelisk.Layout.post
end
end
def prepare(md_file, store) do
layouts = Obelisk.Store.get_layouts(store)
Obelisk.Store.add_posts(store, [ Obelisk.Document.prepare("./posts/#{md_file}", layouts.post) ])
end
def title(md) do
    md
    |> String.slice(11, 1000)
    |> String.replace("-", " ")
    |> String.replace(".markdown", "")
    |> String.capitalize()
end
def list do
File.ls!("./posts")
|> Enum.sort
|> Enum.reverse
end
def create(title) do
File.write(filename_from_title(title), Obelisk.Templates.post(title))
end
def filename_from_title(title) do
datepart = Chronos.today |> Chronos.Formatter.strftime("%Y-%0m-%0d")
titlepart = String.downcase(title) |> String.replace(" ", "-")
"./posts/#{datepart}-#{titlepart}.markdown"
end
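  # Illustrative sketch (today's date assumed): on 2015-01-19,
  # filename_from_title("Hello World") returns
  # "./posts/2015-01-19-hello-world.markdown".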
end
| 25.885714 | 108 | 0.663355 |
f7cd09c392dcbab1aa7f2bc2327ab163f23c3371 | 1,206 | exs | Elixir | config/test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | 2 | 2021-12-21T16:32:56.000Z | 2022-02-22T17:06:39.000Z | config/test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | null | null | null | config/test.exs | exshome/exshome | ef6b7a89f11dcd2016856dd49517b74aeebb6513 | [
"MIT"
] | null | null | null | import Config
# Configure your database
config :exshome, Exshome.Repo,
pool_size: 5,
show_sensitive_data_on_connection_error: true,
database_name: "exshome_test.db"
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :exshome, ExshomeWeb.Endpoint,
http: [ip: {127, 0, 0, 1}, port: 4002],
secret_key_base: "McB+GRVaMaeC1KOJLkJLw7XbjJCuCJn9lwIWVdh3orgUOdLMhtohdySqbKg2ddi3",
server: false
config :exshome, :environment, :test
config :exshome, :app_page_hook_module, ExshomeTest.Hooks.AppPage
config :exshome, :dependency_hook_module, ExshomeTest.Hooks.Dependency
config :exshome, :file_utils_hook_module, ExshomeTest.Hooks.FileUtils
config :exshome, :mpv_server_hook_module, ExshomeTest.Hooks.MpvServer
config :exshome, :pub_sub_hook_module, ExshomeTest.Hooks.PubSub
config :exshome, :repo_hook_module, ExshomeTest.Hooks.Repo
config :exshome, :live_view_hooks, [ExshomeTest.Hooks.LiveView]
config :exshome, :application_children, [
ExshomeTest.TestRegistry
]
# Print only warnings and errors during test
config :logger, level: :warn
# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime
| 35.470588 | 86 | 0.800166 |
f7cd14b494ef6ea204a8c8916d8bee471579abe5 | 313 | exs | Elixir | integration_test/test_helper.exs | MishaConway/cafex | 32965b7e099bc45de24c229cc76f7b83b35ff7b4 | [
"Apache-2.0"
] | null | null | null | integration_test/test_helper.exs | MishaConway/cafex | 32965b7e099bc45de24c229cc76f7b83b35ff7b4 | [
"Apache-2.0"
] | null | null | null | integration_test/test_helper.exs | MishaConway/cafex | 32965b7e099bc45de24c229cc76f7b83b35ff7b4 | [
"Apache-2.0"
] | null | null | null | Application.start :erlzk
Code.require_file("support/zk_helper.exs", __DIR__)
Logger.remove_backend(:console)
zk_cfg = Application.get_env(:cafex, :zookeeper)
zk_prefix = Keyword.get(zk_cfg, :chroot)
{:ok, pid} = ZKHelper.connect(zk_cfg)
:ok = ZKHelper.rmr(pid, zk_prefix)
# ZKHelper.close(pid)
ExUnit.start()
| 24.076923 | 51 | 0.757188 |
f7cd5f5554c2c51c727cbf2ce23a5894d835ccbd | 877 | ex | Elixir | debian/watch.ex | MoonLightDE/CppMicroServices | cf471b30539d7e1a138d6308b2b249fe19df2302 | [
"Apache-2.0"
] | 1 | 2021-06-27T05:11:08.000Z | 2021-06-27T05:11:08.000Z | debian/watch.ex | MoonLightDE/mlde.l.cppmicroservices | cf471b30539d7e1a138d6308b2b249fe19df2302 | [
"Apache-2.0"
] | 3 | 2017-08-20T22:10:51.000Z | 2017-09-04T12:48:40.000Z | debian/watch.ex | MoonLightDE/mlde.l.cppmicroservices | cf471b30539d7e1a138d6308b2b249fe19df2302 | [
"Apache-2.0"
] | 1 | 2017-08-19T06:41:58.000Z | 2017-08-19T06:41:58.000Z | # Example watch control file for uscan
# Rename this file to "watch" and then you can run the "uscan" command
# to check for upstream updates and more.
# See uscan(1) for format
# Compulsory line, this is a version 3 file
version=3
# Uncomment to examine a Webpage
# <Webpage URL> <string match>
#http://www.example.com/downloads.php mlde.l.cppmicroservices-(.*)\.tar\.gz
# Uncomment to examine a Webserver directory
#http://www.example.com/pub/mlde.l.cppmicroservices-(.*)\.tar\.gz
# Uncomment to examine an FTP server
#ftp://ftp.example.com/pub/mlde.l.cppmicroservices-(.*)\.tar\.gz debian uupdate
# Uncomment to find new files on sourceforge, for devscripts >= 2.9
# http://sf.net/mlde.l.cppmicroservices/mlde.l.cppmicroservices-(.*)\.tar\.gz
# Uncomment to find new files on GooglePages
# http://example.googlepages.com/foo.html mlde.l.cppmicroservices-(.*)\.tar\.gz
| 36.541667 | 79 | 0.737742 |
f7cda2f2af2deed48d01d1277a0b5578f1fb4137 | 1,031 | ex | Elixir | lib/jaya_currency_converter/accounts.ex | franknfjr/jaya_currency_converter | 56dfcf40b2ed2c9307fa39d7a5d1121cf4a1a37e | [
"MIT"
] | null | null | null | lib/jaya_currency_converter/accounts.ex | franknfjr/jaya_currency_converter | 56dfcf40b2ed2c9307fa39d7a5d1121cf4a1a37e | [
"MIT"
] | null | null | null | lib/jaya_currency_converter/accounts.ex | franknfjr/jaya_currency_converter | 56dfcf40b2ed2c9307fa39d7a5d1121cf4a1a37e | [
"MIT"
] | null | null | null | defmodule JayaCurrencyConverter.Accounts do
@moduledoc """
The Accounts context.
"""
import Ecto.Query, warn: false
alias JayaCurrencyConverter.Repo
alias JayaCurrencyConverter.Accounts.User
@doc """
Returns the list of users.
## Examples
iex> list_users()
[%User{}, ...]
"""
def list_users do
Repo.all(User)
end
@doc """
  Gets a single user.
  Returns `{:ok, user}` if the user exists, or `{:error, "user is not found"}`
  if it does not.
  ## Examples
      iex> get_user!(123)
      {:ok, %User{}}
      iex> get_user!(456)
      {:error, "user is not found"}
"""
def get_user!(id) do
User
|> Repo.get_by(id: id)
|> case do
nil -> {:error, "user is not found"}
user -> {:ok, user}
end
end
@doc """
Creates a user.
## Examples
iex> create_user(%{field: value})
{:ok, %User{}}
iex> create_user(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_user(attrs \\ %{}) do
%User{}
|> User.changeset(attrs)
|> Repo.insert()
end
end
| 15.861538 | 58 | 0.57517 |
f7cdc15417231d0bc55dfc0abb373fa3f399a4b6 | 1,693 | exs | Elixir | exercises/practice/pythagorean-triplet/test/triplet_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | null | null | null | exercises/practice/pythagorean-triplet/test/triplet_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | null | null | null | exercises/practice/pythagorean-triplet/test/triplet_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | null | null | null | defmodule TripletTest do
use ExUnit.Case
# @tag :pending
test "sum" do
triplet = [3, 4, 5]
assert Triplet.sum(triplet) == 12
end
@tag :pending
test "product" do
triplet = [3, 4, 5]
assert Triplet.product(triplet) == 60
end
@tag :pending
test "pythagorean" do
triplet = [3, 4, 5]
assert Triplet.pythagorean?(triplet)
end
@tag :pending
test "not pythagorean" do
triplet = [5, 6, 7]
refute Triplet.pythagorean?(triplet)
end
@tag :pending
test "triplets whose sum is 12" do
assert Triplet.generate(12) == [[3, 4, 5]]
end
@tag :pending
test "triplets whose sum is 108" do
assert Triplet.generate(108) == [[27, 36, 45]]
end
@tag :pending
test "triplets whose sum is 1000" do
assert Triplet.generate(1000) == [[200, 375, 425]]
end
@tag :pending
test "no matching triplets for 1001" do
assert Triplet.generate(1001) == []
end
@tag :pending
test "returns all matching triplets" do
assert Triplet.generate(90) == [[9, 40, 41], [15, 36, 39]]
end
@tag :pending
test "several matching triplets" do
assert Triplet.generate(840) == [
[40, 399, 401],
[56, 390, 394],
[105, 360, 375],
[120, 350, 370],
[140, 336, 364],
[168, 315, 357],
[210, 280, 350],
[240, 252, 348]
]
end
@tag :pending
test "triplets for large number" do
assert Triplet.generate(30000) == [
[1200, 14375, 14425],
[1875, 14000, 14125],
[5000, 12000, 13000],
[6000, 11250, 12750],
[7500, 10000, 12500]
]
end
end
| 21.705128 | 62 | 0.552865 |
f7ce0ee021bc34849a1c60ba785ab9befeb39e72 | 4,664 | ex | Elixir | debian/manpage.sgml.ex | tarkhov/deb-release | 8421a676ec542cc5421b39c3ac353f2fb823789a | [
"MIT"
] | null | null | null | debian/manpage.sgml.ex | tarkhov/deb-release | 8421a676ec542cc5421b39c3ac353f2fb823789a | [
"MIT"
] | 3 | 2019-06-04T19:43:09.000Z | 2020-11-20T11:55:32.000Z | debian/manpage.sgml.ex | tarkhov/deb-release | 8421a676ec542cc5421b39c3ac353f2fb823789a | [
"MIT"
] | null | null | null | <!doctype refentry PUBLIC "-//OASIS//DTD DocBook V4.1//EN" [
<!-- Process this file with docbook-to-man to generate an nroff manual
page: `docbook-to-man manpage.sgml > manpage.1'. You may view
the manual page with: `docbook-to-man manpage.sgml | nroff -man |
less'. A typical entry in a Makefile or Makefile.am is:
manpage.1: manpage.sgml
docbook-to-man $< > $@
The docbook-to-man binary is found in the docbook-to-man package.
Please remember that if you create the nroff version in one of the
debian/rules file targets (such as build), you will need to include
docbook-to-man in your Build-Depends control field.
-->
<!-- Fill in your name for FIRSTNAME and SURNAME. -->
<!ENTITY dhfirstname "<firstname>FIRSTNAME</firstname>">
<!ENTITY dhsurname "<surname>SURNAME</surname>">
<!-- Please adjust the date whenever revising the manpage. -->
<!ENTITY dhdate "<date>August 12 2018</date>">
<!-- SECTION should be 1-8, maybe w/ subsection other parameters are
allowed: see man(7), man(1). -->
<!ENTITY dhsection "<manvolnum>SECTION</manvolnum>">
<!ENTITY dhemail "<email>[email protected]</email>">
<!ENTITY dhusername "Alexander Tarkhov">
<!ENTITY dhucpackage "<refentrytitle>Deb-release</refentrytitle>">
<!ENTITY dhpackage "deb-release">
<!ENTITY debian "<productname>Debian</productname>">
<!ENTITY gnu "<acronym>GNU</acronym>">
<!ENTITY gpl "&gnu; <acronym>GPL</acronym>">
]>
<refentry>
<refentryinfo>
<address>
&dhemail;
</address>
<author>
&dhfirstname;
&dhsurname;
</author>
<copyright>
<year>2003</year>
<holder>&dhusername;</holder>
</copyright>
&dhdate;
</refentryinfo>
<refmeta>
&dhucpackage;
&dhsection;
</refmeta>
<refnamediv>
<refname>&dhpackage;</refname>
<refpurpose>program to do something</refpurpose>
</refnamediv>
<refsynopsisdiv>
<cmdsynopsis>
<command>&dhpackage;</command>
<arg><option>-e <replaceable>this</replaceable></option></arg>
<arg><option>--example <replaceable>that</replaceable></option></arg>
</cmdsynopsis>
</refsynopsisdiv>
<refsect1>
<title>DESCRIPTION</title>
<para>This manual page documents briefly the
<command>&dhpackage;</command> and <command>bar</command>
commands.</para>
<para>This manual page was written for the &debian; distribution
because the original program does not have a manual page.
Instead, it has documentation in the &gnu;
<application>Info</application> format; see below.</para>
<para><command>&dhpackage;</command> is a program that...</para>
</refsect1>
<refsect1>
<title>OPTIONS</title>
<para>These programs follow the usual &gnu; command line syntax,
with long options starting with two dashes (`-'). A summary of
options is included below. For a complete description, see the
<application>Info</application> files.</para>
<variablelist>
<varlistentry>
<term><option>-h</option>
<option>--help</option>
</term>
<listitem>
<para>Show summary of options.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-v</option>
<option>--version</option>
</term>
<listitem>
<para>Show version of program.</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1>
<title>SEE ALSO</title>
<para>bar (1), baz (1).</para>
<para>The programs are documented fully by <citetitle>The Rise and
Fall of a Fooish Bar</citetitle> available via the
<application>Info</application> system.</para>
</refsect1>
<refsect1>
<title>AUTHOR</title>
<para>This manual page was written by &dhusername; &dhemail; for
the &debian; system (and may be used by others). Permission is
granted to copy, distribute and/or modify this document under
      the terms of the &gnu; General Public License, Version 2 or any
      later version published by the Free Software Foundation.
</para>
<para>
On Debian systems, the complete text of the GNU General Public
License can be found in /usr/share/common-licenses/GPL.
</para>
</refsect1>
</refentry>
<!-- Keep this comment at the end of the file
Local variables:
mode: sgml
sgml-omittag:t
sgml-shorttag:t
sgml-minimize-attributes:nil
sgml-always-quote-attributes:t
sgml-indent-step:2
sgml-indent-data:t
sgml-parent-document:nil
sgml-default-dtd-file:nil
sgml-exposed-tags:nil
sgml-local-catalogs:nil
sgml-local-ecat-files:nil
End:
-->
| 30.090323 | 75 | 0.658448 |
f7ce130ab9b79f5a896ea9b2eafe64811caccc1b | 1,613 | exs | Elixir | elixir/phone-number/phone_number.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | ["MIT"] | null | null | null | elixir/phone-number/phone_number.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | ["MIT"] | null | null | null | elixir/phone-number/phone_number.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | ["MIT"] | null | null | null | defmodule Phone do
@doc """
Remove formatting from a phone number.
Returns "0000000000" if phone number is not valid
(10 digits or "1" followed by 10 digits)
## Examples
iex> Phone.number("123-456-7890")
"1234567890"
iex> Phone.number("+1 (303) 555-1212")
"3035551212"
iex> Phone.number("867.5309")
"0000000000"
"""
@spec number(String.t) :: String.t
def number(raw_number) do
number = Regex.replace(~r/\W/, raw_number, "")
cond do
String.length(number) == 10 -> number
String.length(number) == 11 and String.starts_with?(number, "1") ->
String.slice(number, 1..-1)
true -> "0000000000"
end
end
@doc """
Extract the area code from a phone number
Returns the first three digits from a phone number,
ignoring long distance indicator
## Examples
iex> Phone.area_code("123-456-7890")
"123"
iex> Phone.area_code("+1 (303) 555-1212")
"303"
iex> Phone.area_code("867.5309")
"000"
"""
@spec area_code(String.t) :: String.t
def area_code(raw_number) do
raw_number
|> number
|> String.slice(0..2)
end
@doc """
Pretty print a phone number
Wraps the area code in parentheses and separates
exchange and subscriber number with a dash.
## Examples
iex> Phone.pretty("123-456-7890")
"(123) 456-7890"
iex> Phone.pretty("+1 (303) 555-1212")
"(303) 555-1212"
iex> Phone.pretty("867.5309")
"(000) 000-0000"
"""
@spec pretty(String.t) :: String.t
def pretty(raw_number) do
raw_number
|> number
|> String.replace(~r/(\d{3})(\d{3})(\d{4})/, "(\\1) \\2-\\3")
end
end
| 20.417722 | 73 | 0.625542 |
f7ce1da36b178a5d21ca0a9b5ce4d8a00adbde94 | 3,000 | ex | Elixir | lib/livebook/runtime/attached.ex | howard0su/livebook | 6b7825871338af0ec3f4196ec3e17d2670e6a92c | ["Apache-2.0"] | null | null | null | lib/livebook/runtime/attached.ex | howard0su/livebook | 6b7825871338af0ec3f4196ec3e17d2670e6a92c | ["Apache-2.0"] | null | null | null | lib/livebook/runtime/attached.ex | howard0su/livebook | 6b7825871338af0ec3f4196ec3e17d2670e6a92c | ["Apache-2.0"] | null | null | null | defmodule Livebook.Runtime.Attached do
@moduledoc false
# A runtime backed by an Elixir node managed externally.
#
# Such node must be already started and available,
# Livebook doesn't manage its lifetime in any way
# and only loads/unloads the necessary elements.
# The node can be an ordinary Elixir runtime,
# a Mix project shell, a running release or anything else.
defstruct [:node, :cookie, :server_pid]
@type t :: %__MODULE__{
node: node(),
cookie: atom(),
server_pid: pid()
}
@doc """
Checks if the given node is available for use and initializes
it with Livebook-specific modules and processes.
"""
@spec init(node(), atom()) :: {:ok, t()} | {:error, :unreachable}
def init(node, cookie \\ Node.get_cookie()) do
# Set cookie for connecting to this specific node
Node.set_cookie(node, cookie)
case Node.ping(node) do
:pong ->
opts = [parent_node: node()]
server_pid = Livebook.Runtime.ErlDist.initialize(node, opts)
{:ok, %__MODULE__{node: node, cookie: cookie, server_pid: server_pid}}
:pang ->
{:error, :unreachable}
end
end
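  # A hypothetical usage sketch (assumes a node :"test@host" is already
  # running, reachable, and shares the given cookie; the names here are
  # illustrative, not part of the documented API):
  #
  #   {:ok, runtime} = Livebook.Runtime.Attached.init(:"test@host", :my_cookie)
  #   Livebook.Runtime.connect(runtime, [])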
end
defimpl Livebook.Runtime, for: Livebook.Runtime.Attached do
alias Livebook.Runtime.ErlDist
def connect(runtime, opts \\ []) do
ErlDist.RuntimeServer.attach(runtime.server_pid, self(), opts)
Process.monitor(runtime.server_pid)
end
def disconnect(runtime) do
ErlDist.RuntimeServer.stop(runtime.server_pid)
end
def evaluate_code(runtime, code, locator, base_locator, opts \\ []) do
ErlDist.RuntimeServer.evaluate_code(runtime.server_pid, code, locator, base_locator, opts)
end
def forget_evaluation(runtime, locator) do
ErlDist.RuntimeServer.forget_evaluation(runtime.server_pid, locator)
end
def drop_container(runtime, container_ref) do
ErlDist.RuntimeServer.drop_container(runtime.server_pid, container_ref)
end
def handle_intellisense(runtime, send_to, ref, request, base_locator) do
ErlDist.RuntimeServer.handle_intellisense(
runtime.server_pid,
send_to,
ref,
request,
base_locator
)
end
def duplicate(runtime) do
case Livebook.Runtime.Attached.init(runtime.node, runtime.cookie) do
{:ok, runtime} -> {:ok, runtime}
{:error, :unreachable} -> {:error, "node #{inspect(runtime.node)} is unreachable"}
end
end
def standalone?(_runtime), do: false
def read_file(runtime, path) do
ErlDist.RuntimeServer.read_file(runtime.server_pid, path)
end
def start_smart_cell(runtime, kind, ref, attrs, base_locator) do
ErlDist.RuntimeServer.start_smart_cell(runtime.server_pid, kind, ref, attrs, base_locator)
end
def set_smart_cell_base_locator(runtime, ref, base_locator) do
ErlDist.RuntimeServer.set_smart_cell_base_locator(runtime.server_pid, ref, base_locator)
end
def stop_smart_cell(runtime, ref) do
ErlDist.RuntimeServer.stop_smart_cell(runtime.server_pid, ref)
end
end
| 30 | 94 | 0.708333 |
f7ce2a55ae419c59217a7aca18503cc850466da1 | 83 | ex | Elixir | lib/matchalert/web/views/player_subscription_view.ex | troelsim/matchalert | 9379796b3046e0ce28674432568f9cb5b01aab6f | ["Apache-2.0"] | null | null | null | lib/matchalert/web/views/player_subscription_view.ex | troelsim/matchalert | 9379796b3046e0ce28674432568f9cb5b01aab6f | ["Apache-2.0"] | null | null | null | lib/matchalert/web/views/player_subscription_view.ex | troelsim/matchalert | 9379796b3046e0ce28674432568f9cb5b01aab6f | ["Apache-2.0"] | null | null | null | defmodule Matchalert.Web.PlayerSubscriptionView do
use Matchalert.Web, :view
end
| 20.75 | 50 | 0.831325 |
f7ce933ac5cd0df2c69614f2b5f70de449455311 | 443 | ex | Elixir | lib/key_value_store.ex | tegon/elixir-in-action | 1bcb8fb0e1d32593a505dc634977c0ea3d4a56ba | ["MIT"] | 1 | 2017-09-07T14:36:47.000Z | 2017-09-07T14:36:47.000Z | lib/key_value_store.ex | tegon/elixir-in-action | 1bcb8fb0e1d32593a505dc634977c0ea3d4a56ba | ["MIT"] | null | null | null | lib/key_value_store.ex | tegon/elixir-in-action | 1bcb8fb0e1d32593a505dc634977c0ea3d4a56ba | ["MIT"] | null | null | null | defmodule KeyValueStore do
def init do
%{}
end
def handle_cast({:put, key, value}, state) do
Map.put(state, key, value)
end
def handle_call({:get, key}, state) do
{Map.get(state, key), state}
end
def start do
ServerProcess.start(KeyValueStore)
end
def put(pid, key, value) do
ServerProcess.cast(pid, {:put, key, value})
end
def get(pid, key) do
ServerProcess.call(pid, {:get, key})
end
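  # Usage sketch (assumes the book's ServerProcess module, with start/1,
  # call/2 and cast/2, is compiled alongside this file):
  #
  #   pid = KeyValueStore.start()
  #   KeyValueStore.put(pid, :name, "Ada")
  #   KeyValueStore.get(pid, :name)
  #   #=> "Ada"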
end
| 17.038462 | 47 | 0.636569 |
f7cec22448f5154af86abf2a55736379e61ac1cd | 1,575 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_protobuf_empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_protobuf_empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_protobuf_empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-03-04T13:43:47.000Z | 2021-03-04T13:43:47.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleProtobufEmpty do
@moduledoc """
A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON representation for `Empty` is empty JSON object `{}`.
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.Dialogflow.V2.Model.GoogleProtobufEmpty do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleProtobufEmpty.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Dialogflow.V2.Model.GoogleProtobufEmpty do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.5 | 345 | 0.765079 |
f7cecd07ab761f5693b6120eb6b1a832c32f35ef | 1,235 | exs | Elixir | farmbot_core/test/bot_state_test.exs | elbow-jason/farmbot_os | f5dfc8f58a309285ca3d441b1b7272f15315b2a9 | ["MIT"] | 1 | 2019-08-06T11:51:48.000Z | 2019-08-06T11:51:48.000Z | farmbot_core/test/bot_state_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | ["MIT"] | null | null | null | farmbot_core/test/bot_state_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | ["MIT"] | null | null | null | defmodule FarmbotCore.BotStateTest do
use ExUnit.Case
alias FarmbotCore.BotState
describe "bot state pub/sub" do
test "subscribes to bot state updates" do
{:ok, bot_state_pid} = BotState.start_link([], [])
_initial_state = BotState.subscribe(bot_state_pid)
:ok = BotState.set_user_env(bot_state_pid, "some_key", "some_val")
assert_receive {BotState, %Ecto.Changeset{valid?: true}}
end
test "invalid data doesn't get dispatched" do
{:ok, bot_state_pid} = BotState.start_link([], [])
_initial_state = BotState.subscribe(bot_state_pid)
result = BotState.report_disk_usage(bot_state_pid, "this is invalid")
assert match?({:error, %Ecto.Changeset{valid?: false}}, result)
refute_receive {BotState, %Ecto.Changeset{valid?: true}}
end
end
describe "pins" do
test "sets pin data" do
{:ok, bot_state_pid} = BotState.start_link([], [])
:ok = BotState.set_pin_value(bot_state_pid, 9, 1)
:ok = BotState.set_pin_value(bot_state_pid, 10, 1)
:ok = BotState.set_pin_value(bot_state_pid, 11, 0)
assert %{pins: %{9 => %{value: 1}, 10 => %{value: 1}, 11 => %{value: 0}}} =
BotState.fetch(bot_state_pid)
end
end
end
| 36.323529 | 81 | 0.660729 |
f7ced50df775dcdc235bd04be33393cf17daba33 | 3,380 | exs | Elixir | test/sled_test.exs | skunkwerks/sled | 2a84b30a15719ec20002771afeee978a504966f1 | ["Apache-2.0", "MIT"] | 11 | 2020-06-08T20:58:33.000Z | 2022-01-12T03:41:29.000Z | test/sled_test.exs | skunkwerks/sled | 2a84b30a15719ec20002771afeee978a504966f1 | ["Apache-2.0", "MIT"] | 1 | 2020-06-08T21:37:00.000Z | 2020-07-06T20:55:31.000Z | test/sled_test.exs | skunkwerks/sled | 2a84b30a15719ec20002771afeee978a504966f1 | ["Apache-2.0", "MIT"] | 3 | 2020-06-08T21:03:00.000Z | 2022-01-21T09:05:55.000Z | defmodule SledTest do
use ExUnit.Case
doctest Sled
setup_all do
on_exit(fn ->
File.rm_rf!("test_db")
File.rm_rf!("test_default_db")
end)
end
setup do
path = Sled.TestHelpers.test_db_name()
File.rm_rf!(path)
on_exit(fn ->
File.rm_rf!(path)
end)
{:ok, path: path}
end
test "open db_path", context do
assert %Sled{} = Sled.open(context.path)
assert File.exists?(context.path)
end
test "db inspect", context do
assert inspect(Sled.open(context.path)) == "#Sled<path: #{inspect(context.path)}, ...>"
end
test "open invalid db_path" do
assert_raise ErlangError,
~r/Erlang error: \"sled::Error::Io\(Custom { kind: InvalidInput, error: .*/,
fn -> Sled.open("\0") end
end
test "open options", context do
assert %Sled{} = Sled.open(path: context.path)
assert File.exists?(context.path)
end
test "open config", context do
assert %Sled{} = Sled.Config.open(Sled.Config.new(path: context.path))
assert File.exists?(context.path)
end
test "db_checksum", context do
assert db = Sled.open(context.path)
db_checksum = Sled.db_checksum(db)
Sled.Tree.insert(db, "hello", "world")
assert db_checksum != Sled.db_checksum(db)
end
test "size_on_disk", context do
assert db = Sled.open(context.path)
size_on_disk = Sled.size_on_disk(db)
Sled.Tree.insert(db, "hello", :crypto.strong_rand_bytes(1000))
Sled.Tree.flush(db)
assert size_on_disk != Sled.size_on_disk(db)
end
test "was_recovered", context do
assert db = Sled.open(context.path)
refute Sled.was_recovered(db)
    # Since there's no way to force a resource to be dropped, and a sled DB can only be opened
    # from one process, we create the DB from a separate VM in order to open it a second time
    # from our tests.
try do
{_stdout, 0} =
System.cmd(
"mix",
[
"run",
"--no-compile",
"--no-deps-check",
"--no-archives-check",
"--no-start",
"--require",
"test/was_recovered_helper.exs"
],
into: IO.stream(:stdio, :line),
env: [{"MIX_ENV", "test"}],
stderr_to_stdout: true
)
assert db2 = Sled.open("test_recovered_db")
assert Sled.was_recovered(db2)
after
File.rm_rf!("test_recovered_db")
end
end
test "generate_id", context do
db = Sled.open(context.path)
a = Sled.generate_id(db)
assert is_integer(a)
b = Sled.generate_id(db)
assert is_integer(b)
assert a != b
end
test "export", context do
db = Sled.open(context.path)
Sled.Tree.insert(db, "hello", "world")
Sled.Tree.insert(db, "hello2", "world2")
assert [{"tree", "__sled__default", [["hello", "world"], ["hello2", "world2"]]}] ==
Sled.export(db)
end
test "import", context do
db = Sled.open(context.path)
Sled.Tree.insert(db, "hello", "world")
Sled.Tree.insert(db, "hello2", "world2")
export = Sled.export(db)
path = Sled.TestHelpers.test_db_name()
try do
db2 = Sled.open(path)
assert :ok == Sled.import(db2, export)
assert "world" = Sled.Tree.get(db2, "hello")
assert "world2" = Sled.Tree.get(db2, "hello2")
after
File.rm_rf!(path)
end
end
end
| 25.223881 | 97 | 0.602663 |
f7cef30c9eb9de4f976483c78896ac5c383878f6 | 78 | ex | Elixir | lib/lv_template/mailer.ex | ustrajunior/lv_template | 633c85d8c5810a130bbf24077845dda49e82ca3f | ["MIT"] | null | null | null | lib/lv_template/mailer.ex | ustrajunior/lv_template | 633c85d8c5810a130bbf24077845dda49e82ca3f | ["MIT"] | null | null | null | lib/lv_template/mailer.ex | ustrajunior/lv_template | 633c85d8c5810a130bbf24077845dda49e82ca3f | ["MIT"] | null | null | null | defmodule LvTemplate.Mailer do
use Swoosh.Mailer, otp_app: :lv_template
end
| 19.5 | 42 | 0.807692 |
f7cf161b7f870976a32c5191680547035bfef6f7 | 3,431 | ex | Elixir | clients/ad_sense/lib/google_api/ad_sense/v14/api/adclients.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_sense/lib/google_api/ad_sense/v14/api/adclients.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_sense/lib/google_api/ad_sense/v14/api/adclients.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdSense.V14.Api.Adclients do
@moduledoc """
API calls for all endpoints tagged `Adclients`.
"""
alias GoogleApi.AdSense.V14.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
List all ad clients in this AdSense account.
## Parameters
* `connection` (*type:* `GoogleApi.AdSense.V14.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:maxResults` (*type:* `integer()`) - The maximum number of ad clients to include in the response, used for paging.
* `:pageToken` (*type:* `String.t`) - A continuation token, used to page through ad clients. To retrieve the next page, set this parameter to the value of "nextPageToken" from the previous response.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AdSense.V14.Model.AdClients{}}` on success
* `{:error, info}` on failure
"""
@spec adsense_adclients_list(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.AdSense.V14.Model.AdClients.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def adsense_adclients_list(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:maxResults => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/adclients", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.AdSense.V14.Model.AdClients{}])
end
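  # Hypothetical call sketch (obtaining `token` via OAuth2 is elided; the
  # connection constructor follows the usual generated-client pattern and the
  # option shown is one of the optional parameters documented above):
  #
  #   conn = GoogleApi.AdSense.V14.Connection.new(token)
  #   {:ok, ad_clients} = adsense_adclients_list(conn, maxResults: 50)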
end
| 42.358025 | 206 | 0.660449 |
f7cf3f15da1682481cab2889bec0a2d750e18d28 | 3,227 | ex | Elixir | lib/ueberauth/azure_ad.ex | gabrielpra1/ueberauth_azure_ad | 85e38af09a288f68ac2301b3847a05fac8ff314c | ["MIT"] | null | null | null | lib/ueberauth/azure_ad.ex | gabrielpra1/ueberauth_azure_ad | 85e38af09a288f68ac2301b3847a05fac8ff314c | ["MIT"] | null | null | null | lib/ueberauth/azure_ad.ex | gabrielpra1/ueberauth_azure_ad | 85e38af09a288f68ac2301b3847a05fac8ff314c | ["MIT"] | null | null | null | defmodule Ueberauth.Strategy.AzureAD do
  @moduledoc """
  Ueberauth strategy for authenticating users with Azure Active Directory,
  including the B2C flows (cancelled sign-in and password reset) handled in
  the callback below.
  """
use Ueberauth.Strategy
alias Ueberauth.Strategy.AzureAD.Client
alias Ueberauth.Strategy.AzureAD.Callback
require Logger
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
def handle_request!(conn) do
if Client.configured? do
callback_url = callback_url(conn)
url = Client.authorize_url!(callback_url)
redirect!(conn, url)
else
redirect!(conn, "/")
end
end
def logout(conn, _token), do: logout(conn)
def logout(conn) do
if Client.configured? do
redirect!(conn, Client.logout_url())
else
error_msg = "Failed to logout, please close your browser"
set_errors!(conn, [error("Logout Failed", error_msg)])
end
end
  def handle_callback!(conn) do
    case Map.get(conn, :params) do
      %{"id_token" => id_token, "code" => code} ->
        handle_callback!(conn, id_token, code)
      # AADB2C90091: the user cancelled the sign-in/sign-up flow, so restart it.
      %{"error" => _error, "error_description" => "AADB2C90091: " <> _error_description} ->
        callback_url = callback_url(conn)
        redirect!(conn, Client.authorize_url!(callback_url))
      # AADB2C90118: the user asked to reset their password, so run that policy flow.
      %{"error" => _error, "error_description" => "AADB2C90118: " <> _error_description} ->
        callback_url = callback_url(conn)
        redirect!(conn, Client.forgot_password_url!(callback_url))
      %{"error" => error, "error_description" => error_description} ->
        set_errors!(conn, [error(error, error_description)])
      _ ->
        set_errors!(conn, [error("missing_code_or_token", "Missing code or id_token")])
    end
  end
defp handle_callback!(conn, id_token, code) do
try do
claims = Callback.process_callback!(id_token, code)
put_private(conn, :aad_user, claims)
rescue
e in RuntimeError ->
set_errors!(conn, error("failed_auth_callback", e.message))
end
end
def handle_cleanup!(conn) do
    # TODO: I'm not sure that this does its job properly
conn
|> put_private(:aad_user, nil)
end
def uid(conn) do
conn.private.aad_user.oid
end
def credentials(conn) do
claims = conn.private.aad_user
struct(
Credentials,
other: %{
id_token: conn.params["id_token"],
code: conn.params["code"],
claims: claims,
}
)
end
def info(conn) do
claims = conn.private.aad_user
nickname = get_name(conn.private.aad_user)
struct(
Info,
      email: Map.get(claims, :emails, []) |> List.first(),
name: Map.get(claims, :name),
first_name: Map.get(claims, :given_name),
last_name: Map.get(claims, :family_name),
nickname: nickname,
)
end
def extra(conn) do
struct(Extra, raw_info: conn.params)
end
defp get_name(map) do
cond do
map[:username] -> format_name(map[:username])
map[:upn] -> format_name(map[:upn])
map[:unique_name] -> format_name(map[:unique_name])
map[:name] -> format_name(map[:name])
map[:email] -> format_name(map[:email])
true -> nil
end
end
defp format_name(name) do
name
|> String.split(["@", "_"])
|> hd
|> String.split(".")
|> Enum.map(&String.capitalize/1)
|> Enum.join(" ")
end
end
| 26.669421 | 91 | 0.634025 |
f7cf50cd48dde5e99145211ca7096998b643277c | 1,401 | ex | Elixir | packages/api/lib/api_web/controllers/auth_controller.ex | ErikSkare/Meower | ddc5c75004111aa64587994f27085bba1c5bd377 | ["MIT"] | 1 | 2022-02-10T20:08:10.000Z | 2022-02-10T20:08:10.000Z | packages/api/lib/api_web/controllers/auth_controller.ex | ErikSkare/Meower | ddc5c75004111aa64587994f27085bba1c5bd377 | ["MIT"] | null | null | null | packages/api/lib/api_web/controllers/auth_controller.ex | ErikSkare/Meower | ddc5c75004111aa64587994f27085bba1c5bd377 | ["MIT"] | null | null | null | defmodule ApiWeb.AuthController do
use ApiWeb, :controller
alias Api.Accounts
alias Api.Accounts.Auth
action_fallback ApiWeb.FallbackController
def register(conn, %{"user" => user_params}) do
with {:ok, user} <- Accounts.create_user(user_params) do
new_conn = put_tokens(conn, user)
new_conn
|> put_status(:created)
|> json(%{access: Auth.Token.Plug.current_token(new_conn)})
end
end
def login(conn, %{"email" => email, "password" => password}) do
with {:ok, user} <- Accounts.get_user_by_credentials(email, password) do
new_conn = put_tokens(conn, user)
new_conn
|> put_status(:accepted)
|> json(%{access: Auth.Token.Plug.current_token(new_conn)})
end
end
def logout(conn, _params) do
conn
|> Auth.Token.Plug.sign_out([clear_remember_me: true])
|> send_resp(:no_content, "")
end
def refresh(conn, _params) do
with {:ok, refresh} <- Guardian.Plug.find_token_from_cookies(conn),
{:ok, {_, _}, {new_access, _}} <- Auth.Token.exchange(refresh, "refresh", "access")
do
conn
|> put_status(:accepted)
|> json(%{access: new_access})
else
:no_token_found -> {:error, :no_token_found}
_ -> {:error, :bad_token}
end
end
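  # Hypothetical router wiring for these actions (paths are illustrative):
  #
  #   post "/auth/register", AuthController, :register
  #   post "/auth/login", AuthController, :login
  #   post "/auth/refresh", AuthController, :refresh
  #   delete "/auth/logout", AuthController, :logout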
defp put_tokens(conn, user) do
conn
|> Auth.Token.Plug.sign_in(user)
|> Auth.Token.Plug.remember_me(user)
end
end
| 27.470588 | 92 | 0.64454 |
f7cf5cf5c087caa8e2ab8ca9f028ef967c4780ab | 2,536 | exs | Elixir | test/grizzly/command_handlers/aggregate_report_test.exs | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | ["Apache-2.0"] | 76 | 2019-09-04T16:56:58.000Z | 2022-03-29T06:54:36.000Z | test/grizzly/command_handlers/aggregate_report_test.exs | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | ["Apache-2.0"] | 124 | 2019-09-05T14:01:24.000Z | 2022-02-28T22:58:14.000Z | test/grizzly/command_handlers/aggregate_report_test.exs | jellybob/grizzly | 290bee04cb16acbb9dc996925f5c501697b7ac94 | ["Apache-2.0"] | 10 | 2019-10-23T19:25:45.000Z | 2021-11-17T13:21:20.000Z | defmodule Grizzly.CommandHandlers.AggregateReportTest do
use ExUnit.Case, async: true
alias Grizzly.CommandHandlers.AggregateReport
alias Grizzly.ZWave.Commands.{AssociationReport, SwitchBinaryReport}
alias Grizzly.ZWave.Command
test "when the waiting report has no reports to follow" do
{:ok, state} =
AggregateReport.init(complete_report: :association_report, aggregate_param: :nodes)
{:ok, association_report} =
AssociationReport.new(
grouping_identifier: 1,
max_nodes_supported: 5,
reports_to_follow: 0,
nodes: [1, 2]
)
{:complete, association_report_complete} =
AggregateReport.handle_command(association_report, state)
assert Command.param!(association_report_complete, :nodes) == [1, 2]
end
test "when the waiting report is aggregated" do
{:ok, state} =
AggregateReport.init(complete_report: :association_report, aggregate_param: :nodes)
{:ok, association_report_one} =
AssociationReport.new(
grouping_identifier: 1,
max_nodes_supported: 5,
reports_to_follow: 1,
nodes: [1, 2]
)
{:continue, new_state} = AggregateReport.handle_command(association_report_one, state)
{:ok, association_report_two} =
AssociationReport.new(
grouping_identifier: 1,
max_nodes_supported: 5,
reports_to_follow: 0,
nodes: [5, 6]
)
{:complete, association_report_complete} =
AggregateReport.handle_command(association_report_two, new_state)
assert Command.param!(association_report_complete, :nodes) == [1, 2, 5, 6]
end
test "when the waiting report has reports to follow" do
{:ok, state} =
AggregateReport.init(complete_report: :association_report, aggregate_param: :nodes)
{:ok, association_report} =
AssociationReport.new(
grouping_identifier: 1,
max_nodes_supported: 5,
reports_to_follow: 1,
nodes: [1, 2]
)
expected_state = Map.put(state, :aggregates, [1, 2])
assert {:continue, expected_state} ==
AggregateReport.handle_command(association_report, state)
end
test "when different report is being handled than the one that is being waited on" do
{:ok, state} =
AggregateReport.init(complete_report: :association_report, aggregate_param: :nodes)
{:ok, switch_binary_report} = SwitchBinaryReport.new(target_value: :on)
assert {:continue, state} == AggregateReport.handle_command(switch_binary_report, state)
end
end
| 31.308642 | 92 | 0.695189 |
f7cf66b525536ca661be4a8e9761f94f827f1284 | 8,089 | ex | Elixir | lib/artsy.ex | ImpossibilityLabs/artsy | f0e1fb9988bac1dc06d726b5d698a00036bcc40f | ["MIT"] | null | null | null | lib/artsy.ex | ImpossibilityLabs/artsy | f0e1fb9988bac1dc06d726b5d698a00036bcc40f | ["MIT"] | null | null | null | lib/artsy.ex | ImpossibilityLabs/artsy | f0e1fb9988bac1dc06d726b5d698a00036bcc40f | ["MIT"] | null | null | null | defmodule Artsy do
@moduledoc """
REST API wrapper for [Artsy](http://artsy.net).
"""
import Artsy.ApiHelpers
use Application
use GenServer
use HTTPoison.Base
require Logger
@token_min_delay 5000
@token_reload 60 * 3
@token_failure_delay 1000
@token_max_retries 5
unless Application.get_env(:artsy, Artsy) do
raise Artsy.ConfigError, message: "Artsy is not configured"
end
unless Keyword.get(Application.get_env(:artsy, Artsy), :url) do
raise Artsy.ConfigError, message: "Artsy requires url"
end
unless Keyword.get(Application.get_env(:artsy, Artsy), :client_id) do
raise Artsy.ConfigError, message: "Artsy requires client_id"
end
unless Keyword.get(Application.get_env(:artsy, Artsy), :client_secret) do
raise Artsy.ConfigError, message: "Artsy requires client_secret"
end
@spec start(any(), [any()]) :: {:ok, pid}
def start(_type, _args) do
Artsy.start_link()
end
@spec start_link() :: {:ok, pid}
def start_link do
GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
end
@spec start_link(atom()) :: {:ok, pid}
  def start_link(name) when is_atom(name) do
    GenServer.start_link(__MODULE__, %{}, name: name)
  end
  @spec init(map()) :: {:ok, map()}
def init(_) do
with {:ok, token, expire} <- get_token() do
timer = Process.send_after(self(), {:get_new_token}, get_token_delay(expire))
{:ok, %{
"token" => token,
"token_expire_ts" => expire,
"token_timer" => timer,
"token_retries" => 0
}}
else
_ ->
raise Artsy.TokenError, message: "Can't load initial JWT token"
end
end
@doc """
  Get artworks from the decoded JSON response.
  Each successive call fetches the next page of artworks.
"""
@spec artworks() :: {:ok, map()} | {:error, Exception.t}
def artworks() do
GenServer.call(__MODULE__, {:artworks})
end
@doc """
Reset pagination for artworks.
"""
@spec artworks(:reset) :: :ok
def artworks(:reset) do
GenServer.call(__MODULE__, {:artworks_reset})
end
@doc """
  Get artists from the decoded JSON response.
  Each successive call fetches the next page of artists.
"""
@spec artists() :: {:ok, map()} | {:error, Exception.t}
def artists() do
GenServer.call(__MODULE__, {:artists})
  end
  @doc """
  Reset pagination for artists.
  """
  @spec artists(:reset) :: :ok
  def artists(:reset) do
    GenServer.call(__MODULE__, {:artists_reset})
  end
@doc """
Get all artists for specific artwork.
"""
@spec artists(:artwork, String.t) :: {:ok, map()} | {:error, Exception.t}
def artists(:artwork, artwork_id) do
GenServer.call(__MODULE__, {:artists_artwork, artwork_id})
end
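  # Hypothetical pagination sketch (assumes the Artsy process is started and
  # the credentials are configured; see the config notes at the bottom of
  # this module):
  #
  #   {:ok, page_one} = Artsy.artworks()
  #   {:ok, page_two} = Artsy.artworks()
  #   :ok = Artsy.artworks(:reset)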
@doc """
Load artworks.
  We save the cursor in state so the next request paginates efficiently.
  Use artworks(:reset) to reset the pagination cursor.
"""
def handle_call({:artworks}, _from, %{"token" => token} = state) when token != nil do
url = Map.get(state, "next_artworks", "/artworks")
raw_response = Artsy.get(url, headers(:token, token))
{result, new_state} = get_request state, raw_response, fn(state, response) ->
new_state = case response do
%{"_links" => %{"next" => %{"href" => next_url}}} ->
Map.put(state, "next_artworks", String.replace(next_url, config(:url), ""))
_ ->
state
end
{{:ok, response}, new_state}
end
{:reply, result, new_state}
end
@doc """
Reset pagination for artworks.
"""
  def handle_call({:artworks_reset}, _from, state) do
    {:reply, :ok, Map.delete(state, "next_artworks")}
  end
@doc """
Load artists.
  We save the cursor in state so the next request paginates efficiently.
  Use artists(:reset) to reset the pagination cursor.
"""
def handle_call({:artists}, _from, %{"token" => token} = state) when token != nil do
url = Map.get(state, "next_artists", "/artists")
raw_response = Artsy.get(url, headers(:token, token))
{result, new_state} = get_request state, raw_response, fn(state, response) ->
new_state = case response do
%{"_links" => %{"next" => %{"href" => next_url}}} ->
Map.put(state, "next_artists", String.replace(next_url, config(:url), ""))
_ ->
state
end
{{:ok, response}, new_state}
end
{:reply, result, new_state}
end
@doc """
Load artists for specific artwork.
"""
def handle_call({:artists_artwork, artwork_id}, _from, %{"token" => token} = state) when token != nil do
url = "/artists?artwork_id=#{artwork_id}"
raw_response = Artsy.get(url, headers(:token, token))
{result, _state} = get_request state, raw_response, fn(state, response) ->
{{:ok, response}, state}
end
{:reply, result, state}
end
@doc """
Reset pagination for artists.
"""
  def handle_call({:artists_reset}, _from, state) do
    {:reply, :ok, Map.delete(state, "next_artists")}
  end
@doc """
Generate new token and put it's metadata to state.
We cancel all the pending reload timers, before setting a new timer.
"""
def handle_cast({:get_new_token}, %{"token_timer" => token_timer} = state)
when token_timer != nil do
Process.cancel_timer(token_timer)
handle_cast({:get_new_token}, state)
end
  def handle_info({:get_new_token}, %{"token_retries" => token_retries} = state)
      when token_retries >= @token_max_retries do
    Logger.error fn() -> "Artsy JWT was not loaded, total attempts: #{token_retries}" end
    {:noreply, state}
  end
  def handle_info({:get_new_token}, %{"token_retries" => token_retries} = state) do
with {:ok, token, expire} <- get_token() do
timer = Process.send_after(self(), {:get_new_token}, get_token_delay(expire))
new_state = %{
state | "token" => token,
"token_expire_ts" => expire,
"token_timer" => timer,
"token_retries" => 0
}
{:noreply, new_state}
else
_ ->
Logger.warn fn() -> "Artsy JWT was not loaded" end
timer = Process.send_after(self(), {:get_new_token}, @token_failure_delay)
{:noreply, %{state | "token_timer" => timer, "token_retries" => token_retries + 1}}
end
end
@doc """
Helper function to read global config in scope of this module.
"""
def config, do: Application.get_env(:artsy, Artsy)
def config(key, default \\ nil) do
config() |> Keyword.get(key, default) |> resolve_config(default)
end
@doc """
Append REST API main url.
"""
@spec process_url(String.t) :: String.t
def process_url(url) do
config(:url) <> url
end
@doc """
Generate new JWT to access Artsy API.
"""
  @spec get_token() :: {:ok, String.t(), integer()} | {:error, module()}
def get_token do
url = "/tokens/xapp_token?client_id=#{config(:client_id)}&client_secret=#{config(:client_secret)}"
with {:ok, %{body: json_body, status_code: 201}} <- Artsy.post(url, ""),
{:ok, %{"token" => token, "expires_at" => expires_at}} <- Poison.decode(json_body),
{:ok, datetime, _offset} <- DateTime.from_iso8601(expires_at)
do
{:ok, token, DateTime.to_unix(datetime)}
else
er ->
        Logger.error fn() -> "Artsy token request failed: #{inspect(er)}" end
{:error, Artsy.TokenError}
end
end
@spec get_token_delay(integer) :: integer
defp get_token_delay(expire) do
reload_delay = (expire - :os.system_time(:seconds) - @token_reload) * 1000
get_token_delay(:valid?, reload_delay)
end
defp get_token_delay(:valid?, delay) when delay < @token_min_delay, do: @token_min_delay
defp get_token_delay(:valid?, delay), do: delay
# Add security header
defp process_request_headers(headers) when is_map(headers) do
Enum.into(headers, headers())
end
defp process_request_headers(headers), do: headers ++ headers()
# Default headers added to all requests
defp headers do
[
{"Content-Type", "application/json"},
{"Accept", "application/json"}
]
end
defp headers(:token, token), do: [{"X-XAPP-Token", token}]
defp resolve_config({:system, var_name}, default),
do: System.get_env(var_name) || default
defp resolve_config(value, _default),
do: value
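  # The {:system, var} form lets configuration resolve from the environment
  # at runtime, e.g. (key names mirror the config/2 lookups above; the
  # values are illustrative):
  #
  #   config :artsy, Artsy,
  #     url: "https://api.artsy.net/api",
  #     client_id: {:system, "ARTSY_CLIENT_ID"},
  #     client_secret: {:system, "ARTSY_CLIENT_SECRET"}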
end
| 30.29588 | 106 | 0.642601 |
f7cf745e99e822246dafb2566a6d41890c153c6c | 86 | exs | Elixir | lib/mix/test/fixtures/test_stale/test/b_test_stale.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | ["Apache-2.0"] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/mix/test/fixtures/test_stale/test/b_test_stale.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | ["Apache-2.0"] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/mix/test/fixtures/test_stale/test/b_test_stale.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | ["Apache-2.0"] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | defmodule BTest do
use ExUnit.Case
test "f" do
assert B.f() == :ok
end
end
| 10.75 | 23 | 0.604651 |
f7cf8aed0c289e0915a5ab5bbbaa3a5942b28af5 | 222 | exs | Elixir | priv/repo/migrations/20200904204200_update_users1.exs | petermm/kandesk | 6d940efcd1023b4667904b5a7a3a517be013702c | ["MIT"] | 158 | 2020-09-01T13:21:59.000Z | 2022-03-31T12:41:03.000Z | priv/repo/migrations/20200904204200_update_users1.exs | petermm/kandesk | 6d940efcd1023b4667904b5a7a3a517be013702c | ["MIT"] | 8 | 2020-09-01T21:09:05.000Z | 2022-01-20T21:46:18.000Z | priv/repo/migrations/20200904204200_update_users1.exs | petermm/kandesk | 6d940efcd1023b4667904b5a7a3a517be013702c | ["MIT"] | 18 | 2020-09-01T13:23:45.000Z | 2022-02-15T09:47:39.000Z | defmodule Kandesk.Repo.Migrations.UpdateUsers1 do
use Ecto.Migration
def change do
alter table(:users) do
add :timezone, :text
end
execute ~s(UPDATE users set timezone = 'Europe/Paris';)
end
end
| 18.5 | 59 | 0.689189 |
f7cfbdf606b32c5a3b0b22b7d1d4d9e51a48d02f | 3,381 | ex | Elixir | apps/andi/lib/andi/application.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | apps/andi/lib/andi/application.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | apps/andi/lib/andi/application.ex | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z | defmodule Andi.Application do
@moduledoc false
use Application
use Properties, otp_app: :andi
require Logger
@instance_name Andi.instance_name()
getter(:brook, generic: true)
getter(:kafka_endpoints, generic: true)
getter(:dead_letter_topic, generic: true)
getter(:secrets_endpoint, generic: true)
def start(_type, _args) do
set_guardian_db_config()
children =
[
{Phoenix.PubSub, [name: Andi.PubSub, adapter: Phoenix.PubSub.PG2]},
AndiWeb.Endpoint,
ecto_repo(),
private_access_processes()
]
|> TelemetryEvent.config_init_server(@instance_name)
|> List.flatten()
set_auth0_credentials()
set_aws_keys()
opts = [strategy: :one_for_one, name: Andi.Supervisor]
Supervisor.start_link(children, opts)
end
defp private_access_processes() do
if Andi.private_access?() do
[
guardian_db_sweeper(),
{Brook, brook()},
Andi.DatasetCache,
Andi.Migration.Migrations,
Andi.Scheduler,
elsa()
]
else
[]
end
end
defp elsa() do
case kafka_endpoints() do
nil ->
[]
_ ->
{Elsa.Supervisor,
endpoints: kafka_endpoints(),
name: :andi_elsa,
connection: :andi_reader,
group_consumer: [
name: "andi_reader",
group: "andi_reader_group",
topics: [dead_letter_topic()],
handler: Andi.MessageHandler,
handler_init_args: [],
config: [
begin_offset: :latest
]
]}
end
end
defp ecto_repo do
Application.get_env(:andi, Andi.Repo)
|> case do
nil -> []
_ -> Supervisor.Spec.worker(Andi.Repo, [])
end
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
AndiWeb.Endpoint.config_change(changed, removed)
:ok
end
def is_invalid_env_variable(var) do
is_nil(var) || String.length(var) == 0
end
def get_env_variable(var_name, throw_if_absent) do
var = System.get_env(var_name)
if is_invalid_env_variable(var) do
Logger.warn("Required environment variable #{var_name} is nil.")
if throw_if_absent do
raise RuntimeError,
message: "Could not start application, required #{var_name} is nil."
end
end
var
end
def set_auth0_credentials() do
Application.put_env(:ueberauth, Ueberauth.Strategy.Auth0.OAuth,
domain: get_env_variable("AUTH0_DOMAIN", false),
client_id: get_env_variable("AUTH0_CLIENT_ID", false),
client_secret: get_env_variable("AUTH0_CLIENT_SECRET", false)
)
end
def set_aws_keys() do
Application.put_env(:ex_aws, :access_key_id, get_env_variable("AWS_ACCESS_KEY_ID", true))
Application.put_env(:ex_aws, :secret_access_key, get_env_variable("AWS_ACCESS_KEY_SECRET", true))
end
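  # Illustrative environment for the lookups above (all values are fake):
  #
  #   AUTH0_DOMAIN=example.eu.auth0.com
  #   AUTH0_CLIENT_ID=abc123
  #   AUTH0_CLIENT_SECRET=not-a-real-secret
  #   AWS_ACCESS_KEY_ID=AKIAEXAMPLE
  #   AWS_ACCESS_KEY_SECRET=not-a-real-key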
defp guardian_db_sweeper do
Application.get_env(:andi, Guardian.DB)
|> case do
nil ->
[]
_config ->
Supervisor.Spec.worker(Guardian.DB.Token.SweeperServer, [])
end
end
defp set_guardian_db_config do
Application.get_env(:andi, Guardian.DB)
|> case do
nil ->
[]
config ->
Application.put_env(:guardian, Guardian.DB, config)
end
end
end
| 23.643357 | 101 | 0.637977 |
f7cfd33fa1d23f96562df1c37f250bdd7eb2cbe4 | 3,761 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/dimension_values.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/dimension_values.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/dimension_values.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Api.DimensionValues do
@moduledoc """
API calls for all endpoints tagged `DimensionValues`.
"""
alias GoogleApi.DFAReporting.V33.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves list of report dimension values for a list of filters.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - The DFA user profile ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:maxResults` (*type:* `integer()`) - Maximum number of results to return.
* `:pageToken` (*type:* `String.t`) - The value of the nextToken from the previous result page.
* `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.DimensionValueRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.DimensionValueList{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_dimension_values_query(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.DFAReporting.V33.Model.DimensionValueList.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def dfareporting_dimension_values_query(
connection,
profile_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:maxResults => :query,
:pageToken => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/dimensionvalues/query", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.DimensionValueList{}])
end
end
| 41.788889 | 187 | 0.660197 |
f7cfe19cbd82544a5942a03d4a062b681129bdb4 | 3,714 | ex | Elixir | lib/fixes/refactor/map_join.ex | doorgan/credo_fixes | e64869bbd53644bb11480f925540cf75c73432af | ["Apache-2.0"] | null | null | null | lib/fixes/refactor/map_join.ex | doorgan/credo_fixes | e64869bbd53644bb11480f925540cf75c73432af | ["Apache-2.0"] | null | null | null | lib/fixes/refactor/map_join.ex | doorgan/credo_fixes | e64869bbd53644bb11480f925540cf75c73432af | ["Apache-2.0"] | null | null | null | defmodule CredoFixes.Fixes.Refactor.MapJoin do
@behaviour CredoFixes.Fixer
@impl true
def get_fixes(source) do
ast = Sourceror.parse_string!(source)
{_, fixes} = Macro.prewalk(ast, [], &get_fix/2)
Enum.reverse(fixes)
end
  # TODO: find a better way to do this, because there's a lot of repetition,
  # and the fixes can explode in complexity if they need to work over
  # several permutations of the same code.
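  # For illustration, every clause below rewrites one shape of the same
  # pattern into Enum.map_join/3, e.g. (sketch):
  #
  #   Enum.join(Enum.map(list, fun), sep)
  #   # becomes
  #   Enum.map_join(list, fun, sep)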
# Enum.join(Enum.map(a, b), c)
defp get_fix(
{{:., _, [{:__aliases__, _, [:Enum]}, :join]}, join_meta,
[{{:., _, [{:__aliases__, _, [:Enum]}, :map]}, map_meta, map_args}, joiner]} = ast,
fixes
) do
range = Sourceror.get_range(ast)
quoted = {{:., [], [{:__aliases__, [], [:Enum]}, :map_join]}, join_meta, map_args ++ [joiner]}
quoted =
Sourceror.append_comments(
quoted,
map_meta[:leading_comments]
)
fix = %{
change: Sourceror.to_string(quoted),
range: range
}
{quoted, [fix | fixes]}
end
# Enum.map(a, b) |> Enum.join(c)
defp get_fix(
{:|>, pipe_meta,
[
{{:., _, [{:__aliases__, _, [:Enum]}, :map]}, map_meta, map_args},
{{:., _, [{:__aliases__, _, [:Enum]}, :join]}, join_meta, joiner}
]} = ast,
fixes
) do
range = Sourceror.get_range(ast)
quoted = {{:., [], [{:__aliases__, [], [:Enum]}, :map_join]}, map_meta, map_args ++ joiner}
quoted =
Sourceror.append_comments(
quoted,
pipe_meta[:leading_comments] ++ join_meta[:leading_comments]
)
fix = %{
change: Sourceror.to_string(quoted),
range: range
}
{quoted, [fix | fixes]}
end
# a |> Enum.map(b) |> Enum.join(c)
defp get_fix(
{:|>, pipe1_meta,
[
{:|>, pipe2_meta,
[
base_arg,
{{:., _, [{:__aliases__, _, [:Enum]}, :map]}, map_meta, map_args}
]},
{{:., _, [{:__aliases__, _, [:Enum]}, :join]}, join_meta, join_args}
]} = ast,
fixes
) do
range = Sourceror.get_range(ast)
args = map_args ++ join_args
map_meta =
Keyword.update(map_meta, :leading_comments, [], fn comments ->
pipe2_meta[:leading_comments] ++ comments ++ join_meta[:leading_comments]
end)
|> Keyword.delete(:closing)
quoted =
{:|>, pipe1_meta,
[
base_arg,
{{:., [], [{:__aliases__, [], [:Enum]}, :map_join]}, map_meta, args}
]}
fix = %{
change: Sourceror.to_string(quoted),
range: range
}
{quoted, [fix | fixes]}
end
# Enum.join(a |> Enum.map(b), c)
defp get_fix(
{{:., _, [{:__aliases__, _, [:Enum]}, :join]}, join_meta,
[
{:|>, pipe_meta,
[
base_arg,
{{:., map_alias_meta, [{:__aliases__, _, [:Enum]}, :map]}, map_meta, map_args}
]},
joiner
]} = ast,
fixes
) do
range = Sourceror.get_range(ast)
joiner = Sourceror.prepend_comments(joiner, pipe_meta[:trailing_comments])
args = [base_arg] ++ map_args ++ [joiner]
meta =
Keyword.update(join_meta, :leading_comments, [], fn comments ->
comments ++
map_alias_meta[:leading_comments] ++
pipe_meta[:leading_comments] ++ map_meta[:leading_comments]
end)
|> Keyword.delete(:closing)
quoted = {{:., [], [{:__aliases__, [], [:Enum]}, :map_join]}, meta, args}
fix = %{
change: Sourceror.to_string(quoted),
range: range
}
{quoted, [fix | fixes]}
end
defp get_fix(ast, fixes) do
{ast, fixes}
end
end
| 25.265306 | 98 | 0.526387 |
f7cffa5010e6c7f151b82120b7203838f36414e9 | 2,230 | ex | Elixir | clients/classroom/lib/google_api/classroom/v1/model/guardian.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/classroom/lib/google_api/classroom/v1/model/guardian.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/classroom/lib/google_api/classroom/v1/model/guardian.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Classroom.V1.Model.Guardian do
@moduledoc """
Association between a student and a guardian of that student. The guardian
may receive information about the student's course work.
## Attributes
* `guardianId` (*type:* `String.t`, *default:* `nil`) - Identifier for the guardian.
* `guardianProfile` (*type:* `GoogleApi.Classroom.V1.Model.UserProfile.t`, *default:* `nil`) - User profile for the guardian.
* `invitedEmailAddress` (*type:* `String.t`, *default:* `nil`) - The email address to which the initial guardian invitation was sent.
This field is only visible to domain administrators.
* `studentId` (*type:* `String.t`, *default:* `nil`) - Identifier for the student to whom the guardian relationship applies.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:guardianId => String.t(),
:guardianProfile => GoogleApi.Classroom.V1.Model.UserProfile.t(),
:invitedEmailAddress => String.t(),
:studentId => String.t()
}
field(:guardianId)
field(:guardianProfile, as: GoogleApi.Classroom.V1.Model.UserProfile)
field(:invitedEmailAddress)
field(:studentId)
end
defimpl Poison.Decoder, for: GoogleApi.Classroom.V1.Model.Guardian do
def decode(value, options) do
GoogleApi.Classroom.V1.Model.Guardian.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Classroom.V1.Model.Guardian do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.448276 | 137 | 0.723767 |
f7d0645b1853e452d9c141c2b975ba93b942ae63 | 1,701 | ex | Elixir | clients/android_publisher/lib/google_api/android_publisher/v3/model/apks_add_externally_hosted_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/android_publisher/lib/google_api/android_publisher/v3/model/apks_add_externally_hosted_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/android_publisher/lib/google_api/android_publisher/v3/model/apks_add_externally_hosted_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AndroidPublisher.V3.Model.ApksAddExternallyHostedRequest do
@moduledoc """
## Attributes
* `externallyHostedApk` (*type:* `GoogleApi.AndroidPublisher.V3.Model.ExternallyHostedApk.t`, *default:* `nil`) - The definition of the externally-hosted APK and where it is located.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:externallyHostedApk => GoogleApi.AndroidPublisher.V3.Model.ExternallyHostedApk.t()
}
field(:externallyHostedApk, as: GoogleApi.AndroidPublisher.V3.Model.ExternallyHostedApk)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidPublisher.V3.Model.ApksAddExternallyHostedRequest do
def decode(value, options) do
GoogleApi.AndroidPublisher.V3.Model.ApksAddExternallyHostedRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidPublisher.V3.Model.ApksAddExternallyHostedRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.191489 | 186 | 0.771311 |
f7d089141df64c3183d4bf4d6dbc4cd70692c32f | 201 | exs | Elixir | demo/sitBRU_Demo/test/controllers/page_controller_test.exs | ceedee666/sitbru_2016 | b23b4a81a1daec87801c6435ff04d32be043e0b9 | ["MIT"] | null | null | null | demo/sitBRU_Demo/test/controllers/page_controller_test.exs | ceedee666/sitbru_2016 | b23b4a81a1daec87801c6435ff04d32be043e0b9 | ["MIT"] | null | null | null | demo/sitBRU_Demo/test/controllers/page_controller_test.exs | ceedee666/sitbru_2016 | b23b4a81a1daec87801c6435ff04d32be043e0b9 | ["MIT"] | null | null | null | defmodule SitBRU_Demo.PageControllerTest do
use SitBRU_Demo.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 22.333333 | 60 | 0.686567 |
f7d08f4d8e5c5750245f1900da0d109e87e6e0e9 | 1,594 | ex | Elixir | lib/dialyxir/warnings/unmatched_return.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | ["Apache-2.0"] | 1,455 | 2015-01-03T02:53:19.000Z | 2022-03-12T00:31:25.000Z | lib/dialyxir/warnings/unmatched_return.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | ["Apache-2.0"] | 330 | 2015-05-14T13:53:13.000Z | 2022-03-29T17:12:23.000Z | lib/dialyxir/warnings/unmatched_return.ex | staring-frog/dialyxir | b78735f75b325238b7db20d9eed22f018cca5f26 | ["Apache-2.0"] | 146 | 2015-02-03T18:19:43.000Z | 2022-03-07T10:05:20.000Z | defmodule Dialyxir.Warnings.UnmatchedReturn do
@moduledoc """
The invoked expression returns a union of types and the call does
not match on its return types using e.g. a case or wildcard.
## Example
defmodule Example do
require Integer
def ok() do
n = :rand.uniform(100)
multiple_returns(n)
:ok
end
defp multiple_returns(n) do
if Integer.is_even(n) do
:ok
else
{:error, "error"}
end
end
end
This would NOT result in a warning:
defmodule Example do
require Integer
def ok() do
n = :rand.uniform(100)
multiple_returns(n)
:ok
end
defp multiple_returns(n) do
if Integer.is_even(n) do
:ok
else
:error
end
end
end
"""
@behaviour Dialyxir.Warning
@impl Dialyxir.Warning
@spec warning() :: :unmatched_return
def warning(), do: :unmatched_return
@impl Dialyxir.Warning
@spec format_short([String.t()]) :: String.t()
def format_short(_) do
"The expression produces multiple types, but none are matched."
end
@impl Dialyxir.Warning
@spec format_long([String.t()]) :: String.t()
def format_long([type]) do
pretty_type = Erlex.pretty_print_type(type)
"""
The expression produces a value of type:
#{pretty_type}
but this value is unmatched.
"""
end
@impl Dialyxir.Warning
@spec explain() :: String.t()
def explain() do
@moduledoc
end
end
| 19.439024 | 67 | 0.580301 |
f7d0940f4e60aaea5efef08df319b57389a77084 | 1,364 | exs | Elixir | mix.exs | wisq/ex_co2_mini | b949f6a13abcf27b78ec0cdeee16f746ab4b2360 | ["Apache-2.0"] | 1 | 2019-02-13T04:07:52.000Z | 2019-02-13T04:07:52.000Z | mix.exs | wisq/ex_co2_mini | b949f6a13abcf27b78ec0cdeee16f746ab4b2360 | ["Apache-2.0"] | null | null | null | mix.exs | wisq/ex_co2_mini | b949f6a13abcf27b78ec0cdeee16f746ab4b2360 | ["Apache-2.0"] | null | null | null | defmodule ExCO2Mini.MixProject do
use Mix.Project
def project do
[
app: :ex_co2_mini,
version: "0.1.3",
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps(),
docs: docs(),
description: description(),
package: package(),
compilers: [:elixir_make] ++ Mix.compilers()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
defp description do
"""
ExCO2Mini is a library to read carbon dioxide and temperature data from
the CO2Mini USB sensor, also known as the RAD-0301.
"""
end
defp package do
[
files: ["lib", "src", "mix.exs", "Makefile", "README.md", "LICENSE"],
maintainers: ["Adrian Irving-Beer"],
licenses: ["Apache Version 2.0"],
links: %{GitHub: "https://github.com/wisq/ex_co2_mini"}
]
end
defp docs do
[
main: "readme",
extras: ["README.md"]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
{:elixir_make, "~> 0.4", runtime: false},
{:ex_doc, "~> 0.10", only: :dev},
{:version_tasks, "~> 0.11.1", only: :dev}
]
end
end
| 23.118644 | 87 | 0.571114 |
f7d0a04a769b8cc08b24ddf816a92c60654c6a30 | 4,076 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/autoscaling_policy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/compute/lib/google_api/compute/v1/model/autoscaling_policy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/compute/lib/google_api/compute/v1/model/autoscaling_policy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.AutoscalingPolicy do
  @moduledoc """
  Cloud Autoscaler policy.

  ## Attributes

* `coolDownPeriodSec` (*type:* `integer()`, *default:* `nil`) - The number of seconds that the autoscaler should wait before it starts collecting information from a new instance. This prevents the autoscaler from collecting information when the instance is initializing, during which the collected usage would not be reliable. The default time autoscaler waits is 60 seconds.
Virtual machine initialization times might vary because of numerous factors. We recommend that you test how long an instance may take to initialize. To do this, create an instance and time the startup process.
* `cpuUtilization` (*type:* `GoogleApi.Compute.V1.Model.AutoscalingPolicyCpuUtilization.t`, *default:* `nil`) - Defines the CPU utilization policy that allows the autoscaler to scale based on the average CPU utilization of a managed instance group.
* `customMetricUtilizations` (*type:* `list(GoogleApi.Compute.V1.Model.AutoscalingPolicyCustomMetricUtilization.t)`, *default:* `nil`) - Configuration parameters of autoscaling based on a custom metric.
* `loadBalancingUtilization` (*type:* `GoogleApi.Compute.V1.Model.AutoscalingPolicyLoadBalancingUtilization.t`, *default:* `nil`) - Configuration parameters of autoscaling based on load balancer.
* `maxNumReplicas` (*type:* `integer()`, *default:* `nil`) - The maximum number of instances that the autoscaler can scale up to. This is required when creating or updating an autoscaler. The maximum number of replicas should not be lower than minimal number of replicas.
* `minNumReplicas` (*type:* `integer()`, *default:* `nil`) - The minimum number of replicas that the autoscaler can scale down to. This cannot be less than 0. If not provided, autoscaler will choose a default value depending on maximum number of instances allowed.
* `mode` (*type:* `String.t`, *default:* `nil`) - Defines operating mode for this policy.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:coolDownPeriodSec => integer(),
:cpuUtilization => GoogleApi.Compute.V1.Model.AutoscalingPolicyCpuUtilization.t(),
:customMetricUtilizations =>
list(GoogleApi.Compute.V1.Model.AutoscalingPolicyCustomMetricUtilization.t()),
:loadBalancingUtilization =>
GoogleApi.Compute.V1.Model.AutoscalingPolicyLoadBalancingUtilization.t(),
:maxNumReplicas => integer(),
:minNumReplicas => integer(),
:mode => String.t()
}
field(:coolDownPeriodSec)
field(:cpuUtilization, as: GoogleApi.Compute.V1.Model.AutoscalingPolicyCpuUtilization)
field(:customMetricUtilizations,
as: GoogleApi.Compute.V1.Model.AutoscalingPolicyCustomMetricUtilization,
type: :list
)
field(:loadBalancingUtilization,
as: GoogleApi.Compute.V1.Model.AutoscalingPolicyLoadBalancingUtilization
)
field(:maxNumReplicas)
field(:minNumReplicas)
field(:mode)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.AutoscalingPolicy do
def decode(value, options) do
GoogleApi.Compute.V1.Model.AutoscalingPolicy.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.AutoscalingPolicy do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
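
# Sketch (editorial, not part of the generated file): with the Poison
# implementations above, a JSON payload decodes straight into this model:
#
#     Poison.decode!(
#       ~s({"coolDownPeriodSec": 60, "maxNumReplicas": 10, "minNumReplicas": 1}),
#       as: %GoogleApi.Compute.V1.Model.AutoscalingPolicy{}
#     )
#     #=> %GoogleApi.Compute.V1.Model.AutoscalingPolicy{coolDownPeriodSec: 60, ...}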
| 52.935065 | 379 | 0.751227 |
f7d0a1d575896518ee7d4c3e512b5b119ef1d857 | 4,015 | ex | Elixir | lib/changelog/schema/episode/episode_request.ex | sdrees/changelog.com | 955cdcf93d74991062f19a03e34c9f083ade1705 | ["MIT"] | 1 | 2019-11-02T08:32:25.000Z | 2019-11-02T08:32:25.000Z | lib/changelog/schema/episode/episode_request.ex | sdrees/changelog.com | 955cdcf93d74991062f19a03e34c9f083ade1705 | ["MIT"] | null | null | null | lib/changelog/schema/episode/episode_request.ex | sdrees/changelog.com | 955cdcf93d74991062f19a03e34c9f083ade1705 | ["MIT"] | null | null | null |
defmodule Changelog.EpisodeRequest do
use Changelog.Schema
alias Changelog.{Episode, Podcast, Person}
defenum(Status, declined: -1, fresh: 0, pending: 1, failed: 2)
schema "episode_requests" do
field :status, Status, default: :fresh
field :hosts, :string
field :guests, :string
field :topics, :string
field :pitch, :string
field :pronunciation, :string
field :decline_message, :string, default: ""
belongs_to :podcast, Podcast
belongs_to :submitter, Person
has_one :episode, Episode, foreign_key: :request_id
timestamps()
end
def fresh(query \\ __MODULE__), do: from(q in query, where: q.status == ^:fresh)
def active(query \\ __MODULE__), do: from(q in query, where: q.status in [^:fresh, ^:pending])
def pending(query \\ __MODULE__), do: from(q in query, where: q.status == ^:pending)
def declined(query \\ __MODULE__), do: from(q in query, where: q.status == ^:declined)
def failed(query \\ __MODULE__), do: from(q in query, where: q.status == ^:failed)
def with_episode(query \\ __MODULE__) do
from(q in query, join: e in Episode, on: q.id == e.request_id)
end
def with_published_episode(query \\ __MODULE__) do
from(q in query, join: e in Episode, on: q.id == e.request_id, where: e.published)
end
def with_unpublished_episode(query \\ __MODULE__) do
from(q in query, join: e in Episode, on: q.id == e.request_id, where: not(e.published))
end
def sans_episode(query \\ __MODULE__) do
from(
q in query,
left_join: e in Episode,
on: q.id == e.request_id,
where: is_nil(e.id)
)
end
def admin_changeset(struct, params \\ %{}) do
struct
|> cast(params, ~w(podcast_id submitter_id hosts guests topics pitch pronunciation)a)
|> validate_required([:podcast_id, :submitter_id, :pitch])
|> foreign_key_constraint(:podcast_id)
end
def submission_changeset(struct, params \\ %{}) do
struct
|> cast(params, ~w(podcast_id submitter_id hosts guests topics pitch pronunciation)a)
|> validate_required([:podcast_id, :submitter_id, :pitch])
|> validate_length(:topics, max: 140, message: "Keep it tweet size, please (OG 140 chars)")
|> foreign_key_constraint(:podcast_id)
end
def preload_all(request) do
request
|> preload_episode()
|> preload_podcast()
|> preload_submitter()
end
def preload_episode(query = %Ecto.Query{}), do: Ecto.Query.preload(query, :episode)
def preload_episode(request), do: Repo.preload(request, :episode)
def preload_podcast(query = %Ecto.Query{}), do: Ecto.Query.preload(query, :podcast)
def preload_podcast(request), do: Repo.preload(request, :podcast)
def preload_submitter(query = %Ecto.Query{}), do: Ecto.Query.preload(query, :submitter)
def preload_submitter(request), do: Repo.preload(request, :submitter)
def is_undecided(%{episode: episode}) when is_map(episode), do: false
def is_undecided(%{status: status}), do: Enum.member?(~w(fresh pending)a, status)
def is_pendable(%{episode: episode}) when is_map(episode), do: false
def is_pendable(%{status: status}), do: Enum.member?(~w(fresh)a, status)
def is_archived(%{status: status}), do: Enum.member?(~w(failed declined)a, status)
def is_complete(%{episode: episode}) when is_map(episode), do: episode.published
def is_complete(%{episode: nil}), do: false
def decline!(request), do: update_status!(request, :declined)
def decline!(request, ""), do: decline!(request)
def decline!(request, message) do
request
|> change(%{decline_message: message})
|> update_status!(:declined)
end
def fail!(request), do: update_status!(request, :failed)
def fail!(request, ""), do: fail!(request)
def fail!(request, message) do
request
|> change(%{decline_message: message})
|> update_status!(:failed)
end
def pend!(request), do: update_status!(request, :pending)
defp update_status!(request, status) do
request |> change(%{status: status}) |> Repo.update!()
end
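
  # Sketch (editorial, not part of the upstream file): the intended request
  # lifecycle, using the helpers above --
  #
  #     request |> EpisodeRequest.pend!()                # :fresh -> :pending
  #     request |> EpisodeRequest.decline!("Not a fit")  # -> :declined, message stored
  #     request |> EpisodeRequest.fail!()                # -> :failed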
end
| 33.739496 | 96 | 0.685928 |
f7d0d8c5faae2b0ab92709ebf72b5186fb6175ad | 328 | ex | Elixir | lib/rdf/xsd/datatypes/unsigned_long.ex | pukkamustard/rdf-ex | c459d8e7fa548fdfad82643338b68decf380a296 | ["MIT"] | 53 | 2017-06-25T22:20:44.000Z | 2020-04-27T17:27:51.000Z | lib/rdf/xsd/datatypes/unsigned_long.ex | pukkamustard/rdf-ex | c459d8e7fa548fdfad82643338b68decf380a296 | ["MIT"] | 7 | 2017-06-25T00:29:11.000Z | 2020-03-11T00:23:47.000Z | lib/rdf/xsd/datatypes/unsigned_long.ex | pukkamustard/rdf-ex | c459d8e7fa548fdfad82643338b68decf380a296 | ["MIT"] | 3 | 2020-07-03T13:25:36.000Z | 2021-04-04T12:33:51.000Z |
defmodule RDF.XSD.UnsignedLong do
use RDF.XSD.Datatype.Restriction,
name: "unsignedLong",
id: RDF.Utils.Bootstrapping.xsd_iri("unsignedLong"),
base: RDF.XSD.NonNegativeInteger
def_facet_constraint RDF.XSD.Facets.MinInclusive, 0
def_facet_constraint RDF.XSD.Facets.MaxInclusive, 18_446_744_073_709_551_615
end
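
# Sketch (editorial, not part of the upstream file), assuming the standard
# RDF.XSD.Datatype API (new/1, valid?/1): the two facets above restrict the
# value space to xsd:unsignedLong's 64-bit range --
#
#     RDF.XSD.UnsignedLong.new(1) |> RDF.XSD.UnsignedLong.valid?()
#     #=> true
#     RDF.XSD.UnsignedLong.new(-1) |> RDF.XSD.UnsignedLong.valid?()
#     #=> false (below MinInclusive 0)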
| 32.8 | 78 | 0.795732 |
f7d0f071b85d5d346e691a3851d3311af4ab7e16 | 1,444 | exs | Elixir | test/test_helper.exs | gitlunar/stripity-stripe | a45079ec3fcc2c80f686297614f78c52c41c6b80 | ["BSD-3-Clause"] | 1 | 2020-05-03T15:41:49.000Z | 2020-05-03T15:41:49.000Z | test/test_helper.exs | gitlunar/stripity-stripe | a45079ec3fcc2c80f686297614f78c52c41c6b80 | ["BSD-3-Clause"] | null | null | null | test/test_helper.exs | gitlunar/stripity-stripe | a45079ec3fcc2c80f686297614f78c52c41c6b80 | ["BSD-3-Clause"] | 2 | 2016-08-23T21:06:49.000Z | 2020-02-13T16:04:16.000Z |
ExUnit.start
# Stripe.start
ExUnit.configure exclude: [disabled: true], seed: 0
defmodule Helper do
def create_test_plans do
Stripe.Plans.create [id: "test-std", name: "Test Plan Standard", amount: 100, interval: "month"]
Stripe.Plans.create [id: "test-dlx", name: "Test Plan Deluxe", amount: 1000, interval: "month"]
end
def create_test_plan id do
Stripe.Plans.create [id: id, name: "Test Plan #{id}", amount: 100, interval: "month"]
end
def delete_test_plan id do
Stripe.Plans.delete id
end
def delete_test_plans do
Stripe.Plans.delete "test-std"
Stripe.Plans.delete "test-dlx"
end
def create_test_token do
params = [
card: [
number: "4242424242424242",
exp_month: 8,
exp_year: 2016,
cvc: "314"
]
]
{:ok, token} = Stripe.Tokens.create(params)
token
end
def create_test_customer( email ) do
new_customer = [
email: "#{email}",
description: "Test Account",
card: [
number: "4111111111111111",
exp_month: 01,
exp_year: 2018,
cvc: 123,
name: "Joe Test User"
]
]
{:ok, res} = Stripe.Customers.create new_customer
res
end
def create_test_account(email) do
new_account = [
email: email,
managed: true,
legal_entity: [
type: "individual"
]
]
{:ok, res} = Stripe.Accounts.create new_account
res
end
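
  # Sketch (editorial, not part of the upstream file): tests typically call
  # these helpers from a setup block, e.g.
  #
  #     setup do
  #       customer = Helper.create_test_customer("test@example.com")
  #       {:ok, customer: customer}
  #     end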
end
| 22.5625 | 100 | 0.609418 |
f7d118f3da761ad766730e11140fed41d6facfec | 71 | exs | Elixir | test/test_helper.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | ["MIT"] | null | null | null | test/test_helper.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | ["MIT"] | null | null | null | test/test_helper.exs | shawn-mcginty/phoenix-webpack-skeleton | 311559c860da97157d643d06cd2601aee81c6a3a | ["MIT"] | null | null | null |
ExUnit.start
Ecto.Adapters.SQL.Sandbox.mode(TimeVoice.Repo, :manual)
| 14.2 | 55 | 0.788732 |
f7d11dec29ea2c3498ee64fbb7e56f754448ca5b | 19,906 | exs | Elixir | test/phoenix_live_view/html_engine_test.exs | alexpls/phoenix_live_view | a95b3e9e6a12aee6f1c3eb07e01bd2a1cac05008 | ["MIT"] | null | null | null | test/phoenix_live_view/html_engine_test.exs | alexpls/phoenix_live_view | a95b3e9e6a12aee6f1c3eb07e01bd2a1cac05008 | ["MIT"] | null | null | null | test/phoenix_live_view/html_engine_test.exs | alexpls/phoenix_live_view | a95b3e9e6a12aee6f1c3eb07e01bd2a1cac05008 | ["MIT"] | null | null | null |
defmodule Phoenix.LiveView.HTMLEngineTest do
use ExUnit.Case, async: true
import Phoenix.LiveView.Helpers, only: [sigil_H: 2, render_block: 1, render_block: 2]
alias Phoenix.LiveView.HTMLEngine
alias Phoenix.LiveView.HTMLTokenizer.ParseError
defp eval(string, assigns \\ %{}, opts \\ []) do
opts =
Keyword.merge(opts,
file: __ENV__.file,
engine: HTMLEngine,
subengine: Phoenix.LiveView.Engine
)
EEx.eval_string(string, [assigns: assigns], opts)
end
defp render(string, assigns \\ %{}) do
string
|> eval(assigns)
|> Phoenix.HTML.Safe.to_iodata()
|> IO.iodata_to_binary()
end
defmacrop compile(string) do
quote do
unquote(EEx.compile_string(string, file: __ENV__.file, engine: HTMLEngine))
|> Phoenix.HTML.Safe.to_iodata()
|> IO.iodata_to_binary()
end
end
def assigns_component(assigns) do
~H"<%= inspect(Map.delete(assigns, :__changed__)) %>"
end
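
  # (Editorial note, not in the upstream file: this component renders its
  # assigns via inspect/1, minus the internal :__changed__ key, so the
  # attribute tests below can assert exactly what reached the component.)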
def remote_function_component(assigns) do
~H"REMOTE COMPONENT: Value: <%= @value %>"
end
def remote_function_component_with_inner_content(assigns) do
~H"REMOTE COMPONENT: Value: <%= @value %>, Content: <%= render_block(@inner_block) %>"
end
def remote_function_component_with_inner_content_args(assigns) do
~H"""
REMOTE COMPONENT WITH ARGS: Value: <%= @value %>
<%= render_block(@inner_block, %{
downcase: String.downcase(@value),
upcase: String.upcase(@value)
}) %>
"""
end
defp local_function_component(assigns) do
~H"LOCAL COMPONENT: Value: <%= @value %>"
end
defp local_function_component_with_inner_content(assigns) do
~H"LOCAL COMPONENT: Value: <%= @value %>, Content: <%= render_block(@inner_block) %>"
end
defp local_function_component_with_inner_content_args(assigns) do
~H"""
LOCAL COMPONENT WITH ARGS: Value: <%= @value %>
<%= render_block(@inner_block, %{
downcase: String.downcase(@value),
upcase: String.upcase(@value)
}) %>
"""
end
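
  # (Editorial note, not in the upstream file: the map passed to
  # render_block/2 in the *_args components above is what a caller's
  # `let={pattern}` is matched against, as the "inner content with args"
  # tests below exercise.)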
test "handles text" do
assert render("Hello") == "Hello"
end
test "handles regular blocks" do
assert render("""
Hello <%= if true do %>world!<% end %>
""") == "Hello world!"
end
test "handles html blocks with regular blocks" do
assert render("""
Hello <div>w<%= if true do %>orld<% end %>!</div>
""") == "Hello <div>world!</div>"
end
test "handles string attributes" do
assert render("""
Hello <div name="my name" phone="111">text</div>
""") == "Hello <div name=\"my name\" phone=\"111\">text</div>"
end
test "handles string attribute value keeping special chars unchanged" do
assert render("<div name='1 < 2'/>") == "<div name='1 < 2'></div>"
end
test "handles boolean attributes" do
assert render("""
Hello <div hidden>text</div>
""") == "Hello <div hidden>text</div>"
end
test "handles interpolated attributes" do
assert render("""
Hello <div name={to_string(123)} phone={to_string(456)}>text</div>
""") == "Hello <div name=\"123\" phone=\"456\">text</div>"
end
test "handles interpolated attribute value containing special chars" do
    assert render("<div name={@val}/>", %{val: "1 < 2"}) == "<div name=\"1 &lt; 2\"></div>"
end
test "handles interpolated attributes with strings" do
assert render("""
<div name={String.upcase("abc")}>text</div>
""") == "<div name=\"ABC\">text</div>"
end
test "handles interpolated attributes with curly braces" do
assert render("""
<div name={elem({"abc"}, 0)}>text</div>
""") == "<div name=\"abc\">text</div>"
end
test "handles dynamic attributes" do
assert render("Hello <div {@attrs}>text</div>", %{attrs: [name: "1", phone: to_string(2)]}) ==
"Hello <div name=\"1\" phone=\"2\">text</div>"
end
test "keeps attribute ordering" do
assigns = %{attrs1: [d1: "1"], attrs2: [d2: "2"]}
template = ~S(<div {@attrs1} sd1={1} s1="1" {@attrs2} s2="2" sd2={2} />)
assert render(template, assigns) ==
~S(<div d1="1" sd1="1" s1="1" d2="2" s2="2" sd2="2"></div>)
assert %Phoenix.LiveView.Rendered{static: ["<div", "", " s1=\"1\"", " s2=\"2\"", "></div>"]} =
eval(template, assigns)
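    # (Editorial note, not in the upstream file: only the literal s1/s2
    # attributes and the tag fragments stay in `static`; {@attrs1}, {@attrs2}
    # and the sd1/sd2 expressions become dynamics, hence the empty slots.)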
end
test "optimizes attributes with literal string values" do
assigns = %{unsafe: "<foo>", safe: {:safe, "<foo>"}}
# binaries are extracted out
template = ~S(<div id={"<foo>"} />)
    assert render(template, assigns) == ~S(<div id="&lt;foo&gt;"></div>)
    assert %Phoenix.LiveView.Rendered{static: ["<div id=\"&lt;foo&gt;\"></div>"]} =
eval(template, assigns)
# binary concatenation is extracted out
template = ~S(<div id={"pre-" <> @unsafe} />)
    assert render(template, assigns) == ~S(<div id="pre-&lt;foo&gt;"></div>)
assert %Phoenix.LiveView.Rendered{static: ["<div id=\"pre-", "\"></div>"]} =
eval(template, assigns)
template = ~S(<div id={"pre-" <> @unsafe <> "-pos"} />)
    assert render(template, assigns) == ~S(<div id="pre-&lt;foo&gt;-pos"></div>)
assert %Phoenix.LiveView.Rendered{static: ["<div id=\"pre-", "-pos\"></div>"]} =
eval(template, assigns)
# interpolation is extracted out
template = ~S(<div id={"pre-#{@unsafe}-pos"} />)
    assert render(template, assigns) == ~S(<div id="pre-&lt;foo&gt;-pos"></div>)
assert %Phoenix.LiveView.Rendered{static: ["<div id=\"pre-", "-pos\"></div>"]} =
eval(template, assigns)
# mixture of interpolation and binary concatenation is extracted out
template = ~S(<div id={"pre-" <> "#{@unsafe}-pos"} />)
    assert render(template, assigns) == ~S(<div id="pre-&lt;foo&gt;-pos"></div>)
assert %Phoenix.LiveView.Rendered{static: ["<div id=\"pre-", "-pos\"></div>"]} =
eval(template, assigns)
# raises if not a binary
assert_raise ArgumentError, "expected a binary in <>, got: {:safe, \"<foo>\"}", fn ->
render(~S(<div id={"pre-" <> @safe} />), assigns)
end
end
test "optimizes class attributes" do
assigns = %{
nil_assign: nil,
true_assign: true,
false_assign: false,
unsafe: "<foo>",
safe: {:safe, "<foo>"},
list: ["safe", false, nil, "<unsafe>"]
}
assert %Phoenix.LiveView.Rendered{static: ["<div class=\"", "\"></div>"]} =
eval(~S(<div class={@safe} />), assigns)
template = ~S(<div class={@nil_assign} />)
assert render(template, assigns) == ~S(<div class=""></div>)
template = ~S(<div class={@false_assign} />)
assert render(template, assigns) == ~S(<div class=""></div>)
template = ~S(<div class={@true_assign} />)
assert render(template, assigns) == ~S(<div class=""></div>)
template = ~S(<div class={@unsafe} />)
    assert render(template, assigns) == ~S(<div class="&lt;foo&gt;"></div>)
template = ~S(<div class={@safe} />)
assert render(template, assigns) == ~S(<div class="<foo>"></div>)
template = ~S(<div class={@list} />)
    assert render(template, assigns) == ~S(<div class="safe &lt;unsafe&gt;"></div>)
end
test "optimizes attributes that can be empty" do
assigns = %{
nil_assign: nil,
true_assign: true,
false_assign: false,
unsafe: "<foo>",
safe: {:safe, "<foo>"},
list: ["safe", false, nil, "<unsafe>"]
}
assert %Phoenix.LiveView.Rendered{static: ["<div style=\"", "\"></div>"]} =
eval(~S(<div style={@safe} />), assigns)
template = ~S(<div style={@nil_assign} />)
assert render(template, assigns) == ~S(<div style=""></div>)
template = ~S(<div style={@false_assign} />)
assert render(template, assigns) == ~S(<div style=""></div>)
template = ~S(<div style={@true_assign} />)
assert render(template, assigns) == ~S(<div style=""></div>)
template = ~S(<div style={@unsafe} />)
    assert render(template, assigns) == ~S(<div style="&lt;foo&gt;"></div>)
template = ~S(<div style={@safe} />)
assert render(template, assigns) == ~S(<div style="<foo>"></div>)
end
test "handle void elements" do
assert render("""
<div><br></div>\
""") == "<div><br></div>"
end
test "handle void elements with attributes" do
assert render("""
<div><br attr='1'></div>\
""") == "<div><br attr='1'></div>"
end
test "handle self close void elements" do
assert render("<hr/>") == "<hr>"
end
test "handle self close void elements with attributes" do
assert render(~S(<hr id="1"/>)) == ~S(<hr id="1">)
end
test "handle self close elements" do
assert render("<div/>") == "<div></div>"
end
test "handle self close elements with attributes" do
assert render("<div attr='1'/>") == "<div attr='1'></div>"
end
describe "handle function components" do
test "remote call (self close)" do
assigns = %{}
assert compile("<Phoenix.LiveView.HTMLEngineTest.remote_function_component value='1'/>") ==
"REMOTE COMPONENT: Value: 1"
end
test "remote call from alias (self close)" do
alias Phoenix.LiveView.HTMLEngineTest
assigns = %{}
assert compile("<HTMLEngineTest.remote_function_component value='1'/>") ==
"REMOTE COMPONENT: Value: 1"
end
test "remote call with inner content" do
assigns = %{}
assert compile("""
<Phoenix.LiveView.HTMLEngineTest.remote_function_component_with_inner_content value='1'>
The inner content
</Phoenix.LiveView.HTMLEngineTest.remote_function_component_with_inner_content>
""") == "REMOTE COMPONENT: Value: 1, Content: \n The inner content\n"
end
test "remote call with inner content with args" do
expected = """
REMOTE COMPONENT WITH ARGS: Value: aBcD
Upcase: ABCD
Downcase: abcd
"""
assigns = %{}
assert compile("""
<Phoenix.LiveView.HTMLEngineTest.remote_function_component_with_inner_content_args
value="aBcD"
let={%{upcase: upcase, downcase: downcase}}
>
Upcase: <%= upcase %>
Downcase: <%= downcase %>
</Phoenix.LiveView.HTMLEngineTest.remote_function_component_with_inner_content_args>
""") =~ expected
end
test "raise on remote call with inner content passing non-matching args" do
message = ~r"""
cannot match arguments sent from `render_block/2` against the pattern in `let`.
Expected a value matching `%{wrong: _}`, got: `%{downcase: "abcd", upcase: "ABCD"}`.
"""
assigns = %{}
assert_raise(RuntimeError, message, fn ->
compile("""
<Phoenix.LiveView.HTMLEngineTest.remote_function_component_with_inner_content_args
{[value: "aBcD"]}
let={%{wrong: _}}
>
...
</Phoenix.LiveView.HTMLEngineTest.remote_function_component_with_inner_content_args>
""")
end)
end
test "raise on remote call passing args to self close components" do
message = ~r".exs:2: cannot use `let` on a component without inner content"
assert_raise(CompileError, message, fn ->
eval("""
<br>
<Phoenix.LiveView.HTMLEngineTest.remote_function_component value='1' let={var}/>
""")
end)
end
test "local call (self close)" do
assigns = %{}
assert compile("<.local_function_component value='1'/>") ==
"LOCAL COMPONENT: Value: 1"
end
test "local call with inner content" do
assigns = %{}
assert compile("""
<.local_function_component_with_inner_content value='1'>
The inner content
</.local_function_component_with_inner_content>
""") == "LOCAL COMPONENT: Value: 1, Content: \n The inner content\n"
end
test "local call with inner content with args" do
expected = """
LOCAL COMPONENT WITH ARGS: Value: aBcD
Upcase: ABCD
Downcase: abcd
"""
assigns = %{}
assert compile("""
<.local_function_component_with_inner_content_args
value="aBcD"
let={%{upcase: upcase, downcase: downcase}}
>
Upcase: <%= upcase %>
Downcase: <%= downcase %>
</.local_function_component_with_inner_content_args>
""") =~ expected
assert compile("""
<.local_function_component_with_inner_content_args
{[value: "aBcD"]}
let={%{upcase: upcase, downcase: downcase}}
>
Upcase: <%= upcase %>
Downcase: <%= downcase %>
</.local_function_component_with_inner_content_args>
""") =~ expected
end
test "raise on local call with inner content passing non-matching args" do
message = ~r"""
cannot match arguments sent from `render_block/2` against the pattern in `let`.
Expected a value matching `%{wrong: _}`, got: `%{downcase: "abcd", upcase: "ABCD"}`.
"""
assigns = %{}
assert_raise(RuntimeError, message, fn ->
compile("""
<.local_function_component_with_inner_content_args
{[value: "aBcD"]}
let={%{wrong: _}}
>
...
</.local_function_component_with_inner_content_args>
""")
end)
end
test "raise on local call passing args to self close components" do
message = ~r".exs:2: cannot use `let` on a component without inner content"
assert_raise(CompileError, message, fn ->
eval("""
<br>
<.local_function_component value='1' let={var}/>
""")
end)
end
test "raise on duplicated `let`" do
message =
~r".exs:4:(8:)? cannot define multiple `let` attributes. Another `let` has already been defined at line 3"
assert_raise(ParseError, message, fn ->
eval("""
<br>
<Phoenix.LiveView.HTMLEngineTest.remote_function_component value='1'
let={var1}
let={var2}
/>
""")
end)
assert_raise(ParseError, message, fn ->
eval("""
<br>
<.local_function_component value='1'
let={var1}
let={var2}
/>
""")
end)
end
test "empty attributes" do
assigns = %{}
assert compile("<.assigns_component />") == "%{}"
end
test "dynamic attributes" do
assigns = %{attrs: [name: "1", phone: true]}
      assert compile("<.assigns_component {@attrs} />") ==
               "%{name: &quot;1&quot;, phone: true}"
end
test "sorts attributes by group: static + dynamic" do
assigns = %{attrs1: [d1: "1"], attrs2: [d2: "2", d3: "3"]}
      assert compile(
               "<.assigns_component d1=\"one\" {@attrs1} d=\"middle\" {@attrs2} d2=\"two\" />"
             ) ==
               "%{d: &quot;middle&quot;, d1: &quot;one&quot;, d2: &quot;two&quot;, d3: &quot;3&quot;}"
end
end
describe "tracks root" do
test "valid cases" do
assert eval("<foo></foo>").root == true
assert eval("<foo><%= 123 %></foo>").root == true
assert eval("<foo><bar></bar></foo>").root == true
assert eval("<foo><br /></foo>").root == true
assert eval("<foo />").root == true
assert eval("<br />").root == true
assert eval("<br>").root == true
assert eval(" <foo></foo> ").root == true
assert eval("\n\n<foo></foo>\n").root == true
end
test "invalid cases" do
assert eval("").root == false
assert eval("<foo></foo><bar></bar>").root == false
assert eval("<foo></foo><bar></bar>").root == false
assert eval("<br /><br />").root == false
assert eval("<%= 123 %>").root == false
assert eval("<foo></foo><%= 123 %>").root == false
assert eval("<%= 123 %><foo></foo>").root == false
assert eval("123<foo></foo>").root == false
assert eval("<foo></foo>123").root == false
assert eval("<.to_string />").root == false
assert eval("<.to_string></.to_string>").root == false
assert eval("<Kernel.to_string />").root == false
assert eval("<Kernel.to_string></Kernel.to_string>").root == false
end
end
describe "tag validations" do
test "handles script" do
assert render("<script>a = '<a>';<%= :b %> = '<b>';</script>") ==
"<script>a = '<a>';b = '<b>';</script>"
end
test "unmatched open/close tags" do
message =
~r".exs:4:(1:)? unmatched closing tag. Expected </div> for <div> at line 2, got: </span>"
assert_raise(ParseError, message, fn ->
eval("""
<br>
<div>
text
</span>
""")
end)
end
test "unmatched open/close tags with nested tags" do
message =
~r".exs:6:(1:)? unmatched closing tag. Expected </div> for <div> at line 2, got: </span>"
assert_raise(ParseError, message, fn ->
eval("""
<br>
<div>
<p>
text
</p>
</span>
""")
end)
end
test "invalid remote tag" do
message = ~r".exs:1:(1:)? invalid tag <Foo>"
assert_raise(ParseError, message, fn ->
eval("""
<Foo foo="bar" />
""")
end)
end
test "missing open tag" do
message = ~r".exs:2:(3:)? missing opening tag for </span>"
assert_raise(ParseError, message, fn ->
eval("""
text
</span>
""")
end)
end
test "missing closing tag" do
message = ~r/.exs:2:(1:)? end of file reached without closing tag for <div>/
assert_raise(ParseError, message, fn ->
eval("""
<br>
<div foo={@foo}>
""")
end)
message = ~r/.exs:2:(3:)? end of file reached without closing tag for <span>/
assert_raise(ParseError, message, fn ->
eval("""
text
<span foo={@foo}>
text
""")
end)
end
test "invalid tag name" do
message = ~r/.exs:2:(3:)? invalid tag <Oops>/
assert_raise(ParseError, message, fn ->
eval("""
<br>
<Oops foo={@foo}>
Bar
</Oops>
""")
end)
end
test "invalid tag" do
message = ~r/.exs:1:(11:)? expected closing `}` for expression/
assert_raise(ParseError, message, fn ->
eval("""
<div foo={<%= @foo %>}>bar</div>
""")
end)
end
end
describe "handle errors in expressions" do
if Version.match?(System.version(), ">= 1.12.0") do
test "inside attribute values" do
assert_raise(SyntaxError, "nofile:12:22: syntax error before: ','", fn ->
opts = [line: 10, indentation: 8]
eval(
"""
text
<%= "interpolation" %>
<div class={[,]}/>
""",
[],
opts
)
end)
end
test "inside root attribute value" do
assert_raise(SyntaxError, "nofile:12:16: syntax error before: ','", fn ->
opts = [line: 10, indentation: 8]
eval(
"""
text
<%= "interpolation" %>
<div {[,]}/>
""",
[],
opts
)
end)
end
else
test "older versions cannot provide correct line on errors" do
assert_raise(SyntaxError, ~r/nofile:2/, fn ->
opts = [line: 10, indentation: 8]
eval(
"""
text
<%= "interpolation" %>
<div class={[,]}/>
""",
[],
opts
)
end)
end
end
end
end
| 29.754858 | 114 | 0.55104 |
f7d12f3bd42fb991ee9d664fd43a8fa9d0760a94 | 2,252 | exs | Elixir | test/elrondex/check_online_transaction_test.exs | victorflx/elrondex | a90521ce5e39ad37453dcb53f527b8311ae1ae4f | ["MIT"] | 8 | 2021-10-02T16:25:19.000Z | 2022-02-03T17:50:34.000Z | test/elrondex/check_online_transaction_test.exs | victorflx/elrondex | a90521ce5e39ad37453dcb53f527b8311ae1ae4f | ["MIT"] | 1 | 2022-01-19T12:10:49.000Z | 2022-01-19T12:10:49.000Z | test/elrondex/check_online_transaction_test.exs | victorflx/elrondex | a90521ce5e39ad37453dcb53f527b8311ae1ae4f | ["MIT"] | 2 | 2022-01-10T07:48:16.000Z | 2022-02-06T17:05:57.000Z |
defmodule Elrondex.CheckOnlineTransactionTest do
alias Elrondex.{Account, Transaction, Network, REST}
use ExUnit.Case
test "sign and verify transaction" do
mainnet = Network.mainnet()
{:ok, config} = REST.get_network_config(mainnet)
mainnet =
mainnet
|> Network.config(config)
tx_hash = "eb06e3154318d81a33a7570226614d1350f2ea54e00e8afd96ff849280210070"
{:ok, tx} = REST.get_transaction(mainnet, tx_hash)
# IO.inspect(tx)
# TODO fast load to Transaction record
to_check = %Transaction{
nonce: Map.get(tx, "nonce"),
value: Map.get(tx, "value"),
receiver: Map.get(tx, "receiver"),
sender: Map.get(tx, "sender"),
gasPrice: Map.get(tx, "gasPrice"),
gasLimit: Map.get(tx, "gasLimit"),
# we assume it is all the time nil
data: Map.get(tx, "data"),
chainID: mainnet.erd_chain_id,
version: mainnet.erd_min_transaction_version,
signature: Map.get(tx, "signature")
}
# IO.inspect(to_check)
sender_account = Account.from_address(to_check.sender)
assert Transaction.sign_verify(to_check, sender_account) == true
end
test "sign and verify transaction with data" do
mainnet = Network.mainnet()
{:ok, config} = REST.get_network_config(mainnet)
mainnet =
mainnet
|> Network.config(config)
tx_hash = "1f41005cf8cee55d6f977c93033c57a23c65899971c2da72a08cb9babf5f3ca4"
{:ok, tx} = REST.get_transaction(mainnet, tx_hash)
# IO.inspect(tx)
# TODO fast load to Transaction record
to_check = %Transaction{
nonce: Map.get(tx, "nonce"),
value: Map.get(tx, "value"),
receiver: Map.get(tx, "receiver"),
sender: Map.get(tx, "sender"),
gasPrice: Map.get(tx, "gasPrice"),
gasLimit: Map.get(tx, "gasLimit"),
# we assume it is all the time not nil
data: Base.decode64!(Map.get(tx, "data")),
chainID: mainnet.erd_chain_id,
version: mainnet.erd_min_transaction_version,
signature: Map.get(tx, "signature")
}
IO.inspect(to_check)
sender_account = Account.from_address(to_check.sender)
assert Transaction.sign_verify(to_check, sender_account) == true
assert Transaction.sign_verify(to_check) == true
end
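
  # Sketch (editorial, not part of the upstream file): the map-to-struct
  # block duplicated in both tests could be extracted into a hypothetical
  # helper along these lines:
  #
  #     defp to_transaction(tx, network, data) do
  #       %Transaction{
  #         nonce: tx["nonce"],
  #         value: tx["value"],
  #         receiver: tx["receiver"],
  #         sender: tx["sender"],
  #         gasPrice: tx["gasPrice"],
  #         gasLimit: tx["gasLimit"],
  #         data: data,
  #         chainID: network.erd_chain_id,
  #         version: network.erd_min_transaction_version,
  #         signature: tx["signature"]
  #       }
  #     end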
end
| 30.849315 | 80 | 0.671403 |
f7d1348c58aec0e0f5b85784d11d8e3339b8bc2b | 153 | ex | Elixir | lib/types/get_parameter_value_struct.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | ["Artistic-2.0"] | 3 | 2017-11-29T05:07:35.000Z | 2019-12-18T17:16:41.000Z | lib/types/get_parameter_value_struct.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | ["Artistic-2.0"] | 1 | 2021-12-02T19:35:28.000Z | 2022-03-29T09:40:52.000Z | lib/types/get_parameter_value_struct.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | ["Artistic-2.0"] | 2 | 2017-11-29T05:07:30.000Z | 2020-11-10T07:10:42.000Z |
defmodule CWMP.Protocol.Messages.GetParameterValuesStruct do
@derive [Poison.Encoder]
defstruct name: nil,
type: nil
end
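
# Sketch (editorial, not part of the upstream file): with
# `@derive [Poison.Encoder]` above, an instance serializes directly, e.g.
#
#     %CWMP.Protocol.Messages.GetParameterValuesStruct{
#       name: "Device.DeviceInfo.SoftwareVersion",
#       type: "xsd:string"
#     }
#     |> Poison.encode!()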
| 25.5 | 60 | 0.653595 |
f7d14d6c43bc13c09fb9b6af7433868e37177fde | 472 | ex | Elixir | lib/events_tools/venues/building.ex | Apps-Team/conferencetools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | ["Apache-2.0"] | null | null | null | lib/events_tools/venues/building.ex | Apps-Team/conferencetools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | ["Apache-2.0"] | 6 | 2017-10-05T20:16:34.000Z | 2017-10-05T20:36:11.000Z | lib/events_tools/venues/building.ex | apps-team/events-tools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | ["Apache-2.0"] | null | null | null |
defmodule EventsTools.Venues.Building do
use Ecto.Schema
import Ecto.Changeset
alias EventsTools.Venues.Building
schema "buildings" do
field :name, :string
    belongs_to :venue, EventsTools.Venues.Venue # parent venue this building belongs to
    has_many :halls, EventsTools.Venues.Hall    # halls located inside this building
timestamps()
end
@doc false
def changeset(%Building{} = building, attrs) do
building
|> cast(attrs, [:name])
|> validate_required([:name])
end
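
  # Sketch (editorial, not part of the upstream file): typical use --
  #
  #     %Building{}
  #     |> Building.changeset(%{name: "Hall A"})
  #     |> Repo.insert()
  #
  # (Repo module assumed; :venue and :halls are managed via the associations
  # above, since only :name is cast here.)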
end
| 21.454545 | 65 | 0.697034 |