Dataset schema (for string columns, min/max are lengths; ⌀ marks a nullable column):

| Column | Type | Min | Max | Nullable |
|---|---|---|---|---|
| hexsha | string | 40 | 40 | |
| size | int64 | 2 | 991k | |
| ext | string (2 classes) | | | |
| lang | string (1 class) | | | |
| max_stars_repo_path | string | 4 | 208 | |
| max_stars_repo_name | string | 6 | 106 | |
| max_stars_repo_head_hexsha | string | 40 | 40 | |
| max_stars_repo_licenses | sequence | | | |
| max_stars_count | int64 | 1 | 33.5k | ⌀ |
| max_stars_repo_stars_event_min_datetime | string | 24 | 24 | ⌀ |
| max_stars_repo_stars_event_max_datetime | string | 24 | 24 | ⌀ |
| max_issues_repo_path | string | 4 | 208 | |
| max_issues_repo_name | string | 6 | 106 | |
| max_issues_repo_head_hexsha | string | 40 | 40 | |
| max_issues_repo_licenses | sequence | | | |
| max_issues_count | int64 | 1 | 16.3k | ⌀ |
| max_issues_repo_issues_event_min_datetime | string | 24 | 24 | ⌀ |
| max_issues_repo_issues_event_max_datetime | string | 24 | 24 | ⌀ |
| max_forks_repo_path | string | 4 | 208 | |
| max_forks_repo_name | string | 6 | 106 | |
| max_forks_repo_head_hexsha | string | 40 | 40 | |
| max_forks_repo_licenses | sequence | | | |
| max_forks_count | int64 | 1 | 6.91k | ⌀ |
| max_forks_repo_forks_event_min_datetime | string | 24 | 24 | ⌀ |
| max_forks_repo_forks_event_max_datetime | string | 24 | 24 | ⌀ |
| content | string | 2 | 991k | |
| avg_line_length | float64 | 1 | 36k | |
| max_line_length | int64 | 1 | 977k | |
| alphanum_fraction | float64 | 0 | 1 | |

## `test/json_schema_test_suite/draft4/properties_test.exs` (kianmeng/xema, MIT)

```elixir
defmodule JsonSchemaTestSuite.Draft4.PropertiesTest do
use ExUnit.Case
import Xema, only: [valid?: 2]
describe ~s|object properties validation| do
setup do
%{
schema:
Xema.from_json_schema(
%{"properties" => %{"bar" => %{"type" => "string"}, "foo" => %{"type" => "integer"}}},
draft: "draft4",
atom: :force
)
}
end
test ~s|both properties present and valid is valid|, %{schema: schema} do
assert valid?(schema, %{"bar" => "baz", "foo" => 1})
end
test ~s|one property invalid is invalid|, %{schema: schema} do
refute valid?(schema, %{"bar" => %{}, "foo" => 1})
end
test ~s|both properties invalid is invalid|, %{schema: schema} do
refute valid?(schema, %{"bar" => %{}, "foo" => []})
end
test ~s|doesn't invalidate other properties|, %{schema: schema} do
assert valid?(schema, %{"quux" => []})
end
test ~s|ignores arrays|, %{schema: schema} do
assert valid?(schema, [])
end
test ~s|ignores other non-objects|, %{schema: schema} do
assert valid?(schema, 12)
end
end
describe ~s|properties, patternProperties, additionalProperties interaction| do
setup do
%{
schema:
Xema.from_json_schema(
%{
"additionalProperties" => %{"type" => "integer"},
"patternProperties" => %{"f.o" => %{"minItems" => 2}},
"properties" => %{
"bar" => %{"type" => "array"},
"foo" => %{"maxItems" => 3, "type" => "array"}
}
},
draft: "draft4",
atom: :force
)
}
end
test ~s|property validates property|, %{schema: schema} do
assert valid?(schema, %{"foo" => [1, 2]})
end
test ~s|property invalidates property|, %{schema: schema} do
refute valid?(schema, %{"foo" => [1, 2, 3, 4]})
end
test ~s|patternProperty invalidates property|, %{schema: schema} do
refute valid?(schema, %{"foo" => []})
end
test ~s|patternProperty validates nonproperty|, %{schema: schema} do
assert valid?(schema, %{"fxo" => [1, 2]})
end
test ~s|patternProperty invalidates nonproperty|, %{schema: schema} do
refute valid?(schema, %{"fxo" => []})
end
test ~s|additionalProperty ignores property|, %{schema: schema} do
assert valid?(schema, %{"bar" => []})
end
test ~s|additionalProperty validates others|, %{schema: schema} do
assert valid?(schema, %{"quux" => 3})
end
test ~s|additionalProperty invalidates others|, %{schema: schema} do
refute valid?(schema, %{"quux" => "foo"})
end
end
describe ~s|properties with escaped characters| do
setup do
%{
schema:
Xema.from_json_schema(
%{
"properties" => %{
"foo\tbar" => %{"type" => "number"},
"foo\nbar" => %{"type" => "number"},
"foo\fbar" => %{"type" => "number"},
"foo\rbar" => %{"type" => "number"},
"foo\"bar" => %{"type" => "number"},
"foo\\bar" => %{"type" => "number"}
}
},
draft: "draft4",
atom: :force
)
}
end
test ~s|object with all numbers is valid|, %{schema: schema} do
assert valid?(schema, %{
"foo\tbar" => 1,
"foo\nbar" => 1,
"foo\fbar" => 1,
"foo\rbar" => 1,
"foo\"bar" => 1,
"foo\\bar" => 1
})
end
test ~s|object with strings is invalid|, %{schema: schema} do
refute valid?(schema, %{
"foo\tbar" => "1",
"foo\nbar" => "1",
"foo\fbar" => "1",
"foo\rbar" => "1",
"foo\"bar" => "1",
"foo\\bar" => "1"
})
end
end
end
```

## `config/bbb.exs` (trarbr/nerves_livebook, Apache-2.0)

```elixir
import Config
# Configure the network using vintage_net
# See https://github.com/nerves-networking/vintage_net for more information
config :vintage_net,
config: [
{"usb0", %{type: VintageNetDirect}},
{"eth0", %{type: VintageNetEthernet, ipv4: %{method: :dhcp}}},
{"wlan0", %{type: VintageNetWiFi}}
]
# Beagleboards typically have 4 LEDs
#
# beaglebone:green:usr0 is a heartbeat
# beaglebone:green:usr1 is mmc0 activity
# beaglebone:green:usr2 is unset
# beaglebone:green:usr3 is mmc1 activity
config :nerves_livebook, :ui, led: "beaglebone:green:usr2"
```

## `config/config.exs` (pedromcorreia/portal-construindo-sabere, MIT)

```elixir
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :portalcs,
ecto_repos: [Portalcs.Repo]
# Configures the endpoint
config :portalcs, PortalcsWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "v2w5jJ7dRUSsbWfgcEaBGdzHiGfweKO4cpOBLJ+An0cVPP+drpTwJFyoCxyNFho8",
render_errors: [view: PortalcsWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Portalcs.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
# %% Coherence Configuration %% Don't remove this line
config :coherence,
user_schema: Portalcs.Coherence.User,
repo: Portalcs.Repo,
module: Portalcs,
web_module: PortalcsWeb,
router: PortalcsWeb.Router,
messages_backend: PortalcsWeb.Coherence.Messages,
logged_out_url: "/",
email_from_name: "Your Name",
email_from_email: "[email protected]",
opts: [:authenticatable, :recoverable, :lockable, :trackable, :unlockable_with_token, :invitable, :registerable]
config :coherence, PortalcsWeb.Coherence.Mailer,
adapter: Swoosh.Adapters.Sendgrid,
api_key: "your api key here"
# %% End Coherence Configuration %%
```

## `lib/nostalgic_games_web/controllers/console_controller.ex` (rafaelcorreia/nostalgic_games, MIT)

```elixir
defmodule NostalgicGamesWeb.ConsoleController do
use NostalgicGamesWeb, :controller
action_fallback NostalgicGamesWeb.FallbackController
def create(conn, params \\ :empty) do
params
|> NostalgicGames.create_console()
|> handle_response(conn, "create.json", :ok)
end
def delete(conn, %{"id" => id}) do
id
|> NostalgicGames.delete_console()
|> handle_delete(conn)
end
defp handle_delete({:ok, _}, conn) do
conn
|> put_status(:no_content)
|> text("")
end
defp handle_delete({:error, _reason} = error, _conn), do: error
def index(conn, _params) do
consoles = NostalgicGames.read_all_console()
render(conn, "index.json", consoles: consoles)
end
def show(conn, %{"id" => id}) do
id
|> NostalgicGames.read_console()
|> handle_response(conn, "show.json", :ok)
end
defp handle_response({:ok, console}, conn, view, status) do
conn
|> put_status(status)
|> render(view, console: console)
end
defp handle_response({:error, _changeset} = error, _conn, _view, _status), do: error
end
```

## `lib/bmp280/sensor/bme680_sensor.ex` (dkuku/bmp280, Apache-2.0)

```elixir
defmodule BMP280.BME680Sensor do
@moduledoc false
alias BMP280.{BME680Calibration, BME680Comm, Calc, Comm, Measurement}
@behaviour BMP280.Sensor
@type raw_samples() :: %{
raw_pressure: non_neg_integer(),
raw_temperature: non_neg_integer(),
raw_humidity: non_neg_integer(),
raw_gas_resistance: non_neg_integer(),
raw_gas_range: non_neg_integer()
}
@type heater_duration_ms() :: 1..4032
@type heater_temperature_c() :: 200..400
@heater_temperature_c 300
@heater_duration_ms 100
@ambient_temperature_c 25
@impl true
def init(%{transport: transport} = initial_state) do
with :ok <- Comm.reset(transport),
{:ok, cal_binary} <- BME680Comm.read_calibration(transport),
calibration <- BME680Calibration.from_binary(cal_binary),
:ok <- BME680Comm.set_oversampling(transport),
:ok <- BME680Comm.set_filter(transport),
:ok <- BME680Comm.enable_gas_sensor(transport),
:ok <-
BME680Comm.set_gas_heater_temperature(
transport,
heater_resistance_code(calibration, @heater_temperature_c, @ambient_temperature_c)
),
:ok <-
BME680Comm.set_gas_heater_duration(
transport,
heater_duration_code(@heater_duration_ms)
),
:ok <- BME680Comm.set_gas_heater_profile(transport, 0),
do: %{initial_state | calibration: calibration}
end
@impl true
def read(%{transport: transport} = state) do
case BME680Comm.read_raw_samples(transport) do
{:ok, raw_samples} -> {:ok, measurement_from_raw_samples(raw_samples, state)}
error -> error
end
end
@spec measurement_from_raw_samples(<<_::80>>, BMP280.Sensor.t()) :: BMP280.Measurement.t()
def measurement_from_raw_samples(raw_samples, state) do
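    # The 10-byte burst read packs 20-bit pressure and temperature ADC values
    # plus a 16-bit humidity value; the trailing bytes carry the 10-bit gas
    # resistance ADC value and its 4-bit range code.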
<<raw_pressure::20, _::4, raw_temperature::20, _::4, raw_humidity::16, _::16>> = raw_samples
<<_::64, raw_gas_resistance::10, _::2, raw_gas_range::4>> = raw_samples
%{calibration: calibration, sea_level_pa: sea_level_pa} = state
temperature_c = BME680Calibration.raw_to_temperature(calibration, raw_temperature)
pressure_pa = BME680Calibration.raw_to_pressure(calibration, temperature_c, raw_pressure)
humidity_rh = BME680Calibration.raw_to_humidity(calibration, temperature_c, raw_humidity)
gas_resistance_ohms =
BME680Calibration.raw_to_gas_resistance(
calibration,
raw_gas_resistance,
raw_gas_range
)
# Derived calculations
altitude_m = Calc.pressure_to_altitude(pressure_pa, sea_level_pa)
dew_point_c = Calc.dew_point(humidity_rh, temperature_c)
%Measurement{
temperature_c: temperature_c,
pressure_pa: pressure_pa,
altitude_m: altitude_m,
humidity_rh: humidity_rh,
dew_point_c: dew_point_c,
gas_resistance_ohms: gas_resistance_ohms,
timestamp_ms: System.monotonic_time(:millisecond)
}
end
@doc """
Convert the heater temperature into a register code.
## Examples
iex> cal = %{
...> par_gh1: -30,
...> par_gh2: -5969,
...> par_gh3: 18,
...> res_heat_val: 50,
...> res_heat_range: 1,
...> range_switching_error: 1
...> }
iex> BME680Sensor.heater_resistance_code(cal, 300, 28)
112
"""
@spec heater_resistance_code(BME680Calibration.t(), heater_temperature_c(), integer()) ::
integer()
def heater_resistance_code(cal, heater_temp_c, amb_temp_c) do
%{
par_gh1: par_gh1,
par_gh2: par_gh2,
par_gh3: par_gh3,
res_heat_range: res_heat_range,
res_heat_val: res_heat_val
} = cal
var1 = par_gh1 / 16.0 + 49.0
var2 = par_gh2 / 32_768.0 * 0.0005 + 0.00235
var3 = par_gh3 / 1024.0
var4 = var1 * (1.0 + var2 * heater_temp_c)
var5 = var4 + var3 * amb_temp_c
round(
3.4 *
(var5 * (4.0 / (4.0 + res_heat_range)) *
(1.0 /
(1.0 +
res_heat_val * 0.002)) - 25)
)
end
@doc """
Convert the heater duration milliseconds into a register code. Heating durations between 1 ms and
4032 ms can be configured. In practice, approximately 20–30 ms are necessary for the heater to
reach the intended target temperature.
## Examples
iex> BME680Sensor.heater_duration_code(63)
63
iex> BME680Sensor.heater_duration_code(64)
80
iex> BME680Sensor.heater_duration_code(100)
89
iex> BME680Sensor.heater_duration_code(4032)
255
iex> BME680Sensor.heater_duration_code(4033)
** (FunctionClauseError) no function clause matching in BMP280.BME680Sensor.heater_duration_code/2
"""
@spec heater_duration_code(heater_duration_ms(), non_neg_integer()) :: non_neg_integer()
def heater_duration_code(duration, factor \\ 0)
def heater_duration_code(duration, factor) when duration in 64..4032 do
duration |> div(4) |> heater_duration_code(factor + 1)
end
def heater_duration_code(duration, factor) when duration in 1..63 do
duration + factor * 64
end
end
```

## `mix.exs` (mosic/exdisque, MIT)

```elixir
defmodule ExDisque.Mixfile do
use Mix.Project
def project do
[
app: :exdisque,
version: "0.0.1",
elixir: "~> 1.0",
name: "exdisque",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
description: "Elixir client library for Disque: https://github.com/antirez/disque"
]
end
def application do
[]
end
defp deps do
[{:eredis, "~> 1.0"}]
end
defp package do
[
contributors: ["Miloš Mošić"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/mosic/exdisque"}
]
end
end
```

## `web/views/layout_view.ex` (jschoch/reflux_eventbroker_react_phoenix_elixir, MIT)

```elixir
defmodule RefluxEventbrokerReactPhoenixElixir.LayoutView do
use RefluxEventbrokerReactPhoenixElixir.Web, :view
end
```

## `clients/cloud_asset/lib/google_api/cloud_asset/v1/model/create_feed_request.ex` (pojiro/elixir-google-api, Apache-2.0)

```elixir
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.V1.Model.CreateFeedRequest do
@moduledoc """
Create asset feed request.
## Attributes
* `feed` (*type:* `GoogleApi.CloudAsset.V1.Model.Feed.t`, *default:* `nil`) - Required. The feed details. The field `name` must be empty and it will be generated in the format of: projects/project_number/feeds/feed_id folders/folder_number/feeds/feed_id organizations/organization_number/feeds/feed_id
* `feedId` (*type:* `String.t`, *default:* `nil`) - Required. This is the client-assigned asset feed identifier and it needs to be unique under a specific parent project/folder/organization.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:feed => GoogleApi.CloudAsset.V1.Model.Feed.t() | nil,
:feedId => String.t() | nil
}
field(:feed, as: GoogleApi.CloudAsset.V1.Model.Feed)
field(:feedId)
end
defimpl Poison.Decoder, for: GoogleApi.CloudAsset.V1.Model.CreateFeedRequest do
def decode(value, options) do
GoogleApi.CloudAsset.V1.Model.CreateFeedRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudAsset.V1.Model.CreateFeedRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
```

## `test/elixir_bank_web/resolvers/user_test.exs` (spacexcorp/elixir-bank, MIT)

```elixir
defmodule ElixirBankWeb.Resolvers.UserTest do
use ElixirBankWeb.ConnCase
alias Ecto.UUID
alias ElixirBank.Accounts.{Profile, User}
alias ElixirBank.Repo
alias ElixirBankWeb.Resolvers
import ElixirBank.Factory
describe "get_user/2" do
test "should return a user when the context does have an user_id and user does exist" do
{:ok, user} = insert(User)
info = %{context: %{user_id: user.id}}
assert {:ok, fetched_user = %User{}} = Resolvers.User.get_user(nil, info)
assert fetched_user.id == user.id
end
test "should return an error when the context does have an user_id and user does not exist" do
info = %{context: %{user_id: UUID.generate()}}
assert {:error, "user does not exist"} == Resolvers.User.get_user(nil, info)
end
test "should return an error when the context does not have an user_id" do
assert {:error, "user does not exist"} == Resolvers.User.get_user(nil, nil)
end
end
describe "create_user/2" do
test "should return a created user with auth token and a profile when attrs are valid" do
{:ok, attrs} = params_for(User)
assert {:ok, user = %User{}} = Resolvers.User.create_user(attrs, nil)
refute user.auth_tokens == []
profile = Profile |> Repo.all() |> Enum.find(&(&1.user_id == user.id))
refute is_nil(profile)
end
test "should return an error when attrs are not valid" do
assert {:error, "could not create user"} == Resolvers.User.create_user(%{}, nil)
end
end
end
```

## `lib/lowendinsight_get/counter_agent.ex` (haldihri3/lowendinsight-get, BSD-3-Clause)

```elixir
defmodule LowendinsightGet.CounterAgent do
use Agent
require Logger
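
  # Keeps a named Agent holding a {pid => status} map for spawned analysis
  # processes plus a completed/total tally, logging progress as they finish.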
def new_counter(number_of_processes) do
Agent.start_link(fn -> { %{}, %{completed: 0, total: number_of_processes}} end,
name: :counter)
end
def get() do
Agent.get(:counter, fn state -> state end)
end
def add(pid, url) do
Agent.update(:counter, fn {proc, log} ->
Logger.info("running process ##{map_size(proc) + 1} -> #{url}")
{Map.put(proc, pid, :running), log}
end)
update()
end
def increment(pid) do
Agent.update(:counter, fn {proc, log} ->
if Map.fetch(proc, pid) == {:ok, :running} do
log_status({proc, log})
{Map.put(proc, pid, :completed), Map.put(log, :completed, log.completed + 1)}
else {proc, log}
end
end)
end
def update() do
{proc, _log} = get()
Enum.filter(proc, fn {_pid, status} -> status == :running end)
|> Enum.each(fn {pid, _status} -> if !Process.alive?(pid), do: increment(pid) end)
end
def log_status({proc, log}) do
completed = log.completed + 1
cond do
log.total > log.completed ->
Logger.info("completed #{round(completed / log.total * 100)}% " <>
(if (completed < log.total), do: " ", else: "") <> "| running: #{map_size(proc) - completed} | total urls: #{log.total}")
:logged
true -> :no_log
end
end
def update_and_stop() do
update()
Agent.stop(:counter)
end
end
```

## `lib/mutable.ex` (melpon/mutable, MIT)

```elixir
defmodule Mutable do
@moduledoc """
Temporary side-effect module
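
  Values are kept per-process (in the process dictionary) as a stack, so
  nested `run/2` calls shadow and then restore outer values.

  ## Example

  A minimal usage sketch based on the functions defined below:

      Mutable.run([counter: 0], fn ->
        Mutable.update(:counter, &(&1 + 1))
        Mutable.get(:counter) #=> 1
      end)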
"""
@mutable_key :mutable_key
@mutable_undefined :mutable_undefined
def run(keyword, fun) do
for {key, value} <- keyword do
push(key, value)
end
try do
fun.()
after
for {key, _} <- keyword do
pop(key)
end
end
end
defp push(key, value) do
mkey = {@mutable_key, key}
values = Process.get(mkey, [])
Process.put(mkey, [value | values])
end
defp pop(key) do
mkey = {@mutable_key, key}
case Process.get(mkey, @mutable_undefined) do
@mutable_undefined -> raise KeyError, key: key, term: get()
[_] -> Process.delete(mkey)
[_ | values] -> Process.put(mkey, values)
end
end
def get() do
Process.get()
|> Enum.filter(fn
{{@mutable_key, _key}, _values} -> true
_ -> false
end)
|> Enum.map(fn {{@mutable_key, key}, [value | _]} -> {key, value} end)
end
def get(key) do
mkey = {@mutable_key, key}
case Process.get(mkey, @mutable_undefined) do
@mutable_undefined -> raise KeyError, key: key, term: get()
[value | _] -> value
end
end
def put(key, value) do
mkey = {@mutable_key, key}
case Process.get(mkey, @mutable_undefined) do
@mutable_undefined ->
raise KeyError, key: key, term: get()
[old_value | values] ->
_ = Process.put(mkey, [value | values])
old_value
end
end
def update(key, fun) do
value = get(key)
new_value = fun.(value)
put(key, new_value)
{new_value, value}
end
end
```

## `test/utils_test.exs` (c4710n/ex_alipay, MIT)

```elixir
defmodule ExAlipayUtilsTest do
use ExUnit.Case
doctest ExAlipay
alias ExAlipay.Utils
defmodule AlipayClient do
use ExAlipay.Client,
appid: "2019111111",
pid: "2019121212",
public_key:
~s(-----BEGIN PUBLIC KEY-----\nMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCfq836Ik1FTMdzLF8PwHuUZhkfSikepLOXCBGXs+dNHq7+jBK58veZjTGlDQFF5x06O28Cf0n2DkalGoOw6zDzTyUBzGmdH3n89uh7imFDATxZjDSMVLkdEVivpFePuyBnl78udqrLHG+Tjgqts1/DPAFbDdIwVQy+xrSnVvLJ/QIDAQAB\n-----END PUBLIC KEY-----),
private_key:
~s(-----BEGIN RSA PRIVATE KEY-----\nMIICXAIBAAKBgQCfq836Ik1FTMdzLF8PwHuUZhkfSikepLOXCBGXs+dNHq7+jBK58veZjTGlDQFF5x06O28Cf0n2DkalGoOw6zDzTyUBzGmdH3n89uh7imFDATxZjDSMVLkdEVivpFePuyBnl78udqrLHG+Tjgqts1/DPAFbDdIwVQy+xrSnVvLJ/QIDAQABAoGAPi7XmemP9EQxjM4j+2t39VRJxmDIYNG9yzzuNQlwNB2WAzYj+N0BxoAxbFkDPOkD/fC1i+BsunHW22fXD6iYuBomuO8DERatA1Hp36/jLoJLnfxQw/w/ToC68i8wuOMe0iyVUNrV+T/ecYMvYLTtEzw8jB4NfvaBpZnUEy261XUCQQDm2CZYwRnmP9diMh7mKQHdCTUQ5crWyqImy8F0Y10gMO4j/kchWqR+746GapwutJnt7MnwJr4lO5E7Y5W3HI2zAkEAsRIjyDFIcHZWf6/qnvSJbI5fxUrr2WTMa8ZS6z+Ik0ueXoE1KnS1v1CabD+/8ynCsXixycVvHhZx9xqntS5RjwJADm1z+BgZhkp3K6v2QmxNsYLhziyOgN4pREN3085iA6ELQTSjPXJs1YIjZkNDf6fJ9xTViizhtXIDobKXqNogAQJAKOwSTO/m1+bhcr0LMhU9tVLqG0SHYUSEYdwBydBzFeeCAEFIMjmqzz4nkiDhkabzEeTc4c65MXDqgbstSxgbTQJBALkt3Xjun50XUDFY4YIVIj8c3Zi74HpXl667lzstf2sk8hwB7SLg3zT53o2RUjam4jk1GjFp8B68xT5B5WY2jOM=\n-----END RSA PRIVATE KEY-----),
sandbox?: true
def client, do: @client
end
test "create sign" do
params = %{b: "b", a: "a"}
sign_str = Utils.create_sign_str(params)
assert sign_str == "a=a&b=b"
client = AlipayClient.client()
assert Utils.create_sign(client, params) == Utils.create_sign(client, sign_str)
end
test "create request" do
content = %{b: "b", a: "a"}
client = AlipayClient.client()
request_str = Utils.build_request_str(client, :page_pay, content, %{})
request_url = Utils.build_request_url(client, :page_pay, content, %{})
gate_way = Utils.get_gateway(client)
assert String.starts_with?(request_url, gate_way)
assert not String.starts_with?(request_str, gate_way)
params = URI.decode_query(request_str)
%{
"app_id" => "2019111111",
"biz_content" => _biz_content,
"charset" => "utf-8",
"format" => "JSON",
"method" => "page_pay",
"sign" => sign,
"sign_type" => "RSA2",
"timestamp" => _timestamp,
"version" => "1.0"
} = params
{^sign, params} = Map.pop(params, "sign")
assert sign == Utils.create_sign(client, params)
end
end
```

## `config/config.exs` (MiguelERuiz/pokedex, MIT)

```elixir
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :pokedex, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:pokedex, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
```

## `config/config.exs` (erickgnavarro/phoenix_demo_chat, MIT)

```elixir
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# Configures the endpoint
config :demo_chat, DemoChat.Endpoint,
url: [host: "localhost"],
root: Path.expand("..", __DIR__),
secret_key_base: "cToR/bYBF6bS1AaJTrLADUWO0eRMx1uy+DuqdJPFvAGMtyZqfCmPhwC2oDbjGxpV",
debug_errors: false,
pubsub: [name: DemoChat.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
```

## `.iex.exs` (kevinastone/phoenixbin, MIT)

```elixir
alias Requestbox.Session
alias Requestbox.Request
alias Requestbox.Vanity
alias Requestbox.Repo
alias Requestbox.Router.Helpers
```

## `lib/teslamate/convert.ex` (markusdd/teslamate, MIT)

```elixir
defmodule TeslaMate.Convert do
def mph_to_kmh(nil), do: nil
def mph_to_kmh(mph), do: round(mph * 1.60934)
def miles_to_km(nil, _precision), do: nil
def miles_to_km(miles, precision), do: Float.round(miles / 0.62137, precision)
def km_to_miles(nil, _precision), do: nil
def km_to_miles(km, precision), do: Float.round(km * 0.62137, precision)
def m_to_ft(nil), do: nil
def m_to_ft(m), do: m * 3.28084
def ft_to_m(nil), do: nil
def ft_to_m(ft), do: ft / 3.28084
@minute 60
@hour @minute * 60
@day @hour * 24
@week @day * 7
@divisor [@week, @day, @hour, @minute, 1]
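
  # Renders a duration as its two most significant non-zero units, e.g.
  # 3751 s -> ["1 h", "2 min"]; durations under 5 seconds return nil.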
def sec_to_str(sec) when sec < 5, do: nil
def sec_to_str(sec) when is_number(sec) do
{_, [s, m, h, d, w]} =
Enum.reduce(@divisor, {sec, []}, fn divisor, {n, acc} ->
{rem(n, divisor), [div(n, divisor) | acc]}
end)
["#{w} wk", "#{d} d", "#{h} h", "#{m} min", "#{s} s"]
|> Enum.reject(&String.starts_with?(&1, "0"))
|> Enum.take(2)
end
end
```

## `lib/timeularex/api.ex` (r-frederick/timeularex, MIT)

```elixir
defmodule Timeularex.API do
use HTTPoison.Base
alias Timeularex.Config
@base_url "https://api.timeular.com/api/v2"
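
  # HTTPoison.Base callbacks: prefix each request with the API base URL,
  # JSON-encode outgoing bodies, send a JSON content-type header, and
  # JSON-decode response bodies.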
def process_url(url) do
@base_url <> url
end
def process_request_body(body) do
body
|> Poison.encode!
end
def process_request_headers(headers) do
headers ++ [{"Content-type", "application/json"}]
end
def process_response_body(body) do
body
|> Poison.decode!
end
end
```

## `lib/tasks/version/next.ex` (bulld0zer/elixir-version-release, MIT)

```elixir
defmodule Mix.Tasks.Version.Next do
use Mix.Task
require Logger
alias VersionRelease.Changelog
alias VersionRelease.Config
alias VersionRelease.Git
alias VersionRelease.Hex
alias VersionRelease.Version
def run(opts) do
opts
|> Config.create()
|> Changelog.get_release_changes()
|> Git.is_clean()
|> bump_version()
|> Changelog.update()
|> Version.update_mix_file()
|> Git.tag_with_new_version()
|> Hex.publish()
|> Version.next_dev_iteration()
|> Git.merge()
|> Git.push()
end
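
  # Pick the bump level from the parsed changelog: bump major if any major
  # changes were recorded, else minor if any minor changes, else patch.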
defp bump_version(
%{
changelog: %{
changes: %{
major: major_changes,
minor: minor_changes,
patch: patch_changes
}
}
} = config
) do
%{
major_count: major_changes |> Enum.count(),
minor_count: minor_changes |> Enum.count(),
patch_count: patch_changes |> Enum.count()
}
|> case do
%{major_count: count} when count > 0 ->
Mix.Tasks.Version.Major.bump_major(config)
%{minor_count: count} when count > 0 ->
Mix.Tasks.Version.Minor.bump_minor(config)
_ ->
Mix.Tasks.Version.Patch.bump_patch(config)
end
end
end
```

## `lib/phoenix_live_view/helpers.ex` (thenrio/phoenix_live_view, MIT)

```elixir
defmodule Phoenix.LiveView.Helpers do
@moduledoc """
A collection of helpers to be imported into your views.
"""
# TODO: Convert all functions with the `live_` prefix to function components?
alias Phoenix.LiveView
alias Phoenix.LiveView.{Component, Socket, Static}
@doc """
Provides `~L` sigil with HTML safe Live EEx syntax inside source files.
iex> ~L"\""
...> Hello <%= "world" %>
...> "\""
{:safe, ["Hello ", "world", "\\n"]}
"""
@doc deprecated: "Use ~H instead"
defmacro sigil_L({:<<>>, meta, [expr]}, []) do
options = [
engine: Phoenix.LiveView.Engine,
file: __CALLER__.file,
line: __CALLER__.line + 1,
indentation: meta[:indentation] || 0
]
EEx.compile_string(expr, options)
end
@doc ~S'''
Provides `~H` sigil with HTML-safe and HTML-aware syntax inside source files.
> Note: `HEEx` requires Elixir >= `1.12.0` in order to provide accurate
> file:line:column information in error messages. Earlier Elixir versions will
> work but will show inaccurate error messages.
`HEEx` is a HTML-aware and component-friendly extension of `EEx` that provides:
* Built-in handling of HTML attributes
* An HTML-like notation for injecting function components
* Compile-time validation of the structure of the template
* The ability to minimize the amount of data sent over the wire
## Example
~H"""
<div title="My div" class={@class}>
<p>Hello <%= @name %></p>
<MyApp.Weather.city name="Kraków"/>
</div>
"""
## Syntax
`HEEx` is built on top of Embedded Elixir (`EEx`), a templating syntax that uses
`<%= ... %>` for interpolating results. In this section, we are going to cover the
basic constructs in `HEEx` templates as well as its syntax extensions.
### Interpolation
Both `HEEx` and `EEx` templates use `<%= ... %>` for interpolating code inside the body
of HTML tags:
<p>Hello, <%= @name %></p>
Similarly, conditionals and other block Elixir constructs are supported:
<%= if @show_greeting? do %>
<p>Hello, <%= @name %></p>
<% end %>
Note we don't include the equal sign `=` in the closing `<% end %>` tag
(because the closing tag does not output anything).
  There is one important difference between `HEEx` and Elixir's built-in `EEx`.
`HEEx` uses a specific annotation for interpolating HTML tags and attributes.
Let's check it out.
### HEEx extension: Defining attributes
  Since `HEEx` must parse and validate the HTML structure, code interpolation using
  `<%= ... %>` and `<% ... %>` is restricted to the body (inner content) of
  HTML/component nodes and cannot be applied within tags.
For instance, the following syntax is invalid:
<div class="<%= @class %>">
...
</div>
Instead do:
<div class={@class}>
...
</div>
You can put any Elixir expression between `{ ... }`. For example, if you want
  to set classes, where some are static and others are dynamic, you can use
  string interpolation:
<div class={"btn btn-#{@type}"}>
...
</div>
  The following attribute values have special meaning (see the example after this list):
* `true` - if a value is `true`, the attribute is rendered with no value at all.
For example, `<input required={true}>` is the same as `<input required>`;
* `false` or `nil` - if a value is `false` or `nil`, the attribute is not rendered;
* `list` (only for the `class` attribute) - each element of the list is considered
as a different class. `nil` and `false` elements are discarded.
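
  For example, combining these rules (with hypothetical `@admin?` boolean and
  `@size_class` string-or-`nil` assigns):

      <input type="checkbox" checked={@admin?}>
      <div class={["btn", @size_class, @admin? && "btn-admin"]}>...</div>

  When `@admin?` is `false`, the `checked` attribute is omitted entirely and
  the `"btn-admin"` entry is discarded from the class list.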
For multiple dynamic attributes, you can use the same notation but without
assigning the expression to any specific attribute.
<div {@dynamic_attrs}>
...
</div>
The expression inside `{...}` must be either a keyword list or a map containing
the key-value pairs representing the dynamic attributes.
### HEEx extension: Defining function components
Function components are stateless components implemented as pure functions
with the help of the `Phoenix.Component` module. They can be either local
(same module) or remote (external module).
`HEEx` allows invoking these function components directly in the template
using an HTML-like notation. For example, a remote function:
<MyApp.Weather.city name="Kraków"/>
A local function can be invoked with a leading dot:
<.city name="Kraków"/>
where the component could be defined as follows:
defmodule MyApp.Weather do
use Phoenix.Component
def city(assigns) do
~H"""
The chosen city is: <%= @name %>.
"""
end
def country(assigns) do
~H"""
The chosen country is: <%= @name %>.
"""
end
end
It is typically best to group related functions into a single module, as
opposed to having many modules with a single `render/1` function. Function
components support other important features, such as slots. You can learn
more about components in `Phoenix.Component`.
'''
defmacro sigil_H({:<<>>, meta, [expr]}, []) do
unless Macro.Env.has_var?(__CALLER__, {:assigns, nil}) do
raise "~H requires a variable named \"assigns\" to exist and be set to a map"
end
options = [
engine: Phoenix.LiveView.HTMLEngine,
file: __CALLER__.file,
line: __CALLER__.line + 1,
module: __CALLER__.module,
indentation: meta[:indentation] || 0
]
EEx.compile_string(expr, options)
end
@doc ~S'''
Filters the assigns as a list of keywords for use in dynamic tag attributes.
Useful for transforming caller assigns into dynamic attributes while
stripping reserved keys from the result.
## Examples
Imagine the following `my_link` component which allows a caller
to pass a `new_window` assign, along with any other attributes they
would like to add to the element, such as class, data attributes, etc:
<.my_link href="/" id={@id} new_window={true} class="my-class">Home</.my_link>
We could support the dynamic attributes with the following component:
def my_link(assigns) do
target = if assigns[:new_window], do: "_blank", else: false
extra = assigns_to_attributes(assigns, [:new_window])
assigns =
assigns
|> Phoenix.LiveView.assign(:target, target)
|> Phoenix.LiveView.assign(:extra, extra)
~H"""
<a href={@href} target={@target} {@extra}>
<%= render_slot(@inner_block) %>
</a>
"""
end
The optional second argument to `assigns_to_attributes` takes a list of keys to exclude
which will typically be the keys reserved by the component itself which either
do not belong in the markup, or are already handled explicitly by the component.
'''
def assigns_to_attributes(assigns, exclude \\ []) do
excluded_keys = [:__changed__, :__slot__, :inner_block, :myself, :flash, :socket] ++ exclude
for {key, val} <- assigns, key not in excluded_keys, into: [], do: {key, val}
end
@doc false
def live_patch(opts) when is_list(opts) do
live_link("patch", Keyword.fetch!(opts, :do), Keyword.delete(opts, :do))
end
@doc """
Generates a link that will patch the current LiveView.
When navigating to the current LiveView,
`c:Phoenix.LiveView.handle_params/3` is
immediately invoked to handle the change of params and URL state.
Then the new state is pushed to the client, without reloading the
whole page while also maintaining the current scroll position.
For live redirects to another LiveView, use `live_redirect/2`.
## Options
* `:to` - the required path to link to.
* `:replace` - the flag to replace the current history or push a new state.
Defaults `false`.
All other options are forwarded to the anchor tag.
## Examples
<%= live_patch "home", to: Routes.page_path(@socket, :index) %>
<%= live_patch "next", to: Routes.live_path(@socket, MyLive, @page + 1) %>
<%= live_patch to: Routes.live_path(@socket, MyLive, dir: :asc), replace: false do %>
Sort By Price
<% end %>
"""
def live_patch(text, opts)
def live_patch(%Socket{}, _) do
raise """
you are invoking live_patch/2 with a socket but a socket is not expected.
If you want to live_patch/2 inside a LiveView, use push_patch/2 instead.
    If you are inside a template, make sure the first argument is a string.
"""
end
def live_patch(opts, do: block) when is_list(opts) do
live_link("patch", block, opts)
end
def live_patch(text, opts) when is_list(opts) do
live_link("patch", text, opts)
end
@doc false
def live_redirect(opts) when is_list(opts) do
live_link("redirect", Keyword.fetch!(opts, :do), Keyword.delete(opts, :do))
end
@doc """
Generates a link that will redirect to a new LiveView of the same live session.
The current LiveView will be shut down and a new one will be mounted
in its place, without reloading the whole page. This can
also be used to remount the same LiveView, in case you want to start
fresh. If you want to navigate to the same LiveView without remounting
it, use `live_patch/2` instead.
*Note*: The live redirects are only supported between two LiveViews defined
under the same live session. See `Phoenix.LiveView.Router.live_session/3` for
more details.
## Options
* `:to` - the required path to link to.
* `:replace` - the flag to replace the current history or push a new state.
Defaults `false`.
All other options are forwarded to the anchor tag.
## Examples
<%= live_redirect "home", to: Routes.page_path(@socket, :index) %>
<%= live_redirect "next", to: Routes.live_path(@socket, MyLive, @page + 1) %>
<%= live_redirect to: Routes.live_path(@socket, MyLive, dir: :asc), replace: false do %>
Sort By Price
<% end %>
"""
def live_redirect(text, opts)
def live_redirect(%Socket{}, _) do
raise """
you are invoking live_redirect/2 with a socket but a socket is not expected.
If you want to live_redirect/2 inside a LiveView, use push_redirect/2 instead.
    If you are inside a template, make sure the first argument is a string.
"""
end
def live_redirect(opts, do: block) when is_list(opts) do
live_link("redirect", block, opts)
end
def live_redirect(text, opts) when is_list(opts) do
live_link("redirect", text, opts)
end
defp live_link(type, block_or_text, opts) do
uri = Keyword.fetch!(opts, :to)
replace = Keyword.get(opts, :replace, false)
kind = if replace, do: "replace", else: "push"
data = [phx_link: type, phx_link_state: kind]
opts =
opts
|> Keyword.update(:data, data, &Keyword.merge(&1, data))
|> Keyword.put(:href, uri)
Phoenix.HTML.Tag.content_tag(:a, Keyword.delete(opts, :to), do: block_or_text)
end
@doc """
Renders a LiveView within a template.
This is useful in two situations:
* When rendering a child LiveView inside a LiveView
* When rendering a LiveView inside a regular (non-live) controller/view
## Options
* `:session` - a map of binary keys with extra session data to be
serialized and sent to the client. All session data currently in
the connection is automatically available in LiveViews. You can
use this option to provide extra data. Remember all session data
is serialized and sent to the client, so you should always
keep the data in the session to a minimum. For example, instead
of storing a User struct, you should store the "user_id" and load
the User when the LiveView mounts.
* `:container` - an optional tuple for the HTML tag and DOM
attributes to be used for the LiveView container. For example:
`{:li, style: "color: blue;"}`. By default it uses the module
definition container. See the "Containers" section below for more
information.
* `:id` - both the DOM ID and the ID to uniquely identify a LiveView.
An `:id` is automatically generated when rendering root LiveViews
but it is a required option when rendering a child LiveView.
* `:sticky` - an optional flag to maintain the LiveView across
live redirects, even if it is nested within another LiveView.
## Examples
When rendering from a controller/view, you can call:
<%= live_render(@conn, MyApp.ThermostatLive) %>
Or:
<%= live_render(@conn, MyApp.ThermostatLive, session: %{"home_id" => @home.id}) %>
Within another LiveView, you must pass the `:id` option:
<%= live_render(@socket, MyApp.ThermostatLive, id: "thermostat") %>
## Containers
When a `LiveView` is rendered, its contents are wrapped in a container.
By default, the container is a `div` tag with a handful of `LiveView`
specific attributes.
The container can be customized in different ways:
* You can change the default `container` on `use Phoenix.LiveView`:
use Phoenix.LiveView, container: {:tr, id: "foo-bar"}
* You can override the container tag and pass extra attributes when
calling `live_render` (as well as on your `live` call in your router):
live_render socket, MyLiveView, container: {:tr, class: "highlight"}
"""
def live_render(conn_or_socket, view, opts \\ [])
def live_render(%Plug.Conn{} = conn, view, opts) do
case Static.render(conn, view, opts) do
{:ok, content, _assigns} ->
content
{:stop, _} ->
raise RuntimeError, "cannot redirect from a child LiveView"
end
end
def live_render(%Socket{} = parent, view, opts) do
Static.nested_render(parent, view, opts)
end
@doc """
A function component for rendering `Phoenix.LiveComponent`
within a parent LiveView.
While `LiveView`s can be nested, each LiveView starts its
own process. A `LiveComponent` provides similar functionality
to `LiveView`, except they run in the same process as the
`LiveView`, with its own encapsulated state. That's why they
are called stateful components.
See `Phoenix.LiveComponent` for more information.
## Examples
`.live_component` requires the component `:module` and its
`:id` to be given:
<.live_component module={MyApp.WeatherComponent} id="thermostat" city="Kraków" />
The `:id` is used to identify this `LiveComponent` throughout the
LiveView lifecycle. Note the `:id` won't necessarily be used as the
DOM ID. That's up to the component.
"""
def live_component(assigns) when is_map(assigns) do
id = assigns[:id]
{module, assigns} =
assigns
|> Map.delete(:__changed__)
|> Map.pop(:module)
if module == nil or not is_atom(module) do
raise ArgumentError,
".live_component expects module={...} to be given and to be an atom, " <>
"got: #{inspect(module)}"
end
if id == nil do
raise ArgumentError, ".live_component expects id={...} to be given, got: nil"
end
case module.__live__() do
%{kind: :component} ->
%Component{id: id, assigns: assigns, component: module}
%{kind: kind} ->
raise ArgumentError, "expected #{inspect(module)} to be a component, but it is a #{kind}"
end
end
def live_component(component) when is_atom(component) do
IO.warn(
"<%= live_component Component %> is deprecated, " <>
"please use <.live_component module={Component} id=\"hello\" /> inside HEEx templates instead"
)
Phoenix.LiveView.Helpers.__live_component__(component.__live__(), %{}, nil)
end
@doc """
Deprecated API for rendering `LiveComponent`.
## Upgrading
In order to migrate from `<%= live_component ... %>` to `<.live_component>`,
you must first:
1. Migrate from `~L` sigil and `.leex` templates to
`~H` sigil and `.heex` templates
2. Then instead of:
```
<%= live_component MyModule, id: "hello" do %>
...
<% end %>
```
You should do:
```
<.live_component module={MyModule} id="hello">
...
</.live_component>
```
3. If your component is using `render_block/2`, replace
it by `render_slot/2`
"""
@doc deprecated: "Use .live_component (live_component/1) instead"
defmacro live_component(component, assigns, do_block \\ []) do
if is_assign?(:socket, component) do
IO.warn(
"passing the @socket to live_component is no longer necessary, " <>
"please remove the socket argument",
Macro.Env.stacktrace(__CALLER__)
)
end
{inner_block, do_block, assigns} =
case {do_block, assigns} do
{[do: do_block], _} -> {rewrite_do!(do_block, :inner_block, __CALLER__), [], assigns}
{_, [do: do_block]} -> {rewrite_do!(do_block, :inner_block, __CALLER__), [], []}
{_, _} -> {nil, do_block, assigns}
end
if match?({:__aliases__, _, _}, component) or is_atom(component) or is_list(assigns) or
is_map(assigns) do
quote do
Phoenix.LiveView.Helpers.__live_component__(
unquote(component).__live__(),
unquote(assigns),
unquote(inner_block)
)
end
else
quote do
case unquote(component) do
%Phoenix.LiveView.Socket{} ->
Phoenix.LiveView.Helpers.__live_component__(
unquote(assigns).__live__(),
unquote(do_block),
unquote(inner_block)
)
component ->
Phoenix.LiveView.Helpers.__live_component__(
component.__live__(),
unquote(assigns),
unquote(inner_block)
)
end
end
end
end
@doc false
def __live_component__(%{kind: :component, module: component}, assigns, inner)
when is_list(assigns) or is_map(assigns) do
assigns = assigns |> Map.new() |> Map.put_new(:id, nil)
assigns = if inner, do: Map.put(assigns, :inner_block, inner), else: assigns
id = assigns[:id]
# TODO: Remove logic from Diff once stateless components are removed.
# TODO: Remove live_component arity checks from Engine
if is_nil(id) and
(function_exported?(component, :handle_event, 3) or
function_exported?(component, :preload, 1)) do
raise "a component #{inspect(component)} that has implemented handle_event/3 or preload/1 " <>
"requires an :id assign to be given"
end
%Component{id: id, assigns: assigns, component: component}
end
def __live_component__(%{kind: kind, module: module}, assigns, _inner)
when is_list(assigns) or is_map(assigns) do
raise "expected #{inspect(module)} to be a component, but it is a #{kind}"
end
defp rewrite_do!(do_block, key, caller) do
if Macro.Env.has_var?(caller, {:assigns, nil}) do
rewrite_do(do_block, key)
else
raise ArgumentError,
"cannot use live_component because the assigns var is unbound/unset"
end
end
@doc """
Renders a component defined by the given function.
This function is rarely invoked directly by users. Instead, it is used by `~H`
to render `Phoenix.Component`s. For example, the following:
<MyApp.Weather.city name="Kraków" />
Is the same as:
<%= component(&MyApp.Weather.city/1, name: "Kraków") %>
"""
def component(func, assigns \\ [])
when (is_function(func, 1) and is_list(assigns)) or is_map(assigns) do
assigns =
case assigns do
%{__changed__: _} -> assigns
_ -> assigns |> Map.new() |> Map.put_new(:__changed__, nil)
end
case func.(assigns) do
%Phoenix.LiveView.Rendered{} = rendered ->
rendered
%Phoenix.LiveView.Component{} = component ->
component
other ->
raise RuntimeError, """
expected #{inspect(func)} to return a %Phoenix.LiveView.Rendered{} struct
Ensure your render function uses ~H to define its template.
Got:
#{inspect(other)}
"""
end
end
@doc """
Renders the `@inner_block` assign of a component with the given `argument`.
      <%= render_block(@inner_block, value: @value) %>
This function is deprecated for function components. Use `render_slot/2`
instead.
"""
@doc deprecated: "Use render_slot/2 instead"
defmacro render_block(inner_block, argument \\ []) do
quote do
unquote(__MODULE__).__render_block__(unquote(inner_block)).(
var!(changed, Phoenix.LiveView.Engine),
unquote(argument)
)
end
end
@doc false
def __render_block__([%{inner_block: fun}]), do: fun
def __render_block__(fun), do: fun
@doc ~S'''
Renders a slot entry with the given optional `argument`.
<%= render_slot(@inner_block, @form) %>
If multiple slot entries are defined for the same slot,
`render_slot/2` will automatically render all entries,
merging their contents. In case you want to use the entries'
attributes, you need to iterate over the list to access each
slot individually.
For example, imagine a table component:
<.table rows={@users}>
<:col let={user} label="Name">
<%= user.name %>
</:col>
<:col let={user} label="Address">
<%= user.address %>
</:col>
</.table>
At the top level, we pass the rows as an assign and we define
a `:col` slot for each column we want in the table. Each
column also has a `label`, which we are going to use in the
table header.
Inside the component, you can render the table with headers,
rows, and columns:
def table(assigns) do
~H"""
<table>
<tr>
<%= for col <- @col do %>
<th><%= col.label %></th>
<% end %>
</tr>
<%= for row <- @rows do %>
<tr>
<%= for col <- @col do %>
<td><%= render_slot(col, row) %></td>
<% end %>
</tr>
<% end %>
</table>
"""
end
'''
defmacro render_slot(slot, argument \\ nil) do
quote do
unquote(__MODULE__).__render_slot__(
var!(changed, Phoenix.LiveView.Engine),
unquote(slot),
unquote(argument)
)
end
end
@doc false
def __render_slot__(_, [], _), do: ""
def __render_slot__(changed, [entry], argument) do
call_inner_block!(entry, changed, argument)
end
def __render_slot__(changed, entries, argument) when is_list(entries) do
assigns = %{}
~H"""
<%= for entry <- entries do %><%= call_inner_block!(entry, changed, argument) %><% end %>
"""
end
def __render_slot__(changed, entry, argument) when is_map(entry) do
entry.inner_block.(changed, argument)
end
defp call_inner_block!(entry, changed, argument) do
if !entry.inner_block do
message = "attempted to render slot <:#{entry.__slot__}> but the slot has no inner content"
raise RuntimeError, message
end
entry.inner_block.(changed, argument)
end
@doc """
Define a inner block, generally used by slots.
This macro is mostly used by HTML engines that provides
a `slot` implementation and rarely called directly. The
`name` must be the assign name the slot/block will be stored
under.
If you're using HEEx templates, you should use its higher
level `<:slot>` notation instead. See `Phoenix.Component`
for more information.
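
  As a rough illustration (not the exact code an engine generates), a slot
  entry may be compiled into a map along the lines of:

      %{__slot__: :col, inner_block: inner_block(:col, do: block)}

  which `render_slot/2` can later invoke with an argument.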
"""
defmacro inner_block(name, do: do_block) do
rewrite_do(do_block, name)
end
defp rewrite_do([{:->, meta, _} | _] = do_block, key) do
inner_fun = {:fn, meta, do_block}
quote do
fn parent_changed, arg ->
var!(assigns) =
unquote(__MODULE__).__assigns__(var!(assigns), unquote(key), parent_changed)
_ = var!(assigns)
unquote(inner_fun).(arg)
end
end
end
defp rewrite_do(do_block, key) do
quote do
fn parent_changed, arg ->
var!(assigns) =
unquote(__MODULE__).__assigns__(var!(assigns), unquote(key), parent_changed)
_ = var!(assigns)
unquote(do_block)
end
end
end
@doc false
def __assigns__(assigns, key, parent_changed) do
# If the component is in its initial render (parent_changed == nil)
# or the slot/block key is in parent_changed, then we render the
# function with the assigns as is.
#
# Otherwise, we will set changed to an empty list, which is the same
# as marking everything as not changed. This is correct because
# parent_changed will always be marked as changed whenever any of the
# assigns it references inside is changed. It will also be marked as
# changed if it has any variable (such as the ones coming from let).
if is_nil(parent_changed) or Map.has_key?(parent_changed, key) do
assigns
else
Map.put(assigns, :__changed__, %{})
end
end
@doc """
Returns the flash message from the LiveView flash assign.
## Examples
<p class="alert alert-info"><%= live_flash(@flash, :info) %></p>
<p class="alert alert-danger"><%= live_flash(@flash, :error) %></p>
"""
def live_flash(%_struct{} = other, _key) do
raise ArgumentError, "live_flash/2 expects a @flash assign, got: #{inspect(other)}"
end
def live_flash(%{} = flash, key), do: Map.get(flash, to_string(key))
@doc """
Returns the entry errors for an upload.
The following errors may be returned:
* `:too_many_files` - The number of selected files exceeds the `:max_entries` constraint
## Examples
def error_to_string(:too_many_files), do: "You have selected too many files"
<%= for err <- upload_errors(@uploads.avatar) do %>
<div class="alert alert-danger">
<%= error_to_string(err) %>
</div>
<% end %>
"""
def upload_errors(%Phoenix.LiveView.UploadConfig{} = conf) do
for {ref, error} <- conf.errors, ref == conf.ref, do: error
end
@doc """
Returns the entry errors for an upload.
The following errors may be returned:
* `:too_large` - The entry exceeds the `:max_file_size` constraint
* `:not_accepted` - The entry does not match the `:accept` MIME types
## Examples
def error_to_string(:too_large), do: "Too large"
def error_to_string(:not_accepted), do: "You have selected an unacceptable file type"
<%= for entry <- @uploads.avatar.entries do %>
<%= for err <- upload_errors(@uploads.avatar, entry) do %>
<div class="alert alert-danger">
<%= error_to_string(err) %>
</div>
<% end %>
<% end %>
"""
def upload_errors(
%Phoenix.LiveView.UploadConfig{} = conf,
%Phoenix.LiveView.UploadEntry{} = entry
) do
for {ref, error} <- conf.errors, ref == entry.ref, do: error
end
@doc """
Generates an image preview on the client for a selected file.
## Examples
<%= for entry <- @uploads.avatar.entries do %>
<%= live_img_preview entry, width: 75 %>
<% end %>
"""
def live_img_preview(%Phoenix.LiveView.UploadEntry{ref: ref} = entry, opts \\ []) do
attrs =
Keyword.merge(opts,
id: "phx-preview-#{ref}",
data_phx_upload_ref: entry.upload_ref,
data_phx_entry_ref: ref,
data_phx_hook: "Phoenix.LiveImgPreview",
data_phx_update: "ignore"
)
assigns = LiveView.assign(%{__changed__: nil}, attrs: attrs)
~H"<img {@attrs}/>"
end
@doc """
Builds a file input tag for a LiveView upload.
Options may be passed through to the tag builder for custom attributes.
## Drag and Drop
Drag and drop is supported by annotating the droppable container with a `phx-drop-target`
attribute pointing to the DOM ID of the file input. By default, the file input ID is the
upload `ref`, so the following markup is all that is required for drag and drop support:
<div class="container" phx-drop-target="<%= @uploads.avatar.ref %>">
...
<%= live_file_input @uploads.avatar %>
</div>
## Examples
<%= live_file_input @uploads.avatar %>
"""
def live_file_input(%Phoenix.LiveView.UploadConfig{} = conf, opts \\ []) do
if opts[:id], do: raise(ArgumentError, "the :id cannot be overridden on a live_file_input")
opts =
if conf.max_entries > 1 do
Keyword.put(opts, :multiple, true)
else
opts
end
preflighted_entries = for entry <- conf.entries, entry.preflighted?, do: entry
done_entries = for entry <- conf.entries, entry.done?, do: entry
valid? = Enum.any?(conf.entries) && Enum.empty?(conf.errors)
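    # `valid?` gates client-side auto upload below: it requires at least one
    # selected entry and no upload errors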
Phoenix.HTML.Tag.content_tag(
:input,
"",
Keyword.merge(opts,
type: "file",
id: conf.ref,
name: conf.name,
accept: if(conf.accept != :any, do: conf.accept),
phx_hook: "Phoenix.LiveFileUpload",
data_phx_update: "ignore",
data_phx_upload_ref: conf.ref,
data_phx_active_refs: Enum.map_join(conf.entries, ",", & &1.ref),
data_phx_done_refs: Enum.map_join(done_entries, ",", & &1.ref),
data_phx_preflighted_refs: Enum.map_join(preflighted_entries, ",", & &1.ref),
data_phx_auto_upload: valid? && conf.auto_upload?
)
)
end
@doc """
Renders a title tag with automatic prefix/suffix on `@page_title` updates.
## Examples
<%= live_title_tag assigns[:page_title] || "Welcome", prefix: "MyApp – " %>
<%= live_title_tag assigns[:page_title] || "Welcome", suffix: " – MyApp" %>
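  The prefix/suffix are also emitted as `data-prefix`/`data-suffix` attributes
  on the `<title>` tag, which lets the client re-apply them whenever
  `@page_title` is patched.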
"""
def live_title_tag(title, opts \\ []) do
title_tag(title, opts[:prefix], opts[:suffix], opts)
end
defp title_tag(title, nil = _prefix, "" <> suffix, _opts) do
Phoenix.HTML.Tag.content_tag(:title, title <> suffix, data: [suffix: suffix])
end
defp title_tag(title, "" <> prefix, nil = _suffix, _opts) do
Phoenix.HTML.Tag.content_tag(:title, prefix <> title, data: [prefix: prefix])
end
defp title_tag(title, "" <> pre, "" <> post, _opts) do
Phoenix.HTML.Tag.content_tag(:title, pre <> title <> post, data: [prefix: pre, suffix: post])
end
  defp title_tag(title, _prefix = nil, _suffix = nil, []) do
Phoenix.HTML.Tag.content_tag(:title, title)
end
defp title_tag(_title, _prefix = nil, _suffix = nil, opts) do
raise ArgumentError,
"live_title_tag/2 expects a :prefix and/or :suffix option, got: #{inspect(opts)}"
end
@doc """
Renders a form function component.
This function is built on top of `Phoenix.HTML.Form.form_for/4`. For
more information about options and how to build inputs, see
`Phoenix.HTML.Form`.
## Options
The `:for` assign is the form's source data and the optional `:action`
assign can be provided for the form's action. Additionally accepts
the same options as `Phoenix.HTML.Form.form_for/4` as optional assigns:
* `:as` - the server side parameter in which all params for this
form will be collected (i.e. `as: :user_params` would mean all fields
for this form will be accessed as `conn.params.user_params` server
side). Automatically inflected when a changeset is given.
* `:method` - the HTTP method. If the method is not "get" nor "post",
an input tag with name `_method` is generated along-side the form tag.
Defaults to "post".
* `:multipart` - when true, sets enctype to "multipart/form-data".
Required when uploading files
* `:csrf_token` - for "post" requests, the form tag will automatically
include an input tag with name `_csrf_token`. When set to false, this
is disabled
* `:errors` - use this to manually pass a keyword list of errors to the form
(for example from `conn.assigns[:errors]`). This option is only used when a
connection is used as the form source and it will make the errors available
under `f.errors`
* `:id` - the ID of the form attribute. If an ID is given, all form inputs
will also be prefixed by the given ID
All further assigns will be passed to the form tag.
## Examples
<.form let={f} for={@changeset}>
<%= text_input f, :name %>
</.form>
<.form let={user_form} for={@changeset} as="user" multipart {@extra}>
<%= text_input user_form, :name %>
</.form>
"""
def form(assigns) do
    # Extract options and then make the same call as form_for/4
action = assigns[:action] || "#"
form_for = assigns[:for] || raise ArgumentError, "missing :for assign to form"
form_options = assigns_to_attributes(assigns, [:action, :for])
# Since FormData may add options, read the actual options from form
%{options: opts} =
form = %Phoenix.HTML.Form{
Phoenix.HTML.FormData.to_form(form_for, form_options)
| action: action
}
# And then process method, csrf_token, and multipart as in form_tag
{method, opts} = Keyword.pop(opts, :method, "post")
{method, hidden_method} = form_method(method)
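    # e.g. "put" becomes {"post", "put"}: the form submits as POST while the
    # real method travels in the hidden _method input rendered below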
{csrf_token, opts} =
Keyword.pop_lazy(opts, :csrf_token, fn ->
if method == "post", do: Plug.CSRFProtection.get_csrf_token_for(action)
end)
opts =
case Keyword.pop(opts, :multipart, false) do
{false, opts} -> opts
{true, opts} -> Keyword.put(opts, :enctype, "multipart/form-data")
end
# Finally we can render the form
assigns =
LiveView.assign(assigns,
form: form,
csrf_token: csrf_token,
hidden_method: hidden_method,
attrs: [action: action, method: method] ++ opts
)
~H"""
<form {@attrs}>
<%= if @hidden_method && @hidden_method not in ~w(get post) do %>
<input name="_method" type="hidden" value={@hidden_method}>
<% end %>
<%= if @csrf_token do %>
<input name="_csrf_token" type="hidden" value={@csrf_token}>
<% end %>
<%= render_slot(@inner_block, @form) %>
</form>
"""
end
defp form_method(method) when method in ~w(get post), do: {method, nil}
defp form_method(method) when is_binary(method), do: {"post", method}
defp is_assign?(assign_name, expression) do
match?({:@, _, [{^assign_name, _, _}]}, expression) or
match?({^assign_name, _, _}, expression) or
match?({{:., _, [{:assigns, _, nil}, ^assign_name]}, _, []}, expression)
end
end
| 31.579044 | 102 | 0.643693 |
f7ffb8efedf2c632839ace7a95cf9a697cd49668 | 39,305 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/api/services.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/monitoring/lib/google_api/monitoring/v3/api/services.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/monitoring/lib/google_api/monitoring/v3/api/services.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Monitoring.V3.Api.Services do
@moduledoc """
API calls for all endpoints tagged `Services`.
"""
alias GoogleApi.Monitoring.V3.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Create a Service.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `parent`. Required. Resource name of the parent workspace. The format is: projects/[PROJECT_ID_OR_NUMBER]
* `v3_id1` (*type:* `String.t`) - Part of `parent`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:serviceId` (*type:* `String.t`) - Optional. The Service id to use for this Service. If omitted, an id will be generated instead. Must match the pattern [a-z0-9\\-]+
* `:body` (*type:* `GoogleApi.Monitoring.V3.Model.Service.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.Service{}}` on success
* `{:error, info}` on failure
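  ## Example
  A sketch only — the connection setup, project id and service payload below
  are illustrative, not part of this module:
      conn = GoogleApi.Monitoring.V3.Connection.new(token)
      {:ok, service} =
        GoogleApi.Monitoring.V3.Api.Services.monitoring_services_create(
          conn,
          "projects",
          "my-project",
          serviceId: "my-service",
          body: %GoogleApi.Monitoring.V3.Model.Service{displayName: "My Service"}
        )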
"""
@spec monitoring_services_create(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.Service.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def monitoring_services_create(connection, v3_id, v3_id1, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:serviceId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v3/{v3Id}/{v3Id1}/services", %{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.Service{}])
end
@doc """
Soft delete this Service.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `name`. Required. Resource name of the Service to delete. The format is: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]
* `v3_id1` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `services_id` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec monitoring_services_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.Empty.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def monitoring_services_delete(
connection,
v3_id,
v3_id1,
services_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v3/{v3Id}/{v3Id1}/services/{servicesId}", %{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1),
"servicesId" => URI.encode(services_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.Empty{}])
end
@doc """
Get the named Service.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `name`. Required. Resource name of the Service. The format is: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]
* `v3_id1` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `services_id` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.Service{}}` on success
* `{:error, info}` on failure
"""
@spec monitoring_services_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.Service.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def monitoring_services_get(
connection,
v3_id,
v3_id1,
services_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v3/{v3Id}/{v3Id1}/services/{servicesId}", %{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1),
"servicesId" => URI.encode(services_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.Service{}])
end
@doc """
List Services for this workspace.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `parent`. Required. Resource name of the parent containing the listed services, either a project or a Monitoring Workspace. The formats are: projects/[PROJECT_ID_OR_NUMBER] workspaces/[HOST_PROJECT_ID_OR_NUMBER]
* `v3_id1` (*type:* `String.t`) - Part of `parent`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - A filter specifying what Services to return. The filter currently supports the following fields: - `identifier_case` - `app_engine.module_id` - `cloud_endpoints.service` (reserved for future use) - `mesh_istio.mesh_uid` - `mesh_istio.service_namespace` - `mesh_istio.service_name` - `cluster_istio.location` (deprecated) - `cluster_istio.cluster_name` (deprecated) - `cluster_istio.service_namespace` (deprecated) - `cluster_istio.service_name` (deprecated) identifier_case refers to which option in the identifier oneof is populated. For example, the filter identifier_case = "CUSTOM" would match all services with a value for the custom field. Valid options are "CUSTOM", "APP_ENGINE", "MESH_ISTIO", plus "CLUSTER_ISTIO" (deprecated) and "CLOUD_ENDPOINTS" (reserved for future use).
* `:pageSize` (*type:* `integer()`) - A non-negative number that is the maximum number of results to return. When 0, use default page size.
* `:pageToken` (*type:* `String.t`) - If this field is not empty then it must contain the nextPageToken value returned by a previous call to this method. Using this field causes the method to return additional results from the previous method call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.ListServicesResponse{}}` on success
* `{:error, info}` on failure
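  ## Example
  A sketch only — connection and project id are illustrative:
      conn = GoogleApi.Monitoring.V3.Connection.new(token)
      {:ok, response} =
        GoogleApi.Monitoring.V3.Api.Services.monitoring_services_list(
          conn,
          "projects",
          "my-project",
          pageSize: 10
        )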
"""
@spec monitoring_services_list(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Monitoring.V3.Model.ListServicesResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def monitoring_services_list(connection, v3_id, v3_id1, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v3/{v3Id}/{v3Id1}/services", %{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.ListServicesResponse{}])
end
@doc """
Update this Service.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `service.name`. Resource name for this Service. The format is: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]
* `v3_id1` (*type:* `String.t`) - Part of `service.name`. See documentation of `v3Id`.
* `services_id` (*type:* `String.t`) - Part of `service.name`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - A set of field paths defining which fields to use for the update.
* `:body` (*type:* `GoogleApi.Monitoring.V3.Model.Service.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.Service{}}` on success
* `{:error, info}` on failure
"""
@spec monitoring_services_patch(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.Service.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def monitoring_services_patch(
connection,
v3_id,
v3_id1,
services_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v3/{v3Id}/{v3Id1}/services/{servicesId}", %{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1),
"servicesId" => URI.encode(services_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.Service{}])
end
@doc """
Create a ServiceLevelObjective for the given Service.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `parent`. Required. Resource name of the parent Service. The format is: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]
* `v3_id1` (*type:* `String.t`) - Part of `parent`. See documentation of `v3Id`.
* `services_id` (*type:* `String.t`) - Part of `parent`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:serviceLevelObjectiveId` (*type:* `String.t`) - Optional. The ServiceLevelObjective id to use for this ServiceLevelObjective. If omitted, an id will be generated instead. Must match the pattern [a-z0-9\\-]+
* `:body` (*type:* `GoogleApi.Monitoring.V3.Model.ServiceLevelObjective.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.ServiceLevelObjective{}}` on success
* `{:error, info}` on failure
"""
@spec monitoring_services_service_level_objectives_create(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.ServiceLevelObjective.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def monitoring_services_service_level_objectives_create(
connection,
v3_id,
v3_id1,
services_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:serviceLevelObjectiveId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v3/{v3Id}/{v3Id1}/services/{servicesId}/serviceLevelObjectives", %{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1),
"servicesId" => URI.encode(services_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.ServiceLevelObjective{}])
end
@doc """
Delete the given ServiceLevelObjective.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `name`. Required. Resource name of the ServiceLevelObjective to delete. The format is: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME]
* `v3_id1` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `services_id` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `service_level_objectives_id` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec monitoring_services_service_level_objectives_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.Empty.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def monitoring_services_service_level_objectives_delete(
connection,
v3_id,
v3_id1,
services_id,
service_level_objectives_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url(
"/v3/{v3Id}/{v3Id1}/services/{servicesId}/serviceLevelObjectives/{serviceLevelObjectivesId}",
%{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1),
"servicesId" => URI.encode(services_id, &URI.char_unreserved?/1),
"serviceLevelObjectivesId" =>
URI.encode(service_level_objectives_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.Empty{}])
end
@doc """
Get a ServiceLevelObjective by name.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `name`. Required. Resource name of the ServiceLevelObjective to get. The format is: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME]
* `v3_id1` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `services_id` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `service_level_objectives_id` (*type:* `String.t`) - Part of `name`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:view` (*type:* `String.t`) - View of the ServiceLevelObjective to return. If DEFAULT, return the ServiceLevelObjective as originally defined. If EXPLICIT and the ServiceLevelObjective is defined in terms of a BasicSli, replace the BasicSli with a RequestBasedSli spelling out how the SLI is computed.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.ServiceLevelObjective{}}` on success
* `{:error, info}` on failure
"""
@spec monitoring_services_service_level_objectives_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.ServiceLevelObjective.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def monitoring_services_service_level_objectives_get(
connection,
v3_id,
v3_id1,
services_id,
service_level_objectives_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:view => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/v3/{v3Id}/{v3Id1}/services/{servicesId}/serviceLevelObjectives/{serviceLevelObjectivesId}",
%{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1),
"servicesId" => URI.encode(services_id, &URI.char_unreserved?/1),
"serviceLevelObjectivesId" =>
URI.encode(service_level_objectives_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.ServiceLevelObjective{}])
end
@doc """
List the ServiceLevelObjectives for the given Service.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `parent`. Required. Resource name of the parent containing the listed SLOs, either a project or a Monitoring Workspace. The formats are: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/-
* `v3_id1` (*type:* `String.t`) - Part of `parent`. See documentation of `v3Id`.
* `services_id` (*type:* `String.t`) - Part of `parent`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - A filter specifying what ServiceLevelObjectives to return.
* `:pageSize` (*type:* `integer()`) - A non-negative number that is the maximum number of results to return. When 0, use default page size.
* `:pageToken` (*type:* `String.t`) - If this field is not empty then it must contain the nextPageToken value returned by a previous call to this method. Using this field causes the method to return additional results from the previous method call.
* `:view` (*type:* `String.t`) - View of the ServiceLevelObjectives to return. If DEFAULT, return each ServiceLevelObjective as originally defined. If EXPLICIT and the ServiceLevelObjective is defined in terms of a BasicSli, replace the BasicSli with a RequestBasedSli spelling out how the SLI is computed.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.ListServiceLevelObjectivesResponse{}}` on success
* `{:error, info}` on failure
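  ## Example
  A sketch only — arguments are illustrative; under a workspace parent the
  services_id may be `-` to list SLOs across all services:
      {:ok, response} =
        GoogleApi.Monitoring.V3.Api.Services.monitoring_services_service_level_objectives_list(
          conn,
          "workspaces",
          "host-project",
          "-"
        )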
"""
@spec monitoring_services_service_level_objectives_list(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.ListServiceLevelObjectivesResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def monitoring_services_service_level_objectives_list(
connection,
v3_id,
v3_id1,
services_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query,
:view => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v3/{v3Id}/{v3Id1}/services/{servicesId}/serviceLevelObjectives", %{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1),
"servicesId" => URI.encode(services_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.Monitoring.V3.Model.ListServiceLevelObjectivesResponse{}]
)
end
@doc """
Update the given ServiceLevelObjective.
## Parameters
* `connection` (*type:* `GoogleApi.Monitoring.V3.Connection.t`) - Connection to server
* `v3_id` (*type:* `String.t`) - Part of `serviceLevelObjective.name`. Resource name for this ServiceLevelObjective. The format is: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME]
* `v3_id1` (*type:* `String.t`) - Part of `serviceLevelObjective.name`. See documentation of `v3Id`.
* `services_id` (*type:* `String.t`) - Part of `serviceLevelObjective.name`. See documentation of `v3Id`.
* `service_level_objectives_id` (*type:* `String.t`) - Part of `serviceLevelObjective.name`. See documentation of `v3Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - A set of field paths defining which fields to use for the update.
* `:body` (*type:* `GoogleApi.Monitoring.V3.Model.ServiceLevelObjective.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Monitoring.V3.Model.ServiceLevelObjective{}}` on success
* `{:error, info}` on failure
"""
@spec monitoring_services_service_level_objectives_patch(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Monitoring.V3.Model.ServiceLevelObjective.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def monitoring_services_service_level_objectives_patch(
connection,
v3_id,
v3_id1,
services_id,
service_level_objectives_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url(
"/v3/{v3Id}/{v3Id1}/services/{servicesId}/serviceLevelObjectives/{serviceLevelObjectivesId}",
%{
"v3Id" => URI.encode(v3_id, &URI.char_unreserved?/1),
"v3Id1" => URI.encode(v3_id1, &URI.char_unreserved?/1),
"servicesId" => URI.encode(services_id, &URI.char_unreserved?/1),
"serviceLevelObjectivesId" =>
URI.encode(service_level_objectives_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Monitoring.V3.Model.ServiceLevelObjective{}])
end
end
| 47.584746 | 827 | 0.618267 |
f7ffbe0adb822f92ddbaa4e1309d6fbf43b9e54a | 773 | ex | Elixir | apps/mishka_content/lib/mishka_content.ex | mojtaba-naserei/mishka-cms | 1f31f61347bab1aae6ba0d47c5515a61815db6c9 | [
"Apache-2.0"
] | 3 | 2021-06-27T10:26:51.000Z | 2022-01-10T13:56:08.000Z | apps/mishka_content/lib/mishka_content.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | null | null | null | apps/mishka_content/lib/mishka_content.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | null | null | null | defmodule MishkaContent do
def db_content_activity_error(section, action, db_error) do
MishkaContent.General.Activity.create_activity_by_task(%{
type: "db",
section: section,
section_id: nil,
action: action,
priority: "high",
status: "error",
user_id: nil
}, %{
db_rescue_struct: db_error.__struct__,
message: Map.get(db_error, :message),
values: Map.get(db_error, :value),
      type: Map.get(db_error, :type)
}
)
end
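  # Keeps the first `count + 1` words of `string` (word indexes 0..count are kept).
  # For example: get_size_of_words("lorem ipsum dolor sit", 2) returns "lorem ipsum dolor"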
def get_size_of_words(string, count) when not is_nil(string) do
string
|> String.split(" ")
|> Enum.with_index(fn element, index -> if index <= count, do: element end)
|> Enum.reject(fn item -> is_nil(item) end)
|> Enum.join(" ")
end
end
| 27.607143 | 79 | 0.620957 |
f7ffdca3121951c0734e7576395d1be5ea7e8a4c | 1,023 | exs | Elixir | mix.exs | grain-team/vttyl | ad61a827ed6270a500145e127c2bb28d0ac63b7c | [
"MIT"
] | 4 | 2019-05-30T18:12:08.000Z | 2021-11-02T22:09:10.000Z | mix.exs | grain-team/vttyl | ad61a827ed6270a500145e127c2bb28d0ac63b7c | [
"MIT"
] | 2 | 2019-08-02T18:38:17.000Z | 2019-12-12T23:31:42.000Z | mix.exs | grain-team/vttyl | ad61a827ed6270a500145e127c2bb28d0ac63b7c | [
"MIT"
] | null | null | null | defmodule Vttyl.MixProject do
use Mix.Project
@version "0.4.1"
@repo_url "https://github.com/grain-team/vttyl"
def project do
[
app: :vttyl,
version: @version,
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
source_url: @repo_url,
homepage_url: @repo_url,
name: "Vttyl",
description: "A dead simple vtt parser.",
docs: [
main: "readme",
source_ref: "v#{@version}",
source_url: @repo_url,
extras: ["README.md"]
]
]
end
def application do
[
extra_applications: [:logger]
]
end
defp package do
[
maintainers: ["Matt Lewis", "Ryan Johnson"],
licenses: ["MIT"],
links: %{
"GitHub" => @repo_url,
"Made by Grain" => "https://grain.co"
}
]
end
defp deps do
[
{:ex_doc, "~> 0.20", only: :dev},
{:dialyxir, "~> 1.0.0-rc.6", only: [:dev, :test], runtime: false}
]
end
end
| 19.673077 | 71 | 0.519062 |
f7ffed860dc12e495be4ba7332fc32953fc4395b | 1,033 | exs | Elixir | test/pton_web/plugs/rate_limit_test.exs | casey-chow/pton.co | c794dc6903326dc827f5cbd5c7a8e35868a0fa45 | [
"MIT"
] | null | null | null | test/pton_web/plugs/rate_limit_test.exs | casey-chow/pton.co | c794dc6903326dc827f5cbd5c7a8e35868a0fa45 | [
"MIT"
] | 4 | 2017-10-18T15:52:20.000Z | 2017-10-19T00:18:38.000Z | test/pton_web/plugs/rate_limit_test.exs | casey-chow/pton.co | c794dc6903326dc827f5cbd5c7a8e35868a0fa45 | [
"MIT"
] | null | null | null | defmodule PtonWeb.Plugs.RateLimitTest do
use PtonWeb.ConnCase
alias PtonWeb.Plugs.RateLimit
describe "rate limiting with authentication" do
test "allows requests through if within limits", %{conn: conn} do
conn = conn
|> RateLimit.rate_limit(max_requests: 9, interval_seconds: 10000)
assert conn.status != 429
end
test "blocks requests if exceeds limits", %{conn: conn} do
conn |> RateLimit.rate_limit(max_requests: 1, interval_seconds: 10000)
conn = conn |> RateLimit.rate_limit(max_requests: 1, interval_seconds: 10000)
assert conn.status == 429
end
test "recognizes and buckets off of user if logged in", %{conn: conn} do
user = build(:user)
conn
|> assign(:user, user)
|> RateLimit.rate_limit_authentication(max_requests: 1, interval_seconds: 10000)
conn = conn
|> assign(:user, user)
|> RateLimit.rate_limit_authentication(max_requests: 1, interval_seconds: 10000)
assert conn.status == 429
end
end
end
| 28.694444 | 86 | 0.682478 |
f7fff38177b6d5a26a879ba40c8f29386cf32aee | 416 | ex | Elixir | lib/syn_osc/voice/parameter.ex | camshaft/syn_osc_ex | c7ed257f2ea772f197096d356467e812a0d5e59a | [
"MIT"
] | 1 | 2020-12-08T01:44:03.000Z | 2020-12-08T01:44:03.000Z | lib/syn_osc/voice/parameter.ex | camshaft/syn_osc_ex | c7ed257f2ea772f197096d356467e812a0d5e59a | [
"MIT"
] | null | null | null | lib/syn_osc/voice/parameter.ex | camshaft/syn_osc_ex | c7ed257f2ea772f197096d356467e812a0d5e59a | [
"MIT"
] | null | null | null | defmodule SynOSC.Voice.Parameter do
defstruct id: nil,
voice: nil,
parameter: nil,
relative: false,
value: nil
end
defimpl OSC.Encoder, for: SynOSC.Voice.Parameter do
use SynOSC.Voice
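  # call_param/1, set_arguments/2 and format_value/1 are not defined in this
  # file; presumably they are injected by `use SynOSC.Voice` above.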
def encode(message, options) do
message
|> call_param()
|> set_arguments(format_value(message))
|> OSC.Encoder.encode(options)
end
def flag(_), do: []
end
| 19.809524 | 51 | 0.620192 |
7900239116f7c2b9a49eae5a12f3aa3698473237 | 15,134 | ex | Elixir | apps/omg_watcher/lib/omg_watcher/block_getter.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 177 | 2018-08-24T03:51:02.000Z | 2020-05-30T13:29:25.000Z | apps/omg_watcher/lib/omg_watcher/block_getter.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 1,042 | 2018-08-25T00:52:39.000Z | 2020-06-01T05:15:17.000Z | apps/omg_watcher/lib/omg_watcher/block_getter.ex | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 47 | 2018-08-24T12:06:33.000Z | 2020-04-28T11:49:25.000Z | # Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.BlockGetter do
@moduledoc """
Downloads blocks from child chain, validates them and updates watcher state.
Manages concurrent downloading and stateless-validation of blocks.
Detects byzantine behaviors like invalid blocks and block withholding and exposes those events.
  Responsible for processing all block submissions, and for processing each of them exactly once, regardless of the reorg situation.
Note that `BlockGetter` shouldn't have any finality margin configured, i.e. it should be prepared to be served events
from zero-confirmation Ethereum blocks from the `OMG.Watcher.RootChainCoordinator`.
The flow of getting blocks is as follows:
- `BlockGetter` tracks the top child block number mined in the root chain contract (by doing `eth_call` on the
ethereum node)
- if this is newer than local state, it gets the hash of the block from the contract (another `eth_call`)
- with the hash it calls `block.get` on the child chain server
- if this succeeds it continues to statelessly validate the block (recover transactions, calculate Merkle root)
- if this fails (e.g. timeout) it goes into a `PotentialWithholding` state and tries to see if the problem
resolves. If not it ends up reporting a `block_withholding` byzantine event
- it holds such downloaded block until `OMG.Watcher.RootChainCoordinator` allows the blocks submitted at given Ethereum
heights to be applied
- Applies the block by statefully validating and executing the txs on `OMG.Watcher.State`
- after the block is fully validated it gathers all the updates to `OMG.DB` and executes them. This includes marking
a respective Ethereum height (that contained the `BlockSubmitted` event) as processed
- checks in to `OMG.Watcher.RootChainCoordinator` to let other services know about progress
The process of downloading and stateless validation of blocks is done in `Task`s for concurrency.
See `OMG.Watcher.BlockGetter.Core` for the implementation of the business logic for the getter.
"""
use GenServer
require Logger
use Spandex.Decorators
alias OMG.Eth.RootChain
alias OMG.Watcher.BlockGetter.BlockApplication
alias OMG.Watcher.BlockGetter.Core
alias OMG.Watcher.BlockGetter.Status
alias OMG.Watcher.ExitProcessor
alias OMG.Watcher.HttpRPC.Client
alias OMG.Watcher.RootChainCoordinator
alias OMG.Watcher.RootChainCoordinator.SyncGuide
alias OMG.Watcher.State
@doc """
Retrieves the freshest information about `OMG.Watcher.BlockGetter`'s status, as stored by the slave process `Status`.
"""
@spec get_events() :: {:ok, Core.chain_ok_response_t()}
def get_events(), do: __MODULE__.Status.get_events()
def start_link(args) do
GenServer.start_link(__MODULE__, args, name: __MODULE__)
end
@doc """
Reads the status of block getting and application from `OMG.DB`, reads the current state of the contract and root
chain and starts the pollers that will take care of getting blocks.
"""
def init(args) do
child_block_interval = Keyword.fetch!(args, :child_block_interval)
    # how many eth blocks backward can change during a reorg
block_getter_reorg_margin = Keyword.fetch!(args, :block_getter_reorg_margin)
maximum_block_withholding_time_ms = Keyword.fetch!(args, :maximum_block_withholding_time_ms)
maximum_number_of_unapplied_blocks = Keyword.fetch!(args, :maximum_number_of_unapplied_blocks)
block_getter_loops_interval_ms = Keyword.fetch!(args, :block_getter_loops_interval_ms)
child_chain_url = Keyword.fetch!(args, :child_chain_url)
contract_deployment_height = Keyword.fetch!(args, :contract_deployment_height)
    # TODO: rethink possible solutions, see issue #724
    # if we do not wait here, `ExitProcessor.check_validity()` may time out,
    # which causes State and BlockGetter to reboot, fetch the entire UTXO set again, and then time out again...
exit_processor_initial_results = ExitProcessor.check_validity(10 * 60_000)
# State treats current as the next block to be executed or a block that is being executed
# while top block number is a block that has been formed (they differ by the interval)
{current_block_height, state_at_block_beginning} = State.get_status()
child_top_block_number = current_block_height - child_block_interval
{:ok, last_synced_height} = OMG.DB.get_single_value(:last_block_getter_eth_height)
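    # never start syncing below the Ethereum height at which the root chain
    # contract was deployed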
synced_height = max(contract_deployment_height, last_synced_height)
{:ok, state} =
Core.init(
child_top_block_number,
child_block_interval,
synced_height,
block_getter_reorg_margin,
state_at_block_beginning,
exit_processor_initial_results,
maximum_block_withholding_time_ms: maximum_block_withholding_time_ms,
maximum_number_of_unapplied_blocks: maximum_number_of_unapplied_blocks,
# NOTE: not elegant, but this should limit the number of heavy-lifting workers and chance to starve the rest
maximum_number_of_pending_blocks: System.schedulers(),
block_getter_loops_interval_ms: block_getter_loops_interval_ms,
child_chain_url: child_chain_url
)
:ok = check_in_to_coordinator(synced_height)
{:ok, _} = schedule_sync_height(block_getter_loops_interval_ms)
{:ok, _} = schedule_producer(block_getter_loops_interval_ms)
{:ok, _} = __MODULE__.Status.start_link()
:ok = update_status(state)
metrics_collection_interval = Keyword.fetch!(args, :metrics_collection_interval)
{:ok, _} = :timer.send_interval(metrics_collection_interval, self(), :send_metrics)
_ =
Logger.info(
"Started #{inspect(__MODULE__)}, synced_height: #{inspect(synced_height)} maximum_block_withholding_time_ms: #{
maximum_block_withholding_time_ms
}"
)
{:ok, state}
end
# :apply_block pipeline of steps
@doc """
Read top down:
  - (execute_transactions) Statefully validates and executes transactions on `OMG.Watcher.State`. Reacts if any of the transactions fail.
  - (run_block_download_task) Schedules more blocks to download whenever some downloading work has finished, so that progress continues.
- (close_and_apply_block) Marks a block as applied and updates `OMG.DB` values. Also commits the updates to `OMG.DB` that `OMG.Watcher.State` handed off
containing the data coming from the newly applied block.
- (check_validity) Updates its view of validity of the chain.
"""
def handle_continue({:apply_block_step, :execute_transactions, block_application}, state) do
tx_exec_results = for(tx <- block_application.transactions, do: OMG.Watcher.State.exec(tx, :ignore_fees))
case Core.validate_executions(tx_exec_results, block_application, state) do
{:ok, state} ->
if Code.ensure_loaded?(OMG.WatcherInfo.DB.Block),
do: Kernel.apply(OMG.WatcherInfo.BlockApplicator, :insert_block!, [block_application])
{:noreply, state, {:continue, {:apply_block_step, :run_block_download_task, block_application}}}
{{:error, _} = error, new_state} ->
:ok = update_status(new_state)
_ = Logger.error("Invalid block #{inspect(block_application.number)}, because of #{inspect(error)}")
{:noreply, new_state}
end
end
def handle_continue({:apply_block_step, :run_block_download_task, block_application}, state) do
{:noreply, run_block_download_task(state),
{:continue, {:apply_block_step, :close_and_apply_block, block_application}}}
end
def handle_continue({:apply_block_step, :close_and_apply_block, block_application}, state) do
{:ok, db_updates_from_state} = OMG.Watcher.State.close_block()
{state, synced_height, db_updates} = Core.apply_block(state, block_application)
_ = Logger.debug("Synced height update: #{inspect(db_updates)}")
:ok = OMG.DB.multi_update(db_updates ++ db_updates_from_state)
:ok = check_in_to_coordinator(synced_height)
_ =
Logger.info(
"Applied block: \##{inspect(block_application.number)}, from eth height: #{
inspect(block_application.eth_height)
} " <>
"with #{inspect(length(block_application.transactions))} txs"
)
{:noreply, state, {:continue, {:apply_block_step, :check_validity}}}
end
def handle_continue({:apply_block_step, :check_validity}, state) do
exit_processor_results = ExitProcessor.check_validity()
state = Core.consider_exits(state, exit_processor_results)
:ok = update_status(state)
{:noreply, state}
end
@doc """
Statefully apply a statelessly validated block, coming in as a `BlockApplication` structure.
"""
def handle_cast({:apply_block, %BlockApplication{} = block_application}, state) do
case Core.chain_ok(state) do
{:ok, _} ->
{:noreply, state, {:continue, {:apply_block_step, :execute_transactions, block_application}}}
error ->
:ok = update_status(state)
_ =
Logger.warn(
"Chain already invalid before applying block #{inspect(block_application.number)} because of #{
inspect(error)
}"
)
{:noreply, state}
end
end
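# A minimal sketch of the :apply_block flow wired up above (step names as defined
# in this module; do_sync/1 below is what casts blocks into this pipeline):
#
#   GenServer.cast(__MODULE__, {:apply_block, block_application})
#   # -> :execute_transactions -> :run_block_download_task
#   # -> :close_and_apply_block -> :check_validity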
@spec handle_info(
:producer
| {reference(), {:downloaded_block, {:ok, map}}}
| {reference(), {:downloaded_block, {:error, Core.block_error()}}}
| {:DOWN, reference(), :process, pid, :normal},
Core.t()
) :: {:noreply, Core.t()} | {:stop, :normal, Core.t()}
def handle_info(msg, state)
def handle_info(:producer, state), do: do_producer(state)
def handle_info({_ref, {:downloaded_block, response}}, state), do: do_downloaded_block(response, state)
def handle_info({:DOWN, _ref, :process, _pid, :normal} = _process, state), do: {:noreply, state}
def handle_info(:sync, state), do: do_sync(state)
def handle_info(:send_metrics, state) do
:ok = :telemetry.execute([:process, __MODULE__], %{}, state)
{:noreply, state}
end
def handle_info({:ssl_closed, _}, state) do
# eat this bug https://github.com/benoitc/hackney/issues/464
{:noreply, state}
end
#
# Private functions
#
defp do_producer(state) do
case Core.chain_ok(state) do
{:ok, _} ->
new_state = run_block_download_task(state)
{:ok, _} = schedule_producer(state.config.block_getter_loops_interval_ms)
:ok = update_status(new_state)
{:noreply, new_state}
{:error, _} = error ->
:ok = update_status(state)
_ = Logger.warn("Chain invalid when trying to download blocks, because of #{inspect(error)}, won't try again")
{:noreply, state}
end
end
defp do_downloaded_block(response, state) do
# process the block that arrived and consume the response
case Core.handle_downloaded_block(state, response) do
{:ok, state} ->
state = run_block_download_task(state)
:ok = update_status(state)
{:noreply, state}
{{:error, _} = error, state} ->
:ok = update_status(state)
_ = Logger.error("Error while handling downloaded block because of #{inspect(error)}")
{:noreply, state}
end
end
defp do_sync(state) do
with {:ok, _} <- Core.chain_ok(state),
%SyncGuide{sync_height: next_synced_height} <- RootChainCoordinator.get_sync_info() do
{block_from, block_to} = Core.get_eth_range_for_block_submitted_events(state, next_synced_height)
{:ok, submissions} = get_block_submitted_events(block_from, block_to)
{blocks_to_apply, synced_height, db_updates, state} =
Core.get_blocks_to_apply(state, submissions, next_synced_height)
_ = Logger.debug("Synced height is #{inspect(synced_height)}, got #{length(blocks_to_apply)} blocks to apply")
Enum.each(blocks_to_apply, &GenServer.cast(__MODULE__, {:apply_block, &1}))
:ok = OMG.DB.multi_update(db_updates)
:ok = check_in_to_coordinator(synced_height)
{:ok, _} = schedule_sync_height(state.config.block_getter_loops_interval_ms)
:ok = update_status(state)
:ok = publish_events(submissions)
{:noreply, state}
else
:nosync ->
:ok = check_in_to_coordinator(state.synced_height)
:ok = update_status(state)
{:ok, _} = schedule_sync_height(state.config.block_getter_loops_interval_ms)
{:noreply, state}
{:error, _} = error ->
:ok = update_status(state)
_ = Logger.warn("Chain invalid when trying to sync, because of #{inspect(error)}, won't try again")
{:noreply, state}
end
end
@decorate trace(tracer: OMG.Watcher.Tracer, type: :backend, service: :block_getter)
defp get_block_submitted_events(block_from, block_to) do
RootChain.get_block_submitted_events(block_from, block_to)
end
defp run_block_download_task(state) do
next_child = RootChain.next_child_block()
{new_state, blocks_numbers} = Core.get_numbers_of_blocks_to_download(state, next_child)
Enum.each(
blocks_numbers,
# the result is captured in handle_info/2, tagged with the :downloaded_block atom
&Task.async(fn ->
{:downloaded_block, download_block(&1, state.config.child_chain_url)}
end)
)
new_state
end
defp schedule_sync_height(block_getter_loops_interval_ms) do
:timer.send_after(block_getter_loops_interval_ms, self(), :sync)
end
defp schedule_producer(block_getter_loops_interval_ms) do
:timer.send_after(block_getter_loops_interval_ms, self(), :producer)
end
@spec download_block(pos_integer(), String.t()) :: Core.validate_download_response_result_t()
defp download_block(requested_number, child_chain_url) do
{requested_hash, block_timestamp} = RootChain.blocks(requested_number)
response = Client.get_block(requested_hash, child_chain_url)
Core.validate_download_response(
response,
requested_hash,
requested_number,
block_timestamp,
:os.system_time(:millisecond)
)
end
defp check_in_to_coordinator(synced_height), do: RootChainCoordinator.check_in(synced_height, :block_getter)
defp update_status(%Core{} = state), do: Status.update(Core.chain_ok(state))
defp publish_events([%{event_signature: event_signature} | _] = data) do
# the event signature is a string containing the method name and its argument types,
# for example: BlockSubmitted(uint256)
[event_signature, _] = String.split(event_signature, "(")
{:root_chain, event_signature}
|> OMG.Bus.Event.new(:data, data)
|> OMG.Bus.direct_local_broadcast()
end
defp publish_events([]), do: :ok
end
| 41.69146 | 154 | 0.720167 |
7900405fd7df9c23a55d67068557b178bddf1444 | 689 | ex | Elixir | lib/gupsy_th.ex | flixbi/element-parsers | b92ef1cff139130acbac4f40d0d48568a3de6590 | ["MIT"] | 1 | 2021-11-10T18:06:59.000Z | 2021-11-10T18:06:59.000Z | lib/gupsy_th.ex | SeppPenner/element-parsers | 8a2594e0f15ca7177f6782d0441f25e3e55b8416 | ["MIT"] | null | null | null | lib/gupsy_th.ex | SeppPenner/element-parsers | 8a2594e0f15ca7177f6782d0441f25e3e55b8416 | ["MIT"] | null | null | null |
defmodule Parser do
use Platform.Parsing.Behaviour
require Logger
# ELEMENT IoT Parser for Gupsy temperature and humidity sensor
# According to documentation provided by Gupsy
#
# Changelog:
# 2019-xx-xx [jb]: Initial implementation.
# 2019-09-06 [jb]: Added parsing catchall for unknown payloads.
#
def parse(<<humid::big-16, temp::big-16, vbat::big-16>>, _meta) do
%{
humid: (125*humid)/(65536)-6,
temp: (175.72*temp)/(65536)-46.85,
vbat: 10027.008/vbat,
}
end
def parse(payload, meta) do
Logger.warn("Could not parse payload #{inspect payload} with frame_port #{inspect get_in(meta, [:meta, :frame_port])}")
[]
end
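  # A minimal usage sketch (the 6-byte payload below is hypothetical, not from the
  # vendor docs): Parser.parse(<<0x6B, 0xE2, 0x66, 0x44, 0x0D, 0x6F>>, %{}) matches
  # the three big-endian 16-bit fields and applies the linear conversions above;
  # any other payload shape falls through to the logging clause and returns [].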
end
| 26.5 | 123 | 0.660377 |
79005a984776daf5d9df468356b37a303f3bfe52 | 8,134 | ex | Elixir | lib/smppsend.ex | galina/smppsend | 8ab15487a11b1c968e9f160515025e2c670f0521 | ["Apache-2.0"] | null | null | null | lib/smppsend.ex | galina/smppsend | 8ab15487a11b1c968e9f160515025e2c670f0521 | ["Apache-2.0"] | null | null | null | lib/smppsend.ex | galina/smppsend | 8ab15487a11b1c968e9f160515025e2c670f0521 | ["Apache-2.0"] | null | null | null |
defmodule SMPPSend do
require Logger
use Dye
@switches [
help: :boolean,
version: :boolean,
bind_mode: :string,
host: :string,
port: :integer,
system_id: :string,
password: :string,
system_type: :string,
interface_version: :integer,
addr_ton: :integer,
addr_npi: :integer,
address_range: :string,
submit_sm: :boolean,
service_type: :string,
source_addr_ton: :integer,
source_addr_npi: :integer,
source_addr: :string,
dest_addr_ton: :integer,
dest_addr_npi: :integer,
destination_addr: :string,
esm_class: :integer,
protocol_id: :integer,
priority_flag: :integer,
schedule_delivery_time: :string,
validity_period: :string,
registered_delivery: :integer,
replace_if_present_flag: :integer,
data_coding: :integer,
sm_default_msg_id: :integer,
short_message: :string,
split_max_bytes: :integer,
udh: :boolean,
udh_ref: :integer,
udh_total_parts: :integer,
udh_part_num: :integer,
ucs2: :boolean,
binary: :boolean,
gsm: :boolean,
latin1: :boolean,
wait_dlrs: :integer,
wait: :boolean,
tls: :boolean
]
@defaults [
bind_mode: "tx",
esm_class: 0,
short_message: "",
submit_sm: false,
auto_split: false,
udh: false,
udh_ref: 0,
udh_total_parts: 1,
udh_part_num: 1,
ucs2: false,
binary: false,
latin1: false,
gsm: false,
wait: false
]
@required [
:bind_mode,
:host,
:port,
:system_id,
:password,
:submit_sm
]
@exit_code_ok 0
@exit_code_error 1
def main(args) do
code = chain(args, [
&parse/1,
&convert_tlvs/1,
&validate_unknown/1,
&set_defaults/1,
&show_help/1,
&show_version/1,
&validate_missing/1,
&decode_hex_string/1,
&encode/1,
&trap_exit/1,
&bind/1,
&send_messages/1,
&wait_dlrs/1,
&wait/1,
&unbind/1
])
Logger.flush
System.halt(code)
end
defp parse(args) do
{parsed, remaining, invalid} = OptionParser.parse(args, switches: @switches, allow_nonexistent_atoms: true)
cond do
length(invalid) > 0 -> {:error, "Invalid options: #{format_keys(invalid)}"}
length(remaining) > 0 -> {:error, "Redundant command line arguments: #{format_keys(remaining)}"}
true -> {:ok, parsed}
end
end
defp convert_tlvs(opts) do
case SMPPSend.TlvParser.convert_tlvs(opts) do
{:ok, opts} -> {:ok, opts}
{:error, message, key} -> {:error, "Error parsing tlv option #{format_keys(key)}: #{message}"}
end
end
defp validate_unknown(opts) do
case SMPPSend.OptionHelpers.find_unknown(opts, [:tlvs | @switches |> Keyword.keys]) do
[] -> {:ok, opts}
unknown -> {:error, "Unrecognized options: #{format_keys(unknown)}"}
end
end
defp set_defaults(opts) do
{:ok, SMPPSend.OptionHelpers.set_defaults(opts, @defaults)}
end
defp validate_missing(opts) do
case SMPPSend.OptionHelpers.find_missing(opts, @required) do
[] -> {:ok, opts}
missing -> {:error, "Missing options: #{format_keys(missing)}"}
end
end
defp show_help(opts) do
if opts[:help] do
IO.puts(SMPPSend.Usage.help)
:exit
else
{:ok, opts}
end
end
defp show_version(opts) do
if opts[:version] do
IO.puts(SMPPSend.Version.version)
:exit
else
{:ok, opts}
end
end
defp decode_hex_string(opts) do
if opts[:binary] do
case SMPPSend.OptionHelpers.decode_hex_string(opts, :short_message) do
{:ok, new_opts} ->
tlvs = opts[:tlvs]
{:ok, message_payload_id} = SMPPEX.Protocol.TlvFormat.id_by_name(:message_payload)
case SMPPSend.OptionHelpers.decode_hex_string(tlvs, message_payload_id) do
{:ok, new_tlvs} -> {:ok, Keyword.put(new_opts, :tlvs, new_tlvs)}
{:error, error} -> {:error, "Failed to decode message_payload: #{error}"}
end
{:error, error} -> {:error, "Failed to decode short_message: #{error}"}
end
else
{:ok, opts}
end
end
defp encoding_function(opts) do
cond do
opts[:ucs2] -> {:ucs2, &SMPPSend.OptionHelpers.convert_to_ucs2/2}
opts[:gsm] -> {:gsm, &SMPPSend.OptionHelpers.convert_to_gsm/2}
opts[:latin1] -> {:latin1, &SMPPSend.OptionHelpers.convert_to_latin1/2}
true -> {:noenc, fn(opts, _) -> {:ok, opts} end}
end
end
defp encode(opts) do
{encoding_name, encoding_fn} = encoding_function(opts)
case encoding_fn.(opts, :short_message) do
{:ok, new_opts} ->
tlvs = opts[:tlvs]
{:ok, message_payload_id} = SMPPEX.Protocol.TlvFormat.id_by_name(:message_payload)
case encoding_fn.(tlvs, message_payload_id) do
{:ok, new_tlvs} -> {:ok, Keyword.put(new_opts, :tlvs, new_tlvs)}
{:error, error} -> {:error, "Failed to convert message_payload to #{encoding_name}: #{error}"}
end
{:error, error} -> {:error, "Failed to convert short_message to #{encoding_name}: #{error}"}
end
end
defp trap_exit(opts) do
Process.flag(:trap_exit, true)
{:ok, opts}
end
defp bind(opts) do
host = opts[:host]
port = opts[:port]
case SMPPSend.PduHelpers.bind(opts) do
{:ok, bind} ->
case SMPPSend.ESMEHelpers.connect(host, port, bind, session_opts(opts)) do
{:ok, esme} -> {:ok, {esme, opts}}
{:error, error} -> {:error, "Connecting SMSC failed: #{inspect error}"}
end
{:error, _error} = error -> error
end
end
defp session_opts(opts) do
session_opts = []
if opts[:tls] do
session_opts
|> Keyword.put(:transport, :ranch_ssl)
else
session_opts
end
end
defp send_messages({esme, opts}) do
if opts[:submit_sm] do
if opts[:udh] && opts[:split_max_bytes] do
{:error, "Options --udh and --split-max-bytes can't be used together"}
else
submit_sms = cond do
opts[:udh] -> SMPPSend.PduHelpers.submit_sms(opts, :custom_udh)
opts[:split_max_bytes] -> SMPPSend.PduHelpers.submit_sms(opts, :auto_split)
true -> SMPPSend.PduHelpers.submit_sms(opts, :none)
end
case submit_sms do
{:ok, pdus} ->
case SMPPSend.ESMEHelpers.send_messages(esme, pdus) do
{:ok, message_ids} -> {:ok, {esme, opts, message_ids}}
{:error, error} -> {:error, "Message submit failed, #{error}"}
end
{:error, _error} = error -> error
end
end
else
{:ok, {esme, opts, []}}
end
end
defp wait_dlrs({esme, opts, message_ids}) do
if opts[:wait_dlrs] do
case SMPPSend.ESMEHelpers.wait_dlrs(esme, message_ids, opts[:wait_dlrs]) do
:ok ->
Logger.info("Dlrs for all sent messages received")
{:ok, {esme, opts}}
{:error, error} -> {:error, "Waiting dlrs failed: #{error}"}
end
else
{:ok, {esme, opts}}
end
end
defp wait({esme, opts}) do
if opts[:wait] do
SMPPSend.ESMEHelpers.wait_infinitely(esme)
else
{:ok, {esme, opts}}
end
end
def unbind({esme, _opts}) do
case SMPPSend.ESMEHelpers.unbind(esme) do
:ok -> :exit
{:error, error} -> {:error, "Unbind failed: #{error}"}
end
end
defp format_keys(keys) when not is_list(keys), do: format_keys([keys])
defp format_keys(keys) do
keys |> Enum.map(&original_key/1) |> Enum.map(&inspect/1) |> Enum.join(", ")
end
defp original_key({key, _}), do: to_string(key)
defp original_key(key) when is_binary(key), do: key
defp original_key(key) do
key_s = to_string(key)
prefix = case String.starts_with?(key_s, "--") do
true -> ""
false -> "--"
end
prefix <> Regex.replace(~r/_/, key_s, "-")
end
defp chain(arg, [fun | funs]) do
case fun.(arg) do
{:ok, res} -> chain(res, funs)
{:error, error} ->
IO.puts :stderr, ~s/#{error}/Rd
@exit_code_error
:exit -> @exit_code_ok
end
end
defp chain(_, []), do: @exit_code_ok
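  # A minimal sketch of the chain/2 helper above (steps taken from main/1): each
  # step receives the value from the previous {:ok, value}; {:error, message}
  # prints to stderr and yields @exit_code_error, while :exit (e.g. from
  # show_help/1) short-circuits with @exit_code_ok.
  #
  #   chain(args, [&parse/1, &convert_tlvs/1, &validate_unknown/1])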
end
| 25.659306 | 111 | 0.604623 |
790063e0272f7c5de4295962d5d45b9e0d637a81 | 3,817 | ex | Elixir | lib/trademark_free_strategic_land_warfare/players/johnb.ex | alanvoss/trademark_free_strategic_land_warfare | d13030adb1936849c5d363e0f4365f087394e6fa | [
"MIT"
] | 1 | 2020-06-30T16:37:50.000Z | 2020-06-30T16:37:50.000Z | lib/trademark_free_strategic_land_warfare/players/johnb.ex | alanvoss/trademark_free_strategic_land_warfare | d13030adb1936849c5d363e0f4365f087394e6fa | [
"MIT"
] | null | null | null | lib/trademark_free_strategic_land_warfare/players/johnb.ex | alanvoss/trademark_free_strategic_land_warfare | d13030adb1936849c5d363e0f4365f087394e6fa | [
"MIT"
] | 13 | 2020-06-04T23:10:19.000Z | 2020-06-05T01:25:26.000Z | defmodule TrademarkFreeStrategicLandWarfare.Players.JohnB do
alias TrademarkFreeStrategicLandWarfare.{Board, Player, Piece}
@behaviour Player
@type direction() :: :north | :west | :east | :south
@type count() :: integer()
@type state() :: any()
@spec name() :: binary()
def name() do
"JohnB"
end
# This is the main "improvement" for my bot - place the flag behind bombs and sergeants.
@spec initial_pieces_placement() :: nonempty_list([atom(), ...])
def initial_pieces_placement() do
[ :scout, :scout, :miner, :marshall, :scout, :scout, :bomb, :sergeant, :scout, :lieutenant,
:general, :major, :lieutenant, :captain, :captain, :spy, :sergeant, :major, :major, :lieutenant,
:miner, :miner, :lieutenant, :scout, :captain, :sergeant, :bomb, :sergeant, :colonel, :miner,
:scout, :bomb, :scout, :colonel, :captain, :bomb, :flag, :bomb, :bomb, :miner]
|> Enum.chunk_every(10)
end
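  # The 40 piece names above chunk into the 4x10 starting grid; the flag lands in
  # the final row of the placement, screened by the surrounding bombs, per the
  # note above initial_pieces_placement/0.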
@spec turn(
%TrademarkFreeStrategicLandWarfare.Board{},
%TrademarkFreeStrategicLandWarfare.Player{},
state()
) :: {binary(), direction(), count(), state()}
def turn(%Board{rows: rows} = board, %Player{number: number}, state) do
# find all eligible pieces
move_partitioned_pieces =
rows
|> List.flatten()
|> Enum.flat_map(fn
%Piece{player: ^number, name: name} = piece when name not in [:bomb, :flag] -> [piece]
_ -> []
end)
|> partition_by_move(board)
# select from them, biasing towards pieces that can win, then those that can advance,
# then west/east, then move backward
eligible_moves =
Enum.find(
[
Map.get(move_partitioned_pieces, :win, []),
Map.get(
move_partitioned_pieces,
Board.maybe_invert_player_direction(:north, number),
[]
),
Map.get(move_partitioned_pieces, :east, []) ++
Map.get(move_partitioned_pieces, :west, []),
Map.get(
move_partitioned_pieces,
Board.maybe_invert_player_direction(:south, number),
[]
)
],
fn list -> length(list) > 0 end
)
# randomly select one from the list returned
case eligible_moves do
nil ->
raise "no move possible"
moves ->
moves
|> Enum.random()
|> Tuple.append(state)
end
end
defp partition_by_move(pieces, board) do
# TODO: reduce_while and halt when preferred one found (win, progressing forward)
Enum.reduce(pieces, %{}, fn piece, acc ->
Enum.reduce([:north, :west, :east, :south], acc, fn direction, dir_acc ->
case Board.move(board, piece.player, piece.uuid, direction, 1) do
{:ok, :win, _} ->
# this shouldn't ever get hit, because we'll never know as a player
# where the opponent's flag is without trying to capture it. putting
# this here for that note, and just in case.
Map.update(
dir_acc,
:win,
[{piece.uuid, direction, 1}],
&[{piece.uuid, direction, 1} | &1]
)
{:error, :unknown_result} ->
# allowed move, but masked piece. include in the possibles.
Map.update(
dir_acc,
direction,
[{piece.uuid, direction, 1}],
&[{piece.uuid, direction, 1} | &1]
)
{:ok, %Board{}} ->
# allowed move -- no differentiation on whether attack happened
Map.update(
dir_acc,
direction,
[{piece.uuid, direction, 1}],
&[{piece.uuid, direction, 1} | &1]
)
_ ->
dir_acc
end
end)
end)
end
end
| 33.191304 | 102 | 0.56065 |
7900654e82f3ff0d6eb8775a74600a9e542a2966 | 1,440 | exs | Elixir | chapter_5/stack.exs | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T11:55:58.000Z | 2021-08-22T13:19:56.000Z | chapter_5/stack.exs | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | null | null | null | chapter_5/stack.exs | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T21:19:45.000Z | 2021-08-22T13:20:03.000Z | defmodule Stack do
@timeout_limit 1000
def start(initial_stack \\ []), do: spawn(fn -> loop(initial_stack) end)
def push_async(pid, element) do
send(pid, {:push, self(), element})
:ok
end
def pop_async(pid) do
send(pid, {:pop, self()})
:ok
end
def get_result do
receive do
{:push_result, result} -> result
{:pop_result, result} -> result
after
@timeout_limit -> {:error, :timeout}
end
end
def push_sync(pid, element) do
push_async(pid, element)
get_result()
end
def pop_sync(pid) do
pop_async(pid)
get_result()
end
defp loop(stack) do
new_stack =
receive do
{:push, caller, element} -> handle_push(caller, element, stack)
{:pop, caller} -> handle_pop(caller, stack)
end
loop(new_stack)
end
defp handle_push(caller, element, stack) do
new_stack = push_element(element, stack)
send(caller, {:push_result, new_stack})
new_stack
end
defp handle_pop(caller, [head | tails]) do
send(caller, {:pop_result, head})
tails
end
defp handle_pop(caller, []) do
send(caller, {:pop_result, nil})
[]
end
defp push_element(element, stack), do: [element | stack]
end
pid = Stack.start([1, 2, 3, 4, 5])
Stack.push_async(pid, 66)
Stack.pop_async(pid)
Stack.get_result()
Stack.get_result()
Stack.push_sync(pid, 10)
Stack.pop_sync(pid)
Stack.pop_sync(pid)
Stack.pop_sync(pid)
| 18.227848 | 74 | 0.638889 |
79006e475821b3686fcd23bee654132bd6a06ee7 | 2,064 | ex | Elixir | lib/maru/builder/pipeline/dsls.ex | davidenko87/maru | 62b6ca0e42dd550683af8743f50e67048c4027d0 | ["BSD-3-Clause"] | 819 | 2016-11-25T07:12:04.000Z | 2022-03-16T06:59:36.000Z | lib/maru/builder/pipeline/dsls.ex | davidenko87/maru | 62b6ca0e42dd550683af8743f50e67048c4027d0 | ["BSD-3-Clause"] | 71 | 2016-11-24T20:19:09.000Z | 2021-06-09T10:12:23.000Z | lib/maru/builder/pipeline/dsls.ex | davidenko87/maru | 62b6ca0e42dd550683af8743f50e67048c4027d0 | ["BSD-3-Clause"] | 56 | 2015-01-10T23:34:12.000Z | 2016-11-17T00:13:56.000Z |
defmodule Maru.Builder.Pipeline.DSLs do
alias Maru.Resource.MaruPlug
@doc """
Defines a pipeline block for the current endpoint.
"""
defmacro pipeline(block) do
quote do
import Kernel, only: []
import Maru.Resource.DSLs, only: []
import Maru.Builder.Pipeline.DSLs,
only: [
plug: 1,
plug: 2,
plug_overridable: 2,
plug_overridable: 3
]
unquote(block)
import Maru.Builder.Pipeline.DSLs, only: [pipeline: 1]
import Maru.Resource.DSLs
import Kernel
end
end
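  # A usage sketch (the plug modules named here are hypothetical): inside the
  # block only plug/1,2 and plug_overridable/2,3 are importable, per the import
  # juggling above.
  #
  #   pipeline do
  #     plug Plug.Logger
  #     plug_overridable :auth, MyApp.AuthPlug, realm: "api"
  #   end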
@doc """
Push a `Plug` struct to the current resource scope.
"""
defmacro plug(plug)
defmacro plug({:when, _, [plug, guards]}) do
do_plug(nil, plug, [], guards)
end
defmacro plug(plug) do
do_plug(nil, plug, [], true)
end
@doc """
Push a `Plug` struct with options and guards to the current resource scope.
"""
defmacro plug(plug, opts)
defmacro plug(plug, {:when, _, [opts, guards]}) do
do_plug(nil, plug, opts, guards)
end
defmacro plug(plug, opts) do
do_plug(nil, plug, opts, true)
end
@doc """
Push an overridable `Plug` struct to the current resource scope.
"""
defmacro plug_overridable(name, plug)
defmacro plug_overridable(name, {:when, _, [plug, guards]}) do
do_plug(name, plug, [], guards)
end
defmacro plug_overridable(name, plug) do
do_plug(name, plug, [], true)
end
@doc """
Push an overridable `Plug` struct with options and guards to the current resource scope.
"""
defmacro plug_overridable(name, plug, opts)
defmacro plug_overridable(name, plug, {:when, _, [opts, guards]}) do
do_plug(name, plug, opts, guards)
end
defmacro plug_overridable(name, plug, opts) do
do_plug(name, plug, opts, true)
end
defp do_plug(name, plug, opts, guards) do
quote do
MaruPlug.push(
%MaruPlug{
name: unquote(name),
plug: unquote(plug),
options: unquote(opts),
guards: unquote(Macro.escape(guards))
},
__ENV__
)
end
end
end
| 22.193548 | 85 | 0.621609 |
79007b1ac137504ecaf5d02685f48f4dfb5ebe47 | 175 | ex | Elixir | gateway/lib/mnesia/upgrade.ex | xonnect/server-ce | 7ec2e689a19098a17dfc5eaf190d56f29fb45263 | ["BSD-2-Clause"] | null | null | null | gateway/lib/mnesia/upgrade.ex | xonnect/server-ce | 7ec2e689a19098a17dfc5eaf190d56f29fb45263 | ["BSD-2-Clause"] | null | null | null | gateway/lib/mnesia/upgrade.ex | xonnect/server-ce | 7ec2e689a19098a17dfc5eaf190d56f29fb45263 | ["BSD-2-Clause"] | null | null | null |
defmodule Mix.Tasks.Upgrade do
use Mix.Task
alias Mnesia.Cache, as: Cache
use Cache
def run(_) do
Amnesia.start
# ... upgrade here
Amnesia.stop
end
end
| 14.583333 | 31 | 0.662857 |
790094f1b6fdc673489b1243d43fd7429ba92ae4 | 55,588 | ex | Elixir | lib/data_layer.ex | michaelst/ash_postgres | 026417f678ce98626915b15264105560f149a2a5 | ["MIT"] | null | null | null | lib/data_layer.ex | michaelst/ash_postgres | 026417f678ce98626915b15264105560f149a2a5 | ["MIT"] | null | null | null | lib/data_layer.ex | michaelst/ash_postgres | 026417f678ce98626915b15264105560f149a2a5 | ["MIT"] | null | null | null |
defmodule AshPostgres.DataLayer do
@manage_tenant %Ash.Dsl.Section{
name: :manage_tenant,
describe: """
Configuration for the behavior of a resource that manages a tenant
""",
examples: [
"""
manage_tenant do
template ["organization_", :id]
create? true
update? false
end
"""
],
schema: [
template: [
type: {:custom, __MODULE__, :tenant_template, []},
required: true,
doc: """
A template that will cause the resource to create/manage the specified schema.
Use this if you have a resource that, when created, should create a new tenant
for you. For example, if you have a `customer` resource, and you want to create
a schema for each customer based on their id, e.g `customer_10`, set this option
to `["customer_", :id]`. Then, when a customer is created, a schema called
`customer_10` will be created, and your tenant migrations will be run on it. Then, if you were to change
that customer's id to `20`, the schema would be renamed to `customer_20`. Generally speaking,
you should avoid changing the tenant id.
"""
],
create?: [
type: :boolean,
default: true,
doc: "Whether or not to automatically create a tenant when a record is created"
],
update?: [
type: :boolean,
default: true,
doc: "Whether or not to automatically update the tenant name if the record is udpated"
]
]
}
@reference %Ash.Dsl.Entity{
name: :reference,
describe: """
Configures the reference for a relationship in resource migrations.
Keep in mind that multiple relationships can theoretically involve the same destination and foreign keys.
In those cases, you only need to configure the `reference` behavior for one of them. Any conflicts will result
in an error, across this resource and any other resources that share a table with this one. For this reason,
instead of adding a reference configuration for `:nothing`, it's best to just leave the configuration out, as that
is the default behavior if *no* relationship anywhere has configured the behavior of that reference.
""",
examples: [
"reference :post, on_delete: :delete, on_update: :update, name: \"comments_to_posts_fkey\""
],
args: [:relationship],
target: AshPostgres.Reference,
schema: AshPostgres.Reference.schema()
}
@references %Ash.Dsl.Section{
name: :references,
describe: """
A section for configuring the references (foreign keys) in resource migrations.
This section is only relevant if you are using the migration generator with this resource.
Otherwise, it has no effect.
""",
examples: [
"""
references do
reference :post, on_delete: :delete, on_update: :update, name: "comments_to_posts_fkey"
end
"""
],
entities: [@reference],
schema: [
polymorphic_on_delete: [
type: {:one_of, [:delete, :nilify, :nothing, :restrict]},
doc:
"For polymorphic resources, configures the on_delete behavior of the automatically generated foreign keys to source tables."
],
polymorphic_on_update: [
type: {:one_of, [:update, :nilify, :nothing, :restrict]},
doc:
"For polymorphic resources, configures the on_update behavior of the automatically generated foreign keys to source tables."
],
polymorphic_name: [
  type: :string,
  doc:
    "For polymorphic resources, configures the name of the automatically generated foreign keys to source tables."
]
]
}
@postgres %Ash.Dsl.Section{
name: :postgres,
describe: """
Postgres data layer configuration
""",
sections: [
@manage_tenant,
@references
],
modules: [
:repo
],
examples: [
"""
postgres do
repo MyApp.Repo
table "organizations"
end
"""
],
schema: [
repo: [
type: :atom,
required: true,
doc:
"The repo that will be used to fetch your data. See the `AshPostgres.Repo` documentation for more"
],
migrate?: [
type: :boolean,
default: true,
doc:
"Whether or not to include this resource in the generated migrations with `mix ash.generate_migrations`"
],
base_filter_sql: [
type: :string,
doc:
"A raw sql version of the base_filter, e.g `representative = true`. Required if trying to create a unique constraint on a resource with a base_filter"
],
skip_unique_indexes: [
type: {:custom, __MODULE__, :validate_skip_unique_indexes, []},
default: false,
doc: "Skip generating unique indexes when generating migrations"
],
unique_index_names: [
type: :any,
default: [],
doc: """
A list of unique index names that could raise errors, or an mfa to a function that takes a changeset
and returns the list. Must be in the format `{[:affected, :keys], "name_of_constraint"}` or `{[:affected, :keys], "name_of_constraint", "custom error message"}`
"""
],
foreign_key_names: [
type: :any,
default: [],
doc: """
A list of foreign keys that could raise errors, or an mfa to a function that takes a changeset and returns the list.
Must be in the format `{:key, "name_of_constraint"}` or `{:key, "name_of_constraint", "custom error message"}`
"""
],
table: [
type: :string,
doc:
"The table to store and read the resource from. Required unless `polymorphic?` is true."
],
polymorphic?: [
type: :boolean,
default: false,
doc: """
Declares this resource as polymorphic.
Polymorphic resources cannot be read or updated unless the table is provided in the query/changeset context.
For example:
PolymorphicResource
|> Ash.Query.set_context(%{data_layer: %{table: "table"}})
|> MyApi.read!()
When relating to polymorphic resources, you'll need to use the `context` option on relationships,
e.g
belongs_to :polymorphic_association, PolymorphicResource,
context: %{data_layer: %{table: "table"}}
"""
]
]
}
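# e.g. unique_index_names [{[:email], "users_unique_email_index"}] and
# foreign_key_names [{:org_id, "posts_org_id_fkey", "org must exist"}] follow
# the formats documented above (the index and constraint names here are
# hypothetical).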
alias Ash.Filter
alias Ash.Query.{BooleanExpression, Not, Ref}
alias Ash.Query.Function.{Ago, Contains}
alias Ash.Query.Operator.IsNil
alias AshPostgres.Functions.{Fragment, TrigramSimilarity, Type}
import AshPostgres, only: [repo: 1]
@behaviour Ash.DataLayer
@sections [@postgres]
@moduledoc """
A Postgres data layer that leverages Ecto's PostgreSQL capabilities.
# Table of Contents
#{Ash.Dsl.Extension.doc_index(@sections)}
#{Ash.Dsl.Extension.doc(@sections)}
"""
use Ash.Dsl.Extension,
sections: @sections,
transformers: [
AshPostgres.Transformers.VerifyRepo,
AshPostgres.Transformers.EnsureTableOrPolymorphic
]
@doc false
def tenant_template(value) do
value = List.wrap(value)
if Enum.all?(value, &(is_binary(&1) || is_atom(&1))) do
{:ok, value}
else
{:error, "Expected all values for `manages_tenant` to be strings or atoms"}
end
end
@doc false
def validate_skip_unique_indexes(indexes) do
indexes = List.wrap(indexes)
if Enum.all?(indexes, &is_atom/1) do
{:ok, indexes}
else
{:error, "All indexes to skip must be atoms"}
end
end
import Ecto.Query, only: [from: 2, subquery: 1]
@impl true
def can?(_, :async_engine), do: true
def can?(_, :transact), do: true
def can?(_, :composite_primary_key), do: true
def can?(_, :upsert), do: true
def can?(resource, {:join, other_resource}) do
data_layer = Ash.DataLayer.data_layer(resource)
other_data_layer = Ash.DataLayer.data_layer(other_resource)
data_layer == other_data_layer and repo(data_layer) == repo(other_data_layer)
end
def can?(resource, {:lateral_join, other_resource}) do
data_layer = Ash.DataLayer.data_layer(resource)
other_data_layer = Ash.DataLayer.data_layer(other_resource)
data_layer == other_data_layer and repo(data_layer) == repo(other_data_layer)
end
def can?(_, :boolean_filter), do: true
def can?(_, {:aggregate, :count}), do: true
def can?(_, {:aggregate, :sum}), do: true
def can?(_, :aggregate_filter), do: true
def can?(_, :aggregate_sort), do: true
def can?(_, :create), do: true
def can?(_, :read), do: true
def can?(_, :update), do: true
def can?(_, :destroy), do: true
def can?(_, :filter), do: true
def can?(_, :limit), do: true
def can?(_, :offset), do: true
def can?(_, :multitenancy), do: true
def can?(_, {:filter_expr, _}), do: true
def can?(_, :nested_expressions), do: true
def can?(_, {:query_aggregate, :count}), do: true
def can?(_, :sort), do: true
def can?(_, :distinct), do: true
def can?(_, {:sort, _}), do: true
def can?(_, _), do: false
@impl true
def in_transaction?(resource) do
repo(resource).in_transaction?()
end
@impl true
def limit(query, nil, _), do: {:ok, query}
def limit(query, limit, _resource) do
{:ok, from(row in query, limit: ^limit)}
end
@impl true
def source(resource) do
AshPostgres.table(resource) || ""
end
@impl true
def set_context(resource, data_layer_query, context) do
if context[:data_layer][:table] do
{:ok,
%{
data_layer_query
| from: %{data_layer_query.from | source: {context[:data_layer][:table], resource}}
}}
else
{:ok, data_layer_query}
end
end
@impl true
def offset(query, nil, _), do: query
def offset(%{offset: old_offset} = query, 0, _resource) when old_offset in [0, nil] do
{:ok, query}
end
def offset(query, offset, _resource) do
{:ok, from(row in query, offset: ^offset)}
end
@impl true
def run_query(query, resource) do
if AshPostgres.polymorphic?(resource) && no_table?(query) do
raise_table_error!(resource, :read)
else
{:ok, repo(resource).all(query, repo_opts(query))}
end
end
defp no_table?(%{from: %{source: {"", _}}}), do: true
defp no_table?(_), do: false
defp repo_opts(%Ash.Changeset{tenant: tenant, resource: resource}) do
repo_opts(%{tenant: tenant, resource: resource})
end
defp repo_opts(%{tenant: tenant, resource: resource}) when not is_nil(tenant) do
if Ash.Resource.Info.multitenancy_strategy(resource) == :context do
[prefix: tenant]
else
[]
end
end
defp repo_opts(_), do: []
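  # Sketch of the effect above: a context-multitenant resource with tenant
  # "org_123" (hypothetical value) yields [prefix: "org_123"], which Ecto uses as
  # the schema prefix for the Repo call; everything else falls through to [].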
@impl true
def functions(resource) do
config = repo(resource).config()
functions = [AshPostgres.Functions.Type, AshPostgres.Functions.Fragment]
if "pg_trgm" in (config[:installed_extensions] || []) do
functions ++
[
AshPostgres.Functions.TrigramSimilarity
]
else
functions
end
end
@impl true
def run_aggregate_query(query, aggregates, resource) do
subquery = from(row in subquery(query), select: %{})
query =
Enum.reduce(
aggregates,
subquery,
&add_subquery_aggregate_select(&2, &1, resource)
)
{:ok, repo(resource).one(query, repo_opts(query))}
end
@impl true
def set_tenant(_resource, query, tenant) do
{:ok, Ecto.Query.put_query_prefix(query, to_string(tenant))}
end
@impl true
def run_aggregate_query_with_lateral_join(
query,
aggregates,
root_data,
source_resource,
destination_resource,
source_field,
destination_field
) do
lateral_join_query =
lateral_join_query(
query,
root_data,
source_resource,
source_field,
destination_field
)
subquery = from(row in subquery(lateral_join_query), select: %{})
query =
Enum.reduce(
aggregates,
subquery,
&add_subquery_aggregate_select(&2, &1, destination_resource)
)
{:ok, repo(source_resource).one(query, repo_opts(query))}
end
@impl true
def run_query_with_lateral_join(
query,
root_data,
source_resource,
_destination_resource,
source_field,
destination_field
) do
query =
lateral_join_query(
query,
root_data,
source_resource,
source_field,
destination_field
)
{:ok, repo(source_resource).all(query, repo_opts(query))}
end
defp lateral_join_query(
query,
root_data,
source_resource,
source_field,
destination_field
) do
source_values = Enum.map(root_data, &Map.get(&1, source_field))
subquery =
subquery(
from(destination in query,
where:
field(destination, ^destination_field) ==
field(parent_as(:source_record), ^source_field)
)
)
source_resource
|> Ash.Query.new()
|> Ash.Query.data_layer_query()
|> case do
{:ok, data_layer_query} ->
from(source in data_layer_query,
as: :source_record,
where: field(source, ^source_field) in ^source_values,
inner_lateral_join: destination in ^subquery,
on: field(source, ^source_field) == field(destination, ^destination_field),
select: destination
)
{:error, error} ->
{:error, error}
end
end
@impl true
def resource_to_query(resource, _),
do: Ecto.Queryable.to_query({AshPostgres.table(resource) || "", resource})
@impl true
def create(resource, changeset) do
changeset.data
|> Map.update!(:__meta__, &Map.put(&1, :source, table(resource, changeset)))
|> ecto_changeset(changeset, :create)
|> repo(resource).insert(repo_opts(changeset))
|> handle_errors()
|> case do
{:ok, result} ->
maybe_create_tenant!(resource, result)
{:ok, result}
{:error, error} ->
{:error, error}
end
end
defp maybe_create_tenant!(resource, result) do
if AshPostgres.manage_tenant_create?(resource) do
tenant_name = tenant_name(resource, result)
AshPostgres.MultiTenancy.create_tenant!(tenant_name, repo(resource))
else
:ok
end
end
defp maybe_update_tenant(resource, changeset, result) do
if AshPostgres.manage_tenant_update?(resource) do
changing_tenant_name? =
resource
|> AshPostgres.manage_tenant_template()
|> Enum.filter(&is_atom/1)
|> Enum.any?(&Ash.Changeset.changing_attribute?(changeset, &1))
if changing_tenant_name? do
old_tenant_name = tenant_name(resource, changeset.data)
new_tenant_name = tenant_name(resource, result)
AshPostgres.MultiTenancy.rename_tenant(repo(resource), old_tenant_name, new_tenant_name)
end
end
:ok
end
defp tenant_name(resource, result) do
resource
|> AshPostgres.manage_tenant_template()
|> Enum.map_join(fn item ->
if is_binary(item) do
item
else
result
|> Map.get(item)
|> to_string()
end
end)
end
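  # e.g. with the manage_tenant template ["customer_", :id] from the DSL docs and
  # a record whose id is 10, tenant_name/2 interpolates to "customer_10".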
defp handle_errors({:error, %Ecto.Changeset{errors: errors}}) do
{:error, Enum.map(errors, &to_ash_error/1)}
end
defp handle_errors({:ok, val}), do: {:ok, val}
defp to_ash_error({field, {message, vars}}) do
Ash.Error.Changes.InvalidAttribute.exception(field: field, message: message, vars: vars)
end
defp ecto_changeset(record, changeset, type) do
ecto_changeset =
record
|> set_table(changeset, type)
|> Ecto.Changeset.change(changeset.attributes)
case type do
:create ->
ecto_changeset
|> add_unique_indexes(record.__struct__, changeset.tenant, changeset)
|> add_my_foreign_key_constraints(record.__struct__)
|> add_configured_foreign_key_constraints(record.__struct__)
type when type in [:upsert, :update] ->
ecto_changeset
|> add_unique_indexes(record.__struct__, changeset.tenant, changeset)
|> add_my_foreign_key_constraints(record.__struct__)
|> add_related_foreign_key_constraints(record.__struct__)
|> add_configured_foreign_key_constraints(record.__struct__)
:delete ->
ecto_changeset
|> add_unique_indexes(record.__struct__, changeset.tenant, changeset)
|> add_related_foreign_key_constraints(record.__struct__)
|> add_configured_foreign_key_constraints(record.__struct__)
end
end
defp set_table(record, changeset, operation) do
if AshPostgres.polymorphic?(record.__struct__) do
table = changeset.context[:data_layer][:table] || AshPostgres.table(record.__struct__)
if table do
Ecto.put_meta(record, source: table)
else
raise_table_error!(changeset.resource, operation)
end
else
record
end
end
defp add_related_foreign_key_constraints(changeset, resource) do
# TODO: this isn't guaranteed to catch all of them, because if something is related to this
# schema and there is no back-relation, then its foreign key constraints won't be caught
resource
|> Ash.Resource.Info.relationships()
|> Enum.map(& &1.destination)
|> Enum.uniq()
|> Enum.flat_map(fn related ->
related
|> Ash.Resource.Info.relationships()
|> Enum.filter(&(&1.destination == resource))
|> Enum.map(&Map.take(&1, [:source, :source_field, :destination_field]))
end)
|> Enum.uniq()
|> Enum.reduce(changeset, fn %{
source: source,
source_field: source_field,
destination_field: destination_field
},
changeset ->
Ecto.Changeset.foreign_key_constraint(changeset, destination_field,
name: "#{AshPostgres.table(source)}_#{source_field}_fkey",
message: "would leave records behind"
)
end)
end
defp add_my_foreign_key_constraints(changeset, resource) do
resource
|> Ash.Resource.Info.relationships()
|> Enum.reduce(changeset, &Ecto.Changeset.foreign_key_constraint(&2, &1.source_field))
end
defp add_configured_foreign_key_constraints(changeset, resource) do
resource
|> AshPostgres.foreign_key_names()
|> case do
{m, f, a} -> List.wrap(apply(m, f, [changeset | a]))
value -> List.wrap(value)
end
|> Enum.reduce(changeset, fn
{key, name}, changeset ->
Ecto.Changeset.foreign_key_constraint(changeset, key, name: name)
{key, name, message}, changeset ->
Ecto.Changeset.foreign_key_constraint(changeset, key, name: name, message: message)
end)
end
defp add_unique_indexes(changeset, resource, tenant, ash_changeset) do
changeset =
resource
|> Ash.Resource.Info.identities()
|> Enum.reduce(changeset, fn identity, changeset ->
name =
if tenant do
"#{tenant}_#{table(resource, ash_changeset)}_#{identity.name}_unique_index"
else
"#{table(resource, ash_changeset)}_#{identity.name}_unique_index"
end
opts =
if Map.get(identity, :message) do
[name: name, message: identity.message]
else
[name: name]
end
Ecto.Changeset.unique_constraint(changeset, identity.keys, opts)
end)
names =
resource
|> AshPostgres.unique_index_names()
|> case do
{m, f, a} -> List.wrap(apply(m, f, [changeset | a]))
value -> List.wrap(value)
end
names = [
{Ash.Resource.Info.primary_key(resource), table(resource, ash_changeset) <> "_pkey"} | names
]
Enum.reduce(names, changeset, fn
{keys, name}, changeset ->
Ecto.Changeset.unique_constraint(changeset, List.wrap(keys), name: name)
{keys, name, message}, changeset ->
Ecto.Changeset.unique_constraint(changeset, List.wrap(keys), name: name, message: message)
end)
end
@impl true
def upsert(resource, changeset) do
repo_opts =
changeset
|> repo_opts()
|> Keyword.put(:on_conflict, {:replace, Map.keys(changeset.attributes)})
|> Keyword.put(:conflict_target, Ash.Resource.Info.primary_key(resource))
if AshPostgres.manage_tenant_update?(resource) do
{:error, "Cannot currently upsert a resource that owns a tenant"}
else
changeset.data
|> Map.update!(:__meta__, &Map.put(&1, :source, table(resource, changeset)))
|> ecto_changeset(changeset, :upsert)
|> repo(resource).insert(repo_opts)
|> handle_errors()
end
end
@impl true
def update(resource, changeset) do
changeset.data
|> Map.update!(:__meta__, &Map.put(&1, :source, table(resource, changeset)))
|> ecto_changeset(changeset, :update)
|> repo(resource).update(repo_opts(changeset))
|> handle_errors()
|> case do
{:ok, result} ->
maybe_update_tenant(resource, changeset, result)
{:ok, result}
{:error, error} ->
{:error, error}
end
end
@impl true
def destroy(resource, %{data: record} = changeset) do
record
|> ecto_changeset(changeset, :delete)
|> repo(resource).delete(repo_opts(changeset))
|> case do
{:ok, _record} ->
:ok
{:error, error} ->
handle_errors({:error, error})
end
end
@impl true
def sort(query, sort, resource) do
query = default_bindings(query, resource)
sort
|> sanitize_sort()
|> Enum.reduce({:ok, query}, fn {order, sort}, {:ok, query} ->
binding =
case Map.fetch(query.__ash_bindings__.aggregates, sort) do
{:ok, binding} ->
binding
:error ->
0
end
new_query =
Map.update!(query, :order_bys, fn order_bys ->
order_bys = order_bys || []
sort_expr = %Ecto.Query.QueryExpr{
expr: [
{order, {{:., [], [{:&, [], [binding]}, sort]}, [], []}}
]
}
order_bys ++ [sort_expr]
end)
{:ok, new_query}
end)
end
@impl true
def distinct(query, distinct_on, resource) do
query = default_bindings(query, resource)
query =
query
|> default_bindings(resource)
|> Map.update!(:distinct, fn distinct ->
distinct =
distinct ||
%Ecto.Query.QueryExpr{
expr: []
}
expr =
Enum.map(distinct_on, fn distinct_on_field ->
binding =
case Map.fetch(query.__ash_bindings__.aggregates, distinct_on_field) do
{:ok, binding} ->
binding
:error ->
0
end
{:asc, {{:., [], [{:&, [], [binding]}, distinct_on_field]}, [], []}}
end)
%{distinct | expr: distinct.expr ++ expr}
end)
{:ok, query}
end
defp sanitize_sort(sort) do
sort
|> List.wrap()
|> Enum.map(fn
{sort, :asc_nils_last} -> {:asc_nulls_last, sort}
{sort, :asc_nils_first} -> {:asc_nulls_first, sort}
{sort, :desc_nils_last} -> {:desc_nulls_last, sort}
{sort, :desc_nils_first} -> {:desc_nulls_first, sort}
{sort, order} -> {order, sort}
sort -> sort
end)
end
@impl true
def filter(query, %{expression: false}, _resource) do
impossible_query = from(row in query, where: false)
{:ok, Map.put(impossible_query, :__impossible__, true)}
end
def filter(query, filter, _resource) do
relationship_paths =
filter
|> Filter.relationship_paths()
|> Enum.map(fn path ->
if can_inner_join?(path, filter) do
{:inner, relationship_path_to_relationships(filter.resource, path)}
else
{:left, relationship_path_to_relationships(filter.resource, path)}
end
end)
new_query =
query
|> join_all_relationships(relationship_paths)
|> add_filter_expression(filter)
{:ok, new_query}
end
defp default_bindings(query, resource) do
Map.put_new(query, :__ash_bindings__, %{
current: Enum.count(query.joins) + 1,
aggregates: %{},
bindings: %{0 => %{path: [], type: :root, source: resource}}
})
end
@known_inner_join_operators [
Eq,
GreaterThan,
GreaterThanOrEqual,
In,
LessThanOrEqual,
LessThan,
NotEq
]
|> Enum.map(&Module.concat(Ash.Query.Operator, &1))
@known_inner_join_functions [
Ago,
Contains
]
|> Enum.map(&Module.concat(Ash.Query.Function, &1))
@known_inner_join_predicates @known_inner_join_functions ++ @known_inner_join_operators
# For consistency's sake, this logic was removed.
# We can revisit it sometime though.
defp can_inner_join?(path, expr, seen_an_or? \\ false)
defp can_inner_join?(path, %{expression: expr}, seen_an_or?),
do: can_inner_join?(path, expr, seen_an_or?)
defp can_inner_join?(_path, expr, _seen_an_or?) when expr in [nil, true, false], do: true
defp can_inner_join?(path, %BooleanExpression{op: :and, left: left, right: right}, seen_an_or?) do
can_inner_join?(path, left, seen_an_or?) || can_inner_join?(path, right, seen_an_or?)
end
defp can_inner_join?(path, %BooleanExpression{op: :or, left: left, right: right}, _) do
can_inner_join?(path, left, true) && can_inner_join?(path, right, true)
end
defp can_inner_join?(
_,
%Not{},
_
) do
false
end
defp can_inner_join?(
search_path,
%struct{__operator__?: true, left: %Ref{relationship_path: relationship_path}},
seen_an_or?
)
when search_path == relationship_path and struct in @known_inner_join_predicates do
not seen_an_or?
end
defp can_inner_join?(
search_path,
%struct{__operator__?: true, right: %Ref{relationship_path: relationship_path}},
seen_an_or?
)
when search_path == relationship_path and struct in @known_inner_join_predicates do
not seen_an_or?
end
defp can_inner_join?(
search_path,
%struct{__function__?: true, arguments: arguments},
seen_an_or?
)
when struct in @known_inner_join_predicates do
if Enum.any?(arguments, &match?(%Ref{relationship_path: ^search_path}, &1)) do
not seen_an_or?
else
true
end
end
defp can_inner_join?(_, _, _), do: false
@impl true
def add_aggregate(query, aggregate, _resource) do
resource = aggregate.resource
query = default_bindings(query, resource)
{query, binding} =
case get_binding(resource, aggregate.relationship_path, query, :aggregate) do
nil ->
relationship = Ash.Resource.Info.relationship(resource, aggregate.relationship_path)
subquery = aggregate_subquery(relationship, aggregate)
new_query =
join_all_relationships(
query,
[
{{:aggregate, aggregate.name, subquery},
relationship_path_to_relationships(resource, aggregate.relationship_path)}
]
)
{new_query, get_binding(resource, aggregate.relationship_path, new_query, :aggregate)}
binding ->
{query, binding}
end
query_with_aggregate_binding =
put_in(
query.__ash_bindings__.aggregates,
Map.put(query.__ash_bindings__.aggregates, aggregate.name, binding)
)
new_query =
query_with_aggregate_binding
|> add_aggregate_to_subquery(resource, aggregate, binding)
|> select_aggregate(resource, aggregate)
{:ok, new_query}
end
defp select_aggregate(query, resource, aggregate) do
binding = get_binding(resource, aggregate.relationship_path, query, :aggregate)
query =
if query.select do
query
else
from(row in query,
select: row,
select_merge: %{aggregates: %{}}
)
end
%{query | select: add_to_select(query.select, binding, aggregate)}
end
defp add_to_select(
%{expr: {:merge, _, [first, {:%{}, _, [{:aggregates, {:%{}, [], fields}}]}]}} = select,
binding,
%{load: nil} = aggregate
) do
accessed =
if aggregate.kind == :first do
{:fragment, [],
[
expr: {{:., [], [{:&, [], [binding]}, aggregate.name]}, [], []},
raw: "[1]"
]}
else
{{:., [], [{:&, [], [binding]}, aggregate.name]}, [], []}
end
field =
{:type, [],
[
accessed,
Ash.Type.ecto_type(aggregate.type)
]}
field_with_default =
if is_nil(aggregate.default_value) do
field
else
{:coalesce, [],
[
field,
aggregate.default_value
]}
end
new_fields = [
{aggregate.name, field_with_default}
| fields
]
%{select | expr: {:merge, [], [first, {:%{}, [], [{:aggregates, {:%{}, [], new_fields}}]}]}}
end
defp add_to_select(
%{expr: expr} = select,
binding,
%{load: load_as} = aggregate
) do
accessed =
if aggregate.kind == :first do
{:fragment, [],
[
raw: "",
expr: {{:., [], [{:&, [], [binding]}, aggregate.name]}, [], []},
raw: "[1]"
]}
else
{{:., [], [{:&, [], [binding]}, aggregate.name]}, [], []}
end
field =
{:type, [],
[
accessed,
Ash.Type.ecto_type(aggregate.type)
]}
field_with_default =
if is_nil(aggregate.default_value) do
field
else
{:coalesce, [],
[
field,
aggregate.default_value
]}
end
%{select | expr: {:merge, [], [expr, {:%{}, [], [{load_as, field_with_default}]}]}}
end
defp add_aggregate_to_subquery(query, resource, aggregate, binding) do
new_joins =
List.update_at(query.joins, binding - 1, fn join ->
aggregate_query =
if aggregate.authorization_filter do
{:ok, filter} =
filter(
join.source.from.source.query,
aggregate.authorization_filter,
Ash.Resource.Info.related(resource, aggregate.relationship_path)
)
filter
else
join.source.from.source.query
end
new_aggregate_query = add_subquery_aggregate_select(aggregate_query, aggregate, resource)
put_in(join.source.from.source.query, new_aggregate_query)
end)
%{
query
| joins: new_joins
}
end
defp aggregate_subquery(relationship, aggregate) do
query =
from(row in relationship.destination,
group_by: ^relationship.destination_field,
select: field(row, ^relationship.destination_field)
)
if aggregate.query && aggregate.query.tenant do
Ecto.Query.put_query_prefix(query, aggregate.query.tenant)
else
query
end
end
defp order_to_postgres_order(dir) do
case dir do
:asc -> nil
:asc_nils_last -> " ASC NULLS LAST"
:asc_nils_first -> " ASC NULLS FIRST"
:desc -> " DESC"
:desc_nils_last -> " DESC NULLS LAST"
:desc_nils_first -> " DESC NULLS FIRST"
end
end
defp add_subquery_aggregate_select(query, %{kind: :first} = aggregate, _resource) do
query = default_bindings(query, aggregate.resource)
key = aggregate.field
type = Ash.Type.ecto_type(aggregate.type)
field =
if aggregate.query && aggregate.query.sort && aggregate.query.sort != [] do
sort_expr =
aggregate.query.sort
|> Enum.map(fn {sort, order} ->
case order_to_postgres_order(order) do
nil ->
[expr: {{:., [], [{:&, [], [0]}, sort]}, [], []}]
order ->
[expr: {{:., [], [{:&, [], [0]}, sort]}, [], []}, raw: order]
end
end)
|> Enum.intersperse(raw: ", ")
|> List.flatten()
{:fragment, [],
[
raw: "array_agg(",
expr: {{:., [], [{:&, [], [0]}, key]}, [], []},
raw: "ORDER BY "
] ++
sort_expr ++ [raw: ")"]}
else
{:fragment, [],
[
raw: "array_agg(",
expr: {{:., [], [{:&, [], [0]}, key]}, [], []},
raw: ")"
]}
end
{params, filtered} =
if aggregate.query && aggregate.query.filter &&
not match?(%Ash.Filter{expression: nil}, aggregate.query.filter) do
{params, expr} =
filter_to_expr(
aggregate.query.filter,
query.__ash_bindings__.bindings,
query.select.params
)
{params, {:filter, [], [field, expr]}}
else
{[], field}
end
cast = {:type, [], [filtered, {:array, type}]}
new_expr = {:merge, [], [query.select.expr, {:%{}, [], [{aggregate.name, cast}]}]}
%{query | select: %{query.select | expr: new_expr, params: params}}
end
defp add_subquery_aggregate_select(query, %{kind: kind} = aggregate, resource)
when kind in [:count, :sum] do
query = default_bindings(query, aggregate.resource)
key = aggregate.field || List.first(Ash.Resource.Info.primary_key(resource))
type = Ash.Type.ecto_type(aggregate.type)
field = {kind, [], [{{:., [], [{:&, [], [0]}, key]}, [], []}]}
{params, filtered} =
if aggregate.query && aggregate.query.filter &&
not match?(%Ash.Filter{expression: nil}, aggregate.query.filter) do
{params, expr} =
filter_to_expr(
aggregate.query.filter,
query.__ash_bindings__.bindings,
query.select.params
)
{params, {:filter, [], [field, expr]}}
else
{[], field}
end
cast = {:type, [], [filtered, type]}
new_expr = {:merge, [], [query.select.expr, {:%{}, [], [{aggregate.name, cast}]}]}
%{query | select: %{query.select | expr: new_expr, params: params}}
end
defp relationship_path_to_relationships(resource, path, acc \\ [])
defp relationship_path_to_relationships(_resource, [], acc), do: Enum.reverse(acc)
defp relationship_path_to_relationships(resource, [relationship | rest], acc) do
relationship = Ash.Resource.Info.relationship(resource, relationship)
relationship_path_to_relationships(relationship.destination, rest, [relationship | acc])
end
defp join_all_relationships(query, relationship_paths, path \\ [], source \\ nil) do
query = default_bindings(query, source)
Enum.reduce(relationship_paths, query, fn
{_join_type, []}, query ->
query
{join_type, [relationship | rest_rels]}, query ->
source = source || relationship.source
current_path = path ++ [relationship]
current_join_type =
case join_type do
{:aggregate, _name, _agg} when rest_rels != [] ->
:left
other ->
other
end
if has_binding?(source, Enum.reverse(current_path), query, current_join_type) do
query
else
joined_query =
join_relationship(
query,
relationship,
Enum.map(path, & &1.name),
current_join_type,
source
)
joined_query_with_distinct = add_distinct(relationship, join_type, joined_query)
join_all_relationships(
joined_query_with_distinct,
[{join_type, rest_rels}],
current_path,
source
)
end
end)
end
defp has_binding?(resource, path, query, {:aggregate, _, _}),
do: has_binding?(resource, path, query, :aggregate)
defp has_binding?(resource, candidate_path, %{__ash_bindings__: _} = query, type) do
Enum.any?(query.__ash_bindings__.bindings, fn
{_, %{path: path, source: source, type: ^type}} ->
Ash.SatSolver.synonymous_relationship_paths?(resource, path, candidate_path, source)
_ ->
false
end)
end
defp has_binding?(_, _, _, _), do: false
defp get_binding(resource, path, %{__ash_bindings__: _} = query, type) do
paths =
Enum.flat_map(query.__ash_bindings__.bindings, fn
{binding, %{path: path, type: ^type}} ->
[{binding, path}]
_ ->
[]
end)
Enum.find_value(paths, fn {binding, candidate_path} ->
Ash.SatSolver.synonymous_relationship_paths?(resource, candidate_path, path) && binding
end)
end
defp get_binding(_, _, _, _), do: nil
defp add_distinct(relationship, join_type, joined_query) do
if relationship.cardinality == :many and join_type == :left && !joined_query.distinct do
from(row in joined_query,
distinct: ^Ash.Resource.Info.primary_key(relationship.destination)
)
else
joined_query
end
end
defp join_relationship(query, relationship, path, join_type, source) do
case Map.get(query.__ash_bindings__.bindings, path) do
%{type: existing_join_type} when join_type != existing_join_type ->
raise "unreachable?"
nil ->
do_join_relationship(query, relationship, path, join_type, source)
_ ->
query
end
end
defp do_join_relationship(query, %{type: :many_to_many} = relationship, path, kind, source) do
relationship_through = maybe_get_resource_query(relationship.through)
relationship_destination =
Ecto.Queryable.to_query(maybe_get_resource_query(relationship.destination))
current_binding =
Enum.find_value(query.__ash_bindings__.bindings, 0, fn {binding, data} ->
if data.type == kind && data.path == Enum.reverse(path) do
binding
end
end)
new_query =
case kind do
{:aggregate, _, subquery} ->
subquery =
subquery(
from(destination in subquery,
where:
field(destination, ^relationship.destination_field) ==
field(
parent_as(:rel_through),
^relationship.destination_field_on_join_table
)
)
)
from([{row, current_binding}] in query,
left_join: through in ^relationship_through,
as: :rel_through,
on:
field(row, ^relationship.source_field) ==
field(through, ^relationship.source_field_on_join_table),
left_lateral_join: destination in ^subquery,
on:
field(destination, ^relationship.destination_field) ==
field(through, ^relationship.destination_field_on_join_table)
)
:inner ->
from([{row, current_binding}] in query,
join: through in ^relationship_through,
on:
field(row, ^relationship.source_field) ==
field(through, ^relationship.source_field_on_join_table),
join: destination in ^relationship_destination,
on:
field(destination, ^relationship.destination_field) ==
field(through, ^relationship.destination_field_on_join_table)
)
_ ->
from([{row, current_binding}] in query,
left_join: through in ^relationship_through,
on:
field(row, ^relationship.source_field) ==
field(through, ^relationship.source_field_on_join_table),
left_join: destination in ^relationship_destination,
on:
field(destination, ^relationship.destination_field) ==
field(through, ^relationship.destination_field_on_join_table)
)
end
join_path =
Enum.reverse([String.to_existing_atom(to_string(relationship.name) <> "_join_assoc") | path])
full_path = Enum.reverse([relationship.name | path])
binding_data =
case kind do
{:aggregate, name, _agg} ->
%{type: :aggregate, name: name, path: full_path, source: source}
_ ->
%{type: kind, path: full_path, source: source}
end
new_query
|> add_binding(%{path: join_path, type: :left, source: source})
|> add_binding(binding_data)
end
defp do_join_relationship(query, relationship, path, kind, source) do
relationship_destination =
Ecto.Queryable.to_query(maybe_get_resource_query(relationship.destination))
current_binding =
Enum.find_value(query.__ash_bindings__.bindings, 0, fn {binding, data} ->
if data.type == kind && data.path == Enum.reverse(path) do
binding
end
end)
new_query =
case kind do
{:aggregate, _, subquery} ->
subquery =
from(
sub in subquery(
from(destination in subquery,
where:
field(destination, ^relationship.destination_field) ==
field(parent_as(:rel_source), ^relationship.source_field)
)
),
select: field(sub, ^relationship.destination_field)
)
from([{row, current_binding}] in query,
as: :rel_source,
left_lateral_join: destination in ^subquery,
on:
field(row, ^relationship.source_field) ==
field(destination, ^relationship.destination_field)
)
:inner ->
from([{row, current_binding}] in query,
join: destination in ^relationship_destination,
on:
field(row, ^relationship.source_field) ==
field(destination, ^relationship.destination_field)
)
_ ->
from([{row, current_binding}] in query,
left_join: destination in ^relationship_destination,
on:
field(row, ^relationship.source_field) ==
field(destination, ^relationship.destination_field)
)
end
full_path = Enum.reverse([relationship.name | path])
binding_data =
case kind do
{:aggregate, name, _agg} ->
%{type: :aggregate, name: name, path: full_path, source: source}
_ ->
%{type: kind, path: full_path, source: source}
end
new_query
|> add_binding(binding_data)
end
defp add_filter_expression(query, filter) do
wheres =
filter
|> split_and_statements()
|> Enum.map(fn filter ->
{params, expr} = filter_to_expr(filter, query.__ash_bindings__.bindings, [])
%Ecto.Query.BooleanExpr{
expr: expr,
op: :and,
params: params
}
end)
%{query | wheres: query.wheres ++ wheres}
end
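  # `split_and_statements/1` splits the filter on its top-level ANDs so each
  # branch becomes its own where-clause. Double negation is dropped, and a
  # negated OR is rewritten via De Morgan's law so its branches can still be
  # split. A hedged illustration, with `f` and `g` standing for arbitrary
  # sub-expressions:
  #
  #     not (f or g)  =>  (not f) and (not g)  =>  [not f, not g]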
defp split_and_statements(%Filter{expression: expression}) do
split_and_statements(expression)
end
defp split_and_statements(%BooleanExpression{op: :and, left: left, right: right}) do
split_and_statements(left) ++ split_and_statements(right)
end
defp split_and_statements(%Not{expression: %Not{expression: expression}}) do
split_and_statements(expression)
end
defp split_and_statements(%Not{
expression: %BooleanExpression{op: :or, left: left, right: right}
}) do
split_and_statements(%BooleanExpression{
op: :and,
left: %Not{expression: left},
right: %Not{expression: right}
})
end
defp split_and_statements(other), do: [other]
defp filter_to_expr(expr, bindings, params, embedded? \\ false, type \\ nil)
defp filter_to_expr(%Filter{expression: expression}, bindings, params, embedded?, type) do
filter_to_expr(expression, bindings, params, embedded?, type)
end
# A nil filter means "everything"
defp filter_to_expr(nil, _, _, _, _), do: {[], true}
# A true filter means "everything"
defp filter_to_expr(true, _, _, _, _), do: {[], true}
# A false filter means "nothing"
defp filter_to_expr(false, _, _, _, _), do: {[], false}
defp filter_to_expr(expression, bindings, params, embedded?, type) do
do_filter_to_expr(expression, bindings, params, embedded?, type)
end
defp do_filter_to_expr(expr, bindings, params, embedded?, type \\ nil)
defp do_filter_to_expr(
%BooleanExpression{op: op, left: left, right: right},
bindings,
params,
embedded?,
_type
) do
{params, left_expr} = do_filter_to_expr(left, bindings, params, embedded?)
{params, right_expr} = do_filter_to_expr(right, bindings, params, embedded?)
{params, {op, [], [left_expr, right_expr]}}
end
defp do_filter_to_expr(%Not{expression: expression}, bindings, params, embedded?, _type) do
{params, new_expression} = do_filter_to_expr(expression, bindings, params, embedded?)
{params, {:not, [], [new_expression]}}
end
defp do_filter_to_expr(
%TrigramSimilarity{arguments: [arg1, arg2], embedded?: pred_embedded?},
bindings,
params,
embedded?,
_type
) do
{params, arg1} = do_filter_to_expr(arg1, bindings, params, pred_embedded? || embedded?)
{params, arg2} = do_filter_to_expr(arg2, bindings, params, pred_embedded? || embedded?)
{params, {:fragment, [], [raw: "similarity(", expr: arg1, raw: ", ", expr: arg2, raw: ")"]}}
end
defp do_filter_to_expr(
%Type{arguments: [arg1, arg2], embedded?: pred_embedded?},
bindings,
params,
embedded?,
_type
)
when pred_embedded? or embedded? do
{params, arg1} = do_filter_to_expr(arg1, bindings, params, true)
{params, arg2} = do_filter_to_expr(arg2, bindings, params, true)
case maybe_ecto_type(arg2) do
nil ->
{params, {:type, [], [arg1, arg2]}}
type ->
case arg1 do
%{__predicate__?: _} ->
{params, {:type, [], [arg1, arg2]}}
value ->
{params, %Ecto.Query.Tagged{value: value, type: type}}
end
end
end
defp do_filter_to_expr(
%Type{arguments: [arg1, arg2], embedded?: pred_embedded?},
bindings,
params,
embedded?,
_type
) do
{params, arg1} = do_filter_to_expr(arg1, bindings, params, pred_embedded? || embedded?)
{params, arg2} = do_filter_to_expr(arg2, bindings, params, pred_embedded? || embedded?)
arg2 = maybe_ecto_type(arg2)
{params, {:type, [], [arg1, arg2]}}
end
defp do_filter_to_expr(
%Fragment{arguments: arguments, embedded?: pred_embedded?},
bindings,
params,
embedded?,
_type
) do
{params, fragment_data} =
Enum.reduce(arguments, {params, []}, fn
{:raw, str}, {params, fragment_data} ->
{params, fragment_data ++ [{:raw, str}]}
{:expr, expr}, {params, fragment_data} ->
{params, expr} = do_filter_to_expr(expr, bindings, params, pred_embedded? || embedded?)
{params, fragment_data ++ [{:expr, expr}]}
end)
{params, {:fragment, [], fragment_data}}
end
defp do_filter_to_expr(
%IsNil{left: left, right: right, embedded?: pred_embedded?},
bindings,
params,
embedded?,
_type
) do
{params, left_expr} = do_filter_to_expr(left, bindings, params, pred_embedded? || embedded?)
{params, right_expr} = do_filter_to_expr(right, bindings, params, pred_embedded? || embedded?)
{params,
{:==, [],
[
{:is_nil, [], [left_expr]},
right_expr
]}}
end
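  # `Ago` compiles to Ecto's `datetime_add/3`: "now" is appended as a pinned
  # parameter and the interval is negated. A hedged illustration (the filter
  # expression `ago(3, :day)` is hypothetical):
  #
  #     datetime_add(^DateTime.utc_now(), -3, "day")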
defp do_filter_to_expr(
%Ago{arguments: [left, right], embedded?: _pred_embedded?},
_bindings,
params,
_embedded?,
_type
)
when is_integer(left) and (is_binary(right) or is_atom(right)) do
{params ++ [{DateTime.utc_now(), {:param, :any_datetime}}],
{:datetime_add, [], [{:^, [], [Enum.count(params)]}, left * -1, to_string(right)]}}
end
defp do_filter_to_expr(
%Contains{arguments: [left, %Ash.CiString{} = right], embedded?: pred_embedded?},
bindings,
params,
embedded?,
type
) do
do_filter_to_expr(
%Fragment{
embedded?: pred_embedded?,
arguments: [
raw: "strpos(",
expr: left,
raw: "::citext, ",
expr: right,
raw: ") > 0"
]
},
bindings,
params,
embedded?,
type
)
end
defp do_filter_to_expr(
%Contains{arguments: [left, right], embedded?: pred_embedded?},
bindings,
params,
embedded?,
type
) do
do_filter_to_expr(
%Fragment{
embedded?: pred_embedded?,
arguments: [
raw: "strpos(",
expr: left,
raw: ", ",
expr: right,
raw: ") > 0"
]
},
bindings,
params,
embedded?,
type
)
end
defp do_filter_to_expr(
%mod{
__predicate__?: _,
left: left,
right: right,
embedded?: pred_embedded?,
operator: op
},
bindings,
params,
embedded?,
_type
) do
    # The type information derived from one side of the expression is used to
    # cast the opposite side, preferring the less vague candidate.
    {left_type, right_type} =
case determine_type(mod, left) do
nil ->
case determine_type(mod, right, true) do
nil ->
{nil, nil}
left_type ->
{left_type, nil}
end
right_type ->
if vague?(right_type) do
case determine_type(mod, right, true) do
nil ->
{nil, right_type}
left_type ->
{left_type, nil}
end
else
{nil, right_type}
end
end
{params, left_expr} =
do_filter_to_expr(left, bindings, params, pred_embedded? || embedded?, left_type)
{params, right_expr} =
do_filter_to_expr(right, bindings, params, pred_embedded? || embedded?, right_type)
{params,
{op, [],
[
left_expr,
right_expr
]}}
end
defp do_filter_to_expr(
%Ref{attribute: %{name: name}} = ref,
bindings,
params,
_embedded?,
_type
) do
{params, {{:., [], [{:&, [], [ref_binding(ref, bindings)]}, name]}, [], []}}
end
defp do_filter_to_expr({:embed, other}, _bindings, params, _true, _type) do
{params, other}
end
defp do_filter_to_expr(%Ash.CiString{string: string}, bindings, params, embedded?, type) do
do_filter_to_expr(
%Fragment{
embedded?: embedded?,
arguments: [
raw: "",
expr: string,
raw: "::citext"
]
},
bindings,
params,
embedded?,
type
)
end
defp do_filter_to_expr(%MapSet{} = mapset, bindings, params, embedded?, type) do
do_filter_to_expr(Enum.to_list(mapset), bindings, params, embedded?, type)
end
  # Inside embedded expressions values are inlined as-is.
  defp do_filter_to_expr(other, _bindings, params, true, _type) do
{params, other}
end
  # Otherwise the value becomes a positional query parameter, cast to the
  # determined type (defaulting to :any).
  defp do_filter_to_expr(value, _bindings, params, false, type) do
type = type || :any
value = last_ditch_cast(value, type)
{params ++ [{value, type}], {:^, [], [Enum.count(params)]}}
end
defp maybe_ecto_type({:array, type}), do: {:array, maybe_ecto_type(type)}
defp maybe_ecto_type(type) when is_atom(type) do
if Ash.Type.ash_type?(type) do
Ash.Type.ecto_type(type)
end
end
defp maybe_ecto_type(_type), do: nil
defp last_ditch_cast(value, :string) when is_atom(value) do
to_string(value)
end
defp last_ditch_cast(value, _type) do
value
end
  # Derives an Ecto cast type for the operand opposite a `%Ref{}`, based on
  # the operator module's `types/0` specification. `flip?` reverses per-side
  # type lists so the same spec serves both operand orders.
  defp determine_type(mod, ref, flip? \\ false)
defp determine_type(mod, %Ref{attribute: %{type: type}}, flip?) do
Enum.find_value(mod.types(), fn types ->
types =
case types do
:same ->
[type]
:any ->
[]
other when is_list(other) ->
other =
if flip? do
Enum.reverse(other)
else
other
end
Enum.map(other, fn
{:array, :any} ->
{:in, :any}
{:array, :same} ->
{:in, type}
{:array, type} ->
{:in, type}
type ->
type
end)
other ->
[other]
end
types
|> Enum.sort_by(&vague?/1)
|> Enum.at(0)
|> case do
nil ->
nil
{:in, :any} ->
{:in, :any}
{:in, type} ->
if Ash.Type.ash_type?(type) do
{:in, Ash.Type.storage_type(type)}
else
{:in, type}
end
type ->
if Ash.Type.ash_type?(type) do
Ash.Type.storage_type(type)
else
type
end
end
end)
end
defp determine_type(_mod, _, _), do: nil
  # A "vague" type carries no useful casting information.
  defp vague?({:in, :any}), do: true
defp vague?(:any), do: true
defp vague?(_), do: false
defp ref_binding(ref, bindings) do
case ref.attribute do
%Ash.Resource.Attribute{} ->
Enum.find_value(bindings, fn {binding, data} ->
data.path == ref.relationship_path && data.type in [:inner, :left, :root] && binding
end)
%Ash.Query.Aggregate{} = aggregate ->
Enum.find_value(bindings, fn {binding, data} ->
data.path == aggregate.relationship_path && data.type == :aggregate && binding
end)
end
end
defp add_binding(query, data) do
current = query.__ash_bindings__.current
bindings = query.__ash_bindings__.bindings
new_ash_bindings = %{
query.__ash_bindings__
| bindings: Map.put(bindings, current, data),
current: current + 1
}
%{query | __ash_bindings__: new_ash_bindings}
end
@impl true
def transaction(resource, func) do
repo(resource).transaction(func)
end
@impl true
def rollback(resource, term) do
repo(resource).rollback(term)
end
defp maybe_get_resource_query(resource) do
case Ash.Query.data_layer_query(Ash.Query.new(resource), only_validate_filter?: false) do
{:ok, query} -> query
{:error, error} -> {:error, error}
end
end
defp table(resource, changeset) do
changeset.context[:data_layer][:table] || AshPostgres.table(resource)
end
defp raise_table_error!(resource, operation) do
if AshPostgres.polymorphic?(resource) do
raise """
Could not determine table for #{operation} on #{inspect(resource)}.
Polymorphic resources require that the `data_layer[:table]` context is provided.
See the guide on polymorphic resources for more information.
"""
else
raise """
Could not determine table for #{operation} on #{inspect(resource)}.
"""
end
end
end
| 28.668386 | 168 | 0.590073 |
7900a025b812c62951db168201f18a530d124429 | 1,535 | ex | Elixir | apps/rex_web/lib/rex_web/handlers/node_handler.ex | baymax42/rex | 7c8571ac308960973fea9e8df77a6f1ad5c16906 | [
"MIT"
] | null | null | null | apps/rex_web/lib/rex_web/handlers/node_handler.ex | baymax42/rex | 7c8571ac308960973fea9e8df77a6f1ad5c16906 | [
"MIT"
] | 16 | 2020-05-18T20:06:29.000Z | 2020-06-08T14:32:11.000Z | apps/rex_web/lib/rex_web/handlers/node_handler.ex | baymax42/rex | 7c8571ac308960973fea9e8df77a6f1ad5c16906 | [
"MIT"
] | null | null | null | require Logger
defmodule RexWeb.NodeHandler do
@moduledoc """
  Logic for node-specific operations.

  `handle_join/1` handles a node joining the rendering: an existing node id
  is validated, while a payload without one registers a new node.
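
  A hedged usage sketch (the payload keys are assumptions based on the
  function clauses below):

      {:ok, %{node_id: id}} = RexWeb.NodeHandler.handle_join(%{"name" => "worker-1"})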
"""
alias RexData.{Worker, Utils}
alias RexData.Worker.Node
@spec handle_join(Node.t()) :: {:ok, map} | {:error, map}
def handle_join(%{"node_id" => node_id}) when node_id != "" do
Logger.info("Trying to join with id: #{node_id}")
case Worker.get_node?(node_id) do
nil ->
{:error,
%{
reason: "not_found",
details: "Node ID does not exist"
}}
_ ->
{:ok, %{node_id: node_id}}
end
end
def handle_join(payload) do
Logger.info("Node info: #{inspect(payload)}")
node_id = Ecto.UUID.generate()
Logger.info("Generated UUID: #{node_id}")
Map.put(payload, "node_id", node_id)
|> Worker.create_node()
|> case do
{:ok, _} ->
Logger.info("Node #{node_id} has joined")
{:ok, %{node_id: node_id}}
{:error, %{errors: errors}} ->
{:error,
%{
reason: "validation",
details: Utils.format_validation_errors(errors)
}}
end
end
  @spec handle_leave!(String.t()) :: :ok
  def handle_leave!(_node_id) do
    # if Nodes.node_exists?(node_id) do
    #   Nodes.get_node(node_id)
    #   |> Nodes.delete_node()
    #   Logger.info("node:#{node_id} has been cleaned up")
    # end
    # Logger.info("node:#{node_id}")
    :ok
  end
end
| 24.758065 | 64 | 0.574593 |
7900c34d77046296d0f8dae5e80d3e5bdce14e30 | 104 | exs | Elixir | test/hooks/paginate_test.exs | ramansah/rummage_ecto | 0f24fdccfe504e3c5b8337698446c17fefc60766 | [
"MIT"
] | 1 | 2019-02-11T19:54:24.000Z | 2019-02-11T19:54:24.000Z | test/hooks/paginate_test.exs | ramansah/rummage_ecto | 0f24fdccfe504e3c5b8337698446c17fefc60766 | [
"MIT"
] | null | null | null | test/hooks/paginate_test.exs | ramansah/rummage_ecto | 0f24fdccfe504e3c5b8337698446c17fefc60766 | [
"MIT"
] | 2 | 2019-11-02T21:36:27.000Z | 2021-03-02T15:58:31.000Z | defmodule Rummage.Ecto.Hook.PaginateTest do
use ExUnit.Case
doctest Rummage.Ecto.Hook.Paginate
end
| 17.333333 | 43 | 0.807692 |
7900c9fdd0f459f51014a9790fe166f1e9ac94f0 | 178 | exs | Elixir | priv/repo/migrations/20171029211840_increase_thread_title.exs | making3/summoner-alerts-service | b560d53cb39048049f52dd99d796eab52544da9d | [
"MIT"
] | null | null | null | priv/repo/migrations/20171029211840_increase_thread_title.exs | making3/summoner-alerts-service | b560d53cb39048049f52dd99d796eab52544da9d | [
"MIT"
] | null | null | null | priv/repo/migrations/20171029211840_increase_thread_title.exs | making3/summoner-alerts-service | b560d53cb39048049f52dd99d796eab52544da9d | [
"MIT"
] | null | null | null | defmodule SAS.Repo.Migrations.IncreaseThreadTitle do
use Ecto.Migration
def change do
alter table(:threads) do
modify :title, :string, size: 300
end
end
end
| 17.8 | 52 | 0.707865 |
7900d4c1b2e5aab3fb028b3cbc100397482752d5 | 1,499 | ex | Elixir | apps/socket/test/support/data_case.ex | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 107 | 2018-10-05T18:20:32.000Z | 2022-02-28T04:02:50.000Z | apps/socket/test/support/data_case.ex | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 33 | 2018-10-05T14:11:18.000Z | 2022-02-10T22:19:18.000Z | apps/socket/test/support/data_case.ex | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 18 | 2019-02-03T03:08:20.000Z | 2021-12-28T04:29:36.000Z | defmodule GrapevineSocket.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias GrapevineData.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import GrapevineSocket.DataCase
import GrapevineSocket.TestHelpers
alias GrapevineSocket.TestHelpers
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(GrapevineData.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(GrapevineData.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transform changeset errors to a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 26.298246 | 77 | 0.691795 |
7900f5d210c9202076fe9619b8ebaed17760ec97 | 8,259 | ex | Elixir | lib/mix/lib/mix/config.ex | evalphobia/elixir | a07a2362e5827b09d8b27be2c1ad2980d25b9768 | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:25.000Z | 2017-07-25T21:46:25.000Z | lib/mix/lib/mix/config.ex | evalphobia/elixir | a07a2362e5827b09d8b27be2c1ad2980d25b9768 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/config.ex | evalphobia/elixir | a07a2362e5827b09d8b27be2c1ad2980d25b9768 | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:48.000Z | 2017-07-25T21:46:48.000Z | defmodule Mix.Config do
@moduledoc ~S"""
Module for defining, reading and merging app configurations.
Most commonly, this module is used to define your own configuration:
use Mix.Config
config :plug,
key1: "value1",
key2: "value2"
import_config "#{Mix.env}.exs"
All `config/*` macros, including `import_config/1`, are used
to help define such configuration files.
Furthermore, this module provides functions like `read!/1`,
`merge/2` and friends which help manipulate configurations
in general.
Configuration set using `Mix.Config` will set the application env, so
that `Application.get_env/3` and other `Application` functions can be used
at run or compile time to retrieve or change the configuration.
For example, the `:key1` value from application `:plug` (see above) can be
retrieved with:
"value1" = Application.fetch_env!(:plug, :key1)
"""
defmodule LoadError do
defexception [:file, :error]
def message(%LoadError{file: file, error: error}) do
"could not load config #{Path.relative_to_cwd(file)}\n " <>
"#{Exception.format_banner(:error, error)}"
end
end
@doc false
defmacro __using__(_) do
quote do
import Mix.Config, only: [config: 2, config: 3, import_config: 1]
{:ok, agent} = Mix.Config.Agent.start_link
var!(config_agent, Mix.Config) = agent
end
end
@doc """
Configures the given application.
Keyword lists are always deep merged.
## Examples
The given `opts` are merged into the existing configuration
for the given `app`. Conflicting keys are overridden by the
ones specified in `opts`. For example, the declaration below:
config :lager,
log_level: :warn,
mode: :truncate
config :lager,
log_level: :info,
threshold: 1024
Will have a final configuration of:
[log_level: :info, mode: :truncate, threshold: 1024]
This final configuration can be retrieved at run or compile time:
Application.get_all_env(:lager)
"""
defmacro config(app, opts) do
quote do
Mix.Config.Agent.merge var!(config_agent, Mix.Config), [{unquote(app), unquote(opts)}]
end
end
@doc """
Configures the given key for the given application.
Keyword lists are always deep merged.
## Examples
The given `opts` are merged into the existing values for `key`
in the given `app`. Conflicting keys are overridden by the
ones specified in `opts`. For example, given the two configurations
below:
config :ecto, Repo,
log_level: :warn,
adapter: Ecto.Adapters.Postgres
config :ecto, Repo,
log_level: :info,
pool_size: 10
the final value of the configuration for the `Repo` key in the `:ecto`
application will be:
[log_level: :info, pool_size: 10, adapter: Ecto.Adapters.Postgres]
This final value can be retrieved at runtime or compile time with:
Application.get_env(:ecto, Repo)
"""
defmacro config(app, key, opts) do
quote do
Mix.Config.Agent.merge var!(config_agent, Mix.Config),
[{unquote(app), [{unquote(key), unquote(opts)}]}]
end
end
@doc ~S"""
Imports configuration from the given file or files.
If `path_or_wildcard` is a wildcard, then all the files
matching that wildcard will be imported; if no file matches
the wildcard, no errors are raised. If `path_or_wildcard` is
not a wildcard but a path to a single file, then that file is
imported; in case the file doesn't exist, an error is raised.
This behaviour is analogous to the one for `read_wildcard!/1`.
If path/wildcard is a relative path/wildcard, it will be expanded relatively
to the directory the current configuration file is in.
## Examples
This is often used to emulate configuration across environments:
import_config "#{Mix.env}.exs"
Or to import files from children in umbrella projects:
import_config "../apps/*/config/config.exs"
"""
defmacro import_config(path_or_wildcard) do
loaded_paths_quote =
unless {:loaded_paths, Mix.Config} in __CALLER__.vars do
quote do
var!(loaded_paths, Mix.Config) = [__ENV__.file]
end
end
quote do
unquote(loaded_paths_quote)
Mix.Config.Agent.merge(
var!(config_agent, Mix.Config),
Mix.Config.read_wildcard!(Path.expand(unquote(path_or_wildcard), __DIR__), var!(loaded_paths, Mix.Config))
)
end
end
@doc """
Reads and validates a configuration file.
`file` is the path to the configuration file to be read. If that file doesn't
exist or if there's an error loading it, a `Mix.Config.LoadError` exception
will be raised.
`loaded_paths` is a list of configuration files that have been previously
read. If `file` exists in `loaded_paths`, a `Mix.Config.LoadError` exception
will be raised.
"""
def read!(file, loaded_paths \\ []) do
try do
if file in loaded_paths do
raise ArgumentError, message: "recursive load of #{file} detected"
end
{config, binding} = Code.eval_string File.read!(file), [{{:loaded_paths, Mix.Config}, [file | loaded_paths]}], [file: file, line: 1]
config = case List.keyfind(binding, {:config_agent, Mix.Config}, 0) do
{_, agent} -> get_config_and_stop_agent(agent)
nil -> config
end
validate!(config)
config
rescue
e in [LoadError] -> reraise(e, System.stacktrace)
e -> reraise(LoadError, [file: file, error: e], System.stacktrace)
end
end
defp get_config_and_stop_agent(agent) do
config = Mix.Config.Agent.get(agent)
Mix.Config.Agent.stop(agent)
config
end
@doc """
Reads many configuration files given by wildcard into a single config.
Raises an error if `path` is a concrete filename (with no wildcards)
but the corresponding file does not exist; if `path` matches no files,
no errors are raised.
`loaded_paths` is a list of configuration files that have been previously
read.
"""
def read_wildcard!(path, loaded_paths \\ []) do
    paths =
      if String.contains?(path, ~w(* ? [ {)) do
        Path.wildcard(path)
      else
        [path]
      end
Enum.reduce(paths, [], &merge(&2, read!(&1, loaded_paths)))
end
@doc """
Persists the given configuration by modifying
the configured applications environment.
`config` should be a list of `{app, app_config}` tuples or a
`%{app => app_config}` map where `app` are the applications to
be configured and `app_config` are the configuration (as key-value
pairs) for each of those applications.
Returns the configured applications.
## Examples
Mix.Config.persist(logger: [level: :error], my_app: [my_config: 1])
#=> [:logger, :my_app]
"""
def persist(config) do
for {app, kw} <- config do
for {k, v} <- kw do
Application.put_env(app, k, v, persistent: true)
end
app
end
end
@doc """
Validates a configuration.
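
  Returns `true` when the configuration is a keyword list of `{app, opts}`
  pairs where each `opts` is itself a keyword list. For example:

      iex> Mix.Config.validate!([app: [key: :value]])
      true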
"""
def validate!(config) do
if is_list(config) do
Enum.all?(config, fn
{app, value} when is_atom(app) ->
if Keyword.keyword?(value) do
true
else
raise ArgumentError,
"expected config for app #{inspect app} to return keyword list, got: #{inspect value}"
end
_ ->
false
end)
else
raise ArgumentError,
"expected config file to return keyword list, got: #{inspect config}"
end
end
@doc """
Merges two configurations.
The configuration of each application is merged together
with the values in the second one having higher preference
than the first in case of conflicts.
## Examples
iex> Mix.Config.merge([app: [k: :v1]], [app: [k: :v2]])
[app: [k: :v2]]
iex> Mix.Config.merge([app1: []], [app2: []])
[app1: [], app2: []]
"""
def merge(config1, config2) do
Keyword.merge(config1, config2, fn _, app1, app2 ->
Keyword.merge(app1, app2, &deep_merge/3)
end)
end
defp deep_merge(_key, value1, value2) do
if Keyword.keyword?(value1) and Keyword.keyword?(value2) do
Keyword.merge(value1, value2, &deep_merge/3)
else
value2
end
end
end
| 27.714765 | 138 | 0.660734 |
79012493a810a1627bd8ced11d716e688670463b | 14,256 | ex | Elixir | lib/tortoise311.ex | ngenic/tortoise311 | 18aa234c26f0e1d7b3dcac5dad562231346a20ef | [
"Apache-2.0"
] | 2 | 2021-12-23T21:51:31.000Z | 2022-02-20T22:29:24.000Z | lib/tortoise311.ex | ngenic/tortoise311 | 18aa234c26f0e1d7b3dcac5dad562231346a20ef | [
"Apache-2.0"
] | 11 | 2021-11-18T01:23:15.000Z | 2022-03-28T18:15:18.000Z | lib/tortoise311.ex | ngenic/tortoise311 | 18aa234c26f0e1d7b3dcac5dad562231346a20ef | [
"Apache-2.0"
] | 3 | 2021-11-19T16:23:38.000Z | 2022-03-18T11:36:54.000Z | defmodule Tortoise311 do
@moduledoc """
  An MQTT client for Elixir.
`Tortoise311` provides ways of publishing messages to, and receiving
messages from one or many MQTT brokers via TCP or SSL. The design
philosophy of Tortoise311 is to hide the protocol specific details from
the user, and expose interfaces and a connection life cycle that
should feel natural to Elixir, while not limiting the capability of
what one can do with the MQTT protocol.
First off, connection to a broker happens through a connection
specification. This results in a process that can be supervised,
either by the application the connection should live and die with,
or by being supervised by the Tortoise311 application itself. Once the
connection is established the Tortoise311 application should do its
  best to keep that connection open by automatically sending keep
  alive messages (as the protocol specifies) and eventually attempting
  to reconnect if the connection should drop.
Secondly, a connection is specified with a user defined callback
  module, following the `Tortoise311.Handler`-behaviour, which allows the
user to hook into certain events happening in the life cycle of the
connection. This way code can get executed when:
- The connection is established
- The client has been disconnected from the broker
- A topic filter subscription has been accepted (or declined)
- A topic filter has been successfully unsubscribed
- A message is received on one of the subscribed topic filters
Besides this there are hooks for the usual life-cycle events one
would expect, such as `init/1` and `terminate/2`.
  Thirdly, publishing is handled in such a way that the semantics of
  the levels of Quality of Service, specified by the MQTT protocol,
  are mapped to the Elixir message passing semantics. Tortoise311 exposes
  an interface for publishing messages that hides the protocol details
  of message delivery (retrieval of acknowledge, release, and complete
  messages) and instead provides `Tortoise311.publish/4`, which will
  deliver the message to the broker and place a response in the
  process mailbox when a message with a QoS>0 has been handed to the
  server. This allows the user to keep track of the messages that have
  been delivered; alternatively the `Tortoise311.publish_sync/4`
  form will block the calling process until the message has been
  safely handed to the broker. Messages with QoS1 or QoS2 are stored
  in a process until they are delivered, so once they are published
  the client will retry delivery to make sure they reach their
  destination.
An alternative way of posting messages is implemented in
  `Tortoise311.Pipe`, which provides a data structure that among other
  things keeps a reference to the connection socket. This allows for an
  efficient way of posting messages because the data can get shot
  directly onto the wire without having to copy the message between
  processes (unless the message has a QoS of 1 or 2, in which case
  it will end up in a process to ensure it will get
  delivered). The pipe will automatically renew its connection socket
if the connection has been dropped, so ideally this message sending
approach should be fast and efficient.
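
  ## Example

  Connecting to a broker and publishing a message could look like the
  minimal sketch below. The connection options shown (`:server` and
  `:handler`) are assumptions about the connection API rather than a
  definitive reference:

      {:ok, _pid} =
        Tortoise311.Connection.start_link(
          client_id: "my_client_id",
          server: {Tortoise311.Transport.Tcp, host: 'localhost', port: 1883},
          handler: {Tortoise311.Handler.Logger, []}
        )

      Tortoise311.publish("my_client_id", "foo/bar", "hello", qos: 0)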
"""
alias Tortoise311.Connection
alias Tortoise311.Connection.Inflight
alias Tortoise311.Package
@typedoc """
An identifier used to identify the client on the server.
Most servers accept a maximum of 23 UTF-8 encode bytes for a client
id, and only the characters:
- "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
  Tortoise311 accepts atoms as client ids, but they will be converted to
  strings before going on the wire. Be careful with atoms such as
  `Example` because they are expanded to the atom `:"Elixir.Example"`,
  making it really easy to hit the maximum byte limit. Solving this is
  easy: just add a `:` before the client id, such as `:Example`.
"""
@type client_id() :: atom() | String.t()
@typedoc """
A 16-bit number identifying a message in a message exchange.
Some MQTT packages are part of a message exchange and need an
  identifier so the server and client can distinguish between multiple
  in-flight messages.
  Tortoise311 will assign package identifiers to packages that need them,
  so outside of tests (where it is beneficial to assert on the
  identifier of a package) it should be left to Tortoise311 itself; just
  leave it as `nil`.
"""
@type package_identifier() :: 0x0001..0xFFFF | nil
@typedoc """
What Quality of Service (QoS) mode should be used.
Quality of Service is one of 0, 1, and 2 denoting the following:
- `0` no quality of service. The message is a fire and forget.
- `1` at least once delivery. The receiver will respond with an
acknowledge message, so the sender will be certain that the
message has reached the destination. It is possible that a message
will be delivered twice though, as the package identifier for a
publish will be relinquished when the message has been
acknowledged, so a package with the same identifier will be
treated as a new message though it might be a re-transmission.
- `2` exactly once delivery. The receiver will only receive the
message once. This happens by having a more elaborate message
exchange than the QoS=1 variant.
  There is a difference in the semantics of assigning a QoS to a
  publish and a subscription. When assigned to a publish the message
  will get delivered to the server with the requested QoS; that is, if
  it accepts that level of QoS for the given topic.
When used in the context of a subscription it should be read as *the
maximum QoS*. When messages are published to the subscribed topic
  the message will get forwarded with the same QoS as it was
  published with, or downgraded to the maximum QoS of the subscription
  for the given subscribing client. That is, if the client subscribes
  with a maximum QoS=2 and a message is published to said topic with a
  QoS=1, the message will be forwarded to the client with QoS=1.
"""
@type qos() :: 0..2
@typedoc """
A topic for a message.
According to the MQTT 3.1.1 specification a valid topic must be at
least one character long. They are case sensitive and can include
space characters.
MQTT topics consist of topic levels which are delimited with forward
slashes `/`. A topic with a leading or trailing forward slash is
  allowed but it creates a distinct topic from the one without;
  `/sports/tennis/results` is different from
  `sports/tennis/results`. While a topic level normally requires at
  least one character, the topic `/` (a single forward slash) is valid.
  The server will drop the connection if it receives an invalid topic.
"""
@type topic() :: String.t()
@typedoc """
A topic filter for a subscription.
The topic filter is different from a `topic` because it is allowed
to contain wildcard characters:
- `+` is a single level wildcard which is allowed to stand on any
position in the topic filter. For instance: `sport/+/results` will
match `sport/tennis/results`, `sport/soccer/results`, etc.
- `#` is a multi-level wildcard and is only allowed to be on the
last position of the topic filter. For instance: `sport/#` will
match `sport/tennis/results`, `sport/tennis/announcements`, etc.
The server will reject any invalid topic filter and close the
connection.
"""
@type topic_filter() :: String.t()
@typedoc """
An optional message payload.
A message can optionally have a payload. The payload is a series of
bytes and for MQTT 3.1.1 the payload has no defined structure; any
series of bytes will do, and the client has to make sense of it.
  The payload will be `nil` if there is no payload. This is done to
  distinguish between a zero-byte binary and the absence of a payload.
"""
@type payload() :: binary() | nil
@doc """
Publish a message to the MQTT broker.
The publish function requires a `client_id` and a valid MQTT
topic. If no `payload` is set an empty zero byte message will get
  sent to the broker.
Optionally an options list can get passed to the publish, making it
possible to specify if the message should be retained on the server,
and with what quality of service the message should be published
with.
* `retain` indicates, when set to `true`, that the broker should
retain the message for the topic. Retained messages are
delivered to clients when they subscribe to the topic. Only one
message at a time can be retained for a given topic, so sending
a new one will overwrite the old. `retain` defaults to `false`.
  * `qos` sets the quality of service, an integer of 0, 1, or 2. The
`qos` defaults to `0`.
  Publishing a message with the payload *hello* to the topic *foo/bar*
with a *QoS1* could look like this:
Tortoise311.publish("client_id", "foo/bar", "hello", qos: 1)
Notice that if you want to send a message with an empty payload with
  options you will have to set the payload to nil like this:
Tortoise311.publish("client_id", "foo/bar", nil, retain: true)
## Return Values
The specified Quality of Service for a given publish will alter the
behaviour of the return value. When publishing a message with a QoS0
an `:ok` will simply get returned. This is because a QoS0 is a "fire
and forget." There are no quality of service so no efforts are made
to ensure that the message will reach its destination (though it very
likely will).
:ok = Tortoise311.publish("client_id", "foo/bar", nil, qos: 0)
When a message is published using either a QoS1 or QoS2, Tortoise311
will ensure that the message is delivered. A unique reference will
get returned and eventually a message will get delivered to the
process mailbox, containing the result of the publish when it has
been handed over:
{:ok, ref} = Tortoise311.publish("client_id", "foo/bar", nil, qos: 2)
receive do
{{Tortoise311, "client_id"}, ^ref, result} ->
IO.inspect({:result, result})
after
5000 ->
{:error, :timeout}
end
Be sure to implement a `handle_info/2` in `GenServer` processes that
  publish messages using `Tortoise311.publish/4`. Notice that the message
  delivered to the mailbox has the structure:
{{Tortoise311, "client_id"}, ^ref, result}
It is possible to send to multiple clients and blanket match on
results designated for a given client id, and the message is tagged
with `Tortoise311` so it is easy to see where the message originated
from.
"""
@spec publish(client_id(), topic(), payload, [options]) ::
:ok | {:ok, reference()} | {:error, :unknown_connection} | {:error, :timeout}
when payload: binary() | nil,
options:
{:qos, qos()}
| {:retain, boolean()}
| {:identifier, package_identifier()}
| {:timeout, non_neg_integer()}
def publish(client_id, topic, payload \\ nil, opts \\ []) do
qos = Keyword.get(opts, :qos, 0)
publish = %Package.Publish{
topic: topic,
qos: qos,
payload: payload,
retain: Keyword.get(opts, :retain, false)
}
timeout = Keyword.get(opts, :timeout, default_timeout())
with {:ok, {transport, socket}} <- Connection.connection(client_id, timeout: timeout) do
case publish do
%Package.Publish{qos: 0} ->
encoded_publish = Package.encode(publish)
apply(transport, :send, [socket, encoded_publish])
%Package.Publish{qos: qos} when qos in [1, 2] ->
Inflight.track(client_id, {:outgoing, publish})
end
else
{:error, :unknown_connection} ->
{:error, :unknown_connection}
{:error, :timeout} ->
{:error, :timeout}
end
end
@doc """
Synchronously send a message to the MQTT broker.
This is very similar to `Tortoise311.publish/4` with the difference
that it will block the calling process until the message has been
handed over to the server; the configuration options are the same
with the addition of the `timeout` option which specifies how long
  we are willing to wait for a reply. Per default the timeout is set
  to `Tortoise311.default_timeout()` (60 seconds); pass a number of
  milliseconds suited to the call site if a different deadline is needed.
msg = "Hello, from the World of Tomorrow !"
case Tortoise311.publish_sync("my_client_id", "foo/bar", msg, qos: 2, timeout: 200) do
:ok ->
:done
{:error, :timeout} ->
:timeout
end
Notice: It does not make sense to use `publish_sync/4` on a publish
  that has a QoS=0, because that will return instantly anyway. It is
made possible for consistency, and it is the default QoS.
See the documentation for `Tortoise311.publish/4` for configuration.
"""
@spec publish_sync(client_id(), topic(), payload, [options]) ::
:ok | {:error, :unknown_connection} | {:error, :timeout}
when payload: binary() | nil,
options:
{:qos, qos()}
| {:retain, boolean()}
| {:identifier, package_identifier()}
| {:timeout, timeout()}
def publish_sync(client_id, topic, payload \\ nil, opts \\ []) do
timeout = Keyword.get(opts, :timeout, default_timeout())
qos = Keyword.get(opts, :qos, 0)
publish = %Package.Publish{
topic: topic,
qos: qos,
payload: payload,
retain: Keyword.get(opts, :retain, false)
}
with {:ok, {transport, socket}} <- Connection.connection(client_id, timeout: timeout) do
case publish do
%Package.Publish{qos: 0} ->
encoded_publish = Package.encode(publish)
apply(transport, :send, [socket, encoded_publish])
%Package.Publish{qos: qos} when qos in [1, 2] ->
Inflight.track_sync(client_id, {:outgoing, publish}, timeout)
end
else
{:error, :unknown_connection} ->
{:error, :unknown_connection}
{:error, :timeout} ->
{:error, :timeout}
end
end
@doc "The default timeout value"
def default_timeout(), do: 60_000
end
| 40.385269 | 92 | 0.708895 |
79013bfb4d65378d5fb68d9fb649278ef5d7e44b | 2,770 | ex | Elixir | lib/system/rabbitmq/publisher.ex | exponentially/extreme_system | c3a63af286ace236726b32cc28b7b5445a9a9a29 | [
"MIT"
] | 9 | 2017-03-16T00:19:09.000Z | 2022-01-28T21:22:21.000Z | lib/system/rabbitmq/publisher.ex | exponentially/extreme_system | c3a63af286ace236726b32cc28b7b5445a9a9a29 | [
"MIT"
] | 16 | 2018-09-05T07:44:17.000Z | 2019-04-29T09:18:09.000Z | lib/system/rabbitmq/publisher.ex | exponentially/extreme_system | c3a63af286ace236726b32cc28b7b5445a9a9a29 | [
"MIT"
] | 7 | 2017-12-24T22:36:25.000Z | 2020-08-26T17:40:45.000Z | defmodule Extreme.System.RabbitMQ.Publisher do
alias Extreme.System.RabbitMQ.ChannelManager
use GenServer
use AMQP
require Logger
## Client API
def start_link(channel_manager, publisher_name, targets, opts) when is_list(targets),
do: GenServer.start_link(__MODULE__, {channel_manager, publisher_name, targets}, opts)
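
  @doc """
  Publishes `payload` to a previously declared exchange or queue.

  A hedged usage sketch (the publisher name, exchange, and routing key are
  hypothetical; the target must be one of the `targets` declared via
  `start_link/4`):

      Extreme.System.RabbitMQ.Publisher.publish(
        MyApp.Publisher,
        {:exchange, "events", "user.created", ~s({"id": 1})}
      )
  """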
def publish(server, command, metadata \\ [])
def publish(server, {:exchange, name, route, payload}, metadata),
do: GenServer.call(server, {:publish, :exchange, name, route, payload, metadata})
def publish(server, {:queue, name, payload}, metadata),
do: GenServer.call(server, {:publish, :queue, name, payload, metadata})
## Server Callbacks
def init({channel_manager, publisher_name, targets}) do
targets =
targets
|> Enum.map(&declare(channel_manager, publisher_name, &1))
|> Enum.into(%{})
Logger.info(fn -> "Declared push targets: #{targets |> Map.keys() |> Enum.join(", ")}" end)
{:ok, %{targets: targets}}
end
  def handle_call({:publish, :exchange, name, route, payload, metadata}, _from, state) do
    # An unregistered exchange yields an error reply rather than a crash.
    case state.targets["exchange:" <> name] do
      nil ->
        {:reply, {:error, "Exchange #{name} is not registered with this publisher"}, state}

      chan ->
        {:reply, _publish(route, payload, metadata, chan, name), state}
    end
  end
  def handle_call({:publish, :queue, name, payload, metadata}, _from, state) do
    # An unregistered queue yields an error reply rather than a crash.
    case state.targets["queue:" <> name] do
      nil ->
        {:reply, {:error, "Queue #{name} is not registered with this publisher"}, state}

      chan ->
        {:reply, _publish(name, payload, metadata, chan), state}
    end
  end
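  # Each target gets its own channel from the `ChannelManager` and is
  # declared up front. A hedged example of the accepted target shapes (the
  # names and options are hypothetical):
  #
  #     {:exchange, %{name: "events", type: :topic, options: [durable: true]}}
  #     {:queue, %{name: "jobs", options: [durable: true]}}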
defp declare(channel_manager, publisher_name, {:exchange, %{name: name} = definition}) do
chan = ChannelManager.get_channel(channel_manager, publisher_name, "exchange:#{name}")
if definition[:options],
do: :ok = Exchange.declare(chan, name, definition.type, definition.options)
{"exchange:" <> name, chan}
end
defp declare(channel_manager, publisher_name, {:queue, %{name: name} = definition}) do
chan = ChannelManager.get_channel(channel_manager, publisher_name, "queue:#{name}")
if definition[:options],
do: {:ok, %{queue: ^name}} = Queue.declare(chan, name, definition.options)
{"queue:" <> name, chan}
end
defp _publish(route, command, metadata, chan, exchange \\ "") do
metadata = Enum.into(metadata, [])
Logger.metadata(metadata)
Logger.info(fn -> "Publishing to #{exchange} exchange on route #{route}" end)
Logger.debug(fn -> inspect(command) end)
:ok = Basic.publish(chan, exchange, route, command, headers: metadata)
Logger.metadata([])
end
end
| 34.197531 | 95 | 0.66426 |
790148f4e489eac7630be8aaee2023aed3b267b3 | 1,105 | ex | Elixir | lib/mysimplelist_web/live/page_live.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | lib/mysimplelist_web/live/page_live.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | lib/mysimplelist_web/live/page_live.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | defmodule MysimplelistWeb.PageLive do
use MysimplelistWeb, :live_view
@impl true
def mount(_params, _session, socket) do
{:ok, assign(socket, query: "", results: %{})}
end
@impl true
def handle_event("suggest", %{"q" => query}, socket) do
{:noreply, assign(socket, results: search(query), query: query)}
end
@impl true
def handle_event("search", %{"q" => query}, socket) do
case search(query) do
%{^query => vsn} ->
{:noreply, redirect(socket, external: "https://hexdocs.pm/#{query}/#{vsn}")}
_ ->
{:noreply,
socket
|> put_flash(:error, "No dependencies found matching \"#{query}\"")
|> assign(results: %{}, query: query)}
end
end
defp search(query) do
if not MysimplelistWeb.Endpoint.config(:code_reloader) do
raise "action disabled when not in development"
end
for {app, desc, vsn} <- Application.started_applications(),
app = to_string(app),
String.starts_with?(app, query) and not List.starts_with?(desc, ~c"ERTS"),
into: %{},
do: {app, vsn}
end
end
| 27.625 | 84 | 0.60724 |
790155398fc4b47b6dc1be5b6afa9c2727d88807 | 1,757 | ex | Elixir | lib/koans/10_structs.ex | serpient/elixir-koans | d1d9d6a643cc821f9067d987330fbf78ba1d4463 | [
"MIT"
] | null | null | null | lib/koans/10_structs.ex | serpient/elixir-koans | d1d9d6a643cc821f9067d987330fbf78ba1d4463 | [
"MIT"
] | null | null | null | lib/koans/10_structs.ex | serpient/elixir-koans | d1d9d6a643cc821f9067d987330fbf78ba1d4463 | [
"MIT"
] | null | null | null | defmodule Structs do
use Koans
@intro "Structs"
defmodule Person do
defstruct [:name, :age]
end
koan "Structs are defined and named after a module" do
person = %Person{}
assert person == %Person{}
end
koan "Unless previously defined, fields begin as nil" do
nobody = %Person{}
assert nobody.age == nil
end
koan "You can pass initial values to structs" do
joe = %Person{name: "Joe", age: 23}
assert joe.name == "Joe"
end
koan "Update fields with the cons '|' operator" do
joe = %Person{name: "Joe", age: 23}
older = %{joe | age: joe.age + 10}
assert older.age == 33
end
koan "Struct can be treated like maps" do
silvia = %Person{age: 22, name: "Silvia"}
assert Map.fetch(silvia, :age) == {:ok, 22}
end
defmodule Plane do
defstruct passengers: 0, maker: :boeing
end
defmodule Airline do
defstruct plane: %Plane{}, name: "Southwest"
end
koan "Use the put_in macro to replace a nested value" do
airline = %Airline{plane: %Plane{maker: :boeing}}
assert put_in(airline.plane.maker, :airbus) == %Structs.Airline{name: "Southwest", plane: %Structs.Plane{maker: :airbus, passengers: 0}}
end
koan "Use the update_in macro to modify a nested value" do
airline = %Airline{plane: %Plane{maker: :boeing, passengers: 200}}
assert update_in(airline.plane.passengers, fn x -> x + 2 end) == %Structs.Airline{name: "Southwest", plane: %Structs.Plane{maker: :boeing, passengers: 202}}
end
koan "Use the put_in macro with atoms to replace a nested value in a non-struct" do
airline = %{plane: %{maker: :boeing}, name: "Southwest"}
assert put_in(airline[:plane][:maker], :cessna) == %{name: "Southwest", plane: %{maker: :cessna}}
end
end
| 29.283333 | 160 | 0.659078 |
7901c530d8b3bc6275eebe98b75822d723b4bef4 | 82 | ex | Elixir | test/support/web/views/layout_view.ex | danschultzer/coherence_assent | 538e7e4aba3146c9bf4ac7798fea4b8a0ff099d5 | [
"Unlicense",
"MIT"
] | 22 | 2017-09-15T17:52:31.000Z | 2018-10-07T02:36:27.000Z | test/support/web/views/layout_view.ex | danschultzer/coherence_oauth2 | 538e7e4aba3146c9bf4ac7798fea4b8a0ff099d5 | [
"Unlicense",
"MIT"
] | 15 | 2017-11-01T15:39:37.000Z | 2019-03-11T18:02:04.000Z | test/support/web/views/layout_view.ex | danschultzer/coherence_oauth2 | 538e7e4aba3146c9bf4ac7798fea4b8a0ff099d5 | [
"Unlicense",
"MIT"
] | 9 | 2017-09-18T20:48:06.000Z | 2018-12-05T15:24:24.000Z | defmodule CoherenceAssent.LayoutView do
use CoherenceAssent.Test.Web, :view
end
| 20.5 | 39 | 0.829268 |
7901cd8990a506ba48679b20d0f310298d965468 | 521 | ex | Elixir | lib/mix/tasks/confispex.report.ex | prosapient/confispex | 340d3dd8fe9bd5673bb07ccb5da23298c293898c | [
"Apache-2.0"
] | 7 | 2021-05-12T11:03:47.000Z | 2021-12-16T08:26:28.000Z | lib/mix/tasks/confispex.report.ex | prosapient/confispex | 340d3dd8fe9bd5673bb07ccb5da23298c293898c | [
"Apache-2.0"
] | 2 | 2021-11-20T23:09:33.000Z | 2021-12-03T22:09:46.000Z | lib/mix/tasks/confispex.report.ex | prosapient/confispex | 340d3dd8fe9bd5673bb07ccb5da23298c293898c | [
"Apache-2.0"
] | 2 | 2021-07-19T09:40:39.000Z | 2021-11-20T20:09:02.000Z | defmodule Mix.Tasks.Confispex.Report do
use Mix.Task
@shortdoc "Print report to stdout"
@moduledoc """
#{@shortdoc}
## Examples
$ mix confispex.report
$ mix confispex.report --mode=brief
$ mix confispex.report --mode=detailed
"""
@requirements ["app.config"]
def run(args) do
{opts, []} = OptionParser.parse!(args, switches: [mode: :string])
mode =
case opts[:mode] do
"detailed" -> :detailed
_ -> :brief
end
Confispex.report(mode)
end
end
| 19.296296 | 69 | 0.604607 |
7901da73fb452217605e869288cd700cf22a7e7e | 198 | ex | Elixir | web/views/procedure_view.ex | marick/elixir-critter4us | eeea901c1debf6c77969d80a55320daf909df053 | [
"MIT"
] | null | null | null | web/views/procedure_view.ex | marick/elixir-critter4us | eeea901c1debf6c77969d80a55320daf909df053 | [
"MIT"
] | null | null | null | web/views/procedure_view.ex | marick/elixir-critter4us | eeea901c1debf6c77969d80a55320daf909df053 | [
"MIT"
] | null | null | null | defmodule Critter4us.ProcedureView do
use Critter4us.Web, :view
def sorted_procedures(procedures) do
Enum.sort(procedures, &(String.downcase(&1.name) < String.downcase(&2.name)))
end
end
| 24.75 | 81 | 0.747475 |
79022ca87f2f32e262169061ac1d3a2808144a4f | 195 | ex | Elixir | lib/bolt_sips/internals/bolt_protocol_v2.ex | cheerfulstoic/bolt_sips | e86d6443f69d59f6cc41ecae5d0718ed05ea4904 | [
"Apache-2.0"
] | 242 | 2016-09-09T22:32:00.000Z | 2022-02-20T18:50:29.000Z | lib/bolt_sips/internals/bolt_protocol_v2.ex | cheerfulstoic/bolt_sips | e86d6443f69d59f6cc41ecae5d0718ed05ea4904 | [
"Apache-2.0"
] | 100 | 2016-10-18T04:19:09.000Z | 2021-11-15T19:14:47.000Z | lib/bolt_sips/internals/bolt_protocol_v2.ex | cheerfulstoic/bolt_sips | e86d6443f69d59f6cc41ecae5d0718ed05ea4904 | [
"Apache-2.0"
] | 51 | 2016-10-31T20:05:52.000Z | 2022-01-20T11:45:49.000Z | defmodule Bolt.Sips.Internals.BoltProtocolV2 do
@moduledoc false
  # There's no specific message for Bolt V2
# This file exists only to fill the gap between the 2 bolt protocol versions
end
| 32.5 | 78 | 0.784615 |
79023c8c3884d035ca8ee672d66fab8085f351b0 | 16,718 | exs | Elixir | lib/mix/test/mix/umbrella_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/mix/test/mix/umbrella_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/umbrella_test.exs | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule Mix.UmbrellaTest do
use MixTest.Case
@moduletag apps: [:foo, :bar]
test "apps_paths" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
assert Mix.Project.apps_paths() == nil
Mix.Project.in_project(:umbrella, ".", fn _ ->
assert Mix.Project.apps_paths() == %{bar: "apps/bar", foo: "apps/foo"}
assert_received {:mix_shell, :error,
["warning: path \"apps/dont_error_on_missing_mixfile\"" <> _]}
refute_received {:mix_shell, :error, ["warning: path \"apps/dont_error_on_files\"" <> _]}
end)
end)
end
test "apps_paths with selection" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", [apps: [:foo, :bar]], fn _ ->
File.mkdir_p!("apps/errors")
File.write!("apps/errors/mix.exs", "raise :oops")
assert Mix.Project.apps_paths() == %{bar: "apps/bar", foo: "apps/foo"}
end)
end)
end
test "compiles umbrella" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
Mix.Task.run("deps")
assert_received {:mix_shell, :info, ["* bar (apps/bar) (mix)"]}
assert_received {:mix_shell, :info, ["* foo (apps/foo) (mix)"]}
# Ensure we can compile and run checks
Mix.Task.run("deps.compile")
Mix.Task.run("deps.loadpaths")
Mix.Task.run("compile", ["--verbose"])
assert_received {:mix_shell, :info, ["==> bar"]}
assert_received {:mix_shell, :info, ["Generated bar app"]}
assert File.regular?("_build/dev/lib/bar/ebin/Elixir.Bar.beam")
assert_received {:mix_shell, :info, ["==> foo"]}
assert_received {:mix_shell, :info, ["Generated foo app"]}
assert File.regular?("_build/dev/lib/foo/ebin/Elixir.Foo.beam")
# Ensure foo was loaded and in the same env as Mix.env
assert_received {:mix_shell, :info, [":foo env is dev"]}
assert_received {:mix_shell, :info, [":bar env is dev"]}
Mix.Task.clear()
Mix.Task.run("app.start", ["--no-compile"])
end)
end)
end
test "compiles umbrella with protocol consolidation" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
Mix.Task.run("compile", ["--verbose"])
assert_received {:mix_shell, :info, ["Generated bar app"]}
assert_received {:mix_shell, :info, ["Generated foo app"]}
assert File.regular?("_build/dev/consolidated/Elixir.Enumerable.beam")
purge([Enumerable])
assert Mix.Tasks.App.Start.run([])
assert Protocol.consolidated?(Enumerable)
end)
end)
end
test "recursively compiles umbrella with protocol consolidation" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
defmodule Elixir.Mix.Tasks.Umbrella.Recur do
use Mix.Task
@recursive true
def run(_) do
assert Mix.Task.recursing?()
Mix.Task.run("compile", ["--verbose"])
end
end
Mix.Task.run("umbrella.recur")
assert_received {:mix_shell, :info, ["Generated bar app"]}
assert_received {:mix_shell, :info, ["Generated foo app"]}
assert File.regular?("_build/dev/consolidated/Elixir.Enumerable.beam")
purge([Enumerable])
assert Mix.Tasks.App.Start.run([])
assert Protocol.consolidated?(Enumerable)
end)
end)
end
defmodule UmbrellaDeps do
def project do
[apps_path: "apps", deps: [{:some_dep, path: "deps/some_dep"}]]
end
end
test "loads umbrella dependencies" do
Mix.Project.push(UmbrellaDeps)
in_fixture("umbrella_dep/deps/umbrella", fn ->
File.mkdir_p!("deps/some_dep/ebin")
File.mkdir_p!("_build/dev/lib/some_dep/ebin")
File.mkdir_p!("_build/dev/lib/foo/ebin")
File.mkdir_p!("_build/dev/lib/bar/ebin")
Mix.Task.run("loadpaths", ["--no-deps-check", "--no-elixir-version-check"])
assert to_charlist(Path.expand("_build/dev/lib/some_dep/ebin")) in :code.get_path()
assert to_charlist(Path.expand("_build/dev/lib/foo/ebin")) in :code.get_path()
assert to_charlist(Path.expand("_build/dev/lib/bar/ebin")) in :code.get_path()
end)
end
test "loads umbrella child dependencies in all environments" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
File.write!("apps/bar/mix.exs", """
defmodule Bar.MixProject do
use Mix.Project
def project do
[app: :bar,
version: "0.1.0",
deps: [{:git_repo, git: MixTest.Case.fixture_path("git_repo"), only: :other}]]
end
end
""")
# Does not fetch when filtered
Mix.Tasks.Deps.Get.run(["--only", "dev"])
refute_received {:mix_shell, :info, ["* Getting git_repo" <> _]}
# But works across all environments
Mix.Tasks.Deps.Get.run([])
assert_received {:mix_shell, :info, ["* Getting git_repo" <> _]}
# Does not show by default
Mix.Tasks.Deps.run([])
refute_received {:mix_shell, :info, ["* git_repo" <> _]}
# But shows on proper environment
Mix.env(:other)
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* git_repo " <> _]}
end)
end)
after
Mix.env(:test)
end
test "loads umbrella child optional dependencies" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
File.write!("apps/bar/mix.exs", """
defmodule Bar.MixProject do
use Mix.Project
def project do
[app: :bar,
version: "0.1.0",
deps: [{:git_repo, git: MixTest.Case.fixture_path("git_repo"), optional: true}]]
end
end
""")
Mix.Tasks.Deps.run([])
assert_received {:mix_shell, :info, ["* git_repo " <> _]}
end)
end)
end
test "loads umbrella sibling dependencies with :in_umbrella" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
File.write!("apps/bar/mix.exs", """
defmodule Bar.MixProject do
use Mix.Project
def project do
[app: :bar,
version: "0.1.0",
deps: [{:foo, in_umbrella: true}]]
end
end
""")
# Running from umbrella should not cause conflicts
Mix.Tasks.Deps.Get.run([])
Mix.Tasks.Run.run([])
end)
end)
end
test "umbrella sibling dependencies conflicts with :in_umbrella" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
File.write!("apps/bar/mix.exs", """
defmodule Bar.MixProject do
use Mix.Project
def project do
[app: :bar,
version: "0.1.0",
deps: [{:foo, in_umbrella: true, env: :unknown}]]
end
end
""")
assert_raise Mix.Error, fn ->
Mix.Tasks.Deps.Get.run([])
end
assert_received {:mix_shell, :error, ["Dependencies have diverged:"]}
assert_received {:mix_shell, :error,
[" the dependency foo in mix.exs is overriding a child" <> _]}
end)
end)
end
## Umbrellas as a dependency
test "list deps for umbrella as dependency" do
in_fixture("umbrella_dep", fn ->
Mix.Project.in_project(:umbrella_dep, ".", fn _ ->
Mix.Task.run("deps")
assert_received {:mix_shell, :info, ["* umbrella (deps/umbrella) (mix)"]}
assert_received {:mix_shell, :info, ["* foo (apps/foo) (mix)"]}
end)
end)
end
test "compile for umbrella as dependency" do
in_fixture("umbrella_dep", fn ->
Mix.Project.in_project(:umbrella_dep, ".", fn _ ->
Mix.Task.run("deps.compile")
assert Bar.bar() == "hello world"
end)
end)
end
defmodule CycleDeps do
def project do
[
app: :umbrella_dep,
deps: [
{:bar, path: "deps/umbrella/apps/bar"},
{:umbrella, path: "deps/umbrella"}
]
]
end
end
test "handles dependencies with cycles" do
Mix.Project.push(CycleDeps)
in_fixture("umbrella_dep", fn ->
assert Enum.map(Mix.Dep.load_on_environment([]), & &1.app) == [:foo, :bar, :umbrella]
end)
end
test "handles dependencies with cycles and overridden deps" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
File.write!("apps/foo/mix.exs", """
defmodule Foo.MixProject do
use Mix.Project
def project do
# Ensure we have the proper environment
:dev = Mix.env
[app: :foo,
version: "0.1.0",
deps: [{:bar, in_umbrella: true}]]
end
end
""")
File.write!("apps/bar/mix.exs", """
defmodule Bar.MixProject do
use Mix.Project
def project do
# Ensure we have the proper environment
:dev = Mix.env
[app: :bar,
version: "0.1.0",
deps: [{:a, path: "deps/a"},
{:b, path: "deps/b"}]]
end
end
""")
assert Enum.map(Mix.Dep.load_on_environment([]), & &1.app) == [:a, :b, :bar, :foo]
end)
end)
end
test "uses dependency aliases" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
File.write!("apps/bar/mix.exs", """
defmodule Bar.MixProject do
use Mix.Project
def project do
[app: :bar,
version: "0.1.0",
aliases: ["compile.all": fn _ -> Mix.shell.info "no compile bar" end]]
end
end
""")
Mix.Task.run("compile", ["--verbose"])
assert_receive {:mix_shell, :info, ["no compile bar"]}
refute_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]}
end)
end)
end
test "recompiles after runtime path dependency changes" do
in_fixture("umbrella_dep/deps/umbrella/apps", fn ->
Mix.Project.in_project(:bar, "bar", fn _ ->
Mix.Task.run("compile", ["--verbose"])
assert_received {:mix_shell, :info, ["Generated foo app"]}
assert_received {:mix_shell, :info, ["Generated bar app"]}
assert File.regular?("_build/dev/lib/foo/ebin/Elixir.Foo.beam")
assert File.regular?("_build/dev/lib/bar/ebin/Elixir.Bar.beam")
# Noop by default
assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == {:noop, []}
Mix.Shell.Process.flush()
# Ok but no compilation when there is no runtime dependency
mtime = File.stat!("_build/dev/lib/bar/.mix/compile.elixir").mtime
ensure_touched("_build/dev/lib/foo/ebin/Elixir.Foo.beam", mtime)
mtime = File.stat!("_build/dev/lib/bar/.mix/compile.elixir").mtime
ensure_touched("_build/dev/lib/foo/.mix/compile.elixir", mtime)
assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == {:ok, []}
refute_received {:mix_shell, :info, ["Compiled " <> _]}
# Add runtime dependency
File.write!("lib/bar.ex", """
defmodule Bar do
def bar, do: Foo.foo
end
""")
assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == {:ok, []}
assert_received {:mix_shell, :info, ["Compiled lib/bar.ex"]}
end)
end)
end
test "recompiles after compile time path dependency changes" do
in_fixture("umbrella_dep/deps/umbrella/apps", fn ->
Mix.Project.in_project(:bar, "bar", fn _ ->
Mix.Task.run("compile", ["--verbose"])
# Add compile time dependency
File.write!("lib/bar.ex", "defmodule Bar, do: Foo.foo")
assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == {:ok, []}
assert_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]}
# Recompiles for compile time dependencies
mtime = File.stat!("_build/dev/lib/bar/.mix/compile.elixir").mtime
ensure_touched("_build/dev/lib/foo/ebin/Elixir.Foo.beam", mtime)
ensure_touched("_build/dev/lib/foo/.mix/compile.elixir", mtime)
assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == {:ok, []}
assert_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]}
end)
end)
end
test "recompiles after struct path dependency changes" do
in_fixture("umbrella_dep/deps/umbrella/apps", fn ->
Mix.Project.in_project(:bar, "bar", fn _ ->
File.write!("../foo/lib/foo.ex", "defmodule Foo, do: defstruct [:bar]")
Mix.Task.run("compile", ["--verbose"])
# Add struct dependency
File.write!("lib/bar.ex", "defmodule Bar, do: %Foo{bar: true}")
assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == {:ok, []}
assert_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]}
# Recompiles for struct dependencies
mtime = File.stat!("_build/dev/lib/bar/.mix/compile.elixir").mtime
ensure_touched("_build/dev/lib/foo/ebin/Elixir.Foo.beam", mtime)
ensure_touched("_build/dev/lib/foo/.mix/compile.elixir", mtime)
assert Mix.Tasks.Compile.Elixir.run(["--verbose"]) == {:ok, []}
assert_receive {:mix_shell, :info, ["Compiled lib/bar.ex"]}
end)
end)
end
test "reconsolidates after path dependency changes" do
in_fixture("umbrella_dep/deps/umbrella/apps", fn ->
Mix.Project.in_project(:bar, "bar", fn _ ->
# Add a protocol dependency
File.write!("../foo/lib/foo.ex", """
defprotocol Foo do
def foo(arg)
end
defimpl Foo, for: List do
def foo(list), do: list
end
""")
Mix.Task.run("compile")
assert File.regular?("_build/dev/lib/bar/consolidated/Elixir.Foo.beam")
assert Mix.Tasks.Compile.Protocols.run([]) == :noop
# Mark protocol as outdated
File.touch!("_build/dev/lib/bar/consolidated/Elixir.Foo.beam", {{2010, 1, 1}, {0, 0, 0}})
mtime = File.stat!("_build/dev/lib/bar/.mix/compile.protocols").mtime
ensure_touched("_build/dev/lib/foo/ebin/Elixir.Foo.beam", mtime)
assert Mix.Tasks.Compile.Protocols.run([]) == :ok
# Check new timestamp
mtime = File.stat!("_build/dev/lib/bar/consolidated/Elixir.Foo.beam").mtime
assert mtime > {{2010, 1, 1}, {0, 0, 0}}
end)
end)
end
test "reconsolidates using umbrella parent information on shared _build" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
File.write!("apps/bar/lib/bar.ex", """
defprotocol Bar do
def bar(arg)
end
defimpl Bar, for: List do
def bar(list), do: list
end
""")
Mix.Project.in_project(:foo, "apps/foo", [build_path: "../../_build"], fn _ ->
Mix.Task.run("compile.protocols")
refute Code.ensure_loaded?(Bar)
end)
Mix.Project.in_project(:umbrella, ".", fn _ ->
Mix.Task.run("compile.protocols")
Mix.Task.run("app.start")
assert Protocol.consolidated?(Bar)
end)
end)
end
test "reconsolidates using umbrella child information on shared _build" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
File.write!("apps/bar/lib/bar.ex", """
defprotocol Bar do
def foo(arg)
end
defimpl Bar, for: List do
def foo(list), do: list
end
""")
Mix.Project.in_project(:umbrella, ".", fn _ ->
Mix.Task.run("compile.protocols")
end)
# Emulate the dependency being removed
Mix.Project.in_project(:foo, "apps/foo", [build_path: "../../_build", deps: []], fn _ ->
File.rm_rf("../../_build/dev/lib/bar")
Mix.Task.run("compile.protocols")
end)
end)
end
test "apps cannot refer to themselves as a dep" do
in_fixture("umbrella_dep/deps/umbrella", fn ->
Mix.Project.in_project(:umbrella, ".", fn _ ->
File.write!("apps/bar/mix.exs", """
defmodule Bar.MixProject do
use Mix.Project
def project do
[app: :bar,
version: "0.1.0",
deps: [{:bar, in_umbrella: true}]]
end
end
""")
assert_raise Mix.Error, "App bar lists itself as a dependency", fn ->
          Mix.Task.run("deps.get", ["--verbose"])
end
end)
end)
end
end
| 32.399225 | 97 | 0.58117 |
7902514d36d20e8ccdd8170b4aec9bc04decd757 | 8,904 | exs | Elixir | apps/state/config/config.exs | paulswartz/api | 7d892ce0fa84eaea92758f90f072bfcd0ce91883 | [
"MIT"
] | null | null | null | apps/state/config/config.exs | paulswartz/api | 7d892ce0fa84eaea92758f90f072bfcd0ce91883 | [
"MIT"
] | null | null | null | apps/state/config/config.exs | paulswartz/api | 7d892ce0fa84eaea92758f90f072bfcd0ce91883 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :state, :route_pattern,
ignore_override_prefixes: %{
# don't ignore Foxboro via Fairmount trips
"CR-Franklin-Foxboro-" => false,
# ignore North Station Green-D patterns
"Green-D-1-1" => true,
"Green-D-3-1" => true,
# don't ignore Rockport Branch shuttles
"Shuttle-BeverlyRockport-0-0" => false,
"Shuttle-BeverlyRockport-0-1" => false,
"Shuttle-ManchesterGloucester-0-0" => false,
"Shuttle-ManchesterGloucester-0-1" => false,
"Shuttle-ManchesterRockport-0-0" => false,
"Shuttle-ManchesterRockport-0-1" => false,
"Shuttle-RockportWestGloucester-0-0" => false,
"Shuttle-RockportWestGloucester-0-1" => false,
# don't ignore Fitchburg Line shuttles to/from Alewife
"Shuttle-AlewifeLittletonExpress-0-0" => false,
"Shuttle-AlewifeLittletonExpress-0-1" => false,
"Shuttle-AlewifeLittletonLocal-0-0" => false,
"Shuttle-AlewifeLittletonLocal-0-1" => false,
# don't ignore Newton Connection RailBus for Worcester Line
"Shuttle-NewtonHighlandsWellesleyFarms-0-0" => false,
"Shuttle-NewtonHighlandsWellesleyFarms-0-1" => false,
# don't ignore Providence trains stopping at Forest Hills
"CR-Providence-d01bc229-0" => false
}
config :state, :shape,
prefix_overrides: %{
# Green Line
# Green-B (Lechmere)
"810_0004" => -1,
# Green-B (Lechmere)
"810_0005" => -1,
# Green-B (Lechmere)
"810_0006" => -1,
# Green-B (Lechmere)
"810_0007" => -1,
# Green-B (Lechmere)
"810_0008" => -1,
# Green-B (North Station)
"811_0007" => -1,
# Green-B (North Station)
"811_0008" => -1,
# Green-B (North Station)
"811_0009" => -1,
# Green-B (North Station)
"811_0010" => -1,
# Green-B (North Station)
"811_0011" => -1,
# Green-B (North Station)
"811_0012" => -1,
# Green-B
"813_0003" => 2,
# Green-B
"813_0004" => 2,
# Green-B (Blandford)
"803_0001" => -1,
# Green-B (Blandford)
"803_0002" => -1,
# Green-B (Blandford)
"803t0001" => -1,
# Green-B (Blandford)
"803t0003" => -1,
# Green-C (Lechmere)
"830_0003" => -1,
# Green-C (Lechmere)
"830_0004" => -1,
# Green-C (Lechmere)
"830_0005" => -1,
# Green-C (Lechmere)
"830_0006" => -1,
# Green-C (Park)
"833t0001" => -1,
# Green-C (Park)
"833t0002" => -1,
# Green-C (Park)
"833_0001" => -1,
# Green-C (Park)
"833_0002" => -1,
# Green-C
"831_0008" => 2,
# Green-C
"831_0009" => 2,
# Green-D (Lechmere)
"840_0004" => -1,
# Green-D (Lechmere)
"840_0005" => -1,
# Green-D (Lechmere)
"840_0008" => -1,
# Green-D (Lechmere)
"840_0009" => -1,
# Green-D (North Station)
"841_0005" => -1,
# Green-D (North Station)
"841_0006" => -1,
# Green-D (Lechmere)
"850_0006" => -1,
# Green-D (Lechmere)
"850_0007" => -1,
# Green-D (Lechmere)
"850_0010" => -1,
# Green-D (Lechmere)
"850_0011" => -1,
# Green-D (North Station)
"851_0008" => -1,
# Green-D (North Station)
"851_0009" => -1,
# Green-D (North Station)
"851_0010" => -1,
# Green-D (Newton Highlands)
"858_0002" => -1,
# Green-D (Newton Highlands)
"858t0001" => -1,
# Green-D (Newton Highlands)
"858t0002" => -1,
# Green-E (Prudential)
"881_0012" => -1,
# Green-E (Prudential)
"881_0013" => -1,
# Green-E (shuttle bus)
"6020021" => -1,
"6020022" => -1,
# Order the Red Line Ashmont first, and change the northbound names to
# the names of the branch.
"931_0009" => 2,
"931_0010" => 2,
"933_0009" => 1,
"933_0010" => 1,
# Silver Line
# SL1: last trip, goes right by South Station
"7410023" => -1,
# SL2
"7420025" => 3,
# SL2 listed as _ in shaperoutevariants, but not actually primary
"7420016" => -1,
# Providence
"9890008" => {nil, "Wickford Junction - South Station"},
"9890009" => {nil, "South Station - Wickford Junction"},
"9890003" => {nil, "Stoughton - South Station"},
# Newburyport
"9810006" => {nil, "Rockport - North Station"},
"9810001" => {nil, "Newburyport - North Station"},
# Alternate Routes
# Haverhill / Lowell wildcat trip
"9820004" => -1,
# Bus overrides
# Route 9 inbound to Copley
"090145" => 3,
# Route 39
"390068" => 3,
# Route 66
"660085" => 3
},
suffix_overrides: %{
# shuttles are all -1 priority
"-S" => -1
}
# Overrides for the stop ordering on routes where the trips themselves aren't enough
config :state, :stops_on_route,
stop_order_overrides: %{
{"CR-Franklin", 0} => [
["Norwood Central", "Windsor Gardens", "Plimptonville", "Walpole"],
["place-FB-0148", "place-FB-0166", "place-FB-0177", "place-FB-0191"],
["Walpole", "Foxboro", "Norfolk"],
["place-FB-0191", "place-FS-0049", "place-FB-0230"]
],
{"CR-Franklin", 1} => [
["Norfolk", "Foxboro", "Walpole"],
["place-FB-0230", "place-FS-0049", "place-FB-0191"]
],
{"CR-Fairmount", 0} => [
["Readville", "Dedham Corp Center", "Foxboro"],
["place-DB-0095", "place-FB-0118", "place-FS-0049"]
],
{"CR-Fairmount", 1} => [
["Foxboro", "Dedham Corp Center", "Readville"],
["place-FS-0049", "place-FB-0118", "place-DB-0095"]
],
{"CR-Fitchburg", 0} => [
["place-portr", "place-alfcl", "place-FR-0064"],
["place-FR-0253", "place-FR-0301", "place-FR-0361"]
],
{"CR-Fitchburg", 1} => [
["place-FR-0361", "place-FR-0301", "place-FR-0253"],
["place-FR-0064", "place-alfcl", "place-portr"]
],
{"CR-Newburyport", 0} => [
[
"Beverly",
"North Beverly",
"Hamilton/Wenham",
"Ipswich",
"Rowley",
"Newburyport",
"Montserrat",
"Prides Crossing",
"Beverly Farms",
"Manchester",
"West Gloucester",
"Gloucester",
"Rockport"
],
[
"place-ER-0183",
"place-ER-0208",
"place-ER-0227",
"place-ER-0276",
"place-ER-0312",
"place-ER-0362",
"place-GB-0198",
"place-GB-0222",
"place-GB-0229",
"place-GB-0254",
"place-GB-0296",
"place-GB-0316",
"place-GB-0353"
]
],
{"CR-Newburyport", 1} => [
[
"Rockport",
"Gloucester",
"West Gloucester",
"Manchester",
"Beverly Farms",
"Prides Crossing",
"Montserrat",
"Newburyport",
"Rowley",
"Ipswich",
"Hamilton/Wenham",
"North Beverly",
"Beverly"
],
[
"place-GB-0353",
"place-GB-0316",
"place-GB-0296",
"place-GB-0254",
"place-GB-0229",
"place-GB-0222",
"place-GB-0198",
"place-ER-0362",
"place-ER-0312",
"place-ER-0276",
"place-ER-0227",
"place-ER-0208",
"place-ER-0183"
]
],
{"CR-Worcester", 0} => [
[
"place-WML-0035",
"place-newtn",
"place-WML-0081",
"place-WML-0091",
"place-WML-0102",
"place-river",
"place-WML-0125"
]
],
{"CR-Worcester", 1} => [
[
"place-WML-0125",
"place-river",
"place-WML-0102",
"place-WML-0091",
"place-WML-0081",
"place-newtn",
"place-WML-0035"
]
],
{"CR-Providence", 0} => [
[
"place-rugg",
"place-forhl",
"place-NEC-2203"
]
]
},
not_on_route: %{
{"CR-Franklin", 0} => [
"place-DB-2265",
"place-DB-2258",
"place-DB-2249",
"place-DB-2240",
"place-DB-2230",
"place-DB-2222",
"place-DB-2205"
],
{"CR-Franklin", 1} => [
"place-DB-2265",
"place-DB-2258",
"place-DB-2249",
"place-DB-2240",
"place-DB-2230",
"place-DB-2222",
"place-DB-2205"
],
{"CR-Fairmount", 0} => [
"place-FB-0166",
"place-FB-0148",
"place-FB-0143",
"place-FB-0125",
"place-FB-0109"
],
{"CR-Fairmount", 1} => [
"place-FB-0166",
"place-FB-0148",
"place-FB-0143",
"place-FB-0125",
"place-FB-0109"
],
{"Green-D", 0} => [
"place-lech",
"place-spmnl",
"place-north",
"place-haecl"
],
{"Green-D", 1} => [
"place-lech",
"place-spmnl",
"place-north",
"place-haecl"
],
{"Green-E", 0} => [
"place-lech",
"14159",
"21458"
],
{"Green-E", 1} => [
"place-lech",
"14155",
"21458"
]
}
import_config "#{Mix.env()}.exs"
| 25.295455 | 84 | 0.518756 |
790251cddd9175d7c53f3631ba494cb190c6dd97 | 3,198 | exs | Elixir | apps/site/test/site_web/controllers/transit_near_me/location_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/site/test/site_web/controllers/transit_near_me/location_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/site/test/site_web/controllers/transit_near_me/location_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule SiteWeb.TransitNearMeController.LocationTest do
use ExUnit.Case, async: true
alias GoogleMaps.Geocode.Address
alias SiteWeb.TransitNearMeController.Location
@address %Address{
latitude: 42.351,
longitude: -71.066,
formatted: "10 Park Plaza, Boston, MA, 02116"
}
def geocode_fn("10 Park Plaza, Boston, MA, 02116") do
send(self(), :geocode)
{:ok, [@address]}
end
def reverse_geocode_fn(42.351, -71.066) do
send(self(), :reverse_geocode)
{:ok, [@address]}
end
def reverse_geocode_fn(0.0, 0.0) do
send(self(), :reverse_geocode)
{:error, :zero_results}
end
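  # Each stub reports which geocoding path ran by sending a message to the
  # test process, so the tests below can assert_receive/refute_receive
  # :geocode and :reverse_geocode.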
setup do
{:ok,
opts: [
geocode_fn: &geocode_fn/1,
reverse_geocode_fn: &reverse_geocode_fn/2
]}
end
describe "get_location/2" do
test "can take unnested lat/lng values", %{opts: opts} do
params = %{
"latitude" => "42.351",
"longitude" => "-71.066",
"location" => %{
"address" => "10 Park Plaza, Boston, MA, 02116"
}
}
assert Location.get(params, opts) == {:ok, [@address]}
refute_receive :geocode
refute_receive :reverse_geocode
end
test "does not attempt any geocoding if all params are provided", %{opts: opts} do
params = %{
"location" => %{
"latitude" => "42.351",
"longitude" => "-71.066",
"address" => "10 Park Plaza, Boston, MA, 02116"
}
}
assert Location.get(params, opts) == {:ok, [@address]}
refute_receive :geocode
refute_receive :reverse_geocode
end
test "geocodes address if lat/lng is not provided", %{opts: opts} do
params = %{
"location" => %{
"address" => "10 Park Plaza, Boston, MA, 02116"
}
}
assert Location.get(params, opts) == {:ok, [@address]}
assert_receive :geocode
refute_receive :reverse_geocode
end
test "geocodes address if lat/lng are not floats", %{opts: opts} do
params = %{
"location" => %{
"latitude" => "",
"longitude" => "",
"address" => "10 Park Plaza, Boston, MA, 02116"
}
}
assert Location.get(params, opts) == {:ok, [@address]}
assert_receive :geocode
refute_receive :reverse_geocode
end
test "reverse geocodes lat/lng if address is not provided", %{opts: opts} do
params = %{
"location" => %{
"latitude" => "42.351",
"longitude" => "-71.066"
}
}
assert Location.get(params, opts) == {:ok, [@address]}
refute_receive :geocode
assert_receive :reverse_geocode
end
test "returns an error if there is an error with reverse geocoding", %{
opts: opts
} do
params = %{
"location" => %{
"latitude" => "0.0",
"longitude" => "0.0"
}
}
assert Location.get(params, opts) == {:error, :zero_results}
refute_receive :geocode
assert_receive :reverse_geocode
end
test "returns :no_address if params don't include address or lat/lng", %{opts: opts} do
assert Location.get(%{}, opts) == :no_address
end
end
end
| 24.227273 | 91 | 0.568793 |
790274ac4c81525dabb115858e1f980d040f6125 | 3,661 | ex | Elixir | lib/ex_aws/config/auth_cache.ex | steele-d/ex_aws | 2ffd0a3311d06512418fdda87ebd9b7f1b8d37a5 | [
"MIT"
] | null | null | null | lib/ex_aws/config/auth_cache.ex | steele-d/ex_aws | 2ffd0a3311d06512418fdda87ebd9b7f1b8d37a5 | [
"MIT"
] | null | null | null | lib/ex_aws/config/auth_cache.ex | steele-d/ex_aws | 2ffd0a3311d06512418fdda87ebd9b7f1b8d37a5 | [
"MIT"
] | null | null | null | defmodule ExAws.Config.AuthCache do
@moduledoc false
use GenServer
# http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
@refresh_lead_time 300_000
@instance_auth_key :aws_instance_auth
defmodule AuthConfigAdapter do
@moduledoc false
@doc "Compute the awscli auth information."
@callback adapt_auth_config(auth :: map, profile :: String.t(), expiration :: integer) :: any
end
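  # A minimal adapter sketch (module name hypothetical) that just stamps the
  # expiration onto the auth map, matching the callback above:
  #
  #     defmodule MyApp.AuthAdapter do
  #       @behaviour ExAws.Config.AuthCache.AuthConfigAdapter
  #
  #       @impl true
  #       def adapt_auth_config(auth, _profile, expiration),
  #         do: Map.put(auth, :expiration, expiration)
  #     end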
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, :ok, Keyword.put(opts, :name, __MODULE__))
end
def get(config) do
:ets.lookup(__MODULE__, @instance_auth_key)
|> refresh_auth_if_required(config)
end
def get(profile, expiration) do
case :ets.lookup(__MODULE__, :awscli) do
[{:awscli, auth_config}] ->
auth_config
[] ->
GenServer.call(__MODULE__, {:refresh_awscli_config, profile, expiration}, 30_000)
end
end
## Callbacks
def init(:ok) do
ets = :ets.new(__MODULE__, [:named_table, read_concurrency: true])
{:ok, ets}
end
def handle_call({:refresh_auth, config}, _from, ets) do
auth = refresh_auth(config, ets)
{:reply, auth, ets}
end
def handle_call({:refresh_awscli_config, profile, expiration}, _from, ets) do
auth = refresh_awscli_config(profile, expiration, ets)
{:reply, auth, ets}
end
def handle_info({:refresh_auth, config}, ets) do
refresh_auth(config, ets)
{:noreply, ets}
end
def handle_info({:refresh_awscli_config, profile, expiration}, ets) do
refresh_awscli_config(profile, expiration, ets)
{:noreply, ets}
end
def refresh_awscli_config(profile, expiration, ets) do
Process.send_after(self(), {:refresh_awscli_config, profile, expiration}, expiration)
auth = ExAws.Config.awscli_auth_credentials(profile)
auth =
case ExAws.Config.awscli_auth_adapter() do
nil ->
auth
adapter ->
adapter.adapt_auth_config(auth, profile, expiration)
end
:ets.insert(ets, {:awscli, auth})
auth
end
defp refresh_auth_if_required([], config) do
GenServer.call(__MODULE__, {:refresh_auth, config}, 30_000)
end
defp refresh_auth_if_required([{_key, cached_auth}], config) do
if next_refresh_in(cached_auth) > 0 do
cached_auth
else
GenServer.call(__MODULE__, {:refresh_auth, config}, 30_000)
end
end
defp refresh_auth(config, ets) do
:ets.lookup(__MODULE__, @instance_auth_key)
|> refresh_auth_if_stale(config, ets)
end
defp refresh_auth_if_stale([], config, ets) do
refresh_auth_now(config, ets)
end
defp refresh_auth_if_stale([{_key, cached_auth}], config, ets) do
if next_refresh_in(cached_auth) > @refresh_lead_time do
# we still have a valid auth token, so simply return that
cached_auth
else
refresh_auth_now(config, ets)
end
end
defp refresh_auth_if_stale(_, config, ets), do: refresh_auth_now(config, ets)
defp refresh_auth_now(config, ets) do
auth = ExAws.InstanceMeta.security_credentials(config)
:ets.insert(ets, {@instance_auth_key, auth})
Process.send_after(__MODULE__, {:refresh_auth, config}, next_refresh_in(auth))
auth
end
defp next_refresh_in(%{expiration: expiration}) do
try do
expires_in_ms =
expiration
|> NaiveDateTime.from_iso8601!()
|> NaiveDateTime.diff(NaiveDateTime.utc_now(), :millisecond)
# refresh lead_time before auth expires, unless the time has passed
# otherwise refresh needed now
max(0, expires_in_ms - @refresh_lead_time)
rescue
_e -> 0
end
end
defp next_refresh_in(_), do: 0
end
| 26.338129 | 97 | 0.689975 |
79028ae7ed88488a703e80816513afaad62878d6 | 2,347 | exs | Elixir | test/server_tests/monitored_items_test.exs | juljimm/opex62541 | c44c157213a8a3fb07283f6e697e6dd018693315 | [
"MIT"
] | 11 | 2020-04-24T20:54:23.000Z | 2022-01-11T03:13:13.000Z | test/server_tests/monitored_items_test.exs | juljimm/opex62541 | c44c157213a8a3fb07283f6e697e6dd018693315 | [
"MIT"
] | 3 | 2021-02-25T10:03:49.000Z | 2021-09-15T03:37:12.000Z | test/server_tests/monitored_items_test.exs | juljimm/opex62541 | c44c157213a8a3fb07283f6e697e6dd018693315 | [
"MIT"
] | 4 | 2020-09-28T16:18:43.000Z | 2021-09-09T16:29:14.000Z | defmodule ServerMonitoredItemsTest do
use ExUnit.Case
alias OpcUA.{NodeId, Server, QualifiedName}
setup do
{:ok, pid} = OpcUA.Server.start_link()
Server.set_default_config(pid)
{:ok, ns_index} = OpcUA.Server.add_namespace(pid, "Room")
# Object Node
requested_new_node_id =
NodeId.new(ns_index: ns_index, identifier_type: "string", identifier: "R1_TS1_VendorName")
parent_node_id = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 85)
reference_type_node_id = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 35)
browse_name = QualifiedName.new(ns_index: ns_index, name: "Temperature sensor")
type_definition = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 58)
:ok = Server.add_object_node(pid,
requested_new_node_id: requested_new_node_id,
parent_node_id: parent_node_id,
reference_type_node_id: reference_type_node_id,
browse_name: browse_name,
type_definition: type_definition
)
# Variable Node
requested_new_node_id =
NodeId.new(ns_index: ns_index, identifier_type: "string", identifier: "R1_TS1_Temperature")
parent_node_id =
NodeId.new(ns_index: ns_index, identifier_type: "string", identifier: "R1_TS1_VendorName")
reference_type_node_id = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 47)
browse_name = QualifiedName.new(ns_index: ns_index, name: "Temperature")
type_definition = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 63)
:ok = Server.add_variable_node(pid,
requested_new_node_id: requested_new_node_id,
parent_node_id: parent_node_id,
reference_type_node_id: reference_type_node_id,
browse_name: browse_name,
type_definition: type_definition
)
%{pid: pid, ns_index: ns_index}
end
test "Add & delete a monitored Item", state do
node_id = NodeId.new(ns_index: state.ns_index, identifier_type: "string", identifier: "R1_TS1_Temperature")
assert {:ok, 1} == Server.add_monitored_item(state.pid, monitored_item: node_id, sampling_time: 1000.0)
# Expected error with a undefined monitored item.
assert {:error, "BadMonitoredItemIdInvalid"} == Server.delete_monitored_item(state.pid, 10)
assert :ok == Server.delete_monitored_item(state.pid, 1)
end
end
| 40.465517 | 111 | 0.735407 |
79028f8f2a17b6d9cad616e19eac7d28e57d7548 | 1,845 | ex | Elixir | clients/tasks/lib/google_api/tasks/v1/model/tasks.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/tasks/lib/google_api/tasks/v1/model/tasks.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/tasks/lib/google_api/tasks/v1/model/tasks.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Tasks.V1.Model.Tasks do
@moduledoc """
## Attributes
* `etag` (*type:* `String.t`, *default:* `nil`) - ETag of the resource.
* `items` (*type:* `list(GoogleApi.Tasks.V1.Model.Task.t)`, *default:* `nil`) - Collection of tasks.
* `kind` (*type:* `String.t`, *default:* `nil`) - Type of the resource. This is always "tasks#tasks".
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token used to access the next page of this result.
"""
use GoogleApi.Gax.ModelBase
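  # Example (sketch): decoding an API response body into this struct with
  # Poison, assuming the JSON payload matches the attributes listed above.
  #
  #     Poison.decode!(body, as: %GoogleApi.Tasks.V1.Model.Tasks{})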
@type t :: %__MODULE__{
:etag => String.t(),
:items => list(GoogleApi.Tasks.V1.Model.Task.t()),
:kind => String.t(),
:nextPageToken => String.t()
}
field(:etag)
field(:items, as: GoogleApi.Tasks.V1.Model.Task, type: :list)
field(:kind)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.Tasks.V1.Model.Tasks do
def decode(value, options) do
GoogleApi.Tasks.V1.Model.Tasks.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Tasks.V1.Model.Tasks do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.946429 | 113 | 0.686179 |
7902b86ac7de1c752a87d62d24066c66c57fcaca | 476 | exs | Elixir | elixir/discuss/config/test.exs | enricocolasante/sandbox | ba006c3e45bacef064d90cb9861c637c98545cfc | [
"MIT"
] | null | null | null | elixir/discuss/config/test.exs | enricocolasante/sandbox | ba006c3e45bacef064d90cb9861c637c98545cfc | [
"MIT"
] | null | null | null | elixir/discuss/config/test.exs | enricocolasante/sandbox | ba006c3e45bacef064d90cb9861c637c98545cfc | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :discuss, Discuss.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :discuss, Discuss.Repo,
adapter: Ecto.Adapters.MySQL,
username: "root",
password: "",
database: "discuss_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
| 23.8 | 56 | 0.72479 |
7902c8322d5442b42950e4f9572693b5105b1027 | 1,233 | exs | Elixir | test/ex_twilio/parser_test.exs | techgaun/ex_twilio | b22f5421c4c709232b5ef53191cc19d5ffb5c99c | [
"MIT"
] | null | null | null | test/ex_twilio/parser_test.exs | techgaun/ex_twilio | b22f5421c4c709232b5ef53191cc19d5ffb5c99c | [
"MIT"
] | null | null | null | test/ex_twilio/parser_test.exs | techgaun/ex_twilio | b22f5421c4c709232b5ef53191cc19d5ffb5c99c | [
"MIT"
] | null | null | null | defmodule ExTwilio.ParserTest do
use ExUnit.Case
import ExTwilio.Parser
defmodule Resource do
defstruct sid: nil
end
doctest ExTwilio.Parser
test ".parse should decode a successful response into a named struct" do
response = %{body: "{ \"sid\": \"unique_id\" }", status_code: 200}
assert {:ok, %Resource{sid: "unique_id"}} == parse(response, Resource)
end
test ".parse should return an error when response is 400" do
response = %{body: "{ \"message\": \"Error message\" }", status_code: 400}
assert {:error, "Error message", 400} == parse(response, Resource)
end
test ".parse should return :ok when response is 204 'No Content'" do
response = %{body: "", status_code: 204}
assert :ok == parse(response, Resource)
end
test ".parse_list should decode into a list of named structs" do
json = """
{
"resources": [{
"sid": "first"
}, {
"sid": "second"
}],
"next_page": 10
}
"""
response = %{body: json, status_code: 200}
expected = [%Resource{sid: "first"}, %Resource{sid: "second"}]
metadata = %{"next_page" => 10}
assert {:ok, expected, metadata} == parse_list(response, Resource, "resources")
end
end
| 27.4 | 83 | 0.621249 |
7902d6469170d78771f6cfab28004760cb71e3f5 | 453 | exs | Elixir | priv/repo/migrations/20160918003206_fix_mixed_case_slugs.exs | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | 275 | 2015-06-23T00:20:51.000Z | 2021-08-19T16:17:37.000Z | priv/repo/migrations/20160918003206_fix_mixed_case_slugs.exs | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | 1,304 | 2015-06-26T02:11:54.000Z | 2019-12-12T21:08:00.000Z | priv/repo/migrations/20160918003206_fix_mixed_case_slugs.exs | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | 140 | 2016-01-01T18:19:47.000Z | 2020-11-22T06:24:47.000Z | defmodule CodeCorps.Repo.Migrations.FixMixedCaseSlugs do
use Ecto.Migration
alias CodeCorps.Repo
alias CodeCorps.SluggedRoute
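  # Inflex.parameterize/1 downcases the slug (e.g. a mixed-case slug such as
  # "JoshSmith" becomes "joshsmith"), which is the fix this migration applies.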
def up do
SluggedRoute
|> Repo.all
|> Repo.preload([:user, :organization])
|> Enum.each(fn record ->
SluggedRoute.changeset(record)
|> Ecto.Changeset.put_change(:slug, Inflex.parameterize(record.slug))
|> Repo.update!
end)
end
def down do
# Nothing to do here
end
end
| 20.590909 | 75 | 0.679912 |
7902d8970c9933f8cdb0558523545ce89b193a73 | 1,734 | exs | Elixir | test/console/cron/jobs_test.exs | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | 6 | 2021-11-17T21:10:49.000Z | 2022-02-16T19:45:28.000Z | test/console/cron/jobs_test.exs | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | 18 | 2021-11-25T04:31:06.000Z | 2022-03-27T04:54:00.000Z | test/console/cron/jobs_test.exs | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | null | null | null | defmodule Console.Cron.JobsTest do
use Console.DataCase, async: true
alias Console.Cron.Jobs
describe "#prune_builds/0" do
test "It will delete expired builds" do
keep = insert_list(2, :build)
expire = insert_list(2, :build, inserted_at: Timex.now() |> Timex.shift(days: -100))
{_, _} = Jobs.prune_builds()
for b <- keep,
do: assert refetch(b)
for b <- expire,
do: refute refetch(b)
end
end
describe "#prune_invites/0" do
test "It will delete expired invites" do
keep = insert_list(2, :invite)
expire = insert_list(2, :invite, inserted_at: Timex.now() |> Timex.shift(days: -8))
{_, _} = Jobs.prune_invites()
for invite <- keep,
do: assert refetch(invite)
for invite <- expire,
do: refute refetch(invite)
end
end
describe "#prune_notifications/0" do
test "it will delete old notifications" do
keep = insert_list(3, :notification)
expire = insert_list(3, :notification, inserted_at: Timex.now() |> Timex.shift(days: -40))
{_, _} = Jobs.prune_notifications()
for notif <- keep,
do: assert refetch(notif)
for notif <- expire,
do: refute refetch(notif)
end
end
describe "#fail_builds/0" do
test "old running builds will be auto-failed" do
old = insert(:build, status: :running, pinged_at: Timex.now() |> Timex.shift(hours: -1))
new = insert(:build, status: :running, pinged_at: Timex.now())
Jobs.fail_builds()
assert refetch(old).status == :failed
refute refetch(new).status == :failed
assert_receive {:event, %Console.PubSub.BuildFailed{item: build}}
assert build.id == old.id
end
end
end
| 26.272727 | 96 | 0.625144 |
7903379492ef7e3ff6211dc850dd4c70076c3e6f | 3,478 | ex | Elixir | apps/api_web/lib/api_web/user.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | null | null | null | apps/api_web/lib/api_web/user.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | null | null | null | apps/api_web/lib/api_web/user.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 1 | 2019-09-09T20:40:13.000Z | 2019-09-09T20:40:13.000Z | defmodule ApiWeb.User do
@moduledoc """
Struct for respresenting a user during a request.
"""
@default_version Application.get_env(:api_web, :versions)[:default]
defstruct [
:id,
:type,
:limit,
:version,
:allowed_domains
]
@typedoc """
The anonymous user's IP Address
1. `String.t` - the `X-Forwarded-For` header
2. `:inet.ip_address` - the IP address of the `Plug.Conn.t`
"""
@type anon_id :: :inet.ip_address() | String.t()
@typedoc """
The default version for the given key.
"""
@type version :: String.t()
@typedoc """
The list of domains to match for the access-control-allow-origin header
"""
@type allowed_domains :: String.t()
@typedoc """
The max number of requests per day that the user can make.
"""
@type requests_per_day :: integer
@typedoc """
Whether the user is an anonymous user or a registered user.
* `:anon` - An anonymous user that is tracked by IP address
* `:registered` - The user registered for an API key and are tracked by `id`.
"""
@type type :: :registered | :anon
@typedoc """
There are two types of users `:anon` and `:registered`
## `:anon`
* `:id` - The effective IP address
* `:limit` - anonymous users cannot have a requests per day limit increase from the default
(#{ApiWeb.RateLimiter.max_anon_per_interval() * ApiWeb.RateLimiter.intervals_per_day()}), which is indicated by `nil`.
* `:type` - `:anon`
## `:registered`
* `:id` - The API key used by the user for the API request
* `:limit` - `nil` indicates the default, registered user limit
(#{
ApiWeb.RateLimiter.max_registered_per_interval() * ApiWeb.RateLimiter.intervals_per_day()
}); `integer` is increased
requests per day granted after the user requested an increase.
* `:type` - `:registered`
"""
@type t ::
%__MODULE__{
id: anon_id,
version: version,
limit: nil,
type: :anon,
allowed_domains: allowed_domains
}
| %__MODULE__{
id: ApiAccounts.Key.key(),
version: version,
limit: requests_per_day | nil,
type: :registered,
allowed_domains: allowed_domains
}
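  # Illustrative values (not taken from real keys): an anonymous user looks
  # like %ApiWeb.User{id: {127, 0, 0, 1}, type: :anon, limit: nil}, while a
  # registered user looks like %ApiWeb.User{id: "some_api_key",
  # type: :registered, limit: 100_000}.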
@doc """
Creates an anonymous user with a given identifier.
iex> ApiWeb.User.anon("some_id")
%ApiWeb.User{id: "some_id", type: :anon, limit: nil, version: "#{@default_version}", allowed_domains: "*"}
"""
@spec anon(any) :: t
def anon(id),
do: %__MODULE__{id: id, type: :anon, version: @default_version, allowed_domains: "*"}
@doc """
Creates a user struct from a valid Key.
iex(1)> key = %ApiAccounts.Key{key: "key", user_id: "1", daily_limit: 10, api_version: "2017-11-28"}
iex(2)> ApiWeb.User.from_key(key)
%ApiWeb.User{id: "key", limit: 10, type: :registered, version: "2017-11-28", allowed_domains: "*"}
"""
@spec from_key(ApiAccounts.Key.t()) :: t
def from_key(%ApiAccounts.Key{
key: key,
daily_limit: limit,
api_version: version,
allowed_domains: allowed_domains
}) do
version = version || @default_version
%__MODULE__{
id: key,
limit: limit,
type: :registered,
version: version,
allowed_domains: nil_or_allowed_domains(allowed_domains)
}
end
defp nil_or_allowed_domains(nil), do: "*"
defp nil_or_allowed_domains(allowed_domains), do: allowed_domains
end
| 27.824 | 124 | 0.625359 |
7903383b1448d3a47f6401253fdfd5c9874baa39 | 465 | ex | Elixir | lib/rihanna_ui/schemas/etf.ex | nested-tech/rihanna_ui | 89ad67e443c9fd6d014f56c2300e285572a570e8 | [
"MIT"
] | null | null | null | lib/rihanna_ui/schemas/etf.ex | nested-tech/rihanna_ui | 89ad67e443c9fd6d014f56c2300e285572a570e8 | [
"MIT"
] | null | null | null | lib/rihanna_ui/schemas/etf.ex | nested-tech/rihanna_ui | 89ad67e443c9fd6d014f56c2300e285572a570e8 | [
"MIT"
] | null | null | null | defmodule RihannaUI.ETF do
@behaviour Ecto.Type
def type, do: :bytea
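  # Round-trip sketch: dumping an MFA tuple and loading it back returns the
  # original term.
  #
  #     {:ok, bin} = RihannaUI.ETF.dump({IO, :puts, ["hi"]})
  #     {:ok, {IO, :puts, ["hi"]}} = RihannaUI.ETF.load(bin)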
  def load(nil), do: {:ok, nil}
  def load(serialized_mfa) when is_binary(serialized_mfa) do
    {:ok, :erlang.binary_to_term(serialized_mfa)}
  end
  def load(_), do: :error
def dump(mfa) do
{:ok, :erlang.term_to_binary(mfa)}
end
def cast(mfa = {mod, fun, args}) when is_atom(mod) and is_atom(fun) and is_list(args) do
{:ok, mfa}
end
def cast(_), do: :error
end
| 21.136364 | 90 | 0.658065 |
790367f1f4dbf63a573e936f1826aefed1734512 | 657 | ex | Elixir | lib/checkout/token.ex | belloq/checkout_elixir | b36391000f502c714649153a39e332b53fe8a24d | [
"MIT"
] | 1 | 2019-07-23T08:44:17.000Z | 2019-07-23T08:44:17.000Z | lib/checkout/token.ex | belloq/checkout_elixir | b36391000f502c714649153a39e332b53fe8a24d | [
"MIT"
] | 2 | 2018-09-13T08:31:52.000Z | 2020-03-26T13:03:14.000Z | lib/checkout/token.ex | belloq/checkout_elixir | b36391000f502c714649153a39e332b53fe8a24d | [
"MIT"
] | 2 | 2019-10-17T17:14:24.000Z | 2020-05-18T12:45:20.000Z | defmodule Checkout.Token do
@endpoint "tokens"
@doc """
Create a Payment Token with the given payment data.
Checkout API reference: https://api-reference.checkout.com/#tag/Tokens
## Example
```
Checkout.Token.create(%{
type: "applepay",
token_data: %{
version: "EC_v1",
data: "t7GeajLB9skXB...EE2QE=",
signature: "MIAGCSq...f0AAAAAAAA=",
header: %{
ephemeralPublicKey: "MFkwEwY...r3K/zlsw==",
publicKeyHash: "tqYV+t...gEz6Nl0=",
transactionId: "3cee896791...d17b4"
}
      }
    }
  })
"""
def create(params) do
Checkout.make_request(:post, @endpoint, params, false)
end
end
| 23.464286 | 72 | 0.616438 |
7903716082ffe396f6d2a7c604c82a277826de0c | 86 | exs | Elixir | .check.exs | thepeoplesbourgeois/phoenix_live_controller | 04286b79697e9a7a62abb37efd7725dd9124a6c3 | [
"MIT"
] | 35 | 2020-04-15T17:15:36.000Z | 2021-11-08T08:50:50.000Z | .check.exs | thepeoplesbourgeois/phoenix_live_controller | 04286b79697e9a7a62abb37efd7725dd9124a6c3 | [
"MIT"
] | 3 | 2020-04-15T14:07:28.000Z | 2022-03-25T23:08:23.000Z | .check.exs | thepeoplesbourgeois/phoenix_live_controller | 04286b79697e9a7a62abb37efd7725dd9124a6c3 | [
"MIT"
] | 3 | 2020-11-08T15:37:31.000Z | 2022-03-23T05:36:54.000Z | [
tools: [
{:credo, false},
{:dialyzer, false},
{:sobelow, false}
]
]
| 10.75 | 23 | 0.465116 |
7903ab6bc4792c9e0fe13efe16b6423a61842ea3 | 501 | exs | Elixir | test/03_geology_test.exs | skovmand/advent_of_code_2020 | 40c795a2bd4c0378fd7ecffeb32f7fba6dcfcfdc | [
"MIT"
] | null | null | null | test/03_geology_test.exs | skovmand/advent_of_code_2020 | 40c795a2bd4c0378fd7ecffeb32f7fba6dcfcfdc | [
"MIT"
] | null | null | null | test/03_geology_test.exs | skovmand/advent_of_code_2020 | 40c795a2bd4c0378fd7ecffeb32f7fba6dcfcfdc | [
"MIT"
] | null | null | null | defmodule Advent20.TreesTest do
use ExUnit.Case, async: true
alias Advent20.Geology
@input_filename Path.expand("03_geology.txt", "input_files")
test "1: Count trees encountered with slope right 3, 1 down" do
assert Geology.count_trees(@input_filename, {3, 1}) == 162
end
test "2: Calculate product of multiple slopes" do
slopes = [{1, 1}, {3, 1}, {5, 1}, {7, 1}, {1, 2}]
assert Geology.multiple_slope_tree_count_product(@input_filename, slopes) == 3_064_612_320
end
end
| 29.470588 | 94 | 0.702595 |
7903b07db3a08b8bb74d67a229d2f6fd0a00caae | 2,437 | ex | Elixir | clients/redis/lib/google_api/redis/v1beta1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/redis/lib/google_api/redis/v1beta1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/redis/lib/google_api/redis/v1beta1/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Redis.V1beta1.Model.Location do
@moduledoc """
  A resource that represents a Google Cloud Platform location.
## Attributes
* `displayName` (*type:* `String.t`, *default:* `nil`) - The friendly name for this location, typically a nearby city name. For example, "Tokyo".
* `labels` (*type:* `map()`, *default:* `nil`) - Cross-service attributes for the location. For example {"cloud.googleapis.com/region": "us-east1"}
* `locationId` (*type:* `String.t`, *default:* `nil`) - Resource ID for the region. For example: "us-east1".
* `metadata` (*type:* `map()`, *default:* `nil`) - Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by Compute Engine. These keys can be specified in `location_id` or `alternative_location_id` fields when creating a Redis instance.
* `name` (*type:* `String.t`, *default:* `nil`) - Full resource name for the region. For example: "projects/example-project/locations/us-east1".
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t() | nil,
:labels => map() | nil,
:locationId => String.t() | nil,
:metadata => map() | nil,
:name => String.t() | nil
}
field(:displayName)
field(:labels, type: :map)
field(:locationId)
field(:metadata, type: :map)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.Redis.V1beta1.Model.Location do
def decode(value, options) do
GoogleApi.Redis.V1beta1.Model.Location.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Redis.V1beta1.Model.Location do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.305085 | 305 | 0.697989 |
7903d36fde603d96743ae893e7bbbae0e758b6ed | 913 | exs | Elixir | mix.exs | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | [
"MIT"
] | null | null | null | mix.exs | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | [
"MIT"
] | null | null | null | mix.exs | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | [
"MIT"
] | null | null | null | defmodule Nautilus.MixProject do
use Mix.Project
def project do
[
app: :nautilus,
version: "0.1.0",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps(),
      # Docs
name: "Nautilus Gateway",
docs: [
main: "readme",
api_reference: false,
formatters: ["html"],
extras: ["README.md", "LICENSE"],
nest_modules_by_prefix: [Nautilus.Core, Nautilus.Adapters, Nautilus.Ports]
]
]
end
def application do
[
extra_applications: [:logger],
mod: {Nautilus, []}
]
end
defp deps do
[
{:uuid, "~> 1.1"},
{:ranch, "~> 2.0"},
{:ex_doc, "~> 0.23.0", only: :dev, runtime: false},
]
end
end
| 21.232558 | 90 | 0.421687 |
7903f5b9270adcd3427975bb4cc1415fb1de2c73 | 769 | ex | Elixir | test/support/channel_case.ex | bmartin2015/ex_bin | efa6755271f28fb67db862ac849c4449a067e9a4 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | bmartin2015/ex_bin | efa6755271f28fb67db862ac849c4449a067e9a4 | [
"MIT"
] | 2 | 2021-03-09T15:45:09.000Z | 2021-05-10T09:11:46.000Z | test/support/channel_case.ex | bmartin2015/ex_bin | efa6755271f28fb67db862ac849c4449a067e9a4 | [
"MIT"
] | null | null | null | defmodule ExBinWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint ExBinWeb.Endpoint
end
end
setup _tags do
:ok
end
end
| 24.03125 | 59 | 0.725618 |
790404afb3e4b709509a0e514138319e08f075b3 | 1,328 | ex | Elixir | apps/welcome2_web/lib/welcome2_web/controllers/game_controller.ex | philihp/welcome2_umbrella | f5f494b98fe4b64a3e1bbfc1b8b432aad7f8c3b2 | [
"MIT"
] | null | null | null | apps/welcome2_web/lib/welcome2_web/controllers/game_controller.ex | philihp/welcome2_umbrella | f5f494b98fe4b64a3e1bbfc1b8b432aad7f8c3b2 | [
"MIT"
] | 13 | 2020-03-22T08:00:57.000Z | 2022-03-07T16:35:36.000Z | apps/welcome2_web/lib/welcome2_web/controllers/game_controller.ex | philihp/welcome2_umbrella | f5f494b98fe4b64a3e1bbfc1b8b432aad7f8c3b2 | [
"MIT"
] | null | null | null | defmodule Welcome2Web.GameController do
use Welcome2Web, :controller
def index(conn, _params) do
conn
|> render("index.html")
end
def new(conn, _params) do
game_conn = Welcome2Game.new_game()
game_view = Welcome2Game.make_move(game_conn, {})
moves = game_view[:moves]
conn
|> put_session(:game_conn, game_conn)
|> assign(:game, game_view)
|> assign(:moves, moves)
|> assign(:action, "")
|> render("new.html")
end
def advance(conn, params) do
action =
params["action"]
|> String.split()
|> to_move
game_conn = get_session(conn, :game_conn)
game_view = Welcome2Game.make_move(game_conn, action)
moves = game_view[:moves]
conn
|> assign(:game, game_view)
|> assign(:moves, moves)
|> assign(:action, action)
|> render("new.html")
end
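  # Parses the space-separated action string from the form into a move term,
  # e.g. (action names illustrative): "" -> :identity, "pass" -> :pass, and
  # "build 2 a" -> {:build, 2, :a}.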
defp to_move([]) do
:identity
end
defp to_move([command]) do
String.to_atom(command)
end
defp to_move([command | params]) do
List.to_tuple([String.to_atom(command) | Enum.map(params, &to_param/1)])
end
defp to_param(param) when param in ["a", "b", "c"] do
String.to_atom(param)
end
defp to_param(param) do
param |> Integer.parse() |> to_num
end
defp to_num(:error) do
999
end
defp to_num({digit, _}) do
digit
end
end
| 19.820896 | 76 | 0.628012 |
79040d32b225fa40e3c4cc56adeb504c894e20e1 | 719 | ex | Elixir | lib/data/stats/type.ex | the3hm/seraph_mud | c098a22184a0a6bcbd56cacf7b0f176dd173260d | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | lib/data/stats/type.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | lib/data/stats/type.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Data.Stats.Type do
@moduledoc """
Ecto type for a database column matching a stat type
"""
alias Data.Stats
@behaviour Ecto.Type
@impl Ecto.Type
def type, do: :string
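  # Casting round-trip sketch, assuming "strength" is one of
  # Stats.basic_fields/0:
  #
  #     {:ok, :strength} = Data.Stats.Type.cast("strength")
  #     {:ok, "strength"} = Data.Stats.Type.dump(:strength)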
@impl Ecto.Type
def cast(stat) do
fields = Stats.basic_fields() |> Enum.map(&to_string/1)
case stat in fields do
true ->
{:ok, String.to_atom(stat)}
false ->
:error
end
end
@impl Ecto.Type
def load(stat) do
{:ok, String.to_atom(stat)}
end
@impl Ecto.Type
def dump(stat) when is_atom(stat) do
{:ok, to_string(stat)}
end
def dump(_), do: :error
@impl true
def embed_as(_), do: :self
@impl true
def equal?(term1, term2), do: term1 == term2
end
| 16.340909 | 59 | 0.616134 |
79041de69e8232c4fd491c5f3581f8033daa3c98 | 955 | exs | Elixir | config/prod.secret.exs | JohnB/todays_pizza | 816c8f4f566558bbf8910ef6dd20e363bcddf8fd | [
"MIT"
] | null | null | null | config/prod.secret.exs | JohnB/todays_pizza | 816c8f4f566558bbf8910ef6dd20e363bcddf8fd | [
"MIT"
] | null | null | null | config/prod.secret.exs | JohnB/todays_pizza | 816c8f4f566558bbf8910ef6dd20e363bcddf8fd | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
config :todays_pizza, TodaysPizzaWeb.Endpoint,
http: [
port: String.to_integer(System.get_env("PORT") || "4000"),
transport_options: [socket_opts: [:inet6]]
],
secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :todays_pizza, TodaysPizzaWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 31.833333 | 65 | 0.735079 |
79044681583448928f56c593188d611038befb93 | 670 | ex | Elixir | samples/client/petstore/elixir/lib/open_api_petstore/model/additional_properties_class.ex | kymbalon/openapi-generator | 8327a920408314aacb7d00f64285ae88e9195633 | [
"Apache-2.0"
] | 2 | 2019-04-17T19:01:32.000Z | 2019-04-17T19:05:56.000Z | samples/client/petstore/elixir/lib/open_api_petstore/model/additional_properties_class.ex | kymbalon/openapi-generator | 8327a920408314aacb7d00f64285ae88e9195633 | [
"Apache-2.0"
] | 7 | 2021-03-01T21:26:03.000Z | 2022-02-27T10:10:20.000Z | samples/client/petstore/elixir/lib/open_api_petstore/model/additional_properties_class.ex | kymbalon/openapi-generator | 8327a920408314aacb7d00f64285ae88e9195633 | [
"Apache-2.0"
] | 4 | 2019-04-08T17:06:09.000Z | 2020-06-09T18:16:08.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule OpenAPIPetstore.Model.AdditionalPropertiesClass do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"map_property",
:"map_of_map_property"
]
@type t :: %__MODULE__{
:"map_property" => %{optional(String.t) => String.t} | nil,
:"map_of_map_property" => %{optional(String.t) => %{optional(String.t) => String.t}} | nil
}
end
defimpl Poison.Decoder, for: OpenAPIPetstore.Model.AdditionalPropertiesClass do
def decode(value, _options) do
value
end
end
| 23.928571 | 94 | 0.692537 |
79046b678ebf83ab306dfc06026a40c0dff59d27 | 13,268 | ex | Elixir | lib/ash_csv/data_layer.ex | TheFirstAvenger/ash_csv | deeb9ad1ce50c28ba83247970a21cd28494f31ab | [
"MIT"
] | null | null | null | lib/ash_csv/data_layer.ex | TheFirstAvenger/ash_csv | deeb9ad1ce50c28ba83247970a21cd28494f31ab | [
"MIT"
] | null | null | null | lib/ash_csv/data_layer.ex | TheFirstAvenger/ash_csv | deeb9ad1ce50c28ba83247970a21cd28494f31ab | [
"MIT"
] | null | null | null | defmodule AshCsv.DataLayer do
@moduledoc "The data layer implementation for AshCsv"
@behaviour Ash.DataLayer
alias Ash.Actions.Sort
alias Ash.Dsl.Extension
alias Ash.Filter.{Expression, Not, Predicate}
alias Ash.Filter.Predicate.{Eq, GreaterThan, In, LessThan}
@impl true
def can?(_, :read), do: true
def can?(_, :create), do: true
def can?(_, :update), do: true
def can?(_, :destroy), do: true
def can?(_, :sort), do: true
def can?(_, :filter), do: true
def can?(_, :limit), do: true
def can?(_, :offset), do: true
def can?(_, :boolean_filter), do: true
def can?(_, :transact), do: true
def can?(_, :delete_with_query), do: false
def can?(_, {:filter_predicate, _, %In{}}), do: true
def can?(_, {:filter_predicate, _, %Eq{}}), do: true
def can?(_, {:filter_predicate, _, %LessThan{}}), do: true
def can?(_, {:filter_predicate, _, %GreaterThan{}}), do: true
def can?(_, {:sort, _}), do: true
def can?(_, _), do: false
@csv %Ash.Dsl.Section{
name: :csv,
schema: [
file: [
type: :string,
doc: "The file to read the data from",
required: true
],
create?: [
type: :boolean,
doc:
"Whether or not the file should be created if it does not exist (this will only happen on writes)",
default: false
],
header?: [
type: :boolean,
default: false,
doc: "If the csv file has a header that should be skipped"
],
separator: [
type: {:custom, __MODULE__, :separator_opt, []},
default: ?,,
doc: "The separator to use, defaults to a comma. Pass in a character (not a string)."
],
columns: [
type: {:custom, __MODULE__, :columns_opt, []},
default: [],
doc: "The order that the attributes appear in the columns of the CSV"
]
]
}
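  # Usage sketch (resource and column names hypothetical): a resource adopts
  # this data layer and configures the section above with a `csv do ... end`
  # block.
  #
  #     defmodule MyApp.Post do
  #       use Ash.Resource, data_layer: AshCsv.DataLayer
  #
  #       csv do
  #         file "priv/posts.csv"
  #         create? true
  #         header? true
  #         separator ?,
  #         columns [:id, :title]
  #       end
  #     end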
def file(resource) do
resource
|> Extension.get_opt([:csv], :file, "", true)
|> Path.expand(File.cwd!())
end
def columns(resource) do
Extension.get_opt(resource, [:csv], :columns, [], true)
end
def separator(resource) do
Extension.get_opt(resource, [:csv], :separator, nil, true)
end
def header?(resource) do
Extension.get_opt(resource, [:csv], :header?, nil, true)
end
def create?(resource) do
Extension.get_opt(resource, [:csv], :create?, nil, true)
end
@impl true
  def limit(query, limit, _), do: {:ok, %{query | limit: limit}}
@impl true
def offset(query, offset, _), do: {:ok, %{query | offset: offset}}
@impl true
def filter(query, filter, _resource) do
{:ok, %{query | filter: filter}}
end
@impl true
def sort(query, sort, _resource) do
{:ok, %{query | sort: sort}}
end
  @doc false
  def columns_opt(columns) do
    if Enum.all?(columns, &is_atom/1) do
      {:ok, columns}
    else
      {:error, "Expected all columns to be atoms"}
    end
  end
  @doc false
  # Validates the `separator` option, which the schema above references via
  # `{:custom, __MODULE__, :separator_opt, []}`; characters such as ?, are
  # plain integers.
  def separator_opt(separator) when is_integer(separator), do: {:ok, separator}
  def separator_opt(other),
    do: {:error, "Expected a character for separator, got: #{inspect(other)}"}
use Extension, sections: [@csv]
defmodule Query do
@moduledoc false
defstruct [:resource, :sort, :filter, :limit, :offset]
end
@impl true
def run_query(query, resource) do
case read_file(resource) do
{:ok, results} ->
offset_records =
results
|> filter_matches(query.filter)
|> Sort.runtime_sort(query.sort)
|> Enum.drop(query.offset || 0)
if query.limit do
{:ok, Enum.take(offset_records, query.limit)}
else
{:ok, offset_records}
end
{:error, error} ->
{:error, error}
end
rescue
e in File.Error ->
if create?(resource) do
{:ok, []}
else
{:error, e}
end
end
@impl true
def create(resource, changeset) do
case run_query(%Query{resource: resource}, resource) do
{:ok, records} ->
create_from_records(records, resource, changeset)
{:error, error} ->
{:error, error}
end
end
@impl true
def update(resource, changeset) do
resource
|> do_read_file()
|> do_update(resource, changeset)
end
@impl true
def destroy(%resource{} = record) do
resource
|> do_read_file()
|> do_destroy(resource, record)
end
defp cast_stored(resource, keys) do
Enum.reduce_while(keys, {:ok, resource.__struct__}, fn {key, value}, {:ok, record} ->
with attribute when not is_nil(attribute) <- Ash.Resource.attribute(resource, key),
{:ok, loaded} <- Ash.Type.cast_stored(attribute.type, value) do
{:cont, {:ok, struct(record, [{key, loaded}])}}
else
nil ->
{:halt, {:error, "#{key} is not an attribute"}}
:error ->
{:halt, {:error, "#{key} could not be loaded"}}
end
end)
end
@impl true
def resource_to_query(resource) do
%Query{resource: resource}
end
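  # transaction/2 serializes writers on a per-file :global lock and marks the
  # process as in a transaction; rollback/2 throws a value tagged with the
  # file, which the catch clause below turns into an {:error, value} result.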
@impl true
def transaction(resource, fun) do
file = file(resource)
:global.trans({{:csv, file}, System.unique_integer()}, fn ->
try do
Process.put({:csv_in_transaction, file(resource)}, true)
{:res, fun.()}
catch
{{:csv_rollback, ^file}, value} ->
{:error, value}
end
end)
|> case do
{:res, result} -> {:ok, result}
{:error, error} -> {:error, error}
:aborted -> {:error, "transaction failed"}
end
end
@impl true
def rollback(resource, error) do
throw({{:csv_rollback, file(resource)}, error})
end
@impl true
def in_transaction?(resource) do
Process.get({:csv_in_transaction, file(resource)}, false) == true
end
def filter_matches(records, nil), do: records
def filter_matches(records, filter) do
Enum.filter(records, &matches_filter?(&1, filter.expression))
end
defp matches_filter?(_record, nil), do: true
defp matches_filter?(_record, boolean) when is_boolean(boolean), do: boolean
defp matches_filter?(
record,
%Predicate{
predicate: predicate,
attribute: %{name: name},
relationship_path: []
}
) do
matches_predicate?(record, name, predicate)
end
defp matches_filter?(record, %Expression{op: :and, left: left, right: right}) do
matches_filter?(record, left) && matches_filter?(record, right)
end
defp matches_filter?(record, %Expression{op: :or, left: left, right: right}) do
matches_filter?(record, left) || matches_filter?(record, right)
end
defp matches_filter?(record, %Not{expression: expression}) do
not matches_filter?(record, expression)
end
defp matches_predicate?(record, field, %Eq{value: predicate_value}) do
Map.fetch(record, field) == {:ok, predicate_value}
end
defp matches_predicate?(record, field, %LessThan{value: predicate_value}) do
case Map.fetch(record, field) do
{:ok, value} -> value < predicate_value
:error -> false
end
end
defp matches_predicate?(record, field, %GreaterThan{value: predicate_value}) do
case Map.fetch(record, field) do
{:ok, value} -> value > predicate_value
:error -> false
end
end
defp matches_predicate?(record, field, %In{values: predicate_values}) do
case Map.fetch(record, field) do
{:ok, value} -> value in predicate_values
:error -> false
end
end
# sobelow_skip ["Traversal.FileModule"]
defp do_destroy({:ok, results}, resource, record) do
columns = columns(resource)
pkey = Ash.Resource.primary_key(resource)
changeset_pkey = Map.take(record, pkey)
results
|> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
key_vals =
columns
|> Enum.zip(result)
|> Enum.reject(fn {key, _value} ->
key == :_
end)
cast(resource, key_vals, pkey, changeset_pkey, result, results)
end)
|> case do
{:ok, rows} ->
lines =
rows
|> CSV.encode(separator: separator(resource))
|> Enum.to_list()
resource
|> file()
|> File.write(lines, [:write])
|> case do
:ok ->
:ok
{:error, error} ->
{:error, "Error while writing to CSV: #{inspect(error)}"}
        end
      {:error, error} ->
        {:error, error}
    end
end
defp do_destroy({:error, error}, _, _), do: {:error, error}
defp cast(resource, key_vals, pkey, changeset_pkey, result, results) do
case cast_stored(resource, key_vals) do
{:ok, casted} ->
if Map.take(casted, pkey) == changeset_pkey do
{:cont, {:ok, results}}
else
{:cont, {:ok, [result | results]}}
end
{:error, error} ->
{:halt, {:error, error}}
end
end
defp do_update({:error, error}, _, _) do
{:error, error}
end
# sobelow_skip ["Traversal.FileModule"]
defp do_update({:ok, results}, resource, changeset) do
columns = columns(resource)
pkey = Ash.Resource.primary_key(resource)
changeset_pkey =
Enum.into(pkey, %{}, fn key ->
{key, Ash.Changeset.get_attribute(changeset, key)}
end)
results
|> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
key_vals =
columns
|> Enum.zip(result)
|> Enum.reject(fn {key, _value} ->
key == :_
end)
dump(resource, changeset, results, result, key_vals, pkey, changeset_pkey)
end)
|> case do
{:ok, rows} ->
lines =
rows
|> CSV.encode(separator: separator(resource))
|> Enum.to_list()
resource
|> file()
|> File.write(lines, [:write])
|> case do
:ok ->
{:ok, struct(changeset.data, changeset.attributes)}
{:error, error} ->
{:error, "Error while writing to CSV: #{inspect(error)}"}
end
end
end
defp dump(resource, changeset, results, result, key_vals, pkey, changeset_pkey) do
case cast_stored(resource, key_vals) do
{:ok, casted} ->
if Map.take(casted, pkey) == changeset_pkey do
dump_row(resource, changeset, results)
else
{:cont, {:ok, [result | results]}}
end
{:error, error} ->
{:halt, {:error, error}}
end
end
defp dump_row(resource, changeset, results) do
Enum.reduce_while(columns(resource), {:ok, []}, fn key, {:ok, row} ->
type = Ash.Resource.attribute(resource, key).type
value = Ash.Changeset.get_attribute(changeset, key)
case Ash.Type.dump_to_native(type, value) do
{:ok, value} ->
{:cont, {:ok, [to_string(value) | row]}}
:error ->
{:halt, {:error, "Could not dump #{key} to native type"}}
end
end)
|> case do
{:ok, new_row} ->
{:cont, {:ok, [new_row | results]}}
{:error, error} ->
{:halt, {:error, error}}
end
end
defp read_file(resource) do
columns = columns(resource)
resource
|> do_read_file()
|> case do
{:ok, results} ->
do_cast_stored(results, columns, resource)
{:error, error} ->
{:error, error}
end
end
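  # read_file/1 is the read-side pipeline: stream the CSV from disk, decode
  # each row, then cast the raw strings back into resource attributes via
  # cast_stored/2 (see do_cast_stored/3 below).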
defp do_cast_stored(results, columns, resource) do
results
|> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
key_vals =
columns
|> Enum.zip(result)
|> Enum.reject(fn {key, _value} ->
key == :_
end)
case cast_stored(resource, key_vals) do
{:ok, casted} -> {:cont, {:ok, [casted | results]}}
{:error, error} -> {:halt, {:error, error}}
end
end)
end
defp do_read_file(resource) do
amount_to_drop =
if header?(resource) do
1
else
0
end
resource
|> file()
|> File.stream!()
|> Stream.drop(amount_to_drop)
|> CSV.decode(separator: separator(resource))
|> Enum.reduce_while({:ok, []}, fn
{:ok, result}, {:ok, results} ->
{:cont, {:ok, [result | results]}}
{:error, error}, _ ->
{:halt, {:error, error}}
end)
end
# sobelow_skip ["Traversal.FileModule"]
defp create_from_records(records, resource, changeset) do
pkey = Ash.Resource.primary_key(resource)
pkey_value = Map.take(changeset.attributes, pkey)
if Enum.any?(records, fn record -> Map.take(record, pkey) == pkey_value end) do
{:error, "Record is not unique"}
else
row =
Enum.reduce_while(columns(resource), {:ok, []}, fn key, {:ok, row} ->
type = Ash.Resource.attribute(resource, key).type
value = Map.get(changeset.attributes, key)
case Ash.Type.dump_to_native(type, value) do
{:ok, value} ->
{:cont, {:ok, [to_string(value) | row]}}
:error ->
{:halt, {:error, "Could not dump #{key} to native type"}}
end
end)
case row do
{:ok, row} ->
lines =
[Enum.reverse(row)]
|> CSV.encode(separator: separator(resource))
|> Enum.to_list()
resource
|> file()
|> File.write(lines, [:append])
|> case do
:ok ->
{:ok, struct(resource, changeset.attributes)}
{:error, error} ->
{:error, "Error while writing to CSV: #{inspect(error)}"}
end
{:error, error} ->
{:error, error}
end
end
end
end
# --- file: lib/phoenix_live_view/html_engine.ex (repo: khionu/phoenix_live_view) ---
defmodule Phoenix.LiveView.HTMLEngine do
@moduledoc """
The HTMLEngine that powers `.heex` templates and the `~H` sigil.
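
  A rough usage sketch (illustrative only; templates normally reach this
  engine through the `~H` sigil or `.heex` files rather than directly):

      EEx.compile_string("<div><%= @msg %></div>",
        engine: Phoenix.LiveView.HTMLEngine,
        line: 1,
        # the :module option is assumed here purely for illustration
        module: __MODULE__
      )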
"""
# TODO: Use @impl true instead of @doc false when we require Elixir v1.12
alias Phoenix.LiveView.HTMLTokenizer
alias Phoenix.LiveView.HTMLTokenizer.ParseError
@behaviour Phoenix.Template.Engine
@doc false
def compile(path, _name) do
trim = Application.get_env(:phoenix, :trim_on_html_eex_engine, true)
EEx.compile_file(path, engine: __MODULE__, line: 1, trim: trim)
end
@behaviour EEx.Engine
@doc false
def init(opts) do
{subengine, opts} = Keyword.pop(opts, :subengine, Phoenix.LiveView.Engine)
{module, opts} = Keyword.pop(opts, :module)
unless subengine do
raise ArgumentError, ":subengine is missing for HTMLEngine"
end
%{
tokens: [],
subengine: subengine,
substate: subengine.init([]),
module: module,
file: Keyword.get(opts, :file, "nofile"),
indentation: Keyword.get(opts, :indentation, 0)
}
end
## These callbacks return AST
@doc false
def handle_body(state) do
tokens =
state.tokens
|> strip_text_space()
|> Enum.reverse()
|> strip_text_space()
token_state =
state
|> token_state()
|> handle_tokens(tokens)
validate_unclosed_tags!(token_state)
opts = [root: token_state.root || false]
ast = invoke_subengine(token_state, :handle_body, [opts])
# Do not require if calling module is helpers. Fix for elixir < 1.12
# TODO remove after Elixir >= 1.12 support
if state.module === Phoenix.LiveView.Helpers do
ast
else
quote do
require Phoenix.LiveView.Helpers
unquote(ast)
end
end
end
defp validate_unclosed_tags!(%{tags: []} = state) do
state
end
defp validate_unclosed_tags!(%{tags: [tag | _]} = state) do
{:tag_open, name, _attrs, %{line: line, column: column}} = tag
file = state.file
message = "end of file reached without closing tag for <#{name}>"
raise ParseError, line: line, column: column, file: file, description: message
end
@doc false
def handle_end(state) do
state
|> token_state()
|> update_subengine(:handle_begin, [])
|> handle_tokens(Enum.reverse(state.tokens))
|> invoke_subengine(:handle_end, [])
end
defp token_state(%{subengine: subengine, substate: substate, file: file}) do
%{
subengine: subengine,
substate: substate,
file: file,
stack: [],
tags: [],
root: nil
}
end
defp handle_tokens(token_state, tokens) do
Enum.reduce(tokens, token_state, &handle_token/2)
end
## These callbacks update the state
@doc false
def handle_begin(state) do
%{state | tokens: []}
end
@doc false
def handle_text(state, text) do
handle_text(state, [], text)
end
def handle_text(%{file: file, indentation: indentation, tokens: tokens} = state, meta, text) do
tokens = HTMLTokenizer.tokenize(text, file, indentation, meta, tokens)
%{state | tokens: tokens}
end
@doc false
def handle_expr(%{tokens: tokens} = state, marker, expr) do
%{state | tokens: [{:expr, marker, expr} | tokens]}
end
## Helpers
defp push_substate_to_stack(%{substate: substate, stack: stack} = state) do
%{state | stack: [{:substate, substate} | stack]}
end
defp pop_substate_from_stack(%{stack: [{:substate, substate} | stack]} = state) do
%{state | stack: stack, substate: substate}
end
defp invoke_subengine(%{subengine: subengine, substate: substate}, :handle_text, args) do
# TODO: Remove this once we require Elixir v1.12
if function_exported?(subengine, :handle_text, 3) do
apply(subengine, :handle_text, [substate | args])
else
apply(subengine, :handle_text, [substate | tl(args)])
end
end
defp invoke_subengine(%{subengine: subengine, substate: substate}, fun, args) do
apply(subengine, fun, [substate | args])
end
defp update_subengine(state, fun, args) do
%{state | substate: invoke_subengine(state, fun, args)}
end
defp push_tag(state, token) do
# If we have a void tag, we don't actually push it into the stack.
with {:tag_open, name, _attrs, _meta} <- token,
true <- void?(name) do
state
else
_ -> %{state | tags: [token | state.tags]}
end
end
defp pop_tag!(
%{tags: [{:tag_open, tag_name, _attrs, _meta} = tag | tags]} = state,
{:tag_close, tag_name, _}
) do
{tag, %{state | tags: tags}}
end
defp pop_tag!(
%{tags: [{:tag_open, tag_open_name, _attrs, tag_open_meta} | _]} = state,
{:tag_close, tag_close_name, tag_close_meta}
) do
%{line: line, column: column} = tag_close_meta
file = state.file
message = """
unmatched closing tag. Expected </#{tag_open_name}> for <#{tag_open_name}> \
at line #{tag_open_meta.line}, got: </#{tag_close_name}>\
"""
raise ParseError, line: line, column: column, file: file, description: message
end
defp pop_tag!(state, {:tag_close, tag_name, tag_meta}) do
%{line: line, column: column} = tag_meta
file = state.file
message = "missing opening tag for </#{tag_name}>"
raise ParseError, line: line, column: column, file: file, description: message
end
## handle_token
# Expr
defp handle_token({:expr, marker, expr}, state) do
state
|> set_root_on_not_tag()
|> update_subengine(:handle_expr, [marker, expr])
end
# Text
defp handle_token({:text, text, %{line_end: line, column_end: column}}, state) do
state
|> set_root_on_not_tag()
|> update_subengine(:handle_text, [[line: line, column: column], text])
end
# Remote function component (self close)
defp handle_token(
{:tag_open, <<first, _::binary>> = tag_name, attrs, %{self_close: true}} = tag_meta,
state
)
when first in ?A..?Z do
file = state.file
{mod, fun} = decompose_remote_component_tag!(tag_name, tag_meta, file)
{let, assigns} = handle_component_attrs(attrs, file)
raise_if_let!(let, file)
ast =
quote do
Phoenix.LiveView.Helpers.component(&(unquote(mod).unquote(fun) / 1), unquote(assigns))
end
state
|> set_root_on_not_tag()
|> update_subengine(:handle_expr, ["=", ast])
end
# Remote function component (with inner content)
defp handle_token({:tag_open, <<first, _::binary>> = tag_name, attrs, tag_meta}, state)
when first in ?A..?Z do
mod_fun = decompose_remote_component_tag!(tag_name, tag_meta, state.file)
token = {:tag_open, tag_name, attrs, Map.put(tag_meta, :mod_fun, mod_fun)}
state
|> set_root_on_not_tag()
|> push_tag(token)
|> push_substate_to_stack()
|> update_subengine(:handle_begin, [])
end
defp handle_token({:tag_close, <<first, _::binary>>, _tag_close_meta} = token, state)
when first in ?A..?Z do
{{:tag_open, _name, attrs, %{mod_fun: {mod, fun}, line: line}}, state} =
pop_tag!(state, token)
{let, assigns} = handle_component_attrs(attrs, state.file)
clauses = build_component_clauses(let, state)
ast =
quote line: line do
Phoenix.LiveView.Helpers.component(
&(unquote(mod).unquote(fun) / 1),
unquote(assigns),
do: unquote(clauses)
)
end
state
|> pop_substate_from_stack()
|> update_subengine(:handle_expr, ["=", ast])
end
# Local function component (self close)
defp handle_token(
{:tag_open, "." <> name, attrs, %{self_close: true, line: line}},
state
) do
fun = String.to_atom(name)
file = state.file
{let, assigns} = handle_component_attrs(attrs, file)
raise_if_let!(let, file)
ast =
quote line: line do
Phoenix.LiveView.Helpers.component(
&(unquote(Macro.var(fun, __MODULE__)) / 1),
unquote(assigns)
)
end
state
|> set_root_on_not_tag()
|> update_subengine(:handle_expr, ["=", ast])
end
# Local function component (with inner content)
defp handle_token({:tag_open, "." <> _, _attrs, _tag_meta} = token, state) do
state
|> set_root_on_not_tag()
|> push_tag(token)
|> push_substate_to_stack()
|> update_subengine(:handle_begin, [])
end
defp handle_token({:tag_close, "." <> fun_name, _tag_close_meta} = token, state) do
{{:tag_open, _name, attrs, %{line: line}}, state} = pop_tag!(state, token)
fun = String.to_atom(fun_name)
{let, assigns} = handle_component_attrs(attrs, state.file)
clauses = build_component_clauses(let, state)
ast =
quote line: line do
Phoenix.LiveView.Helpers.component(
&(unquote(Macro.var(fun, __MODULE__)) / 1),
unquote(assigns),
do: unquote(clauses)
)
end
state
|> pop_substate_from_stack()
|> update_subengine(:handle_expr, ["=", ast])
end
# HTML element (self close)
defp handle_token({:tag_open, name, attrs, %{self_close: true} = tag_meta}, state) do
suffix = if void?(name), do: ">", else: "></#{name}>"
state
|> set_root_on_tag()
|> handle_tag_and_attrs(name, attrs, suffix, to_location(tag_meta))
end
# HTML element
defp handle_token({:tag_open, name, attrs, tag_meta} = token, state) do
state
|> set_root_on_tag()
|> push_tag(token)
|> handle_tag_and_attrs(name, attrs, ">", to_location(tag_meta))
end
defp handle_token({:tag_close, name, tag_meta} = token, state) do
{{:tag_open, _name, _attrs, _tag_meta}, state} = pop_tag!(state, token)
update_subengine(state, :handle_text, [to_location(tag_meta), "</#{name}>"])
end
# Root tracking
defp strip_text_space(tokens) do
with [{:text, text, _} | rest] <- tokens,
"" <- String.trim_leading(text) do
strip_text_space(rest)
else
_ -> tokens
end
end
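  # Stripping leading whitespace-only text tokens from both ends (note the
  # reverse/re-strip in handle_body/1) lets the root tracking below recognise
  # a template that is a single tag surrounded by whitespace as a single root.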
defp set_root_on_not_tag(%{root: root, tags: tags} = state) do
if tags == [] and root != false do
%{state | root: false}
else
state
end
end
defp set_root_on_tag(state) do
case state do
%{root: nil, tags: []} -> %{state | root: true}
%{root: true, tags: []} -> %{state | root: false}
%{root: bool} when is_boolean(bool) -> state
end
end
## handle_tag_and_attrs
defp handle_tag_and_attrs(state, name, attrs, suffix, meta) do
state
|> update_subengine(:handle_text, [meta, "<#{name}"])
|> handle_tag_attrs(meta, attrs)
|> update_subengine(:handle_text, [meta, suffix])
end
defp handle_tag_attrs(state, meta, attrs) do
Enum.reduce(attrs, state, fn
{:root, {:expr, value, %{line: line, column: col}}}, state ->
attrs = Code.string_to_quoted!(value, line: line, column: col)
handle_attrs_escape(state, meta, attrs)
{name, {:expr, value, %{line: line, column: col}}}, state ->
attr = Code.string_to_quoted!(value, line: line, column: col)
handle_attr_escape(state, meta, name, attr)
{name, {:string, value, %{delimiter: ?"}}}, state ->
update_subengine(state, :handle_text, [meta, ~s( #{name}="#{value}")])
{name, {:string, value, %{delimiter: ?'}}}, state ->
update_subengine(state, :handle_text, [meta, ~s( #{name}='#{value}')])
{name, nil}, state ->
update_subengine(state, :handle_text, [meta, " #{name}"])
end)
end
defp handle_attrs_escape(state, meta, attrs) do
ast =
quote line: meta[:line] do
Phoenix.HTML.Tag.attributes_escape(unquote(attrs))
end
update_subengine(state, :handle_expr, ["=", ast])
end
defp handle_attr_escape(state, meta, name, value) do
case extract_binaries(value, true, []) do
:error ->
if fun = empty_attribute_encoder(name) do
ast =
quote line: meta[:line] do
{:safe, unquote(__MODULE__).unquote(fun)(unquote(value))}
end
state
|> update_subengine(:handle_text, [meta, ~s( #{name}=")])
|> update_subengine(:handle_expr, ["=", ast])
|> update_subengine(:handle_text, [meta, ~s(")])
else
handle_attrs_escape(state, meta, [{safe_unless_special(name), value}])
end
binaries ->
state
|> update_subengine(:handle_text, [meta, ~s( #{name}=")])
|> handle_binaries(meta, binaries)
|> update_subengine(:handle_text, [meta, ~s(")])
end
end
defp handle_binaries(state, meta, binaries) do
binaries
|> Enum.reverse()
|> Enum.reduce(state, fn
{:text, value}, state ->
update_subengine(state, :handle_text, [meta, binary_encode(value)])
{:binary, value}, state ->
ast =
quote line: meta[:line] do
{:safe, unquote(__MODULE__).binary_encode(unquote(value))}
end
update_subengine(state, :handle_expr, ["=", ast])
end)
end
defp extract_binaries({:<>, _, [left, right]}, _root?, acc) do
extract_binaries(right, false, extract_binaries(left, false, acc))
end
defp extract_binaries({:<<>>, _, parts} = bin, _root?, acc) do
Enum.reduce(parts, acc, fn
part, acc when is_binary(part) ->
[{:text, part} | acc]
{:"::", _, [binary, {:binary, _, _}]}, acc ->
[{:binary, binary} | acc]
_, _ ->
throw(:unknown_part)
end)
catch
:unknown_part -> [{:binary, bin} | acc]
end
defp extract_binaries(binary, _root?, acc) when is_binary(binary), do: [{:text, binary} | acc]
defp extract_binaries(value, false, acc), do: [{:binary, value} | acc]
defp extract_binaries(_value, true, _acc), do: :error
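  # For example (hypothetical attribute value): `"foo-" <> bar` decomposes
  # into [{:text, "foo-"}, {:binary, bar}], so only the dynamic part needs
  # runtime encoding; a bare expression at the root returns :error and falls
  # back to full attribute escaping above.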
defp empty_attribute_encoder("class"), do: :class_attribute_encode
defp empty_attribute_encoder("style"), do: :empty_attribute_encoder
defp empty_attribute_encoder(_), do: nil
@doc false
def class_attribute_encode([_ | _] = list),
do: list |> Enum.filter(& &1) |> Enum.join(" ") |> Phoenix.HTML.Engine.encode_to_iodata!()
def class_attribute_encode(other),
do: empty_attribute_encode(other)
@doc false
def empty_attribute_encode(nil), do: ""
def empty_attribute_encode(false), do: ""
def empty_attribute_encode(true), do: ""
def empty_attribute_encode(value), do: Phoenix.HTML.Engine.encode_to_iodata!(value)
@doc false
def binary_encode(value) when is_binary(value) do
value
|> Phoenix.HTML.Engine.encode_to_iodata!()
|> IO.iodata_to_binary()
end
def binary_encode(value) do
raise ArgumentError, "expected a binary in <>, got: #{inspect(value)}"
end
defp safe_unless_special("aria"), do: "aria"
defp safe_unless_special("class"), do: "class"
defp safe_unless_special("data"), do: "data"
defp safe_unless_special(name), do: {:safe, name}
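  # "aria", "class" and "data" keep their bare names so that
  # Phoenix.HTML.Tag.attributes_escape/1 can apply its special handling to
  # them; other names are wrapped as {:safe, name} to skip name re-escaping.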
## handle_component_attrs
defp handle_component_attrs(attrs, file) do
{lets, entries} =
case build_component_attrs(attrs) do
{lets, [], []} -> {lets, [{:%{}, [], []}]}
{lets, r, []} -> {lets, r}
{lets, r, d} -> {lets, r ++ [{:%{}, [], d}]}
end
let =
case lets do
[] ->
nil
[let] ->
let
[{_, meta}, {_, previous_meta} | _] ->
message = """
cannot define multiple `let` attributes. \
Another `let` has already been defined at line #{previous_meta.line}\
"""
raise ParseError,
line: meta.line,
column: meta.column,
file: file,
description: message
end
assigns =
Enum.reduce(entries, fn expr, acc ->
quote do: Map.merge(unquote(acc), unquote(expr))
end)
{let, assigns}
end
defp build_component_attrs(attrs) do
build_component_attrs(attrs, {[], [], []})
end
defp build_component_attrs([], {lets, r, d}) do
{lets, Enum.reverse(r), Enum.reverse(d)}
end
defp build_component_attrs(
[{:root, {:expr, value, %{line: line, column: col}}} | attrs],
{lets, r, d}
) do
quoted_value = Code.string_to_quoted!(value, line: line, column: col)
quoted_value = quote do: Map.new(unquote(quoted_value))
build_component_attrs(attrs, {lets, [quoted_value | r], d})
end
defp build_component_attrs(
[{"let", {:expr, value, %{line: line, column: col} = meta}} | attrs],
{lets, r, d}
) do
quoted_value = Code.string_to_quoted!(value, line: line, column: col)
build_component_attrs(attrs, {[{quoted_value, meta} | lets], r, d})
end
defp build_component_attrs(
[{name, {:expr, value, %{line: line, column: col}}} | attrs],
{lets, r, d}
) do
quoted_value = Code.string_to_quoted!(value, line: line, column: col)
build_component_attrs(attrs, {lets, r, [{String.to_atom(name), quoted_value} | d]})
end
defp build_component_attrs([{name, {:string, value, _}} | attrs], {lets, r, d}) do
build_component_attrs(attrs, {lets, r, [{String.to_atom(name), value} | d]})
end
defp build_component_attrs([{name, nil} | attrs], {lets, r, d}) do
build_component_attrs(attrs, {lets, r, [{String.to_atom(name), true} | d]})
end
defp decompose_remote_component_tag!(tag_name, tag_meta, file) do
case String.split(tag_name, ".") |> Enum.reverse() do
[<<first, _::binary>> = fun_name | rest] when first in ?a..?z ->
aliases = rest |> Enum.reverse() |> Enum.map(&String.to_atom/1)
fun = String.to_atom(fun_name)
{{:__aliases__, [], aliases}, fun}
_ ->
%{line: line, column: column} = tag_meta
message = "invalid tag <#{tag_name}>"
raise ParseError, line: line, column: column, file: file, description: message
end
end
@doc false
def __unmatched_let__!(pattern, value) do
message = """
cannot match arguments sent from `render_block/2` against the pattern in `let`.
Expected a value matching `#{pattern}`, got: `#{inspect(value)}`.
"""
stacktrace =
self()
|> Process.info(:current_stacktrace)
|> elem(1)
|> Enum.drop(2)
reraise(message, stacktrace)
end
defp raise_if_let!(let, file) do
with {_pattern, %{line: line}} <- let do
message = "cannot use `let` on a component without inner content"
raise CompileError, line: line, file: file, description: message
end
end
defp build_component_clauses(let, state) do
case let do
{pattern, %{line: line}} ->
quote line: line do
unquote(pattern) ->
unquote(invoke_subengine(state, :handle_end, []))
end ++
quote line: line, generated: true do
other ->
Phoenix.LiveView.HTMLEngine.__unmatched_let__!(
unquote(Macro.to_string(pattern)),
other
)
end
_ ->
quote do
_ -> unquote(invoke_subengine(state, :handle_end, []))
end
end
end
## Helpers
for void <- ~w(area base br col hr img input link meta param command keygen source) do
defp void?(unquote(void)), do: true
end
defp void?(_), do: false
defp to_location(%{line: line, column: column}), do: [line: line, column: column]
end
# --- file: mix.exs (repo: feng19/ex_aliyun_mns) ---
defmodule ExAliyun.MNS.MixProject do
use Mix.Project
def project do
[
app: :ex_aliyun_mns,
name: "ExAliyun.MNS",
version: "1.2.0",
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps(),
docs: docs(),
description: "Alibaba Cloud Message Notification Service (MNS) SDK",
package: package()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger, :eex],
mod: {ExAliyun.MNS.Application, []}
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:tesla, "~> 1.4"},
{:finch, "~> 0.5"},
{:timex, "~> 3.6"},
{:sax_map, "~> 1.0"},
{:msgpax, "~> 2.2", only: :test},
{:jason, "~> 1.2", only: :test},
{:ex_doc, "~> 0.21", only: :dev, runtime: false}
]
end
defp package() do
[
files: ["lib", "mix.exs", "README.md", "LICENSE.md"],
maintainers: ["Xin Zou"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/xinz/ex_aliyun_mns"}
]
end
defp docs do
[
main: "readme",
formatter_opts: [gfm: true],
extras: ["README.md"]
]
end
end
# --- file: lib/credo/cli/command/info/info_output.ex (repo: jlgeering/credo) ---
defmodule Credo.CLI.Command.Info.InfoOutput do
use Credo.CLI.Output.FormatDelegator,
default: Credo.CLI.Command.Info.Output.Default,
json: Credo.CLI.Command.Info.Output.Json
alias Credo.CLI.Output.UI
def print(exec, info) do
format_mod = format_mod(exec)
format_mod.print(exec, info)
end
def print_help(exec) do
usage = ["Usage: ", :olive, "mix credo info [options]"]
description = """
Shows information about Credo and its environment.
"""
example = [
"Example: ",
:olive,
:faint,
"$ mix credo info --format=json --verbose"
]
options = """
Info options:
-c, --checks Only include checks that match the given strings
-C, --config-name Use the given config instead of "default"
-i, --ignore-checks Ignore checks that match the given strings
--format Display the list in a specific format (oneline,flycheck)
--verbose Display more information (e.g. checked files)
General options:
-v, --version Show version
-h, --help Show this help
"""
UI.puts(usage)
UI.puts(description)
UI.puts(example)
UI.puts(options)
exec
end
end
# --- file: homesynck/lib/homesynck_web/telemetry.ex (repo: PJS4-projet-gestionnaire-de-contact/Gambett-Server) ---
defmodule HomesynckWeb.Telemetry do
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("homesynck.repo.query.total_time", unit: {:native, :millisecond}),
summary("homesynck.repo.query.decode_time", unit: {:native, :millisecond}),
summary("homesynck.repo.query.query_time", unit: {:native, :millisecond}),
summary("homesynck.repo.query.queue_time", unit: {:native, :millisecond}),
summary("homesynck.repo.query.idle_time", unit: {:native, :millisecond}),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
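  # These metric definitions are inert until a reporter consumes them, e.g.
  # (hypothetical) adding {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
  # to the children in init/1 above.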
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {HomesynckWeb, :count_users, []}
]
end
end
# --- file: lib/example_template.ex (repo: flixbi/element-parsers) ---
defmodule Parser do
use Platform.Parsing.Behaviour
require Logger
#
# Parser for device EXAMPLE that will provide EXAMPLE data.
#
# Changelog:
# 2019-05-09 [jb]: Initial implementation according to "Example-Payload-v1.pdf"
#
def parse(<<v1, v2, v3>>, %{meta: %{frame_port: 1}}) do
%{
type: :boot,
version: "#{v1}.#{v2}.#{v3}",
}
end
def parse(<<distance::32>>, %{meta: %{frame_port: 2}}) do
%{
type: :measurement,
distance: distance,
}
end
def parse(payload, meta) do
Logger.warn("Could not parse payload #{inspect payload} with frame_port #{inspect get_in(meta, [:meta, :frame_port])}")
[]
end
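  # Example, mirroring tests/0 below:
  # parse(<<1, 2, 3>>, %{meta: %{frame_port: 1}}) #=> %{type: :boot, version: "1.2.3"}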
# Define fields with human readable name and a SI unit if available.
def fields() do
[
# The first field should be a numeric value, so it can be used for graphs.
%{
field: "distance",
display: "Distanz",
unit: "cm",
},
%{
field: "type",
display: "Typ",
},
]
end
def tests() do
[
# Test format:
# {:parse_hex, received_payload_as_hex, meta_map, expected_result},
{:parse_hex, "010203", %{meta: %{frame_port: 1}}, %{type: :boot, version: "1.2.3"}},
{:parse_hex, "12345678", %{meta: %{frame_port: 2}}, %{type: :measurement, distance: 305419896}},
]
end
end
# --- file: test/gears_test.exs (repo: machine-manager/gears) ---
alias Gears.{LangUtil, FileUtil, StringUtil, TableFormatter}
defmodule Gears.LangUtilTest do
use ExUnit.Case
test "oper_if works on binaries" do
import LangUtil, only: [oper_if: 3]
s = "hello"
out = {s, &Kernel.<>/2}
|> oper_if(true, " ")
|> oper_if(false, "mars")
|> oper_if(true, "world")
|> elem(0)
assert out == "hello world"
end
test "oper_if works on lists" do
import LangUtil, only: [oper_if: 3]
s = ["hello"]
out = {s, &Kernel.++/2}
|> oper_if(true, [" "])
|> oper_if(false, ["mars"])
|> oper_if(true, ["world"])
|> elem(0)
assert out == ["hello", " ", "world"]
end
test "oper_if doesn't evaluate expression unless condition is truthy" do
import LangUtil, only: [oper_if: 3]
s = "hello"
out = {s, &Kernel.<>/2}
|> oper_if(false, "#{:foo + 1}")
|> oper_if(nil, "#{:foo + 2}")
|> elem(0)
assert out == "hello"
end
test "ok_or_raise works" do
import LangUtil
assert :ok == ok_or_raise(:ok)
assert_raise ArithmeticError, fn ->
ok_or_raise({:error, ArithmeticError})
end
end
end
defmodule Gears.FileUtilTest do
use ExUnit.Case
test "temp_path returns a string" do
assert is_binary(FileUtil.temp_path("prefix", "jpg"))
end
test "temporary path starts with System.tmp_dir/prefix" do
prefix = "prefix"
path = FileUtil.temp_path(prefix, "jpg")
assert String.starts_with?(path, Path.join(System.tmp_dir, prefix))
end
test "if extension provided, temporary path ends with '.extension'" do
extension = "jpg"
path = FileUtil.temp_path("prefix", extension)
assert String.ends_with?(path, ".#{extension}")
assert not String.ends_with?(path, "..#{extension}")
end
test "if extension empty, temporary path does not end with '.'" do
extension = ""
path = FileUtil.temp_path("prefix", extension)
assert not String.ends_with?(path, ".")
end
test "if extension not provided, temporary path does not end with '.'" do
path = FileUtil.temp_path("prefix")
assert not String.ends_with?(path, ".")
end
test "symlink?" do
assert not FileUtil.symlink?("/")
assert not FileUtil.symlink?("/tmp")
assert FileUtil.symlink?("/vmlinuz")
end
test "exists" do
assert FileUtil.exists?(__ENV__.file)
refute FileUtil.exists?("_missing.txt")
end
test "exists with dangling symlink" do
invalid_file = FileUtil.temp_path("invalid_file")
dest = FileUtil.temp_path("dangling_symlink")
File.ln_s(invalid_file, dest)
try do
assert FileUtil.exists?(dest)
after
File.rm(dest)
end
end
end
defmodule Gears.StringUtilTest do
use ExUnit.Case
test "grep" do
assert StringUtil.grep("hello\nworld\norange", ~r"l") == ["hello", "world"]
assert StringUtil.grep("hello\nworld\norange", ~r"^.{6}$") == ["orange"]
assert StringUtil.grep("hello\nworld\norange", ~r"^$") == []
assert StringUtil.grep("hello\nworld\norange\n", ~r"^$") == [""]
end
test "remove_empty_lines" do
assert StringUtil.remove_empty_lines("") == ""
assert StringUtil.remove_empty_lines("\n") == ""
assert StringUtil.remove_empty_lines("\nline") == "line"
assert StringUtil.remove_empty_lines("\n\nline") == "line"
assert StringUtil.remove_empty_lines("\n\nline\n") == "line\n"
assert StringUtil.remove_empty_lines("\n\nline\n\n") == "line\n"
assert StringUtil.remove_empty_lines("hello\nworld") == "hello\nworld"
assert StringUtil.remove_empty_lines("hello\nworld\n") == "hello\nworld\n"
assert StringUtil.remove_empty_lines("hello\n\n\nworld\n\n\n") == "hello\nworld\n"
end
test "counted_noun" do
assert StringUtil.counted_noun(0, "unit", "units") == "0 units"
assert StringUtil.counted_noun(1, "unit", "units") == "1 unit"
assert StringUtil.counted_noun(2, "unit", "units") == "2 units"
end
test "half_width_length" do
assert StringUtil.half_width_length("") == 0
assert StringUtil.half_width_length("h") == 1
assert StringUtil.half_width_length("hi") == 2
assert StringUtil.half_width_length("末") == 2
assert StringUtil.half_width_length("末未") == 4
assert StringUtil.half_width_length("末未.") == 5
end
test "strip_ansi" do
assert StringUtil.strip_ansi("") == ""
assert StringUtil.strip_ansi("hi") == "hi"
assert StringUtil.strip_ansi(bolded("hi")) == "hi"
end
defp bolded(s) do
"#{IO.ANSI.bright()}#{s}#{IO.ANSI.normal()}"
end
end
defmodule Gears.TableFormatterTest do
use ExUnit.Case
@bad_data [[1, "hello", -0.555], [1000000000, "world", ""], [3, "longer data", 3.5]]
@data [["1", "hello", "-0.555"], ["1000000000", "world", ""], ["3", "longer data", "3.5"]]
test "table formatter with default padding" do
# Note that strings in the last column are not padded
assert TableFormatter.format(@data) |> IO.iodata_to_binary ==
"""
1 hello -0.555
1000000000 world
3 longer data 3.5
"""
end
test "table formatter with padding of 2" do
assert TableFormatter.format(@data, padding: 2) |> IO.iodata_to_binary ==
"""
1 hello -0.555
1000000000 world
3 longer data 3.5
"""
end
test "table formatter with padding of 0" do
assert TableFormatter.format(@data, padding: 0) |> IO.iodata_to_binary ==
"""
1 hello -0.555
1000000000world
3 longer data3.5
"""
end
test "table formatter with a width_fn that strips ANSI" do
underlined_data = @data |> Enum.map(fn row -> row |> Enum.map(&underlined/1) end)
assert TableFormatter.format(underlined_data, width_fn: &(&1 |> strip_ansi |> String.length))
|> IO.iodata_to_binary |> strip_ansi ==
"""
1 hello -0.555
1000000000 world
3 longer data 3.5
"""
end
test "table formatter with a different number of columns in some rows" do
data = [
["hello"],
["hello", "world"],
["longer", "world"],
[],
["longer", "world", "another string", "end"],
["longer", "world", "another longer string", "end"],
["hello", "world"],
]
assert TableFormatter.format(data) |> IO.iodata_to_binary ==
"""
hello
hello world
longer world
longer world another string end
longer world another longer string end
hello world
"""
end
defp underlined(s) do
"#{IO.ANSI.underline()}#{s}#{IO.ANSI.no_underline()}"
end
defp strip_ansi(s) do
# Based on https://github.com/chalk/ansi-regex/blob/dce3806b159260354de1a77c1db543a967f7218f/index.js
s |> String.replace(~r/[\x{001b}\x{009b}][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/, "")
end
test "table formatter with 0 rows" do
assert TableFormatter.format([], padding: 1) |> IO.iodata_to_binary == ""
end
test "raises error on non-string values" do
assert_raise ArgumentError, ~r/^All values /, fn -> TableFormatter.format(@bad_data) end
end
end
| 28.756303 | 109 | 0.646552 |
7905669b5da26408afa62c9cde922442af74696e | 3,520 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/path_rule.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/path_rule.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/path_rule.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.PathRule do
@moduledoc """
A path-matching rule for a URL. If matched, will use the specified BackendService to handle the traffic arriving at this URL.
## Attributes
* `paths` (*type:* `list(String.t)`, *default:* `nil`) - The list of path patterns to match. Each must start with / and the only place a * is allowed is at the end following a /. The string fed to the path matcher does not include any text after the first ? or #, and those chars are not allowed here.
* `routeAction` (*type:* `GoogleApi.Compute.V1.Model.HttpRouteAction.t`, *default:* `nil`) - In response to a matching path, the load balancer performs advanced routing actions like URL rewrites, header transformations, etc. prior to forwarding the request to the selected backend. If routeAction specifies any weightedBackendServices, service must not be set. Conversely if service is set, routeAction cannot contain any weightedBackendServices.
Only one of routeAction or urlRedirect must be set.
UrlMaps for external HTTP(S) load balancers support only the urlRewrite action within a pathRule's routeAction.
* `service` (*type:* `String.t`, *default:* `nil`) - The full or partial URL of the backend service resource to which traffic is directed if this rule is matched. If routeAction is additionally specified, advanced routing actions like URL Rewrites, etc. take effect prior to sending the request to the backend. However, if service is specified, routeAction cannot contain any weightedBackendService s. Conversely, if routeAction specifies any weightedBackendServices, service must not be specified.
Only one of urlRedirect, service or routeAction.weightedBackendService must be set.
* `urlRedirect` (*type:* `GoogleApi.Compute.V1.Model.HttpRedirectAction.t`, *default:* `nil`) - When a path pattern is matched, the request is redirected to a URL specified by urlRedirect.
If urlRedirect is specified, service or routeAction must not be set.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:paths => list(String.t()),
:routeAction => GoogleApi.Compute.V1.Model.HttpRouteAction.t(),
:service => String.t(),
:urlRedirect => GoogleApi.Compute.V1.Model.HttpRedirectAction.t()
}
field(:paths, type: :list)
field(:routeAction, as: GoogleApi.Compute.V1.Model.HttpRouteAction)
field(:service)
field(:urlRedirect, as: GoogleApi.Compute.V1.Model.HttpRedirectAction)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.PathRule do
def decode(value, options) do
GoogleApi.Compute.V1.Model.PathRule.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.PathRule do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
# --- file: deps/distillery/lib/mix/lib/releases/models/app.ex (repo: renatoalmeidaoliveira/Merlin) ---
defmodule Mix.Releases.App do
@moduledoc """
Represents important metadata about a given application.
"""
defstruct name: nil,
vsn: nil,
applications: [],
included_applications: [],
unhandled_deps: [],
start_type: nil,
path: nil
@type start_type :: :permanent | :temporary | :transient | :load | :none
@type t :: %__MODULE__{
name: atom(),
vsn: String.t,
applications: [atom()],
included_applications: [atom()],
unhandled_deps: [atom()],
start_type: start_type,
path: nil | String.t
}
@doc """
Create a new Application struct from an application name
"""
@spec new(atom) :: nil | __MODULE__.t | {:error, String.t}
def new(name), do: new(name, nil)
@doc """
Same as new/1, but specify the application's start type
"""
@spec new(atom, start_type | nil) :: nil | __MODULE__.t | {:error, String.t}
def new(name, start_type)
when is_atom(name) and start_type in [nil, :permanent, :temporary, :transient, :load, :none] do
dep = Enum.find(Mix.Dep.loaded([]), fn %Mix.Dep{app: ^name} -> true; _ -> false end)
cond do
is_nil(dep) ->
do_new(name, start_type)
Keyword.get(dep.opts, :runtime) === false ->
nil
:else ->
do_new(name, start_type)
end
end
def new(name, start_type), do: {:error, {:apps, {:invalid_start_type, name, start_type}}}
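  # Hypothetical usage: `App.new(:logger)` loads the application and builds its
  # metadata struct, while an unknown start type such as `App.new(:logger, :bad)`
  # returns {:error, {:apps, {:invalid_start_type, :logger, :bad}}}.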
defp do_new(name, start_type) do
_ = Application.load(name)
case Application.spec(name) do
nil -> nil
spec ->
vsn = '#{Keyword.get(spec, :vsn)}'
deps = get_dependencies(name)
apps = Keyword.get(spec, :applications, [])
included = Keyword.get(spec, :included_applications, [])
path = Application.app_dir(name)
missing = MapSet.new(deps)
|> MapSet.difference(MapSet.union(MapSet.new(apps), MapSet.new(included)))
|> MapSet.to_list
%__MODULE__{name: name, vsn: vsn,
start_type: start_type,
applications: apps,
included_applications: included,
unhandled_deps: missing,
path: path}
end
end
@doc """
Determines if the provided start type is a valid one.
"""
@spec valid_start_type?(atom) :: boolean()
def valid_start_type?(start_type)
when start_type in [:permanent, :temporary, :transient, :load, :none],
do: true
def valid_start_type?(_), do: false
# Gets a list of all applications which are children
# of this application.
defp get_dependencies(name) do
Mix.Dep.loaded_by_name([name], [])
|> Stream.flat_map(fn %Mix.Dep{deps: deps} -> deps end)
|> Stream.filter(&include_dep?/1)
|> Enum.map(&map_dep/1)
rescue
Mix.Error -> # This is a top-level app
cond do
Mix.Project.umbrella? ->
# find the app in the umbrella
app_path = Path.join(Mix.Project.config[:apps_path], "#{name}")
cond do
File.exists?(app_path) ->
Mix.Project.in_project(name, app_path, fn mixfile ->
mixfile.project[:deps]
|> Stream.filter(&include_dep?/1)
|> Enum.map(&map_dep/1)
end)
:else ->
[]
end
:else ->
Mix.Project.config[:deps]
|> Stream.filter(&include_dep?/1)
|> Enum.map(&map_dep/1)
end
end
defp include_dep?({_, _}), do: true
defp include_dep?({_, _, opts}), do: include_dep?(opts)
defp include_dep?(%Mix.Dep{opts: opts}), do: include_dep?(opts)
defp include_dep?(opts) when is_list(opts) do
if Keyword.get(opts, :runtime) == false do
false
else
case Keyword.get(opts, :only) do
nil -> true
envs when is_list(envs) -> Enum.member?(envs, :prod)
env when is_atom(env) -> env == :prod
end
end
end
defp map_dep({a, _}), do: a
defp map_dep({a, _, _opts}), do: a
defp map_dep(%Mix.Dep{app: a}), do: a
end
# --- file: test/ambassador/subdivision_test.exs (repo: Recruitee/ambassador) ---
defmodule Cadastre.SubdivisionTest do
use ExUnit.Case, async: true
doctest Cadastre.Subdivision
end
# --- file: test/phoenix/digester_test.exs (repo: manukall/phoenix) ---
defmodule Phoenix.DigesterTest do
use ExUnit.Case, async: true
test "fails when the given paths are invalid" do
assert {:error, :invalid_path} = Phoenix.Digester.compile("nonexistent path", "/ ?? /path")
end
test "digests and compress files" do
output_path = Path.join("tmp", "phoenix_digest")
input_path = "test/fixtures/digest/priv/static/"
File.rm_rf!(output_path)
assert :ok = Phoenix.Digester.compile(input_path, output_path)
output_files = assets_files(output_path)
assert "phoenix.png" in output_files
refute "phoenix.png.gz" in output_files
assert "app.js" in output_files
assert "app.js.gz" in output_files
assert "css/app.css" in output_files
assert "css/app.css.gz" in output_files
assert "manifest.json" in output_files
assert Enum.any?(output_files, &(String.match?(&1, ~r/(phoenix-[a-fA-F\d]{32}.png)/)))
refute Enum.any?(output_files, &(String.match?(&1, ~r/(phoenix-[a-fA-F\d]{32}.png.gz)/)))
json =
Path.join(output_path, "manifest.json")
|> File.read!()
|> Poison.decode!()
assert json["phoenix.png"] =~ ~r"phoenix-[a-fA-F\d]{32}.png"
end
test "digests and compress nested files" do
output_path = Path.join("tmp", "phoenix_digest_nested")
input_path = "test/fixtures/digest/priv/"
File.rm_rf!(output_path)
assert :ok = Phoenix.Digester.compile(input_path, output_path)
output_files = assets_files(output_path)
assert "static/phoenix.png" in output_files
refute "static/phoenix.png.gz" in output_files
assert "manifest.json" in output_files
assert Enum.any?(output_files, &(String.match?(&1, ~r/(phoenix-[a-fA-F\d]{32}.png)/)))
refute Enum.any?(output_files, &(String.match?(&1, ~r/(phoenix-[a-fA-F\d]{32}.png.gz)/)))
json =
Path.join(output_path, "manifest.json")
|> File.read!()
|> Poison.decode!()
assert json["static/phoenix.png"] =~ ~r"static/phoenix-[a-fA-F\d]{32}.png"
end
test "doesn't duplicate files when digesting and compressing twice" do
input_path = Path.join("tmp", "phoenix_digest_twice")
input_file = Path.join(input_path, "file.js")
File.rm_rf!(input_path)
File.mkdir_p!(input_path)
File.write!(input_file, "console.log('test');")
assert :ok = Phoenix.Digester.compile(input_path, input_path)
assert :ok = Phoenix.Digester.compile(input_path, input_path)
output_files = assets_files(input_path)
refute "file.js.gz.gz" in output_files
refute "manifest.json.gz" in output_files
refute Enum.any?(output_files, & &1 =~ ~r/file-[a-fA-F\d]{32}.[\w|\d]*.[-[a-fA-F\d]{32}/)
end
test "digests only absolute and relative asset paths found within stylesheets" do
output_path = Path.join("tmp", "phoenix_digest_stylesheets")
input_path = "test/fixtures/digest/priv/static/"
File.rm_rf!(output_path)
assert :ok = Phoenix.Digester.compile(input_path, output_path)
digested_css_filename =
assets_files(output_path)
|> Enum.find(&(&1 =~ ~r"app-[a-fA-F\d]{32}.css"))
digested_css =
Path.join(output_path, digested_css_filename)
|> File.read!()
refute digested_css =~ ~r"/phoenix.png"
refute digested_css =~ ~r"../images/relative.png"
assert digested_css =~ ~r"/phoenix-[a-fA-F\d]{32}.png"
assert digested_css =~ ~r"../images/relative-[a-fA-F\d]{32}.png"
refute digested_css =~ ~r"http://www.phoenixframework.org/absolute-[a-fA-F\d]{32}.png"
assert digested_css =~ ~r"http://www.phoenixframework.org/absolute.png"
end
test "does not digest assets within undigested files" do
output_path = Path.join("tmp", "phoenix_digest_stylesheets_undigested")
input_path = "test/fixtures/digest/priv/static/"
File.rm_rf!(output_path)
assert :ok = Phoenix.Digester.compile(input_path, output_path)
undigested_css =
Path.join(output_path, "css/app.css")
|> File.read!()
assert undigested_css =~ ~r"/phoenix.png"
assert undigested_css =~ ~r"../images/relative.png"
refute undigested_css =~ ~r"/phoenix-[a-fA-F\d]{32}.png"
refute undigested_css =~ ~r"../images/relative-[a-fA-F\d]{32}.png"
end
defp assets_files(path) do
path
|> Path.join("**/*")
|> Path.wildcard
|> Enum.filter(&(!File.dir?(&1)))
|> Enum.map(&(Path.relative_to(&1, path)))
end
end
# --- file: lib/couch_gears/mochiweb/body_parser.ex (repo: Zatvobor/couch_gears) ---
defmodule CouchGears.Mochiweb.BodyParser do
@moduledoc false
@doc false
def parse(conn) do
    {content_type, body} = {conn.req_headers("Content-Type"), conn.raw_req_body}
parse_body(content_type, body)
end
@doc false
def parse_body("application/json", body), do: :ejson.decode(body)
@doc false
def parse_body(_, body), do: body
end
# --- file: config/config.exs (repo: iobee-io/scenic_live_reload) ---
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :scenic_live_reload, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:scenic_live_reload, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
# --- file: apps/ewallet_db/priv/repo/migrations/20171120031341_create_transfers.exs (repo: AndonMitev/EWallet) ---
# Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.Repo.Migrations.CreateTransfers do
use Ecto.Migration
def change do
create table(:transfer, primary_key: false) do
add :id, :uuid, primary_key: true
add :idempotency_token, :string, null: false
add :status, :string, default: "pending", null: false
add :type, :string, default: "internal", null: false
add :payload, :binary, null: false
add :ledger_response, :binary
add :metadata, :binary
add :encryption_version, :binary
timestamps()
end
create unique_index(:transfer, [:idempotency_token])
end
end
# --- file: {{cookiecutter.project_slug}}/apps/{{cookiecutter.phoenix_app_slug}}_web/lib/{{cookiecutter.phoenix_app_slug}}_web/endpoint.ex (repo: ibakami/cookiecutter-elixir-phoenix) ---
defmodule {{cookiecutter.phoenix_app_module}}Web.Endpoint do
use Phoenix.Endpoint, otp_app: :{{cookiecutter.phoenix_app_slug}}_web
use Absinthe.Phoenix.Endpoint
socket "/socket", {{cookiecutter.phoenix_app_module}}Web.UserSocket,
websocket: true,
longpoll: false
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
# This will enable us to fetch the manifest + sw on the root of the project
at: "/",
from: :{{cookiecutter.phoenix_app_slug}}_web,
gzip: false,
only: ~w(manifest.json css fonts images js favicon.ico robots.txt service-worker.js)
plug Plug.Static,
# All our frontend static files will go on assets folder
at: "/assets",
from: :{{cookiecutter.phoenix_app_slug}}_web,
gzip: true,
only: ~w(css fonts images js favicon.ico service-worker.js robots.txt assets-manifest.json),
only_matching: ~w(precache-manifest)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_{{cookiecutter.phoenix_app_slug}}_web_key",
signing_salt: "jMyK4u8L"
plug {{cookiecutter.phoenix_app_module}}Web.Router
end
# --- file: lib/documents/session/models/session_state.ex (repo: YgorCastor/ravix) ---
defmodule Ravix.Documents.Session.State do
@moduledoc """
A session state representation
## Fields
  - store: The Store module to which the session belongs
  - session_id: The UUID of this session
  - database: The database this session is bound to
  - conventions: Document conventions for this session
  - documents_by_id: Documents currently loaded in this session
  - defer_commands: Commands that will be deferred when the session is persisted
  - deleted_entities: Documents that will be deleted when the session is persisted
  - running_queries: RQL queries running for this session
  - last_session_call: When the last session call was executed
  - number_of_requests: Number of requests that will be executed when this session is persisted
"""
defstruct store: nil,
session_id: nil,
database: nil,
conventions: nil,
documents_by_id: %{},
defer_commands: [],
deleted_entities: [],
running_queries: %{},
last_session_call: nil,
number_of_requests: 0
require OK
alias Ravix.Documents.Session.State, as: SessionState
alias Ravix.Documents.Session.Validations
alias Ravix.Documents.Session.SessionDocument
alias Ravix.Documents.Conventions
import Ravix.RQL.Query
import Ravix.RQL.Tokens.Condition
@type t :: %SessionState{
store: atom() | nil,
session_id: bitstring(),
database: String.t(),
conventions: Conventions.t(),
documents_by_id: map(),
defer_commands: list(),
deleted_entities: list(),
running_queries: map(),
last_session_call: DateTime.t() | nil,
number_of_requests: non_neg_integer()
}
@doc """
  Increments the session's request count
## Parameters
- session_state: the session state
## Returns
- updated session state
"""
@spec increment_request_count(SessionState.t()) :: SessionState.t()
def increment_request_count(%SessionState{} = session_state) do
%SessionState{
session_state
| number_of_requests: session_state.number_of_requests + 1
}
end
@doc """
Updates the last session call time
## Parameters
- session_state: the session state
## Returns
- updated session state
"""
@spec update_last_session_call(SessionState.t()) :: SessionState.t()
def update_last_session_call(%SessionState{} = session_state) do
%SessionState{
session_state
| last_session_call: Timex.now()
}
end
@doc """
Adds a document to the session
## Parameters
- state: the session state
  - key: the key to which the document will be related
- entity: the document to be persisted
- change_vector: the concurrency change vector string
  - original_document: if it's an update, this is the document before the change
## Returns
- `{:ok, updated_state}`
- `{:error, :document_already_deferred}` if the document id is in a deferred command
  - `{:error, :document_deleted}` if the document is marked for deletion
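
  ## Example

      # Sketch only: `state` is an existing session state; the key, entity
      # and change vector values are illustrative.
      alias Ravix.Documents.Session.State
      {:ok, state} = State.register_document(state, "products/1", %{name: "Mug"}, nil)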
"""
def register_document(
%SessionState{} = state,
key,
entity,
change_vector,
original_document \\ nil
) do
OK.for do
_ <- Validations.document_not_in_deferred_command(state, key)
_ <- Validations.document_not_deleted(state, key)
after
%SessionState{
state
| documents_by_id:
Map.put(state.documents_by_id, key, %SessionDocument{
entity: entity,
key: key,
original_value: original_document,
change_vector: change_vector
})
}
end
end
@doc """
  Marks a document for deletion
## Parameters
- state: the session state
- document_id: the document id to be deleted
## Returns
- `{:ok, state}`
- `{:error, :document_already_deferred}` if the document id is in a deferred command
  - `{:error, :document_deleted}` if the document is already marked for deletion
  - `{:error, :document_not_in_session}` if the document is not loaded in the session
"""
@spec mark_document_for_exclusion(SessionState.t(), bitstring()) ::
{:error, atom()} | {:ok, SessionState.t()}
def mark_document_for_exclusion(
%SessionState{} = state,
document_id
) do
OK.for do
_ <- Validations.document_not_in_deferred_command(state, document_id)
_ <- Validations.document_not_deleted(state, document_id)
document <- Validations.document_in_session?(state, document_id)
after
{_, updated_documents} = Map.pop(state.documents_by_id, document_id)
%SessionState{
state
| deleted_entities: state.deleted_entities ++ [document],
documents_by_id: updated_documents
}
end
end
@doc """
Updates the session with RavenDB responses
## Parameters
- session_state: the session state
- updates: List of updates to be applied to the session
## Returns
- the updated session
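
  ## Example

      # Update tuples follow the shapes matched in the clauses below;
      # the document id is illustrative.
      State.update_session(state, [
        {:ok, :delete_document, "products/1"}
      ])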
"""
@spec update_session(SessionState.t(), maybe_improper_list) :: SessionState.t()
def update_session(%SessionState{} = session_state, []), do: session_state
  def update_session(%SessionState{} = session_state, [update | remaining_updates]) do
    updated_state =
      case update do
        {:ok, :update_document, document} ->
          update_document(session_state, document)

        {:ok, :delete_document, document_id} ->
          delete_document(session_state, document_id)

        {:error, :not_implemented, _action_type} ->
          session_state
      end

    update_session(updated_state, remaining_updates)
  end
@doc """
Fetches a document from the session
  ## Parameters
- state: the session state
- document_id: the document id
## Returns
- `{:ok, document}`
  - `{:error, :document_not_found}` if there is no document with the given id in the session
"""
@spec fetch_document(SessionState.t(), any) :: {:error, :document_not_found} | {:ok, map()}
  def fetch_document(_state, document_id) when is_nil(document_id),
    do: {:error, :document_not_found}
def fetch_document(%SessionState{} = state, document_id) do
case state.documents_by_id[document_id] do
nil -> {:error, :document_not_found}
document -> {:ok, document}
end
end
@doc """
  Clears the deferred commands from the session
"""
@spec clear_deferred_commands(SessionState.t()) :: SessionState.t()
def clear_deferred_commands(%SessionState{} = state) do
%SessionState{
state
| defer_commands: []
}
end
@doc """
  Clears the deleted entities from the session
"""
@spec clear_deleted_entities(SessionState.t()) :: SessionState.t()
def clear_deleted_entities(%SessionState{} = state) do
%SessionState{
state
| deleted_entities: []
}
end
def clear_tmp_keys(%SessionState{} = state) do
%SessionState{
state
| documents_by_id:
state.documents_by_id
|> Map.reject(fn {k, _v} -> String.contains?(k, "tmp_") end)
}
end
defp update_document(session_state, document) do
{:ok, document} =
case document.entity do
nil -> fetch_entity_from_db(session_state, document)
_ -> {:ok, document}
end
%SessionState{
session_state
| documents_by_id: Map.put(session_state.documents_by_id, document.key, document)
}
end
defp delete_document(session_state, document_id) do
%SessionState{
session_state
| documents_by_id: Map.delete(session_state.documents_by_id, document_id)
}
end
@dialyzer {:nowarn_function, fetch_entity_from_db: 2}
defp fetch_entity_from_db(%SessionState{} = session_state, %SessionDocument{} = document) do
OK.for do
session_id <- session_state.store.open_session()
collection = fetch_collection(document)
result <-
from(collection)
|> where(equal_to("id()", document.key))
|> list_all(session_id)
entity = Enum.at(result["Results"], 0) |> Map.drop(["@metadata"])
after
%SessionDocument{
document
| entity: entity
}
end
end
@spec fetch_collection(SessionDocument.t()) :: String.t()
defp fetch_collection(document) do
case document.metadata["@collection"] do
nil -> "@all_docs"
"@empty" -> "@all_docs"
collection -> collection
end
end
end
| 28.918089 | 95 | 0.662457 |
79063f0fb7a7ea2dfaf4b6d2ebd7eb0d39537083 | 7,215 | ex | Elixir | clients/games/lib/google_api/games/v1/api/snapshots.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/api/snapshots.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/games/lib/google_api/games/v1/api/snapshots.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Games.V1.Api.Snapshots do
@moduledoc """
API calls for all endpoints tagged `Snapshots`.
"""
alias GoogleApi.Games.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves the metadata for a given snapshot ID.
## Parameters
* `connection` (*type:* `GoogleApi.Games.V1.Connection.t`) - Connection to server
* `snapshot_id` (*type:* `String.t`) - The ID of the snapshot.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:language` (*type:* `String.t`) - The preferred language to use for strings returned by this method.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Games.V1.Model.Snapshot{}}` on success
* `{:error, info}` on failure
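
  ## Example

      # Sketch only: `conn` comes from `GoogleApi.Games.V1.Connection.new/1`
      # (token elided) and the snapshot ID is illustrative.
      {:ok, snapshot} = games_snapshots_get(conn, "snap-123", language: "en")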
"""
@spec games_snapshots_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Games.V1.Model.Snapshot.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def games_snapshots_get(connection, snapshot_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:language => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/games/v1/snapshots/{snapshotId}", %{
"snapshotId" => URI.encode(snapshot_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Games.V1.Model.Snapshot{}])
end
@doc """
Retrieves a list of snapshots created by your application for the player corresponding to the player ID.
## Parameters
* `connection` (*type:* `GoogleApi.Games.V1.Connection.t`) - Connection to server
* `player_id` (*type:* `String.t`) - A player ID. A value of `me` may be used in place of the authenticated player's ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:language` (*type:* `String.t`) - The preferred language to use for strings returned by this method.
* `:maxResults` (*type:* `integer()`) - The maximum number of snapshot resources to return in the response, used for paging. For any response, the actual number of snapshot resources returned may be less than the specified `maxResults`.
* `:pageToken` (*type:* `String.t`) - The token returned by the previous request.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Games.V1.Model.SnapshotListResponse{}}` on success
* `{:error, info}` on failure
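
  ## Example

      # Sketch only: "me" resolves to the authenticated player and
      # `maxResults` is illustrative.
      {:ok, page} = games_snapshots_list(conn, "me", maxResults: 10)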
"""
@spec games_snapshots_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Games.V1.Model.SnapshotListResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def games_snapshots_list(connection, player_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:language => :query,
:maxResults => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/games/v1/players/{playerId}/snapshots", %{
"playerId" => URI.encode(player_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Games.V1.Model.SnapshotListResponse{}])
end
end
| 47.467105 | 244 | 0.637422 |
79066f06d82f52fff41dd4c8ed8f8cc23766a01c | 203 | exs | Elixir | apps/nerves_hub_web_core/priv/repo/migrations/20190429173128_add_needs_attention_to_devices.exs | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 111 | 2018-07-25T01:07:51.000Z | 2022-01-25T17:03:01.000Z | apps/nerves_hub_web_core/priv/repo/migrations/20190429173128_add_needs_attention_to_devices.exs | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 361 | 2018-07-22T12:53:00.000Z | 2022-03-31T18:50:34.000Z | apps/nerves_hub_web_core/priv/repo/migrations/20190429173128_add_needs_attention_to_devices.exs | tonnenpinguin/nerves_hub_web | 9d36921eb7e20d20a3e3bd308cc98ad7b60cfa72 | [
"Apache-2.0"
] | 54 | 2018-08-26T02:58:04.000Z | 2022-03-09T10:12:19.000Z | defmodule NervesHubWebCore.Repo.Migrations.AddNeedsAttentionToDevices do
use Ecto.Migration
def change do
alter table(:devices) do
add(:healthy, :boolean, default: true)
end
end
end
| 20.3 | 72 | 0.738916 |
790674d0205c51edf55a3db08d478ff1018e747d | 1,499 | ex | Elixir | lib/clamex/output.ex | szajbus/clamex | aeff9f3723922cdc13e7318956d062d33aeab6f4 | [
"MIT"
] | 3 | 2019-01-29T10:08:38.000Z | 2022-01-21T11:17:27.000Z | lib/clamex/output.ex | szajbus/clamex | aeff9f3723922cdc13e7318956d062d33aeab6f4 | [
"MIT"
] | 1 | 2020-09-07T11:01:12.000Z | 2020-09-07T13:16:51.000Z | lib/clamex/output.ex | szajbus/clamex | aeff9f3723922cdc13e7318956d062d33aeab6f4 | [
"MIT"
] | null | null | null | defmodule Clamex.Output do
@moduledoc """
Utilities to process scanner output.
"""
@doc """
Extract error message from scanner's output.
  Depending on the error scenario and the scanner used, error messages included
  in the output may be formatted in different ways.
  The error message is extracted in the following way:
- extract message from first line prefixed with "ERROR: "
- if not found, extract message from first line prefixed with "WARNING: "
- if not found, take the first line in full as the message
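
  ## Examples

      iex> Clamex.Output.extract_error("ERROR: Can't access file /tmp/eicar.txt")
      :cannot_access_file

      iex> Clamex.Output.extract_error("WARNING: Could not connect to clamd")
      :cannot_connect_to_clamd

      iex> Clamex.Output.extract_error("unexpected failure")
      "unexpected failure"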
"""
@spec extract_error(output :: String.t()) :: atom() | String.t()
def extract_error(output) do
lines = String.split(output, "\n")
first_error(lines) || first_warning(lines) || first_line(lines)
end
defp first_error(lines) do
lines
|> Enum.find(&(&1 =~ ~r{^ERROR: }))
|> strip_prefix
|> match_message
end
defp first_warning(lines) do
lines
|> Enum.find(&(&1 =~ ~r{^WARNING: }))
|> strip_prefix
|> match_message
end
defp first_line(lines) do
lines
|> List.first()
|> match_message
end
defp strip_prefix("ERROR: " <> message), do: message
defp strip_prefix("WARNING: " <> message), do: message
defp strip_prefix(message), do: message
defp match_message(nil), do: nil
defp match_message(message) do
cond do
message =~ ~r{Can't access file} -> :cannot_access_file
message =~ ~r{Could not connect to clamd} -> :cannot_connect_to_clamd
true -> message
end
end
end
| 25.844828 | 78 | 0.666444 |
790676a1799070e908b374b36a8f214f694e64c8 | 2,592 | ex | Elixir | lib/exc_error.ex | miros/exc_error | c4f7da518025f7fbe74f5a6a896e845aaf11f4cd | [
"Apache-2.0"
] | 8 | 2020-03-25T10:05:40.000Z | 2021-11-20T10:56:17.000Z | lib/exc_error.ex | miros/exc_error | c4f7da518025f7fbe74f5a6a896e845aaf11f4cd | [
"Apache-2.0"
] | 2 | 2019-08-23T11:21:47.000Z | 2020-12-22T10:46:59.000Z | lib/exc_error.ex | miros/exc_error | c4f7da518025f7fbe74f5a6a896e845aaf11f4cd | [
"Apache-2.0"
] | null | null | null | defmodule ExcError do
@default_fields [:cause]
defmodule Helpers do
def to_s({:error, error}), do: to_s(error)
def to_s(term) do
if String.Chars.impl_for(term) do
to_string(term)
else
inspect(term)
end
end
end
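  # Usage sketch (module and field names below are illustrative):
  #
  #     require ExcError
  #     ExcError.define(MyApp.NotFound, [:resource])
  #     raise MyApp.NotFound, resource: "user"
  #
  # The generated exception also exposes `wrap/2`, which stores another error
  # (or an `{:error, reason}` tuple) under `:cause`; `default_message/1` below
  # appends that cause to the rendered message.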
defmacro define(name, options \\ []) do
record_fields = prepare_record_fields(options) ++ @default_fields
quote location: :keep do
defmodule unquote(name) do
defexception(unquote(record_fields))
unquote do
unless type_defined?(options[:do]) do
quote do
@type t :: %__MODULE__{
unquote_splicing(field_types(record_fields))
}
end
end
end
import unquote(__MODULE__).Helpers
def wrap(_cause, options \\ [])
def wrap({:error, error}, options),
do: wrap(error, options)
def wrap(%ErlangError{original: error}, options),
do: wrap(error, options)
def wrap(cause, options), do: struct(__MODULE__, Keyword.put(options, :cause, cause))
defoverridable wrap: 1, wrap: 2
@impl Exception
def message(exc), do: unquote(__MODULE__).default_message(exc)
defoverridable message: 1
unquote(Keyword.get(options, :do, :nop))
defimpl String.Chars do
def to_string(exc), do: unquote(name).message(exc)
end
end
end
end
defmacro define(name, fields, options) when is_list(fields) do
options = fields ++ options
quote do
unquote(__MODULE__).define(unquote(name), unquote(options))
end
end
def default_message(exc) do
default_name = exc.__struct__ |> to_string() |> String.split(".") |> List.last()
msg = Map.get(exc, :message) || default_name
if exc.cause do
"#{msg}; cause: #{Helpers.to_s(exc.cause)}"
else
msg
end
end
defp prepare_record_fields(options) do
fields = options |> Enum.reject(&match?({:do, _}, &1))
if Enum.empty?(fields), do: [:message], else: fields
end
@term_type {:term, [], Elixir}
defp field_types(fields) do
Enum.map(fields, fn
{key, _} -> {key, @term_type}
key -> {key, @term_type}
end)
end
defp type_defined?(nil), do: false
defp type_defined?({:__block__, _, children}) when is_list(children),
do: Enum.any?(children, &struct_typespec?/1)
defp type_defined?(expr), do: struct_typespec?(expr)
defp struct_typespec?({:@, _, [{:type, _, [{:"::", _, [{:t, _, _} | _]}]}]}), do: true
defp struct_typespec?(_), do: false
end
| 25.165049 | 93 | 0.599151 |
790677d531d6ae3543a51b754a4720f990dfc9c8 | 6,151 | exs | Elixir | test/bson/bson_test.exs | szTheory/bsoneach | deb0a6721518ed9712cef4fc952df5ae3ba23c69 | [
"MIT"
] | 8 | 2016-08-05T19:28:16.000Z | 2021-07-15T22:00:30.000Z | test/bson/bson_test.exs | szTheory/bsoneach | deb0a6721518ed9712cef4fc952df5ae3ba23c69 | [
"MIT"
] | 5 | 2016-08-29T13:07:15.000Z | 2016-11-24T16:58:01.000Z | test/bson/bson_test.exs | szTheory/bsoneach | deb0a6721518ed9712cef4fc952df5ae3ba23c69 | [
"MIT"
] | 5 | 2018-06-19T02:43:46.000Z | 2021-07-15T22:00:36.000Z | defmodule BSONTest do
use ExUnit.Case, async: true
import BSON.Decoder, only: [decode: 1]
@map1 %{"hello" => "world"}
@bin1 <<22, 0, 0, 0, 2, 104, 101, 108, 108, 111, 0, 6, 0, 0, 0, 119,
111, 114, 108, 100, 0, 0>>
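  # @bin1 layout: <<22, 0, 0, 0>> little-endian total document size, 0x02
  # (string element type), "hello" <> <<0>> as the field name, <<6, 0, 0, 0>>
  # value length (including terminator), "world" <> <<0>>, and a final <<0>>
  # closing the document.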
@map2 %{"BSON" => ["awesome", 5.05, 1986]}
@bin2 <<49, 0, 0, 0, 4, 66, 83, 79, 78, 0, 38, 0, 0, 0, 2, 48,
0, 8, 0, 0, 0, 97, 119, 101, 115, 111, 109, 101, 0, 1, 49, 0,
51, 51, 51, 51, 51, 51, 20, 64, 16, 50, 0, 194, 7, 0, 0, 0,
0>>
@map3 %{"a" => %{"b" => %{}, "c" => %{"d" => nil}}}
@bin3 <<32, 0, 0, 0, 3, 97, 0, 24, 0, 0, 0, 3, 98, 0, 5, 0,
0, 0, 0, 3, 99, 0, 8, 0, 0, 0, 10, 100, 0, 0, 0, 0>>
@map4 %{"a" => [], "b" => [1, 2, 3], "c" => [1.1, "2", true]}
@bin4 <<74, 0, 0, 0, 4, 97, 0, 5, 0, 0, 0, 0, 4, 98, 0, 26,
0, 0, 0, 16, 48, 0, 1, 0, 0, 0, 16, 49, 0, 2, 0, 0,
0, 16, 50, 0, 3, 0, 0, 0, 0, 4, 99, 0, 29, 0, 0, 0,
1, 48, 0, 154, 153, 153, 153, 153, 153, 241, 63, 2, 49, 0, 2, 0,
0, 0, 50, 0, 8, 50, 0, 1, 0, 0>>
@map5 %{"a" => 123.0}
@bin5 <<16, 0, 0, 0, 1, 97, 0, 0, 0, 0, 0, 0, 192, 94, 64, 0>>
@map6 %{"b" => "123"}
@bin6 <<16, 0, 0, 0, 2, 98, 0, 4, 0, 0, 0, 49, 50, 51, 0, 0>>
@map7 %{"c" => %{}}
@bin7 <<13, 0, 0, 0, 3, 99, 0, 5, 0, 0, 0, 0, 0>>
@map8 %{"d" => []}
@bin8 <<13, 0, 0, 0, 4, 100, 0, 5, 0, 0, 0, 0, 0>>
@map9 %{"e" => %BSON.Types.Binary{binary: <<1, 2, 3>>, subtype: :generic}}
@bin9 <<16, 0, 0, 0, 5, 101, 0, 3, 0, 0, 0, 0, 1, 2, 3, 0>>
@map10 %{"f" => %BSON.Types.ObjectId{value: <<0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11>>}}
@bin10 <<20, 0, 0, 0, 7, 102, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 0>>
@map11 %{"g" => true}
@bin11 <<9, 0, 0, 0, 8, 103, 0, 1, 0>>
@map12 %{"h" => %BSON.Types.DateTime{utc: 12345}}
@bin12 <<16, 0, 0, 0, 9, 104, 0, 57, 48, 0, 0, 0, 0, 0, 0, 0>>
@map13 %{"i" => nil}
@bin13 <<8, 0, 0, 0, 10, 105, 0, 0>>
@map14 %{"j" => %BSON.Types.JavaScript{code: "1 + 2"}}
@bin14 <<18, 0, 0, 0, 13, 106, 0, 6, 0, 0, 0, 49, 32, 43, 32, 50, 0, 0>>
@map15 %{"k" => %BSON.Types.JavaScript{code: "a + b", scope: %{"a" => 2, "b" => 2}}}
@bin15 <<41, 0, 0, 0, 15, 107, 0, 33, 0, 0, 0, 6, 0, 0, 0, 97, 32,
43, 32, 98, 0, 19, 0, 0, 0, 16, 97, 0, 2, 0, 0, 0, 16, 98, 0, 2, 0, 0, 0, 0, 0>>
@map16 %{"l" => 12345}
@bin16 <<12, 0, 0, 0, 16, 108, 0, 57, 48, 0, 0, 0>>
@map17 %{"m" => %BSON.Types.Timestamp{value: 12345678}}
@bin17 <<16, 0, 0, 0, 17, 109, 0, 78, 97, 188, 0, 0, 0, 0, 0, 0>>
@map18 %{"n" => 123456789123456}
@bin18 <<16, 0, 0, 0, 18, 110, 0, 128, 145, 15, 134, 72, 112, 0, 0, 0>>
@map19 %{"o" => :BSON_min}
@bin19 <<8, 0, 0, 0, 19, 111, 0, 0>>
@map20 %{"p" => :BSON_max}
@bin20 <<8, 0, 0, 0, 20, 112, 0, 0>>
test "encode" do
assert encode(@map1) == @bin1
assert encode(@map2) == @bin2
assert encode(@map3) == @bin3
assert encode(@map4) == @bin4
assert encode(@map5) == @bin5
assert encode(@map6) == @bin6
assert encode(@map7) == @bin7
assert encode(@map8) == @bin8
assert encode(@map9) == @bin9
assert encode(@map10) == @bin10
assert encode(@map11) == @bin11
assert encode(@map12) == @bin12
assert encode(@map13) == @bin13
assert encode(@map14) == @bin14
assert encode(@map15) == @bin15
assert encode(@map16) == @bin16
assert encode(@map17) == @bin17
assert encode(@map18) == @bin18
assert encode(@map19) == @bin19
assert encode(@map20) == @bin20
end
test "decode" do
assert decode(@bin1) == @map1
assert decode(@bin2) == @map2
assert decode(@bin3) == @map3
assert decode(@bin4) == @map4
assert decode(@bin5) == @map5
assert decode(@bin6) == @map6
assert decode(@bin7) == @map7
assert decode(@bin8) == @map8
assert decode(@bin9) == @map9
assert decode(@bin10) == @map10
assert decode(@bin11) == @map11
assert decode(@bin12) == @map12
assert decode(@bin13) == @map13
assert decode(@bin14) == @map14
assert decode(@bin15) == @map15
assert decode(@bin16) == @map16
assert decode(@bin17) == @map17
assert decode(@bin18) == @map18
assert decode(@bin19) == @map19
assert decode(@bin20) == @map20
end
test "keywords" do
keyword = [set: [title: "x"]]
map = %{"set" => %{"title" => "x"}}
encoded = <<28, 0, 0, 0, 3, 115, 101, 116, 0, 18, 0, 0, 0, 2, 116, 105, 116, 108, 101, 0, 2, 0, 0, 0, 120, 0, 0, 0>>
assert encode(keyword) == encoded
assert encode(map) == encoded
assert decode(encoded) == map
end
test "encode atom" do
assert encode(%{hello: "world"}) == @bin1
end
test "encode atom value" do
assert encode(%{"hello" => :world}) == @bin1
end
test "decode BSON symbol into string" do
encoded = <<22, 0, 0, 0, 14, 104, 101, 108, 108, 111, 0, 6, 0, 0, 0, 119, 111, 114, 108, 100, 0, 0>>
assert decode(encoded) == @map1
end
@map_pos_inf %{"a" => :inf}
@bin_pos_inf <<16, 0, 0, 0, 1, 97, 0, 0, 0, 0, 0, 0, 0, 240::little-integer-size(8), 127::little-integer-size(8), 0>>
@map_neg_inf %{"a" => :"-inf"}
@bin_neg_inf <<16, 0, 0, 0, 1, 97, 0, 0, 0, 0, 0, 0, 0, 240::little-integer-size(8), 255::little-integer-size(8), 0>>
@map_nan %{"a" => :NaN}
@bin_nan <<16, 0, 0, 0, 1, 97, 0, 0, 0, 0, 0, 0, 0, 248::little-integer-size(8), 127::little-integer-size(8), 0>>
@tag bson_wip: true
test "decode float NaN" do
assert decode(@bin_nan) == @map_nan
end
@tag bson_wip: true
test "encode float NaN" do
assert encode(@map_nan) == @bin_nan
end
@tag bson_wip: true
test "decode float positive Infinity" do
assert decode(@bin_pos_inf) == @map_pos_inf
end
@tag bson_wip: true
test "encode float positive Infinity" do
assert encode(@map_pos_inf) == @bin_pos_inf
end
@tag bson_wip: true
test "decode float negative Infinity" do
assert decode(@bin_neg_inf) == @map_neg_inf
end
@tag bson_wip: true
test "encode float negative Infinity" do
assert encode(@map_neg_inf) == @bin_neg_inf
end
defp encode(value) do
value |> BSON.Encoder.encode |> IO.iodata_to_binary
end
end
| 32.718085 | 120 | 0.517802 |
79067b84c12e818b22e4067cb1c4f75fa6b48480 | 1,546 | ex | Elixir | apps/meeple_web/lib/meeple_web/views/error_helpers.ex | grrrisu/meeple | 428762a58a94306a6643b09c08d72fb2883a0309 | [
"MIT"
] | null | null | null | apps/meeple_web/lib/meeple_web/views/error_helpers.ex | grrrisu/meeple | 428762a58a94306a6643b09c08d72fb2883a0309 | [
"MIT"
] | 13 | 2021-12-24T23:44:10.000Z | 2022-03-04T20:56:28.000Z | apps/meeple_web/lib/meeple_web/views/error_helpers.ex | grrrisu/meeple | 428762a58a94306a6643b09c08d72fb2883a0309 | [
"MIT"
] | null | null | null | defmodule MeepleWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error),
class: "invalid-feedback",
phx_feedback_for: input_name(form, field)
)
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(MeepleWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(MeepleWeb.Gettext, "errors", msg, opts)
end
end
end
| 32.208333 | 75 | 0.664942 |
79068f666da00479fb511e440ee6d95b35ab82d8 | 236 | ex | Elixir | web/controllers/hello_controller.ex | mallond/elixir-mix | dfdec1faace5e4457cecf4a34c83ca3c9c0f0f5f | [
"MIT"
] | 1 | 2017-10-05T08:53:03.000Z | 2017-10-05T08:53:03.000Z | web/controllers/hello_controller.ex | mallond/elixir-mix | dfdec1faace5e4457cecf4a34c83ca3c9c0f0f5f | [
"MIT"
] | null | null | null | web/controllers/hello_controller.ex | mallond/elixir-mix | dfdec1faace5e4457cecf4a34c83ca3c9c0f0f5f | [
"MIT"
] | 1 | 2019-04-06T02:33:19.000Z | 2019-04-06T02:33:19.000Z | defmodule ElixirMix.HelloController do
use Phoenix.Controller
require Logger
plug :action
# Hello Demo
def index(conn, _params) do
Logger.info "Hello from the Hello Controller"
render conn, "index.html"
end
end | 14.75 | 49 | 0.720339 |
7906cd445f2b7fccffd974ae010d42e7711f7dda | 909 | exs | Elixir | lib/euler_046.exs | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | [
"MIT"
] | 8 | 2015-11-04T05:03:05.000Z | 2022-01-25T19:34:46.000Z | lib/euler_046.exs | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | [
"MIT"
] | null | null | null | lib/euler_046.exs | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | [
"MIT"
] | null | null | null | defmodule EulerFortySix do
import PrimeSieve
@moduledoc """
It was proposed by Christian Goldbach that every odd composite number
can be written as the sum of a prime and twice a square.
9 = 7 + 2×1^2
15 = 7 + 2×2^2
21 = 3 + 2×3^2
25 = 7 + 2×3^2
27 = 19 + 2×2^2
33 = 31 + 2×1^2
It turns out that the conjecture was false.
What is the smallest odd composite that cannot be written as the sum
of a prime and twice a square?
"""
@primes primes(5000)
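  # For each odd composite n we look for a prime p < n with n = p + 2×k^2,
  # i.e. sqrt((n - p) / 2) must be a whole number (e.g. 33 = 31 + 2×1^2).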
def solve do
uncomposable!(7)
end
def uncomposable!(n) do
if prime?(n) || composable?(n) do
uncomposable!(n + 2)
else
n
end
end
def composable?(n) do
smaller = Enum.take_while(@primes, &(&1 < n))
Enum.any?(smaller, &(squarable?(n, &1)))
end
defp squarable?(n, x) do
root = :math.sqrt((n - x) / 2)
trunc(root) == root
end
end
IO.puts inspect(EulerFortySix.solve)
| 19.340426 | 71 | 0.619362 |
7906cf105965f23ee528d579e74a751f013cb923 | 4,033 | ex | Elixir | lib/mix/ecto.ex | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | lib/mix/ecto.ex | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | lib/mix/ecto.ex | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Ecto do
# Conveniences for writing Mix.Tasks in Ecto.
@moduledoc false
@doc """
Parses the repository option from the given list.
If no repo option is given, we get one from the environment.
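
  ## Example

      # e.g. args as passed by `mix ecto.migrate -r MyApp.Repo` (repo name illustrative):
      parse_repo(["-r", "MyApp.Repo", "--quiet"])
      #=> [MyApp.Repo]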
"""
@spec parse_repo([term]) :: [Ecto.Repo.t]
def parse_repo(args) do
parse_repo(args, [])
end
defp parse_repo([key, value|t], acc) when key in ~w(--repo -r) do
parse_repo t, [Module.concat([value])|acc]
end
defp parse_repo([_|t], acc) do
parse_repo t, acc
end
defp parse_repo([], []) do
if app = Keyword.get(Mix.Project.config, :app) do
case Application.get_env(app, :app_repo) do
nil ->
case Application.get_env(app, :app_namespace, app) do
^app -> app |> to_string |> Mix.Utils.camelize
mod -> mod |> inspect
end |> Module.concat(Repo)
repo ->
repo
end |> List.wrap
else
Mix.raise "No repository available. Please pass a repo with the -r option."
end
end
defp parse_repo([], acc) do
Enum.reverse(acc)
end
@doc """
Ensures the given module is a repository.
"""
def ensure_repo(repos, args) when is_list(repos) do
Enum.map repos, &ensure_repo(&1, args)
end
@spec ensure_repo(module, list) :: Ecto.Repo.t | no_return
def ensure_repo(repo, args) do
Mix.Task.run "loadpaths", args
unless "--no-compile" in args do
Mix.Project.compile(args)
end
case Code.ensure_compiled(repo) do
{:module, _} ->
if function_exported?(repo, :__repo__, 0) do
repo
else
Mix.raise "module #{inspect repo} is not a Ecto.Repo. " <>
"Please pass a repo with the -r option."
end
{:error, error} ->
Mix.raise "could not load #{inspect repo}, error: #{inspect error}. " <>
"Please pass a repo with the -r option."
end
end
@doc """
Ensures the given repository is started and running.
"""
@spec ensure_started(Ecto.Repo.t) :: Ecto.Repo.t | no_return
def ensure_started(repo) do
{:ok, _} = Application.ensure_all_started(:ecto)
case repo.start_link do
{:ok, pid} -> {:ok, pid}
{:error, {:already_started, _}} -> {:ok, nil}
{:error, error} ->
Mix.raise "could not start repo #{inspect repo}, error: #{inspect error}"
end
end
@doc """
Ensures the given pid for repo is stopped.
"""
def ensure_stopped(repo, pid) do
# Silence the logger to avoid application down messages.
Logger.remove_backend(:console)
repo.stop(pid)
after
Logger.add_backend(:console, flush: true)
end
@doc """
Gets the migrations path from a repository.
"""
@spec migrations_path(Ecto.Repo.t) :: String.t
def migrations_path(repo) do
Path.join(repo_priv(repo), "migrations")
end
@doc """
Returns the private repository path.
"""
def repo_priv(repo) do
config = repo.config()
Application.app_dir(Keyword.fetch!(config, :otp_app),
config[:priv] || "priv/#{repo |> Module.split |> List.last |> Macro.underscore}")
end
@doc """
Asks if the user wants to open a file based on ECTO_EDITOR.
"""
@spec open?(binary) :: boolean
def open?(file) do
editor = System.get_env("ECTO_EDITOR") || ""
if editor != "" do
:os.cmd(to_char_list(editor <> " " <> inspect(file)))
true
else
false
end
end
@doc """
  Raises if the given task is run from an umbrella application.
"""
def no_umbrella!(task) do
if Mix.Project.umbrella? do
Mix.raise "cannot run task #{inspect task} from umbrella application"
end
end
@doc """
  Raises unless the module implements the given behaviour.
"""
def ensure_implements(module, behaviour, message) do
all = Keyword.take(module.__info__(:attributes), [:behaviour])
unless [behaviour] in Keyword.values(all) do
Mix.raise "Expected #{inspect module} to implement #{inspect behaviour} " <>
"in order to #{message}"
end
end
end
| 26.708609 | 87 | 0.620134 |
7906e606f77fb35b10ab2ab89ffb283354362fb7 | 15,788 | ex | Elixir | lib/cms/v2/protocols/proto.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | 2 | 2019-04-15T22:17:59.000Z | 2022-01-03T15:35:36.000Z | lib/cms/v2/protocols/proto.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | null | null | null | lib/cms/v2/protocols/proto.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------------
# Author: Keith Brings
# Copyright (C) 2020 Noizu Labs, Inc. All rights reserved.
#-------------------------------------------------------------------------------
defprotocol Noizu.Cms.V2.Proto do
@fallback_to_any false
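  # CMS-managed entities implement this protocol directly; the Tuple/BitString
  # implementation further below resolves refs such as {:ref, Module, id}
  # through Noizu.ERP and then dispatches on the loaded entity.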
#--------------------------------
# @tags
#--------------------------------
def tags(ref, context, options)
def tags!(ref, context, options)
#--------------------------------
# @type
#--------------------------------
def type(ref, context, options)
def type!(ref, context, options)
#--------------------------------
# @is_cms_entity
#--------------------------------
def is_cms_entity?(ref, context, options)
def is_cms_entity!(ref, context, options)
#--------------------------------
# @is_versioning_record
#--------------------------------
def is_versioning_record?(ref, context, options)
def is_versioning_record!(ref, context, options)
#--------------------------------
# @is_revision_record
#--------------------------------
def is_revision_record?(ref, context, options)
def is_revision_record!(ref, context, options)
#--------------------------------
# @versioned_identifier
#--------------------------------
def versioned_identifier(ref, context, options)
def versioned_identifier!(ref, context, options)
#--------------------------------
# @update_article_identifier
#--------------------------------
def update_article_identifier(ref, context, options)
def update_article_identifier!(ref, context, options)
#--------------------------------
# @article_identifier
#--------------------------------
def article_identifier(ref, context, options)
def article_identifier!(ref, context, options)
#--------------------------------
  # @versioned_ref
#--------------------------------
def versioned_ref(ref, context, options)
def versioned_ref!(ref, context, options)
#--------------------------------
# @article_ref
#--------------------------------
def article_ref(ref, context, options)
def article_ref!(ref, context, options)
#--------------------------------
# @get_article
#--------------------------------
def get_article(ref, context, options)
def get_article!(ref, context, options)
#--------------------------------
# @compress_archive
#--------------------------------
def compress_archive(ref, context, options)
def compress_archive!(ref, context, options)
#--------------------------------
# @set_version
#--------------------------------
def set_version(ref, version, context, options)
def set_version!(ref, version, context, options)
#--------------------------------
# @get_version
#--------------------------------
def get_version(ref, context, options)
def get_version!(ref, context, options)
#--------------------------------
# @set_revision
#--------------------------------
def set_revision(ref, revision, context, options)
def set_revision!(ref, revision, context, options)
#--------------------------------
# @get_revision
#--------------------------------
def get_revision(ref, context, options)
def get_revision!(ref, context, options)
#--------------------------------
# @set_parent
#--------------------------------
def set_parent(ref, version, context, options)
def set_parent!(ref, version, context, options)
#--------------------------------
# @get_parent
#--------------------------------
def get_parent(ref, context, options)
def get_parent!(ref, context, options)
#--------------------------------
# @get_article_info
#--------------------------------
def get_article_info(ref, context, options)
def get_article_info!(ref, context, options)
#--------------------------------
# @init_article_info
#--------------------------------
def init_article_info(ref, context, options)
def init_article_info!(ref, context, options)
#--------------------------------
# @update_article_info
#--------------------------------
def update_article_info(ref, context, options)
def update_article_info!(ref, context, options)
#--------------------------------
# @set_article_info
#--------------------------------
def set_article_info(ref, article_info, context, options)
def set_article_info!(ref, article_info, context, options)
end # end defprotocol
#=====================================================
#
#=====================================================
defimpl Noizu.Cms.V2.Proto, for: [Tuple, BitString] do
#----------------------
#
#----------------------
def tags(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.tags(entity, context, options)
end
end
def tags!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.tags!(entity, context, options)
end
end
#----------------------
#
#----------------------
def type(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.type(entity, context, options)
end
end
def type!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.type!(entity, context, options)
end
end
#----------------------
#
#----------------------
def is_cms_entity?(ref, context, options) do
try do
case ref do
{:ref, m, _} -> m.is_cms_entity?(ref, context, options)
_ -> false
end
rescue _e -> false
end
end
def is_cms_entity!(ref, context, options) do
try do
case ref do
{:ref, m, _} -> m.is_cms_entity!(ref, context, options)
_ -> false
end
rescue _e -> false
end
end
#----------------------
#
#----------------------
def is_versioning_record?(ref, context, options) do
try do
case ref do
{:ref, m, _} -> m.is_versioning_record?(ref, context, options)
_ -> false
end
rescue _e -> false
end
end
def is_versioning_record!(ref, context, options) do
try do
case ref do
{:ref, m, _} -> m.is_versioning_record!(ref, context, options)
_ -> false
end
rescue _e -> false
end
end
#----------------------
#
#----------------------
def is_revision_record?(ref, context, options) do
try do
case ref do
{:ref, m, _} -> m.is_revision_record?(ref, context, options)
_ -> false
end
rescue _e -> false
end
end
def is_revision_record!(ref, context, options) do
try do
case ref do
{:ref, m, _} -> m.is_revision_record!(ref, context, options)
_ -> false
end
rescue _e -> false
end
end
#----------------------
#
#----------------------
def versioned_identifier(ref, context, options) do
case ref do
{:ref, _module, {:revision, {_identifier, _version, _revision}} = v_id} -> v_id
_ ->
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.versioned_identifier(entity, context, options)
else
throw "Invalid Entity"
end
end
end
def versioned_identifier!(ref, context, options) do
case ref do
{:ref, _module, {:revision, {_identifier, _version, _revision}} = v_id} -> v_id
_ ->
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.versioned_identifier!(entity, context, options)
else
throw "Invalid Entity"
end
end
end
#----------------------
# update_article_identifier
#----------------------
def update_article_identifier(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.update_article_identifier(entity, context, options)
end
end
def update_article_identifier!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.update_article_identifier!(entity, context, options)
end
end
def article_identifier(ref, context, options) do
case ref do
{:ref, _module, {:revision, {identifier, _version, _revision}}} -> identifier
_ ->
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.article_identifier(entity, context, options)
else
throw "Invalid Entity"
end
end
end
def article_identifier!(ref, context, options) do
case ref do
{:ref, _module, {:revision, {identifier, _version, _revision}}} -> identifier
_ ->
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.article_identifier!(entity, context, options)
else
throw "Invalid Entity"
end
end
end
def versioned_ref(ref, context, options) do
case ref do
v_ref = {:ref, _module, {:revision, {_identifier, _version, _revision}}} -> v_ref
_ ->
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.versioned_ref(entity, context, options)
else
throw "Invalid Entity"
end
end
end
def versioned_ref!(ref, context, options) do
case ref do
v_ref = {:ref, _module, {:revision, {_identifier, _version, _revision}}} -> v_ref
_ ->
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.versioned_ref!(entity, context, options)
else
throw "Invalid Entity"
end
end
end
def article_ref(ref, context, options) do
case ref do
_v_ref = {:ref, m, {:revision, {identifier, _version, _revision}}} -> {:ref, m, identifier}
{:ref, _m, _id} -> ref
_ ->
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.article_ref(entity, context, options)
else
throw "Invalid Entity"
end
end
end
def article_ref!(ref, context, options) do
case ref do
{:ref, m, {:revision, {identifier, _version, _revision}}} -> {:ref, m, identifier}
{:ref, _m, _id} -> ref
_ ->
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.article_ref!(entity, context, options)
else
throw "Invalid Entity"
end
end
end
#----------------------
#
#----------------------
def compress_archive(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.compress_archive(entity, context, options)
end
end
def compress_archive!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.compress_archive!(entity, context, options)
end
end
#----------------------
#
#----------------------
def get_article(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.get_article(entity, context, options)
end
end
def get_article!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.get_article!(entity, context, options)
end
end
#----------------------
#
#----------------------
def set_version(ref, version, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.set_version(entity, version, context, options)
end
end
def set_version!(ref, version, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.set_version!(entity, version, context, options)
end
end
#----------------------
#
#----------------------
def get_version(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.get_version(entity, context, options)
end
end
def get_version!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.get_version!(entity, context, options)
end
end
#----------------------
#
#----------------------
def set_revision(ref, revision, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.set_revision(entity, revision, context, options)
end
end
def set_revision!(ref, revision, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.set_revision!(entity, revision, context, options)
end
end
#----------------------
#
#----------------------
def get_revision(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.get_revision(entity, context, options)
end
end
def get_revision!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.get_revision!(entity, context, options)
end
end
#----------------------
#
#----------------------
def set_parent(ref, version, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.set_parent(entity, version, context, options)
end
end
def set_parent!(ref, version, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.set_parent!(entity, version, context, options)
end
end
#----------------------
#
#----------------------
def get_parent(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.get_parent(entity, context, options)
end
end
def get_parent!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.get_parent!(entity, context, options)
end
end
#----------------------
#
#----------------------
def get_article_info(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.get_article_info(entity, context, options)
end
end
def get_article_info!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.get_article_info!(entity, context, options)
end
end
#--------------------------------
# @init_article_info
#--------------------------------
def init_article_info(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.init_article_info(entity, context, options)
end
end
def init_article_info!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.init_article_info!(entity, context, options)
end
end
#--------------------------------
# @update_article_info
#--------------------------------
def update_article_info(ref, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.update_article_info(entity, context, options)
end
end
def update_article_info!(ref, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.update_article_info!(entity, context, options)
end
end
#----------------------
#
#----------------------
def set_article_info(ref, article_info, context, options) do
if (entity = Noizu.ERP.entity(ref)) do
Noizu.Cms.V2.Proto.set_article_info(entity, article_info, context, options)
end
end
def set_article_info!(ref, article_info, context, options) do
if (entity = Noizu.ERP.entity!(ref)) do
Noizu.Cms.V2.Proto.set_article_info!(entity, article_info, context, options)
end
end
end
| 28.446847 | 98 | 0.515328 |
79070206e56d2f9e183b16f687ff653b325c00ee | 508 | exs | Elixir | benchmark/R5_000.KS.U2.F2.exs | kianmeng/csv_schema | c1e45e5139faeb033f8b3106dacfe288aec24b5c | [
"MIT"
] | 12 | 2019-01-15T13:07:09.000Z | 2022-01-22T08:15:46.000Z | benchmark/R5_000.KS.U2.F2.exs | kianmeng/csv_schema | c1e45e5139faeb033f8b3106dacfe288aec24b5c | [
"MIT"
] | 5 | 2019-01-17T11:46:00.000Z | 2021-12-21T09:42:14.000Z | benchmark/R5_000.KS.U2.F2.exs | kianmeng/csv_schema | c1e45e5139faeb033f8b3106dacfe288aec24b5c | [
"MIT"
] | 1 | 2019-09-25T09:58:07.000Z | 2019-09-25T09:58:07.000Z | defmodule R5_000.KS.U2.F2 do
@moduledoc false
use Csv.Schema
import Csv.Schema.Parser
schema path: "data/dataset_5_000.csv" do
field :id, "id", key: true, parser: &integer!/1
field :first_name, "first_name", filter_by: true
field :last_name, "last_name"
field :email, "email", unique: true
field :gender, "gender", filter_by: true
field :ip_address, "ip_address", unique: true
field :date_of_birth, "date_of_birth"
end
def description, do: ["5_000", true, 2, 2]
end
| 28.222222 | 52 | 0.683071 |
790734054e1d1b215396e8bdcb69cbadc8386469 | 4,070 | ex | Elixir | lib/codes/codes_d30.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_d30.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_d30.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_D30 do
alias IcdCode.ICDCode
def _D3000 do
%ICDCode{full_code: "D3000",
category_code: "D30",
short_code: "00",
full_name: "Benign neoplasm of unspecified kidney",
short_name: "Benign neoplasm of unspecified kidney",
category_name: "Benign neoplasm of unspecified kidney"
}
end
def _D3001 do
%ICDCode{full_code: "D3001",
category_code: "D30",
short_code: "01",
full_name: "Benign neoplasm of right kidney",
short_name: "Benign neoplasm of right kidney",
category_name: "Benign neoplasm of right kidney"
}
end
def _D3002 do
%ICDCode{full_code: "D3002",
category_code: "D30",
short_code: "02",
full_name: "Benign neoplasm of left kidney",
short_name: "Benign neoplasm of left kidney",
category_name: "Benign neoplasm of left kidney"
}
end
def _D3010 do
%ICDCode{full_code: "D3010",
category_code: "D30",
short_code: "10",
full_name: "Benign neoplasm of unspecified renal pelvis",
short_name: "Benign neoplasm of unspecified renal pelvis",
category_name: "Benign neoplasm of unspecified renal pelvis"
}
end
def _D3011 do
%ICDCode{full_code: "D3011",
category_code: "D30",
short_code: "11",
full_name: "Benign neoplasm of right renal pelvis",
short_name: "Benign neoplasm of right renal pelvis",
category_name: "Benign neoplasm of right renal pelvis"
}
end
def _D3012 do
%ICDCode{full_code: "D3012",
category_code: "D30",
short_code: "12",
full_name: "Benign neoplasm of left renal pelvis",
short_name: "Benign neoplasm of left renal pelvis",
category_name: "Benign neoplasm of left renal pelvis"
}
end
def _D3020 do
%ICDCode{full_code: "D3020",
category_code: "D30",
short_code: "20",
full_name: "Benign neoplasm of unspecified ureter",
short_name: "Benign neoplasm of unspecified ureter",
category_name: "Benign neoplasm of unspecified ureter"
}
end
def _D3021 do
%ICDCode{full_code: "D3021",
category_code: "D30",
short_code: "21",
full_name: "Benign neoplasm of right ureter",
short_name: "Benign neoplasm of right ureter",
category_name: "Benign neoplasm of right ureter"
}
end
def _D3022 do
%ICDCode{full_code: "D3022",
category_code: "D30",
short_code: "22",
full_name: "Benign neoplasm of left ureter",
short_name: "Benign neoplasm of left ureter",
category_name: "Benign neoplasm of left ureter"
}
end
def _D303 do
%ICDCode{full_code: "D303",
category_code: "D30",
short_code: "3",
full_name: "Benign neoplasm of bladder",
short_name: "Benign neoplasm of bladder",
category_name: "Benign neoplasm of bladder"
}
end
def _D304 do
%ICDCode{full_code: "D304",
category_code: "D30",
short_code: "4",
full_name: "Benign neoplasm of urethra",
short_name: "Benign neoplasm of urethra",
category_name: "Benign neoplasm of urethra"
}
end
def _D308 do
%ICDCode{full_code: "D308",
category_code: "D30",
short_code: "8",
full_name: "Benign neoplasm of other specified urinary organs",
short_name: "Benign neoplasm of other specified urinary organs",
category_name: "Benign neoplasm of other specified urinary organs"
}
end
def _D309 do
%ICDCode{full_code: "D309",
category_code: "D30",
short_code: "9",
full_name: "Benign neoplasm of urinary organ, unspecified",
short_name: "Benign neoplasm of urinary organ, unspecified",
category_name: "Benign neoplasm of urinary organ, unspecified"
}
end
end
| 32.822581 | 76 | 0.609828 |
79073ff022aac17f48a44372654c3b8b653af433 | 7,890 | ex | Elixir | lib/oban/config.ex | stevelead/oban | 96f8ce2a8bf9f37f49052512176a73b136bd3731 | [
"Apache-2.0"
] | null | null | null | lib/oban/config.ex | stevelead/oban | 96f8ce2a8bf9f37f49052512176a73b136bd3731 | [
"Apache-2.0"
] | null | null | null | lib/oban/config.ex | stevelead/oban | 96f8ce2a8bf9f37f49052512176a73b136bd3731 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Config do
@moduledoc """
The Config struct validates and encapsulates Oban instance state.
Options passed to `Oban.start_link/1` are validated and stored in a config struct. Internal
modules and plugins are always passed the config with a `:conf` key.
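
  ## Example

      # Minimal sketch; `MyApp.Repo` is illustrative.
      conf = Oban.Config.new(repo: MyApp.Repo, queues: [default: 10])
      conf.queues
      #=> [default: [limit: 10]]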
"""
@type t :: %__MODULE__{
dispatch_cooldown: pos_integer(),
engine: module(),
notifier: module(),
name: Oban.name(),
node: binary(),
plugins: [module() | {module() | Keyword.t()}],
prefix: binary(),
queues: [{atom(), Keyword.t()}],
repo: module(),
shutdown_grace_period: timeout(),
log: false | Logger.level(),
get_dynamic_repo: nil | (() -> pid() | atom())
}
@type option :: {:name, module()} | {:conf, t()}
@enforce_keys [:node, :repo]
defstruct dispatch_cooldown: 5,
engine: Oban.Queue.BasicEngine,
notifier: Oban.Notifiers.Postgres,
name: Oban,
node: nil,
plugins: [],
prefix: "public",
queues: [],
repo: nil,
shutdown_grace_period: :timer.seconds(15),
log: false,
get_dynamic_repo: nil
defguardp is_pos_integer(interval) when is_integer(interval) and interval > 0
@doc false
@spec new(Keyword.t()) :: t()
def new(opts) when is_list(opts) do
opts =
opts
|> crontab_to_plugin()
|> poll_interval_to_plugin()
|> Keyword.put_new(:node, node_name())
|> Keyword.update(:plugins, [], &(&1 || []))
|> Keyword.update(:queues, [], &(&1 || []))
|> Keyword.delete(:circuit_backoff)
|> Enum.reject(&(&1 == {:notifier, Oban.PostgresNotifier}))
Enum.each(opts, &validate_opt!/1)
opts =
opts
|> Keyword.update!(:queues, &parse_queues/1)
|> Keyword.update!(:plugins, &normalize_plugins/1)
struct!(__MODULE__, opts)
end
@doc false
@spec node_name(%{optional(binary()) => binary()}) :: binary()
def node_name(env \\ System.get_env()) do
cond do
Node.alive?() ->
to_string(node())
Map.has_key?(env, "DYNO") ->
Map.get(env, "DYNO")
true ->
:inet.gethostname()
|> elem(1)
|> to_string()
end
end
@doc false
@spec to_ident(t()) :: binary()
def to_ident(%__MODULE__{name: name, node: node}) do
inspect(name) <> "." <> to_string(node)
end
@doc false
@spec match_ident?(t(), binary()) :: boolean()
def match_ident?(%__MODULE__{} = conf, ident) when is_binary(ident) do
to_ident(conf) == ident
end
# Helpers
@cron_keys [:crontab, :timezone]
defp crontab_to_plugin(opts) do
case {opts[:plugins], opts[:crontab]} do
{plugins, [_ | _]} when is_list(plugins) or is_nil(plugins) ->
{cron_opts, base_opts} = Keyword.split(opts, @cron_keys)
plugin = {Oban.Plugins.Cron, cron_opts}
Keyword.update(base_opts, :plugins, [plugin], &[plugin | &1])
_ ->
Keyword.drop(opts, @cron_keys)
end
end
defp poll_interval_to_plugin(opts) do
case {opts[:plugins], opts[:poll_interval]} do
{plugins, interval} when (is_list(plugins) or is_nil(plugins)) and is_integer(interval) ->
plugin = {Oban.Plugins.Stager, interval: interval}
opts
|> Keyword.delete(:poll_interval)
|> Keyword.update(:plugins, [plugin], &[plugin | &1])
{plugins, nil} when is_list(plugins) or is_nil(plugins) ->
plugin = Oban.Plugins.Stager
Keyword.update(opts, :plugins, [plugin], &[plugin | &1])
_ ->
Keyword.drop(opts, [:poll_interval])
end
end
defp validate_opt!({:dispatch_cooldown, cooldown}) do
unless is_pos_integer(cooldown) do
raise ArgumentError,
"expected :dispatch_cooldown to be a positive integer, got: #{inspect(cooldown)}"
end
end
defp validate_opt!({:engine, engine}) do
unless Code.ensure_loaded?(engine) and function_exported?(engine, :init, 2) do
raise ArgumentError,
"expected :engine to be an Oban.Queue.Engine, got: #{inspect(engine)}"
end
end
defp validate_opt!({:notifier, notifier}) do
unless Code.ensure_loaded?(notifier) and function_exported?(notifier, :listen, 2) do
raise ArgumentError,
"expected :notifier to be an Oban.Notifier, got: #{inspect(notifier)}"
end
end
defp validate_opt!({:name, _}), do: :ok
defp validate_opt!({:node, node}) do
unless is_binary(node) and String.trim(node) != "" do
raise ArgumentError,
"expected :node to be a non-empty binary, got: #{inspect(node)}"
end
end
defp validate_opt!({:plugins, plugins}) do
unless is_list(plugins) do
raise ArgumentError, "expected :plugins to be a list, got #{inspect(plugins)}"
end
Enum.each(plugins, &validate_plugin/1)
end
defp validate_opt!({:prefix, prefix}) do
unless is_binary(prefix) and Regex.match?(~r/^[a-z0-9_]+$/i, prefix) do
raise ArgumentError,
"expected :prefix to be a binary with alphanumeric characters, got: #{inspect(prefix)}"
end
end
defp validate_opt!({:queues, queues}) do
unless Keyword.keyword?(queues) and Enum.all?(queues, &valid_queue?/1) do
raise ArgumentError,
"expected :queues to be a keyword list of {atom, integer} pairs or " <>
"a list of {atom, keyword} pairs, got: #{inspect(queues)}"
end
end
defp validate_opt!({:repo, repo}) do
unless Code.ensure_loaded?(repo) and function_exported?(repo, :__adapter__, 0) do
raise ArgumentError,
"expected :repo to be an Ecto.Repo, got: #{inspect(repo)}"
end
end
defp validate_opt!({:shutdown_grace_period, period}) do
unless is_pos_integer(period) do
raise ArgumentError,
"expected :shutdown_grace_period to be a positive integer, got: #{inspect(period)}"
end
end
@log_levels ~w(false emergency alert critical error warning warn notice info debug)a
defp validate_opt!({:log, log}) do
unless log in @log_levels do
raise ArgumentError,
"expected :log to be one of #{inspect(@log_levels)}, got: #{inspect(log)}"
end
end
defp validate_opt!({:get_dynamic_repo, fun}) do
unless is_nil(fun) or is_function(fun, 0) do
raise ArgumentError,
"expected :get_dynamic_repo to be nil or a zero arity function, got: #{inspect(fun)}"
end
end
defp validate_opt!(option) do
raise ArgumentError, "unknown option provided #{inspect(option)}"
end
defp valid_queue?({_name, opts}) do
is_pos_integer(opts) or Keyword.keyword?(opts)
end
defp validate_plugin(plugin) when not is_tuple(plugin), do: validate_plugin({plugin, []})
  defp validate_plugin({plugin, opts}) do
    unless is_atom(plugin) do
      raise ArgumentError, "plugin #{inspect(plugin)} is not a valid module"
    end

    unless Code.ensure_loaded?(plugin) do
      raise ArgumentError, "plugin #{inspect(plugin)} could not be found"
    end

    unless function_exported?(plugin, :init, 1) do
      raise ArgumentError,
            "plugin #{inspect(plugin)} is not a valid plugin because it does not provide an `init/1` function"
    end

    unless Keyword.keyword?(opts) do
      raise ArgumentError, "expected plugin options to be a keyword list, got: #{inspect(opts)}"
    end
  end
defp parse_queues(queues) do
for {name, value} <- queues do
opts = if is_integer(value), do: [limit: value], else: value
{name, opts}
end
end
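  # Illustrative parse (hypothetical queues): [default: 10, media: [limit: 5, paused: true]]
  # becomes [default: [limit: 10], media: [limit: 5, paused: true]].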
  # Auto-appended plugins (from crontab_to_plugin/1 and poll_interval_to_plugin/1) are prepended,
  # so they would shadow manually specified plugins during deduplication. Reversing first lets
  # Enum.uniq_by/2, which keeps the first occurrence, prefer the manual entries. The final order
  # doesn't matter because plugins are supervised one-for-one.
defp normalize_plugins(plugins) do
plugins
|> Enum.reverse()
|> Enum.uniq_by(fn
{module, _opts} -> module
module -> module
end)
end
end
| 30.114504 | 101 | 0.625729 |
79074378c2fc06812580b23ee04cd8d98211ca1d | 336 | exs | Elixir | priv/repo/migrations/20210923114510_create_profiles.exs | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
] | null | null | null | priv/repo/migrations/20210923114510_create_profiles.exs | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
] | null | null | null | priv/repo/migrations/20210923114510_create_profiles.exs | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
] | null | null | null | defmodule Philopets.Repo.Migrations.CreateProfiles do
use Ecto.Migration
def change do
create table(:profiles) do
add(:display_name, :string, null: false)
add(:account_id, references(:accounts, on_delete: :delete_all), null: false)
timestamps()
end
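    # Index the foreign key so profiles can be looked up by account efficiently
    # (assumption: fetching an account's profiles is a common query).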
create(index(:profiles, [:account_id]))
end
end
| 22.4 | 82 | 0.693452 |
790768ad4891d140c097d048a01d4608bb2aba9b | 1,232 | exs | Elixir | config/dev.exs | tjefferson08/frizzle-server | 8db3f31e4dae6583603fd2006415129f623827ba | [
"MIT"
] | null | null | null | config/dev.exs | tjefferson08/frizzle-server | 8db3f31e4dae6583603fd2006415129f623827ba | [
"MIT"
] | null | null | null | config/dev.exs | tjefferson08/frizzle-server | 8db3f31e4dae6583603fd2006415129f623827ba | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :frizzle, Frizzle.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
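# For reference, a watcher entry would look like this (illustrative; the brunch
# binary path is an assumption based on the stock Phoenix generator):
#
#   watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
#                     cd: Path.expand("../", __DIR__)]]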
# Watch static and templates for browser reloading.
config :frizzle, Frizzle.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
# Do not include metadata or timestamps in development logs.
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace depth during development. Avoid configuring it
# in production, as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
config :frizzle, Frizzle.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "frizzle_dev",
hostname: "localhost",
pool_size: 10,
extensions: [{Geo.PostGIS.Extension, []}]
| 28 | 68 | 0.706981 |
79078a8e4530ed18a43a80e987d1c3f1e1a80bfa | 1,956 | ex | Elixir | apps/artemis_web/lib/artemis_web/view_helpers/query_params.ex | artemis-platform/artemis_dashboard | 5ab3f5ac4c5255478bbebf76f0e43b44992e3cab | [
"MIT"
] | 9 | 2019-08-19T19:56:34.000Z | 2022-03-22T17:56:38.000Z | apps/artemis_web/lib/artemis_web/view_helpers/query_params.ex | chrislaskey/atlas_dashboard | 9009ef5aac8fefba126fa7d3e3b82d1b610ee6fe | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis_web/lib/artemis_web/view_helpers/query_params.ex | chrislaskey/atlas_dashboard | 9009ef5aac8fefba126fa7d3e3b82d1b610ee6fe | [
"MIT"
] | 2 | 2019-07-05T22:51:47.000Z | 2019-08-19T19:56:37.000Z | defmodule ArtemisWeb.ViewHelper.QueryParams do
use Phoenix.HTML
@doc """
  Adds or drops values from existing query params.

  Drops any key/value pairs where the final value is one of:

    - `nil`
    - `""` (empty bitstring)
    - `[]` (empty list)
    - `%{}` (empty map)
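
  ## Example

  Illustrative only (assumes the Artemis.Helpers deep_* functions behave as their names suggest):

      update_query_params(%{"page" => "2"}, filter: nil, sort: "name")
      #=> %{"page" => "2", "sort" => "name"}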
"""
def update_query_params(current_query_params, values) do
values =
values
|> Enum.into(%{})
|> Artemis.Helpers.keys_to_strings()
current_query_params
|> Artemis.Helpers.deep_merge(values)
|> Artemis.Helpers.deep_drop_by_value(nil)
|> Artemis.Helpers.deep_drop_by_value("")
|> Artemis.Helpers.deep_drop_by_value([])
|> Artemis.Helpers.deep_drop_by_value(%{})
end
@doc """
  Renders a button that sets the given query params in the URL.
"""
def query_param_button(conn, label, values) do
current_query_params = conn.query_params
updated_query_params = ArtemisWeb.ViewHelper.QueryParams.update_query_params(current_query_params, values)
updated_query_string = Plug.Conn.Query.encode(updated_query_params)
path = "#{conn.request_path}?#{updated_query_string}"
active? =
case current_query_params != nil do
true ->
updated_size = Artemis.Helpers.deep_size(updated_query_params)
updated_set = MapSet.new(updated_query_params)
current_size = Artemis.Helpers.deep_size(current_query_params)
current_set = MapSet.new(current_query_params)
add? = current_size <= updated_size
present? = updated_query_params != %{}
subset? = MapSet.subset?(updated_set, current_set)
add? && present? && subset?
false ->
false
end
class =
case active? do
true -> "ui basic button blue"
false -> "ui basic button"
end
options = [
class: class,
onclick: "location.href='#{path}'",
type: "button"
]
content_tag(:button, label, options)
end
end
| 27.166667 | 110 | 0.651329 |
7907a5440c09e8ed7ab0e4d7ff12d1f10926cf5f | 1,642 | ex | Elixir | lib/telepath_web.ex | jdangerx/telepath | 75314db05dd97ea86b5a828ab673fed33ecad5e1 | [
"MIT"
] | null | null | null | lib/telepath_web.ex | jdangerx/telepath | 75314db05dd97ea86b5a828ab673fed33ecad5e1 | [
"MIT"
] | null | null | null | lib/telepath_web.ex | jdangerx/telepath | 75314db05dd97ea86b5a828ab673fed33ecad5e1 | [
"MIT"
] | null | null | null | defmodule TelepathWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use TelepathWeb, :controller
use TelepathWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: TelepathWeb
import Plug.Conn
import TelepathWeb.Gettext
alias TelepathWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/telepath_web/templates",
namespace: TelepathWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import TelepathWeb.ErrorHelpers
import TelepathWeb.Gettext
alias TelepathWeb.Router.Helpers, as: Routes
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import TelepathWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
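
  For example, `use TelepathWeb, :controller` invokes `controller/0` above and
  injects the quoted block it returns into the calling module.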
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 23.457143 | 83 | 0.69123 |