hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7393e8c4404e43423b938f536ef7f43441876cb5 | 59,759 | ex | Elixir | lib/elixir/lib/calendar.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/calendar.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/calendar.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
] | null | null | null | defmodule Calendar do
  @moduledoc """
  This module defines the responsibilities for working with
  calendars, dates, times and datetimes in Elixir.

  Currently it defines types and the minimal implementation
  for a calendar behaviour in Elixir. The goal of the Calendar
  features in Elixir is to provide a base for interoperability
  instead of full-featured datetime API.

  For the actual date, time and datetime structures, see `Date`,
  `Time`, `NaiveDateTime` and `DateTime`.

  Note the year, month, day, etc designations are overspecified
  (i.e. an integer instead of 1..12 for months) because different
  calendars may have a different number of days per month, months per year and so on.
  """

  @type year :: integer
  @type month :: integer
  @type day :: integer
  @type hour :: 0..23
  @type minute :: 0..59

  @typedoc "From 0 to 60 to account for leap seconds"
  @type second :: 0..60

  @typedoc """
  Microseconds with stored precision.

  The precision represents the number of digits
  that must be used when representing the microseconds
  to external format. If the precision is 0, it means
  microseconds must be skipped.
  """
  @type microsecond :: {0..999_999, 0..6}

  @typedoc "A calendar implementation"
  @type calendar :: module

  @typedoc "The time zone ID according to the IANA tz database (e.g. Europe/Zurich)"
  @type time_zone :: String.t

  @typedoc "The time zone abbreviation (e.g. CET or CEST or BST etc.)"
  @type zone_abbr :: String.t

  @typedoc "The time zone UTC offset in seconds"
  @type utc_offset :: integer

  @typedoc "The time zone standard offset in seconds (not zero in summer times)"
  @type std_offset :: integer

  @typedoc "Any map/struct that contains the date fields"
  @type date :: %{calendar: calendar, year: year, month: month, day: day}

  @typedoc "Any map/struct that contains the time fields"
  @type time :: %{hour: hour, minute: minute, second: second, microsecond: microsecond}

  @typedoc "Any map/struct that contains the naive_datetime fields"
  @type naive_date_time :: %{calendar: calendar, year: year, month: month, day: day,
                             hour: hour, minute: minute, second: second, microsecond: microsecond}

  @typedoc "Any map/struct that contains the datetime fields"
  @type date_time :: %{calendar: calendar, year: year, month: month, day: day,
                       hour: hour, minute: minute, second: second, microsecond: microsecond,
                       time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset}

  @doc """
  Returns the last day of the month for the given year-month pair.
  """
  @callback last_day_of_month(year, month) :: day

  @doc """
  Returns true if the given year is a leap year.

  A leap year is a year of a longer length than normal. The exact meaning
  is up to the calendar. A calendar must return `false` if it does not support
  the concept of leap years.
  """
  @callback leap_year?(year) :: boolean

  @doc """
  Calculates the day of the week from the given `year`, `month`, and `day`.
  """
  @callback day_of_week(year, month, day) :: non_neg_integer()

  @doc """
  Converts the date into a string according to the calendar.
  """
  @callback date_to_string(year, month, day) :: String.t

  @doc """
  Converts the date time (without time zone) into a string according to the calendar.
  """
  @callback naive_datetime_to_string(year, month, day, hour, minute, second, microsecond) :: String.t

  @doc """
  Converts the date time (with time zone) into a string according to the calendar.
  """
  @callback datetime_to_string(year, month, day, hour, minute, second, microsecond,
                               time_zone, zone_abbr, utc_offset, std_offset) :: String.t
end
defmodule Date do
  @moduledoc """
  A Date struct and functions.

  The Date struct contains the fields year, month, day and calendar.
  New dates can be built with the `new/3` function or using the `~D`
  sigil:

      iex> ~D[2000-01-01]
      ~D[2000-01-01]

  Both `new/3` and sigil return a struct where the date fields can
  be accessed directly:

      iex> date = ~D[2000-01-01]
      iex> date.year
      2000
      iex> date.month
      1

  The functions on this module work with the `Date` struct as well
  as any struct that contains the same fields as the `Date` struct,
  such as `NaiveDateTime` and `DateTime`. Such functions expect
  `Calendar.date` in their typespecs (instead of `t`).

  Developers should avoid creating the Date struct directly and
  instead rely on the functions provided by this module as well as
  the ones in 3rd party calendar libraries.
  """

  @enforce_keys [:year, :month, :day]
  defstruct [:year, :month, :day, calendar: Calendar.ISO]

  @type t :: %Date{year: Calendar.year, month: Calendar.month,
                   day: Calendar.day, calendar: Calendar.calendar}

  @doc """
  Returns the current date in UTC.

  ## Examples

      iex> date = Date.utc_today()
      iex> date.year >= 2016
      true

  """
  @spec utc_today() :: t
  def utc_today() do
    # Only the date portion of the current UTC datetime is kept; the
    # time-of-day and microsecond parts are discarded.
    {:ok, {year, month, day}, _, _} = Calendar.ISO.from_unix(:os.system_time, :native)
    %Date{year: year, month: month, day: day}
  end

  @doc """
  Returns true if the year in `date` is a leap year.

  ## Examples

      iex> Date.leap_year?(~D[2000-01-01])
      true
      iex> Date.leap_year?(~D[2001-01-01])
      false
      iex> Date.leap_year?(~D[2004-01-01])
      true
      iex> Date.leap_year?(~D[1900-01-01])
      false
      iex> Date.leap_year?(~N[2004-01-01 01:23:45])
      true

  """
  @spec leap_year?(Calendar.date) :: boolean()
  def leap_year?(%{calendar: calendar, year: year}) do
    # Delegates to the calendar stored in the date, so this also works
    # for non-ISO calendar implementations.
    calendar.leap_year?(year)
  end

  @doc """
  Builds a new ISO date.

  Expects all values to be integers. Returns `{:ok, date}` if each
  entry fits its appropriate range, returns `{:error, reason}` otherwise.

  ## Examples

      iex> Date.new(2000, 1, 1)
      {:ok, ~D[2000-01-01]}
      iex> Date.new(2000, 13, 1)
      {:error, :invalid_date}

      iex> Date.new(2000, 2, 29)
      {:ok, ~D[2000-02-29]}
      iex> Date.new(2000, 2, 30)
      {:error, :invalid_date}
      iex> Date.new(2001, 2, 29)
      {:error, :invalid_date}

  """
  @spec new(Calendar.year, Calendar.month, Calendar.day) :: {:ok, t} | {:error, atom}
  def new(year, month, day) do
    # Range validation (including leap-day handling) lives in Calendar.ISO.
    Calendar.ISO.date(year, month, day)
  end

  @doc """
  Converts the given date to a string according to its calendar.

  ## Examples

      iex> Date.to_string(~D[2000-02-28])
      "2000-02-28"
      iex> Date.to_string(~N[2000-02-28 01:23:45])
      "2000-02-28"

  """
  @spec to_string(Calendar.date) :: String.t
  def to_string(%{calendar: calendar, year: year, month: month, day: day}) do
    calendar.date_to_string(year, month, day)
  end

  @doc """
  Parses the extended "Dates" format described by
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  Only the exact `YYYY-MM-DD` shape is accepted; time of day, timezone
  offsets and representations with reduced accuracy are not supported.

  ## Examples

      iex> Date.from_iso8601("2015-01-23")
      {:ok, ~D[2015-01-23]}

      iex> Date.from_iso8601("2015:01:23")
      {:error, :invalid_format}
      iex> Date.from_iso8601("2015-01-32")
      {:error, :invalid_date}

  """
  @spec from_iso8601(String.t) :: {:ok, t} | {:error, atom}
  # The fixed-size binary pattern matches exactly 10 bytes shaped
  # "YYYY-MM-DD"; anything else falls to the catch-all clause below.
  def from_iso8601(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes>>) do
    with {year, ""} <- Integer.parse(year),
         {month, ""} <- Integer.parse(month),
         {day, ""} <- Integer.parse(day) do
      new(year, month, day)
    else
      # Non-numeric fields are a format error; out-of-range values are
      # reported by new/3 as {:error, :invalid_date}.
      _ -> {:error, :invalid_format}
    end
  end

  def from_iso8601(<<_::binary>>) do
    {:error, :invalid_format}
  end

  @doc """
  Parses the extended "Dates" format described by
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  Raises if the format is invalid.

  ## Examples

      iex> Date.from_iso8601!("2015-01-23")
      ~D[2015-01-23]
      iex> Date.from_iso8601!("2015:01:23")
      ** (ArgumentError) cannot parse "2015:01:23" as date, reason: :invalid_format

  """
  @spec from_iso8601!(String.t) :: t | no_return
  def from_iso8601!(string) do
    case from_iso8601(string) do
      {:ok, value} ->
        value
      {:error, reason} ->
        raise ArgumentError, "cannot parse #{inspect string} as date, reason: #{inspect reason}"
    end
  end

  @doc """
  Converts the given date to
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  Only supports converting dates which are in the ISO calendar,
  attempting to convert dates from other calendars will raise.

  ## Examples

      iex> Date.to_iso8601(~D[2000-02-28])
      "2000-02-28"
      iex> Date.to_iso8601(~N[2000-02-28 01:23:45])
      "2000-02-28"

  """
  @spec to_iso8601(Calendar.date) :: String.t
  # Matching Calendar.ISO in the head means any other calendar raises
  # FunctionClauseError, as documented above.
  def to_iso8601(%{calendar: Calendar.ISO, year: year, month: month, day: day}) do
    Calendar.ISO.date_to_iso8601(year, month, day)
  end

  @doc """
  Converts a `Date` struct to an Erlang date tuple.

  Only supports converting dates which are in the ISO calendar,
  attempting to convert dates from other calendars will raise.

  ## Examples

      iex> Date.to_erl(~D[2000-01-01])
      {2000, 1, 1}
      iex> Date.to_erl(~N[2000-01-01 01:23:45])
      {2000, 1, 1}

  """
  @spec to_erl(Calendar.date) :: :calendar.date
  def to_erl(%{calendar: Calendar.ISO, year: year, month: month, day: day}) do
    {year, month, day}
  end

  @doc """
  Converts an Erlang date tuple to a `Date` struct.

  Attempting to convert an invalid ISO calendar date will produce an error tuple.

  ## Examples

      iex> Date.from_erl({2000, 1, 1})
      {:ok, ~D[2000-01-01]}
      iex> Date.from_erl({2000, 13, 1})
      {:error, :invalid_date}

  """
  @spec from_erl(:calendar.date) :: {:ok, t} | {:error, atom}
  def from_erl({year, month, day}) do
    new(year, month, day)
  end

  @doc """
  Converts an Erlang date tuple but raises for invalid dates.

  ## Examples

      iex> Date.from_erl!({2000, 1, 1})
      ~D[2000-01-01]
      iex> Date.from_erl!({2000, 13, 1})
      ** (ArgumentError) cannot convert {2000, 13, 1} to date, reason: :invalid_date

  """
  @spec from_erl!(:calendar.date) :: t | no_return
  def from_erl!(tuple) do
    case from_erl(tuple) do
      {:ok, value} ->
        value
      {:error, reason} ->
        raise ArgumentError, "cannot convert #{inspect tuple} to date, reason: #{inspect reason}"
    end
  end

  @doc """
  Compares two `Date` structs.

  Returns `:gt` if first date is later than the second
  and `:lt` for vice versa. If the two dates are equal
  `:eq` is returned.

  ## Examples

      iex> Date.compare(~D[2016-04-16], ~D[2016-04-28])
      :lt

  This function can also be used to compare across more
  complex calendar types by considering only the date fields:

      iex> Date.compare(~D[2016-04-16], ~N[2016-04-28 01:23:45])
      :lt
      iex> Date.compare(~D[2016-04-16], ~N[2016-04-16 01:23:45])
      :eq
      iex> Date.compare(~N[2016-04-16 12:34:56], ~N[2016-04-16 01:23:45])
      :eq

  """
  @spec compare(Calendar.date, Calendar.date) :: :lt | :eq | :gt
  def compare(date1, date2) do
    # {year, month, day} tuples order chronologically under Erlang term
    # ordering; to_erl/1 also restricts comparison to ISO-calendar dates.
    case {to_erl(date1), to_erl(date2)} do
      {first, second} when first > second -> :gt
      {first, second} when first < second -> :lt
      _ -> :eq
    end
  end

  @doc """
  Calculates the day of the week of a given `Date` struct.

  Returns the day of the week as an integer. For the ISO 8601
  calendar (the default), it is an integer from 1 to 7, where
  1 is Monday and 7 is Sunday.

  ## Examples

      iex> Date.day_of_week(~D[2016-10-31])
      1
      iex> Date.day_of_week(~D[2016-11-01])
      2
      iex> Date.day_of_week(~N[2016-11-01 01:23:45])
      2

  """
  @spec day_of_week(Calendar.date) :: non_neg_integer()
  def day_of_week(%{calendar: calendar, year: year, month: month, day: day}) do
    calendar.day_of_week(year, month, day)
  end

  ## Helpers

  defimpl String.Chars do
    def to_string(%{calendar: calendar, year: year, month: month, day: day}) do
      calendar.date_to_string(year, month, day)
    end
  end

  defimpl Inspect do
    # Only ISO dates get the compact ~D sigil representation; any other
    # calendar falls back to the default struct inspection below.
    def inspect(%{calendar: Calendar.ISO, year: year, month: month, day: day}, _) do
      "~D[" <> Calendar.ISO.date_to_string(year, month, day) <> "]"
    end

    def inspect(date, opts) do
      Inspect.Any.inspect(date, opts)
    end
  end
end
defmodule Time do
  @moduledoc """
  A Time struct and functions.

  The Time struct contains the fields hour, minute, second and microseconds.
  New times can be built with the `new/4` function or using the `~T`
  sigil:

      iex> ~T[23:00:07.001]
      ~T[23:00:07.001]

  Both `new/4` and sigil return a struct where the time fields can
  be accessed directly:

      iex> time = ~T[23:00:07.001]
      iex> time.hour
      23
      iex> time.microsecond
      {1000, 3}

  The functions on this module work with the `Time` struct as well
  as any struct that contains the same fields as the `Time` struct,
  such as `NaiveDateTime` and `DateTime`. Such functions expect
  `Calendar.time` in their typespecs (instead of `t`).

  Developers should avoid creating the Time struct directly and
  instead rely on the functions provided by this module as well as
  the ones in 3rd party calendar libraries.
  """

  @enforce_keys [:hour, :minute, :second]
  defstruct [:hour, :minute, :second, microsecond: {0, 0}]

  @type t :: %Time{hour: Calendar.hour, minute: Calendar.minute,
                   second: Calendar.second, microsecond: Calendar.microsecond}

  @doc """
  Returns the current time in UTC.

  ## Examples

      iex> time = Time.utc_now()
      iex> time.hour >= 0
      true

  """
  @spec utc_now() :: t
  def utc_now() do
    # The date portion of the current UTC datetime is discarded.
    {:ok, _, {hour, minute, second}, microsecond} = Calendar.ISO.from_unix(:os.system_time, :native)
    %Time{hour: hour, minute: minute, second: second, microsecond: microsecond}
  end

  @doc """
  Builds a new time.

  Expects all values to be integers. Returns `{:ok, time}` if each
  entry fits its appropriate range, returns `{:error, reason}` otherwise.

  Note a time may have 60 seconds in case of leap seconds.

  ## Examples

      iex> Time.new(0, 0, 0, 0)
      {:ok, ~T[00:00:00.000000]}
      iex> Time.new(23, 59, 59, 999_999)
      {:ok, ~T[23:59:59.999999]}
      iex> Time.new(23, 59, 60, 999_999)
      {:ok, ~T[23:59:60.999999]}

      # Time with microseconds and their precision
      iex> Time.new(23, 59, 60, {10_000, 2})
      {:ok, ~T[23:59:60.01]}

      iex> Time.new(24, 59, 59, 999_999)
      {:error, :invalid_time}
      iex> Time.new(23, 60, 59, 999_999)
      {:error, :invalid_time}
      iex> Time.new(23, 59, 61, 999_999)
      {:error, :invalid_time}
      iex> Time.new(23, 59, 59, 1_000_000)
      {:error, :invalid_time}

  """
  @spec new(Calendar.hour, Calendar.minute, Calendar.second, Calendar.microsecond) ::
        {:ok, Time.t} | {:error, atom}
  def new(hour, minute, second, microsecond \\ {0, 0})

  # A bare integer microsecond is normalized to full (6-digit) precision.
  def new(hour, minute, second, microsecond) when is_integer(microsecond) do
    new(hour, minute, second, {microsecond, 6})
  end

  def new(hour, minute, second, {microsecond, precision})
      when is_integer(hour) and is_integer(minute) and is_integer(second) and
           is_integer(microsecond) and is_integer(precision) do
    # Seconds range to 60 (not 59) to accommodate leap seconds.
    if hour in 0..23 and minute in 0..59 and second in 0..60 and
       microsecond in 0..999_999 and precision in 0..6 do
      {:ok, %Time{hour: hour, minute: minute, second: second, microsecond: {microsecond, precision}}}
    else
      {:error, :invalid_time}
    end
  end

  @doc """
  Converts the given time to a string.

  ## Examples

      iex> Time.to_string(~T[23:00:00])
      "23:00:00"
      iex> Time.to_string(~T[23:00:00.001])
      "23:00:00.001"
      iex> Time.to_string(~T[23:00:00.123456])
      "23:00:00.123456"

      iex> Time.to_string(~N[2015-01-01 23:00:00.001])
      "23:00:00.001"
      iex> Time.to_string(~N[2015-01-01 23:00:00.123456])
      "23:00:00.123456"

  """
  @spec to_string(Calendar.time) :: String.t
  def to_string(%{hour: hour, minute: minute, second: second, microsecond: microsecond}) do
    Calendar.ISO.time_to_string(hour, minute, second, microsecond)
  end

  @doc """
  Parses the extended "Local time" format described by
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  Timezone offset may be included in the string but they will be
  simply discarded as such information is not included in times.

  As specified in the standard, the separator "T" may be omitted if
  desired as there is no ambiguity within this function.

  Time representations with reduced accuracy are not supported.

  ## Examples

      iex> Time.from_iso8601("23:50:07")
      {:ok, ~T[23:50:07]}
      iex> Time.from_iso8601("23:50:07Z")
      {:ok, ~T[23:50:07]}
      iex> Time.from_iso8601("T23:50:07Z")
      {:ok, ~T[23:50:07]}

      iex> Time.from_iso8601("23:50:07.0123456")
      {:ok, ~T[23:50:07.012345]}
      iex> Time.from_iso8601("23:50:07.123Z")
      {:ok, ~T[23:50:07.123]}

      iex> Time.from_iso8601("2015:01:23 23-50-07")
      {:error, :invalid_format}
      iex> Time.from_iso8601("23:50:07A")
      {:error, :invalid_format}
      iex> Time.from_iso8601("23:50:07.")
      {:error, :invalid_format}
      iex> Time.from_iso8601("23:50:61")
      {:error, :invalid_time}

  """
  @spec from_iso8601(String.t) :: {:ok, t} | {:error, atom}
  # Strip an optional leading "T" separator when it is followed by a digit.
  def from_iso8601(<<?T, h, rest::binary>>) when h in ?0..?9 do
    from_iso8601(<<h, rest::binary>>)
  end

  def from_iso8601(<<hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes, rest::binary>>) do
    # Fractional seconds and a timezone offset may follow; the offset is
    # parsed for validity but then discarded.
    with {hour, ""} <- Integer.parse(hour),
         {min, ""} <- Integer.parse(min),
         {sec, ""} <- Integer.parse(sec),
         {microsec, rest} <- Calendar.ISO.parse_microsecond(rest),
         {_offset, ""} <- Calendar.ISO.parse_offset(rest) do
      new(hour, min, sec, microsec)
    else
      _ -> {:error, :invalid_format}
    end
  end

  def from_iso8601(<<_::binary>>) do
    {:error, :invalid_format}
  end

  @doc """
  Parses the extended "Local time" format described by
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  Raises if the format is invalid.

  ## Examples

      iex> Time.from_iso8601!("23:50:07.123Z")
      ~T[23:50:07.123]
      iex> Time.from_iso8601!("2015:01:23 23-50-07")
      ** (ArgumentError) cannot parse "2015:01:23 23-50-07" as time, reason: :invalid_format

  """
  @spec from_iso8601!(String.t) :: t | no_return
  def from_iso8601!(string) do
    case from_iso8601(string) do
      {:ok, value} ->
        value
      {:error, reason} ->
        raise ArgumentError, "cannot parse #{inspect string} as time, reason: #{inspect reason}"
    end
  end

  @doc """
  Converts the given time to
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  ## Examples

      iex> Time.to_iso8601(~T[23:00:13])
      "23:00:13"
      iex> Time.to_iso8601(~T[23:00:13.001])
      "23:00:13.001"

      iex> Time.to_iso8601(~N[2015-01-01 23:00:13])
      "23:00:13"
      iex> Time.to_iso8601(~N[2015-01-01 23:00:13.001])
      "23:00:13.001"

  """
  @spec to_iso8601(Calendar.time) :: String.t
  def to_iso8601(%{hour: hour, minute: minute, second: second, microsecond: microsecond}) do
    Calendar.ISO.time_to_iso8601(hour, minute, second, microsecond)
  end

  @doc """
  Converts a `Time` struct to an Erlang time tuple.

  WARNING: Loss of precision may occur, as Erlang time tuples
  only contain hours/minutes/seconds.

  ## Examples

      iex> Time.to_erl(~T[23:30:15.999])
      {23, 30, 15}
      iex> Time.to_erl(~N[2015-01-01 23:30:15.999])
      {23, 30, 15}

  """
  @spec to_erl(Calendar.time) :: :calendar.time
  def to_erl(%{hour: hour, minute: minute, second: second}) do
    {hour, minute, second}
  end

  @doc """
  Converts an Erlang time tuple to a `Time` struct.

  ## Examples

      iex> Time.from_erl({23, 30, 15}, {5000, 3})
      {:ok, ~T[23:30:15.005]}
      iex> Time.from_erl({24, 30, 15})
      {:error, :invalid_time}

  """
  @spec from_erl(:calendar.time, Calendar.microsecond) :: {:ok, t} | {:error, atom}
  def from_erl({hour, minute, second}, microsecond \\ {0, 0}) do
    new(hour, minute, second, microsecond)
  end

  @doc """
  Converts an Erlang time tuple to a `Time` struct, raising for invalid times.

  ## Examples

      iex> Time.from_erl!({23, 30, 15})
      ~T[23:30:15]
      iex> Time.from_erl!({23, 30, 15}, {5000, 3})
      ~T[23:30:15.005]
      iex> Time.from_erl!({24, 30, 15})
      ** (ArgumentError) cannot convert {24, 30, 15} to time, reason: :invalid_time

  """
  @spec from_erl!(:calendar.time, Calendar.microsecond) :: t | no_return
  def from_erl!(tuple, microsecond \\ {0, 0}) do
    case from_erl(tuple, microsecond) do
      {:ok, value} ->
        value
      {:error, reason} ->
        raise ArgumentError, "cannot convert #{inspect tuple} to time, reason: #{inspect reason}"
    end
  end

  @doc """
  Compares two `Time` structs.

  Returns `:gt` if first time is later than the second
  and `:lt` for vice versa. If the two times are equal
  `:eq` is returned.

  ## Examples

      iex> Time.compare(~T[16:04:16], ~T[16:04:28])
      :lt
      iex> Time.compare(~T[16:04:16.01], ~T[16:04:16.001])
      :gt

  This function can also be used to compare across more
  complex calendar types by considering only the time fields:

      iex> Time.compare(~N[2015-01-01 16:04:16], ~N[2015-01-01 16:04:28])
      :lt
      iex> Time.compare(~N[2015-01-01 16:04:16.01], ~N[2000-01-01 16:04:16.001])
      :gt

  """
  @spec compare(Calendar.time, Calendar.time) :: :lt | :eq | :gt
  def compare(time1, time2) do
    # {hour, minute, second, microsecond} tuples order chronologically
    # under Erlang term ordering.
    case {to_tuple(time1), to_tuple(time2)} do
      {first, second} when first > second -> :gt
      {first, second} when first < second -> :lt
      _ -> :eq
    end
  end

  ## Helpers

  # Builds a comparable tuple; the stored precision is dropped so only
  # the microsecond value takes part in comparisons.
  defp to_tuple(%{hour: hour, minute: minute, second: second, microsecond: {microsecond, _precision}}) do
    {hour, minute, second, microsecond}
  end

  defimpl String.Chars do
    def to_string(%{hour: hour, minute: minute, second: second, microsecond: microsecond}) do
      Calendar.ISO.time_to_string(hour, minute, second, microsecond)
    end
  end

  defimpl Inspect do
    def inspect(%{hour: hour, minute: minute, second: second, microsecond: microsecond}, _) do
      "~T[" <> Calendar.ISO.time_to_string(hour, minute, second, microsecond) <> "]"
    end
  end
end
defmodule NaiveDateTime do
@moduledoc """
A NaiveDateTime struct (without a time zone) and functions.
The NaiveDateTime struct contains the fields year, month, day, hour,
minute, second, microsecond and calendar. New naive datetimes can be
built with the `new/7` function or using the `~N` sigil:
iex> ~N[2000-01-01 23:00:07]
~N[2000-01-01 23:00:07]
Both `new/7` and sigil return a struct where the date fields can
be accessed directly:
iex> naive = ~N[2000-01-01 23:00:07]
iex> naive.year
2000
iex> naive.second
7
The naive bit implies this datetime representation does
not have a time zone. This means the datetime may not
actually exist in certain areas in the world even though
it is valid.
For example, when daylight saving changes are applied
by a region, the clock typically moves forward or backward
by one hour. This means certain datetimes never occur or
may occur more than once. Since `NaiveDateTime` is not
validated against a time zone, such errors would go unnoticed.
Developers should avoid creating the NaiveDateTime struct directly
and instead rely on the functions provided by this module as well
as the ones in 3rd party calendar libraries.
"""
@enforce_keys [:year, :month, :day, :hour, :minute, :second]
defstruct [:year, :month, :day, :hour, :minute, :second, microsecond: {0, 0}, calendar: Calendar.ISO]
@type t :: %NaiveDateTime{year: Calendar.year, month: Calendar.month, day: Calendar.day,
calendar: Calendar.calendar, hour: Calendar.hour, minute: Calendar.minute,
second: Calendar.second, microsecond: Calendar.microsecond}
@doc """
Returns the current naive datetime in UTC.
Prefer using `DateTime.utc_now/0` when possible as, opposite
to `NaiveDateTime`, it will keep the time zone information.
## Examples
iex> naive_datetime = NaiveDateTime.utc_now()
iex> naive_datetime.year >= 2016
true
"""
@spec utc_now() :: t
def utc_now() do
{:ok, {year, month, day}, {hour, minute, second}, microsecond} =
Calendar.ISO.from_unix(:os.system_time, :native)
%NaiveDateTime{year: year, month: month, day: day,
hour: hour, minute: minute, second: second,
microsecond: microsecond}
end
@doc """
Builds a new ISO naive datetime.
Expects all values to be integers. Returns `{:ok, naive_datetime}`
if each entry fits its appropriate range, returns `{:error, reason}`
otherwise.
## Examples
iex> NaiveDateTime.new(2000, 1, 1, 0, 0, 0)
{:ok, ~N[2000-01-01 00:00:00]}
iex> NaiveDateTime.new(2000, 13, 1, 0, 0, 0)
{:error, :invalid_date}
iex> NaiveDateTime.new(2000, 2, 29, 0, 0, 0)
{:ok, ~N[2000-02-29 00:00:00]}
iex> NaiveDateTime.new(2000, 2, 30, 0, 0, 0)
{:error, :invalid_date}
iex> NaiveDateTime.new(2001, 2, 29, 0, 0, 0)
{:error, :invalid_date}
iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 59, {0, 1})
{:ok, ~N[2000-01-01 23:59:59.0]}
iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 59, 999_999)
{:ok, ~N[2000-01-01 23:59:59.999999]}
iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 60, 999_999)
{:ok, ~N[2000-01-01 23:59:60.999999]}
iex> NaiveDateTime.new(2000, 1, 1, 24, 59, 59, 999_999)
{:error, :invalid_time}
iex> NaiveDateTime.new(2000, 1, 1, 23, 60, 59, 999_999)
{:error, :invalid_time}
iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 61, 999_999)
{:error, :invalid_time}
iex> NaiveDateTime.new(2000, 1, 1, 23, 59, 59, 1_000_000)
{:error, :invalid_time}
"""
@spec new(Calendar.year, Calendar.month, Calendar.day,
Calendar.hour, Calendar.minute, Calendar.second, Calendar.microsecond) ::
{:ok, t} | {:error, atom}
def new(year, month, day, hour, minute, second, microsecond \\ {0, 0}) do
with {:ok, date} <- Calendar.ISO.date(year, month, day),
{:ok, time} <- Time.new(hour, minute, second, microsecond),
do: new(date, time)
end
@doc """
Builds a naive datetime from date and time structs.
## Examples
iex> NaiveDateTime.new(~D[2010-01-13], ~T[23:00:07.005])
{:ok, ~N[2010-01-13 23:00:07.005]}
"""
@spec new(Date.t, Time.t) :: {:ok, t}
def new(date, time)
def new(%Date{calendar: calendar, year: year, month: month, day: day},
%Time{hour: hour, minute: minute, second: second, microsecond: microsecond}) do
{:ok, %NaiveDateTime{calendar: calendar, year: year, month: month, day: day,
hour: hour, minute: minute, second: second, microsecond: microsecond}}
end
@doc """
Adds a specified amount of time to a `NaiveDateTime`.
Accepts an `integer` in any `unit` available from `t:System.time_unit/0`.
Negative values will be move backwards in time.
## Examples
# adds seconds by default
iex> NaiveDateTime.add(~N[2014-10-02 00:29:10], 2)
~N[2014-10-02 00:29:12]
# accepts negative offsets
iex> NaiveDateTime.add(~N[2014-10-02 00:29:10], -2)
~N[2014-10-02 00:29:08]
# can work with other units
iex> NaiveDateTime.add(~N[2014-10-02 00:29:10], 2_000, :millisecond)
~N[2014-10-02 00:29:12]
# keeps the same precision
iex> NaiveDateTime.add(~N[2014-10-02 00:29:10.021], 21, :second)
~N[2014-10-02 00:29:31.021]
# changes below the precision will not be visible
iex> hidden = NaiveDateTime.add(~N[2014-10-02 00:29:10], 21, :millisecond)
iex> hidden.microsecond # ~N[2014-10-02 00:29:10]
{21000, 0}
# from gregorian seconds
iex> NaiveDateTime.add(~N[0000-01-01 00:00:00], 63579428950)
~N[2014-10-02 00:29:10]
"""
@spec add(t, integer, System.time_unit) :: t
def add(%NaiveDateTime{microsecond: {_microsecond, precision}} = naive_datetime,
integer, unit \\ :second) when is_integer(integer) do
ndt_microsecond = to_microsecond(naive_datetime)
added_microsecond = System.convert_time_unit(integer, unit, :microsecond)
sum = ndt_microsecond + added_microsecond
microsecond = rem(sum, 1_000_000)
{{year, month, day}, {hour, minute, second}} =
sum |> div(1_000_000) |> :calendar.gregorian_seconds_to_datetime
%NaiveDateTime{year: year, month: month, day: day,
hour: hour, minute: minute, second: second,
microsecond: {microsecond, precision}}
end
@doc """
Subtract `naive_datetime2` from `naive_datetime1`.
The answer can be returned in any `unit` available from `t:System.time_unit/0`.
## Examples
iex> NaiveDateTime.diff(~N[2014-10-02 00:29:12], ~N[2014-10-02 00:29:10])
2
iex> NaiveDateTime.diff(~N[2014-10-02 00:29:12], ~N[2014-10-02 00:29:10], :microsecond)
2_000_000
iex> NaiveDateTime.diff(~N[2014-10-02 00:29:10.042], ~N[2014-10-02 00:29:10.021], :millisecond)
21
# to gregorian seconds
iex> NaiveDateTime.diff(~N[2014-10-02 00:29:10], ~N[0000-01-01 00:00:00])
63579428950
"""
@spec diff(t, t, System.time_unit) :: integer
def diff(%NaiveDateTime{} = naive_datetime1,
%NaiveDateTime{} = naive_datetime2,
unit \\ :second) do
ndt1_microsecond = to_microsecond(naive_datetime1)
ndt2_microsecond = to_microsecond(naive_datetime2)
difference = ndt1_microsecond - ndt2_microsecond
System.convert_time_unit(difference, :microsecond, unit)
end
@doc """
Converts a `NaiveDateTime` into a `Date`.
Because `Date` does not hold time information,
data will be lost during the conversion.
## Examples
iex> NaiveDateTime.to_date(~N[2002-01-13 23:00:07])
~D[2002-01-13]
"""
@spec to_date(t) :: Date.t
def to_date(%NaiveDateTime{year: year, month: month, day: day, calendar: calendar}) do
%Date{year: year, month: month, day: day, calendar: calendar}
end
@doc """
Converts a `NaiveDateTime` into `Time`.
Because `Time` does not hold date information,
data will be lost during the conversion.
## Examples
iex> NaiveDateTime.to_time(~N[2002-01-13 23:00:07])
~T[23:00:07]
"""
@spec to_time(t) :: Time.t
def to_time(%NaiveDateTime{hour: hour, minute: minute, second: second, microsecond: microsecond}) do
%Time{hour: hour, minute: minute, second: second, microsecond: microsecond}
end
@doc """
Converts the given naive datetime to a string according to its calendar.
### Examples
iex> NaiveDateTime.to_string(~N[2000-02-28 23:00:13])
"2000-02-28 23:00:13"
iex> NaiveDateTime.to_string(~N[2000-02-28 23:00:13.001])
"2000-02-28 23:00:13.001"
This function can also be used to convert a DateTime to a string without
the time zone information:
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> NaiveDateTime.to_string(dt)
"2000-02-29 23:00:07"
"""
@spec to_string(Calendar.naive_datetime) :: String.t
def to_string(%{calendar: calendar, year: year, month: month, day: day,
hour: hour, minute: minute, second: second, microsecond: microsecond}) do
calendar.naive_datetime_to_string(year, month, day, hour, minute, second, microsecond)
end
@doc """
Parses the extended "Date and time of day" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

Timezone offset may be included in the string but they will be
simply discarded as such information is not included in naive date
times.

As specified in the standard, the separator "T" may be omitted if
desired as there is no ambiguity within this function.

Time representations with reduced accuracy are not supported.

## Examples

    iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07")
    {:ok, ~N[2015-01-23 23:50:07]}
    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07")
    {:ok, ~N[2015-01-23 23:50:07]}
    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07Z")
    {:ok, ~N[2015-01-23 23:50:07]}

    iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07.0")
    {:ok, ~N[2015-01-23 23:50:07.0]}
    iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07.0123456")
    {:ok, ~N[2015-01-23 23:50:07.012345]}
    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123Z")
    {:ok, ~N[2015-01-23 23:50:07.123]}

    iex> NaiveDateTime.from_iso8601("2015-01-23P23:50:07")
    {:error, :invalid_format}
    iex> NaiveDateTime.from_iso8601("2015:01:23 23-50-07")
    {:error, :invalid_format}
    iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:07A")
    {:error, :invalid_format}
    iex> NaiveDateTime.from_iso8601("2015-01-23 23:50:61")
    {:error, :invalid_time}
    iex> NaiveDateTime.from_iso8601("2015-01-32 23:50:07")
    {:error, :invalid_date}

    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123+02:30")
    {:ok, ~N[2015-01-23 23:50:07.123]}
    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123+00:00")
    {:ok, ~N[2015-01-23 23:50:07.123]}
    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123-02:30")
    {:ok, ~N[2015-01-23 23:50:07.123]}
    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123-00:00")
    {:error, :invalid_format}
    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123-00:60")
    {:error, :invalid_format}
    iex> NaiveDateTime.from_iso8601("2015-01-23T23:50:07.123-24:00")
    {:error, :invalid_format}

"""
@spec from_iso8601(String.t) :: {:ok, t} | {:error, atom}
# The binary pattern matches the fixed-width "YYYY-MM-DD" and "hh:mm:ss"
# segments directly; `sep` is the single byte between date and time,
# restricted by the guard to a space or "T" as allowed by ISO 8601.
def from_iso8601(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes, sep,
       hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes, rest::binary>>) when sep in [?\s, ?T] do
  with {year, ""} <- Integer.parse(year),
       {month, ""} <- Integer.parse(month),
       {day, ""} <- Integer.parse(day),
       {hour, ""} <- Integer.parse(hour),
       {min, ""} <- Integer.parse(min),
       {sec, ""} <- Integer.parse(sec),
       # Fractional seconds are optional; a trailing UTC offset is parsed
       # only so it can be validated and then discarded (naive datetimes
       # carry no time zone information).
       {microsec, rest} <- Calendar.ISO.parse_microsecond(rest),
       {_offset, ""} <- Calendar.ISO.parse_offset(rest) do
    # Range validation happens in new/7, which produces the
    # {:error, :invalid_date} / {:error, :invalid_time} results above.
    new(year, month, day, hour, min, sec, microsec)
  else
    # Any failed step above (non-numeric field, bad fraction or offset)
    # collapses into a single invalid-format error.
    _ -> {:error, :invalid_format}
  end
end

# Anything that does not match the fixed-width prefix above is rejected.
def from_iso8601(<<_::binary>>) do
  {:error, :invalid_format}
end
@doc """
Parses the extended "Date and time of day" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

Raises if the format is invalid.

## Examples

    iex> NaiveDateTime.from_iso8601!("2015-01-23T23:50:07.123Z")
    ~N[2015-01-23 23:50:07.123]
    iex> NaiveDateTime.from_iso8601!("2015-01-23P23:50:07")
    ** (ArgumentError) cannot parse "2015-01-23P23:50:07" as naive datetime, reason: :invalid_format

"""
@spec from_iso8601!(String.t) :: t | no_return
def from_iso8601!(string) do
  # Delegate to the non-raising variant and turn its error tuple into an
  # ArgumentError carrying the offending input and the failure reason.
  case from_iso8601(string) do
    {:ok, naive_datetime} ->
      naive_datetime

    {:error, reason} ->
      raise ArgumentError,
            "cannot parse #{inspect string} as naive datetime, reason: #{inspect reason}"
  end
end
@doc """
Converts the given naive datetime to an
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601) string.

Only naive datetimes using the ISO calendar are supported; converting
naive datetimes from other calendars will raise.

### Examples

    iex> NaiveDateTime.to_iso8601(~N[2000-02-28 23:00:13])
    "2000-02-28T23:00:13"
    iex> NaiveDateTime.to_iso8601(~N[2000-02-28 23:00:13.001])
    "2000-02-28T23:00:13.001"

This function can also be used to convert a DateTime to ISO8601 without
the time zone information:

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> NaiveDateTime.to_iso8601(dt)
    "2000-02-29T23:00:07"

"""
@spec to_iso8601(Calendar.naive_datetime) :: String.t
# Accepts any map carrying the naive-datetime fields (so a DateTime works
# too); the actual formatting is delegated to the ISO calendar module.
def to_iso8601(%{microsecond: microsecond, second: second, minute: minute,
                 hour: hour, day: day, month: month, year: year}) do
  Calendar.ISO.naive_datetime_to_iso8601(year, month, day, hour, minute, second, microsecond)
end
@doc """
Converts a `NaiveDateTime` struct to an Erlang datetime tuple.

Only naive datetimes using the ISO calendar are supported; converting
naive datetimes from other calendars will raise.

WARNING: Loss of precision may occur, as Erlang time tuples only store
hour/minute/second.

## Examples

    iex> NaiveDateTime.to_erl(~N[2000-01-01 13:30:15])
    {{2000, 1, 1}, {13, 30, 15}}

This function can also be used to convert a DateTime to a erl format
without the time zone information:

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> NaiveDateTime.to_erl(dt)
    {{2000, 2, 29}, {23, 00, 07}}

"""
@spec to_erl(t) :: :calendar.datetime
# The `calendar: Calendar.ISO` match makes any non-ISO input fail with a
# FunctionClauseError; the microsecond component is intentionally dropped.
def to_erl(%{calendar: Calendar.ISO, second: second, minute: minute, hour: hour,
             day: day, month: month, year: year}) do
  {{year, month, day}, {hour, minute, second}}
end
@doc """
Converts an Erlang datetime tuple to a `NaiveDateTime` struct.

Attempting to convert an invalid ISO calendar date will produce an error tuple.

## Examples

    iex> NaiveDateTime.from_erl({{2000, 1, 1}, {13, 30, 15}})
    {:ok, ~N[2000-01-01 13:30:15]}
    iex> NaiveDateTime.from_erl({{2000, 1, 1}, {13, 30, 15}}, {5000, 3})
    {:ok, ~N[2000-01-01 13:30:15.005]}
    iex> NaiveDateTime.from_erl({{2000, 13, 1}, {13, 30, 15}})
    {:error, :invalid_date}

"""
@spec from_erl(:calendar.datetime, Calendar.microsecond) :: {:ok, t} | {:error, atom}
# Erlang tuples carry no sub-second precision, so the microsecond is taken
# as a separate argument (defaulting to none). Range validation is done by
# new/7, which produces the {:error, ...} tuples shown above.
def from_erl({{year, month, day}, {hour, minute, second}}, microsecond \\ {0, 0}) do
  new(year, month, day, hour, minute, second, microsecond)
end
@doc """
Converts an Erlang datetime tuple to a `NaiveDateTime` struct.

Raises if the datetime is invalid.
Attempting to convert an invalid ISO calendar date will raise an `ArgumentError`.

## Examples

    iex> NaiveDateTime.from_erl!({{2000, 1, 1}, {13, 30, 15}})
    ~N[2000-01-01 13:30:15]
    iex> NaiveDateTime.from_erl!({{2000, 1, 1}, {13, 30, 15}}, {5000, 3})
    ~N[2000-01-01 13:30:15.005]
    iex> NaiveDateTime.from_erl!({{2000, 13, 1}, {13, 30, 15}})
    ** (ArgumentError) cannot convert {{2000, 13, 1}, {13, 30, 15}} to naive datetime, reason: :invalid_date

"""
@spec from_erl!(:calendar.datetime, Calendar.microsecond) :: t | no_return
# Delegates to from_erl/2 and raises on any error it reports.
def from_erl!(tuple, microsecond \\ {0, 0}) do
  case from_erl(tuple, microsecond) do
    {:ok, value} ->
      value
    {:error, reason} ->
      raise ArgumentError, "cannot convert #{inspect tuple} to naive datetime, reason: #{inspect reason}"
  end
end
@doc """
Compares two `NaiveDateTime` structs.

Returns `:gt` if first is later than the second
and `:lt` for vice versa. If the two NaiveDateTime
are equal `:eq` is returned

## Examples

    iex> NaiveDateTime.compare(~N[2016-04-16 13:30:15], ~N[2016-04-28 16:19:25])
    :lt
    iex> NaiveDateTime.compare(~N[2016-04-16 13:30:15.1], ~N[2016-04-16 13:30:15.01])
    :gt

This function can also be used to compare a DateTime without
the time zone information:

    iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
    ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
    ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
    iex> NaiveDateTime.compare(dt, ~N[2000-02-29 23:00:07])
    :eq
    iex> NaiveDateTime.compare(dt, ~N[2000-01-29 23:00:07])
    :gt
    iex> NaiveDateTime.compare(dt, ~N[2000-03-29 23:00:07])
    :lt

"""
@spec compare(Calendar.naive_datetime, Calendar.naive_datetime) :: :lt | :eq | :gt
def compare(naive_datetime1, naive_datetime2) do
  # to_tuple/1 orders the fields from most significant (year) to least
  # (microsecond), so plain Erlang term comparison yields chronological
  # ordering.
  left = to_tuple(naive_datetime1)
  right = to_tuple(naive_datetime2)

  cond do
    left > right -> :gt
    left < right -> :lt
    true -> :eq
  end
end
## Helpers

# Folds an ISO naive datetime into a single integer: the number of
# microseconds since year 0 of the proleptic Gregorian calendar (as
# counted by :calendar.datetime_to_gregorian_seconds/1).
defp to_microsecond(%{calendar: Calendar.ISO, year: year, month: month, day: day,
                      hour: hour, minute: minute, second: second,
                      microsecond: {microsecond, _precision}}) do
  gregorian_seconds =
    :calendar.datetime_to_gregorian_seconds({{year, month, day}, {hour, minute, second}})

  gregorian_seconds * 1_000_000 + microsecond
end
# Normalizes an ISO naive datetime into a 7-element tuple ordered from the
# most significant field (year) down to microsecond; the precision
# component of the microsecond pair is discarded.
defp to_tuple(%{calendar: Calendar.ISO, year: year, month: month, day: day,
                hour: hour, minute: minute, second: second, microsecond: {microsecond, _precision}}) do
  {year, month, day, hour, minute, second, microsecond}
end
defimpl String.Chars do
  # Delegates rendering to the struct's own calendar so that calendars
  # other than Calendar.ISO control their textual representation.
  def to_string(%{calendar: calendar, year: year, month: month, day: day,
                  hour: hour, minute: minute, second: second, microsecond: microsecond}) do
    calendar.naive_datetime_to_string(year, month, day, hour, minute, second, microsecond)
  end
end
defimpl Inspect do
  # ISO naive datetimes inspect as the ~N sigil form; any other calendar
  # falls back to the default struct inspection.
  def inspect(%{calendar: Calendar.ISO, year: year, month: month, day: day,
                hour: hour, minute: minute, second: second, microsecond: microsecond}, _) do
    formatted = Calendar.ISO.naive_datetime_to_string(year, month, day, hour, minute, second, microsecond)
    "~N[" <> formatted <> "]"
  end

  def inspect(naive, opts) do
    Inspect.Any.inspect(naive, opts)
  end
end
end
defmodule DateTime do
  @moduledoc """
  A datetime implementation with a time zone.

  This datetime can be seen as an ephemeral snapshot
  of a datetime at a given time zone. For such purposes,
  it also includes both UTC and Standard offsets, as
  well as the zone abbreviation field used exclusively
  for formatting purposes.

  Developers should avoid creating the DateTime struct directly
  and instead rely on the functions provided by this module as
  well as the ones in 3rd party calendar libraries.

  ## Where are my functions?

  You will notice this module only contains conversion
  functions as well as functions that work on UTC. This
  is because a proper DateTime implementation requires a
  TimeZone database which currently is not provided as part
  of Elixir.

  Such may be addressed in upcoming versions, meanwhile,
  use 3rd party packages to provide DateTime building and
  similar functionality with time zone backing.
  """

  @enforce_keys [:year, :month, :day, :hour, :minute, :second,
                 :time_zone, :zone_abbr, :utc_offset, :std_offset]
  defstruct [:year, :month, :day, :hour, :minute, :second, :time_zone,
             :zone_abbr, :utc_offset, :std_offset, microsecond: {0, 0}, calendar: Calendar.ISO]

  @type t :: %__MODULE__{year: Calendar.year, month: Calendar.month, day: Calendar.day,
                         calendar: Calendar.calendar, hour: Calendar.hour, minute: Calendar.minute,
                         second: Calendar.second, microsecond: Calendar.microsecond,
                         time_zone: Calendar.time_zone, zone_abbr: Calendar.zone_abbr,
                         utc_offset: Calendar.utc_offset, std_offset: Calendar.std_offset}

  # Gregorian seconds at 1970-01-01T00:00:00Z, used to translate between
  # gregorian seconds and the Unix epoch.
  @unix_epoch :calendar.datetime_to_gregorian_seconds {{1970, 1, 1}, {0, 0, 0}}

  @doc """
  Returns the current datetime in UTC.

  ## Examples

      iex> datetime = DateTime.utc_now()
      iex> datetime.time_zone
      "Etc/UTC"

  """
  @spec utc_now() :: DateTime.t
  def utc_now() do
    # :os.system_time/0 reports in the VM's :native unit, so convert
    # through from_unix!/2 with that unit.
    :os.system_time |> from_unix!(:native)
  end

  @doc """
  Converts the given Unix time to DateTime.

  The integer can be given in different unit
  according to `System.convert_time_unit/3` and it will
  be converted to microseconds internally.

  Unix times are always in UTC and therefore the DateTime
  will be returned in UTC.

  ## Examples

      iex> DateTime.from_unix(1464096368)
      {:ok, %DateTime{calendar: Calendar.ISO, day: 24, hour: 13, microsecond: {0, 0}, minute: 26,
                      month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                      year: 2016, zone_abbr: "UTC"}}

      iex> DateTime.from_unix(1432560368868569, :microsecond)
      {:ok, %DateTime{calendar: Calendar.ISO, day: 25, hour: 13, microsecond: {868569, 6}, minute: 26,
                      month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                      year: 2015, zone_abbr: "UTC"}}

  The unit can also be an integer as in `t:System.time_unit/0`:

      iex> DateTime.from_unix(1432560368868569, 1024)
      {:ok, %DateTime{calendar: Calendar.ISO, day: 23, hour: 22, microsecond: {211914, 3}, minute: 53,
                      month: 1, second: 43, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                      year: 46302, zone_abbr: "UTC"}}

  Negative Unix times are supported, up to -#{@unix_epoch} seconds,
  which is equivalent to "0000-01-01T00:00:00Z" or 0 gregorian seconds.

      iex> DateTime.from_unix(-12345678910)
      {:ok, %DateTime{calendar: Calendar.ISO, day: 13, hour: 4, microsecond: {0, 0}, minute: 44,
                      month: 10, second: 50, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                      year: 1578, zone_abbr: "UTC"}}

  When a Unix time before that moment is passed to `from_unix/2`, `:error` will be returned.
  """
  @spec from_unix(integer, :native | System.time_unit) :: {:ok, DateTime.t} | {:error, atom}
  def from_unix(integer, unit \\ :second) when is_integer(integer) do
    case Calendar.ISO.from_unix(integer, unit) do
      {:ok, {year, month, day}, {hour, minute, second}, microsecond} ->
        {:ok, %DateTime{year: year, month: month, day: day,
                        hour: hour, minute: minute, second: second, microsecond: microsecond,
                        std_offset: 0, utc_offset: 0, zone_abbr: "UTC", time_zone: "Etc/UTC"}}
      {:error, _} = error ->
        error
    end
  end

  @doc """
  Converts the given Unix time to DateTime.

  The integer can be given in different unit
  according to `System.convert_time_unit/3` and it will
  be converted to microseconds internally.

  Unix times are always in UTC and therefore the DateTime
  will be returned in UTC.

  ## Examples

      iex> DateTime.from_unix!(1464096368)
      %DateTime{calendar: Calendar.ISO, day: 24, hour: 13, microsecond: {0, 0}, minute: 26,
                month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                year: 2016, zone_abbr: "UTC"}

      iex> DateTime.from_unix!(1432560368868569, :microsecond)
      %DateTime{calendar: Calendar.ISO, day: 25, hour: 13, microsecond: {868569, 6}, minute: 26,
                month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                year: 2015, zone_abbr: "UTC"}

  Negative Unix times are supported, up to -#{@unix_epoch} seconds,
  which is equivalent to "0000-01-01T00:00:00Z" or 0 gregorian seconds.

      iex> DateTime.from_unix!(-12345678910)
      %DateTime{calendar: Calendar.ISO, day: 13, hour: 4, microsecond: {0, 0}, minute: 44,
                month: 10, second: 50, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                year: 1578, zone_abbr: "UTC"}

  When a Unix time before that moment is passed to `from_unix!/2`, an ArgumentError will be raised.
  """
  # Fixed: the spec said `non_neg_integer` although negative Unix times are
  # documented and supported, and the guard checked `is_atom(unit)` which
  # rejected the integer units that from_unix/2 and the spec allow.
  @spec from_unix!(integer, :native | System.time_unit) :: DateTime.t
  def from_unix!(integer, unit \\ :second) when is_integer(integer) do
    case from_unix(integer, unit) do
      {:ok, datetime} ->
        datetime
      {:error, :invalid_unix_time} ->
        raise ArgumentError, "invalid Unix time #{integer}"
    end
  end

  @doc """
  Converts the given NaiveDateTime to DateTime.

  It expects a time zone to put the NaiveDateTime in.
  Currently it only supports "Etc/UTC" as time zone.

  ## Examples

      iex> DateTime.from_naive(~N[2016-05-24 13:26:08.003], "Etc/UTC")
      {:ok, %DateTime{calendar: Calendar.ISO, day: 24, hour: 13, microsecond: {3000, 3}, minute: 26,
                      month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                      year: 2016, zone_abbr: "UTC"}}

  """
  @spec from_naive(NaiveDateTime.t, Calendar.time_zone) :: {:ok, DateTime.t}
  def from_naive(naive_datetime, time_zone)

  def from_naive(%NaiveDateTime{hour: hour, minute: minute, second: second, microsecond: microsecond,
                                year: year, month: month, day: day}, "Etc/UTC") do
    {:ok, %DateTime{year: year, month: month, day: day,
                    hour: hour, minute: minute, second: second, microsecond: microsecond,
                    std_offset: 0, utc_offset: 0, zone_abbr: "UTC", time_zone: "Etc/UTC"}}
  end

  @doc """
  Converts the given NaiveDateTime to DateTime.

  It expects a time zone to put the NaiveDateTime in.
  Currently it only supports "Etc/UTC" as time zone.

  ## Examples

      iex> DateTime.from_naive!(~N[2016-05-24 13:26:08.003], "Etc/UTC")
      %DateTime{calendar: Calendar.ISO, day: 24, hour: 13, microsecond: {3000, 3}, minute: 26,
                month: 5, second: 8, std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0,
                year: 2016, zone_abbr: "UTC"}

  """
  # Fixed: the spec was copied from from_unix!/2 and described the wrong
  # argument types; this function takes a NaiveDateTime and a time zone.
  @spec from_naive!(NaiveDateTime.t, Calendar.time_zone) :: DateTime.t
  def from_naive!(naive_datetime, time_zone) do
    case from_naive(naive_datetime, time_zone) do
      {:ok, datetime} ->
        datetime
      {:error, reason} ->
        raise ArgumentError, "cannot parse #{inspect naive_datetime} to datetime, reason: #{inspect reason}"
    end
  end

  @doc """
  Converts the given DateTime to Unix time.

  The DateTime is expected to be using the ISO calendar
  with a year greater than or equal to 0.

  It will return the integer with the given unit,
  according to `System.convert_time_unit/3`.

  ## Examples

      iex> 1464096368 |> DateTime.from_unix!() |> DateTime.to_unix()
      1464096368

      iex> dt = %DateTime{calendar: Calendar.ISO, day: 20, hour: 18, microsecond: {273806, 6},
      ...>                minute: 58, month: 11, second: 19, time_zone: "America/Montevideo",
      ...>                utc_offset: -10800, std_offset: 3600, year: 2014, zone_abbr: "UYST"}
      iex> DateTime.to_unix(dt)
      1416517099

      iex> flamel = %DateTime{calendar: Calendar.ISO, day: 22, hour: 8, microsecond: {527771, 6},
      ...>                    minute: 2, month: 3, second: 25, std_offset: 0, time_zone: "Etc/UTC",
      ...>                    utc_offset: 0, year: 1418, zone_abbr: "UTC"}
      iex> DateTime.to_unix(flamel)
      -17412508655

  """
  # Fixed: the return type was `non_neg_integer`, but datetimes before the
  # Unix epoch yield negative results (see the 1418 example above).
  @spec to_unix(DateTime.t, System.time_unit) :: integer
  def to_unix(datetime, unit \\ :second)

  def to_unix(%DateTime{calendar: Calendar.ISO, std_offset: std_offset, utc_offset: utc_offset,
                        hour: hour, minute: minute, second: second, microsecond: {microsecond, _},
                        year: year, month: month, day: day}, unit) when year >= 0 do
    # Subtracting both offsets converts the wall-clock reading back to UTC
    # before anchoring it to the Unix epoch.
    seconds =
      :calendar.datetime_to_gregorian_seconds({{year, month, day}, {hour, minute, second}})
      |> Kernel.-(utc_offset)
      |> Kernel.-(std_offset)

    System.convert_time_unit((seconds - @unix_epoch) * 1_000_000 + microsecond, :microsecond, unit)
  end

  @doc """
  Converts a `DateTime` into a `NaiveDateTime`.

  Because `NaiveDateTime` does not hold time zone information,
  any time zone related data will be lost during the conversion.

  ## Examples

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 1},
      ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
      iex> DateTime.to_naive(dt)
      ~N[2000-02-29 23:00:07.0]

  """
  @spec to_naive(DateTime.t) :: NaiveDateTime.t
  def to_naive(%DateTime{year: year, month: month, day: day, calendar: calendar,
                         hour: hour, minute: minute, second: second, microsecond: microsecond}) do
    %NaiveDateTime{year: year, month: month, day: day, calendar: calendar,
                   hour: hour, minute: minute, second: second, microsecond: microsecond}
  end

  @doc """
  Converts a `DateTime` into a `Date`.

  Because `Date` does not hold time nor time zone information,
  data will be lost during the conversion.

  ## Examples

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
      iex> DateTime.to_date(dt)
      ~D[2000-02-29]

  """
  @spec to_date(DateTime.t) :: Date.t
  def to_date(%DateTime{year: year, month: month, day: day, calendar: calendar}) do
    %Date{year: year, month: month, day: day, calendar: calendar}
  end

  @doc """
  Converts a `DateTime` into `Time`.

  Because `Time` does not hold date nor time zone information,
  data will be lost during the conversion.

  ## Examples

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 1},
      ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
      iex> DateTime.to_time(dt)
      ~T[23:00:07.0]

  """
  @spec to_time(DateTime.t) :: Time.t
  def to_time(%DateTime{hour: hour, minute: minute, second: second, microsecond: microsecond}) do
    %Time{hour: hour, minute: minute, second: second, microsecond: microsecond}
  end

  @doc """
  Converts the given datetime to
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601) format.

  Only supports converting datetimes which are in the ISO calendar,
  attempting to convert datetimes from other calendars will raise.

  WARNING: the ISO 8601 datetime format does not contain the time zone nor
  its abbreviation, which means information is lost when converting to such
  format. This is also why this module does not provide a `from_iso8601/1`
  function, as it is impossible to build a proper `DateTime` from only the
  information in the ISO 8601 string.

  ### Examples

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
      iex> DateTime.to_iso8601(dt)
      "2000-02-29T23:00:07+01:00"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
      iex> DateTime.to_iso8601(dt)
      "2000-02-29T23:00:07Z"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
      iex> DateTime.to_iso8601(dt)
      "2000-02-29T23:00:07-04:00"

  """
  @spec to_iso8601(DateTime.t) :: String.t
  def to_iso8601(%{calendar: Calendar.ISO, year: year, month: month, day: day,
                   hour: hour, minute: minute, second: second, microsecond: microsecond,
                   time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset}) do
    Calendar.ISO.datetime_to_iso8601(year, month, day, hour, minute, second, microsecond,
                                     time_zone, zone_abbr, utc_offset, std_offset)
  end

  @doc """
  Converts the given datetime to a string according to its calendar.

  ### Examples

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
      iex> DateTime.to_string(dt)
      "2000-02-29 23:00:07+01:00 CET Europe/Warsaw"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
      iex> DateTime.to_string(dt)
      "2000-02-29 23:00:07Z"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
      iex> DateTime.to_string(dt)
      "2000-02-29 23:00:07-04:00 AMT America/Manaus"

  """
  @spec to_string(DateTime.t) :: String.t
  def to_string(%{calendar: calendar, year: year, month: month, day: day,
                  hour: hour, minute: minute, second: second, microsecond: microsecond,
                  time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset}) do
    calendar.datetime_to_string(year, month, day, hour, minute, second, microsecond,
                                time_zone, zone_abbr, utc_offset, std_offset)
  end

  defimpl String.Chars do
    # Mirrors DateTime.to_string/1: delegate to the struct's calendar.
    def to_string(%{calendar: calendar, year: year, month: month, day: day,
                    hour: hour, minute: minute, second: second, microsecond: microsecond,
                    time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset}) do
      calendar.datetime_to_string(year, month, day, hour, minute, second, microsecond,
                                  time_zone, zone_abbr, utc_offset, std_offset)
    end
  end

  @doc """
  Compares two `DateTime` structs.

  Returns `:gt` if first datetime is later than the second
  and `:lt` for vice versa. If the two datetimes are equal
  `:eq` is returned.

  Note that both utc and std offsets will be taken into
  account when comparison is done.

  ## Examples

      iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
      ...>                 hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                 utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
      iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
      ...>                 hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                 utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
      iex> DateTime.compare(dt1, dt2)
      :gt

  """
  # Comparison goes through to_unix/2, which normalizes both offsets, so
  # datetimes in different zones compare by their absolute instant. Note
  # that to_unix/2 only accepts ISO datetimes with year >= 0.
  @spec compare(DateTime.t, DateTime.t) :: :lt | :eq | :gt
  def compare(%DateTime{} = datetime1, %DateTime{} = datetime2) do
    case {to_unix(datetime1, :microsecond), to_unix(datetime2, :microsecond)} do
      {first, second} when first > second -> :gt
      {first, second} when first < second -> :lt
      _ -> :eq
    end
  end
end
| 35.090429 | 115 | 0.633695 |
7393ebf424e96f2aa57139fa1c196bc6e573bd96 | 1,177 | exs | Elixir | config/deploy.exs | sunnys/dbfs | bf0e01b969d1107d12b8b7c0130dad17187b027b | [
"MIT"
] | 57 | 2018-01-22T22:42:16.000Z | 2022-02-18T05:20:25.000Z | config/deploy.exs | sunnys/dbfs | bf0e01b969d1107d12b8b7c0130dad17187b027b | [
"MIT"
] | 2 | 2018-05-14T20:50:41.000Z | 2021-12-12T18:15:59.000Z | config/deploy.exs | sunnys/dbfs | bf0e01b969d1107d12b8b7c0130dad17187b027b | [
"MIT"
] | 15 | 2017-11-11T04:25:04.000Z | 2022-03-07T04:55:19.000Z | use Bootleg.Config
# Configure the following roles to match your environment.
# `build` defines what remote server your distillery release should be built on.
#
# Some available options are:
# - `user`: ssh username to use for SSH authentication to the role's hosts
# - `password`: password to be used for SSH authentication
# - `identity`: local path to an identity file that will be used for SSH authentication instead of a password
# - `workspace`: remote file system path to be used for building and deploying this Elixir project

# NOTE(security): a root password is hard-coded below and committed to the
# repository. Prefer the `identity:` option with an SSH key, or read the
# credential from the environment at deploy time, and rotate this password
# if it was ever used on a real host.
role :build, "dbfs.newyork", workspace: "/tmp/bootleg/build", user: "root", password: "87654321"

# Phoenix has some extra build steps such as asset digesting that need to be done during
# compilation. To have bootleg handle that for you, include the additional package
# `bootleg_phoenix` to your `deps` list. This will automatically perform the additional steps
# required for building phoenix releases.
#
# ```
# # mix.exs
# def deps do
#   [{:distillery, "~> 1.5"},
#    {:bootleg, "~> 0.6"},
#    {:bootleg_phoenix, "~> 0.2"}]
# end
# ```
| 37.967742 | 110 | 0.722175 |
7393fdf5407c509171014a2775d5996905a25900 | 10,496 | exs | Elixir | test/metrics_test.exs | wojtekmach/telemetry_metrics_prometheus_core | d5ecf3d2964b1a25827b3058c55202123263e690 | [
"Apache-2.0"
] | null | null | null | test/metrics_test.exs | wojtekmach/telemetry_metrics_prometheus_core | d5ecf3d2964b1a25827b3058c55202123263e690 | [
"Apache-2.0"
] | 1 | 2019-07-04T14:50:29.000Z | 2019-07-04T14:50:29.000Z | test/metrics_test.exs | Gazler/telemetry_metrics_prometheus_core | 6a08e81d2a0f897a3e2c050d0399c2cb9a94ab93 | [
"Apache-2.0"
] | 1 | 2020-03-30T18:50:32.000Z | 2020-03-30T18:50:32.000Z | defmodule TelemetryMetricsPrometheus.Core.MetricsTest do
use ExUnit.Case
alias Telemetry.Metrics
alias TelemetryMetricsPrometheus.Core.{Counter, Distribution, LastValue, Sum}
import ExUnit.CaptureLog
# Creates two fresh ETS tables for every test: a :set table for point
# metrics (counter/gauge/sum) and a :duplicate_bag table so the
# distribution table can keep many samples under the same key. Both are
# :named_table and :public so the telemetry handler processes can write
# to them.
setup do
  tid = :ets.new(:test_table, [:named_table, :public, :set, {:write_concurrency, true}])

  dist_tid =
    :ets.new(:test_dist_table, [
      :named_table,
      :public,
      :duplicate_bag,
      {:write_concurrency, true}
    ])

  %{tid: tid, dist_tid: dist_tid}
end
describe "counter" do
  # for metric <- [Counter, Distribution, LastValue, Sum]
  test "registers a handler", %{tid: tid} do
    metric =
      Metrics.counter("http.request.count",
        description: "HTTP Request Count",
        unit: :each,
        tags: [:status]
      )

    # Registration must attach a :telemetry handler under the returned id.
    {:ok, handler_id} = Counter.register(metric, tid, self())
    handlers = :telemetry.list_handlers([])

    assert Enum.any?(handlers, &match?(^handler_id, &1.id))

    cleanup(tid)
  end

  test "records a times series for each tag kv pair", %{tid: tid} do
    metric =
      Metrics.counter("http.request.count",
        description: "HTTP Request Count",
        unit: :each,
        tags: [:method, :status]
      )

    {:ok, _handler_id} = Counter.register(metric, tid, self())

    # Two events share the {GET, 200} tag set; the third lands in a
    # separate series keyed by {GET, 404}.
    :telemetry.execute([:http, :request], %{latency: 17}, %{method: "GET", status: 200})
    :telemetry.execute([:http, :request], %{latency: 20}, %{method: "GET", status: 200})
    :telemetry.execute([:http, :request], %{latency: 22}, %{method: "GET", status: 404})

    [t1] = :ets.lookup(tid, {metric.name, %{method: "GET", status: 200}})
    [t2] = :ets.lookup(tid, {metric.name, %{method: "GET", status: 404}})

    assert elem(t1, 1) == 2
    assert elem(t2, 1) == 1

    cleanup(tid)
  end
end
describe "gauge" do
  # for metric <- [Counter, Distribution, LastValue, Sum]
  test "registers a handler", %{tid: tid} do
    metric =
      Metrics.last_value("vm.memory.total",
        description: "BEAM VM memory",
        unit: :bytes,
        tags: []
      )

    {:ok, handler_id} = LastValue.register(metric, tid, self())
    handlers = :telemetry.list_handlers([])

    assert Enum.any?(handlers, &match?(^handler_id, &1.id))

    cleanup(tid)
  end

  test "records a times series for each tag kv pair", %{tid: tid} do
    metric =
      Metrics.last_value("vm.memory.total",
        description: "BEAM VM memory",
        unit: :bytes,
        tags: [:some_tag]
      )

    {:ok, _handler_id} = LastValue.register(metric, tid, self())

    # A last_value series keeps only the latest measurement per tag set,
    # so the third event overwrites the first (same tag "a").
    :telemetry.execute([:vm, :memory], %{total: 200_000, system: 1_000}, %{some_tag: "a"})
    [t1] = :ets.lookup(tid, {metric.name, %{some_tag: "a"}})

    :telemetry.execute([:vm, :memory], %{total: 190_000, system: 998}, %{some_tag: "b"})
    [t2] = :ets.lookup(tid, {metric.name, %{some_tag: "b"}})

    :telemetry.execute([:vm, :memory], %{total: 210_000, system: 1_100}, %{some_tag: "a"})
    [t3] = :ets.lookup(tid, {metric.name, %{some_tag: "a"}})

    assert elem(t1, 1) == 200_000
    assert elem(t2, 1) == 190_000
    assert elem(t3, 1) == 210_000

    cleanup(tid)
  end
end
describe "sum" do
  # for metric <- [Counter, Distribution, LastValue, Sum]
  test "registers a handler", %{tid: tid} do
    metric =
      Metrics.sum("cache.invalidation.total",
        description: "Total cache invalidations",
        measurement: :count,
        unit: :each,
        tags: [:name]
      )

    {:ok, handler_id} = Sum.register(metric, tid, self())
    handlers = :telemetry.list_handlers([])

    assert Enum.any?(handlers, &match?(^handler_id, &1.id))

    cleanup(tid)
  end

  test "records a times series for each tag kv pair", %{tid: tid} do
    metric =
      Metrics.sum("cache.invalidation.total",
        description: "Total cache invalidations",
        measurement: :count,
        unit: :each,
        tags: [:name]
      )

    {:ok, _handler_id} = Sum.register(metric, tid, self())

    # A sum series accumulates measurements per tag set: the "users"
    # series ends at 23 + 5 = 28 while "clients" stays at 3.
    :telemetry.execute([:cache, :invalidation], %{count: 23}, %{name: "users"})
    [t1] = :ets.lookup(tid, {metric.name, %{name: "users"}})

    :telemetry.execute([:cache, :invalidation], %{count: 3}, %{name: "clients"})
    [t2] = :ets.lookup(tid, {metric.name, %{name: "clients"}})

    :telemetry.execute([:cache, :invalidation], %{count: 5}, %{name: "users"})
    [t3] = :ets.lookup(tid, {metric.name, %{name: "users"}})

    assert elem(t1, 1) == 23
    assert elem(t2, 1) == 3
    assert elem(t3, 1) == 28

    cleanup(tid)
  end
end
describe "histogram" do
  # for metric <- [Counter, Distribution, LastValue, Sum]
  test "registers a handler", %{dist_tid: tid} do
    metric =
      Metrics.distribution("some.plug.call.duration",
        buckets: [
          0.005,
          0.01,
          0.025,
          0.05,
          0.075,
          0.1,
          0.15,
          0.2,
          0.3,
          0.5,
          1,
          2.5,
          5.0,
          7.5,
          10.0
        ],
        description: "Request length",
        event_name: [:some, :plug, :call, :stop],
        measurement: :duration,
        unit: {:native, :second},
        tags: [:method, :path_root],
        tag_values: fn %{conn: conn} ->
          %{
            method: conn.method,
            path_root: List.first(conn.path_info) || ""
          }
        end
      )

    {:ok, handler_id} = Distribution.register(metric, tid, self())
    handlers = :telemetry.list_handlers([])

    assert Enum.any?(handlers, &match?(^handler_id, &1.id))

    cleanup(tid)
  end

  test "records a times series for each tag kv pair", %{dist_tid: tid} do
    buckets = [
      0.005,
      0.01,
      0.025,
      0.05,
      0.075,
      0.1,
      0.15,
      0.2,
      0.3,
      0.5,
      1,
      2.5,
      5.0,
      7.5,
      10.0
    ]

    metric =
      Metrics.distribution("some.plug.call.duration",
        buckets: buckets,
        description: "Plug call duration",
        event_name: [:some, :plug, :call, :stop],
        measurement: :duration,
        unit: {:native, :second},
        tags: [:method, :path_root],
        tag_values: fn %{conn: conn} ->
          %{
            method: conn.method,
            path_root: List.first(conn.path_info) || ""
          }
        end
      )

    {:ok, _handler_id} = Distribution.register(metric, tid, self())

    # Durations are given in :native units; the registered unit converts
    # them to seconds (e.g. 5.6e7 native -> 0.056 below).
    :telemetry.execute([:some, :plug, :call, :stop], %{duration: 5.6e7}, %{
      conn: %{method: "GET", path_info: ["users", "123"]}
    })

    :telemetry.execute([:some, :plug, :call, :stop], %{duration: 1.1e8}, %{
      conn: %{method: "POST", path_info: ["products", "238"]}
    })

    :telemetry.execute([:some, :plug, :call, :stop], %{duration: 8.7e7}, %{
      conn: %{method: "GET", path_info: ["users", "123"]}
    })

    # , %{method: "GET", path_root: "users"}}
    # All samples share the metric-name key; the duplicate_bag table
    # keeps one row per observed sample.
    key_1 = metric.name
    samples = :ets.lookup(tid, key_1)

    assert length(samples) == 3

    assert hd(samples) ==
             {[:some, :plug, :call, :duration], {%{method: "GET", path_root: "users"}, 0.056}}

    cleanup(tid)
  end
end
describe "error handling" do
  # Each test drives every metric type through the same bad input and
  # asserts that the handler logs the problem and detaches itself.
  test "detaches handler for missing measurement", %{tid: tid, dist_tid: dist_tid} do
    [
      {Metrics.last_value("test.event.measure",
         measurement: :nonexistent
       ), LastValue, tid},
      {Metrics.sum("test.event.measure",
         measurement: :nonexistent
       ), Sum, tid},
      {Metrics.distribution("test.event.measure",
         buckets: [1, 2, 3],
         measurement: :nonexistent
       ), Distribution, dist_tid}
    ]
    |> Enum.each(fn {metric, module, table} ->
      {:ok, _handler_id} = apply(module, :register, [metric, table, self()])

      assert capture_log(fn ->
               :telemetry.execute(metric.event_name, %{measure: 1})
             end) =~ "Measurement not found"

      # The faulty handler must have removed itself.
      assert :telemetry.list_handlers(metric.event_name) == []
    end)
  end

  test "detaches handler for non-numeric measurement", %{tid: tid, dist_tid: dist_tid} do
    [
      {Metrics.last_value("test.event.measure",
         measurement: :measure
       ), LastValue, tid},
      {Metrics.sum("test.event.measure",
         measurement: :measure
       ), Sum, tid},
      {Metrics.distribution("test.event.measure",
         buckets: [1, 2, 3],
         measurement: :measure
       ), Distribution, dist_tid}
    ]
    |> Enum.each(fn {metric, module, table} ->
      {:ok, _handler_id} = apply(module, :register, [metric, table, self()])

      assert capture_log(fn ->
               :telemetry.execute(metric.event_name, %{measure: "a"})
             end) =~ "Expected measurement to be a number"

      assert :telemetry.list_handlers(metric.event_name) == []
    end)
  end

  # Counter is included here (unlike above) because it has no measurement
  # to get wrong, but it does resolve tags.
  test "detaches handler for missing tags", %{tid: tid, dist_tid: dist_tid} do
    [
      {Metrics.counter("test.event.measure",
         measurement: :measure,
         tags: [:missing_tag]
       ), Counter, tid},
      {Metrics.last_value("test.event.measure",
         measurement: :measure,
         tags: [:missing_tag]
       ), LastValue, tid},
      {Metrics.sum("test.event.measure",
         measurement: :measure,
         tags: [:missing_tag]
       ), Sum, tid},
      {Metrics.distribution("test.event.measure",
         buckets: [1, 2, 3],
         measurement: :measure,
         tags: [:missing_tag]
       ), Distribution, dist_tid}
    ]
    |> Enum.each(fn {metric, module, table} ->
      {:ok, _handler_id} = apply(module, :register, [metric, table, self()])

      assert capture_log(fn ->
               :telemetry.execute(metric.event_name, %{measure: 1}, %{})
             end) =~ "Tags missing from tag_values"

      assert :telemetry.list_handlers(metric.event_name) == []
    end)
  end
end
def cleanup(tid) do
:ets.delete_all_objects(tid)
:telemetry.list_handlers([])
|> Enum.each(&:telemetry.detach(&1.id))
end
def fetch_metric(table_id, key) do
case :ets.lookup(table_id, key) do
[result] -> result
_ -> :error
end
end
end
| 29.988571 | 96 | 0.540873 |
73940236b2b595557efae280f1cc5d6debdc9a9e | 5,800 | exs | Elixir | test/CLI_test.exs | DiodonHystrix/prettier_eex_formatter | 954e0efc4cb4b4e4930f15f4bb57d55db2d301c4 | [
"MIT"
] | 2 | 2021-02-03T16:27:59.000Z | 2021-04-24T16:17:01.000Z | test/CLI_test.exs | DiodonHystrix/prettier_eex_formatter | 954e0efc4cb4b4e4930f15f4bb57d55db2d301c4 | [
"MIT"
] | 3 | 2020-06-01T10:47:51.000Z | 2020-06-06T09:46:41.000Z | test/CLI_test.exs | DiodonHystrix/prettier_eex_formatter | 954e0efc4cb4b4e4930f15f4bb57d55db2d301c4 | [
"MIT"
] | null | null | null | defmodule PrettierEexFormatter.CLITest do
use ExUnit.Case
import ExUnit.CaptureIO
import CLIHelper
  test "formats multiline expression" do
    # Unformatted input: a multi-argument call spread across several lines.
    bad = """
    link "óww",
    "ew",
    Routes.some_looooooooooooooooooonger_path(@conn, :create),
    "Ewe"
    """

    # Default formatting wraps the call in parentheses.
    assert_format bad, """
    link(
      "óww",
      "ew",
      Routes.some_looooooooooooooooooonger_path(@conn, :create),
      "Ewe"
    )
    """

    # A small :line_length forces the nested Routes call to break as well.
    assert_format bad,
                  """
                  link(
                    "óww",
                    "ew",
                    Routes.some_looooooooooooooooooonger_path(
                      @conn,
                      :create
                    ),
                    "Ewe"
                  )
                  """,
                  line_length: 40

    # With "link" in :no_parens the outer call keeps its paren-free style.
    assert_format bad,
                  """
                  link "óww",
                    "ew",
                    Routes.some_looooooooooooooooooonger_path(@conn, :create),
                    "Ewe"
                  """,
                  no_parens: ["link"]

    # :no_parens and :line_length combined.
    assert_format bad,
                  """
                  link "óww",
                    "ew",
                    Routes.some_looooooooooooooooooonger_path(
                      @conn,
                      :create
                    ),
                    "Ewe"
                  """,
                  no_parens: ["link"],
                  line_length: 40
  end
test "greets the world naa" do
bad = [
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(@conn, :create),
"Ewe" ,
fn(ef,fe)->
""",
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(@conn, :create),
"Ewe" ,
fn->
"""
]
assert_format bad,
[
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(@conn, :create),
"Ewe",
fn ef, fe ->
""",
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(@conn, :create),
"Ewe",
fn ->
"""
]
assert_format bad,
[
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(
@conn,
:create
),
"Ewe",
fn ef, fe ->
""",
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(
@conn,
:create
),
"Ewe",
fn ->
"""
],
line_length: 40
assert_format bad,
[
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(@conn, :create),
"Ewe",
fn ef, fe ->
""",
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(@conn, :create),
"Ewe",
fn ->
"""
],
no_parens: "link"
assert_format bad,
[
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(
@conn,
:create
),
"Ewe",
fn ef, fe ->
""",
"""
link "óww",
"ew",
Routes.some_looooooooooooooooooonger_path(
@conn,
:create
),
"Ewe",
fn ->
"""
],
no_parens: "link",
line_length: 40
end
  test "function call with do" do
    # A call whose trailing `do` begins a block, split across lines.
    bad = """
    link "óww",
    "ew",
    "Ewe"
    do
    """

    # The `do` is pulled up onto the last argument and parens stay off.
    assert_format bad,
                  """
                  link "óww",
                    "ew",
                    "Ewe" do
                  """

    # Same result when "link" is explicitly listed in :no_parens.
    assert_format bad,
                  """
                  link "óww",
                    "ew",
                    "Ewe" do
                  """,
                  no_parens: "link"
  end
test "no args exits with status 1" do
assert "Formatter was called without any arguments.\n" ==
capture_io(:stderr, fn ->
assert {:shutdown, 1} == catch_exit(PrettierEexFormatter.CLI.main([]))
end)
end
test "unknown flags exits with status 1" do
assert """
Couldn't process some of the flags from arguments:
-ww?
Permitted flags:
--line-length
--no-parens
""" ==
capture_io(:stderr, fn ->
assert {:shutdown, 1} == catch_exit(PrettierEexFormatter.CLI.main(["-ww?"]))
end)
end
end
| 27.358491 | 91 | 0.313276 |
7394092b7fc3bbbb01679219ffffb7c9c308e8d4 | 3,493 | ex | Elixir | lib/phoenix_react_playground/content/content.ex | Cherchercher/CommentBox | 844f2aa736da8a07a666b398eb5b581d3b83b8ef | [
"Unlicense"
] | null | null | null | lib/phoenix_react_playground/content/content.ex | Cherchercher/CommentBox | 844f2aa736da8a07a666b398eb5b581d3b83b8ef | [
"Unlicense"
] | 4 | 2020-07-17T10:56:38.000Z | 2021-09-01T07:27:44.000Z | lib/phoenix_react_playground/content/content.ex | Cherchercher/CommentBox | 844f2aa736da8a07a666b398eb5b581d3b83b8ef | [
"Unlicense"
] | null | null | null | defmodule PhoenixReactPlayground.Content do
@moduledoc """
The Content context.
"""
import Ecto.Query, warn: false
alias PhoenixReactPlayground.Repo
alias PhoenixReactPlayground.Content.Comment
alias PhoenixReactPlayground.Content.Topic
@doc """
Returns the list of comments.
## Examples
iex> list_comments()
[%Comment{}, ...]
"""
def list_comments() do
Repo.all(Comment)
end
@doc """
Gets a single comment.
Raises `Ecto.NoResultsError` if the Comment does not exist.
## Examples
iex> get_comment!(123)
%Comment{}
iex> get_comment!(456)
** (Ecto.NoResultsError)
"""
def get_comment!(id), do: Repo.get!(Comment, id)
@doc """
Creates a comment.
## Examples
iex> create_comment(%{field: value})
{:ok, %Comment{}}
iex> create_comment(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_comment(attrs \\ %{}) do
%Comment{}
|> Comment.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a comment.
## Examples
iex> update_comment(comment, %{field: new_value})
{:ok, %Comment{}}
iex> update_comment(comment, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_comment(%Comment{} = comment, attrs) do
comment
|> Comment.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Comment.
## Examples
iex> delete_comment(comment)
{:ok, %Comment{}}
iex> delete_comment(comment)
{:error, %Ecto.Changeset{}}
"""
def delete_comment(%Comment{} = comment) do
Repo.delete(comment)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking comment changes.
## Examples
iex> change_comment(comment)
%Ecto.Changeset{source: %Comment{}}
"""
def change_comment(%Comment{} = comment) do
Comment.changeset(comment, %{})
end
alias PhoenixReactPlayground.Content.Topic
@doc """
Returns the list of topics.
## Examples
iex> list_topics()
[%Topic{}, ...]
"""
def list_topics do
Repo.all(Topic)
end
@doc """
Gets a single topic.
Raises `Ecto.NoResultsError` if the Topic does not exist.
## Examples
iex> get_topic!(123)
%Topic{}
iex> get_topic!(456)
** (Ecto.NoResultsError)
"""
def get_topic!(id), do: Repo.get!(Topic, id)
@doc """
Creates a topic.
## Examples
iex> create_topic(%{field: value})
{:ok, %Topic{}}
iex> create_topic(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_topic(attrs \\ %{}) do
%Topic{}
|> Topic.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a topic.
## Examples
iex> update_topic(topic, %{field: new_value})
{:ok, %Topic{}}
iex> update_topic(topic, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_topic(%Topic{} = topic, attrs) do
topic
|> Topic.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Topic.
## Examples
iex> delete_topic(topic)
{:ok, %Topic{}}
iex> delete_topic(topic)
{:error, %Ecto.Changeset{}}
"""
def delete_topic(%Topic{} = topic) do
Repo.delete(topic)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking topic changes.
## Examples
iex> change_topic(topic)
%Ecto.Changeset{source: %Topic{}}
"""
def change_topic(%Topic{} = topic) do
Topic.changeset(topic, %{})
end
end
| 17.292079 | 62 | 0.595763 |
73941b581b6aa8c26f600c1a32ceaa0b6a19d4e3 | 705 | ex | Elixir | hippo-backend/lib/hippo_web/gettext.ex | Tmw/Hippo | 6b100f4eb43c74dffff38dd974a00266f3dc99a2 | [
"MIT"
] | 8 | 2019-05-18T10:14:06.000Z | 2021-07-31T20:58:55.000Z | hippo-backend/lib/hippo_web/gettext.ex | yvc74/Hippo | 4a1784c67bdbe073dafaf9aea66660d5b3c7ed5e | [
"MIT"
] | 7 | 2019-07-20T17:48:03.000Z | 2022-02-26T10:41:56.000Z | hippo-backend/lib/hippo_web/gettext.ex | yvc74/Hippo | 4a1784c67bdbe073dafaf9aea66660d5b3c7ed5e | [
"MIT"
] | 1 | 2019-07-04T02:30:09.000Z | 2019-07-04T02:30:09.000Z | defmodule HippoWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import HippoWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :hippo
end
| 28.2 | 72 | 0.675177 |
7394266429375f1633455a5674afa80d36dd63a7 | 1,700 | ex | Elixir | clients/classroom/lib/google_api/classroom/v1/model/link.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/classroom/lib/google_api/classroom/v1/model/link.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/classroom/lib/google_api/classroom/v1/model/link.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Classroom.V1.Model.Link do
  @moduledoc """
  URL item.

  ## Attributes

  *   `thumbnailUrl` (*type:* `String.t`, *default:* `nil`) - URL of a thumbnail image of the target URL. Read-only.
  *   `title` (*type:* `String.t`, *default:* `nil`) - Title of the target of the URL. Read-only.
  *   `url` (*type:* `String.t`, *default:* `nil`) - URL to link to. This must be a valid UTF-8 string containing between 1 and 2024 characters.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :thumbnailUrl => String.t(),
          :title => String.t(),
          :url => String.t()
        }

  # Field declarations mirror the attributes documented above; ModelBase
  # derives the struct and (de)serialization metadata from them.
  field(:thumbnailUrl)
  field(:title)
  field(:url)
end
# JSON decoding delegates to the generated decode/2 on the model module.
defimpl Poison.Decoder, for: GoogleApi.Classroom.V1.Model.Link do
  def decode(value, options) do
    GoogleApi.Classroom.V1.Model.Link.decode(value, options)
  end
end
# JSON encoding delegates to the shared Gax ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Classroom.V1.Model.Link do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.075472 | 144 | 0.697647 |
73947e227a93f4e3a5c6e2706e074adb46c613de | 14,424 | ex | Elixir | clients/admin/lib/google_api/admin/directory_v1/api/notifications.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/directory_v1/api/notifications.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/directory_v1/api/notifications.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Admin.Directory_v1.Api.Notifications do
  @moduledoc """
  API calls for all endpoints tagged `Notifications`.
  """

  alias GoogleApi.Admin.Directory_v1.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version, attached to every outgoing request for telemetry.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Deletes a notification

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Admin.Directory_v1.Connection.t`) - Connection to server
  *   `customer` (*type:* `String.t`) - The unique ID for the customer's G Suite account. The customerId is also returned as part of the Users resource.
  *   `notification_id` (*type:* `String.t`) - The unique ID of the notification.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %{}}` on success
  *   `{:error, info}` on failure
  """
  @spec directory_notifications_delete(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
  def directory_notifications_delete(
        connection,
        customer,
        notification_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Allowed optional parameters, mapped to where they belong in the request.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    # Build the DELETE request, URL-encoding the path parameters.
    request =
      Request.new()
      |> Request.method(:delete)
      |> Request.url("/customer/{customer}/notifications/{notificationId}", %{
        "customer" => URI.encode(customer, &URI.char_unreserved?/1),
        "notificationId" => URI.encode(notification_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    # `decode: false`: this endpoint returns an empty body.
    |> Response.decode(opts ++ [decode: false])
  end

  @doc """
  Retrieves a notification.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Admin.Directory_v1.Connection.t`) - Connection to server
  *   `customer` (*type:* `String.t`) - The unique ID for the customer's G Suite account. The customerId is also returned as part of the Users resource.
  *   `notification_id` (*type:* `String.t`) - The unique ID of the notification.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Admin.Directory_v1.Model.Notification{}}` on success
  *   `{:error, info}` on failure
  """
  @spec directory_notifications_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Admin.Directory_v1.Model.Notification.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def directory_notifications_get(
        connection,
        customer,
        notification_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Allowed optional parameters, mapped to where they belong in the request.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    # Build the GET request, URL-encoding the path parameters.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/customer/{customer}/notifications/{notificationId}", %{
        "customer" => URI.encode(customer, &URI.char_unreserved?/1),
        "notificationId" => URI.encode(notification_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    # Decode the JSON body into a Notification struct.
    |> Response.decode(opts ++ [struct: %GoogleApi.Admin.Directory_v1.Model.Notification{}])
  end

  @doc """
  Retrieves a list of notifications.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Admin.Directory_v1.Connection.t`) - Connection to server
  *   `customer` (*type:* `String.t`) - The unique ID for the customer's G Suite account.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:language` (*type:* `String.t`) - The ISO 639-1 code of the language notifications are returned in. The default is English (en).
      *   `:maxResults` (*type:* `integer()`) - Maximum number of notifications to return per page. The default is 100.
      *   `:pageToken` (*type:* `String.t`) - The token to specify the page of results to retrieve.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Admin.Directory_v1.Model.Notifications{}}` on success
  *   `{:error, info}` on failure
  """
  @spec directory_notifications_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Admin.Directory_v1.Model.Notifications.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def directory_notifications_list(connection, customer, optional_params \\ [], opts \\ []) do
    # Allowed optional parameters, including pagination controls.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :language => :query,
      :maxResults => :query,
      :pageToken => :query
    }

    # Build the GET request for the collection endpoint.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/customer/{customer}/notifications", %{
        "customer" => URI.encode(customer, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    # Decode the JSON body into a Notifications (plural) page struct.
    |> Response.decode(opts ++ [struct: %GoogleApi.Admin.Directory_v1.Model.Notifications{}])
  end

  @doc """
  Updates a notification. This method supports patch semantics.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Admin.Directory_v1.Connection.t`) - Connection to server
  *   `customer` (*type:* `String.t`) - The unique ID for the customer's G Suite account.
  *   `notification_id` (*type:* `String.t`) - The unique ID of the notification.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:body` (*type:* `GoogleApi.Admin.Directory_v1.Model.Notification.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Admin.Directory_v1.Model.Notification{}}` on success
  *   `{:error, info}` on failure
  """
  @spec directory_notifications_patch(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Admin.Directory_v1.Model.Notification.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def directory_notifications_patch(
        connection,
        customer,
        notification_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Allowed optional parameters; :body carries the partial update payload.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :body => :body
    }

    # Build the PATCH request, URL-encoding the path parameters.
    request =
      Request.new()
      |> Request.method(:patch)
      |> Request.url("/customer/{customer}/notifications/{notificationId}", %{
        "customer" => URI.encode(customer, &URI.char_unreserved?/1),
        "notificationId" => URI.encode(notification_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    # Decode the JSON body into the updated Notification struct.
    |> Response.decode(opts ++ [struct: %GoogleApi.Admin.Directory_v1.Model.Notification{}])
  end

  @doc """
  Updates a notification.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Admin.Directory_v1.Connection.t`) - Connection to server
  *   `customer` (*type:* `String.t`) - The unique ID for the customer's G Suite account.
  *   `notification_id` (*type:* `String.t`) - The unique ID of the notification.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:body` (*type:* `GoogleApi.Admin.Directory_v1.Model.Notification.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Admin.Directory_v1.Model.Notification{}}` on success
  *   `{:error, info}` on failure
  """
  @spec directory_notifications_update(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Admin.Directory_v1.Model.Notification.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def directory_notifications_update(
        connection,
        customer,
        notification_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Allowed optional parameters; :body carries the full replacement payload.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :body => :body
    }

    # Build the PUT request, URL-encoding the path parameters.
    request =
      Request.new()
      |> Request.method(:put)
      |> Request.url("/customer/{customer}/notifications/{notificationId}", %{
        "customer" => URI.encode(customer, &URI.char_unreserved?/1),
        "notificationId" => URI.encode(notification_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    # Decode the JSON body into the updated Notification struct.
    |> Response.decode(opts ++ [struct: %GoogleApi.Admin.Directory_v1.Model.Notification{}])
  end
end
| 41.687861 | 187 | 0.624861 |
739498c70b92cd26b0e0bc15bd510dac8f36b1e2 | 589 | exs | Elixir | sentences-smash/mix.exs | crsanti/codewars-elixir | 7e7d9bceea5db8b965ecc1e17be52bc2aeafa4f0 | [
"MIT"
] | 2 | 2021-08-18T11:31:31.000Z | 2021-08-24T00:25:08.000Z | sentences-smash/mix.exs | crsanti/codewars-elixir | 7e7d9bceea5db8b965ecc1e17be52bc2aeafa4f0 | [
"MIT"
] | null | null | null | sentences-smash/mix.exs | crsanti/codewars-elixir | 7e7d9bceea5db8b965ecc1e17be52bc2aeafa4f0 | [
"MIT"
] | null | null | null | defmodule SentenceSmasher.MixProject do
use Mix.Project
def project do
[
app: :sentence_smasher,
version: "0.1.0",
elixir: "~> 1.6",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
]
end
end
| 20.310345 | 88 | 0.587436 |
739499bd8db3e7145b3e8503f18a1c9b4472dee9 | 14,115 | exs | Elixir | test/space_ex/event_test.exs | wisq/space_ex | 56a87bdd67af093ad70e2843ff4cf963ddf7018c | [
"Apache-2.0"
] | 7 | 2018-01-30T14:09:13.000Z | 2021-01-22T22:38:32.000Z | test/space_ex/event_test.exs | wisq/space_ex | 56a87bdd67af093ad70e2843ff4cf963ddf7018c | [
"Apache-2.0"
] | null | null | null | test/space_ex/event_test.exs | wisq/space_ex | 56a87bdd67af093ad70e2843ff4cf963ddf7018c | [
"Apache-2.0"
] | 1 | 2018-01-30T14:09:17.000Z | 2018-01-30T14:09:17.000Z | defmodule SpaceEx.EventTest do
use ExUnit.Case, async: true
@moduletag :capture_log
require SpaceEx.ProcedureCall
alias SpaceEx.{Event, Stream, ProcedureCall, Protobufs, KRPC, KRPC.Expression, Types, API}
alias Stream.Result
alias SpaceEx.Protobufs.{
StreamUpdate,
StreamResult,
ProcedureResult,
Argument
}
alias SpaceEx.Test.MockConnection
import SpaceEx.Test.ConnectionHelper, only: [send_message: 2]
  # Event.create/2 should (1) register the encoded expression server-side via
  # KRPC.add_event and (2) immediately start the stream that backs the event.
  test "create/2 calls KRPC.add_event with encoded Expression" do
    state = MockConnection.start(real_stream: true)
    conn = state.conn

    # To create an expression, the only reply we need is a server-side ID.
    MockConnection.add_result_value(<<86>>, conn)

    # Prepare the simplest possible boolean expression.
    # (Yes, this really works. It's boolean, after all.)
    paused_call = KRPC.paused(conn) |> ProcedureCall.create()
    expr = Expression.call(conn, paused_call)
    assert expr.id == <<86>>

    # Prepare two replies:
    # ... the server's AddEvent reply (wrapping stream ID 99) ...
    Protobufs.Event.new(stream: Protobufs.Stream.new(id: 99))
    |> Protobufs.Event.encode()
    |> MockConnection.add_result_value(conn)

    # ... and a StartStream reply, empty.
    MockConnection.add_result_value("", conn)

    # Create the event (which is really just a stream.)
    assert %Stream{id: 99} = Event.create(expr)

    # Now check the requests we got.
    # (Expressions will be tested in their own test module.)
    assert [_create_expr, add_event, start_stream] = MockConnection.dump_calls(conn)

    # KRPC.add_event with expression ID (86).
    assert add_event.service == "KRPC"
    assert add_event.procedure == "AddEvent"
    assert [%Argument{value: <<86>>}] = add_event.arguments

    # KRPC.start_stream with stream ID (99).
    assert start_stream.service == "KRPC"
    assert start_stream.procedure == "StartStream"
    assert [%Argument{value: <<99>>}] = start_stream.arguments
  end
  # Event.wait/1 delegates to the underlying stream: once the FIRST update
  # arrives, repeated waits (with remove: false) return immediately.
  test "wait/1 blocks until event stream receives FIRST result" do
    state = MockConnection.start(real_stream: true)
    conn = state.conn

    # Create a dummy Expression reference.
    type = %API.Type.Class{name: "Expression"}
    expr = Types.decode(<<42>>, type, conn)

    # Prepare the AddEvent and StartStream replies.
    Protobufs.Event.new(stream: Protobufs.Stream.new(id: 66))
    |> Protobufs.Event.encode()
    |> MockConnection.add_result_value(conn)

    MockConnection.add_result_value("", conn)

    # Create the Event/Stream.
    assert %Stream{id: 66} = event = Event.create(expr)
    assert [_add_event, _start_stream] = MockConnection.dump_calls(conn)

    me = self()

    # Now, we wait (three times, in a subprocess, reporting each result back).
    spawn_link(fn ->
      send(me, {:first, Event.wait(event, timeout: 100, remove: false)})
      send(me, {:second, Event.wait(event, timeout: 100, remove: false)})
      send(me, {:third, Event.wait(event, timeout: 100, remove: false)})
    end)

    # Send the result down the wire.
    true_result = ProcedureResult.new(value: <<1>>)
    send_stream_result(state.stream_socket, 66, true_result)

    # All three should arrive immediately, since Event.wait == Stream.get.
    assert_receive({:first, true})
    assert_receive({:second, true})
    assert_receive({:third, true})
  end
  # With the default options, a successful Event.wait/1 should also remove the
  # stream: expect AddEvent, StartStream, then RemoveStream on the wire.
  test "wait/1 calls remove/1 by default" do
    state = MockConnection.start(real_stream: true)
    conn = state.conn

    # Create a dummy Expression reference.
    type = %API.Type.Class{name: "Expression"}
    expr = Types.decode(<<42>>, type, conn)

    # AddEvent reply:
    Protobufs.Event.new(stream: Protobufs.Stream.new(id: 66))
    |> Protobufs.Event.encode()
    |> MockConnection.add_result_value(conn)

    # StartStream reply:
    MockConnection.add_result_value("", conn)
    # RemoveStream reply:
    MockConnection.add_result_value("", conn)

    me = self()

    # We need to create the event in a subprocess, so that `Event.remove` works as intended.
    spawn_link(fn ->
      assert %Stream{id: 66, pid: stream_pid} = event = Event.create(expr)
      send(me, {:stream_pid, stream_pid})
      send(me, {:wait, Event.wait(event, timeout: 500)})
    end)

    # Should not get a reply yet.
    refute_receive({:wait, true})

    # Monitor the stream PID so we can properly assert on death.
    assert_received({:stream_pid, stream_pid})
    ref = Process.monitor(stream_pid)

    # Send the result down the wire.
    true_result = ProcedureResult.new(value: <<1>>)
    send_stream_result(state.stream_socket, 66, true_result)

    # We should receive a value, and the Stream should terminate.
    assert_receive({:wait, true})
    assert_receive({:DOWN, ^ref, :process, ^stream_pid, :normal})

    procedures = MockConnection.dump_calls(conn) |> Enum.map(& &1.procedure)
    assert procedures == ["AddEvent", "StartStream", "RemoveStream"]
  end
# It may seem redundant to test Event lifecycles, since they're just Streams, but
# it's important to make sure that `Event.create/2` triggers the same bonding logic
# as `Stream.create/2` et al.
  # The shared stream process is reference-counted across launching processes:
  # it must only send RemoveStream and exit once ALL of them call remove/1.
  test "event stream process removes itself and exits if ALL launching processes call `remove/1`" do
    state = MockConnection.start(real_stream: true)
    conn = state.conn

    # Create a dummy Expression reference.
    type = %API.Type.Class{name: "Expression"}
    expr = Types.decode(<<42>>, type, conn)

    me = self()

    # We'll launch three Events (Streams) in separate processes
    # and then individually trigger removes in each.
    {pids, stream_pids} =
      Enum.map(1..3, fn index ->
        # Prepare the AddEvent and StartStream replies.
        Protobufs.Event.new(stream: Protobufs.Stream.new(id: 66))
        |> Protobufs.Event.encode()
        |> MockConnection.add_result_value(conn)

        MockConnection.add_result_value("", conn)

        pid =
          spawn_link(fn ->
            assert %Stream{id: 66} = event = Event.create(expr)
            send(me, {:stream, index, event})
            assert_receive :remove, 1000
            Event.remove(event)
            assert_receive :exit, 1000
          end)

        # Keep this inside the loop, so we don't have race conditions
        # with regards to the ordering of [AddEvent, StartStream] responses.
        assert_receive {:stream, ^index, %Stream{pid: stream_pid}}
        {pid, stream_pid}
      end)
      |> Enum.unzip()

    # We should see three of the same stream_pid.
    assert [stream_pid, stream_pid, stream_pid] = stream_pids
    # And three pairs of [add, start] requests.
    assert [add, start, add, start, add, start] = MockConnection.dump_calls(state.conn)

    # Prepare the RemoveStream response and monitor the stream PID.
    MockConnection.add_result_value("", state.conn)
    ref = Process.monitor(stream_pid)

    # Remove them in a random order, to avoid any ordering favouritism.
    [pid1, pid2, pid3] = Enum.shuffle(pids)

    # First and second removes do nothing.
    send(pid1, :remove)
    send(pid2, :remove)
    refute_receive {:DOWN, ^ref, :process, ^stream_pid, _reason}
    assert [] = MockConnection.dump_calls(state.conn)

    # Third remove is the trigger.
    send(pid3, :remove)
    assert_receive {:DOWN, ^ref, :process, ^stream_pid, reason}
    assert reason == :normal

    # Check the RemoveStream request.
    assert [remove] = MockConnection.dump_calls(state.conn)
    assert remove.service == "KRPC"
    assert remove.procedure == "RemoveStream"
    assert [%Argument{value: <<66>>}] = remove.arguments

    Enum.each(pids, &send(&1, :exit))
  end
  # Same reference-counting as above, but the launching processes simply exit
  # rather than calling remove/1 — the stream must clean up via its monitors.
  test "event stream process removes itself and exits if ALL launching processes exit" do
    state = MockConnection.start(real_stream: true)
    conn = state.conn

    # Create a dummy Expression reference.
    type = %API.Type.Class{name: "Expression"}
    expr = Types.decode(<<42>>, type, conn)

    me = self()

    # We'll launch three Events (Streams) in separate processes
    # and then individually trigger exits in each.
    {pids, stream_pids} =
      Enum.map(1..3, fn index ->
        # Prepare the AddEvent and StartStream replies.
        Protobufs.Event.new(stream: Protobufs.Stream.new(id: 66))
        |> Protobufs.Event.encode()
        |> MockConnection.add_result_value(conn)

        MockConnection.add_result_value("", conn)

        pid =
          spawn_link(fn ->
            assert %Stream{id: 66} = event = Event.create(expr)
            send(me, {:stream, index, event})
            assert_receive :exit, 1000
          end)

        # Keep this inside the loop, so we don't have race conditions
        # with regards to the ordering of [AddEvent, StartStream] responses.
        assert_receive {:stream, ^index, %Stream{pid: stream_pid}}
        {pid, stream_pid}
      end)
      |> Enum.unzip()

    # We should see three of the same stream_pid.
    assert [stream_pid, stream_pid, stream_pid] = stream_pids
    # And three pairs of [add, start] requests.
    assert [add, start, add, start, add, start] = MockConnection.dump_calls(state.conn)

    # Prepare the RemoveStream response and monitor the stream PID.
    MockConnection.add_result_value("", state.conn)
    ref = Process.monitor(stream_pid)

    # Exit in a random order, to avoid any ordering favouritism.
    [pid1, pid2, pid3] = Enum.shuffle(pids)

    # First and second exits do nothing.
    send(pid1, :exit)
    send(pid2, :exit)
    refute_receive {:DOWN, ^ref, :process, ^stream_pid, _reason}
    assert [] = MockConnection.dump_calls(state.conn)

    # Third exit is the last needed.
    send(pid3, :exit)
    assert_receive {:DOWN, ^ref, :process, ^stream_pid, reason}
    assert reason == :normal

    # Check RemoveStream request.
    assert [remove] = MockConnection.dump_calls(state.conn)
    assert remove.service == "KRPC"
    assert remove.procedure == "RemoveStream"
    assert [%Argument{value: <<66>>}] = remove.arguments
  end
# Creating an event with `start: false` must only issue AddEvent; the
# StartStream call is deferred until `Event.start/1` is invoked.
test "create(expr, start: false) and start/1" do
  %{conn: conn} = MockConnection.start()

  # Dummy Expression reference to build the event from.
  expr = Types.decode(<<42>>, %API.Type.Class{name: "Expression"}, conn)

  # Queue the KRPC.add_stream reply, then create the (unstarted) stream.
  encoded_event =
    Protobufs.Event.encode(Protobufs.Event.new(stream: Protobufs.Stream.new(id: 23)))

  MockConnection.add_result_value(encoded_event, conn)

  assert %Stream{id: 23} = stream = Event.create(expr, start: false)

  # Only AddEvent was issued so far.
  assert [add_event] = MockConnection.dump_calls(conn)
  assert add_event.service == "KRPC"
  assert add_event.procedure == "AddEvent"
  assert [%Argument{value: <<42>>}] = add_event.arguments

  # Queue the KRPC.start reply and start the stream explicitly.
  MockConnection.add_result_value(<<>>, conn)
  assert :ok = Event.start(stream)

  assert [start_stream] = MockConnection.dump_calls(conn)
  assert start_stream.service == "KRPC"
  assert start_stream.procedure == "StartStream"
  assert [%Argument{value: <<23>>}] = start_stream.arguments
end
# With a `:rate` option, SetStreamRate must be issued after AddEvent but
# before StartStream.
test "create/2 with rate option sets rate before starting stream" do
  %{conn: conn} = MockConnection.start()

  # Dummy Expression reference for the event.
  expr = Types.decode(<<42>>, %API.Type.Class{name: "Expression"}, conn)

  # Queue the AddEvent, SetStreamRate and StartStream replies, in call order.
  Protobufs.Event.new(stream: Protobufs.Stream.new(id: 123))
  |> Protobufs.Event.encode()
  |> MockConnection.add_result_value(conn)

  MockConnection.add_result_value(<<>>, conn)
  MockConnection.add_result_value(<<>>, conn)

  assert %Stream{id: 123} = Event.create(expr, rate: 5)

  # All three calls happened, and in the required order.
  assert [add_event, set_rate, start_stream] = MockConnection.dump_calls(conn)

  assert %{service: "KRPC", procedure: "AddEvent"} = add_event
  assert %{service: "KRPC", procedure: "SetStreamRate"} = set_rate
  # Arguments: stream id 123, then the rate 5 encoded as the float 5.0.
  assert [%Argument{value: <<123>>}, %Argument{value: <<0, 0, 160, 64>>}] = set_rate.arguments
  assert %{service: "KRPC", procedure: "StartStream"} = start_stream
  assert [%Argument{value: <<123>>}] = start_stream.arguments
end
# `Event.set_rate/2` issues a single SetStreamRate call with the stream id
# and the rate encoded as a float.
test "set_rate/2" do
  %{conn: conn} = MockConnection.start()

  # Hand-built stream struct; no stream process is needed to set the rate.
  stream = %Stream{id: 76, conn: conn, pid: nil, decoder: nil}

  # Queue the KRPC.set_stream_rate reply before making the call.
  MockConnection.add_result_value(<<>>, conn)
  assert :ok = Event.set_rate(stream, 100)

  assert [request] = MockConnection.dump_calls(conn)
  assert %{service: "KRPC", procedure: "SetStreamRate"} = request
  # Arguments: stream id 76, then 100 encoded as the float 100.0.
  assert [%Argument{value: <<76>>}, %Argument{value: <<0, 0, 200, 66>>}] = request.arguments
end
# A subscriber receives the stream result as a `{:stream_result, id, result}`
# message. After delivering a truthy result the stream issues RemoveStream
# and its process terminates normally.
test "subscribe/2" do
  state = MockConnection.start(real_stream: true)
  conn = state.conn
  # Create a dummy Expression reference:
  type = %API.Type.Class{name: "Expression"}
  expr = Types.decode(<<42>>, type, conn)
  # AddEvent reply:
  Protobufs.Event.new(stream: Protobufs.Stream.new(id: 66))
  |> Protobufs.Event.encode()
  |> MockConnection.add_result_value(conn)
  # StartStream reply:
  MockConnection.add_result_value(<<>>, conn)
  # Create the Event/Stream:
  assert %Stream{id: 66} = event = Event.create(expr)
  assert [_add_event, _start_stream] = MockConnection.dump_calls(conn)
  # Monitor the stream process so its termination can be asserted below.
  ref = Process.monitor(event.pid)
  # Subscribe to the new event:
  assert :ok = Event.subscribe(event)
  # Prepare the RemoveStream reply:
  MockConnection.add_result_value(<<>>, conn)
  # Send the result down the wire (value <<1>> is a truthy result):
  true_result = ProcedureResult.new(value: <<1>>)
  send_stream_result(state.stream_socket, 66, true_result)
  # Receive the stream result:
  assert_receive {:stream_result, 66, %Result{value: <<1>>}}
  # Stream should be removed and terminated:
  assert_receive {:DOWN, ^ref, :process, _pid, :normal}
  assert [remove] = MockConnection.dump_calls(conn)
  assert remove.procedure == "RemoveStream"
end
# Wraps `result` in a single-entry StreamUpdate for stream `id`, encodes
# it, and writes it to the mock stream `socket`.
defp send_stream_result(socket, id, result) do
  stream_result = StreamResult.new(id: id, result: result)
  update = StreamUpdate.new(results: [stream_result])
  send_message(StreamUpdate.encode(update), socket)
end
end
| 34.511002 | 100 | 0.674318 |
7394a3c4a079db5746e838be7d31981dd2dd0649 | 1,791 | ex | Elixir | lib/gps_web/telemetry.ex | JesseHerrick/phoenix_liveview_gps | 711b0cd3eb7277e12be3bac26aa950914c4cf8e7 | [
"MIT"
] | 1 | 2021-03-28T08:22:40.000Z | 2021-03-28T08:22:40.000Z | lib/gps_web/telemetry.ex | JesseHerrick/phoenix_liveview_gps | 711b0cd3eb7277e12be3bac26aa950914c4cf8e7 | [
"MIT"
] | null | null | null | lib/gps_web/telemetry.ex | JesseHerrick/phoenix_liveview_gps | 711b0cd3eb7277e12be3bac26aa950914c4cf8e7 | [
"MIT"
] | 1 | 2021-01-15T12:40:12.000Z | 2021-01-15T12:40:12.000Z | defmodule GpsWeb.Telemetry do
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("gps.repo.query.total_time", unit: {:native, :millisecond}),
summary("gps.repo.query.decode_time", unit: {:native, :millisecond}),
summary("gps.repo.query.query_time", unit: {:native, :millisecond}),
summary("gps.repo.query.queue_time", unit: {:native, :millisecond}),
summary("gps.repo.query.idle_time", unit: {:native, :millisecond}),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {GpsWeb, :count_users, []}
]
end
end
| 31.982143 | 86 | 0.666667 |
7394a79b6346515351338392d69a9428a909c1eb | 1,315 | exs | Elixir | test/models/session_test.exs | aramisf/caravan | add305c70ee1ceb4a3dff9a7ed606a182e6f12d7 | [
"MIT"
] | null | null | null | test/models/session_test.exs | aramisf/caravan | add305c70ee1ceb4a3dff9a7ed606a182e6f12d7 | [
"MIT"
] | null | null | null | test/models/session_test.exs | aramisf/caravan | add305c70ee1ceb4a3dff9a7ed606a182e6f12d7 | [
"MIT"
] | null | null | null | defmodule Caravan.SessionTest do
use Caravan.ModelCase
alias Caravan.Repo
alias Caravan.User
alias Caravan.Session
@valid_attrs %{email: "[email protected]", password: "password"}
@invalid_attrs %{}
@user_attrs %{email: "[email protected]", password: "password", name: "João"}
test "changeset with valid attributes" do
changeset = Session.changeset(%Session{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = Session.changeset(%Session{}, @invalid_attrs)
refute changeset.valid?
end
test "email must contain at least an @" do
attrs = %{@valid_attrs | email: "joao.com"}
assert {:email, "has invalid format"} in errors_on(%Session{}, attrs)
end
test "find_user_and_validate_password validates user existence" do
result = Session.find_user_and_validate_password(@valid_attrs)
assert {:error, changeset} = result
assert changeset.errors == [email: {"is not registered", []}]
end
test "changeset validates password" do
Repo.insert(User.creation_changeset(%User{}, @user_attrs))
attrs = %{@valid_attrs | password: "lolo"}
result = Session.find_user_and_validate_password(attrs)
assert {:error, changeset} = result
assert changeset.errors == [password: {"is invalid", []}]
end
end
| 31.309524 | 75 | 0.707224 |
7394bff8e6fe0cd0d71f320224a6a1deaff80ea4 | 1,963 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/data_source_spec.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/sheets/lib/google_api/sheets/v4/model/data_source_spec.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/sheets/lib/google_api/sheets/v4/model/data_source_spec.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.DataSourceSpec do
  @moduledoc """
  This specifies the details of the data source. For example, for BigQuery, this specifies information about the BigQuery source.

  ## Attributes

  *   `bigQuery` (*type:* `GoogleApi.Sheets.V4.Model.BigQueryDataSourceSpec.t`, *default:* `nil`) - A BigQueryDataSourceSpec.
  *   `parameters` (*type:* `list(GoogleApi.Sheets.V4.Model.DataSourceParameter.t)`, *default:* `nil`) - The parameters of the data source, used when querying the data source.
  """

  # Auto-generated model (see the file header); regenerate rather than
  # hand-editing. `field/2-3` macros come from ModelBase.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :bigQuery => GoogleApi.Sheets.V4.Model.BigQueryDataSourceSpec.t() | nil,
          :parameters => list(GoogleApi.Sheets.V4.Model.DataSourceParameter.t()) | nil
        }

  field(:bigQuery, as: GoogleApi.Sheets.V4.Model.BigQueryDataSourceSpec)
  field(:parameters, as: GoogleApi.Sheets.V4.Model.DataSourceParameter, type: :list)
end
# JSON decoding delegates to the generated model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.DataSourceSpec do
  def decode(value, options) do
    GoogleApi.Sheets.V4.Model.DataSourceSpec.decode(value, options)
  end
end

# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.DataSourceSpec do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.26 | 175 | 0.745797 |
7394e8d395773f4b41c0e44f776e52c7532b4596 | 3,098 | exs | Elixir | test/cluster_test.exs | arjan/sworm | a76a5fed2108db2a58615cd2d1721139d608ef23 | [
"MIT"
] | 21 | 2019-05-15T09:43:20.000Z | 2021-01-12T10:04:47.000Z | test/cluster_test.exs | arjan/sworm | a76a5fed2108db2a58615cd2d1721139d608ef23 | [
"MIT"
] | 1 | 2019-05-30T09:18:28.000Z | 2019-08-16T13:41:38.000Z | test/cluster_test.exs | arjan/sworm | a76a5fed2108db2a58615cd2d1721139d608ef23 | [
"MIT"
] | null | null | null | defmodule Swurm do
use Sworm
defmodule TestServer do
use GenServer
def start_link() do
GenServer.start_link(__MODULE__, [])
end
def init(a) do
{:ok, a}
end
end
def start_one(name) do
Swurm.register_name(name, TestServer, :start_link, [])
end
end
defmodule SwormClusterTest do
  # Multi-node tests for Sworm: registration propagation, hand-off when a
  # node goes down, and directory updates on membership changes.
  use ExUnit.ClusteredCase

  import Sworm.Support.Helpers

  scenario "given a healthy cluster",
    cluster_size: 2,
    boot_timeout: 20_000,
    stdout: :standard_error do
    node_setup do
      {:ok, _} = Application.ensure_all_started(:sworm)
      {:ok, pid} = Swurm.start_link()
      # Unlink so the end of node_setup does not take the Sworm down with it.
      Process.unlink(pid)
      :ok
    end

    test "can call on all nodes", %{cluster: c} do
      # Nothing registered yet on either node.
      assert [[], []] =
               Cluster.members(c) |> Enum.map(fn n -> Cluster.call(n, Swurm, :registered, []) end)
    end

    test "register process on one server; registration is propagated to other nodes", %{
      cluster: c
    } do
      n = Cluster.random_member(c)
      Cluster.call(n, Swurm, :start_one, ["hi"])

      # settle: wait until both nodes see the "hi" registration
      wait_until(fn ->
        match?(
          [[{"hi", _}], [{"hi", _}]],
          Cluster.members(c) |> Enum.map(fn n -> Cluster.call(n, Swurm, :registered, []) end)
        )
      end)

      # now stop it
      [[{"hi", p}] | _] =
        Cluster.members(c) |> Enum.map(fn n -> Cluster.call(n, Swurm, :registered, []) end)

      GenServer.stop(p)

      # settle: the name should disappear from both nodes
      wait_until(fn ->
        [[], []] ==
          Cluster.members(c) |> Enum.map(fn n -> Cluster.call(n, Swurm, :registered, []) end)
      end)
    end
  end

  scenario "given a cluster that is shutting down",
    cluster_size: 2,
    boot_timeout: 20_000,
    stdout: :standard_error do
    node_setup do
      {:ok, _} = Application.ensure_all_started(:sworm)
      {:ok, pid} = Swurm.start_link()
      Process.unlink(pid)
      :ok
    end

    test "register process on one server; process moves to other node when it goes down", %{
      cluster: c
    } do
      n = Cluster.random_member(c)
      Cluster.call(n, Swurm, :start_one, ["hi"])

      wait_until(fn ->
        match?([_], Cluster.call(n, Swurm, :registered, []))
      end)

      [{"hi", pid}] = Cluster.call(n, Swurm, :registered, [])
      target_node = node(pid)
      # The surviving node is whichever member is NOT hosting the process.
      [other_node] = Cluster.members(c) -- [target_node]

      wait_until(fn ->
        [{"hi", pid}] == Cluster.call(other_node, Swurm, :registered, [])
      end)

      Cluster.stop_node(c, target_node)

      # NOTE(review): the `=` match below raises while the name is absent —
      # assumes wait_until/1 treats a raise as "not yet"; confirm in
      # Sworm.Support.Helpers.
      wait_until(fn ->
        [{"hi", pid}] = Cluster.call(other_node, Swurm, :registered, [])
        # process now runs on the other node
        node(pid) == other_node
      end)
    end

    test "directory is updated when nodes join and leave", %{
      cluster: c
    } do
      [a, b] = Cluster.members(c)

      # Both nodes are initially present in the Sworm directory.
      wait_until(fn ->
        match?([_, _], Cluster.call(a, Sworm.DirectoryManager, :nodes_for_sworm, [Swurm]))
      end)

      Cluster.stop_node(c, b)

      # After b stops, only one node should remain in the directory.
      wait_until(fn ->
        match?([_], Cluster.call(a, Sworm.DirectoryManager, :nodes_for_sworm, [Swurm]))
      end)
    end
  end
end
| 23.830769 | 98 | 0.575533 |
7395408cea4df396a67b2e4316bea779fe3fb273 | 971 | ex | Elixir | apps/astarte_realm_management_api/lib/astarte_realm_management_api_web/controllers/version_controller.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | apps/astarte_realm_management_api/lib/astarte_realm_management_api_web/controllers/version_controller.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | apps/astarte_realm_management_api/lib/astarte_realm_management_api_web/controllers/version_controller.ex | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2021 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.RealmManagement.APIWeb.VersionController do
  use Astarte.RealmManagement.APIWeb, :controller

  # The version string is read once, at compile time, from mix.exs.
  @version Mix.Project.config()[:version]

  # Request logging and path authorization run before the action.
  plug Astarte.RealmManagement.APIWeb.Plug.LogRealm
  plug Astarte.RealmManagement.APIWeb.Plug.AuthorizePath

  @doc "Renders the compile-time application version as `show.json`."
  def show(conn, _params) do
    render(conn, "show.json", %{version: @version})
  end
end
| 31.322581 | 74 | 0.763131 |
73956dbc3f91ba80163dd656d959a5aa963687c6 | 846 | ex | Elixir | game_server_umbrella/apps/game_server/lib/game_server/game_supervisor.ex | jburky/dara-dots | 7a67de3d77448792435ccbf9fb8291e821fed4ac | [
"MIT"
] | 6 | 2021-03-25T21:02:28.000Z | 2022-02-10T08:11:28.000Z | game_server_umbrella/apps/game_server/lib/game_server/game_supervisor.ex | jburky/dara-dots | 7a67de3d77448792435ccbf9fb8291e821fed4ac | [
"MIT"
] | 20 | 2020-09-02T23:17:29.000Z | 2022-03-27T22:54:20.000Z | game_server_umbrella/apps/game_server/lib/game_server/game_supervisor.ex | jburky/dara-dots | 7a67de3d77448792435ccbf9fb8291e821fed4ac | [
"MIT"
] | 2 | 2021-03-25T21:08:15.000Z | 2021-09-25T19:11:36.000Z | defmodule GameServer.RpsGameSupervisor do
@moduledoc """
Dynamic supervisor used to retrieve the PIDs
of running games given the ID generated when
the game process was started.
"""
use DynamicSupervisor
def start_link(init_arg) do
DynamicSupervisor.start_link(
__MODULE__,
init_arg,
name: __MODULE__
)
end
@impl true
def init(_init_arg) do
DynamicSupervisor.init(strategy: :one_for_one)
end
@doc """
Used to retrieve the process for an existing
game based on the ID or start a new game.
"""
def find_game(game_id) do
case start_child(game_id) do
{:ok, pid} -> pid
{:error, {:already_started, pid}} -> pid
end
end
def start_child(game_id) do
DynamicSupervisor.start_child(
__MODULE__,
{GameServer.RockPaperScissors, game_id}
)
end
end
| 21.15 | 50 | 0.682033 |
739582394b758d7beb1ea8e4e1300434f56e3ebc | 469 | ex | Elixir | lib/anchore_engine_api_server/model/credential_list.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/credential_list.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/credential_list.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule AnchoreEngineAPIServer.Model.CredentialList do
  @moduledoc """
  Auto-generated model for the CredentialList schema. The struct currently
  declares no fields of its own.
  """

  @derive [Poison.Encoder]
  defstruct [
  ]

  @type t :: %__MODULE__{
  }
end
# Decoding is the identity: the struct has no fields to transform.
defimpl Poison.Decoder, for: AnchoreEngineAPIServer.Model.CredentialList do
  def decode(value, _options) do
    value
  end
end
| 18.038462 | 75 | 0.712154 |
7395a6b65091a7d7c6fc3b70178d5440452a396c | 1,605 | ex | Elixir | clients/content/lib/google_api/content/v21/model/liasettings_set_inventory_verification_contact_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/liasettings_set_inventory_verification_contact_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/content/lib/google_api/content/v21/model/liasettings_set_inventory_verification_contact_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.LiasettingsSetInventoryVerificationContactResponse do
  @moduledoc """

  ## Attributes

  *   `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "content#liasettingsSetInventoryVerificationContactResponse".
  """

  # Auto-generated model (see the file header); regenerate rather than
  # hand-editing.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kind => String.t()
        }

  field(:kind)
end
# JSON decoding delegates to the generated model's own decode/2.
defimpl Poison.Decoder,
  for: GoogleApi.Content.V21.Model.LiasettingsSetInventoryVerificationContactResponse do
  def decode(value, options) do
    GoogleApi.Content.V21.Model.LiasettingsSetInventoryVerificationContactResponse.decode(
      value,
      options
    )
  end
end

# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder,
  for: GoogleApi.Content.V21.Model.LiasettingsSetInventoryVerificationContactResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.865385 | 181 | 0.753271 |
7395aa5750c366eb60c745d5b9b816ebbe94b049 | 3,377 | ex | Elixir | clients/safe_browsing/lib/google_api/safe_browsing/v4/api/full_hashes.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/safe_browsing/lib/google_api/safe_browsing/v4/api/full_hashes.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/safe_browsing/lib/google_api/safe_browsing/v4/api/full_hashes.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.SafeBrowsing.V4.Api.FullHashes do
  @moduledoc """
  API calls for all endpoints tagged `FullHashes`.
  """

  alias GoogleApi.SafeBrowsing.V4.Connection
  alias GoogleApi.Gax.{Request, Response}

  @doc """
  Finds the full hashes that match the requested hash prefixes.

  ## Parameters

  - connection (GoogleApi.SafeBrowsing.V4.Connection): Connection to server
  - optional_params (KeywordList): [optional] Optional parameters
    - :$.xgafv (String.t): V1 error format.
    - :access_token (String.t): OAuth access token.
    - :alt (String.t): Data format for response.
    - :callback (String.t): JSONP
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :body (FindFullHashesRequest):
  - opts (KeywordList): [optional] Options passed through to the response decoder

  ## Returns

  {:ok, %GoogleApi.SafeBrowsing.V4.Model.FindFullHashesResponse{}} on success
  {:error, info} on failure
  """
  # Fix: the original @spec only covered the 2-argument form, omitting the
  # trailing `opts \\ []` parameter the function actually accepts.
  @spec safebrowsing_full_hashes_find(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.SafeBrowsing.V4.Model.FindFullHashesResponse.t()}
          | {:error, Tesla.Env.t()}
  def safebrowsing_full_hashes_find(connection, optional_params \\ [], opts \\ []) do
    # Maps each supported option to its location in the HTTP request
    # (query string vs. request body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :upload_protocol => :query,
      :uploadType => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v4/fullHashes:find")
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.SafeBrowsing.V4.Model.FindFullHashesResponse{}]
    )
  end
end
7395b71f73e4164f9e0f9013539d79ac82bfbf8f | 1,030 | ex | Elixir | lib/graphql/types/revision.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | lib/graphql/types/revision.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | lib/graphql/types/revision.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | defmodule Accent.GraphQL.Types.Revision do
use Absinthe.Schema.Notation
import Absinthe.Resolution.Helpers, only: [dataloader: 1]
import Accent.GraphQL.Helpers.Authorization
import Accent.GraphQL.Helpers.Fields
object :revision do
field(:id, :id)
field(:name, non_null(:string))
field(:is_master, non_null(:boolean), resolve: field_alias(:master))
field(:translations_count, non_null(:integer))
field(:conflicts_count, non_null(:integer))
field(:reviewed_count, non_null(:integer))
field(:inserted_at, non_null(:datetime))
field(:language, non_null(:language), resolve: dataloader(Accent.Language))
field :translations, non_null(:translations) do
arg(:page, :integer)
arg(:order, :string)
arg(:document, :id)
arg(:version, :id)
arg(:query, :string)
arg(:is_conflicted, :boolean)
arg(:reference_revision, :id)
resolve(revision_authorize(:index_translations, &Accent.GraphQL.Resolvers.Translation.list_revision/3))
end
end
end
| 32.1875 | 109 | 0.708738 |
7395d74ec707b0d080e5b824f322e6a02c6e62f7 | 1,779 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_test_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_test_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_test_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3TestConfig do
  @moduledoc """
  Represents configurations for a test case.

  ## Attributes

  *   `flow` (*type:* `String.t`, *default:* `nil`) - Flow name. If not set, default start flow is assumed. Format: `projects//locations//agents//flows/`.
  *   `trackingParameters` (*type:* `list(String.t)`, *default:* `nil`) - Session parameters to be compared when calculating differences.
  """

  # Auto-generated model (see the file header); regenerate rather than
  # hand-editing.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :flow => String.t(),
          :trackingParameters => list(String.t())
        }

  field(:flow)
  field(:trackingParameters, type: :list)
end
# JSON decoding delegates to the generated model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3TestConfig do
  def decode(value, options) do
    GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3TestConfig.decode(value, options)
  end
end

# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3TestConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.58 | 154 | 0.739741 |
7395ee1930fe45063ac5ed6b847f85b9f3bc8314 | 539 | ex | Elixir | test/support/test_case.ex | aleqsio/absinthe_plug | 94083e836944993f411f0c1510d153077b03e553 | [
"MIT"
] | 234 | 2016-03-02T07:38:25.000Z | 2022-03-14T19:44:17.000Z | test/support/test_case.ex | aleqsio/absinthe_plug | 94083e836944993f411f0c1510d153077b03e553 | [
"MIT"
] | 205 | 2016-03-02T13:52:53.000Z | 2022-03-31T23:31:36.000Z | test/support/test_case.ex | aleqsio/absinthe_plug | 94083e836944993f411f0c1510d153077b03e553 | [
"MIT"
] | 168 | 2016-03-02T09:23:56.000Z | 2022-03-31T23:27:58.000Z | defmodule Absinthe.Plug.TestCase do
defmacro __using__(_) do
quote do
use ExUnit.Case, async: true
use Plug.Test
import unquote(__MODULE__)
end
end
def call(conn, opts) do
conn
|> plug_parser
|> Absinthe.Plug.call(opts)
|> Map.update!(:resp_body, &Jason.decode!/1)
end
def plug_parser(conn) do
opts =
Plug.Parsers.init(
parsers: [:urlencoded, :multipart, :json, Absinthe.Plug.Parser],
json_decoder: Jason
)
Plug.Parsers.call(conn, opts)
end
end
| 19.25 | 72 | 0.627087 |
73960cb1f39935350cf61ce5d49d5f01f66c9fe0 | 1,881 | exs | Elixir | mix.exs | greenealexander/print_react | f6d29884cc11b0bbf143df866e08bfaf4607de0c | [
"MIT-0"
] | null | null | null | mix.exs | greenealexander/print_react | f6d29884cc11b0bbf143df866e08bfaf4607de0c | [
"MIT-0"
] | null | null | null | mix.exs | greenealexander/print_react | f6d29884cc11b0bbf143df866e08bfaf4607de0c | [
"MIT-0"
] | null | null | null | defmodule PrintReact.MixProject do
use Mix.Project
def project do
[
app: :print_react,
version: "0.1.0",
elixir: "~> 1.12",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {PrintReact.Application, []},
extra_applications: [:logger, :runtime_tools, :httpoison]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.5.8"},
{:phoenix_ecto, "~> 4.1"},
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_dashboard, "~> 0.4"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:httpoison, "~> 1.7"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
]
end
end
| 28.074627 | 84 | 0.580011 |
739621c9c7f9e1c075f407fcaf7f8da8c5820361 | 2,337 | exs | Elixir | mix.exs | herimedia/commanded-uniqueness-middleware | 508963e155a16797717dad9c35e81f9a64b0f5cf | [
"MIT"
] | null | null | null | mix.exs | herimedia/commanded-uniqueness-middleware | 508963e155a16797717dad9c35e81f9a64b0f5cf | [
"MIT"
] | null | null | null | mix.exs | herimedia/commanded-uniqueness-middleware | 508963e155a16797717dad9c35e81f9a64b0f5cf | [
"MIT"
] | null | null | null | defmodule Commanded.Middleware.Uniqueness.MixProject do
use Mix.Project
@version "0.6.1"
# Mix project definition for the commanded_uniqueness_middleware hex
# package; package/docs/description metadata come from the helpers below.
def project do
[
app: :commanded_uniqueness_middleware,
version: @version,
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
deps: deps(),
description: description(),
docs: docs(),
package: package(),
aliases: aliases(),
name: "Commanded Uniqueness Middleware",
source_url: "https://github.com/vheathen/commanded-uniqueness-middleware"
]
end
# Run "mix help compile.app" to learn about applications.
# The library runs its own supervisor as the application callback module.
def application do
[
extra_applications: [:logger],
mod: {Commanded.Middleware.Uniqueness.Supervisor, []}
]
end
# Compile paths per environment: tests also compile the test/support helpers.
defp elixirc_paths(:test) do
  ["lib", "test/support"]
end

defp elixirc_paths(_env) do
  ["lib"]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# runtime: false / optional: true leave startup of these to the host app —
# NOTE(review): confirm that is the intended integration contract.
{:commanded, ">= 1.0.0 and < 1.2.0", runtime: false},
{:cachex, "~> 3.2.0", optional: true},
{:mix_test_watch, "~> 1.0", only: :dev},
{:faker, "~> 0.13", only: [:test, :dev]},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
]
end
# Both test tasks pass --no-start so the application is not auto-started
# before the suite runs.
defp aliases do
[
test: ["test --no-start"],
"test.watch": ["test.watch --no-start"]
]
end
# Hex package summary shown on hex.pm.
defp description do
"""
Use CommandedUniquenessMiddleware to ensure short-term value uniqueness,
usually during Commanded command dispatch.
"""
end
# ExDoc configuration: the guides/ markdown files are published as a
# GUIDES extra section, with "Getting Started" as the landing page.
defp docs do
[
main: "getting-started",
canonical: "http://hexdocs.pm/commanded_uniqueness_middleware",
source_ref: "v#{@version}",
extra_section: "GUIDES",
extras: [
"guides/Getting Started.md"
],
groups_for_extras: [
Introduction: [
"guides/Getting Started.md"
]
]
]
end
# Hex package metadata: published files, maintainers, license and links.
defp package do
[
files: [
"lib",
"mix.exs",
".formatter.exs",
"README*",
"LICENSE*",
"test"
],
maintainers: ["Vladimir Drobyshevskiy"],
licenses: ["MIT"],
links: %{
"GitHub" => "https://github.com/vheathen/commanded-uniqueness-middleware",
"Docs" => "http://hexdocs.pm/commanded_uniqueness_middleware"
}
]
end
end
| 24.34375 | 82 | 0.580231 |
7396280f6f49dcc5604aed6c79d9c77839628e77 | 2,543 | exs | Elixir | test/mockery/history_test.exs | kianmeng/mockery | ad843783e890000627c51b30b69a115e73617542 | [
"Apache-2.0"
] | 85 | 2017-07-29T22:03:54.000Z | 2022-03-29T09:50:49.000Z | test/mockery/history_test.exs | kianmeng/mockery | ad843783e890000627c51b30b69a115e73617542 | [
"Apache-2.0"
] | 36 | 2017-07-30T10:12:44.000Z | 2021-12-23T12:25:42.000Z | test/mockery/history_test.exs | kianmeng/mockery | ad843783e890000627c51b30b69a115e73617542 | [
"Apache-2.0"
] | 13 | 2017-07-29T09:06:47.000Z | 2021-05-20T13:36:56.000Z | # credo:disable-for-this-file Credo.Check.Design.AliasUsage
defmodule Mockery.HistoryTest do
  use ExUnit.Case
  use Mockery

  import IO.ANSI

  # Runs a mock-call assertion that is expected to fail and returns the
  # message of the raised ExUnit.AssertionError, so tests can inspect the
  # colour-coded call history embedded in it.
  defp failure_message(assertion) do
    error = assert_raise(ExUnit.AssertionError, assertion)
    error.message
  end

  test "failure with too few args is marked in red" do
    enable_history()
    Mockery.Utils.push_call(A, :fun, 1, ["a"])

    message = failure_message(fn -> Mockery.Assertions.assert_called(A, :fun, ["a", "b"]) end)

    assert message =~ "#{red()}[\"a\"]#{white()}"
  end

  test "failure with too many args is marked in red" do
    enable_history()
    Mockery.Utils.push_call(A, :fun, 3, ["a", "b", "c"])

    message = failure_message(fn -> Mockery.Assertions.assert_called(A, :fun, ["a", "b"]) end)

    assert message =~ "#{red()}[\"a\", \"b\", \"c\"]#{white()}"
  end

  test "failure with correct number of args marks matched args in green and unmatched in red" do
    enable_history()
    Mockery.Utils.push_call(A, :fun, 2, ["a", "c"])

    message = failure_message(fn -> Mockery.Assertions.assert_called(A, :fun, ["a", "b"]) end)

    assert message =~ "#{white()}[#{green()}\"a\"#{white()}, #{red()}\"c\"#{white()}]"
  end

  test "failure with correct number of args marks unbound args in green as they always match" do
    enable_history()
    Mockery.Utils.push_call(A, :fun, 2, ["a", "c"])

    message = failure_message(fn -> Mockery.Assertions.assert_called(A, :fun, [_, "b"]) end)

    assert message =~ "#{white()}[#{green()}\"a\"#{white()}, #{red()}\"c\"#{white()}]"
  end

  test "failure with correct number of args marks pinned args according to their correctness" do
    enable_history()
    Mockery.Utils.push_call(A, :fun, 2, ["a", "c"])

    pin_a = "a"
    pin_b = "b"

    message =
      failure_message(fn -> Mockery.Assertions.assert_called(A, :fun, [^pin_a, ^pin_b]) end)

    assert message =~ "#{white()}[#{green()}\"a\"#{white()}, #{red()}\"c\"#{white()}]"
  end

  test "enable_history/0" do
    Mockery.History.enable_history()
    assert Process.get(Mockery.History)
  end

  test "disable_history/0" do
    Mockery.History.disable_history()
    refute Process.get(Mockery.History)
  end

  # TODO remove in v3
  test "enable_history/1" do
    Mockery.History.enable_history(true)
    assert Process.get(Mockery.History)
    Mockery.History.enable_history(false)
    refute Process.get(Mockery.History)
  end
end
| 31.395062 | 96 | 0.651593 |
73962df533fb7765759567f58367d58cbd43234d | 5,787 | ex | Elixir | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/object_conditions.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/object_conditions.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/storage_transfer/lib/google_api/storage_transfer/v1/model/object_conditions.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.StorageTransfer.V1.Model.ObjectConditions do
@moduledoc """
Conditions that determine which objects will be transferred. Applies only
to S3 and Cloud Storage objects.
The "last modification time" refers to the time of the
last change to the object's content or metadata — specifically, this is
the `updated` property of Cloud Storage objects and the `LastModified`
field of S3 objects.
## Attributes
* `excludePrefixes` (*type:* `list(String.t)`, *default:* `nil`) - `exclude_prefixes` must follow the requirements described for
include_prefixes.
The max size of `exclude_prefixes` is 1000.
* `includePrefixes` (*type:* `list(String.t)`, *default:* `nil`) - If `include_prefixes` is specified, objects that satisfy the object
conditions must have names that start with one of the `include_prefixes`
and that do not start with any of the exclude_prefixes. If
`include_prefixes` is not specified, all objects except those that have
names starting with one of the `exclude_prefixes` must satisfy the object
conditions.
Requirements:
* Each include-prefix and exclude-prefix can contain any sequence of
Unicode characters, to a max length of 1024 bytes when UTF8-encoded,
and must not contain Carriage Return or Line Feed characters. Wildcard
matching and regular expression matching are not supported.
* Each include-prefix and exclude-prefix must omit the leading slash.
For example, to include the `requests.gz` object in a transfer from
`s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include
prefix as `logs/y=2015/requests.gz`.
* None of the include-prefix or the exclude-prefix values can be empty,
if specified.
* Each include-prefix must include a distinct portion of the object
namespace. No include-prefix may be a prefix of another
include-prefix.
* Each exclude-prefix must exclude a distinct portion of the object
namespace. No exclude-prefix may be a prefix of another
exclude-prefix.
* If `include_prefixes` is specified, then each exclude-prefix must start
with the value of a path explicitly included by `include_prefixes`.
The max size of `include_prefixes` is 1000.
* `lastModifiedBefore` (*type:* `DateTime.t`, *default:* `nil`) - If specified, only objects with a "last modification time" before this
timestamp and objects that don't have a "last modification time" will be
transferred.
* `lastModifiedSince` (*type:* `DateTime.t`, *default:* `nil`) - If specified, only objects with a "last modification time" on or after
this timestamp and objects that don't have a "last modification time" are
transferred.
The `last_modified_since` and `last_modified_before` fields can be used
together for chunked data processing. For example, consider a script that
processes each day's worth of data at a time. For that you'd set each
of the fields as follows:
* `last_modified_since` to the start of the day
* `last_modified_before` to the end of the day
* `maxTimeElapsedSinceLastModification` (*type:* `String.t`, *default:* `nil`) - If specified, only objects with a "last modification time" on or after
`NOW` - `max_time_elapsed_since_last_modification` and objects that don't
have a "last modification time" are transferred.
For each TransferOperation started by this TransferJob,
`NOW` refers to the start_time of the
`TransferOperation`.
* `minTimeElapsedSinceLastModification` (*type:* `String.t`, *default:* `nil`) - If specified, only objects with a "last modification time" before
`NOW` - `min_time_elapsed_since_last_modification` and objects that don't
have a "last modification time" are transferred.
For each TransferOperation started by this TransferJob, `NOW`
refers to the start_time of the
`TransferOperation`.
"""
use GoogleApi.Gax.ModelBase
# Struct typespec mirrors the field/2 declarations below.
@type t :: %__MODULE__{
:excludePrefixes => list(String.t()),
:includePrefixes => list(String.t()),
:lastModifiedBefore => DateTime.t(),
:lastModifiedSince => DateTime.t(),
:maxTimeElapsedSinceLastModification => String.t(),
:minTimeElapsedSinceLastModification => String.t()
}
# field/2 comes from GoogleApi.Gax.ModelBase; presumably it registers each
# attribute for JSON (de)coding (type: :list = repeated, as: = nested type)
# — confirm against the Gax model base. File header says: auto generated,
# do not edit manually — prefer regenerating over hand edits.
field(:excludePrefixes, type: :list)
field(:includePrefixes, type: :list)
field(:lastModifiedBefore, as: DateTime)
field(:lastModifiedSince, as: DateTime)
field(:maxTimeElapsedSinceLastModification)
field(:minTimeElapsedSinceLastModification)
end
defimpl Poison.Decoder, for: GoogleApi.StorageTransfer.V1.Model.ObjectConditions do
  # Delegates straight to the generated model's own decode/2.
  def decode(value, options),
    do: GoogleApi.StorageTransfer.V1.Model.ObjectConditions.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.StorageTransfer.V1.Model.ObjectConditions do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 44.860465 | 155 | 0.715742 |
73968317f84f01e6c6b05f55928e488aa9a05340 | 3,220 | ex | Elixir | lib/zcash_explorer_web/live/blockchain_info_live.ex | nighthawk-apps/zcash-explorer | 87e6c61d8665f79de9e5efae6ef924b6096091fa | [
"Apache-2.0"
] | 5 | 2021-11-04T20:19:35.000Z | 2022-02-15T06:55:49.000Z | lib/zcash_explorer_web/live/blockchain_info_live.ex | nighthawk-apps/zcash-explorer | 87e6c61d8665f79de9e5efae6ef924b6096091fa | [
"Apache-2.0"
] | 5 | 2021-09-12T01:36:25.000Z | 2022-02-18T07:28:42.000Z | lib/zcash_explorer_web/live/blockchain_info_live.ex | nighthawk-apps/zcash-explorer | 87e6c61d8665f79de9e5efae6ef924b6096091fa | [
"Apache-2.0"
] | 8 | 2021-07-23T17:11:41.000Z | 2022-03-17T17:07:55.000Z | defmodule ZcashExplorerWeb.BlockChainInfoLive do
use Phoenix.LiveView
@impl true
# Renders six dashboard cards (blocks, commitments, difficulty, sprout and
# sapling pool totals, zcashd build). The template indexes @blockchain_info
# with string keys, so the assign must be a map with those keys —
# NOTE(review): mount/3's error branch assigns the string "loading...",
# which likely fails map access here on first render; confirm.
def render(assigns) do
~L"""
<div>
<dl class="mt-5 grid grid-cols-1 gap-5 sm:grid-cols-3">
<div class="px-4 py-5 bg-white shadow rounded-lg overflow-hidden sm:p-6">
<dt class="text-sm font-medium text-gray-500 truncate">
Blocks
</dt>
<dd class="mt-1 text-3xl font-semibold text-gray-900">
<%= @blockchain_info["blocks"] %>
</dd>
</div>
<div class="px-4 py-5 bg-white shadow rounded-lg overflow-hidden sm:p-6">
<dt class="text-sm font-medium text-gray-500 truncate">
Commitments
</dt>
<dd class="mt-1 text-3xl font-semibold text-gray-900">
<%= @blockchain_info["commitments"] %>
</dd>
</div>
<div class="px-4 py-5 bg-white shadow rounded-lg overflow-hidden sm:p-6">
<dt class="text-sm font-medium text-gray-500 truncate">
Difficulty
</dt>
<dd class="mt-1 text-3xl font-semibold text-gray-900">
<%= @blockchain_info["difficulty"] %>
</dd>
</div>
<div class="px-4 py-5 bg-white shadow rounded-lg overflow-hidden sm:p-6">
<dt class="text-sm font-medium text-gray-500 truncate">
Sprout pool
</dt>
<dd class="mt-1 text-3xl font-semibold text-gray-900">
<%= sprout_value(@blockchain_info["valuePools"]) %> ZEC
</dd>
</div>
<div class="px-4 py-5 bg-white shadow rounded-lg overflow-hidden sm:p-6">
<dt class="text-sm font-medium text-gray-500 truncate">
Sapling pool
</dt>
<dd class="mt-1 text-3xl font-semibold text-gray-900">
<%= sapling_value(@blockchain_info["valuePools"]) %> ZEC
</dd>
</div>
<div class="px-4 py-5 bg-white shadow rounded-lg overflow-hidden sm:p-6">
<dt class="text-sm font-medium text-gray-500 truncate">
zcashd version
</dt>
<dd class="mt-1 text-3xl font-semibold text-gray-900">
<%= @blockchain_info["build"] %>
</dd>
</div>
</dl>
</div>
"""
end
@impl true
# Mounts the LiveView: schedules the first refresh tick 5s after a
# connected mount, then seeds :blockchain_info from the :app_cache.
# NOTE(review): on a "metrics" hit, the second match assumes the "info"
# cache entry exists and is shaped %{"build" => _}; a miss crashes the
# mount. NOTE(review): the {:error, _} branch assigns the string
# "loading...", but render/1 uses map access on the assign — confirm.
def mount(_params, _session, socket) do
if connected?(socket), do: Process.send_after(self(), :update, 5000)
case Cachex.get(:app_cache, "metrics") do
{:ok, info} ->
{:ok, %{"build" => build}} = Cachex.get(:app_cache, "info")
info = Map.put(info, "build", build)
{:ok, assign(socket, :blockchain_info, info)}
{:error, _reason} ->
{:ok, assign(socket, :blockchain_info, "loading...")}
end
end
@impl true
# Periodic refresh: re-arms the timer (15s here vs the 5s initial delay in
# mount/3 — NOTE(review): confirm the asymmetry is intentional) and
# re-reads both cache entries. Both matches crash the LiveView process on
# a cache miss; the supervisor/remount path would then take over.
def handle_info(:update, socket) do
Process.send_after(self(), :update, 15000)
{:ok, info} = Cachex.get(:app_cache, "metrics")
{:ok, %{"build" => build}} = Cachex.get(:app_cache, "info")
info = Map.put(info, "build", build)
{:noreply, assign(socket, :blockchain_info, info)}
end
# Chain value of the "sprout" pool from the node's "valuePools" entries
# (nil when absent, matching Map.get/2's default).
defp sprout_value(value_pools) do
  get_value_pools(value_pools)["sprout"]
end
# Chain value of the "sapling" pool from the node's "valuePools" entries
# (nil when absent, matching Map.get/2's default).
defp sapling_value(value_pools) do
  get_value_pools(value_pools)["sapling"]
end
# Indexes the "valuePools" entries by pool id (e.g. "sprout", "sapling"),
# mapping each id to its "chainValue". Map.new/2 builds the map in a
# single pass instead of the two-pass Enum.map/2 |> Map.new/1.
defp get_value_pools(value_pools) do
  Map.new(value_pools, fn %{"id" => name, "chainValue" => value} -> {name, value} end)
end
end
| 30.377358 | 89 | 0.602174 |
739687c0dbbec531977f2d23d6d8c4390159a9eb | 2,295 | exs | Elixir | test/parser/variables_test.exs | boomba-bot/boomba | 285f24edd69736f3663e5b1c6efa4ee4d54a667e | [
"MIT"
] | null | null | null | test/parser/variables_test.exs | boomba-bot/boomba | 285f24edd69736f3663e5b1c6efa4ee4d54a667e | [
"MIT"
] | 12 | 2021-09-29T17:49:53.000Z | 2021-10-09T14:53:48.000Z | test/parser/variables_test.exs | boomba-bot/boomba | 285f24edd69736f3663e5b1c6efa4ee4d54a667e | [
"MIT"
] | null | null | null | defmodule BoombaTest.Parser.Variables do
use ExUnit.Case
alias Boomba.Parser.Variables
doctest(Boomba.Parser.Variables)
# Shared fixture: a Discord-style message whose content is a command
# followed by four arguments.
setup_all do
{:ok, %{message: %{author: %{id: "168706817348730881"}, content: "!cmd arg1 arg2 arg3 arg4"}}}
end
test "sender/source", state do
sender = Variables.variable("sender", state.message)
source = Variables.variable("source", state.message)
assert sender == "<@#{state.message.author.id}>"
assert source == "<@#{state.message.author.id}>"
end
# Random variables: only membership in the candidate set is asserted.
test "random.pick", state do
reply = Variables.variable("random.pick abc def ghi", state.message)
assert reply in ["abc", "def", "ghi"]
end
test "random.pick quoted", state do
reply = Variables.variable("random.pick 'a bc' 'de f', 'g h i'", state.message)
assert reply in ["a bc", "de f", "g h i"]
end
test "random.num", state do
reply = Variables.variable("random.1-5", state.message)
assert reply in ["1", "2", "3", "4", "5"]
end
test "repeat", state do
reply = Variables.variable("repeat 3 test something ", state.message)
assert reply == "test something test something test something"
end
test "touser with args", state do
reply = Variables.variable("touser", state.message)
assert reply == state.message.content |> String.split(" ") |> Enum.at(1)
end
test "touser without args" do
reply = Variables.variable("touser", %{author: %{id: "user_id"}, content: "!cmd"})
assert reply == "<@user_id>"
end
# Time-based variables: assertions only check the shape of the reply,
# not exact clock-dependent values.
test "time.TIMEZONE", state do
reply = Variables.variable("time.CET", state.message)
assert reply |> String.split(":") |> Enum.count() == 2
end
test "time.until", state do
reply = Variables.variable("time.until 19:25", state.message)
assert reply != "{invalid time}"
end
# NOTE(review): performs a real HTTP request to wttr.in — flaky offline.
test "urlfetch", state do
reply = Variables.variable("urlfetch https://wttr.in/antwerp", state.message)
assert reply != "{server error}"
end
# Positional-argument selectors: "N:" = args N..end, "N:M" = range, "N" = one.
test "arg skip", state do
reply = Variables.variable("2:", state.message)
assert reply == "arg2 arg3 arg4"
end
test "arg range", state do
reply = Variables.variable("2:3", state.message)
assert reply == "arg2 arg3"
end
test "arg", state do
reply = Variables.variable("2", state.message)
assert reply == "arg2"
end
end
| 29.805195 | 98 | 0.651416 |
73968b606bebc8bf054223e57d781de8ecb62c09 | 1,038 | exs | Elixir | deps/argon2_elixir/mix.exs | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | deps/argon2_elixir/mix.exs | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | deps/argon2_elixir/mix.exs | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | defmodule Argon2.Mixfile do
use Mix.Project
@version "1.3.3"
@description """
Argon2 password hashing algorithm for Elixir
"""
# Mix project definition; :elixir_make is prepended to the compilers so
# the bundled C sources (see package/0's files list) are built via the
# Makefile before Elixir compilation.
def project do
[
app: :argon2_elixir,
version: @version,
elixir: "~> 1.4",
start_permanent: Mix.env() == :prod,
compilers: [:elixir_make] ++ Mix.compilers(),
description: @description,
package: package(),
source_url: "https://github.com/riverrun/argon2_elixir",
deps: deps()
]
end
# Starts :logger and :crypto together with this application.
def application do
[
extra_applications: [:logger, :crypto]
]
end
# Build-time only dependencies: elixir_make drives the C build,
# ex_doc generates docs in dev.
defp deps do
[
{:elixir_make, "~> 0.4", runtime: false},
{:ex_doc, "~> 0.19", only: :dev, runtime: false}
]
end
# Hex package metadata. The C sources (c_src, argon2/include, argon2/src)
# and Makefiles are shipped so the native code can be compiled on install.
defp package do
[
files: ["lib", "c_src", "argon2/include", "argon2/src", "mix.exs", "Makefile*", "README.md"],
maintainers: ["David Whitlock"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/riverrun/argon2_elixir"}
]
end
end
| 22.565217 | 100 | 0.552023 |
7396ccf1e96df4391af8128f5bebc413adc4905c | 5,435 | exs | Elixir | test/components/form/checkbox_test.exs | leandrocp/surface | 6500a41285de37219b9f71c2a77132c1a84b824a | [
"MIT"
] | null | null | null | test/components/form/checkbox_test.exs | leandrocp/surface | 6500a41285de37219b9f71c2a77132c1a84b824a | [
"MIT"
] | null | null | null | test/components/form/checkbox_test.exs | leandrocp/surface | 6500a41285de37219b9f71c2a77132c1a84b824a | [
"MIT"
] | null | null | null | defmodule Surface.Components.Form.CheckboxTest do
use Surface.ConnCase, async: true
alias Surface.Components.Form
alias Surface.Components.Form.Checkbox
# -- Basic rendering: input id/name/value derived from form + field. -----
test "checkbox" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" />
"""
end
assert html =~ """
<input id="user_admin" name="user[admin]" type="checkbox" value="true">
"""
end
test "checkbox with atom field" do
html =
render_surface do
~H"""
<Checkbox form="user" field={{ :admin }} />
"""
end
assert html =~ """
<input id="user_admin" name="user[admin]" type="checkbox" value="true">
"""
end
# Inside <Form>, the hidden "false" companion input precedes the checkbox.
test "with form context" do
html =
render_surface do
~H"""
<Form for={{ :user }} csrf_token="test">
<Checkbox field={{ :admin }} />
</Form>
"""
end
assert html =~ """
<form action="#" method="post">\
<input name="_csrf_token" type="hidden" value="test">
<input name="user[admin]" type="hidden" value="false">\
<input id="user_admin" name="user[admin]" type="checkbox" value="true">
</form>
"""
end
# -- CSS class prop handling. --------------------------------------------
test "setting the class" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" class="checkbox" />
"""
end
assert html =~ ~r/class="checkbox"/
end
test "setting multiple classes" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" class="checkbox primary" />
"""
end
assert html =~ ~r/class="checkbox primary"/
end
# checked_value overrides the submitted "true" value.
test "passing checked value" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" checked_value="admin"/>
"""
end
assert html =~ """
<input id="user_admin" name="user[admin]" type="checkbox" value="admin">
"""
end
# value controls whether the checked attribute is rendered.
test "setting the value" do
html =
render_surface do
~H"""
<Checkbox value={{ true }}/>
"""
end
assert html =~ ~r/checked/
html =
render_surface do
~H"""
<Checkbox value={{ false }}/>
"""
end
refute html =~ ~r/checked/
end
# hidden_input toggles rendering of the hidden companion input.
test "setting the hidden_input" do
html =
render_surface do
~H"""
<Checkbox hidden_input={{ true }}/>
"""
end
assert html =~ ~r/hidden/
html =
render_surface do
~H"""
<Checkbox hidden_input={{ false }}/>
"""
end
refute html =~ ~r/hidden/
end
# -- Event props are emitted as the corresponding phx-* attributes. -------
test "blur event with parent live view as target" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" blur="my_blur" />
"""
end
assert html =~ """
<input id="user_admin" name="user[admin]" phx-blur="my_blur" type="checkbox" value="true">
"""
end
test "focus event with parent live view as target" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" focus="my_focus" />
"""
end
assert html =~ """
<input id="user_admin" name="user[admin]" phx-focus="my_focus" type="checkbox" value="true">
"""
end
test "capture click event with parent live view as target" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" capture_click="my_click" />
"""
end
assert html =~ """
<input id="user_admin" name="user[admin]" phx-capture-click="my_click" type="checkbox" value="true">
"""
end
test "keydown event with parent live view as target" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" keydown="my_keydown" />
"""
end
assert html =~ """
<input id="user_admin" name="user[admin]" phx-keydown="my_keydown" type="checkbox" value="true">
"""
end
test "keyup event with parent live view as target" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" keyup="my_keyup" />
"""
end
assert html =~ """
<input id="user_admin" name="user[admin]" phx-keyup="my_keyup" type="checkbox" value="true">
"""
end
# opts entries are forwarded verbatim as extra HTML attributes.
test "passing other options" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" opts={{ disabled: "disabled" }} />
"""
end
assert html =~ """
<input disabled="disabled" id="user_admin" name="user[admin]" type="checkbox" value="true">
"""
end
# Explicit id/name props override the form/field-derived defaults.
test "setting id and name through props" do
html =
render_surface do
~H"""
<Checkbox form="user" field="admin" id="is_admin" name="is_admin" />
"""
end
assert html =~ """
<input id="is_admin" name="is_admin" type="checkbox" value="true">
"""
end
end
defmodule Surface.Components.Form.CheckboxConfigTest do
use Surface.ConnCase
alias Surface.Components.Form.Checkbox
# using_config temporarily overrides the component's app config for the
# duration of the block, so the default class shows up on a bare checkbox.
test ":default_class config" do
using_config Checkbox, default_class: "default_class" do
html =
render_surface do
~H"""
<Checkbox />
"""
end
assert html =~ ~r/class="default_class"/
end
end
end
| 22.932489 | 111 | 0.534499 |
7396d9ad41b15b023f9ad98bb95dff65dea76243 | 16,973 | exs | Elixir | test/blue_jet/identity/identity_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | test/blue_jet/identity/identity_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | test/blue_jet/identity/identity_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJet.IdentityTest do
use BlueJet.ContextCase
alias BlueJet.Identity
alias BlueJet.Identity.Service
# Fetches the membership record linking `user` to `account` through the
# Identity service.
def get_account_membership(user, account),
  do: Service.get_account_membership(%{user_id: user.id}, %{account: account})
# Issues an email-verification token for the managed user (stubbing the
# event handler with Mox) and returns just the token value.
def create_email_verification_token(managed_user) do
  expect(EventHandlerMock, :handle_event, fn _, _ -> {:ok, nil} end)

  {:ok, %{email_verification_token: token}} =
    Service.create_email_verification_token(
      %{"user_id" => managed_user.id},
      %{account: managed_user.account}
    )

  token
end
# Issues a password-reset token for the managed user (stubbing the event
# handler with Mox) and returns just the token value.
def create_password_reset_token(managed_user) do
  expect(EventHandlerMock, :handle_event, fn _, _ -> {:ok, nil} end)

  {:ok, %{password_reset_token: token}} =
    Service.create_password_reset_token(
      %{"username" => managed_user.username},
      %{account: managed_user.account}
    )

  token
end
#
# MARK: Account
#
# get_account/1: access is denied without a vas; a guest vas can read.
describe "get_account/1" do
test "when role is not authorized" do
request = %ContextRequest{}
{:error, :access_denied} = Identity.get_account(request)
end
test "when request is valid" do
account = account_fixture()
request = %ContextRequest{
vas: %{account_id: account.id, user_id: nil}
}
{:ok, _} = Identity.get_account(request)
end
end
# update_account/1: customers are denied, blank names surface validation
# errors, and a default managed user can rename the account.
describe "update_account/1" do
test "when role is not authorized" do
account = account_fixture()
user = managed_user_fixture(account, %{role: "customer"})
request = %ContextRequest{
vas: %{account_id: account.id, user_id: user.id}
}
{:error, :access_denied} = Identity.update_account(request)
end
test "when request is invalid" do
account = account_fixture()
user = managed_user_fixture(account)
request = %ContextRequest{
vas: %{account_id: account.id, user_id: user.id},
fields: %{
"name" => ""
}
}
{:error, %{errors: errors}} = Identity.update_account(request)
assert match_keys(errors, [:name])
end
test "when request is valid" do
account = account_fixture()
user = managed_user_fixture(account)
new_name = Faker.Company.name()
request = %ContextRequest{
vas: %{account_id: account.id, user_id: user.id},
fields: %{
"name" => new_name
}
}
{:ok, response} = Identity.update_account(request)
assert response.data.name == new_name
end
end
# reset_account/1: only test accounts may be reset; success emits the
# identity:account.reset.success event (pinned via Mox expectation).
describe "reset_account/1" do
test "when role is not authorized" do
account = account_fixture()
user = managed_user_fixture(account, %{role: "customer"})
request = %ContextRequest{
vas: %{account_id: account.id, user_id: user.id}
}
{:error, :access_denied} = Identity.reset_account(request)
end
test "when request is for live account" do
account = account_fixture()
user = managed_user_fixture(account)
request = %ContextRequest{
vas: %{account_id: account.id, user_id: user.id}
}
{:error, :unprocessable_for_live_account} = Identity.reset_account(request)
end
test "when request is valid" do
account = account_fixture()
user = managed_user_fixture(account)
EventHandlerMock
|> expect(:handle_event, fn(event_name, data) ->
assert event_name == "identity:account.reset.success"
assert data.account.id == account.test_account_id
{:ok, nil}
end)
request = %ContextRequest{
vas: %{account_id: account.test_account_id, user_id: user.id}
}
{:ok, _} = Identity.reset_account(request)
end
end
#
# MARK: User
#
# create_user/1: anonymous signups implicitly create a fresh account
# (account + user + token events), guests join the current account
# (user + token events). Expected events are pinned with ordered Mox
# expectations.
describe "create_user/1" do
test "when role is not authorized" do
account = account_fixture()
user = managed_user_fixture(account, %{role: "customer"})
request = %ContextRequest{
vas: %{account_id: account.id, user_id: user.id}
}
{:error, :access_denied} = Identity.create_user(request)
end
test "when role is anonymous" do
request = %ContextRequest{
vas: %{account_id: nil, user_id: nil},
fields: %{
"name" => Faker.Name.name(),
"email" => Faker.Internet.safe_email(),
"username" => Faker.Internet.safe_email(),
"password" => "test1234"
}
}
EventHandlerMock
|> expect(:handle_event, fn(event_name, data) ->
assert event_name == "identity:account.create.success"
assert match_keys(data, [:account])
{:ok, nil}
end)
|> expect(:handle_event, fn(event_name, data) ->
assert event_name == "identity:user.create.success"
assert match_keys(data, [:user, :account])
{:ok, nil}
end)
|> expect(:handle_event, fn(event_name, data) ->
assert event_name == "identity:email_verification_token.create.success"
assert match_keys(data, [:user])
{:ok, nil}
end)
{:ok, response} = Identity.create_user(request)
assert response.data.name == request.fields["name"]
assert response.data.email == request.fields["email"]
assert response.data.username == request.fields["username"]
end
test "when role is guest" do
account = account_fixture()
request = %ContextRequest{
vas: %{account_id: account.id, user_id: nil},
fields: %{
"name" => Faker.Name.name(),
"email" => Faker.Internet.safe_email(),
"username" => Faker.Internet.user_name(),
"password" => "test1234"
}
}
EventHandlerMock
|> expect(:handle_event, fn(event_name, data) ->
assert event_name == "identity:user.create.success"
assert match_keys(data, [:user, :account])
{:ok, nil}
end)
|> expect(:handle_event, fn(event_name, data) ->
assert event_name == "identity:email_verification_token.create.success"
assert match_keys(data, [:user])
{:ok, nil}
end)
{:ok, response} = Identity.create_user(request)
assert response.data.name == request.fields["name"]
assert response.data.email == request.fields["email"]
assert response.data.username == request.fields["username"]
end
end
# get_user/1: customers read themselves; administrators can read a
# managed user by id.
describe "get_user/1" do
test "when role is not authorized" do
request = %ContextRequest{
vas: %{account_id: nil, user_id: nil}
}
{:error, :access_denied} = Identity.get_user(request)
end
test "when role is customer" do
account = account_fixture()
user = managed_user_fixture(account, %{role: "customer"})
request = %ContextRequest{
vas: %{account_id: account.id, user_id: user.id}
}
{:ok, response} = Identity.get_user(request)
assert response.data.id == user.id
end
test "when role is administrator" do
user = standard_user_fixture()
managed_user = managed_user_fixture(user.default_account)
request = %ContextRequest{
vas: %{account_id: user.default_account.id, user_id: user.id},
identifiers: %{"id" => managed_user.id}
}
{:ok, response} = Identity.get_user(request)
assert response.data.id == managed_user.id
end
end
# update_user/1: customers update themselves, administrators update a
# managed user by id; both paths emit identity:user.update.success.
describe "update_user/1" do
test "when role is not authorized" do
request = %ContextRequest{
vas: %{account_id: nil, user_id: nil}
}
{:error, :access_denied} = Identity.update_user(request)
end
test "when role is customer" do
account = account_fixture()
user = managed_user_fixture(account, %{role: "customer"})
request = %ContextRequest{
vas: %{account_id: account.id, user_id: user.id},
fields: %{"name" => Faker.Name.name()}
}
EventHandlerMock
|> expect(:handle_event, fn(event_name, data) ->
assert event_name == "identity:user.update.success"
assert match_keys(data, [:changeset, :account])
{:ok, nil}
end)
{:ok, response} = Identity.update_user(request)
assert response.data.id == user.id
assert response.data.name == request.fields["name"]
end
test "when role is administrator" do
user = standard_user_fixture()
managed_user = managed_user_fixture(user.default_account)
request = %ContextRequest{
vas: %{account_id: user.default_account.id, user_id: user.id},
identifiers: %{"id" => managed_user.id},
fields: %{"name" => Faker.Name.name()}
}
EventHandlerMock
|> expect(:handle_event, fn(event_name, data) ->
assert event_name == "identity:user.update.success"
assert match_keys(data, [:changeset, :account])
{:ok, nil}
end)
{:ok, response} = Identity.update_user(request)
assert response.data.id == managed_user.id
assert response.data.name == request.fields["name"]
end
end
# delete_user/1: administrators can delete a managed user by id.
describe "delete_user/1" do
test "when role is not authorized" do
request = %ContextRequest{
vas: %{account_id: nil, user_id: nil}
}
{:error, :access_denied} = Identity.delete_user(request)
end
test "when role is administrator" do
user = standard_user_fixture()
managed_user = managed_user_fixture(user.default_account)
request = %ContextRequest{
vas: %{account_id: user.default_account.id, user_id: user.id},
identifiers: %{"id" => managed_user.id}
}
{:ok, _} = Identity.delete_user(request)
end
end
#
# MARK: Account Membership
#
describe "list_account_membership/1" do
  # Anonymous callers (no account/user in the vas) are denied access.
  test "when no params and role is not authorized" do
    request = %ContextRequest{
      vas: %{account_id: nil, user_id: nil}
    }

    {:error, :access_denied} = Identity.list_account_membership(request)
  end

  # A fresh standard user has exactly one membership: their default account.
  test "when no params" do
    user = standard_user_fixture()
    request = %ContextRequest{
      vas: %{account_id: user.default_account.id, user_id: user.id}
    }

    {:ok, response} = Identity.list_account_membership(request)

    assert length(response.data) == 1
  end

  # target=user scopes the listing to the calling user's memberships;
  # for this fixture that is still the same single membership.
  test "when target=user" do
    user = standard_user_fixture()
    request = %ContextRequest{
      vas: %{account_id: user.default_account.id, user_id: user.id},
      params: %{"target" => "user"}
    }

    {:ok, response} = Identity.list_account_membership(request)

    assert length(response.data) == 1
  end
end
describe "update_account_membership/1" do
  test "when role is not authorized" do
    request = %ContextRequest{vas: %{account_id: nil, user_id: nil}}

    assert {:error, :access_denied} = Identity.update_account_membership(request)
  end

  # An administrator can change the role of another member of the account.
  test "when request is valid" do
    user = standard_user_fixture()

    membership =
      user.default_account
      |> managed_user_fixture()
      |> get_account_membership(user.default_account)

    request = %ContextRequest{
      vas: %{account_id: user.default_account.id, user_id: user.id},
      identifiers: %{"id" => membership.id},
      fields: %{"role" => "developer"}
    }

    assert {:ok, response} = Identity.update_account_membership(request)
    assert response.data.role == "developer"
  end
end
#
# MARK: Email Verification Token
#
describe "create_email_verification_token/1" do
  # Anonymous callers are denied.
  test "when role is not authorized" do
    request = %ContextRequest{
      vas: %{account_id: nil, user_id: nil}
    }

    {:error, :access_denied} = Identity.create_email_verification_token(request)
  end

  # A logged-in user gets a token generated on their own record, and a
  # success event carrying the user is published.
  test "when request is valid" do
    user = standard_user_fixture()
    request = %ContextRequest{
      vas: %{account_id: user.default_account.id, user_id: user.id}
    }

    EventHandlerMock
    |> expect(:handle_event, fn(event_name, data) ->
      assert event_name == "identity:email_verification_token.create.success"
      assert match_keys(data, [:user])
      {:ok, nil}
    end)

    {:ok, response} = Identity.create_email_verification_token(request)

    assert response.data.id == user.id
    assert response.data.email_verification_token
  end
end
#
# MARK: Email Verification
#
describe "create_email_verification/1" do
  # An unknown token is reported as a validation error on :token.
  test "when token is invalid" do
    request = %ContextRequest{
      vas: %{account_id: nil, user_id: nil},
      fields: %{
        "token" => "invalid"
      }
    }

    {:error, %{errors: errors}} = Identity.create_email_verification(request)

    assert match_keys(errors, [:token])
  end

  # A token issued for a managed user verifies that user's email and
  # publishes a success event.
  test "when token is valid" do
    account = account_fixture()
    managed_user = managed_user_fixture(account)
    token = create_email_verification_token(managed_user)

    request = %ContextRequest{
      vas: %{account_id: account.id, user_id: nil},
      fields: %{
        "token" => token
      }
    }

    EventHandlerMock
    |> expect(:handle_event, fn(event_name, data) ->
      assert event_name == "identity:email.verify.success"
      assert match_keys(data, [:user])
      {:ok, nil}
    end)

    {:ok, response} = Identity.create_email_verification(request)

    assert response.data.email_verified
  end
end
#
# MARK: Phone Verification Code
#
describe "create_phone_verification_code/1" do
  test "when role is not authorized" do
    request = %ContextRequest{vas: %{account_id: nil, user_id: nil}}

    assert {:error, :access_denied} = Identity.create_phone_verification_code(request)
  end

  # A guest request scoped to an account may create a code; a success
  # event carrying the code is published.
  test "when request is valid" do
    account = account_fixture()

    request = %ContextRequest{
      vas: %{account_id: account.id, user_id: nil},
      fields: %{"phone_number" => "+1234567890"}
    }

    expect(EventHandlerMock, :handle_event, fn event_name, data ->
      assert event_name == "identity:phone_verification_code.create.success"
      assert match_keys(data, [:phone_verification_code])
      {:ok, nil}
    end)

    assert {:ok, _} = Identity.create_phone_verification_code(request)
  end
end
#
# MARK: Password Reset Token
#
describe "create_password_reset_token/1" do
  # An unknown username produces a :username error AND publishes a
  # dedicated error event (so ops can observe failed attempts).
  test "when given username is not found" do
    account = account_fixture()
    request = %ContextRequest{
      vas: %{account_id: account.id, user_id: nil},
      fields: %{"username" => Faker.Internet.safe_email()}
    }

    EventHandlerMock
    |> expect(:handle_event, fn(event_name, data) ->
      assert event_name == "identity:password_reset_token.create.error.username_not_found"
      assert match_keys(data, [:username, :account])
      {:ok, nil}
    end)

    {:error, %{errors: errors}} = Identity.create_password_reset_token(request)

    assert match_keys(errors, [:username])
  end

  # A known managed user gets a reset token and a success event.
  test "when request is valid" do
    account = account_fixture()
    managed_user = managed_user_fixture(account)
    request = %ContextRequest{
      vas: %{account_id: account.id, user_id: nil},
      fields: %{"username" => managed_user.username}
    }

    EventHandlerMock
    |> expect(:handle_event, fn(event_name, data) ->
      assert event_name == "identity:password_reset_token.create.success"
      assert match_keys(data, [:user])
      {:ok, nil}
    end)

    {:ok, response} = Identity.create_password_reset_token(request)

    assert response.data.password_reset_token
  end
end
#
# MARK: Password
#
describe "update_password/1" do
  # A bogus reset token yields a validation error on :reset_token.
  test "when request is invalid" do
    account = account_fixture()

    request = %ContextRequest{
      vas: %{account_id: account.id, user_id: nil},
      identifiers: %{"reset_token" => "invalid"},
      fields: %{"value" => "test1234"}
    }

    assert {:error, %{errors: errors}} = Identity.update_password(request)
    assert match_keys(errors, [:reset_token])
  end

  # A valid token sets the password and clears the token (single use).
  test "when request is valid" do
    account = account_fixture()
    user = managed_user_fixture(account)
    reset_token = create_password_reset_token(user)

    request = %ContextRequest{
      vas: %{account_id: account.id, user_id: nil},
      identifiers: %{"reset_token" => reset_token},
      fields: %{"value" => "test1234"}
    }

    assert {:ok, response} = Identity.update_password(request)
    assert is_nil(response.data.reset_token)
  end
end
#
# MARK: Refresh Token
#
describe "get_refresh_token/1" do
  test "when role is not authorized" do
    request = %ContextRequest{vas: %{account_id: nil, user_id: nil}}

    assert {:error, :access_denied} = Identity.get_refresh_token(request)
  end

  # An administrator can read the refresh token of their account.
  test "when role is administrator" do
    user = standard_user_fixture()

    request = %ContextRequest{
      vas: %{account_id: user.default_account.id, user_id: user.id}
    }

    assert {:ok, response} = Identity.get_refresh_token(request)
    assert response.data.id
  end
end
end
| 27.553571 | 135 | 0.62947 |
7396df156ad569d5bd959890dc15d59de403db6c | 2,815 | ex | Elixir | lib/expug.ex | rstacruz/expug | 683eb34abd9465f42d42cbe359fa9ae848f9ec3d | [
"MIT"
] | 89 | 2016-06-27T07:06:23.000Z | 2022-03-15T18:21:50.000Z | lib/expug.ex | rstacruz/exslim | 683eb34abd9465f42d42cbe359fa9ae848f9ec3d | [
"MIT"
] | 11 | 2016-07-28T17:12:39.000Z | 2021-01-22T02:54:58.000Z | lib/expug.ex | rstacruz/exslim | 683eb34abd9465f42d42cbe359fa9ae848f9ec3d | [
"MIT"
] | 4 | 2016-07-26T15:50:35.000Z | 2019-09-16T22:49:21.000Z | defmodule Expug do
@moduledoc ~S"""
Expug compiles templates to an eex template.
`to_eex/2` turns an Expug source into an EEx template.
iex> source = "div\n | Hello"
iex> Expug.to_eex(source)
{:ok, "<div>\nHello<%= \"\\n\" %></div>\n"}
`to_eex!/2` is the same, and instead returns the result or throws an
`Expug.Error`.
iex> source = "div\n | Hello"
iex> Expug.to_eex!(source)
"<div>\nHello<%= \"\\n\" %></div>\n"
## Errors
`to_eex/2` will give you this in case of an error:
{:error, %{
type: :parse_error,
position: {3, 2}, # line/col
... # other metadata
}}
Internally, the other classes will throw `%{type, position, ...}` which will
be caught here.
## The `raw` helper
Note that it needs `raw/1`, something typically provided by
[Phoenix.HTML](http://devdocs.io/phoenix/phoenix_html/phoenix.html#raw/1).
You don't need Phoenix.HTML however; a binding with `raw/1` would do.
iex> Expug.to_eex!(~s[div(role="alert")= @message])
"<div<%= raw(Expug.Runtime.attr(\"role\", \"alert\")) %>><%= \"\\n\" %><%= @message %><%= \"\\n\" %></div>\n"
## Internal notes
`Expug.to_eex/2` pieces together 4 steps into a pipeline:
- `tokenize/2` - turns source into tokens.
- `compile/2` - turns tokens into an AST.
- `build/2` - turns an AST into a line map.
- `stringify/2` - turns a line map into an EEx template.
## Also see
- `Expug.Tokenizer`
- `Expug.Compiler`
- `Expug.Builder`
- `Expug.Stringifier`
"""
# Each compilation stage lives in its own module; delegate here so the
# whole tokenize -> compile -> build -> stringify pipeline can be driven
# from `Expug` (see `to_eex/2`).
defdelegate tokenize(source, opts), to: Expug.Tokenizer
defdelegate compile(tokens, opts), to: Expug.Compiler
defdelegate build(ast, opts), to: Expug.Builder
defdelegate stringify(lines, opts), to: Expug.Stringifier
@doc ~S"""
Compiles an Expug template to an EEx template.

Returns `{:ok, result}`, where `result` is an EEx string. On error,
returns `{:error, ...}` where the error is the `%{type: _, ...}` map
thrown by one of the pipeline stages.

## Options

All options are optional.

* `attr_helper` (String) - the attribute helper to use (default: `"Expug.Runtime.attr"`)
* `raw_helper` (String) - the raw helper to use (default: `"raw"`)
"""
def to_eex(source, opts \\ []) do
  eex =
    source
    |> tokenize(opts)
    |> compile(opts)
    |> build(opts)
    |> stringify(opts)

  {:ok, eex}
catch
  # Pipeline stages signal failure by throwing a `%{type: _}` map.
  %{type: _} = err -> {:error, err}
end
@doc ~S"""
Compiles an Expug template to an EEx template and raises errors on failure.

Returns the EEx string on success. On failure, it raises `Expug.Error`
(with the original source attached for error reporting).
"""
def to_eex!(source, opts \\ []) do
  case to_eex(source, opts) do
    {:ok, eex} -> eex
    {:error, err} -> raise Expug.Error.exception(Map.put(err, :source, source))
  end
end
end
| 28.15 | 115 | 0.601421 |
7396e4f9df42e98b113d6872a3f6fa3ecd7558bc | 7,670 | ex | Elixir | lib/botmodules/happiness.ex | cantsin/katakuri | f92c733b86ff8ca6f02a444ca773c25be9585d73 | [
"Apache-2.0"
] | 2 | 2015-04-11T04:30:34.000Z | 2015-04-18T19:12:25.000Z | lib/botmodules/happiness.ex | cantsin/katakuri | f92c733b86ff8ca6f02a444ca773c25be9585d73 | [
"Apache-2.0"
] | null | null | null | lib/botmodules/happiness.ex | cantsin/katakuri | f92c733b86ff8ca6f02a444ca773c25be9585d73 | [
"Apache-2.0"
] | null | null | null | # inspired by http://begriffs.com/posts/2015-03-15-tracking-joy-at-work.html
defmodule BotHappiness do
  @behaviour BotModule
  @moduledoc "Track happiness levels (with anonymized data). !happyme to opt in. !happystats for anonymized and aggregated statistics."

  # Runtime strings sent to users verbatim -- edit with care.
  @description "Thank you for opting into our happiness survey!
How this works:
- I will randomly PM you once every three days (on average).
- I will ask you how you are feeling.
- Please respond with a number from 1 (very sad) to 5 (very happy).
The scale looks like this:
1: I'm having a terrible, horrible, no-good, very bad day.
2: Sigh. Today was not one of my better days.
3: Meh, I'm doing OK.
4: I'm doing well.
5: I'm ecstatic and on top of the world!
Please note that all data is anonymized. But don't just take my word for it -- you may verify the code at https://github.com/cantsin/katakuri/blob/master/lib/botmodules/happiness.ex.
To obtain anonymized and aggregated statistics at any time, type in !happystats. To opt out, type in !happyout. Thank you again!
"
  @prompt "Hello, this is your friendly neighborhood bot checking in! How are you feeling today? Please type in a number from 1 (very sad) to 5 (very happy).
(If you no longer wish to receive these prompts, then please opt out by typing in !happyout.)"
  @goodbye "OK! You have opted out of the happiness module (which makes me very sad)."
  @polling_interval 15 # in seconds
  @interval 3 * 24 * 60 * 60 # in seconds

  require Logger

  def doc, do: @moduledoc

  # Create the backing tables, start the timer process, and run an
  # initial scan for due notifications.
  def start do
    HappinessDB.create()
    {:ok, timer_pid} = Task.start(fn -> happy_timer() end)
    Agent.start_link(fn -> %{timer_pid: timer_pid} end, name: :happiness_timer)
    query_for_happiness()
  end

  def process_message(message) do
    # Opt in: subscribe, schedule the first prompt, send the intro text.
    if Regex.match?(~r/^!happyme/, message.text) do
      result = HappinessDB.subscribe(message.user_id, true)
      reply = case result do
        :ok ->
          HappinessDB.add_notification(message.user_id, random_interval())
          @description
        _ ->
          "You are already subscribed."
      end
      Slack.send_direct(message.user_id, reply)
    end

    # Opt out: unsubscribe and drop any pending prompt.
    if Regex.match?(~r/^!happyout/, message.text) do
      result = HappinessDB.subscribe(message.user_id, false)
      reply = case result do
        :ok ->
          HappinessDB.remove_notification(message.user_id)
          @goodbye
        _ ->
          "You are already unsubscribed."
      end
      Slack.send_direct(message.user_id, reply)
    end

    # Aggregated stats. `total` (previously misnamed `count`) is the sum
    # of all recorded 1..5 values, so it is 0 exactly when there is no data.
    if Regex.match?(~r/^!happystats/, message.text) do
      # TODO: make this more sophisticated -- graph the average over time.
      result = HappinessDB.get_happiness_levels()
      total = Enum.reduce(result, 0, fn({val, _}, acc) -> acc + val end)
      average = if total == 0 do
        "not enough data!"
      else
        total / Enum.count(result)
      end
      reply = "Happiness average: #{average}"
      Slack.send_message(message.channel, reply)
    end

    # A DM from a user with a pending prompt: record the reply if it is a
    # valid 1..5 number, then schedule the next prompt.
    if expecting_reply?(message.channel, message.user_id) do
      try do
        case String.to_integer(message.text) do
          x when x > 0 and x <= 5 ->
            HappinessDB.save_reply(x)
            HappinessDB.remove_notification(message.user_id)
            Slack.send_direct(message.user_id, "Thank you!")
            HappinessDB.add_notification(message.user_id, random_interval())
          _ ->
            Slack.send_direct(message.user_id, "Please give me a value between 1 (very sad) and 5 (very happy).")
        end
      rescue
        # Non-numeric text is silently ignored. (The original returned the
        # invalid expression `()` here; use an explicit atom instead.)
        ArgumentError -> :ok
      end
    end
  end

  def stop(_reason) do
  end

  # True when the message arrived over a DM channel AND the user has a
  # pending prompt waiting for an answer.
  defp expecting_reply?(channel, user_id) do
    dms = Slack.get_direct_messages()
    in_private_conversation = Enum.find(dms, fn dm -> dm.id == channel end) |> is_map
    awaiting_reply = HappinessDB.awaiting_reply?(user_id) |> is_map
    in_private_conversation and awaiting_reply
  end

  # Seconds until the earliest pending notification, or the polling
  # interval when nobody has a notification scheduled yet.
  defp next_notification do
    case HappinessDB.get_notifications() do
      [] ->
        @polling_interval # try again later.
      [{_, first_date} | _] = notifications ->
        next_time =
          notifications
          |> Enum.reduce(first_date, fn({_, date}, acc) -> min(date, acc) end)
          |> SlackDatabase.timestamp_to_calendar()
        current_time = :calendar.universal_time()
        {days, {hours, minutes, seconds}} = :calendar.time_difference(next_time, current_time)
        (days * 24 * 60 * 60) + (hours * 60 * 60) + (minutes * 60) + seconds
    end
  end

  # Prompt every user whose notification is due, reschedule each of them,
  # then tell the timer process when to wake up next.
  defp query_for_happiness do
    pending = HappinessDB.get_current_notifications()
    Enum.each(pending, fn {username, _} ->
      HappinessDB.remove_notification(username)
      Slack.send_direct(username, @prompt)
      HappinessDB.add_notification(username, random_interval())
    end)
    next = max(0, next_notification()) + 5 * 60 # add some padding
    timer_pid = Agent.get(:happiness_timer, &Map.get(&1, :timer_pid))
    send(timer_pid, {:refresh, next, self()})
  end

  # Long-lived loop: sleep until the requested wake-up, then rescan.
  defp happy_timer() do
    receive do
      {:refresh, interval, _} ->
        :timer.sleep(interval * 1000)
        query_for_happiness()
        happy_timer()
    end
  end

  # Uniform in [@interval / 2, 3 * @interval / 2); averages one prompt per
  # @interval. `:random` was removed in OTP 26; `:rand` is the replacement.
  defp random_interval do
    :rand.uniform() * @interval + @interval / 2
  end
end
defmodule HappinessDB do
  @behaviour BotModule.DB

  require Logger

  # Schema setup: opt-in state, pending prompts, and anonymized replies.
  def create do
    SlackDatabase.write!("CREATE TABLE IF NOT EXISTS subscriptions(id serial PRIMARY KEY, username CHARACTER(9), subscribed BOOLEAN)")
    SlackDatabase.write!("CREATE TABLE IF NOT EXISTS notifications(id serial PRIMARY KEY, username CHARACTER(9), date TIMESTAMPTZ)")
    SlackDatabase.write!("CREATE TABLE IF NOT EXISTS happiness(id serial PRIMARY KEY, value INTEGER, created TIMESTAMPTZ DEFAULT current_timestamp)")
  end

  # Store one anonymous happiness value (no username on purpose).
  def save_reply(value) do
    SlackDatabase.write!("INSERT INTO happiness(value) VALUES($1)", [value])
  end

  def add_notification(username, interval) do
    # TODO: for some reason, '$2' is not inserted correctly. so do it manually (i know, i know.)
    sql = "INSERT INTO notifications(username, date) VALUES($1, NOW() + interval '#{round interval} seconds')"
    SlackDatabase.write!(sql, [username])
  end

  def remove_notification(username) do
    SlackDatabase.write!("DELETE FROM notifications WHERE username = $1", [username])
  end

  def get_notifications do
    SlackDatabase.query?("SELECT username, date FROM notifications").rows
  end

  def get_current_notifications do
    SlackDatabase.query?("SELECT username, date FROM notifications WHERE date <= NOW()").rows
  end

  # Returns :ok when the subscription state actually changed,
  # :error when it was already in the requested state.
  def subscribe(username, subscribed) do
    case SlackDatabase.query?("SELECT subscribed FROM subscriptions WHERE username = $1", [username]) do
      %{num_rows: 0} ->
        SlackDatabase.write!("INSERT INTO subscriptions(username, subscribed) VALUES($1, $2)", [username, subscribed])
        :ok

      %{rows: [{current} | _]} ->
        SlackDatabase.write!("UPDATE subscriptions SET subscribed = $2 WHERE username = $1", [username, subscribed])
        if current != subscribed, do: :ok, else: :error
    end
  end

  def get_happiness_levels do
    SlackDatabase.query?("SELECT value, created FROM happiness").rows
  end

  # The pending notification date for the user, or nil when none exists.
  def awaiting_reply?(username) do
    case SlackDatabase.query?("SELECT date FROM notifications WHERE username = $1", [username]) do
      %{num_rows: 0} -> nil
      %{rows: [{date} | _]} -> date
    end
  end
end
| 35.674419 | 182 | 0.672751 |
7396faa9a9451d1993d69d3978790e292cad95f8 | 202 | exs | Elixir | test/skillset_web/controllers/page_controller_test.exs | LunarLogic/skillset | fe7eda83506ce45e2a60fd77bd17c3981b2ef254 | [
"MIT"
] | null | null | null | test/skillset_web/controllers/page_controller_test.exs | LunarLogic/skillset | fe7eda83506ce45e2a60fd77bd17c3981b2ef254 | [
"MIT"
] | null | null | null | test/skillset_web/controllers/page_controller_test.exs | LunarLogic/skillset | fe7eda83506ce45e2a60fd77bd17c3981b2ef254 | [
"MIT"
] | null | null | null | defmodule SkillsetWeb.PageControllerTest do
use SkillsetWeb.ConnCase
# Smoke test: the root path renders the default landing page.
test "GET /", %{conn: conn} do
  html =
    conn
    |> get("/")
    |> html_response(200)

  assert html =~ "Welcome to Phoenix!"
end
end
| 22.444444 | 60 | 0.683168 |
7397286f375fd2ff7c4bf62e1a1eeafa52e2dbd1 | 4,252 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/org_chart_spec.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/org_chart_spec.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/org_chart_spec.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.OrgChartSpec do
  @moduledoc """
  An <a href="/chart/interactive/docs/gallery/orgchart">org chart</a>.
  Org charts require a unique set of labels in labels and may
  optionally include parent_labels and tooltips.
  parent_labels contain, for each node, the label identifying the parent
  node. tooltips contain, for each node, an optional tooltip.

  For example, to describe an OrgChart with Alice as the CEO, Bob as the
  President (reporting to Alice) and Cathy as VP of Sales (also reporting to
  Alice), have labels contain "Alice", "Bob", "Cathy",
  parent_labels contain "", "Alice", "Alice" and tooltips contain
  "CEO", "President", "VP Sales".

  ## Attributes

  *   `labels` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the labels for all the nodes in the chart. Labels
      must be unique.
  *   `nodeColor` (*type:* `GoogleApi.Sheets.V4.Model.Color.t`, *default:* `nil`) - The color of the org chart nodes.
  *   `nodeColorStyle` (*type:* `GoogleApi.Sheets.V4.Model.ColorStyle.t`, *default:* `nil`) - The color of the org chart nodes.
      If node_color is also set, this field takes precedence.
  *   `nodeSize` (*type:* `String.t`, *default:* `nil`) - The size of the org chart nodes.
  *   `parentLabels` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the label of the parent for the corresponding node.
      A blank value indicates that the node has no parent and is a top-level
      node.
      This field is optional.
  *   `selectedNodeColor` (*type:* `GoogleApi.Sheets.V4.Model.Color.t`, *default:* `nil`) - The color of the selected org chart nodes.
  *   `selectedNodeColorStyle` (*type:* `GoogleApi.Sheets.V4.Model.ColorStyle.t`, *default:* `nil`) - The color of the selected org chart nodes.
      If selected_node_color is also set, this field takes precedence.
  *   `tooltips` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the tooltip for the corresponding node. A blank value
      results in no tooltip being displayed for the node.
      This field is optional.
  """

  use GoogleApi.Gax.ModelBase

  # Struct type mirrors the OpenAPI schema; every field defaults to nil.
  @type t :: %__MODULE__{
          :labels => GoogleApi.Sheets.V4.Model.ChartData.t(),
          :nodeColor => GoogleApi.Sheets.V4.Model.Color.t(),
          :nodeColorStyle => GoogleApi.Sheets.V4.Model.ColorStyle.t(),
          :nodeSize => String.t(),
          :parentLabels => GoogleApi.Sheets.V4.Model.ChartData.t(),
          :selectedNodeColor => GoogleApi.Sheets.V4.Model.Color.t(),
          :selectedNodeColorStyle => GoogleApi.Sheets.V4.Model.ColorStyle.t(),
          :tooltips => GoogleApi.Sheets.V4.Model.ChartData.t()
        }

  # `field/2` declarations drive ModelBase's JSON (de)serialization;
  # `as:` names the nested model module to decode into.
  field(:labels, as: GoogleApi.Sheets.V4.Model.ChartData)
  field(:nodeColor, as: GoogleApi.Sheets.V4.Model.Color)
  field(:nodeColorStyle, as: GoogleApi.Sheets.V4.Model.ColorStyle)
  field(:nodeSize)
  field(:parentLabels, as: GoogleApi.Sheets.V4.Model.ChartData)
  field(:selectedNodeColor, as: GoogleApi.Sheets.V4.Model.Color)
  field(:selectedNodeColorStyle, as: GoogleApi.Sheets.V4.Model.ColorStyle)
  field(:tooltips, as: GoogleApi.Sheets.V4.Model.ChartData)
end
# Poison protocol glue: delegate (de)serialization to the generated
# ModelBase helpers.
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.OrgChartSpec do
  def decode(value, options), do: GoogleApi.Sheets.V4.Model.OrgChartSpec.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.OrgChartSpec do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 49.44186 | 162 | 0.715663 |
7397303acef238cc470058f1463841402d8702cc | 508 | exs | Elixir | examples/stripe/test/stripe_test.exs | pdgonzalez872/openapi | 3e598c005e2c5fa302d50fbeb88634aac4cd3d50 | [
"Apache-2.0"
] | 22 | 2021-03-07T03:33:50.000Z | 2021-09-08T14:31:03.000Z | examples/stripe/test/stripe_test.exs | pdgonzalez872/openapi | 3e598c005e2c5fa302d50fbeb88634aac4cd3d50 | [
"Apache-2.0"
] | null | null | null | examples/stripe/test/stripe_test.exs | pdgonzalez872/openapi | 3e598c005e2c5fa302d50fbeb88634aac4cd3d50 | [
"Apache-2.0"
] | 1 | 2021-08-13T12:37:00.000Z | 2021-08-13T12:37:00.000Z | defmodule StripeTest do
use ExUnit.Case, async: true
# The generated client module must export the expected API functions.
test "it works" do
  exported = Stripe.__info__(:functions)

  assert {:get_account, 1} in exported
  assert {:get_charges, 1} in exported
end
end
defmodule StripeIntegrationTest do
  use ExUnit.Case, async: true

  # Hits the real Stripe API; requires STRIPE_TOKEN in the environment.
  @moduletag :integration

  test "it works" do
    client = Stripe.new(token: System.fetch_env!("STRIPE_TOKEN"))

    {:ok, %{body: body}} = Stripe.get_account(client)

    assert body["display_name"]
  end
end
| 22.086957 | 59 | 0.71063 |
73973f0026655f989b7b55116234445f78664a6b | 1,733 | ex | Elixir | lib/advent_of_code/day_23_a.ex | rob-brown/AdventOfCode2017 | cb8a56fba4b1999820b3aec4c4f03d7094836484 | [
"MIT"
] | 3 | 2017-12-26T20:51:47.000Z | 2019-05-14T04:59:38.000Z | lib/advent_of_code/day_23_a.ex | rob-brown/AdventOfCode2017 | cb8a56fba4b1999820b3aec4c4f03d7094836484 | [
"MIT"
] | null | null | null | lib/advent_of_code/day_23_a.ex | rob-brown/AdventOfCode2017 | cb8a56fba4b1999820b3aec4c4f03d7094836484 | [
"MIT"
] | null | null | null | defmodule AdventOfCode.Day23A do
# Parse the instruction listing (one instruction per line) and execute it
# starting at pc 0 with an empty register file and a zeroed "mul" counter.
def run(input) do
  commands =
    input
    |> Stream.map(&String.trim/1)
    |> Stream.reject(&(&1 == ""))
    |> Stream.map(fn line -> line |> String.split(" ") |> process() end)
    |> Enum.to_list()

  step(commands, 0, %{"mul" => 0})
end
# Tag each raw instruction with an atom opcode; operands go through
# parse/1 (integer literal or register name).
defp process(["set", a, b]), do: {:set, parse(a), parse(b)}
defp process(["sub", a, b]), do: {:sub, parse(a), parse(b)}
defp process(["mul", a, b]), do: {:mul, parse(a), parse(b)}
defp process(["jnz", a, b]), do: {:jnz, parse(a), parse(b)}
# An operand is either an integer literal or a register name; only a
# fully-numeric token (empty remainder) counts as a literal.
defp parse(token) do
  case Integer.parse(token) do
    {number, ""} -> number
    _ -> token
  end
end
# Fetch-execute loop: run the instruction at `pc` until the program
# counter falls outside the listing (Enum.at yields :done), at which point
# apply_cmd returns the final answer instead of a {pc, env} pair.
defp step(commands, pc, env) do
  case commands |> Enum.at(pc, :done) |> apply_cmd(pc, env) do
    {new_pc, new_env} ->
      step commands, new_pc, new_env
    result ->
      result
  end
end
# Execute one instruction, returning {next_pc, new_env}. Once the program
# counter runs past the last instruction (:done), return the number of
# `mul` invocations -- the puzzle's part-1 answer.
# Fix: the :done clause left `pc` unprefixed, causing an "unused variable"
# compiler warning; it is now `_pc`.
defp apply_cmd(:done, _pc, env), do: Map.get(env, "mul")

defp apply_cmd({:set, a, b}, pc, env) do
  {pc + 1, Map.put(env, a, lookup(b, env))}
end

defp apply_cmd({:sub, a, b}, pc, env) do
  value = lookup(a, env) - lookup(b, env)
  {pc + 1, Map.put(env, a, value)}
end

defp apply_cmd({:mul, a, b}, pc, env) do
  value = lookup(a, env) * lookup(b, env)
  # Store the product AND bump the "mul" counter kept inside env.
  new_env = env |> Map.put(a, value) |> Map.update("mul", 0, (& &1 + 1))
  {pc + 1, new_env}
end

defp apply_cmd({:jnz, a, b}, pc, env) do
  # Relative jump when `a` is non-zero; otherwise fall through.
  if lookup(a, env) != 0 do
    {pc + lookup(b, env), env}
  else
    {pc + 1, env}
  end
end
# Resolve an operand: integer literals evaluate to themselves; register
# names read from the environment, defaulting to 0 when unset.
defp lookup(int, _) when is_integer(int), do: int
defp lookup(name, env) when is_binary(name), do: Map.get(env, name, 0)

# Entry point: read the puzzle input stored next to this file and run it.
def solve do
  "day_23_input.txt"
  |> Path.expand(__DIR__)
  |> File.read!
  |> String.split("\n")
  |> run
end
end
| 25.485294 | 74 | 0.535488 |
7397448fb4fb2a66fc0f2213a3f5afe06def4cbd | 421 | exs | Elixir | test/cookbook_web/views/error_view_test.exs | joerichsen/live_view_cookbook | a211e6bcfaa872df120f186b3d65e0672f410365 | [
"MIT"
] | null | null | null | test/cookbook_web/views/error_view_test.exs | joerichsen/live_view_cookbook | a211e6bcfaa872df120f186b3d65e0672f410365 | [
"MIT"
] | 11 | 2021-12-19T09:07:30.000Z | 2022-01-01T17:54:43.000Z | test/cookbook_web/views/error_view_test.exs | joerichsen/live_view_cookbook | a211e6bcfaa872df120f186b3d65e0672f410365 | [
"MIT"
] | null | null | null | defmodule CookbookWeb.ErrorViewTest do
use CookbookWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(CookbookWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(CookbookWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 28.066667 | 93 | 0.736342 |
73974ef8452768d35706e06a05c2124fa5937432 | 6,441 | ex | Elixir | lib/aws/cloud_hsm_v2.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | [
"Apache-2.0"
] | null | null | null | lib/aws/cloud_hsm_v2.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | [
"Apache-2.0"
] | null | null | null | lib/aws/cloud_hsm_v2.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.CloudHSMV2 do
@moduledoc """
For more information about AWS CloudHSM, see [AWS
CloudHSM](http://aws.amazon.com/cloudhsm/) and the [AWS CloudHSM User
Guide](https://docs.aws.amazon.com/cloudhsm/latest/userguide/).
"""
@doc """
Copy an AWS CloudHSM cluster backup to a different region.
"""
def copy_backup_to_region(client, input, options \\ []),
  do: request(client, "CopyBackupToRegion", input, options)

@doc """
Creates a new AWS CloudHSM cluster.
"""
def create_cluster(client, input, options \\ []),
  do: request(client, "CreateCluster", input, options)

@doc """
Creates a new hardware security module (HSM) in the specified AWS CloudHSM
cluster.
"""
def create_hsm(client, input, options \\ []),
  do: request(client, "CreateHsm", input, options)

@doc """
Deletes a specified AWS CloudHSM backup. A backup can be restored up to 7
days after the DeleteBackup request is made. For more information on
restoring a backup, see `RestoreBackup`.
"""
def delete_backup(client, input, options \\ []),
  do: request(client, "DeleteBackup", input, options)

@doc """
Deletes the specified AWS CloudHSM cluster. Before you can delete a
cluster, you must delete all HSMs in the cluster. To see if the cluster
contains any HSMs, use `DescribeClusters`. To delete an HSM, use
`DeleteHsm`.
"""
def delete_cluster(client, input, options \\ []),
  do: request(client, "DeleteCluster", input, options)

@doc """
Deletes the specified HSM. To specify an HSM, you can use its identifier
(ID), the IP address of the HSM's elastic network interface (ENI), or the
ID of the HSM's ENI. You need to specify only one of these values. To find
these values, use `DescribeClusters`.
"""
def delete_hsm(client, input, options \\ []),
  do: request(client, "DeleteHsm", input, options)

@doc """
Gets information about backups of AWS CloudHSM clusters.

This is a paginated operation, which means that each response might contain
only a subset of all the backups. When the response contains only a subset
of backups, it includes a `NextToken` value. Use this value in a subsequent
`DescribeBackups` request to get more backups. When you receive a response
with no `NextToken` (or an empty or null value), that means there are no
more backups to get.
"""
def describe_backups(client, input, options \\ []),
  do: request(client, "DescribeBackups", input, options)

@doc """
Gets information about AWS CloudHSM clusters.

This is a paginated operation, which means that each response might contain
only a subset of all the clusters. When the response contains only a subset
of clusters, it includes a `NextToken` value. Use this value in a
subsequent `DescribeClusters` request to get more clusters. When you
receive a response with no `NextToken` (or an empty or null value), that
means there are no more clusters to get.
"""
def describe_clusters(client, input, options \\ []),
  do: request(client, "DescribeClusters", input, options)

@doc """
Claims an AWS CloudHSM cluster by submitting the cluster certificate issued
by your issuing certificate authority (CA) and the CA's root certificate.
Before you can claim a cluster, you must sign the cluster's certificate
signing request (CSR) with your issuing CA. To get the cluster's CSR, use
`DescribeClusters`.
"""
def initialize_cluster(client, input, options \\ []),
  do: request(client, "InitializeCluster", input, options)

@doc """
Gets a list of tags for the specified AWS CloudHSM cluster.

This is a paginated operation, which means that each response might contain
only a subset of all the tags. When the response contains only a subset of
tags, it includes a `NextToken` value. Use this value in a subsequent
`ListTags` request to get more tags. When you receive a response with no
`NextToken` (or an empty or null value), that means there are no more tags
to get.
"""
def list_tags(client, input, options \\ []),
  do: request(client, "ListTags", input, options)

@doc """
Restores a specified AWS CloudHSM backup that is in the `PENDING_DELETION`
state. For mor information on deleting a backup, see `DeleteBackup`.
"""
def restore_backup(client, input, options \\ []),
  do: request(client, "RestoreBackup", input, options)

@doc """
Adds or overwrites one or more tags for the specified AWS CloudHSM cluster.
"""
def tag_resource(client, input, options \\ []),
  do: request(client, "TagResource", input, options)

@doc """
Removes the specified tag or tags from the specified AWS CloudHSM cluster.
"""
def untag_resource(client, input, options \\ []),
  do: request(client, "UntagResource", input, options)
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
        | {:error, Poison.Parser.t()}
        | {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
  # All CloudHSMv2 actions go through a single signed JSON-RPC style POST;
  # `action` selects the operation via the X-Amz-Target header.
  client = %{client | service: "cloudhsm"}
  host = build_host("cloudhsmv2", client)
  url = build_url(host, client)

  headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "BaldrApiService.#{action}"}
  ]

  payload = Poison.Encoder.encode(input, %{})
  # SigV4 signing must cover the final header set and exact payload bytes.
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

  case HTTPoison.post(url, payload, headers, options) do
    # 200 with an empty body: success with no decodable payload.
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body, %{}), response}

    # Any non-200 response carries a JSON error document.
    {:ok, %HTTPoison.Response{body: body}} ->
      error = Poison.Parser.parse!(body, %{})
      {:error, error}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end

# "local" region targets a developer machine instead of AWS.
defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end

defp build_url(host, %{:proto => proto, :port => port}) do
  "#{proto}://#{host}:#{port}/"
end
end
| 35.39011 | 77 | 0.685918 |
739766c8af7cece2026fe066c211ef7c11dfbf78 | 3,570 | ex | Elixir | test/example_domain/bank_account/bank_account.ex | amatalai/commanded | 1e62888bdd733741fd08d42d9350282071764b35 | [
"MIT"
] | null | null | null | test/example_domain/bank_account/bank_account.ex | amatalai/commanded | 1e62888bdd733741fd08d42d9350282071764b35 | [
"MIT"
] | null | null | null | test/example_domain/bank_account/bank_account.ex | amatalai/commanded | 1e62888bdd733741fd08d42d9350282071764b35 | [
"MIT"
] | null | null | null | defmodule Commanded.ExampleDomain.BankAccount do
@moduledoc false
defstruct account_number: nil,
balance: 0,
state: nil
alias Commanded.ExampleDomain.BankAccount
defmodule Commands do
defmodule(OpenAccount, do: defstruct([:account_number, :initial_balance]))
defmodule(DepositMoney, do: defstruct([:account_number, :transfer_uuid, :amount]))
defmodule(WithdrawMoney, do: defstruct([:account_number, :transfer_uuid, :amount]))
defmodule(CloseAccount, do: defstruct([:account_number]))
end
defmodule Events do
defmodule(BankAccountOpened, do: defstruct([:account_number, :initial_balance]))
defmodule(MoneyDeposited, do: defstruct([:account_number, :transfer_uuid, :amount, :balance]))
defmodule(MoneyWithdrawn, do: defstruct([:account_number, :transfer_uuid, :amount, :balance]))
defmodule(AccountOverdrawn, do: defstruct([:account_number, :balance]))
defmodule(BankAccountClosed, do: defstruct([:account_number]))
end
alias Commands.{OpenAccount, DepositMoney, WithdrawMoney, CloseAccount}
alias Events.{
BankAccountOpened,
MoneyDeposited,
MoneyWithdrawn,
AccountOverdrawn,
BankAccountClosed
}
def open_account(%BankAccount{state: nil}, %OpenAccount{
account_number: account_number,
initial_balance: initial_balance
})
when is_number(initial_balance) and initial_balance > 0 do
%BankAccountOpened{account_number: account_number, initial_balance: initial_balance}
end
def deposit(%BankAccount{state: :active, balance: balance}, %DepositMoney{
account_number: account_number,
transfer_uuid: transfer_uuid,
amount: amount
})
when is_number(amount) and amount > 0 do
balance = balance + amount
%MoneyDeposited{
account_number: account_number,
transfer_uuid: transfer_uuid,
amount: amount,
balance: balance
}
end
def withdraw(%BankAccount{state: :active, balance: balance}, %WithdrawMoney{
account_number: account_number,
transfer_uuid: transfer_uuid,
amount: amount
})
when is_number(amount) and amount > 0 do
case balance - amount do
balance when balance < 0 ->
[
%MoneyWithdrawn{
account_number: account_number,
transfer_uuid: transfer_uuid,
amount: amount,
balance: balance
},
%AccountOverdrawn{account_number: account_number, balance: balance}
]
balance ->
%MoneyWithdrawn{
account_number: account_number,
transfer_uuid: transfer_uuid,
amount: amount,
balance: balance
}
end
end
def close_account(%BankAccount{state: :active}, %CloseAccount{account_number: account_number}) do
%BankAccountClosed{account_number: account_number}
end
# State mutators
def apply(%BankAccount{} = state, %BankAccountOpened{
account_number: account_number,
initial_balance: initial_balance
}) do
%BankAccount{state | account_number: account_number, balance: initial_balance, state: :active}
end
def apply(%BankAccount{} = state, %MoneyDeposited{balance: balance}),
do: %BankAccount{state | balance: balance}
def apply(%BankAccount{} = state, %MoneyWithdrawn{balance: balance}),
do: %BankAccount{state | balance: balance}
def apply(%BankAccount{} = state, %AccountOverdrawn{}), do: state
def apply(%BankAccount{} = state, %BankAccountClosed{}) do
%BankAccount{state | state: :closed}
end
end
| 32.162162 | 99 | 0.688515 |
7397a32fe8421d079aae0a5ef105284eaf4cd7c2 | 777 | ex | Elixir | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/packet_capture.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/packet_capture.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/model/packet_capture.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Network.Model.PacketCapture do
@moduledoc """
Parameters that define the create packet capture operation.
"""
@derive [Poison.Encoder]
defstruct [
:"properties"
]
@type t :: %__MODULE__{
:"properties" => PacketCaptureParameters
}
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Network.Model.PacketCapture do
import Microsoft.Azure.Management.Network.Deserializer
def decode(value, options) do
value
|> deserialize(:"properties", :struct, Microsoft.Azure.Management.Network.Model.PacketCaptureParameters, options)
end
end
| 27.75 | 117 | 0.752896 |
739818561922b3cb64aa574a8a9f2fa49a925575 | 1,791 | ex | Elixir | clients/chat/lib/google_api/chat/v1/model/list_spaces_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/chat/lib/google_api/chat/v1/model/list_spaces_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/chat/lib/google_api/chat/v1/model/list_spaces_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Chat.V1.Model.ListSpacesResponse do
@moduledoc """
## Attributes
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Continuation token to retrieve the next page of results. It will be empty
for the last page of results. Tokens expire in an hour. An error is thrown
if an expired token is passed.
* `spaces` (*type:* `list(GoogleApi.Chat.V1.Model.Space.t)`, *default:* `nil`) - List of spaces in the requested (or first) page.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:nextPageToken => String.t(),
:spaces => list(GoogleApi.Chat.V1.Model.Space.t())
}
field(:nextPageToken)
field(:spaces, as: GoogleApi.Chat.V1.Model.Space, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Chat.V1.Model.ListSpacesResponse do
def decode(value, options) do
GoogleApi.Chat.V1.Model.ListSpacesResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Chat.V1.Model.ListSpacesResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.442308 | 136 | 0.721943 |
7398198e8fb4c1d4884ede99ff6003c9717145f7 | 3,342 | exs | Elixir | test/sanbase_web/graphql/metric/api_metric_metadata_test.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 1 | 2022-01-30T19:51:39.000Z | 2022-01-30T19:51:39.000Z | test/sanbase_web/graphql/metric/api_metric_metadata_test.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | test/sanbase_web/graphql/metric/api_metric_metadata_test.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | defmodule SanbaseWeb.Graphql.ApiMetricMetadataTest do
use SanbaseWeb.ConnCase, async: false
import Sanbase.Factory, only: [rand_str: 0]
import SanbaseWeb.Graphql.TestHelpers
alias Sanbase.Metric
test "returns data for all available metric", %{conn: conn} do
metrics =
Metric.available_metrics()
|> Enum.shuffle()
|> Enum.take(100)
aggregations = Metric.available_aggregations()
aggregations =
aggregations |> Enum.map(fn aggr -> aggr |> Atom.to_string() |> String.upcase() end)
for metric <- metrics do
%{"data" => %{"getMetric" => %{"metadata" => metadata}}} = get_metric_metadata(conn, metric)
assert metadata["metric"] == metric
assert match?(
%{"metric" => _, "defaultAggregation" => _, "minInterval" => _, "dataType" => _},
metadata
)
assert metadata["humanReadableName"] |> is_binary()
assert metadata["defaultAggregation"] in aggregations
assert metadata["minInterval"] in ["1m", "5m", "1h", "6h", "8h", "1d"]
assert metadata["dataType"] in ["TIMESERIES", "HISTOGRAM", "TABLE"]
assert metadata["isRestricted"] in [true, false]
assert Enum.all?(
metadata["availableSelectors"],
&Enum.member?(
[
"SLUG",
"SLUGS",
"MARKET_SEGMENTS",
"TEXT",
"LABEL",
"OWNER",
"HOLDERS_COUNT",
"SOURCE",
"LABEL_FQN",
"LABEL_FQNS",
"BLOCKCHAIN",
"BLOCKCHAIN_ADDRESS"
],
&1
)
)
assert Enum.all?(
metadata["availableAggregations"],
&Enum.member?(aggregations, &1)
)
assert is_nil(metadata["restrictedFrom"]) or
match?(
%DateTime{},
metadata["restrictedFrom"] |> Sanbase.DateTimeUtils.from_iso8601!()
)
assert is_nil(metadata["restrictedTo"]) or
match?(
%DateTime{},
metadata["restrictedTo"] |> Sanbase.DateTimeUtils.from_iso8601!()
)
end
end
test "returns error for unavailable metric", %{conn: conn} do
rand_metrics = Enum.map(1..20, fn _ -> rand_str() end)
rand_metrics = rand_metrics -- Metric.available_metrics()
# Do not mock the `histogram_data` function because it's the one that rejects
for metric <- rand_metrics do
%{
"errors" => [
%{"message" => error_message}
]
} = get_metric_metadata(conn, metric)
assert error_message == "The metric '#{metric}' is not supported or is mistyped."
end
end
defp get_metric_metadata(conn, metric) do
query = """
{
getMetric(metric: "#{metric}"){
metadata{
minInterval
defaultAggregation
availableAggregations
availableSelectors
dataType
metric
humanReadableName
isRestricted
restrictedFrom
restrictedTo
}
}
}
"""
conn
|> post("/graphql", query_skeleton(query))
|> json_response(200)
end
end
| 28.810345 | 98 | 0.530521 |
739851e486102a4db2bbf7571a333f000149247a | 1,815 | exs | Elixir | mix.exs | bmquinn/cardex | 012f541808dfa1b59418554ba087032f65ca8b71 | [
"Apache-2.0"
] | null | null | null | mix.exs | bmquinn/cardex | 012f541808dfa1b59418554ba087032f65ca8b71 | [
"Apache-2.0"
] | null | null | null | mix.exs | bmquinn/cardex | 012f541808dfa1b59418554ba087032f65ca8b71 | [
"Apache-2.0"
] | null | null | null | defmodule Cardex.MixProject do
use Mix.Project
def project do
[
app: :cardex,
version: "0.1.0",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Cardex.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.6.0-rc.0", override: true},
{:phoenix_html, "~> 3.0"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_view, "~> 0.16.0"},
{:phoenix_live_dashboard, "~> 0.5"},
{:telemetry_metrics, "~> 0.6"},
{:telemetry_poller, "~> 0.5"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:nimble_publisher, "~> 0.1.1"},
{:makeup_elixir, ">= 0.0.0"},
{:makeup_erlang, ">= 0.0.0"},
{:esbuild, "~> 0.2", runtime: Mix.env() == :dev}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "cmd npm install --prefix assets"],
"assets.deploy": ["esbuild default --minify", "phx.digest"]
]
end
end
| 27.5 | 84 | 0.579063 |
73986c8f792361aa346946c7efbbef0da0a69cd2 | 294 | ex | Elixir | lib/hl7/2.4/segments/cti.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/cti.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/segments/cti.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_4.Segments.CTI do
@moduledoc false
require Logger
alias HL7.V2_4.{DataTypes}
use HL7.Segment,
fields: [
segment: nil,
sponsor_study_id: DataTypes.Ei,
study_phase_identifier: DataTypes.Ce,
study_scheduled_time_point: DataTypes.Ce
]
end
| 19.6 | 46 | 0.70068 |
7398771596a4140e2e3285c60191015b2ccfd658 | 531 | ex | Elixir | lib/radiator/media/episode_image.ex | optikfluffel/radiator | b1a1b966296fa6bf123e3a2455009ff52099ace6 | [
"MIT"
] | 1 | 2021-03-02T16:59:40.000Z | 2021-03-02T16:59:40.000Z | lib/radiator/media/episode_image.ex | optikfluffel/radiator | b1a1b966296fa6bf123e3a2455009ff52099ace6 | [
"MIT"
] | null | null | null | lib/radiator/media/episode_image.ex | optikfluffel/radiator | b1a1b966296fa6bf123e3a2455009ff52099ace6 | [
"MIT"
] | null | null | null | defmodule Radiator.Media.EpisodeImage do
use Arc.Definition
use Arc.Ecto.Definition
@versions [:original, :thumbnail]
def filename(version, {_file, _episode}) do
"cover_#{version}"
end
def storage_dir(_version, {_file, episode}) do
"episode/#{episode.id}"
end
def s3_object_headers(_version, {file, _episode}) do
[content_type: MIME.from_path(file.file_name)]
end
def transform(:thumbnail, _) do
{:convert, "-thumbnail 256x256^ -gravity center -extent 256x256 -format png", :png}
end
end
| 23.086957 | 87 | 0.706215 |
73987f23ab106551e586da2bd590e3ce31da61ca | 24,949 | ex | Elixir | lib/mix/lib/mix.ex | fmterrorf/elixir | eafb7b87756179adac5dc2bc11edcb04c1f187cc | [
"Apache-2.0"
] | 1 | 2019-06-27T08:47:13.000Z | 2019-06-27T08:47:13.000Z | lib/mix/lib/mix.ex | fmterrorf/elixir | eafb7b87756179adac5dc2bc11edcb04c1f187cc | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix.ex | fmterrorf/elixir | eafb7b87756179adac5dc2bc11edcb04c1f187cc | [
"Apache-2.0"
] | null | null | null | defmodule Mix do
@moduledoc ~S"""
Mix is a build tool that provides tasks for creating, compiling,
and testing Elixir projects, managing its dependencies, and more.
## Mix.Project
The foundation of Mix is a project. A project can be defined by using
`Mix.Project` in a module, usually placed in a file named `mix.exs`:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0"
]
end
end
See the `Mix.Project` module for detailed documentation on Mix projects.
Once the project is defined, a number of default Mix tasks can be run
directly from the command line:
* `mix compile` - compiles the current project
* `mix test` - runs tests for the given project
* `mix run` - runs a particular command inside the project
Each task has its own options and sometimes specific configuration
to be defined in the `project/0` function. You can use `mix help`
to list all available tasks and `mix help NAME` to show help for
a particular task.
The best way to get started with your first project is by calling
`mix new my_project` from the command line.
## Mix.Task
Tasks are what make Mix extensible.
Projects can extend Mix behaviour by adding their own tasks. For
example, adding the task below inside your project will
make it available to everyone that uses your project:
defmodule Mix.Tasks.Hello do
use Mix.Task
def run(_) do
Mix.shell().info("Hello world")
end
end
The task can now be invoked with `mix hello`.
See the `Mix.Task` behaviour for detailed documentation on Mix tasks.
## Dependencies
Mix also manages your dependencies and integrates nicely with the [Hex package
manager](https://hex.pm).
In order to use dependencies, you need to add a `:deps` key
to your project configuration. We often extract the list of dependencies
into its own function:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
deps: deps()
]
end
defp deps do
[
{:ecto, "~> 2.0"},
{:plug, github: "elixir-lang/plug"}
]
end
end
You can run `mix help deps` to learn more about dependencies in Mix.
## Environments
Mix supports different environments. Environments allow developers
to prepare and organize their project specifically for different
scenarios. By default, Mix provides three environments:
* `:dev` - the default environment
* `:test` - the environment `mix test` runs on
* `:prod` - the environment your dependencies run on
The environment can be changed via the command line by setting
the `MIX_ENV` environment variable, for example:
```bash
$ MIX_ENV=prod mix run server.exs
```
You can also specify that certain dependencies are available only for
certain environments:
{:some_test_dependency, "~> 1.0", only: :test}
The environment can be read via `Mix.env/0`.
## Targets
Besides environments, Mix supports targets. Targets are useful when a
project needs to compile to different architectures and some of the
dependencies are only available to some of them. By default, the target
is `:host` but it can be set via the `MIX_TARGET` environment variable.
The target can be read via `Mix.target/0`.
## Configuration
Mix allows you configure the application environment of your application
and of your dependencies. See the `Application` module to learn more about
the application environment. On this section, we will focus on how to configure
it at two distinct moments: build-time and runtime.
> Note: The application environment is discouraged for libraries. See Elixir's
> [Library Guidelines](https://hexdocs.pm/elixir/library-guidelines.html) for
> more information.
### Build-time configuration
Whenever you invoke a `mix` command, Mix loads the configuration
in `config/config.exs`, if said file exists. It is common for the
`config/config.exs` file itself to import other configuration based
on the current `MIX_ENV`, such as `config/dev.exs`, `config/test.exs`,
and `config/prod.exs`, by calling `Config.import_config/1`:
import Config
import_config "#{config_env()}.exs"
We say `config/config.exs` and all imported files are build-time
configuration as they are evaluated whenever you compile your code.
In other words, if your configuration does something like:
import Config
config :my_app, :secret_key, System.fetch_env!("MY_APP_SECRET_KEY")
The `:secret_key` key under `:my_app` will be computed on the host
machine before your code compiles. This can be an issue if the machine
compiling your code does not have access to all environment variables
used to run your code, as loading the config above will fail due to the
missing environment variable. Luckily, Mix also provides runtime
configuration, which should be preferred and we will see next.
### Runtime configuration
To enable runtime configuration in your release, all you need to do is
to create a file named `config/runtime.exs`:
import Config
config :my_app, :secret_key, System.fetch_env!("MY_APP_SECRET_KEY")
This file is executed whenever your project runs. If you assemble
a release with `mix release`, it also executes every time your release
starts.
## Aliases
Aliases are shortcuts or tasks specific to the current project.
In the [Mix.Task section](#module-mix-task), we have defined a task that would be
available to everyone using our project as a dependency. What if
we wanted the task to only be available for our project? Just
define an alias:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
aliases: aliases()
]
end
defp aliases do
[
c: "compile",
hello: &hello/1
]
end
defp hello(_) do
Mix.shell().info("Hello world")
end
end
In the example above, we have defined two aliases. One is `mix c`
which is a shortcut for `mix compile`. The other is named
`mix hello`, which is the equivalent to the `Mix.Tasks.Hello`
we have defined in the [Mix.Task section](#module-mix-task).
Aliases may also be lists, specifying multiple tasks to be run
consecutively:
[all: [&hello/1, "deps.get --only #{Mix.env()}", "compile"]]
In the example above, we have defined an alias named `mix all`,
that prints "Hello world", then fetches dependencies specific
to the current environment, and compiles the project.
Aliases can also be used to augment existing tasks. Let's suppose
you want to augment `mix clean` to clean another directory Mix does
not know about:
[clean: ["clean", &clean_extra/1]]
Where `&clean_extra/1` would be a function in your `mix.exs`
with extra cleanup logic.
Arguments given to the alias will be appended to the arguments of
the last task in the list. Except when overriding an existing task.
In this case, the arguments will be given to the original task,
in order to preserve semantics. For example, in the `:clean` alias
above, the arguments given to the alias will be passed to "clean"
and not to `clean_extra/1`.
Aliases defined in the current project do not affect its dependencies
and aliases defined in dependencies are not accessible from the
current project.
Aliases can be used very powerfully to also run Elixir scripts and
shell commands, for example:
# priv/hello1.exs
IO.puts("Hello One")
# priv/hello2.exs
IO.puts("Hello Two")
# priv/world.sh
#!/bin/sh
echo "world!"
# mix.exs
defp aliases do
[
some_alias: ["hex.info", "run priv/hello1.exs", "cmd priv/world.sh"]
]
end
In the example above we have created the alias `some_alias` that will
run the task `mix hex.info`, then `mix run` to run an Elixir script,
then `mix cmd` to execute a command line shell script. This shows how
powerful aliases mixed with Mix tasks can be.
Mix tasks are designed to run only once. This prevents the same task
from being executed multiple times. For example, if there are several tasks
depending on `mix compile`, the code will be compiled once. Tasks can
be executed again if they are explicitly reenabled using `Mix.Task.reenable/1`:
another_alias: [
"format --check-formatted priv/hello1.exs",
"cmd priv/world.sh",
fn _ -> Mix.Task.reenable("format") end,
"format --check-formatted priv/hello2.exs"
]
Some tasks are automatically reenabled though, as they are expected to
be invoked multiple times. They are: `mix cmd`, `mix do`, `mix loadconfig`,
`mix profile.cprof`, `mix profile.eprof`, `mix profile.fprof`, `mix run`,
and `mix xref`.
It is worth mentioning that some tasks, such as in the case of the
`mix format` command in the example above, can accept multiple files so it
could be rewritten as:
another_alias: ["format --check-formatted priv/hello1.exs priv/hello2.exs"]
## Environment variables
Several environment variables can be used to modify Mix's behaviour.
Mix responds to the following variables:
* `MIX_ARCHIVES` - specifies the directory into which the archives should be installed
(default: `~/.mix/archives`)
* `MIX_BUILD_ROOT` - sets the root directory where build artifacts
should be written to. For example, "_build". If `MIX_BUILD_PATH` is set, this
option is ignored.
* `MIX_BUILD_PATH` - sets the project `Mix.Project.build_path/0` config. This option
must always point to a subdirectory inside a temporary directory. For instance,
never "/tmp" or "_build" but "_build/PROD" or "/tmp/PROD", as required by Mix
* `MIX_DEPS_PATH` - sets the project `Mix.Project.deps_path/0` config for the current project (default: `deps`)
* `MIX_DEBUG` - outputs debug information about each task before running it
* `MIX_ENV` - specifies which environment should be used. See [Environments](#module-environments)
* `MIX_TARGET` - specifies which target should be used. See [Targets](#module-targets)
* `MIX_EXS` - changes the full path to the `mix.exs` file
* `MIX_HOME` - path to Mix's home directory, stores configuration files and scripts used by Mix
(default: `~/.mix`)
* `MIX_INSTALL_DIR` - (since v1.12.0) specifies directory where `Mix.install/2` keeps
install cache
* `MIX_INSTALL_FORCE` - (since v1.13.0) runs `Mix.install/2` with empty install cache
* `MIX_PATH` - appends extra code paths
* `MIX_QUIET` - does not print information messages to the terminal
* `MIX_REBAR3` - path to rebar3 command that overrides the one Mix installs
(default: `~/.mix/rebar3`)
* `MIX_XDG` - asks Mix to follow the [XDG Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
for its home directory and configuration files. This behaviour needs to
be opt-in due to backwards compatibility. `MIX_HOME` has higher preference
than `MIX_XDG`. If none of the variables are set, the default directory
`~/.mix` will be used
Environment variables that are not meant to hold a value (and act basically as
flags) should be set to either `1` or `true`, for example:
$ MIX_DEBUG=1 mix compile
"""
@mix_install_project __MODULE__.InstallProject
use Application
import Kernel, except: [raise: 2]
@doc false
def start do
{:ok, _} = Application.ensure_all_started(:mix)
:ok
end
@doc false
def start(_type, []) do
children = [Mix.State, Mix.TasksServer, Mix.ProjectStack]
opts = [strategy: :one_for_one, name: Mix.Supervisor, max_restarts: 0]
Supervisor.start_link(children, opts)
end
@doc """
Returns the current Mix environment.
This function should not be used at runtime in application code (as opposed
to infrastructure and build code like Mix tasks). Mix is a build tool and may
not be available after the code is compiled (for example in a release).
To differentiate the program behavior depending on the environment, it is
recommended to use application environment through `Application.get_env/3`.
Proper configuration can be set in config files, often per-environment
(see the `Config` module for more information).
"""
@spec env() :: atom()
def env do
# env is not available on bootstrapping, so set a :dev default
Mix.State.get(:env, :dev)
end
@doc """
Changes the current Mix environment to `env`.
Be careful when invoking this function as any project
configuration won't be reloaded.
This function should not be used at runtime in application code
(see `env/0` for more information).
"""
@spec env(atom()) :: :ok
def env(env) when is_atom(env) do
Mix.State.put(:env, env)
end
@doc """
Returns the Mix target.
"""
@spec target() :: atom()
def target do
# target is not available on bootstrapping, so set a :host default
Mix.State.get(:target, :host)
end
@doc """
Changes the current Mix target to `target`.
Be careful when invoking this function as any project
configuration won't be reloaded.
"""
@spec target(atom()) :: :ok
def target(target) when is_atom(target) do
Mix.State.put(:target, target)
end
@doc """
Returns the default compilers used by Mix.
It can be used in your `mix.exs` to prepend or
append new compilers to Mix:
def project do
[compilers: Mix.compilers() ++ [:foo, :bar]]
end
"""
@spec compilers() :: [atom()]
def compilers do
[:yecc, :leex, :erlang, :elixir, :app]
end
@doc """
Returns the current shell.
`shell/0` can be used as a wrapper for the current shell. It contains
conveniences for requesting information from the user, printing to the
shell and so forth. The Mix shell is swappable (see `shell/1`), allowing
developers to use a test shell that simply sends messages to the current
process instead of performing IO (see `Mix.Shell.Process`).
By default, this returns `Mix.Shell.IO`.
## Examples
Mix.shell().info("Preparing to do something dangerous...")
if Mix.shell().yes?("Are you sure?") do
# do something dangerous
end
"""
@spec shell() :: module
def shell do
Mix.State.get(:shell, Mix.Shell.IO)
end
@doc """
Sets the current shell.
As an argument you may pass `Mix.Shell.IO`, `Mix.Shell.Process`,
`Mix.Shell.Quiet`, or any module that implements the `Mix.Shell`
behaviour.
After calling this function, `shell` becomes the shell that is
returned by `shell/0`.
## Examples
iex> Mix.shell(Mix.Shell.IO)
:ok
You can use `shell/0` and `shell/1` to temporarily switch shells,
for example, if you want to run a Mix Task that normally produces
a lot of output:
shell = Mix.shell()
Mix.shell(Mix.Shell.Quiet)
try do
Mix.Task.run("noisy.task")
after
Mix.shell(shell)
end
"""
@spec shell(module) :: :ok
def shell(shell) do
Mix.State.put(:shell, shell)
end
@doc """
Returns `true` if Mix is in debug mode, `false` otherwise.
"""
@spec debug?() :: boolean()
def debug? do
Mix.State.get(:debug, false)
end
@doc """
Sets Mix debug mode.
"""
@spec debug(boolean()) :: :ok
def debug(debug) when is_boolean(debug) do
Mix.State.put(:debug, debug)
end
@doc """
Raises a Mix error that is nicely formatted, defaulting to exit status `1`.
"""
@spec raise(binary) :: no_return
def raise(message) do
__MODULE__.raise(message, exit_status: 1)
end
@doc """
Raises a Mix error that is nicely formatted.
## Options
* `:exit_status` - defines exit status, defaults to `1`
"""
@doc since: "1.12.3"
@spec raise(binary, exit_status: non_neg_integer()) :: no_return
def raise(message, opts) when is_binary(message) and is_list(opts) do
status =
opts[:exit_status] ||
if exit_code = opts[:exit_code] do
IO.warn(":exit_code is deprecated, use :exit_status instead")
exit_code
else
1
end
Kernel.raise(Mix.Error, mix: status, message: message)
end
@doc """
The path for local archives or escripts.
"""
@doc since: "1.10.0"
@spec path_for(:archives | :escripts) :: String.t()
def path_for(:archives) do
System.get_env("MIX_ARCHIVES") || Path.join(Mix.Utils.mix_home(), "archives")
end
def path_for(:escripts) do
Path.join(Mix.Utils.mix_home(), "escripts")
end
@doc """
Installs and starts dependencies.
The given `deps` should be in the same format as defined in a regular Mix
project. See `mix help deps` for more information. As a shortcut, an atom
can be given as dependency to mean the latest version. In other words,
specifying `:decimal` is the same as `{:decimal, ">= 0.0.0"}`.
After each successful installation, a given set of dependencies is cached
so starting another VM and calling `Mix.install/2` with the same dependencies
will avoid unnecessary downloads and compilations. The location of the cache
directory can be controlled using the `MIX_INSTALL_DIR` environment variable.
This function can only be called outside of a Mix project and only with the
same dependencies in the given VM.
**Note:** this feature is currently experimental and it may change
in future releases.
## Options
* `:force` - if `true`, runs with empty install cache. This is useful when you want
to update your dependencies or your install got into an inconsistent state.
To use this option, you can also set the `MIX_INSTALL_FORCE` environment variable.
(Default: `false`)
* `:verbose` - if `true`, prints additional debugging information
(Default: `false`)
* `:consolidate_protocols` - if `true`, runs protocol
consolidation via the `mix compile.protocols` task (Default: `true`)
* `:elixir` - if set, ensures the current Elixir version matches the given
version requirement (Default: `nil`)
* `:config` (since v1.13.0) - a keyword list of keyword lists with application
configuration to be set before the apps loaded. The configuration is part of
the `Mix.install/2` cache, so different configurations will lead to different
apps
* `:system_env` (since v1.13.0) - a list or a map of system environment variable
names as binary keys and their respective values as binaries. The system environment
is made part of the `Mix.install/2` cache, so different configurations will lead
to different apps
## Examples
To install `:decimal` and `:jason`:
Mix.install([
:decimal,
{:jason, "~> 1.0"}
])
Using `:nx`, `:exla`, and configure the underlying applications
and environment variables:
Mix.install(
[:nx, :exla],
config: [
nx: [default_backend: EXLA]
],
system_env: [
{"XLA_TARGET", "cuda111"}
]
)
## Limitations
There is one limitation to `Mix.install/2`, which is actually an Elixir
behaviour. If you are installing a dependency that defines a struct or
macro, you cannot use the struct or macro immediately after the install
call. For example, this won't work:
Mix.install([:decimal])
%Decimal{} = Decimal.new(42)
That's because Elixir first expands all structs and all macros, and then
it executes the code. This means that, by the time Elixir tries to expand
the `%Decimal{}` struct, the dependency has not been installed yet.
Luckily this has a straightforward solution, which is move the code to
inside a module:
Mix.install([:decimal])
defmodule Script do
def run do
%Decimal{} = Decimal.new(42)
end
end
Script.run()
The contents inside `defmodule` will only be expanded and executed
after `Mix.install/2` runs, which means that any struct, macros,
and imports will be correctly handled.
"""
@doc since: "1.12.0"
def install(deps, opts \\ [])
def install(deps, opts) when is_list(deps) and is_list(opts) do
Mix.start()
# Mix.install/2 manages its own throwaway project, so it must not run
# inside an existing Mix project.
if Mix.Project.get() do
Mix.raise("Mix.install/2 cannot be used inside a Mix project")
end
# Optionally enforce the :elixir version requirement before doing any work.
elixir_requirement = opts[:elixir]
elixir_version = System.version()
if !!elixir_requirement and not Version.match?(elixir_version, elixir_requirement) do
Mix.raise(
"Mix.install/2 declared it supports only Elixir #{elixir_requirement} " <>
"but you're running on Elixir #{elixir_version}"
)
end
# Normalize the dependency list: bare atoms get a catch-all requirement,
# and :path options are expanded to absolute paths.
deps =
Enum.map(deps, fn
dep when is_atom(dep) ->
{dep, ">= 0.0.0"}
{app, opts} when is_atom(app) and is_list(opts) ->
{app, maybe_expand_path_dep(opts)}
{app, requirement, opts} when is_atom(app) and is_binary(requirement) and is_list(opts) ->
{app, requirement, maybe_expand_path_dep(opts)}
other ->
other
end)
config = Keyword.get(opts, :config, [])
system_env = Keyword.get(opts, :system_env, [])
consolidate_protocols? = Keyword.get(opts, :consolidate_protocols, true)
# Cache id: a hash of everything that affects the build, so any change in
# deps/config/env/protocol consolidation yields a distinct install dir.
id =
{deps, config, system_env, consolidate_protocols?}
|> :erlang.term_to_binary()
|> :erlang.md5()
|> Base.encode16(case: :lower)
force? = System.get_env("MIX_INSTALL_FORCE") in ["1", "true"] or !!opts[:force]
case Mix.State.get(:installed) do
nil ->
# First install in this VM: apply configuration and environment, then
# fetch and compile the deps inside the cached install directory.
Application.put_all_env(config, persistent: true)
System.put_env(system_env)
installs_root =
System.get_env("MIX_INSTALL_DIR") || Path.join(Mix.Utils.mix_cache(), "installs")
# The cache is partitioned per Elixir/ERTS version so BEAM files never
# mix across incompatible runtimes.
version = "elixir-#{System.version()}-erts-#{:erlang.system_info(:version)}"
dir = Path.join([installs_root, version, id])
if opts[:verbose] do
Mix.shell().info("Mix.install/2 using #{dir}")
end
if force? do
File.rm_rf!(dir)
end
# Note: `config` is rebound here; from now on it is the in-memory
# project definition, not the application config from the options.
config = [
version: "0.1.0",
build_embedded: false,
build_per_environment: true,
build_path: "_build",
lockfile: "mix.lock",
deps_path: "deps",
deps: deps,
app: :mix_install,
erlc_paths: ["src"],
elixirc_paths: ["lib"],
compilers: [],
consolidate_protocols: consolidate_protocols?
]
started_apps = Application.started_applications()
:ok = Mix.Local.append_archives()
:ok = Mix.ProjectStack.push(@mix_install_project, config, "nofile")
build_dir = Path.join(dir, "_build")
try do
# Only fetch deps when there is no cached build for this id yet.
run_deps? = not File.dir?(build_dir)
File.mkdir_p!(dir)
File.cd!(dir, fn ->
if run_deps? do
Mix.Task.rerun("deps.get")
end
Mix.Task.rerun("deps.loadpaths")
# Hex and SSL can use a good amount of memory after the registry fetching,
# so we stop any app started during deps resolution.
stop_apps(Application.started_applications() -- started_apps)
Mix.Task.rerun("compile")
end)
for app <- Mix.Project.deps_apps() do
Application.ensure_all_started(app)
end
Mix.State.put(:installed, id)
:ok
after
# Always pop the in-memory project, even when the install fails.
Mix.ProjectStack.pop()
end
^id when not force? ->
# Same dependency set as a previous call in this VM: nothing to do.
:ok
_ ->
Mix.raise("Mix.install/2 can only be called with the same dependencies in the given VM")
end
end
@doc """
Returns whether `Mix.install/2` was called in the current node.
"""
@doc since: "1.13.0"
def installed? do
  not is_nil(Mix.State.get(:installed))
end
# Stops the given applications (as returned by Application.started_applications/0),
# temporarily silencing the :application_controller exit reports they emit.
defp stop_apps([]), do: :ok
defp stop_apps(apps) do
# Install a primary logger filter so each Application.stop/1 below does not
# log a noisy "application exited: stopped" report.
:logger.add_primary_filter(:silence_app_exit, {&silence_app_exit/2, []})
Enum.each(apps, fn {app, _, _} -> Application.stop(app) end)
:logger.remove_primary_filter(:silence_app_exit)
:ok
end
# Logger primary filter: drops only application_controller reports for apps
# that exited with reason :stopped (i.e. the apps we stopped on purpose).
# Every other log event is passed through untouched.
defp silence_app_exit(
%{
msg:
{:report,
%{
label: {:application_controller, :exit},
report: [application: _, exited: :stopped] ++ _
}}
},
_extra
) do
# :stop tells :logger to discard this event.
:stop
end
defp silence_app_exit(_message, _extra) do
# :ignore lets the event continue through the remaining filters/handlers.
:ignore
end
# Expands a relative :path dependency option to an absolute path, leaving
# option lists without :path untouched.
defp maybe_expand_path_dep(opts) do
  case Keyword.fetch(opts, :path) do
    {:ok, _path} -> Keyword.update!(opts, :path, &Path.expand/1)
    :error -> opts
  end
end
# True when the currently pushed project is the synthetic Mix.install/2
# project (identified by the @mix_install_project marker module).
@doc false
def install?, do: Mix.Project.get() == @mix_install_project
end
| 31.38239 | 148 | 0.664516 |
739937e3a1420f37979490b0803fc0e87e76facf | 433 | ex | Elixir | apps/tai/lib/tai/venue_adapters/huobi/products.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 276 | 2018-01-16T06:36:06.000Z | 2021-03-20T21:48:01.000Z | apps/tai/lib/tai/venue_adapters/huobi/products.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/tai/lib/tai/venue_adapters/huobi/products.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 43 | 2018-06-09T09:54:51.000Z | 2021-03-07T07:35:17.000Z | defmodule Tai.VenueAdapters.Huobi.Products do
alias ExHuobi.Futures
# Fetches Huobi futures contracts and maps each instrument into a Tai
# product for the given venue. Any error from the API call is returned as-is.
def products(venue_id) do
  with {:ok, future_instruments} <- Futures.Contracts.get() do
    products =
      Enum.map(future_instruments, &Tai.VenueAdapters.Huobi.Product.build(&1, venue_id))

    {:ok, products}
  end
end
# Symbol normalization lives in the Product module; delegate so both modules
# agree on venue symbol naming.
defdelegate to_symbol(instrument_id), to: Tai.VenueAdapters.Huobi.Product
end
| 25.470588 | 92 | 0.720554 |
739943ede311467a5c86fc33d0034cea90846a2e | 336 | ex | Elixir | lib/glimesh_web/live/homepage_live.ex | wolfcomp/glimesh.tv | 3953e07946aabe85fe90d9d0f36df833b22d262a | [
"MIT"
] | null | null | null | lib/glimesh_web/live/homepage_live.ex | wolfcomp/glimesh.tv | 3953e07946aabe85fe90d9d0f36df833b22d262a | [
"MIT"
] | null | null | null | lib/glimesh_web/live/homepage_live.ex | wolfcomp/glimesh.tv | 3953e07946aabe85fe90d9d0f36df833b22d262a | [
"MIT"
] | null | null | null | defmodule GlimeshWeb.HomepageLive do
use GlimeshWeb, :live_view
@impl true
def mount(_params, session, socket) do
  # A locale stored in the session overrides the English default.
  locale = session["locale"]
  if locale, do: Gettext.put_locale(locale)

  {:ok, assign(socket, :page_title, "Glimesh")}
end
end
| 28 | 83 | 0.714286 |
739947df63a9b91d66a3cf8b16456344a0bdde32 | 71 | ex | Elixir | data/web/web/views/layout_view.ex | lydiadwyer/trains_elixir | 16da18d4582307f4967b6cce7320e9aa08a849c3 | [
"Apache-2.0"
] | null | null | null | data/web/web/views/layout_view.ex | lydiadwyer/trains_elixir | 16da18d4582307f4967b6cce7320e9aa08a849c3 | [
"Apache-2.0"
] | null | null | null | data/web/web/views/layout_view.ex | lydiadwyer/trains_elixir | 16da18d4582307f4967b6cce7320e9aa08a849c3 | [
"Apache-2.0"
] | null | null | null | defmodule TrainsElixir.LayoutView do
use TrainsElixir.Web, :view
end
| 17.75 | 36 | 0.816901 |
7399621c7d5056befd8536ddbe63640f22d7467d | 187 | exs | Elixir | chat-api/test/controllers/page_controller_test.exs | antonpetkoff/smolltolk | 64c6d4a52e7624302ad87a072fc108fe4c1382ec | [
"MIT"
] | null | null | null | chat-api/test/controllers/page_controller_test.exs | antonpetkoff/smolltolk | 64c6d4a52e7624302ad87a072fc108fe4c1382ec | [
"MIT"
] | 3 | 2018-01-23T10:53:08.000Z | 2018-01-23T10:58:23.000Z | chat-api/test/controllers/page_controller_test.exs | antonpetkoff/smolltolk | 64c6d4a52e7624302ad87a072fc108fe4c1382ec | [
"MIT"
] | null | null | null | defmodule Chat.PageControllerTest do
use Chat.ConnCase
# Smoke test: GET / responds with 200 and renders the default Phoenix
# welcome page.
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 20.777778 | 60 | 0.663102 |
73996287998283132a2a216503f32ea0dfe9f4ae | 785 | ex | Elixir | lib/populate/utils.ex | thusfresh/populate | 2fd05244ff76e678b0608eabd029334f4bbcf99a | [
"BSD-3-Clause"
] | null | null | null | lib/populate/utils.ex | thusfresh/populate | 2fd05244ff76e678b0608eabd029334f4bbcf99a | [
"BSD-3-Clause"
] | null | null | null | lib/populate/utils.ex | thusfresh/populate | 2fd05244ff76e678b0608eabd029334f4bbcf99a | [
"BSD-3-Clause"
] | null | null | null | defmodule Populate.Utils do
@moduledoc """
Utilities for the populate library.
"""
@doc """
Seeds the process-local random number generator.

Accepts the three-integer tuple returned by `:erlang.timestamp/0`
(the default) or any seed accepted by `:rand.seed/2`.

NB: the seed is fixed between Erlang VM runs -- for different random
values, this must be called.
"""
def reseed(seed \\ :erlang.timestamp()) do
  # The :random module was deprecated in OTP 19; :rand is its successor.
  :rand.seed(:exsss, seed)
end

@doc """
Creates a random string of the provided length, built from the list of
character codes provided (printable ASCII `0`..`z` by default).

NB: for a new sequence of random strings, `reseed/1` must be called first.
"""
def rando(length \\ 10, range \\ Enum.to_list(48..122), acc \\ [])

def rando(0, _range, acc) do
  :erlang.list_to_binary(acc)
end

def rando(length, range, acc) do
  # Pick a random 1-based index into the candidate character list.
  n = :rand.uniform(length(range))
  rando(length - 1, range, [Enum.at(range, n - 1) | acc])
end
end
| 24.53125 | 74 | 0.650955 |
7399dcd7eccdc038b4792f702f8fab6a3f2a43f2 | 801 | ex | Elixir | lib/ex_polygon/rest/market_holidays.ex | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | lib/ex_polygon/rest/market_holidays.ex | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | lib/ex_polygon/rest/market_holidays.ex | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | defmodule ExPolygon.Rest.MarketHolidays do
@moduledoc """
Returns a call to "Market Holidays" Polygon.io
"""
@type holiday :: ExPolygon.MarketHoliday.t()
@type api_key :: ExPolygon.Rest.HTTPClient.api_key()
@type shared_error_reasons :: ExPolygon.Rest.HTTPClient.shared_error_reasons()
@path "/v1/marketstatus/upcoming"
@spec query(api_key) :: {:ok, holiday} | {:error, shared_error_reasons}
def query(api_key) do
  # Successful responses are parsed into structs; any error tuple from the
  # HTTP client is returned unchanged.
  case ExPolygon.Rest.HTTPClient.get(@path, %{}, api_key) do
    {:ok, data} -> parse_response(data)
    error -> error
  end
end
# Converts each raw holiday map into an ExPolygon.MarketHoliday struct.
defp parse_response(data) do
  results =
    for entry <- data do
      Mapail.map_to_struct(entry, ExPolygon.MarketHoliday, transformations: [:snake_case])
    end

  holidays = Enum.map(results, fn {:ok, holiday} -> holiday end)
  {:ok, holidays}
end
end
| 26.7 | 90 | 0.666667 |
7399f30e5076521ef5e346a6f9302661a91e7b93 | 5,608 | ex | Elixir | lib/plug_rails_cookie_session_store.ex | RudolfMan/plug_rails_cookie_session_store | 4c1e7b7f430cf90cbde6269feb45f9c2dde67f63 | [
"MIT"
] | 97 | 2015-01-12T11:37:25.000Z | 2021-12-28T06:20:18.000Z | lib/plug_rails_cookie_session_store.ex | RudolfMan/plug_rails_cookie_session_store | 4c1e7b7f430cf90cbde6269feb45f9c2dde67f63 | [
"MIT"
] | 18 | 2015-01-10T05:45:57.000Z | 2022-02-03T09:54:21.000Z | lib/plug_rails_cookie_session_store.ex | RudolfMan/plug_rails_cookie_session_store | 4c1e7b7f430cf90cbde6269feb45f9c2dde67f63 | [
"MIT"
] | 27 | 2015-01-10T00:15:07.000Z | 2021-06-29T15:22:41.000Z | defmodule PlugRailsCookieSessionStore do
@moduledoc """
Stores the session in a cookie.
This cookie store is based on `Plug.Crypto.MessageVerifier`
and `Plug.Crypto.MessageEncryptor`, which sign (and optionally encrypt)
each cookie to ensure they can't be read nor tampered with.
Since this store uses crypto features, it requires you to
set the `:secret_key_base` field in your connection. This
can be easily achieved with a plug:
plug :put_secret_key_base
def put_secret_key_base(conn, _) do
put_in conn.secret_key_base, "-- LONG STRING WITH AT LEAST 64 BYTES --"
end
## Options
* `:encrypt` - specify whether to encrypt cookies, defaults to true.
When this option is false, the cookie is still signed, meaning it
can't be tempered with but its contents can be read;
* `:encryption_salt` - a salt used with `conn.secret_key_base` to generate
a key for encrypting/decrypting a cookie;
* `:signing_salt` - a salt used with `conn.secret_key_base` to generate a
key for signing/verifying a cookie;
* `:key_iterations` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 1000;
* `:key_length` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 32;
* `:key_digest` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to `:sha256`;
* `:serializer` - cookie serializer module that defines `encode/1` and
`decode/1` returning an `{:ok, value}` tuple. Defaults to
`:external_term_format`.
## Examples
# Use the session plug with the cookie store
plug Plug.Session, store: PlugRailsCookieSessionStore,
key: "_my_app_session",
encryption_salt: "cookie store encryption salt",
signing_salt: "cookie store signing salt",
key_length: 64,
serializer: Poison
"""
@behaviour Plug.Session.Store
alias Plug.Crypto.KeyGenerator
alias PlugRailsCookieSessionStore.MessageVerifier
alias PlugRailsCookieSessionStore.MessageEncryptor
# Plug.Session.Store callback: validates the options at compile/boot time and
# returns the runtime state map used by get/put/delete.
def init(opts) do
# Both checks raise on missing salts; encryption_salt may be nil when
# the :encrypt option is false (signed-only cookies).
encryption_salt = check_encryption_salt(opts)
signing_salt = check_signing_salt(opts)
iterations = Keyword.get(opts, :key_iterations, 1000)
length = Keyword.get(opts, :key_length, 32)
digest = Keyword.get(opts, :key_digest, :sha256)
key_opts = [iterations: iterations, length: length, digest: digest, cache: Plug.Keys]
serializer = check_serializer(opts[:serializer] || :external_term_format)
%{
encryption_salt: encryption_salt,
signing_salt: signing_salt,
key_opts: key_opts,
serializer: serializer
}
end
# Plug.Session.Store callback: reads the session term out of the cookie.
# Returns {nil, session_map}; the sid is always nil for cookie stores.
def get(conn, cookie, opts) do
key_opts = opts.key_opts
# Rails www-form-encodes the cookie value, so undo that first.
cookie = cookie |> URI.decode_www_form()
# With an encryption salt the cookie is decrypted and verified; without
# one it is only signature-verified (contents readable, not forgeable).
if key = opts.encryption_salt do
MessageEncryptor.verify_and_decrypt(
cookie,
derive(conn, key, key_opts),
derive(conn, opts.signing_salt, key_opts)
)
else
MessageVerifier.verify(cookie, derive(conn, opts.signing_salt, key_opts))
end
|> decode(opts.serializer)
end
# Plug.Session.Store callback: serializes the session term and returns the
# signed (and optionally encrypted) cookie value, www-form-encoded.
def put(conn, _sid, term, opts) do
binary = encode(term, opts.serializer)
key_opts = opts.key_opts
if key = opts.encryption_salt do
MessageEncryptor.encrypt_and_sign(
binary,
derive(conn, key, key_opts),
derive(conn, opts.signing_salt, key_opts)
)
else
MessageVerifier.sign(binary, derive(conn, opts.signing_salt, key_opts))
end
|> URI.encode_www_form()
end
# Plug.Session.Store callback: nothing to delete server-side -- the session
# lives entirely in the client cookie, which Plug drops for us.
def delete(_conn, _sid, _opts) do
:ok
end
# Serializes a session term with either the Erlang external term format or a
# custom serializer module; returns nil when a custom serializer fails.
defp encode(term, :external_term_format), do: :erlang.term_to_binary(term)
defp encode(term, serializer) do
case serializer.encode(term) do
{:ok, binary} -> binary
_ -> nil
end
end
# Deserializes a verified cookie payload into {nil, session_map}. The binary
# has already passed signature verification at this point, so it originated
# from this application and binary_to_term is applied to trusted data.
defp decode({:ok, binary}, :external_term_format), do: {nil, :erlang.binary_to_term(binary)}
defp decode({:ok, binary}, serializer) do
case serializer.decode(binary) do
{:ok, term} -> {nil, term}
_ -> {nil, %{}}
end
end
# Verification failed (invalid/forged cookie): start a fresh empty session.
defp decode(:error, _serializer), do: {nil, %{}}
# Derives a crypto key from conn.secret_key_base and the given salt, after
# validating the secret is present and long enough.
defp derive(conn, key, key_opts) do
conn.secret_key_base
|> validate_secret_key_base()
|> generate_key(key, key_opts)
end
# A nil salt means "use the secret as-is" (no key stretching).
defp generate_key(secret, nil, _), do: secret
defp generate_key(secret, key, key_opts), do: KeyGenerator.generate(secret, key, key_opts)
defp validate_secret_key_base(nil),
do: raise(ArgumentError, "cookie store expects conn.secret_key_base to be set")
defp validate_secret_key_base(secret_key_base) when byte_size(secret_key_base) < 32,
do: raise(ArgumentError, "cookie store expects conn.secret_key_base to be at least 32 bytes")
defp validate_secret_key_base(secret_key_base), do: secret_key_base
# Requires :signing_salt unless :signing_with_salt is explicitly disabled.
# Note: when :signing_with_salt is false this returns nil (the whole `if`
# has no else branch), which generate_key/3 treats as "no salt".
defp check_signing_salt(opts) do
if Keyword.get(opts, :signing_with_salt, true) do
case opts[:signing_salt] do
nil -> raise ArgumentError, "cookie store expects :signing_salt as option"
salt -> salt
end
end
end
# Requires :encryption_salt unless encryption is disabled via encrypt: false,
# in which case nil is returned and cookies are only signed.
defp check_encryption_salt(opts) do
if Keyword.get(opts, :encrypt, true) do
case opts[:encryption_salt] do
nil -> raise ArgumentError, "encrypted cookie store expects :encryption_salt as option"
salt -> salt
end
end
end
# The serializer must be a module (or the :external_term_format atom).
defp check_serializer(serializer) when is_atom(serializer), do: serializer
defp check_serializer(_),
do: raise(ArgumentError, "cookie store expects :serializer option to be a module")
end
| 31.863636 | 97 | 0.689907 |
7399f7c4277ca122c4d00fdba749b96b7ce37657 | 96 | ex | Elixir | lib/ch/repo.ex | ckampfe/ch | 88d1ab3f7b8cf4be63913348610ba78a3e9e2e54 | [
"BSD-3-Clause"
] | null | null | null | lib/ch/repo.ex | ckampfe/ch | 88d1ab3f7b8cf4be63913348610ba78a3e9e2e54 | [
"BSD-3-Clause"
] | null | null | null | lib/ch/repo.ex | ckampfe/ch | 88d1ab3f7b8cf4be63913348610ba78a3e9e2e54 | [
"BSD-3-Clause"
] | null | null | null | defmodule Ch.Repo do
use Ecto.Repo,
otp_app: :ch,
adapter: Ecto.Adapters.Postgres
end
| 16 | 35 | 0.697917 |
7399f9296eb23613fb79249d1bd419d1e8abb76f | 2,081 | ex | Elixir | lib/probability_weighting_function/participant.ex | xeejp/xee_probability_weighting_function | dc58d41e5921719c9f3410978923eb7b1c405ea6 | [
"MIT"
] | null | null | null | lib/probability_weighting_function/participant.ex | xeejp/xee_probability_weighting_function | dc58d41e5921719c9f3410978923eb7b1c405ea6 | [
"MIT"
] | null | null | null | lib/probability_weighting_function/participant.ex | xeejp/xee_probability_weighting_function | dc58d41e5921719c9f3410978923eb7b1c405ea6 | [
"MIT"
] | null | null | null | defmodule ProbabilityWeightingFunction.Participant do
alias ProbabilityWeightingFunction.Actions
require Logger
# Actions
# Pushes the participant's current contents back out through the Actions
# layer without modifying the experiment state.
def fetch_contents(data, id) do
Actions.update_participant_contents(data, id)
end
# Records the participant's question and advances their state to 1
# (answering), then notifies the Actions layer.
def set_question(data,id,question) do
data = data
|>put_in([:participants,id,:question],question)
|>put_in([:participants,id,:state],1)
Actions.set_question(data,id,question)
end
# Advances the participant one slide and, for question types other than "6",
# runs a bisection-style adjustment of the per-type :plus increment based on
# whether the participant's choice flipped relative to :befor.
#
# NOTE(review): the `data = ...` and `nextadd = ...` rebindings inside the
# nested `if` blocks below do NOT leak out of those blocks in any Elixir
# version >= 1.2, so the :plus / :down / :add / :befor updates computed in
# them appear to be discarded before Actions.next/3 is called. This looks
# like a latent bug inherited from very old Elixir scoping rules -- confirm
# intended behavior before relying on or "fixing" this function.
def next(data,id,add) do
slideIndex = get_in(data,[:participants,id,:slideIndex])
slideIndex = slideIndex + 1
# Question type arrives under "type"; it is compared as a string key.
n = to_string(add["type"])
if n != "6" do
nextadd = add["add"]
data = data |> put_in([:participants, id, :plus], add["plus"])
# befor[n] == -1 marks "no previous answer for this question type".
if data.participants[id].befor[n] != -1 do
if add["choice"] != data.participants[id].befor[n] do
data = data |> put_in([:participants, id, :down, n], true)
end
# Once the answer has flipped, halve the step size (binary search).
if data.participants[id].down[n], do: data = data |> put_in([:participants, id, :plus, n], div(data.participants[id].plus[n], 2))
nextadd = nextadd |> Map.put(n, add["add"][n] + data.participants[id].plus[n])
else
nextadd = nextadd |> Map.put(n, add["add"][n] + data.participants[id].plus[n])
end
data = data |> put_in([:participants, id, :add], nextadd)
|> put_in([:participants, id, :befor, n], add["choice"])
end
data = data
|>put_in([:participants,id,:slideIndex],slideIndex)
Actions.next(data,id,slideIndex)
end
# Marks the participant as finished (state 2) and notifies the Actions layer.
def finish(data,id) do
data = data
|>put_in([:participants,id,:state],2)
Actions.finish(data,id)
end
# Utilities
# Identity for now; kept as an extension point for per-participant filtering.
def format_participant(participant), do: participant
# Projects the experiment-wide fields shared with every participant.
# Assumes data carries :page, :money, :unit and :anses keys -- set elsewhere
# in the experiment setup (not visible here).
def format_data(data) do
%{
page: data.page,
money: data.money,
unit: data.unit,
anses: data.anses,
joined: Map.size(data.participants)
}
end
# Merged view of one participant's data plus the shared experiment data;
# shared fields win on key collisions (Map.merge right-hand side).
def format_contents(data, id) do
%{participants: participants} = data
participant = Map.get(participants, id)
format_participant(participant)
|> Map.merge(format_data(data))
end
end
| 30.602941 | 137 | 0.626141 |
739a7669c9c152a2c7e935aa20df6913a9a066f8 | 211 | ex | Elixir | lib/kubex/replicate_set.ex | IanLuites/kubex | ef26f539a130cc416fc605e66623e715e35521df | [
"MIT"
] | 1 | 2020-01-12T03:41:14.000Z | 2020-01-12T03:41:14.000Z | lib/kubex/replicate_set.ex | IanLuites/kubex | ef26f539a130cc416fc605e66623e715e35521df | [
"MIT"
] | null | null | null | lib/kubex/replicate_set.ex | IanLuites/kubex | ef26f539a130cc416fc605e66623e715e35521df | [
"MIT"
] | null | null | null | defmodule Kubex.ReplicaSet do
# Kubernetes resource path segment used when building API URLs.
@doc false
def __resource__ do
"replicasets"
end
# Default manifest skeleton for a ReplicaSet in the extensions/v1beta1
# API group; `kind:` carries this module so callers can round-trip it.
@doc false
def __default__ do
%{
kind: __MODULE__,
apiVersion: :"extensions/v1beta1"
}
end
end
| 14.066667 | 39 | 0.64455 |
739a79c7911bd84e4d6ec8cd623d51c796c27bb8 | 280 | exs | Elixir | test/modal_example_web/views/layout_view_test.exs | pthompson/liveview_tailwind_modal | 5e5d173cdc8e4710ab98ce0a925822f4c72cd3ea | [
"MIT"
] | 25 | 2020-07-29T04:21:10.000Z | 2022-02-07T16:06:51.000Z | test/modal_example_web/views/layout_view_test.exs | pthompson/liveview_tailwind_modal | 5e5d173cdc8e4710ab98ce0a925822f4c72cd3ea | [
"MIT"
] | null | null | null | test/modal_example_web/views/layout_view_test.exs | pthompson/liveview_tailwind_modal | 5e5d173cdc8e4710ab98ce0a925822f4c72cd3ea | [
"MIT"
] | 8 | 2020-08-19T13:06:39.000Z | 2021-08-11T17:18:34.000Z | defmodule ModalExampleWeb.LayoutViewTest do
use ModalExampleWeb.ConnCase, async: true
# When testing helpers, you may want to import Phoenix.HTML and
# use functions such as safe_to_string() to convert the helper
# result into an HTML string.
# import Phoenix.HTML
end
| 31.111111 | 65 | 0.775 |
739a876b6473523868abb4061ad333474f2cc401 | 250 | ex | Elixir | lib/ex_binance/responses/create_order.ex | arturictus/ex_binance | 9d1f51070a452e5e763a1d148d1151dddedd1956 | [
"MIT"
] | null | null | null | lib/ex_binance/responses/create_order.ex | arturictus/ex_binance | 9d1f51070a452e5e763a1d148d1151dddedd1956 | [
"MIT"
] | null | null | null | lib/ex_binance/responses/create_order.ex | arturictus/ex_binance | 9d1f51070a452e5e763a1d148d1151dddedd1956 | [
"MIT"
] | null | null | null | defmodule ExBinance.Responses.CreateOrder do
# Fields mirror the Binance "create order" REST response payload.
defstruct ~w(
client_order_id
executed_qty
order_id
orig_qty
price
side
status
symbol
time_in_force
transact_time
type
fills
)a
# ExConstructor generates new/1-2 that builds this struct from maps with
# string/atom/camelCase keys (as returned by the API).
use ExConstructor
| 13.157895 | 44 | 0.676 |
739b01ebb812ea5b4560e3a4238cc00d5d464a82 | 1,383 | ex | Elixir | lib/samly.ex | bundacia/samly | 9e742d473f7dcc3b183b519695e2912891212a5b | [
"MIT"
] | null | null | null | lib/samly.ex | bundacia/samly | 9e742d473f7dcc3b183b519695e2912891212a5b | [
"MIT"
] | null | null | null | lib/samly.ex | bundacia/samly | 9e742d473f7dcc3b183b519695e2912891212a5b | [
"MIT"
] | null | null | null | defmodule Samly do
@moduledoc """
Elixir library used to enable SAML SP SSO to a Phoenix/Plug based application.
"""
alias Plug.Conn
alias Samly.{Assertion, State}
@doc """
Returns the authenticated user's SAML Assertion.

The struct includes the attributes sent by the IdP as well as any locally
computed/derived attributes. Returns `nil` if the current Plug session is
not authenticated.

## Parameters

- conn: Plug connection
"""
@spec get_active_assertion(Conn.t()) :: Assertion.t()
def get_active_assertion(conn) do
  nameid = Conn.get_session(conn, "samly_nameid")

  case State.get_by_nameid(nameid) do
    {^nameid, assertion} -> assertion
    _ -> nil
  end
end
@doc """
Returns the value of the given attribute name in the given SAML Assertion.

Looks the attribute up in the `computed` map first and falls back to the
`attributes` map. Returns `nil` when it is present in neither.

## Parameters

- assertion: SAML assertion obtained by calling `get_active_assertion/1`
- name: Attribute name
"""
@spec get_attribute(nil | Assertion.t(), String.t()) :: nil | String.t()
def get_attribute(nil, _name), do: nil

def get_attribute(%Assertion{computed: computed, attributes: attributes}, name) do
  Map.get(computed, name) || Map.get(attributes, name)
end
end
| 27.66 | 87 | 0.705712 |
739b12f6488c43ef28eed264effda5b008e81a4b | 1,276 | ex | Elixir | lib/money/ecto/currency_type.ex | stefanluptak/money | 51b4b403787f76e21b12fc162270113a6cfe6a08 | [
"MIT"
] | 350 | 2019-02-21T02:25:03.000Z | 2022-03-10T02:35:50.000Z | lib/money/ecto/currency_type.ex | stefanluptak/money | 51b4b403787f76e21b12fc162270113a6cfe6a08 | [
"MIT"
] | 68 | 2019-02-21T07:19:17.000Z | 2022-03-29T03:13:12.000Z | lib/money/ecto/currency_type.ex | stefanluptak/money | 51b4b403787f76e21b12fc162270113a6cfe6a08 | [
"MIT"
] | 70 | 2015-08-28T16:45:57.000Z | 2019-02-06T01:11:11.000Z | if Code.ensure_loaded?(Ecto.Type) do
defmodule Money.Ecto.Currency.Type do
@moduledoc """
Provides a type for Ecto to store a currency.
The underlying data type is a string.
## Migration
create table(:my_table) do
add :currency, :varchar, size: 3
end
## Schema
schema "my_table" do
field :currency, Money.Ecto.Currency.Type
end
"""
alias Money.Currency
# Ecto >= 3 ships `use Ecto.Type`; on older Ecto fall back to declaring the
# behaviour directly.
if macro_exported?(Ecto.Type, :__using__, 1) do
use Ecto.Type
else
@behaviour Ecto.Type
end
@spec type :: :string
def type, do: :string
@spec cast(Money.t() | String.t()) :: {:ok, atom()}
def cast(val)
# A Money struct casts to its currency atom.
def cast(%Money{currency: currency}), do: {:ok, currency}
def cast(str) when is_binary(str) do
{:ok, Currency.to_atom(str)}
rescue
# Currency.to_atom raises for unknown currency codes; treat that as a
# cast failure rather than crashing the changeset.
_ -> :error
end
def cast(atom) when is_atom(atom) do
if Currency.exists?(atom), do: {:ok, atom}, else: :error
end
def cast(_), do: :error
@spec load(String.t()) :: {:ok, atom()}
# NOTE(review): load/1 assumes only known currency codes were persisted, so
# Currency.to_atom presumably never creates atoms from arbitrary database
# input -- confirm against Money.Currency's implementation.
def load(str) when is_binary(str), do: {:ok, Currency.to_atom(str)}
@spec dump(atom()) :: {:ok, String.t()}
def dump(atom) when is_atom(atom), do: {:ok, Atom.to_string(atom)}
def dump(_), do: :error
end
end
| 22.385965 | 71 | 0.587774 |
739b15965c5eff0cbb56ef173734eb8e5854e6bd | 456 | ex | Elixir | farmbot_os/platform/host/system_tasks.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | 1 | 2021-08-23T13:36:14.000Z | 2021-08-23T13:36:14.000Z | farmbot_os/platform/host/system_tasks.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | farmbot_os/platform/host/system_tasks.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | defmodule FarmbotOS.Platform.Host.SystemTasks do
@moduledoc "Host implementation for Farmbot.System."
@behaviour FarmbotOS.System
def reboot() do
# On the host platform a "reboot" is simulated by stopping and restarting
# the farmbot application tree instead of rebooting the machine.
Application.stop(:farmbot)
Application.stop(:farmbot_ext)
Application.stop(:farmbot_core)
# Restart in reverse order: core first, then ext, then the umbrella app.
Application.ensure_all_started(:farmbot_core)
Application.ensure_all_started(:farmbot_ext)
Application.ensure_all_started(:farmbot)
end
def shutdown() do
# Halts the VM immediately; on the host this simply exits the OS process.
System.halt()
end
end
| 22.8 | 54 | 0.756579 |
739b191030612a1f62f7e5d1bb3a259b8c53f9ff | 4,346 | ex | Elixir | lib/discovery_api/data/model.ex | folkengine/discovery_api | 0bb7a0aff4603a4f7532cc65ca8c8dbb37a57217 | [
"Apache-2.0"
] | null | null | null | lib/discovery_api/data/model.ex | folkengine/discovery_api | 0bb7a0aff4603a4f7532cc65ca8c8dbb37a57217 | [
"Apache-2.0"
] | null | null | null | lib/discovery_api/data/model.ex | folkengine/discovery_api | 0bb7a0aff4603a4f7532cc65ca8c8dbb37a57217 | [
"Apache-2.0"
] | null | null | null | defmodule DiscoveryApi.Data.Model do
@moduledoc """
utilities to persist and load discovery data models
"""
alias DiscoveryApi.Data.Persistence
@behaviour Access
# Dataset model fields (camelCase mirrors the persisted JSON keys; see
# struct_from_json/1). All default to nil except where save/1 normalizes.
defstruct [
:accessLevel,
:categories,
:completeness,
:conformsToUri,
:contactEmail,
:contactName,
:describedByMimeType,
:describedByUrl,
:description,
:downloads,
:fileTypes,
:homepage,
:id,
:issuedDate,
:keywords,
:language,
:lastUpdatedDate,
:license,
:modifiedDate,
:name,
:organization,
:organizationDetails,
:parentDataset,
:private,
:publishFrequency,
:queries,
:referenceUrls,
:rights,
:schema,
:sourceFormat,
:sourceType,
:sourceUrl,
:spatial,
:systemName,
:temporal,
:title
]
@model_name_space "discovery-api:model:"
# Loads one model by id from persistence, decorating it with runtime system
# attributes (completeness, downloads, queries, last updated). Returns nil
# when the id is unknown.
def get(id) do
(@model_name_space <> id)
|> Persistence.get()
|> struct_from_json()
|> add_system_attributes()
end
# Loads every persisted model, decorated with system attributes.
def get_all() do
get_all_models()
|> add_system_attributes()
end
# Loads only the models with the given ids, decorated with system attributes.
def get_all(ids) do
get_models(ids)
|> add_system_attributes()
end
# Extracts the "completeness" score from a raw {id, json_or_nil} pair.
def get_completeness({id, nil}), do: {id, nil}

def get_completeness({id, completeness}) do
  score =
    completeness
    |> Jason.decode!()
    |> Map.get("completeness", nil)

  {id, score}
end
# Reads the last insert date written by the forklift pipeline for this id.
def get_last_updated_date(id) do
("forklift:last_insert_date:" <> id)
|> Persistence.get()
end
# Persists the model under its namespaced key, normalizing a nil keywords
# field to an empty list first.
def save(%__MODULE__{} = model) do
model_to_save =
model
|> default_nil_field_to(:keywords, [])
|> Map.from_struct()
Persistence.persist(@model_name_space <> model.id, model_to_save)
end
# Returns a map of %{metric_name => count} for the dataset, built from the
# "smart_registry:<metric>:count:<id>" keys. Metric names become atoms; the
# key space is bounded by our own registry keys (hence the sobelow skip).
# sobelow_skip ["DOS.StringToAtom"]
def get_count_maps(dataset_id) do
  case Persistence.get_keys("smart_registry:*:count:" <> dataset_id) do
    [] ->
      %{}

    all_keys ->
      friendly_keys = Enum.map(all_keys, fn key -> String.to_atom(Enum.at(String.split(key, ":"), 1)) end)
      all_values = Persistence.get_many(all_keys)

      # Pair keys with values positionally; zip is O(n) versus the previous
      # index-based Enum.at lookups, which were O(n^2).
      friendly_keys
      |> Enum.zip(all_values)
      |> Map.new()
  end
end
# Access behaviour callbacks: delegate straight to Map so models support
# data["key"], get_in/2, put_in/3 and friends.
@impl Access
def fetch(term, key), do: Map.fetch(term, key)
@impl Access
def get_and_update(data, key, func) do
Map.get_and_update(data, key, func)
end
@impl Access
def pop(data, key), do: Map.pop(data, key)
# Loads and decodes every model stored under the model namespace.
defp get_all_models() do
(@model_name_space <> "*")
|> Persistence.get_all()
|> Enum.map(&struct_from_json/1)
end
# Loads and decodes only the given ids (batched fetch).
defp get_models(ids) do
ids
|> Enum.map(&(@model_name_space <> &1))
|> Persistence.get_many(true)
|> Enum.map(&struct_from_json/1)
end
# Decorates model(s) with runtime attributes (completeness, download/query
# counts, last updated date) fetched from persistence in one batched call.
defp add_system_attributes(nil), do: nil

# Single-model variant: wrap, decorate as a list, unwrap.
defp add_system_attributes(model) when is_map(model) do
  model
  |> List.wrap()
  |> add_system_attributes()
  |> List.first()
end

defp add_system_attributes(models) do
  # One round trip for all models: fetch every stats/count/date key at once.
  redis_kv_results =
    models
    |> Enum.map(&Map.get(&1, :id))
    |> get_all_keys()
    |> Persistence.get_many_with_keys()

  # Previously this was bound to an unused `new_models` variable (compiler
  # warning); the mapped list is simply returned.
  Enum.map(models, fn model ->
    completeness = redis_kv_results["discovery-api:stats:#{model.id}"]
    downloads = redis_kv_results["smart_registry:downloads:count:#{model.id}"]
    queries = redis_kv_results["smart_registry:queries:count:#{model.id}"]
    last_updated_date = redis_kv_results["forklift:last_insert_date:#{model.id}"]

    model
    |> Map.put(:completeness, completeness)
    |> Map.put(:downloads, downloads)
    |> Map.put(:queries, queries)
    |> Map.put(:lastUpdatedDate, last_updated_date)
  end)
end
# Builds the flat list of persistence keys (date, download count, query
# count, stats) for every id.
defp get_all_keys(ids) do
  Enum.flat_map(ids, fn id ->
    [
      "forklift:last_insert_date:#{id}",
      "smart_registry:downloads:count:#{id}",
      "smart_registry:queries:count:#{id}",
      "discovery-api:stats:#{id}"
    ]
  end)
end
# Decodes a persisted JSON blob into this struct; nil passes through.
# NOTE(review): `keys: :atoms` creates atoms from JSON keys -- presumably
# safe because the JSON was written by save/1 from this struct, but confirm
# nothing else writes to this key space.
defp struct_from_json(nil), do: nil
defp struct_from_json(json) do
map = Jason.decode!(json, keys: :atoms)
struct(__MODULE__, map)
end
# Replaces a nil field with the given default, leaving other values intact.
defp default_nil_field_to(model, field, default) do
case Map.get(model, field) do
nil -> Map.put(model, field, default)
_ -> model
end
end
end
| 22.994709 | 108 | 0.630925 |
739b264158318770650b2ba45e9e2de59d6a4cc7 | 1,674 | exs | Elixir | lib/mix/test/mix/tasks/help_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/help_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/help_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | 1 | 2020-12-07T08:04:16.000Z | 2020-12-07T08:04:16.000Z | Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.HelpTest do
use MixTest.Case
import ExUnit.CaptureIO
# `mix help` with no arguments prints one info line per available task.
test "help lists all tasks" do
in_fixture "no_mixfile", fn ->
Mix.Tasks.Help.run []
assert_received {:mix_shell, :info, ["mix" <> _]}
assert_received {:mix_shell, :info, ["mix help" <> _]}
assert_received {:mix_shell, :info, ["mix compile" <> _]}
end
end
# The bare "mix" entry shows which task runs by default (mix run).
test "help list default task" do
in_fixture "no_mixfile", fn ->
Mix.Tasks.Help.run []
{_, _, [output]} =
assert_received {:mix_shell, :info, [_]}
assert output =~ ~r/^mix\s+# Run the default task \(current: mix run\)/m
end
end
# Minimal project stub exposing aliases for the --names test below.
defmodule Aliases do
def project do
[aliases: [h: "hello", c: "compile"]]
end
end
# `mix help --names` lists task and alias names only, one per line.
test "help --names" do
Mix.Project.push Aliases
in_fixture "no_mixfile", fn ->
Mix.Tasks.Help.run ["--names"]
assert_received {:mix_shell, :info, ["c"]}
assert_received {:mix_shell, :info, ["compile"]}
assert_received {:mix_shell, :info, ["h"]}
assert_received {:mix_shell, :info, ["help"]}
assert_received {:mix_shell, :info, ["escript.build"]}
end
end
# `mix help TASK` prints the task's moduledoc plus its source location.
test "help TASK" do
in_fixture "no_mixfile", fn ->
output =
capture_io fn ->
Mix.Tasks.Help.run ["compile"]
end
assert output =~ "# mix compile"
assert output =~ "## Command line options"
assert output =~ ~r/^Location:/m
end
end
# More than one argument is rejected with a descriptive Mix.Error.
test "bad arguments" do
assert_raise Mix.Error, "Unexpected arguments, expected `mix help` or `mix help TASK`", fn ->
Mix.Tasks.Help.run ["foo", "bar"]
end
end
end
| 25.753846 | 97 | 0.601553 |
739b302ec341be3c6296f10c26518677169eff15 | 1,271 | ex | Elixir | lib/api_phoenix_jwt_crud_web/endpoint.ex | melardev/Elixir_Phoenix_Ecto_AuthJwt_Crud | 3c90eafe889b0414ed0f027d7d42bf401682f739 | [
"MIT"
] | null | null | null | lib/api_phoenix_jwt_crud_web/endpoint.ex | melardev/Elixir_Phoenix_Ecto_AuthJwt_Crud | 3c90eafe889b0414ed0f027d7d42bf401682f739 | [
"MIT"
] | null | null | null | lib/api_phoenix_jwt_crud_web/endpoint.ex | melardev/Elixir_Phoenix_Ecto_AuthJwt_Crud | 3c90eafe889b0414ed0f027d7d42bf401682f739 | [
"MIT"
] | null | null | null | defmodule ApiPhoenixJwtCrudWeb.Endpoint do
  # Phoenix HTTP endpoint: the plug pipeline every request flows
  # through before reaching the router. Plug order is significant.
  use Phoenix.Endpoint, otp_app: :api_phoenix_jwt_crud

  # Websocket entry point (long-polling transport disabled).
  socket "/socket", ApiPhoenixJwtCrudWeb.UserSocket,
    websocket: true,
    longpoll: false

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :api_phoenix_jwt_crud,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug Phoenix.CodeReloader
  end

  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  # Parse urlencoded, multipart and JSON request bodies.
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  # NOTE(review): the signing salt is committed to the repo here; for a
  # real deployment it should come from configuration/secrets instead.
  plug Plug.Session,
    store: :cookie,
    key: "_api_phoenix_jwt_crud_key",
    signing_salt: "tmxCqglc"

  plug ApiPhoenixJwtCrudWeb.Router
end
| 28.244444 | 63 | 0.720692 |
739b49a25dc0359d766f37fba75e0e1f37f22d78 | 157 | exs | Elixir | .formatter.exs | Ovyerus/brewing-stand | a631d5e90485b959525e3a79b941f7aaa86b3fdd | [
"MIT"
] | 14 | 2021-03-28T10:27:50.000Z | 2021-04-27T10:14:07.000Z | .formatter.exs | Ovyerus/brewing-stand | a631d5e90485b959525e3a79b941f7aaa86b3fdd | [
"MIT"
] | null | null | null | .formatter.exs | Ovyerus/brewing-stand | a631d5e90485b959525e3a79b941f7aaa86b3fdd | [
"MIT"
] | null | null | null | # Used by "mix format"
[
  inputs: [
    "{mix,.formatter}.exs",
    "{config,lib,test}/**/*.{ex,exs}",
    # Fix: the extension group was missing its closing brace
    # ("priv/**/*.{ex,exs"), which is an invalid/ineffective glob.
    "priv/**/*.{ex,exs}"
  ],
  # `defenum/1` macro calls are formatted without parentheses.
  locals_without_parens: [defenum: 1]
]
| 26.166667 | 91 | 0.585987 |
739b4ff0ab5e911b0167fd5749e74b084367f276 | 3,328 | exs | Elixir | mix.exs | ooodigi/nerves_influxdb2_x86_64 | 606a29ceee95eed83468eb003766a97c58f5a4d1 | [
"Apache-2.0"
] | null | null | null | mix.exs | ooodigi/nerves_influxdb2_x86_64 | 606a29ceee95eed83468eb003766a97c58f5a4d1 | [
"Apache-2.0"
] | null | null | null | mix.exs | ooodigi/nerves_influxdb2_x86_64 | 606a29ceee95eed83468eb003766a97c58f5a4d1 | [
"Apache-2.0"
] | null | null | null | defmodule NervesInfluxDB2X8664.MixProject do
  # Mix project definition for a Nerves system package (x86_64 target).
  use Mix.Project

  @github_organization "ooodigi"
  @app :nerves_influxdb2_x86_64
  @source_url "https://github.com/#{@github_organization}/#{@app}"
  # The version is read from the VERSION file at compile time.
  @version Path.join(__DIR__, "VERSION")
           |> File.read!()
           |> String.trim()

  def project do
    [
      app: @app,
      version: @version,
      elixir: "~> 1.6",
      compilers: Mix.compilers() ++ [:nerves_package],
      nerves_package: nerves_package(),
      description: description(),
      package: package(),
      deps: deps(),
      # `loadconfig` is aliased so the Mix target is pinned before the
      # real task runs (see bootstrap/1 below).
      aliases: [loadconfig: [&bootstrap/1]],
      docs: docs(),
      preferred_cli_env: %{
        docs: :docs,
        "hex.build": :docs,
        "hex.publish": :docs
      }
    ]
  end

  def application do
    [extra_applications: [:eex]]
  end

  # Alias target for `mix loadconfig`: set the target, start the Nerves
  # bootstrap application, then delegate to the original task.
  defp bootstrap(args) do
    set_target()
    Application.start(:nerves_bootstrap)
    Mix.Task.run("loadconfig", args)
  end

  # Nerves package metadata: artifact download location, Buildroot
  # platform configuration and crosscompile environment.
  defp nerves_package do
    [
      type: :system,
      artifact_sites: [
        {:github_releases, "#{@github_organization}/#{@app}"}
      ],
      build_runner_opts: build_runner_opts(),
      platform: Nerves.System.BR,
      platform_config: [
        defconfig: "nerves_defconfig"
      ],
      # The :env key is an optional experimental feature for adding environment
      # variables to the crosscompile environment. These are intended for
      # llvm-based tooling that may need more precise processor information.
      env: [
        {"TARGET_ARCH", "x86_64"},
        {"TARGET_OS", "linux"},
        {"TARGET_ABI", "musl"}
      ],
      checksum: package_files()
    ]
  end

  defp deps do
    [
      {:nerves, "~> 1.5.4 or ~> 1.6.0 or ~> 1.7.4", runtime: false},
      {:nerves_system_br, "1.17.3", runtime: false},
      {:nerves_toolchain_x86_64_nerves_linux_musl, "~> 1.4.3", runtime: false},
      {:nerves_system_linter, "~> 0.4", only: [:dev, :test], runtime: false},
      {:ex_doc, "~> 0.22", only: :docs, runtime: false}
    ]
  end

  defp description do
    """
    Nerves System - x86_64
    """
  end

  defp docs do
    [
      extras: ["README.md", "CHANGELOG.md"],
      main: "readme",
      source_ref: "v#{@version}",
      source_url: @source_url,
      skip_undefined_reference_warnings_on: ["CHANGELOG.md"]
    ]
  end

  defp package do
    [
      files: package_files(),
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => @source_url}
    ]
  end

  # Files shipped in the Hex package; also used as the artifact
  # checksum file set above.
  defp package_files do
    [
      "fwup_include",
      "lib",
      "priv",
      "rootfs_overlay",
      "CHANGELOG.md",
      "fwup-revert.conf",
      "fwup.conf",
      "grub.cfg",
      "LICENSE",
      "linux-5.4.defconfig",
      "mix.exs",
      "nerves_defconfig",
      "post-build.sh",
      "post-createfs.sh",
      "README.md",
      "VERSION"
    ]
  end

  defp build_runner_opts() do
    # Download source files first to get download errors right away.
    [make_args: primary_site() ++ ["source", "all", "legal-info"]]
  end

  # Honors the BR2_PRIMARY_SITE env var (Buildroot download mirror).
  defp primary_site() do
    case System.get_env("BR2_PRIMARY_SITE") do
      nil -> []
      primary_site -> ["BR2_PRIMARY_SITE=#{primary_site}"]
    end
  end

  # Selects the :target Mix target, falling back to the MIX_TARGET env
  # var on Elixir versions that do not expose Mix.target/1.
  defp set_target() do
    if function_exported?(Mix, :target, 1) do
      apply(Mix, :target, [:target])
    else
      System.put_env("MIX_TARGET", "target")
    end
  end
end
| 23.942446 | 79 | 0.583534 |
739b598dae660f5a541de1586c4a85568d513e2d | 1,394 | exs | Elixir | test/chaos_spawn/chaotic/worker_test.exs | meadsteve/chaos-spawn | ca983ad96cb1e8541474e726f727d20636828c24 | [
"MIT"
] | 90 | 2015-09-29T16:40:17.000Z | 2022-02-20T18:45:54.000Z | test/chaos_spawn/chaotic/worker_test.exs | meadsteve/chaos-spawn | ca983ad96cb1e8541474e726f727d20636828c24 | [
"MIT"
] | 17 | 2015-09-29T15:37:43.000Z | 2015-12-09T08:46:41.000Z | test/chaos_spawn/chaotic/worker_test.exs | meadsteve/chaos-spawn | ca983ad96cb1e8541474e726f727d20636828c24 | [
"MIT"
] | 6 | 2015-12-03T16:03:35.000Z | 2018-06-28T00:54:08.000Z | defmodule Chaotic.WorkerTest do
  use ExUnit.Case

  alias ChaosSpawn.Chaotic.ChaoticWorker
  alias ChaosSpawn.Chaotic.Supervisor.Wrapper

  # ChaoticWorker.worker/2,3 builds a Supervisor.Spec-style worker
  # tuple whose start MFA is routed through the chaos Wrapper.
  test "worker/2 wraps up a call to Supervisor.Spec worker" do
    args = [:arg_one, :arg_two]
    # Defaults: :permanent restart, 5000ms shutdown, modules [ModuleToCall].
    expected = {
      ModuleToCall,
      {Wrapper, :start_link_wrapper, [ModuleToCall, :start_link, args]},
      :permanent,
      5000,
      :worker,
      [ModuleToCall]
    }
    assert ChaoticWorker.worker(ModuleToCall, args) == expected
  end

  test "worker/2 takes :function as an option" do
    args = [:arg_one, :arg_two]
    my_start_func = :special_start_link
    # The :function option replaces :start_link inside the wrapped MFA.
    expected = {
      ModuleToCall,
      {Wrapper, :start_link_wrapper, [ModuleToCall, my_start_func, args]},
      :permanent,
      5000,
      :worker,
      [ModuleToCall]
    }
    worker = ChaoticWorker.worker(ModuleToCall, args, function: my_start_func)
    assert worker == expected
  end

  # The remaining options override each positional slot of the spec tuple.
  test "worker/3 sets :id, :restart, :shutdown, :modules" do
    args = [:arg_one, :arg_two]
    expected = {
      :special_id,
      {Wrapper, :start_link_wrapper, [ModuleToCall, :start_link, args]},
      :transient,
      :brutal_kill,
      :worker,
      :dynamic
    }
    worker = ChaoticWorker.worker(ModuleToCall, args,
      id: :special_id,
      restart: :transient,
      shutdown: :brutal_kill,
      modules: :dynamic
    )
    assert worker == expected
  end
end
| 23.627119 | 78 | 0.644189 |
739b625480ad2edf12c2a9a1169851dbadf795d2 | 2,407 | ex | Elixir | clients/testing/lib/google_api/testing/v1/model/regular_file.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/testing/lib/google_api/testing/v1/model/regular_file.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/testing/lib/google_api/testing/v1/model/regular_file.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Testing.V1.Model.RegularFile do
  # Auto-generated model (see the generator note in the file header);
  # regenerate rather than editing by hand.
  @moduledoc """
  A file or directory to install on the device before the test starts.

  ## Attributes

  *   `content` (*type:* `GoogleApi.Testing.V1.Model.FileReference.t`, *default:* `nil`) - Required. The source file.
  *   `devicePath` (*type:* `String.t`, *default:* `nil`) - Required. Where to put the content on the device. Must be an absolute, allowlisted path. If the file exists, it will be replaced. The following device-side directories and any of their subdirectories are allowlisted: ${EXTERNAL_STORAGE}, /sdcard, or /storage ${ANDROID_DATA}/local/tmp, or /data/local/tmp Specifying a path outside of these directory trees is invalid. The paths /sdcard and /data will be made available and treated as implicit path substitutions. E.g. if /sdcard on a particular device does not map to external storage, the system will replace it with the external storage path prefix for that device and copy the file there. It is strongly advised to use the Environment API in app and test code to access files on the device in a portable way.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
    :content => GoogleApi.Testing.V1.Model.FileReference.t() | nil,
    :devicePath => String.t() | nil
  }

  field(:content, as: GoogleApi.Testing.V1.Model.FileReference)
  field(:devicePath)
end
defimpl Poison.Decoder, for: GoogleApi.Testing.V1.Model.RegularFile do
  # Delegate decoding to the generated `decode/2` on the model module.
  def decode(value, options),
    do: GoogleApi.Testing.V1.Model.RegularFile.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Testing.V1.Model.RegularFile do
  # Delegate to the shared ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 48.14 | 821 | 0.746572 |
739b632b00cfceb1cc3f445e9cfb6c4d53061674 | 1,606 | ex | Elixir | apps/admin_app/test/support/conn_case.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 456 | 2018-09-20T02:40:59.000Z | 2022-03-07T08:53:48.000Z | apps/admin_app/test/support/conn_case.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 273 | 2018-09-19T06:43:43.000Z | 2021-08-07T12:58:26.000Z | apps/admin_app/test/support/conn_case.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 122 | 2018-09-26T16:32:46.000Z | 2022-03-13T11:44:19.000Z | defmodule AdminAppWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common datastructures and query the data layer.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  alias Ecto.Adapters.SQL.Sandbox
  alias Phoenix.ConnTest

  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest
      import AdminAppWeb.Router.Helpers

      # The default endpoint for testing
      @endpoint AdminAppWeb.Endpoint

      # Session options: cookie store, signed but not encrypted
      # (:encrypt is forced to false when building @signing_opts).
      @default_opts [
        store: :cookie,
        key: "secretkey",
        encryption_salt: "encrypted cookie salt",
        signing_salt: "signing salt"
      ]
      @signing_opts Plug.Session.init(Keyword.put(@default_opts, :encrypt, false))

      # Prepares a session on `conn` and signs `user` in via Guardian,
      # returning the resulting conn.
      def signin_guardian(conn, user) do
        # Fix: dropped the redundant `conn =` rebinding — the pipeline
        # result is returned directly; the old binding was never read
        # and produced an "unused variable" compiler warning.
        conn
        |> Plug.Session.call(@signing_opts)
        |> Plug.Conn.fetch_session()
        |> AdminAppWeb.Guardian.Plug.sign_in(user)
        |> Guardian.Plug.VerifySession.call([])
      end
    end
  end

  setup tags do
    # Check out a sandboxed DB connection; share it with spawned
    # processes when the test is not async.
    :ok = Sandbox.checkout(Snitch.Repo)

    unless tags[:async] do
      Sandbox.mode(Snitch.Repo, {:shared, self()})
    end

    :ok
  end

  setup _tags do
    # Every test gets a fresh test connection in its context.
    {:ok, conn: ConnTest.build_conn()}
  end
end
| 26.327869 | 82 | 0.671233 |
739b6ccbea8a6419bc73138165e5838b33ef3b2e | 1,357 | ex | Elixir | lib/maru/params/types/atom.ex | elixir-maru/maru_params | 4bc1d05008e881136aff87667791ed4da1c12bd4 | [
"WTFPL"
] | 4 | 2021-12-29T06:45:02.000Z | 2022-02-10T12:48:57.000Z | lib/maru/params/types/atom.ex | elixir-maru/maru_params | 4bc1d05008e881136aff87667791ed4da1c12bd4 | [
"WTFPL"
] | null | null | null | lib/maru/params/types/atom.ex | elixir-maru/maru_params | 4bc1d05008e881136aff87667791ed4da1c12bd4 | [
"WTFPL"
] | 1 | 2021-12-29T06:45:03.000Z | 2021-12-29T06:45:03.000Z | defmodule Maru.Params.Types.Atom do
  @moduledoc """
  Buildin Type: Atom

  ## Validator Arguments

  * `:ecto_enum` - validate input by `Ecto.Enum.dump_values/2`
  * `:values` - validate output is one item of given values

  ## Examples

      optional :role, Atom, values: [:role1, :role2]
      optional :fruit, Atom, ecto_enum: {User, :fruit}
  """

  use Maru.Params.Type

  def parser_arguments, do: [:ecto_enum]

  def validator_arguments, do: [:values]

  # Atoms are already in the target representation.
  def parse(input, _) when is_atom(input), do: {:ok, input}

  # Check the input against the enum's dump values before converting;
  # a failed `String.to_existing_atom/1` becomes a parse error.
  def parse(input, %{ecto_enum: {model, field}}) do
    allowed = apply(Ecto.Enum, :dump_values, [model, field])

    case input in allowed do
      true -> {:ok, String.to_existing_atom(to_string(input))}
      false -> {:error, :validate, "allowed values: #{Enum.join(allowed, ", ")}"}
    end
  rescue
    ArgumentError -> {:error, :parse, "not an already existing atom"}
  end

  def parse(input, _) do
    {:ok, String.to_existing_atom(to_string(input))}
  rescue
    ArgumentError -> {:error, :parse, "not an already existing atom"}
  end

  def validate(value, values: [first | _] = allowed) when is_atom(first) do
    case value in allowed do
      true -> {:ok, value}
      false -> {:error, :validate, "allowed values: #{Enum.join(allowed, ", ")}"}
    end
  end

  # A `:values` list that does not start with an atom passes through
  # unchecked.
  def validate(value, values: _values) do
    {:ok, value}
  end
end
| 26.096154 | 71 | 0.632277 |
739b6eef8fecf909a472a4ac0da940e598c91823 | 278 | exs | Elixir | test/event_store/adapters/couch/subscription_test.exs | skunkwerks/commanded | 10e08641a190259056d52a35cfce1ba9575f4fb3 | [
"MIT"
] | null | null | null | test/event_store/adapters/couch/subscription_test.exs | skunkwerks/commanded | 10e08641a190259056d52a35cfce1ba9575f4fb3 | [
"MIT"
] | null | null | null | test/event_store/adapters/couch/subscription_test.exs | skunkwerks/commanded | 10e08641a190259056d52a35cfce1ba9575f4fb3 | [
"MIT"
] | null | null | null | defmodule Commanded.EventStore.Adapters.Couch.SubscriptionTest do
  alias Commanded.EventStore.Adapters.Couch

  # Pulls in the CouchDB test setup and the shared subscription test
  # suite, parameterised with this adapter as the event store.
  use Commanded.EventStore.CouchTestCase
  use Commanded.EventStore.SubscriptionTestCase, event_store: Couch

  # Presumably a hook expected by the shared suite — confirm in
  # SubscriptionTestCase. This adapter adds no extra wait and simply
  # returns the given default.
  defp event_store_wait(default \\ nil), do: default
end
| 30.888889 | 67 | 0.830935 |
739b982b0e7ba90516270796dd6f1065ea62f0e6 | 1,289 | exs | Elixir | config/dev.exs | lambda2/matrice-site | 5ce6f9d8973f590ee431dab3c020aa24eed20cc4 | [
"MIT"
] | null | null | null | config/dev.exs | lambda2/matrice-site | 5ce6f9d8973f590ee431dab3c020aa24eed20cc4 | [
"MIT"
] | null | null | null | config/dev.exs | lambda2/matrice-site | 5ce6f9d8973f590ee431dab3c020aa24eed20cc4 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :matrice, Matrice.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
    cd: Path.expand("../", __DIR__)]]

# Watch static and templates for browser reloading.
config :matrice, Matrice.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{web/views/.*(ex)$},
      ~r{web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Configure your database
# NOTE(review): dev-only credentials; the empty password relies on
# local Postgres trust auth — do not reuse outside development.
config :matrice, Matrice.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "intra",
  password: "",
  database: "matrice_dev",
  hostname: "localhost",
  pool_size: 10
| 29.295455 | 73 | 0.691234 |
739bc29ae11f01b9d87807f033ffcdb4c349ce43 | 4,903 | ex | Elixir | client/lib/client/worker.ex | vfournie/udp-playground | c180004344d5b673730dae04848294902e6a75ee | [
"MIT"
] | null | null | null | client/lib/client/worker.ex | vfournie/udp-playground | c180004344d5b673730dae04848294902e6a75ee | [
"MIT"
] | null | null | null | client/lib/client/worker.ex | vfournie/udp-playground | c180004344d5b673730dae04848294902e6a75ee | [
"MIT"
] | null | null | null | defmodule UdpClient.Worker do
  @moduledoc """
  UDP client process.

  Opens a UDP socket, starts a linked `UdpClient.Worker.SendTimer`
  that periodically sends ping packets over the socket, and handles
  the incoming reply datagrams.
  """

  use GenServer
  require Logger

  @so_sndbuf_size 2097152 # 2 MB
  @so_rcvbuf_size 2097152 # 2 MB

  defmodule State do
    defstruct [
      socket: nil,
      address: nil,
      port: nil,
      client_id: nil,
      timer_pid: nil,
    ]
  end

  ## Client API

  def start_link([_address, _port, _client_id] = args) do
    GenServer.start_link(__MODULE__, args)
  end

  # Start pinging after `initial_delay` milliseconds (default: now).
  def start_ping(pid, initial_delay \\ 0) do
    GenServer.call(pid, {:start_ping, initial_delay})
  end

  def stop_ping(pid) do
    GenServer.call(pid, :stop_ping)
  end

  def stop(pid) do
    GenServer.stop(pid)
  end

  ## Server Callbacks

  def init([address, port, client_id]) do
    opts = [:binary, reuseaddr: true, recbuf: @so_rcvbuf_size, sndbuf: @so_sndbuf_size]
    address = String.to_charlist(address)

    with {:ok, socket} <- :gen_udp.open(0, opts),
         {:ok, timer_pid} <- start_send_timer(socket, address, port, client_id)
    do
      {:ok, %State{
        socket: socket,
        address: address,
        port: port,
        timer_pid: timer_pid,
        client_id: client_id,
      }}
    else error ->
      Logger.info("UDP client failed to connect (#{inspect error})")
      error
    end
  end

  # Starts the periodic sender. Fix: previously the freshly opened
  # socket leaked when SendTimer.start_link/1 failed; it is now closed
  # before the error is propagated.
  defp start_send_timer(socket, address, port, client_id) do
    case UdpClient.Worker.SendTimer.start_link([socket, address, port, client_id]) do
      {:ok, timer_pid} ->
        {:ok, timer_pid}

      error ->
        :gen_udp.close(socket)
        error
    end
  end

  def handle_call({:start_ping, initial_delay}, _from, %State{timer_pid: timer_pid} = state) do
    UdpClient.Worker.SendTimer.start_timer(timer_pid, initial_delay)
    {:reply, :ok, state}
  end

  def handle_call(:stop_ping, _from, %State{timer_pid: timer_pid} = state) do
    UdpClient.Worker.SendTimer.stop_timer(timer_pid)
    {:reply, :ok, state}
  end

  # Incoming datagram delivered by the socket.
  def handle_info({:udp, _socket, _ip, _port, data}, %State{client_id: client_id} = state) do
    # Update recv counter
    # recv_counter = {:recv, client_id}
    # UdpClient.Collector.inc_counter(recv_counter)
    process_packet(data, client_id)
    {:noreply, state}
  end

  def handle_info(_any, state) do
    {:noreply, state}
  end

  def terminate(_reason, %State{socket: socket} = _state) do
    :gen_udp.close(socket)
  end

  # Private

  def process_packet(binary, client_id)

  # def process_packet(<<
  #   10 :: size(16),
  #   time :: signed-integer-size(64),
  #   >>,
  #   client_id) do
  #   curr_time = System.monotonic_time(:milliseconds)
  #   delta = curr_time - time
  #   :ets.insert_new(:udp_stats, {{:pong, client_id, time}, delta})
  # end

  # Unrecognised payloads are silently dropped.
  def process_packet(_, _client_id) do
    :ok
  end
end
defmodule UdpClient.Worker.SendTimer do
  # Companion GenServer that periodically sends a ping packet over the
  # worker's UDP socket using Process.send_after/3 scheduling.
  use GenServer
  require Logger

  @period 25 # Period between executions (in ms)

  defmodule State do
    defstruct [
      socket: nil,
      address: nil,
      port: nil,
      client_id: nil,
      timer: nil,
    ]
  end

  ## Client API

  def start_link([_socket, _address, _port, _client_id] = args) do
    GenServer.start_link(__MODULE__, args)
  end

  # Schedule the first ping after `initial_delay` milliseconds.
  def start_timer(pid, initial_delay) do
    GenServer.call(pid, {:start_timer, initial_delay})
  end

  def stop_timer(pid) do
    GenServer.call(pid, :stop_timer)
  end

  ## Server Callbacks

  def init([socket, address, port, client_id]) do
    {:ok, %State{
      socket: socket,
      address: address,
      port: port,
      client_id: client_id
    }}
  end

  def handle_call({:start_timer, initial_delay}, _from, state) do
    timer = Process.send_after(self(), :ping, initial_delay)
    {:reply, :ok, %State{state | timer: timer}}
  end

  def handle_call(:stop_timer, _from, %State{timer: timer} = state) do
    cancel_timer(timer)
    {:reply, :ok, %State{state | timer: nil}}
  end

  def handle_info(:ping, %State{socket: socket, address: address, port: port, client_id: _client_id} = state) do
    # Send ping
    # Packet layout: 16-bit tag (10) followed by a signed 64-bit
    # monotonic timestamp in milliseconds.
    monotonic_time = System.monotonic_time(:milliseconds)
    packet = <<
      10 :: size(16),
      monotonic_time :: signed-integer-size(64),
    >>
    :ok = :gen_udp.send(socket, address, port, packet)
    # Update send counter
    # send_counter = {:send, client_id}
    # UdpClient.Collector.inc_counter(send_counter)
    # Start the timer again
    timer = Process.send_after(self(), :ping, @period)
    {:noreply, %State{state | timer: timer}}
  end

  def handle_info(_any, state) do
    {:noreply, state}
  end

  ## Private

  defp cancel_timer(timer)
  defp cancel_timer(nil), do: :ok
  defp cancel_timer(timer), do: Process.cancel_timer(timer)
end
| 25.536458 | 114 | 0.578625 |
739bdd5ee4dcd364ec064defae90f20faa4e7de7 | 477 | ex | Elixir | samples/client/petstore/elixir/lib/openapi_petstore/model/big_cat_all_of.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 11,868 | 2018-05-12T02:58:07.000Z | 2022-03-31T21:19:39.000Z | samples/client/petstore/elixir/lib/openapi_petstore/model/big_cat_all_of.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 9,672 | 2018-05-12T14:25:43.000Z | 2022-03-31T23:59:30.000Z | samples/client/petstore/elixir/lib/openapi_petstore/model/big_cat_all_of.ex | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 4,776 | 2018-05-12T12:06:08.000Z | 2022-03-31T19:52:51.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule OpenapiPetstore.Model.BigCatAllOf do
  # Auto-generated by OpenAPI Generator (see the note above this
  # module); regenerate rather than editing by hand.
  @moduledoc """
  Auto-generated Poison-encodable model struct with a single optional
  `kind` string field.
  """
  @derive [Poison.Encoder]
  defstruct [
    :"kind"
  ]

  @type t :: %__MODULE__{
    :"kind" => String.t | nil
  }
end
defimpl Poison.Decoder, for: OpenapiPetstore.Model.BigCatAllOf do
  # Decoding is the identity — the value is returned unchanged.
  def decode(value, _options), do: value
end
| 18.346154 | 91 | 0.685535 |
739bfea47cf93b7deba995ee62f72670fa15eb86 | 2,784 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/instance_group_aggregated_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/instance_group_aggregated_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/instance_group_aggregated_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.InstanceGroupAggregatedList do
  # Auto-generated model (see the generator note in the file header);
  # regenerate rather than editing by hand.
  @moduledoc """
  ## Attributes

  - id (String.t): [Output Only] Unique identifier for the resource; defined by the server. Defaults to: `null`.
  - items (%{optional(String.t) => InstanceGroupsScopedList}): A list of InstanceGroupsScopedList resources. Defaults to: `null`.
  - kind (String.t): [Output Only] The resource type, which is always compute#instanceGroupAggregatedList for aggregated lists of instance groups. Defaults to: `null`.
  - nextPageToken (String.t): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. Defaults to: `null`.
  - selfLink (String.t): [Output Only] Server-defined URL for this resource. Defaults to: `null`.
  - warning (AcceleratorTypeAggregatedListWarning): Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
    :id => any(),
    :items => map(),
    :kind => any(),
    :nextPageToken => any(),
    :selfLink => any(),
    :warning => GoogleApi.Compute.V1.Model.AcceleratorTypeAggregatedListWarning.t()
  }

  field(:id)
  field(:items, as: GoogleApi.Compute.V1.Model.InstanceGroupsScopedList, type: :map)
  field(:kind)
  field(:nextPageToken)
  field(:selfLink)
  field(:warning, as: GoogleApi.Compute.V1.Model.AcceleratorTypeAggregatedListWarning)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.InstanceGroupAggregatedList do
  # Delegate decoding to the generated `decode/2` on the model module.
  def decode(value, options),
    do: GoogleApi.Compute.V1.Model.InstanceGroupAggregatedList.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.InstanceGroupAggregatedList do
  # Delegate to the shared ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 44.190476 | 381 | 0.74102 |
739c04650f509a1813c4c0deab737328c53d85e4 | 564 | ex | Elixir | apps/ejoi_stats/lib/ejoi_stats_web/views/changeset_view.ex | vahansoft/ejoi-stats | f2faf5fce3324e1dd9980917f78b9bfa3d006380 | [
"MIT"
] | null | null | null | apps/ejoi_stats/lib/ejoi_stats_web/views/changeset_view.ex | vahansoft/ejoi-stats | f2faf5fce3324e1dd9980917f78b9bfa3d006380 | [
"MIT"
] | 2 | 2021-03-10T05:08:08.000Z | 2021-05-11T01:05:07.000Z | apps/ejoi_stats/lib/ejoi_stats_web/views/changeset_view.ex | vahansoft/ejoi-stats | f2faf5fce3324e1dd9980917f78b9bfa3d006380 | [
"MIT"
] | null | null | null | defmodule EjoiStatsWeb.ChangesetView do
  use EjoiStatsWeb, :view

  @doc """
  Traverses and translates changeset errors.

  See `Ecto.Changeset.traverse_errors/2` and
  `EjoiStatsWeb.ErrorHelpers.translate_error/1` for more details.
  """
  def translate_errors(changeset),
    do: Ecto.Changeset.traverse_errors(changeset, &translate_error/1)

  # When encoded, the changeset returns its errors as a JSON object,
  # so the translated map is passed forward as-is.
  def render("error.json", %{changeset: changeset}),
    do: %{errors: translate_errors(changeset)}
end
| 28.2 | 65 | 0.741135 |
739c190d1cef909c89c4beeb0cd79540d62db00a | 1,125 | ex | Elixir | lib/json5/encode.ex | thomas9911/json5 | be16e6bcc1f64dd07143d48a21c37860cc8baa85 | [
"MIT"
] | null | null | null | lib/json5/encode.ex | thomas9911/json5 | be16e6bcc1f64dd07143d48a21c37860cc8baa85 | [
"MIT"
] | null | null | null | lib/json5/encode.ex | thomas9911/json5 | be16e6bcc1f64dd07143d48a21c37860cc8baa85 | [
"MIT"
] | null | null | null | defmodule Json5.Encode do
@moduledoc """
Encode Elixir term to Json5 string
"""
require Decimal
require Json5.ECMA
alias Json5.Encode.Array
alias Json5.Encode.Object
alias Json5.Error
defguardp is_to_string(input)
when input in [true, false] or is_float(input) or is_integer(input) or
Decimal.is_decimal(input)
def dump(input, config \\ %{}) do
case do_dump(input, config) do
{:error, error} -> {:error, error}
other -> {:ok, other}
end
end
def do_dump(nil, _) do
"null"
end
def do_dump(input, _) when is_to_string(input) do
to_string(input)
end
def do_dump(input, %{double_quote_string: true}) when is_binary(input) do
"\"#{input}\""
end
def do_dump(input, _) when is_binary(input) do
"'#{input}'"
end
def do_dump(input, config) when is_list(input) do
Array.dump(input, config)
end
def do_dump(input, config) when is_map(input) and not is_struct(input) do
Object.dump(input, config)
end
def do_dump(input, _) do
{:error, Error.exception(%{type: :invalid_input, input: input})}
end
end
| 22.058824 | 82 | 0.653333 |
739c2c6a06faf8c75026c5f5fade040265ec7652 | 583 | exs | Elixir | phoenix/test/views/error_view_test.exs | sap-archive/jam-login-microservice | 9f4e9d465c8e3913754fb73d08a68b4e8f13a8d2 | [
"Apache-2.0"
] | 1 | 2020-01-03T16:00:09.000Z | 2020-01-03T16:00:09.000Z | phoenix/test/views/error_view_test.exs | SAP/jam-login-microservice | 9f4e9d465c8e3913754fb73d08a68b4e8f13a8d2 | [
"Apache-2.0"
] | null | null | null | phoenix/test/views/error_view_test.exs | SAP/jam-login-microservice | 9f4e9d465c8e3913754fb73d08a68b4e8f13a8d2 | [
"Apache-2.0"
] | null | null | null | defmodule LoginProxy.ErrorViewTest do
  use LoginProxy.ConnCase, async: true

  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View

  test "renders 404.html" do
    rendered = render_to_string(LoginProxy.ErrorView, "404.html", [])
    assert rendered == "Page not found"
  end

  test "render 500.html" do
    rendered = render_to_string(LoginProxy.ErrorView, "500.html", [])
    assert rendered == "Internal server error"
  end

  test "render any other" do
    # Unknown templates fall back to the generic error message.
    rendered = render_to_string(LoginProxy.ErrorView, "505.html", [])
    assert rendered == "Internal server error"
  end
end
| 26.5 | 68 | 0.684391 |
739c32f2cf42d134b8cd57e7ace931c1c92f75ca | 78 | exs | Elixir | youtube/groxio/tetris/test/tetris_web/views/page_view_test.exs | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | youtube/groxio/tetris/test/tetris_web/views/page_view_test.exs | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | youtube/groxio/tetris/test/tetris_web/views/page_view_test.exs | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | defmodule TetrisWeb.PageViewTest do
  # Placeholder test module for TetrisWeb.PageView — no tests defined yet.
  use TetrisWeb.ConnCase, async: true
end
| 19.5 | 37 | 0.820513 |
739c38d9e640aa8b3692fc7e2328e6afd47133b9 | 1,160 | ex | Elixir | clients/cloud_build/lib/google_api/cloud_build/v1/connection.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/lib/google_api/cloud_build/v1/connection.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/lib/google_api/cloud_build/v1/connection.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudBuild.V1.Connection do
  @moduledoc """
  Handle Tesla connections for GoogleApi.CloudBuild.V1.
  """

  # Auto-generated (see the header note above); configures the Gax
  # connection with the cloud-platform OAuth scope and API base URL.
  use GoogleApi.Gax.Connection,
    scopes: [
      # View and manage your data across Google Cloud Platform services
      "https://www.googleapis.com/auth/cloud-platform"
    ],
    otp_app: :google_api_cloud_build,
    base_url: "https://cloudbuild.googleapis.com"
end
| 36.25 | 77 | 0.748276 |
739c3d0c06d8b0bbad5f472e30ec62bedbb996b3 | 3,550 | exs | Elixir | mix.exs | aforward-oss/timex | 1dc6d355c12f06b0250466b62ca001dd1dad8de0 | [
"MIT"
] | null | null | null | mix.exs | aforward-oss/timex | 1dc6d355c12f06b0250466b62ca001dd1dad8de0 | [
"MIT"
] | null | null | null | mix.exs | aforward-oss/timex | 1dc6d355c12f06b0250466b62ca001dd1dad8de0 | [
"MIT"
] | null | null | null | defmodule Timex.Mixfile do
use Mix.Project
  def project do
    # Mix project definition. `description`, `package`, `deps` and
    # `docs` are private functions of this module, called here in the
    # parenless pre-Elixir-1.4 style.
    [ app: :timex,
      version: "2.1.4",
      elixir: "~> 1.1",
      description: description,
      package: package,
      deps: deps,
      docs: docs,
      compilers: [:gettext] ++ Mix.compilers,
      test_coverage: [tool: ExCoveralls] ]
  end
def application do
[applications: [:logger, :tzdata, :gettext, :combine],
env: [local_timezone: nil, default_locale: "en"]]
end
  # Long-form package description published to hex.pm; the heredoc body
  # is kept verbatim because it is user-facing text.
  defp description do
    """
    A comprehensive date/time library for Elixir

    Fully timezone-aware, using the Olson Timezone database
    - Supports local-timezone lookups
    - Supports POSIX-style timezones
    - Supports lookups of any Olson tzdata timezones
    - Supports arbitrary shifts across time and through timezones,
      including ambiguous time periods, non-existent time periods, and leaps.

    Provides both Date and DateTime types, for use depending on your needs,
    with an AmbiguousDateTime type for handling those DateTime values which fall on
    an ambigouos timezone period.

    Extendable via Convertable and Comparable protocols, so you can use Timex with
    your own types!

    Locale-aware, currently only supports "ru" and "en", but more will be added over time.

    Provides a broad array of date/time helper functions
    - shifting/adding/subtracting
    - diffing
    - comparing/before?/after?/between?
    - conversions
    - get day of week, week of year, ISO dates, and names for each
    - get the beginning or ending of a given week
    - get the beginning or ending of a year, quarter, week, or month
    - get days in a given month
    - normalization

    Provides a broad array of time-specific helpers
    - convert to and from units: weeks, days, hours, seconds, ms, and microseconds
    - measure execution time
    - diff/compare
    - to/from 12/24 hour clock times
    - add/subtract

    Safe date/time string formatting and parsing
    - Informative parser errors
    - Supports strftime, as well as an easier to read formatter, i.e. `{ISO:Basic}`, `{YYYY}`
    - Supports many formats out of the box: ISO8601 basic and extended, RFC822, RFC1123, RFC3339, ANSIC, UNIX
    - Relative time formatter (i.e. "2 years from now")

    Extendable
    - Protocols for core modules like the parser tokenizer
    - Easy to wrap to add extra functionality

    Can be used with Phoenix and Ecto when used with timex_ecto package
    """
  end
  # Hex package metadata (files shipped in the tarball, maintainers,
  # license, and links). Note the quoted-atom key `"GitHub":` in links.
  defp package do
    [ files: ["lib", "priv", "mix.exs", "README.md", "LICENSE.md"],
      maintainers: ["Paul Schoenfelder"],
      licenses: ["MIT"],
      links: %{ "GitHub": "https://github.com/bitwalker/timex" } ]
  end
def deps do
[{:tzdata, "~> 0.1.8 or ~> 0.5"},
{:combine, "~> 0.7"},
{:gettext, "~> 0.10"},
{:ex_doc, "~> 0.11", only: :dev},
{:earmark, "~> 0.2", only: :dev},
{:benchfella, "~> 0.3", only: :dev},
{:dialyze, "~> 0.2", only: :dev},
{:excoveralls, "~> 0.4", only: [:dev, :test]},
{:inch_ex, "~> 0.4", only: [:dev, :test]}]
end
  # ExDoc configuration: "getting-started" is the docs landing page,
  # GitHub-flavored markdown is enabled, and the extra guides are
  # rendered from the docs/ directory (plus the changelog).
  defp docs do
    [main: "getting-started",
     formatter_opts: [gfm: true],
     extras: [
       "docs/Getting Started.md",
       "CHANGELOG.md",
       "docs/Basic Usage.md",
       "docs/Erlang Interop.md",
       "docs/Working with DateTime.md",
       "docs/Working with Time.md",
       "docs/Formatting.md",
       "docs/Parsing.md",
       "docs/FAQ.md",
       "docs/Using with Ecto.md",
       "docs/Custom Parsers.md",
       "docs/Custom Formatters.md"
     ]]
  end
end
| 30.084746 | 109 | 0.636901 |
739c62a49e78d1a2989a4fcc4d43d4a4825b4e32 | 1,905 | ex | Elixir | lib/blog/comments.ex | kadmohardy/Blog | 755bfc4325c899166b7c865a60060fc4355e7d15 | [
"MIT"
] | null | null | null | lib/blog/comments.ex | kadmohardy/Blog | 755bfc4325c899166b7c865a60060fc4355e7d15 | [
"MIT"
] | 17 | 2021-04-22T00:58:05.000Z | 2021-09-26T04:03:12.000Z | lib/blog/comments.ex | theguuholi/blog | 36152d03012d163d76520cf57c51783828d14317 | [
"MIT"
] | null | null | null | defmodule Blog.Comments do
@moduledoc """
The Comments context.
"""
import Ecto.Query, warn: false
alias Blog.Repo
alias Blog.Comments.Comment
@doc """
Returns the list of comments.
## Examples
iex> list_comments()
[%Comment{}, ...]
"""
def list_comments do
Repo.all(Comment)
end
@doc """
Gets a single comment.
Raises `Ecto.NoResultsError` if the Comment does not exist.
## Examples
iex> get_comment!(123)
%Comment{}
iex> get_comment!(456)
** (Ecto.NoResultsError)
"""
def get_comment!(id), do: Repo.get!(Comment, id)
@doc """
Creates a comment.
## Examples
iex> create_comment(%{field: value})
{:ok, %Comment{}}
iex> create_comment(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_comment(post_id, user_id, attrs \\ %{}) do
Blog.Posts.get_post!(post_id)
|> Ecto.build_assoc(:comments, user_id: user_id)
|> change_comment(attrs)
|> Repo.insert()
end
@doc """
Updates a comment.
## Examples
iex> update_comment(comment, %{field: new_value})
{:ok, %Comment{}}
iex> update_comment(comment, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_comment(%Comment{} = comment, attrs) do
comment
|> Comment.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a comment.
## Examples
iex> delete_comment(comment)
{:ok, %Comment{}}
iex> delete_comment(comment)
{:error, %Ecto.Changeset{}}
"""
def delete_comment(%Comment{} = comment) do
Repo.delete(comment)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking comment changes.
## Examples
iex> change_comment(comment)
%Ecto.Changeset{data: %Comment{}}
"""
def change_comment(%Comment{} = comment, attrs \\ %{}) do
Comment.changeset(comment, attrs)
end
end
| 17.971698 | 62 | 0.606824 |
739cab67941a3412726e4d932215ccb5176b178b | 1,596 | exs | Elixir | grade-school/grade_school_test.exs | nickpellant/xelixir | 6e27dc7083ef3e423a1615f5151910672e6397a8 | [
"MIT"
] | null | null | null | grade-school/grade_school_test.exs | nickpellant/xelixir | 6e27dc7083ef3e423a1615f5151910672e6397a8 | [
"MIT"
] | null | null | null | grade-school/grade_school_test.exs | nickpellant/xelixir | 6e27dc7083ef3e423a1615f5151910672e6397a8 | [
"MIT"
] | null | null | null | if System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("example.exs")
else
Code.load_file("school.exs")
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
defmodule SchoolTest do
use ExUnit.Case, async: true
def db, do: %{}
test "add student" do
actual = School.add(db, "Aimee", 2)
assert actual == %{2 => ["Aimee"]}
end
@tag :pending
test "add more students in same class" do
actual = db
|> School.add("James", 2)
|> School.add("Blair", 2)
|> School.add("Paul", 2)
assert Enum.sort(actual[2]) == ["Blair", "James", "Paul"]
end
@tag :pending
test "add students to different grades" do
actual = db
|> School.add("Chelsea", 3)
|> School.add("Logan", 7)
assert actual == %{3 => ["Chelsea"], 7 => ["Logan"]}
end
@tag :pending
test "get students in a grade" do
actual = db
|> School.add("Bradley", 5)
|> School.add("Franklin", 5)
|> School.add("Jeff", 1)
|> School.grade(5)
assert Enum.sort(actual) == ["Bradley", "Franklin"]
end
@tag :pending
test "get students in a non existant grade" do
assert [] == School.grade(db, 1)
end
@tag :pending
test "sort school by grade and by student name" do
actual = db
|> School.add("Bart", 4)
|> School.add("Jennifer", 4)
|> School.add("Christopher", 4)
|> School.add("Kareem", 6)
|> School.add("Kyle", 3)
|> School.sort
expected = %{
3 => ["Kyle"],
4 => ["Bart", "Christopher", "Jennifer"],
6 => ["Kareem"]
}
assert expected == actual
end
end
| 21.28 | 61 | 0.580201 |
739cba76373ae26a745d7c239e673b81e1efe3d0 | 4,184 | ex | Elixir | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/policy.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/policy.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/policy.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudFunctions.V1.Model.Policy do
  @moduledoc """
  Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources.   A `Policy` consists of a list of `bindings`. A `binding` binds a list of `members` to a `role`, where the members can be user accounts, Google groups, Google domains, and service accounts. A `role` is a named list of permissions defined by IAM.  **JSON Example**      {       \"bindings\": [         {           \"role\": \"roles/owner\",           \"members\": [             \"user:[email protected]\",             \"group:[email protected]\",             \"domain:google.com\",             \"serviceAccount:[email protected]\"           ]         },         {           \"role\": \"roles/viewer\",           \"members\": [\"user:[email protected]\"]         }       ]     }  **YAML Example**      bindings:     - members:       - user:[email protected]       - group:[email protected]       - domain:google.com       - serviceAccount:[email protected]       role: roles/owner     - members:       - user:[email protected]       role: roles/viewer   For a description of IAM and its features, see the [IAM developer's guide](https://cloud.google.com/iam/docs).

  ## Attributes

  - auditConfigs ([AuditConfig]): Specifies cloud audit logging configuration for this policy. Defaults to: `null`.
  - bindings ([Binding]): Associates a list of `members` to a `role`. `bindings` with no members will result in an error. Defaults to: `null`.
  - etag (binary()): `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy.  If no `etag` is provided in the call to `setIamPolicy`, then the existing policy is overwritten blindly. Defaults to: `null`.
  - version (integer()): Deprecated. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :auditConfigs => list(GoogleApi.CloudFunctions.V1.Model.AuditConfig.t()),
          :bindings => list(GoogleApi.CloudFunctions.V1.Model.Binding.t()),
          :etag => any(),
          :version => any()
        }

  # Field definitions mirror the JSON wire format; nested messages decode
  # into their own generated model structs.
  field(:auditConfigs, as: GoogleApi.CloudFunctions.V1.Model.AuditConfig, type: :list)
  field(:bindings, as: GoogleApi.CloudFunctions.V1.Model.Binding, type: :list)
  field(:etag)
  field(:version)
end
defimpl Poison.Decoder, for: GoogleApi.CloudFunctions.V1.Model.Policy do
  # Delegates to the generated model's decode/2 so nested fields are
  # decoded into their proper struct types.
  def decode(value, options) do
    GoogleApi.CloudFunctions.V1.Model.Policy.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.CloudFunctions.V1.Model.Policy do
  # Encoding is uniform across generated models, so it is delegated to
  # the shared Gax helper.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 73.403509 | 1,425 | 0.694073 |
739ce5d35e5882549eff497c5608bac09e1b9814 | 866 | ex | Elixir | lib/games/gcd.ex | NikitaNaumenko/brain_games_elixir | 25ef3898164846faab5813096804661734d54af8 | [
"MIT"
] | null | null | null | lib/games/gcd.ex | NikitaNaumenko/brain_games_elixir | 25ef3898164846faab5813096804661734d54af8 | [
"MIT"
] | null | null | null | lib/games/gcd.ex | NikitaNaumenko/brain_games_elixir | 25ef3898164846faab5813096804661734d54af8 | [
"MIT"
] | null | null | null | defmodule BrainGames.Games.Gcd do
@moduledoc """
Greatest Common Divisor (GCD)".
The essence of the game is as follows: the user is shown two random numbers, for example, 45 73.
The user must calculate and enter the largest common divisor of these numbers.
"""
require Integer
@max 100
@spec generate_game_data() :: map()
def generate_game_data do
first = Enum.random(0..@max)
second = Enum.random(0..@max)
question = "#{first} #{second}"
answer = find_gcd(first, second)
%{"question" => question, "answer" => Integer.to_string(answer)}
end
@spec get_task() :: String.t()
def get_task do
"Find the greatest common divisor of given numbers."
end
@spec find_gcd(a :: integer(), b :: integer()) :: integer()
defp find_gcd(a, b) when b == 0, do: a
defp find_gcd(a, b) do
find_gcd(b, rem(a, b))
end
end
| 26.242424 | 98 | 0.658199 |
739d2b0d1f57c4f5c6aeca66d137a214fc16e779 | 1,296 | ex | Elixir | lib/game/format/scan.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | lib/game/format/scan.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | lib/game/format/scan.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | defmodule Game.Format.Scan do
@moduledoc """
Formatting for the scan command
"""
import Game.Format.Context
alias Game.Format
@doc """
Format the scan for the room you're in
"""
def room(room, rooms) do
[
room_current(room),
rooms(rooms)
]
|> Enum.join("\n")
|> String.trim()
end
defp rooms(rooms) do
rooms
|> Enum.map(fn {direction, room} ->
room_in_direction(direction, room)
end)
|> Enum.join("\n")
|> String.trim()
end
defp room_current(room) do
context()
|> assign(:who, who(room))
|> Format.template("You look around and see:\n[who]")
end
defp room_in_direction(direction, room) do
context()
|> assign(:direction, direction)
|> assign(:who, who(room))
|> Format.template("You look {command}#{direction}{/command} and see:\n[who]")
end
defp who(room) do
Enum.join(npcs(room) ++ players(room), "\n")
end
defp npcs(room) do
Enum.map(room.npcs, fn npc ->
context()
|> assign(:name, Format.npc_name(npc))
|> Format.template(" - [name]")
end)
end
defp players(room) do
Enum.map(room.players, fn player ->
context()
|> assign(:name, Format.player_name(player))
|> Format.template(" - [name]")
end)
end
end
| 20.25 | 82 | 0.591821 |
739d37c8f2283ae5b543a8257eef3e01b69f9b9d | 840 | ex | Elixir | test/support/example_domain/ticketing/timeout_reservation_handler.ex | blockfi/commanded-scheduler | 46582ecccad254d997d13b6b80bc829d8a24ca21 | [
"MIT"
] | null | null | null | test/support/example_domain/ticketing/timeout_reservation_handler.ex | blockfi/commanded-scheduler | 46582ecccad254d997d13b6b80bc829d8a24ca21 | [
"MIT"
] | 1 | 2020-05-29T18:50:28.000Z | 2020-07-14T11:15:49.000Z | test/support/example_domain/ticketing/timeout_reservation_handler.ex | blockfi/commanded-scheduler | 46582ecccad254d997d13b6b80bc829d8a24ca21 | [
"MIT"
] | null | null | null | defmodule ExampleDomain.TimeoutReservationHandler do
@moduledoc false
use Commanded.Event.Handler, name: __MODULE__, application: Commanded.Scheduler.App
alias Commanded.Scheduler.ScheduleOnce
alias ExampleDomain.AppRouter
alias ExampleDomain.TicketBooking.Commands.TimeoutReservation
alias ExampleDomain.TicketBooking.Events.TicketReserved
@doc """
Timeout the ticket reservation after the expiry date/time.
"""
def handle(%TicketReserved{} = event, _metadata) do
%TicketReserved{ticket_uuid: ticket_uuid, expires_at: expires_at} = event
timeout_reservation = %TimeoutReservation{ticket_uuid: ticket_uuid}
schedule_once = %ScheduleOnce{
schedule_uuid: "schedule-" <> ticket_uuid,
command: timeout_reservation,
due_at: expires_at
}
AppRouter.dispatch(schedule_once)
end
end
| 30 | 85 | 0.772619 |
739d50d8c7fccbb3312778f9d5ec033c7a8ada7c | 2,605 | ex | Elixir | lib/tanks_web/channels/game_channel.ex | marcinbiegun/elixir-tanks | 29a3beef303825a137249c8ae0a3ff21c33d9a1c | [
"MIT"
] | null | null | null | lib/tanks_web/channels/game_channel.ex | marcinbiegun/elixir-tanks | 29a3beef303825a137249c8ae0a3ff21c33d9a1c | [
"MIT"
] | null | null | null | lib/tanks_web/channels/game_channel.ex | marcinbiegun/elixir-tanks | 29a3beef303825a137249c8ae0a3ff21c33d9a1c | [
"MIT"
] | null | null | null | defmodule TanksWeb.GameChannel do
use TanksWeb, :channel
require Logger
def join("game:" <> game_id, %{"playerToken" => player_token} = _params, socket) do
# Join game server
{response, socket} =
with {:ok, server} <- Tanks.GameServer.get(game_id),
{:ok, player} <- Tanks.GameServer.add_player(game_id, player_token),
{:ok, init_state} <- Tanks.GameServer.init_state(game_id) do
# Assign data to channel
socket = assign(socket, :current_server, %{game_id: server.game_id, player_id: player.id})
:ok =
Tanks.ChannelWatcher.monitor(
:game,
self(),
{__MODULE__, :leave, [game_id, player.id, player_token]}
)
response = %{
game_id: server.game_id,
player_id: player.id,
msg: "Joined game #{game_id}",
init_state: init_state
}
{response, socket}
else
error ->
response = %{msg: "Unable to join game #{game_id}: #{inspect(error)}"}
{response, socket}
end
# Respond
{:ok, response, socket}
end
def leave(game_id, player_id, _player_token) do
with {:ok, _server} <- Tanks.GameServer.get(game_id) do
# {:ok, _player} <- Tanks.GameServer.get_player(game_id, player_id) do
Tanks.GameServer.remove_player(game_id, player_id)
end
end
def handle_in(
"input",
%{"left" => left, "right" => right, "up" => up, "down" => down} = _data,
socket
) do
%{game_id: game_id, player_id: player_id} = socket.assigns.current_server
input = %{left: left, right: right, up: up, down: down}
Tanks.Game.Server.send_input(game_id, player_id, input)
{:noreply, socket}
end
def handle_in(
"admin_input",
%{"type" => type} = data,
socket
) do
%{game_id: game_id} = socket.assigns.current_server
Logger.debug("Handling admin_input: #{type}")
case type do
"next_map" ->
Tanks.GameServer.next_map(game_id)
"restart_map" ->
Tanks.GameServer.restart_map(game_id)
"restart_game" ->
Tanks.GameServer.restart_game(game_id)
_other ->
Logger.warn("Unknown admin_input: #{inspect(data)})")
end
{:noreply, socket}
end
def handle_in(
"action",
%{"type" => "fire", "x" => x, "y" => y} = _data,
socket
) do
%{game_id: game_id, player_id: player_id} = socket.assigns.current_server
Tanks.Game.Server.send_action(game_id, player_id, :fire, {x, y})
{:noreply, socket}
end
end
| 27.421053 | 98 | 0.586948 |
739d568793cfddba909a401c7979ccaa6c42dcf1 | 12,698 | ex | Elixir | lib/hex/registry/server.ex | sudix/hex | f739a57d8829ea0b0f7759c164dc9149c3340e49 | [
"Apache-2.0"
] | null | null | null | lib/hex/registry/server.ex | sudix/hex | f739a57d8829ea0b0f7759c164dc9149c3340e49 | [
"Apache-2.0"
] | 1 | 2021-06-25T15:19:59.000Z | 2021-06-25T15:19:59.000Z | lib/hex/registry/server.ex | sudix/hex | f739a57d8829ea0b0f7759c164dc9149c3340e49 | [
"Apache-2.0"
] | null | null | null | defmodule Hex.Registry.Server do
use GenServer
@behaviour Hex.Registry
@name __MODULE__
@filename "cache.ets"
@timeout 60_000
defmacrop unwrap_mix_error(expr) do
quote do
trap_exit? = Process.flag(:trap_exit, true)
try do
unquote(expr)
catch
:exit, {{error, stacktrace}, _call} ->
reraise(error, stacktrace)
after
Process.flag(:trap_exit, trap_exit?)
end
end
end
  # --- Client API ---------------------------------------------------------
  # Each function routes through the named server; piping the call into
  # unwrap_mix_error/1 (a macro) wraps the whole call in the exit trap.

  def start_link(opts \\ []) do
    opts = Keyword.put_new(opts, :name, @name)
    GenServer.start_link(__MODULE__, [], opts)
  end

  # Opens (or re-opens) the ETS cache; optionally kicks off the Hex
  # update check.
  def open(opts \\ []) do
    GenServer.call(@name, {:open, opts}, @timeout)
    |> unwrap_mix_error()
  end

  # Persists and closes the cache; completes once no fetches are pending.
  def close do
    GenServer.call(@name, :close, @timeout)
    |> unwrap_mix_error()
  end

  # Writes the current cache table to disk without closing it.
  def persist do
    GenServer.call(@name, :persist, @timeout)
    |> unwrap_mix_error()
  end

  # Starts async fetches for `packages` ({repo, name} tuples). Raises a
  # Mix error when offline and a package is not cached.
  def prefetch(packages) do
    case GenServer.call(@name, {:prefetch, packages}, @timeout) do
      :ok ->
        :ok

      {:error, message} ->
        Mix.raise(message)
    end
    |> unwrap_mix_error()
  end

  def versions(repo, package) do
    GenServer.call(@name, {:versions, repo, package}, @timeout)
    |> unwrap_mix_error()
  end

  def deps(repo, package, version) do
    GenServer.call(@name, {:deps, repo, package, version}, @timeout)
    |> unwrap_mix_error()
  end

  def checksum(repo, package, version) do
    GenServer.call(@name, {:checksum, repo, package, version}, @timeout)
    |> unwrap_mix_error()
  end

  def retired(repo, package, version) do
    GenServer.call(@name, {:retired, repo, package, version}, @timeout)
    |> unwrap_mix_error()
  end

  # Reads the stored tarball etag (nil when unknown).
  def tarball_etag(repo, package, version) do
    GenServer.call(@name, {:tarball_etag, repo, package, version}, @timeout)
    |> unwrap_mix_error()
  end

  # Stores a tarball etag.
  def tarball_etag(repo, package, version, etag) do
    GenServer.call(@name, {:tarball_etag, repo, package, version, etag}, @timeout)
    |> unwrap_mix_error()
  end

  def last_update() do
    GenServer.call(@name, :last_update, @timeout)
    |> unwrap_mix_error()
  end

  def last_update(time) do
    GenServer.call(@name, {:last_update, time}, @timeout)
    |> unwrap_mix_error()
  end
  def init([]) do
    {:ok, state()}
  end

  # Initial server state: no ETS table open, nothing fetched or pending,
  # no callers waiting, no deferred close.
  defp state() do
    %{
      ets: nil,
      path: nil,
      pending: Hex.Set.new(),
      fetched: Hex.Set.new(),
      waiting: %{},
      closing_fun: nil
    }
  end
  # First open: load the ETS file from disk (creating a fresh table on
  # any error), validating and stamping the cache schema version.
  def handle_call({:open, opts}, _from, %{ets: nil} = state) do
    if Keyword.get(opts, :check_version, true) do
      Hex.UpdateChecker.start_check()
    end

    path = opts[:registry_path] || path()

    ets =
      Hex.string_to_charlist(path)
      |> open_ets()
      |> check_version()
      |> set_version()

    state = %{state | ets: ets, path: path}
    {:reply, :ok, state}
  end

  # Already open: only (optionally) kick off the update check.
  def handle_call({:open, opts}, _from, state) do
    if Keyword.get(opts, :check_version, true) do
      Hex.UpdateChecker.start_check()
    end

    {:reply, :ok, state}
  end

  # Close is deferred until no fetches are pending; the reply is sent
  # manually from inside the closing function.
  def handle_call(:close, from, %{ets: tid, path: path} = state) do
    state =
      wait_closing(state, fn ->
        if tid do
          persist(tid, path)
          :ets.delete(tid)
        end

        GenServer.reply(from, :ok)
        state()
      end)

    {:noreply, state}
  end

  def handle_call(:persist, _from, state) do
    persist(state.ets, state.path)
    {:reply, :ok, state}
  end

  # Deduplicates the requested packages against what is already fetched
  # or in flight, drops stale cache entries for repos whose URL changed,
  # then fetches online or validates the cache offline.
  def handle_call({:prefetch, packages}, _from, state) do
    packages =
      packages
      |> Enum.uniq()
      |> Enum.reject(&(&1 in state.fetched))
      |> Enum.reject(&(&1 in state.pending))

    purge_repo_from_cache(packages, state)

    if Hex.State.fetch!(:offline?) do
      prefetch_offline(packages, state)
    else
      prefetch_online(packages, state)
    end
  end

  # Read queries: answered immediately when the package is fetched,
  # queued when its fetch is still pending (see maybe_wait/4).
  def handle_call({:versions, repo, package}, from, state) do
    maybe_wait({repo, package}, from, state, fn ->
      lookup(state.ets, {:versions, repo, package})
    end)
  end

  def handle_call({:deps, repo, package, version}, from, state) do
    maybe_wait({repo, package}, from, state, fn ->
      lookup(state.ets, {:deps, repo, package, version})
    end)
  end

  def handle_call({:checksum, repo, package, version}, from, state) do
    maybe_wait({repo, package}, from, state, fn ->
      lookup(state.ets, {:checksum, repo, package, version})
    end)
  end

  def handle_call({:retired, repo, package, version}, from, state) do
    maybe_wait({repo, package}, from, state, fn ->
      lookup(state.ets, {:retired, repo, package, version})
    end)
  end

  def handle_call({:tarball_etag, repo, package, version}, _from, state) do
    etag = lookup(state.ets, {:tarball_etag, repo, package, version})
    {:reply, etag, state}
  end

  def handle_call({:tarball_etag, repo, package, version, etag}, _from, state) do
    :ets.insert(state.ets, {{:tarball_etag, repo, package, version}, etag})
    {:reply, :ok, state}
  end

  def handle_call(:last_update, _from, state) do
    time = lookup(state.ets, :last_update)
    {:reply, time, state}
  end

  def handle_call({:last_update, time}, _from, state) do
    :ets.insert(state.ets, {:last_update, time})
    {:reply, :ok, state}
  end

  # Ignore normal exits of monitored processes.
  def handle_info({:DOWN, _ref, :process, _pid, :normal}, state) do
    {:noreply, state}
  end

  # Result of an async registry fetch: record it, reply to every caller
  # queued on this package, and finish a deferred close if one is waiting.
  def handle_info({:get_package, repo, package, result}, state) do
    repo_package = {repo, package}
    pending = Hex.Set.delete(state.pending, repo_package)
    fetched = Hex.Set.put(state.fetched, repo_package)
    {replys, waiting} = Map.pop(state.waiting, repo_package, [])

    write_result(result, repo, package, state)

    Enum.each(replys, fn {from, fun} ->
      GenServer.reply(from, fun.())
    end)

    state = %{state | pending: pending, waiting: waiting, fetched: fetched}
    state = maybe_close(state)
    {:noreply, state}
  end
  # Loads the ETS table from `path` (a charlist, as :ets.file2tab
  # requires); a missing file silently creates a new table, any other
  # read error is reported, the file removed, and a new table created.
  defp open_ets(path) do
    case :ets.file2tab(path) do
      {:ok, tid} ->
        tid

      {:error, {:read_error, {:file_error, _path, :enoent}}} ->
        :ets.new(@name, [])

      {:error, reason} ->
        Hex.Shell.error("Error opening ETS file #{path}: #{inspect(reason)}")
        File.rm(path)
        :ets.new(@name, [])
    end
  end

  # Discards the whole cache if its schema version is not 1.
  defp check_version(ets) do
    case :ets.lookup(ets, :version) do
      [{:version, 1}] ->
        ets

      _ ->
        :ets.delete(ets)
        :ets.new(@name, [])
    end
  end

  defp set_version(ets) do
    :ets.insert(ets, {:version, 1})
    ets
  end

  # Writes the table to disk, creating parent directories as needed.
  defp persist(tid, path) do
    dir = Path.dirname(path)
    File.mkdir_p!(dir)
    :ok = :ets.tab2file(tid, Hex.to_charlist(path))
  end

  # For each repo referenced by `packages`, drop all cached entries when
  # the repo's configured URL differs from the one the cache was built
  # against, then record the current URL.
  defp purge_repo_from_cache(packages, %{ets: ets}) do
    Enum.each(packages, fn {repo, _package} ->
      config = Hex.Repo.get_repo(repo)
      url = config.url

      case :ets.lookup(ets, {:repo, repo}) do
        [{_key, ^url}] -> :ok
        [] -> :ok
        _ -> purge_repo(repo, ets)
      end

      :ets.insert(ets, {{:repo, repo}, url})
    end)
  end

  # Hand-written match spec equivalent to:
  # :ets.fun2ms(fn
  #   {{:versions, ^repo, _package}, _} -> true
  #   {{:deps, ^repo, _package, _version}, _} -> true
  #   {{:checksum, ^repo, _package, _version}, _} -> true
  #   {{:retired, ^repo, _package, _version}, _} -> true
  #   {{:tarball_etag, ^repo, _package, _version}, _} -> true
  #   {{:registry_etag, ^repo, _package}, _} -> true
  #   _ -> false
  # end)
  defp purge_repo_matchspec(repo) do
    [
      {{{:versions, :"$1", :"$2"}, :_}, [{:"=:=", {:const, repo}, :"$1"}], [true]},
      {{{:deps, :"$1", :"$2", :"$3"}, :_}, [{:"=:=", {:const, repo}, :"$1"}], [true]},
      {{{:checksum, :"$1", :"$2", :"$3"}, :_}, [{:"=:=", {:const, repo}, :"$1"}], [true]},
      {{{:retired, :"$1", :"$2", :"$3"}, :_}, [{:"=:=", {:const, repo}, :"$1"}], [true]},
      {{{:tarball_etag, :"$1", :"$2", :"$3"}, :_}, [{:"=:=", {:const, repo}, :"$1"}], [true]},
      {{{:registry_etag, :"$1", :"$2"}, :_}, [{:"=:=", {:const, repo}, :"$1"}], [true]},
      {:_, [], [false]}
    ]
  end

  defp purge_repo(repo, ets) do
    :ets.select_delete(ets, purge_repo_matchspec(repo))
  end
  # Kicks off one async fetch per package via Hex.Parallel; results come
  # back to this server as {:get_package, ...} messages (handle_info).
  defp prefetch_online(packages, state) do
    Enum.each(packages, fn {repo, package} ->
      etag = package_etag(repo, package, state)

      Hex.Parallel.run(:hex_fetcher, {:registry, repo, package}, [await: false], fn ->
        {:get_package, repo, package, Hex.Repo.get_package(repo, package, etag)}
      end)
    end)

    pending = Enum.into(packages, state.pending)
    state = %{state | pending: pending}
    {:reply, :ok, state}
  end

  # Offline mode: every requested package must already be in the cache;
  # otherwise an error describing the first missing package is returned.
  defp prefetch_offline(packages, state) do
    missing =
      Enum.find(packages, fn {repo, package} ->
        unless lookup(state.ets, {:versions, repo, package}) do
          package
        end
      end)

    if missing do
      message =
        "Hex is running in offline mode and the registry entry for " <>
          "package #{inspect(missing)} is not cached locally"

      {:reply, {:error, message}, state}
    else
      fetched = Enum.into(packages, state.fetched)
      {:reply, :ok, %{state | fetched: fetched}}
    end
  end

  # Successful fetch: replace all cached entries for the package with the
  # decoded registry payload (checksums, retirement info, deps, versions)
  # and remember the response etag for conditional requests.
  defp write_result({:ok, {code, body, headers}}, repo, package, %{ets: tid})
       when code in 200..299 do
    releases =
      body
      |> :zlib.gunzip()
      |> Hex.Repo.verify(repo)
      |> Hex.Repo.decode()

    delete_package(repo, package, tid)

    Enum.each(releases, fn %{version: version, checksum: checksum, dependencies: deps} = release ->
      :ets.insert(tid, {{:checksum, repo, package, version}, checksum})
      :ets.insert(tid, {{:retired, repo, package, version}, release[:retired]})

      deps =
        Enum.map(deps, fn dep ->
          {dep[:repository] || repo, dep[:package], dep[:app] || dep[:package], dep[:requirement],
           !!dep[:optional]}
        end)

      :ets.insert(tid, {{:deps, repo, package, version}, deps})
    end)

    versions = Enum.map(releases, & &1[:version])
    :ets.insert(tid, {{:versions, repo, package}, versions})

    # Header keys are charlists here — presumably from :httpc; TODO confirm.
    if etag = headers['etag'] do
      :ets.insert(tid, {{:registry_etag, repo, package}, List.to_string(etag)})
    end
  end

  # 304 Not Modified: the cached entries are still current.
  defp write_result({:ok, {304, _, _}}, _repo, _package, _state) do
    :ok
  end
defp write_result(other, repo, package, %{ets: tid}) do
cached? = !!:ets.lookup(tid, {:versions, package})
print_error(other, repo, package, cached?)
unless cached? do
raise "Stopping due to errors"
end
end
  # Reports a failed registry fetch; notes when the cached copy is used,
  # hints at likely causes for 403/404, and dumps the raw result for
  # unexpected statuses (or always under --debug).
  defp print_error(result, repo, package, cached?) do
    cached_message = if cached?, do: " (using cache)"
    repo_message = if repo, do: "#{repo}/"

    Hex.Shell.error(
      "Failed to fetch record for '#{repo_message}#{package}' from registry#{cached_message}"
    )

    if missing_status?(result) do
      Hex.Shell.error(
        "This could be because the package does not exist, it was spelled " <>
          "incorrectly or you don't have permissions to it"
      )
    end

    if not missing_status?(result) or Mix.debug?() do
      Hex.Utils.print_error_result(result)
    end
  end

  defp missing_status?({:ok, {status, _, _}}), do: status in [403, 404]
  defp missing_status?(_), do: false

  # Answers a read query: reply immediately if the package is fetched,
  # queue the caller if a fetch is in flight, and raise otherwise — a
  # query without a prior prefetch is a programming error.
  defp maybe_wait(package, from, state, fun) do
    cond do
      package in state.fetched ->
        {:reply, fun.(), state}

      package in state.pending ->
        tuple = {from, fun}
        waiting = Map.update(state.waiting, package, [tuple], &[tuple | &1])
        state = %{state | waiting: waiting}
        {:noreply, state}

      true ->
        Mix.raise("Package #{inspect(package)} not prefetched, please report this issue")
    end
  end

  # Runs the close function now if nothing is pending, otherwise stores
  # it to be retried after each completed fetch (see maybe_close/1).
  defp wait_closing(state, fun) do
    if Hex.Set.size(state.pending) == 0 do
      state = fun.()
      %{state | closing_fun: nil}
    else
      %{state | closing_fun: fun}
    end
  end

  defp maybe_close(%{closing_fun: nil} = state) do
    state
  end

  defp maybe_close(%{closing_fun: fun} = state) do
    wait_closing(state, fun)
  end

  # Stored etag for conditional registry requests; nil when none cached.
  defp package_etag(repo, package, %{ets: tid}) do
    case :ets.lookup(tid, {:registry_etag, repo, package}) do
      [{_, etag}] -> etag
      [] -> nil
    end
  end

  # Default on-disk location of the cache file.
  defp path do
    Path.join(Hex.State.fetch!(:home), @filename)
  end

  # Removes every cached entry for a package (etag, versions, and all
  # per-version records).
  defp delete_package(repo, package, tid) do
    :ets.delete(tid, {:registry_etag, repo, package})
    versions = lookup(tid, {:versions, repo, package}) || []
    :ets.delete(tid, {:versions, repo, package})

    Enum.each(versions, fn version ->
      :ets.delete(tid, {:checksum, repo, package, version})
      :ets.delete(tid, {:retired, repo, package, version})
      :ets.delete(tid, {:deps, repo, package, version})
    end)
  end

  # Single-value lookup: unwraps {key, element} or returns nil.
  defp lookup(tid, key) do
    case :ets.lookup(tid, key) do
      [{^key, element}] -> element
      [] -> nil
    end
  end
end
| 26.509395 | 99 | 0.594346 |
739d993c15976afaba323929785db61825bcdd52 | 549 | exs | Elixir | admin/test/models/video_test.exs | shipperizer/symmetrical-octo-parakeet | 6c9c428898d3529c04d872fec8f099456cc54633 | [
"MIT"
] | null | null | null | admin/test/models/video_test.exs | shipperizer/symmetrical-octo-parakeet | 6c9c428898d3529c04d872fec8f099456cc54633 | [
"MIT"
] | null | null | null | admin/test/models/video_test.exs | shipperizer/symmetrical-octo-parakeet | 6c9c428898d3529c04d872fec8f099456cc54633 | [
"MIT"
] | null | null | null | defmodule Admin.VideoTest do
use Admin.ModelCase
alias Admin.Video
@valid_attrs %{active: true, approved_at: %{day: 17, hour: 14, min: 0, month: 4, sec: 0, year: 2010}, description: "some content", likes: 42, name: "some content", views: 42}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = Video.changeset(%Video{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = Video.changeset(%Video{}, @invalid_attrs)
refute changeset.valid?
end
end
| 28.894737 | 176 | 0.701275 |
739dcc19307ca6bf794fc00706153164d5e4e4ac | 5,105 | exs | Elixir | test/controllers/webhook_controller_test.exs | gebner/bors-ng | 7a88ef7cceb1f112374a426a4bdfffc323b44a61 | [
"Apache-2.0"
] | null | null | null | test/controllers/webhook_controller_test.exs | gebner/bors-ng | 7a88ef7cceb1f112374a426a4bdfffc323b44a61 | [
"Apache-2.0"
] | 1 | 2020-03-07T08:28:14.000Z | 2020-03-07T08:28:14.000Z | test/controllers/webhook_controller_test.exs | gebner/bors-ng | 7a88ef7cceb1f112374a426a4bdfffc323b44a61 | [
"Apache-2.0"
] | null | null | null | defmodule BorsNG.WebhookControllerTest do
use BorsNG.ConnCase
alias BorsNG.Database.Installation
alias BorsNG.Database.Patch
alias BorsNG.Database.Project
alias BorsNG.Database.Repo
alias BorsNG.Database.User
alias BorsNG.GitHub.Pr
alias BorsNG.GitHub
setup do
installation = Repo.insert!(%Installation{
installation_xref: 31,
})
project = Repo.insert!(%Project{
installation_id: installation.id,
repo_xref: 13,
name: "example/project",
})
user = Repo.insert!(%User{
user_xref: 23,
login: "ghost",
})
{:ok, installation: installation, project: project, user: user}
end
test "edit PR", %{conn: conn, project: project} do
patch = Repo.insert!(%Patch{
title: "T",
body: "B",
pr_xref: 1,
project_id: project.id,
into_branch: "SOME_BRANCH"})
body_params = %{
"repository" => %{"id" => 13},
"action" => "edited",
"pull_request" => %{
"number" => 1,
"title" => "U",
"body" => "C",
"state" => "open",
"base" => %{"ref" => "OTHER_BRANCH", "repo" => %{"id" => 456}},
"head" => %{
"sha" => "S",
"ref" => "BAR_BRANCH",
"repo" => %{
"id" => 345
},
},
"merged_at" => nil,
"mergeable" => true,
"user" => %{
"id" => 23,
"login" => "ghost",
"avatar_url" => "U"}}}
conn
|> put_req_header("x-github-event", "pull_request")
|> post(webhook_path(conn, :webhook, "github"), body_params)
patch2 = Repo.get!(Patch, patch.id)
assert "U" == patch2.title
assert "C" == patch2.body
assert "OTHER_BRANCH" == patch2.into_branch
end
test "sync PR on reopen", %{conn: conn, project: project} do
patch = Repo.insert!(%Patch{
title: "T",
body: "B",
pr_xref: 1,
project_id: project.id,
commit: "A",
open: false,
into_branch: "SOME_BRANCH"})
body_params = %{
"repository" => %{"id" => 13},
"action" => "reopened",
"pull_request" => %{
"number" => 1,
"title" => "T",
"body" => "B",
"state" => "open",
"base" => %{"ref" => "OTHER_BRANCH", "repo" => %{"id" => 456}},
"head" => %{
"sha" => "B",
"ref" => "BAR_BRANCH",
"repo" => %{
"id" => 345
},
},
"merged_at" => nil,
"mergeable" => true,
"user" => %{
"id" => 23,
"login" => "ghost",
"avatar_url" => "U"}}}
conn
|> put_req_header("x-github-event", "pull_request")
|> post(webhook_path(conn, :webhook, "github"), body_params)
patch2 = Repo.get!(Patch, patch.id)
assert "B" == patch2.commit
assert patch2.open
end
test "deletes by patch", %{conn: conn, project: proj} do
pr = %Pr{
number: 1,
title: "Test",
body: "Mess",
state: :closed,
base_ref: "master",
head_sha: "00000001",
head_ref: "update",
base_repo_id: 13,
head_repo_id: 13,
merged: true
}
GitHub.ServerMock.put_state(%{
{{:installation, 31}, 13} => %{
branches: %{"master" => "ini", "update" => "foo"},
comments: %{1 => []},
statuses: %{},
pulls: %{
1 => pr,
},
files: %{
"master" => %{
".github/bors.toml" => ~s"""
status = [ "ci" ]
delete_merged_branches = true
"""
},
"update" => %{
".github/bors.toml" => ~s"""
status = [ "ci" ]
delete_merged_branches = true
"""
}
}
}})
%Patch{
project_id: proj.id,
pr_xref: 1,
commit: "foo",
into_branch: "master"}
|> Repo.insert!()
body_params = %{
"repository" => %{"id" => 13},
"action" => "closed",
"pull_request" => %{
"number" => 1,
"title" => "U",
"body" => "C",
"state" => "closed",
"base" => %{"ref" => "OTHER_BRANCH", "repo" => %{"id" => 456}},
"head" => %{
"sha" => "S",
"ref" => "BAR_BRANCH",
"repo" => %{
"id" => 345
},
},
"merged_at" => "time",
"mergeable" => true,
"user" => %{
"id" => 23,
"login" => "ghost",
"avatar_url" => "U"}}}
conn
|> put_req_header("x-github-event", "pull_request")
|> post(webhook_path(conn, :webhook, "github"), body_params)
wait_until_other_branch_is_removed()
branches = GitHub.ServerMock.get_state()
|> Map.get({{:installation, 31}, 13})
|> Map.get(:branches)
|> Map.keys
assert branches == ["master"]
end
def wait_until_other_branch_is_removed do
branches = GitHub.ServerMock.get_state()
|> Map.get({{:installation, 31}, 13})
|> Map.get(:branches)
|> Map.keys
if branches == ["master"] do
:ok
else
wait_until_other_branch_is_removed()
end
end
end
| 25.782828 | 71 | 0.477375 |
739dd8f799762988ee6d1e8f051613e43ae09fba | 304 | ex | Elixir | kekend/lib/serial_killer/application.ex | evbogdanov/serial_killer | 7e67452b26f66a0fd01eca0133e705219957a17d | [
"MIT"
] | null | null | null | kekend/lib/serial_killer/application.ex | evbogdanov/serial_killer | 7e67452b26f66a0fd01eca0133e705219957a17d | [
"MIT"
] | null | null | null | kekend/lib/serial_killer/application.ex | evbogdanov/serial_killer | 7e67452b26f66a0fd01eca0133e705219957a17d | [
"MIT"
] | null | null | null | defmodule SerialKiller.Application do
use Application
def start(_type, _args),
do: Supervisor.start_link(children(), opts())
defp children do
[
SerialKiller.Endpoint
]
end
defp opts do
[
strategy: :one_for_one,
name: SerialKiller.Supervisor
]
end
end
| 15.2 | 49 | 0.651316 |
739de693b91d3f1ea38fccaa8b8d49d0a0448960 | 1,376 | ex | Elixir | test/support/data_case.ex | sophiabrandt/mango | c42c2e463c7bdeb7f18400a3001a8cc6e326159d | [
"MIT"
] | 4 | 2019-07-17T04:46:37.000Z | 2020-09-20T14:15:23.000Z | test/support/data_case.ex | sophiabrandt/mango | c42c2e463c7bdeb7f18400a3001a8cc6e326159d | [
"MIT"
] | null | null | null | test/support/data_case.ex | sophiabrandt/mango | c42c2e463c7bdeb7f18400a3001a8cc6e326159d | [
"MIT"
] | 1 | 2019-07-17T04:46:39.000Z | 2019-07-17T04:46:39.000Z | defmodule Mango.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Mango.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Mango.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Mango.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Mango.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 25.481481 | 77 | 0.678052 |
739de8700c1d4284aa4f359c261b69498eda8d8d | 166 | exs | Elixir | reduce/reduce1.exs | leogtzr/elixir_code_snippets | 5c8c921dc165de8fc29bb14046386efa81ce7542 | [
"MIT"
] | null | null | null | reduce/reduce1.exs | leogtzr/elixir_code_snippets | 5c8c921dc165de8fc29bb14046386efa81ce7542 | [
"MIT"
] | null | null | null | reduce/reduce1.exs | leogtzr/elixir_code_snippets | 5c8c921dc165de8fc29bb14046386efa81ce7542 | [
"MIT"
] | null | null | null | defmodule MyList do
def reduce([], value, _fun) do
value
end
def reduce([head | tail], value, fun) do
reduce(tail, fun.(head, value), fun)
end
end
| 13.833333 | 42 | 0.620482 |
739e000b65a73c2fb50ca166ccd31fbe0e505ee1 | 2,855 | ex | Elixir | lib/new_relic/harvest/collector/transaction_event/harvester.ex | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 227 | 2018-09-05T15:33:23.000Z | 2022-02-25T18:12:06.000Z | lib/new_relic/harvest/collector/transaction_event/harvester.ex | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 211 | 2018-09-05T21:42:41.000Z | 2022-03-25T17:51:56.000Z | lib/new_relic/harvest/collector/transaction_event/harvester.ex | tpitale/elixir_agent | f356b588fe9d7cc3b9db00d9a251e009a20b3e0e | [
"Apache-2.0"
] | 84 | 2018-09-05T04:26:26.000Z | 2022-03-09T14:28:14.000Z | defmodule NewRelic.Harvest.Collector.TransactionEvent.Harvester do
use GenServer
@moduledoc false
alias NewRelic.Harvest
alias NewRelic.Harvest.Collector
alias NewRelic.Transaction.Event
alias NewRelic.Util.PriorityQueue
def start_link(_) do
GenServer.start_link(__MODULE__, [])
end
def init(_) do
{:ok,
%{
start_time: System.system_time(),
start_time_mono: System.monotonic_time(),
end_time_mono: nil,
sampling: %{
reservoir_size: Collector.AgentRun.lookup(:transaction_event_reservoir_size, 100),
events_seen: 0
},
events: PriorityQueue.new()
}}
end
# API
def report_event(%Event{} = event),
do:
Collector.TransactionEvent.HarvestCycle
|> Harvest.HarvestCycle.current_harvester()
|> GenServer.cast({:report, event})
def gather_harvest,
do:
Collector.TransactionEvent.HarvestCycle
|> Harvest.HarvestCycle.current_harvester()
|> GenServer.call(:gather_harvest)
# Server
def handle_cast(_late_msg, :completed), do: {:noreply, :completed}
def handle_cast({:report, event}, state) do
state =
state
|> store_event(event)
|> store_sampling
{:noreply, state}
end
def handle_call(_late_msg, _from, :completed), do: {:reply, :completed, :completed}
def handle_call(:send_harvest, _from, state) do
send_harvest(%{state | end_time_mono: System.monotonic_time()})
{:reply, :ok, :completed}
end
def handle_call(:gather_harvest, _from, state) do
{:reply, build_payload(state), state}
end
# Helpers
def store_event(%{sampling: %{reservoir_size: size}} = state, event) do
key = event.user_attributes[:priority] || :rand.uniform() |> Float.round(6)
%{state | events: PriorityQueue.insert(state.events, size, key, event)}
end
def store_sampling(%{sampling: sampling} = state),
do: %{state | sampling: Map.update!(sampling, :events_seen, &(&1 + 1))}
def send_harvest(state) do
events = build_payload(state)
Collector.Protocol.transaction_event([
Collector.AgentRun.agent_run_id(),
state.sampling,
events
])
log_harvest(length(events), state.sampling.events_seen, state.sampling.reservoir_size)
end
def log_harvest(harvest_size, events_seen, reservoir_size) do
NewRelic.report_metric({:supportability, "AnalyticEventData"}, harvest_size: harvest_size)
NewRelic.report_metric({:supportability, "AnalyticEventData"},
events_seen: events_seen,
reservoir_size: reservoir_size
)
NewRelic.log(
:debug,
"Completed Transaction Event harvest - " <>
"size: #{harvest_size}, seen: #{events_seen}, max: #{reservoir_size}"
)
end
def build_payload(state) do
state.events
|> PriorityQueue.values()
|> Event.format_events()
end
end
| 25.954545 | 94 | 0.67986 |
739e006699139846dd192b37ee2d07919363f3e5 | 4,522 | ex | Elixir | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/binding.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/binding.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/binding.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudFunctions.V1.Model.Binding do
@moduledoc """
Associates `members` with a `role`.
## Attributes
* `condition` (*type:* `GoogleApi.CloudFunctions.V1.Model.Expr.t`, *default:* `nil`) - The condition that is associated with this binding.
If the condition evaluates to `true`, then this binding applies to the
current request.
If the condition evaluates to `false`, then this binding does not apply to
the current request. However, a different role binding might grant the same
role to one or more of the members in this binding.
To learn which resources support conditions in their IAM policies, see the
[IAM
documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
* `members` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the identities requesting access for a Cloud Platform resource.
`members` can have the following values:
* `allUsers`: A special identifier that represents anyone who is
on the internet; with or without a Google account.
* `allAuthenticatedUsers`: A special identifier that represents anyone
who is authenticated with a Google account or a service account.
* `user:{emailid}`: An email address that represents a specific Google
account. For example, `[email protected]` .
* `serviceAccount:{emailid}`: An email address that represents a service
account. For example, `[email protected]`.
* `group:{emailid}`: An email address that represents a Google group.
For example, `[email protected]`.
* `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique
identifier) representing a user that has been recently deleted. For
example, `[email protected]?uid=123456789012345678901`. If the user is
recovered, this value reverts to `user:{emailid}` and the recovered user
retains the role in the binding.
* `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus
unique identifier) representing a service account that has been recently
deleted. For example,
`[email protected]?uid=123456789012345678901`.
If the service account is undeleted, this value reverts to
`serviceAccount:{emailid}` and the undeleted service account retains the
role in the binding.
* `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique
identifier) representing a Google group that has been recently
deleted. For example, `[email protected]?uid=123456789012345678901`. If
the group is recovered, this value reverts to `group:{emailid}` and the
recovered group retains the role in the binding.
* `domain:{domain}`: The G Suite domain (primary) that represents all the
users of that domain. For example, `google.com` or `example.com`.
* `role` (*type:* `String.t`, *default:* `nil`) - Role that is assigned to `members`.
For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:condition => GoogleApi.CloudFunctions.V1.Model.Expr.t(),
:members => list(String.t()),
:role => String.t()
}
field(:condition, as: GoogleApi.CloudFunctions.V1.Model.Expr)
field(:members, type: :list)
field(:role)
end
defimpl Poison.Decoder, for: GoogleApi.CloudFunctions.V1.Model.Binding do
def decode(value, options) do
GoogleApi.CloudFunctions.V1.Model.Binding.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudFunctions.V1.Model.Binding do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.87037 | 142 | 0.702344 |
739e1bff337f382d814ff7f6adc96f76871abe83 | 857 | ex | Elixir | test/support/fixtures/accounts_fixtures.ex | riamaria/banchan | c4f8bd9374acaf0a8bb2c501e2ae1eb78f96579f | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | test/support/fixtures/accounts_fixtures.ex | riamaria/banchan | c4f8bd9374acaf0a8bb2c501e2ae1eb78f96579f | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | test/support/fixtures/accounts_fixtures.ex | riamaria/banchan | c4f8bd9374acaf0a8bb2c501e2ae1eb78f96579f | [
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null | defmodule Banchan.AccountsFixtures do
@moduledoc """
This module defines test helpers for creating
entities via the `Banchan.Accounts` context.
"""
def unique_user_email, do: "user#{System.unique_integer()}@example.com"
def unique_user_handle, do: "user#{:rand.uniform(100_000)}"
def valid_user_password, do: "hello world!"
def valid_user_attributes(attrs \\ %{}) do
Enum.into(attrs, %{
email: unique_user_email(),
handle: unique_user_handle(),
password: valid_user_password()
})
end
def user_fixture(attrs \\ %{}) do
{:ok, user} =
attrs
|> valid_user_attributes()
|> Banchan.Accounts.register_user_test()
user
end
def extract_user_token(fun) do
{:ok, captured} = fun.(&"[TOKEN]#{&1}[TOKEN]")
[_, token, _] = String.split(captured.body, "[TOKEN]")
token
end
end
| 25.205882 | 73 | 0.656943 |
739e39f46a39372dc4aceddee72106cb630cea3b | 3,280 | ex | Elixir | lib/content/audio/predictions.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-01-24T12:39:05.000Z | 2022-01-24T12:39:05.000Z | lib/content/audio/predictions.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 40 | 2021-05-05T10:14:25.000Z | 2022-03-31T18:34:15.000Z | lib/content/audio/predictions.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-03-20T21:08:12.000Z | 2022-03-20T21:08:12.000Z | defmodule Content.Audio.Predictions do
@moduledoc """
Module to convert a Message.Predictions.t() struct into the
appropriate audio struct, whether it's a NextTrainCountdown.t(),
TrainIsArriving.t(), TrainIsBoarding.t(), or TrackChange.t().
"""
require Logger
require Content.Utilities
alias Content.Audio.TrackChange
alias Content.Audio.TrainIsBoarding
alias Content.Audio.TrainIsArriving
alias Content.Audio.Approaching
alias Content.Audio.NextTrainCountdown
@heavy_rail_routes ["Red", "Orange", "Blue"]
@spec from_sign_content(
{Signs.Utilities.SourceConfig.source(), Content.Message.Predictions.t()},
Content.line_location(),
boolean()
) :: nil | Content.Audio.t()
def from_sign_content(
{%Signs.Utilities.SourceConfig{} = src, %Content.Message.Predictions{} = predictions},
line,
multi_source?
) do
cond do
TrackChange.park_track_change?(predictions) and predictions.minutes == :boarding ->
%TrackChange{
destination: predictions.destination,
route_id: predictions.route_id,
berth: predictions.stop_id
}
predictions.minutes == :boarding ->
%TrainIsBoarding{
destination: predictions.destination,
trip_id: predictions.trip_id,
route_id: predictions.route_id,
track_number: Content.Utilities.stop_track_number(predictions.stop_id)
}
predictions.minutes == :arriving ->
%TrainIsArriving{
destination: predictions.destination,
trip_id: predictions.trip_id,
platform: src.platform,
route_id: predictions.route_id
}
predictions.minutes == :approaching and (line == :top or multi_source?) and
predictions.route_id in @heavy_rail_routes ->
%Approaching{
destination: predictions.destination,
trip_id: predictions.trip_id,
platform: src.platform,
route_id: predictions.route_id,
new_cars?: predictions.new_cars?
}
predictions.minutes == :approaching ->
%NextTrainCountdown{
destination: predictions.destination,
route_id: predictions.route_id,
minutes: 1,
verb: if(src.terminal?, do: :departs, else: :arrives),
track_number: Content.Utilities.stop_track_number(predictions.stop_id),
platform: src.platform
}
predictions.minutes == :max_time ->
%NextTrainCountdown{
destination: predictions.destination,
route_id: predictions.route_id,
minutes: div(Content.Utilities.max_time_seconds(), 60),
verb: if(src.terminal?, do: :departs, else: :arrives),
track_number: Content.Utilities.stop_track_number(predictions.stop_id),
platform: src.platform
}
is_integer(predictions.minutes) ->
%NextTrainCountdown{
destination: predictions.destination,
route_id: predictions.route_id,
minutes: predictions.minutes,
verb: if(src.terminal?, do: :departs, else: :arrives),
track_number: Content.Utilities.stop_track_number(predictions.stop_id),
platform: src.platform
}
end
end
end
| 34.893617 | 94 | 0.650305 |
739e6236941e55fa70274d22a56729923288a620 | 73 | exs | Elixir | lib/elixir/test/elixir/fixtures/configs/env.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/test/elixir/fixtures/configs/env.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/test/elixir/fixtures/configs/env.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | import Config
config :my_app, env: config_env(), target: config_target()
| 24.333333 | 58 | 0.767123 |