Dataset columns:
hexsha: string (length 40) | size: int64 (2 to 991k) | ext: string (2 classes) | lang: string (1 class)
max_stars_repo_path: string (length 4 to 208) | max_stars_repo_name: string (length 6 to 106) | max_stars_repo_head_hexsha: string (length 40) | max_stars_repo_licenses: sequence | max_stars_count: int64 (1 to 33.5k, nullable) | max_stars_repo_stars_event_min_datetime: string (length 24, nullable) | max_stars_repo_stars_event_max_datetime: string (length 24, nullable)
max_issues_repo_path: string (length 4 to 208) | max_issues_repo_name: string (length 6 to 106) | max_issues_repo_head_hexsha: string (length 40) | max_issues_repo_licenses: sequence | max_issues_count: int64 (1 to 16.3k, nullable) | max_issues_repo_issues_event_min_datetime: string (length 24, nullable) | max_issues_repo_issues_event_max_datetime: string (length 24, nullable)
max_forks_repo_path: string (length 4 to 208) | max_forks_repo_name: string (length 6 to 106) | max_forks_repo_head_hexsha: string (length 40) | max_forks_repo_licenses: sequence | max_forks_count: int64 (1 to 6.91k, nullable) | max_forks_repo_forks_event_min_datetime: string (length 24, nullable) | max_forks_repo_forks_event_max_datetime: string (length 24, nullable)
content: string (length 2 to 991k) | avg_line_length: float64 (1 to 36k) | max_line_length: int64 (1 to 977k) | alphanum_fraction: float64 (0 to 1)
hexsha: f7e88280c6542738637ed43698b99b2ecfd8104f | size: 1,473 | ext: ex | lang: Elixir
path: lib/tauth_proxy/proxy_server.ex | repo: tellerhq/tauth_proxy | head: 761ce3dea4eb895f9ed67bbc479dfc4adabe7286 | licenses: ["MIT"]
stars: 10 (2017-12-14T10:40:40.000Z to 2018-06-18T09:46:13.000Z) | issues: null | forks: null

defmodule TAuthProxy.ProxyServer do
use Plug.Router
import Plug.Conn
plug Plug.Logger, log: :debug
plug :match
plug :dispatch
@www "https://teller.io"
@api "https://api.teller.io"
match "/api/*path" do
dir = Application.app_dir(:tauth_proxy)
{:ok, client} =
:hackney.request(method(conn), uri(conn), conn.req_headers, :stream, [
ssl_options: [
certfile: :filename.absname("priv/certificate.pem", dir),
keyfile: :filename.absname("priv/private_key.pem", dir)
]
])
conn
|> write(client)
|> read(client)
end
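# Streams the request body from the client to the upstream hackney request,
# recursing until the whole body has been read and forwarded.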
defp write(conn, client) do
case read_body(conn, []) do
{:ok, body, conn} ->
:hackney.send_body(client, body)
conn
{:more, body, conn} ->
:hackney.send_body(client, body)
write(conn, client)
end
end
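# Reads the upstream response and relays status, headers and body back to the
# caller, dropping the hop-by-hop Transfer-Encoding header.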
defp read(conn, client) do
{:ok, status, headers, client} = :hackney.start_response(client)
{:ok, body} = :hackney.body(client)
headers = List.keydelete(headers, "Transfer-Encoding", 0)
send_resp(%{conn | resp_headers: headers}, status, body)
end
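# Maps the Plug request method string onto the atom hackney expects.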
defp method(%{method: "GET"}), do: :get
defp method(%{method: "POST"}), do: :post
defp method(%{method: "PUT"}), do: :put
defp method(%{method: "PATCH"}), do: :patch
defp method(%{method: "DELETE"}), do: :delete
defp uri(conn) do
["api" | path_info] = conn.path_info
@api <> "/" <> Enum.join(path_info, "/")
end
end
| 24.55 | 76 | 0.600136 |
hexsha: f7e8bb7aa25864b77058163b3493c5e9d621b597 | size: 2,200 | ext: ex | lang: Elixir
path: clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/file_list.ex | repo: kolorahl/elixir-google-api | head: 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Model.FileList do
@moduledoc """
List of files for a report.
## Attributes
* `etag` (*type:* `String.t`, *default:* `nil`) - Etag of this resource.
* `items` (*type:* `list(GoogleApi.DFAReporting.V34.Model.File.t)`, *default:* `nil`) - The files returned in this response.
* `kind` (*type:* `String.t`, *default:* `dfareporting#fileList`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#fileList".
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Continuation token used to page through files. To retrieve the next page of results, set the next request's "pageToken" to the value of this field. The page token is only valid for a limited amount of time and should not be persisted.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:etag => String.t(),
:items => list(GoogleApi.DFAReporting.V34.Model.File.t()),
:kind => String.t(),
:nextPageToken => String.t()
}
field(:etag)
field(:items, as: GoogleApi.DFAReporting.V34.Model.File, type: :list)
field(:kind)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V34.Model.FileList do
def decode(value, options) do
GoogleApi.DFAReporting.V34.Model.FileList.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V34.Model.FileList do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.285714 | 297 | 0.711818 |
hexsha: f7e8bfaed52b5068dd3ae50d41d2bd396e54fb67 | size: 270 | ext: exs | lang: Elixir
path: config/test.exs | repo: Simrayz/foresight | head: 01a35dceee369ad068102669f366cc6e61169626 | licenses: ["MIT"]
stars: null | issues: 2 (2022-02-17T21:25:57.000Z to 2022-02-17T21:33:15.000Z) | forks: null

use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :foresight, ForesightWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 24.545455 | 56 | 0.740741 |
hexsha: f7e8c266ddaeb7f847d1089a5ff75d3e5c21e6c0 | size: 457 | ext: ex | lang: Elixir
path: lib/draw_something/dictionary/crawler/supervisor.ex | repo: ShockN745/drawcheat | head: d02fa05c1499e3c91d7dd85342c0b149b203c240 | licenses: ["MIT"]
stars: null | issues: null | forks: null

defmodule DrawSomething.Dictionary.Crawler.Supervisor do
use Supervisor
import Supervisor.Spec
def start_link do
Supervisor.start_link __MODULE__, :ok, name: __MODULE__
end
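# Starts a crawler worker for the given index range as a dynamic child of the
# :simple_one_for_one supervisor.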
def start_crawler(start_idx, end_idx) do
Supervisor.start_child __MODULE__, [start_idx, end_idx]
end
def init(:ok) do
children = [
worker(DrawSomething.Dictionary.Crawler, [])
]
supervise(children, strategy: :simple_one_for_one)
end
end
| 20.772727 | 59 | 0.73523 |
hexsha: f7e8d03df943ebb21e619fd7b811b96ffb679df3 | size: 2,301 | ext: exs | lang: Elixir
path: test/elxpro_blog/comments_test.exs | repo: AkioCode/elxpro-blog | head: 236984915851b91058e091414deb70c5e8fed72a | licenses: ["MIT"]
stars: null | issues: 4 (2021-08-11T03:19:33.000Z to 2021-09-26T01:29:58.000Z) | forks: null

defmodule ElxproBlog.CommentsTest do
use ElxproBlog.DataCase
alias ElxproBlog.Comments.Core.CommentRepo
alias ElxproBlog.Factory
describe "comments" do
alias ElxproBlog.Comments.Data.Comment
@valid_attrs %{content: "some content"}
@update_attrs %{content: "some updated content"}
@invalid_attrs %{content: nil}
test "list_comments/0 returns all comments" do
comment = Factory.insert(:comment)
comment_inserted = Repo.get!(Comment, comment.id)
assert CommentRepo.list_comments() == [comment_inserted]
end
test "get_comment!/1 returns the comment with given id" do
comment = Factory.insert(:comment)
comment_inserted = Repo.get!(Comment, comment.id) |> Repo.preload(:post)
assert comment == comment_inserted
end
test "create_comment/1 with valid data creates a comment" do
post = Factory.insert(:post)
assert {:ok, %Comment{} = comment} = CommentRepo.create_comment(post.id, @valid_attrs)
assert comment.content == "some content"
end
test "create_comment/1 with invalid data returns error changeset" do
post = Factory.insert(:post)
assert {:error, %Ecto.Changeset{}} = CommentRepo.create_comment(post.id, @invalid_attrs)
end
test "update_comment/2 with valid data updates the comment" do
comment = Factory.insert(:comment)
assert {:ok, %Comment{} = comment} = CommentRepo.update_comment(comment, @update_attrs)
assert comment.content == "some updated content"
end
test "update_comment/2 with invalid data returns error changeset" do
comment = Factory.insert(:comment)
assert {:error, %Ecto.Changeset{}} = CommentRepo.update_comment(comment, @invalid_attrs)
comment_inserted = Repo.get!(Comment, comment.id) |> Repo.preload(:post)
assert comment == comment_inserted
end
test "delete_comment/1 deletes the comment" do
comment = Factory.insert(:comment)
assert {:ok, %Comment{}} = CommentRepo.delete_comment(comment)
assert_raise Ecto.NoResultsError, fn -> CommentRepo.get_comment!(comment.id) end
end
test "change_comment/1 returns a comment changeset" do
comment = Factory.insert(:comment)
assert %Ecto.Changeset{} = CommentRepo.change_comment(comment)
end
end
end
| 37.112903 | 94 | 0.704476 |
hexsha: f7e8f221c7f1c61148b4ae50fabff9ca81686816 | size: 63 | ext: ex | lang: Elixir
path: lib/littlechat/connected_user.ex | repo: BenBera/littleChatWebrtc | head: 91850323d0e60c4049a84ff8985b09856b356016 | licenses: ["MIT"]
stars: 166 (2020-07-15T14:47:19.000Z to 2022-03-25T03:57:35.000Z) | issues: 12 (2020-07-01T23:32:47.000Z to 2021-03-18T21:21:28.000Z) | forks: 21 (2020-07-15T14:59:39.000Z to 2022-03-20T21:05:16.000Z)

defmodule Littlechat.ConnectedUser do
defstruct uuid: ""
end
| 15.75 | 37 | 0.793651 |
hexsha: f7e8f514161a7d93b9ef30a45fd277d0689751e6 | size: 1,381 | ext: ex | lang: Elixir
path: lib/musiccast/upnp/plug/event_dispatcher.ex | repo: almightycouch/musiccast | head: eba8107d4e0829f988693625d59d72090133a78a | licenses: ["MIT"]
stars: 6 (2017-03-18T21:04:52.000Z to 2020-09-27T00:43:02.000Z) | issues: null | forks: 1 (2021-01-11T17:23:22.000Z to 2021-01-11T17:23:22.000Z)

defmodule MusicCast.UPnP.Plug.EventDispatcher do
@moduledoc """
A `Plug` for receiving and dispatching UPnP notification events.
In order to subscribe to UPnP events, the `:upnp_callback_url` config must be configured correctly:
config :musiccast,
upnp_callback_url: "http://192.168.0.42:4000/upnp/event"
## Example
scope "/upnp", MusicCast.UPnP.Plug do
forward "/event", EventDispatcher, service: MusicCast.UPnP.AVTransport
end
"""
alias MusicCast.UPnP.Service
import Plug.Conn
@behaviour Plug
@spec init(Plug.opts) :: Plug.opts
def init(opts), do: opts
@spec call(Plug.Conn.t, Plug.opts) :: Plug.Conn.t
def call(conn, opts) do
service = Keyword.fetch!(opts, :service)
[session_id] = get_req_header(conn, "sid")
{:ok, body, conn} = read_body(conn)
if dispatch_event(session_id, Service.cast_event(service, body)),
do: send_resp(conn, 200, ""),
else: send_resp(conn, 410, "")
end
#
# Helpers
#
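# Looks up the registered device whose UPnP subscription id matches the
# notification and forwards the decoded event to its process.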
defp dispatch_event(session_id, event) do
devices = MusicCast.which_devices(:upnp_session_id)
if pid = Enum.find_value(devices, &match_device(session_id, &1)) do
send(pid, {:upnp_event, event})
end
end
defp match_device(session_id, {pid, device_session_id}) when session_id == device_session_id, do: pid
defp match_device(_session_id, _lookup), do: nil
end
| 27.62 | 103 | 0.689356 |
hexsha: f7e8fc32acfcad85197a24d4fdb067db5d905f0c | size: 405 | ext: ex | lang: Elixir
path: display/lib/display/application.ex | repo: Jwsonic/air | head: b4f6ed4b2be47aee78b40345044c96de1fc49813 | licenses: ["MIT"]
stars: 1 (2021-12-14T09:20:06.000Z to 2021-12-14T09:20:06.000Z) | issues: null | forks: null

defmodule Display.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
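# Starts the Scenic supervisor with the viewport defined in the :display app config.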
def start(_type, _args) do
children = [
{Scenic, viewports: [Application.get_env(:display, :viewport)]}
]
opts = [strategy: :one_for_one, name: Display.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 23.823529 | 69 | 0.711111 |
hexsha: f7e92cc16c87834ae1d985ec4df0b98e4766f0ce | size: 5,054 | ext: ex | lang: Elixir
path: lib/ElixirWx/WxFunctions.ex | repo: DwayneDibley/ElixirWxTests | head: 1446a92e6510f31eac2c4d85dfdcf81fdcfd73e4 | licenses: ["Apache-2.0"]
stars: 3 (2018-10-26T21:12:47.000Z to 2020-09-01T02:09:34.000Z) | issues: null | forks: null

defmodule WxFunctions do
require Logger
# import WxUtilities
@moduledoc """
## General functions

    newWindow({window_spec, evt_handler}, show)
    newWindow(name, {window_spec, evt_handler}, show)
"""
# @doc """
# Create a new window where:
# windiw_spec: Ths file containing the window specification
# evt_handler: The file containing the code to be called on window _eventSource
# show: Bool, if true the window will be shown, if false, the window must be
# shown by calling showWindow separately
# """
# def newWindow({window_spec, evt_handler}, show) do
# case WxWindowObject.start_link({nil, {window_spec, evt_handler}}, show) do
# {:ok, window} ->
# {:ok, window}
#
# {:error, reason} ->
# Logger.error("Cannot create window> #{inspect(reason)}")
# :err
# end
# end
#
# @doc """
# Create a new named window where:
# name: An atom used to name the window
# windiw_spec: Ths file containing the window specification
# evt_handler: The file containing the code to be called on window _eventSource
# show: Bool, if true the window will be shown, if false, the window must be
# shown by calling showWindow separately
# """
# def newWindow(name, {window_spec, evt_handler}, show) do
# case WxWindowObject.start_link({name, {window_spec, evt_handler}}, show) do
# {:ok, window} ->
# {:ok, window}
#
# {:error, reason} ->
# Logger.error("Cannot create window> #{inspect(reason)}")
# :err
# end
# end
# @doc """
# Finction called to close and destroy the current window. This may be called from
# an event callback.
# """
# def closeWindow(windowName) do
# Logger.debug("closeWindow(#{inspect(windowName)})")
# {_, _, frame} = WinInfo.get_by_name(:__main_frame__)
#
# case frame do
# nil ->
# Logger.error("No __main_frame__!!")
#
# _ ->
# :wxEvtHandler.disconnect(frame)
# :wxWindow.destroy(frame)
# end
#
# send(self(), {WindowExit, windowName})
# end
#
# ------------------------------------------------------------------------------------
def setFrameIcon(frame, iconFile) do
# :wxFrame.setIcon(frame, iconFile)
icon = :wxIcon.new(iconFile)
:wxTopLevelWindow.setIcon(frame, icon)
end
# ============================================================================
# Event handling operations
# ============================================================================
@doc """
Event handling
If the event is registered with a callback function, it will be routed to that
function. A callback function must have arity 3 and accept the following
parameters:
callback(window, eventType, senderId, senderObj)
"""
def eventCallback({:wx, id, _eventSource, windowData, eventData}, event) do
Logger.debug("eventCallback!!: Event=#{inspect(event)}")
Logger.debug("eventCallback!!: id=#{inspect(id)}")
Logger.debug("eventCallback!!: eventData=#{inspect(eventData)}")
event =
case eventData do
{_, event, [], -1, 0} ->
event
{:wxCommand, event, [], 0, 0} ->
event
{:wxClose, event} ->
event
_ ->
Logger.error("Unknown event received!!: Data=#{inspect(eventData)}")
:unknown_event
end
{eventType, _idx, callBack} = WinInfo.get_by_name(event)
{senderName, _senderId, senderObj} = WinInfo.get_by_id(id)
try do
callBack.(windowData, eventType, senderName, senderObj)
rescue
e in RuntimeError -> Logger.error("Callback error: #{inspect(e)}")
end
end
# Called when the application wants to check for an event. It is only
# necessary to call this function if no event handler was specified in
# the event specification
def getEvent(timeout \\ 0) do
receive do
{:wx, senderId, senderObj, _winInfo, {_group, event, _, _, _}} ->
Logger.debug("Event Message: #{inspect(senderId)}, #{inspect(senderObj)}}")
Logger.debug(" Event: #{inspect(event)}")
{_eventType, _senderId, _callback} = WinInfo.get_by_id(senderId)
{:wx, senderId, senderObj, _winInfo, {group, event}} ->
Logger.debug("Event Message: #{inspect(senderId)}, #{inspect(senderObj)}}")
Logger.debug(" Event: #{inspect(event)}")
{_eventType, senderId, _callback} = WinInfo.get_by_id(senderId)
{senderId, event, group}
other ->
Logger.debug("Unhandled event Message: #{inspect(other)}")
other
after
timeout * 1000 ->
:timeout
end
end
# Object independent text interface
def getObjText(ctrlId, {info, _xref}) do
ctrl = Map.get(info, ctrlId)
case ctrl do
{_, _, :wxTextCtrl, _} -> to_string(:wxTextCtrl.getValue(ctrl))
end
end
def putObjText(ctrlId, text, {info, _xref}) do
ctrl = Map.get(info, ctrlId)
case ctrl do
{_, _, :wxStaticText, _} -> :wxStaticText.setLabel(ctrl, text)
end
end
end
| 31.006135 | 88 | 0.598338 |
hexsha: f7e92d65e9d985f0ce4bbde2f138c7590d6918f8 | size: 1,928 | ext: ex | lang: Elixir
path: clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_uiv1beta3_disable_processor_metadata.ex
stars: 1 (2021-12-20T03:40:53.000Z to 2021-12-20T03:40:53.000Z) | repo: renovate-bot/elixir-google-api | head: 1da34cd39b670c99f067011e05ab90af93fef1f6 | licenses: ["Apache-2.0"]
issues: 1 (2020-08-18T00:11:23.000Z to 2020-08-18T00:44:16.000Z) | repo: swansoffiee/elixir-google-api | head: 9ea6d39f273fb430634788c258b3189d3613dde0 | licenses: ["Apache-2.0"]
forks: null | repo: dazuma/elixir-google-api | head: 6a9897168008efe07a6081d2326735fe332e522c | licenses: ["Apache-2.0"]

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiUiv1beta3DisableProcessorMetadata do
@moduledoc """
The long running operation metadata for disable processor method.
## Attributes
* `commonMetadata` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiUiv1beta3CommonOperationMetadata.t`, *default:* `nil`) - The basic metadata of the long running operation.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:commonMetadata =>
GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiUiv1beta3CommonOperationMetadata.t()
| nil
}
field(:commonMetadata,
as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiUiv1beta3CommonOperationMetadata
)
end
defimpl Poison.Decoder,
for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiUiv1beta3DisableProcessorMetadata do
def decode(value, options) do
GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiUiv1beta3DisableProcessorMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiUiv1beta3DisableProcessorMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.428571 | 190 | 0.767635 |
hexsha: f7e994055d079352dec775d1c1970f66d284aa96 | size: 12,475 | ext: exs | lang: Elixir
path: test/lib/code_corps/github/sync/task/changeset_test.exs | repo: fikape/code-corps-api | head: c21674b0b2a19fa26945c94268db8894420ca181 | licenses: ["MIT"]
stars: 275 (2015-06-23T00:20:51.000Z to 2021-08-19T16:17:37.000Z) | issues: 1,304 (2015-06-26T02:11:54.000Z to 2019-12-12T21:08:00.000Z) | forks: 140 (2016-01-01T18:19:47.000Z to 2020-11-22T06:24:47.000Z)

defmodule CodeCorps.GitHub.Sync.Task.ChangesetTest do
@moduledoc false
use CodeCorps.DbAccessCase
alias CodeCorps.GitHub.Sync.Task
alias Ecto.Changeset
describe "create_changeset/4" do
test "assigns proper changes to the task" do
github_issue = insert(
:github_issue,
github_created_at: DateTime.utc_now |> Timex.shift(minutes: 1),
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1))
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, inbox: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:created_at) == github_issue.github_created_at
assert changeset |> Changeset.get_change(:markdown) == github_issue.body
assert changeset |> Changeset.get_change(:modified_at) == github_issue.github_updated_at
assert changeset |> Changeset.get_change(:title) == github_issue.title
assert changeset |> Changeset.get_field(:status) == github_issue.state
assert changeset |> Changeset.get_change(:created_from) == "github"
assert changeset |> Changeset.get_change(:modified_from) == "github"
assert changeset |> Changeset.get_change(:github_issue) |> Map.get(:data) == github_issue
assert changeset |> Changeset.get_change(:github_repo) |> Map.get(:data) == github_repo
assert changeset |> Changeset.get_change(:project_id) == github_repo.project_id
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
assert changeset |> Changeset.get_change(:user) |> Map.get(:data) == user
assert changeset |> Changeset.get_change(:position)
assert changeset |> Changeset.get_field(:archived) == false
expected_body =
github_issue.body
|> Earmark.as_html!(%Earmark.Options{code_class_prefix: "language-"})
assert Changeset.get_change(changeset, :body) == expected_body
assert changeset.valid?
end
test "assigns task to inbox list if github issue is open" do
github_issue = insert(:github_issue, state: "open")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, inbox: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to pull request list if github issue is associated with pull request" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, state: "open")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, pull_requests: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to 'done' list if github issue is closed" do
github_issue = insert(:github_issue, state: "closed")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, done: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to 'done' list if github issue is closed and associated to pull request" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, state: "closed")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, done: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "archives task and removes from task list if issue is closed and unmodified for over a month" do
over_a_month_ago = Timex.now |> Timex.shift(days: -35)
github_pull_request = insert(:github_pull_request)
github_issue = insert(
:github_issue,
github_pull_request: github_pull_request,
state: "closed",
github_updated_at: over_a_month_ago)
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
insert(:task_list, project: project, done: true)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
assert changeset |> Changeset.get_field(:archived) == true
assert changeset |> Changeset.get_field(:task_list_id) == nil
end
test "does not fail and instead returns invalid changeset if no task list matched" do
github_issue = insert(
:github_issue,
github_created_at: DateTime.utc_now |> Timex.shift(minutes: 1),
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1))
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
changeset =
github_issue |> Task.Changeset.create_changeset(github_repo, user)
refute changeset.valid?
end
end
describe "update_changeset/3" do
test "assigns proper changes to the task" do
github_issue = insert(
:github_issue,
github_created_at: DateTime.utc_now |> Timex.shift(minutes: 1),
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1))
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task_list = insert(:task_list, project: project, inbox: true)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:markdown) == github_issue.body
assert changeset |> Changeset.get_change(:modified_at) == github_issue.github_updated_at
assert changeset |> Changeset.get_change(:title) == github_issue.title
assert changeset |> Changeset.get_field(:status) == github_issue.state
refute changeset |> Changeset.get_change(:created_from)
assert changeset |> Changeset.get_change(:modified_from) == "github"
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
assert changeset |> Changeset.get_change(:position)
assert changeset |> Changeset.get_field(:archived) == false
expected_body =
github_issue.body
|> Earmark.as_html!(%Earmark.Options{code_class_prefix: "language-"})
assert Changeset.get_change(changeset, :body) == expected_body
assert changeset.valid?
end
test "validates that modified_at has not already happened" do
project = insert(:project)
github_issue = insert(:github_issue, github_updated_at: DateTime.utc_now |> Timex.shift(minutes: -1), state: "open")
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
insert(:task_list, project: project, inbox: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
refute changeset.valid?
assert changeset.errors[:modified_at] == {"cannot be before the last recorded time", []}
end
test "assigns task to inbox list if github issue is open" do
github_issue = insert(:github_issue, state: "open")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
task_list = insert(:task_list, project: project, inbox: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to pull request list if github issue is associated with pull request" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, state: "open")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
task_list = insert(:task_list, project: project, pull_requests: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to 'done' list if github issue is closed" do
github_issue = insert(:github_issue, state: "closed")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
task_list = insert(:task_list, project: project, done: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "assigns task to 'done' list if github issue is closed and associated to pull request" do
github_pull_request = insert(:github_pull_request)
github_issue = insert(:github_issue, github_pull_request: github_pull_request, state: "closed")
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
task_list = insert(:task_list, project: project, done: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_change(:task_list_id) == task_list.id
end
test "archives task and removes from task list if issue is closed and unmodified for over a month" do
over_a_month_ago = Timex.now |> Timex.shift(days: -35)
github_pull_request = insert(:github_pull_request)
github_issue = insert(
:github_issue,
github_pull_request: github_pull_request,
state: "closed",
github_updated_at: over_a_month_ago)
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
insert(:task_list, project: project, done: true)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
assert changeset |> Changeset.get_field(:archived) == true
assert changeset |> Changeset.get_field(:task_list_id) == nil
end
test "does not fail and instead returns invalid changeset if no task list matched" do
github_issue = insert(
:github_issue,
github_created_at: DateTime.utc_now |> Timex.shift(minutes: 1),
github_updated_at: DateTime.utc_now |> Timex.shift(hours: 1))
project = insert(:project)
github_repo = insert(:github_repo, project: project)
user = insert(:user)
task = insert(:task, project: project, github_issue: github_issue, github_repo: github_repo, user: user, modified_at: DateTime.utc_now)
changeset =
task |> Task.Changeset.update_changeset(github_issue, github_repo)
refute changeset.valid?
end
end
end
| 43.926056 | 141 | 0.700521 |
hexsha: f7e9f8cd8b3edbc62fbcb8a1b2148f04512a3c42 | size: 281 | ext: ex | lang: Elixir
path: lib/module_example/child_module1.ex | repo: koladilip/elixir-examples | head: 49553fe39bbff6e35d1cb2c26fcb6d87fe4e28d3 | licenses: ["MIT"]
stars: null | issues: null | forks: null

defmodule ModuleExample.ChildModule1 do
@moduledoc """
Documentation for `ModuleExample.ChildModule1`.
"""
@doc """
Name of the module
## Examples
iex> ModuleExample.ChildModule1.name()
ModuleExample.ChildModule1
"""
def name(), do: __MODULE__
end | 15.611111 | 49 | 0.679715 |
hexsha: f7ea2e113d0a51c2ebd002c317d6d62502bded86 | size: 1,973 | ext: exs | lang: Elixir
path: apps/roadtrip_web/mix.exs | repo: myrrlyn/roadtrip | head: 424d85dd6a713e99c27f97f052cff88dbbeb6423 | licenses: ["MIT"]
stars: 2 (2022-01-19T17:59:21.000Z to 2022-01-19T18:05:05.000Z) | issues: null | forks: null

defmodule RoadtripWeb.MixProject do
use Mix.Project
def project do
[
app: :roadtrip_web,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.12",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {RoadtripWeb.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.6.0-rc.0", override: true},
{:phoenix_ecto, "~> 4.4"},
{:phoenix_html, "~> 3.0"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_view, "~> 0.16.0"},
{:floki, ">= 0.30.0", only: :test},
{:phoenix_live_dashboard, "~> 0.5"},
{:esbuild, "~> 0.2", runtime: Mix.env() == :dev},
{:telemetry_metrics, "~> 0.6"},
{:telemetry_poller, "~> 1.0"},
{:gettext, "~> 0.18"},
{:roadtrip, in_umbrella: true},
{:jason, "~> 1.2"},
{:plug_cowboy, "~> 2.5"},
{:csv, "~> 2.4"},
{:tzdata, "~> 1.1"},
{:hackney, "~> 1.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"],
"assets.deploy": ["esbuild default --minify", "phx.digest"]
]
end
end
| 27.788732 | 68 | 0.555499 |
hexsha: f7ea31ad6d6f0c294271b88f7e32664f1cfd1a0a | size: 2,014 | ext: exs | lang: Elixir
path: config/dev.exs | repo: ashkan18/onsor | head: 73b75b24f638f1a425de8ebf4454df971040e9f2 | licenses: ["MIT"]
stars: null | issues: 4 (2021-03-09T00:47:04.000Z to 2022-02-10T15:15:28.000Z) | forks: null

use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :onsor, OnsorWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :onsor, OnsorWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/onsor_web/views/.*(ex)$},
~r{lib/onsor_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# Configure your database
config :onsor, Onsor.Repo,
database: "onsor_dev",
hostname: "localhost",
pool_size: 10
| 27.216216 | 68 | 0.684211 |
hexsha: f7ea86ca3c6380490096fa78a330e18f9fee17ce | size: 2,084 | ext: ex | lang: Elixir
path: clients/testing/lib/google_api/testing/v1/model/robo_starting_intent.ex | repo: pojiro/elixir-google-api | head: 928496a017d3875a1929c6809d9221d79404b910 | licenses: ["Apache-2.0"]
stars: 1 (2021-12-20T03:40:53.000Z to 2021-12-20T03:40:53.000Z) | issues: 1 (2020-08-18T00:11:23.000Z to 2020-08-18T00:44:16.000Z) | forks: null

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Testing.V1.Model.RoboStartingIntent do
@moduledoc """
Message for specifying the start activities to crawl.
## Attributes
* `launcherActivity` (*type:* `GoogleApi.Testing.V1.Model.LauncherActivityIntent.t`, *default:* `nil`) - An intent that starts the main launcher activity.
* `startActivity` (*type:* `GoogleApi.Testing.V1.Model.StartActivityIntent.t`, *default:* `nil`) - An intent that starts an activity with specific details.
* `timeout` (*type:* `String.t`, *default:* `nil`) - Timeout in seconds for each intent.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:launcherActivity => GoogleApi.Testing.V1.Model.LauncherActivityIntent.t() | nil,
:startActivity => GoogleApi.Testing.V1.Model.StartActivityIntent.t() | nil,
:timeout => String.t() | nil
}
field(:launcherActivity, as: GoogleApi.Testing.V1.Model.LauncherActivityIntent)
field(:startActivity, as: GoogleApi.Testing.V1.Model.StartActivityIntent)
field(:timeout)
end
defimpl Poison.Decoder, for: GoogleApi.Testing.V1.Model.RoboStartingIntent do
def decode(value, options) do
GoogleApi.Testing.V1.Model.RoboStartingIntent.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Testing.V1.Model.RoboStartingIntent do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.320755 | 159 | 0.739443 |
hexsha: f7eaa1847c1c72f36d7b6af25f47d22a96c79759 | size: 2,424 | ext: ex | lang: Elixir
path: apps/ewallet/lib/ewallet/gates/transfer_gate.ex | repo: turbo-play/ewallet | head: b7fee3eed62ac716f46246132c2ead1045f2e4f3 | licenses: ["Apache-2.0"]
stars: 2 (2019-07-13T05:49:03.000Z to 2021-08-19T23:58:23.000Z) | issues: null | forks: 3 (2018-05-08T17:15:42.000Z to 2021-11-10T04:08:33.000Z)

defmodule EWallet.TransferGate do
@moduledoc """
Handles the logic for a transfer of value between two addresses.
"""
alias EWallet.TransferFormatter
alias LocalLedger.Entry
alias EWalletDB.Transfer
@doc """
Gets or inserts a transfer using the given idempotency token and other given attributes.
## Examples
res = Transactions.Transfer.get_or_insert(%{
idempotency_token: "84bafebf-9776-4cb0-a7f7-8b1e5c7ec830",
from: "c4f829d0-fe85-4b4c-a326-0c46f26b47c5",
to: "f084d20b-6aa7-4231-803f-a0d8d938f939",
minted_token_id: "f7ef021b-95bf-45c8-990f-743ca99d742a",
amount: 10,
metadata: %{},
encrypted_metadata: %{},
payload: %{}
})
case res do
{:ok, transfer} ->
# Everything went well, do something.
{:error, changeset} ->
# Something went wrong with the Transfer insert.
end
"""
def get_or_insert(%{
idempotency_token: _,
from: _,
to: _,
minted_token_id: _,
amount: _,
payload: _
} = attrs) do
attrs
|> Map.put(:type, Transfer.internal)
|> Transfer.get_or_insert()
end
@doc """
Process a transfer and sends the transaction to the ledger(s).
## Examples
res = Transactions.Transfer.process(transfer)
case res do
{:ok, ledger_response} ->
# Everything went well, do something.
{:error, code, description} ->
# Something went wrong with the transfer processing.
end
"""
def process(transfer) do
transfer
|> TransferFormatter.format()
|> Entry.insert(%{genesis: false})
|> update_transfer(transfer)
end
@doc """
Process a genesis transfer and sends the transaction to the ledger(s).
## Examples
res = Transactions.Transfer.genesis(transfer)
case res do
{:ok, ledger_response} ->
# Everything went well, do something.
{:error, code, description} ->
# Something went wrong with the transfer processing.
end
"""
def genesis(transfer) do
transfer
|> TransferFormatter.format()
|> Entry.insert(%{genesis: true})
|> update_transfer(transfer)
end
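# Confirms the transfer with the ledger entry id on success, or marks it as
# failed with the returned error code and description.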
defp update_transfer({:ok, entry}, transfer) do
Transfer.confirm(transfer, %{
entry_id: entry.id
})
end
defp update_transfer({:error, code, description}, transfer) do
Transfer.fail(transfer, %{
code: code,
description: description
})
end
end
| 23.764706 | 90 | 0.643152 |
hexsha: f7eaad890ed9d0d227ec5fe0c688bb4c12180001 | size: 525 | ext: exs | lang: Elixir
path: priv/repo/migrations/20211007062947_add_missing_uniq_index_for_aritlce_upvotes.exs
stars: 240 (2018-11-06T09:36:54.000Z to 2022-02-20T07:12:36.000Z) | repo: coderplanets/coderplanets_server | head: 3663e56340d6d050e974c91f7e499d8424fc25e9 | licenses: ["Apache-2.0"]
issues: 363 (2018-07-11T03:38:14.000Z to 2021-12-14T01:42:40.000Z) | repo: coderplanets/coderplanets_server | head: 3663e56340d6d050e974c91f7e499d8424fc25e9 | licenses: ["Apache-2.0"]
forks: 22 (2019-01-27T11:47:56.000Z to 2021-02-28T13:17:52.000Z) | repo: mydearxym/mastani_server | head: f24034a4a5449200165cf4a547964a0961793eab | licenses: ["Apache-2.0"]

defmodule GroupherServer.Repo.Migrations.AddMissingUniqIndexForAritlceUpvotes do
use Ecto.Migration
def change do
create(unique_index(:article_upvotes, [:user_id, :meetup_id]))
create(unique_index(:article_upvotes, [:user_id, :drink_id]))
create(unique_index(:article_upvotes, [:user_id, :blog_id]))
create(unique_index(:article_upvotes, [:user_id, :works_id]))
create(unique_index(:article_upvotes, [:user_id, :radar_id]))
create(unique_index(:article_upvotes, [:user_id, :guide_id]))
end
end
| 40.384615 | 80 | 0.754286 |
hexsha: f7eac964a783e0dcb5a47d885acf082d17a7cb52 | size: 561 | ext: ex | lang: Elixir
path: lib/rethinkdb.ex | repo: point/rethinkdb-elixir | head: 0549902e5060e75b0942716befc16db984a14b76 | licenses: ["MIT"]
stars: 530 (2015-05-12T21:06:49.000Z to 2021-07-29T01:06:43.000Z) | issues: 122 (2015-05-12T03:12:50.000Z to 2019-11-11T11:21:10.000Z) | forks: 79 (2015-05-14T17:14:52.000Z to 2022-02-14T07:00:00.000Z)

defmodule RethinkDB do
@moduledoc """
Some convenience functions for interacting with RethinkDB.
"""
@doc """
See `RethinkDB.Connection.run/2`
"""
defdelegate run(query, pid), to: RethinkDB.Connection
@doc """
See `RethinkDB.Connection.run/3`
"""
defdelegate run(query, pid, opts), to: RethinkDB.Connection
@doc """
See `RethinkDB.Connection.next/1`
"""
defdelegate next(collection), to: RethinkDB.Connection
@doc """
See `RethinkDB.Connection.close/1`
"""
defdelegate close(collection), to: RethinkDB.Connection
end
| 20.035714 | 61 | 0.686275 |
hexsha: f7eae1b1c3774fd0ff53348dbfe72998dd696ea5 | size: 307 | ext: exs | lang: Elixir
path: test/fl_hook/handshake_error_test.exs | repo: tlux/fl_hook_client | head: 0cd74c1d8873eea8aa654710efe7aab97a1eec5c | licenses: ["MIT"]
stars: 1 (2022-01-26T21:06:26.000Z to 2022-01-26T21:06:26.000Z) | issues: null | forks: null

defmodule FLHook.HandshakeErrorTest do
use ExUnit.Case, async: true
alias FLHook.HandshakeError
describe "message/1" do
test "get message" do
assert Exception.message(%HandshakeError{actual_message: "Lorem Ipsum"}) ==
"Socket is not a valid FLHook socket"
end
end
end
| 23.615385 | 81 | 0.697068 |
hexsha: f7eaeffcb8f907f649d39b99b67afc56e9f77ac2 | size: 15,679 | ext: ex | lang: Elixir
path: lib/iex/lib/iex.ex | repo: diogovk/elixir | head: 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null

defmodule IEx do
@moduledoc ~S"""
Elixir's interactive shell.
This module is the main entry point for Interactive Elixir and
in this documentation we will talk a bit about how IEx works.
Notice that some of the functionalities described here will not be available
depending on your terminal. In particular, if you get a message
saying that the smart terminal could not be run, some of the
features described here won't work.
## Helpers
IEx provides a bunch of helpers. They can be accessed by typing
`h()` into the shell or as a documentation for the `IEx.Helpers` module.
## Autocomplete
To discover all available functions for a module, type the module name
followed by a dot, then press tab to trigger autocomplete. For example:
Enum.
Such function may not be available on some Windows shells. You may need
to pass the `--werl` flag when starting iex, as in `iex --werl` for it
to work. `--werl` may be permanently enabled by setting the `IEX_WITH_WERL`
environment variable.
## The Break command
Inside IEx, hitting `Ctrl+C` will open up the `BREAK` menu. In this
menu you can quit the shell, see process and ets tables information
and much more.
## The User Switch command
Besides the break command, one can type `Ctrl+G` to get to the
user switch command menu. When reached, you can type `h` to
get more information.
In this menu, developers are able to start new shells and
alternate between them. Let's give it a try:
User switch command
--> s 'Elixir.IEx'
--> c
The command above will start a new shell and connect to it.
Create a new variable called `hello` and assign some value to it:
hello = :world
Now, let's roll back to the first shell:
User switch command
--> c 1
Now, try to access the `hello` variable again:
hello
** (UndefinedFunctionError) undefined function hello/0
The command above fails because we have switched shells.
Since shells are isolated from each other, you can't access the
variables defined in one shell from the other one.
The user switch command menu also allows developers to connect to remote
shells using the `r` command. A topic which we will discuss next.
## Remote shells
IEx allows you to connect to another node in two fashions.
First of all, we can only connect to a shell if we give names
both to the current shell and the shell we want to connect to.
Let's give it a try. First start a new shell:
$ iex --sname foo
iex(foo@HOST)1>
The string between the parentheses in the prompt is the name
of your node. We can retrieve it by calling the `node()`
function:
iex(foo@HOST)1> node()
:"foo@HOST"
iex(foo@HOST)2> Node.alive?()
true
For fun, let's define a simple module in this shell too:
iex(foo@HOST)3> defmodule Hello do
...(foo@HOST)3> def world, do: "it works!"
...(foo@HOST)3> end
Now, let's start another shell, giving it a name as well:
$ iex --sname bar
iex(bar@HOST)1>
If we try to dispatch to `Hello.world`, it won't be available
as it was defined only in the other shell:
iex(bar@HOST)1> Hello.world
** (UndefinedFunctionError) undefined function Hello.world/0
However, we can connect to the other shell remotely. Open up
the User Switch prompt (Ctrl+G) and type:
User switch command
--> r 'foo@HOST' 'Elixir.IEx'
--> c
Now we are connected into the remote node, as the prompt shows us,
and we can access the information and modules defined over there:
rem(foo@macbook)1> Hello.world
"it works"
In fact, connecting to remote shells is so common that we provide
a shortcut via the command line as well:
$ iex --sname baz --remsh foo@HOST
Where "remsh" means "remote shell". In general, Elixir supports:
* remsh from an Elixir node to an Elixir node
* remsh from a plain Erlang node to an Elixir node (through the ^G menu)
* remsh from an Elixir node to a plain Erlang node (and get an `erl` shell there)
Connecting an Elixir shell to a remote node without Elixir is
**not** supported.
## The .iex.exs file
When starting IEx, it will look for a local `.iex.exs` file (located in the current
working directory), then a global one (located at `~/.iex.exs`) and will load the
first one it finds (if any). The code in the chosen .iex.exs file will be
evaluated in the shell's context. So, for instance, any modules that are
loaded or variables that are bound in the .iex.exs file will be available in the
shell after it has booted.
Sample contents of a local .iex.exs file:
# source another ".iex.exs" file
import_file "~/.iex.exs"
# print something before the shell starts
IO.puts "hello world"
# bind a variable that'll be accessible in the shell
value = 13
Running the shell in the directory where the above .iex.exs file is located
results in:
$ iex
Erlang 17 [...]
hello world
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
iex(1)> value
13
It is possible to load another file by supplying the `--dot-iex`
option to iex. See `iex --help`.
## Configuring the shell
There are a number of customization options provided by the shell. Take a look
at the docs for the `IEx.configure/1` function by typing `h IEx.configure/1`.
Those options can be configured in your project configuration file or globally
by calling `IEx.configure/1` from your `~/.iex.exs` file like this:
# .iex.exs
IEx.configure(inspect: [limit: 3])
### now run the shell ###
$ iex
Erlang 17 (erts-5.10.1) [...]
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
iex(1)> [1, 2, 3, 4, 5]
[1, 2, 3, ...]
## Expressions in IEx
As an interactive shell, IEx evaluates expressions. This has some
interesting consequences that are worth discussing.
The first one is that the code is truly evaluated and not compiled.
This means that any benchmarking done in the shell is going to have
skewed results. So never run any profiling nor benchmarks in the shell.
Second, IEx allows you to break an expression into many lines,
since this is common in Elixir. For example:
iex(1)> "ab
...(1)> c"
"ab\nc"
In the example above, the shell will be expecting more input until it
finds the closing quote. Sometimes it is not obvious which character
the shell is expecting, and the user may find themselves trapped in
the state of incomplete expression with no ability to terminate it other
than by exiting the shell.
For such cases, there is a special break-trigger (`#iex:break`) that when
encountered on a line by itself will force the shell to break out of any
pending expression and return to its normal state:
iex(1)> ["ab
...(1)> c"
...(1)> "
...(1)> ]
...(1)> #iex:break
** (TokenMissingError) iex:1: incomplete expression
"""
@doc """
Configures IEx.
The supported options are: `:colors`, `:inspect`, `:width`,
`:history_size`, `:default_prompt` and `:alive_prompt`.
## Colors
A keyword list that encapsulates all color settings used by the
shell. See documentation for the `IO.ANSI` module for the list of
supported colors and attributes.
The value is a keyword list. List of supported keys:
* `:enabled` - boolean value that allows for switching the coloring on and off
* `:eval_result` - color for an expression's resulting value
* `:eval_info` - ... various informational messages
* `:eval_error` - ... error messages
* `:stack_app` - ... the app in stack traces
* `:stack_info` - ... the remaining info in stacktraces
* `:ls_directory` - ... for directory entries (ls helper)
* `:ls_device` - ... device entries (ls helper)
When printing documentation, IEx will convert the markdown
documentation to ANSI as well. Those can be configured via:
* `:doc_code` - the attributes for code blocks (cyan, bright)
* `:doc_inline_code` - inline code (cyan)
* `:doc_headings` - h1 and h2 (yellow, bright)
* `:doc_title` - the overall heading for the output (reverse, yellow, bright)
* `:doc_bold` - (bright)
* `:doc_underline` - (underline)
## Inspect
A keyword list containing inspect options used by the shell
when printing results of expression evaluation. Default to
pretty formatting with a limit of 50 entries.
See `Inspect.Opts` for the full list of options.
## Width
An integer indicating the number of columns to use in documentation
output. Default is 80 columns or result of `:io.columns`, whichever
is smaller. The configured value will be used unless it is too large,
which in that case `:io.columns` is used. This way you can configure
IEx to be your largest screen size and it should always take up the
full width of your terminal screen.
## History size
Number of expressions and their results to keep in the history.
The value is an integer. When it is negative, the history is unlimited.
## Prompt
This is an option determining the prompt displayed to the user
when awaiting input.
The value is a keyword list. Two prompt types:
* `:default_prompt` - used when `Node.alive?` returns `false`
* `:alive_prompt` - used when `Node.alive?` returns `true`
The following values in the prompt string will be replaced appropriately:
* `%counter` - the index of the history
* `%prefix` - a prefix given by `IEx.Server`
* `%node` - the name of the local node
"""
def configure(options) do
IEx.Config.configure(options)
end
@doc """
Returns IEx configuration.
"""
def configuration do
IEx.Config.configuration()
end
@doc """
Registers a function to be invoked after the IEx process is spawned.
"""
def after_spawn(fun) when is_function(fun) do
IEx.Config.after_spawn(fun)
end
@doc """
Returns registered `after_spawn` callbacks.
"""
def after_spawn do
IEx.Config.after_spawn()
end
@doc """
Returns `true` if IEx was started.
"""
def started? do
IEx.Config.started?()
end
@doc """
Returns `string` escaped using the specified `color`.
ANSI escapes in `string` are not processed in any way.
"""
def color(color, string) do
case IEx.Config.color(color) do
nil ->
string
ansi ->
IO.iodata_to_binary([IO.ANSI.format_fragment(ansi, true), string | IO.ANSI.reset])
end
end
@doc """
Gets the IEx width for printing.
Used by helpers and it has a maximum cap of 80 chars.
"""
def width do
IEx.Config.width()
end
@doc """
Gets the options used for inspecting.
"""
def inspect_opts do
IEx.Config.inspect_opts()
end
@doc """
Pries into the process environment.
This is useful for debugging a particular chunk of code
and inspecting the state of a particular process. The process
is temporarily changed to trap exits (i.e. the process flag
`:trap_exit` is set to `true`) and has the `group_leader` changed
to support ANSI escape codes. Those values are reverted by
calling `respawn`, which starts a new IEx shell, freeing up
the pried one.
When a process is pried, all code runs inside IEx and, as
such, it is evaluated and cannot access private functions
of the module being pried. Module functions still need to be
accessed via `Mod.fun(args)`.
## Examples
Let's suppose you want to investigate what is happening
with some particular function. By invoking `IEx.pry/1` from
the function, IEx will allow you to access its binding
(variables), verify its lexical information and access
the process information. Let's see an example:
import Enum, only: [map: 2]
require IEx
defmodule Adder do
def add(a, b) do
c = a + b
IEx.pry
end
end
When invoking `Adder.add(1, 2)`, you will receive a message in
your shell to pry the given environment. By allowing it,
the shell will be reset and you gain access to all variables
and the lexical scope from above:
pry(1)> map([a, b, c], &IO.inspect(&1))
1
2
3
Keep in mind that `IEx.pry/1` runs in the caller process,
blocking the caller during the evaluation cycle. The caller
process can be freed by calling `respawn`, which starts a
new IEx evaluation cycle, letting this one go:
pry(2)> respawn
true
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
Setting variables or importing modules in IEx does not
affect the caller's environment (hence it is called `pry`).
"""
defmacro pry(timeout \\ 5000) do
quote do
IEx.pry(binding, __ENV__, unquote(timeout))
end
end
@doc """
Callback for `IEx.pry/1`.
You can invoke this function directly when you are not able to invoke
`IEx.pry/1` as a macro. This function expects the binding (from
`Kernel.binding/0`), the environment (from `__ENV__`) and the timeout
(a sensible default is 5000).
"""
def pry(binding, env, timeout) do
opts = [binding: binding, dot_iex_path: "", env: env, prefix: "pry"]
meta = "#{inspect self} at #{Path.relative_to_cwd(env.file)}:#{env.line}"
desc =
if File.regular?(env.file) do
parse_file(env)
else
""
end
res = IEx.Server.take_over("Request to pry #{meta}#{desc}", opts, timeout)
# We cannot use colors because IEx may be off.
case res do
{:error, :no_iex} ->
extra =
case :os.type do
{:win32, _} -> " If you are on Windows, you may need to start IEx with the --werl flag."
_ -> ""
end
IO.puts :stdio, "Cannot pry #{meta}. Is an IEx shell running?" <> extra
_ ->
:ok
end
res
end
defp parse_file(env) do
lines =
env.file
|> File.stream!
|> Enum.slice(max(env.line - 3, 0), 5)
Enum.intersperse(["\n\n" | lines], " ")
end
## Callbacks
# This is a callback invoked by Erlang shell utilities
# when someone presses Ctrl+G and adds 's Elixir.IEx'.
@doc false
def start(opts \\ [], mfa \\ {IEx, :dont_display_result, []}) do
spawn fn ->
case :init.notify_when_started(self()) do
:started -> :ok
_ -> :init.wait_until_started()
end
:ok = start_iex()
:ok = set_expand_fun()
:ok = run_after_spawn()
IEx.Server.start(opts, mfa)
end
end
@doc false
def dont_display_result, do: :"do not show this result in output"
## Helpers
defp start_iex() do
{:ok, _} = Application.ensure_all_started(:iex)
:ok
end
defp set_expand_fun do
gl = Process.group_leader
glnode = node gl
expand_fun =
if glnode != node do
_ = ensure_module_exists glnode, IEx.Remsh
IEx.Remsh.expand node
else
&IEx.Autocomplete.expand(&1)
end
# expand_fun is not supported by a shell variant
# on Windows, so we do two io calls, not caring
# about the result of the expand_fun one.
_ = :io.setopts(gl, expand_fun: expand_fun)
:io.setopts(gl, binary: true, encoding: :unicode)
end
defp ensure_module_exists(node, mod) do
unless :rpc.call node, :code, :is_loaded, [mod] do
{m, b, f} = :code.get_object_code mod
{:module, _} = :rpc.call node, :code, :load_binary, [m, f, b]
end
end
defp run_after_spawn do
_ = for fun <- Enum.reverse(after_spawn), do: fun.()
:ok
end
end
| 29.864762 | 97 | 0.667581 |
f7eaf8ec24ae7950f3e0a2b3e9ed65999b127e18 | 11,291 | exs | Elixir | lib/mix/test/mix/rebar_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | 1 | 2018-10-02T13:55:29.000Z | 2018-10-02T13:55:29.000Z | lib/mix/test/mix/rebar_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/rebar_test.exs | mertonium/elixir | 74e666156906974082f6b4d34dfbe6988d6465c0 | [
"Apache-2.0"
] | 1 | 2021-09-30T01:21:02.000Z | 2021-09-30T01:21:02.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule Mix.RebarTest do
use MixTest.Case
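# The nested modules below are fixture Mix projects: each one declares a path
# dependency on a rebar-based fixture, varying the :manager and :system_env
# options so both the rebar and rebar3 code paths get exercised.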
defmodule RebarAsDep do
def project do
[
app: :rebar_as_dep,
version: "0.1.0",
deps: [
{:rebar_dep, path: MixTest.Case.tmp_path("rebar_dep"), app: false}
]
]
end
end
defmodule RebarAsDepWithEnv do
def project do
[
app: :rebar_as_dep,
version: "0.1.0",
deps: [
{
:rebar_dep,
path: MixTest.Case.tmp_path("rebar_dep"),
app: false,
manager: :rebar,
system_env: [{"FILE_FROM_ENV", "rebar-test-rebar"}, {"CONTENTS_FROM_ENV", "rebar"}]
}
]
]
end
end
defmodule Rebar3AsDep do
def project do
[
app: :rebar_as_dep,
version: "0.1.0",
deps: [
{
:rebar_dep,
path: MixTest.Case.tmp_path("rebar_dep"),
app: false,
manager: :rebar3,
system_env: [{"FILE_FROM_ENV", "rebar-test-rebar3"}, {"CONTENTS_FROM_ENV", "rebar3"}]
}
]
]
end
end
defmodule RebarOverrideAsDep do
def project do
[
app: :rebar_as_dep,
version: "0.1.0",
deps: [
{
:rebar_override,
path: MixTest.Case.tmp_path("rebar_override"), app: false, manager: :rebar3
}
]
]
end
end
describe "load_config/1" do
test "loads rebar.config" do
path = MixTest.Case.fixture_path("rebar_dep")
config = Mix.Rebar.load_config(path)
assert config[:sub_dirs] == ['apps/*']
assert config[:SCRIPT] == 'rebar.config.script'
end
test "loads rebar.config.script on dependency directory" do
path = MixTest.Case.fixture_path("rebar_dep_script")
config = Mix.Rebar.load_config(path)
assert config[:dir] == {:ok, String.to_charlist(path)}
end
end
@git_rebar_charlist '../../test/fixtures/git_rebar'
@git_rebar_string "../../test/fixtures/git_rebar"
describe "deps/1" do
test "parses Rebar dependencies" do
config = [deps: [{:git_rebar, '~> 1.0'}]]
assert Mix.Rebar.deps(config) == [{:git_rebar, "~> 1.0"}]
config = [deps: [{:git_rebar, '~> 1.0', {:pkg, :rebar_fork}}]]
assert Mix.Rebar.deps(config) == [{:git_rebar, "~> 1.0", hex: :rebar_fork}]
config = [deps: [{:git_rebar, {:pkg, :rebar_fork}}]]
assert Mix.Rebar.deps(config) == [{:git_rebar, ">= 0.0.0", hex: :rebar_fork}]
config = [deps: [{:git_rebar, '0.1..*', {:git, @git_rebar_charlist, :master}}]]
assert Mix.Rebar.deps(config) ==
[{:git_rebar, ~r"0.1..*", [git: @git_rebar_string, ref: "master"]}]
config = [deps: [{:git_rebar, {:git, @git_rebar_charlist, :master}}]]
assert Mix.Rebar.deps(config) ==
[{:git_rebar, ">= 0.0.0", [git: @git_rebar_string, ref: "master"]}]
config = [deps: [{:git_rebar, '0.1..*', {:git, @git_rebar_charlist}, [:raw]}]]
assert Mix.Rebar.deps(config) ==
[{:git_rebar, ~r"0.1..*", [git: @git_rebar_string, compile: false]}]
config = [deps: [{:git_rebar, '', {:git, @git_rebar_charlist, {:ref, '64691eb'}}}]]
assert Mix.Rebar.deps(config) ==
[{:git_rebar, ~r"", [git: @git_rebar_string, ref: "64691eb"]}]
end
end
describe "apply_overrides/3" do
test "applies overrides" do
config = [deps: {:git_rebar, '~> 2.0'}]
overrides = [{:override, [deps: [{:git_rebar, '~> 1.0'}]]}]
assert Mix.Rebar.apply_overrides(:foo, config, overrides) ==
[deps: [{:git_rebar, '~> 1.0'}], overrides: overrides]
config = [deps: [{:git_rebar, '~> 2.0'}]]
overrides = [{:override, :bar, [deps: [{:git_rebar, '~> 1.0'}]]}]
assert Mix.Rebar.apply_overrides(:foo, config, overrides) ==
[deps: [{:git_rebar, '~> 2.0'}], overrides: overrides]
config = [deps: [{:git_rebar, '~> 2.0'}]]
overrides = [{:override, :foo, [deps: [{:git_rebar, '~> 1.0'}]]}]
assert Mix.Rebar.apply_overrides(:foo, config, overrides) ==
[deps: [{:git_rebar, '~> 1.0'}], overrides: overrides]
config = [deps: [{:git_rebar, '~> 1.0'}]]
overrides = [{:add, :foo, [deps: [{:git_rebar2, '~> 2.0'}]]}]
assert Mix.Rebar.apply_overrides(:foo, config, overrides) ==
[deps: [{:git_rebar2, '~> 2.0'}, {:git_rebar, '~> 1.0'}], overrides: overrides]
end
test "concatenates overrides" do
config = [deps: {:git_rebar, '~> 2.0'}, overrides: [{:add, :bar, []}]]
overrides = [{:override, [deps: [{:git_rebar, '~> 1.0'}]]}]
assert Mix.Rebar.apply_overrides(:foo, config, overrides) ==
[deps: [{:git_rebar, '~> 1.0'}], overrides: overrides ++ [{:add, :bar, []}]]
end
end
describe "dependency_config/1" do
test "converts Rebar config to dependency config" do
config = Mix.Rebar.load_config(fixture_path("rebar_dep"))
dep_config = Mix.Rebar.dependency_config(config)
assert config[:erl_opts] == [:warnings_as_errors]
assert dep_config[:erl_opts] == []
end
end
describe "recur/1" do
test "recurs over sub dirs" do
path = MixTest.Case.fixture_path("rebar_dep")
File.cd!(path, fn ->
config = Mix.Rebar.load_config(path)
Mix.Rebar.recur(config, fn config ->
if config[:sub_dirs] == ['from_apps_another'] do
Process.put(:inside_apps_another, true)
end
end)
end)
unless Process.get(:inside_apps_another) do
flunk("Expected inside_apps_another to return true")
end
end
end
describe "integration with Mix" do
test "inherits Rebar manager" do
Mix.Project.push(Rebar3AsDep)
deps = Mix.Dep.loaded([])
assert Enum.all?(deps, &(&1.manager == :rebar3))
end
test "parses Rebar dependencies from rebar.config" do
Mix.Project.push(RebarAsDep)
deps = Mix.Dep.loaded([])
assert Enum.find(deps, &(&1.app == :rebar_dep))
assert Enum.find(deps, fn %Mix.Dep{app: app, opts: opts} ->
if app == :git_rebar do
assert Enum.find(opts, &match?({:git, _}, &1))
assert Enum.find(opts, &match?({:ref, "master"}, &1))
true
end
end)
end
test "handles Rebar overrides" do
Mix.Project.push(RebarOverrideAsDep)
in_tmp("Rebar overrides", fn ->
Mix.Tasks.Deps.Get.run([])
assert Mix.Dep.loaded([]) |> Enum.map(& &1.app) ==
[:git_repo, :git_rebar, :rebar_override]
end)
after
purge([GitRepo.MixProject])
end
test "gets and compiles dependencies for Rebar" do
Mix.Project.push(RebarAsDep)
in_tmp("get and compile dependencies for Rebar", fn ->
Mix.Tasks.Deps.Get.run([])
assert_received {:mix_shell, :info, ["* Getting git_rebar" <> _]}
Mix.Tasks.Deps.Compile.run([])
assert_received {:mix_shell, :run, ["===> Compiling git_rebar\n"]}
assert_received {:mix_shell, :run, ["===> Compiling rebar_dep\n"]}
assert :git_rebar.any_function() == :ok
assert :rebar_dep.any_function() == :ok
load_paths =
Mix.Dep.loaded([])
|> Enum.map(&Mix.Dep.load_paths(&1))
|> Enum.concat()
assert File.exists?("_build/dev/lib/rebar_dep/ebin/rebar_dep.beam")
assert File.exists?("_build/dev/lib/git_rebar/ebin/git_rebar.beam")
# Assert we have no .mix/compile.lock as a .mix/compile.lock
# means we check for the Elixir version on every command.
refute File.exists?("_build/dev/lib/rebar_dep/.mix/compile.lock")
refute File.exists?("_build/dev/lib/git_rebar/.mix/compile.lock")
assert Enum.any?(load_paths, &String.ends_with?(&1, "git_rebar/ebin"))
assert Enum.any?(load_paths, &String.ends_with?(&1, "rebar_dep/ebin"))
end)
end
# We run only on Unix because Windows has a hard time
# removing the rebar executable after executed.
@tag [unix: true]
test "applies variables from :system_env option when compiling dependencies for Rebar" do
Mix.Project.push(RebarAsDepWithEnv)
in_tmp("applies variables from system_env for Rebar", fn ->
expected_file = Path.join(tmp_path("rebar_dep"), "rebar-test-rebar")
File.rm(expected_file)
Mix.Tasks.Deps.Get.run([])
Mix.Tasks.Deps.Compile.run([])
assert {:ok, "rebar"} = File.read(expected_file)
end)
end
test "gets and compiles dependencies for rebar3" do
Mix.Project.push(Rebar3AsDep)
in_tmp("get and compile dependencies for rebar3", fn ->
Mix.Tasks.Deps.Get.run([])
assert_received {:mix_shell, :info, ["* Getting git_rebar " <> _]}
Mix.Tasks.Deps.Compile.run([])
assert_received {:mix_shell, :run, ["===> Compiling git_rebar\n"]}
assert_received {:mix_shell, :run, ["===> Compiling rebar_dep\n"]}
assert :git_rebar.any_function() == :ok
assert :rebar_dep.any_function() == :ok
load_paths =
Mix.Dep.loaded([])
|> Enum.map(&Mix.Dep.load_paths(&1))
|> Enum.concat()
assert File.exists?("_build/dev/lib/rebar_dep/ebin/rebar_dep.beam")
assert File.exists?("_build/dev/lib/git_rebar/ebin/git_rebar.beam")
# Assert we have no .mix/compile.lock as a .mix/compile.lock
# means we check for the Elixir version on every command.
refute File.exists?("_build/dev/lib/rebar_dep/.mix/compile.lock")
refute File.exists?("_build/dev/lib/git_rebar/.mix/compile.lock")
assert Enum.any?(load_paths, &String.ends_with?(&1, "git_rebar/ebin"))
assert Enum.any?(load_paths, &String.ends_with?(&1, "rebar_dep/ebin"))
end)
end
# We run only on Unix because Windows has a hard time
# removing the rebar executable after executed.
@tag [unix: true]
test "applies variables from :system_env option when compiling dependencies for rebar3" do
Mix.Project.push(Rebar3AsDep)
in_tmp("applies variables from system_env for rebar3", fn ->
expected_file = Path.join(tmp_path("rebar_dep"), "rebar-test-rebar3")
File.rm(expected_file)
Mix.Tasks.Deps.Get.run([])
Mix.Tasks.Deps.Compile.run([])
assert {:ok, "rebar3"} = File.read(expected_file)
end)
end
test "gets and compiles dependencies for Rebar with Mix" do
Mix.Project.push(RebarAsDep)
in_tmp("get and compile dependencies for Rebar with Mix", fn ->
File.write!(MixTest.Case.tmp_path("rebar_dep/mix.exs"), """
defmodule RebarDep.MixProject do
use Mix.Project
def project do
[app: :rebar_dep,
version: "0.0.1"]
end
end
""")
Mix.Tasks.Deps.Compile.run([])
assert_received {:mix_shell, :info, ["==> rebar_dep"]}
assert_received {:mix_shell, :info, ["Generated rebar_dep app"]}
assert File.regular?("_build/dev/lib/rebar_dep/ebin/rebar_dep.app")
end)
after
File.rm(MixTest.Case.tmp_path("rebar_dep/mix.exs"))
end
end
end
| 33.01462 | 97 | 0.585422 |
f7eb55c2312792e268ab0cf9af3608a3b0d3714e | 563 | exs | Elixir | test/utils_test.exs | Phiriq/noisex | 274a7fb9317b05dcf303803664335affa1c92b35 | [
"MIT"
] | 2 | 2021-06-17T21:07:57.000Z | 2021-06-18T15:52:34.000Z | test/utils_test.exs | Phiriq/noisex | 274a7fb9317b05dcf303803664335affa1c92b35 | [
"MIT"
] | null | null | null | test/utils_test.exs | Phiriq/noisex | 274a7fb9317b05dcf303803664335affa1c92b35 | [
"MIT"
] | null | null | null | defmodule Isotope.UtilsTest do
use ExUnit.Case, async: true
doctest Isotope.Utils
import ExUnit.CaptureIO
alias Isotope.{Noise, Utils}
test "show_noisemap/1 outputs visualization to stdout" do
{:ok, noise} = Noise.new()
output =
fn ->
noise
|> Noise.noise_map({10, 10})
|> Utils.show_noisemap()
end
|> capture_io
assert String.contains?(output, "▒")
# output contains characters used in ansi codes
assert String.contains?(output, "[")
assert String.contains?(output, ";")
end
end
| 21.653846 | 59 | 0.637655 |
f7eb6302d08ae6931522b26a473dd7e04dcf8d90 | 1,054 | ex | Elixir | lib/mailjex/api/message.ex | dwarner/mailjex | d611f2e2d70b4e6567b22dd3ef19b792dcbf2d79 | [
"MIT"
] | 8 | 2017-10-10T15:51:33.000Z | 2021-08-18T01:06:49.000Z | lib/mailjex/api/message.ex | dwarner/mailjex | d611f2e2d70b4e6567b22dd3ef19b792dcbf2d79 | [
"MIT"
] | 5 | 2018-05-23T15:03:15.000Z | 2019-11-17T16:30:18.000Z | lib/mailjex/api/message.ex | dwarner/mailjex | d611f2e2d70b4e6567b22dd3ef19b792dcbf2d79 | [
"MIT"
] | 4 | 2018-11-16T15:45:22.000Z | 2019-11-16T16:19:48.000Z | defmodule Mailjex.Api.Message do
@moduledoc false
import Mailjex.Utils.Comms
def list(params \\ %{}) do
url_params = params |> URI.encode_query
request(:get, "/REST/message?#{url_params}")
|> decode_json
end
def history(params \\ %{}) do
url_params = params |> URI.encode_query
request(:get, "/REST/messagehistory?#{url_params}")
|> decode_json
end
def information(params \\ %{}) do
url_params = params |> URI.encode_query
request(:get, "/REST/messageinformation?#{url_params}")
|> decode_json
end
def sent_statistics(params \\ %{}) do
url_params = params |> URI.encode_query
request(:get, "/REST/messagesentstatistics?#{url_params}")
|> decode_json
end
def state(params \\ %{}) do
url_params = params |> URI.encode_query
request(:get, "/REST/messagestate?#{url_params}")
|> decode_json
end
def statistics(params \\ %{}) do
url_params = params |> URI.encode_query
request(:get, "/REST/messagestatistics?#{url_params}")
|> decode_json
end
end
| 22.425532 | 62 | 0.6537 |
f7eb865039b6018e21e6fb613011c315e4f2e9e3 | 603 | ex | Elixir | lib/realbook/dictionary.ex | ityonemo/realbook | cc506144492b9344b540181a63ba35b6f5943c9c | [
"MIT"
] | 11 | 2020-07-13T20:43:43.000Z | 2021-05-18T23:52:27.000Z | lib/realbook/dictionary.ex | ityonemo/realbook | cc506144492b9344b540181a63ba35b6f5943c9c | [
"MIT"
] | 50 | 2020-07-13T02:20:14.000Z | 2020-08-17T16:45:17.000Z | lib/realbook/dictionary.ex | ityonemo/realbook | cc506144492b9344b540181a63ba35b6f5943c9c | [
"MIT"
] | 1 | 2021-03-11T17:02:21.000Z | 2021-03-11T17:02:21.000Z | defmodule Realbook.Dictionary do
@moduledoc false
# private implementation of realbook setters and getters.
alias Realbook.Storage
@spec set(keyword) :: :ok
def set(keyword) do
Storage.update(:dictionary, &Keyword.merge(&1, keyword))
end
@spec get(atom, term) :: term
def get(key, default \\ nil) do
dictionary = Storage.props(:dictionary)
dictionary
|> Keyword.get(key, default)
|| raise KeyError, key: key,
term: dictionary
end
@spec keys() :: [atom]
def keys do
:dictionary
|> Storage.props
|> Keyword.keys
end
end
| 20.1 | 60 | 0.640133 |
f7eb9e9d4f68ad75428a00dca414260333d5fe68 | 1,065 | ex | Elixir | lib/visualixir_web/channels/nodes_channel.ex | markhu53/visualixir | 80ce8b75fe33476fe7d17110ffc0271f71bc1f91 | [
"MIT"
] | 1,311 | 2015-11-08T02:21:41.000Z | 2022-03-25T17:32:57.000Z | lib/visualixir_web/channels/nodes_channel.ex | markhu53/visualixir | 80ce8b75fe33476fe7d17110ffc0271f71bc1f91 | [
"MIT"
] | 30 | 2015-11-09T17:07:50.000Z | 2022-02-10T17:13:50.000Z | lib/visualixir_web/channels/nodes_channel.ex | markhu53/visualixir | 80ce8b75fe33476fe7d17110ffc0271f71bc1f91 | [
"MIT"
] | 58 | 2015-11-08T16:50:00.000Z | 2022-03-30T13:03:18.000Z | defmodule VisualixirWeb.NodesChannel do
use Visualixir.Web, :channel
require Logger
alias VisualixirWeb.Endpoint
alias VisualixirWeb.TraceChannel
alias Visualixir.Tracer
@channel "nodes"
def join(@channel, _auth_msg, socket) do
{:ok, nodes_msg(), socket}
end
def refresh do
Endpoint.broadcast!(@channel, "update", nodes_msg())
end
def handle_in("add", node, socket) do
ping_result = node |> String.to_atom |> Node.ping
Logger.debug("[Visualixir] Pinging node #{node} returned #{inspect ping_result}")
refresh()
{:noreply, socket}
end
def handle_in("visualize", node, socket) do
node = String.to_atom(node)
Tracer.start(node)
TraceChannel.announce_visualize(node)
{:noreply, socket}
end
def handle_in("cleanup", node, socket) do
node = String.to_atom(node)
Logger.debug("[Visualixir] Telling node #{node} to clean up")
Tracer.stop(node)
TraceChannel.announce_cleanup(node)
{:noreply, socket}
end
defp nodes_msg do
%{nodes: Node.list(:known)}
end
end
| 20.09434 | 85 | 0.686385 |
f7eb9fe29504bdc475e0d829fda58b58979013d1 | 901 | ex | Elixir | lib/rockelivery/orders/create.ex | LuizFerK/Rockelivery | 4742d73027bcaa97c2fc7969c8d032edf73a28fd | [
"MIT"
] | null | null | null | lib/rockelivery/orders/create.ex | LuizFerK/Rockelivery | 4742d73027bcaa97c2fc7969c8d032edf73a28fd | [
"MIT"
] | null | null | null | lib/rockelivery/orders/create.ex | LuizFerK/Rockelivery | 4742d73027bcaa97c2fc7969c8d032edf73a28fd | [
"MIT"
] | null | null | null | defmodule Rockelivery.Orders.Create do
import Ecto.Query
alias Rockelivery.{Error, Item, Order, Repo}
alias Rockelivery.Orders.ValidateAndMultiplyItems
def call(%{"items" => items_params} = params) do
items_ids = Enum.map(items_params, fn item -> item["id"] end)
query = from item in Item, where: item.id in ^items_ids
query
|> Repo.all()
|> ValidateAndMultiplyItems.call(items_ids, items_params)
|> handle_items(params)
end
defp handle_items({:error, result}, _params), do: {:error, Error.build(:bad_request, result)}
defp handle_items({:ok, items}, params) do
params
|> Map.put("items", items)
|> Order.changeset()
|> Repo.insert()
|> handle_insert()
end
defp handle_insert({:ok, %Order{} = order}), do: {:ok, Repo.preload(order, :user)}
defp handle_insert({:error, result}), do: {:error, Error.build(:bad_request, result)}
end
| 29.064516 | 95 | 0.672586 |
f7ebc084c8eaf426cc0d8f7b496ccd60e029884f | 3,118 | exs | Elixir | Primer/9_looping_and_recursion.exs | joelbandi/elixir-start-here | 65722377d455a4b4678658eee56713681c6f16ce | [
"MIT"
] | null | null | null | Primer/9_looping_and_recursion.exs | joelbandi/elixir-start-here | 65722377d455a4b4678658eee56713681c6f16ce | [
"MIT"
] | null | null | null | Primer/9_looping_and_recursion.exs | joelbandi/elixir-start-here | 65722377d455a4b4678658eee56713681c6f16ce | [
"MIT"
] | null | null | null | # looping in elixir is done with recursion. Elixir does not have any for loop kind of deals anywhere
# Let's rewrite map function.
import IO, only: [puts: 1]
defmodule Sample.Enum do
def map([], _f) do # you can use pattern matching in this case to handle termination cases
# pattern matching is useful in achieving method overloading and method polymorphism
[]
end
def map(list,f) do
[f.(hd list) | Sample.Enum.map(tl(list), f)]
end
# guard clauses are an augmentation to pattern matching... you can use advanced checks to invoke
# methods based on conditions
# for example, we could have implemented the termination case using
# def map(list, _f) when length(list) == 0 do
# end
end
Sample.Enum.map([1,2,3,4,5,6,7,8,9,0,:ok], fn(x) -> IO.puts x end)
# The Enum module.
# A set of algorithms for enumerating over enumerables. All collection except tuples are Enumerables.
# all? - applies a given function predicate against an entire list and
# returns true only if every element satisfies the condition.
puts Enum.all?([1,2,3,4,5,6,7,8,:ok], fn(x) -> is_integer x end)
puts Enum.all?([1,2,3,4,5], fn(x) -> is_integer x end)
# any? - similar to all? but returns true if at least one element satisfies the function
puts Enum.any? [1,2,3,4,5,:ok], fn(x) -> is_atom(x) end
# chunk_every - breaks up an enumerable into chunks of a specified size
Enum.chunk_every([1, 2, 3, 4, 5, 6], 2) # => [[1, 2], [3, 4], [5, 6]]
Enum.chunk_every([1, 2, 3, 4, 5, 6, 7], 2) # => [[1, 2], [3, 4], [5, 6], [7]]
# chunk_by - starts a new chunk every time the fn output changes
Enum.chunk_by(["one", "two", "three", "four", "five", "six"], fn(x) -> String.length(x) end) # => [["one", "two"], ["three"], ["four", "five"], ["six"]]
# map_every - applies fn to every 3rd item, starting with the first
Enum.map_every([1, 2, 3, 4, 5, 6, 7, 8], 3, fn x -> x + 1000 end)
# => [1001, 2, 3, 1004, 5, 6, 1007, 8]
Enum.each([1,2,3,4,5], fn(x) -> IO.puts x end)
Enum.map([1,2,3,4,5], fn(x) -> IO.puts x end)
# other functions
# 1. Enum.min finds the min element
# 2. Enum.max finds max
# 3. Enum.sort/1 sorts an enumerable
# 4. Enum.sort/2 sorts an enumerable according to a comparator fn provided by us
Enum.sort([%{:val => 4}, %{:val => 1}], fn(x, y) -> x[:val] > y[:val] end)
# 5. Enum.uniq removes duplicates
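# A few quick illustrations of the functions listed above:
Enum.min([3, 1, 2]) # => 1
Enum.max([3, 1, 2]) # => 3
Enum.uniq([1, 2, 2, 3, 1]) # => [1, 2, 3]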
# With reduce/3 we can distill our collection down into a single value.
# To do this we can supply an optional accumulator (for example, 10) to be passed into our function;
# if no accumulator is provided, the first element in the enumerable is used, as below:
puts Enum.reduce([1,2,3,4,5,6,7,8,9], fn(x,acc) -> x+acc end)
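# The same reduction with an explicit starting accumulator of 10 (prints 55):
puts Enum.reduce([1,2,3,4,5,6,7,8,9], 10, fn(x, acc) -> x + acc end)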
# In Elixir, it is common to loop over an Enumerable,
# often filtering out some results and mapping values into another list.
# Comprehensions are syntactic sugar for such constructs:
# they group those common tasks into the for special form.
for n <- [1, 2, 3, 4], do: n * n
# => [1, 4, 9, 16]
# In the expression above, n <- [1, 2, 3, 4] is the generator. `<-` is the generator operator.
# It is literally generating values to be used in the comprehension.
# Any enumerable can be passed on the right-hand side of the generator expression
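# Comprehensions can also take filters: keep only the even numbers, then square them.
for n <- 1..10, rem(n, 2) == 0, do: n * n
# => [4, 16, 36, 64, 100]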
| 40.493506 | 152 | 0.676074 |
f7ebc13e3f19c7567746cd8d038ea7e0c345ea34 | 1,129 | exs | Elixir | config/config.exs | mendrugory/enchufeweb | f87d24869eb71cabae436c9e105f88cc5f3d7313 | [
"MIT"
] | 5 | 2017-09-11T12:17:58.000Z | 2018-02-20T16:37:33.000Z | config/config.exs | mendrugory/enchufeweb | f87d24869eb71cabae436c9e105f88cc5f3d7313 | [
"MIT"
] | null | null | null | config/config.exs | mendrugory/enchufeweb | f87d24869eb71cabae436c9e105f88cc5f3d7313 | [
"MIT"
] | 2 | 2018-02-20T16:37:39.000Z | 2018-09-27T23:05:17.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :enchufeweb, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:enchufeweb, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.419355 | 73 | 0.752879 |
f7ebe14fb509ddcae2588084e05bb3d40dcc1536 | 2,348 | exs | Elixir | test/query/joins_test.exs | scottdavis/rethinkdb-elixir | 28727810d6af07f07056082ce471be90cb10f4a0 | [
"MIT"
] | 6 | 2018-09-21T09:15:06.000Z | 2020-12-05T21:50:35.000Z | test/query/joins_test.exs | scottdavis/rethinkdb-elixir | 28727810d6af07f07056082ce471be90cb10f4a0 | [
"MIT"
] | 44 | 2018-09-25T13:16:41.000Z | 2021-08-02T13:14:38.000Z | test/query/joins_test.exs | scottdavis/rethinkdb-elixir | 28727810d6af07f07056082ce471be90cb10f4a0 | [
"MIT"
] | 3 | 2019-03-13T03:11:47.000Z | 2020-12-15T00:50:54.000Z | defmodule JoinsTest do
use ExUnit.Case, async: false
use RethinkDB.Connection
import RethinkDB.Query
alias RethinkDB.Record
alias RethinkDB.Collection
require RethinkDB.Lambda
import RethinkDB.Lambda
@table_name "joins_test_table_1"
setup_all do
start_link()
table_create(@table_name) |> run
on_exit(fn ->
start_link()
table_drop(@table_name) |> run
end)
:ok
end
setup do
table(@table_name) |> delete |> run
:ok
end
test "inner join arrays" do
left = [%{a: 1, b: 2}, %{a: 2, b: 3}]
right = [%{a: 1, c: 4}, %{a: 2, c: 6}]
q =
inner_join(
left,
right,
lambda(fn l, r ->
l[:a] == r[:a]
end)
)
{:ok, %Record{data: data}} = run(q)
assert data == [
%{"left" => %{"a" => 1, "b" => 2}, "right" => %{"a" => 1, "c" => 4}},
%{"left" => %{"a" => 2, "b" => 3}, "right" => %{"a" => 2, "c" => 6}}
]
{:ok, %Record{data: data}} = q |> zip |> run
assert data == [%{"a" => 1, "b" => 2, "c" => 4}, %{"a" => 2, "b" => 3, "c" => 6}]
end
test "outer join arrays" do
left = [%{a: 1, b: 2}, %{a: 2, b: 3}]
right = [%{a: 1, c: 4}]
q =
outer_join(
left,
right,
lambda(fn l, r ->
l[:a] == r[:a]
end)
)
{:ok, %Record{data: data}} = run(q)
assert data == [
%{"left" => %{"a" => 1, "b" => 2}, "right" => %{"a" => 1, "c" => 4}},
%{"left" => %{"a" => 2, "b" => 3}}
]
{:ok, %Record{data: data}} = q |> zip |> run
assert data == [%{"a" => 1, "b" => 2, "c" => 4}, %{"a" => 2, "b" => 3}]
end
test "eq join arrays" do
table_create("test_1") |> run
table_create("test_2") |> run
table("test_1") |> insert([%{id: 3, a: 1, b: 2}, %{id: 2, a: 2, b: 3}]) |> run
table("test_2") |> insert([%{id: 1, c: 4}]) |> run
q = eq_join(table("test_1"), :a, table("test_2"), index: :id)
{:ok, %Collection{data: data}} = run(q)
{:ok, %Collection{data: data2}} = q |> zip |> run
table_drop("test_1") |> run
table_drop("test_2") |> run
assert data == [
%{"left" => %{"id" => 3, "a" => 1, "b" => 2}, "right" => %{"id" => 1, "c" => 4}}
]
assert data2 == [%{"id" => 1, "a" => 1, "b" => 2, "c" => 4}]
end
end
| 24.458333 | 93 | 0.430579 |
f7ec00460864ea8f60a55a5f901af6e495185666 | 4,479 | ex | Elixir | lib/mix/tasks/certs_dev.ex | kwoodwardizi/MongoosePush | 9d8408367ccaea56742c3963157a9e861dff1551 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/certs_dev.ex | kwoodwardizi/MongoosePush | 9d8408367ccaea56742c3963157a9e861dff1551 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/certs_dev.ex | kwoodwardizi/MongoosePush | 9d8408367ccaea56742c3963157a9e861dff1551 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Certs.Dev do
@moduledoc """
Generate fake certs (placeholders) for `HTTPS` endpoint and `APNS` service.
Please be aware that `APNS` requires valid Apple Developer certificates, so it
will not accept those fake certificates. Generated certificates may be used
only with mock APNS service (like one provided by docker
`mobify/apns-http2-mock-server`).
"""
@shortdoc "Generate fake certs (placeholders) for HTTPS endpoint and APNS"
use Mix.Task
# From: https://developer.apple.com/library/content/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html
@apns_topic_extn_id {1, 2, 840, 113_635, 100, 6, 3, 6}
# Here we use the binary extension extracted from a real APNS certificate. It's much better for
# testing to use the real extension instead of a generated one, since the generator would be based
# on a reverse-engineered structure that may not be correct. Also, testing decoding on an extension
# encoded using the same encoder is kinda pointless.
@apns_topic_extn_value <<48, 129, 133, 12, 26, 99, 111, 109, 46, 105, 110, 97, 107, 97, 110,
101, 116, 119, 111, 114, 107, 115, 46, 77, 97, 110, 103, 111, 115, 116,
97, 48, 5, 12, 3, 97, 112, 112, 12, 31, 99, 111, 109, 46, 105, 110, 97,
107, 97, 110, 101, 116, 119, 111, 114, 107, 115, 46, 77, 97, 110, 103,
111, 115, 116, 97, 46, 118, 111, 105, 112, 48, 6, 12, 4, 118, 111, 105,
112, 12, 39, 99, 111, 109, 46, 105, 110, 97, 107, 97, 110, 101, 116,
119, 111, 114, 107, 115, 46, 77, 97, 110, 103, 111, 115, 116, 97, 46,
99, 111, 109, 112, 108, 105, 99, 97, 116, 105, 111, 110, 48, 14, 12,
12, 99, 111, 109, 112, 108, 105, 99, 97, 116, 105, 111, 110>>
@spec run(term) :: :ok
def run(_) do
maybe_gen_dev_apns()
maybe_gen_prod_apns()
maybe_gen_https()
end
defp maybe_gen_dev_apns do
maybe_gen_cert("priv/apns/dev_cert.pem", "priv/apns/dev_key.pem", "mongoose-push-apns-dev")
end
defp maybe_gen_prod_apns do
extensions = [
{@apns_topic_extn_id, @apns_topic_extn_value}
]
maybe_gen_cert(
"priv/apns/prod_cert.pem",
"priv/apns/prod_key.pem",
"mongoose-push-apns-prod",
extensions
)
end
defp maybe_gen_https do
maybe_gen_cert("priv/ssl/fake_cert.pem", "priv/ssl/fake_key.pem", "mongoose-push")
end
defp maybe_gen_cert(cert_file, key_file, common_name, extensions \\ []) do
if File.exists?(cert_file) and File.exists?(key_file) do
:ok
else
gen_cert(cert_file, key_file, common_name, extensions)
end
end
defp gen_cert(cert_file, key_file, common_name, extensions) do
cert_dir = Path.dirname(cert_file)
key_dir = Path.dirname(key_file)
:ok = File.mkdir_p(cert_dir)
:ok = File.mkdir_p(key_dir)
ext_file = openssl_tmp_extfile(extensions)
req_file = create_csr!(common_name, key_file, cert_file)
:ok = sign_csr!(req_file, key_file, ext_file, cert_file)
:ok = File.rm!(ext_file)
:ok = File.rm!(req_file)
end
defp create_csr!(common_name, key_file, cert_file) do
req_file = cert_file <> ".csr"
{_, 0} =
System.cmd("openssl", [
"req",
"-new",
"-nodes",
"-days",
"365",
"-subj",
"/C=PL/ST=ML/L=Krakow/CN=" <> common_name,
"-newkey",
"rsa:2048",
"-keyout",
key_file,
"-out",
req_file
])
req_file
end
defp sign_csr!(req_file, key_file, ext_file, cert_file) do
{_, 0} =
System.cmd("openssl", [
"x509",
"-req",
"-days",
"365",
"-in",
req_file,
"-signkey",
key_file,
"-extfile",
ext_file,
"-out",
cert_file
])
:ok
end
defp openssl_tmp_extfile(extensions) do
ext_file = Path.join("/tmp", UUID.uuid4())
# Make sure the file exists even if there are no extensions
File.touch(ext_file)
for {ext_id, ext_bin} <- extensions do
ext_id = extn_id_to_string(ext_id)
ext_bin = Base.encode16(ext_bin)
:ok = File.write!(ext_file, ~s"#{ext_id}=DER:#{ext_bin}\n", [:append])
end
ext_file
end
defp extn_id_to_string(extn_id) do
extn_id
|> Tuple.to_list()
|> Enum.join(".")
end
end
| 30.889655 | 146 | 0.604153 |
f7ec1e865fa81e93f490e397e91e5325379e9cff | 3,447 | ex | Elixir | lib/gutenex/pdf/builders/image_builder.ex | tulinmola/gutenex | 09a17dada273e089685176b5a14820430624675f | [
"MIT"
] | 184 | 2015-01-16T23:05:42.000Z | 2018-10-25T18:51:52.000Z | lib/gutenex/pdf/builders/image_builder.ex | nicolasva/gutenex | 69aba56001455e3ff6ec6257d95b2f70e3fcffad | [
"MIT"
] | 15 | 2015-05-29T00:28:08.000Z | 2018-10-11T18:55:35.000Z | lib/gutenex/pdf/builders/image_builder.ex | nicolasva/gutenex | 69aba56001455e3ff6ec6257d95b2f70e3fcffad | [
"MIT"
] | 32 | 2015-02-14T07:39:48.000Z | 2018-09-12T21:01:02.000Z | defmodule Gutenex.PDF.Builders.ImageBuilder do
alias Gutenex.PDF.Context
alias Gutenex.PDF.RenderContext
alias Gutenex.PDF.Images
def build({%RenderContext{} = render_context, %Context{} = context}) do
render_context = add_images(render_context, Map.to_list(context.images))
{render_context, context}
end
def add_images(%RenderContext{} = render_context, []) do
%RenderContext{
render_context
| image_objects: Enum.reverse(render_context.image_objects)
}
end
def add_images(%RenderContext{} = render_context, [{image_alias, current_image} | images_tail]) do
add_image(render_context, current_image, image_alias)
|> add_images(images_tail)
end
defp add_image(render_context, image, image_alias) do
add_image_extra_object(render_context, image)
|> add_image_object(image)
|> add_image_alias(image_alias)
|> RenderContext.next_index()
end
@doc """
Calculate the attributes, any additional objects, and add the image to the
list of images
"""
defp add_image_object(%RenderContext{} = render_context, %Imagineer.Image.PNG{} = image) do
image_object = {
RenderContext.current_object(render_context),
{:stream, image_attributes(image, extra_attributes(image)), image_stream_data(image)}
}
%RenderContext{
render_context
| image_objects: [image_object | render_context.image_objects]
}
end
defp image_stream_data(%Imagineer.Image.PNG{} = image) do
Imagineer.Image.PNG.Pixels.NoInterlace.encode(image)
|> Enum.join()
|> compress_image_data
end
defp compress_image_data(decompressed_data) do
zlib_stream = :zlib.open()
:ok = :zlib.deflateInit(zlib_stream)
compressed_data = :zlib.deflate(zlib_stream, decompressed_data, :finish)
:ok = :zlib.deflateEnd(zlib_stream)
:ok = :zlib.close(zlib_stream)
:binary.list_to_bin(compressed_data)
end
@doc """
Adds the alias to the RenderContext#image_aliases map, under the assumption
that the current index is that of the image object
"""
defp add_image_alias(render_context, image_alias) do
image_reference = RenderContext.current_reference(render_context)
%RenderContext{
render_context
| image_aliases: Map.put(render_context.image_aliases, image_alias, image_reference)
}
end
@doc """
Extra attributes specific to the image format, color type, or other attributes
"""
def extra_attributes(%Imagineer.Image.PNG{color_type: 2} = image) do
%{
"Filter" => {:name, "FlateDecode"},
"ColorSpace" => {:name, Images.png_color(image.color_type)},
"DecodeParams" => decode_params(image),
"BitsPerComponent" => image.bit_depth
}
end
@doc """
PNGs with color type 2 have no extra object
returns the render_context
"""
defp add_image_extra_object(render_context, %Imagineer.Image.PNG{color_type: 2}) do
render_context
end
defp image_attributes(image, extra_attributes) do
{:dict,
Map.merge(
%{
"Type" => {:name, "XObject"},
"Width" => image.width,
"Height" => image.height,
"Subtype" => {:name, "Image"}
},
extra_attributes
)}
end
defp decode_params(image) do
{
:dict,
%{
"Colors" => Images.png_bits(image.color_type),
"Columns" => image.width,
"Predictor" => 15,
"BitsPerComponent" => image.bit_depth
}
}
end
end
| 28.966387 | 100 | 0.684943 |
f7ec3dfbadb129fa9b51a72d497afce097c6bdac | 2,051 | ex | Elixir | elixir/lib/homework/transactions.ex | Koyamot/Divvy-EngineerExercise | de2f2c0d1ad461b2bbb937fa1483c18d4ececc6a | [
"MIT"
] | null | null | null | elixir/lib/homework/transactions.ex | Koyamot/Divvy-EngineerExercise | de2f2c0d1ad461b2bbb937fa1483c18d4ececc6a | [
"MIT"
] | 8 | 2022-01-17T20:50:29.000Z | 2022-01-17T20:54:41.000Z | elixir/lib/homework/transactions.ex | Koyamot/Divvy-EngineerExercise | de2f2c0d1ad461b2bbb937fa1483c18d4ececc6a | [
"MIT"
] | 1 | 2022-01-19T06:55:41.000Z | 2022-01-19T06:55:41.000Z | defmodule Homework.Transactions do
@moduledoc """
The Transactions context.
"""
import Ecto.Query, warn: false
alias Homework.Repo
alias Homework.Transactions.Transaction
@doc """
Returns the list of transactions.
## Examples
iex> list_transactions([])
[%Transaction{}, ...]
"""
def list_transactions(_args) do
Repo.all(Transaction)
end
@doc """
Gets a single transaction.
Raises `Ecto.NoResultsError` if the Transaction does not exist.
## Examples
iex> get_transaction!(123)
%Transaction{}
iex> get_transaction!(456)
** (Ecto.NoResultsError)
"""
def get_transaction!(id), do: Repo.get!(Transaction, id)
@doc """
Creates a transaction.
## Examples
iex> create_transaction(%{field: value})
{:ok, %Transaction{}}
iex> create_transaction(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_transaction(attrs \\ %{}) do
%Transaction{}
|> Transaction.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a transaction.
## Examples
iex> update_transaction(transaction, %{field: new_value})
{:ok, %Transaction{}}
iex> update_transaction(transaction, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_transaction(%Transaction{} = transaction, attrs) do
transaction
|> Transaction.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a transaction.
## Examples
iex> delete_transaction(transaction)
{:ok, %Transaction{}}
iex> delete_transaction(transaction)
{:error, %Ecto.Changeset{}}
"""
def delete_transaction(%Transaction{} = transaction) do
Repo.delete(transaction)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking transaction changes.
## Examples
iex> change_transaction(transaction)
%Ecto.Changeset{data: %Transaction{}}
"""
def change_transaction(%Transaction{} = transaction, attrs \\ %{}) do
Transaction.changeset(transaction, attrs)
end
end
| 19.533333 | 71 | 0.643101 |
f7ec7da5e0a69b3b6bd68f0aea3d43dd1c12f4ad | 5,878 | ex | Elixir | lib/yacto/migration/structure.ex | aman-io/yacto | 4c26772343a9029923ad7e25245f17f2be22a1a1 | [
"Apache-2.0"
] | 56 | 2017-11-30T02:07:07.000Z | 2022-02-16T17:38:42.000Z | lib/yacto/migration/structure.ex | aman-io/yacto | 4c26772343a9029923ad7e25245f17f2be22a1a1 | [
"Apache-2.0"
] | 22 | 2018-01-04T00:34:51.000Z | 2021-08-01T06:52:10.000Z | lib/yacto/migration/structure.ex | aman-io/yacto | 4c26772343a9029923ad7e25245f17f2be22a1a1 | [
"Apache-2.0"
] | 13 | 2018-08-08T05:32:42.000Z | 2021-07-30T14:57:35.000Z | defmodule Yacto.Migration.Structure do
defstruct source: nil,
prefix: nil,
primary_key: [:id],
fields: [:id],
field_sources: %{id: :id},
types: %{id: :id},
associations: [],
embeds: [],
read_after_writes: [],
autogenerate_id: {:id, :id, :id},
meta: %{attrs: %{}, indices: %{}}
# undocumented keys:
# :query
# :hash
# :autogenerate
# :autoupdate
defp once_difference(nil, nil) do
:not_changed
end
defp once_difference(nil, to_value) do
{:create, to_value}
end
defp once_difference(from_value, nil) do
{:delete, from_value}
end
defp once_difference(from_value, to_value) do
if from_value == to_value do
:not_changed
else
{:changed, from_value, to_value}
end
end
defp myers_difference_to_map(difference) do
# get all :del
deletes =
for {key, value} <- difference, key == :del do
value
end
deletes = deletes |> Enum.concat() |> Enum.into(%{})
# get all :ins
inserts =
for {key, value} <- difference, key == :ins do
value
end
inserts = inserts |> Enum.concat() |> Enum.into(%{})
%{del: deletes, ins: inserts}
end
defp map_difference(from_map, to_map) do
from_map = from_map |> Map.to_list() |> Enum.sort()
to_map = to_map |> Map.to_list() |> Enum.sort()
difference = List.myers_difference(from_map, to_map)
myers_difference_to_map(difference)
end
def diff(structure_from, structure_to) do
# %Yacto.Migration.Structure{
# associations: [],
# autogenerate_id: {:id, :id, :id},
# embeds: [],
# fields: [:id, :name, :value],
# prefix: nil,
# primary_key: [:id],
# read_after_writes: [],
# source: "player",
# types: %{id: :id, name: :string, value: :integer}
# }
source = once_difference(structure_from.source, structure_to.source)
from_fields = Enum.map(structure_from.fields, &structure_from.field_sources[&1])
to_fields = Enum.map(structure_to.fields, &structure_to.field_sources[&1])
fields = List.myers_difference(from_fields, to_fields)
primary_key = List.myers_difference(structure_from.primary_key, structure_to.primary_key)
autogenerate_id =
once_difference(structure_from.autogenerate_id, structure_to.autogenerate_id)
from_types =
structure_from.types
|> Enum.map(fn {f, t} -> {Map.fetch!(structure_from.field_sources, f), t} end)
|> Enum.into(%{})
to_types =
structure_to.types
|> Enum.map(fn {f, t} -> {Map.fetch!(structure_to.field_sources, f), t} end)
|> Enum.into(%{})
types = map_difference(from_types, to_types)
from_attrs =
structure_from.meta.attrs
|> Enum.map(fn {f, t} -> {Map.fetch!(structure_from.field_sources, f), t} end)
|> Enum.into(%{})
to_attrs =
structure_to.meta.attrs
|> Enum.map(fn {f, t} -> {Map.fetch!(structure_to.field_sources, f), t} end)
|> Enum.into(%{})
from_indices =
structure_from.meta.indices
|> Enum.map(fn {{fields, opts}, value} ->
{{Enum.map(fields, &Map.fetch!(structure_from.field_sources, &1)), opts}, value}
end)
|> Enum.into(%{})
to_indices =
structure_to.meta.indices
|> Enum.map(fn {{fields, opts}, value} ->
{{Enum.map(fields, &Map.fetch!(structure_to.field_sources, &1)), opts}, value}
end)
|> Enum.into(%{})
meta = %{
attrs: map_difference(from_attrs, to_attrs),
indices: map_difference(from_indices, to_indices)
}
%{
source: source,
fields: fields,
primary_key: primary_key,
autogenerate_id: autogenerate_id,
types: types,
meta: meta
}
end
def from_schema(schema) do
keys =
struct(__MODULE__) |> Map.drop([:__struct__, :meta, :types, :field_sources]) |> Map.keys()
fields = keys |> Enum.map(fn key -> {key, schema.__schema__(key)} end)
# get types
types =
for field <- schema.__schema__(:fields), into: %{} do
# use specified migration type if :type is defined in meta
result =
if function_exported?(schema, :__meta__, 1) do
types = schema.__meta__(:types)
Map.fetch(types, field)
else
:error
end
type =
case result do
:error ->
# resolve ecto type if :type is not defined
type = schema.__schema__(:type, field)
type = Ecto.Type.type(type)
type
{:ok, type} ->
type
end
{field, type}
end
fields = [{:types, types} | fields]
# get field_sources
field_sources =
for field <- schema.__schema__(:fields), into: %{} do
{field, schema.__schema__(:field_source, field)}
end
fields = [{:field_sources, field_sources} | fields]
st = struct(__MODULE__, fields)
if function_exported?(schema, :__meta__, 1) do
meta_keys = Map.keys(struct(__MODULE__).meta)
metas = meta_keys |> Enum.map(fn key -> {key, schema.__meta__(key)} end) |> Enum.into(%{})
Map.put(st, :meta, metas)
else
st
end
end
def to_string(value) do
value
|> Inspect.Yacto.Migration.Structure.inspect(%Inspect.Opts{})
|> Inspect.Algebra.format(:infinity)
|> IO.iodata_to_binary()
end
end
defimpl Inspect, for: Yacto.Migration.Structure do
def inspect(value, opts) do
default = %Yacto.Migration.Structure{}
# remove default value
drop_keys =
for {k, v} <- Map.to_list(value), v == Map.fetch!(default, k) do
k
end
value = Map.drop(value, drop_keys)
Inspect.Map.inspect(value, Inspect.Atom.inspect(Yacto.Migration.Structure, opts), opts)
end
end
| 27.596244 | 96 | 0.59459 |
f7ec8439bb31b66081a3759b0186d7ab1615885a | 411 | exs | Elixir | config/config.exs | BenMorganIO/guardian_db | e16a4d76e6286c77e7b0097c6234be085eb23160 | [
"MIT"
] | null | null | null | config/config.exs | BenMorganIO/guardian_db | e16a4d76e6286c77e7b0097c6234be085eb23160 | [
"MIT"
] | null | null | null | config/config.exs | BenMorganIO/guardian_db | e16a4d76e6286c77e7b0097c6234be085eb23160 | [
"MIT"
] | null | null | null | use Mix.Config
config :guardian, Guardian.DB,
issuer: "GuardianDB",
secret_key: "HcdlxxmyDRvfrwdpjUPh2M8mWP+KtpOQK1g6fT5SHrnflSY8KiWeORqN6IZSJYTA",
repo: Guardian.DB.TestSupport.Repo
config :guardian_db, ecto_repos: [Guardian.DB.TestSupport.Repo]
config :guardian_db, Guardian.DB.TestSupport.Repo,
database: "guardian_db_test",
pool: Ecto.Adapters.SQL.Sandbox,
priv: "priv/temp/guardian_db_test"
| 29.357143 | 81 | 0.793187 |
f7ec8df8359b71eb583d15b856c06ea1fbd1b789 | 777 | ex | Elixir | lib/cachex/actions/count.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | [
"MIT"
] | 946 | 2017-06-26T00:36:58.000Z | 2022-03-29T19:52:31.000Z | lib/cachex/actions/count.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | [
"MIT"
] | 152 | 2017-06-28T10:01:24.000Z | 2022-03-24T18:46:13.000Z | lib/cachex/actions/count.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | [
"MIT"
] | 84 | 2017-06-30T05:30:31.000Z | 2022-03-01T20:23:16.000Z | defmodule Cachex.Actions.Count do
@moduledoc false
# Command module to allow the counting of a cache.
#
# Counting a cache will make sure to take the expiration time of items into
# consideration, making the semantics different to those of the `size()` calls.
alias Cachex.Query
# import needed macros
import Cachex.Spec
##############
# Public API #
##############
@doc """
Counts the number of items in a cache.
This will only return the number of items which have not yet expired; this
means that any items set to be removed in the next purge will not be added
to the count. Lazy expiration does not apply to this call.
"""
def execute(cache(name: name), _options),
do: { :ok, :ets.select_count(name, Query.unexpired(true)) }
end
| 29.884615 | 81 | 0.688546 |
f7ec99e7149c122b27e7ae2c4881b35418271f61 | 1,185 | ex | Elixir | lib/events_tools_web/channels/user_socket.ex | Apps-Team/conferencetools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | [
"Apache-2.0"
] | null | null | null | lib/events_tools_web/channels/user_socket.ex | Apps-Team/conferencetools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | [
"Apache-2.0"
] | 6 | 2017-10-05T20:16:34.000Z | 2017-10-05T20:36:11.000Z | lib/events_tools_web/channels/user_socket.ex | apps-team/events-tools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | [
"Apache-2.0"
] | null | null | null | defmodule EventsToolsWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", EventsToolsWeb.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# EventsToolsWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31.184211 | 86 | 0.706329 |
f7eca8d781b6e717b4ca6891936dc1c811824e45 | 1,075 | ex | Elixir | test/support/conn_case.ex | manojsamanta/codebreaker-prototype | 14d521db45784dee692de9e7252dd6a54bb793bb | [
"MIT"
] | null | null | null | test/support/conn_case.ex | manojsamanta/codebreaker-prototype | 14d521db45784dee692de9e7252dd6a54bb793bb | [
"MIT"
] | null | null | null | test/support/conn_case.ex | manojsamanta/codebreaker-prototype | 14d521db45784dee692de9e7252dd6a54bb793bb | [
"MIT"
] | null | null | null | defmodule LiveViewDemoWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias LiveViewDemoWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint LiveViewDemoWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(LiveViewDemo.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(LiveViewDemo.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27.564103 | 74 | 0.724651 |
f7ecb412d52dfb8038c52862a9c3aba5a8f0146d | 109 | ex | Elixir | examples/lib/fibonacci.ex | jresendiz27/TestingElixir | b211876aa6c467221260a8aab1cf75b11fa0c5a7 | [
"Apache-2.0"
] | null | null | null | examples/lib/fibonacci.ex | jresendiz27/TestingElixir | b211876aa6c467221260a8aab1cf75b11fa0c5a7 | [
"Apache-2.0"
] | null | null | null | examples/lib/fibonacci.ex | jresendiz27/TestingElixir | b211876aa6c467221260a8aab1cf75b11fa0c5a7 | [
"Apache-2.0"
] | null | null | null | defmodule Fibonacci do
def fib(0), do: 0
def fib(1), do: 1
def fib(n), do: fib(n - 1) + fib(n - 2)
end
| 18.166667 | 41 | 0.568807 |
f7ecdc8315989a015d120601139b91f9ccb43819 | 1,248 | ex | Elixir | lib/surface/formatter/phases/newlines.ex | RudolfMan/surface | ee8c1546b77c91fe08e7360e64666c613bc5a900 | [
"MIT"
] | 468 | 2021-03-11T23:10:41.000Z | 2022-03-30T20:12:06.000Z | lib/surface/formatter/phases/newlines.ex | RudolfMan/surface | ee8c1546b77c91fe08e7360e64666c613bc5a900 | [
"MIT"
] | 248 | 2021-03-12T07:39:37.000Z | 2022-03-31T13:36:44.000Z | lib/surface/formatter/phases/newlines.ex | RudolfMan/surface | ee8c1546b77c91fe08e7360e64666c613bc5a900 | [
"MIT"
] | 46 | 2021-03-22T09:28:58.000Z | 2022-03-23T21:32:20.000Z | defmodule Surface.Formatter.Phases.Newlines do
@moduledoc """
Standardizes usage of newlines.
- Prevents more than 1 empty line in a row.
- Prevents an empty line separating an opening/closing tag from the contents inside.
"""
@behaviour Surface.Formatter.Phase
alias Surface.Formatter.Phase
def run(nodes, _opts) do
nodes
|> collapse_newlines()
|> prevent_empty_line_at_beginning()
|> prevent_empty_line_at_end()
end
defp collapse_newlines(nodes) do
nodes
|> Enum.chunk_by(&(&1 == :newline))
|> Enum.map(fn
[:newline, :newline | _] -> [:newline, :newline]
nodes -> nodes
end)
|> Enum.flat_map(&Function.identity/1)
|> Phase.transform_element_children(&collapse_newlines/1)
end
defp prevent_empty_line_at_beginning(nodes) do
nodes
|> case do
[:newline, :newline | rest] -> [:newline | rest]
_ -> nodes
end
|> Phase.transform_element_children(&prevent_empty_line_at_beginning/1)
end
defp prevent_empty_line_at_end(nodes) do
nodes
|> Enum.slice(-2..-1)
|> case do
[:newline, :newline] -> Enum.slice(nodes, 0..-2)
_ -> nodes
end
|> Phase.transform_element_children(&prevent_empty_line_at_end/1)
end
end
| 25.469388 | 86 | 0.669071 |
f7ed1a481d4190c903ebbeccd9b8ea8050455606 | 227 | exs | Elixir | priv/repo/migrations/20170123152224_add_password_to_user.exs | koenighotze/callforpaper | 3ca57abc0cd8d857dedf20e70bce841264d0fe30 | [
"MIT"
] | 1 | 2017-01-25T21:34:37.000Z | 2017-01-25T21:34:37.000Z | priv/repo/migrations/20170123152224_add_password_to_user.exs | koenighotze/callforpaper | 3ca57abc0cd8d857dedf20e70bce841264d0fe30 | [
"MIT"
] | 40 | 2017-02-09T07:22:25.000Z | 2020-10-28T13:43:12.000Z | priv/repo/migrations/20170123152224_add_password_to_user.exs | koenighotze/callforpaper | 3ca57abc0cd8d857dedf20e70bce841264d0fe30 | [
"MIT"
] | null | null | null | defmodule Callforpapers.Repo.Migrations.AddPasswordToUser do
use Ecto.Migration
def change do
alter table(:presenters) do
add :password, :string, virtual: true
add :password_hash, :string
end
end
end
| 20.636364 | 60 | 0.718062 |
f7ed341b10fd5e23f78095f42b14ec2fbb22c757 | 4,788 | ex | Elixir | lib/covid19_api_web/controllers/covid19_controller.ex | HenningLanghorst/jhu_csse_covid19_api | 46861f31ce7154f74c377d37a31bd4283524894f | [
"MIT"
] | null | null | null | lib/covid19_api_web/controllers/covid19_controller.ex | HenningLanghorst/jhu_csse_covid19_api | 46861f31ce7154f74c377d37a31bd4283524894f | [
"MIT"
] | null | null | null | lib/covid19_api_web/controllers/covid19_controller.ex | HenningLanghorst/jhu_csse_covid19_api | 46861f31ce7154f74c377d37a31bd4283524894f | [
"MIT"
] | null | null | null | defmodule Covid19ApiWeb.Covid19Controller do
use Covid19ApiWeb, :controller
import Covid19ApiWeb.DateOperations
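# Maps each supported time-series name to its JHU CSSE global CSV file;
# the paths are resolved against the REPO_BASE_DIR env var when a request is handled.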
@files %{
"confirmed" =>
"csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_confirmed_global.csv",
"deaths" =>
"csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_deaths_global.csv",
"recovered" =>
"csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_recovered_global.csv"
}
def time_series(conn, %{
"country" => country,
"time_series" => time_series,
"province" => province
})
when is_map_key(@files, time_series) and province != "" do
csv_data =
time_series
|> file_name()
|> to_file_path()
|> csv_data()
|> Enum.find(&map_with_country_province?(&1, country, province))
if csv_data do
conn |> json(csv_data |> to_output() |> to_response_body())
else
conn |> put_status(404) |> json(%{error: "country/province not found"})
end
end
def time_series(conn, %{"country" => country, "province" => province}) do
result =
@files
|> Map.to_list()
|> Enum.map(fn {time_series, file_name} ->
{time_series,
file_name
|> to_file_path()
|> csv_data()
|> Stream.filter(&map_with_country_province?(&1, country, province))
|> Stream.map(&to_output/1)
|> Enum.flat_map(& &1)}
end)
|> Stream.filter(fn {_, val} -> val != [] end)
|> Map.new()
if result == %{} do
conn |> put_status(404) |> json(%{error: "country not found"})
else
conn |> json(result)
end
end
def time_series(conn, %{"country" => country, "time_series" => time_series})
when is_map_key(@files, time_series) do
csv_data =
time_series
|> file_name()
|> to_file_path()
|> csv_data()
|> Enum.find(&map_with_country?(&1, country))
if csv_data do
conn |> json(csv_data |> to_output() |> to_response_body())
else
conn |> put_status(404) |> json(%{error: "country not found"})
end
end
def time_series(conn, %{"country" => country}) do
result =
@files
|> Map.to_list()
|> Enum.map(fn {time_series, file_name} ->
{time_series,
file_name
|> to_file_path()
|> csv_data()
|> Stream.filter(&map_with_country?(&1, country))
|> Stream.map(&to_output/1)
|> Enum.flat_map(& &1)}
end)
|> Stream.filter(fn {_, val} -> val != [] end)
|> Map.new()
if result == %{} do
conn |> put_status(404) |> json(%{error: "country not found"})
else
conn |> json(result)
end
end
def time_series(conn, %{"time_series" => invalid}) do
conn |> put_status(404) |> json(%{error: "invalid time_series: '#{invalid}'"})
end
def countries(conn, _) do
countries =
@files
|> Map.keys()
|> Stream.flat_map(fn time_series ->
time_series
|> file_name()
|> to_file_path()
|> csv_data()
|> Stream.map(fn %{"Country/Region" => country} -> country end)
end)
|> Stream.uniq()
|> Enum.sort()
|> to_response_body()
conn |> json(countries)
end
def provinces(conn, %{"country" => country}) do
countries =
@files
|> Map.keys()
|> Stream.flat_map(fn time_series ->
time_series
|> file_name()
|> to_file_path()
|> csv_data()
|> Stream.flat_map(fn %{"Country/Region" => c, "Province/State" => p} ->
if c == country and p != "", do: [p], else: []
end)
end)
|> Stream.uniq()
|> Enum.sort()
|> to_response_body()
conn |> json(countries)
end
defp map_with_country_province?(
%{"Country/Region" => c, "Province/State" => p},
country,
province
) do
c == country and p == province
end
defp map_with_country_province?(_, _, _), do: false
defp map_with_country?(%{"Country/Region" => c, "Province/State" => ""}, country) do
c == country
end
defp map_with_country?(_, _), do: false
defp file_name(time_series), do: @files |> Map.get(time_series)
defp to_file_path(file), do: System.get_env("REPO_BASE_DIR", ".") |> Path.join(file)
defp csv_data(path) do
path
|> Path.expand(__DIR__)
|> File.stream!()
|> CSV.decode!(headers: true)
end
defp to_output(raw) do
raw
|> Map.to_list()
|> Stream.flat_map(fn {csv_date, value} ->
case parse_date(csv_date) do
{:ok, date} -> [%{date: date, count: value |> String.to_integer()}]
_ -> []
end
end)
|> Enum.sort()
end
defp to_response_body(data) when is_list(data), do: %{result: data}
end
| 26.748603 | 94 | 0.573517 |
f7ed46af018d95ef7050c94c53ba03a339d96dff | 346 | ex | Elixir | lib/makeup/lexer.ex | rubysolo/makeup | e1c5d27e3f14c04904d74b94b6298bbc28a406c3 | [
"BSD-2-Clause"
] | null | null | null | lib/makeup/lexer.ex | rubysolo/makeup | e1c5d27e3f14c04904d74b94b6298bbc28a406c3 | [
"BSD-2-Clause"
] | null | null | null | lib/makeup/lexer.ex | rubysolo/makeup | e1c5d27e3f14c04904d74b94b6298bbc28a406c3 | [
"BSD-2-Clause"
] | null | null | null | defmodule Makeup.Lexer do
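  @moduledoc """
  Shared lexer helpers: `merge/1` joins adjacent tokens that carry the same
  tag and metadata, and `unlex/1` joins a token list back into the original
  source string.
  """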
def merge([{tag, meta, value1}, {tag, meta, value2} | rest]),
    do: merge([{tag, meta, value1 <> value2} | rest])
def merge([token | rest]),
do: [token | merge(rest)]
def merge([]),
do: []
def unlex(tokens) do
tokens
|> Enum.map(fn {_, _, value} -> value end)
|> Enum.join("")
end
end | 21.625 | 63 | 0.557803 |
f7ed5225de8fe3ac98ae8c45b7cd615fa9fa6561 | 10,432 | exs | Elixir | apps/ewallet/test/ewallet/fetchers/amount_fetcher_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/test/ewallet/fetchers/amount_fetcher_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/test/ewallet/fetchers/amount_fetcher_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.AmountFetcherTest do
use EWallet.DBCase, async: true
import EWalletDB.Factory
alias EWallet.AmountFetcher
describe "fetch/3 with amount" do
test "returns error when passing amount and from_token_id/to_token_id" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"amount" => 0,
"from_token_id" => token_1.id,
"to_token_id" => token_2.id
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. `amount` not allowed when exchanging values. Use `from_amount` and/or `to_amount`."}
end
test "sets the amount in from_amount and to_amount" do
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"amount" => 100
},
%{},
%{}
)
assert res == :ok
assert from == %{from_amount: 100}
assert to == %{to_amount: 100}
assert exchange == %{}
end
test "supports string integer" do
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"amount" => "100"
},
%{},
%{}
)
assert res == :ok
assert from == %{from_amount: 100}
assert to == %{to_amount: 100}
assert exchange == %{}
end
test "returns an error if amount is not an integer (float)" do
res =
AmountFetcher.fetch(
%{
"amount" => 100.2
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. `amount` is not an integer: 100.2"}
end
test "returns an error if amount is not an integer (string)" do
res =
AmountFetcher.fetch(
%{
"amount" => "fake"
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. String number is not a valid number: 'fake'."}
end
end
describe "fetch/3 with from_amount/to_amount" do
test "sets from_amount and to_amount when valid integer" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => 100,
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "support string integers" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => "100",
"to_amount" => "200"
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "sets from_amount only when sending nil to_amount with exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => 100,
"to_amount" => nil
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "sets from_amount only when sending nil to_amount without exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"from_amount" => 100,
"to_amount" => nil
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == {:error, :exchange_pair_not_found}
end
test "sets from_amount only when not sending to_amount with exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => 100
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "sets from_amount only when not sending to_amount without exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"from_amount" => 100
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == {:error, :exchange_pair_not_found}
end
test "sets to_amount only when sending nil from_amount with exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => nil,
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "sets to_amount only when sending nil from_amount without exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"from_amount" => nil,
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == {:error, :exchange_pair_not_found}
end
test "sets to_amount only when not sending from_amount with exchange rate" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "returns an error when exchange pair is not found" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == {:error, :exchange_pair_not_found}
end
test "returns an error when sending invalid from_amount and to_amount" do
res =
AmountFetcher.fetch(
%{
"from_amount" => "fake",
"to_amount" => "fake"
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. String numbers are not valid numbers: 'fake, fake'."}
end
test "returns an error when sending invalid from_amount" do
res =
AmountFetcher.fetch(
%{
"from_amount" => "fake"
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. String number is not a valid number: 'fake'."}
end
test "returns an error when sending invalid to_amount" do
res =
AmountFetcher.fetch(
%{
"to_amount" => "fake"
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. String number is not a valid number: 'fake'."}
end
test "returns an error when sending nil to_amount" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"to_amount" => nil
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. `amount`, `from_amount` or `to_amount` is required."}
end
end
describe "fetch/3 with invalid params" do
test "returns an error when sending nil to_amount" do
res = AmountFetcher.fetch(%{}, %{}, %{})
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. `amount`, `from_amount` or `to_amount` is required."}
end
end
end
| 27.308901 | 129 | 0.545725 |
f7ed5b4528c78d5b114f2c4b761cfac65fe7c7a4 | 1,365 | ex | Elixir | lib/godfist/requests/champion_mastery.ex | whitfin/godfist | b311b17b3db3b47b650b070da4fd753f4492560d | [
"MIT"
] | 6 | 2017-07-27T08:23:25.000Z | 2020-01-14T19:17:52.000Z | lib/godfist/requests/champion_mastery.ex | whitfin/godfist | b311b17b3db3b47b650b070da4fd753f4492560d | [
"MIT"
] | 7 | 2017-08-26T06:01:16.000Z | 2018-04-03T18:30:44.000Z | lib/godfist/requests/champion_mastery.ex | whitfin/godfist | b311b17b3db3b47b650b070da4fd753f4492560d | [
"MIT"
] | 2 | 2017-08-26T05:50:29.000Z | 2018-10-01T20:44:42.000Z | defmodule Godfist.ChampionMastery do
@moduledoc """
Module to get Champion masteries
"""
alias Godfist.LeagueRates
@endpoint "/lol/champion-mastery/v3"
@doc """
Get all champion mastery entries by number of champion points.
## Example
```elixir
iex> Godfist.ChampionMastery.by_summoner(:na, summonerid)
```
"""
@spec by_summoner(atom, integer) :: {:ok, map} | {:error, String.t()}
def by_summoner(region, id) do
rest = @endpoint <> "/champion-masteries/by-summoner/#{id}"
LeagueRates.handle_rate(region, rest, :other)
end
@doc """
Get a champion mastery by player id and champion id.
## Example
```elixir
iex> Godfist.ChampionMastery.by_champion(:na, summid, champid)
```
"""
@spec by_champion(atom, integer, integer) :: {:ok, map} | {:error, String.t()}
def by_champion(region, id, champ_id) do
rest = @endpoint <> "/champion-masteries/by-summoner/#{id}/by-champion/#{champ_id}"
LeagueRates.handle_rate(region, rest, :other)
end
@doc """
Get a player's total mastery score by player id.
## Example
```elixir
iex> Godfist.ChampionMastery.total(:lan, summid)
```
"""
@spec total(atom, integer) :: {:ok, map} | {:error, String.t()}
def total(region, id) do
rest = @endpoint <> "/scores/by-summoner/#{id}"
LeagueRates.handle_rate(region, rest, :other)
end
end
| 23.534483 | 87 | 0.65641 |
f7edd0278f4399c140a87e2f37308deb111eae4e | 679 | ex | Elixir | eSalud/web/views/diagnostic__patient_view.ex | ret16339/BaseDeDatosES | 0a6daa40be857c5a31d4fa5afba22c764bf9f044 | [
"Apache-2.0"
] | null | null | null | eSalud/web/views/diagnostic__patient_view.ex | ret16339/BaseDeDatosES | 0a6daa40be857c5a31d4fa5afba22c764bf9f044 | [
"Apache-2.0"
] | null | null | null | eSalud/web/views/diagnostic__patient_view.ex | ret16339/BaseDeDatosES | 0a6daa40be857c5a31d4fa5afba22c764bf9f044 | [
"Apache-2.0"
] | null | null | null | defmodule ESalud.Diagnostic_PatientView do
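  @moduledoc """
  Renders diagnostic/patient association records as JSON.
  """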
use ESalud.Web, :view
def render("index.json", %{diagnostic_patients: diagnostic_patients}) do
%{data: render_many(diagnostic_patients, ESalud.Diagnostic_PatientView, "diagnostic__patient.json")}
end
def render("show.json", %{diagnostic__patient: diagnostic__patient}) do
%{data: render_one(diagnostic__patient, ESalud.Diagnostic_PatientView, "diagnostic__patient.json")}
end
def render("diagnostic__patient.json", %{diagnostic__patient: diagnostic__patient}) do
%{id: diagnostic__patient.id,
patient_id: diagnostic__patient.patient_id,
diagnostic_id: diagnostic__patient.diagnostic_id}
end
end
| 37.722222 | 104 | 0.78056 |
f7eddf401189067580a77900c2c8b2484be0573a | 65 | ex | Elixir | lib/msir/mailer.ex | atilla777/msir | 602df87890c2d49b4a19ce490f271b503484b618 | [
"BSD-2-Clause"
] | null | null | null | lib/msir/mailer.ex | atilla777/msir | 602df87890c2d49b4a19ce490f271b503484b618 | [
"BSD-2-Clause"
] | null | null | null | lib/msir/mailer.ex | atilla777/msir | 602df87890c2d49b4a19ce490f271b503484b618 | [
"BSD-2-Clause"
] | null | null | null | defmodule Msir.Mailer do
use Swoosh.Mailer, otp_app: :msir
end
| 16.25 | 35 | 0.769231 |
f7ede5d6ca9c6bffc41ef818f255c2f42b535bd3 | 1,106 | exs | Elixir | test/bank_stone/operations_test.exs | theguuholi/bank_stone | 150a7c7ac9eb2d9bb977d1d784518b39df5c5ab5 | [
"MIT"
] | 3 | 2020-04-25T11:35:06.000Z | 2021-10-06T19:59:47.000Z | test/bank_stone/operations_test.exs | theguuholi/bank_stone | 150a7c7ac9eb2d9bb977d1d784518b39df5c5ab5 | [
"MIT"
] | 12 | 2019-11-04T11:06:37.000Z | 2019-11-21T11:03:57.000Z | test/bank_stone/operations_test.exs | theguuholi/bank_stone | 150a7c7ac9eb2d9bb977d1d784518b39df5c5ab5 | [
"MIT"
] | 1 | 2020-12-11T07:05:04.000Z | 2020-12-11T07:05:04.000Z | defmodule BankStone.OperationsTest do
use BankStone.DataCase
alias BankStone.Repo
describe "users" do
alias BankStone.Accounts.Account
alias BankStone.Accounts.Operations
def account_fixture do
{:ok, account} = Repo.insert(Account.changeset(%Account{}))
account
end
test "perform/3 should decrease balance" do
account = account_fixture()
assert account.balance == 1000
{:ok, account} = Operations.perform(account.id, Decimal.new("100"), :sub)
assert account.balance == Decimal.new("900.00")
end
test "perform/3 should show message error" do
account = account_fixture()
assert account.balance == 1000
error = Operations.perform(account.id, Decimal.new("1100"), :sub)
assert error == {:error, "You can`t have negative balance"}
end
test "perform/3 should increase balance" do
account = account_fixture()
assert account.balance == 1000
{:ok, account} = Operations.perform(account.id, Decimal.new("100"), :add)
assert account.balance == Decimal.new("1100.00")
end
end
end
| 29.891892 | 79 | 0.670886 |
f7edf809ac260bd4bcd1b1de36ca3915a5eb3a31 | 65 | ex | Elixir | lib/birdcage_web/views/layout_view.ex | forest/birdcage | 4c1d851d9ae01ebe94c0366a6e2d932db7435d19 | [
"Apache-2.0"
] | 2 | 2020-10-04T16:07:25.000Z | 2020-12-25T14:28:28.000Z | lib/birdcage_web/views/layout_view.ex | cognizant-softvision/birdcage | 2f766c7bc6d70f1243aab56ad9ac7f7d4c5014fb | [
"Apache-2.0"
] | 11 | 2020-07-10T16:05:17.000Z | 2020-08-25T23:44:34.000Z | lib/birdcage_web/views/layout_view.ex | forest/birdcage | 4c1d851d9ae01ebe94c0366a6e2d932db7435d19 | [
"Apache-2.0"
] | 1 | 2020-08-06T18:56:52.000Z | 2020-08-06T18:56:52.000Z | defmodule BirdcageWeb.LayoutView do
use BirdcageWeb, :view
end
| 16.25 | 35 | 0.815385 |
f7edfadeba56ef1a8695ffb76a3d3b5989a3d386 | 22 | ex | Elixir | lib/elixir/lib/tuple.ex | ekosz/elixir | 62e375bc711b4072e1b68de776e96cc31f571d45 | [
"Apache-2.0"
] | 1 | 2017-10-29T16:37:08.000Z | 2017-10-29T16:37:08.000Z | lib/elixir/lib/tuple.ex | ekosz/elixir | 62e375bc711b4072e1b68de776e96cc31f571d45 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/tuple.ex | ekosz/elixir | 62e375bc711b4072e1b68de776e96cc31f571d45 | [
"Apache-2.0"
] | null | null | null | defmodule Tuple do
end | 11 | 18 | 0.863636 |
f7ee09249843a23bc4d028225790249afebf48b5 | 21,287 | exs | Elixir | test/operation_test.exs | kor-and-m/exop | 774d14a5c680b98481b333d75d8e2918aa2a9341 | [
"MIT"
] | null | null | null | test/operation_test.exs | kor-and-m/exop | 774d14a5c680b98481b333d75d8e2918aa2a9341 | [
"MIT"
] | null | null | null | test/operation_test.exs | kor-and-m/exop | 774d14a5c680b98481b333d75d8e2918aa2a9341 | [
"MIT"
] | null | null | null | defmodule OperationTest do
use ExUnit.Case, async: true
defmodule Operation do
use Exop.Operation
parameter :param1, type: :integer
parameter :param2, type: :string
def process(params) do
["This is the process/1 params", params]
end
end
@valid_contract [
%{name: :param1, opts: [type: :integer]},
%{name: :param2, opts: [type: :string]}
]
test "defines contract/0" do
assert :functions |> Operation.__info__ |> Keyword.has_key?(:contract)
end
test "stores defined properties in a contract" do
assert Operation.contract |> is_list
assert Operation.contract |> List.first |> is_map
assert Enum.sort(Operation.contract) == Enum.sort(@valid_contract)
end
test "defines run/1" do
assert :functions |> Operation.__info__ |> Keyword.has_key?(:run)
end
test "process/1 takes a single param which is Map type" do
assert Operation.run(param1: 1, param2: "string") == {:ok, ["This is the process/1 params", %{param1: 1, param2: "string"}]}
end
test "run/1: returns :validation_failed error when contract didn't pass validation" do
{:error, {:validation, reasons}} = Operation.run(param1: "not integer", param2: 777)
assert is_map(reasons)
end
test "run/1: pass default value of missed parameter" do
defmodule DefOperation do
use Exop.Operation
parameter :param2, required: false
parameter :param, default: 999
def process(params) do
params[:param]
end
end
assert DefOperation.run == {:ok, 999}
end
test "run/1: pass default value of required missed parameter (thus pass a validation)" do
defmodule Def2Operation do
use Exop.Operation
parameter :param, required: true, default: 999
def process(params) do
params[:param]
end
end
assert Def2Operation.run() == {:ok, 999}
end
test "run/1: doesn't pass default value if a parameter was passed to run/1" do
defmodule Def3Operation do
use Exop.Operation
parameter :param, type: :integer, default: 999
def process(params) do
params[:param]
end
end
assert Def3Operation.run(param: 111) == {:ok, 111}
end
test "params/1: doesn't invoke a contract validation" do
assert Operation.process(param1: "not integer", param2: 777) == ["This is the process/1 params", [param1: "not integer", param2: 777]]
end
test "defined_params/0: returns params that were defined in the contract, filter out others" do
defmodule Def4Operation do
use Exop.Operation
parameter :a
parameter :b
def process(params) do
params |> defined_params
end
end
assert Def4Operation.run(a: 1, b: 2, c: 3) == {:ok, %{a: 1, b: 2}}
end
test "defined_params/0: respects defaults" do
defmodule Def5Operation do
use Exop.Operation
parameter :a
parameter :b, default: 2
def process(params) do
params |> defined_params
end
end
assert Def5Operation.run(a: 1, c: 3) == {:ok, %{a: 1, b: 2}}
end
test "run/1: returns the last defined value for duplicated keys" do
defmodule Def6Operation do
use Exop.Operation
parameter :a
parameter :b
def process(params), do: params
end
assert Def6Operation.run(a: 1, b: 3) == {:ok, %{a: 1, b: 3}}
assert Def6Operation.run(%{a: 1, b: 3}) == {:ok, %{a: 1, b: 3}}
assert Def6Operation.run(a: 1, a: 3, b: 2) == {:ok, %{a: 3, b: 2}}
end
test "interrupt/1: interupts process and returns the interuption result" do
defmodule Def7Operation do
use Exop.Operation
parameter :x, required: false
def process(_params) do
interrupt(%{my_error: "oops"})
:ok
end
end
assert Def7Operation.run == {:interrupt, %{my_error: "oops"}}
end
test "interrupt/1: pass other exceptions" do
defmodule Def8Operation do
use Exop.Operation
parameter :x, required: false
def process(_params) do
raise "runtime error"
interrupt(%{my_error: "oops"})
:ok
end
end
assert_raise(RuntimeError, fn -> Def8Operation.run end)
end
defmodule TruePolicy do
use Exop.Policy
def test(_opts), do: true
end
defmodule FalsePolicy do
use Exop.Policy
def test(_opts), do: false
end
defmodule TestUser do
defstruct [:name, :email]
end
test "stores policy module and action" do
defmodule Def9Operation do
use Exop.Operation
policy TruePolicy, :test
parameter :x, required: false
def process(_params), do: current_policy()
end
assert Def9Operation.run == {:ok, {TruePolicy, :test}}
end
test "authorizes with provided policy" do
defmodule Def10Operation do
use Exop.Operation
policy TruePolicy, :test
parameter :x, required: false
def process(_params), do: authorize(user: %TestUser{})
end
assert Def10Operation.run == {:ok, :ok}
defmodule Def11Operation do
use Exop.Operation
policy FalsePolicy, :test
parameter :x, required: false
def process(_params), do: authorize(user: %TestUser{})
end
assert Def11Operation.run == {:error, {:auth, :test}}
end
test "operation invokation stops if auth failed" do
defmodule Def12Operation do
use Exop.Operation
policy FalsePolicy, :test
parameter :x, required: false
def process(_params) do
authorize %TestUser{}
:you_will_never_get_here
end
end
assert Def12Operation.run == {:error, {:auth, :test}}
end
test "returns errors with malformed policy definition" do
defmodule Def14Operation do
use Exop.Operation
policy UnknownPolicy, :test
parameter :x, required: false
def process(_params), do: authorize(%TestUser{})
end
defmodule Def15Operation do
use Exop.Operation
policy TruePolicy, :unknown_action
parameter :x, required: false
def process(_params), do: authorize(%TestUser{})
end
assert Def14Operation.run == {:error, {:auth, :unknown_policy}}
assert Def15Operation.run == {:error, {:auth, :unknown_policy}}
end
test "the last policy definition overrides previous definitions" do
defmodule Def16Operation do
use Exop.Operation
policy TruePolicy, :test
policy FalsePolicy, :test
parameter :x, required: false
def process(_params), do: current_policy()
end
assert Def16Operation.run == {:ok, {FalsePolicy, :test}}
end
test "coerce option changes a parameter value (and after defaults resolving)" do
defmodule Def17Operation do
use Exop.Operation
parameter :a, default: 5, coerce_with: &__MODULE__.coerce/1
parameter :b
def process(params), do: {params[:a], params[:b]}
def coerce(x), do: x * 2
end
assert Def17Operation.run(b: 0) == {:ok, {10, 0}}
end
test "coerce option changes a parameter value before validation" do
defmodule Def18Operation do
use Exop.Operation
parameter :a, numericality: %{greater_than: 0}, coerce_with: &__MODULE__.coerce/1
def process(params), do: params[:a]
def coerce(x), do: x * 2
end
defmodule Def19Operation do
use Exop.Operation
parameter :a, required: true, coerce_with: &__MODULE__.coerce/1
def process(params), do: params[:a]
def coerce(_x), do: "str"
end
defmodule Def20Operation do
use Exop.Operation
parameter :a, func: &__MODULE__.validate/2, coerce_with: &__MODULE__.coerce/1
parameter :b, func: &__MODULE__.validate/1
def process(params), do: params
def validate(_params, x), do: validate(x)
def validate(x), do: x > 0
def coerce(x), do: x + 1
end
assert Def18Operation.run(a: 2) == {:ok, 4}
assert Def18Operation.run(a: 0) == {:error, {:validation, %{a: ["must be greater than 0"]}}}
assert Def19Operation.run() == {:ok, "str"}
assert Def20Operation.run(a: -1, b: 0) == {:error, {:validation, %{a: ["isn't valid"], b: ["isn't valid"]}}}
assert Def20Operation.run(a: 0, b: 0) == {:error, {:validation, %{b: ["isn't valid"]}}}
assert Def20Operation.run(a: 0, b: 1) == {:ok, %{a: 1, b: 1}}
end
test "run!/1: return operation's result with valid params" do
defmodule Def21Operation do
use Exop.Operation
parameter :param, required: true
def process(params) do
params[:param] <> " World!"
end
end
assert Def21Operation.run!(param: "Hello") == "Hello World!"
end
test "run!/1: return an error with invalid params" do
defmodule Def22Operation do
use Exop.Operation
parameter :param, required: true
def process(params) do
params[:param] <> " World!"
end
end
assert_raise Exop.Validation.ValidationError, fn -> Def22Operation.run!() end
end
test "run!/1: doesn't affect unhandled errors" do
defmodule Def23Operation do
use Exop.Operation
parameter :param, required: true
def process(_params), do: raise("oops")
end
assert_raise RuntimeError, "oops", fn -> Def23Operation.run!(param: "hi!") end
end
test "run!/1: doesn't affect interruptions" do
defmodule Def24Operation do
use Exop.Operation
parameter :param
def process(_params), do: interrupt()
end
assert Def24Operation.run!(param: :a) == {:interrupt, nil}
end
test "run/1: returns unwrapped error tuple if process/1 returns it" do
defmodule Def25Operation do
use Exop.Operation
parameter :param
def process(params) do
if params[:param], do: params[:param], else: {:error, :ooops}
end
end
assert Def25Operation.run(param: 111) == {:ok, 111}
assert Def25Operation.run(param: nil) == {:error, :ooops}
end
test "run!/1: returns unwrapped error tuple if process/1 returns it" do
defmodule Def26Operation do
use Exop.Operation
parameter :param
def process(params) do
if params[:param], do: params[:param], else: {:error, :ooops}
end
end
assert Def26Operation.run!(param: 111) == 111
assert Def26Operation.run!(param: nil) == {:error, :ooops}
end
test "custom validation function takes a contract as the first parameter" do
defmodule Def27Operation do
use Exop.Operation
parameter :a, default: 5
parameter :b, func: &__MODULE__.custom_validation/2
def process(params), do: {params[:a], params[:b]}
def custom_validation(params, b) do
params[:a] > 10 && b < 10
end
end
assert Def27Operation.run(a: 11, b: 0) == {:ok, {11, 0}}
assert Def27Operation.run(a: 0, b: 0) == {:error, {:validation, %{b: ["isn't valid"]}}}
end
test "run/1: returns unwrapped tuple {:ok, result} if process/1 returns {:ok, result}" do
defmodule Def28Operation do
use Exop.Operation
parameter :param, required: true
def process(params) do
{:ok, params[:param]}
end
end
assert Def28Operation.run(param: "hello") == {:ok, "hello"}
end
test "list_item + default value" do
defmodule Def29Operation do
use Exop.Operation
parameter :list_param, list_item: %{type: :string, length: %{min: 7}}, default: ["1234567", "7chars"]
def process(params), do: {:ok, params[:list_param]}
end
assert Def29Operation.run() == {:error, {:validation, %{"list_param[1]" => ["length must be greater than or equal to 7"]}}}
end
test "list_item + coerce_with" do
defmodule Def30Operation do
use Exop.Operation
parameter :list_param, list_item: [type: :string, length: %{min: 7}], coerce_with: &__MODULE__.make_list/1
def process(params), do: {:ok, params[:list_param]}
def make_list(_), do: ["1234567", "7chars"]
end
assert Def30Operation.run() == {:error, {:validation, %{"list_param[1]" => ["length must be greater than or equal to 7"]}}}
end
test "string-named parameters are allowed" do
defmodule Def31Operation do
use Exop.Operation
parameter "a", type: :string, required: true
parameter "b", type: :integer, required: true
def process(params), do: {:ok, params}
end
assert Def31Operation.run() == {:error, {:validation, %{"a" => ["is required"], "b" => ["is required"]}}}
assert Def31Operation.run(%{"a" => 1, "b" => "2"}) == {:error, {:validation, %{"a" => ["has wrong type"], "b" => ["has wrong type"]}}}
assert Def31Operation.run(%{"a" => "1", b: 2}) == {:error, {:validation, %{"b" => ["is required"]}}}
assert Def31Operation.run(%{"a" => "1", "b" => 2}) == {:ok, %{"a" => "1", "b" => 2}}
end
test "mix-named parameters are allowed" do
defmodule Def32Operation do
use Exop.Operation
parameter "a", type: :string, required: true
parameter :b, type: :integer, required: true
def process(params), do: {:ok, params}
end
assert Def32Operation.run() == {:error, {:validation, %{"a" => ["is required"], :b => ["is required"]}}}
assert Def32Operation.run(%{"a" => 1, b: "2"}) == {:error, {:validation, %{"a" => ["has wrong type"], :b => ["has wrong type"]}}}
assert Def32Operation.run(%{"a" => "1"}) == {:error, {:validation, %{:b => ["is required"]}}}
assert Def32Operation.run(%{"a" => "1", b: 2}) == {:ok, %{"a" => "1", :b => 2}}
end
test "returns any-length error tuple" do
defmodule Def33Operation do
use Exop.Operation
parameter :a, type: :integer, required: true
def process(%{a: 1}), do: {:error}
def process(%{a: 2}), do: {:error, 2}
def process(%{a: 3}), do: {:error, 2, 3}
def process(%{a: 4}), do: {:error, 2, 3, 4}
def process(params), do: params
end
assert Def33Operation.run() == {:error, {:validation, %{a: ["is required"]}}}
assert Def33Operation.run(a: 1) == {:error}
assert Def33Operation.run(a: 2) == {:error, 2}
assert Def33Operation.run(a: 3) == {:error, 2, 3}
assert Def33Operation.run(a: 4) == {:error, 2, 3, 4}
assert Def33Operation.run(a: 777) == {:ok, %{a: 777}}
end
test "coerce_with can invoke a function with arity == 2 (passing param_name & param_value)" do
defmodule Def34Operation do
use Exop.Operation
parameter :a, type: :tuple, coerce_with: &__MODULE__.coerce_with_name/2
def process(params), do: params
def coerce_with_name(param_name, param_value) do
{List.duplicate(param_name, 2), param_value * 10}
end
end
assert Def34Operation.run(a: 1.1) == {:ok, %{a: {[:a, :a], 11}}}
end
test "coerce_with respects an error-tuple result" do
defmodule Def35Operation do
use Exop.Operation
parameter :a, type: :integer, coerce_with: &__MODULE__.coerce/1
def process(params), do: params
def coerce(1), do: {:error, :some_error}
def coerce(2), do: 2
end
assert Def35Operation.run(a: 2) == {:ok, %{a: 2}}
assert Def35Operation.run(a: 1) == {:error, :some_error}
end
describe "allow_nil options" do
test "allows to have nil as parameter value" do
defmodule Def36Operation do
use Exop.Operation
parameter :a, type: :integer, allow_nil: true, required: false
parameter :b, type: :integer, allow_nil: false, required: false
def process(params), do: params
end
assert Def36Operation.run(a: 1) == {:ok, %{a: 1}}
assert Def36Operation.run(a: nil) == {:ok, %{a: nil}}
assert Def36Operation.run(b: 1) == {:ok, %{b: 1}}
assert Def36Operation.run(b: nil) == {:error, {:validation, %{b: ["doesn't allow nil", "has wrong type"]}}}
end
test "skips all checks" do
defmodule Def37Operation do
use Exop.Operation
parameter :a, type: :integer, numericality: [greater_than: 2], allow_nil: true, required: false
parameter :b, allow_nil: true, func: &__MODULE__.nil_check/2, required: false
def nil_check(_, nil), do: {:error, :this_is_nil}
def process(params), do: params
end
assert Def37Operation.run(a: nil) == {:ok, %{a: nil}}
assert Def37Operation.run(a: 1) == {:error, {:validation, %{a: ["must be greater than 2"]}}}
assert Def37Operation.run(a: "1") == {:error, {:validation, %{a: ["not a number", "has wrong type"]}}}
assert Def37Operation.run(b: nil) == {:ok, %{b: nil}}
end
test "required: false + allow_nil: false" do
defmodule Def45Operation do
use Exop.Operation
parameter :a, required: false, allow_nil: false
def process(params), do: params
end
assert Def45Operation.run(a: :a) == {:ok, %{a: :a}}
assert Def45Operation.run() == {:ok, %{}}
assert Def45Operation.run(a: nil) == {:error, {:validation, %{a: ["doesn't allow nil"]}}}
end
end
describe "when parameter is required" do
test "all parameters are required by default" do
defmodule Def38Operation do
use Exop.Operation
parameter :a, type: :integer
def process(params), do: params
end
assert Def38Operation.run(a: nil) == {:error, {:validation, %{a: ["has wrong type"]}}}
assert Def38Operation.run() == {:error, {:validation, %{a: ["is required"]}}}
end
test "with allow_nil" do
defmodule Def39Operation do
use Exop.Operation, name_in_errors: true
parameter :a, type: :integer, allow_nil: true
def process(params), do: params
end
assert Def39Operation.run(a: nil) == {:ok, %{a: nil}}
assert Def39Operation.run() == {:error, {:validation, %{a: ["is required"]}}}
end
test "with default" do
defmodule Def40Operation do
use Exop.Operation
parameter :a, type: :integer, default: 7
def process(params), do: params
end
assert Def40Operation.run(a: nil) == {:error, {:validation, %{a: ["has wrong type"]}}}
assert Def40Operation.run() == {:ok, %{a: 7}}
end
end
describe "when parameter is not required" do
test "should be not required explicitly" do
defmodule Def41Operation do
use Exop.Operation
parameter :a, type: :integer, required: false
def process(params), do: params
end
assert Def41Operation.run(a: nil) == {:error, {:validation, %{a: ["has wrong type"]}}}
assert Def41Operation.run() == {:ok, %{}}
end
test "with allow_nil" do
defmodule Def42Operation do
use Exop.Operation
parameter :a, type: :integer, required: false, allow_nil: true
def process(params), do: params
end
assert Def42Operation.run(a: nil) == {:ok, %{a: nil}}
assert Def42Operation.run() == {:ok, %{}}
end
test "with default" do
defmodule Def43Operation do
use Exop.Operation
parameter :a, type: :integer, required: false, default: 7
def process(params), do: params
end
assert Def43Operation.run(a: nil) == {:error, {:validation, %{a: ["has wrong type"]}}}
assert Def43Operation.run() == {:ok, %{a: 7}}
end
end
test ":inner check validates a parameter type" do
defmodule Def44Operation do
use Exop.Operation
parameter :a, inner: %{b: [type: :atom], c: [type: :string]}
def process(params), do: params
end
assert Def44Operation.run(a: :a) == {:error, {:validation, %{a: ["has wrong type"]}}}
assert Def44Operation.run(a: []) == {:error, {:validation, %{"a[:b]" => ["is required"], "a[:c]" => ["is required"]}}}
assert Def44Operation.run(a: %{}) == {:error, {:validation, %{"a[:b]" => ["is required"], "a[:c]" => ["is required"]}}}
assert Def44Operation.run(a: [b: :b, c: "c"]) == {:ok, %{a: [b: :b, c: "c"]}}
assert Def44Operation.run(a: %{b: :b, c: "c"}) == {:ok, %{a: %{b: :b, c: "c"}}}
end
test ":inner check accepts opts as both map and keyword" do
defmodule Def46Operation do
use Exop.Operation
parameter :a, inner: [b: [type: :atom], c: [type: :string]]
def process(params), do: params
end
defmodule Def47Operation do
use Exop.Operation
parameter :a, inner: %{b: [type: :atom], c: [type: :string]}
def process(params), do: params
end
assert Def46Operation.run(a: :a) == {:error, {:validation, %{a: ["has wrong type"]}}}
assert Def46Operation.run(a: []) == {:error, {:validation, %{"a[:b]" => ["is required"], "a[:c]" => ["is required"]}}}
assert Def46Operation.run(a: %{}) == {:error, {:validation, %{"a[:b]" => ["is required"], "a[:c]" => ["is required"]}}}
assert Def46Operation.run(a: [b: :b, c: "c"]) == {:ok, %{a: [b: :b, c: "c"]}}
assert Def46Operation.run(a: %{b: :b, c: "c"}) == {:ok, %{a: %{b: :b, c: "c"}}}
assert Def47Operation.run(a: :a) == {:error, {:validation, %{a: ["has wrong type"]}}}
assert Def47Operation.run(a: []) == {:error, {:validation, %{"a[:b]" => ["is required"], "a[:c]" => ["is required"]}}}
assert Def47Operation.run(a: %{}) == {:error, {:validation, %{"a[:b]" => ["is required"], "a[:c]" => ["is required"]}}}
assert Def47Operation.run(a: [b: :b, c: "c"]) == {:ok, %{a: [b: :b, c: "c"]}}
assert Def47Operation.run(a: %{b: :b, c: "c"}) == {:ok, %{a: %{b: :b, c: "c"}}}
end
end
| 29.280605 | 138 | 0.614413 |
f7ee13a84c0660b920fd2106baee0b9206b50180 | 663 | ex | Elixir | apps/alerts/lib/banner.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/alerts/lib/banner.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/alerts/lib/banner.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule Alerts.Banner do
alias Alerts.Alert
alias Alerts.InformedEntitySet
defstruct id: "",
title: "",
url: nil,
effect: :unknown,
severity: 5,
informed_entity_set: %InformedEntitySet{}
@type individual_route_type :: 1..10
@type route_type_list :: [individual_route_type]
@type route_type :: individual_route_type | route_type_list
@type t :: %__MODULE__{
id: String.t(),
title: String.t(),
url: String.t() | nil,
effect: Alert.effect(),
severity: Alert.severity(),
informed_entity_set: InformedEntitySet.t()
}
end
| 26.52 | 61 | 0.591252 |
f7ee1db611559870a4ba8e363fe62af5f8b35fd0 | 344 | exs | Elixir | priv/repo/seeds.exs | HoffsMH/slax | b91ee30b9fd71a4cb7826f50b605ce580b7c1651 | [
"MIT"
] | 11 | 2016-07-05T18:56:21.000Z | 2021-09-15T22:23:54.000Z | priv/repo/seeds.exs | HoffsMH/slax | b91ee30b9fd71a4cb7826f50b605ce580b7c1651 | [
"MIT"
] | 181 | 2016-06-23T00:47:13.000Z | 2022-03-10T11:23:44.000Z | priv/repo/seeds.exs | HoffsMH/slax | b91ee30b9fd71a4cb7826f50b605ce580b7c1651 | [
"MIT"
] | 7 | 2019-01-30T21:38:28.000Z | 2022-03-01T07:13:39.000Z | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Slax.Repo.insert!(%Slax.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 28.666667 | 61 | 0.700581 |
f7ee22045a72c35854b806a909aa44cc79d4f92a | 2,444 | ex | Elixir | lib/glimesh/payments/payable.ex | chriscuris/glimesh.tv | e1a78aa763ea5599d045fdfa65237b94fbb28a23 | [
"MIT"
] | null | null | null | lib/glimesh/payments/payable.ex | chriscuris/glimesh.tv | e1a78aa763ea5599d045fdfa65237b94fbb28a23 | [
"MIT"
] | null | null | null | lib/glimesh/payments/payable.ex | chriscuris/glimesh.tv | e1a78aa763ea5599d045fdfa65237b94fbb28a23 | [
"MIT"
] | null | null | null | defmodule Glimesh.Payments.Payable do
@moduledoc """
Generic storage for anytime we're responsible to pay a user.
Designed so we can accept Stripe Checkout Sessions, Stripe Invoices, and in the long-term future Paypal transactions.
"""
use Ecto.Schema
import Ecto.Changeset
schema "payables" do
field :type, :string
belongs_to :user, Glimesh.Accounts.User
belongs_to :streamer, Glimesh.Accounts.User
# Generic fields to allow for any payable configuration
field :external_source, :string
field :external_reference, :string
field :status, :string
# These fields are calculated by us
# Cents of course...
field :total_amount, :integer
field :external_fees, :integer
field :our_fees, :integer
field :withholding_amount, :integer
field :payout_amount, :integer
# These fields are what actually happened
field :user_paid_at, :naive_datetime
field :streamer_payout_at, :naive_datetime
field :streamer_payout_amount, :integer
field :stripe_transfer_id, :string
timestamps()
end
@doc false
def create_changeset(payable, attrs) do
payable
|> cast(attrs, [
:type,
:external_source,
:external_reference,
:status,
:total_amount,
:external_fees,
:our_fees,
:withholding_amount,
:payout_amount,
:user_paid_at,
:streamer_payout_at,
:streamer_payout_amount,
:stripe_transfer_id
])
|> validate_inclusion(:type, ["donation", "subscription"])
|> validate_inclusion(:external_source, ["stripe"])
|> validate_inclusion(:status, ["created", "paid", "paidout"])
|> put_assoc(:user, attrs.user)
|> maybe_put_assoc(:streamer, Map.get(attrs, :streamer, nil))
|> validate_required([
:type,
:external_source,
:external_reference,
:status,
:total_amount
])
|> unique_constraint(:external_source_reference, name: :external_source_reference)
end
def update_changeset(payable, attrs) do
payable
|> cast(attrs, [
:status,
:total_amount,
:external_fees,
:our_fees,
:withholding_amount,
:payout_amount,
:user_paid_at,
:streamer_payout_at,
:streamer_payout_amount,
:stripe_transfer_id
])
end
def maybe_put_assoc(changeset, key, value) do
if value do
changeset |> put_assoc(key, value)
else
changeset
end
end
end
| 25.726316 | 119 | 0.670213 |
f7ee2712e249bf5e66fd885fc9f04a48388d5427 | 751 | ex | Elixir | test/fixtures/gen_stage_persistent_handler.ex | NFIBrokerage/volley | 592dc5cbf7cbcfb8648f779fea433c721f3893c0 | [
"Apache-2.0"
] | 3 | 2021-05-03T16:33:37.000Z | 2021-09-12T08:03:23.000Z | test/fixtures/gen_stage_persistent_handler.ex | NFIBrokerage/volley | 592dc5cbf7cbcfb8648f779fea433c721f3893c0 | [
"Apache-2.0"
] | 1 | 2021-09-08T15:18:58.000Z | 2021-09-08T15:19:50.000Z | test/fixtures/gen_stage_persistent_handler.ex | NFIBrokerage/volley | 592dc5cbf7cbcfb8648f779fea433c721f3893c0 | [
"Apache-2.0"
] | null | null | null | defmodule Volley.GenStagePersistentHandler do
@moduledoc """
A test fixture for consuming persistent subscriptions as a GenStage instead
of a Broadway topology
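  A minimal start-up sketch (`:producer` names the stage to subscribe to and
  `:test_proc` the process that should receive the events, matching how
  `init/1` and `handle_events/3` below read their options):
      Volley.GenStagePersistentHandler.start_link(
        producer: some_producer,
        test_proc: self()
      )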
"""
use GenStage
def start_link(opts) do
GenStage.start_link(__MODULE__, opts)
end
@impl GenStage
def init(opts) do
producer = Keyword.fetch!(opts, :producer)
{:consumer, Map.new(opts), subscribe_to: [producer]}
end
@impl GenStage
def handle_events(
[%Spear.Event{metadata: %{subscription: subscription}} | _] = events,
_from,
state
) do
send(state.test_proc, events)
Spear.ack(
subscription.connection,
subscription.ref,
events |> Enum.map(& &1.id)
)
{:noreply, [], state}
end
end
| 20.297297 | 77 | 0.652463 |
f7ee41de33669f468faab88296116629f1f7ab7a | 191 | ex | Elixir | lib/binance/futures/schemas/bid.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | [
"MIT"
] | null | null | null | lib/binance/futures/schemas/bid.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | [
"MIT"
] | null | null | null | lib/binance/futures/schemas/bid.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | [
"MIT"
] | 1 | 2021-02-22T22:56:22.000Z | 2021-02-22T22:56:22.000Z | defmodule Binance.Futures.Schemas.Bid do
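  @moduledoc """
  A single order book bid, built from a `[price, quantity]` pair.
  """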
defstruct [:price, :quantity]
def new([price, quantity] = _bid) do
%__MODULE__{
price: price,
quantity: quantity
}
end
end
| 17.363636 | 40 | 0.649215 |
f7ee7d74cfcddff5e737996f1e08b2c9b7489e05 | 9,776 | ex | Elixir | lib/mix/lib/mix.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | 1 | 2018-08-08T12:15:48.000Z | 2018-08-08T12:15:48.000Z | lib/mix/lib/mix.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/mix/lib/mix.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | defmodule Mix do
@moduledoc ~S"""
Mix is a build tool that provides tasks for creating, compiling,
and testing Elixir projects, managing its dependencies, and more.
## Mix.Project
The foundation of Mix is a project. A project can be defined by using
`Mix.Project` in a module, usually placed in a file named `mix.exs`:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0"
]
end
end
See the `Mix.Project` module for detailed documentation on Mix projects.
Once the project is defined, a number of default Mix tasks can be run
directly from the command line:
* `mix compile` - compiles the current project
* `mix test` - runs tests for the given project
* `mix run` - runs a particular command inside the project
Each task has its own options and sometimes specific configuration
to be defined in the `project/0` function. You can use `mix help`
to list all available tasks and `mix help NAME` to show help for
a particular task.
The best way to get started with your first project is by calling
`mix new my_project` from the command line.
## Mix.Task
Tasks are what make Mix extensible.
Projects can extend Mix behaviour by adding their own tasks. For
example, adding the task below inside your project will
make it available to everyone that uses your project:
defmodule Mix.Tasks.Hello do
use Mix.Task
def run(_) do
Mix.shell().info("hello")
end
end
The task can now be invoked with `mix hello`.
## Dependencies
Mix also manages your dependencies and integrates nicely with the [Hex package
manager](https://hex.pm).
In order to use dependencies, you need to add a `:deps` key
to your project configuration. We often extract the list of dependencies
into its own function:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
deps: deps()
]
end
defp deps do
[
{:ecto, "~> 2.0"},
{:plug, github: "elixir-lang/plug"}
]
end
end
You can run `mix help deps` to learn more about dependencies in Mix.
## Environments
Mix supports different environments. Environments allow developers to prepare
and organize their project specifically for different scenarios. By default,
Mix provides three environments:
* `:dev` - the default environment
* `:test` - the environment `mix test` runs on
* `:prod` - the environment your dependencies run on
The environment can be changed via the command line by setting
the `MIX_ENV` environment variable, for example:
$ MIX_ENV=prod mix run server.exs
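  Inside `mix.exs`, the current environment can be read with `Mix.env/0`,
  for example to restrict a dependency to a given environment:
      {:ex_doc, "~> 0.19", only: :dev}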
## Aliases
Aliases are shortcuts or tasks specific to the current project.
In the `Mix.Task` section, we have defined a task that would be
available to everyone using our project as a dependency. What if
we wanted the task to only be available for our project? Just
define an alias:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
aliases: aliases()
]
end
defp aliases do
[
c: "compile",
hello: &hello/1
]
end
defp hello(_) do
Mix.shell().info("Hello world")
end
end
In the example above, we have defined two aliases. One is `mix c`
which is a shortcut for `mix compile`. The other is named
`mix hello`, which is the equivalent to the `Mix.Tasks.Hello`
we have defined in the `Mix.Task` section.
Aliases may also be lists, specifying multiple tasks to be run
consecutively:
[all: [&hello/1, "deps.get --only #{Mix.env()}", "compile"]]
In the example above, we have defined an alias named `mix all`,
that prints hello, then fetches dependencies specific to the
current environment and compiles the project.
Arguments given to the alias will be appended to the arguments
of the last task in the list, if the last task is a function
they will be given as a list of strings to the function.
Finally, aliases can also be used to augment existing tasks.
Let's suppose you want to augment `mix clean` to clean another
directory Mix does not know about:
[clean: ["clean", &clean_extra/1]]
Where `&clean_extra/1` would be a function in your `mix.exs`
with extra cleanup logic.
Aliases defined in the current project do not affect its dependencies and
aliases defined in dependencies are not accessible from the current project.
Aliases can be used very powerfully to also run Elixir scripts and
bash commands, for example:
# priv/hello.exs
IO.puts("hello")
# priv/world.sh
#!/bin/sh
echo "world!"
# mix.exs
defp create_aliases do
[
"taskalias": ["hex.info", "run priv/hello.exs", "cmd priv/world.sh"],
"taskalias2": ["run priv/hello1.exs", "run priv/hello2.exs"]
]
end
  In the example above we have created two aliases. The first, `taskalias`,
  runs the `hex.info` task, then [`run`](`Mix.Tasks.Run`) to execute an
  Elixir script, and finally [`cmd`](`Mix.Tasks.Cmd`) to run a command line
  bash script. This shows how powerful aliases mixed with Mix tasks can be.
  `taskalias2` shows a limitation: a Mix task is run only once per
  invocation by default, so the second `run` entry is a no-op and
  `priv/hello2.exs` will not be executed.
  The `run` task, however, can accept multiple files, so the alias can be
  rewritten as:
"taskalias2": ["run -r priv/hello1.exs -r priv/hello2.exs"]
## Environment variables
Several environment variables can be used to modify Mix's behaviour.
Mix responds to the following variables:
* `MIX_ARCHIVES` - specifies the directory into which the archives should be installed
* `MIX_BUILD_PATH` - sets the project build_path config
* `MIX_DEBUG` - outputs debug information about each task before running it
* `MIX_ENV` - specifies which environment should be used. See [Environments](#module-environments)
* `MIX_EXS` - changes the full path to the `mix.exs` file
* `MIX_HOME` - path to Mix's home directory, stores configuration files and scripts used by Mix
* `MIX_PATH` - appends extra code paths
* `MIX_QUIET` - does not print information messages to the terminal
* `MIX_REBAR` - path to rebar command that overrides the one Mix installs
* `MIX_REBAR3` - path to rebar3 command that overrides the one Mix installs
Environment variables that are not meant to hold a value (and act basically as
flags) should be set to either `1` or `true`, for example:
$ MIX_DEBUG=1 mix compile
"""
use Application
@doc false
def start do
{:ok, _} = Application.ensure_all_started(:mix)
:ok
end
@doc false
def start(_type, []) do
children = [Mix.State, Mix.TasksServer, Mix.ProjectStack]
opts = [strategy: :one_for_one, name: Mix.Supervisor, max_restarts: 0]
Supervisor.start_link(children, opts)
end
@doc """
Returns the Mix environment.
This function should not be used at runtime in application code (as opposed
to infrastructure and build code like Mix tasks). Mix is a build tool and may
not be available after the code is compiled (for example in a release).
To differentiate the program behavior depending on the environment, it is
recommended to use application environment through `Application.get_env/3`.
Proper configuration can be set in `Mix.Config` files, often per-environment
(see `Mix.Config.import_config/1` for more information).
"""
def env do
# env is not available on bootstrapping, so set a :dev default
Mix.State.get(:env, :dev)
end
@doc """
Changes the current Mix environment to `env`.
Be careful when invoking this function as any project
configuration won't be reloaded.
This function should not be used at runtime in application code
(see `env/0` for more information).
"""
def env(env) when is_atom(env) do
Mix.State.put(:env, env)
end
@doc """
Returns the default compilers used by Mix.
It can be used in your `mix.exs` to prepend or
append new compilers to Mix:
def project do
[compilers: Mix.compilers() ++ [:foo, :bar]]
end
"""
def compilers do
[:yecc, :leex, :erlang, :elixir, :xref, :app]
end
@doc """
Returns the current shell.
`shell/0` can be used as a wrapper for the current shell. It contains
conveniences for requesting information from the user, printing to the shell and so
forth. The Mix shell is swappable (see `shell/1`), allowing developers to use
a test shell that simply sends messages to the current process instead of
performing IO (see `Mix.Shell.Process`).
By default, this returns `Mix.Shell.IO`.
"""
def shell do
Mix.State.get(:shell, Mix.Shell.IO)
end
@doc """
Sets the current shell.
After calling this function, `shell` becomes the shell that is returned by
`shell/0`.
"""
def shell(shell) do
Mix.State.put(:shell, shell)
end
@doc """
Returns `true` if Mix is in debug mode.
"""
def debug? do
Mix.State.get(:debug, false)
end
@doc """
Sets Mix debug mode.
"""
def debug(debug) when is_boolean(debug) do
Mix.State.put(:debug, debug)
end
@doc """
Raises a Mix error that is nicely formatted.
"""
@spec raise(binary) :: no_return
def raise(message) when is_binary(message) do
Kernel.raise(Mix.Error, mix: true, message: message)
end
end
| 29.98773 | 102 | 0.674611 |
f7eea9c7777ee2a400f45f98f06589b0f1c01e42 | 1,791 | ex | Elixir | lib/couchdb_adapter/tesla/middleware/couchdb_oauth.ex | sbezugliy/couchdb_adapter | 01304052185405a6420d502c8f25164ced77f493 | [
"Apache-2.0"
] | 2 | 2017-06-14T19:59:03.000Z | 2017-06-26T04:41:56.000Z | lib/couchdb_adapter/tesla/middleware/couchdb_oauth.ex | sbezugliy/couchdb_adapter | 01304052185405a6420d502c8f25164ced77f493 | [
"Apache-2.0"
] | null | null | null | lib/couchdb_adapter/tesla/middleware/couchdb_oauth.ex | sbezugliy/couchdb_adapter | 01304052185405a6420d502c8f25164ced77f493 | [
"Apache-2.0"
] | null | null | null | defmodule Tesla.Middleware.CouchDB.OAuth do
@moduledoc """
Basic authentication middleware
[Wiki on the topic](https://en.wikipedia.org/wiki/Basic_access_authentication)
Example:
defmodule MyClient do
use Tesla
def client(username, password, opts \\ %{}) do
Tesla.build_client [
{Tesla.Middleware.BasicAuth, Map.merge(%{username: username, password: password}, opts)}
]
end
end
Options:
- `:username` - username (defaults to `""`)
- `:password` - password (defaults to `""`)
"""
def call(env, next, opts) do
opts = opts || %{}
IO.puts inspect opts
env_t = env
|> Map.update!(:url, &(opts[:url] <> &1))
{raw_header, params} = authorization_header(env_t, opts)
header = raw_header |> create_header
#ToDo: should check if params need formatting in future
#params = raw_params |> create_options
env_t1 = env_t
|> Map.update!(:headers, &Map.merge(&1, header))
|> Map.update!(:query, &Enum.concat(&1, params))
IO.inspect env_t1
env_t1
|> Tesla.run(next)
end
defp authorization_header(env, opts) do
opts
|> oauth_credentials()
|> sign(env)
end
defp oauth_credentials(opts) do
OAuther.credentials(
consumer_key: opts[:consumer_key],
consumer_secret: opts[:consumer_secret],
token: opts[:token],
token_secret: opts[:token_secret])
end
defp sign(oauth_credentials, env) do
params = OAuther.sign(to_string(Map.get(env, :method)), Map.get(env, :url), Map.get(env, :opts), oauth_credentials)
OAuther.header(params)
end
def create_header(header) do
{name, value} = header
%{"#{name}"=>"#{value}"}
end
def create_options(params) do
params
|> Enum.into(%{})
end
end
| 25.585714 | 119 | 0.632607 |
f7eec63c8dd9afc8419e34656eb568fb200b3d9c | 576 | exs | Elixir | mix.exs | lukyanov/broadway_ecto_job | 686db1cd97d7fb88ca9dc8a0c4e050390286598c | [
"MIT"
] | null | null | null | mix.exs | lukyanov/broadway_ecto_job | 686db1cd97d7fb88ca9dc8a0c4e050390286598c | [
"MIT"
] | null | null | null | mix.exs | lukyanov/broadway_ecto_job | 686db1cd97d7fb88ca9dc8a0c4e050390286598c | [
"MIT"
] | null | null | null | defmodule BroadwayEctoJob.Producer.MixProject do
use Mix.Project
def project do
[
app: :broadway_ecto_job,
version: "0.1.0",
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:broadway, "~> 0.6.0"},
{:gen_stage, "~> 1.0", override: true},
{:ecto_job, "~> 3.0"}
]
end
end
| 19.2 | 59 | 0.569444 |
f7eee7a6efe14cc9c12c7098f1a8ff1147641b65 | 6,263 | ex | Elixir | lib/lambdapad/generate.ex | Coruja-Digital/lambdapad | 28a171f1218396ef4ab1837584de79f2054665c8 | [
"Apache-2.0"
] | null | null | null | lib/lambdapad/generate.ex | Coruja-Digital/lambdapad | 28a171f1218396ef4ab1837584de79f2054665c8 | [
"Apache-2.0"
] | null | null | null | lib/lambdapad/generate.ex | Coruja-Digital/lambdapad | 28a171f1218396ef4ab1837584de79f2054665c8 | [
"Apache-2.0"
] | null | null | null | defmodule Lambdapad.Generate do
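  @moduledoc """
  Helpers used while generating a site: resolving page URIs from template
  strings or functions, preparing template variables, building output file
  paths, and chaining the transform functions declared for items, pages and
  the config.
  """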
alias Lambdapad.Config
def resolve_uri(config, name, funct_or_uri, vars, index \\ nil)
def resolve_uri(_config, _name, funct, vars, index) when is_function(funct) do
funct.(index, vars)
end
def resolve_uri(config, name, uri, vars, _index) when is_binary(uri) do
uri_mod = Module.concat([__MODULE__, URI, name])
unless function_exported?(uri_mod, :render, 1) do
{:ok, _uri_mod} = :erlydtl.compile_template(uri, uri_mod)
end
{:ok, iodata_uri} = uri_mod.render(vars)
Path.join([config["blog"]["url"], IO.iodata_to_binary(iodata_uri)])
end
def process_vars(page_data, data, index \\ nil)
def process_vars(page_data, data, nil) do
case page_data[:var_name] do
:plain ->
Config.to_proplist(data)
var_name when is_binary(var_name) ->
[{var_name, Config.to_proplist(data)}]
end
end
def process_vars(page_data, data, index) do
case page_data[:var_name] do
:plain ->
[{"index", index} | Config.to_proplist(data)]
var_name when is_binary(var_name) ->
[{"index", index}, {var_name, Config.to_proplist(data)}]
end
end
def build_file_abspath(output_dir, url, :dir) do
url_data = URI.parse(url)
abs_path = Path.absname(Path.join([output_dir, url_data.path || "/"]))
:ok = File.mkdir_p!(abs_path)
Path.join([abs_path, "index.html"])
end
def build_file_abspath(output_dir, url, :file) do
url_data = URI.parse(url)
abs_path = Path.absname(Path.join([output_dir, url_data.path]))
dir_path = Path.dirname(abs_path)
:ok = File.mkdir_p!(dir_path)
abs_path
end
def resolve_transforms_on_item(mod, %{transform_on_item: trans_items}) when is_list(trans_items) do
trans_items
|> Enum.reverse()
|> Enum.reduce(fn posts, _config -> posts end, fn
(trans_item, chained_fun) when is_binary(trans_item) ->
case mod.transform(trans_item) do
%{on: :item, run: trans_function} ->
fn posts, config ->
trans_function.(posts, config)
|> chained_fun.(config)
end
%{on: other} when other in [:page, :config] ->
raise "transforms config, page and item cannot be swapped"
error ->
raise "transform #{inspect(trans_item)} unknown: #{inspect(error)}"
end
(trans_item, chained_fun) when is_function(trans_item) ->
fn posts, config ->
trans_item.(posts, config)
|> chained_fun.(config)
end
end)
end
def resolve_transforms_on_item(mod, %{transform_on_item: trans_items}) when is_binary(trans_items) do
case mod.transform(trans_items) do
%{on: :item, run: trans_function} ->
trans_function
%{on: other} when other in [:page, :config] ->
raise "transforms config, page and item cannot be swapped"
error ->
raise "transform #{inspect(trans_items)} unknown: #{inspect(error)}"
end
end
def resolve_transforms_on_item(_mod, %{transform_on_item: trans_items}) when is_function(trans_items) do
trans_items
end
def resolve_transforms_on_item(_mod, %{}), do: nil
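  # Note: named transforms are looked up through the project module's
  # transform/1, which is expected to return a map shaped like
  # %{on: :item | :page | :config, run: fun}; when a list is given, the
  # entries are chained in the order they were declared.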
def resolve_transforms_on_page(mod, %{transform_on_page: trans_page}) when is_list(trans_page) do
trans_page
|> Enum.reverse()
|> Enum.reduce(fn posts, _config -> posts end, fn
(trans_page, chained_fun) when is_binary(trans_page) ->
case mod.transform(trans_page) do
%{on: :page, run: trans_function} ->
fn posts, config ->
trans_function.(posts, config)
|> chained_fun.(config)
end
%{on: other} when other in [:item, :config] ->
raise "transforms config, page and item cannot be swapped"
error ->
raise "transform #{inspect(trans_page)} unknown: #{inspect(error)}"
end
(trans_page, chained_fun) when is_function(trans_page) ->
fn posts, config ->
trans_page.(posts, config)
|> chained_fun.(config)
end
end)
end
def resolve_transforms_on_page(mod, %{transform_on_page: trans_page}) when is_binary(trans_page) do
case mod.transform(trans_page) do
%{on: :page, run: trans_function} ->
trans_function
%{on: other} when other in [:item, :config] ->
raise "transforms config, page and item cannot be swapped"
error ->
raise "transform #{inspect(trans_page)} unknown: #{inspect(error)}"
end
end
def resolve_transforms_on_page(_mod, %{transform_on_page: trans_page}) when is_function(trans_page) do
trans_page
end
def resolve_transforms_on_page(_mod, %{}), do: nil
def resolve_transforms_on_config(mod, %{transform_on_config: trans_config}) when is_list(trans_config) do
trans_config
|> Enum.reverse()
|> Enum.reduce(fn config, _posts -> config end, fn
(trans_config, chained_fun) when is_binary(trans_config) ->
case mod.transform(trans_config) do
%{on: :config, run: trans_function} ->
fn config, posts ->
trans_function.(config, posts)
|> chained_fun.(posts)
end
%{on: other} when other in [:item, :page] ->
raise "transforms config, page and item cannot be swapped"
error ->
raise "transform #{inspect(trans_config)} unknown: #{inspect(error)}"
end
(trans_config, chained_fun) when is_function(trans_config) ->
fn config, posts ->
trans_config.(config, posts)
|> chained_fun.(posts)
end
end)
end
def resolve_transforms_on_config(mod, %{transform_on_config: trans_config}) when is_binary(trans_config) do
case mod.transform(trans_config) do
%{on: :config, run: trans_function} ->
trans_function
%{on: other} when other in [:item, :page] ->
raise "transforms config, page and item cannot be swapped"
error ->
raise "transform #{inspect(trans_config)} unknown: #{inspect(error)}"
end
end
def resolve_transforms_on_config(_mod, %{transform_on_config: trans_config}) when is_function(trans_config) do
trans_config
end
def resolve_transforms_on_config(_mod, %{}), do: nil
end
| 33.854054 | 112 | 0.642025 |
f7eef72b02415b984069e2ba356fb7855b3051d9 | 97 | ex | Elixir | apps/api_web/lib/api_web/controllers/exceptions.ex | lboyarsky/api | 7ecad79704d13ae6fa7f21d21bc47836c703ebf9 | [
"MIT"
] | null | null | null | apps/api_web/lib/api_web/controllers/exceptions.ex | lboyarsky/api | 7ecad79704d13ae6fa7f21d21bc47836c703ebf9 | [
"MIT"
] | null | null | null | apps/api_web/lib/api_web/controllers/exceptions.ex | lboyarsky/api | 7ecad79704d13ae6fa7f21d21bc47836c703ebf9 | [
"MIT"
] | 1 | 2019-09-09T20:40:13.000Z | 2019-09-09T20:40:13.000Z | defimpl Plug.Exception, for: ApiAccounts.NoResultsError do
def status(_expection), do: 404
end
| 24.25 | 58 | 0.793814 |
f7ef147dd0b863a607f74dc244131754a44628be | 692 | ex | Elixir | lib/wabanex/users/get.ex | daniel-leal/wabanex | c0666de8f663c59ff3861331db5331e143d94b41 | [
"RSA-MD"
] | null | null | null | lib/wabanex/users/get.ex | daniel-leal/wabanex | c0666de8f663c59ff3861331db5331e143d94b41 | [
"RSA-MD"
] | null | null | null | lib/wabanex/users/get.ex | daniel-leal/wabanex | c0666de8f663c59ff3861331db5331e143d94b41 | [
"RSA-MD"
] | null | null | null | defmodule Wabanex.Users.Get do
import Ecto.Query
alias Ecto.UUID
alias Wabanex.{Repo, Training, User}
def call(id) do
id
|> UUID.cast()
|> handle_response()
end
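  # Illustrative results (the UUID below is a placeholder, not a real record):
  #
  #     Wabanex.Users.Get.call("not-a-uuid")
  #     #=> {:error, "Invalid UUID"}
  #
  #     Wabanex.Users.Get.call("5a0f0e0e-0000-0000-0000-000000000000")
  #     #=> {:ok, %Wabanex.User{...}} or {:error, "User not found"}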
defp handle_response(:error) do
{:error, "Invalid UUID"}
end
defp handle_response({:ok, uuid}) do
case Repo.get(User, uuid) do
nil -> {:error, "User not found"}
user -> {:ok, load_training(user)}
end
end
  # Preload only the training whose date range includes today (first/2 keeps
  # the earliest inserted match), along with its exercises.
  defp load_training(user) do
today = Date.utc_today()
query =
from training in Training,
where: ^today >= training.start_date and ^today <= training.end_date
Repo.preload(user, trainings: {first(query, :inserted_at), :exercises})
end
end
| 20.969697 | 76 | 0.640173 |
f7ef3d8f39b59c1e78494003a904e5925289d821 | 11,965 | exs | Elixir | test/ueberauth_adfs/adfs_test.exs | fatboypunk/ueberauth_adfs | 8ab4a4b604370599a48dd10fbc10a2546a5caa97 | [
"MIT"
] | null | null | null | test/ueberauth_adfs/adfs_test.exs | fatboypunk/ueberauth_adfs | 8ab4a4b604370599a48dd10fbc10a2546a5caa97 | [
"MIT"
] | null | null | null | test/ueberauth_adfs/adfs_test.exs | fatboypunk/ueberauth_adfs | 8ab4a4b604370599a48dd10fbc10a2546a5caa97 | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.ADFSTest do
use ExUnit.Case
use Ueberauth.Strategy
# Load ADFS Test Handler
Code.load_file("test/ueberauth_adfs/adfs_test_handler.exs")
import Mock
alias Ueberauth.Strategy.ADFS
@env_values adfs_url: "https://example.com",
adfs_metadata_url: "https://example.com/metadata.xml",
client_id: "example_client",
resource_identifier: "example_resource"
@env_handler_values adfs_url: "https://example.com",
adfs_metadata_url: "https://example.com/metadata.xml",
adfs_handler: Ueberauth.Strategy.ADFSTestHandler,
client_id: "example_client",
resource_identifier: "example_resource"
@mock_metadata {:ok,
%HTTPoison.Response{
body:
"<EntityDescriptor><ds:Signature><KeyInfo>" <>
"<X509Data><X509Certificate>1234</X509Certificate></X509Data>" <>
"</KeyInfo></ds:Signature></EntityDescriptor>"
}}
@user_claim %Joken.Token{
claims: %{
"email" => "[email protected]",
"given_name" => "John",
"family_name" => "Doe",
"winaccountname" => "john1"
}
}
describe "ADFS Strategy" do
setup_with_mocks [
{ADFS.OAuth, [:passthrough],
[get_token: fn code, _ -> {:ok, %{token: %{access_token: code}}} end]},
{Application, [:passthrough], [get_env: fn _, _ -> @env_values end]},
{HTTPoison, [:passthrough], [get: fn _, _, _ -> @mock_metadata end]},
{Joken, [:passthrough],
[token: fn _ -> nil end, with_signer: fn _, _ -> nil end, verify: fn _ -> @user_claim end]},
{JOSE.JWK, [:passthrough], [from_pem: fn _ -> %{foo: :bar} end]},
{Ueberauth.Strategy.Helpers, [:passthrough],
[
callback_url: fn _ -> "https://test.com" end,
options: fn _ -> [uid_field: "email"] end,
redirect!: fn _conn, auth_url -> auth_url end,
set_errors!: fn _conn, errors -> errors end
]}
] do
:ok
end
test "Handles the ADFS request" do
request = ADFS.handle_request!(%Plug.Conn{params: %{}})
assert request =~ "#{@env_values[:adfs_url]}/adfs/oauth2/authorize"
end
test "Redirects ADFS request to index when missing config" do
with_mock Application, [:passthrough], get_env: fn _, _ -> [] end do
assert ADFS.handle_request!(nil) == "/"
end
end
test "Handles the logout request" do
assert ADFS.logout(nil, nil) =~ "#{@env_values[:adfs_url]}/adfs/ls/?wa=wsignout1.0"
end
test "Gives an error upon logout request with missing config" do
with_mock Application, [:passthrough], get_env: fn _, _ -> nil end do
assert ADFS.logout(nil, nil) == [
%Ueberauth.Failure.Error{
message: "Failed to logout, please close your browser",
message_key: "Logout Failed"
}
]
end
end
test "Handle callback from ADFS provider, set claims user from JWT" do
conn = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})
assert conn.private.adfs_user == @user_claim.claims
end
test "Handle callback from ADFS provider when JWT is unauthorized" do
with_mock Joken,
[:passthrough],
token: fn _ -> nil end,
with_signer: fn _, _ -> nil end,
verify: fn _ -> nil end do
[error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})
assert error ==
%Ueberauth.Failure.Error{
message: "unauthorized",
message_key: "token"
}
end
end
test "Handle callback from ADFS provider when metadata is malformed" do
with_mock HTTPoison,
[:passthrough],
get: fn _, _, _ -> {:ok, %HTTPoison.Response{body: ""}} end do
[error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})
assert error == %Ueberauth.Failure.Error{message: "malformed", message_key: "metadata"}
end
end
test "Handle callback from ADFS provider when certificate is not found in metadata" do
with_mock HTTPoison, [:passthrough],
get: fn _, _, _ ->
{:ok, %HTTPoison.Response{body: "<EntityDescriptor></EntityDescriptor>"}}
end do
[error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})
assert error == %Ueberauth.Failure.Error{message: "not_found", message_key: "certificate"}
end
end
test "Handle callback from ADFS provider when metadata url is not found" do
with_mock HTTPoison,
[:passthrough],
get: fn _, _, _ -> {:error, %HTTPoison.Error{}} end do
[error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})
assert error == %Ueberauth.Failure.Error{
message: "not_found",
message_key: "metadata_url"
}
end
end
test "Handle callback from ADFS provider with token error" do
with_mock ADFS.OAuth,
[:passthrough],
get_token: fn _, _ -> {:error, %{reason: "token_error"}} end do
[error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})
assert error == %Ueberauth.Failure.Error{
message: "token_error",
message_key: "Authentication Error"
}
end
end
test "Handle callback from ADFS provider with OAuth2 error" do
with_mock ADFS.OAuth, [:passthrough],
get_token: fn _, _ ->
{:error, %OAuth2.Response{body: %{"error_description" => "oauth_error"}}}
end do
[error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})
assert error == %Ueberauth.Failure.Error{
message: "oauth_error",
message_key: "Authentication Error"
}
end
end
test "Handle callback from ADFS provider with error in the params" do
[error] =
ADFS.handle_callback!(%Plug.Conn{
params: %{"error" => "param_error", "error_description" => "param_error_description"}
})
assert error == %Ueberauth.Failure.Error{
message: "param_error_description",
message_key: "param_error"
}
end
test "Handle callback from ADFS provider with missing code" do
[error] = ADFS.handle_callback!(%Plug.Conn{})
assert error == %Ueberauth.Failure.Error{
message: "No code received",
message_key: "missing_code"
}
end
test "Handles cleanup of the private vars in the conn" do
conn =
%Plug.Conn{params: %{"code" => "1234"}}
|> ADFS.handle_callback!()
|> ADFS.handle_cleanup!()
assert conn.private.adfs_user == nil
assert conn.private.adfs_token == nil
assert conn.private.adfs_handler == nil
end
test "Gets the uid field from the conn" do
email =
%Plug.Conn{params: %{"code" => "1234"}}
|> ADFS.handle_callback!()
|> ADFS.uid()
assert email == "[email protected]"
end
test "Gets the token credentials from the conn" do
token =
%Plug.Conn{params: %{"code" => "1234"}}
|> ADFS.handle_callback!()
|> ADFS.credentials()
assert token == %Ueberauth.Auth.Credentials{}
end
test "Gets the user info from the conn" do
info =
%Plug.Conn{params: %{"code" => "1234"}}
|> ADFS.handle_callback!()
|> ADFS.info()
assert info == %Ueberauth.Auth.Info{
name: "John Doe",
nickname: "john1",
email: "[email protected]"
}
end
test "Gets the extra info from the conn" do
extra =
%Plug.Conn{params: %{"code" => "1234"}}
|> ADFS.handle_callback!()
|> ADFS.extra()
assert %Ueberauth.Auth.Extra{raw_info: %{token: %Joken.Token{}, user: %{}}} = extra
end
test "Gets the credential info from the conn with a custom handler" do
with_mock Application, [:passthrough], get_env: fn _, _ -> @env_handler_values end do
credentials =
%Plug.Conn{params: %{"code" => "1234"}}
|> ADFS.handle_callback!()
|> ADFS.credentials()
assert credentials == %Ueberauth.Auth.Credentials{
other: %{handler: true}
}
end
end
test "Gets the user info from the conn with a custom handler" do
with_mock Application, [:passthrough], get_env: fn _, _ -> @env_handler_values end do
info =
%Plug.Conn{params: %{"code" => "1234"}}
|> ADFS.handle_callback!()
|> ADFS.info()
assert info == %Ueberauth.Auth.Info{
name: "John Doe",
nickname: "john1",
email: "[email protected]",
location: "handler"
}
end
end
test "Gets the extra info from the conn with a custom handler" do
with_mock Application, [:passthrough], get_env: fn _, _ -> @env_handler_values end do
extra =
%Plug.Conn{params: %{"code" => "1234"}}
|> ADFS.handle_callback!()
|> ADFS.extra()
assert %Ueberauth.Auth.Extra{
raw_info: %{
token: %Joken.Token{},
user: %{},
with_handler: true
}
} = extra
end
end
test "Returns the configured status when env is present" do
assert ADFS.configured?() == true
end
test "Returns the configured status when env is not present" do
with_mock Application, [:passthrough], get_env: fn _, _ -> [] end do
assert ADFS.configured?() == false
end
end
test "Returns the configured status when env is missing values" do
with_mock Application,
[:passthrough],
get_env: fn _, _ -> [adfs_url: "https://test.com"] end do
assert ADFS.configured?() == false
end
end
end
describe "ADFS Oauth Client" do
setup_with_mocks [{Application, [:passthrough], [get_env: fn _, _ -> @env_values end]}] do
:ok
end
test "Gets the client with the config properties" do
client = ADFS.OAuth.client()
assert client.client_id == @env_values[:client_id]
assert client.authorize_url == "#{@env_values[:adfs_url]}/adfs/oauth2/authorize"
assert client.token_url == "#{@env_values[:adfs_url]}/adfs/oauth2/token"
end
test "Gets the client with options" do
client = ADFS.OAuth.client(client_id: "other_client")
assert client.client_id == "other_client"
end
test "Doesn't get the client without config" do
with_mock Application, [:passthrough], get_env: fn _, _ -> nil end do
client = ADFS.OAuth.client()
assert client == {nil, []}
end
end
test "Get the authorize_url" do
assert ADFS.OAuth.authorize_url!() ==
"#{@env_values[:adfs_url]}/adfs/oauth2/authorize?client_id=example_client&redirect_uri=&response_type=code"
end
test "Gets the signout url" do
assert ADFS.OAuth.signout_url() ==
{:ok, "#{@env_values[:adfs_url]}/adfs/ls/?wa=wsignout1.0"}
end
test "Gets the signout url with params" do
assert ADFS.OAuth.signout_url(%{redirect_uri: "https://test.com"}) ==
{:ok, "#{@env_values[:adfs_url]}/adfs/ls/?wa=wsignout1.0&wreply=https://test.com"}
end
test "Fails to get the signout url without config" do
with_mock Application, [:passthrough], get_env: fn _, _ -> nil end do
assert ADFS.OAuth.signout_url() == {:error, :failed_to_logout}
end
end
end
end
| 33.991477 | 122 | 0.569745 |
f7ef4c12f91a404236015426b4b399152b3432ee | 1,819 | ex | Elixir | clients/memcache/lib/google_api/memcache/v1/model/location_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/memcache/lib/google_api/memcache/v1/model/location_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/memcache/lib/google_api/memcache/v1/model/location_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Memcache.V1.Model.LocationMetadata do
@moduledoc """
Metadata for the given google.cloud.location.Location.
## Attributes
* `availableZones` (*type:* `%{optional(String.t) => GoogleApi.Memcache.V1.Model.ZoneMetadata.t}`, *default:* `nil`) - Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by GCE. These keys can be specified in the `zones` field when creating a Memcached instance.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:availableZones => %{
optional(String.t()) => GoogleApi.Memcache.V1.Model.ZoneMetadata.t()
}
}
field(:availableZones, as: GoogleApi.Memcache.V1.Model.ZoneMetadata, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.Memcache.V1.Model.LocationMetadata do
def decode(value, options) do
GoogleApi.Memcache.V1.Model.LocationMetadata.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Memcache.V1.Model.LocationMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.122449 | 334 | 0.736668 |
f7ef6f509f999596a4abaa37faa5293467edd118 | 1,567 | ex | Elixir | clients/plus/lib/google_api/plus/v1/model/comment_actor_client_specific_actor_info_youtube_actor_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/plus/lib/google_api/plus/v1/model/comment_actor_client_specific_actor_info_youtube_actor_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/plus/lib/google_api/plus/v1/model/comment_actor_client_specific_actor_info_youtube_actor_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfoYoutubeActorInfo do
@moduledoc """
Actor info specific to YouTube clients.
## Attributes
* `channelId` (*type:* `String.t`, *default:* `nil`) - ID of the YouTube channel owned by the Actor.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:channelId => String.t()
}
field(:channelId)
end
defimpl Poison.Decoder,
for: GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfoYoutubeActorInfo do
def decode(value, options) do
GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfoYoutubeActorInfo.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfoYoutubeActorInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.134615 | 104 | 0.747288 |
f7ef737a6a74f9973d824cdc28d45a8647e960e5 | 1,385 | ex | Elixir | lib/checker_mal/application.ex | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | 10 | 2020-06-12T18:36:58.000Z | 2022-02-20T11:07:49.000Z | lib/checker_mal/application.ex | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | 7 | 2020-05-08T06:03:08.000Z | 2022-01-24T02:57:16.000Z | lib/checker_mal/application.ex | Hiyori-API/checker-mal | c52f6e8a248ba160ffebc2c9369a933fc8fc4499 | [
"MIT"
] | 1 | 2020-12-03T03:49:27.000Z | 2020-12-03T03:49:27.000Z | defmodule CheckerMal.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
defp unapproved_html() do
if Application.get_env(:checker_mal, :unapproved_html_enabled, false) do
[
CheckerMal.UnapprovedHtml.EntryCache,
CheckerMal.UnapprovedHtml.Cache
]
else
[]
end
end
def start(_type, _args) do
children =
Enum.concat(
[
# Start the Ecto repository
CheckerMal.Repo,
# Start the Telemetry supervisor
CheckerMalWeb.Telemetry,
# Start the PubSub system
{Phoenix.PubSub, name: CheckerMal.PubSub},
# Start the Endpoint (http/https)
CheckerMalWeb.Endpoint,
CheckerMal.Core.RateLimit,
CheckerMal.Core.Unapproved,
CheckerMal.Core.Scheduler
],
unapproved_html()
)
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: CheckerMal.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
CheckerMalWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 27.156863 | 76 | 0.662094 |
f7efe29f69b4bf4f68d279e0aa6ebcc8d776ce03 | 15,221 | ex | Elixir | clients/games_configuration/lib/google_api/games_configuration/v1configuration/api/leaderboard_configurations.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/games_configuration/lib/google_api/games_configuration/v1configuration/api/leaderboard_configurations.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/games_configuration/lib/google_api/games_configuration/v1configuration/api/leaderboard_configurations.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.GamesConfiguration.V1configuration.Api.LeaderboardConfigurations do
@moduledoc """
API calls for all endpoints tagged `LeaderboardConfigurations`.
"""
alias GoogleApi.GamesConfiguration.V1configuration.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Delete the leaderboard configuration with the given ID.
## Parameters
- connection (GoogleApi.GamesConfiguration.V1configuration.Connection): Connection to server
- leaderboard_id (String.t): The ID of the leaderboard.
- optional_params (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec games_configuration_leaderboard_configurations_delete(
Tesla.Env.client(),
String.t(),
keyword()
) :: {:ok, nil} | {:error, Tesla.Env.t()}
def games_configuration_leaderboard_configurations_delete(
connection,
leaderboard_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/games/v1configuration/leaderboards/{leaderboardId}", %{
"leaderboardId" => URI.encode_www_form(leaderboard_id)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [decode: false])
end
@doc """
Retrieves the metadata of the leaderboard configuration with the given ID.
## Parameters
- connection (GoogleApi.GamesConfiguration.V1configuration.Connection): Connection to server
- leaderboard_id (String.t): The ID of the leaderboard.
- optional_params (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
## Returns
{:ok, %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}} on success
{:error, info} on failure
"""
@spec games_configuration_leaderboard_configurations_get(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok, GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration.t()}
| {:error, Tesla.Env.t()}
def games_configuration_leaderboard_configurations_get(
connection,
leaderboard_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/games/v1configuration/leaderboards/{leaderboardId}", %{
"leaderboardId" => URI.encode_www_form(leaderboard_id)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}]
)
end
@doc """
Insert a new leaderboard configuration in this application.
## Parameters
- connection (GoogleApi.GamesConfiguration.V1configuration.Connection): Connection to server
- application_id (String.t): The application ID from the Google Play developer console.
- optional_params (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :body (LeaderboardConfiguration):
## Returns
{:ok, %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}} on success
{:error, info} on failure
"""
@spec games_configuration_leaderboard_configurations_insert(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok, GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration.t()}
| {:error, Tesla.Env.t()}
def games_configuration_leaderboard_configurations_insert(
connection,
application_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/games/v1configuration/applications/{applicationId}/leaderboards", %{
"applicationId" => URI.encode_www_form(application_id)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}]
)
end
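  # Hedged example (the connection setup and application id are assumptions,
  # not part of this generated file):
  #
  #     conn = GoogleApi.GamesConfiguration.V1configuration.Connection.new("oauth2-token")
  #     {:ok, board} =
  #       games_configuration_leaderboard_configurations_insert(conn, "my-app-id",
  #         body: %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}
  #       )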
@doc """
Returns a list of the leaderboard configurations in this application.
## Parameters
- connection (GoogleApi.GamesConfiguration.V1configuration.Connection): Connection to server
- application_id (String.t): The application ID from the Google Play developer console.
- optional_params (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :maxResults (integer()): The maximum number of resource configurations to return in the response, used for paging. For any response, the actual number of resources returned may be less than the specified maxResults.
- :pageToken (String.t): The token returned by the previous request.
## Returns
{:ok, %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationListResponse{}} on success
{:error, info} on failure
"""
@spec games_configuration_leaderboard_configurations_list(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok,
GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationListResponse.t()}
| {:error, Tesla.Env.t()}
def games_configuration_leaderboard_configurations_list(
connection,
application_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:maxResults => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/games/v1configuration/applications/{applicationId}/leaderboards", %{
"applicationId" => URI.encode_www_form(application_id)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[
struct:
%GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationListResponse{}
]
)
end
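  # Hedged example (connection and application id are assumptions; `items` is
  # the usual field name on the generated *ListResponse models):
  #
  #     {:ok, page} =
  #       games_configuration_leaderboard_configurations_list(conn, "my-app-id", maxResults: 25)
  #     page.items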
@doc """
Update the metadata of the leaderboard configuration with the given ID. This method supports patch semantics.
## Parameters
- connection (GoogleApi.GamesConfiguration.V1configuration.Connection): Connection to server
- leaderboard_id (String.t): The ID of the leaderboard.
- optional_params (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :body (LeaderboardConfiguration):
## Returns
{:ok, %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}} on success
{:error, info} on failure
"""
@spec games_configuration_leaderboard_configurations_patch(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok, GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration.t()}
| {:error, Tesla.Env.t()}
def games_configuration_leaderboard_configurations_patch(
connection,
leaderboard_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/games/v1configuration/leaderboards/{leaderboardId}", %{
"leaderboardId" => URI.encode_www_form(leaderboard_id)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}]
)
end
@doc """
Update the metadata of the leaderboard configuration with the given ID.
## Parameters
- connection (GoogleApi.GamesConfiguration.V1configuration.Connection): Connection to server
- leaderboard_id (String.t): The ID of the leaderboard.
- optional_params (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :body (LeaderboardConfiguration):
## Returns
{:ok, %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}} on success
{:error, info} on failure
"""
@spec games_configuration_leaderboard_configurations_update(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok, GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration.t()}
| {:error, Tesla.Env.t()}
def games_configuration_leaderboard_configurations_update(
connection,
leaderboard_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url("/games/v1configuration/leaderboards/{leaderboardId}", %{
"leaderboardId" => URI.encode_www_form(leaderboard_id)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfiguration{}]
)
end
end
| 38.436869 | 221 | 0.678602 |
f7f00adedfa4931a73254bde34dbd825f6dff11b | 873 | ex | Elixir | lib/rotational_cipher.ex | NullChronos/exercism | 1a30fa34e780303582c562e452534b9b45944f77 | [
"MIT"
] | null | null | null | lib/rotational_cipher.ex | NullChronos/exercism | 1a30fa34e780303582c562e452534b9b45944f77 | [
"MIT"
] | null | null | null | lib/rotational_cipher.ex | NullChronos/exercism | 1a30fa34e780303582c562e452534b9b45944f77 | [
"MIT"
] | null | null | null | defmodule RotationalCipher do
@doc """
Given a plaintext and amount to shift by, return a rotated string.
Example:
iex> RotationalCipher.rotate("Attack at dawn", 13)
"Nggnpx ng qnja"
"""
@spec rotate(text :: String.t(), shift :: integer) :: String.t()
def rotate(text, shift) do
chars = String.codepoints(text)
convert(chars, shift)
end
def convert(chars, shift_key, acc \\ "")
def convert([<<codepoint>> | tail], shift_key, acc) do
new_acc = acc <> List.to_string([shift(codepoint, shift_key)])
convert(tail, shift_key, new_acc)
end
def convert([], _shift_key, acc) do
acc
end
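  # Shift a single codepoint, wrapping within its own case; anything that is
  # not an ASCII letter passes through unchanged. For example, shift(?y, 5)
  # gives ?d and shift(?!, 5) gives ?!.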
def shift(char, shift_key) when char in ?a..?z do
rem(char - ?a + shift_key, 26) + ?a
end
def shift(char, shift_key) when char in ?A..?Z do
rem(char - ?A + shift_key, 26) + ?A
end
def shift(char, _shift_key), do: char
end
| 24.25 | 68 | 0.646048 |
f7f00d4e120e06e080c8ce1933e53df199111710 | 2,470 | ex | Elixir | lib/moneybirx/client.ex | Codaisseur/moneybirx | acf552e1b912c77a919a5563d37c51e0db67032b | [
"MIT"
] | 1 | 2019-10-16T11:33:39.000Z | 2019-10-16T11:33:39.000Z | lib/moneybirx/client.ex | Codaisseur/moneybirx | acf552e1b912c77a919a5563d37c51e0db67032b | [
"MIT"
] | null | null | null | lib/moneybirx/client.ex | Codaisseur/moneybirx | acf552e1b912c77a919a5563d37c51e0db67032b | [
"MIT"
] | 1 | 2021-07-27T17:54:20.000Z | 2021-07-27T17:54:20.000Z | defmodule Moneybirx.Client do
@moduledoc false
defmacro __using__(_) do
quote do
use HTTPoison.Base
@content_type "application/json"
@version "v2"
def endpoint, do: Application.get_env(:moneybirx, :endpoint, "https://moneybird.com/api/v2")
def process_url(path) do
if path == "/administrations" do
endpoint() <> path
else
{:ok, administration} = Moneybirx.Administration.default()
endpoint() <> "/" <> "#{administration.id}" <> path
end
end
def process_request_headers(headers) when is_map(headers) do
Enum.into(headers, [])
|> Keyword.merge(http_headers())
end
def process_request_headers(headers) do
Keyword.merge(headers, http_headers())
end
def process_request_body(body) do
Poison.encode!(body)
end
def as_struct(data, _) when is_nil(data), do: nil
def as_struct(data, type) when is_list(data) do
Enum.map(data, fn d ->
as_struct(d, type)
end)
end
def as_struct(data, type) do
struct(
type,
Enum.map(data, fn {k, v} ->
{String.to_atom(k), v}
end)
)
end
def process_response(resp) do
case resp do
%{status_code: 200} ->
resp
%{status_code: 201} ->
resp
%{status_code: 204} ->
resp
%{status_code: 301} ->
resp
%{status_code: 404} ->
raise Moneybirx.NotFoundError
%{body: body, status_code: 422} ->
raise Moneybirx.RequestError, Poison.decode!(body)
          _resp ->
raise Moneybirx.ServerError
end
end
defp http_headers do
token = Application.get_env(:moneybirx, :token)
[
Accept: @content_type,
Authorization: "Bearer #{token}",
"Content-Type": @content_type
]
end
def create_friendly_error(body) do
Poison.decode!(body)
end
    defp moneybird_error(_status, _message) do
      # TODO: do some stuff
    end
end
end
end
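# Hedged usage sketch (the resource module and endpoint below are assumptions,
# not part of this file): a resource module pulls in the HTTPoison.Base
# plumbing above with `use Moneybirx.Client`.
#
#     defmodule Moneybirx.Contact do
#       use Moneybirx.Client
#
#       def all do
#         %{body: body} = get!("/contacts")
#         {:ok, Poison.decode!(body)}
#       end
#     end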
defmodule Moneybirx.NotFoundError do
defexception message: "Moneybird resource not found"
end
defmodule Moneybirx.RequestError do
defexception message: "Moneybird request error"
end
defmodule Moneybirx.ServerError do
defexception message: "Moneybird server error"
end
| 22.87037 | 98 | 0.575709 |
f7f0229b6c2e6d4d9206c016fb254e3c6315b5b5 | 1,445 | ex | Elixir | lib/koans/17_agents.ex | CuriousCurmudgeon/elixir-koans | d085a59c55495623268cd2051d8e53d2ebd88b96 | [
"MIT"
] | null | null | null | lib/koans/17_agents.ex | CuriousCurmudgeon/elixir-koans | d085a59c55495623268cd2051d8e53d2ebd88b96 | [
"MIT"
] | null | null | null | lib/koans/17_agents.ex | CuriousCurmudgeon/elixir-koans | d085a59c55495623268cd2051d8e53d2ebd88b96 | [
"MIT"
] | null | null | null | defmodule Agents do
use Koans
@intro "Agents"
koan "Agents maintain state, so you can ask them about it" do
{:ok, pid} = Agent.start_link(fn -> "Hi there" end)
# & &1 == fn state -> state end (Programming Elixir calls this the "identity function" on page 257)
# Also, 1.10 added Function.identity/1
assert Agent.get(pid, & &1) == "Hi there"
assert Agent.get(pid, &Function.identity/1) == "Hi there"
end
koan "Agents may also be named so that you don't have to keep the pid around" do
Agent.start_link(fn -> "Why hello" end, name: AgentSmith)
assert Agent.get(AgentSmith, & &1) == "Why hello"
end
koan "Update to update the state" do
Agent.start_link(fn -> "Hi there" end, name: __MODULE__)
Agent.update(__MODULE__, fn old ->
String.upcase(old)
end)
assert Agent.get(__MODULE__, & &1) == "HI THERE"
end
koan "Use get_and_update when you need to read and change a value in one go" do
Agent.start_link(fn -> ["Milk"] end, name: __MODULE__)
old_list =
Agent.get_and_update(__MODULE__, fn old ->
{old, ["Bread" | old]}
end)
assert old_list == ["Milk"]
assert Agent.get(__MODULE__, & &1) == ["Bread", "Milk"]
end
koan "Somebody has to switch off the light at the end of the day" do
{:ok, pid} = Agent.start_link(fn -> ["Milk"] end, name: __MODULE__)
Agent.stop(__MODULE__)
assert Process.alive?(pid) == false
end
end
| 29.489796 | 103 | 0.641522 |
f7f0348a7a7367cbfb750c3f28f3ab445ed86527 | 1,973 | ex | Elixir | clients/spanner/lib/google_api/spanner/v1/model/batch_create_sessions_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/spanner/lib/google_api/spanner/v1/model/batch_create_sessions_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/spanner/lib/google_api/spanner/v1/model/batch_create_sessions_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Spanner.V1.Model.BatchCreateSessionsRequest do
@moduledoc """
The request for BatchCreateSessions.
## Attributes
* `sessionCount` (*type:* `integer()`, *default:* `nil`) - Required. The number of sessions to be created in this batch call. The API may return fewer than the requested number of sessions. If a specific number of sessions are desired, the client can make additional calls to BatchCreateSessions (adjusting session_count as necessary).
* `sessionTemplate` (*type:* `GoogleApi.Spanner.V1.Model.Session.t`, *default:* `nil`) - Parameters to be applied to each created session.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:sessionCount => integer(),
:sessionTemplate => GoogleApi.Spanner.V1.Model.Session.t()
}
field(:sessionCount)
field(:sessionTemplate, as: GoogleApi.Spanner.V1.Model.Session)
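  # Illustrative construction (values assumed):
  #
  #     %GoogleApi.Spanner.V1.Model.BatchCreateSessionsRequest{
  #       sessionCount: 25,
  #       sessionTemplate: %GoogleApi.Spanner.V1.Model.Session{}
  #     }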
end
defimpl Poison.Decoder, for: GoogleApi.Spanner.V1.Model.BatchCreateSessionsRequest do
def decode(value, options) do
GoogleApi.Spanner.V1.Model.BatchCreateSessionsRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Spanner.V1.Model.BatchCreateSessionsRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.46 | 339 | 0.750634 |
f7f03d511e26761e8c2e6c3ec2eaedb01f54d369 | 159 | ex | Elixir | lib/test/slack.ex | onekilo79/tilex | c1b2dbea911a22d21420f52533e96bd63ce8f4ff | [
"MIT"
] | null | null | null | lib/test/slack.ex | onekilo79/tilex | c1b2dbea911a22d21420f52533e96bd63ce8f4ff | [
"MIT"
] | null | null | null | lib/test/slack.ex | onekilo79/tilex | c1b2dbea911a22d21420f52533e96bd63ce8f4ff | [
"MIT"
] | null | null | null | defmodule Test.Slack do
def notify(_post, _developer, _channel, _url) do
:ok
end
def notify_of_awards(_post, _developer, _url) do
:ok
end
end
| 15.9 | 50 | 0.698113 |
f7f03dcd004ffcf73a7a8bd49e6fc1bb1d457382 | 1,584 | ex | Elixir | test/support/data_case.ex | begedin/paushal | 4388e1b702e98008aef2d231816d6cdc6773803e | [
"MIT"
] | null | null | null | test/support/data_case.ex | begedin/paushal | 4388e1b702e98008aef2d231816d6cdc6773803e | [
"MIT"
] | 6 | 2021-03-20T17:24:05.000Z | 2021-03-20T17:41:19.000Z | test/support/data_case.ex | begedin/paushal | 4388e1b702e98008aef2d231816d6cdc6773803e | [
"MIT"
] | null | null | null | defmodule Paushal.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Paushal.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Paushal.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Paushal.DataCase
import Paushal.Factories.PaymentSlipFactories
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Paushal.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Paushal.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 27.789474 | 77 | 0.692551 |
f7f05c945f11bf9d0c9aa2e022dc76beac49b863 | 108 | exs | Elixir | config/test.exs | dhirajbajaj/link_preview | b500c27b0244ec64563cae80a078c94adb12dec8 | [
"Apache-2.0"
] | 15 | 2017-01-31T14:18:27.000Z | 2021-05-24T18:25:01.000Z | config/test.exs | dhirajbajaj/link_preview | b500c27b0244ec64563cae80a078c94adb12dec8 | [
"Apache-2.0"
] | 10 | 2016-09-18T23:55:28.000Z | 2016-09-30T06:35:10.000Z | config/test.exs | dhirajbajaj/link_preview | b500c27b0244ec64563cae80a078c94adb12dec8 | [
"Apache-2.0"
] | 11 | 2018-02-20T04:36:41.000Z | 2022-03-02T13:16:54.000Z | use Mix.Config
config :httparrot,
http_port: 8888,
https_port: 8484,
ssl: true,
unix_socket: false
| 13.5 | 20 | 0.712963 |
f7f075e095c602502b70a5c3738e3b4c510903b2 | 6,554 | ex | Elixir | lib/subscription.ex | goravbhootra/braintree-elixir | 53c10216125c13c05a19edf1dc1c98a818c46e09 | [
"MIT"
] | null | null | null | lib/subscription.ex | goravbhootra/braintree-elixir | 53c10216125c13c05a19edf1dc1c98a818c46e09 | [
"MIT"
] | null | null | null | lib/subscription.ex | goravbhootra/braintree-elixir | 53c10216125c13c05a19edf1dc1c98a818c46e09 | [
"MIT"
] | null | null | null | defmodule Braintree.Subscription do
@moduledoc """
Manage customer subscriptions to recurring billing plans.
For additional reference see:
https://developers.braintreepayments.com/reference/request/subscription/create/ruby
"""
use Braintree.Construction
alias Braintree.{HTTP, Transaction, AddOn, Search}
alias Braintree.ErrorResponse, as: Error
@type t :: %__MODULE__{
id: String.t(),
plan_id: String.t(),
balance: String.t(),
billing_day_of_month: String.t(),
billing_period_end_date: String.t(),
billing_period_start_date: String.t(),
created_at: String.t(),
current_billing_cycle: String.t(),
days_past_due: String.t(),
descriptor: String.t(),
failure_count: String.t(),
first_billing_date: String.t(),
merchant_account_id: String.t(),
never_expires: String.t(),
next_bill_amount: String.t(),
next_billing_date: String.t(),
next_billing_period_amount: String.t(),
number_of_billing_cycles: String.t(),
paid_through_date: String.t(),
payment_method_token: String.t(),
price: String.t(),
status: String.t(),
trial_duration: String.t(),
trial_duration_unit: String.t(),
trial_period: String.t(),
updated_at: String.t(),
add_ons: [AddOn.t()],
discounts: [any],
transactions: [Transaction.t()],
status_history: [any]
}
defstruct id: nil,
plan_id: nil,
balance: nil,
billing_day_of_month: nil,
billing_period_end_date: nil,
billing_period_start_date: nil,
created_at: nil,
current_billing_cycle: nil,
days_past_due: nil,
descriptor: nil,
failure_count: nil,
first_billing_date: nil,
merchant_account_id: nil,
never_expires: nil,
next_bill_amount: nil,
next_billing_date: nil,
next_billing_period_amount: nil,
number_of_billing_cycles: nil,
paid_through_date: nil,
payment_method_token: nil,
price: nil,
status: nil,
trial_duration: nil,
trial_duration_unit: nil,
trial_period: nil,
updated_at: nil,
add_ons: [],
discounts: [],
transactions: [],
status_history: []
@doc """
Create a subscription, or return an error response with after failed
validation.
## Example
{:ok, sub} = Braintree.Subscription.create(%{
payment_method_token: card.token,
plan_id: "starter"
})
"""
@spec create(map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def create(params \\ %{}, opts \\ []) do
with {:ok, payload} <- HTTP.post("subscriptions", %{subscription: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
Find an existing subscription by `subscription_id`
## Example
{:ok, subscription} = Subscription.find("123")
"""
@spec find(String.t(), Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def find(subscription_id, opts \\ []) do
with {:ok, payload} <- HTTP.get("subscriptions/#{subscription_id}", opts) do
{:ok, new(payload)}
end
end
@doc """
Cancel an existing subscription by `subscription_id`. A cancelled subscription
cannot be reactivated, you would need to create a new one.
## Example
{:ok, subscription} = Subscription.cancel("123")
"""
@spec cancel(String.t(), Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def cancel(subscription_id, opts \\ []) do
with {:ok, payload} <- HTTP.put("subscriptions/#{subscription_id}/cancel", opts) do
{:ok, new(payload)}
end
end
@doc """
You can manually retry charging past due subscriptions.
By default, we will use the subscription balance when retrying the
transaction. If you would like to use a different amount you can optionally
specify the amount for the transaction.
A successful manual retry of a past due subscription will **always** reduce
the balance of that subscription to $0, regardless of the amount of the
retry.
## Example
{:ok, transaction} = Braintree.Subscription.retry_charge(sub_id)
{:ok, transaction} = Braintree.Subscription.retry_charge(sub_id, "24.00")
"""
@spec retry_charge(String.t()) :: {:ok, Transaction.t()}
@spec retry_charge(String.t(), String.t() | nil, Keyword.t()) ::
{:ok, Transaction.t()} | {:error, Error.t()}
def retry_charge(subscription_id, amount \\ nil, opts \\ []) do
Transaction.sale(%{amount: amount, subscription_id: subscription_id}, opts)
end
@doc """
To update a subscription, use its ID along with new attributes. The same
validations apply as when creating a subscription. Any attribute not passed will
remain unchanged.
## Example
{:ok, subscription} = Braintree.Subscription.update("subscription_id", %{
plan_id: "new_plan_id"
})
subscription.plan_id # "new_plan_id"
"""
@spec update(binary, map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def update(id, params, opts \\ []) when is_binary(id) and is_map(params) do
with {:ok, payload} <- HTTP.put("subscriptions/" <> id, %{subscription: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
To search for subscriptions, pass a map of search parameters.
## Example:
{:ok, subscriptions} = Braintree.Subscription.search(%{plan_id: %{is: "starter"}})
"""
@spec search(map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def search(params, opts \\ []) when is_map(params) do
Search.perform(params, "subscriptions", &new/1, opts)
end
@doc """
Convert a map into a Subscription struct. Add_ons and transactions
are converted to a list of structs as well.
## Example
subscripton = Braintree.Subscription.new(%{"plan_id" => "business",
"status" => "Active"})
"""
@spec new(map | [map]) :: t | [t]
def new(%{"subscription" => map}) do
new(map)
end
def new(map) when is_map(map) do
subscription = super(map)
add_ons = AddOn.new(subscription.add_ons)
transactions = Transaction.new(subscription.transactions)
%{subscription | add_ons: add_ons, transactions: transactions}
end
def new(list) when is_list(list) do
Enum.map(list, &new/1)
end
end
| 31.815534 | 93 | 0.611993 |
f7f096f0422c210755f551683a3526c916946527 | 1,029 | ex | Elixir | lib/oauth_xyz/application.ex | ritou/elixir-oauth-xyz | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | 2 | 2020-04-22T13:22:25.000Z | 2020-12-01T12:01:30.000Z | lib/oauth_xyz/application.ex | ritou/elixir-oauth-xyz | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | 3 | 2019-12-05T01:32:09.000Z | 2019-12-09T01:15:32.000Z | lib/oauth_xyz/application.ex | ritou/elixir-oauth-xyz-web | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | null | null | null | defmodule OAuthXYZ.Application do
use Application
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the Ecto repository
supervisor(OAuthXYZ.Repo, []),
# Start the endpoint when the application starts
supervisor(OAuthXYZWeb.Endpoint, [])
# Start your own worker by calling: OAuthXYZ.Worker.start_link(arg1, arg2, arg3)
# worker(OAuthXYZ.Worker, [arg1, arg2, arg3]),
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: OAuthXYZ.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
OAuthXYZWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 32.15625 | 86 | 0.716229 |
f7f0a6532babd01acc427ed1974fb283acbb525a | 437 | ex | Elixir | lib/release_tasks.ex | BenBera/littleChatWebrtc | 91850323d0e60c4049a84ff8985b09856b356016 | [
"MIT"
] | 166 | 2020-07-15T14:47:19.000Z | 2022-03-25T03:57:35.000Z | lib/release_tasks.ex | BenBera/littleChatWebrtc | 91850323d0e60c4049a84ff8985b09856b356016 | [
"MIT"
] | 12 | 2020-07-01T23:32:47.000Z | 2021-03-18T21:21:28.000Z | lib/release_tasks.ex | BenBera/littleChatWebrtc | 91850323d0e60c4049a84ff8985b09856b356016 | [
"MIT"
] | 21 | 2020-07-15T14:59:39.000Z | 2022-03-20T21:05:16.000Z | defmodule Littlechat.ReleaseTasks do
@app :littlechat
def migrate do
for repo <- repos() do
{:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :up, all: true))
end
end
def rollback(repo, version) do
{:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
end
defp repos do
Application.load(@app)
Application.fetch_env!(@app, :ecto_repos)
end
end
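# Typical invocation from a built release (the release binary path, repo
# module and migration version below are assumptions):
#
#     bin/littlechat eval "Littlechat.ReleaseTasks.migrate()"
#     bin/littlechat eval "Littlechat.ReleaseTasks.rollback(Littlechat.Repo, 20200701120000)"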
| 23 | 91 | 0.652174 |
f7f0c7ff2392c7c0c2000d9cc77348bb255704e2 | 1,276 | exs | Elixir | 2017/elixir/day6b.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day6b.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | 2017/elixir/day6b.exs | zakora/elixir-aoc2017 | 216e92cef370081cc0792102e0b40dd3a518d8bf | [
"Unlicense"
] | null | null | null | defmodule Day6 do
def solve(banks) do
recur(banks, %{}, 0, length(banks))
end
  defp recur(banks, seen_banks, steps, size) do
    if Map.has_key?(seen_banks, banks) do
      # Part two: the answer is the length of the loop, i.e. how many steps
      # have passed since this configuration was first seen.
      steps - seen_banks[banks]
    else
      seen_banks = Map.put(seen_banks, banks, steps)
      # Empty the fullest bank and redistribute its blocks around the others.
      {index, value} = find_max(banks)
      banks =
        banks
        |> List.replace_at(index, 0)
        |> spread(index + 1, value, size)
      recur(banks, seen_banks, steps + 1, size)
    end
  end
defp find_max(list) do
# return {idx, value} of the max from the list
[head | _ ] = list
rec_find_max(list, 0, head, 0)
end
defp rec_find_max(list, index, value, step) do
if Enum.empty?(list) do
{index, value}
else
[head | tail ] = list
{index, value} =
if head > value do
{step, head}
else
{index, value}
end
rec_find_max(tail, index, value, step + 1)
end
end
  # Drop the remaining blocks one at a time into successive banks, wrapping
  # around with rem/2.
  defp spread(list, idx, value, size) do
if value == 0 do
list
else
idx = rem(idx, size)
list = List.update_at(list, idx, fn (x) -> x + 1 end)
spread(list, idx + 1, value - 1, size)
end
end
end
File.read!("day6.txt")
|> String.split
|> Enum.map(&String.to_integer/1)
|> Day6.solve
|> IO.puts
| 21.627119 | 59 | 0.569749 |
f7f0dd7ce3dbb70d79d28b75cf47ed20ffd3a0dd | 1,622 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/feature.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/feature.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/health_care/lib/google_api/health_care/v1beta1/model/feature.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.Feature do
@moduledoc """
A feature of an entity mention.
## Attributes
* `confidence` (*type:* `float()`, *default:* `nil`) - The model's confidence in this feature annotation. A number between 0 and 1.
* `value` (*type:* `String.t`, *default:* `nil`) - The value of this feature annotation. Its range depends on the type of the feature.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:confidence => float(),
:value => String.t()
}
field(:confidence)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.Feature do
def decode(value, options) do
GoogleApi.HealthCare.V1beta1.Model.Feature.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.Feature do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.44 | 138 | 0.721332 |
f7f0e0f150bab3aca4658bb1e5268826eae31d35 | 1,345 | ex | Elixir | release/elixir/drone_app/lib/drone_app_supervisor.ex | o-kurnenkov/otp_basis | 9717d73151a2f898d9c52485df91452bbb46548a | [
"MIT"
] | 1 | 2016-11-01T18:03:26.000Z | 2016-11-01T18:03:26.000Z | release/elixir/drone_app/lib/drone_app_supervisor.ex | o-kurnenkov/otp_basis | 9717d73151a2f898d9c52485df91452bbb46548a | [
"MIT"
] | null | null | null | release/elixir/drone_app/lib/drone_app_supervisor.ex | o-kurnenkov/otp_basis | 9717d73151a2f898d9c52485df91452bbb46548a | [
"MIT"
] | null | null | null | defmodule DroneApp.Supervisor do
use Supervisor
def start_link do
Supervisor.start_link(__MODULE__, [], name: :drone_supervisor)
end
def start_child(drone_name) do
Supervisor.start_child(:drone_supervisor, [drone_name])
end
def which_children do
Supervisor.which_children(:drone_supervisor)
end
def init(_) do
drones = [
# worker(Drone, ["the-One"]) # one_for_one
worker(Drone, []) # simple_one_for_one
]
IO.puts("init DroneSup")
# supervise(drones, strategy: :one_for_one)
# supervise(drones, strategy: :one_for_one)
supervise(drones, strategy: :simple_one_for_one)
end
end
# One For One:
#
# Process.flag(:trap_exit, true)
# Enum.each(["drone_supervisor.ex","drone.ex"],&c &1)
# {:ok, pid} = DroneSup.start_link
# [{_, dpid, _, _}] = Supervisor.which_children(:drone_supervisor)
# Process.exit(dpid, :kill)
# Drone.get_name(dpid)
#
# {:ok, spid} = DroneSup.start_link
# [{_, dpid, _, _}] = Supervisor.which_children(:drone_supervisor)
# Drone.get_name(dpid)
# Process.exit(spid, :kill)
# Drone.get_name(dpid)
# Simple One for One
#
# {:ok, spid} = DroneSup.start_link
# DroneSup.start_child("IDDQD")
# DroneSup.start_child("IDKFA")
# DroneSup.which_children
# Process.whereis(:"Drone-IDKFA") |> Process.exit(:kill)
# DroneSup.which_children
# Process.exit(spid, :kill)
| 24.907407 | 66 | 0.699628 |
f7f0fd877a402b9fd2d57e604a025feec01e6fd5 | 1,884 | exs | Elixir | config/prod.exs | Simrayz/foresight | 01a35dceee369ad068102669f366cc6e61169626 | [
"MIT"
] | null | null | null | config/prod.exs | Simrayz/foresight | 01a35dceee369ad068102669f366cc6e61169626 | [
"MIT"
] | 2 | 2022-02-17T21:25:57.000Z | 2022-02-17T21:33:15.000Z | config/prod.exs | Simrayz/foresight | 01a35dceee369ad068102669f366cc6e61169626 | [
"MIT"
] | null | null | null | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :foresight, ForesightWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :foresight, ForesightWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
# import_config "prod.secret.exs"
| 36.230769 | 66 | 0.712845 |
f7f10dd2fac6f1bf0e4c30a16e3d367c12992b52 | 1,125 | exs | Elixir | config/config.exs | HallowXIII/ymserver | 8261bc5f773505ea6a8688b1f68f3fc741b055ad | [
"MIT"
] | null | null | null | config/config.exs | HallowXIII/ymserver | 8261bc5f773505ea6a8688b1f68f3fc741b055ad | [
"MIT"
] | null | null | null | config/config.exs | HallowXIII/ymserver | 8261bc5f773505ea6a8688b1f68f3fc741b055ad | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :ymserver, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:ymserver, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.290323 | 73 | 0.752 |
f7f1256ea9260990a3c960fe482db1561db82b3f | 6,727 | ex | Elixir | lib/elsa/supervisor.ex | keathley/elsa | a0518b75e4dedcdd91e6e4e1ed15292868035fc5 | [
"Apache-2.0"
] | null | null | null | lib/elsa/supervisor.ex | keathley/elsa | a0518b75e4dedcdd91e6e4e1ed15292868035fc5 | [
"Apache-2.0"
] | null | null | null | lib/elsa/supervisor.ex | keathley/elsa | a0518b75e4dedcdd91e6e4e1ed15292868035fc5 | [
"Apache-2.0"
] | null | null | null | defmodule Elsa.Supervisor do
@moduledoc """
Top-level supervisor that orchestrates all other components
of the Elsa library. Allows for a single point of integration
into your application supervision tree and configuration by way
of a series of nested keyword lists
Components not needed by a running application (if your application
  _only_ consumes messages from Kafka and never produces back to it)
can be safely omitted from the configuration.
"""
use Supervisor
@doc """
Defines a connection for locating the Elsa Registry process.
"""
@spec registry(String.t() | atom()) :: atom()
def registry(connection) do
:"elsa_registry_#{connection}"
end
def via_name(registry, name) do
{:via, Elsa.Registry, {registry, name}}
end
def dynamic_supervisor(registry) do
via_name(registry, DynamicSupervisor)
end
@doc """
Starts the top-level Elsa supervisor and links it to the current process.
Starts a brod client and a custom process registry by default
and then conditionally starts and takes supervision of any
brod group-based consumers or producer processes defined.
## Options
* `:endpoints` - Required. Keyword list of kafka brokers. ex. `[localhost: 9092]`
* `:connection` - Required. Atom used to track kafka connection.
* `:config` - Optional. Client configuration options passed to brod.
* `:producer` - Optional. Can be a single producer configuration of multiples in a list.
* `:group_consumer` - Optional. Group consumer configuration.
* `:consumer` - Optional. Simple topic consumer configuration.
## Producer Config
* `:topic` - Required. Producer will be started for configured topic.
* `:poll` - Optional. If set to a number in milliseconds, will poll for new partitions and startup producers on the fly.
* `:config` - Optional. Producer configuration options passed to `brod_producer`.
## Group Consumer Config
* `:group` - Required. Name of consumer group.
* `:topics` - Required. List of topics to subscribe to.
* `:handler` - Required. Module that implements Elsa.Consumer.MessageHandler behaviour.
* `:handler_init_args` - Optional. Any args to be passed to init function in handler module.
* `:assignment_received_handler` - Optional. Arity 4 Function that will be called with any partition assignments.
    Return `:ok` for the assignment to be subscribed to. Return `{:error, reason}` to stop the subscription.
Arguments are group, topic, partition, generation_id.
* `:assignments_revoked_handler` - Optional. Zero arity function that will be called when assignments are revoked.
All workers will be shutdown before callback is invoked and must return `:ok`.
* `:config` - Optional. Consumer configuration options passed to `brod_consumer`.
## Consumer Config
* `:topic` - Required. Topic to subscribe to.
* `:begin_offset` - Required. Where to begin consuming from. Must be either `:earliest`, `:latest`, or a valid offset integer.
* `:handler` - Required. Module that implements `Elsa.Consumer.MessageHandler` behaviour.
* `:partition` - Optional. Topic partition to subscribe to. If `nil`, will default to all partitions.
* `:handler_init_args` - Optional. Any args to be passed to init function in handler module.
  * `:poll` - Optional. If set to a number of milliseconds, will poll for new partitions and startup consumers on the fly.
## Example
```
Elsa.Supervisor.start_link([
endpoints: [localhost: 9092],
connection: :conn,
producer: [topic: "topic1"],
consumer: [
topic: "topic2",
partition: 0,
begin_offset: :earliest,
handler: ExampleHandler
],
group_consumer: [
group: "example-group",
topics: ["topic1"],
handler: ExampleHandler,
config: [
begin_offset: :earliest,
offset_reset_policy: :reset_to_earliest
]
]
])
```
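  When run under your own supervision tree, the same options can be passed
  as a child spec instead (a minimal sketch reusing the `:conn` connection
  and endpoints from the example above):
  ```
  children = [
    {Elsa.Supervisor, endpoints: [localhost: 9092], connection: :conn}
  ]
  Supervisor.start_link(children, strategy: :one_for_one)
  ```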
"""
@spec start_link(keyword()) :: GenServer.on_start()
def start_link(args) do
opts =
case Keyword.has_key?(args, :name) do
true -> [name: Keyword.fetch!(args, :name)]
false -> []
end
Supervisor.start_link(__MODULE__, args, opts)
end
def init(args) do
connection = Keyword.fetch!(args, :connection)
registry = registry(connection)
children =
[
{Elsa.Registry, name: registry},
{DynamicSupervisor, strategy: :one_for_one, name: dynamic_supervisor(registry)},
start_client(args),
start_producer(registry, Keyword.get(args, :producer)),
start_group_consumer(connection, registry, Keyword.get(args, :group_consumer)),
start_consumer(connection, registry, Keyword.get(args, :consumer))
]
|> List.flatten()
Supervisor.init(children, strategy: :rest_for_one)
end
defp start_client(args) do
connection = Keyword.fetch!(args, :connection)
endpoints = Keyword.fetch!(args, :endpoints)
config = Keyword.get(args, :config, [])
{Elsa.Wrapper,
mfa: {:brod_client, :start_link, [endpoints, connection, config]}, register: {registry(connection), :brod_client}}
end
defp start_group_consumer(_connection, _registry, nil), do: []
defp start_group_consumer(connection, registry, args) do
group_consumer_args =
args
|> Keyword.put(:registry, registry)
|> Keyword.put(:connection, connection)
|> Keyword.put(:name, via_name(registry, Elsa.Group.Supervisor))
{Elsa.Group.Supervisor, group_consumer_args}
end
defp start_consumer(_connection, _registry, nil), do: []
defp start_consumer(connection, registry, args) do
topics =
case Keyword.has_key?(args, :partition) do
true -> [{Keyword.fetch!(args, :topic), Keyword.fetch!(args, :partition)}]
false -> [Keyword.fetch!(args, :topic)]
end
consumer_args =
args
|> Keyword.put(:registry, registry)
|> Keyword.put(:connection, connection)
|> Keyword.put(:topics, topics)
|> Keyword.put_new(:config, [])
{Elsa.DynamicProcessManager,
id: :worker_process_manager,
dynamic_supervisor: dynamic_supervisor(registry),
poll: Keyword.get(args, :poll, false),
initializer: {Elsa.Consumer.Worker.Initializer, :init, [consumer_args]}}
end
defp start_producer(_registry, nil), do: []
defp start_producer(registry, args) do
[
{Elsa.DynamicProcessManager,
id: :producer_process_manager,
dynamic_supervisor: dynamic_supervisor(registry),
initializer: {Elsa.Producer.Initializer, :init, [registry, args]},
poll: Keyword.get(args, :poll, false),
name: via_name(registry, :producer_process_manager)}
]
end
end
| 32.497585 | 128 | 0.687379 |
f7f13159ecb3796e83213a66a987c684201fdbd9 | 1,118 | ex | Elixir | web/models/tag.ex | allen-garvey/artour | fce27b234d11a3e434c897b5fa3178b7c126245f | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/artour/web/models/tag.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/artour/web/models/tag.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Artour.Tag do
use Artour.Web, :model
import Artour.ModelHelpers.SlugValidator, only: [validate_slug: 2]
schema "tags" do
field :name, :string
field :slug, :string
many_to_many :posts, Artour.Post, join_through: "post_tags", on_delete: :delete_all
has_many :post_tags, Artour.PostTag
timestamps()
end
@doc """
Query used for default order
"""
def default_order_query() do
from(Artour.Tag, order_by: :name)
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:name, :slug])
|> validate_required([:name, :slug])
|> validate_slug(:slug)
|> unique_constraint(:name)
|> unique_constraint(:slug)
end
@doc """
Maps a list of tags into tuples, used for forms
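  For example, given a single tag struct (illustrative values):
      Artour.Tag.map_for_form([%Artour.Tag{id: 1, name: "city"}])
      #=> [{"city", 1}]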
"""
def map_for_form(tags) do
Enum.map(tags, &{&1.name, &1.id})
end
@doc """
  Returns a list of tags as `{name, id}` tuples,
  ordered in the default order, suitable for select fields
  in forms
"""
def form_list(repo) do
repo.all(default_order_query()) |> map_for_form
end
end
| 22.816327 | 87 | 0.65653 |
f7f144e0d593b8e381f74bc8c6c03956ce53a5c1 | 1,529 | ex | Elixir | lib/zcashex/client.ex | nighthawk-apps/zcashex | dca51cca3df69586016b24277614565514e1b8e2 | [
"Apache-2.0"
] | 1 | 2021-05-19T05:56:18.000Z | 2021-05-19T05:56:18.000Z | lib/zcashex/client.ex | nighthawk-apps/zcashex | dca51cca3df69586016b24277614565514e1b8e2 | [
"Apache-2.0"
] | null | null | null | lib/zcashex/client.ex | nighthawk-apps/zcashex | dca51cca3df69586016b24277614565514e1b8e2 | [
"Apache-2.0"
] | null | null | null | defmodule Zcashex.Client do
@moduledoc """
Client that communicates with the Zcash node.
"""
use GenServer
require Logger
def init(state) do
{:ok, state}
end
@doc """
  Calls the given zcashd JSON-RPC method with the provided params.
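  For example, a caller holding the pid of a started client (the `client`
  variable below is illustrative) could issue:
      GenServer.call(client, {:call_endpoint, "getblockhash", [500_000]})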
"""
def handle_call({:call_endpoint, method, params}, _from, state) do
response =
HTTPoison.post(
"http://#{state.host}:#{state.port}",
Poison.encode!(%{
"jsonrpc" => "1.0",
"id" => "zcashex",
"method" => method,
"params" => params
}),
[{"Content-Type", "text/plain"}],
hackney: [basic_auth: {state.username, state.password}],
recv_timeout: 120_000
)
case response do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, %{"error" => nil, "id" => "zcashex", "result" => result}} = Poison.decode(body)
{:reply, {:ok, result}, state}
{:ok, %HTTPoison.Response{body: body}} ->
case Poison.decode(body) do
{:ok, %{"error" => %{"message" => message}, "id" => "zcashex", "result" => nil}} ->
{:reply, {:error, message}, state}
_ ->
{:reply, {:error, "Unknown error."}, state}
end
_ ->
{:reply, {:error, "Unknown error."}, state}
end
end
@doc """
  Calls the given zcashd JSON-RPC method with no explicit params (an empty list is used).
"""
def handle_call({:call_endpoint, method}, from, state) do
Zcashex.Client.handle_call({:call_endpoint, method, []}, from, state)
end
end
| 27.303571 | 93 | 0.548725 |
f7f16e8b7ecc48788fbf2f477662021dec3d4785 | 764 | ex | Elixir | code examples/example-15-26.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 8 | 2016-08-14T12:35:16.000Z | 2021-01-26T04:05:31.000Z | code examples/example-15-26.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | null | null | null | code examples/example-15-26.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 5 | 2016-08-18T22:12:19.000Z | 2020-02-17T18:52:41.000Z | iex(1)> s = %Stewtype{}
%Stewtype{ingredients: [], stewtype: :veg}
iex(2)> sb = %{s|stewtype: :beef}
%Stewtype{ingredients: [], stewtype: :beef}
iex(3)> su = %{s|stewtype: :chicken}
%Stewtype{ingredients: [], stewtype: :chicken}
iex(4)> VegStew.print(s)
{:ok, "It's a vegetable stew"}
iex(5)> sb = %{s|stewtype: :beef}
%Stewtype{ingredients: [], stewtype: :beef}
iex(6)> su = %{s|stewtype: :chicken}
%Stewtype{ingredients: [], stewtype: :chicken}
iex(7)> VegStew.print(sb)
{:error, "Unknown stew"}
iex(8)> BeefStew.print(sb)
{:ok, "It's a beef stew"}
iex(9)> BeefStew.print(s)
{:error, "Unknown stew"}
iex(10)> BeefStew.print(su)
{:error, "Unknown stew"}
iex(11)> VegStew.print(su)
{:error, "Unknown stew"}
iex(12)> VegStew.print(s)
{:ok, "It's a vegetable stew"}
| 30.56 | 46 | 0.65445 |
f7f16eda68e74c74e163fe998620797546e4dff7 | 788 | ex | Elixir | lib/ash/resource/identity.ex | sobolevn/ash | 5eca8379377a633100ca14c13c6994fa61c8f7ea | [
"MIT"
] | null | null | null | lib/ash/resource/identity.ex | sobolevn/ash | 5eca8379377a633100ca14c13c6994fa61c8f7ea | [
"MIT"
] | null | null | null | lib/ash/resource/identity.ex | sobolevn/ash | 5eca8379377a633100ca14c13c6994fa61c8f7ea | [
"MIT"
] | null | null | null | defmodule Ash.Resource.Identity do
@moduledoc "Represents a unique constraint on a resource"
defstruct [:name, :keys]
@schema [
name: [
type: :atom,
required: true,
doc:
"The name of the identity. Used by extensions to target specific identities for fetching single instances of a resource"
],
keys: [
type: {:custom, __MODULE__, :keys, []},
required: true,
doc:
"The names of attributes, aggregates or calculations that uniquely identify this resource."
]
]
def schema, do: @schema
@type t :: %__MODULE__{}
def keys(keys) do
keys = List.wrap(keys)
if Enum.all?(keys, &is_atom/1) do
{:ok, keys}
else
{:error, "Expected a list of atoms for the identity keys"}
end
end
end
| 23.176471 | 128 | 0.623096 |
f7f175525d3caf18a067deda7a6ec8eebcc95ede | 2,200 | ex | Elixir | lib/ecto_mysql_extras/queries/long_running_queries.ex | kianmeng/ecto_mysql_extras | 55c84833f0b41899e37072d3159748484d7c7883 | [
"Apache-2.0"
] | null | null | null | lib/ecto_mysql_extras/queries/long_running_queries.ex | kianmeng/ecto_mysql_extras | 55c84833f0b41899e37072d3159748484d7c7883 | [
"Apache-2.0"
] | 9 | 2021-10-13T08:31:33.000Z | 2021-12-24T13:19:09.000Z | lib/ecto_mysql_extras/queries/long_running_queries.ex | kianmeng/ecto_mysql_extras | 55c84833f0b41899e37072d3159748484d7c7883 | [
"Apache-2.0"
] | 1 | 2021-12-29T16:52:00.000Z | 2021-12-29T16:52:00.000Z | defmodule EctoMySQLExtras.LongRunningQueries do
@moduledoc """
Query all long running queries.
Data is retrieved from the `information_schema` database and the `plugins` table.
The `:threshold` argument is in milliseconds. The duration for MySQL is displayed
  in seconds; for MariaDB it is in milliseconds.
At this moment it isn't converted to a more human readable
format.
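  For illustration, the raw SQL can be produced directly with `query/1`; any
  `:db` value other than `:mysql` selects the MariaDB variant, and the
  `:threshold` key is normally merged in from the `:default_args` of `info/0`:
      EctoMySQLExtras.LongRunningQueries.query(db: :mariadb, version: "10.6.0", threshold: 500)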
"""
@behaviour EctoMySQLExtras
def info do
%{
title: "All queries longer than the threshold by descending duration",
order_by: [duration: :DESC],
args: [:threshold],
default_args: [threshold: 500],
columns: [
%{name: :id, type: :integer},
%{name: :thread, type: :integer},
%{name: :user, type: :string},
%{name: :host, type: :string},
%{name: :duration, type: :integer},
%{name: :query, type: :string},
%{name: :memory_used, type: :bytes}
]
}
end
def query(args \\ [db: :mysql, version: "8.0.0"]) do
query = """
/* ECTO_MYSQL_EXTRAS: #{info().title} */
"""
query_db_specific =
if args[:db] == :mysql do
from =
if String.starts_with?(args[:version], "5.7.") do
"information_schema.PROCESSLIST"
else
"performance_schema.processlist"
end
"""
SELECT
ID AS `id`,
NULL AS `thread`,
USER AS `user`,
HOST AS `host`,
TIME AS `duration`,
INFO AS `query`,
NULL AS `memory_used`
FROM #{from}
WHERE DB = DATABASE()
AND COMMAND <> 'Sleep'
AND TIME > #{args[:threshold] / 1000}
ORDER BY `duration` DESC;
"""
else
"""
SELECT
ID AS `id`,
TID AS `thread`,
USER AS `user`,
HOST AS `host`,
TIME_MS AS `duration`,
INFO AS `query`,
MEMORY_USED AS `memory_used`
FROM information_schema.PROCESSLIST
WHERE DB = DATABASE()
AND COMMAND <> 'Sleep'
AND TIME_MS > #{args[:threshold]}
ORDER BY `duration` DESC;
"""
end
query <> query_db_specific
end
end
| 26.829268 | 83 | 0.545455 |
f7f1b98b0d796862f23a7dc2e0d6f3832f086329 | 2,819 | ex | Elixir | apps/ewallet_db/lib/ewallet_db/transaction_request.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/ewallet_db/lib/ewallet_db/transaction_request.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/lib/ewallet_db/transaction_request.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule EWalletDB.TransactionRequest do
@moduledoc """
Ecto Schema representing transaction requests.
"""
use Ecto.Schema
import Ecto.Changeset
import EWalletDB.Validator
alias Ecto.UUID
alias EWalletDB.{TransactionRequest, TransactionRequestConsumption,
                   Repo, MintedToken, User, Balance, Account, Helpers}
@valid "valid"
@expired "expired"
@statuses [@valid, @expired]
@send "send"
@receive "receive"
@types [@send, @receive]
@primary_key {:id, Ecto.UUID, autogenerate: true}
schema "transaction_request" do
field :type, :string
field :amount, EWalletDB.Types.Integer
field :status, :string, default: @valid # valid -> expired
field :correlation_id, :string
has_many :consumptions, TransactionRequestConsumption
belongs_to :user, User, foreign_key: :user_id,
references: :id,
type: UUID
belongs_to :account, Account, foreign_key: :account_id,
references: :id,
type: UUID
belongs_to :minted_token, MintedToken, foreign_key: :minted_token_id,
references: :id,
type: UUID
belongs_to :balance, Balance, foreign_key: :balance_address,
references: :address,
type: :string
timestamps()
end
defp create_changeset(%TransactionRequest{} = transaction_request, attrs) do
transaction_request
|> cast(attrs, [
:type, :amount, :correlation_id, :user_id, :account_id,
:minted_token_id, :balance_address
])
|> validate_required([
:type, :status, :minted_token_id, :balance_address
])
|> validate_required_exclusive([:account_id, :user_id])
|> validate_inclusion(:type, @types)
|> validate_inclusion(:status, @statuses)
|> unique_constraint(:correlation_id)
|> assoc_constraint(:minted_token)
|> assoc_constraint(:user)
|> assoc_constraint(:balance)
|> assoc_constraint(:account)
end
@doc """
Gets a transaction request.
"""
@spec get(UUID.t) :: %TransactionRequest{} | nil
@spec get(UUID.t, List.t) :: %TransactionRequest{} | nil
  def get(id, opts \\ [preload: []])
def get(nil, _), do: nil
def get(id, opts) do
case Helpers.UUID.valid?(id) do
true ->
TransactionRequest
|> Repo.get(id)
|> Repo.preload(opts[:preload])
false -> nil
end
end
@doc """
Inserts a transaction request.
"""
@spec insert(Map.t) :: {:ok, %TransactionRequest{}} | {:error, Map.t}
def insert(attrs) do
%TransactionRequest{}
|> create_changeset(attrs)
|> Repo.insert()
end
end
| 31.322222 | 78 | 0.600568 |
f7f2232375a4ad3f1b7cbd1773353b62f7c8ad2e | 1,282 | exs | Elixir | examples/friends/config/config.exs | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
] | null | null | null | examples/friends/config/config.exs | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
] | null | null | null | examples/friends/config/config.exs | jccf091/ecto | 42d47a6da0711f842e1a0e6724a89b318b9b2144 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :friends, Friends.Repo,
adapter: Ecto.Adapters.Postgres,
database: "friends_repo",
hostname: "localhost"
config :friends, ecto_repos: [Friends.Repo]
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :friends, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:friends, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 32.871795 | 73 | 0.74961 |
f7f2389ff314f6a794fd1c5903e05dc87eaf75bc | 1,112 | ex | Elixir | lib/ledger.ex | Zac-Garby/ultradark_core | 639f269e9a416ea034df78ac1c24703a23457840 | [
"MIT"
] | null | null | null | lib/ledger.ex | Zac-Garby/ultradark_core | 639f269e9a416ea034df78ac1c24703a23457840 | [
"MIT"
] | null | null | null | lib/ledger.ex | Zac-Garby/ultradark_core | 639f269e9a416ea034df78ac1c24703a23457840 | [
"MIT"
] | null | null | null | defmodule UltraDark.Ledger do
alias UltraDark.Blockchain.Block
alias UltraDark.Store
require Exleveldb
@store_dir ".chaindata"
def initialize do
Store.initialize(@store_dir)
end
@doc """
  Adds a block to LevelDB, indexing it by its hash (the field most likely to be unique)
"""
def append_block(block) do
fn ref -> Exleveldb.put(ref, String.to_atom(block.hash), :erlang.term_to_binary(block)) end
|> Store.transact(@store_dir)
end
@doc """
Given a block hash, return its contents
"""
@spec retrieve_block(String.t()) :: Block
def retrieve_block(hash) do
fn ref ->
{:ok, block} = Exleveldb.get(ref, String.to_atom(hash))
:erlang.binary_to_term(block)
end
|> Store.transact(@store_dir)
end
@doc """
Return the whole chain from leveldb
"""
def retrieve_chain do
fn ref ->
ref
|> Exleveldb.map(fn {_, block} -> :erlang.binary_to_term(block) end)
|> Enum.sort_by(& &1.index, &>=/2)
end
|> Store.transact(@store_dir)
end
def is_empty? do
Store.is_empty?(@store_dir)
end
end
| 23.166667 | 104 | 0.653777 |
f7f248783f2bb8c6f366237eda9c352725776948 | 278 | ex | Elixir | example/lib/api.ex | koudelka/behaves_like | 516405e05aadd13b1f927a99f11d5e2c62dcb9f6 | [
"MIT"
] | 6 | 2018-06-06T23:45:45.000Z | 2020-01-13T20:34:14.000Z | example/lib/api.ex | koudelka/behaves_like | 516405e05aadd13b1f927a99f11d5e2c62dcb9f6 | [
"MIT"
] | null | null | null | example/lib/api.ex | koudelka/behaves_like | 516405e05aadd13b1f927a99f11d5e2c62dcb9f6 | [
"MIT"
] | 2 | 2018-07-03T23:31:40.000Z | 2018-11-06T20:55:35.000Z | defmodule Example.API do
import BehavesLike, only: [spec_and_callback: 1]
@type id :: binary()
@type result :: Example.Type.t()
@type error :: any()
spec_and_callback get(id) :: {:ok, result} | {:error, error}
def get(id) do
Example.Backend.get(id)
end
end
| 19.857143 | 62 | 0.651079 |
f7f24c5b0535dda5586a1ed06d98cf07c5c5f5dc | 1,694 | ex | Elixir | lib/2021/6.ex | ballpointcarrot/advent-of-code-2021 | a1adde8a13069efedfc7c190d6c71b06c2d15173 | [
"MIT"
] | null | null | null | lib/2021/6.ex | ballpointcarrot/advent-of-code-2021 | a1adde8a13069efedfc7c190d6c71b06c2d15173 | [
"MIT"
] | null | null | null | lib/2021/6.ex | ballpointcarrot/advent-of-code-2021 | a1adde8a13069efedfc7c190d6c71b06c2d15173 | [
"MIT"
] | null | null | null | import AOC
aoc 2021, 6 do
def next_generation(fish) do
next_fish =
Enum.map(fish, fn f ->
case f do
0 -> 6
_ -> f - 1
end
end)
number_of_new_fish = Enum.count(fish, &(&1 == 0))
# thanks, StackOverflow: https://stackoverflow.com/a/41351683
next_fish ++ List.duplicate(8, number_of_new_fish)
end
def p1(testInput \\ nil) do
input = testInput || input_string()
fish =
input
|> String.trim()
|> String.split(",", trim: true)
|> Enum.map(&String.to_integer/1)
Stream.iterate(fish, &next_generation/1)
|> Enum.take(81)
|> List.last()
|> length()
end
def next_generation_count_map(fish_count_map) do
next_day =
Enum.map(fish_count_map, fn {gen, count} ->
{gen - 1, count}
end)
|> Map.new()
next_day
|> Map.update(6, Map.get(next_day, -1), fn existing_value ->
addend = (is_nil(existing_value) && 0) || existing_value
addend + (Map.get(next_day, -1, 0) || 0)
end)
|> Map.update(8, Map.get(next_day, -1), fn existing_value ->
addend = (is_nil(existing_value) && 0) || existing_value
addend + (Map.get(next_day, -1, 0) || 0)
end)
|> Map.delete(-1)
end
def p2(testInput \\ nil) do
input = testInput || input_string()
fish_count_map =
input
|> String.trim()
|> String.split(",", trim: true)
|> Enum.map(&String.to_integer/1)
|> Enum.frequencies()
counts =
Stream.iterate(fish_count_map, &next_generation_count_map/1)
|> Enum.take(257)
|> List.last()
|> Enum.map(fn {_, ct} -> ct end)
|> Enum.sum()
counts
end
end
| 23.527778 | 66 | 0.567296 |
f7f250099d658ba4108ebf23f482ae90734cb6cb | 71 | exs | Elixir | pattern_matching_and_state_machines/test/test_helper.exs | StoiximanServices/blog | b164ae5e8fb4701ee40925aca9aef2297b80be95 | [
"MIT"
] | 10 | 2016-11-28T03:38:36.000Z | 2021-08-24T10:38:38.000Z | pattern_matching_and_state_machines/test/test_helper.exs | StoiximanServices/blog | b164ae5e8fb4701ee40925aca9aef2297b80be95 | [
"MIT"
] | null | null | null | pattern_matching_and_state_machines/test/test_helper.exs | StoiximanServices/blog | b164ae5e8fb4701ee40925aca9aef2297b80be95 | [
"MIT"
] | 3 | 2018-07-11T08:31:41.000Z | 2019-01-24T18:16:44.000Z | ExUnit.start()
Code.require_file "string_formatter_tests.ex", __DIR__
| 17.75 | 54 | 0.816901 |
f7f25c9bc0ae4dcf50b5ccb9b3166b841f451650 | 2,094 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1__label_segment.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1__label_segment.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1__label_segment.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_LabelSegment do
@moduledoc """
Video segment level annotation results for label detection.
## Attributes
* `confidence` (*type:* `number()`, *default:* `nil`) - Confidence that the label is accurate. Range: [0, 1].
* `segment` (*type:* `GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_VideoSegment.t`, *default:* `nil`) - Video segment where a label was detected.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:confidence => number() | nil,
:segment =>
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_VideoSegment.t()
| nil
}
field(:confidence)
field(:segment,
as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_VideoSegment
)
end
defimpl Poison.Decoder,
for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_LabelSegment do
def decode(value, options) do
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_LabelSegment.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1_LabelSegment do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.9 | 179 | 0.751671 |
f7f26e67907af79c9cdcec5cc3904248b2ffaacd | 467 | exs | Elixir | config/test.exs | kuroda/live_dendron | dd21ed5c53456765ea0c50a22bdf915c0c55c92f | [
"MIT"
] | 5 | 2019-07-27T14:57:38.000Z | 2020-11-01T00:26:32.000Z | config/test.exs | kuroda/live_dendron | dd21ed5c53456765ea0c50a22bdf915c0c55c92f | [
"MIT"
] | 6 | 2019-07-29T03:45:23.000Z | 2021-05-10T03:49:39.000Z | config/test.exs | kuroda/live_dendron | dd21ed5c53456765ea0c50a22bdf915c0c55c92f | [
"MIT"
] | 2 | 2019-07-29T02:17:02.000Z | 2020-02-17T17:59:05.000Z | use Mix.Config
# Configure your database
config :live_dendron, LiveDendron.Repo,
username: "postgres",
password: "",
database: "live_dendron_test",
hostname: "db",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :live_dendron, LiveDendronWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 24.578947 | 56 | 0.736617 |
f7f28e29b739f1a9cebf025f8eacc43f852c831a | 29,579 | ex | Elixir | lib/logger/lib/logger.ex | tmikeschu/elixir | ae108c110af3220cd4f729ac25edb06f0f81c884 | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger.ex | tmikeschu/elixir | ae108c110af3220cd4f729ac25edb06f0f81c884 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/logger/lib/logger.ex | tmikeschu/elixir | ae108c110af3220cd4f729ac25edb06f0f81c884 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | defmodule Logger do
@moduledoc ~S"""
A logger for Elixir applications.
It includes many features:
* Provides debug, info, warn, and error levels.
* Supports multiple backends which are automatically
supervised when plugged into `Logger`.
* Formats and truncates messages on the client
to avoid clogging `Logger` backends.
* Alternates between sync and async modes to remain
performant when required but also apply backpressure
when under stress.
* Plugs into Erlang's [`:logger`](http://erlang.org/doc/man/logger.html)
(from Erlang/OTP 21) to convert terms to Elixir syntax or wraps
Erlang's [`:error_logger`](http://erlang.org/doc/man/error_logger.html)
in earlier Erlang/OTP versions to prevent it from overflowing.
Logging is useful for tracking when an event of interest happens in your
system. For example, it may be helpful to log whenever a user is deleted.
def delete_user(user) do
Logger.info "Deleting user from the system: #{inspect(user)}"
# ...
end
The `Logger.info/2` macro emits the provided message at the `:info`
level. Note the arguments given to `info/2` will only be evaluated
if a message is logged. For instance, if the Logger level is
set to `:warn`, `:info` messages are never logged and therefore the
arguments given above won't even be executed.
There are additional macros for other levels.
Logger also allows log commands to be removed altogether via the
`:compile_time_purge_matching` option (see below).
For dynamically logging messages, see `bare_log/3`. But note that
`bare_log/3` always evaluates its arguments (unless the argument
is an anonymous function).
## Levels
The supported levels are:
* `:debug` - for debug-related messages
* `:info` - for information of any kind
* `:warn` - for warnings
* `:error` - for errors
## Configuration
`Logger` supports a wide range of configurations.
This configuration is split in three categories:
* Application configuration - must be set before the `:logger`
application is started
* Runtime configuration - can be set before the `:logger`
application is started, but may be changed during runtime
* Erlang configuration - options that handle integration with
Erlang's logging facilities
### Application configuration
The following configuration must be set via config files (such as
`config/config.exs`) before the `:logger` application is started.
* `:backends` - the backends to be used. Defaults to `[:console]`.
See the "Backends" section for more information.
* `:compile_time_application` - sets the `:application` metadata value
to the configured value at compilation time. This configuration is
usually only useful for build tools to automatically add the
application to the metadata for `Logger.debug/2`, `Logger.info/2`, etc.
style of calls.
* `:compile_time_purge_matching` - purges *at compilation time* all calls
that match the given conditions. This means that `Logger` calls with
level lower than this option will be completely removed at compile time,
accruing no overhead at runtime. This configuration expects a list of
keyword lists. Each keyword list contains a metadata key and the matching
value that should be purged. A special key named `:level_lower_than` can
be used to purge all messages with a lower logger level. Remember that
if you want to purge log calls from a dependency, the dependency must be
recompiled.
For example, to configure the `:backends` and purge all calls that happen
at compile time with level lower than `:info` in a `config/config.exs` file:
config :logger,
backends: [:console],
compile_time_purge_matching: [
[level_lower_than: :info]
]
If you want to purge all log calls from an application named `:foo` and only
keep errors from `Bar.foo/3`, you can set up two different matches:
config :logger,
compile_time_purge_matching: [
[application: :foo],
[module: Bar, function: "foo/3", level_lower_than: :error]
]
### Runtime Configuration
All configuration below can be set via config files (such as
`config/config.exs`) but also changed dynamically during runtime via
`Logger.configure/1`.
* `:level` - the logging level. Attempting to log any message
with severity less than the configured level will simply
cause the message to be ignored. Keep in mind that each backend
may have its specific level, too.
* `:utc_log` - when `true`, uses UTC in logs. By default it uses
local time (i.e., it defaults to `false`).
* `:truncate` - the maximum message size to be logged (in bytes).
Defaults to 8192 bytes. Note this configuration is approximate.
Truncated messages will have `" (truncated)"` at the end.
The atom `:infinity` can be passed to disable this behavior.
* `:sync_threshold` - if the `Logger` manager has more than
`:sync_threshold` messages in its queue, `Logger` will change
to *sync mode*, to apply backpressure to the clients.
`Logger` will return to *async mode* once the number of messages
in the queue is reduced to `sync_threshold * 0.75` messages.
Defaults to 20 messages. `:sync_threshold` can be set to `0` to force *sync mode*.
* `:discard_threshold` - if the `Logger` manager has more than
`:discard_threshold` messages in its queue, `Logger` will change
to *discard mode* and messages will be discarded directly in the
clients. `Logger` will return to *sync mode* once the number of
messages in the queue is reduced to `discard_threshold * 0.75`
messages. Defaults to 500 messages.
* `:translator_inspect_opts` - when translating OTP reports and
errors, the last message and state must be inspected in the
error reports. This configuration allow developers to change
how much and how the data should be inspected.
For example, to configure the `:level` and `:truncate` options in a
`config/config.exs` file:
config :logger,
level: :warn,
truncate: 4096
### Error logger configuration
The following configuration applies to `Logger`'s wrapper around
Erlang's logging functionalities. All the configurations below must
be set before the `:logger` application starts.
* `:handle_otp_reports` - redirects OTP reports to `Logger` so
they are formatted in Elixir terms. This effectively disables
Erlang standard logger. Defaults to `true`.
* `:handle_sasl_reports` - redirects supervisor, crash and
progress reports to `Logger` so they are formatted in Elixir
terms. Your application must guarantee `:sasl` is started before
`:logger`. This means you may see some initial reports written
in Erlang syntax until the Logger application kicks in.
Defaults to `false`.
From Erlang/OTP 21, `:handle_sasl_reports` only has an effect if
`:handle_otp_reports` is true.
The following configurations apply only for Erlang/OTP 20 and earlier:
* `:discard_threshold_for_error_logger` - if `:error_logger` has more than
`discard_threshold` messages in its inbox, messages will be dropped
until the message queue goes down to `discard_threshold * 0.75`
entries. The threshold will be checked once again after 10% of threshold
messages are processed, to avoid messages from being constantly dropped.
For example, if the threshold is 500 (the default) and the inbox has
  600 messages, 225 messages will be dropped, bringing the inbox down to
375 (0.75 * threshold) entries and 50 (0.1 * threshold) messages will
be processed before the threshold is checked once again.
For example, to configure `Logger` to redirect all Erlang messages using a
`config/config.exs` file:
config :logger,
handle_otp_reports: true,
handle_sasl_reports: true
Furthermore, `Logger` allows messages sent by Erlang to be translated
into an Elixir format via translators. Translators can be added at any
time with the `add_translator/1` and `remove_translator/1` APIs. Check
`Logger.Translator` for more information.
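  For example, a custom translator (a hypothetical `MyTranslator` module
  exposing a `translate/4` function with the same signature as
  `Logger.Translator.translate/4`) can be registered at runtime with:
      Logger.add_translator({MyTranslator, :translate})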
## Backends
`Logger` supports different backends where log messages are written to.
The available backends by default are:
* `:console` - logs messages to the console (enabled by default)
Developers may also implement their own backends, an option that
is explored in more detail below.
The initial backends are loaded via the `:backends` configuration,
which must be set before the `:logger` application is started.
### Console backend
The console backend logs messages by printing them to the console. It supports
the following options:
* `:level` - the level to be logged by this backend.
Note that messages are filtered by the general
`:level` configuration for the `:logger` application first.
* `:format` - the format message used to print logs.
Defaults to: `"\n$time $metadata[$level] $levelpad$message\n"`.
It may also be a `{module, function}` tuple that is invoked
with the log level, the message, the current timestamp and
the metadata.
* `:metadata` - the metadata to be printed by `$metadata`.
Defaults to an empty list (no metadata).
Setting `:metadata` to `:all` prints all metadata. See
the "Metadata" section for more information.
* `:colors` - a keyword list of coloring options.
* `:device` - the device to log error messages to. Defaults to
`:user` but can be changed to something else such as `:standard_error`.
* `:max_buffer` - maximum events to buffer while waiting
for a confirmation from the IO device (default: 32).
Once the buffer is full, the backend will block until
a confirmation is received.
The supported keys in the `:colors` keyword list are:
* `:enabled` - boolean value that allows for switching the
coloring on and off. Defaults to: `IO.ANSI.enabled?/0`
* `:debug` - color for debug messages. Defaults to: `:cyan`
* `:info` - color for info messages. Defaults to: `:normal`
* `:warn` - color for warn messages. Defaults to: `:yellow`
* `:error` - color for error messages. Defaults to: `:red`
See the `IO.ANSI` module for a list of colors and attributes.
Here is an example of how to configure the `:console` backend in a
`config/config.exs` file:
config :logger, :console,
format: "\n$time $metadata[$level] $levelpad$message\n",
metadata: [:user_id]
## Metadata
In addition to the keys provided by the user via `Logger.metadata/1`,
the following extra keys are available to the `:metadata` list:
* `:application` - the current application
* `:module` - the current module
* `:function` - the current function
* `:file` - the current file
* `:line` - the current line
* `:pid` - the current process identifier
* `:crash_reason` - a two-element tuple with the throw/error/exit reason
as first argument and the stacktrace as second. A throw will always be
`{:nocatch, term}`. An error is always an `Exception` struct. All other
entries are exits. The console backend ignores this metadata by default
but it can be useful to other backends, such as the ones that report
errors to third-party services
* `:initial_call` - the initial call that started the process
* `:registered_name` - the process registered name as an atom
Note that all metadata is optional and may not always be available.
The `:module`, `:function`, `:line`, and similar metadata are automatically
included when using `Logger` macros. `Logger.bare_log/3` does not include
any metadata beyond the `:pid` by default. Other metadata, such as
`:crash_reason`, `:initial_call`, and `:registered_name` are extracted
from Erlang/OTP crash reports and available only in those cases.
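  For example, metadata attached via `Logger.metadata/1` is included in every
  subsequent message logged by the same process (the key below is purely
  illustrative and must also be listed in the backend's `:metadata` option in
  order to be printed):
      Logger.metadata(request_id: "req-123")
      Logger.info("request handled")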
### Custom formatting
The console backend allows you to customize the format of your log messages
with the `:format` option.
You may set `:format` to either a string or a `{module, function}` tuple if
you wish to provide your own format function. Here is an example of how to
configure the `:console` backend in a `config/config.exs` file:
config :logger, :console,
format: {MyConsoleLogger, :format}
And here is an example of how you can define `MyConsoleLogger.format/4` from the
above configuration:
defmodule MyConsoleLogger do
def format(level, message, timestamp, metadata) do
# Custom formatting logic...
end
end
It is extremely important that **the formatting function does not fail**, as
it will bring that particular logger instance down, causing your system to
temporarily lose messages. If necessary, wrap the function in a `rescue` and
log a default message instead:
defmodule MyConsoleLogger do
def format(level, message, timestamp, metadata) do
# Custom formatting logic...
rescue
_ -> "could not format: #{inspect({level, message, metadata})}"
end
end
The `{module, function}` will be invoked with four arguments:
* the log level: an atom
* the message: this is usually chardata, but in some cases it may not be.
Since the formatting function should *never* fail, you need to prepare for
the message being anything (and do something like the `rescue` in the example
above)
* the current timestamp: a term of type `t:Logger.Formatter.time/0`
* the metadata: a keyword list
You can read more about formatting in `Logger.Formatter`.
### Custom backends
Any developer can create their own `Logger` backend.
Since `Logger` is an event manager powered by `:gen_event`,
writing a new backend is a matter of creating an event
handler, as described in the [`:gen_event`](http://erlang.org/doc/man/gen_event.html)
documentation.
From now on, we will be using the term "event handler" to refer
to your custom backend, as we head into implementation details.
Once the `:logger` application starts, it installs all event handlers listed under
the `:backends` configuration into the `Logger` event manager. The event
manager and all added event handlers are automatically supervised by `Logger`.
Once initialized, the handler should be designed to handle events
in the following format:
{level, group_leader, {Logger, message, timestamp, metadata}} | :flush
where:
* `level` is one of `:debug`, `:info`, `:warn`, or `:error`, as previously
described
* `group_leader` is the group leader of the process which logged the message
* `{Logger, message, timestamp, metadata}` is a tuple containing information
about the logged message:
* the first element is always the atom `Logger`
* `message` is the actual message (as chardata)
* `timestamp` is the timestamp for when the message was logged, as a
`{{year, month, day}, {hour, minute, second, millisecond}}` tuple
* `metadata` is a keyword list of metadata used when logging the message
It is recommended that handlers ignore messages where
the group leader is in a different node than the one where
the handler is installed. For example:
def handle_event({_level, gl, {Logger, _, _, _}}, state)
when node(gl) != node() do
{:ok, state}
end
In the case of the event `:flush` handlers should flush any pending data. This
event is triggered by `flush/0`.
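  A buffering backend might handle it with a clause along these lines, where
  `flush_buffer/1` stands in for whatever backend-specific work drains the
  buffer:
      def handle_event(:flush, state) do
        {:ok, flush_buffer(state)}
      end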
Furthermore, backends can be configured via the
`configure_backend/2` function which requires event handlers
to handle calls of the following format:
{:configure, options}
where `options` is a keyword list. The result of the call is
the result returned by `configure_backend/2`. The recommended
return value for successful configuration is `:ok`.
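  A minimal clause for this call could look like the following, where
  `configure/2` is a placeholder for however the backend merges the new
  options into its state:
      def handle_call({:configure, options}, state) do
        {:ok, :ok, configure(options, state)}
      end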
It is recommended that backends support at least the following
configuration options:
* `:level` - the logging level for that backend
* `:format` - the logging format for that backend
* `:metadata` - the metadata to include in that backend
Check the implementation for `Logger.Backends.Console`, for
examples on how to handle the recommendations in this section
and how to process the existing options.
"""
@type backend :: :gen_event.handler()
@type message :: IO.chardata() | String.Chars.t()
@type level :: :error | :info | :warn | :debug
@type metadata :: keyword()
@levels [:error, :info, :warn, :debug]
@metadata :logger_metadata
@compile {:inline, __metadata__: 0}
defp __metadata__ do
Process.get(@metadata) || {true, []}
end
@doc """
  Alters the current process metadata according to the given keyword list.
This function will merge the given keyword list into the existing metadata,
with the exception of setting a key to `nil`, which will remove that key
from the metadata.
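  ## Examples
  Assuming no other metadata has been set in the current process:
      Logger.metadata(user_id: 13)
      Logger.metadata()
      #=> [user_id: 13]
      Logger.metadata(user_id: nil)
      Logger.metadata()
      #=> []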
"""
@spec metadata(metadata) :: :ok
def metadata(keyword) do
{enabled?, metadata} = __metadata__()
Process.put(@metadata, {enabled?, into_metadata(keyword, metadata)})
:ok
end
defp into_metadata([], metadata), do: metadata
defp into_metadata(keyword, metadata), do: into_metadata(keyword, [], metadata)
defp into_metadata([{key, nil} | keyword], prepend, metadata) do
into_metadata(keyword, prepend, :lists.keydelete(key, 1, metadata))
end
defp into_metadata([{key, _} = pair | keyword], prepend, metadata) do
into_metadata(keyword, [pair | prepend], :lists.keydelete(key, 1, metadata))
end
defp into_metadata([], prepend, metadata) do
prepend ++ metadata
end
@doc """
Reads the current process metadata.
"""
@spec metadata() :: metadata
def metadata() do
__metadata__() |> elem(1)
end
@doc """
Resets the current process metadata to the given keyword list.
"""
@spec reset_metadata(metadata) :: :ok
def reset_metadata(keywords \\ []) do
{enabled?, _metadata} = __metadata__()
Process.put(@metadata, {enabled?, []})
metadata(keywords)
end
@doc """
Enables logging for the current process.
Currently the only accepted PID is `self()`.
"""
@spec enable(pid) :: :ok
def enable(pid) when pid == self() do
Process.put(@metadata, {true, metadata()})
:ok
end
@doc """
Disables logging for the current process.
Currently the only accepted PID is `self()`.
"""
@spec disable(pid) :: :ok
def disable(pid) when pid == self() do
Process.put(@metadata, {false, metadata()})
:ok
end
@doc """
Retrieves the `Logger` level.
The `Logger` level can be changed via `configure/1`.
"""
@spec level() :: level
def level() do
%{level: level} = Logger.Config.__data__()
level
end
@doc """
Compares log levels.
Receives two log levels and compares the `left` level
against the `right` level and returns:
* `:lt` if `left` is less than `right`
* `:eq` if `left` and `right` are equal
* `:gt` if `left` is greater than `right`
## Examples
iex> Logger.compare_levels(:debug, :warn)
:lt
iex> Logger.compare_levels(:error, :info)
:gt
"""
@spec compare_levels(level, level) :: :lt | :eq | :gt
def compare_levels(level, level) do
:eq
end
def compare_levels(left, right) do
if level_to_number(left) > level_to_number(right), do: :gt, else: :lt
end
defp level_to_number(:debug), do: 0
defp level_to_number(:info), do: 1
defp level_to_number(:warn), do: 2
defp level_to_number(:error), do: 3
@doc """
Configures the logger.
See the "Runtime Configuration" section in the `Logger` module
documentation for the available options.
"""
@valid_options [
:compile_time_application,
:compile_time_purge_level,
:compile_time_purge_matching,
:sync_threshold,
:truncate,
:level,
:utc_log,
:discard_threshold,
:translator_inspect_opts
]
@spec configure(keyword) :: :ok
def configure(options) do
Logger.Config.configure(Keyword.take(options, @valid_options))
end
@doc """
Flushes the logger.
This guarantees all messages sent to `Logger` prior to this call will
be processed. This is useful for testing and it should not be called
in production code.
"""
@spec flush :: :ok
def flush do
_ = Process.whereis(:error_logger) && :gen_event.which_handlers(:error_logger)
:gen_event.sync_notify(Logger, :flush)
end
@doc """
Adds a new backend.
## Options
* `:flush` - when `true`, guarantees all messages currently sent
to `Logger` are processed before the backend is added
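  ## Examples
  Assuming `MyCustomBackend` is a module implementing the backend contract
  described in the module documentation:
      Logger.add_backend(MyCustomBackend, flush: true)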
"""
@spec add_backend(backend, keyword) :: Supervisor.on_start_child()
def add_backend(backend, opts \\ []) do
_ = if opts[:flush], do: flush()
case Logger.WatcherSupervisor.watch(Logger, Logger.Config.translate_backend(backend), backend) do
{:ok, _} = ok ->
Logger.Config.add_backend(backend)
ok
{:error, {:already_started, _pid}} ->
{:error, :already_present}
{:error, _} = error ->
error
end
end
@doc """
Removes a backend.
## Options
* `:flush` - when `true`, guarantees all messages currently sent
to `Logger` are processed before the backend is removed
"""
@spec remove_backend(backend, keyword) :: :ok | {:error, term}
def remove_backend(backend, opts \\ []) do
_ = if opts[:flush], do: flush()
Logger.Config.remove_backend(backend)
Logger.WatcherSupervisor.unwatch(Logger, Logger.Config.translate_backend(backend))
end
@doc """
Adds a new translator.
"""
@spec add_translator({module, function :: atom}) :: :ok
def add_translator({mod, fun} = translator) when is_atom(mod) and is_atom(fun) do
Logger.Config.add_translator(translator)
end
@doc """
Removes a translator.
"""
@spec remove_translator({module, function :: atom}) :: :ok
def remove_translator({mod, fun} = translator) when is_atom(mod) and is_atom(fun) do
Logger.Config.remove_translator(translator)
end
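  # Illustrative usage (editor's example): a translator is a {module, function}
  # pair; `MyApp.Translator.translate/4` is hypothetical and must follow the
  # Logger translator contract.
  #
  #     Logger.add_translator({MyApp.Translator, :translate})
  #     Logger.remove_translator({MyApp.Translator, :translate})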
@doc """
Configures the given backend.
The backend needs to be started and running in order to
be configured at runtime.
"""
@spec configure_backend(backend, keyword) :: term
def configure_backend(backend, options) when is_list(options) do
:gen_event.call(Logger, Logger.Config.translate_backend(backend), {:configure, options})
end
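  # Illustrative usage (editor's example): reconfiguring the built-in console
  # backend at runtime.
  #
  #     Logger.configure_backend(:console, level: :error, metadata: [:request_id])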
@doc """
Logs a message dynamically.
  Unlike `log/3`, `debug/2`, `info/2`, and friends, the arguments
given to `bare_log/3` are always evaluated. However, you can pass
anonymous functions to `bare_log/3` and they will only be evaluated
if there is something to be logged.
"""
@spec bare_log(level, message | (() -> message | {message, keyword}), keyword) ::
:ok | {:error, :noproc} | {:error, term}
def bare_log(level, chardata_or_fun, metadata \\ []) do
case __should_log__(level) do
:error -> :ok
info -> __do_log__(info, chardata_or_fun, metadata)
end
end
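  # Illustrative usage (editor's example): the anonymous function is evaluated
  # only when the message will actually be logged, so the expensive inspect is
  # skipped for discarded levels. `big_state` is a stand-in variable.
  #
  #     Logger.bare_log(:debug, fn -> {"state: #{inspect(big_state)}", [dynamic: true]} end)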
@doc false
def __should_log__(level) when level in @levels do
case __metadata__() do
{true, pdict} ->
%{mode: mode, level: min_level} = config = Logger.Config.__data__()
if compare_levels(level, min_level) != :lt and mode != :discard do
{level, config, pdict}
else
:error
end
{false, _} ->
:error
end
end
@doc false
def __do_log__({level, config, pdict}, chardata_or_fun, metadata) when is_list(metadata) do
%{utc_log: utc_log?, truncate: truncate, mode: mode} = config
metadata = [pid: self()] ++ into_metadata(metadata, pdict)
case normalize_message(chardata_or_fun, metadata) do
{message, metadata} ->
tuple = {Logger, truncate(message, truncate), Logger.Utils.timestamp(utc_log?), metadata}
try do
notify(mode, {level, Process.group_leader(), tuple})
:ok
rescue
ArgumentError -> {:error, :noproc}
catch
:exit, reason -> {:error, reason}
end
:skip ->
:ok
end
end
@doc """
Logs a warning message.
Returns `:ok` or an `{:error, reason}` tuple.
## Examples
Logger.warn "knob turned too far to the right"
Logger.warn fn -> "dynamically calculated warning" end
Logger.warn fn -> {"dynamically calculated warning", [additional: :metadata]} end
"""
defmacro warn(chardata_or_fun, metadata \\ []) do
maybe_log(:warn, chardata_or_fun, metadata, __CALLER__)
end
@doc """
Logs an info message.
Returns `:ok` or an `{:error, reason}` tuple.
## Examples
Logger.info "mission accomplished"
Logger.info fn -> "dynamically calculated info" end
Logger.info fn -> {"dynamically calculated info", [additional: :metadata]} end
"""
defmacro info(chardata_or_fun, metadata \\ []) do
maybe_log(:info, chardata_or_fun, metadata, __CALLER__)
end
@doc """
Logs an error message.
Returns `:ok` or an `{:error, reason}` tuple.
## Examples
Logger.error "oops"
Logger.error fn -> "dynamically calculated error" end
Logger.error fn -> {"dynamically calculated error", [additional: :metadata]} end
"""
defmacro error(chardata_or_fun, metadata \\ []) do
maybe_log(:error, chardata_or_fun, metadata, __CALLER__)
end
@doc """
Logs a debug message.
Returns `:ok` or an `{:error, reason}` tuple.
## Examples
Logger.debug "hello?"
Logger.debug fn -> "dynamically calculated debug" end
Logger.debug fn -> {"dynamically calculated debug", [additional: :metadata]} end
"""
defmacro debug(chardata_or_fun, metadata \\ []) do
maybe_log(:debug, chardata_or_fun, metadata, __CALLER__)
end
@doc """
Logs a message with the given `level`.
Returns `:ok` or an `{:error, reason}` tuple.
The macros `debug/2`, `warn/2`, `info/2`, and `error/2` are
preferred over this macro as they can automatically eliminate
the call to `Logger` altogether at compile time if desired
(see the documentation for the `Logger` module).
"""
defmacro log(level, chardata_or_fun, metadata \\ []) do
macro_log(level, chardata_or_fun, metadata, __CALLER__)
end
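  # Illustrative usage (editor's example): picking the level at runtime. When
  # the level is not a compile-time atom, compile-time purging treats it as
  # :error, as seen in macro_log/4 below. `queue_len` is a stand-in variable.
  #
  #     level = if queue_len > 100, do: :warn, else: :info
  #     Logger.log(level, "queue length: #{queue_len}")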
defp macro_log(level, data, metadata, caller) do
%{module: module, function: fun, file: file, line: line} = caller
caller =
compile_time_application_and_file(file) ++
[module: module, function: form_fa(fun), line: line]
{compile_metadata, quoted_metadata} =
if Keyword.keyword?(metadata) do
metadata = Keyword.merge(caller, metadata)
{metadata, metadata}
else
{metadata,
quote do
Keyword.merge(unquote(caller), unquote(metadata))
end}
end
compile_level = if is_atom(level), do: level, else: :error
if compile_time_purge_matching?(compile_level, compile_metadata) do
no_log(data, quoted_metadata)
else
quote do
case Logger.__should_log__(unquote(level)) do
:error -> :ok
info -> Logger.__do_log__(info, unquote(data), unquote(quoted_metadata))
end
end
end
end
defp compile_time_application_and_file(file) do
if app = Application.get_env(:logger, :compile_time_application) do
[application: app, file: Path.relative_to_cwd(file)]
else
[file: file]
end
end
defp compile_time_purge_matching?(level, compile_metadata) do
matching = Application.get_env(:logger, :compile_time_purge_matching, [])
Enum.any?(matching, fn filter ->
Enum.all?(filter, fn
{:level_lower_than, min_level} ->
compare_levels(level, min_level) == :lt
{k, v} when is_atom(k) ->
Keyword.fetch(compile_metadata, k) == {:ok, v}
_ ->
raise "expected :compile_time_purge_matching to be a list of keyword lists, " <>
"got: #{inspect(matching)}"
end)
end)
end
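  # Illustrative configuration (editor's example) matched by the filter logic
  # above, typically placed in config/config.exs; the module and function names
  # are hypothetical:
  #
  #     config :logger,
  #       compile_time_purge_matching: [
  #         [level_lower_than: :info],
  #         [module: MyApp.NoisyModule, function: "handle_info/2"]
  #       ]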
# TODO: Either deprecate compile_time_purge_level in favor of
# compile_time_purge_matching or document it again on 1.9 based
# on feedback
defp maybe_log(level, data, metadata, caller) do
min_level = Application.get_env(:logger, :compile_time_purge_level, :debug)
if compare_levels(level, min_level) != :lt do
macro_log(level, data, metadata, caller)
else
no_log(data, metadata)
end
end
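  # Illustrative configuration (editor's example) for the legacy option handled
  # above; calls below :info are pruned at compile time:
  #
  #     config :logger, compile_time_purge_level: :info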
defp no_log(data, metadata) do
# We wrap the contents in an anonymous function
# to avoid unused variable warnings.
quote do
_ = fn -> {unquote(data), unquote(metadata)} end
:ok
end
end
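  # Resolves a lazy message: a zero-arity function may return the message, a
  # {message, metadata} tuple, or :skip to avoid logging altogether.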
defp normalize_message(fun, metadata) when is_function(fun, 0) do
case fun.() do
{message, fun_metadata} -> {message, into_metadata(fun_metadata, metadata)}
:skip -> :skip
message -> {message, metadata}
end
end
defp normalize_message(message, metadata) do
{message, metadata}
end
defp truncate(data, n) when is_list(data) or is_binary(data), do: Logger.Utils.truncate(data, n)
defp truncate(data, n), do: Logger.Utils.truncate(to_string(data), n)
defp form_fa({name, arity}) do
Atom.to_string(name) <> "/" <> Integer.to_string(arity)
end
defp form_fa(nil), do: nil
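  # Dispatches to the :gen_event manager either synchronously (applying
  # backpressure) or asynchronously, depending on the current Logger mode.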
defp notify(:sync, msg), do: :gen_event.sync_notify(Logger, msg)
defp notify(:async, msg), do: :gen_event.notify(Logger, msg)
end
| 33.460407 | 101 | 0.68312 |
f7f28f8d6f06e69a0c8c0059520123c4ef5fc2c2 | 420 | ex | Elixir | lib/phoenix_example/b_table_context/b_table.ex | getong/phoenix_example | cec4ba1ab2d601a3d4c709d3b77b1284f28ae36b | [
"Apache-2.0"
] | null | null | null | lib/phoenix_example/b_table_context/b_table.ex | getong/phoenix_example | cec4ba1ab2d601a3d4c709d3b77b1284f28ae36b | [
"Apache-2.0"
] | 6 | 2020-07-20T14:24:28.000Z | 2022-03-29T02:25:55.000Z | lib/phoenix_example/b_table_context/b_table.ex | getong/phoenix_example | cec4ba1ab2d601a3d4c709d3b77b1284f28ae36b | [
"Apache-2.0"
] | null | null | null |
defmodule PhoenixExample.BTableContext.BTable do
@moduledoc """
  The BTable schema
"""
use Ecto.Schema
import Ecto.Changeset
schema "b_table" do
field :age, :integer
field :name, :string
# timestamps()
timestamps(type: :utc_datetime_usec)
end
@doc false
def changeset(b_table, attrs) do
b_table
|> cast(attrs, [:name, :age])
|> validate_required([:name, :age])
end
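  # Illustrative usage (editor's example; `Repo` stands for the application's
  # Ecto repo and is an assumption, as is the alias to this module):
  #
  #     %BTable{}
  #     |> BTable.changeset(%{name: "example", age: 1})
  #     |> Repo.insert()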
end
| 17.5 | 48 | 0.657143 |
f7f29abda0f8c2c841e76fce7e5ec5639d693a46 | 708 | ex | Elixir | lib/mastani_server/accounts/purchase.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | lib/mastani_server/accounts/purchase.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | lib/mastani_server/accounts/purchase.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null |
defmodule MastaniServer.Accounts.Purchase do
@moduledoc false
alias __MODULE__
use Ecto.Schema
import Ecto.Changeset
alias MastaniServer.Accounts.User
@required_fields ~w(user_id)a
@optional_fields ~w(theme community_chart brainwash_free)a
@type t :: %Purchase{}
schema "purchases" do
belongs_to(:user, User)
field(:theme, :boolean)
field(:community_chart, :boolean)
field(:brainwash_free, :boolean)
timestamps(type: :utc_datetime)
end
@doc false
def changeset(%Purchase{} = purchase, attrs) do
purchase
|> cast(attrs, @optional_fields ++ @required_fields)
|> validate_required(@required_fields)
|> foreign_key_constraint(:user_id)
end
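  # Illustrative usage (editor's example; `Repo` is an assumed Ecto repo module):
  #
  #     %Purchase{}
  #     |> Purchase.changeset(%{user_id: 1, theme: true})
  #     |> Repo.insert()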
end
| 22.83871 | 60 | 0.720339 |
f7f29fe8490d119b384c4b3ebe5a92fdd78f8853 | 606 | exs | Elixir | test/absinthe/integration/execution/aliases/with_errors_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 2 | 2021-04-22T23:45:04.000Z | 2021-05-07T01:01:15.000Z | test/absinthe/integration/execution/aliases/with_errors_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 2 | 2019-03-07T00:26:03.000Z | 2019-08-19T17:30:30.000Z | test/absinthe/integration/execution/aliases/with_errors_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 1 | 2019-01-18T20:49:03.000Z | 2019-01-18T20:49:03.000Z |
defmodule Elixir.Absinthe.Integration.Execution.Aliases.WithErrorsTest do
use Absinthe.Case, async: true
@query """
mutation { foo: failingThing(type: WITH_CODE) { name } }
"""
test "scenario #1" do
assert {:ok,
%{
data: %{"foo" => nil},
errors: [
%{
code: 42,
message: "Custom Error",
path: ["foo"],
locations: [%{column: 12, line: 1}]
}
]
}} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, [])
end
end
| 26.347826 | 80 | 0.465347 |
f7f2c5b9b058edb5f57c3f6c3d6c6d72604ad26c | 476 | ex | Elixir | priv/templates/autox.infer.models/model.ex | autoxjs/autox-phoenix | 6446f4487e3af28955f6560973cff6add34be4d4 | [
"MIT"
] | null | null | null | priv/templates/autox.infer.models/model.ex | autoxjs/autox-phoenix | 6446f4487e3af28955f6560973cff6add34be4d4 | [
"MIT"
] | 20 | 2016-04-05T06:28:58.000Z | 2016-05-12T15:45:37.000Z | priv/templates/autox.infer.models/model.ex | foxnewsnetwork/autox | 66ea3f0f7ba8b3f9e910984a2ed3cdf0ef5ef29a | [
"MIT"
] | null | null | null |
defmodule <%= base %>.<%= model %> do
use <%= base %>.Web, :model
schema "<%= collection_name %>" do
timestamps
end
@create_fields ~w()
@update_fields @create_fields
@optional_fields ~w()
def create_changeset(model, params \\ :empty) do
model
|> cast(params, @create_fields, @optional_fields)
end
def update_changeset(model, params \\ :empty) do
create_changeset(model, params)
end
def delete_changeset(model, _) do
model
end
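  # Illustrative usage of a module generated from this template (editor's
  # example; the module and repo names are hypothetical expansions of the EEx
  # bindings above):
  #
  #     %MyApp.Post{}
  #     |> MyApp.Post.create_changeset(%{})
  #     |> MyApp.Repo.insert()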
end
 | 19.04 | 53 | 0.661765 |