# =============================================================================
# File: clients/safe_browsing/lib/google_api/safe_browsing/v4/model/threat_list_descriptor.ex
# Repo: GoNZooo/elixir-google-api (Apache-2.0)
# =============================================================================

# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor do
@moduledoc """
Describes an individual threat list. A list is defined by three parameters: the type of threat posed, the type of platform targeted by the threat, and the type of entries in the list.
## Attributes
- platformType (String): The platform type targeted by the list's entries. Defaults to: `null`.
- Enum - one of [PLATFORM_TYPE_UNSPECIFIED, WINDOWS, LINUX, ANDROID, OSX, IOS, ANY_PLATFORM, ALL_PLATFORMS, CHROME]
- threatEntryType (String): The entry types contained in the list. Defaults to: `null`.
- Enum - one of [THREAT_ENTRY_TYPE_UNSPECIFIED, URL, EXECUTABLE, IP_RANGE, CHROME_EXTENSION, FILENAME, CERT]
- threatType (String): The threat type posed by the list's entries. Defaults to: `null`.
- Enum - one of [THREAT_TYPE_UNSPECIFIED, MALWARE, SOCIAL_ENGINEERING, UNWANTED_SOFTWARE, POTENTIALLY_HARMFUL_APPLICATION, SOCIAL_ENGINEERING_INTERNAL, API_ABUSE, MALICIOUS_BINARY, CSD_WHITELIST, CSD_DOWNLOAD_WHITELIST, CLIENT_INCIDENT, CLIENT_INCIDENT_WHITELIST, APK_MALWARE_OFFLINE, SUBRESOURCE_FILTER]
"""
defstruct [
:"platformType",
:"threatEntryType",
:"threatType"
]
end
defimpl Poison.Decoder, for: GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor do
def encode(value, options) do
GoogleApi.SafeBrowsing.V4.Deserializer.serialize_non_nil(value, options)
end
end
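# Usage sketch (not part of the original file): decoding an API response into
# this struct with Poison. The JSON payload below is hypothetical.
#
#     json = ~s({"threatType":"MALWARE","platformType":"ANY_PLATFORM","threatEntryType":"URL"})
#     Poison.decode!(json, as: %GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor{})
#     #=> %GoogleApi.SafeBrowsing.V4.Model.ThreatListDescriptor{platformType: "ANY_PLATFORM",
#     #     threatEntryType: "URL", threatType: "MALWARE"}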
# =============================================================================
# File: config/prod.exs
# Repo: documents-org/documents.design-el (MIT)
# =============================================================================

use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :documents_design, DocumentsDesignWeb.Endpoint,
http: [:inet6, port: System.get_env("PORT") || 4000],
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :documents_design, DocumentsDesignWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# :inet6,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :documents_design, DocumentsDesignWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases (distillery)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :documents_design, DocumentsDesignWeb.Endpoint, server: true
#
# Note you can't rely on `System.get_env/1` when using releases.
# See the releases documentation accordingly.
# Finally import the config/prod.secret.exs which should be versioned
# separately.
import_config "prod.secret.exs"
# =============================================================================
# File: clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/list_liens_response.ex
# Repo: nuxlli/elixir-google-api (Apache-2.0)
# =============================================================================

# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudResourceManager.V1.Model.ListLiensResponse do
@moduledoc """
The response message for Liens.ListLiens.
## Attributes
- liens ([Lien]): A list of Liens. Defaults to: `null`.
- nextPageToken (String.t): Token to retrieve the next page of results, or empty if there are no more results in the list. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:liens => list(GoogleApi.CloudResourceManager.V1.Model.Lien.t()),
:nextPageToken => any()
}
field(:liens, as: GoogleApi.CloudResourceManager.V1.Model.Lien, type: :list)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.CloudResourceManager.V1.Model.ListLiensResponse do
def decode(value, options) do
GoogleApi.CloudResourceManager.V1.Model.ListLiensResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudResourceManager.V1.Model.ListLiensResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
# =============================================================================
# File: apps/snitch_core/lib/core/data/model/zone/state_zone.ex
# Repo: saurabharch/avia (MIT)
# =============================================================================

defmodule Snitch.Data.Model.StateZone do
@moduledoc """
StateZone API
"""
use Snitch.Data.Model
use Snitch.Data.Model.Zone
import Ecto.Query
alias Snitch.Data.Model.Zone, as: ZoneModel
alias Snitch.Data.Schema.{State, StateZoneMember, Zone}
@doc """
Creates a new state `Zone` whose members are `state_ids`.
`state_ids` is a list of primary keys of the `Snitch.Data.Schema.State`s that
make up this zone. Duplicate IDs are ignored.
## Note
The list of `StateZoneMember.t` is put in `zone.members`.
"""
@spec create(String.t(), String.t(), [non_neg_integer]) :: term
def create(name, description, state_ids) do
zone_params = %{name: name, description: description, zone_type: "S"}
zone_changeset = Zone.create_changeset(%Zone{}, zone_params)
multi = ZoneModel.creation_multi(zone_changeset, state_ids)
case Repo.transaction(multi) do
{:ok, %{zone: zone, members: members}} -> {:ok, %{zone | members: members}}
error -> error
end
end
@spec delete(non_neg_integer | Zone.t()) ::
{:ok, Zone.t()} | {:error, Ecto.Changeset.t()} | {:error, :not_found}
def delete(id_or_instance) do
QH.delete(Zone, id_or_instance, Repo)
end
@spec get(map | non_neg_integer) :: Zone.t() | nil
def get(query_fields_or_primary_key) do
QH.get(Zone, query_fields_or_primary_key, Repo)
end
@spec get_all() :: [Zone.t()]
def get_all, do: Repo.all(from(z in Zone, where: z.zone_type == "S"))
@doc """
Returns the list of `State` IDs that make up this zone.
"""
@spec member_ids(Zone.t()) :: [non_neg_integer]
def member_ids(zone) do
zone |> members() |> Enum.into([], fn x -> x.id end)
end
@doc """
Returns the list of `State` structs that make up this zone
"""
@spec members(Zone.t()) :: [State.t()]
def members(zone) do
query =
from(
s in State,
join: m in StateZoneMember,
on: m.state_id == s.id,
where: m.zone_id == ^zone.id
)
Repo.all(query)
end
@doc """
Updates Zone params and sets the members as per `new_state_ids`.
This replaces the old members with the new ones. Duplicate IDs in the list are
ignored.
## Note
The `zone.members` is set to `nil`!
"""
@spec update(Zone.t(), map, [non_neg_integer]) :: {:ok, Zone.t()} | {:error, Ecto.Changeset.t()}
def update(zone, zone_params, new_state_ids) do
zone_changeset = Zone.update_changeset(zone, zone_params)
multi = ZoneModel.update_multi(zone, zone_changeset, new_state_ids)
case Repo.transaction(multi) do
{:ok, %{zone: zone}} -> {:ok, %{zone | members: nil}}
error -> error
end
end
def remove_members_query(to_be_removed, zone) do
from(m in StateZoneMember, where: m.state_id in ^to_be_removed and m.zone_id == ^zone.id)
end
@doc """
Returns `StateZoneMember` changesets for given `state_ids` for `state_zone` as a stream.
"""
@spec member_changesets([non_neg_integer], Zone.t()) :: Enumerable.t()
def member_changesets(state_ids, state_zone) do
state_ids
|> Stream.uniq()
|> Stream.map(
&StateZoneMember.create_changeset(%StateZoneMember{}, %{
state_id: &1,
zone_id: state_zone.id
})
)
end
@doc """
Returns a query to fetch the state zones shared by (aka. common to) given
`state_id`s.
"""
@spec common_zone_query(non_neg_integer, non_neg_integer) :: Ecto.Query.t()
def common_zone_query(state_a_id, state_b_id) do
from(
szm_a in StateZoneMember,
join: szm_b in StateZoneMember,
join: z in Zone,
on: szm_a.zone_id == szm_b.zone_id and szm_a.zone_id == z.id,
where: szm_a.state_id == ^state_a_id and szm_b.state_id == ^state_b_id,
select: z
)
end
end
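# Usage sketch (not part of the original file; the zone name and state IDs are
# made up):
#
#     {:ok, zone} = Snitch.Data.Model.StateZone.create("west", "Western states", [1, 2, 3])
#     Snitch.Data.Model.StateZone.member_ids(zone)
#     #=> [1, 2, 3]
#
#     # Replace the members of an existing zone (duplicate IDs are ignored):
#     {:ok, _zone} = Snitch.Data.Model.StateZone.update(zone, %{}, [2, 2, 4])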
# =============================================================================
# File: lib/mix/tasks/phoenix.server.ex
# Repo: parkerl/phoenix (MIT)
# =============================================================================

defmodule Mix.Tasks.Phoenix.Server do
use Mix.Task
@shortdoc "Starts applications and their servers"
@moduledoc """
Starts the application by configuring all endpoint servers to run.
"""
def run(_args) do
Application.put_env(:phoenix, :serve_endpoints, true, persistent: true)
Mix.Task.run "app.start", []
no_halt
end
defp no_halt do
unless iex_running?, do: :timer.sleep(:infinity)
end
defp iex_running? do
Code.ensure_loaded?(IEx) && IEx.started?
end
end
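# Invoked from the command line (Mix derives the task name from the module
# name `Mix.Tasks.Phoenix.Server`):
#
#     $ mix phoenix.server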
# =============================================================================
# File: lib/logger_json/formatters/google_cloud_logger.ex
# Repo: portal-labs/logger_json (MIT)
# =============================================================================

defmodule LoggerJSON.Formatters.GoogleCloudLogger do
@moduledoc """
Google Cloud Logger formatter.
"""
import Jason.Helpers, only: [json_map: 1]
alias LoggerJSON.{FormatterUtils, JasonSafeFormatter}
@behaviour LoggerJSON.Formatter
@processed_metadata_keys ~w[pid file line function module application]a
# Severity levels can be found in Google Cloud Logger docs:
# https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity
@severity_levels [
{:debug, "DEBUG"},
{:info, "INFO"},
{:warn, "WARNING"},
{:error, "ERROR"}
]
@doc """
Builds structured payload which is mapped to Google Cloud Logger
[`LogEntry`](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) format.
See: https://cloud.google.com/logging/docs/agent/configuration#special_fields_in_structured_payloads
"""
for {level, gcp_level} <- @severity_levels do
def format_event(unquote(level), msg, ts, md, md_keys) do
Map.merge(
%{
time: FormatterUtils.format_timestamp(ts),
severity: unquote(gcp_level),
message: IO.iodata_to_binary(msg)
},
format_metadata(md, md_keys)
)
end
end
def format_event(_level, msg, ts, md, md_keys) do
Map.merge(
%{
time: FormatterUtils.format_timestamp(ts),
severity: "DEFAULT",
message: IO.iodata_to_binary(msg)
},
format_metadata(md, md_keys)
)
end
defp format_metadata(md, md_keys) do
LoggerJSON.take_metadata(md, md_keys, @processed_metadata_keys)
|> FormatterUtils.maybe_put(:error, FormatterUtils.format_process_crash(md))
|> FormatterUtils.maybe_put(:"logging.googleapis.com/sourceLocation", format_source_location(md))
|> FormatterUtils.maybe_put(:"logging.googleapis.com/operation", format_operation(md))
|> JasonSafeFormatter.format()
end
defp format_operation(md) do
if request_id = Keyword.get(md, :request_id) do
json_map(id: request_id)
end
end
# Description can be found in the Google Cloud Logger docs:
# https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogEntrySourceLocation
defp format_source_location(metadata) do
file = Keyword.get(metadata, :file)
line = Keyword.get(metadata, :line, 0)
function = Keyword.get(metadata, :function)
module = Keyword.get(metadata, :module)
json_map(
file: file,
line: line,
function: FormatterUtils.format_function(module, function)
)
end
end
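# Illustrative sketch (not part of the original file): the shape of the map
# `format_event/5` returns, which is then JSON-encoded into a Cloud Logging
# `LogEntry`. The metadata and timestamp values below are hypothetical.
#
#     LoggerJSON.Formatters.GoogleCloudLogger.format_event(
#       :info, "request served", ts, [request_id: "req-123"], :all
#     )
#     #=> %{
#     #      time: "2019-01-01T12:00:00.000Z",
#     #      severity: "INFO",
#     #      message: "request served",
#     #      "logging.googleapis.com/operation": %{id: "req-123"}
#     #    }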
# =============================================================================
# File: lib/fanout_listener.ex
# Repo: asheehan/extreme (MIT)
# =============================================================================

defmodule Extreme.FanoutListener do
@moduledoc ~S"""
A module that uses this listener will connect to a stream of the event store and wait for new events.
This is in contrast to `Extreme.Listener`, which first reads existing events (starting from position x)
and then keeps listening for new events.
The behavior is similar to RabbitMQ's fanout exchange, hence the name.
It is not uncommon to listen to live events and propagate them (for example over web sockets).
For that situation the `Extreme.FanoutListener` macro hides the noise from the listener:
defmodule MyApp.MyFanoutListener do
use Extreme.FanoutListener
import MyApp.MyPusher
defp process_push(push) do
Logger.info "Forward to web socket event #{inspect push.event.event_type}"
:ok = push.event.data
|> :erlang.binary_to_term
|> process_event(push.event.event_type)
end
end
defmodule MyApp.MyPusher do
def process_event(data, "Elixir.MyApp.Events.PersonCreated") do
Logger.debug "Transform and push event with data: #{inspect data}"
:ok
end
def process_event(_, _), do: :ok # Just acknowledge events we are not interested in
end
The listener can be started manually, but it is most common to place it in a supervisor AFTER specifying Extreme:
defmodule MyApp.Supervisor do
use Supervisor
def start_link, do: Supervisor.start_link __MODULE__, :ok
@event_store MyApp.EventStore
def init(:ok) do
event_store_settings = Application.get_env :my_app, :event_store
children = [
worker(Extreme, [event_store_settings, [name: @event_store]]),
worker(MyApp.MyFanoutListener, [@event_store, "my_indexed_stream", [name: MyFanoutListener]]),
# ... other workers / supervisors
]
supervise children, strategy: :one_for_one
end
end
"""
defmacro __using__(_) do
quote do
use GenServer
def start_link(event_store, stream_name, opts \\ []),
do: GenServer.start_link __MODULE__, {event_store, stream_name}, opts
def init({event_store, stream_name}) do
state = %{ event_store: event_store, subscription_ref: nil, stream_name: stream_name }
GenServer.cast self(), :subscribe
{:ok, state}
end
def handle_cast(:subscribe, state) do
{:ok, subscription} = Extreme.subscribe_to state.event_store, self(), state.stream_name
ref = Process.monitor subscription
{:noreply, %{state|subscription_ref: ref}}
end
def handle_info({:DOWN, ref, :process, _pid, _reason}, %{subscription_ref: ref} = state) do
reconnect_delay = 1_000
Logger.warn "Subscription to EventStore is down. Will retry in #{reconnect_delay} ms."
:timer.sleep(reconnect_delay)
GenServer.cast self(), :subscribe
{:noreply, state}
end
def handle_info({:on_event, push}, state) do
:ok = process_push(push)
{:noreply, state}
end
def handle_info(_msg, state), do: {:noreply, state}
end
end
end
# =============================================================================
# File: lib/aws/generated/detective.ex
# Repo: pecigonzalo/aws-elixir (Apache-2.0)
# =============================================================================

# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.Detective do
@moduledoc """
Detective uses machine learning and purpose-built visualizations to help you to
analyze and investigate security issues across your Amazon Web Services (AWS)
workloads.
Detective automatically extracts time-based events such as login attempts, API
calls, and network traffic from CloudTrail and Amazon Virtual Private Cloud
(Amazon VPC) flow logs. It also extracts findings detected by Amazon GuardDuty.
The Detective API primarily supports the creation and management of behavior
graphs. A behavior graph contains the extracted data from a set of member
accounts, and is created and managed by an administrator account.
To add a member account to the behavior graph, the administrator account sends
an invitation to the account. When the account accepts the invitation, it
becomes a member account in the behavior graph.
Detective is also integrated with Organizations. The organization management
account designates the Detective administrator account for the organization.
That account becomes the administrator account for the organization behavior
graph. The Detective administrator account can enable any organization account
as a member account in the organization behavior graph. The organization
accounts do not receive invitations. The Detective administrator account can
also invite other accounts to the organization behavior graph.
Every behavior graph is specific to a Region. You can only use the API to manage
behavior graphs that belong to the Region that is associated with the currently
selected endpoint.
The administrator account for a behavior graph can use the Detective API to do
the following:
* Enable and disable Detective. Enabling Detective creates a new
behavior graph.
* View the list of member accounts in a behavior graph.
* Add member accounts to a behavior graph.
* Remove member accounts from a behavior graph.
* Apply tags to a behavior graph.
The organization management account can use the Detective API to select the
delegated administrator for Detective.
The Detective administrator account for an organization can use the Detective
API to do the following:
* Perform all of the functions of an administrator account.
* Determine whether to automatically enable new organization
accounts as member accounts in the organization behavior graph.
An invited member account can use the Detective API to do the following:
* View the list of behavior graphs that they are invited to.
* Accept an invitation to contribute to a behavior graph.
* Decline an invitation to contribute to a behavior graph.
* Remove their account from a behavior graph.
All API actions are logged as CloudTrail events. See [Logging Detective API Calls with
CloudTrail](https://docs.aws.amazon.com/detective/latest/adminguide/logging-using-cloudtrail.html).
We replaced the term "master account" with the term "administrator account." An
administrator account is used to centrally manage multiple accounts. In the case
of Detective, the administrator account manages the accounts in their behavior
graph.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2018-10-26",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "api.detective",
global?: false,
protocol: "rest-json",
service_id: "Detective",
signature_version: "v4",
signing_name: "detective",
target_prefix: nil
}
end
@doc """
Accepts an invitation for the member account to contribute data to a behavior
graph.
This operation can only be called by an invited member account.
The request provides the ARN of behavior graph.
The member account status in the graph must be `INVITED`.
"""
def accept_invitation(%Client{} = client, input, options \\ []) do
url_path = "/invitation"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new behavior graph for the calling account, and sets that account as
the administrator account.
This operation is called by the account that is enabling Detective.
Before you try to enable Detective, make sure that your account has been
enrolled in Amazon GuardDuty for at least 48 hours. If you do not meet this
requirement, you cannot enable Detective. If you do meet the GuardDuty
prerequisite, then when you make the request to enable Detective, it checks
whether your data volume is within the Detective quota. If it exceeds the quota,
then you cannot enable Detective.
The operation also enables Detective for the calling account in the currently
selected Region. It returns the ARN of the new behavior graph.
`CreateGraph` triggers a process to create the corresponding data tables for the
new behavior graph.
An account can only be the administrator account for one behavior graph within a
Region. If the same account calls `CreateGraph` with the same administrator
account, it always returns the same behavior graph ARN. It does not create a new
behavior graph.
"""
def create_graph(%Client{} = client, input, options \\ []) do
url_path = "/graph"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
`CreateMembers` is used to send invitations to accounts.
For the organization behavior graph, the Detective administrator account uses
`CreateMembers` to enable organization accounts as member accounts.
For invited accounts, `CreateMembers` sends a request to invite the specified
Amazon Web Services accounts to be member accounts in the behavior graph. This
operation can only be called by the administrator account for a behavior graph.
`CreateMembers` verifies the accounts and then invites the verified accounts.
The administrator can optionally specify to not send invitation emails to the
member accounts. This would be used when the administrator manages their member
accounts centrally.
For organization accounts in the organization behavior graph, `CreateMembers`
attempts to enable the accounts. The organization accounts do not receive
invitations.
The request provides the behavior graph ARN and the list of accounts to invite
or to enable.
The response separates the requested accounts into two lists:
* The accounts that `CreateMembers` was able to process. For invited
accounts, includes member accounts that are being verified, that have passed
verification and are to be invited, and that have failed verification. For
organization accounts in the organization behavior graph, includes accounts that
can be enabled and that cannot be enabled.
* The accounts that `CreateMembers` was unable to process. This list
includes accounts that were already invited to be member accounts in the
behavior graph.
"""
def create_members(%Client{} = client, input, options \\ []) do
url_path = "/graph/members"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Disables the specified behavior graph and queues it to be deleted.
This operation removes the behavior graph from each member account's list of
behavior graphs.
`DeleteGraph` can only be called by the administrator account for a behavior
graph.
"""
def delete_graph(%Client{} = client, input, options \\ []) do
url_path = "/graph/removal"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the specified member accounts from the behavior graph.
The removed accounts no longer contribute data to the behavior graph. This
operation can only be called by the administrator account for the behavior
graph.
For invited accounts, the removed accounts are deleted from the list of accounts
in the behavior graph. To restore the account, the administrator account must
send another invitation.
For organization accounts in the organization behavior graph, the Detective
administrator account can always enable the organization account again.
Organization accounts that are not enabled as member accounts are not included
in the `ListMembers` results for the organization behavior graph.
An administrator account cannot use `DeleteMembers` to remove their own account
from the behavior graph. To disable a behavior graph, the administrator account
uses the `DeleteGraph` API method.
"""
def delete_members(%Client{} = client, input, options \\ []) do
url_path = "/graph/members/removal"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns information about the configuration for the organization behavior graph.
Currently indicates whether to automatically enable new organization accounts as
member accounts.
Can only be called by the Detective administrator account for the organization.
"""
def describe_organization_configuration(%Client{} = client, input, options \\ []) do
url_path = "/orgs/describeOrganizationConfiguration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the Detective administrator account for the organization in the current
Region.
Deletes the behavior graph for that account.
Can only be called by the organization management account. Before you can select
a different Detective administrator account, you must remove the Detective
administrator account in all Regions.
"""
def disable_organization_admin_account(%Client{} = client, input, options \\ []) do
url_path = "/orgs/disableAdminAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the member account from the specified behavior graph.
This operation can only be called by an invited member account that has the
`ENABLED` status.
`DisassociateMembership` cannot be called by an organization account in the
organization behavior graph. For the organization behavior graph, the Detective
administrator account determines which organization accounts to enable or
disable as member accounts.
"""
def disassociate_membership(%Client{} = client, input, options \\ []) do
url_path = "/membership/removal"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Designates the Detective administrator account for the organization in the
current Region.
If the account does not have Detective enabled, then enables Detective for that
account and creates a new behavior graph.
Can only be called by the organization management account.
The Detective administrator account for an organization must be the same in all
Regions. If you already designated a Detective administrator account in another
Region, then you must designate the same account.
"""
def enable_organization_admin_account(%Client{} = client, input, options \\ []) do
url_path = "/orgs/enableAdminAccount"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the membership details for specified member accounts for a behavior
graph.
"""
def get_members(%Client{} = client, input, options \\ []) do
url_path = "/graph/members/get"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the list of behavior graphs that the calling account is an administrator
account of.
This operation can only be called by an administrator account.
Because an account can currently only be the administrator of one behavior graph
within a Region, the results always contain a single behavior graph.
"""
def list_graphs(%Client{} = client, input, options \\ []) do
url_path = "/graphs/list"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves the list of open and accepted behavior graph invitations for the
member account.
This operation can only be called by an invited member account.
Open invitations are invitations that the member account has not responded to.
The results do not include behavior graphs for which the member account declined
the invitation. The results also do not include behavior graphs that the member
account resigned from or was removed from.
"""
def list_invitations(%Client{} = client, input, options \\ []) do
url_path = "/invitations/list"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves the list of member accounts for a behavior graph.
For invited accounts, the results do not include member accounts that were
removed from the behavior graph.
For the organization behavior graph, the results do not include organization
accounts that the Detective administrator account has not enabled as member
accounts.
"""
def list_members(%Client{} = client, input, options \\ []) do
url_path = "/graph/members/list"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns information about the Detective administrator account for an
organization.
Can only be called by the organization management account.
"""
def list_organization_admin_accounts(%Client{} = client, input, options \\ []) do
url_path = "/orgs/adminAccountslist"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the tag values that are assigned to a behavior graph.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Rejects an invitation to contribute the account data to a behavior graph.
This operation must be called by an invited member account that has the
`INVITED` status.
`RejectInvitation` cannot be called by an organization account in the
organization behavior graph. In the organization behavior graph, organization
accounts do not receive an invitation.
"""
def reject_invitation(%Client{} = client, input, options \\ []) do
url_path = "/invitation/removal"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Sends a request to enable data ingest for a member account that has a status of
`ACCEPTED_BUT_DISABLED`.
For valid member accounts, the status is updated as follows.
* If Detective enabled the member account, then the new status is
`ENABLED`.
* If Detective cannot enable the member account, the status remains
`ACCEPTED_BUT_DISABLED`.
"""
def start_monitoring_member(%Client{} = client, input, options \\ []) do
url_path = "/graph/member/monitoringstate"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Applies tag values to a behavior graph.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Removes tags from a behavior graph.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
headers = []
{query_params, input} =
[
{"TagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Updates the configuration for the Organizations integration in the current
Region.
Can only be called by the Detective administrator account for the organization.
"""
def update_organization_configuration(%Client{} = client, input, options \\ []) do
url_path = "/orgs/updateOrganizationConfiguration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
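# Usage sketch (not part of the original file; the credentials, region, and
# exact response shape are illustrative assumptions):
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#     {:ok, _body, _http_response} = AWS.Detective.create_graph(client, %{})
#     {:ok, _body, _http_response} = AWS.Detective.list_graphs(client, %{})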
# =============================================================================
# File: lib/absinthe/blueprint/input/raw_value.ex
# Repo: shamshirz/absinthe (MIT)
# =============================================================================

defmodule Absinthe.Blueprint.Input.RawValue do
@moduledoc false
alias Absinthe.Blueprint.Input.Object
@enforce_keys [:content]
defstruct [
:content
]
@type t :: %__MODULE__{
content: Object.t()
}
end
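# Example (not part of the original file): `@enforce_keys` makes `:content`
# mandatory when the struct literal is built.
#
#     %Absinthe.Blueprint.Input.RawValue{content: object}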
# =============================================================================
# File: lib/exboard_web/endpoint.ex
# Repo: eahanson/exboard (MIT)
# =============================================================================

defmodule ExboardWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :exboard
socket "/socket", ExboardWeb.UserSocket,
websocket: true,
longpoll: false
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :exboard,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_exboard_key",
signing_salt: "yS70JqEL"
plug ExboardWeb.Router
end
# =============================================================================
# File: test/job_board/jobs/jobs_test.exs
# Repo: TDogVoid/job_board (MIT)
# =============================================================================

defmodule JobBoard.JobsTest do
use JobBoard.DataCase
alias JobBoard.Jobs
import JobBoard.TestHelpers
describe "jobs" do
alias JobBoard.Jobs.Job
@valid_attrs %{title: "some title", link: "http://somelink.com"}
@update_attrs %{title: "some updated title", link: "http://updatedlink.com"}
@invalid_attrs %{title: nil, link: nil}
@missing_title_attrs %{title: nil, link: "http://somelink.com"}
@missing_link_attrs %{title: "some updated title", description: "updated description", link: nil}
@invalid_link_attrs %{title: "some updated title", description: "updated description", link: "somelink.com"}
def job_fixture(attrs \\ %{}) do
city = city_fixture(:city)
{:ok, job} =
attrs
|> Enum.into(@valid_attrs)
|> Map.put(:zipcode, city.zipcode)
|> Jobs.create_job()
job
end
test "list_jobs/0 returns all jobs" do
job = job_fixture()
|> Map.put(:zipcode, nil) # virtual field
assert Jobs.list_jobs() == [job]
end
test "get_job!/1 returns the job with given id" do
job = job_fixture()
|> Map.put(:zipcode, nil) # virtual field
assert Jobs.get_job!(job.id) == job
end
test "create_job/1 with valid data creates a job" do
city = city_fixture(:city)
attrs = Map.put(@valid_attrs, :zipcode, city.zipcode)
assert {:ok, %Job{} = job} = Jobs.create_job(attrs)
assert job.title == "some title"
end
test "create_job/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Jobs.create_job(@invalid_attrs)
end
test "create_job/1 with missing title returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Jobs.create_job(@missing_title_attrs)
end
test "create_job/1 with missing link returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Jobs.create_job(@missing_link_attrs)
end
test "update_job/2 with valid data updates the job" do
job = job_fixture()
assert {:ok, job} = Jobs.update_job(job, @update_attrs)
assert %Job{} = job
assert job.title == "some updated title"
end
test "update_job/2 with invalid data returns error changeset" do
job = job_fixture()
assert {:error, %Ecto.Changeset{}} = Jobs.update_job(job, @invalid_attrs)
job = Map.put(job, :zipcode, nil) # virtual field
assert job == Jobs.get_job!(job.id)
end
test "delete_job/1 deletes the job" do
job = job_fixture()
assert {:ok, %Job{}} = Jobs.delete_job(job)
assert_raise Ecto.NoResultsError, fn -> Jobs.get_job!(job.id) end
end
test "change_job/1 returns a job changeset" do
job = job_fixture()
assert %Ecto.Changeset{} = Jobs.change_job(job)
end
test "body includes no stripped tags" do
changeset = Job.changeset(%Job{}, @valid_attrs)
assert get_change(changeset, :description) == @valid_attrs[:description]
end
test "invalid link, missing http" do
assert {:error, %Ecto.Changeset{}} = Jobs.create_job(@invalid_link_attrs)
end
end
end
# =============================================================================
# File: lists-and-recursion/caesar.exs
# Repo: sfat/programming-elixir-exercises (Apache-2.0)
# =============================================================================

defmodule MyList do
def caesar([], _), do: []
def caesar([head | tail], n) when head + n > ?z, do: [?a + head + n - ?z - 1| caesar(tail, n)]
def caesar([head | tail], n), do: [head + n | caesar(tail, n)]
end
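# Example (not part of the original file): `caesar/2` shifts a charlist by n,
# wrapping past ?z back around to ?a.
#
#     MyList.caesar('ryvkve', 13)
#     #=> 'elixir'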
# =============================================================================
# File: elixir/frontend/config/prod.secret.exs
# Repo: honeycombio/example-greeting-service (Apache-2.0)
# =============================================================================

# In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
config :frontend, FrontendWeb.Endpoint,
http: [
port: String.to_integer(System.get_env("PORT") || "7000"),
transport_options: [socket_opts: [:inet6]]
],
secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :frontend, FrontendWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
# =============================================================================
# File: test/edgedb/protocol/codecs/builtin/local_date_test.exs
# Repo: f0lio/edgedb-elixir (MIT)
# =============================================================================

defmodule Tests.EdgeDB.Protocol.Codecs.Builtin.LocalDateTest do
use Tests.Support.EdgeDBCase
setup :edgedb_connection
test "decoding cal::local_date value", %{conn: conn} do
value = ~D[2019-05-06]
assert ^value = EdgeDB.query_single!(conn, "SELECT <cal::local_date>'2019-05-06'")
end
test "encoding cal::local_date value", %{conn: conn} do
value = ~D[2019-05-06]
assert ^value = EdgeDB.query_single!(conn, "SELECT <cal::local_date>$0", [value])
end
end
# =============================================================================
# File: lib/health/router.ex
# Repo: ARKultur/naboo (MIT)
# =============================================================================

defmodule NabooHealth.Router do
use Plug.Router
defmodule Health do
use Plug.Router
plug(:match)
plug(:dispatch)
forward(
"/",
to: PlugCheckup,
init_opts:
PlugCheckup.Options.new(
json_encoder: Jason,
checks: NabooHealth.checks(),
error_code: NabooHealth.error_code(),
timeout: :timer.seconds(5),
pretty: false
)
)
end
plug(:match)
plug(:dispatch)
forward("/health", to: Health)
match(_, do: conn)
end
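# With this router mounted in an endpoint, the health check is a plain GET
# (host and port are hypothetical):
#
#     $ curl http://localhost:4000/health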
# =============================================================================
# File: machine_translation/MorpHIN/Learned/Resources/Set4/TrainingInstances/85.exs
# Repo: AdityaPrasadMishra/NLP--Project-Group-16 (MIT)
# (training-instance data, not Elixir source despite the .exs extension; each
# line appears to give four context tags followed by a target tag)
# =============================================================================

**EXAMPLE FILE**
quantifier pnoun cm noun noun;
cm quantifier cm noun noun;
noun cm cm ordinal cm;
noun demonstrative cm noun noun;
conj demonstrative noun cm noun;
cm cardinal noun verb noun;
pnoun P_wh cm noun noun;
noun cm cm adjective cm;
noun cm cm cm nst;
noun cm cm quantifier cm;
pn particle noun cm noun;
noun cm demonstrative noun cm;
noun cm cm verb noun;
pnoun cm cm demonstrative cm;
verb cm cm cardinal cm;
noun cm cm adjective cm;
cm pn cm adjective cm;
noun cm cm pnoun cm;
SYM cardinal cm pnoun noun;
pn quantifier cm adjective noun;
noun cm cm adjective cm;
noun cm verb verb_aux cm;
noun cm verb noun cm;
noun cm cm adjective cm;
conj pn cm quantifier cm;
noun cm noun SYM cm;
SYM demonstrative cm noun noun;
noun cm cm noun noun;
cm pn cm quantifier noun;
cm cardinal cm verb noun;
noun cm noun cardinal cm;
noun cardinal verb pn cm;
verb ordinal verb conj noun;
noun cm adjective verb cm;
cm demonstrative cm noun noun;
pnoun cm pnoun cm cm;
noun cm cm pnoun cm;
SYM quantifier adjective verb noun;
cardinal cm cardinal noun noun;
noun cm quantifier noun noun;
noun cm cm particle cm;
noun cm cm pnoun cm;
demonstrative particle cm adjective noun;
pnoun adjective cm pnoun noun;
conj adjective cm pnoun noun;
pnoun adjective cm pnoun noun;
pn pn cm adverb cm;
cm demonstrative cm noun noun;
noun cm cm cardinal cm;
noun cm cm noun cm;
SYM demonstrative verb conj noun;
conj ordinal pnoun cm noun;
noun cm verb verb_aux noun;
verb cm pnoun conj cm;
SYM demonstrative noun cm noun;
noun cm noun quantifier cm;
pnoun cm cm noun cm;
noun cm cm verb cm;
pn ordinal pnoun cm cm;
SYM pn cm pnoun cm;
verb pn cm pn cm;
noun cm particle verb cm;
cardinal pnoun noun cm noun;
pnoun cm cm noun cm;
conj quantifier cm noun noun;
SYM ordinal noun cm noun;
noun cm cm verb cm;
noun cm cm verb cm;
noun cm cm SYM cm;
verb pn SYM pnoun noun;
cm demonstrative cm noun noun;
noun cm ordinal noun cm;
cm quantifier noun verb noun;
noun cm cm demonstrative cm;
# =============================================================================
# File: test/support/channel_case.ex
# Repo: Foo-x/Shiritorishi (Apache-2.0)
# =============================================================================

defmodule ShiritorishiWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint ShiritorishiWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Shiritorishi.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Shiritorishi.Repo, {:shared, self()})
end
:ok
end
end
# =============================================================================
# File: lib/fun_with_flags/store.ex
# Repo: chubarovNick/fun_with_flags (MIT)
# =============================================================================

defmodule FunWithFlags.Store do
@moduledoc false
require Logger
alias FunWithFlags.Store.Cache
alias FunWithFlags.{Flag, Config}
@persistence FunWithFlags.Store.Persistent.adapter
def lookup(flag_name) do
case Cache.get(flag_name) do
{:ok, flag} ->
{:ok, flag}
{:miss, reason, stale_value_or_nil} ->
case @persistence.get(flag_name) do
{:ok, flag} ->
Cache.put(flag)
{:ok, flag}
{:error, _reason} ->
try_to_use_the_cached_value(reason, stale_value_or_nil, flag_name)
end
end
end
defp try_to_use_the_cached_value(:expired, value, flag_name) do
Logger.warn "FunWithFlags: couldn't load flag '#{flag_name}' from storage, falling back to stale cached value from ETS"
{:ok, value}
end
defp try_to_use_the_cached_value(_, _, flag_name) do
raise "Can't load feature flag '#{flag_name}' from either storage or the cache"
end
def put(flag_name, gate) do
flag_name
|> @persistence.put(gate)
|> publish_change()
|> cache_persistence_result()
end
def delete(flag_name, gate) do
flag_name
|> @persistence.delete(gate)
|> publish_change()
|> cache_persistence_result()
end
def delete(flag_name) do
flag_name
|> @persistence.delete()
|> publish_change()
|> cache_persistence_result()
end
def reload(flag_name) do
Logger.debug fn -> "FunWithFlags: reloading cached flag '#{flag_name}' from storage" end
flag_name
|> @persistence.get()
|> cache_persistence_result()
end
defdelegate all_flags(), to: @persistence
defdelegate all_flag_names(), to: @persistence
defp cache_persistence_result(result) do
case result do
{:ok, flag} ->
Cache.put(flag)
{:error, _reason} = error ->
error
end
end
defp publish_change(result = {:ok, %Flag{name: flag_name}}) do
if Config.change_notifications_enabled? do
Config.notifications_adapter.publish_change(flag_name)
end
result
end
defp publish_change(result) do
result
end
end
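# Usage sketch (not part of the original file; the flag name is made up).
# A lookup tries the ETS cache first; on a cache miss it falls back to the
# persistent adapter and re-warms the cache:
#
#     {:ok, flag} = FunWithFlags.Store.lookup(:new_ui)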
# =============================================================================
# File: config/test.exs
# Repo: nerves-hub/nerves_hub_user_api (Apache-2.0)
# =============================================================================

use Mix.Config
config :nerves_hub_user_api, ca_store: NervesHubCoreTest.CAStore
nerves_hub_web_path =
Mix.Project.deps_paths()
|> Enum.find(&(elem(&1, 0) == :nerves_hub_web))
|> elem(1)
nerves_hub_web_config = Path.join(nerves_hub_web_path, "config/config.exs")
if File.exists?(nerves_hub_web_config) do
import_config(nerves_hub_web_config)
end
working_dir = Path.expand("test/tmp/pki")
config :nerves_hub_user_api,
host: "0.0.0.0",
port: 5002,
# pass list of paths
ca_certs: Path.expand("test/tmp/ssl"),
server_name_indication: :disable,
ecto_repos: [NervesHubCA.Repo, NervesHubWebCore.Repo]
alias NervesHubCA.Intermediate.CA
config :nerves_hub_user_api,
ecto_repos: [
NervesHubCA.Repo,
NervesHubWebCore.Repo
]
config :nerves_hub_ca, :api,
otp_app: :nerves_hub_ca,
port: 8443,
cacertfile: Path.join(working_dir, "ca.pem"),
certfile: Path.join(working_dir, "ca.nerves-hub.org.pem"),
keyfile: Path.join(working_dir, "ca.nerves-hub.org-key.pem")
config :nerves_hub_ca, CA.User,
ca: Path.join(working_dir, "user-root-ca.pem"),
ca_key: Path.join(working_dir, "user-root-ca-key.pem")
config :nerves_hub_ca, CA.Device,
ca: Path.join(working_dir, "device-root-ca.pem"),
ca_key: Path.join(working_dir, "device-root-ca-key.pem")
config :nerves_hub_ca,
ecto_repos: [NervesHubCA.Repo]
config :nerves_hub_ca, NervesHubCA.Repo,
adapter: Ecto.Adapters.Postgres,
ssl: false,
pool: Ecto.Adapters.SQL.Sandbox
config :nerves_hub_web_core, NervesHubWebCore.Repo,
ssl: false,
pool_size: 30,
pool: Ecto.Adapters.SQL.Sandbox
config :nerves_hub_api, NervesHubAPIWeb.Endpoint,
code_reloader: false,
check_origin: false,
server: true,
watchers: [],
pubsub_server: NervesHubWeb.PubSub,
https: [
port: 5002,
otp_app: :nerves_hub_api,
# Enable client SSL
verify: :verify_peer,
keyfile: Path.join(working_dir, "api.nerves-hub.org-key.pem"),
certfile: Path.join(working_dir, "api.nerves-hub.org.pem"),
cacertfile: Path.join(working_dir, "ca.pem")
]
config :nerves_hub_web_core, NervesHubWebCore.CertificateAuthority,
host: "0.0.0.0",
port: 8443,
ssl: [
cacertfile: Path.join(working_dir, "ca.pem"),
server_name_indication: :disable
]
config :nerves_hub_web_core,
firmware_upload: NervesHubWebCore.Firmwares.Upload.File,
delta_updater: NervesHubCoreTest.DeltaUpdater
config :nerves_hub_web_core, NervesHubWebCore.Firmwares.Upload.File,
enabled: true,
local_path: Path.join(System.tmp_dir(), "firmware"),
public_path: "/firmware"
config :logger, level: :warn
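# Illustrative read-back (hypothetical iex session, not part of the original
# file): with this configuration loaded, the API endpoint serves HTTPS on
# port 5002 and demands client certificates, e.g.
#
#   iex> Application.get_env(:nerves_hub_api, NervesHubAPIWeb.Endpoint)[:https][:verify]
#   :verify_peer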
| 26.854167 | 75 | 0.738169 |
f7252e0cf73436cf3dde1ca82b48438db241071a | 2,630 | exs | Elixir | config/config.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | ["Apache-2.0"] | null | null | null | config/config.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | ["Apache-2.0"] | null | null | null | config/config.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | ["Apache-2.0"] | null | null | null |
# Copyright(c) 2015-2018 ACCESS CO., LTD. All rights reserved.
use Mix.Config
for {app, kw} <- Antikythera.MixConfig.all() do
config(app, kw)
end
# Auxiliary variables.
repo_tmp_dir_basename = if System.get_env("ANTIKYTHERA_COMPILE_ENV") == "local", do: "local", else: :os.getpid()
repo_tmp_dir = Path.join([__DIR__, "..", "tmp", repo_tmp_dir_basename]) |> Path.expand()
config :antikythera, [
# Name of the OTP application that runs as an antikythera instance.
antikythera_instance_name: :antikythera, # `:antikythera` is used here only for testing.
# Directory (which can be in a NFS volume) where antikythera's configuration files, build artifacts, etc. are stored.
antikythera_root_dir: Path.join(repo_tmp_dir, "root"),
# Directory where `Antikythera.Tmpdir.make/2` creates temporary workspaces for gear implementations.
gear_tmp_dir: Path.join(repo_tmp_dir, "gear_tmp"),
# Directory where log/snapshot files of persistent Raft consensus groups are stored.
# (this is used by `AntikytheraCore.AsyncJob.RaftOptionsMaker`).
raft_persistence_dir_parent: Path.join(repo_tmp_dir, "raft_fleet"),
# Keyword list of deployments, where each deployment is a cluster of ErlangVMs to run an antikythera instance.
# Each key is the name (atom) of a deployment and each value is the base domain of the deployment.
# One can interact with a gear running in a deployment by accessing the subdomain of the base domain.
# (To run blackbox tests against deployments, it's necessary to list all existing deployments here.)
deployments: [
dev: "antikytheradev.example.com",
prod: "antikythera.example.com" ,
],
# URL of Content Delivery Network for static assets (such as CSS, JS, etc.).
asset_cdn_endpoint: nil,
# Whether to include detailed information about request and stacktrace in response body
# returned by the default error handler for debugging purpose.
# Note that gears using their own custom error handlers are not affected by this configuration item.
return_detailed_info_on_error?: true,
# Alert settings.
alert: [
email: [
from: "[email protected]",
],
],
# Pluggable modules that implement `AntikytheraEal.*.Behaviour` behaviours.
eal_impl_modules: [
cluster_configuration: AntikytheraEal.ClusterConfiguration.StandAlone,
log_storage: AntikytheraEal.LogStorage.FileSystem ,
metrics_storage: AntikytheraEal.MetricsStorage.Memory ,
alert_mailer: AntikytheraEal.AlertMailer.MemoryInbox ,
asset_storage: AntikytheraEal.AssetStorage.NoOp ,
],
]
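# Illustrative read-back of the settings above (hypothetical iex session,
# not part of the original file):
#
#   iex> Application.fetch_env!(:antikythera, :deployments)[:dev]
#   "antikytheradev.example.com"
#   iex> Application.fetch_env!(:antikythera, :eal_impl_modules)[:alert_mailer]
#   AntikytheraEal.AlertMailer.MemoryInbox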
| 43.833333 | 119 | 0.73346 |
f725306658d8ad5cfc96c89adc3ff94a9234cabc | 1,683 | ex | Elixir | apps/ewallet/lib/ewallet/fetchers/token_fetcher.ex | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | ["Apache-2.0"] | 1 | 2018-12-07T06:21:21.000Z | 2018-12-07T06:21:21.000Z | apps/ewallet/lib/ewallet/fetchers/token_fetcher.ex | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | ["Apache-2.0"] | null | null | null | apps/ewallet/lib/ewallet/fetchers/token_fetcher.ex | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | ["Apache-2.0"] | null | null | null |
defmodule EWallet.TokenFetcher do
@moduledoc """
Handles retrieval of tokens from params for transactions.
"""
alias EWalletDB.Token
def fetch(%{"token_uuid" => token_uuid}) do
with %Token{} = token <- Token.get_by(uuid: token_uuid) || :token_not_found,
true <- token.enabled || :token_is_disabled do
{:ok, token}
else
error -> {:error, error}
end
end
def fetch(%{"token_id" => token_id}) do
with %Token{} = token <- Token.get_by(id: token_id) || :token_not_found,
true <- token.enabled || :token_is_disabled do
{:ok, token}
else
error -> {:error, error}
end
end
def fetch(%{"token_id" => token_id}, from, to) do
with %Token{} = token <- Token.get_by(id: token_id) || :token_not_found,
true <- token.enabled || :token_is_disabled do
{:ok, Map.put(from, :from_token, token), Map.put(to, :to_token, token)}
else
error -> {:error, error}
end
end
def fetch(%{"from_token_id" => from_token_id, "to_token_id" => to_token_id}, from, to) do
with %Token{} = from_token <- Token.get_by(id: from_token_id) || :from_token_not_found,
true <- from_token.enabled || :from_token_is_disabled,
%Token{} = to_token <- Token.get_by(id: to_token_id) || :to_token_not_found,
true <- to_token.enabled || :to_token_is_disabled do
{:ok, Map.put(from, :from_token, from_token), Map.put(to, :to_token, to_token)}
else
error -> {:error, error}
end
end
def fetch(_, _from, _to) do
{:error, :invalid_parameter,
"Invalid parameter provided. `token_id` or a pair of `from_token_id` and `to_token_id` is required."}
end
end
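# Illustrative calls (hypothetical token ids and results, not part of the
# original file):
#
#   # Single-token lookup for a same-token transaction:
#   iex> EWallet.TokenFetcher.fetch(%{"token_id" => "tok_123"})
#   {:ok, %EWalletDB.Token{id: "tok_123", enabled: true, ...}}
#
#   # Cross-token exchange; fetched tokens are merged into the from/to maps:
#   iex> EWallet.TokenFetcher.fetch(
#   ...>   %{"from_token_id" => "tok_a", "to_token_id" => "tok_b"}, %{}, %{})
#   {:ok, %{from_token: ...}, %{to_token: ...}}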
| 33.66 | 106 | 0.632799 |
f72568d57cae502ba7e13b61df94e2fc38472b11 | 94 | exs | Elixir | test/lib/ecto_cassandra/storage_test.exs | cibingeorge/ecto_cassandra | 4a91d94cbe51063c78138f3fa5d64e78f1e436b7 | ["MIT"] | 4 | 2018-08-24T10:02:56.000Z | 2019-10-20T20:21:04.000Z | test/lib/ecto_cassandra/storage_test.exs | cibingeorge/ecto_cassandra | 4a91d94cbe51063c78138f3fa5d64e78f1e436b7 | ["MIT"] | null | null | null | test/lib/ecto_cassandra/storage_test.exs | cibingeorge/ecto_cassandra | 4a91d94cbe51063c78138f3fa5d64e78f1e436b7 | ["MIT"] | 2 | 2018-10-02T09:51:41.000Z | 2019-11-15T19:44:35.000Z |
defmodule EctoCassandra.StorageTest do
@moduledoc false
use ExUnit.Case, async: true
end
| 15.666667 | 38 | 0.787234 |
f72573636772277c77d484385b8dba7aaae34ad1 | 123 | ex | Elixir | lib/igdb/resources/keyword.ex | facto/igdb | 9662e9ffd41e0925c22b1d3cd4774ae49b68e89e | ["MIT"] | 2 | 2018-04-09T07:34:37.000Z | 2020-03-08T06:50:03.000Z | lib/igdb/resources/keyword.ex | tomasz-tomczyk/igdb | 98d777798503f427e6c302da0251e819b9583c65 | ["MIT"] | null | null | null | lib/igdb/resources/keyword.ex | tomasz-tomczyk/igdb | 98d777798503f427e6c302da0251e819b9583c65 | ["MIT"] | null | null | null |
defmodule Igdb.Keyword do
@moduledoc """
Represents a Keyword resource in the IGDB API.
"""
use Igdb.Resource
end
| 15.375 | 48 | 0.707317 |
f7257c95a4209839ee758b25fcd6cb3efa429a98 | 1,468 | ex | Elixir | lib/club_web/views/error_helpers.ex | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | ["MIT"] | null | null | null | lib/club_web/views/error_helpers.ex | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | ["MIT"] | 34 | 2019-11-10T11:31:37.000Z | 2019-11-27T21:26:48.000Z | lib/club_web/views/error_helpers.ex | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | ["MIT"] | null | null | null |
defmodule ClubWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error), class: "help-block")
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(ClubWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(ClubWeb.Gettext, "errors", msg, opts)
end
end
end
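# Illustrative example (not part of the original file): Ecto emits errors
# like {"should be at least %{count} character(s)", count: 3}, and
# `translate_error/1` takes the plural Gettext path and interpolates:
#
#   iex> ClubWeb.ErrorHelpers.translate_error(
#   ...>   {"should be at least %{count} character(s)", count: 3})
#   "should be at least 3 character(s)"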
| 32.622222 | 73 | 0.668256 |
f7258f83e618d438f08493a9d23c72f87c3e7864 | 874 | exs | Elixir | config/test.exs | dabaer/liveview-table-test | 61d22fb964120aa4715cc0e9ef12f3d5c61718ea | ["MIT"] | null | null | null | config/test.exs | dabaer/liveview-table-test | 61d22fb964120aa4715cc0e9ef12f3d5c61718ea | ["MIT"] | null | null | null | config/test.exs | dabaer/liveview-table-test | 61d22fb964120aa4715cc0e9ef12f3d5c61718ea | ["MIT"] | null | null | null |
import Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :test, Test.Repo,
username: "postgres",
password: "postgres",
database: "test_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox,
pool_size: 10
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :test, TestWeb.Endpoint,
http: [ip: {127, 0, 0, 1}, port: 4002],
server: false
# In test we don't send emails.
config :test, Test.Mailer, adapter: Swoosh.Adapters.Test
# Print only warnings and errors during test
config :logger, level: :warn
# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime
| 29.133333 | 63 | 0.741419 |
f72596263cf2728451b1429c44dc833cfad18fbc | 1,633 | ex | Elixir | lib/panglao/repo.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | 1 | 2017-02-18T21:20:17.000Z | 2017-02-18T21:20:17.000Z | lib/panglao/repo.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | null | null | null | lib/panglao/repo.ex | ikeikeikeike/panglao | 6d3f6515d9f1ceb9a2e771ae2d54c222cedbf538 | [
"MIT"
] | null | null | null | defmodule Panglao.Repo do
use Ecto.Repo, otp_app: :panglao
end
defmodule Panglao.RepoReader do
use Ecto.Repo, otp_app: :panglao
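# Comment added for clarity: each `gets*` helper below reads from this
# replica first; the `with r when is_nil(r) <- ...` clause only falls
# through to the primary `Panglao.Repo` on a miss, because a non-nil
# replica result fails the guard and is returned from the `with` as-is.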
def gets(queryable, id, opts \\ []) do
with r when is_nil(r) <- get(queryable, id, opts),
r <- Panglao.Repo.get(queryable, id, opts) do
r
end
end
def gets!(queryable, id, opts \\ []) do
with r when is_nil(r) <- get(queryable, id, opts),
r <- Panglao.Repo.get!(queryable, id, opts) do
r
end
end
def gets_by(queryable, clauses, opts \\ []) do
with r when is_nil(r) <- get_by(queryable, clauses, opts),
r <- Panglao.Repo.get_by(queryable, clauses, opts) do
r
end
end
def gets_by!(queryable, clauses, opts \\ []) do
with r when is_nil(r) <- get_by(queryable, clauses, opts),
r <- Panglao.Repo.get_by!(queryable, clauses, opts) do
r
end
end
def execute_and_load(sql) do
execute_and_load(sql, [])
end
def execute_and_load(sql, params) do
Ecto.Adapters.SQL.query!(__MODULE__, sql, params)
|> load_into()
end
def execute_and_load(sql, params, model) do
Ecto.Adapters.SQL.query!(__MODULE__, sql, params)
|> load_into(model)
end
defp load_into(qs) do
cols = Enum.map qs.columns, & String.to_atom(&1)
Enum.map qs.rows, fn row ->
Enum.zip(cols, row) |> Enum.into(%{})
end
end
defp load_into(qs, model) do
Enum.map qs.rows, fn(row) ->
zip = Enum.zip(qs.columns, row)
fields = Enum.reduce(zip, %{}, fn({key, value}, map) ->
Map.put(map, key, value)
end)
__MODULE__.load model, fields
end
end
end
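# Illustrative usage of the raw-SQL helpers above (hypothetical table and
# schema names, not part of the original file):
#
#   # Rows as maps with atom keys:
#   iex> Panglao.RepoReader.execute_and_load("SELECT id, name FROM users")
#   [%{id: 1, name: "alice"}, ...]
#
#   # Rows loaded into an Ecto schema:
#   iex> Panglao.RepoReader.execute_and_load(
#   ...>   "SELECT * FROM users WHERE id = $1", [1], MyApp.User)
#   [%MyApp.User{id: 1, ...}]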
| 24.373134 | 63 | 0.613595 |
f725bca5cde08e65dd7ca0387bd5de187aef55cc | 2,100 | ex | Elixir | lib/blockchain_api/notifiers/hotspot_notifier.ex | pakorn186c/blockchain-api | 3c9fbc892e645f9bb144414f3da36749603f37bc | ["Apache-2.0"] | 17 | 2019-11-03T03:02:41.000Z | 2022-01-13T17:03:32.000Z | lib/blockchain_api/notifiers/hotspot_notifier.ex | AddressXception/blockchain-api | eea98fa78af2887cc84762f84532c602c3b8b666 | ["Apache-2.0"] | 5 | 2019-11-07T23:26:53.000Z | 2020-11-24T21:45:35.000Z | lib/blockchain_api/notifiers/hotspot_notifier.ex | AddressXception/blockchain-api | eea98fa78af2887cc84762f84532c602c3b8b666 | ["Apache-2.0"] | 11 | 2019-12-04T07:03:16.000Z | 2022-01-13T17:03:50.000Z |
defmodule BlockchainAPI.HotspotNotifier do
alias BlockchainAPI.{Schema.Hotspot, Util}
def send_new_hotspot_notification(pending_gateway) do
data = %{
hotspot_address: Util.bin_to_string(pending_gateway.gateway),
owner: Util.bin_to_string(pending_gateway.owner),
hash: Util.bin_to_string(pending_gateway.hash),
type: "addHotspotSuccess"
}
opts = %{external_id: UUID.uuid5(:oid, "#{pending_gateway.hash}success")}
animal_name = Hotspot.animal_name(pending_gateway.gateway)
message = "#{animal_name} has been added to the network!"
Util.notifier_client().post(data, message, data.owner, opts)
end
def send_add_hotspot_failed(:timed_out, pending_gateway) do
data = %{
hotspot_address: Util.bin_to_string(pending_gateway.gateway),
owner: Util.bin_to_string(pending_gateway.owner),
type: "addHotspotTimeOut"
}
opts = %{external_id: UUID.uuid5(:oid, "#{pending_gateway.hash}timed_out")}
message = "Unable to Add Hotspot. Transaction Timed Out."
Util.notifier_client().post(data, message, data.owner, opts)
end
def send_add_hotspot_failed(:already_exists, pending_gateway) do
data = %{
hotspot_address: Util.bin_to_string(pending_gateway.gateway),
owner: Util.bin_to_string(pending_gateway.owner),
type: "addHotspotAlreadyExists"
}
opts = %{external_id: UUID.uuid5(:oid, "#{pending_gateway.hash}already_exists")}
message = "Unable to Add Hotspot. Hotspot Already on Blockchain."
Util.notifier_client().post(data, message, data.owner, opts)
end
def send_confirm_location_failed(pending_location) do
data = %{
hotspot_address: Util.bin_to_string(pending_location.gateway),
owner: Util.bin_to_string(pending_location.owner),
type: "assertLocationFailure"
}
opts = %{external_id: UUID.uuid5(:oid, "#{pending_location.hash}failed")}
animal_name = Hotspot.animal_name(pending_location.gateway)
message = "#{animal_name} Added Without Location Information."
Util.notifier_client().post(data, message, data.owner, opts)
end
end
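# Illustrative call (hypothetical pending-gateway record, not part of the
# original file):
#
#   iex> BlockchainAPI.HotspotNotifier.send_new_hotspot_notification(
#   ...>   %{gateway: gw_bin, owner: owner_bin, hash: txn_hash})
#
# Note the deterministic UUID v5 `external_id`s above: retries for the same
# transaction hash map to the same id, so duplicate pushes collapse.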
| 41.176471 | 84 | 0.728571 |
f725cb696c1e7d3d9d2a18f41ee7255086374b2a | 1,849 | ex | Elixir | apps/nerves_hub_web_core/lib/nerves_hub_web_core/firmwares/upload/file.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | ["Apache-2.0"] | 1 | 2019-10-13T10:56:28.000Z | 2019-10-13T10:56:28.000Z | apps/nerves_hub_web_core/lib/nerves_hub_web_core/firmwares/upload/file.ex | Eaftos/nerves_hub_web | ac03bd044b97265bf3ba3edd8da249d300fa3668 | ["Apache-2.0"] | null | null | null | apps/nerves_hub_web_core/lib/nerves_hub_web_core/firmwares/upload/file.ex | Eaftos/nerves_hub_web | ac03bd044b97265bf3ba3edd8da249d300fa3668 | ["Apache-2.0"] | null | null | null |
defmodule NervesHubWebCore.Firmwares.Upload.File do
@moduledoc """
Local file adapter for CRUDing firmware files.
"""
@type upload_metadata :: %{local_path: String.t(), public_path: String.t()}
@spec upload_file(String.t(), upload_metadata()) ::
:ok
| {:error, atom()}
def upload_file(source, %{local_path: local_path}) do
with :ok <- local_path |> Path.dirname() |> File.mkdir_p(),
:ok <- File.cp(source, local_path) do
:ok
end
end
@spec download_file(Firmware.t()) ::
{:ok, String.t()}
| {:error, String.t()}
def download_file(firmware) do
{:ok, firmware.upload_metadata["public_path"]}
end
@spec delete_file(Firmware.t()) :: :ok
def delete_file(%{upload_metadata: %{local_path: path}}) do
# Sometimes fw files may be stored in temporary places that
# get cleared on reboots, especially when using this locally.
# So if the file doesn't exist, don't attempt to remove
if File.exists?(path), do: File.rm!(path), else: :ok
end
def delete_file(%{upload_metadata: %{"local_path" => path}}) do
delete_file(%{upload_metadata: %{local_path: path}})
end
@spec metadata(Org.id(), String.t()) :: upload_metadata()
def metadata(org_id, filename) do
config = Application.get_env(:nerves_hub_web_core, __MODULE__)
common_path = "#{org_id}"
local_path = Path.join([config[:local_path], common_path, filename])
url = Application.get_env(:nerves_hub_www, NervesHubWWWWeb.Endpoint)[:url]
port = if Enum.member?([443, 80], url[:port]), do: "", else: ":#{url[:port]}"
public_path =
"#{url[:scheme]}://#{url[:host]}#{port}/" <>
(Path.join([config[:public_path], common_path, filename])
|> String.trim("/"))
%{
public_path: public_path,
local_path: local_path
}
end
end
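# Illustrative round trip (hypothetical org id, filename and config values,
# not part of the original file):
#
#   iex> meta = NervesHubWebCore.Firmwares.Upload.File.metadata(42, "fw.fw")
#   %{local_path: ".../firmware/42/fw.fw",
#     public_path: "http://localhost/firmware/42/fw.fw"}
#   iex> NervesHubWebCore.Firmwares.Upload.File.upload_file("/tmp/fw.fw", meta)
#   :ok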
| 33.017857 | 81 | 0.638183 |
f7260e2e1e35641bfa430fe70213b56beedf67d0 | 2,003 | ex | Elixir | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/logging_destination.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/logging_destination.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/logging_destination.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.LoggingDestination do
@moduledoc """
Configuration of a specific logging destination (the producer project
or the consumer project).
## Attributes
* `logs` (*type:* `list(String.t)`, *default:* `nil`) - Names of the logs to be sent to this destination. Each name must
be defined in the Service.logs section. If the log name is
not a domain scoped name, it will be automatically prefixed with
the service name followed by "/".
* `monitoredResource` (*type:* `String.t`, *default:* `nil`) - The monitored resource type. The type must be defined in the
Service.monitored_resources section.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:logs => list(String.t()),
:monitoredResource => String.t()
}
field(:logs, type: :list)
field(:monitoredResource)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.LoggingDestination do
def decode(value, options) do
GoogleApi.ServiceConsumerManagement.V1.Model.LoggingDestination.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.LoggingDestination do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.418182 | 127 | 0.736395 |
f7262233d765e923095ecf023d5cb9d772587673 | 2,597 | exs | Elixir | test/resource/validation/compare_test.exs | smt116/ash | 880a17f197873eb1c8dc8d81a8b4d6d9cb570b3f | ["MIT"] | 528 | 2019-12-08T01:51:54.000Z | 2022-03-30T10:09:45.000Z | test/resource/validation/compare_test.exs | smt116/ash | 880a17f197873eb1c8dc8d81a8b4d6d9cb570b3f | ["MIT"] | 278 | 2019-12-04T15:25:06.000Z | 2022-03-31T03:40:51.000Z | test/resource/validation/compare_test.exs | smt116/ash | 880a17f197873eb1c8dc8d81a8b4d6d9cb570b3f | ["MIT"] | 53 | 2020-08-17T22:08:09.000Z | 2022-03-24T01:58:59.000Z |
defmodule Ash.Test.Resource.Validation.CompareTest do
@moduledoc false
use ExUnit.Case, async: true
alias Ash.Resource.Validation.Compare
defmodule Post do
use Ash.Resource
attributes do
uuid_primary_key :id
attribute :number_one, :integer
attribute :number_two, :integer
end
end
describe "greater than" do
test "validate success against number" do
{:ok, opts} = Compare.init(attribute: :number_one, greater_than: 1)
changeset = Post |> Ash.Changeset.new(%{number_one: 100})
assert :ok = Compare.validate(changeset, opts)
end
test "validate success against argument" do
{:ok, opts} = Compare.init(attribute: :number_one, greater_than: :foo)
changeset =
Post
|> Ash.Changeset.new(%{number_one: 100})
|> Ash.Changeset.set_argument(:foo, 1)
assert :ok = Compare.validate(changeset, opts)
end
test "validate success against attribute" do
{:ok, opts} = Compare.init(attribute: :number_one, greater_than: :number_two)
changeset =
Post
|> Ash.Changeset.new(%{number_one: 100, number_two: 1})
assert :ok = Compare.validate(changeset, opts)
end
test "validate failure against number" do
{:ok, opts} = Compare.init(attribute: :number_one, greater_than: 100)
changeset = Post |> Ash.Changeset.new(%{number_one: 1})
assert_error(changeset, opts, "must be greater than 100")
end
test "validate failure against argument" do
{:ok, opts} = Compare.init(attribute: :number_one, greater_than: :foo)
changeset =
Post
|> Ash.Changeset.new(%{number_one: 1})
|> Ash.Changeset.set_argument(:foo, 100)
assert_error(changeset, opts, "must be greater than foo")
end
test "validate failure against attribute" do
{:ok, opts} = Compare.init(attribute: :number_one, greater_than: :number_two)
changeset =
Post
|> Ash.Changeset.new(%{number_one: 1, number_two: 100})
assert_error(changeset, opts, "must be greater than number_two")
end
end
defp assert_error(changeset, opts, expected_message) do
{:error, %{message: message, vars: vars}} = Compare.validate(changeset, opts)
assert expected_message == translate_message(message, vars)
end
defp translate_message(message, vars) do
Enum.reduce(vars, message, fn {key, value}, acc ->
if String.contains?(acc, "%{#{key}}") do
String.replace(acc, "%{#{key}}", to_string(value))
else
acc
end
end)
end
end
| 28.622222 | 83 | 0.651398 |
f7265363c4c7b6966f9c68c87ee935499737e4f7 | 1,906 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/dimension.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | ["Apache-2.0"] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/dimension.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | ["Apache-2.0"] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/dimension.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | ["Apache-2.0"] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Model.Dimension do
@moduledoc """
This message carries publisher provided breakdown. E.g. {dimension_type: 'COUNTRY', [{dimension_value: {id: 1, name: 'US'}}, {dimension_value: {id: 2, name: 'UK'}}]}
## Attributes
- dimensionType (String.t): Defaults to: `null`.
- dimensionValues ([DimensionDimensionValue]): Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dimensionType => any(),
:dimensionValues =>
list(GoogleApi.AdExchangeBuyer.V14.Model.DimensionDimensionValue.t())
}
field(:dimensionType)
field(
:dimensionValues,
as: GoogleApi.AdExchangeBuyer.V14.Model.DimensionDimensionValue,
type: :list
)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V14.Model.Dimension do
def decode(value, options) do
GoogleApi.AdExchangeBuyer.V14.Model.Dimension.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V14.Model.Dimension do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.438596 | 191 | 0.728227 |
f7266cf065a72b17b78ea778644759f26691bbf2 | 1,366 | ex | Elixir | lib/elixlsx/compiler/string_db.ex | leandrocp/elixlsx | df235284306d33becf4d6090b901c10422c237f0 | ["MIT"] | 215 | 2015-11-24T09:11:30.000Z | 2022-02-08T13:38:22.000Z | lib/elixlsx/compiler/string_db.ex | leandrocp/elixlsx | df235284306d33becf4d6090b901c10422c237f0 | ["MIT"] | 90 | 2016-06-06T13:00:06.000Z | 2022-03-30T21:09:50.000Z | lib/elixlsx/compiler/string_db.ex | leandrocp/elixlsx | df235284306d33becf4d6090b901c10422c237f0 | ["MIT"] | 92 | 2016-01-06T14:45:28.000Z | 2022-03-16T12:29:58.000Z |
defmodule Elixlsx.Compiler.StringDB do
alias Elixlsx.Compiler.StringDB
alias Elixlsx.XML
@moduledoc ~S"""
Strings in XLSX can be stored in a sharedStrings.xml file and be looked up
by ID. This module handles collection of the data in the preprocessing phase.
"""
defstruct strings: %{}, element_count: 0
@type t :: %StringDB{
strings: %{String.t() => non_neg_integer},
element_count: non_neg_integer
}
@spec register_string(StringDB.t(), String.t()) :: StringDB.t()
def register_string(stringdb, s) do
case Map.fetch(stringdb.strings, s) do
:error ->
%StringDB{
strings: Map.put(stringdb.strings, s, stringdb.element_count),
element_count: stringdb.element_count + 1
}
{:ok, _} ->
stringdb
end
end
def get_id(stringdb, s) do
case Map.fetch(stringdb.strings, s) do
:error ->
if XML.valid?(s) do
raise %ArgumentError{
message: "Invalid key provided for StringDB.get_id: " <> inspect(s)
}
else
# if the xml is invalid, then we never wanted it in the stringdb to
# begin with
-1
end
{:ok, id} ->
id
end
end
def sorted_id_string_tuples(stringdb) do
Enum.map(stringdb.strings, fn {k, v} -> {v, k} end) |> Enum.sort()
end
end
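# Illustrative round trip (not part of the original file): registering the
# same string twice produces a single shared-strings entry, and `get_id/2`
# returns the 0-based index later written to sharedStrings.xml:
#
#   iex> alias Elixlsx.Compiler.StringDB
#   iex> db = StringDB.register_string(%StringDB{}, "hello")
#   iex> db = StringDB.register_string(db, "hello")
#   iex> StringDB.get_id(db, "hello")
#   0
#   iex> StringDB.sorted_id_string_tuples(db)
#   [{0, "hello"}]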
| 26.269231 | 79 | 0.603953 |
f7266e2e9481bd53640dfb33abad1e94149febba | 3,295 | ex | Elixir | lib/purple_web/live/run_live/run_form.ex | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | ["MIT"] | null | null | null | lib/purple_web/live/run_live/run_form.ex | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | ["MIT"] | null | null | null | lib/purple_web/live/run_live/run_form.ex | knoebber/petaller | 3532db5a3688459127d2427af42e32ca0f494d44 | ["MIT"] | null | null | null |
defmodule PurpleWeb.RunLive.RunForm do
use PurpleWeb, :live_component
alias Purple.Activities
defp save_run(socket, :edit, params), do: Activities.update_run(socket.assigns.run, params)
defp save_run(_socket, :new, params), do: Activities.create_run(params)
@impl true
def update(%{run: run} = assigns, socket) do
changeset = Activities.change_run(run)
{
:ok,
socket
|> assign(assigns)
|> assign(:changeset, changeset)
|> assign(:duration_in_seconds, run.seconds)
|> assign(:miles, run.miles)
}
end
@impl true
def handle_event("save", %{"run" => run_params}, socket) do
case save_run(socket, socket.assigns.action, run_params) do
{:ok, run} ->
Purple.Tags.sync_tags(run.id, :run)
{:noreply,
socket
|> put_flash(:info, "Run saved")
|> push_patch(to: socket.assigns.return_to)}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, changeset: changeset)}
end
end
@impl true
def handle_event("calculate_pace", %{"run" => run_params}, socket) do
changeset =
socket.assigns.run
|> Activities.change_run(run_params)
|> Map.put(:action, :validate)
if changeset.valid? do
duration_in_seconds = Ecto.Changeset.get_field(changeset, :seconds)
miles = Ecto.Changeset.get_field(changeset, :miles)
{:noreply,
socket
|> assign(:changeset, changeset)
|> assign(:duration_in_seconds, duration_in_seconds)
|> assign(:miles, miles)}
else
{:noreply, assign(socket, :changeset, changeset)}
end
end
@impl true
def render(assigns) do
~H"""
<section>
<.form
for={@changeset}
id="run-form"
let={f}
phx-submit="save"
phx-change="calculate_pace"
phx-target={@myself}
>
<div class="flex flex-col mb-2">
<div class="flex mb-2 gap-2">
<%= label(f, :miles, phx_hook: "AutoFocus", class: "w-1/2") %>
<%= label(f, :date, class: "w-1/2") %>
</div>
<div class="flex mb-2 gap-2">
<%= number_input(f, :miles, step: "any", class: "w-1/2") %>
<%= date_input(f, :date, class: "w-1/2") %>
</div>
<div class="flex mb-2 gap-2">
<%= label(f, :hours, class: "w-1/3") %>
<%= label(f, :minutes, class: "w-1/3") %>
<%= label(f, :minute_seconds, "Seconds", class: "w-1/3") %>
</div>
<div class="flex mb-2 gap-2">
<%= number_input(f, :hours, class: "w-1/3") %>
<%= number_input(f, :minutes, class: "w-1/3") %>
<%= number_input(f, :minute_seconds, class: "w-1/3") %>
</div>
<%= label(f, :description) %>
<%= textarea(f, :description, rows: @rows) %>
<p class="mt-2">
<%= if @changeset.valid? do %>
Pace:
<strong>
<%= format_pace(@miles, @duration_in_seconds) %>
</strong>
<% else %>
Invalid
<% end %>
</p>
</div>
<div>
<%= submit("Save", phx_disable_with: "Saving...") %>
</div>
</.form>
</section>
"""
end
end
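# Comment added for clarity (not part of the original file): every form
# change fires "calculate_pace", which revalidates the changeset and, when
# valid, refreshes the live pace preview rendered in the template above;
# only the "save" event actually persists via save_run/3.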
| 29.419643 | 93 | 0.526555 |
f726918df1ae49f14317bc21fd571f8a1bdce87b | 10,458 | ex | Elixir | lib/cachex/router.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | ["MIT"] | null | null | null | lib/cachex/router.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | ["MIT"] | null | null | null | lib/cachex/router.ex | botwerk/cachex | d37996d3be35b0d8281e347d44c024ecf2735131 | ["MIT"] | null | null | null |
defmodule Cachex.Router do
@moduledoc """
Routing module to dispatch Cachex actions to their execution environment.
This module acts as the single source of dispatch within Cachex. In prior
versions the backing actions were called directly from the main interface
and were wrapped in macros, which was difficult to maintain and also quite
noisy. Now that all execution flows via the router, this is no longer an
issue and it also serves as a gateway to distribution in the future.
"""
alias Cachex.Router
alias Cachex.Services
# add some service aliases
alias Services.Informant
alias Services.Overseer
# import macro stuff
import Cachex.Errors
import Cachex.Spec
##############
# Public API #
##############
@doc """
Dispatches a call to an appropriate execution environment.
This acts as a macro just to avoid the overhead of slicing up module
names at runtime, when they can be guaranteed at compile time much
more easily.
"""
defmacro call(cache, { action, _arguments } = call) do
act_name =
action
|> Kernel.to_string
|> String.replace_trailing("?", "")
|> Macro.camelize
act_join = :"Elixir.Cachex.Actions.#{act_name}"
quote do
Overseer.enforce(unquote(cache)) do
Router.execute(var!(cache), unquote(act_join), unquote(call))
end
end
end
@doc """
Executes a previously dispatched action.
This macro should not be called externally; the only reason it remains
public is due to the code injected by the `call/2` macro.
"""
defmacro execute(cache, module, call) do
quote do
current = node()
case unquote(cache) do
cache(nodes: [ ^current ]) ->
unquote(configure_local(cache, module, call))
cache(nodes: remote_nodes) ->
unquote(configure_remote(cache, module, call, quote do: remote_nodes))
end
end
end
@doc false
# Results merging for distributed cache results.
#
# Follows these rules:
#
# - Lists are always concatenated.
# - Numbers are always summed.
# - Booleans are always AND-ed.
# - Maps are always merged (recursively).
#
# This has to be public due to scopes, but we hide the docs
# because we don't really care for anybody else calling it.
def result_merge(left, right) when is_list(left),
do: left ++ right
def result_merge(left, right) when is_number(left),
do: left + right
def result_merge(left, right) when is_boolean(left),
do: left && right
def result_merge(left, right) when is_map(left) do
Map.merge(left, right, fn
(:creation_date, _left, right) ->
right
(key, left, right) when key in [ :hit_rate, :miss_rate ] ->
(left + right) / 2
(_key, left, right) ->
result_merge(left, right)
end)
end
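# Illustrative merges (not part of the original module), following the
# rules documented above:
#
#   iex> Cachex.Router.result_merge([1], [2])
#   [1, 2]
#   iex> Cachex.Router.result_merge(3, 4)
#   7
#   iex> Cachex.Router.result_merge(%{hits: 1, hit_rate: 50.0}, %{hits: 2, hit_rate: 100.0})
#   %{hit_rate: 75.0, hits: 3}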
###############
# Private API #
###############
# Provides handling for local actions on this node.
#
# This will provide handling of notifications across hooks before and after
# the execution of an action. This is taken from code formerly in the old
# `Cachex.Actions` module, but has been moved here as it's more appropriate.
#
# If `notify` is set to false, notifications are disabled and the call is
# simply executed as is. If `via` is provided, you can override the handle
# passed to the hooks (useful for re-use of functions). An example of this
# is `decr/4` which simply calls `incr/4` with `via: { :decr, arguments }`.
defp configure_local(cache, module, { _action, arguments } = call) do
quote do
call = unquote(call)
cache = unquote(cache)
module = unquote(module)
arguments = unquote(arguments)
option = List.last(arguments)
notify = Keyword.get(option, :notify, true)
message = notify && case option[:via] do
msg when not is_tuple(msg) -> call
msg -> msg
end
notify && Informant.broadcast(cache, message)
result = apply(module, :execute, [ cache | arguments ])
if notify do
Informant.broadcast(
cache,
message,
Keyword.get(option, :hook_result, result)
)
end
result
end
end
# actions based on a key
@keyed_actions [
:del, :exists?, :expire, :fetch, :get, :get_and_update,
:incr, :invoke, :put, :refresh, :take, :touch,
:ttl, :update
]
# Provides handling to key-based actions distributed to remote nodes.
#
# The algorithm here is simple; hash the key and slot the value using JCH into
# the total number of slots available (i.e. the count of the nodes). If it comes
# out to the local node, just execute the local code, otherwise RPC the base call
# to the remote node, and just assume that it'll correctly handle it.
defp configure_remote(cache, module, { action, [ key | _ ] } = call, nodes)
when action in @keyed_actions,
do: call_slot(cache, module, call, nodes, slot_key(key, nodes))
# actions which merge outputs
@merge_actions [
:clear, :count, :empty?, :export,
:import, :keys, :purge, :reset,
:size, :stats
]
# Provides handling of cross-node actions distributed over remote nodes.
#
# This will do an RPC call across all nodes to fetch their results and merge
# them with the results on the local node. The hooks will only be notified
# on the local node, due to an annoying recursion issue when handling the
# same across all nodes - seems to provide better logic though.
defp configure_remote(cache, module, { action, arguments } = call, nodes)
when action in @merge_actions do
quote do
# :bind_quoted
call = unquote(call)
cache = unquote(cache)
nodes = unquote(nodes)
module = unquote(module)
arguments = unquote(arguments)
# all calls have options we can use
options = List.last(arguments)
results =
# can force local node setting local: true
case Keyword.get(options, :local) do
true -> []
_any ->
# don't want to execute on the local node
other_nodes = List.delete(nodes, node())
# execute the call on all other nodes
{ results, _ } = :rpc.multicall(
other_nodes,
module,
:execute,
[ cache | arguments ]
)
results
end
# execution on the local node, using the local macros and then unpack
{ :ok, result } = (unquote(configure_local(cache, module, call)))
# results merge
merge_result =
results
|> Enum.map(&elem(&1, 1))
|> Enum.reduce(result, &Router.result_merge/2)
# return after merge
{ :ok, merge_result }
end
end
# actions which always run locally
@local_actions [ :dump, :inspect, :load ]
# Provides handling of `:inspect` operations.
#
# These operations are guaranteed to run on the local nodes.
defp configure_remote(cache, module, { action, _arguments } = call, _nodes)
when action in @local_actions,
do: configure_local(cache, module, call)
# Provides handling of `:put_many` operations.
#
# These operations can only execute if their keys slot to the same remote nodes.
defp configure_remote(cache, module, { :put_many, _arguments } = call, nodes),
do: multi_call_slot(cache, module, call, nodes, quote do: &elem(&1, 0))
# Provides handling of `:transaction` operations.
#
# These operations can only execute if their keys slot to the same remote nodes.
defp configure_remote(cache, module, { :transaction, [ keys | _ ] } = call, nodes) do
case keys do
[] -> configure_local(cache, module, call)
__ -> multi_call_slot(cache, module, call, nodes, quote do: &(&1))
end
end
# Any other actions are explicitly disabled in distributed environments.
defp configure_remote(_cache, _module, _call, _nodes),
do: error(:non_distributed)
# Calls a slot for the provided cache action.
#
# This will determine a local slot and delegate locally if so, bypassing
# any RPC calls required. This function currently assumes that there are
# local variables available named "remote_nodes" and "slot", until I
# figure out how to better improve the macro scoping in use locally.
defp call_slot(cache, module, { action, arguments } = call, nodes, slot) do
quote do
slot = unquote(slot)
nodes = unquote(nodes)
action = unquote(action)
arguments = unquote(arguments)
cache(name: name) = unquote(cache)
case Enum.at(nodes, slot) do
^current ->
unquote(configure_local(cache, module, call))
targeted ->
result = :rpc.call(
targeted,
Cachex,
action,
[ name | arguments ]
)
with { :badrpc, reason } <- result do
{ :error, reason }
end
end
end
end
# Calls a slot for the provided cache action if all keys slot to the same node.
#
# This is a delegate handler for `call_slot/5`, but ensures that all keys slot to the
# same node to avoid the case where we have to fork a call out internally.
defp multi_call_slot(cache, module, { _action, [ keys | _ ] } = call, nodes, mapper) do
quote do
# :bind_quoted
keys = unquote(keys)
mapper = unquote(mapper)
# map all keys to a slot in the nodes list
slots = Enum.map(keys, fn(key) ->
# basically just slot_key(mapper.(key), nodes)
unquote(slot_key((quote do: mapper.(key)), nodes))
end)
# unique to avoid dups
case Enum.uniq(slots) do
# if there's a single slot it's safe to continue with the call to the remote
[ slot ] -> unquote(call_slot(cache, module, call, nodes, quote do: slot))
# otherwise, cross_slot errors!
_disable -> error(:cross_slot)
end
end
end
# Slots a key into the list of provided nodes.
#
# This uses `:erlang.phash2/1` to hash the key to a numeric value,
# as keys can be basically any type - so others hashes would be
# more expensive due to the serialization costs. Note that the
# collision possibility isn't really relevant, as long as there's
# a uniformly random collision possibility.
defp slot_key(key, nodes) do
quote bind_quoted: [ key: key, nodes: nodes ] do
key
|> :erlang.phash2
|> Jumper.slot(length(nodes))
end
end
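# Illustrative slotting (not part of the original module): for a fixed
# node list the same key always lands on the same index, e.g.
#
#   iex> :erlang.phash2("my_key") |> Jumper.slot(3)
#   # => a stable integer in 0..2, identical on every node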
end
| 32.579439 | 89 | 0.645726 |
f726ad64f6609732ee78a6d0ae23ebdf56d62a45 | 542 | exs | Elixir | test/models/build_job_status_test.exs | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | ["Apache-2.0"] | 2 | 2019-03-05T16:29:10.000Z | 2020-01-17T14:11:48.000Z | test/models/build_job_status_test.exs | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | ["Apache-2.0"] | 3 | 2019-03-18T20:26:48.000Z | 2020-06-25T14:31:13.000Z | test/models/build_job_status_test.exs | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | ["Apache-2.0"] | 1 | 2018-06-16T15:32:25.000Z | 2018-06-16T15:32:25.000Z |
defmodule CncfDashboardApi.BuildJobStatusTest do
use CncfDashboardApi.ModelCase
alias CncfDashboardApi.BuildJobStatus
@valid_attrs %{pipeline_id: 42, pipeline_monitor_id: 42, status: "some content"}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = BuildJobStatus.changeset(%BuildJobStatus{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = BuildJobStatus.changeset(%BuildJobStatus{}, @invalid_attrs)
refute changeset.valid?
end
end
| 28.526316 | 82 | 0.771218 |
f726b29f8c54eda0cc0ec7c53940f363b67dbbb3 | 206 | ex | Elixir | lib/helpers/subscription_helper.ex | artsy/aprb | 9e93200462a76823c831b92f02bddcf1b326a451 | ["MIT"] | 11 | 2016-08-18T23:18:57.000Z | 2019-05-03T17:46:55.000Z | lib/helpers/subscription_helper.ex | artsy/aprb | 9e93200462a76823c831b92f02bddcf1b326a451 | ["MIT"] | 105 | 2016-08-17T23:36:07.000Z | 2019-09-26T18:14:24.000Z | lib/helpers/subscription_helper.ex | artsy/aprb | 9e93200462a76823c831b92f02bddcf1b326a451 | ["MIT"] | 14 | 2016-08-17T17:23:45.000Z | 2019-09-16T16:14:59.000Z |
defmodule Aprb.SubscriptionHelper do
def parsed_verb(event) do
initial = if event["properties"]["partner"]["initial_subscription"], do: '-initial', else: ''
"#{event["verb"]}#{initial}"
end
end
| 29.428571 | 97 | 0.679612 |
f726befca473f040236c4a9c6cf31b4d79e3943a | 1,122 | ex | Elixir | chapter7/lib/database.ex | aifrak/elixir-in-action-exercises | 45096cf6c74af2ea691e1df1e31058cb749214c2 | [
"MIT"
] | null | null | null | chapter7/lib/database.ex | aifrak/elixir-in-action-exercises | 45096cf6c74af2ea691e1df1e31058cb749214c2 | [
"MIT"
] | null | null | null | chapter7/lib/database.ex | aifrak/elixir-in-action-exercises | 45096cf6c74af2ea691e1df1e31058cb749214c2 | [
"MIT"
] | null | null | null | defmodule Todo.Database do
use GenServer
@db_folder "./persist"
def start do
GenServer.start(__MODULE__, nil, name: __MODULE__)
end
def store(key, data) do
GenServer.cast(__MODULE__, {:store, key, data})
end
def get(key) do
GenServer.call(__MODULE__, {:get, key})
end
@impl GenServer
def init(_) do
File.mkdir_p!(@db_folder)
workers = create_workers()
{:ok, workers}
end
defp create_workers() do
Stream.map(0..2, &create_worker/1)
|> Enum.into(%{})
end
defp create_worker(worker_id) do
{:ok, worker} = Todo.DatabaseWorker.start(@db_folder)
{worker_id, worker}
end
@impl GenServer
def handle_cast({:store, key, data}, workers) do
workers
|> choose_worker(key)
|> Todo.DatabaseWorker.store(key, data)
{:noreply, workers}
end
@impl GenServer
def handle_call({:get, key}, _, workers) do
data =
workers
|> choose_worker(key)
|> Todo.DatabaseWorker.get(key)
{:reply, data, workers}
end
defp choose_worker(workers, index) do
workers
|> Map.get(:erlang.phash2(index, 3))
end
end
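# Illustrative usage (hypothetical data, not part of the original file):
# keys are hashed with :erlang.phash2/2 onto one of the three workers, so
# the same key is always served by the same worker process:
#
#   iex> Todo.Database.start()
#   iex> Todo.Database.store("bob", [%{date: ~D[2023-01-01], title: "Dentist"}])
#   :ok
#   iex> Todo.Database.get("bob")
#   [%{date: ~D[2023-01-01], title: "Dentist"}]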
| 18.096774 | 57 | 0.641711 |
f726f272f88d2d3106c03c01f64e6e6ca8bbfa50 | 2,597 | ex | Elixir | clients/cloud_channel/lib/google_api/cloud_channel/v1/model/google_cloud_channel_v1_transfer_entitlements_to_google_request.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_channel/lib/google_api/cloud_channel/v1/model/google_cloud_channel_v1_transfer_entitlements_to_google_request.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_channel/lib/google_api/cloud_channel/v1/model/google_cloud_channel_v1_transfer_entitlements_to_google_request.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudChannel.V1.Model.GoogleCloudChannelV1TransferEntitlementsToGoogleRequest do
@moduledoc """
Request message for CloudChannelService.TransferEntitlementsToGoogle.
## Attributes
* `entitlements` (*type:* `list(GoogleApi.CloudChannel.V1.Model.GoogleCloudChannelV1Entitlement.t)`, *default:* `nil`) - Required. The entitlements to transfer to Google.
* `requestId` (*type:* `String.t`, *default:* `nil`) - Optional. You can specify an optional unique request ID, and if you need to retry your request, the server will know to ignore the request if it's complete. For example, you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if it received the original operation with the same request ID. If it did, it will ignore the second request. The request ID must be a valid [UUID](https://tools.ietf.org/html/rfc4122) with the exception that zero UUID is not supported (`00000000-0000-0000-0000-000000000000`).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:entitlements =>
list(GoogleApi.CloudChannel.V1.Model.GoogleCloudChannelV1Entitlement.t()) | nil,
:requestId => String.t() | nil
}
field(:entitlements,
as: GoogleApi.CloudChannel.V1.Model.GoogleCloudChannelV1Entitlement,
type: :list
)
field(:requestId)
end
defimpl Poison.Decoder,
for: GoogleApi.CloudChannel.V1.Model.GoogleCloudChannelV1TransferEntitlementsToGoogleRequest do
def decode(value, options) do
GoogleApi.CloudChannel.V1.Model.GoogleCloudChannelV1TransferEntitlementsToGoogleRequest.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.CloudChannel.V1.Model.GoogleCloudChannelV1TransferEntitlementsToGoogleRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 43.283333 | 638 | 0.758183 |
f726ff4ae26227976236e9eb46d81c84f621f621 | 2,651 | ex | Elixir | lib/binance/rest/futures_http_client.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | ["MIT"] | null | null | null | lib/binance/rest/futures_http_client.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | ["MIT"] | null | null | null | lib/binance/rest/futures_http_client.ex | Fadhil/binance.ex | b4cda870c9fab475e43f2498f8f28dec0353e952 | ["MIT"] | 1 | 2021-02-22T22:56:22.000Z | 2021-02-22T22:56:22.000Z |
defmodule Binance.Rest.FuturesHTTPClient do
@futures_endpoint Application.get_env(:binance, :futures_endpoint)
def get_futures(url, headers \\ []) do
HTTPoison.get("#{@futures_endpoint}#{url}", headers)
|> parse_response
end
def get_futures(url, params, secret_key, api_key) do
case prepare_request(url, params, secret_key, api_key) do
{:error, _} = error ->
error
{:ok, url, headers} ->
get_futures(url, headers)
end
end
defp prepare_request(url, params, secret_key, api_key) do
case validate_credentials(secret_key, api_key) do
{:error, _} = error ->
error
_ ->
headers = [{"X-MBX-APIKEY", api_key}]
receive_window = 5000
ts = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
params =
Map.merge(params, %{
timestamp: ts,
recvWindow: receive_window
})
argument_string = URI.encode_query(params)
signature =
:crypto.mac(
:hmac,
:sha256,
secret_key,
argument_string
)
|> Base.encode16()
{:ok, "#{url}?#{argument_string}&signature=#{signature}", headers}
end
end
def post_futures(url, params) do
# generate signature
api_key = Application.get_env(:binance, :api_key)
secret_key = Application.get_env(:binance, :secret_key)
{:ok, url, headers} = prepare_request(url, params, secret_key, api_key)
case HTTPoison.post("#{@futures_endpoint}#{url}", [], headers) do
{:error, err} ->
{:error, {:http_error, err}}
{:ok, response} ->
case Poison.decode(response.body) do
{:ok, data} -> {:ok, data}
{:error, err} -> {:error, {:poison_decode_error, err}}
end
end
end
defp validate_credentials(nil, nil),
do: {:error, {:config_missing, "Secret and API key missing"}}
defp validate_credentials(nil, _api_key),
do: {:error, {:config_missing, "Secret key missing"}}
defp validate_credentials(_secret_key, nil),
do: {:error, {:config_missing, "API key missing"}}
defp validate_credentials(_secret_key, _api_key),
do: :ok
defp parse_response({:ok, response}) do
response.body
|> Poison.decode()
|> parse_response_body
end
defp parse_response({:error, err}) do
{:error, {:http_error, err}}
end
defp parse_response_body({:ok, data}) do
case data do
%{"code" => _c, "msg" => _m} = error -> {:error, error}
_ -> {:ok, data}
end
end
defp parse_response_body({:error, err}) do
{:error, {:poison_decode_error, err}}
end
end
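# Illustrative signed request (hypothetical keys and endpoint, not part of
# the original file):
#
#   iex> Binance.Rest.FuturesHTTPClient.get_futures(
#   ...>   "/fapi/v2/account", %{}, "secret...", "api-key...")
#   {:ok, %{...}}
#
# `prepare_request/4` appends `timestamp`/`recvWindow`, signs the query
# string with HMAC-SHA256 using the secret key, and sends the API key in
# the "X-MBX-APIKEY" header, as Binance's signed endpoints expect.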
| 26.247525 | 75 | 0.6043 |
f7271648242ca0c0c55519c051f95c0a07ccf6f3 | 816 | ex | Elixir | lib/nautilus/core/admin/admin_message_router.ex | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | ["MIT"] | null | null | null | lib/nautilus/core/admin/admin_message_router.ex | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | ["MIT"] | null | null | null | lib/nautilus/core/admin/admin_message_router.ex | CarloHFR/NautilusGateway | 26211948c5f9127e6662a90e41df5b43b2408372 | ["MIT"] | null | null | null |
defmodule Nautilus.Core.Admin.AdminMessageRouter do
@moduledoc """
This module is responsible for routing admin and control messages.
"""
@split Application.get_env(:nautilus, :Split)
@command_router Application.get_env(:nautilus, :CommandRouter)
@response_router Application.get_env(:nautilus, :ResponseRouter)
def route_message(pid, message = %{"type" => "response"}) do
{_, content} = @split.split_content(message["content"])
@response_router.route_response(pid, message, content)
end
def route_message(pid, message = %{"type" => "command"}) do
{_, content} = @split.split_content(message["content"])
@command_router.route_command(pid, message, content)
end
def route_message(_pid, _) do
{:error, :undefined_type}
end
end
| 28.137931 | 68 | 0.678922 |
f72729caaeba5b6b53c60e4367a0b880612a1ddc | 2,418 | exs | Elixir | back/test/api_web/controllers/user_controller_test.exs | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | ["MIT"] | null | null | null | back/test/api_web/controllers/user_controller_test.exs | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | ["MIT"] | null | null | null | back/test/api_web/controllers/user_controller_test.exs | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | ["MIT"] | null | null | null |
defmodule ApiWeb.UserControllerTest do
use ApiWeb.ConnCase
alias Api.Auth
alias Api.Auth.User
@create_attrs %{email: "some email", username: "some username"}
@update_attrs %{email: "some updated email", username: "some updated username"}
@invalid_attrs %{email: nil, username: nil}
def fixture(:user) do
{:ok, user} = Auth.create_user(@create_attrs)
user
end
setup %{conn: conn} do
{:ok, conn: put_req_header(conn, "accept", "application/json")}
end
describe "index" do
test "lists all users", %{conn: conn} do
conn = get conn, user_path(conn, :index)
assert json_response(conn, 200)["data"] == []
end
end
describe "create user" do
test "renders user when data is valid", %{conn: conn} do
conn = post conn, user_path(conn, :create), user: @create_attrs
assert %{"id" => id} = json_response(conn, 201)["data"]
conn = get conn, user_path(conn, :show, id)
assert json_response(conn, 200)["data"] == %{
"id" => id,
"email" => "some email",
"username" => "some username"}
end
test "renders errors when data is invalid", %{conn: conn} do
conn = post conn, user_path(conn, :create), user: @invalid_attrs
assert json_response(conn, 422)["errors"] != %{}
end
end
describe "update user" do
setup [:create_user]
test "renders user when data is valid", %{conn: conn, user: %User{id: id} = user} do
conn = put conn, user_path(conn, :update, user), user: @update_attrs
assert %{"id" => ^id} = json_response(conn, 200)["data"]
conn = get conn, user_path(conn, :show, id)
assert json_response(conn, 200)["data"] == %{
"id" => id,
"email" => "some updated email",
"username" => "some updated username"}
end
test "renders errors when data is invalid", %{conn: conn, user: user} do
conn = put conn, user_path(conn, :update, user), user: @invalid_attrs
assert json_response(conn, 422)["errors"] != %{}
end
end
describe "delete user" do
setup [:create_user]
test "deletes chosen user", %{conn: conn, user: user} do
conn = delete conn, user_path(conn, :delete, user)
assert response(conn, 204)
assert_error_sent 404, fn ->
get conn, user_path(conn, :show, user)
end
end
end
defp create_user(_) do
user = fixture(:user)
{:ok, user: user}
end
end
| 29.487805 | 88 | 0.615798 |
f7273f9eb645aef2a60972720e51171742bfe158 | 4,310 | ex | Elixir | lib/mix/tasks/timber/install/timber_config_file.ex | montebrown/timber-elixir | 1e177cc426422be3617479143038f5882037752f | ["0BSD"] | null | null | null | lib/mix/tasks/timber/install/timber_config_file.ex | montebrown/timber-elixir | 1e177cc426422be3617479143038f5882037752f | ["0BSD"] | null | null | null | lib/mix/tasks/timber/install/timber_config_file.ex | montebrown/timber-elixir | 1e177cc426422be3617479143038f5882037752f | ["0BSD"] | null | null | null |
defmodule Mix.Tasks.Timber.Install.TimberConfigFile do
@moduledoc false
alias Mix.Tasks.Timber.Install.{FileHelper, IOHelper}
@deprioritized_platforms ["linux", "other"]
@file_name "timber.exs"
@file_path Path.join(["config", @file_name])
# Adds the config/timber.exs file to be linked in config/config.exs
def create!(application, project, api) do
contents = """
use Mix.Config
#{endpoint_portion(project)}#{repo_portion(project)}
# Use Timber as the logger backend
# Feel free to add additional backends if you want to send you logs to multiple devices.
#{timber_portion(application, api)}
# For the following environments, do not log to the Timber service. Instead, log to STDOUT
# and format the logs properly so they are human readable.
environments_to_exclude = [:test]
if Enum.member?(environments_to_exclude, Mix.env()) do
# Fall back to the default `:console` backend with the Timber custom formatter
config :logger,
backends: [:console],
utc_log: true
config :logger, :console,
format: {Timber.Formatter, :format},
metadata: #{logger_console_metadata_portion()}
config :timber, Timber.Formatter,
colorize: true,
format: :logfmt,
print_timestamps: true,
print_log_level: true,
print_metadata: false # turn this on to view the additional metadata
end
# Need help?
# Email us: [email protected]
# Or, file an issue: https://github.com/timberio/timber-elixir/issues
"""
FileHelper.write!(@file_path, contents, api)
end
defp endpoint_portion(%{endpoint_module_name: nil}), do: ""
defp endpoint_portion(%{mix_name: mix_name, endpoint_module_name: endpoint_module_name}) do
"""
# Update the instrumenters so that we can structure Phoenix logs
config :#{mix_name}, #{endpoint_module_name},
instrumenters: [Timber.Integrations.PhoenixInstrumenter]
"""
end
defp logger_console_metadata_portion do
current_elixir_version = System.version() |> Version.parse!()
all_metadata_elixir_version = Version.parse!("1.6.0")
case Version.compare(current_elixir_version, all_metadata_elixir_version) do
:gt ->
":all"
:eq ->
":all"
:lt ->
"[:timber_context, :event, :context, :application, :file, :function, :line, :module, :meta]"
end
end
defp repo_portion(%{repo_module_name: nil}), do: ""
defp repo_portion(%{mix_name: mix_name, repo_module_name: repo_module_name}) do
"""
# Structure Ecto logs
config :#{mix_name}, #{repo_module_name},
loggers: [{Timber.Integrations.EctoLogger, :log, []}]
"""
end
defp timber_portion(%{platform_type: platform_type}, api)
when platform_type in @deprioritized_platforms do
"""
# Deliver logs via HTTP to the Timber API by using the Timber HTTP backend.
config :logger,
backends: [Timber.LoggerBackends.HTTP],
utc_log: true
config :timber,
api_key: #{api_key_portion(api)}
"""
end
defp timber_portion(_application, _api) do
"""
# Use the `:console` backend provided with Logger but customize
# it to use Timber's internal formatting system
config :logger,
backends: [:console],
utc_log: true
config :logger, :console,
format: {Timber.Formatter, :format},
metadata: #{logger_console_metadata_portion()}
"""
end
defp api_key_portion(%{api_key: api_key} = api) do
"""
How would you prefer to store your Timber API key?
1) In the TIMBER_LOGS_KEY environment variable
2) Inline within the #{@file_path} file
"""
|> IOHelper.puts()
case IOHelper.ask("Enter your choice (1/2)", api) do
"1" ->
"{:system, \"TIMBER_LOGS_KEY\"}"
"2" ->
"\"#{api_key}\""
other ->
"Sorry #{inspect(other)} is not a valid input. Please try again."
|> IOHelper.puts(:red)
api_key_portion(api)
end
end
def file_path, do: @file_path
def link!(config_file_path, api) do
contents = """
# Import Timber, structured logging
import_config \"#{@file_name}\"
"""
check = "import_config \"#{@file_name}\""
FileHelper.append_once!(config_file_path, contents, check, api)
end
end
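# Illustrative invocation (hypothetical argument shapes, not part of the
# original file); the installer calls this roughly as:
#
#   TimberConfigFile.create!(
#     %{platform_type: "heroku"},
#     %{mix_name: "my_app", endpoint_module_name: "MyAppWeb.Endpoint",
#       repo_module_name: "MyApp.Repo"},
#     api)
#
# which writes config/timber.exs; link!/2 then appends the matching
# `import_config "timber.exs"` line to config/config.exs.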
| 28.733333 | 100 | 0.659165 |
f727572867bdd9080454271156ece751b916197b | 529 | exs | Elixir | priv/repo/migrations/20170703052856_create_sponsors_sponsorship.exs | Apps-Team/conferencetools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | ["Apache-2.0"] | null | null | null | priv/repo/migrations/20170703052856_create_sponsors_sponsorship.exs | Apps-Team/conferencetools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | ["Apache-2.0"] | 6 | 2017-10-05T20:16:34.000Z | 2017-10-05T20:36:11.000Z | priv/repo/migrations/20170703052856_create_sponsors_sponsorship.exs | apps-team/events-tools | ce2e16a3e4a521dc4682e736a209e6dd380c050d | ["Apache-2.0"] | null | null | null |
defmodule EventsTools.Repo.Migrations.CreateEventsTools.Sponsors.Sponsorship do
use Ecto.Migration
def change do
create table(:sponsorship_options) do
add :name, :string
add :summary, :text
add :type, :string
add :expo, :text
add :camps, :text
add :website, :text
add :program, :text
add :signage, :text
add :social, :text
add :swag, :text
add :recruiting, :text
add :price, :float
add :stock, :integer
timestamps()
end
end
end
# -----------------------------------------------------------------------------
# File: clients/cloud_kms/lib/google_api/cloud_kms/v1/model/crypto_key_version_template.ex
# Repo: mocknen/elixir-google-api (Apache-2.0)
# -----------------------------------------------------------------------------
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudKMS.V1.Model.CryptoKeyVersionTemplate do
@moduledoc """
A CryptoKeyVersionTemplate specifies the properties to use when creating a new CryptoKeyVersion, either manually with CreateCryptoKeyVersion or automatically as a result of auto-rotation.
## Attributes
- algorithm (String.t): Required. Algorithm to use when creating a CryptoKeyVersion based on this template. For backwards compatibility, GOOGLE_SYMMETRIC_ENCRYPTION is implied if both this field is omitted and CryptoKey.purpose is ENCRYPT_DECRYPT. Defaults to: `null`.
- Enum - one of [CRYPTO_KEY_VERSION_ALGORITHM_UNSPECIFIED, GOOGLE_SYMMETRIC_ENCRYPTION, RSA_SIGN_PSS_2048_SHA256, RSA_SIGN_PSS_3072_SHA256, RSA_SIGN_PSS_4096_SHA256, RSA_SIGN_PSS_4096_SHA512, RSA_SIGN_PKCS1_2048_SHA256, RSA_SIGN_PKCS1_3072_SHA256, RSA_SIGN_PKCS1_4096_SHA256, RSA_SIGN_PKCS1_4096_SHA512, RSA_DECRYPT_OAEP_2048_SHA256, RSA_DECRYPT_OAEP_3072_SHA256, RSA_DECRYPT_OAEP_4096_SHA256, RSA_DECRYPT_OAEP_4096_SHA512, EC_SIGN_P256_SHA256, EC_SIGN_P384_SHA384]
- protectionLevel (String.t): ProtectionLevel to use when creating a CryptoKeyVersion based on this template. Immutable. Defaults to SOFTWARE. Defaults to: `null`.
- Enum - one of [PROTECTION_LEVEL_UNSPECIFIED, SOFTWARE, HSM]
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:algorithm => any(),
:protectionLevel => any()
}
field(:algorithm)
field(:protectionLevel)
end
defimpl Poison.Decoder, for: GoogleApi.CloudKMS.V1.Model.CryptoKeyVersionTemplate do
def decode(value, options) do
GoogleApi.CloudKMS.V1.Model.CryptoKeyVersionTemplate.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudKMS.V1.Model.CryptoKeyVersionTemplate do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
# -----------------------------------------------------------------------------
# File: web/gettext.ex
# Repo: zombalo/cgrates_web_jsonapi (MIT)
# -----------------------------------------------------------------------------
defmodule CgratesWebJsonapi.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import CgratesWebJsonapi.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :cgrates_web_jsonapi
end
# -----------------------------------------------------------------------------
# File: test/support/channel_case.ex
# Repo: jwworth/bell (MIT)
# -----------------------------------------------------------------------------
defmodule BellWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use BellWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint BellWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Bell.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Bell.Repo, {:shared, self()})
end
:ok
end
end
# -----------------------------------------------------------------------------
# File: apps/rebuildremoved/config/config.exs
# Repo: d-led/gorebuild (Apache-2.0)
# -----------------------------------------------------------------------------
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
config :rebuildremoved,
delay_ms: System.get_env("GO_DELAY") || 10 * 1000,
artifacts: [
# e.g.:
# %{pipeline: "test", stage: "defaultStage", job: "defaultJob",
# paths: [
# "foo/bar",
# "foo/bar/start.sh"
# ]
# },
# %{pipeline: "consumer", stage: "defaultStage", job: "DefaultJob",
# paths: [ "bla/blup" ]
# }
],
gocd: %{
url: System.get_env("GO_SERVER_URL") || "https://localhost:8154/go",
user: System.get_env("GO_USERNAME"),
password: System.get_env("GO_PASSWORD")
}
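# A hedged example of supplying these settings at runtime (the host and
# credentials are assumptions for illustration):
#
#     GO_SERVER_URL=https://gocd.example.local:8154/go \
#     GO_USERNAME=admin GO_PASSWORD=secret mix run --no-halt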
#
# and access this configuration in your application as:
#
# Application.get_env(:rebuildremoved, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
import_config "#{Mix.env()}.exs"
# -----------------------------------------------------------------------------
# File: apps/bytepack_web/lib/bytepack_web/live/admin/seller_live/index.ex
# Repo: dashbitco/bytepack_archive (Unlicense)
# -----------------------------------------------------------------------------
defmodule BytepackWeb.Admin.SellerLive.Index do
use BytepackWeb, :live_view
@impl true
def mount(params, session, socket) do
socket =
socket
|> MountHelpers.assign_admin(params, session)
|> assign(:orgs, Bytepack.Orgs.list_orgs())
|> assign(:page_title, "Sellers")
{:ok, socket, temporary_assigns: [sellers: []]}
end
@impl true
def handle_params(%{"slug" => slug}, _url, socket) do
org = Bytepack.Orgs.get_org!(slug)
seller =
if org.is_seller do
Bytepack.Sales.get_seller!(org)
else
%Bytepack.Sales.Seller{id: org.id}
end
socket =
socket
|> assign(:seller, seller)
|> assign(:org, org)
{:noreply, socket}
end
@impl true
def handle_params(_, _url, socket), do: {:noreply, socket}
end
# -----------------------------------------------------------------------------
# File: test/oli/accounts/author_test.exs
# Repo: malav2110/oli-torus (MIT)
# -----------------------------------------------------------------------------
defmodule Oli.Accounts.AuthorTest do
use Oli.DataCase
describe "author" do
alias Oli.Accounts.Author
test "changeset should be invalid if password and confirmation do not match" do
changeset =
Author.changeset(%Author{}, %{
email: "[email protected]",
given_name: "First",
family_name: "Last",
password: "foo",
password_confirmation: "bar"
})
refute changeset.valid?
end
end
end
# -----------------------------------------------------------------------------
# File: lib/slipstream/commands/collect_garbage.ex
# Repo: fhunleth/slipstream (Apache-2.0)
# -----------------------------------------------------------------------------
defmodule Slipstream.Commands.CollectGarbage do
@moduledoc false
defstruct [:socket]
end
# -----------------------------------------------------------------------------
# File: apps/omg/test/omg/state/measurement_calculation_test.exs
# Repo: boolafish/elixir-omg (Apache-2.0)
# -----------------------------------------------------------------------------
# Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.State.MeasurementCalculationTest do
@moduledoc """
Testing functional behaviors.
"""
use ExUnitFixtures
use ExUnit.Case, async: true
alias OMG.Eth.Encoding
alias OMG.State.Core
alias OMG.Utxo
require Utxo
@eth OMG.Eth.zero_address()
@not_eth <<1::size(160)>>
@tag fixtures: [:alice, :bob, :carol]
test "calculate metrics from state", %{alice: alice, bob: bob, carol: carol} do
utxos = %{
Utxo.position(2_000, 4076, 3) => %OMG.Utxo{
output: %OMG.Output{amount: 700_000_000, currency: @eth, owner: alice}
},
Utxo.position(1_000, 2559, 0) => %OMG.Utxo{
output: %OMG.Output{amount: 111_111_111, currency: @not_eth, owner: alice}
},
Utxo.position(8_000, 4854, 2) => %OMG.Utxo{
output: %OMG.Output{amount: 77_000_000, currency: @eth, owner: bob}
},
Utxo.position(7_000, 4057, 3) => %OMG.Utxo{
output: %OMG.Output{amount: 222_222_222, currency: @not_eth, owner: carol}
},
Utxo.position(7_000, 4057, 4) => %OMG.Utxo{output: %{}}
}
assert MapSet.new(OMG.State.MeasurementCalculation.calculate(%Core{utxos: utxos})) ==
MapSet.new([
{:unique_users, 3},
{:balance, 777_000_000, "currency:#{Encoding.to_hex(@eth)}"},
{:balance, 333_333_333, "currency:#{Encoding.to_hex(@not_eth)}"}
])
end
end
# -----------------------------------------------------------------------------
# File: mix.exs
# Repo: antp/cxs_starter (MIT)
# -----------------------------------------------------------------------------
defmodule CxsStarter.MixProject do
use Mix.Project
def project do
[
app: :cxs_starter,
version: "0.1.0",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {CxsStarter.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:bcrypt_elixir, "~> 2.0"},
{:phoenix, "~> 1.5.7"},
{:phoenix_ecto, "~> 4.1"},
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:phoenix_live_view, "~> 0.15.0"},
{:floki, ">= 0.27.0", only: :test},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_dashboard, "~> 0.4"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:bamboo, "~> 2.0.0"},
{:mox, "~> 1.0", only: :test},
{:ex_machina, "~> 2.4", only: [:dev, :test], runtime: false},
{:faker, "~> 0.16.0", only: [:dev, :test], runtime: false},
{:elogram, only: :test, git: "https://github.com/mcrumm/elogram.git"},
{:mix_test_watch, "~> 1.0", only: :dev, runtime: false},
{:phx_gen_auth, "~> 0.6", only: [:dev], runtime: false}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
]
end
end
# -----------------------------------------------------------------------------
# File: lib/instruments/application.ex
# Repo: sb8244/instruments (MIT)
# -----------------------------------------------------------------------------
defmodule Instruments.Application do
@moduledoc false
use Application
alias Instruments.{
FastCounter,
Probe
}
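  # The reporter backend is read from application config at startup; a hedged
  # sketch of the corresponding config entry (Instruments.Statix is the
  # default used in `start/2` below):
  #
  #     config :instruments, reporter_module: Instruments.Statix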
def start(_type, _args) do
import Supervisor.Spec, warn: false
reporter = Application.get_env(:instruments, :reporter_module, Instruments.Statix)
reporter.connect()
children = [
worker(FastCounter, []),
worker(Probe.Definitions, []),
worker(Probe.Supervisor, [])
]
opts = [strategy: :one_for_one, name: Instruments.Supervisor]
Supervisor.start_link(children, opts)
end
end
# -----------------------------------------------------------------------------
# File: lib/action/action.ex
# Repo: doawoo/elixir_rpg (MIT)
# -----------------------------------------------------------------------------
defmodule ElixirRPG.Action do
use TypedStruct
require Logger
alias __MODULE__
typedstruct do
field :action_type, atom(), enforce: true
field :target_entity, pid(), enforce: true
    # In a typespec `%{}` denotes the empty map; `map()` is the intended type here.
    field :payload, map(), enforce: true
end
def make_action(type, target, extra_data \\ %{}) do
%Action{
action_type: type,
target_entity: target,
payload: extra_data
}
end
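  # A hedged usage sketch (`entity_pid` stands in for a running entity
  # GenServer and is an assumption for illustration):
  #
  #     :attack
  #     |> ElixirRPG.Action.make_action(entity_pid, %{damage: 5})
  #     |> ElixirRPG.Action.execute()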
def execute(%Action{} = action) do
if Process.alive?(action.target_entity) do
Logger.debug("Action enqueued from: #{inspect(action)}")
GenServer.call(action.target_entity, {:action_recv, action})
else
Logger.warn("Action was dropped because target PID was dead: #{inspect(action)}")
end
end
end
# -----------------------------------------------------------------------------
# File: integration_test/cases/repo.exs
# Repo: ckoch-cars/ecto (Apache-2.0)
# -----------------------------------------------------------------------------
defmodule Ecto.Integration.RepoTest do
use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true)
alias Ecto.Integration.TestRepo
import Ecto.Query
alias Ecto.Integration.Post
alias Ecto.Integration.Order
alias Ecto.Integration.User
alias Ecto.Integration.Comment
alias Ecto.Integration.Permalink
alias Ecto.Integration.Custom
alias Ecto.Integration.Barebone
alias Ecto.Integration.CompositePk
alias Ecto.Integration.PostUserCompositePk
test "returns already started for started repos" do
assert {:error, {:already_started, _}} = TestRepo.start_link
end
test "supports unnamed repos" do
assert {:ok, pid} = TestRepo.start_link(name: nil)
assert Ecto.Repo.Queryable.all(pid, Post, []) == []
end
test "all empty" do
assert TestRepo.all(Post) == []
assert TestRepo.all(from p in Post) == []
end
test "all with in" do
TestRepo.insert!(%Post{title: "hello"})
    # The cast error is raised even before the query is cached.
assert_raise Ecto.Query.CastError, fn ->
TestRepo.all(from p in Post, where: p.title in ^nil)
end
assert [] = TestRepo.all from p in Post, where: p.title in []
assert [] = TestRepo.all from p in Post, where: p.title in ["1", "2", "3"]
assert [] = TestRepo.all from p in Post, where: p.title in ^[]
assert [_] = TestRepo.all from p in Post, where: p.title not in []
assert [_] = TestRepo.all from p in Post, where: p.title in ["1", "hello", "3"]
assert [_] = TestRepo.all from p in Post, where: p.title in ["1", ^"hello", "3"]
assert [_] = TestRepo.all from p in Post, where: p.title in ^["1", "hello", "3"]
    # The cast error is still raised after the query has been cached.
assert_raise Ecto.Query.CastError, fn ->
TestRepo.all(from p in Post, where: p.title in ^nil)
end
end
test "all using named from" do
TestRepo.insert!(%Post{title: "hello"})
query =
from(p in Post, as: :post)
|> where([post: p], p.title == "hello")
assert [_] = TestRepo.all query
end
test "all without schema" do
%Post{} = TestRepo.insert!(%Post{title: "title1"})
%Post{} = TestRepo.insert!(%Post{title: "title2"})
assert ["title1", "title2"] =
TestRepo.all(from(p in "posts", order_by: p.title, select: p.title))
assert [_] =
TestRepo.all(from(p in "posts", where: p.title == "title1", select: p.id))
end
test "all shares metadata" do
TestRepo.insert!(%Post{title: "title1"})
TestRepo.insert!(%Post{title: "title2"})
[post1, post2] = TestRepo.all(Post)
assert :erts_debug.same(post1.__meta__, post2.__meta__)
[new_post1, new_post2] = TestRepo.all(Post)
assert :erts_debug.same(post1.__meta__, new_post1.__meta__)
assert :erts_debug.same(post2.__meta__, new_post2.__meta__)
end
@tag :invalid_prefix
test "all with invalid prefix" do
assert catch_error(TestRepo.all("posts", prefix: "oops"))
end
test "insert, update and delete" do
post = %Post{title: "insert, update, delete", text: "fetch empty"}
meta = post.__meta__
assert %Post{} = inserted = TestRepo.insert!(post)
assert %Post{} = updated = TestRepo.update!(Ecto.Changeset.change(inserted, text: "new"))
deleted_meta = put_in meta.state, :deleted
assert %Post{__meta__: ^deleted_meta} = TestRepo.delete!(updated)
loaded_meta = put_in meta.state, :loaded
assert %Post{__meta__: ^loaded_meta} = TestRepo.insert!(post)
post = TestRepo.one(Post)
assert post.__meta__.state == :loaded
assert post.inserted_at
end
test "insert, update and delete with field source" do
permalink = %Permalink{url: "url"}
assert %Permalink{url: "url"} = inserted =
TestRepo.insert!(permalink)
assert %Permalink{url: "new"} = updated =
TestRepo.update!(Ecto.Changeset.change(inserted, url: "new"))
assert %Permalink{url: "new"} =
TestRepo.delete!(updated)
end
@tag :composite_pk
test "insert, update and delete with composite pk" do
c1 = TestRepo.insert!(%CompositePk{a: 1, b: 2, name: "first"})
c2 = TestRepo.insert!(%CompositePk{a: 1, b: 3, name: "second"})
assert CompositePk |> first |> TestRepo.one == c1
assert CompositePk |> last |> TestRepo.one == c2
changeset = Ecto.Changeset.cast(c1, %{name: "first change"}, ~w(name)a)
c1 = TestRepo.update!(changeset)
assert TestRepo.get_by!(CompositePk, %{a: 1, b: 2}) == c1
TestRepo.delete!(c2)
assert TestRepo.all(CompositePk) == [c1]
assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
TestRepo.get(CompositePk, [])
end
assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
TestRepo.get!(CompositePk, [1, 2])
end
end
@tag :composite_pk
test "insert, update and delete with associated composite pk" do
user = TestRepo.insert!(%User{})
post = TestRepo.insert!(%Post{title: "post title", text: "post text"})
user_post = TestRepo.insert!(%PostUserCompositePk{user_id: user.id, post_id: post.id})
assert TestRepo.get_by!(PostUserCompositePk, [user_id: user.id, post_id: post.id]) == user_post
TestRepo.delete!(user_post)
assert TestRepo.all(PostUserCompositePk) == []
end
@tag :invalid_prefix
test "insert, update and delete with invalid prefix" do
post = TestRepo.insert!(%Post{})
changeset = Ecto.Changeset.change(post, title: "foo")
assert catch_error(TestRepo.insert(%Post{}, prefix: "oops"))
assert catch_error(TestRepo.update(changeset, prefix: "oops"))
assert catch_error(TestRepo.delete(changeset, prefix: "oops"))
end
test "insert and update with changeset" do
# On insert we merge the fields and changes
changeset = Ecto.Changeset.cast(%Post{text: "x", title: "wrong"},
%{"title" => "hello", "temp" => "unknown"}, ~w(title temp)a)
post = TestRepo.insert!(changeset)
assert %Post{text: "x", title: "hello", temp: "unknown"} = post
assert %Post{text: "x", title: "hello", temp: "temp"} = TestRepo.get!(Post, post.id)
# On update we merge only fields, direct schema changes are discarded
changeset = Ecto.Changeset.cast(%{post | text: "y"},
%{"title" => "world", "temp" => "unknown"}, ~w(title temp)a)
assert %Post{text: "y", title: "world", temp: "unknown"} = TestRepo.update!(changeset)
assert %Post{text: "x", title: "world", temp: "temp"} = TestRepo.get!(Post, post.id)
end
test "insert and update with empty changeset" do
# On insert we merge the fields and changes
changeset = Ecto.Changeset.cast(%Permalink{}, %{}, ~w())
assert %Permalink{} = permalink = TestRepo.insert!(changeset)
# Assert we can update the same value twice,
# without changes, without triggering stale errors.
changeset = Ecto.Changeset.cast(permalink, %{}, ~w())
assert TestRepo.update!(changeset) == permalink
assert TestRepo.update!(changeset) == permalink
end
@tag :no_primary_key
test "insert with no primary key" do
assert %Barebone{num: nil} = TestRepo.insert!(%Barebone{})
assert %Barebone{num: 13} = TestRepo.insert!(%Barebone{num: 13})
end
@tag :read_after_writes
test "insert and update with changeset read after writes" do
defmodule RAW do
use Ecto.Schema
schema "comments" do
field :text, :string
field :lock_version, :integer, read_after_writes: true
end
end
changeset = Ecto.Changeset.cast(struct(RAW, %{}), %{}, ~w())
# If the field is nil, we will not send it
# and read the value back from the database.
assert %{id: cid, lock_version: 1} = raw = TestRepo.insert!(changeset)
# Set the counter to 11, so we can read it soon
TestRepo.update_all from(u in RAW, where: u.id == ^cid), set: [lock_version: 11]
# We will read back on update too
changeset = Ecto.Changeset.cast(raw, %{"text" => "0"}, ~w(text)a)
assert %{id: ^cid, lock_version: 11, text: "0"} = TestRepo.update!(changeset)
end
test "insert autogenerates for custom type" do
post = TestRepo.insert!(%Post{uuid: nil})
assert byte_size(post.uuid) == 36
assert TestRepo.get_by(Post, uuid: post.uuid) == post
end
@tag :id_type
test "insert autogenerates for custom id type" do
defmodule ID do
use Ecto.Schema
@primary_key {:id, CustomPermalink, autogenerate: true}
schema "posts" do
end
end
id = TestRepo.insert!(struct(ID, id: nil))
assert id.id
assert TestRepo.get_by(ID, id: "#{id.id}-hello") == id
end
@tag :id_type
@tag :assigns_id_type
test "insert with user-assigned primary key" do
assert %Post{id: 1} = TestRepo.insert!(%Post{id: 1})
end
@tag :id_type
@tag :assigns_id_type
test "insert and update with user-assigned primary key in changeset" do
changeset = Ecto.Changeset.cast(%Post{id: 11}, %{"id" => "13"}, ~w(id)a)
assert %Post{id: 13} = post = TestRepo.insert!(changeset)
changeset = Ecto.Changeset.cast(post, %{"id" => "15"}, ~w(id)a)
assert %Post{id: 15} = TestRepo.update!(changeset)
end
test "insert and fetch a schema with utc timestamps" do
datetime = DateTime.from_unix!(System.os_time(:second), :second)
TestRepo.insert!(%User{inserted_at: datetime})
assert [%{inserted_at: ^datetime}] = TestRepo.all(User)
end
test "optimistic locking in update/delete operations" do
import Ecto.Changeset, only: [cast: 3, optimistic_lock: 2]
base_post = TestRepo.insert!(%Comment{})
changeset_ok =
base_post
|> cast(%{"text" => "foo.bar"}, ~w(text)a)
|> optimistic_lock(:lock_version)
TestRepo.update!(changeset_ok)
changeset_stale = optimistic_lock(base_post, :lock_version)
assert_raise Ecto.StaleEntryError, fn -> TestRepo.update!(changeset_stale) end
assert_raise Ecto.StaleEntryError, fn -> TestRepo.delete!(changeset_stale) end
end
test "optimistic locking in update operation with nil field" do
import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3]
base_post =
%Comment{}
|> cast(%{lock_version: nil}, [:lock_version])
|> TestRepo.insert!()
incrementer =
fn
nil -> 1
old_value -> old_value + 1
end
changeset_ok =
base_post
|> cast(%{"text" => "foo.bar"}, ~w(text)a)
|> optimistic_lock(:lock_version, incrementer)
updated = TestRepo.update!(changeset_ok)
assert updated.text == "foo.bar"
assert updated.lock_version == 1
end
test "optimistic locking in delete operation with nil field" do
import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3]
base_post =
%Comment{}
|> cast(%{lock_version: nil}, [:lock_version])
|> TestRepo.insert!()
incrementer =
fn
nil -> 1
old_value -> old_value + 1
end
changeset_ok = optimistic_lock(base_post, :lock_version, incrementer)
TestRepo.delete!(changeset_ok)
refute TestRepo.get(Comment, base_post.id)
end
@tag :unique_constraint
test "unique constraint" do
changeset = Ecto.Changeset.change(%Post{}, uuid: Ecto.UUID.generate())
{:ok, _} = TestRepo.insert(changeset)
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "posts_uuid_index (unique_constraint)"
assert exception.message =~ "The changeset has not defined any constraint."
assert exception.message =~ "call `unique_constraint/3`"
message = ~r/constraint error when attempting to insert struct/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.unique_constraint(:uuid, name: :posts_email_changeset)
|> TestRepo.insert()
end
assert exception.message =~ "posts_email_changeset (unique_constraint)"
{:error, changeset} =
changeset
|> Ecto.Changeset.unique_constraint(:uuid)
|> TestRepo.insert()
assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}]
assert changeset.data.__meta__.state == :built
end
@tag :unique_constraint
test "unique constraint from association" do
uuid = Ecto.UUID.generate()
post = & %Post{} |> Ecto.Changeset.change(uuid: &1) |> Ecto.Changeset.unique_constraint(:uuid)
{:error, changeset} =
TestRepo.insert %User{
comments: [%Comment{}],
permalink: %Permalink{},
posts: [post.(uuid), post.(uuid), post.(Ecto.UUID.generate)]
}
[_, p2, _] = changeset.changes.posts
assert p2.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}]
end
@tag :id_type
@tag :unique_constraint
test "unique constraint with binary_id" do
changeset = Ecto.Changeset.change(%Custom{}, uuid: Ecto.UUID.generate())
{:ok, _} = TestRepo.insert(changeset)
{:error, changeset} =
changeset
|> Ecto.Changeset.unique_constraint(:uuid)
|> TestRepo.insert()
assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "customs_uuid_index"]}]
assert changeset.data.__meta__.state == :built
end
test "unique pseudo-constraint violation error message with join table at the repository" do
post =
TestRepo.insert!(%Post{title: "some post"})
|> TestRepo.preload(:unique_users)
user =
TestRepo.insert!(%User{name: "some user"})
# Violate the unique composite index
{:error, changeset} =
post
|> Ecto.Changeset.change
|> Ecto.Changeset.put_assoc(:unique_users, [user, user])
|> TestRepo.update
errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
assert errors == %{unique_users: [%{}, %{id: ["has already been taken"]}]}
refute changeset.valid?
end
@tag :join
@tag :unique_constraint
test "unique constraint violation error message with join table in single changeset" do
post =
TestRepo.insert!(%Post{title: "some post"})
|> TestRepo.preload(:constraint_users)
user =
TestRepo.insert!(%User{name: "some user"})
# Violate the unique composite index
{:error, changeset} =
post
|> Ecto.Changeset.change
|> Ecto.Changeset.put_assoc(:constraint_users, [user, user])
|> Ecto.Changeset.unique_constraint(:user,
name: :posts_users_composite_pk_post_id_user_id_index,
message: "has already been assigned")
|> TestRepo.update
errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
assert errors == %{constraint_users: [%{}, %{user: ["has already been assigned"]}]}
refute changeset.valid?
end
@tag :join
@tag :unique_constraint
test "unique constraint violation error message with join table and separate changesets" do
post =
TestRepo.insert!(%Post{title: "some post"})
|> TestRepo.preload(:constraint_users)
user = TestRepo.insert!(%User{name: "some user"})
post
|> Ecto.Changeset.change
|> Ecto.Changeset.put_assoc(:constraint_users, [user])
|> TestRepo.update
# Violate the unique composite index
{:error, changeset} =
post
|> Ecto.Changeset.change
|> Ecto.Changeset.put_assoc(:constraint_users, [user])
|> Ecto.Changeset.unique_constraint(:user,
name: :posts_users_composite_pk_post_id_user_id_index,
message: "has already been assigned")
|> TestRepo.update
errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
assert errors == %{constraint_users: [%{user: ["has already been assigned"]}]}
refute changeset.valid?
end
@tag :foreign_key_constraint
test "foreign key constraint" do
changeset = Ecto.Changeset.change(%Comment{post_id: 0})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)"
assert exception.message =~ "The changeset has not defined any constraint."
assert exception.message =~ "call `foreign_key_constraint/3`"
message = ~r/constraint error when attempting to insert struct/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.foreign_key_constraint(:post_id, name: :comments_post_id_other)
|> TestRepo.insert()
end
assert exception.message =~ "comments_post_id_other (foreign_key_constraint)"
{:error, changeset} =
changeset
|> Ecto.Changeset.foreign_key_constraint(:post_id)
|> TestRepo.insert()
assert changeset.errors == [post_id: {"does not exist", [constraint: :foreign, constraint_name: "comments_post_id_fkey"]}]
end
@tag :foreign_key_constraint
test "assoc constraint" do
changeset = Ecto.Changeset.change(%Comment{post_id: 0})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)"
assert exception.message =~ "The changeset has not defined any constraint."
message = ~r/constraint error when attempting to insert struct/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.assoc_constraint(:post, name: :comments_post_id_other)
|> TestRepo.insert()
end
assert exception.message =~ "comments_post_id_other (foreign_key_constraint)"
{:error, changeset} =
changeset
|> Ecto.Changeset.assoc_constraint(:post)
|> TestRepo.insert()
assert changeset.errors == [post: {"does not exist", [constraint: :assoc, constraint_name: "comments_post_id_fkey"]}]
end
@tag :foreign_key_constraint
test "no assoc constraint error" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Permalink{user_id: user.id})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to delete struct/, fn ->
TestRepo.delete!(user)
end
assert exception.message =~ "permalinks_user_id_fkey (foreign_key_constraint)"
assert exception.message =~ "The changeset has not defined any constraint."
end
@tag :foreign_key_constraint
test "no assoc constraint with changeset mismatch" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Permalink{user_id: user.id})
message = ~r/constraint error when attempting to delete struct/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
user
|> Ecto.Changeset.change
        |> Ecto.Changeset.no_assoc_constraint(:permalink, name: :permalinks_user_id_other)
|> TestRepo.delete()
end
assert exception.message =~ "permalinks_user_id_pther (foreign_key_constraint)"
end
@tag :foreign_key_constraint
test "no assoc constraint with changeset match" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Permalink{user_id: user.id})
{:error, changeset} =
user
|> Ecto.Changeset.change
|> Ecto.Changeset.no_assoc_constraint(:permalink)
|> TestRepo.delete()
assert changeset.errors == [permalink: {"is still associated with this entry", [constraint: :no_assoc, constraint_name: "permalinks_user_id_fkey"]}]
end
@tag :foreign_key_constraint
test "insert and update with embeds during failing child foreign key" do
changeset =
Order
|> struct(%{})
|> order_changeset(%{item: %{price: 10}, permalink: %{post_id: 0}})
{:error, changeset} = TestRepo.insert(changeset)
assert %Ecto.Changeset{} = changeset.changes.item
order =
Order
|> struct(%{})
|> order_changeset(%{})
|> TestRepo.insert!()
|> TestRepo.preload([:permalink])
changeset = order_changeset(order, %{item: %{price: 10}, permalink: %{post_id: 0}})
assert %Ecto.Changeset{} = changeset.changes.item
{:error, changeset} = TestRepo.update(changeset)
assert %Ecto.Changeset{} = changeset.changes.item
end
def order_changeset(order, params) do
order
|> Ecto.Changeset.cast(params, [:permalink_id])
|> Ecto.Changeset.cast_embed(:item, with: &item_changeset/2)
|> Ecto.Changeset.cast_assoc(:permalink, with: &permalink_changeset/2)
end
def item_changeset(item, params) do
item
|> Ecto.Changeset.cast(params, [:price])
end
def permalink_changeset(comment, params) do
comment
|> Ecto.Changeset.cast(params, [:post_id])
|> Ecto.Changeset.assoc_constraint(:post)
end
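  # A hedged sketch of how these changeset helpers compose (mirroring the
  # insert performed in the test above; `post.id` is illustrative):
  #
  #     Order
  #     |> struct(%{})
  #     |> order_changeset(%{item: %{price: 10}, permalink: %{post_id: post.id}})
  #     |> TestRepo.insert()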
test "unsafe_validate_unique/3" do
{:ok, inserted_post} = TestRepo.insert(%Post{title: "Greetings", text: "hi"})
new_post_changeset = Post.changeset(%Post{}, %{title: "Greetings", text: "ho"})
changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title], TestRepo)
assert changeset.errors[:title] ==
{"has already been taken", validation: :unsafe_unique, fields: [:title]}
changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title, :text], TestRepo)
assert changeset.errors[:title] == nil
update_changeset = Post.changeset(inserted_post, %{text: "ho"})
changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:title], TestRepo)
assert changeset.errors[:title] == nil # cannot conflict with itself
end
test "unsafe_validate_unique/3 with composite keys" do
{:ok, inserted_post} = TestRepo.insert(%CompositePk{a: 123, b: 456, name: "UniqueName"})
different_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 321})
changeset = Ecto.Changeset.unsafe_validate_unique(different_pk, [:name], TestRepo)
assert changeset.errors[:name] ==
{"has already been taken", validation: :unsafe_unique, fields: [:name]}
partial_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 456})
changeset = Ecto.Changeset.unsafe_validate_unique(partial_pk, [:name], TestRepo)
assert changeset.errors[:name] ==
{"has already been taken", validation: :unsafe_unique, fields: [:name]}
update_changeset = CompositePk.changeset(inserted_post, %{name: "NewName"})
changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:name], TestRepo)
assert changeset.valid?
assert changeset.errors[:name] == nil # cannot conflict with itself
end
test "get(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert post1 == TestRepo.get(Post, post1.id)
assert post2 == TestRepo.get(Post, to_string post2.id) # With casting
assert post1 == TestRepo.get!(Post, post1.id)
assert post2 == TestRepo.get!(Post, to_string post2.id) # With casting
TestRepo.delete!(post1)
assert nil == TestRepo.get(Post, post1.id)
assert_raise Ecto.NoResultsError, fn ->
TestRepo.get!(Post, post1.id)
end
end
test "get(!) with custom source" do
custom = Ecto.put_meta(%Custom{}, source: "posts")
custom = TestRepo.insert!(custom)
bid = custom.bid
assert %Custom{bid: ^bid, __meta__: %{source: "posts"}} =
TestRepo.get(from(c in {"posts", Custom}), bid)
end
test "get(!) with binary_id" do
custom = TestRepo.insert!(%Custom{})
bid = custom.bid
assert %Custom{bid: ^bid} = TestRepo.get(Custom, bid)
end
test "get_by(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hello"})
assert post1 == TestRepo.get_by(Post, id: post1.id)
assert post1 == TestRepo.get_by(Post, text: post1.text)
assert post1 == TestRepo.get_by(Post, id: post1.id, text: post1.text)
assert post2 == TestRepo.get_by(Post, id: to_string(post2.id)) # With casting
assert nil == TestRepo.get_by(Post, text: "hey")
assert nil == TestRepo.get_by(Post, id: post2.id, text: "hey")
assert post1 == TestRepo.get_by!(Post, id: post1.id)
assert post1 == TestRepo.get_by!(Post, text: post1.text)
assert post1 == TestRepo.get_by!(Post, id: post1.id, text: post1.text)
assert post2 == TestRepo.get_by!(Post, id: to_string(post2.id)) # With casting
assert post1 == TestRepo.get_by!(Post, %{id: post1.id})
assert_raise Ecto.NoResultsError, fn ->
TestRepo.get_by!(Post, id: post2.id, text: "hey")
end
end
test "first, last and one(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert post1 == Post |> first |> TestRepo.one
assert post2 == Post |> last |> TestRepo.one
query = from p in Post, order_by: p.title
assert post1 == query |> first |> TestRepo.one
assert post2 == query |> last |> TestRepo.one
query = from p in Post, order_by: [desc: p.title], limit: 10
assert post2 == query |> first |> TestRepo.one
assert post1 == query |> last |> TestRepo.one
query = from p in Post, where: is_nil(p.id)
refute query |> first |> TestRepo.one
refute query |> last |> TestRepo.one
assert_raise Ecto.NoResultsError, fn -> query |> first |> TestRepo.one! end
assert_raise Ecto.NoResultsError, fn -> query |> last |> TestRepo.one! end
end
test "exists?" do
TestRepo.insert!(%Post{title: "1", text: "hai", visits: 2})
TestRepo.insert!(%Post{title: "2", text: "hai", visits: 1})
query = from p in Post, where: not is_nil(p.title), limit: 2
assert query |> TestRepo.exists? == true
query = from p in Post, where: p.title == "1", select: p.title
assert query |> TestRepo.exists? == true
query = from p in Post, where: is_nil(p.id)
assert query |> TestRepo.exists? == false
query = from(p in Post, select: {p.visits, avg(p.visits)}, group_by: p.visits, having: avg(p.visits) > 1)
assert query |> TestRepo.exists? == true
end
test "aggregate" do
assert TestRepo.aggregate(Post, :max, :visits) == nil
TestRepo.insert!(%Post{visits: 10})
TestRepo.insert!(%Post{visits: 12})
TestRepo.insert!(%Post{visits: 14})
TestRepo.insert!(%Post{visits: 14})
# Barebones
assert TestRepo.aggregate(Post, :max, :visits) == 14
assert TestRepo.aggregate(Post, :min, :visits) == 10
assert TestRepo.aggregate(Post, :count, :visits) == 4
assert "50" = to_string(TestRepo.aggregate(Post, :sum, :visits))
assert "12.5" <> _ = to_string(TestRepo.aggregate(Post, :avg, :visits))
# With order_by
query = from Post, order_by: [asc: :visits]
assert TestRepo.aggregate(query, :max, :visits) == 14
# With order_by and limit
query = from Post, order_by: [asc: :visits], limit: 2
assert TestRepo.aggregate(query, :max, :visits) == 12
# With distinct
query = from Post, order_by: [asc: :visits], distinct: true
assert TestRepo.aggregate(query, :count, :visits) == 3
end
@tag :insert_cell_wise_defaults
test "insert all" do
assert {2, nil} = TestRepo.insert_all("comments", [[text: "1"], %{text: "2", lock_version: 2}])
assert {2, nil} = TestRepo.insert_all({"comments", Comment}, [[text: "3"], %{text: "4", lock_version: 2}])
assert [%Comment{text: "1", lock_version: 1},
%Comment{text: "2", lock_version: 2},
%Comment{text: "3", lock_version: 1},
%Comment{text: "4", lock_version: 2}] = TestRepo.all(Comment)
assert {2, nil} = TestRepo.insert_all(Post, [[], []])
assert [%Post{}, %Post{}] = TestRepo.all(Post)
assert {0, nil} = TestRepo.insert_all("posts", [])
assert {0, nil} = TestRepo.insert_all({"posts", Post}, [])
end
@tag :insert_select
test "insert all with query" do
comment = TestRepo.insert!(%Comment{text: "1", lock_version: 1})
text_query = from(c in Comment, select: c.text, where: [id: ^comment.id, lock_version: 1])
lock_version_query = from(c in Comment, select: c.lock_version, where: [id: ^comment.id])
rows = [
[text: "2", lock_version: lock_version_query],
[lock_version: lock_version_query, text: "3"],
[text: text_query],
[text: text_query, lock_version: lock_version_query],
[lock_version: 6, text: "6"]
]
assert {5, nil} = TestRepo.insert_all(Comment, rows, [])
inserted_rows = Comment
|> where([c], c.id != ^comment.id)
|> TestRepo.all()
assert [%Comment{text: "2", lock_version: 1},
%Comment{text: "3", lock_version: 1},
%Comment{text: "1"},
%Comment{text: "1", lock_version: 1},
%Comment{text: "6", lock_version: 6}] = inserted_rows
end
@tag :invalid_prefix
@tag :insert_cell_wise_defaults
test "insert all with invalid prefix" do
assert catch_error(TestRepo.insert_all(Post, [[], []], prefix: "oops"))
end
@tag :returning
@tag :insert_cell_wise_defaults
test "insert all with returning with schema" do
assert {0, []} = TestRepo.insert_all(Comment, [], returning: true)
assert {0, nil} = TestRepo.insert_all(Comment, [], returning: false)
{2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "1"], [text: "2"]], returning: [:id, :text])
assert %Comment{text: "1", __meta__: %{state: :loaded}} = c1
assert %Comment{text: "2", __meta__: %{state: :loaded}} = c2
{2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "3"], [text: "4"]], returning: true)
assert %Comment{text: "3", __meta__: %{state: :loaded}} = c1
assert %Comment{text: "4", __meta__: %{state: :loaded}} = c2
end
@tag :returning
@tag :insert_cell_wise_defaults
test "insert all with returning with schema with field source" do
assert {0, []} = TestRepo.insert_all(Permalink, [], returning: true)
assert {0, nil} = TestRepo.insert_all(Permalink, [], returning: false)
{2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "1"], [url: "2"]], returning: [:id, :url])
assert %Permalink{url: "1", __meta__: %{state: :loaded}} = c1
assert %Permalink{url: "2", __meta__: %{state: :loaded}} = c2
{2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "3"], [url: "4"]], returning: true)
assert %Permalink{url: "3", __meta__: %{state: :loaded}} = c1
assert %Permalink{url: "4", __meta__: %{state: :loaded}} = c2
end
@tag :returning
@tag :insert_cell_wise_defaults
test "insert all with returning without schema" do
{2, [c1, c2]} = TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: [:id, :text])
assert %{id: _, text: "1"} = c1
assert %{id: _, text: "2"} = c2
assert_raise ArgumentError, fn ->
TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: true)
end
end
@tag :insert_cell_wise_defaults
test "insert all with dumping" do
uuid = Ecto.UUID.generate
assert {1, nil} = TestRepo.insert_all(Post, [%{uuid: uuid}])
assert [%Post{uuid: ^uuid, title: nil}] = TestRepo.all(Post)
end
@tag :insert_cell_wise_defaults
test "insert all autogenerates for binary_id type" do
custom = TestRepo.insert!(%Custom{bid: nil})
assert custom.bid
assert TestRepo.get(Custom, custom.bid)
assert TestRepo.delete!(custom)
refute TestRepo.get(Custom, custom.bid)
uuid = Ecto.UUID.generate
assert {2, nil} = TestRepo.insert_all(Custom, [%{uuid: uuid}, %{bid: custom.bid}])
assert [%Custom{bid: bid2, uuid: nil},
%Custom{bid: bid1, uuid: ^uuid}] = Enum.sort_by(TestRepo.all(Custom), & &1.uuid)
assert bid1 && bid2
assert custom.bid != bid1
assert custom.bid == bid2
end
test "update all" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, nil} = TestRepo.update_all(Post, set: [title: "x"])
assert %Post{title: "x"} = TestRepo.get(Post, id1)
assert %Post{title: "x"} = TestRepo.get(Post, id2)
assert %Post{title: "x"} = TestRepo.get(Post, id3)
assert {3, nil} = TestRepo.update_all("posts", [set: [title: nil]])
assert %Post{title: nil} = TestRepo.get(Post, id1)
assert %Post{title: nil} = TestRepo.get(Post, id2)
assert %Post{title: nil} = TestRepo.get(Post, id3)
end
@tag :invalid_prefix
test "update all with invalid prefix" do
assert catch_error(TestRepo.update_all(Post, [set: [title: "x"]], prefix: "oops"))
end
@tag :returning
test "update all with returning with schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, posts} = TestRepo.update_all(select(Post, [p], p), [set: [title: "x"]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: "x"} = p1
assert %Post{id: ^id2, title: "x"} = p2
assert %Post{id: ^id3, title: "x"} = p3
assert {3, posts} = TestRepo.update_all(select(Post, [:id, :visits]), [set: [visits: 11]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: nil, visits: 11} = p1
assert %Post{id: ^id2, title: nil, visits: 11} = p2
assert %Post{id: ^id3, title: nil, visits: 11} = p3
end
@tag :returning
test "update all with returning without schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, posts} = TestRepo.update_all(select("posts", [:id, :title]), [set: [title: "x"]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert p1 == %{id: id1, title: "x"}
assert p2 == %{id: id2, title: "x"}
assert p3 == %{id: id3, title: "x"}
end
test "update all with filter" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
query = from(p in Post, where: p.title == "1" or p.title == "2",
update: [set: [text: ^"y"]])
assert {2, nil} = TestRepo.update_all(query, set: [title: "x"])
assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id1)
assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id2)
assert %Post{title: "3", text: nil} = TestRepo.get(Post, id3)
end
test "update all no entries" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
query = from(p in Post, where: p.title == "4")
assert {0, nil} = TestRepo.update_all(query, set: [title: "x"])
assert %Post{title: "1"} = TestRepo.get(Post, id1)
assert %Post{title: "2"} = TestRepo.get(Post, id2)
assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
test "update all increment syntax" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", visits: 0})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", visits: 1})
# Positive
query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: 2]]
assert {2, nil} = TestRepo.update_all(query, [])
assert %Post{visits: 2} = TestRepo.get(Post, id1)
assert %Post{visits: 3} = TestRepo.get(Post, id2)
# Negative
query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: -1]]
assert {2, nil} = TestRepo.update_all(query, [])
assert %Post{visits: 1} = TestRepo.get(Post, id1)
assert %Post{visits: 2} = TestRepo.get(Post, id2)
end
@tag :id_type
test "update all with casting and dumping on id type field" do
assert %Post{id: id1} = TestRepo.insert!(%Post{})
assert {1, nil} = TestRepo.update_all(Post, set: [counter: to_string(id1)])
assert %Post{counter: ^id1} = TestRepo.get(Post, id1)
end
test "update all with casting and dumping" do
text = "hai"
datetime = ~N[2014-01-16 20:26:51]
assert %Post{id: id} = TestRepo.insert!(%Post{})
assert {1, nil} = TestRepo.update_all(Post, set: [text: text, inserted_at: datetime])
assert %Post{text: "hai", inserted_at: ^datetime} = TestRepo.get(Post, id)
end
test "delete all" do
assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"})
assert {3, nil} = TestRepo.delete_all(Post)
assert [] = TestRepo.all(Post)
end
@tag :invalid_prefix
test "delete all with invalid prefix" do
assert catch_error(TestRepo.delete_all(Post, prefix: "oops"))
end
@tag :returning
test "delete all with returning with schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})
assert {3, posts} = TestRepo.delete_all(select(Post, [p], p))
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: "1"} = p1
assert %Post{id: ^id2, title: "2"} = p2
assert %Post{id: ^id3, title: "3"} = p3
end
@tag :returning
test "delete all with returning without schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})
assert {3, posts} = TestRepo.delete_all(select("posts", [:id, :title]))
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert p1 == %{id: id1, title: "1"}
assert p2 == %{id: id2, title: "2"}
assert p3 == %{id: id3, title: "3"}
end
test "delete all with filter" do
assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"})
query = from(p in Post, where: p.title == "1" or p.title == "2")
assert {2, nil} = TestRepo.delete_all(query)
assert [%Post{}] = TestRepo.all(Post)
end
test "delete all no entries" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})
query = from(p in Post, where: p.title == "4")
assert {0, nil} = TestRepo.delete_all(query)
assert %Post{title: "1"} = TestRepo.get(Post, id1)
assert %Post{title: "2"} = TestRepo.get(Post, id2)
assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
test "virtual field" do
assert %Post{id: id} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert TestRepo.get(Post, id).temp == "temp"
end
## Query syntax
defmodule Foo do
defstruct [:title]
end
describe "query select" do
test "expressions" do
%Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert [{"1", "hai"}] ==
TestRepo.all(from p in Post, select: {p.title, p.text})
assert [["1", "hai"]] ==
TestRepo.all(from p in Post, select: [p.title, p.text])
assert [%{:title => "1", 3 => "hai", "text" => "hai"}] ==
TestRepo.all(from p in Post, select: %{
:title => p.title,
"text" => p.text,
3 => p.text
})
assert [%{:title => "1", "1" => "hai", "text" => "hai"}] ==
TestRepo.all(from p in Post, select: %{
:title => p.title,
p.title => p.text,
"text" => p.text
})
assert [%Foo{title: "1"}] ==
TestRepo.all(from p in Post, select: %Foo{title: p.title})
end
test "map update" do
%Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert [%Post{:title => "new title", text: "hai"}] =
TestRepo.all(from p in Post, select: %{p | title: "new title"})
assert [%Post{title: "new title", text: "hai"}] =
TestRepo.all(from p in Post, select: %Post{p | title: "new title"})
assert_raise KeyError, fn ->
TestRepo.all(from p in Post, select: %{p | unknown: "new title"})
end
assert_raise BadMapError, fn ->
TestRepo.all(from p in Post, select: %{p.title | title: "new title"})
end
assert_raise BadStructError, fn ->
TestRepo.all(from p in Post, select: %Foo{p | title: p.title})
end
end
test "take with structs" do
%{id: pid1} = TestRepo.insert!(%Post{title: "1"})
%{id: pid2} = TestRepo.insert!(%Post{title: "2"})
%{id: pid3} = TestRepo.insert!(%Post{title: "3"})
[p1, p2, p3] = Post |> select([p], struct(p, [:title])) |> order_by([:title]) |> TestRepo.all
refute p1.id
assert p1.title == "1"
assert match?(%Post{}, p1)
refute p2.id
assert p2.title == "2"
assert match?(%Post{}, p2)
refute p3.id
assert p3.title == "3"
assert match?(%Post{}, p3)
[p1, p2, p3] = Post |> select([:id]) |> order_by([:id]) |> TestRepo.all
assert %Post{id: ^pid1} = p1
assert %Post{id: ^pid2} = p2
assert %Post{id: ^pid3} = p3
end
test "take with maps" do
%{id: pid1} = TestRepo.insert!(%Post{title: "1"})
%{id: pid2} = TestRepo.insert!(%Post{title: "2"})
%{id: pid3} = TestRepo.insert!(%Post{title: "3"})
[p1, p2, p3] = "posts" |> select([p], map(p, [:title])) |> order_by([:title]) |> TestRepo.all
assert p1 == %{title: "1"}
assert p2 == %{title: "2"}
assert p3 == %{title: "3"}
[p1, p2, p3] = "posts" |> select([:id]) |> order_by([:id]) |> TestRepo.all
assert p1 == %{id: pid1}
assert p2 == %{id: pid2}
assert p3 == %{id: pid3}
end
test "take with preload assocs" do
%{id: pid} = TestRepo.insert!(%Post{title: "post"})
TestRepo.insert!(%Comment{post_id: pid, text: "comment"})
fields = [:id, :title, comments: [:text, :post_id]]
[p] = Post |> preload(:comments) |> select([p], ^fields) |> TestRepo.all
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
[p] = Post |> preload(:comments) |> select([p], struct(p, ^fields)) |> TestRepo.all
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
[p] = Post |> preload(:comments) |> select([p], map(p, ^fields)) |> TestRepo.all
assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid}]}
end
test "take with nil preload assoc" do
%{id: cid} = TestRepo.insert!(%Comment{text: "comment"})
fields = [:id, :text, post: [:title]]
[c] = Comment |> preload(:post) |> select([c], ^fields) |> TestRepo.all
assert %Comment{id: ^cid, text: "comment", post: nil} = c
[c] = Comment |> preload(:post) |> select([c], struct(c, ^fields)) |> TestRepo.all
assert %Comment{id: ^cid, text: "comment", post: nil} = c
[c] = Comment |> preload(:post) |> select([c], map(c, ^fields)) |> TestRepo.all
assert c == %{id: cid, text: "comment", post: nil}
end
test "take with join assocs" do
%{id: pid} = TestRepo.insert!(%Post{title: "post"})
%{id: cid} = TestRepo.insert!(%Comment{post_id: pid, text: "comment"})
fields = [:id, :title, comments: [:text, :post_id, :id]]
query = from p in Post, where: p.id == ^pid, join: c in assoc(p, :comments), preload: [comments: c]
p = TestRepo.one(from q in query, select: ^fields)
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
p = TestRepo.one(from q in query, select: struct(q, ^fields))
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
p = TestRepo.one(from q in query, select: map(q, ^fields))
assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid, id: cid}]}
end
test "take with single nil column" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})
assert %{counter: nil} =
TestRepo.one(from p in Post, where: p.title == "1", select: [:counter])
end
test "take with join assocs and single nil column" do
%{id: post_id} = TestRepo.insert!(%Post{title: "1"}, counter: nil)
TestRepo.insert!(%Comment{post_id: post_id, text: "comment"})
assert %{counter: nil} ==
TestRepo.one(from p in Post, join: c in assoc(p, :comments), where: p.title == "1", select: map(p, [:counter]))
end
test "field source" do
TestRepo.insert!(%Permalink{url: "url"})
assert ["url"] = Permalink |> select([p], p.url) |> TestRepo.all()
assert [1] = Permalink |> select([p], count(p.url)) |> TestRepo.all()
end
test "merge" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})
# Merge on source
assert [%Post{title: "2"}] =
Post |> select([p], merge(p, %{title: "2"})) |> TestRepo.all()
assert [%Post{title: "2"}] =
Post |> select([p], p) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
# Merge on struct
assert [%Post{title: "2"}] =
Post |> select([p], merge(%Post{title: p.title}, %{title: "2"})) |> TestRepo.all()
assert [%Post{title: "2"}] =
Post |> select([p], %Post{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
# Merge on map
assert [%{title: "2"}] =
Post |> select([p], merge(%{title: p.title}, %{title: "2"})) |> TestRepo.all()
assert [%{title: "2"}] =
Post |> select([p], %{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
end
test "merge with update on self" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: 1})
assert [%Post{title: "1", counter: 2}] =
Post |> select([p], merge(p, %{p | counter: 2})) |> TestRepo.all()
assert [%Post{title: "1", counter: 2}] =
Post |> select([p], p) |> select_merge([p], %{p | counter: 2}) |> TestRepo.all()
end
test "merge within subquery" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: 1})
subquery =
Post
|> select_merge([p], %{p | counter: 2})
|> subquery()
assert [%Post{title: "1", counter: 2}] = TestRepo.all(subquery)
end
end
test "query count distinct" do
TestRepo.insert!(%Post{title: "1"})
TestRepo.insert!(%Post{title: "1"})
TestRepo.insert!(%Post{title: "2"})
assert [3] == Post |> select([p], count(p.title)) |> TestRepo.all
assert [2] == Post |> select([p], count(p.title, :distinct)) |> TestRepo.all
end
test "query where interpolation" do
post1 = TestRepo.insert!(%Post{text: "x", title: "hello"})
post2 = TestRepo.insert!(%Post{text: "y", title: "goodbye"})
assert [post1, post2] == Post |> where([], []) |> TestRepo.all |> Enum.sort_by(& &1.id)
assert [post1] == Post |> where([], [title: "hello"]) |> TestRepo.all
assert [post1] == Post |> where([], [title: "hello", id: ^post1.id]) |> TestRepo.all
params0 = []
params1 = [title: "hello"]
params2 = [title: "hello", id: post1.id]
assert [post1, post2] == (from Post, where: ^params0) |> TestRepo.all |> Enum.sort_by(& &1.id)
assert [post1] == (from Post, where: ^params1) |> TestRepo.all
assert [post1] == (from Post, where: ^params2) |> TestRepo.all
post3 = TestRepo.insert!(%Post{text: "y", title: "goodbye", uuid: nil})
params3 = [title: "goodbye", uuid: post3.uuid]
assert [post3] == (from Post, where: ^params3) |> TestRepo.all
end
describe "upsert via insert" do
@describetag :upsert
test "on conflict raise" do
{:ok, inserted} = TestRepo.insert(%Post{title: "first"}, on_conflict: :raise)
assert catch_error(TestRepo.insert(%Post{id: inserted.id, title: "second"}, on_conflict: :raise))
end
test "on conflict ignore" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing)
assert inserted.id
assert inserted.__meta__.state == :loaded
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing)
assert not_inserted.id == nil
assert not_inserted.__meta__.state == :loaded
end
@tag :with_conflict_target
test "on conflict and associations" do
on_conflict = [set: [title: "second"]]
post = %Post{uuid: Ecto.UUID.generate(),
title: "first", comments: [%Comment{}]}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
end
@tag :with_conflict_target
test "on conflict with inc" do
uuid = "6fa459ea-ee8a-3ca4-894e-db77e160355e"
post = %Post{title: "first", uuid: uuid}
{:ok, _} = TestRepo.insert(post)
post = %{title: "upsert", uuid: uuid}
TestRepo.insert_all(Post, [post], on_conflict: [inc: [visits: 1]], conflict_target: :uuid)
end
@tag :with_conflict_target
test "on conflict ignore and conflict target" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:id]))
# Error on conflict target
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert not_inserted.id == nil
end
@tag :without_conflict_target
test "on conflict keyword list" do
on_conflict = [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict)
assert inserted.id
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict)
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
test "on conflict keyword list and conflict target" do
on_conflict = [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id]))
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :returning
@tag :with_conflict_target
test "on conflict keyword list and conflict target and returning" do
{:ok, c1} = TestRepo.insert(%Post{})
{:ok, c2} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: [:id, :uuid])
{:ok, c3} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: true)
{:ok, c4} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: false)
assert c2.uuid == c1.uuid
assert c3.uuid == c1.uuid
assert c4.uuid != c1.uuid
end
@tag :with_conflict_target
@tag :with_conflict_target_on_constraint
test "on conflict keyword list and conflict target on constraint" do
on_conflict = [set: [title: "new"]]
post = %Post{title: "old"}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: {:constraint, :posts_pkey})
assert inserted.id
{:ok, updated} = TestRepo.insert(%{post | id: inserted.id}, on_conflict: on_conflict, conflict_target: {:constraint, :posts_pkey})
assert updated.id == inserted.id
assert updated.title != "new"
assert TestRepo.get!(Post, inserted.id).title == "new"
end
@tag :returning
@tag :with_conflict_target
test "on conflict keyword list and conflict target and returning and field source" do
TestRepo.insert!(%Permalink{url: "old"})
{:ok, c1} = TestRepo.insert(%Permalink{url: "old"},
on_conflict: [set: [url: "new1"]],
conflict_target: [:url],
returning: [:url])
TestRepo.insert!(%Permalink{url: "old"})
{:ok, c2} = TestRepo.insert(%Permalink{url: "old"},
on_conflict: [set: [url: "new2"]],
conflict_target: [:url],
returning: true)
assert c1.url == "new1"
assert c2.url == "new2"
end
@tag :returning
@tag :with_conflict_target
test "on conflict ignore and returning" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert inserted.id
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid], returning: true)
assert not_inserted.id == nil
end
@tag :without_conflict_target
test "on conflict query" do
on_conflict = from Post, update: [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict)
assert inserted.id
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict)
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
test "on conflict query and conflict target" do
on_conflict = from Post, update: [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id]))
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
test "on conflict query having condition" do
post = %Post{title: "first", counter: 1, uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post)
on_conflict = from Post, where: [counter: 2], update: [set: [title: "second"]]
insert_options = [
on_conflict: on_conflict,
conflict_target: [:uuid],
stale_error_field: :counter
]
assert {:error, changeset} = TestRepo.insert(post, insert_options)
assert changeset.errors == [counter: {"is stale", [stale: true]}]
assert TestRepo.get!(Post, inserted.id).title == "first"
end
@tag :without_conflict_target
test "on conflict replace_all" do
post = %Post{title: "first", text: "text", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all)
assert inserted.id
post = %Post{title: "updated", text: "updated", uuid: post.uuid}
post = TestRepo.insert!(post, on_conflict: :replace_all)
assert post.id != inserted.id
assert post.title == "updated"
assert post.text == "updated"
assert TestRepo.all(from p in Post, select: {p.id, p.title, p.text}) ==
[{post.id, "updated", "updated"}]
assert TestRepo.all(from p in Post, select: count(p.id)) == [1]
end
@tag :with_conflict_target
test "on conflict replace_all and conflict target" do
post = %Post{title: "first", text: "text", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all, conflict_target: :uuid)
assert inserted.id
post = %Post{title: "updated", text: "updated", uuid: post.uuid}
post = TestRepo.insert!(post, on_conflict: :replace_all, conflict_target: :uuid)
assert post.id != inserted.id
assert post.title == "updated"
assert post.text == "updated"
assert TestRepo.all(from p in Post, select: {p.id, p.title, p.text}) ==
[{post.id, "updated", "updated"}]
assert TestRepo.all(from p in Post, select: count(p.id)) == [1]
end
end
describe "upsert via insert_all" do
@describetag :upsert_all
test "on conflict raise" do
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, nil} = TestRepo.insert_all(Post, [post], on_conflict: :raise)
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :raise))
end
test "on conflict ignore" do
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing) == {1, nil}
# PG returns 0, MySQL returns 1
{entries, nil} = TestRepo.insert_all(Post, [post], on_conflict: :nothing)
assert entries == 0 or entries == 1
assert length(TestRepo.all(Post)) == 1
end
@tag :with_conflict_target
test "on conflict ignore and conflict target" do
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) ==
{1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:id]))
# Error on conflict target
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) ==
{0, nil}
end
@tag :with_conflict_target
test "on conflict keyword list and conflict target" do
on_conflict = [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, nil} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid])
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id]))
# Error on conflict target
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :with_conflict_target
@tag :returning
test "on conflict keyword list and conflict target and returning and source field" do
on_conflict = [set: [url: "new"]]
permalink = [url: "old"]
assert {1, [%Permalink{url: "old"}]} =
TestRepo.insert_all(Permalink, [permalink],
on_conflict: on_conflict, conflict_target: [:url], returning: [:url])
assert {1, [%Permalink{url: "new"}]} =
TestRepo.insert_all(Permalink, [permalink],
on_conflict: on_conflict, conflict_target: [:url], returning: [:url])
end
@tag :with_conflict_target
test "on conflict query and conflict target" do
on_conflict = from Post, update: [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id]))
# Error on conflict target
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :returning
@tag :with_conflict_target
test "on conflict query and conflict target and returning" do
on_conflict = from Post, update: [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, [%{id: id}]} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
conflict_target: [:uuid], returning: [:id])
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
conflict_target: [:id], returning: [:id]))
# Error on conflict target
{1, [%Post{id: ^id, title: "second"}]} =
TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
conflict_target: [:uuid], returning: [:id, :title])
end
@tag :with_conflict_target
test "source (without an ecto schema) on conflict query and conflict target" do
on_conflict = [set: [title: "second"]]
{:ok, uuid} = Ecto.UUID.dump(Ecto.UUID.generate())
post = [title: "first", uuid: uuid]
assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:id]))
# Error on conflict target
assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :without_conflict_target
test "on conflict replace_all" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
    # Change values across multiple records; note the IDs are replaced as well
changes = [%{id: post_first.id + 2, title: "first_updated",
text: "first_updated", uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
text: "second_updated", uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: :replace_all)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id + 2)
assert updated_first.title == "first_updated"
assert updated_first.text == "first_updated"
updated_second = TestRepo.get(Post, post_second.id + 2)
assert updated_second.title == "second_updated"
assert updated_second.text == "second_updated"
end
@tag :with_conflict_target
test "on conflict replace_all and conflict_target" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all, conflict_target: :uuid)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all, conflict_target: :uuid)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
    # Change values across multiple records; note the IDs are replaced as well
changes = [%{id: post_second.id + 1, title: "first_updated",
text: "first_updated", uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
text: "second_updated", uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: :replace_all, conflict_target: :uuid)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_second.id + 1)
assert updated_first.title == "first_updated"
assert updated_first.text == "first_updated"
updated_second = TestRepo.get(Post, post_second.id + 2)
assert updated_second.title == "second_updated"
assert updated_second.text == "second_updated"
end
@tag :without_conflict_target
test "on conflict replace_all_except_primary_key" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all_except_primary_key)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all_except_primary_key)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
    # Change values across multiple records; note the IDs are not replaced
changes = [%{id: post_first.id + 2, title: "first_updated",
text: "first_updated", uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
text: "second_updated", uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: :replace_all_except_primary_key)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id)
assert updated_first.title == "first_updated"
assert updated_first.text == "first_updated"
updated_second = TestRepo.get(Post, post_second.id)
assert updated_second.title == "second_updated"
assert updated_second.text == "second_updated"
end
@tag :with_conflict_target
test "on conflict replace_all_except_primary_key and conflict_target" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
    # Change values across multiple records; note the IDs are not replaced
changes = [%{id: post_first.id + 2, title: "first_updated",
text: "first_updated", uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
text: "second_updated", uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id)
assert updated_first.title == "first_updated"
assert updated_first.text == "first_updated"
updated_second = TestRepo.get(Post, post_second.id)
assert updated_second.title == "second_updated"
assert updated_second.text == "second_updated"
end
@tag :with_conflict_target
test "on conflict replace and conflict_target" do
post_first = %Post{title: "first", visits: 10, public: true, uuid: Ecto.UUID.generate}
post_second = %Post{title: "second", visits: 20, public: false, uuid: Ecto.UUID.generate}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
# Multiple record change value: note `public` field is not changed
changes = [%{id: post_first.id, title: "first_updated", visits: 11, public: false, text: "first_updated", uuid: post_first.uuid},
%{id: post_second.id, title: "second_updated", visits: 21, public: true, text: "second_updated", uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: {:replace, [:title, :visits, :text]}, conflict_target: :uuid)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id)
assert updated_first.title == "first_updated"
assert updated_first.visits == 11
assert updated_first.public == true
assert updated_first.text == "first_updated"
updated_second = TestRepo.get(Post, post_second.id)
assert updated_second.title == "second_updated"
assert updated_second.visits == 21
assert updated_second.public == false
assert updated_second.text == "second_updated"
end
end
end
| 38.732629 | 152 | 0.633123 |
f728dfbb03d4041e8f2a2c65be6c9add78be2d75 | 365 | exs | Elixir | priv/repo/migrations/20181201181549_add_pageviews.exs | wvffle/analytics | 2c0fd55bc67f74af1fe1e2641678d44e9fee61d5 | ["MIT"] | 984 | 2019-09-02T11:36:41.000Z | 2020-06-08T06:25:48.000Z | priv/repo/migrations/20181201181549_add_pageviews.exs | wvffle/analytics | 2c0fd55bc67f74af1fe1e2641678d44e9fee61d5 | ["MIT"] | 24 | 2019-09-10T09:53:17.000Z | 2020-06-08T07:35:26.000Z | priv/repo/migrations/20181201181549_add_pageviews.exs | wvffle/analytics | 2c0fd55bc67f74af1fe1e2641678d44e9fee61d5 | ["MIT"] | 51 | 2019-09-03T10:48:10.000Z | 2020-06-07T00:23:34.000Z |
defmodule Plausible.Repo.Migrations.AddPageviews do
use Ecto.Migration
def change do
create table(:pageviews) do
add :hostname, :text, null: false
add :pathname, :text, null: false
add :referrer, :text
add :user_agent, :text
add :screen_width, :integer
add :screen_height, :integer
timestamps()
end
end
end
| 21.470588 | 51 | 0.654795 |
f728fa7e60af2028c39e4f586f4b6087f63ca406 | 1,162 | ex | Elixir | lib/esi/api/insurance.ex | bmartin2015/esi | 6463d444b01c4373fbed23dad70a4206cd855153 | ["MIT"] | 15 | 2017-11-25T09:10:35.000Z | 2021-04-26T03:09:51.000Z | lib/esi/api/insurance.ex | bmartin2015/esi | 6463d444b01c4373fbed23dad70a4206cd855153 | ["MIT"] | 6 | 2018-01-19T20:14:20.000Z | 2019-08-03T12:58:39.000Z | lib/esi/api/insurance.ex | bmartin2015/esi | 6463d444b01c4373fbed23dad70a4206cd855153 | ["MIT"] | 13 | 2017-12-09T16:30:30.000Z | 2021-12-22T21:27:37.000Z |
defmodule ESI.API.Insurance do
@typedoc """
Options for [`Insurance.prices/1`](#prices/1).
- `:language` (DEFAULT: `:"en-us"`) -- Language to use in the response, takes precedence over Accept-Language
"""
@type prices_opts :: [prices_opt]
@type prices_opt :: {:language, nil | :de | :"en-us" | :fr | :ja | :ru | :zh}
@doc """
Return available insurance levels for all ship types.
## Response Example
A list of insurance levels for all ship types:
[
%{
"levels" => [%{"cost" => 10.01, "name" => "Basic", "payout" => 20.01}],
"type_id" => 1
}
]
## Swagger Source
This function was generated from the following Swagger operation:
- `operationId` -- `get_insurance_prices`
- `path` -- `/insurance/prices/`
[View on ESI Site](https://esi.tech.ccp.is/latest/#!/Insurance/get_insurance_prices)
"""
@spec prices(opts :: prices_opts) :: ESI.Request.t()
def prices(opts \\ []) do
%ESI.Request{
verb: :get,
path: "/insurance/prices/",
opts_schema: %{datasource: {:query, :optional}, language: {:query, :optional}},
opts: Map.new(opts)
}
end
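  # Illustrative usage — a sketch that assumes this client follows the usual
  # ESI pattern of piping the returned request struct into a runner such as
  # `ESI.request/1` (the runner is not defined in this file):
  #
  #   ESI.API.Insurance.prices(language: :"en-us")
  #   |> ESI.request()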
end
| 26.409091 | 111 | 0.598107 |
f7290e9faa1ff0da1ad5a309f3c6f128d10911ac | 2,269 | ex | Elixir | clients/plus_domains/lib/google_api/plus_domains/v1/model/plus_domains_aclentry_resource.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/plus_domains/lib/google_api/plus_domains/v1/model/plus_domains_aclentry_resource.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/plus_domains/lib/google_api/plus_domains/v1/model/plus_domains_aclentry_resource.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PlusDomains.V1.Model.PlusDomainsAclentryResource do
@moduledoc """
## Attributes
* `displayName` (*type:* `String.t`, *default:* `nil`) - A descriptive name for this entry. Suitable for display.
* `id` (*type:* `String.t`, *default:* `nil`) - The ID of the entry. For entries of type "person" or "circle", this is the ID of the resource. For other types, this property is not set.
* `type` (*type:* `String.t`, *default:* `nil`) - The type of entry describing to whom access is granted. Possible values are:
- "person" - Access to an individual.
- "circle" - Access to members of a circle.
- "myCircles" - Access to members of all the person's circles.
- "extendedCircles" - Access to members of all the person's circles, plus all of the people in their circles.
- "domain" - Access to members of the person's Google Apps domain.
- "public" - Access to anyone on the web.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t(),
:id => String.t(),
:type => String.t()
}
field(:displayName)
field(:id)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.PlusDomains.V1.Model.PlusDomainsAclentryResource do
def decode(value, options) do
GoogleApi.PlusDomains.V1.Model.PlusDomainsAclentryResource.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.PlusDomains.V1.Model.PlusDomainsAclentryResource do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
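# Illustrative decode through the generated Poison implementations above
# (field values are placeholders, not real data):
#
#   ~s({"displayName": "Friends", "type": "circle"})
#   |> Poison.decode!(as: %GoogleApi.PlusDomains.V1.Model.PlusDomainsAclentryResource{})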
| 38.457627 | 189 | 0.703834 |
f72911a6fe90d99b392e814d554f50b7679d1d86 | 1,643 | ex | Elixir | spec/lib/string2.ex | efcasado/stopwatch | 6cc7f0632de3ecb3926160fa98fdb9971c1d994e | ["Unlicense", "MIT"] | null | null | null | spec/lib/string2.ex | efcasado/stopwatch | 6cc7f0632de3ecb3926160fa98fdb9971c1d994e | ["Unlicense", "MIT"] | 1 | 2018-03-28T09:13:04.000Z | 2018-03-28T09:13:04.000Z | spec/lib/string2.ex | efcasado/chronex | 6cc7f0632de3ecb3926160fa98fdb9971c1d994e | ["Unlicense", "MIT"] | null | null | null |
###========================================================================
### File: string2.ex
###
### A dummy module used for testing purposes.
###
###
### Author(s):
### - Enrique Fernandez <[email protected]>
###
###-- LICENSE -------------------------------------------------------------
### The MIT License (MIT)
###
### Copyright (c) 2018 Enrique Fernandez
###
### Permission is hereby granted, free of charge, to any person obtaining
### a copy of this software and associated documentation files (the
### "Software"), to deal in the Software without restriction, including
### without limitation the rights to use, copy, modify, merge, publish,
### distribute, sublicense, and/or sell copies of the Software,
### and to permit persons to whom the Software is furnished to do so,
### subject to the following conditions:
###
### The above copyright notice and this permission notice shall be included
### in all copies or substantial portions of the Software.
###
### THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
### EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
### MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
### IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
### CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
### TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
### SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###========================================================================
defmodule String2 do
def length(str), do: String.length(str)
def throw_test(x), do: throw(x)
end
| 42.128205 | 75 | 0.631163 |
f72938fcca66b1a47efdc496346a275e235b93cd | 1,021 | ex | Elixir | apps/welcome2_web/test/support/conn_case.ex | philihp/welcome2_umbrella | f5f494b98fe4b64a3e1bbfc1b8b432aad7f8c3b2 | ["MIT"] | null | null | null | apps/welcome2_web/test/support/conn_case.ex | philihp/welcome2_umbrella | f5f494b98fe4b64a3e1bbfc1b8b432aad7f8c3b2 | ["MIT"] | 13 | 2020-03-22T08:00:57.000Z | 2022-03-07T16:35:36.000Z | apps/welcome2_web/test/support/conn_case.ex | philihp/welcome2_umbrella | f5f494b98fe4b64a3e1bbfc1b8b432aad7f8c3b2 | ["MIT"] | null | null | null |
defmodule Welcome2Web.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Welcome2Web.ConnCase, async: true`, although
  this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias Welcome2Web.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint Welcome2Web.Endpoint
end
end
setup _tags do
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
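# A minimal sketch of a test module built on this case; the controller name,
# route helper and asserted status below are assumptions for illustration:
#
#     defmodule Welcome2Web.PageControllerTest do
#       use Welcome2Web.ConnCase
#       test "GET /", %{conn: conn} do
#         conn = get(conn, Routes.page_path(conn, :index))
#         assert html_response(conn, 200)
#       end
#     end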
| 29.171429 | 62 | 0.738492 |
f7295ce433ab541aa790a2883c3796ca3bc0006f | 38 | exs | Elixir | apps/language_server/test/fixtures/formatter/lib/.formatter.exs | ihabunek/elixir-ls | a8bdf9304f04254160c9fc982ad314a50085c51a | ["Apache-2.0"] | 912 | 2017-06-08T03:58:03.000Z | 2021-09-06T03:42:07.000Z | apps/language_server/test/fixtures/formatter/lib/.formatter.exs | ihabunek/elixir-ls | a8bdf9304f04254160c9fc982ad314a50085c51a | ["Apache-2.0"] | 441 | 2019-01-05T02:33:52.000Z | 2022-03-30T20:56:50.000Z | apps/language_server/test/fixtures/formatter/lib/.formatter.exs | ihabunek/elixir-ls | a8bdf9304f04254160c9fc982ad314a50085c51a | ["Apache-2.0"] | 126 | 2018-11-12T19:16:53.000Z | 2022-03-26T13:27:50.000Z |
[
locals_without_parens: [foo: 1]
]
| 9.5 | 33 | 0.657895 |
f72a4fcdd50b3b2d979e9125541ac1ac4d3c9dfa | 220 | exs | Elixir | farmbot_celery_script/test/farmbot_celery_script/corpus/arg_test.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | ["MIT"] | 1 | 2021-04-22T10:18:50.000Z | 2021-04-22T10:18:50.000Z | farmbot_celery_script/test/farmbot_celery_script/corpus/arg_test.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | ["MIT"] | null | null | null | farmbot_celery_script/test/farmbot_celery_script/corpus/arg_test.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | ["MIT"] | null | null | null |
defmodule FarmbotCeleryScript.Corpus.ArgTest do
use ExUnit.Case, async: true
alias FarmbotCeleryScript.Corpus
test "inspect" do
assert "#Arg<_then [execute, nothing]>" = inspect(Corpus.arg("_then"))
end
end
| 24.444444 | 74 | 0.740909 |
f72a9bb17f57abb46b8cc115c7f228d5aab227c9 | 4,127 | ex | Elixir | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/service_spec.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | ["Apache-2.0"] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/service_spec.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | ["Apache-2.0"] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/service_spec.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudRun.V1alpha1.Model.ServiceSpec do
@moduledoc """
ServiceSpec holds the desired state of the Route (from the client), which is used to manipulate the underlying Route and Configuration(s).
## Attributes
* `generation` (*type:* `integer()`, *default:* `nil`) - Deprecated and not currently populated by Cloud Run. See metadata.generation instead, which is the sequence number containing the latest generation of the desired state. Read-only.
* `manual` (*type:* `GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecManualType.t`, *default:* `nil`) - Manual contains the options for configuring a manual service. See ServiceSpec for more details. Not currently supported by Cloud Run.
* `pinned` (*type:* `GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecPinnedType.t`, *default:* `nil`) - Pins this service to a specific revision name. The revision must be owned by the configuration provided. Deprecated and not supported by Cloud Run. +optional
* `release` (*type:* `GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecReleaseType.t`, *default:* `nil`) - Release enables gradual promotion of new revisions by allowing traffic to be split between two revisions. This type replaces the deprecated Pinned type. Not currently supported by Cloud Run.
* `runLatest` (*type:* `GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecRunLatest.t`, *default:* `nil`) - RunLatest defines a simple Service. It will automatically configure a route that keeps the latest ready revision from the supplied configuration running. +optional
* `template` (*type:* `GoogleApi.CloudRun.V1alpha1.Model.RevisionTemplate.t`, *default:* `nil`) - Template holds the latest specification for the Revision to be stamped out.
* `traffic` (*type:* `list(GoogleApi.CloudRun.V1alpha1.Model.TrafficTarget.t)`, *default:* `nil`) - Traffic specifies how to distribute traffic over a collection of Knative Revisions and Configurations.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:generation => integer() | nil,
:manual => GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecManualType.t() | nil,
:pinned => GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecPinnedType.t() | nil,
:release => GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecReleaseType.t() | nil,
:runLatest => GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecRunLatest.t() | nil,
:template => GoogleApi.CloudRun.V1alpha1.Model.RevisionTemplate.t() | nil,
:traffic => list(GoogleApi.CloudRun.V1alpha1.Model.TrafficTarget.t()) | nil
}
field(:generation)
field(:manual, as: GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecManualType)
field(:pinned, as: GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecPinnedType)
field(:release, as: GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecReleaseType)
field(:runLatest, as: GoogleApi.CloudRun.V1alpha1.Model.ServiceSpecRunLatest)
field(:template, as: GoogleApi.CloudRun.V1alpha1.Model.RevisionTemplate)
field(:traffic, as: GoogleApi.CloudRun.V1alpha1.Model.TrafficTarget, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CloudRun.V1alpha1.Model.ServiceSpec do
def decode(value, options) do
GoogleApi.CloudRun.V1alpha1.Model.ServiceSpec.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudRun.V1alpha1.Model.ServiceSpec do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 63.492308 | 297 | 0.755512 |
f72aae5baa221429e9cc9dcc547958446eba50c2 | 1,046 | ex | Elixir | lib/better_big_canvas_web/live/board_live.ex | ChristianTovar/better-big-canvas | dd777ba83fed1e4c59ba92a25ee48cb7f0bf93e0 | ["MIT"] | 9 | 2021-02-25T19:58:34.000Z | 2022-03-29T22:46:57.000Z | lib/better_big_canvas_web/live/board_live.ex | ChristianTovar/better-big-canvas | dd777ba83fed1e4c59ba92a25ee48cb7f0bf93e0 | ["MIT"] | 3 | 2021-05-28T17:40:32.000Z | 2021-06-07T16:31:58.000Z | lib/better_big_canvas_web/live/board_live.ex | ChristianTovar/better-big-canvas | dd777ba83fed1e4c59ba92a25ee48cb7f0bf93e0 | ["MIT"] | 2 | 2021-02-25T19:58:39.000Z | 2021-08-16T22:55:32.000Z |
defmodule BetterBigCanvasWeb.BoardLive do
use Phoenix.LiveView, layout: {BetterBigCanvasWeb.LayoutView, "live.html"}
alias BetterBigCanvasWeb.{BoardView, BoardComponent}
alias BetterBigCanvas.Square
@impl true
def mount(_params, _session, socket) do
if connected?(socket), do: Phoenix.PubSub.subscribe(BetterBigCanvas.PubSub, "update")
{:ok, socket}
end
@impl true
def render(assigns), do: Phoenix.View.render(BoardView, "board_live.html", assigns)
@impl true
def handle_info(%{id: id}, socket) do
send_update(BoardComponent, id: id, version: UUID.uuid4())
{:noreply, socket}
end
@impl true
def handle_event("canvas-mounted", %{"id" => id}, socket),
do: {:noreply, push_event(socket, "pixels", %{id: id, pixels: get_pixels(id)})}
def handle_event("canvas-ready", %{"id" => id}, socket),
do: {:noreply, push_event(socket, "new-pixels", %{id: id, pixels: get_pixels(id)})}
defp get_pixels(id) do
id
|> String.to_integer()
|> Square.read()
|> Keyword.values()
end
end
| 27.526316 | 89 | 0.680688 |
f72af12b41d72f48f46d72048742743e5ec532bd | 223 | exs | Elixir | config/config.exs | edragonconnect/elixir_wechat | b41e32bec591e32a68b3f475ffb25689cff4103f | ["MIT"] | 12 | 2019-03-07T01:34:12.000Z | 2021-09-11T06:40:29.000Z | config/config.exs | edragonconnect/elixir_wechat | b41e32bec591e32a68b3f475ffb25689cff4103f | ["MIT"] | 6 | 2019-10-12T10:48:07.000Z | 2021-03-03T07:06:34.000Z | config/config.exs | edragonconnect/elixir_wechat | b41e32bec591e32a68b3f475ffb25689cff4103f | ["MIT"] | 5 | 2019-03-07T01:34:31.000Z | 2021-01-06T08:01:51.000Z |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
import Config
config :logger, level: :info
if Mix.env() == :test do
import_config "test.exs"
end
| 22.3 | 61 | 0.753363 |
f72af92f03ca9a73aeb7287556c6fd6bcd11d362 | 7,680 | ex | Elixir | lib/livebook/utils.ex | fozcodes/livebook | b6ddf1883cec5bee80675ff95af8a1619a3da550 | ["Apache-2.0"] | null | null | null | lib/livebook/utils.ex | fozcodes/livebook | b6ddf1883cec5bee80675ff95af8a1619a3da550 | ["Apache-2.0"] | null | null | null | lib/livebook/utils.ex | fozcodes/livebook | b6ddf1883cec5bee80675ff95af8a1619a3da550 | ["Apache-2.0"] | null | null | null |
defmodule Livebook.Utils do
@moduledoc false
@type id :: binary()
@doc """
Generates a random binary id.
"""
@spec random_id() :: id()
def random_id() do
:crypto.strong_rand_bytes(20) |> Base.encode32(case: :lower)
end
@doc """
Generates a random short binary id.
"""
@spec random_short_id() :: id()
def random_short_id() do
:crypto.strong_rand_bytes(5) |> Base.encode32(case: :lower)
end
@doc """
Generates a random cookie for a distributed node.
"""
@spec random_cookie() :: atom()
def random_cookie() do
:"c_#{Base.url_encode64(:crypto.strong_rand_bytes(39))}"
end
@doc """
Generates a random binary id that includes node information.
## Format
The id is formed from the following binary parts:
* 16B - hashed node name
* 9B - random bytes
The binary is base32 encoded.
"""
@spec random_node_aware_id() :: id()
def random_node_aware_id() do
node_part = node_hash(node())
random_part = :crypto.strong_rand_bytes(9)
binary = <<node_part::binary, random_part::binary>>
# 16B + 9B = 25B is suitable for base32 encoding without padding
Base.encode32(binary, case: :lower)
end
# Note: the result is always 16 bytes long
defp node_hash(node) do
content = Atom.to_string(node)
:erlang.md5(content)
end
@doc """
Extracts node name from the given node aware id.
The node in question must be connected, otherwise it won't be found.
"""
@spec node_from_node_aware_id(id()) :: {:ok, node()} | :error
def node_from_node_aware_id(id) do
binary = Base.decode32!(id, case: :lower)
<<node_part::binary-size(16), _random_part::binary-size(9)>> = binary
known_nodes = [node() | Node.list()]
Enum.find_value(known_nodes, :error, fn node ->
node_hash(node) == node_part && {:ok, node}
end)
end
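  # Round-trip sketch (illustrative; the id is random, so values vary, and
  # the node atom depends on how the VM was started):
  #
  #   id = Livebook.Utils.random_node_aware_id()
  #   #=> a 40-character lowercase base32 string
  #   Livebook.Utils.node_from_node_aware_id(id)
  #   #=> {:ok, :nonode@nohost}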
@doc """
Converts the given name to node identifier.
"""
@spec node_from_name(String.t()) :: atom()
def node_from_name(name) do
if name =~ "@" do
String.to_atom(name)
else
# Default to the same host as the current node
:"#{name}@#{node_host()}"
end
end
@doc """
Returns the host part of a node.
"""
@spec node_host() :: binary()
def node_host do
[_, host] = node() |> Atom.to_string() |> :binary.split("@")
host
end
@doc """
Registers the given process under `name` for the time of `fun` evaluation.
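  ## Examples
  A sketch (the name and message are placeholders):
      Livebook.Utils.temporarily_register(self(), :my_process, fn ->
        send(:my_process, :hello)
      end)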
"""
@spec temporarily_register(pid(), atom(), (... -> any())) :: any()
def temporarily_register(pid, name, fun) do
Process.register(pid, name)
fun.()
after
Process.unregister(name)
end
@doc """
Returns a function that accesses list items by the given id.
## Examples
iex> list = [%{id: 1, name: "Jake"}, %{id: 2, name: "Amy"}]
iex> get_in(list, [Livebook.Utils.access_by_id(2), Access.key(:name)])
"Amy"
iex> list = [%{id: 1, name: "Jake"}, %{id: 2, name: "Amy"}]
iex> put_in(list, [Livebook.Utils.access_by_id(2), Access.key(:name)], "Amy Santiago")
[%{id: 1, name: "Jake"}, %{id: 2, name: "Amy Santiago"}]
An error is raised if the accessed structure is not a list:
iex> get_in(%{}, [Livebook.Utils.access_by_id(1)])
** (RuntimeError) Livebook.Utils.access_by_id/1 expected a list, got: %{}
"""
@spec access_by_id(term()) ::
Access.access_fun(data :: struct() | map(), current_value :: term())
def access_by_id(id) do
fn
:get, data, next when is_list(data) ->
data
|> Enum.find(fn item -> item.id == id end)
|> next.()
:get_and_update, data, next when is_list(data) ->
case Enum.split_while(data, fn item -> item.id != id end) do
{prev, [item | cons]} ->
case next.(item) do
{get, update} ->
{get, prev ++ [update | cons]}
:pop ->
{item, prev ++ cons}
end
_ ->
{nil, data}
end
_op, data, _next ->
raise "Livebook.Utils.access_by_id/1 expected a list, got: #{inspect(data)}"
end
end
@doc """
Validates if the given URL is syntactically valid.
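  ## Examples
      iex> Livebook.Utils.valid_url?("https://example.com")
      true
      iex> Livebook.Utils.valid_url?("not a url")
      false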
"""
@spec valid_url?(String.t()) :: boolean()
def valid_url?(url) do
uri = URI.parse(url)
uri.scheme != nil and uri.host != nil and uri.host =~ "."
end
@doc """
  Validates if the given hex color is in the correct format.
## Examples
iex> Livebook.Utils.valid_hex_color?("#111111")
true
iex> Livebook.Utils.valid_hex_color?("#ABC123")
true
iex> Livebook.Utils.valid_hex_color?("ABCDEF")
false
iex> Livebook.Utils.valid_hex_color?("#111")
false
"""
@spec valid_hex_color?(String.t()) :: boolean()
def valid_hex_color?(hex_color), do: hex_color =~ ~r/^#[0-9a-fA-F]{6}$/
@doc """
Changes the first letter in the given string to upper case.
## Examples
iex> Livebook.Utils.upcase_first("sippin tea")
"Sippin tea"
iex> Livebook.Utils.upcase_first("short URL")
"Short URL"
iex> Livebook.Utils.upcase_first("")
""
"""
@spec upcase_first(String.t()) :: String.t()
def upcase_first(string) do
{first, rest} = String.split_at(string, 1)
String.upcase(first) <> rest
end
@doc """
Changes the first letter in the given string to lower case.
## Examples
iex> Livebook.Utils.downcase_first("Sippin tea")
"sippin tea"
iex> Livebook.Utils.downcase_first("Short URL")
"short URL"
iex> Livebook.Utils.downcase_first("")
""
"""
@spec downcase_first(String.t()) :: String.t()
def downcase_first(string) do
{first, rest} = String.split_at(string, 1)
String.downcase(first) <> rest
end
@doc """
Expands a relative path in terms of the given URL.
## Examples
iex> Livebook.Utils.expand_url("file:///home/user/lib/file.ex", "../root.ex")
"file:///home/user/root.ex"
iex> Livebook.Utils.expand_url("https://example.com/lib/file.ex?token=supersecret", "../root.ex")
"https://example.com/root.ex?token=supersecret"
"""
@spec expand_url(String.t(), String.t()) :: String.t()
def expand_url(url, relative_path) do
url
|> URI.parse()
|> Map.update!(:path, fn path ->
path |> Path.dirname() |> Path.join(relative_path) |> Path.expand()
end)
|> URI.to_string()
end
@doc ~S"""
Wraps the given line into lines that fit in `width` characters.
Words longer than `width` are not broken apart.
## Examples
iex> Livebook.Utils.wrap_line("cat on the roof", 7)
"cat on\nthe\nroof"
iex> Livebook.Utils.wrap_line("cat in the cup", 7)
"cat in\nthe cup"
iex> Livebook.Utils.wrap_line("cat in the cup", 2)
"cat\nin\nthe\ncup"
"""
@spec wrap_line(String.t(), pos_integer()) :: String.t()
def wrap_line(line, width) do
line
|> String.split()
|> Enum.reduce({[[]], 0}, fn part, {[group | groups], group_size} ->
size = String.length(part)
cond do
group == [] ->
{[[part] | groups], size}
group_size + 1 + size <= width ->
{[[part, " " | group] | groups], group_size + 1 + size}
true ->
{[[part], group | groups], size}
end
end)
|> elem(0)
|> Enum.map(&Enum.reverse/1)
|> Enum.reverse()
|> Enum.intersperse("\n")
|> IO.iodata_to_binary()
end
@doc """
Reads file contents and encodes it into a data URL.
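  ## Examples
  An illustrative call (the path is a placeholder):
      Livebook.Utils.read_as_data_url!("/path/to/image.png")
      #=> "data:image/png;base64,iVBORw0KGgo..."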
"""
@spec read_as_data_url!(Path.t()) :: binary()
def read_as_data_url!(path) do
content = File.read!(path)
mime = MIME.from_path(path)
data = Base.encode64(content)
"data:#{mime};base64,#{data}"
end
end
| 25.945946 | 103 | 0.601302 |
f72b6c27d945b3c0c5eb1f83226289287a5c8405 | 1,596 | ex | Elixir | lib/ada/source/last_fm/api_client.ex | cloud8421/ada | 384f5e5e85e95c4d5883298259e781cc0a54bd07 | ["MIT"] | 7 | 2019-05-11T12:14:48.000Z | 2021-04-02T18:42:51.000Z | lib/ada/source/last_fm/api_client.ex | cloud8421/ada | 384f5e5e85e95c4d5883298259e781cc0a54bd07 | ["MIT"] | 3 | 2019-05-11T08:01:47.000Z | 2019-05-14T12:06:50.000Z | lib/ada/source/last_fm/api_client.ex | cloud8421/ada | 384f5e5e85e95c4d5883298259e781cc0a54bd07 | ["MIT"] | 1 | 2021-01-06T14:57:32.000Z | 2021-01-06T14:57:32.000Z |
defmodule Ada.Source.LastFm.ApiClient do
@moduledoc false
@base_url "https://ws.audioscrobbler.com/2.0"
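  # Module attributes are evaluated at compile time, so LAST_FM_API_KEY must
  # be present in the environment when this module is compiled.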
@api_key System.get_env("LAST_FM_API_KEY")
alias Ada.{HTTP, Source.LastFm.Track}
@spec get_recent(Ada.Source.LastFm.username(), DateTime.t(), DateTime.t()) ::
{:ok, [Track.t()]} | {:error, term()}
def get_recent(user, from, to) do
qs_params = [
{"method", "user.getrecenttracks"},
{"user", user},
{"from", DateTime.to_unix(from)},
{"to", DateTime.to_unix(to)},
{"limit", "200"},
{"api_key", @api_key},
{"format", "json"}
]
case HTTP.Client.json_get(@base_url, [], qs_params) do
%HTTP.Client.Response{status_code: 200, body: body} ->
{:ok, parse_response(body)}
%HTTP.Client.Response{status_code: status_code, body: body} ->
{:error, {status_code, body}}
%HTTP.Client.ErrorResponse{message: message} ->
{:error, message}
end
end
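  # Illustrative call (username and range are placeholders):
  #
  #   get_recent("some_user", ~U[2019-01-01 00:00:00Z], ~U[2019-01-02 00:00:00Z])
  #   #=> {:ok, [%Track{artist: "...", name: "...", listened_at: ...}]}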
@spec parse_response(map()) :: [Track.t()]
def parse_response(response) do
tracks = get_in(response, ["recenttracks", "track"])
tracks
|> Enum.reverse()
|> Enum.map(fn t ->
%Track{
artist: get_in(t, ["artist", "#text"]),
album: get_in(t, ["album", "#text"]),
name: Map.get(t, "name"),
listened_at: parse_listened_at(t)
}
end)
end
defp parse_listened_at(t) do
case get_in(t, ["@attr", "nowplaying"]) do
"true" ->
:now_playing
nil ->
t |> get_in(["date", "uts"]) |> String.to_integer() |> DateTime.from_unix!()
end
end
end
| 27.050847 | 84 | 0.580201 |
f72b785622d66c35bf438cf37a338312ac71986f | 261 | exs | Elixir | test/day_21_springdroid_adventure_test.exs | scmx/advent-of-code-2019-elixir | f3022efb422e15abead6b882c78855b26b138443 | ["MIT"] | 1 | 2019-12-02T16:27:06.000Z | 2019-12-02T16:27:06.000Z | test/day_21_springdroid_adventure_test.exs | scmx/advent-of-code-2019-elixir | f3022efb422e15abead6b882c78855b26b138443 | ["MIT"] | null | null | null | test/day_21_springdroid_adventure_test.exs | scmx/advent-of-code-2019-elixir | f3022efb422e15abead6b882c78855b26b138443 | ["MIT"] | 1 | 2020-12-10T10:47:21.000Z | 2020-12-10T10:47:21.000Z |
defmodule Adventofcode.Day21SpringdroidAdventureTest do
use Adventofcode.FancyCase
import Adventofcode.Day21SpringdroidAdventure
describe "part_1/1" do
test_with_puzzle_input do
assert 19_352_638 = puzzle_input() |> part_1()
end
end
end
| 21.75 | 55 | 0.777778 |
f72b8df725ddbc1b46347aae4292891293487a0f | 2,429 | exs | Elixir | src/frontend/config/prod.exs | wbpascal/statuswebsite | 7a81e530a9176c53abeab0582cb710113101b716 | [
"MIT"
] | 1 | 2021-04-18T20:21:03.000Z | 2021-04-18T20:21:03.000Z | src/frontend/config/prod.exs | wbpascal/statuswebsite | 7a81e530a9176c53abeab0582cb710113101b716 | [
"MIT"
] | null | null | null | src/frontend/config/prod.exs | wbpascal/statuswebsite | 7a81e530a9176c53abeab0582cb710113101b716 | [
"MIT"
] | 1 | 2021-04-18T20:21:08.000Z | 2021-04-18T20:21:08.000Z | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :frontend, FrontendWeb.Endpoint,
http: [:inet6, port: 4000],
url: [host: "vs-status.de", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :frontend, FrontendWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# :inet6,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :frontend, FrontendWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases (distillery)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :frontend, FrontendWeb.Endpoint, server: true
#
# Note you can't rely on `System.get_env/1` when using releases.
# See the releases documentation accordingly.
# Finally import the config/prod.secret.exs which should be versioned
# separately.
import_config "prod.secret.exs"
| 33.736111 | 69 | 0.714286 |
f72bbe45272511293646fe7e28904af9b136f3b0 | 145 | ex | Elixir | apps/tai/lib/tai/commander/delete_all_orders.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | apps/tai/lib/tai/commander/delete_all_orders.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/tai/lib/tai/commander/delete_all_orders.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | defmodule Tai.Commander.DeleteAllOrders do
@spec execute() :: {non_neg_integer, nil}
def execute do
Tai.NewOrders.delete_all()
end
end
| 20.714286 | 43 | 0.737931 |
f72bd2aae61fd19d5b083acb1820b14be74e199d | 426 | ex | Elixir | lib/validation/rules/credit_card/diners.ex | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | ["MIT"] | 60 | 2019-09-13T13:37:01.000Z | 2021-01-06T05:20:32.000Z | lib/validation/rules/credit_card/diners.ex | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | ["MIT"] | 1 | 2019-12-16T13:57:22.000Z | 2019-12-16T13:57:22.000Z | lib/validation/rules/credit_card/diners.ex | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | ["MIT"] | 5 | 2019-09-13T19:14:24.000Z | 2019-11-26T17:33:08.000Z |
defmodule Validation.Rules.CreditCard.Diners do
@moduledoc false
alias Validation.Rules.CreditCard
  @spec validate?(String.t()) :: boolean
def validate?(input) when is_binary(input) do
# only numbers
input = Regex.replace(~r/\D/, input, "")
CreditCard.validate?(input) and diners_validation(input)
end
defp diners_validation(input) do
String.match?(input, ~r/^3(?:0[0-5]|[68]\d)\d{11}$/)
end
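  # Illustrative check with a well-known Diners Club test number; assumes
  # `CreditCard.validate?/1` performs the generic Luhn/length validation:
  #
  #   Validation.Rules.CreditCard.Diners.validate?("3056 9309 0259 04")
  #   #=> true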
end
| 25.058824 | 60 | 0.692488 |
f72bda2e6d3cfcc91d7c722bbc7a4f64e4a73dcb | 1,121 | exs | Elixir | config/config.exs | hlmeS/distillery-aws | bfc59dd775d0ad9b2c4c19286138ebf57287daea | ["Apache-2.0"] | 1 | 2020-07-27T04:06:37.000Z | 2020-07-27T04:06:37.000Z | config/config.exs | hlmeS/distillery-aws | bfc59dd775d0ad9b2c4c19286138ebf57287daea | ["Apache-2.0"] | null | null | null | config/config.exs | hlmeS/distillery-aws | bfc59dd775d0ad9b2c4c19286138ebf57287daea | ["Apache-2.0"] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :distillery_example,
ecto_repos: [Example.Repo]
# Configures the endpoint
config :distillery_example, ExampleWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "WLTAq0m3cyhaqufuub/5RyX8E8/s2UR2P1cVJVWn0d46GixiKO6yaTa6i8B8jbzS",
render_errors: [view: ExampleWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Example.PubSub,
adapter: Phoenix.PubSub.PG2]
config :phoenix, :json_library, Jason
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:user_id]
config :libcluster,
topologies: [
example: [
strategy: Cluster.Strategy.Epmd,
config: [hosts: []]
]
]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 29.5 | 86 | 0.743979 |
f72bfd190206bfb9d9aba66af90b991b49701cef | 11,489 | ex | Elixir | lib/ecto/query/inspect.ex | alco/ecto | 8972c318d8ed551395607b0d6e15e3c6cfbf425a | ["Apache-2.0"] | null | null | null | lib/ecto/query/inspect.ex | alco/ecto | 8972c318d8ed551395607b0d6e15e3c6cfbf425a | ["Apache-2.0"] | null | null | null | lib/ecto/query/inspect.ex | alco/ecto | 8972c318d8ed551395607b0d6e15e3c6cfbf425a | ["Apache-2.0"] | 1 | 2020-10-07T16:52:00.000Z | 2020-10-07T16:52:00.000Z |
import Inspect.Algebra
import Kernel, except: [to_string: 1]
alias Ecto.Query.{BooleanExpr, DynamicExpr, JoinExpr, QueryExpr, WithExpr}
defimpl Inspect, for: Ecto.Query.DynamicExpr do
def inspect(%DynamicExpr{binding: binding} = dynamic, opts) do
joins =
binding
|> Enum.drop(1)
|> Enum.with_index()
|> Enum.map(&%JoinExpr{ix: &1})
aliases =
for({as, _} when is_atom(as) <- binding, do: as)
|> Enum.with_index()
|> Map.new
query = %Ecto.Query{joins: joins, aliases: aliases}
{expr, binding, params, _, _} = Ecto.Query.Builder.Dynamic.fully_expand(query, dynamic)
names = Enum.map(binding, fn
{_, {name, _, _}} -> Atom.to_string(name)
{name, _, _} -> Atom.to_string(name)
end)
inspected = Inspect.Ecto.Query.expr(expr, List.to_tuple(names), %{expr: expr, params: params})
container_doc("dynamic(", [Macro.to_string(binding), inspected], ")", opts, fn str, _ ->
str
end)
end
end
defimpl Inspect, for: Ecto.Query do
@doc false
def inspect(query, opts) do
list =
Enum.map(to_list(query), fn
{key, string} ->
concat(Atom.to_string(key) <> ": ", string)
string ->
string
end)
result = container_doc("#Ecto.Query<", list, ">", opts, fn str, _ -> str end)
case query.with_ctes do
%WithExpr{recursive: recursive, queries: [_ | _] = queries} ->
with_ctes =
Enum.map(queries, fn {name, query} ->
cte = case query do
%Ecto.Query{} -> __MODULE__.inspect(query, opts)
%Ecto.Query.QueryExpr{} -> expr(query, {})
end
concat(["|> with_cte(\"" <> name <> "\", as: ", cte, ")"])
end)
result = if recursive, do: glue(result, "\n", "|> recursive_ctes(true)"), else: result
[result | with_ctes] |> Enum.intersperse(break("\n")) |> concat()
_ ->
result
end
end
@doc false
def to_string(query) do
Enum.map_join(to_list(query), ",\n ", fn
{key, string} ->
Atom.to_string(key) <> ": " <> string
string ->
string
end)
end
defp to_list(query) do
names =
query
|> collect_sources
|> generate_letters
|> generate_names
|> List.to_tuple()
from = bound_from(query.from, binding(names, 0))
joins = joins(query.joins, names)
preloads = preloads(query.preloads)
assocs = assocs(query.assocs, names)
windows = windows(query.windows, names)
combinations = combinations(query.combinations)
wheres = bool_exprs(%{and: :where, or: :or_where}, query.wheres, names)
group_bys = kw_exprs(:group_by, query.group_bys, names)
havings = bool_exprs(%{and: :having, or: :or_having}, query.havings, names)
order_bys = kw_exprs(:order_by, query.order_bys, names)
updates = kw_exprs(:update, query.updates, names)
lock = kw_inspect(:lock, query.lock)
limit = kw_expr(:limit, query.limit, names)
offset = kw_expr(:offset, query.offset, names)
select = kw_expr(:select, query.select, names)
distinct = kw_expr(:distinct, query.distinct, names)
Enum.concat([
from,
joins,
wheres,
group_bys,
havings,
windows,
combinations,
order_bys,
limit,
offset,
lock,
distinct,
updates,
select,
preloads,
assocs
])
end
defp bound_from(nil, name), do: ["from #{name} in query"]
defp bound_from(%{source: source} = from, name) do
["from #{name} in #{inspect_source(source)}"] ++ kw_as_and_prefix(from)
end
defp inspect_source(%Ecto.Query{} = query), do: "^" <> inspect(query)
defp inspect_source(%Ecto.SubQuery{query: query}), do: "subquery(#{to_string(query)})"
defp inspect_source({source, nil}), do: inspect(source)
defp inspect_source({nil, schema}), do: inspect(schema)
defp inspect_source({source, schema} = from) do
inspect(if source == schema.__schema__(:source), do: schema, else: from)
end
defp joins(joins, names) do
joins
|> Enum.with_index()
|> Enum.flat_map(fn {expr, ix} -> join(expr, binding(names, expr.ix || ix + 1), names) end)
end
defp join(%JoinExpr{qual: qual, assoc: {ix, right}, on: on} = join, name, names) do
string = "#{name} in assoc(#{binding(names, ix)}, #{inspect(right)})"
[{join_qual(qual), string}] ++ kw_as_and_prefix(join) ++ maybe_on(on, names)
end
defp join(
%JoinExpr{qual: qual, source: {:fragment, _, _} = source, on: on} = join = part,
name,
names
) do
string = "#{name} in #{expr(source, names, part)}"
[{join_qual(qual), string}] ++ kw_as_and_prefix(join) ++ [on: expr(on, names)]
end
defp join(%JoinExpr{qual: qual, source: source, on: on} = join, name, names) do
string = "#{name} in #{inspect_source(source)}"
[{join_qual(qual), string}] ++ kw_as_and_prefix(join) ++ [on: expr(on, names)]
end
defp maybe_on(%QueryExpr{expr: true}, _names), do: []
defp maybe_on(%QueryExpr{} = on, names), do: [on: expr(on, names)]
defp preloads([]), do: []
defp preloads(preloads), do: [preload: inspect(preloads)]
defp assocs([], _names), do: []
defp assocs(assocs, names), do: [preload: expr(assocs(assocs), names, %{})]
defp assocs(assocs) do
Enum.map(assocs, fn
{field, {idx, []}} ->
{field, {:&, [], [idx]}}
{field, {idx, children}} ->
{field, {{:&, [], [idx]}, assocs(children)}}
end)
end
defp windows(windows, names) do
Enum.map(windows, &window(&1, names))
end
defp window({name, %{expr: definition} = part}, names) do
{:windows, "[#{name}: " <> expr(definition, names, part) <> "]"}
end
defp combinations(combinations) do
Enum.map(combinations, fn {key, val} -> {key, "(" <> to_string(val) <> ")"} end)
end
defp bool_exprs(keys, exprs, names) do
Enum.map(exprs, fn %{expr: expr, op: op} = part ->
{Map.fetch!(keys, op), expr(expr, names, part)}
end)
end
defp kw_exprs(key, exprs, names) do
Enum.map(exprs, &{key, expr(&1, names)})
end
defp kw_expr(_key, nil, _names), do: []
defp kw_expr(key, expr, names), do: [{key, expr(expr, names)}]
defp kw_inspect(_key, nil), do: []
defp kw_inspect(key, val), do: [{key, inspect(val)}]
defp kw_as_and_prefix(%{as: as, prefix: prefix}) do
kw_inspect(:as, as) ++ kw_inspect(:prefix, prefix)
end
defp expr(%{expr: expr} = part, names) do
expr(expr, names, part)
end
@doc false
def expr(expr, names, part) do
Macro.to_string(expr, &expr_to_string(&1, &2, names, part))
end
# For keyword and interpolated fragments use normal escaping
defp expr_to_string({:fragment, _, [{_, _} | _] = parts}, _, names, part) do
"fragment(" <> unmerge_fragments(parts, "", [], names, part) <> ")"
end
# Convert variables to proper names
defp expr_to_string({:&, _, [ix]}, _, names, %{take: take}) do
case take do
%{^ix => {:any, fields}} when ix == 0 ->
Kernel.inspect(fields)
%{^ix => {tag, fields}} ->
"#{tag}(" <> binding(names, ix) <> ", " <> Kernel.inspect(fields) <> ")"
_ ->
binding(names, ix)
end
end
defp expr_to_string({:&, _, [ix]}, _, names, _) do
binding(names, ix)
end
# Inject the interpolated value
#
# In case the query had its parameters removed,
# we use ... to express the interpolated code.
defp expr_to_string({:^, _, [_ix, _len]}, _, _, _part) do
Macro.to_string({:^, [], [{:..., [], nil}]})
end
defp expr_to_string({:^, _, [ix]}, _, _, %{params: params}) do
case Enum.at(params || [], ix) do
{value, _type} -> "^" <> Kernel.inspect(value, charlists: :as_lists)
_ -> "^..."
end
end
# Strip trailing ()
defp expr_to_string({{:., _, [_, _]}, _, []}, string, _, _) do
size = byte_size(string)
:binary.part(string, 0, size - 2)
end
# Types need to be converted back to AST for fields
defp expr_to_string({:type, [], [expr, type]}, _string, names, part) do
"type(#{expr(expr, names, part)}, #{type |> type_to_expr() |> expr(names, part)})"
end
# Tagged values
defp expr_to_string(%Ecto.Query.Tagged{value: value, tag: nil}, _, _names, _) do
inspect(value)
end
defp expr_to_string(%Ecto.Query.Tagged{value: value, tag: tag}, _, names, part) do
{:type, [], [value, tag]} |> expr(names, part)
end
defp expr_to_string({:json_extract_path, _, [expr, path]}, _, names, part) do
json_expr_path_to_expr(expr, path) |> expr(names, part)
end
defp expr_to_string({:{}, [], [:subquery, i]}, _string, _names, %BooleanExpr{subqueries: subqueries}) do
# We were supposed to match on {:subquery, i} but Elixir incorrectly
# translates those to `:{}` when converting to string.
# See https://github.com/elixir-lang/elixir/blob/27bd9ffcc607b74ce56b547cb6ba92c9012c317c/lib/elixir/lib/macro.ex#L932
inspect_source(Enum.fetch!(subqueries, i))
end
defp expr_to_string(_expr, string, _, _) do
string
end
defp type_to_expr({composite, type}) when is_atom(composite) do
{composite, type_to_expr(type)}
end
defp type_to_expr({part, type}) when is_integer(part) do
{{:., [], [{:&, [], [part]}, type]}, [], []}
end
defp type_to_expr(type) do
type
end
defp json_expr_path_to_expr(expr, path) do
Enum.reduce(path, expr, fn element, acc ->
{{:., [], [Access, :get]}, [], [acc, element]}
end)
end
defp unmerge_fragments([{:raw, s}, {:expr, v} | t], frag, args, names, part) do
unmerge_fragments(t, frag <> s <> "?", [expr(v, names, part) | args], names, part)
end
defp unmerge_fragments([{:raw, s}], frag, args, _names, _part) do
Enum.join([inspect(frag <> s) | Enum.reverse(args)], ", ")
end
defp join_qual(:inner), do: :join
defp join_qual(:inner_lateral), do: :join_lateral
defp join_qual(:left), do: :left_join
defp join_qual(:left_lateral), do: :left_join_lateral
defp join_qual(:right), do: :right_join
defp join_qual(:full), do: :full_join
defp join_qual(:cross), do: :cross_join
defp collect_sources(%{from: nil, joins: joins}) do
["query" | join_sources(joins)]
end
defp collect_sources(%{from: %{source: source}, joins: joins}) do
[from_sources(source) | join_sources(joins)]
end
defp from_sources(%Ecto.SubQuery{query: query}), do: from_sources(query.from.source)
defp from_sources({source, schema}), do: schema || source
defp from_sources(nil), do: "query"
defp join_sources(joins) do
joins
|> Enum.sort_by(& &1.ix)
|> Enum.map(fn
%JoinExpr{assoc: {_var, assoc}} ->
assoc
%JoinExpr{source: {:fragment, _, _}} ->
"fragment"
%JoinExpr{source: %Ecto.Query{from: from}} ->
from_sources(from.source)
%JoinExpr{source: source} ->
from_sources(source)
end)
end
defp generate_letters(sources) do
Enum.map(sources, fn source ->
source
|> Kernel.to_string()
|> normalize_source()
|> String.first()
|> String.downcase()
end)
end
defp generate_names(letters) do
{names, _} = Enum.map_reduce(letters, 0, &{"#{&1}#{&2}", &2 + 1})
names
end
defp binding(names, pos) do
try do
elem(names, pos)
rescue
ArgumentError -> "unknown_binding_#{pos}!"
end
end
defp normalize_source("Elixir." <> _ = source),
do: source |> Module.split() |> List.last()
defp normalize_source(source),
do: source
end
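# Example of the rendered output (a sketch): bindings get generated names
# derived from their sources, so
#
#     import Ecto.Query
#     inspect(from u in "users", where: u.age > 18)
#
# renders roughly as "#Ecto.Query<from u0 in \"users\", where: u0.age > 18>".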
| 29.159898 | 122 | 0.606406 |
f72c153ad00cbcc9d58a9c1de2e100998af2264d | 1,780 | exs | Elixir | rel/config.exs | c18t/elixir-phx-websocket-chat | 4dc921599b263a856a23d04200631bfee83e3fc3 | ["WTFPL"] | null | null | null | rel/config.exs | c18t/elixir-phx-websocket-chat | 4dc921599b263a856a23d04200631bfee83e3fc3 | ["WTFPL"] | null | null | null | rel/config.exs | c18t/elixir-phx-websocket-chat | 4dc921599b263a856a23d04200631bfee83e3fc3 | ["WTFPL"] | 1 | 2020-11-17T08:23:18.000Z | 2020-11-17T08:23:18.000Z |
# Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
Path.join(["rel", "plugins", "*.exs"])
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))
use Mix.Releases.Config,
# This sets the default release built by `mix release`
default_release: :default,
# This sets the default environment used by `mix release`
default_environment: Mix.env()
# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/configuration.html
# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile
environment :dev do
# If you are running Phoenix, you should make sure that
# server: true is set and the code reloader is disabled,
# even in dev mode.
# It is recommended that you build with MIX_ENV=prod and pass
# the --env flag to Distillery explicitly if you want to use
# dev mode.
set dev_mode: true
set include_erts: false
set cookie: :"dLBou55w`V!fTOs9ngwjAzNf|S01FIIlsh|EmG|ey3}I}f]rq@&15JTLoAHohVg<"
end
environment :prod do
set include_erts: true
set include_src: false
set cookie: :"O@[jf/PqUVt_CYm,S=r%]XG@*kh0).@JpxEtIQBK`^S.B9(VGbq.s*fNVTxd?Ukq"
end
# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default
release :websocket_chat do
set version: current_version(:websocket_chat)
set applications: [
:runtime_tools
]
end
| 32.962963 | 81 | 0.738764 |
f72c1a2a7f555be2d97163ef19db84cbe522045f | 2,808 | exs | Elixir | mix.exs | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | ["Apache-2.0"] | 30 | 2019-11-03T16:30:13.000Z | 2020-06-23T19:38:53.000Z | mix.exs | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | ["Apache-2.0"] | 16 | 2020-03-13T17:56:16.000Z | 2020-06-11T10:40:02.000Z | mix.exs | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | ["Apache-2.0"] | 3 | 2019-12-05T16:43:15.000Z | 2020-05-11T21:34:44.000Z |
defmodule Twirp.MixProject do
use Mix.Project
@version "0.8.0"
@source_url "https://github.com/keathley/twirp-elixir"
def project do
[
app: :twirp,
version: @version,
elixir: "~> 1.8",
elixirc_paths: elixirc_paths(Mix.env()),
escript: escript(),
start_permanent: Mix.env() == :prod,
consolidate_protocols: Mix.env() != :test,
deps: deps(),
dialyzer: [
plt_add_deps: :apps_direct,
plt_add_apps: [:finch, :hackney],
],
xref: [
exclude: [
Finch,
:hackney,
:hackney_pool,
]
],
description: description(),
package: package(),
aliases: aliases(),
preferred_cli_env: ["test.generation": :test],
name: "Twirp",
source_url: "https://github.com/keathley/twirp",
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
def escript do
[main_module: Twirp.Protoc.CLI, name: "protoc-gen-twirp_elixir", app: nil]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:plug, "~> 1.13"},
{:norm, "~> 0.9"},
{:jason, "~> 1.1"},
{:protobuf, "~> 0.9"},
{:google_protos, "~>0.1"},
{:finch, "~> 0.6", optional: true},
{:hackney, "~> 1.17", optional: true},
{:telemetry, "~> 0.4 or ~> 1.0"},
{:bypass, "~> 2.1", only: [:dev, :test]},
{:credo, "~> 1.1", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
{:ex_doc, "~> 0.19", only: [:dev, :test], runtime: false},
{:plug_cowboy, "~> 2.0", only: [:dev, :test]},
{:mox, "~> 1.0", only: [:test]},
]
end
def aliases do
[
"test.generation": [
"escript.build",
"escript.install --force",
&generate_protos/1,
"test"
]
]
end
defp generate_protos(_) do
result = System.cmd("protoc", [
"--proto_path=./test/support",
"--elixir_out=./test/support",
"--twirp_elixir_out=./test/support",
"./test/support/service.proto",
])
case result do
{_, 0} ->
:ok
{error, code} ->
throw {error, code}
end
end
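  # The function above is roughly equivalent to running:
  #
  #     protoc --proto_path=./test/support \
  #       --elixir_out=./test/support \
  #       --twirp_elixir_out=./test/support \
  #       ./test/support/service.proto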
def description do
"""
Twirp provides an Elixir implementation of the Twirp RPC framework.
"""
end
def package do
[
licenses: ["Apache-2.0"],
links: %{"GitHub" => @source_url}
]
end
def docs do
[
source_ref: "v#{@version}",
source_url: @source_url,
main: "Twirp"
]
end
end
| 22.285714 | 78 | 0.530271 |
f72c2714e52ae185f30f0e621722b3534b82ebfc | 18,580 | ex | Elixir | lib/cforum/threads.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | ["MIT"] | null | null | null | lib/cforum/threads.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | ["MIT"] | null | null | null | lib/cforum/threads.ex | MatthiasApsel/cforum_ex | 52c621a583182d82692b74694b0b2792ac23b8ff | ["MIT"] | null | null | null |
defmodule Cforum.Threads do
@moduledoc """
The boundary for the Threads system.
"""
import Ecto.{Query, Changeset}, warn: false
alias Cforum.Repo
alias Cforum.System
alias Cforum.Users.User
alias Cforum.Threads.Thread
alias Cforum.InvisibleThreads.InvisibleThread
alias Cforum.Threads.ThreadHelpers
alias Cforum.Threads.ThreadCaching
alias Cforum.Messages
alias Cforum.Messages.Message
alias Cforum.Messages.MessageHelpers
@doc """
Returns the list of unarchived threads.
## Examples
iex> list_threads()
[%Thread{}, ...]
"""
def list_threads(forum, visible_forums)
def list_threads(forum, _) when not is_nil(forum), do: list_threads(nil, [forum])
def list_threads(_, forums) do
threads = ThreadCaching.cached_threads()
if forums == nil || forums == [] do
Map.values(threads)
else
forum_ids = Enum.map(forums, & &1.forum_id)
for {_, thread} <- threads, thread.forum_id in forum_ids, do: thread
end
end
@doc """
Rejects deleted message based on a boolean parameter
"""
def reject_deleted_threads(threads, view_all \\ false, reject_with_reason \\ false)
def reject_deleted_threads(threads, true, _), do: threads
def reject_deleted_threads(nil, _, _), do: nil
def reject_deleted_threads(%Thread{} = thread, view_all, reject_with_reason) do
reject_deleted_threads([thread], view_all, reject_with_reason)
|> List.first()
end
def reject_deleted_threads(threads, _, reject_with_reason) do
Enum.reduce(threads, [], fn thread, list ->
messages = Enum.reject(thread.messages, &MessageHelpers.message_deleted?(&1, reject_with_reason))
thread = Map.put(thread, :messages, messages)
[thread | list]
end)
|> Enum.reject(&(&1.messages == []))
|> Enum.reverse()
end
@doc """
Sort the threads ascending, descending or by the newest message
"""
def sort_threads(threads, direction, opts \\ [])
def sort_threads(threads, "ascending", opts) do
Enum.sort(threads, fn a, b ->
cond do
a.sticky == b.sticky && !opts[:ignore_sticky] -> Timex.compare(a.created_at, b.created_at) <= 0
a.sticky && !opts[:ignore_sticky] -> true
b.sticky && !opts[:ignore_sticky] -> false
true -> Timex.compare(a.created_at, b.created_at) <= 0
end
end)
end
def sort_threads(threads, "descending", opts) do
Enum.sort(threads, fn a, b ->
cond do
a.sticky == b.sticky && !opts[:ignore_sticky] -> Timex.compare(a.created_at, b.created_at) >= 0
a.sticky && !opts[:ignore_sticky] -> true
b.sticky && !opts[:ignore_sticky] -> false
true -> Timex.compare(a.created_at, b.created_at) >= 0
end
end)
end
def sort_threads(threads, "newest-first", opts) do
Enum.sort(threads, fn a, b ->
cond do
a.sticky == b.sticky && !opts[:ignore_sticky] -> Timex.compare(a.latest_message, b.latest_message) >= 0
a.sticky && !opts[:ignore_sticky] -> true
b.sticky && !opts[:ignore_sticky] -> false
true -> Timex.compare(a.latest_message, b.latest_message) >= 0
end
end)
end
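  # Example (hypothetical data): for non-sticky threads t1 (older) and t2
  # (newer), sort_threads([t1, t2], "descending") yields [t2, t1]; sticky
  # threads always sort to the front unless `ignore_sticky: true` is passed.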
def apply_user_infos(threads, user, opts \\ [])
def apply_user_infos(%Thread{} = thread, user, opts),
do: Cforum.Messages.IndexHelper.set_user_attributes([thread], user, opts) |> List.first()
def apply_user_infos(threads, user, opts),
do: Cforum.Messages.IndexHelper.set_user_attributes(threads, user, opts)
def apply_highlights(%Thread{} = thread, conn),
do: Cforum.Messages.HighlightsHelper.apply_highlights([thread], conn) |> List.first()
def apply_highlights(threads, conn),
do: Cforum.Messages.HighlightsHelper.apply_highlights(threads, conn)
def build_message_trees(threads, message_order) do
for thread <- threads do
build_message_tree(thread, message_order)
end
end
def build_message_tree(thread, ordering) do
sorted_messages = Messages.sort_messages(thread.messages, ordering)
tree =
sorted_messages
|> Enum.reverse()
|> Enum.reduce(%{}, fn msg, map ->
msg = %Message{msg | messages: Map.get(map, msg.message_id, [])}
Map.update(map, msg.parent_id, [msg], fn msgs -> [msg | msgs] end)
end)
|> Map.get(nil)
|> ensure_found!()
|> hd
thread
|> Map.put(:sorted_messages, sorted_messages)
|> Map.put(:message, tree)
|> Map.put(:tree, tree)
|> Map.put(:accepted, Enum.filter(sorted_messages, &(&1.flags["accepted"] == "yes")))
end
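  # Example (hypothetical thread): for messages m1 (parent_id: nil) and m2
  # (parent_id pointing at m1), the reduce above builds a parent_id => children
  # map, so Map.get(map, nil) returns the root list and hd/1 yields m1 with
  # m2 nested under its :messages key.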
def paged_thread_list(threads, page, limit, use_paging \\ true)
def paged_thread_list(threads, _, _, false), do: threads
def paged_thread_list(threads, page, limit, _) do
{sticky, normal} =
Enum.reduce(threads, {[], []}, fn
%Thread{sticky: true} = thread, {sticky, normal} -> {[thread | sticky], normal}
thread, {sticky, normal} -> {sticky, [thread | normal]}
end)
limit = limit - length(sticky)
Enum.concat(Enum.reverse(sticky), normal |> Enum.reverse() |> Enum.slice(page * limit, limit))
end
def reject_invisible_threads(threads, user, view_all \\ false)
def reject_invisible_threads(threads, _, true), do: threads
def reject_invisible_threads(threads, nil, _), do: threads
def reject_invisible_threads(threads, user, _) do
tids = Enum.map(threads, & &1.thread_id)
invisible =
from(iv in InvisibleThread, select: iv.thread_id, where: iv.user_id == ^user.user_id and iv.thread_id in ^tids)
|> Repo.all()
Enum.reject(threads, &Enum.member?(invisible, &1.thread_id))
end
def reject_read_threads(threads, reject)
def reject_read_threads(threads, false),
do: threads
def reject_read_threads(threads, _),
do: Enum.filter(threads, fn thread -> Enum.any?(thread.messages, &(not &1.attribs[:is_read])) end)
def filter_wo_answer(threads, filter \\ true)
def filter_wo_answer(threads, false), do: threads
def filter_wo_answer(threads, _),
do: Enum.filter(threads, &(length(&1.messages) <= 1 && MessageHelpers.open?(List.first(&1.messages))))
def ensure_found!(threads) when threads == [] or threads == nil, do: raise(Ecto.NoResultsError, queryable: Thread)
def ensure_found!(threads), do: threads
@doc """
Gets a single thread.
Raises `Ecto.NoResultsError` if the Thread does not exist.
## Examples
iex> get_thread!(123)
%Thread{}
iex> get_thread!(456)
** (Ecto.NoResultsError)
"""
def get_thread!(id) do
Thread
|> Repo.get_by!(thread_id: id)
|> Repo.preload(Thread.default_preloads())
|> Repo.preload(
messages:
{from(m in Message, order_by: [asc: fragment("? NULLS FIRST", m.parent_id), desc: m.created_at]),
Message.default_preloads()}
)
end
def get_thread(id) do
Thread
|> Repo.get_by(thread_id: id)
|> Repo.preload(Thread.default_preloads())
|> Repo.preload(
messages:
{from(m in Message, order_by: [asc: fragment("? NULLS FIRST", m.parent_id), desc: m.created_at]),
Message.default_preloads()}
)
end
def get_thread!(forum, visible_forums, id) do
list_threads(forum, visible_forums)
|> Enum.find(&(&1.thread_id == id))
|> case do
nil ->
from(thread in Thread, where: thread.thread_id == ^id, order_by: [desc: :created_at])
|> ThreadHelpers.set_forum_id(visible_forums, forum)
|> Repo.one()
|> Repo.preload(Thread.default_preloads())
|> Repo.preload(
messages:
{from(m in Message, order_by: [asc: fragment("? NULLS FIRST", m.parent_id), desc: m.created_at]),
Message.default_preloads()}
)
thread ->
thread
end
|> case do
nil -> raise Ecto.NoResultsError, queryable: Thread
thread -> thread
end
end
@doc """
Gets a single thread by its slug.
Raises `Ecto.NoResultsError` if the Thread does not exist.
## Examples
iex> get_thread!("2017/08/25/foo-bar")
%Thread{}
iex> get_thread!("2017/08/32/non-existant")
** (Ecto.NoResultsError)
"""
def get_thread_by_slug!(forum, visible_forums, slug) do
threads = ThreadCaching.cached_threads()
thread = get_thread_by_slug_from_cache(threads[slug], forum, visible_forums, slug)
case thread do
nil -> raise Ecto.NoResultsError, queryable: Thread
thread -> thread
end
end
defp get_thread_by_slug_from_cache(nil, forum, visible_forums, slug) do
from(thread in Thread, where: thread.slug == ^slug, order_by: [desc: :created_at])
|> ThreadHelpers.set_forum_id(visible_forums, forum)
|> Repo.one()
|> Repo.preload(Thread.default_preloads())
|> Repo.preload(
messages:
{from(m in Message, order_by: [asc: fragment("? NULLS FIRST", m.parent_id), desc: m.created_at]),
Message.default_preloads()}
)
end
defp get_thread_by_slug_from_cache(thread, nil, visible_forums, _) do
if Enum.find(visible_forums, &(&1.forum_id == thread.forum_id)),
do: thread,
else: nil
end
defp get_thread_by_slug_from_cache(thread, forum, _, _) do
if thread.forum_id == forum.forum_id,
do: thread,
else: nil
end
@doc """
Gets threads by their tid. Tids are historically not unique, so you might get more than one thread.
Raises `Ecto.NoResultsError` if no thread could be found.
## Examples
iex> get_threads_by_tid!(1)
[%Thread{}]
iex> get_thread!(-1)
** (Ecto.NoResultsError)
"""
def get_threads_by_tid!(tid) do
from(thread in Thread, where: thread.tid == ^tid, order_by: [desc: :created_at])
|> Repo.all()
|> Repo.preload(Thread.default_preloads())
|> Repo.preload(
messages:
{from(m in Message, order_by: [asc: fragment("? NULLS FIRST", m.parent_id), desc: m.created_at]),
Message.default_preloads()}
)
|> case do
[] -> raise Ecto.NoResultsError, queryable: Thread
threads -> threads
end
end
def get_threads_by_message_ids(message_ids) do
from(thread in Thread,
where:
thread.thread_id in fragment(
"SELECT thread_id FROM messages WHERE message_id = ANY(?) AND deleted = false",
^message_ids
),
order_by: [desc: :created_at]
)
|> Repo.all()
|> Repo.preload(Thread.default_preloads())
|> Repo.preload(
messages:
{from(m in Message, order_by: [asc: fragment("? NULLS FIRST", m.parent_id), desc: m.created_at]),
Message.default_preloads()}
)
end
@doc """
Creates a thread.
## Examples
iex> create_thread(%{field: value})
{:ok, %Thread{}}
iex> create_thread(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_thread(attrs, user, forum, visible_forums, opts \\ [create_tags: false]) do
retval =
Repo.transaction(fn ->
retval =
%Thread{latest_message: DateTime.truncate(Timex.now(), :second)}
|> Thread.changeset(attrs, forum, visible_forums)
|> Repo.insert()
case retval do
{:ok, thread} ->
create_message(attrs, user, visible_forums, thread, Keyword.merge(opts, notify: false))
{:error, t_changeset} ->
thread = Ecto.Changeset.apply_changes(t_changeset)
# we need a changeset with an action; since thread_id is empty this always fails
create_message(attrs, user, visible_forums, thread)
end
end)
with {:ok, {:ok, thread, message}} <- retval do
{:ok, Repo.preload(thread, [:forum]), message}
end
|> maybe_notify_users()
|> ThreadCaching.refresh_cached_thread()
end
defp create_message(attrs, user, visible_forums, thread, opts \\ [create_tags: false]) do
case Messages.create_message(attrs, user, visible_forums, thread, nil, Keyword.put(opts, :notify, false)) do
{:ok, message} ->
{:ok, thread, message}
{:error, changeset} ->
Repo.rollback(changeset)
end
end
def maybe_notify_users({:ok, thread, message}) do
Cforum.Jobs.NotifyUsersMessageJob.enqueue(thread, message, "thread")
{:ok, thread, message}
end
def maybe_notify_users(val), do: val
@doc """
Updates a thread.
## Examples
iex> update_thread(thread, %{field: new_value})
{:ok, %Thread{}}
iex> update_thread(thread, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_thread(%Thread{} = thread, attrs, forum, visible_forums) do
thread
|> thread_changeset(attrs, forum, visible_forums)
|> Repo.update()
|> ThreadCaching.refresh_cached_thread()
end
def move_thread(%User{} = user, %Thread{} = thread, forum_id, visible_forums, url_generator) do
forum = Cforum.Forums.get_forum!(forum_id)
thread
|> change_thread(forum, visible_forums)
|> do_move_thread(user, forum, url_generator)
end
defp do_move_thread(%Ecto.Changeset{valid?: false} = changeset, _user, _forum, _url_generator), do: changeset
defp do_move_thread(%Ecto.Changeset{valid?: true, data: thread}, user, forum, url_generator) do
System.audited("move", user, fn ->
from(thr in Thread, where: thr.thread_id == ^thread.thread_id)
|> Repo.update_all(set: [forum_id: forum.forum_id])
from(m in Message, where: m.thread_id == ^thread.thread_id)
|> Repo.update_all(set: [forum_id: forum.forum_id])
Enum.each(thread.messages, fn msg ->
[old_url, new_url] = url_generator.(forum, thread, msg)
from(r in Cforum.System.Redirection, where: r.path == ^new_url)
|> Repo.delete_all()
%Cforum.System.Redirection{}
|> Ecto.Changeset.change(%{path: old_url, destination: new_url, http_status: 301})
|> Repo.insert!()
end)
thread = get_thread!(thread.thread_id)
{:ok, thread}
end)
|> ThreadCaching.refresh_cached_thread()
end
def split_thread(user, thread, message, attrs, visible_forums, url_generator, opts \\ []) do
ret =
System.audited("split", user, fn ->
message
|> Messages.update_message(attrs, user, visible_forums, opts)
|> split_thread(thread, message, visible_forums, url_generator)
end)
with {:ok, new_thread} <- ret do
ThreadCaching.refresh_cached_thread(thread)
ThreadCaching.refresh_cached_thread(new_thread)
refreshed_thread = get_thread!(new_thread.forum, visible_forums, new_thread.thread_id)
message = Messages.get_message_from_mid!(refreshed_thread, message.message_id)
{:ok, refreshed_thread, message}
end
end
defp split_thread({:ok, new_message}, thread, message, visible_forums, url_generator) do
ret =
%Thread{latest_message: DateTime.truncate(Timex.now(), :second)}
|> Thread.changeset(%{subject: new_message.subject, forum_id: new_message.forum_id}, nil, visible_forums)
|> Repo.insert()
with {:ok, new_thread} <- ret do
new_thread = Repo.preload(new_thread, [:forum])
from(m in Message, where: m.message_id == ^message.message_id)
|> Repo.update_all(set: [parent_id: nil])
message_ids =
message
|> messages_ids_of_children()
|> List.flatten()
from(m in Message, where: m.message_id in ^message_ids)
|> Repo.update_all(set: [thread_id: new_thread.thread_id, forum_id: new_message.forum_id])
new_thread = Repo.preload(new_thread, [:messages])
Enum.each(new_thread.messages, fn msg ->
[old_url, new_url] = url_generator.(thread, new_thread, msg)
from(r in Cforum.System.Redirection, where: r.path == ^new_url)
|> Repo.delete_all()
%Cforum.System.Redirection{}
|> Ecto.Changeset.change(%{path: old_url, destination: new_url, http_status: 301})
|> Repo.insert!()
end)
{:ok, new_thread}
end
end
defp split_thread(val, _, _, _, _), do: val
defp messages_ids_of_children(message),
do: [message.message_id | Enum.map(message.messages, &messages_ids_of_children/1)]
def mark_thread_sticky(%User{} = user, %Thread{} = thread) do
System.audited("sticky", user, fn ->
thread
|> Ecto.Changeset.change(sticky: true)
|> Repo.update()
end)
|> ThreadCaching.refresh_cached_thread()
end
def mark_thread_unsticky(%User{} = user, %Thread{} = thread) do
System.audited("unsticky", user, fn ->
thread
|> Ecto.Changeset.change(sticky: false)
|> Repo.update()
end)
|> ThreadCaching.refresh_cached_thread()
end
@doc """
Deletes a Thread.
## Examples
iex> delete_thread(thread)
{:ok, %Thread{}}
iex> delete_thread(thread)
{:error, %Ecto.Changeset{}}
"""
def delete_thread(%Thread{} = thread) do
Repo.delete(thread)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking thread changes.
## Examples
iex> change_thread(thread)
%Ecto.Changeset{source: %Thread{}}
"""
def change_thread(%Thread{} = thread, forum \\ nil, visible_forums \\ []) do
thread_changeset(thread, %{}, forum, visible_forums)
end
def preview_thread(attrs, user, forum, visible_forums) do
changeset =
%Thread{created_at: Timex.now()}
|> Thread.changeset(attrs, forum, visible_forums)
thread = Ecto.Changeset.apply_changes(changeset)
{message, msg_changeset} = Messages.preview_message(attrs, user, visible_forums, thread)
forum = Enum.find(visible_forums, &(&1.forum_id == message.forum_id))
thread = %Thread{
thread
| forum: forum,
forum_id: message.forum_id,
messages: [message],
message: message
}
{thread, message, msg_changeset}
end
defp thread_changeset(%Thread{} = thread, attrs, forum, visible_forums) do
Thread.changeset(thread, attrs, forum, visible_forums)
end
def flag_thread(thread, flag, value) do
flags = Map.put(thread.flags, flag, value)
thread
|> Ecto.Changeset.change(flags: flags)
|> Repo.update()
end
def unflag_thread(thread, flag) do
flags = Map.delete(thread.flags, flag)
thread
|> Ecto.Changeset.change(flags: flags)
|> Repo.update()
end
def flag_thread_no_archive(user, thread) do
System.audited("no-archive-yes", user, fn ->
flag_thread(thread, "no-archive", "yes")
end)
|> ThreadCaching.refresh_cached_thread()
end
def flag_thread_archive(user, thread) do
System.audited("no-archive-no", user, fn ->
unflag_thread(thread, "no-archive")
end)
|> ThreadCaching.refresh_cached_thread()
end
end
| 29.775641 | 117 | 0.650538 |
f72c33b7e8d6cd47e99aa262d41c851b184a09c7 | 2,541 | ex | Elixir | lib/hexerss/feed.ex | Awlexus/hexerss | 88af54129f968435483008c276a2fec6a7489139 | ["MIT"] | 2 | 2022-02-09T08:37:53.000Z | 2022-02-09T09:24:15.000Z | lib/hexerss/feed.ex | Awlexus/hexerss | 88af54129f968435483008c276a2fec6a7489139 | ["MIT"] | 2 | 2022-02-09T08:43:51.000Z | 2022-02-09T12:19:48.000Z | lib/hexerss/feed.ex | Awlexus/hexerss | 88af54129f968435483008c276a2fec6a7489139 | ["MIT"] | null | null | null |
defmodule Hexerss.Feed do
alias Hexerss.Hexpm
alias Hexerss.Feed.Item
defstruct [:title, :link, :description, :items]
@type t :: %__MODULE__{
title: String.t(),
link: String.t(),
items: [Item.t()]
}
@callback content_type() :: String.t()
  @callback build_feed(t()) :: iodata()
  @spec build([String.t()], ([String.t()] -> String.t()), keyword) :: {:ok, t} | {:error, :empty_feed}
def build(package_list, link_fun, opts \\ []) when is_function(link_fun, 1) do
count =
opts
|> Keyword.get(:count, 20)
|> min(50)
|> max(5)
stream =
Task.async_stream(package_list, &Hexpm.fetch_package/1, timeout: 30_000, on_timeout: :exit)
packages = for {:ok, {:ok, package}} <- stream, do: package
if packages == [] do
{:error, :empty_feed}
else
{:ok,
%__MODULE__{
title: title(packages),
link: link_fun.(Enum.map(packages, & &1.name)),
description: description(packages),
items: build_items(packages, count)
}}
end
end
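  # Hypothetical usage (the package names and link function are examples):
  #
  #     Hexerss.Feed.build(["ecto", "phoenix"], &Enum.join(&1, ","), count: 10)
  #     #=> {:ok, %Hexerss.Feed{...}}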
def extract_package({:ok, {:ok, package}}), do: package
def extract_package(_), do: nil
defp build_items(packages, count) do
map = Map.new(packages, &{&1.name, &1})
items =
for package <- packages,
{release, index} <- Enum.with_index(package.releases) do
%{
package: package.name,
docs: release.docs,
version: release.version,
timestamp: release.inserted_at,
local_index: index
}
end
items
|> Enum.sort_by(& &1.timestamp, :desc)
|> Enum.take(count)
|> Enum.map(&Item.build(map[&1.package], &1))
end
defp title([package]), do: "Hex releases for #{package.name}"
defp title(packages) when length(packages) < 5 do
package_names =
packages
|> Enum.map(& &1.name)
|> humanized_join()
"Hex releases for " <> package_names
end
  defp title(_packages), do: "Hex releases for several packages"
defp description([package]), do: package.description
defp description(packages) do
names =
packages
|> Enum.map(& &1.name)
|> Enum.sort()
|> humanized_join()
["Feed for the packages: ", names]
end
defp humanized_join([item]), do: item
defp humanized_join([a, b]), do: "#{a} and #{b}"
defp humanized_join(list) do
[head | tail] = :lists.reverse(list)
joined =
tail
|> :lists.reverse()
|> Enum.intersperse(", ")
[joined, " and ", head]
end
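  # Example: humanized_join(["ecto", "plug", "phoenix"]) returns iodata that
  # renders as "ecto, plug and phoenix".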
end
| 24.2 | 97 | 0.578906 |
f72c35ba2272554be42371179a55c6a630a1211d | 2,022 | exs | Elixir | apps/forklift/test/unit/integration/init_server_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | apps/forklift/test/unit/integration/init_server_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | apps/forklift/test/unit/integration/init_server_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | ["Apache-2.0"] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z |
defmodule Forklift.Integration.InitServerTest do
use ExUnit.Case
use Placebo
import Mox
alias SmartCity.TestDataGenerator, as: TDG
@instance_name Forklift.instance_name()
setup :set_mox_global
setup :verify_on_exit!
setup_all do
Application.put_env(:forklift, :output_topic, "test-topic")
on_exit(fn -> Application.delete_env(:forklift, :output_topic) end)
end
test "starts a dataset topic reader for each dataset view state" do
test = self()
dataset1 = TDG.create_dataset(%{id: "view-state-1"})
dataset2 = TDG.create_dataset(%{id: "view-state-2"})
allow Brook.get_all_values!(@instance_name, :datasets), return: [dataset1, dataset2]
stub(MockTopic, :init, fn _ -> :ok end)
stub(MockReader, :init, fn args -> send(test, args[:dataset]) && :ok end)
assert {:ok, _} = Forklift.InitServer.start_link(name: :foo)
assert_receive %SmartCity.Dataset{id: "view-state-1"}, 1000
assert_receive %SmartCity.Dataset{id: "view-state-2"}, 1000
end
test "initializes output_topic TopicWriter" do
test = self()
allow Brook.get_all_values!(@instance_name, :datasets), return: []
stub(MockReader, :init, fn _ -> :ok end)
stub(MockTopic, :init, fn args -> send(test, args[:topic]) && :ok end)
assert {:ok, _} = Forklift.InitServer.start_link(name: :bar)
assert_receive "test-topic"
end
test "re-initializes if Pipeline.DynamicSupervisor crashes" do
test = self()
dataset1 = TDG.create_dataset(%{id: "restart-1"})
dataset2 = TDG.create_dataset(%{id: "restart-2"})
allow Brook.get_all_values!(@instance_name, :datasets), return: [dataset1, dataset2]
stub(MockTopic, :init, fn _ -> :ok end)
expect(MockReader, :init, 2, fn _ -> :ok end)
expect(MockReader, :init, 2, fn args -> send(test, args[:dataset]) && :ok end)
Forklift.InitServer.start_link(name: :baz)
DynamicSupervisor.stop(Pipeline.DynamicSupervisor, :test)
    # pin the bindings so the assertions match the exact datasets sent on re-init
    assert_receive ^dataset1, 1_000
    assert_receive ^dataset2, 1_000
end
end
| 32.612903 | 88 | 0.692878 |
f72c49cc9eb666622d977c3f27e229ebab30db85 | 66 | exs | Elixir | test/test_helper.exs | PetrNikolas/blazeee | 0d97a39b53d19951ccd946b2c4949b054e1487cd | ["MIT"] | 17 | 2018-09-18T23:35:26.000Z | 2021-12-05T08:03:23.000Z | test/test_helper.exs | PetrNikolas/blazeee | 0d97a39b53d19951ccd946b2c4949b054e1487cd | ["MIT"] | null | null | null | test/test_helper.exs | PetrNikolas/blazeee | 0d97a39b53d19951ccd946b2c4949b054e1487cd | ["MIT"] | 2 | 2019-07-08T20:43:13.000Z | 2020-03-04T19:11:39.000Z |
ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Api.Repo, :manual)
| 16.5 | 49 | 0.757576 |
f72c7a42a92a73292911e8b7d6e7fb339dfb81a2 | 26 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_call_operation/no_parentheses_many_arguments_strict_parsing_test_case/DecimalWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | ["Apache-2.0"] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_call_operation/no_parentheses_many_arguments_strict_parsing_test_case/DecimalWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | ["Apache-2.0"] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_call_operation/no_parentheses_many_arguments_strict_parsing_test_case/DecimalWholeNumber.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | ["Apache-2.0"] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z |
1 positional,
key: value
| 13 | 13 | 0.730769 |
f72ca6fb86f8e911ef774dba6f3d185fbe718b97 | 12,131 | ex | Elixir | lib/seren/player/player.ex | allen-garvey/seren | f61cb7edcd7d3f927d2929db14b2a4a1578a3925 | ["MIT"] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/seren/lib/seren/player/player.ex | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | ["MIT"] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | lib/seren/player/player.ex | allen-garvey/seren | f61cb7edcd7d3f927d2929db14b2a4a1578a3925 | ["MIT"] | null | null | null |
defmodule Seren.Player do
@moduledoc """
The Player context.
"""
import Ecto.Query, warn: false
alias Seren.Repo
alias Seren.Player.Track
@doc """
Returns the list of tracks.
## Examples
iex> list_tracks()
[%Track{}, ...]
"""
def list_tracks do
Repo.all(Track)
end
def list_tracks(limit) do
from(t in Track, join: artist in assoc(t, :artist), left_join: album in assoc(t, :album), limit: ^limit, order_by: [artist.name, album.title, :album_disc_number, :track_number, :title])
|> Repo.all
end
def list_tracks(limit, offset) do
from(t in Track, join: artist in assoc(t, :artist), left_join: album in assoc(t, :album), limit: ^limit, offset: ^offset, order_by: [artist.name, album.title, :album_disc_number, :track_number, :title])
|> Repo.all
end
@doc """
Returns list of tracks for various models
"""
def tracks_for_artist(id) do
from(t in Track, where: t.artist_id == ^id, left_join: album in assoc(t, :album), order_by: [album.title, :album_disc_number, :track_number, :title])
|> Repo.all
end
def tracks_for_genre(id) do
from(t in Track, join: artist in assoc(t, :artist), left_join: album in assoc(t, :album), where: t.genre_id == ^id, order_by: [artist.name, album.title, :album_disc_number, :track_number, :title])
|> Repo.all
end
def tracks_for_composer(id) do
from(t in Track, join: artist in assoc(t, :artist), left_join: album in assoc(t, :album), where: t.composer_id == ^id, order_by: [album.title, :album_disc_number, :track_number, artist.name, :title])
|> Repo.all
end
def tracks_for_album(id) do
from(t in Track, join: artist in assoc(t, :artist), where: t.album_id == ^id, order_by: [:album_disc_number, :track_number, artist.name, :title])
|> Repo.all
end
@doc """
Returns list of tracks for search query
"""
def tracks_for_search(query, limit) do
like_query = "%#{String.replace(query, "%", "\\%") |> String.replace("_", "\\_")}%"
from(t in Track, join: artist in assoc(t, :artist), left_join: album in assoc(t, :album), left_join: c in assoc(t, :composer), where: ilike(t.title, ^like_query) or ilike(artist.name, ^like_query) or ilike(album.title, ^like_query) or ilike(c.name, ^like_query), order_by: [artist.name, album.title, :album_disc_number, :track_number, :title], limit: ^limit)
|> Repo.all
end
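  # Example (hypothetical input): a search for "50%" is escaped to the ILIKE
  # pattern "%50\%%", so the percent sign matches literally instead of acting
  # as a wildcard.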
@doc """
Gets a single track.
Raises `Ecto.NoResultsError` if the Track does not exist.
## Examples
iex> get_track!(123)
%Track{}
iex> get_track!(456)
** (Ecto.NoResultsError)
"""
def get_track!(id), do: Repo.get!(Track, id)
@doc """
Creates a track.
## Examples
iex> create_track(%{field: value})
{:ok, %Track{}}
iex> create_track(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_track(attrs \\ %{}) do
%Track{}
|> Track.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a track.
## Examples
iex> update_track(track, %{field: new_value})
{:ok, %Track{}}
iex> update_track(track, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_track(%Track{} = track, attrs) do
track
|> Track.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Track.
## Examples
iex> delete_track(track)
{:ok, %Track{}}
iex> delete_track(track)
{:error, %Ecto.Changeset{}}
"""
def delete_track(%Track{} = track) do
Repo.delete(track)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking track changes.
## Examples
iex> change_track(track)
%Ecto.Changeset{source: %Track{}}
"""
def change_track(%Track{} = track) do
Track.changeset(track, %{})
end
alias Seren.Player.Artist
@doc """
Returns the list of artists.
## Examples
iex> list_artists()
[%Artist{}, ...]
"""
def list_artists do
from(Artist, order_by: :name)
|> Repo.all
end
@doc """
Gets a single artist.
Raises `Ecto.NoResultsError` if the Artist does not exist.
## Examples
iex> get_artist!(123)
%Artist{}
iex> get_artist!(456)
** (Ecto.NoResultsError)
"""
def get_artist!(id) do
Repo.get!(Artist, id)
end
@doc """
Creates a artist.
## Examples
iex> create_artist(%{field: value})
{:ok, %Artist{}}
iex> create_artist(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_artist(attrs \\ %{}) do
%Artist{}
|> Artist.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a artist.
## Examples
iex> update_artist(artist, %{field: new_value})
{:ok, %Artist{}}
iex> update_artist(artist, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_artist(%Artist{} = artist, attrs) do
artist
|> Artist.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Artist.
## Examples
iex> delete_artist(artist)
{:ok, %Artist{}}
iex> delete_artist(artist)
{:error, %Ecto.Changeset{}}
"""
def delete_artist(%Artist{} = artist) do
Repo.delete(artist)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking artist changes.
## Examples
iex> change_artist(artist)
%Ecto.Changeset{source: %Artist{}}
"""
def change_artist(%Artist{} = artist) do
Artist.changeset(artist, %{})
end
alias Seren.Player.Genre
@doc """
Returns the list of genres.
## Examples
iex> list_genres()
[%Genre{}, ...]
"""
def list_genres do
from(Genre, order_by: :name)
|> Repo.all
end
@doc """
Gets a single genre.
Raises `Ecto.NoResultsError` if the Genre does not exist.
## Examples
iex> get_genre!(123)
%Genre{}
iex> get_genre!(456)
** (Ecto.NoResultsError)
"""
def get_genre!(id), do: Repo.get!(Genre, id)
@doc """
Creates a genre.
## Examples
iex> create_genre(%{field: value})
{:ok, %Genre{}}
iex> create_genre(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_genre(attrs \\ %{}) do
%Genre{}
|> Genre.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a genre.
## Examples
iex> update_genre(genre, %{field: new_value})
{:ok, %Genre{}}
iex> update_genre(genre, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_genre(%Genre{} = genre, attrs) do
genre
|> Genre.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Genre.
## Examples
iex> delete_genre(genre)
{:ok, %Genre{}}
iex> delete_genre(genre)
{:error, %Ecto.Changeset{}}
"""
def delete_genre(%Genre{} = genre) do
Repo.delete(genre)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking genre changes.
## Examples
iex> change_genre(genre)
%Ecto.Changeset{source: %Genre{}}
"""
def change_genre(%Genre{} = genre) do
Genre.changeset(genre, %{})
end
alias Seren.Player.Composer
@doc """
Returns the list of composers.
## Examples
iex> list_composers()
[%Composer{}, ...]
"""
def list_composers do
from(Composer, order_by: :name)
|> Repo.all
end
@doc """
Gets a single composer.
Raises `Ecto.NoResultsError` if the Composer does not exist.
## Examples
iex> get_composer!(123)
%Composer{}
iex> get_composer!(456)
** (Ecto.NoResultsError)
"""
def get_composer!(id), do: Repo.get!(Composer, id)
@doc """
Creates a composer.
## Examples
iex> create_composer(%{field: value})
{:ok, %Composer{}}
iex> create_composer(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_composer(attrs \\ %{}) do
%Composer{}
|> Composer.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a composer.
## Examples
iex> update_composer(composer, %{field: new_value})
{:ok, %Composer{}}
iex> update_composer(composer, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_composer(%Composer{} = composer, attrs) do
composer
|> Composer.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Composer.
## Examples
iex> delete_composer(composer)
{:ok, %Composer{}}
iex> delete_composer(composer)
{:error, %Ecto.Changeset{}}
"""
def delete_composer(%Composer{} = composer) do
Repo.delete(composer)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking composer changes.
## Examples
iex> change_composer(composer)
%Ecto.Changeset{source: %Composer{}}
"""
def change_composer(%Composer{} = composer) do
Composer.changeset(composer, %{})
end
alias Seren.Player.FileType
@doc """
Returns the list of file_types.
## Examples
iex> list_file_types()
[%FileType{}, ...]
"""
def list_file_types do
Repo.all(FileType)
end
@doc """
Gets a single file_type.
Raises `Ecto.NoResultsError` if the File type does not exist.
## Examples
iex> get_file_type!(123)
%FileType{}
iex> get_file_type!(456)
** (Ecto.NoResultsError)
"""
def get_file_type!(id), do: Repo.get!(FileType, id)
@doc """
Creates a file_type.
## Examples
iex> create_file_type(%{field: value})
{:ok, %FileType{}}
iex> create_file_type(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_file_type(attrs \\ %{}) do
%FileType{}
|> FileType.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a file_type.
## Examples
iex> update_file_type(file_type, %{field: new_value})
{:ok, %FileType{}}
iex> update_file_type(file_type, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_file_type(%FileType{} = file_type, attrs) do
file_type
|> FileType.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a FileType.
## Examples
iex> delete_file_type(file_type)
{:ok, %FileType{}}
iex> delete_file_type(file_type)
{:error, %Ecto.Changeset{}}
"""
def delete_file_type(%FileType{} = file_type) do
Repo.delete(file_type)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking file_type changes.
## Examples
iex> change_file_type(file_type)
%Ecto.Changeset{source: %FileType{}}
"""
def change_file_type(%FileType{} = file_type) do
FileType.changeset(file_type, %{})
end
alias Seren.Player.Album
@doc """
Returns the list of albums.
## Examples
iex> list_albums()
[%Album{}, ...]
"""
def list_albums do
Repo.all(Album)
end
@doc """
Gets a single album.
Raises `Ecto.NoResultsError` if the Album does not exist.
## Examples
iex> get_album!(123)
%Album{}
iex> get_album!(456)
** (Ecto.NoResultsError)
"""
def get_album!(id), do: Repo.get!(Album, id)
@doc """
Creates a album.
## Examples
iex> create_album(%{field: value})
{:ok, %Album{}}
iex> create_album(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_album(attrs \\ %{}) do
%Album{}
|> Album.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a album.
## Examples
iex> update_album(album, %{field: new_value})
{:ok, %Album{}}
iex> update_album(album, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_album(%Album{} = album, attrs) do
album
|> Album.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Album.
## Examples
iex> delete_album(album)
{:ok, %Album{}}
iex> delete_album(album)
{:error, %Ecto.Changeset{}}
"""
def delete_album(%Album{} = album) do
Repo.delete(album)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking album changes.
## Examples
iex> change_album(album)
%Ecto.Changeset{source: %Album{}}
"""
def change_album(%Album{} = album) do
Album.changeset(album, %{})
end
end
| 19.19462 | 362 | 0.599456 |
f72d06023bc59f8ffdd63599c6fc79e914a88f88 | 32,244 | ex | Elixir | lib/aws/pinpoint_email.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | ["Apache-2.0"] | null | null | null | lib/aws/pinpoint_email.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | ["Apache-2.0"] | null | null | null | lib/aws/pinpoint_email.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | ["Apache-2.0"] | null | null | null |
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.PinpointEmail do
@moduledoc """
Amazon Pinpoint Email Service
Welcome to the *Amazon Pinpoint Email API Reference*. This guide provides
information about the Amazon Pinpoint Email API (version 1.0), including
supported operations, data types, parameters, and schemas.
[Amazon Pinpoint](https://aws.amazon.com/pinpoint) is an AWS service that
you can use to engage with your customers across multiple messaging
channels. You can use Amazon Pinpoint to send email, SMS text messages,
voice messages, and push notifications. The Amazon Pinpoint Email API
provides programmatic access to options that are unique to the email
channel and supplement the options provided by the Amazon Pinpoint API.
If you're new to Amazon Pinpoint, you might find it helpful to also review
the [Amazon Pinpoint Developer
Guide](https://docs.aws.amazon.com/pinpoint/latest/developerguide/welcome.html).
The *Amazon Pinpoint Developer Guide* provides tutorials, code samples, and
procedures that demonstrate how to use Amazon Pinpoint features
programmatically and how to integrate Amazon Pinpoint functionality into
mobile apps and other types of applications. The guide also provides
information about key topics such as Amazon Pinpoint integration with other
AWS services and the limits that apply to using the service.
The Amazon Pinpoint Email API is available in several AWS Regions and it
provides an endpoint for each of these Regions. For a list of all the
Regions and endpoints where the API is currently available, see [AWS
Service
Endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html#pinpoint_region)
in the *Amazon Web Services General Reference*. To learn more about AWS
Regions, see [Managing AWS
Regions](https://docs.aws.amazon.com/general/latest/gr/rande-manage.html)
in the *Amazon Web Services General Reference*.
In each Region, AWS maintains multiple Availability Zones. These
Availability Zones are physically isolated from each other, but are united
by private, low-latency, high-throughput, and highly redundant network
connections. These Availability Zones enable us to provide very high levels
of availability and redundancy, while also minimizing latency. To learn
more about the number of Availability Zones that are available in each
Region, see [AWS Global
Infrastructure](http://aws.amazon.com/about-aws/global-infrastructure/).
"""
@doc """
Create a configuration set. *Configuration sets* are groups of rules that
you can apply to the emails you send using Amazon Pinpoint. You apply a
configuration set to an email by including a reference to the configuration
set in the headers of the email. When you apply a configuration set to an
email, all of the rules in that configuration set are applied to the email.
"""
def create_configuration_set(client, input, options \\ []) do
path_ = "/v1/email/configuration-sets"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Create an event destination. In Amazon Pinpoint, *events* include message
sends, deliveries, opens, clicks, bounces, and complaints. *Event
destinations* are places that you can send information about these events
to. For example, you can send event data to Amazon SNS to receive
notifications when you receive bounces or complaints, or you can use Amazon
Kinesis Data Firehose to stream data to Amazon S3 for long-term storage.
A single configuration set can include more than one event destination.
"""
def create_configuration_set_event_destination(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Create a new pool of dedicated IP addresses. A pool can include one or more
dedicated IP addresses that are associated with your Amazon Pinpoint
account. You can associate a pool with a configuration set. When you send
an email that uses that configuration set, Amazon Pinpoint sends it using
only the IP addresses in the associated pool.
"""
def create_dedicated_ip_pool(client, input, options \\ []) do
path_ = "/v1/email/dedicated-ip-pools"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Create a new predictive inbox placement test. Predictive inbox placement
tests can help you predict how your messages will be handled by various
email providers around the world. When you perform a predictive inbox
placement test, you provide a sample message that contains the content that
you plan to send to your customers. Amazon Pinpoint then sends that message
to special email addresses spread across several major email providers.
After about 24 hours, the test is complete, and you can use the
`GetDeliverabilityTestReport` operation to view the results of the test.
"""
def create_deliverability_test_report(client, input, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/test"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Verifies an email identity for use with Amazon Pinpoint. In Amazon
Pinpoint, an identity is an email address or domain that you use when you
send email. Before you can use an identity to send email with Amazon
Pinpoint, you first have to verify it. By verifying an address, you
demonstrate that you're the owner of the address, and that you've given
Amazon Pinpoint permission to send email from the address.
When you verify an email address, Amazon Pinpoint sends an email to the
address. Your email address is verified as soon as you follow the link in
the verification email.
When you verify a domain, this operation provides a set of DKIM tokens,
which you can convert into CNAME tokens. You add these CNAME tokens to the
DNS configuration for your domain. Your domain is verified when Amazon
Pinpoint detects these records in the DNS configuration for your domain. It
usually takes around 72 hours to complete the domain verification process.
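
  ## Example

      # Illustrative sketch only: `client` is a previously configured client
      # struct, and the response keys shown are assumptions based on the
      # API's DKIM attributes.
      {:ok, body, _response} =
        create_email_identity(client, %{"EmailIdentity" => "example.com"})
      # body["DkimAttributes"]["Tokens"] should hold the domain's DKIM tokens.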
"""
def create_email_identity(client, input, options \\ []) do
path_ = "/v1/email/identities"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Delete an existing configuration set.
In Amazon Pinpoint, *configuration sets* are groups of rules that you can
apply to the emails you send. You apply a configuration set to an email by
including a reference to the configuration set in the headers of the email.
When you apply a configuration set to an email, all of the rules in that
configuration set are applied to the email.
"""
def delete_configuration_set(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Delete an event destination.
In Amazon Pinpoint, *events* include message sends, deliveries, opens,
clicks, bounces, and complaints. *Event destinations* are places that you
can send information about these events to. For example, you can send event
data to Amazon SNS to receive notifications when you receive bounces or
complaints, or you can use Amazon Kinesis Data Firehose to stream data to
Amazon S3 for long-term storage.
"""
def delete_configuration_set_event_destination(client, configuration_set_name, event_destination_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations/#{URI.encode(event_destination_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Delete a dedicated IP pool.
"""
def delete_dedicated_ip_pool(client, pool_name, input, options \\ []) do
path_ = "/v1/email/dedicated-ip-pools/#{URI.encode(pool_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an email identity that you previously verified for use with Amazon
Pinpoint. An identity can be either an email address or a domain name.
"""
def delete_email_identity(client, email_identity, input, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Obtain information about the email-sending status and capabilities of your
Amazon Pinpoint account in the current AWS Region.
"""
def get_account(client, options \\ []) do
path_ = "/v1/email/account"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve a list of the blacklists that your dedicated IP addresses appear
on.
"""
def get_blacklist_reports(client, blacklist_item_names, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/blacklist-report"
headers = []
query_ = []
query_ = if !is_nil(blacklist_item_names) do
[{"BlacklistItemNames", blacklist_item_names} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Get information about an existing configuration set, including the
dedicated IP pool that it's associated with, whether or not it's enabled
for sending email, and more.
In Amazon Pinpoint, *configuration sets* are groups of rules that you can
apply to the emails you send. You apply a configuration set to an email by
including a reference to the configuration set in the headers of the email.
When you apply a configuration set to an email, all of the rules in that
configuration set are applied to the email.
"""
def get_configuration_set(client, configuration_set_name, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve a list of event destinations that are associated with a
configuration set.
In Amazon Pinpoint, *events* include message sends, deliveries, opens,
clicks, bounces, and complaints. *Event destinations* are places that you
can send information about these events to. For example, you can send event
data to Amazon SNS to receive notifications when you receive bounces or
complaints, or you can use Amazon Kinesis Data Firehose to stream data to
Amazon S3 for long-term storage.
"""
def get_configuration_set_event_destinations(client, configuration_set_name, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Get information about a dedicated IP address, including the name of the
dedicated IP pool that it's associated with, as well as information about the
automatic warm-up process for the address.
"""
def get_dedicated_ip(client, ip, options \\ []) do
path_ = "/v1/email/dedicated-ips/#{URI.encode(ip)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
List the dedicated IP addresses that are associated with your Amazon
Pinpoint account.
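
  ## Example

      # Illustrative pagination sketch: the pool name, page size, and
      # response keys are assumptions, and `client` is a previously
      # configured client struct.
      {:ok, %{"DedicatedIps" => _ips, "NextToken" => token}, _response} =
        get_dedicated_ips(client, nil, 25, "my-pool")

      # Fetch the next page by passing the returned token back in:
      {:ok, _next_page, _response} = get_dedicated_ips(client, token, 25, "my-pool")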
"""
def get_dedicated_ips(client, next_token \\ nil, page_size \\ nil, pool_name \\ nil, options \\ []) do
path_ = "/v1/email/dedicated-ips"
headers = []
query_ = []
query_ = if !is_nil(pool_name) do
[{"PoolName", pool_name} | query_]
else
query_
end
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve information about the status of the Deliverability dashboard for
your Amazon Pinpoint account. When the Deliverability dashboard is enabled,
you gain access to reputation, deliverability, and other metrics for the
domains that you use to send email using Amazon Pinpoint. You also gain the
ability to perform predictive inbox placement tests.
When you use the Deliverability dashboard, you pay a monthly subscription
charge, in addition to any other fees that you accrue by using Amazon
Pinpoint. For more information about the features and cost of a
Deliverability dashboard subscription, see [Amazon Pinpoint
Pricing](http://aws.amazon.com/pinpoint/pricing/).
"""
def get_deliverability_dashboard_options(client, options \\ []) do
path_ = "/v1/email/deliverability-dashboard"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve the results of a predictive inbox placement test.
"""
def get_deliverability_test_report(client, report_id, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/test-reports/#{URI.encode(report_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve all the deliverability data for a specific campaign. This data is
available for a campaign only if the campaign sent email by using a domain
that the Deliverability dashboard is enabled for
(`PutDeliverabilityDashboardOption` operation).
"""
def get_domain_deliverability_campaign(client, campaign_id, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/campaigns/#{URI.encode(campaign_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve inbox placement and engagement rates for the domains that you use
to send email.
"""
def get_domain_statistics_report(client, domain, end_date, start_date, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/statistics-report/#{URI.encode(domain)}"
headers = []
query_ = []
query_ = if !is_nil(start_date) do
[{"StartDate", start_date} | query_]
else
query_
end
query_ = if !is_nil(end_date) do
[{"EndDate", end_date} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides information about a specific identity associated with your Amazon
Pinpoint account, including the identity's verification status, its DKIM
authentication status, and its custom Mail-From settings.
"""
def get_email_identity(client, email_identity, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
List all of the configuration sets associated with your Amazon Pinpoint
account in the current region.
In Amazon Pinpoint, *configuration sets* are groups of rules that you can
apply to the emails you send. You apply a configuration set to an email by
including a reference to the configuration set in the headers of the email.
When you apply a configuration set to an email, all of the rules in that
configuration set are applied to the email.
"""
def list_configuration_sets(client, next_token \\ nil, page_size \\ nil, options \\ []) do
path_ = "/v1/email/configuration-sets"
headers = []
query_ = []
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
List all of the dedicated IP pools that exist in your Amazon Pinpoint
account in the current AWS Region.
"""
def list_dedicated_ip_pools(client, next_token \\ nil, page_size \\ nil, options \\ []) do
path_ = "/v1/email/dedicated-ip-pools"
headers = []
query_ = []
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Show a list of the predictive inbox placement tests that you've performed,
regardless of their statuses. For predictive inbox placement tests that are
complete, you can use the `GetDeliverabilityTestReport` operation to view
the results.
"""
def list_deliverability_test_reports(client, next_token \\ nil, page_size \\ nil, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/test-reports"
headers = []
query_ = []
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve deliverability data for all the campaigns that used a specific
domain to send email during a specified time range. This data is available
for a domain only if you enabled the Deliverability dashboard
(`PutDeliverabilityDashboardOption` operation) for the domain.
"""
def list_domain_deliverability_campaigns(client, subscribed_domain, end_date, next_token \\ nil, page_size \\ nil, start_date, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/domains/#{URI.encode(subscribed_domain)}/campaigns"
headers = []
query_ = []
query_ = if !is_nil(start_date) do
[{"StartDate", start_date} | query_]
else
query_
end
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(end_date) do
[{"EndDate", end_date} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of all of the email identities that are associated with your
Amazon Pinpoint account. An identity can be either an email address or a
domain. This operation returns identities that are verified as well as
those that aren't.
"""
def list_email_identities(client, next_token \\ nil, page_size \\ nil, options \\ []) do
path_ = "/v1/email/identities"
headers = []
query_ = []
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve a list of the tags (keys and values) that are associated with a
specified resource. A *tag* is a label that you optionally define and
associate with a resource in Amazon Pinpoint. Each tag consists of a
required *tag key* and an optional associated *tag value*. A tag key is a
general label that acts as a category for more specific tag values. A tag
value acts as a descriptor within a tag key.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/v1/email/tags"
headers = []
query_ = []
query_ = if !is_nil(resource_arn) do
[{"ResourceArn", resource_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Enable or disable the automatic warm-up feature for dedicated IP addresses.
"""
def put_account_dedicated_ip_warmup_attributes(client, input, options \\ []) do
path_ = "/v1/email/account/dedicated-ips/warmup"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Enable or disable the ability of your account to send email.
"""
def put_account_sending_attributes(client, input, options \\ []) do
path_ = "/v1/email/account/sending"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Associate a configuration set with a dedicated IP pool. You can use
dedicated IP pools to create groups of dedicated IP addresses for sending
specific types of email.
"""
def put_configuration_set_delivery_options(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/delivery-options"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Enable or disable collection of reputation metrics for emails that you send
using a particular configuration set in a specific AWS Region.
"""
def put_configuration_set_reputation_options(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/reputation-options"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Enable or disable email sending for messages that use a particular
configuration set in a specific AWS Region.
"""
def put_configuration_set_sending_options(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/sending"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Specify a custom domain to use for open and click tracking elements in
email that you send using Amazon Pinpoint.
"""
def put_configuration_set_tracking_options(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/tracking-options"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Move a dedicated IP address to an existing dedicated IP pool.
Note: the dedicated IP address that you specify must already exist, and must
be associated with your Amazon Pinpoint account. The dedicated IP pool that
you specify must also already exist; you can create a new pool by using the
`CreateDedicatedIpPool` operation.
"""
def put_dedicated_ip_in_pool(client, ip, input, options \\ []) do
path_ = "/v1/email/dedicated-ips/#{URI.encode(ip)}/pool"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Change the warm-up attributes for a dedicated IP address.
"""
def put_dedicated_ip_warmup_attributes(client, ip, input, options \\ []) do
path_ = "/v1/email/dedicated-ips/#{URI.encode(ip)}/warmup"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Enable or disable the Deliverability dashboard for your Amazon Pinpoint
account. When you enable the Deliverability dashboard, you gain access to
reputation, deliverability, and other metrics for the domains that you use
to send email using Amazon Pinpoint. You also gain the ability to perform
predictive inbox placement tests.
When you use the Deliverability dashboard, you pay a monthly subscription
charge, in addition to any other fees that you accrue by using Amazon
Pinpoint. For more information about the features and cost of a
Deliverability dashboard subscription, see [Amazon Pinpoint
Pricing](http://aws.amazon.com/pinpoint/pricing/).
"""
def put_deliverability_dashboard_option(client, input, options \\ []) do
path_ = "/v1/email/deliverability-dashboard"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Used to enable or disable DKIM authentication for an email identity.
"""
def put_email_identity_dkim_attributes(client, email_identity, input, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}/dkim"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Used to enable or disable feedback forwarding for an identity. This setting
determines what happens when an identity is used to send an email that
results in a bounce or complaint event.
When you enable feedback forwarding, Amazon Pinpoint sends you email
notifications when bounce or complaint events occur. Amazon Pinpoint sends
this notification to the address that you specified in the Return-Path
header of the original email.
When you disable feedback forwarding, Amazon Pinpoint sends notifications
through other mechanisms, such as by notifying an Amazon SNS topic. You're
required to have a method of tracking bounces and complaints. If you
haven't set up another mechanism for receiving bounce or complaint
notifications, Amazon Pinpoint sends an email notification when these
events occur (even if this setting is disabled).
"""
def put_email_identity_feedback_attributes(client, email_identity, input, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}/feedback"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Used to enable or disable the custom Mail-From domain configuration for an
email identity.
"""
def put_email_identity_mail_from_attributes(client, email_identity, input, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}/mail-from"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Sends an email message. You can use the Amazon Pinpoint Email API to send
two types of messages:
  * **Simple** – A standard email message. When you create this type of
    message, you specify the sender, the recipient, and the message body,
    and Amazon Pinpoint assembles the message for you.

  * **Raw** – A raw, MIME-formatted email message. When you send this type
    of email, you have to specify all of the message headers, as well as the
    message body. You can use this message type to send messages that
    contain attachments. The message that you specify has to be a valid MIME
    message.
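
  ## Example

      # Illustrative sketch of a Simple message: `client` is a previously
      # configured client struct, and the nested keys are assumptions based
      # on the API's request shape.
      input = %{
        "FromEmailAddress" => "sender@example.com",
        "Destination" => %{"ToAddresses" => ["recipient@example.com"]},
        "Content" => %{
          "Simple" => %{
            "Subject" => %{"Data" => "Hello from Amazon Pinpoint"},
            "Body" => %{"Text" => %{"Data" => "This is a test message."}}
          }
        }
      }
      {:ok, _body, _response} = send_email(client, input)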
"""
def send_email(client, input, options \\ []) do
path_ = "/v1/email/outbound-emails"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Add one or more tags (keys and values) to a specified resource. A *tag* is
a label that you optionally define and associate with a resource in Amazon
Pinpoint. Tags can help you categorize and manage resources in different
ways, such as by purpose, owner, environment, or other criteria. A resource
can have as many as 50 tags.
Each tag consists of a required *tag key* and an associated *tag value*,
both of which you define. A tag key is a general label that acts as a
category for more specific tag values. A tag value acts as a descriptor
within a tag key.
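
  ## Example

      # Illustrative sketch only: the ARN and tag values are made up, and
      # the key names are assumptions based on the API's request shape.
      input = %{
        "ResourceArn" => "arn:aws:ses:us-east-1:111122223333:configuration-set/my-set",
        "Tags" => [%{"Key" => "environment", "Value" => "production"}]
      }
      {:ok, _body, _response} = tag_resource(client, input)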
"""
def tag_resource(client, input, options \\ []) do
path_ = "/v1/email/tags"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Remove one or more tags (keys and values) from a specified resource.
"""
def untag_resource(client, input, options \\ []) do
path_ = "/v1/email/tags"
headers = []
{query_, input} =
[
{"ResourceArn", "ResourceArn"},
{"TagKeys", "TagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Update the configuration of an event destination for a configuration set.
In Amazon Pinpoint, *events* include message sends, deliveries, opens,
clicks, bounces, and complaints. *Event destinations* are places that you
can send information about these events to. For example, you can send event
data to Amazon SNS to receive notifications when you receive bounces or
complaints, or you can use Amazon Kinesis Data Firehose to stream data to
Amazon S3 for long-term storage.
"""
def update_configuration_set_event_destination(client, configuration_set_name, event_destination_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations/#{URI.encode(event_destination_name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, Poison.Parser.t(), Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "ses"}
host = build_host("email", client)
url = host
|> build_url(path, client)
|> add_query(query)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode_payload(input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(method, url, payload, headers, options, success_status_code)
end
defp perform_request(method, url, payload, headers, options, nil) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, response}
{:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
when status_code == 200 or status_code == 202 or status_code == 204 ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp perform_request(method, url, payload, headers, options, success_status_code) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
{:ok, %{}, response}
{:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, []) do
url
end
defp add_query(url, query) do
querystring = AWS.Util.encode_query(query)
"#{url}?#{querystring}"
end
defp encode_payload(input) do
if input != nil, do: Poison.Encoder.encode(input, %{}), else: ""
end
end
f72d1c0043f4e556c5342d009402d76e5f672590 | 1,519 | exs | Elixir | mix.exs | duderman/elixir-caldav-client | ["Apache-2.0"]
defmodule CalDAVClient.MixProject do
use Mix.Project
def project do
[
app: :caldav_client,
version: "1.0.1",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
description: description(),
package: package(),
deps: deps(),
# Docs
source_url: "https://github.com/software-mansion-labs/elixir-caldav-client",
homepage_url: "https://hexdocs.pm/caldav_client/readme.html",
docs: [
extras: ["README.md", "LICENSE"],
nest_modules_by_prefix: [
CalDAVClient.Tesla,
CalDAVClient.XML
]
]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:tesla, "~> 1.4"},
{:xml_builder, "~> 2.1"},
{:sweet_xml, "~> 0.6.6"},
{:tzdata, "~> 1.1", optional: true},
{:hackney, "~> 1.17", optional: true},
{:ex_parameterized, "~> 1.3", only: :test},
{:ex_doc, "~> 0.23.0", only: :dev, runtime: false}
]
end
defp description() do
"CalDAV client library with support for recurrence expansion, time zones, and ETags."
end
defp package() do
[
maintainers: ["Software Mansion"],
licenses: ["Apache-2.0"],
files: ~w(lib .formatter.exs mix.exs README* LICENSE*),
links: %{"GitHub" => "https://github.com/software-mansion-labs/elixir-caldav-client"}
]
end
end
f72d27feffeafa9bfac79972a780f1de6a3a681b | 2,133 | ex | Elixir | lib/absinthe/validation/prevent_circular_fragments.ex | scrogson/absinthe | ["Unlicense"]
defmodule Absinthe.Validation.PreventCircularFragments do
alias Absinthe.{Language, Traversal}
@moduledoc false
def validate(doc, {_, errors}) do
doc.definitions
|> Enum.filter(fn
%Language.Fragment{} -> true
_ -> false
end)
|> check(errors)
end
# The overall approach here is to create a digraph with an `acyclic`
# constraint. Then we just add the fragments as vectors, and fragment
# spreads are used to create edges. If at any point :digraph returns
# an error we have a cycle! Thank you :digraph for doing the hard part
# :)
# NOTE: `:digraph` is MUTABLE, as it's backed by `:ets`
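  # A minimal sketch of the :digraph trick in isolation (the vertex names
  # are placeholders):
  #
  #     g = :digraph.new([:acyclic])
  #     :digraph.add_vertex(g, "A")
  #     :digraph.add_vertex(g, "B")
  #     :digraph.add_edge(g, "A", "B")
  #     # Closing the loop violates the :acyclic constraint:
  #     # :digraph.add_edge(g, "B", "A") => {:error, {:bad_edge, path}}
  #     :digraph.delete(g)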
def check(fragments, errors) do
graph = :digraph.new([:acyclic])
result = fragments
|> Enum.reduce({errors, graph}, &check_fragment/2)
|> case do
{[], _} -> {:ok, []}
{errors, _} -> {:error, errors}
end
# The table will get deleted when the process exits, but we might
# as well clean up for ourselves explicitly.
:digraph.delete(graph)
result
end
def check([], errors, _), do: errors
def check_fragment(fragment, {errors, graph}) do
_ = :digraph.add_vertex(graph, fragment.name)
Traversal.reduce(fragment, :unused, {errors, graph}, fn
%Language.FragmentSpread{} = spread, traversal, {errors, graph} ->
_ = :digraph.add_vertex(graph, spread.name)
case :digraph.add_edge(graph, fragment.name, spread.name) do
{:error, {:bad_edge, path}} ->
# All just error generation logic
deps = [fragment.name | path]
|> Enum.map(&"`#{&1}'")
|> Enum.join(" => ")
msg = """
Fragment Cycle Error
Fragment `#{fragment.name}' forms a cycle via: (#{deps})
"""
error = %{
message: String.strip(msg),
locations: [%{line: spread.loc.start_line, column: 0}]
}
{:ok, {[error | errors], graph}, traversal}
_ ->
{:ok, {errors, graph}, traversal}
end
_, traversal, acc ->
{:ok, acc, traversal}
end)
end
end
f72d3e7715705df6c0d8d821e269a373d6b8bbc8 | 77 | exs | Elixir | test/test_helper.exs | adam-phillips/letter_lines_live | ["MIT"]
ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(LetterLinesLive.Repo, :manual)
f72d4cef86b9f53d52a8b967af6f85aeb50aae00 | 505 | exs | Elixir | random.exs | hectorip/ErlangExercises | ["MIT"]
# How to get random numbers in Elixir.
# First we need to seed the pseudo-random number generator.
# We use the current timestamp as the seed because it can be treated as a
# unique number in an application that has only one process. If we have more
# processes, each one needs its own unique seed value to avoid the processes
# repeating each other's number sequences.
:random.seed(:os.timestamp)
# Now we can ask for a random number; this returns a float between 0 and 1.
:random.uniform
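
# For comparison, the modern `:rand` module (which supersedes `:random` on
# recent OTP releases) seeds itself automatically on first use:
#
#     :rand.uniform()   # float in [0.0, 1.0)
#     :rand.uniform(6)  # integer between 1 and 6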
f72d55d6d2246b2745d7e324a44ccf6e726d5f83 | 573 | exs | Elixir | test/dbfs_web/views/error_view_test.exs | sunnys/dbfs | ["MIT"]
defmodule DBFS.Web.ErrorViewTest do
use DBFS.Web.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(DBFS.Web.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(DBFS.Web.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(DBFS.Web.ErrorView, "505.html", []) ==
"Internal server error"
end
end
f72d7763b529e2ad87a4b1127910cbc8e6c36c25 | 67 | ex | Elixir | lib/pebl_web/views/pow/registration_view.ex | tomkonidas/pebl | ["MIT"]
defmodule PeblWeb.Pow.RegistrationView do
use PeblWeb, :view
end
f72d7c36fcab35e6148c68fcbbaa9dc71e8eda8e | 2,151 | ex | Elixir | clients/ad_sense/lib/google_api/ad_sense/v14/deserializer.ex | GoNZooo/elixir-google-api | ["Apache-2.0"]
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdSense.V14.Deserializer do
@moduledoc """
Helper functions for deserializing responses into models
"""
@doc """
Update the provided model with a deserialization of a nested value
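
  For example, a generated model module might deserialize a nested list
  field like this (the model and field names are illustrative only):

      deserialize(model, :items, :list, GoogleApi.AdSense.V14.Model.AdUnit, options)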
"""
@spec deserialize(struct(), :atom, :atom, struct(), keyword()) :: struct()
def deserialize(model, _field, :list, nil, _options), do: model
def deserialize(model, field, :list, mod, options) do
model
|> Map.update!(field, &(Poison.Decode.decode(&1, Keyword.merge(options, [as: [struct(mod)]]))))
end
def deserialize(model, field, :struct, mod, options) do
model
|> Map.update!(field, &(Poison.Decode.decode(&1, Keyword.merge(options, [as: struct(mod)]))))
end
def deserialize(model, _field, :map, nil, _options), do: model
def deserialize(model, field, :map, mod, options) do
model
|> Map.update!(field, &(Map.new(&1, fn {key, val} -> {key, Poison.Decode.decode(val, Keyword.merge(options, [as: struct(mod)]))} end)))
end
def deserialize(model, field, :date, _, _options) do
case DateTime.from_iso8601(Map.get(model, field)) do
{:ok, datetime} ->
Map.put(model, field, datetime)
_ ->
model
end
end
def serialize_non_nil(model, options) do
model
|> Map.from_struct
|> Enum.filter(fn {_k, v} -> v != nil end)
|> Enum.into(%{})
|> Poison.Encoder.encode(options)
end
end
f72d86b458fb9b413927971a76043e7363d95072 | 1,574 | exs | Elixir | elixir/test/homework_web/schema/mutation/create_merchant_test.exs | ztoolson/web-homework | ["MIT"]
defmodule HomeworkWeb.Schema.Query.CreateMerchantTest do
use HomeworkWeb.ConnCase, async: true
@query """
mutation CreateMerchant($name: Name!, $description: Description!) {
createMerchant(name: $name, description: $description) {
name
description
}
}
"""
test "createMerchant field creates a merchant sucessfully" do
conn = build_conn()
conn =
post(conn, "/graphiql",
query: @query,
variables: %{
name: "Greater Wynnwood Exotic Animal Park",
description: "Exotic Animal Zoo"
}
)
assert json_response(conn, 200) == %{
"data" => %{
"createMerchant" => %{
"name" => "Greater Wynnwood Exotic Animal Park",
"description" => "Exotic Animal Zoo"
}
}
}
end
@invalid_query """
mutation CreateMerchant($description: Description!) {
createMerchant(description: $description) {
name
description
}
}
"""
test "createMerchant without the required inputs will error" do
conn = build_conn()
conn =
post(conn, "/graphiql",
query: @invalid_query,
variables: %{
description: "Exotic Animal Zoo"
}
)
assert json_response(conn, 200) == %{
"errors" => [
%{
"locations" => [%{"column" => 3, "line" => 2}],
"message" => "In argument \"name\": Expected type \"String!\", found null."
}
]
}
end
end
f72d96926712144a9995ebb3b3b83512d7f98193 | 3,251 | exs | Elixir | integration_test/myxql/storage_test.exs | aptinio/ecto_sql | ["Apache-2.0"]
Code.require_file "../support/file_helpers.exs", __DIR__
defmodule Ecto.Integration.StorageTest do
use ExUnit.Case
@moduletag :capture_log
@base_migration 5_000_000
import Support.FileHelpers
alias Ecto.Integration.{PoolRepo, TestRepo}
def params do
# Pass log false to ensure we can still create/drop.
url = Application.get_env(:ecto_sql, :mysql_test_url) <> "/storage_mgt"
[log: false] ++ Ecto.Repo.Supervisor.parse_url(url)
end
def wrong_params do
Keyword.merge params(),
[username: "randomuser",
password: "password1234"]
end
def drop_database do
run_mysql("DROP DATABASE #{params()[:database]};")
end
def create_database do
run_mysql("CREATE DATABASE #{params()[:database]};")
end
def create_posts do
run_mysql("CREATE TABLE posts (title varchar(20));", ["-D", params()[:database]])
end
def run_mysql(sql, args \\ []) do
args = ["-u", params()[:username], "-e", sql | args]
System.cmd "mysql", args
end
test "storage up (twice in a row)" do
assert Ecto.Adapters.MyXQL.storage_up(params()) == :ok
assert Ecto.Adapters.MyXQL.storage_up(params()) == {:error, :already_up}
after
drop_database()
end
test "storage down (twice in a row)" do
create_database()
assert Ecto.Adapters.MyXQL.storage_down(params()) == :ok
assert Ecto.Adapters.MyXQL.storage_down(params()) == {:error, :already_down}
end
test "storage up and down (wrong credentials)" do
refute Ecto.Adapters.MyXQL.storage_up(wrong_params()) == :ok
create_database()
refute Ecto.Adapters.MyXQL.storage_down(wrong_params()) == :ok
after
drop_database()
end
test "structure dump and load" do
create_database()
create_posts()
# Default path
{:ok, _} = Ecto.Adapters.MyXQL.structure_dump(tmp_path(), params())
dump = File.read!(Path.join(tmp_path(), "structure.sql"))
drop_database()
create_database()
# Load custom
dump_path = Path.join(tmp_path(), "custom.sql")
File.rm(dump_path)
{:error, _} = Ecto.Adapters.MyXQL.structure_load(tmp_path(), [dump_path: dump_path] ++ params())
# Dump custom
{:ok, _} = Ecto.Adapters.MyXQL.structure_dump(tmp_path(), [dump_path: dump_path] ++ params())
assert strip_timestamp(dump) != strip_timestamp(File.read!(dump_path))
# Load original
{:ok, _} = Ecto.Adapters.MyXQL.structure_load(tmp_path(), params())
{:ok, _} = Ecto.Adapters.MyXQL.structure_dump(tmp_path(), [dump_path: dump_path] ++ params())
assert strip_timestamp(dump) == strip_timestamp(File.read!(dump_path))
after
drop_database()
end
defmodule Migration do
use Ecto.Migration
def change, do: :ok
end
test "structure dump and load with migrations table" do
num = @base_migration + System.unique_integer([:positive])
:ok = Ecto.Migrator.up(PoolRepo, num, Migration, log: false)
{:ok, path} = Ecto.Adapters.MyXQL.structure_dump(tmp_path(), TestRepo.config())
contents = File.read!(path)
assert contents =~ "INSERT INTO `schema_migrations` (version) VALUES ("
end
defp strip_timestamp(dump) do
dump
|> String.split("\n")
|> Enum.reject(&String.contains?(&1, "completed on"))
|> Enum.join("\n")
end
end
f72e0234c69a40ab685ebddd81e27394b09956de | 2,542 | ex | Elixir | clients/alert_center/lib/google_api/alert_center/v1beta1/model/device_compromised_security_detail.ex | pojiro/elixir-google-api | ["Apache-2.0"]
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AlertCenter.V1beta1.Model.DeviceCompromisedSecurityDetail do
@moduledoc """
Detailed information of a single MDM device compromised event.
## Attributes
* `deviceCompromisedState` (*type:* `String.t`, *default:* `nil`) - The device compromised state. Possible values are "`Compromised`" or "`Not Compromised`".
* `deviceId` (*type:* `String.t`, *default:* `nil`) - Required. The device ID.
* `deviceModel` (*type:* `String.t`, *default:* `nil`) - The model of the device.
* `deviceType` (*type:* `String.t`, *default:* `nil`) - The type of the device.
* `iosVendorId` (*type:* `String.t`, *default:* `nil`) - Required for iOS, empty for others.
* `resourceId` (*type:* `String.t`, *default:* `nil`) - The device resource ID.
* `serialNumber` (*type:* `String.t`, *default:* `nil`) - The serial number of the device.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:deviceCompromisedState => String.t() | nil,
:deviceId => String.t() | nil,
:deviceModel => String.t() | nil,
:deviceType => String.t() | nil,
:iosVendorId => String.t() | nil,
:resourceId => String.t() | nil,
:serialNumber => String.t() | nil
}
field(:deviceCompromisedState)
field(:deviceId)
field(:deviceModel)
field(:deviceType)
field(:iosVendorId)
field(:resourceId)
field(:serialNumber)
end
defimpl Poison.Decoder, for: GoogleApi.AlertCenter.V1beta1.Model.DeviceCompromisedSecurityDetail do
def decode(value, options) do
GoogleApi.AlertCenter.V1beta1.Model.DeviceCompromisedSecurityDetail.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AlertCenter.V1beta1.Model.DeviceCompromisedSecurityDetail do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
f72e1fede82e708b63598e046da668fac0332ead | 3,585 | exs | Elixir | test/font/bdf/lexer_test.exs | fstiewitz/chisel | ["Apache-2.0"]
defmodule Chisel.Font.BDF.LexerTest do
use ExUnit.Case
alias Chisel.Font.BDF.Lexer
test "scan!/1" do
assert [
{:keyword, "STARTFONT", {1, 10}},
{:value, "2.1", {1, 14}},
{:eol, "\n", {1, 14}},
{:keyword, "COMMENT", {2, 8}},
{:value, "Lorem", {2, 14}},
{:value, "ipsum", {2, 20}},
{:value, "dolor", {2, 26}},
{:value, "set", {2, 30}},
{:eol, "\n", {2, 30}},
{:keyword, "FONT", {3, 5}},
{:value, "simple_font", {3, 17}},
{:eol, "\n", {3, 17}},
{:keyword, "SIZE", {4, 5}},
{:value, "8", {4, 7}},
{:value, "72", {4, 10}},
{:value, "72", {4, 13}},
{:eol, "\n", {4, 13}},
{:eol, "\n", {5, 1}},
{:keyword, "CHARS", {6, 6}},
{:value, "2", {6, 8}},
{:eol, "\n", {6, 8}},
{:keyword, "STARTCHAR", {7, 10}},
{:value, "C1", {7, 13}},
{:eol, "\n", {7, 13}},
{:keyword, "ENCODING", {8, 9}},
{:value, "128", {8, 13}},
{:eol, "\n", {8, 13}},
{:keyword, "DWIDTH", {9, 7}},
{:value, "8", {9, 9}},
{:value, "0", {9, 11}},
{:eol, "\n", {9, 11}},
{:keyword, "BBX", {10, 4}},
{:value, "2", {10, 6}},
{:value, "8", {10, 8}},
{:value, "3", {10, 10}},
{:value, "0", {10, 12}},
{:eol, "\n", {10, 12}},
{:keyword, "BITMAP", {11, 7}},
{:eol, "\n", {11, 7}},
{:value, "FF", {12, 3}},
{:eol, "\n", {12, 3}},
{:value, "FF", {13, 3}},
{:eol, "\n", {13, 3}},
{:value, "FF", {14, 3}},
{:eol, "\n", {14, 3}},
{:value, "FF", {15, 3}},
{:eol, "\n", {15, 3}},
{:value, "FF", {16, 3}},
{:eol, "\n", {16, 3}},
{:value, "FF", {17, 3}},
{:eol, "\n", {17, 3}},
{:value, "FF", {18, 3}},
{:eol, "\n", {18, 3}},
{:value, "FF", {19, 3}},
{:eol, "\n", {19, 3}},
{:keyword, "ENDCHAR", {20, 8}},
{:eol, "\n", {20, 8}},
{:eol, "\n", {21, 1}},
{:keyword, "STARTCHAR", {22, 10}},
{:value, "C2", {22, 13}},
{:eol, "\n", {22, 13}},
{:keyword, "ENCODING", {23, 9}},
{:value, "129", {23, 13}},
{:eol, "\n", {23, 13}},
{:keyword, "DWIDTH", {24, 7}},
{:value, "8", {24, 9}},
{:value, "0", {24, 11}},
{:eol, "\n", {24, 11}},
{:keyword, "BBX", {25, 4}},
{:value, "8", {25, 6}},
{:value, "2", {25, 8}},
{:value, "0", {25, 10}},
{:value, "3", {25, 12}},
{:eol, "\n", {25, 12}},
{:keyword, "BITMAP", {26, 7}},
{:eol, "\n", {26, 7}},
{:value, "FF", {27, 3}},
{:eol, "\n", {27, 3}},
{:value, "FF", {28, 3}},
{:eol, "\n", {28, 3}},
{:keyword, "ENDCHAR", {29, 8}},
{:eol, "\n", {29, 8}},
{:keyword, "ENDFONT", {30, 8}},
{:eol, "\n", {30, 8}}
] ==
file_stream!("test/fixtures/simple.bdf")
|> Lexer.scan!()
|> Enum.to_list()
end
defp file_stream!(filename) do
File.stream!(filename)
end
end
f72e2c2b31d5ed98c2c0b1c4c6da5b4b54da1168 | 1,917 | ex | Elixir | lib/ecto_network/macaddr.ex | kianmeng/ecto_network | ["MIT"]
defmodule EctoNetwork.MACADDR do
@moduledoc ~S"""
Support for using Ecto with :macaddr fields
"""
@behaviour Ecto.Type
def type, do: :macaddr
@doc "Handle embedding format for CIDR records."
def embed_as(_), do: :self
@doc "Handle equality testing for CIDR records."
def equal?(left, right), do: left == right
@doc "Handle casting to Postgrex.MACADDR"
def cast(%Postgrex.MACADDR{} = address), do: {:ok, address}
def cast(address) when is_binary(address) do
with [a, b, c, d, e, f] <- parse_address(address) do
{:ok, %Postgrex.MACADDR{address: {a, b, c, d, e, f}}}
end
end
@doc "Load from the native Ecto representation"
def load(%Postgrex.MACADDR{} = address), do: {:ok, address}
def load(_), do: :error
@doc "Convert to the native Ecto representation"
def dump(%Postgrex.MACADDR{} = address), do: {:ok, address}
def dump(_), do: :error
@doc "Convert from native Ecto representation to a binary"
def decode(%Postgrex.MACADDR{address: {a, b, c, d, e, f}}) do
[a, b, c, d, e, f]
|> Enum.map(&Integer.to_string(&1, 16))
|> Enum.map(&String.pad_leading(&1, 2, "0"))
|> Enum.join(":")
end
defp parse_address(address) do
with split when length(split) == 6 <- String.split(address, ":"),
[_ | _] = parsed <- Enum.reduce_while(split, [], &parse_octet/2) do
Enum.reverse(parsed)
else
_ -> :error
end
end
defp parse_octet(octet, acc) do
with {int, ""} <- Integer.parse(octet, 16) do
{:cont, [int | acc]}
else
_ -> {:halt, :error}
end
end
end
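
# Illustrative round-trip (the address value is made up):
#
#     {:ok, %Postgrex.MACADDR{address: {0, 28, 66, 175, 0, 1}}} =
#       EctoNetwork.MACADDR.cast("00:1C:42:AF:00:01")
#
#     %Postgrex.MACADDR{address: {0, 28, 66, 175, 0, 1}}
#     |> EctoNetwork.MACADDR.decode()
#     #=> "00:1C:42:AF:00:01"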
defimpl String.Chars, for: Postgrex.MACADDR do
def to_string(%Postgrex.MACADDR{} = address), do: EctoNetwork.MACADDR.decode(address)
end
if Code.ensure_loaded?(Phoenix.HTML) do
defimpl Phoenix.HTML.Safe, for: Postgrex.MACADDR do
def to_iodata(%Postgrex.MACADDR{} = address), do: EctoNetwork.MACADDR.decode(address)
end
end
f72e4e221e9946312ed2dd8c49925c2eba5e33d3 | 1,313 | ex | Elixir | lib/weather/cli.ex | adamnbowen/weather | ["MIT"]
defmodule Weather.CLI do
@moduledoc """
Handle the command line parsing and the dispatch to
the various functions that display info about the weather.
"""
def main(argv) do
argv
|> parse_args
|> process
end
@doc """
`argv` can be -h or --help, which returns :help.
Otherwise it is a endpoint to query.
Return a tuple of `{ endpoint }`, or `:help` if help was given.
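
  ## Examples (the endpoint value is illustrative)

      iex> Weather.CLI.parse_args(["-h"])
      :help

      iex> Weather.CLI.parse_args(["KSFO"])
      {"KSFO"}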
"""
def parse_args(argv) do
    parse =
      OptionParser.parse(argv,
        switches: [help: :boolean],
        aliases: [h: :help]
      )

    case parse do
      {[help: true], _, _} -> :help
      {_, [endpoint], _} -> {endpoint}
      _ -> :help
    end
end
def process(:help) do
IO.puts """
usage: weather <endpoint>
"""
System.halt(0)
end
def process({endpoint}) do
Weather.Wunderground.fetch(endpoint)
|> decode_response
end
def decode_response({:ok, body}) do
body
|> get_precip_today_in
|> IO.puts
end
def decode_response({:error, error}) do
description = error["description"]
IO.puts "Error fetching from Wunderground: #{description}"
System.halt(2)
end
def get_precip_today_in(%{"current_observation" => %{"precip_today_in" => precip_today_in}}), do: precip_today_in
end
f72e5e317efade1e20526871a09e27510cf0e90d | 929 | ex | Elixir | lib/samly/auth_router.ex | workpathco/samly | ["MIT"]
defmodule AuthService.DebugPlug do
import Plug.Conn
def init(options) do
# initialize options
options
end
def call(conn, _opts) do
IO.inspect(conn)
conn
end
end
defmodule Samly.AuthRouter do
@moduledoc false
use Plug.Router
import Plug.Conn
import Samly.RouterUtil, only: [check_idp_id: 2, check_target_url: 2]
# plug(AuthService.DebugPlug)
plug(:fetch_session)
plug(Plug.CSRFProtection)
plug(:match)
plug(:check_idp_id)
plug(:check_target_url)
plug(:dispatch)
get "/signin/*idp_id_seg" do
conn |> Samly.AuthHandler.initiate_sso_req()
end
post "/signin/*idp_id_seg" do
conn |> Samly.AuthHandler.send_signin_req()
end
get "/signout/*idp_id_seg" do
conn |> Samly.AuthHandler.initiate_sso_req()
end
post "/signout/*idp_id_seg" do
conn |> Samly.AuthHandler.send_signout_req()
end
match _ do
conn |> send_resp(404, "not_found")
end
end
f72eacea0a9af628bfbd45494df78f2a04ea6810 | 476 | ex | Elixir | lib/survey/calcuators/participation.ex | mpisanko/survey | ["Apache-2.0"]
defmodule Survey.Calculators.Participation do
alias Survey.Calculators.Calculator
@behaviour Calculator
def calculate(%{responses: []}), do: %{type: :participation, result: %{percent: 0, participants: 0, total: 0}}
def calculate(%{responses: rs}) do
total = Enum.count(rs)
participants = rs |> Calculator.submitted |> Enum.count
%{type: :participation, result: %{percent: 100 * participants / total, participants: participants, total: total}}
end
end
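
# The empty-responses clause above is fully determined, so this call returns
# the zeroed result:
#
#     Survey.Calculators.Participation.calculate(%{responses: []})
#     #=> %{type: :participation, result: %{percent: 0, participants: 0, total: 0}}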
f72ef88fe89e543f9fde0b823dfc6486348a63a9 | 1,851 | exs | Elixir | test/smwc_web/controllers/user_registration_controller_test.exs | druu/smwcbot | ["MIT"]
# defmodule SMWCWeb.UserRegistrationControllerTest do
# use SMWCWeb.ConnCase, async: true
# import SMWC.AccountsFixtures
# describe "GET /users/register" do
# test "renders registration page", %{conn: conn} do
# conn = get(conn, Routes.user_registration_path(conn, :new))
# response = html_response(conn, 200)
# assert response =~ "<h1>Register</h1>"
# assert response =~ "Log in</a>"
# assert response =~ "Register</a>"
# end
# test "redirects if already logged in", %{conn: conn} do
# conn = conn |> log_in_user(user_fixture()) |> get(Routes.user_registration_path(conn, :new))
# assert redirected_to(conn) == "/"
# end
# end
# describe "POST /users/register" do
# @tag :capture_log
# test "creates account and logs the user in", %{conn: conn} do
# email = unique_user_email()
# conn =
# post(conn, Routes.user_registration_path(conn, :create), %{
# "user" => valid_user_attributes(email: email)
# })
# assert get_session(conn, :user_token)
# assert redirected_to(conn) == "/"
# # Now do a logged in request and assert on the menu
# conn = get(conn, "/")
# response = html_response(conn, 200)
# assert response =~ email
# assert response =~ "Settings</a>"
# assert response =~ "Log out</a>"
# end
# test "render errors for invalid data", %{conn: conn} do
# conn =
# post(conn, Routes.user_registration_path(conn, :create), %{
# "user" => %{"email" => "with spaces", "password" => "too short"}
# })
# response = html_response(conn, 200)
# assert response =~ "<h1>Register</h1>"
# assert response =~ "must have the @ sign and no spaces"
# assert response =~ "should be at least 12 character"
# end
# end
# end
f72f07fe62c140dc694e720e5bb15e1cb143e1e3 | 2,679 | exs | Elixir | mix.exs | allefgomes/elixir_blog | ["MIT"]
defmodule Blog.MixProject do
use Mix.Project
@github_url "https://github.com/allefgomes/elixir_blog"
def project do
[
app: :blog,
version: "0.1.0",
elixir: "~> 1.11",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
description: "Project to learn phoenix concepts",
source_url: @github_url,
homepage_url: @github_url,
files: ~w[mix.exs lib LICENSE.md README.md CHANGELOG.md],
package: [
maintainers: ["Allef Gomes"],
licenses: ["MIT"],
links: %{
"GitHub" => @github_url
}
],
docs: [
main: "readme",
extras: ["README.md", "CHANGELOG.md"]
],
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
]
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Blog.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.5.9"},
{:phoenix_ecto, "~> 4.1"},
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_dashboard, "~> 0.4"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:sobelow, "~> 0.8", only: [:dev, :test]},
{:excoveralls, "~> 0.10", only: [:dev, :test]}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
]
end
end
f72f19cac06cfa0b93c0d778cd476363ccd2a638 | 1,280 | ex | Elixir | lib/mix/tasks/dump.ex.ex | elixir-desktop/libpe | ["MIT"]
defmodule Mix.Tasks.Pe.Dump do
@moduledoc """
SYNTAX: mix pe.dump (--raw) <filename> (<filename>...)
pe.dump dumps the contents of a PE file.
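
  Example invocations (the file names are hypothetical):

      mix pe.dump myapp.exe
      mix pe.dump --raw a.dll b.dll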
"""
use Mix.Task
@doc false
def run([]) do
show_help()
end
def run(args) do
%{files: files, raw: raw} = process_args(%{files: [], raw: false}, args)
Enum.each(files, fn filename ->
{:ok, pe} = LibPE.parse_file(filename)
title = "Dumping file: #{Path.basename(filename)}"
IO.puts(title)
IO.puts(String.pad_trailing("", String.length(title), "="))
if raw do
IO.inspect(pe)
else
LibPE.get_resources(pe)
|> LibPE.ResourceTable.dump()
end
IO.puts("")
end)
end
defp show_help() do
IO.puts(@moduledoc)
System.halt()
end
defp process_args(opts, []) do
opts
end
defp process_args(opts, ["--raw" | rest]) do
%{opts | raw: true}
|> process_args(rest)
end
defp process_args(_opts, ["--help" | _rest]), do: show_help()
defp process_args(_opts, ["-h" | _rest]), do: show_help()
defp process_args(opts, [arg | rest]) do
if String.starts_with?(arg, "-") do
raise("Unknown option string '#{arg}'")
end
%{opts | files: [arg | opts.files]}
|> process_args(rest)
end
end