hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
73c8d369417767f0988ceea14ccaaeb94fbd98df | 504 | ex | Elixir | lib/peek_code_graphql/schemas/types/customer_type.ex | itorisaias/peek_code | 4f748e45fddc9e817fd4a5ad4d5811451efaa11c | [
"BSD-Source-Code",
"Adobe-Glyph"
] | null | null | null | lib/peek_code_graphql/schemas/types/customer_type.ex | itorisaias/peek_code | 4f748e45fddc9e817fd4a5ad4d5811451efaa11c | [
"BSD-Source-Code",
"Adobe-Glyph"
] | null | null | null | lib/peek_code_graphql/schemas/types/customer_type.ex | itorisaias/peek_code | 4f748e45fddc9e817fd4a5ad4d5811451efaa11c | [
"BSD-Source-Code",
"Adobe-Glyph"
] | 1 | 2021-03-05T00:21:07.000Z | 2021-03-05T00:21:07.000Z | defmodule PeekCodeGraphql.Schema.Types.CustomerType do
use Absinthe.Schema.Notation
use Absinthe.Ecto, repo: PeekCode.Repo
# GraphQL output type for a customer. Plain scalar fields mirror the
# customer record; :orders is resolved through assoc/1, provided by
# `use Absinthe.Ecto, repo: PeekCode.Repo` on the enclosing module.
object :customer_type do
  field :id, :string
  field :email, :string
  field :first_name, :string
  field :last_name, :string
  # Association resolver: loads the customer's orders as :order_type entries.
  field(:orders, list_of(:order_type), resolve: assoc(:orders))
end
# GraphQL input type used when creating/updating a customer.
# Every field is mandatory (non_null); note :id is intentionally absent.
input_object :customer_input_type do
  field :email, non_null(:string)
  field :first_name, non_null(:string)
  field :last_name, non_null(:string)
end
end
| 26.526316 | 65 | 0.724206 |
73c8dda2387d8843179b8dc7754a7833bf9b24d7 | 3,164 | exs | Elixir | test/html_test.exs | hez/excoveralls | a4c1140e3757df4c075f9f5918307b6e973e8d83 | [
"MIT"
] | 716 | 2015-01-04T20:41:01.000Z | 2022-03-29T08:44:09.000Z | test/html_test.exs | hez/excoveralls | a4c1140e3757df4c075f9f5918307b6e973e8d83 | [
"MIT"
] | 255 | 2015-01-03T23:41:58.000Z | 2022-03-19T23:06:06.000Z | test/html_test.exs | hez/excoveralls | a4c1140e3757df4c075f9f5918307b6e973e8d83 | [
"MIT"
] | 217 | 2015-03-31T20:17:58.000Z | 2022-02-20T23:46:41.000Z | defmodule ExCoveralls.HtmlTest do
use ExUnit.Case
import Mock
import ExUnit.CaptureIO
alias ExCoveralls.Html
@file_name "excoveralls.html"
@file_size 20191
@test_output_dir "cover_test/"
@test_template_path "lib/templates/html/htmlcov/"
@content "defmodule Test do\n def test do\n end\nend\n"
@counts [0, 1, nil, nil]
@source_info [%{name: "test/fixtures/test.ex",
source: @content,
coverage: @counts
}]
@stats_result "" <>
"----------------\n" <>
"COV FILE LINES RELEVANT MISSED\n" <>
" 50.0% test/fixtures/test.ex 4 2 1\n" <>
"[TOTAL] 50.0%\n" <>
"----------------\n"
# Per-test setup: computes the expected report path, asserts it does not
# already exist (a stale report would mask failures), and registers cleanup
# that removes the generated file and the output directory after the test.
setup do
  path = Path.expand(@file_name, @test_output_dir)
  # Assert does not exist prior to write
  assert(File.exists?(path) == false)

  on_exit fn ->
    if File.exists?(path) do
      # Ensure removed after test
      File.rm!(path)
      File.rmdir!(@test_output_dir)
    end
  end

  # The report path is merged into each test's context as `report`.
  {:ok, report: path}
end
# Generates the HTML report with :output_dir passed explicitly as an option,
# then checks the printed summary, that the report references the covered
# file, and the report's exact byte size (pins the rendered template output).
test "generate stats information with output_dir parameter", %{report: report} do
  assert capture_io(fn ->
    Html.execute(@source_info, [output_dir: @test_output_dir])
  end) =~ @stats_result

  assert(File.read!(report) =~ "id='test/fixtures/test.ex'")
  %{size: size} = File.stat! report
  assert(size == @file_size)
end
# Same generation path as above, but output_dir/template_path come from the
# mocked ExCoveralls.Settings instead of explicit options; expects identical
# summary output and report size.
test_with_mock "generate stats information", %{report: report}, ExCoveralls.Settings, [],
  [
    get_coverage_options: fn -> %{"output_dir" => @test_output_dir, "template_path" => @test_template_path} end,
    get_file_col_width: fn -> 40 end,
    get_print_summary: fn -> true end,
    get_print_files: fn -> true end
  ] do
  assert capture_io(fn ->
    Html.execute(@source_info)
  end) =~ @stats_result

  assert(File.read!(report) =~ "id='test/fixtures/test.ex'")
  %{size: size} = File.stat! report
  assert(size == @file_size)
end
# Fixture coverage is 50%; with the mocked minimum at 100% the execute call
# must exit with {:shutdown, 1} and print the failure message.
test_with_mock "Exit status code is 1 when actual coverage does not reach the minimum",
  ExCoveralls.Settings, [
    get_coverage_options: fn -> coverage_options(100) end,
    get_file_col_width: fn -> 40 end,
    get_print_summary: fn -> true end,
    get_print_files: fn -> true end
  ] do
  output = capture_io(fn ->
    assert catch_exit(Html.execute(@source_info)) == {:shutdown, 1}
  end)
  assert String.contains?(output, "FAILED: Expected minimum coverage of 100%, got 50%.")
end
# With the minimum (49.9%) just below actual coverage (50%), execution
# completes normally and prints the usual stats summary.
test_with_mock "Exit status code is 0 when actual coverage reaches the minimum",
  ExCoveralls.Settings, [
    get_coverage_options: fn -> coverage_options(49.9) end,
    get_file_col_width: fn -> 40 end,
    get_print_summary: fn -> true end,
    get_print_files: fn -> true end
  ] do
  assert capture_io(fn ->
    Html.execute(@source_info)
  end) =~ @stats_result
end
# Builds the settings map returned by the mocked
# ExCoveralls.Settings.get_coverage_options/0: fixed output/template paths
# plus the per-test minimum coverage threshold.
defp coverage_options(minimum_coverage) do
  %{"output_dir" => @test_output_dir, "template_path" => @test_template_path}
  |> Map.put("minimum_coverage", minimum_coverage)
end
end
| 31.019608 | 116 | 0.609987 |
73c94e6df81af70fc52606909e1e29446754aa99 | 7,464 | ex | Elixir | lib/instructions/io.ex | benfb/elixush | ab0797d4b39f510283d2230d46aa061bc3198bf7 | [
"MIT"
] | 13 | 2016-02-04T01:34:26.000Z | 2021-05-24T08:34:09.000Z | lib/instructions/io.ex | benfb/elixush | ab0797d4b39f510283d2230d46aa061bc3198bf7 | [
"MIT"
] | null | null | null | lib/instructions/io.ex | benfb/elixush | ab0797d4b39f510283d2230d46aa061bc3198bf7 | [
"MIT"
] | null | null | null | defmodule Elixush.Instructions.IO do
@moduledoc "Instructions that operate on the IO stack."
alias Elixir.String
import Elixush.PushState
import Elixush.Util
import Elixush.Globals.Agent
def print_exec(state) do
if Enum.empty?(state[:exec]) do
state
else
top_thing = top_item(:exec, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:exec, state)
)
end
end
end
def print_integer(state) do
if Enum.empty?(state[:integer]) do
state
else
top_thing = top_item(:integer, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:integer, state)
)
end
end
end
def print_float(state) do
if Enum.empty?(state[:float]) do
state
else
top_thing = top_item(:float, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:float, state)
)
end
end
end
def print_code(state) do
if Enum.empty?(state[:code]) do
state
else
top_thing = top_item(:code, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:code, state)
)
end
end
end
def print_boolean(state) do
if Enum.empty?(state[:boolean]) do
state
else
top_thing = top_item(:boolean, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:boolean, state)
)
end
end
end
def print_string(state) do
if Enum.empty?(state[:string]) do
state
else
top_thing = top_item(:string, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:string, state)
)
end
end
end
def print_char(state) do
if Enum.empty?(state[:char]) do
state
else
top_thing = top_item(:char, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:char, state)
)
end
end
end
def print_vector_integer(state) do
if Enum.empty?(state[:vector_integer]) do
state
else
top_thing = top_item(:vector_integer, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:vector_integer, state)
)
end
end
end
def print_vector_float(state) do
if Enum.empty?(state[:vector_float]) do
state
else
top_thing = top_item(:vector_float, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:vector_float, state)
)
end
end
end
def print_vector_boolean(state) do
if Enum.empty?(state[:vector_boolean]) do
state
else
top_thing = top_item(:vector_boolean, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:vector_boolean, state)
)
end
end
end
def print_vector_string(state) do
if Enum.empty?(state[:vector_string]) do
state
else
top_thing = top_item(:vector_string, state)
top_thing_string = Macro.to_string(top_thing)
if get_globals(:max_string_length) < String.length(to_string(stack_ref(:output, 0, state)) <> top_thing_string) do
state
else
stack_assoc(
to_string(stack_ref(:output, 0, state)) <> top_thing_string,
:output,
0,
pop_item(:vector_string, state)
)
end
end
end
def print_newline(state) do
string_length =
:output
|> stack_ref(0, state)
|> to_string
|> (fn(x) -> x <> "\n" end).()
|> String.length
if get_globals(:max_string_length) < string_length do
state
else
:output
|> stack_ref(0, state)
|> to_string
|> (fn(x) -> x <> "\n" end).()
|> stack_assoc(:output, 0, state)
end
end
@doc """
Allows Push to handle inN instructions, e.g. in2, using things from the input
stack. We can tell whether a particular inN instruction is valid if N-1
values are on the input stack. Recognizes vectors, simple literals and
quoted code.
"""
def handle_input_instruction(instr, state) do
n = ~r/in(\d+)/ |> Regex.run(Atom.to_string(instr)) |> Enum.at(1) |> String.to_integer
if n > length(state[:input]) or n < 1 do
raise(ArgumentError, message: "Undefined instruction: #{instr} \nNOTE: Likely not same number of items on input stack as input instructions.")
else
item = stack_ref(:input, n - 1, state)
literal_type = recognize_literal(item)
cond do
is_list(item) && item == [] -> push_item([], :vector_integer, push_item([], :vector_float, push_item([], :vector_string, push_item([], :vector_boolean, state))))
is_tuple(item) -> push_item(item, :exec, state)
true -> push_item(item, literal_type, state)
end
end
end
end
| 29.15625 | 169 | 0.613478 |
73c95b74547a8c86646834450a8885ce317d1afe | 29,927 | ex | Elixir | lib/phoenix_live_view/test/client_proxy.ex | getong/phoenix_live_view | 97d64e036c439b9b84df1699d0773199c562069a | [
"MIT"
] | null | null | null | lib/phoenix_live_view/test/client_proxy.ex | getong/phoenix_live_view | 97d64e036c439b9b84df1699d0773199c562069a | [
"MIT"
] | null | null | null | lib/phoenix_live_view/test/client_proxy.ex | getong/phoenix_live_view | 97d64e036c439b9b84df1699d0773199c562069a | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveViewTest.ClientProxy do
@moduledoc false
use GenServer
defstruct session_token: nil,
static_token: nil,
module: nil,
endpoint: nil,
pid: nil,
proxy: nil,
topic: nil,
ref: nil,
rendered: nil,
children: [],
child_statics: %{},
id: nil,
connect_params: %{},
connect_info: %{}
alias Phoenix.LiveViewTest.{ClientProxy, DOM, Element, View}
@doc """
Encoding used by the Channel serializer.
"""
def encode!(msg), do: msg
@doc """
Starts a client proxy.
## Options
* `:caller` - the required `{ref, pid}` pair identifying the caller.
* `:view` - the required `%Phoenix.LiveViewTest.View{}`
* `:html` - the required string of HTML for the document.
"""
def start_link(opts) do
GenServer.start_link(__MODULE__, opts)
end
def init(opts) do
# Since we are always running in the test client, we will disable
# our own logging and let the client do the job.
Logger.disable(self())
%{
caller: {_, ref} = caller,
html: response_html,
connect_params: connect_params,
connect_info: connect_info,
live_module: module,
endpoint: endpoint,
session: session,
url: url,
test_supervisor: test_supervisor
} = opts
# We can assume there is at least one LiveView
# because the live_module assign was set.
root_html = DOM.parse(response_html)
[{id, session_token, static_token} | _] = DOM.find_live_views(root_html)
root_view = %ClientProxy{
id: id,
ref: ref,
connect_params: connect_params,
connect_info: connect_info,
session_token: session_token,
static_token: static_token,
module: module,
endpoint: endpoint,
child_statics: Map.delete(DOM.find_static_views(root_html), id),
topic: Phoenix.LiveView.Utils.random_id()
}
state = %{
join_ref: 0,
ref: 0,
caller: caller,
views: %{},
ids: %{},
pids: %{},
replies: %{},
root_view: nil,
html: root_html,
session: session,
test_supervisor: test_supervisor,
url: url,
page_title: nil
}
try do
{root_view, rendered} = mount_view(state, root_view, url)
new_state =
state
|> Map.put(:root_view, root_view)
|> put_view(root_view, rendered)
|> detect_added_or_removed_children(root_view, root_html)
send_caller(new_state, {:ok, build_client_view(root_view), DOM.to_html(new_state.html)})
{:ok, new_state}
catch
:throw, {:stop, {:shutdown, reason}, _state} ->
send_caller(state, {:error, reason})
:ignore
:throw, {:stop, reason, _} ->
Process.unlink(elem(caller, 0))
{:stop, reason}
end
end
# Converts the internal ClientProxy struct into the public test View struct
# handed back to the test caller; the proxy triplet carries this process's pid.
defp build_client_view(%ClientProxy{} = proxy) do
  %View{
    id: proxy.id,
    pid: proxy.pid,
    proxy: {proxy.ref, proxy.topic, self()},
    module: proxy.module,
    endpoint: proxy.endpoint
  }
end
# Starts the LiveView channel process for `view` and blocks until it either
# mounts successfully — returning {view_with_pid, initial_rendered_diff} —
# or fails. Redirect/error replies and an early channel crash abort the
# whole proxy via `throw`, which is caught in init/1.
defp mount_view(state, view, url) do
  ref = make_ref()

  case start_supervised_channel(state, view, ref, url) do
    {:ok, pid} ->
      # Monitor so a crash before the join reply doesn't hang the receive.
      mon_ref = Process.monitor(pid)

      receive do
        {^ref, {:ok, %{rendered: rendered}}} ->
          Process.demonitor(mon_ref, [:flush])
          {%{view | pid: pid}, DOM.merge_diff(%{}, rendered)}

        {^ref, {:error, %{live_redirect: opts}}} ->
          throw(stop_redirect(state, view.topic, {:live_redirect, opts}))

        {^ref, {:error, %{redirect: opts}}} ->
          throw(stop_redirect(state, view.topic, {:redirect, opts}))

        {^ref, {:error, reason}} ->
          throw({:stop, reason, state})

        # Channel process died before replying to the join.
        {:DOWN, ^mon_ref, _, _, reason} ->
          throw({:stop, reason, state})
      end

    {:error, reason} ->
      throw({:stop, reason, state})
  end
end
defp start_supervised_channel(state, view, ref, url) do
socket = %Phoenix.Socket{
transport_pid: self(),
serializer: __MODULE__,
channel: view.module,
endpoint: view.endpoint,
private: %{connect_info: Map.put_new(view.connect_info, :session, state.session)},
topic: view.topic,
join_ref: state.join_ref
}
params = %{
"session" => view.session_token,
"static" => view.static_token,
"url" => url,
"params" => Map.put(view.connect_params, "_mounts", 0),
"caller" => state.caller
}
from = {self(), ref}
spec = %{
id: make_ref(),
start: {Phoenix.LiveView.Channel, :start_link, [{view.endpoint, from}]},
restart: :temporary
}
with {:ok, pid} <- Supervisor.start_child(state.test_supervisor, spec) do
send(pid, {Phoenix.Channel, params, from, socket})
{:ok, pid}
end
end
def handle_info({:sync_children, topic, from}, state) do
view = fetch_view_by_topic!(state, topic)
children =
Enum.flat_map(view.children, fn {id, _session} ->
case fetch_view_by_id(state, id) do
{:ok, child} -> [build_client_view(child)]
:error -> []
end
end)
GenServer.reply(from, {:ok, children})
{:noreply, state}
end
def handle_info({:sync_render_element, operation, topic_or_element, from}, state) do
view = fetch_view_by_topic!(state, proxy_topic(topic_or_element))
result = state |> root(view) |> select_node(topic_or_element)
reply =
case {operation, result} do
{:find_element, {:ok, node}} -> {:ok, node}
{:find_element, {:error, _, message}} -> {:raise, ArgumentError.exception(message)}
{:has_element?, {:error, :none, _}} -> {:ok, false}
{:has_element?, _} -> {:ok, true}
end
GenServer.reply(from, reply)
{:noreply, state}
end
def handle_info({:sync_render_event, topic_or_element, type, value, from}, state) do
result =
case topic_or_element do
{topic, event} ->
view = fetch_view_by_topic!(state, topic)
{view, nil, event, stringify(value, & &1)}
%Element{} = element ->
view = fetch_view_by_topic!(state, proxy_topic(element))
root = root(state, view)
with {:ok, node} <- select_node(root, element),
:ok <- maybe_enabled(type, node, element),
{:ok, event} <- maybe_event(type, node, element),
{:ok, extra} <- maybe_values(type, node, element),
{:ok, cid} <- maybe_cid(root, node) do
{view, cid, event, DOM.deep_merge(extra, stringify_type(type, value))}
end
end
case result do
{view, cid, event, values} ->
{type, value} = encode_event_type(type, values)
payload = %{"cid" => cid, "type" => type, "event" => event, "value" => value}
{:noreply, push_with_reply(state, from, view, "event", payload)}
{:patch, topic, path} ->
handle_call({:render_patch, topic, path}, from, state)
{:stop, topic, reason} ->
stop_redirect(state, topic, reason)
{:error, _, message} ->
GenServer.reply(from, {:raise, ArgumentError.exception(message)})
{:noreply, state}
end
end
def handle_info(
%Phoenix.Socket.Message{
event: "redirect",
topic: _topic,
payload: %{to: _to} = opts
},
state
) do
stop_redirect(state, state.root_view.topic, {:redirect, opts})
end
def handle_info(
%Phoenix.Socket.Message{
event: "live_patch",
topic: _topic,
payload: %{to: _to} = opts
},
state
) do
send_patch(state, state.root_view.topic, opts)
{:noreply, state}
end
def handle_info(
%Phoenix.Socket.Message{
event: "live_redirect",
topic: _topic,
payload: %{to: _to} = opts
},
state
) do
stop_redirect(state, state.root_view.topic, {:live_redirect, opts})
end
def handle_info(
%Phoenix.Socket.Message{
event: "diff",
topic: topic,
payload: diff
},
state
) do
{:noreply, merge_rendered(state, topic, diff)}
end
def handle_info(%Phoenix.Socket.Reply{ref: ref} = reply, state) do
case fetch_reply(state, ref) do
{:ok, {_pid, callback}} ->
callback.(reply, drop_reply(state, ref))
:error ->
{:noreply, state}
end
end
def handle_info({:DOWN, _ref, :process, pid, reason}, state) do
case fetch_view_by_pid(state, pid) do
{:ok, _view} ->
{:stop, reason, state}
:error ->
{:noreply, state}
end
end
def handle_info({:socket_close, pid, reason}, state) do
{:ok, view} = fetch_view_by_pid(state, pid)
{:noreply, drop_view_by_id(state, view.id, reason)}
end
def handle_call(:page_title, _from, state) do
{:reply, {:ok, state.page_title}, state}
end
def handle_call({:live_children, topic}, from, state) do
view = fetch_view_by_topic!(state, topic)
:ok = Phoenix.LiveView.Channel.ping(view.pid)
send(self(), {:sync_children, view.topic, from})
{:noreply, state}
end
def handle_call({:render_element, operation, topic_or_element}, from, state) do
topic = proxy_topic(topic_or_element)
%{pid: pid} = fetch_view_by_topic!(state, topic)
:ok = Phoenix.LiveView.Channel.ping(pid)
send(self(), {:sync_render_element, operation, topic_or_element, from})
{:noreply, state}
end
def handle_call({:render_event, topic_or_element, type, value}, from, state) do
topic = proxy_topic(topic_or_element)
%{pid: pid} = fetch_view_by_topic!(state, topic)
:ok = Phoenix.LiveView.Channel.ping(pid)
send(self(), {:sync_render_event, topic_or_element, type, value, from})
{:noreply, state}
end
def handle_call({:render_patch, topic, path}, from, state) do
view = fetch_view_by_topic!(state, topic)
state = push_with_reply(state, from, view, "link", %{"url" => path})
send_patch(state, state.root_view.topic, %{to: path})
{:noreply, state}
end
defp drop_view_by_id(state, id, reason) do
{:ok, view} = fetch_view_by_id(state, id)
push(state, view, "phx_leave", %{})
state =
Enum.reduce(view.children, state, fn {child_id, _child_session}, acc ->
drop_view_by_id(acc, child_id, reason)
end)
flush_replies(
%{
state
| ids: Map.delete(state.ids, view.id),
views: Map.delete(state.views, view.topic),
pids: Map.delete(state.pids, view.pid)
},
view.pid
)
end
# Discards every pending reply callback registered by `pid`; used when a
# view process goes away so its in-flight replies are never dispatched.
defp flush_replies(state, pid) do
  state.replies
  |> Enum.filter(fn {_ref, {owner, _callback}} -> owner == pid end)
  |> Enum.reduce(state, fn {ref, _entry}, acc -> drop_reply(acc, ref) end)
end
# Looks up the {pid, callback} entry pending for message ref; :error if none.
defp fetch_reply(state, ref) do
  Map.fetch(state.replies, ref)
end

# Registers `callback` to run when the channel replies to message `ref`;
# `pid` records the owning view so its replies can be flushed if it dies.
defp put_reply(state, ref, pid, callback) do
  %{state | replies: Map.put(state.replies, ref, {pid, callback})}
end

# Removes the pending reply entry for `ref`.
defp drop_reply(state, ref) do
  %{state | replies: Map.delete(state.replies, ref)}
end
defp put_child(state, %ClientProxy{} = parent, id, session) do
update_in(state.views[parent.topic], fn %ClientProxy{} = parent ->
%ClientProxy{parent | children: [{id, session} | parent.children]}
end)
end
defp drop_child(state, %ClientProxy{} = parent, id, reason) do
update_in(state.views[parent.topic], fn %ClientProxy{} = parent ->
new_children = Enum.reject(parent.children, fn {cid, _session} -> id == cid end)
%ClientProxy{parent | children: new_children}
end)
|> drop_view_by_id(id, reason)
end
defp verify_session(%ClientProxy{} = view) do
Phoenix.LiveView.Static.verify_session(view.endpoint, view.session_token, view.static_token)
end
defp put_view(state, %ClientProxy{pid: pid} = view, rendered) do
{:ok, %{view: module}} = verify_session(view)
new_view = %ClientProxy{view | module: module, proxy: self(), pid: pid, rendered: rendered}
Process.monitor(pid)
maybe_push_events(state, rendered)
patch_view(
%{
state
| views: Map.put(state.views, new_view.topic, new_view),
pids: Map.put(state.pids, pid, new_view.topic),
ids: Map.put(state.ids, new_view.id, new_view.topic)
},
view,
DOM.render_diff(rendered)
)
end
defp patch_view(state, view, child_html) do
case DOM.patch_id(view.id, state.html, child_html) do
{new_html, [_ | _] = deleted_cids} ->
topic = view.topic
%{state | html: new_html}
|> push_with_callback(view, "cids_destroyed", %{"cids" => deleted_cids}, fn _, state ->
{:noreply, update_in(state.views[topic].rendered, &DOM.drop_cids(&1, deleted_cids))}
end)
{new_html, [] = _deleted_cids} ->
%{state | html: new_html}
end
end
defp stop_redirect(%{caller: {pid, _}} = state, topic, {_kind, opts} = reason)
when is_binary(topic) do
send_caller(state, {:redirect, topic, opts})
Process.unlink(pid)
{:stop, {:shutdown, reason}, state}
end
defp fetch_view_by_topic!(state, topic), do: Map.fetch!(state.views, topic)
defp fetch_view_by_topic(state, topic), do: Map.fetch(state.views, topic)
defp fetch_view_by_pid(state, pid) when is_pid(pid) do
with {:ok, topic} <- Map.fetch(state.pids, pid) do
fetch_view_by_topic(state, topic)
end
end
defp fetch_view_by_id(state, id) do
with {:ok, topic} <- Map.fetch(state.ids, id) do
fetch_view_by_topic(state, topic)
end
end
defp render_reply(reply, from, state) do
%{payload: diff, topic: topic} = reply
new_state = merge_rendered(state, topic, diff)
case fetch_view_by_topic(new_state, topic) do
{:ok, view} ->
GenServer.reply(from, {:ok, new_state.html |> DOM.inner_html!(view.id) |> DOM.to_html()})
new_state
:error ->
new_state
end
end
defp merge_rendered(state, topic, %{diff: diff}), do: merge_rendered(state, topic, diff)
defp merge_rendered(%{html: html_before} = state, topic, %{} = diff) do
maybe_push_events(state, diff)
case diff do
%{r: reply} -> send_caller(state, {:reply, reply})
%{} -> state
end
state =
case diff do
%{t: new_title} -> %{state | page_title: new_title}
%{} -> state
end
case fetch_view_by_topic(state, topic) do
{:ok, view} ->
rendered = DOM.merge_diff(view.rendered, diff)
new_view = %ClientProxy{view | rendered: rendered}
%{state | views: Map.update!(state.views, topic, fn _ -> new_view end)}
|> patch_view(new_view, DOM.render_diff(rendered))
|> detect_added_or_removed_children(new_view, html_before)
:error ->
state
end
end
defp detect_added_or_removed_children(state, view, html_before) do
new_state = recursive_detect_added_or_removed_children(state, view, html_before)
{:ok, new_view} = fetch_view_by_topic(new_state, view.topic)
ids_after =
new_state.html
|> DOM.reverse_filter(&DOM.attribute(&1, "data-phx-view"))
|> DOM.all_attributes("id")
|> MapSet.new()
Enum.reduce(new_view.children, new_state, fn {id, _session}, acc ->
if id in ids_after do
acc
else
drop_child(acc, new_view, id, {:shutdown, :left})
end
end)
end
defp recursive_detect_added_or_removed_children(state, view, html_before) do
state.html
|> DOM.inner_html!(view.id)
|> DOM.find_live_views()
|> Enum.reduce(state, fn {id, session, static}, acc ->
case fetch_view_by_id(acc, id) do
{:ok, view} ->
patch_view(acc, view, DOM.inner_html!(html_before, view.id))
:error ->
static = static || Map.get(state.root_view.child_statics, id)
child_view = build_child(view, id: id, session_token: session, static_token: static)
{child_view, rendered} = mount_view(acc, child_view, state.url)
acc
|> put_view(child_view, rendered)
|> put_child(view, id, child_view.session_token)
|> recursive_detect_added_or_removed_children(child_view, acc.html)
end
end)
end
defp send_caller(%{caller: {pid, ref}}, msg) when is_pid(pid) do
send(pid, {ref, msg})
end
defp send_patch(state, topic, %{to: _to} = opts) do
send_caller(state, {:patch, topic, opts})
end
defp push(state, view, event, payload) do
ref = to_string(state.ref + 1)
send(view.pid, %Phoenix.Socket.Message{
join_ref: state.join_ref,
topic: view.topic,
event: event,
payload: payload,
ref: ref
})
%{state | ref: state.ref + 1}
end
defp push_with_reply(state, from, view, event, payload) do
push_with_callback(state, view, event, payload, fn reply, state ->
%{payload: payload, topic: topic} = reply
case payload do
%{live_redirect: %{to: _to} = opts} ->
stop_redirect(state, topic, {:live_redirect, opts})
%{live_patch: %{to: _to} = opts} ->
send_patch(state, topic, opts)
{:noreply, render_reply(reply, from, state)}
%{redirect: %{to: _to} = opts} ->
stop_redirect(state, topic, {:redirect, opts})
%{} ->
{:noreply, render_reply(reply, from, state)}
end
end)
end
defp push_with_callback(state, view, event, payload, callback) do
ref = to_string(state.ref + 1)
state
|> push(view, event, payload)
|> put_reply(ref, view.pid, callback)
end
defp build_child(%ClientProxy{ref: ref, proxy: proxy, endpoint: endpoint}, attrs) do
attrs_with_defaults =
Keyword.merge(attrs,
ref: ref,
proxy: proxy,
endpoint: endpoint,
topic: Phoenix.LiveView.Utils.random_id()
)
struct!(__MODULE__, attrs_with_defaults)
end
## Element helpers
# Form events (:change/:submit) are sent to the channel as type "form" with
# a URL-encoded value; every other event keeps its atom name as a string
# and its value untouched.
defp encode_event_type(type, value) when type in [:change, :submit],
  do: {"form", Plug.Conn.Query.encode(value)}

defp encode_event_type(type, value),
  do: {Atom.to_string(type), value}

# Extracts the channel topic from either a {topic, event} tuple or any
# struct (Element/View) carrying a {ref, topic, pid} proxy triplet.
defp proxy_topic({topic, _}) when is_binary(topic), do: topic
defp proxy_topic(%{proxy: {_ref, topic, _pid}}), do: topic

# Root DOM node of the given view inside the currently held page HTML.
defp root(state, view), do: DOM.by_id!(state.html, view.id)
# Resolves an Element's CSS selector (and optional text filter) against the
# view's root node. Returns {:ok, node} on a unique match, or
# {:error, :none | :many, message} with a human-readable explanation that
# the caller turns into an ArgumentError.
defp select_node(root, %Element{selector: selector, text_filter: nil}) do
  root
  |> DOM.child_nodes()
  |> DOM.maybe_one(selector)
end

defp select_node(root, %Element{selector: selector, text_filter: text_filter}) do
  nodes =
    root
    |> DOM.child_nodes()
    |> DOM.all(selector)

  filtered_nodes = Enum.filter(nodes, &(DOM.to_text(&1) =~ text_filter))

  # Branch on both lists so each error message can say exactly whether the
  # selector or the text filter eliminated the candidates.
  case {nodes, filtered_nodes} do
    {_, [filtered_node]} ->
      {:ok, filtered_node}

    {[], _} ->
      {:error, :none,
       "selector #{inspect(selector)} did not return any element within: \n\n" <>
         DOM.inspect_html(root)}

    {[node], []} ->
      {:error, :none,
       "selector #{inspect(selector)} did not match text filter #{inspect(text_filter)}, " <>
         "got: \n\n#{DOM.inspect_html(node)}"}

    {_, []} ->
      {:error, :none,
       "selector #{inspect(selector)} returned #{length(nodes)} elements " <>
         "but none matched the text filter #{inspect(text_filter)}: \n\n" <>
         DOM.inspect_html(nodes)}

    {_, _} ->
      {:error, :many,
       "selector #{inspect(selector)} returned #{length(nodes)} elements " <>
         "and #{length(filtered_nodes)} of them matched the text filter #{inspect(text_filter)}: \n\n " <>
         DOM.inspect_html(filtered_nodes)}
  end
end

# A bare topic (no Element) targets the view itself, so the root matches.
defp select_node(root, _topic) do
  {:ok, root}
end
# Resolves the component id (cid) a node's phx-target attribute points at.
# Returns {:ok, nil} when the event targets the view itself, {:ok, cid} for
# a component target, or {:error, :invalid, msg} for a malformed target.
defp maybe_cid(_tree, nil) do
  {:ok, nil}
end

defp maybe_cid(tree, node) do
  case DOM.all_attributes(node, "phx-target") do
    [] ->
      {:ok, nil}

    # "#id" form: look the target element up in the tree and read its cid,
    # if it has one.
    ["#" <> _ = target] ->
      with {:ok, target} <- DOM.maybe_one(tree, target, "phx-target") do
        if cid = DOM.component_id(target) do
          {:ok, String.to_integer(cid)}
        else
          {:ok, nil}
        end
      end

    # Otherwise the attribute must parse as a bare integer cid.
    [maybe_integer] ->
      case Integer.parse(maybe_integer) do
        {cid, ""} ->
          {:ok, cid}

        _ ->
          {:error, :invalid,
           "expected phx-target to be either an ID or a CID, got: #{inspect(maybe_integer)}"}
      end
  end
end
defp maybe_event(:hook, node, %Element{event: event} = element) do
true = is_binary(event)
if DOM.attribute(node, "phx-hook") do
if DOM.attribute(node, "id") do
{:ok, event}
else
{:error, :invalid,
"element selected by #{inspect(element.selector)} for phx-hook does not have an ID"}
end
else
{:error, :invalid,
"element selected by #{inspect(element.selector)} does not have phx-hook attribute"}
end
end
defp maybe_event(:click, {"a", _, _} = node, element) do
cond do
event = DOM.attribute(node, "phx-click") ->
{:ok, event}
to = DOM.attribute(node, "href") ->
case DOM.attribute(node, "data-phx-link") do
"patch" ->
{:patch, proxy_topic(element), to}
"redirect" ->
kind = DOM.attribute(node, "data-phx-link-state") || "push"
{:stop, proxy_topic(element), {:live_redirect, %{to: to, kind: String.to_atom(kind)}}}
nil ->
{:stop, proxy_topic(element), {:redirect, %{to: to}}}
end
true ->
{:error, :invalid,
"clicked link selected by #{inspect(element.selector)} does not have phx-click or href attributes"}
end
end
defp maybe_event(type, node, element) when type in [:keyup, :keydown] do
cond do
event = DOM.attribute(node, "phx-#{type}") ->
{:ok, event}
event = DOM.attribute(node, "phx-window-#{type}") ->
{:ok, event}
true ->
{:error, :invalid,
"element selected by #{inspect(element.selector)} does not have " <>
"phx-#{type} or phx-window-#{type} attributes"}
end
end
defp maybe_event(type, node, element) do
if event = DOM.attribute(node, "phx-#{type}") do
{:ok, event}
else
{:error, :invalid,
"element selected by #{inspect(element.selector)} does not have phx-#{type} attribute"}
end
end
defp maybe_enabled(_type, {tag, _, _}, %{form_data: form_data})
when tag != "form" and form_data != nil do
{:error, :invalid,
"a form element was given but the selected node is not a form, got #{inspect(tag)}}"}
end
defp maybe_enabled(type, node, element) do
if DOM.attribute(node, "disabled") do
{:error, :invalid,
"cannot #{type} element #{inspect(element.selector)} because it is disabled"}
else
:ok
end
end
defp maybe_values(:hook, _node, _element), do: {:ok, %{}}
defp maybe_values(type, {tag, _, _} = node, element) when type in [:change, :submit] do
if tag == "form" do
defaults =
node
|> DOM.reverse_filter(fn node ->
DOM.tag(node) in ~w(input textarea select) and is_nil(DOM.attribute(node, "disabled"))
end)
|> Enum.reduce(%{}, &form_defaults/2)
case fill_in_map(Enum.to_list(element.form_data || %{}), "", node, []) do
{:ok, value} -> {:ok, DOM.deep_merge(defaults, value)}
{:error, _, _} = error -> error
end
else
{:error, :invalid, "phx-#{type} is only allowed in forms, got #{inspect(tag)}"}
end
end
defp maybe_values(_type, node, _element) do
{:ok, DOM.all_values(node)}
end
defp maybe_push_events(state, rendered) do
case rendered do
%{e: events} ->
for [name, payload] <- events, do: send_caller(state, {:push_event, name, payload})
:ok
%{} ->
:ok
end
end
# Folds one form control into the accumulated default form params, keyed by
# its name attribute; unnamed controls contribute nothing.
defp form_defaults(node, acc) do
  if name = DOM.attribute(node, "name") do
    form_defaults(node, name, acc)
  else
    acc
  end
end

# Selects: multiple-selects take every selected option; single selects fall
# back to the first option when none is marked selected.
defp form_defaults({"select", _, _} = node, name, acc) do
  options = DOM.filter(node, &(DOM.tag(&1) == "option"))

  all_selected =
    if DOM.attribute(node, "multiple") do
      Enum.filter(options, &DOM.attribute(&1, "selected"))
    else
      List.wrap(Enum.find(options, &DOM.attribute(&1, "selected")) || List.first(options))
    end

  all_selected
  |> Enum.reverse()
  |> Enum.reduce(acc, fn selected, acc ->
    Plug.Conn.Query.decode_pair({name, DOM.attribute(selected, "value")}, acc)
  end)
end

# Textareas: drop a single leading newline from the content (presumably the
# one following the opening tag in rendered HTML — see HTML parsing rules).
defp form_defaults({"textarea", _, [value]}, name, acc) do
  Plug.Conn.Query.decode_pair({name, String.replace_prefix(value, "\n", "")}, acc)
end

# Inputs: only checked radios/checkboxes submit their value; image/submit
# buttons never contribute defaults; all other input types submit value
# (empty string when the attribute is absent).
defp form_defaults({"input", _, _} = node, name, acc) do
  type = DOM.attribute(node, "type") || "text"
  value = DOM.attribute(node, "value") || ""

  cond do
    type in ["radio", "checkbox"] ->
      if DOM.attribute(node, "checked") do
        Plug.Conn.Query.decode_pair({name, value}, acc)
      else
        acc
      end

    type in ["image", "submit"] ->
      acc

    true ->
      Plug.Conn.Query.decode_pair({name, value}, acc)
  end
end
  # Recursively validates user-supplied form data against the form's DOM.
  # `prefix` tracks the nested parameter name built so far (e.g.
  # "user[address]"). Stops at the first {:error, _, _} and returns it;
  # otherwise accumulates the stringified key/value pairs into a map.
  defp fill_in_map([{key, value} | rest], prefix, node, acc) do
    key = to_string(key)

    case fill_in_type(value, fill_in_name(prefix, key), node) do
      {:ok, value} -> fill_in_map(rest, prefix, node, [{key, value} | acc])
      {:error, _, _} = error -> error
    end
  end

  defp fill_in_map([], _prefix, _node, acc) do
    {:ok, Map.new(acc)}
  end
  # Dispatches on the shape of a single form value. Clause order is
  # load-bearing: keyword lists ([{_, _} | _]) must be handled before any
  # other list shape, and structs (%_{}) before bare maps — a struct would
  # otherwise match the %{} clause and be torn apart by Map.to_list/1.
  defp fill_in_type([{_, _} | _] = value, key, node), do: fill_in_map(value, key, node, [])
  defp fill_in_type(%_{} = value, key, node), do: fill_in_value(value, key, node)
  defp fill_in_type(%{} = value, key, node), do: fill_in_map(Map.to_list(value), key, node, [])
  defp fill_in_type(value, key, node), do: fill_in_value(value, key, node)
  # Input kinds whose submitted value is constrained to values present in
  # the DOM (anything outside that set is rejected below).
  @limited ["select", "multiple select", "checkbox", "radio", "hidden"]
  # Input kinds browsers never submit as data, so tests may not set them.
  @forbidden ["submit", "image"]

  # Validates one leaf value against the non-disabled inputs matching its
  # name. Returns {:ok, stringified_value} or {:error, :invalid, message}.
  defp fill_in_value(non_string_value, name, node) do
    value = stringify(non_string_value, &to_string/1)
    # List values submit under the "name[]" convention.
    name = if is_list(value), do: name <> "[]", else: name

    {types, dom_values} =
      node
      |> DOM.filter(fn node ->
        DOM.attribute(node, "name") == name and is_nil(DOM.attribute(node, "disabled"))
      end)
      |> collect_values([], [])

    limited? = Enum.all?(types, &(&1 in @limited))

    cond do
      # Values carrying a :calendar key may expand into per-field
      # sub-inputs (name[year], name[month], ...) when nothing matched.
      calendar_value = calendar_value(types, non_string_value, name, node) ->
        {:ok, calendar_value}

      types == [] ->
        {:error, :invalid,
         "could not find non-disabled input, select or textarea with name #{inspect(name)} within:\n\n" <>
           DOM.inspect_html(DOM.all(node, "[name]"))}

      forbidden_type = Enum.find(types, &(&1 in @forbidden)) ->
        {:error, :invalid,
         "cannot provide value to #{inspect(name)} because #{forbidden_type} inputs are never submitted"}

      # For constrained inputs, every provided value must exist in the DOM.
      forbidden_value = limited? && value |> List.wrap() |> Enum.find(&(&1 not in dom_values)) ->
        {:error, :invalid,
         "value for #{hd(types)} #{inspect(name)} must be one of #{inspect(dom_values)}, " <>
           "got: #{inspect(forbidden_value)}"}

      true ->
        {:ok, value}
    end
  end
  @calendar_fields ~w(year month day hour minute second)a

  # When no input matched the name directly (types == []) and the value is
  # calendar-like (a map/struct with a :calendar key, e.g. Date/DateTime),
  # try to fill the per-field sub-inputs such as name[year], name[month].
  # Fields missing from the value or failing validation are silently
  # skipped; returns nil when nothing could be filled so fill_in_value/3
  # can fall through to its error branches.
  defp calendar_value([], %{calendar: _} = calendar_type, name, node) do
    @calendar_fields
    |> Enum.flat_map(fn field ->
      string_field = Atom.to_string(field)

      with value when not is_nil(value) <- Map.get(calendar_type, field),
           {:ok, string_value} <- fill_in_value(value, name <> "[" <> string_field <> "]", node) do
        [{string_field, string_value}]
      else
        _ -> []
      end
    end)
    |> case do
      [] -> nil
      pairs -> Map.new(pairs)
    end
  end

  defp calendar_value(_, _, _, _) do
    nil
  end
  # Walks the controls matching a name and accumulates {types, dom_values}:
  # `types` tags each control kind, and `dom_values` gathers the candidate
  # values the DOM itself offers (option values; radio/checkbox/hidden
  # values), which fill_in_value/3 uses to validate constrained inputs.
  defp collect_values([{"textarea", _, _} | nodes], types, values) do
    collect_values(nodes, ["textarea" | types], values)
  end

  defp collect_values([{"input", _, _} = node | nodes], types, values) do
    type = DOM.attribute(node, "type") || "text"

    # Only value-constrained input types contribute their DOM value.
    if type in ["radio", "checkbox", "hidden"] do
      value = DOM.attribute(node, "value") || ""
      collect_values(nodes, [type | types], [value | values])
    else
      collect_values(nodes, [type | types], values)
    end
  end

  defp collect_values([{"select", _, _} = node | nodes], types, values) do
    options =
      node
      |> DOM.filter(&(DOM.tag(&1) == "option"))
      |> Enum.map(&(DOM.attribute(&1, "value") || ""))

    if DOM.attribute(node, "multiple") do
      collect_values(nodes, ["multiple select" | types], Enum.reverse(options, values))
    else
      collect_values(nodes, ["select" | types], Enum.reverse(options, values))
    end
  end

  # Any other node kind is skipped.
  defp collect_values([_ | nodes], types, values) do
    collect_values(nodes, types, values)
  end

  defp collect_values([], types, values) do
    {types, Enum.reverse(values)}
  end
defp fill_in_name("", name), do: name
defp fill_in_name(prefix, name), do: prefix <> "[" <> name <> "]"
  # Normalizes event values: hook payloads keep their leaves untouched
  # (identity function), every other event type stringifies its leaves.
  defp stringify_type(:hook, value), do: stringify(value, & &1)
  defp stringify_type(_, value), do: stringify(value, &to_string/1)
defp stringify(%{__struct__: _} = struct, fun),
do: stringify_value(struct, fun)
defp stringify(%{} = params, fun),
do: Enum.into(params, %{}, &stringify_kv(&1, fun))
defp stringify([{_, _} | _] = params, fun),
do: Enum.into(params, %{}, &stringify_kv(&1, fun))
defp stringify(params, fun) when is_list(params),
do: Enum.map(params, &stringify(&1, fun))
defp stringify(other, fun),
do: stringify_value(other, fun)
defp stringify_value(other, fun), do: fun.(other)
defp stringify_kv({k, v}, fun), do: {to_string(k), stringify(v, fun)}
end
| 29.630693 | 108 | 0.599626 |
73c96edb9846f80b3e050190ec3d7de2b666a306 | 2,870 | exs | Elixir | test/playground/document_live/new_test.exs | begedin/philtre | 3fc92c097875942689aad97997a983c8a83b602f | [
"MIT"
] | 2 | 2022-01-28T13:56:13.000Z | 2022-03-20T12:17:54.000Z | test/playground/document_live/new_test.exs | begedin/philtre | 3fc92c097875942689aad97997a983c8a83b602f | [
"MIT"
] | null | null | null | test/playground/document_live/new_test.exs | begedin/philtre | 3fc92c097875942689aad97997a983c8a83b602f | [
"MIT"
] | null | null | null | defmodule Playground.DocumentLive.NewTest do
@moduledoc false
use Playground.ConnCase
import Phoenix.LiveViewTest
alias Editor.Block
alias Playground.Documents
@editor %Editor{
id: "-1",
blocks: [
%Block{id: "1", cells: [%Block.Cell{id: "1-1", text: "Foo", modifiers: []}], type: "h1"},
%Block{id: "2", cells: [%Block.Cell{id: "2-1", text: "Bar", modifiers: []}], type: "p"},
%Block{id: "3", cells: [%Block.Cell{id: "3-1", text: "Baz", modifiers: []}], type: "p"}
]
}
test "creates document", %{conn: conn} do
{:ok, view, _html} = live(conn, "/documents/new")
send(view.pid, {:update, @editor})
assert dom = view |> render() |> Floki.parse_document!()
assert dom |> Floki.find("h1[contenteditable]") |> Floki.text() == "Foo"
assert dom |> Floki.find("p[contenteditable]") |> Floki.text() == "BarBaz"
assert view |> element("form") |> render_submit(%{"filename" => "foo"})
assert {:ok, %Editor{} = editor} = Documents.get_document("foo")
assert %Editor{
blocks: [
%Block{id: "1", cells: [%{id: "1-1", text: "Foo", modifiers: []}], type: "h1"},
%Block{id: "2", cells: [%{id: "2-1", text: "Bar", modifiers: []}], type: "p"},
%Block{id: "3", cells: [%{id: "3-1", text: "Baz", modifiers: []}], type: "p"}
],
clipboard: nil,
selected_blocks: [],
selection: nil
} = editor
Documents.delete_document("foo")
end
test "can select,then copy and paste blocks", %{conn: conn} do
{:ok, view, _html} = live(conn, "/documents/new")
send(view.pid, {:update, @editor})
block_ids = @editor.blocks |> Enum.map(& &1.id) |> Enum.take(2)
view
|> element("[id=editor__selection__#{@editor.id}]")
|> render_hook("select_blocks", %{"block_ids" => block_ids})
assert %{socket: %{assigns: %{editor: %Editor{} = editor}}} = :sys.get_state(view.pid)
assert editor.selected_blocks == block_ids
view
|> element("[id=editor__selection__#{@editor.id}]")
|> render_hook("copy_blocks", %{"block_ids" => block_ids})
assert %{socket: %{assigns: %{editor: %Editor{} = editor}}} = :sys.get_state(view.pid)
assert [
%Block{cells: [%{text: "Foo"}], type: "h1"},
%Block{cells: [%{text: "Bar"}], type: "p"}
] = editor.clipboard
block = Enum.at(@editor.blocks, 0)
view
|> element("[id^=#{block.id}]")
|> render_hook("paste_blocks", %{
"selection" => %{
"start_id" => "1-1",
"end_id" => "1-1",
"start_offset" => 1,
"end_offset" => 1
}
})
assert %{socket: %{assigns: %{editor: %Editor{} = editor}}} = :sys.get_state(view.pid)
assert Enum.count(editor.blocks) == 6
assert Editor.text(editor) == "FFooBarooBarBaz"
end
end
| 31.538462 | 95 | 0.549477 |
73c972b27599b7c7a6c9f37653a478e5865db0c9 | 250 | ex | Elixir | lib/river/frame/window_update.ex | peburrows/river | e8968535d02a86e70a7942a690c8e461fed55913 | [
"MIT"
] | 86 | 2016-08-19T21:59:28.000Z | 2022-01-31T20:14:18.000Z | lib/river/frame/window_update.ex | peburrows/river | e8968535d02a86e70a7942a690c8e461fed55913 | [
"MIT"
] | 7 | 2016-09-27T14:44:16.000Z | 2017-08-08T14:57:45.000Z | lib/river/frame/window_update.ex | peburrows/river | e8968535d02a86e70a7942a690c8e461fed55913 | [
"MIT"
] | 4 | 2016-09-26T10:57:24.000Z | 2018-04-03T14:30:19.000Z | defmodule River.Frame.WindowUpdate do
alias River.Frame
defstruct [:increment]
def decode(%Frame{} = frame, <<_::1, inc::31>>) do
%{frame | payload: %__MODULE__{increment: inc}}
end
def decode(_, _), do: {:error, :invalid_frame}
end
| 20.833333 | 52 | 0.66 |
73c97b88d3f140dcf7dc99ce1eeae92ffef4808a | 11,623 | ex | Elixir | lib/zaryn/crypto/keystore/shared_secrets/software_impl.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | 1 | 2021-07-06T19:47:14.000Z | 2021-07-06T19:47:14.000Z | lib/zaryn/crypto/keystore/shared_secrets/software_impl.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | lib/zaryn/crypto/keystore/shared_secrets/software_impl.ex | ambareesha7/node-zaryn | 136e542801bf9b6fa4a015d3464609fdf3dacee8 | [
"Apache-2.0"
] | null | null | null | defmodule Zaryn.Crypto.SharedSecretsKeystore.SoftwareImpl do
@moduledoc false
alias Zaryn.Crypto
alias Zaryn.Crypto.SharedSecretsKeystore
alias Zaryn.SharedSecrets
alias Zaryn.TransactionChain
alias Zaryn.TransactionChain.Transaction
alias Zaryn.TransactionChain.Transaction.ValidationStamp
alias Zaryn.TransactionChain.TransactionData
alias Zaryn.TransactionChain.TransactionData.Keys
use GenStateMachine, callback_mode: :handle_event_function
require Logger
@behaviour SharedSecretsKeystore
def start_link(args \\ []) do
GenStateMachine.start_link(__MODULE__, args, name: __MODULE__)
end
@impl SharedSecretsKeystore
@spec sign_with_node_shared_secrets_key(data :: binary()) :: binary()
def sign_with_node_shared_secrets_key(data) do
GenStateMachine.call(__MODULE__, {:sign_with_node_shared_key, data})
end
@impl SharedSecretsKeystore
@spec sign_with_node_shared_secrets_key(data :: binary(), index :: non_neg_integer()) ::
binary()
def sign_with_node_shared_secrets_key(data, index) do
GenStateMachine.call(__MODULE__, {:sign_with_node_shared_key, data, index})
end
@impl SharedSecretsKeystore
@spec sign_with_network_pool_key(data :: binary()) :: binary()
def sign_with_network_pool_key(data) do
GenStateMachine.call(__MODULE__, {:sign_with_network_pool_key, data})
end
@impl SharedSecretsKeystore
@spec sign_with_network_pool_key(data :: binary(), index :: non_neg_integer()) :: binary()
def sign_with_network_pool_key(data, index) do
GenStateMachine.call(__MODULE__, {:sign_with_network_pool_key, data, index})
end
@impl SharedSecretsKeystore
@spec sign_with_daily_nonce_key(data :: binary(), DateTime.t()) :: binary()
def sign_with_daily_nonce_key(data, index) do
GenServer.call(__MODULE__, {:sign_with_daily_nonce_key, data, index})
end
@impl SharedSecretsKeystore
@spec node_shared_secrets_public_key(index :: non_neg_integer()) :: Crypto.key()
def node_shared_secrets_public_key(index) do
GenStateMachine.call(__MODULE__, {:node_shared_secrets_public_key, index})
end
@impl SharedSecretsKeystore
@spec network_pool_public_key(index :: non_neg_integer()) :: Crypto.key()
def network_pool_public_key(index) do
GenStateMachine.call(__MODULE__, {:network_pool_public_key, index})
end
@impl SharedSecretsKeystore
@spec wrap_secrets(key :: binary()) ::
{enc_transaction_seed :: binary(), enc_network_pool_seed :: binary()}
def wrap_secrets(key) do
GenStateMachine.call(__MODULE__, {:wrap_secrets, key})
end
@impl SharedSecretsKeystore
@spec unwrap_secrets(encrypted_secrets :: binary(), encrypted_key :: binary(), DateTime.t()) ::
:ok | :error
def unwrap_secrets(
secrets,
encrypted_secret_key,
date = %DateTime{}
) do
GenStateMachine.call(__MODULE__, {:unwrap_secrets, secrets, encrypted_secret_key, date})
end
@impl SharedSecretsKeystore
@spec get_network_pool_key_index() :: non_neg_integer()
def get_network_pool_key_index do
GenStateMachine.call(__MODULE__, :get_network_pool_key_index)
end
@impl SharedSecretsKeystore
@spec set_network_pool_key_index(non_neg_integer()) :: :ok
def set_network_pool_key_index(index) do
GenStateMachine.cast(__MODULE__, {:set_network_pool_key_index, index})
end
@impl SharedSecretsKeystore
@spec get_node_shared_key_index() :: non_neg_integer()
def get_node_shared_key_index do
GenStateMachine.call(__MODULE__, :get_node_shared_key_index)
end
@impl SharedSecretsKeystore
@spec set_node_shared_secrets_key_index(non_neg_integer()) :: :ok
def set_node_shared_secrets_key_index(index) do
GenStateMachine.cast(__MODULE__, {:set_node_shared_secrets_key_index, index})
end
@impl GenStateMachine
def init(_) do
node_shared_secrets_chain =
TransactionChain.list_transactions_by_type(:node_shared_secrets,
data: [:keys],
validation_stamp: [:timestamp]
)
nb_node_shared_secrets_keys = Enum.count(node_shared_secrets_chain)
Logger.debug("#{nb_node_shared_secrets_keys} node shared keys loaded into the keystore")
nb_network_pool_keys = TransactionChain.count_transactions_by_type(:node_rewards)
Logger.debug("#{nb_network_pool_keys} network pool keys loaded into the keystore")
{:ok, :idle,
%{
shared_secrets_index: nb_node_shared_secrets_keys,
network_pool_index: nb_network_pool_keys,
daily_nonce_keys: %{}
}, {:next_event, :internal, {:initial_load, node_shared_secrets_chain}}}
end
def handle_event(:internal, {:initial_load, node_shared_secrets_chain}, :idle, data) do
case Enum.at(node_shared_secrets_chain, 0) do
nil ->
:keep_state_and_data
%Transaction{
data: %TransactionData{keys: keys = %Keys{secret: secret}},
validation_stamp: %ValidationStamp{timestamp: timestamp}
} ->
if Keys.authorized_key?(keys, Crypto.last_node_public_key()) do
encrypted_secret_key = Keys.get_encrypted_key(keys, Crypto.last_node_public_key())
daily_nonce_date = SharedSecrets.next_application_date(timestamp)
{:ok, new_data} =
do_unwrap_secrets(secret, encrypted_secret_key, daily_nonce_date, data)
{:next_state, :authorized, new_data}
else
:keep_state_and_data
end
end
end
@impl GenStateMachine
def handle_event(
{:call, from},
{:sign_with_node_shared_key, data},
:authorized,
%{transaction_seed: seed, shared_secrets_index: index}
) do
{_, pv} = previous_keypair(seed, index)
{:keep_state_and_data, {:reply, from, Crypto.sign(data, pv)}}
end
def handle_event({:call, _}, {:sign_with_node_shared_key, _}, :idle, _data) do
{:keep_state_and_data, :postpone}
end
def handle_event(
{:call, from},
{:sign_with_node_shared_key, data, index},
:authorized,
%{transaction_seed: seed}
) do
{_, pv} = Crypto.derive_keypair(seed, index)
{:keep_state_and_data, {:reply, from, Crypto.sign(data, pv)}}
end
def handle_event({:call, _}, {:sign_with_node_shared_key, _, _}, :idle, _data) do
{:keep_state_and_data, :postpone}
end
def handle_event(
{:call, from},
{:sign_with_network_pool_key, data},
:authorized,
%{network_pool_seed: seed, network_pool_index: index}
) do
{_, pv} = previous_keypair(seed, index)
{:keep_state_and_data, {:reply, from, Crypto.sign(data, pv)}}
end
def handle_event({:call, _}, {:sign_with_network_pool_key, _}, :idle, _data) do
{:keep_state_and_data, :postpone}
end
def handle_event(
{:call, from},
{:sign_with_network_pool_key, data, index},
:authorized,
%{network_pool_seed: seed}
) do
{_, pv} = Crypto.derive_keypair(seed, index)
{:keep_state_and_data, {:reply, from, Crypto.sign(data, pv)}}
end
def handle_event({:call, _}, {:sign_with_network_pool_key, _, _}, :idle, _data) do
{:keep_state_and_data, :postpone}
end
def handle_event(
{:call, from},
{:sign_with_daily_nonce_key, data, timestamp},
:authorized,
%{daily_nonce_keys: keys}
) do
{pub, pv} =
keys
|> Enum.sort_by(&elem(&1, 0), {:desc, DateTime})
|> Enum.filter(&(DateTime.diff(elem(&1, 0), timestamp) <= 0))
|> List.first()
|> elem(1)
Logger.debug("Sign with the daily nonce for the public key #{Base.encode16(pub)}")
{:keep_state_and_data, {:reply, from, Crypto.sign(data, pv)}}
end
def handle_event({:call, _}, {:sign_with_daily_nonce_key, _, _}, :idle, _data) do
{:keep_state_and_data, :postpone}
end
def handle_event(
{:call, from},
{:node_shared_secrets_public_key, index},
:authorized,
%{transaction_seed: seed}
) do
{pub, _} = Crypto.derive_keypair(seed, index)
{:keep_state_and_data, {:reply, from, pub}}
end
def handle_event({:call, _}, {:node_shared_secrets_public_key, _}, :idle, _data) do
{:keep_state_and_data, :postpone}
end
def handle_event({:call, from}, {:network_pool_public_key, index}, :authorized, %{
network_pool_seed: seed
}) do
{pub, _} = Crypto.derive_keypair(seed, index)
{:keep_state_and_data, {:reply, from, pub}}
end
def handle_event({:call, _}, {:network_pool_public_key, _}, :idle, _data) do
{:keep_state_and_data, :postpone}
end
def handle_event(
{:call, from},
{:wrap_secrets, secret_key},
:authorized,
%{transaction_seed: transaction_seed, network_pool_seed: network_pool_seed}
) do
encrypted_transaction_seed = Crypto.aes_encrypt(transaction_seed, secret_key)
encrypted_network_pool_seed = Crypto.aes_encrypt(network_pool_seed, secret_key)
{:keep_state_and_data,
{:reply, from, {encrypted_transaction_seed, encrypted_network_pool_seed}}}
end
def handle_event({:call, _}, {:wrap_secrets, _, _}, :idle, _data) do
{:keep_state_and_data, :postpone}
end
def handle_event(
{:call, from},
{:unwrap_secrets, encrypted_secrets, encrypted_aes_key, timestamp},
_,
data
) do
case do_unwrap_secrets(encrypted_secrets, encrypted_aes_key, timestamp, data) do
{:ok, new_data} ->
{:next_state, :authorized, new_data, {:reply, from, :ok}}
{:error, :decryption_failed} ->
Logger.error("Cannot decrypt the node shared secrets")
{:keep_state_and_data, {:reply, from, :error}}
end
end
def handle_event(
{:call, from},
:get_node_shared_key_index,
_,
_data = %{shared_secrets_index: index}
) do
{:keep_state_and_data, {:reply, from, index}}
end
def handle_event(
{:call, from},
:get_network_pool_key_index,
_,
_data = %{network_pool_index: index}
) do
{:keep_state_and_data, {:reply, from, index}}
end
def handle_event(:cast, {:set_network_pool_key_index, index}, _, data) do
{:keep_state, Map.put(data, :network_pool_index, index)}
end
def handle_event(:cast, {:set_node_shared_secrets_key_index, index}, _, data) do
{:keep_state, Map.put(data, :shared_secrets_index, index)}
end
defp do_unwrap_secrets(
encrypted_secrets,
encrypted_aes_key,
timestamp,
data = %{daily_nonce_keys: daily_nonce_keys}
) do
<<enc_daily_nonce_seed::binary-size(60), enc_transaction_seed::binary-size(60),
enc_network_pool_seed::binary-size(60)>> = encrypted_secrets
with {:ok, aes_key} <- Crypto.ec_decrypt_with_node_key(encrypted_aes_key),
{:ok, daily_nonce_seed} <- Crypto.aes_decrypt(enc_daily_nonce_seed, aes_key),
{:ok, transaction_seed} <- Crypto.aes_decrypt(enc_transaction_seed, aes_key),
{:ok, network_pool_seed} <- Crypto.aes_decrypt(enc_network_pool_seed, aes_key) do
daily_nonce_keypair = Crypto.generate_deterministic_keypair(daily_nonce_seed)
new_keys =
daily_nonce_keys
|> Map.put(timestamp, daily_nonce_keypair)
|> Enum.sort_by(&elem(&1, 0), {:desc, DateTime})
|> Enum.into(%{})
new_data =
data
|> Map.put(:daily_nonce_keys, new_keys)
|> Map.put(:transaction_seed, transaction_seed)
|> Map.put(:network_pool_seed, network_pool_seed)
{:ok, new_data}
end
end
defp previous_keypair(seed, 0) do
Crypto.derive_keypair(seed, 0)
end
defp previous_keypair(seed, index) do
Crypto.derive_keypair(seed, index - 1)
end
end
| 32.46648 | 97 | 0.69285 |
73c9becc209048c74784ad270e803774abb492e2 | 669 | ex | Elixir | test/support/exemplars/minimal.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | test/support/exemplars/minimal.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | test/support/exemplars/minimal.ex | jesseshieh/crit19 | 0bba407fea09afed72cbb90ca579ba34c537edef | [
"MIT"
] | null | null | null | defmodule Crit.Exemplars.Minimal do
@moduledoc """
This is for dirt-simple structures where only a few most obvious structure
fields are relevant. Typically, for example, associated records are not
created.
"""
use ExUnit.CaseTemplate
use Crit.Global.Constants
alias Crit.Factory
alias Crit.Users.{Password, PasswordToken}
alias Crit.Sql
def user(opts \\ []) do
user = Factory.sql_insert!(:user, opts, @institution)
assert Password.count_for(user.auth_id, @institution) == 0
refute Sql.exists?(PasswordToken, @institution)
user
end
def animal(opts \\ []) do
Factory.sql_insert!(:animal, opts, @institution)
end
end
| 26.76 | 76 | 0.714499 |
73c9c6dcbe3a26980c8030c6afae77814cbeb0d0 | 1,834 | exs | Elixir | test/membrane/integration/timer_test.exs | membraneframework/membrane-core | 096c2546869824c49ad1d7412ffe02d050164611 | [
"Apache-2.0"
] | 515 | 2018-06-18T11:09:44.000Z | 2020-07-31T07:54:35.000Z | test/membrane/integration/timer_test.exs | membraneframework/membrane-core | 096c2546869824c49ad1d7412ffe02d050164611 | [
"Apache-2.0"
] | 120 | 2018-06-07T08:34:06.000Z | 2020-07-30T07:09:50.000Z | test/membrane/integration/timer_test.exs | membraneframework/membrane-core | 096c2546869824c49ad1d7412ffe02d050164611 | [
"Apache-2.0"
] | 13 | 2018-07-27T11:58:15.000Z | 2020-05-06T15:19:55.000Z | defmodule Membrane.Integration.TimerTest do
use ExUnit.Case, async: true
import Membrane.Testing.Assertions
alias Membrane.{Pipeline, Testing, Time}
defmodule Element do
use Membrane.Source
@impl true
def handle_prepared_to_playing(_ctx, state) do
{{:ok, start_timer: {:timer, Time.milliseconds(100)}}, state}
end
@impl true
def handle_tick(:timer, _ctx, state) do
{{:ok, notify: :tick, stop_timer: :timer}, state}
end
end
defmodule Bin do
use Membrane.Bin
@impl true
def handle_prepared_to_playing(_ctx, state) do
{{:ok, start_timer: {:timer, Time.milliseconds(100)}}, state}
end
@impl true
def handle_tick(:timer, _ctx, state) do
{{:ok, notify: :tick, stop_timer: :timer}, state}
end
end
defmodule Pipeline do
use Membrane.Pipeline
@impl true
def handle_init(pid) do
spec = %ParentSpec{
children: [element: Element, bin: Bin]
}
{{:ok, spec: spec, playback: :playing}, %{pid: pid}}
end
@impl true
def handle_prepared_to_playing(_ctx, state) do
{{:ok, start_timer: {:timer, Time.milliseconds(100)}}, state}
end
@impl true
def handle_tick(:timer, _ctx, state) do
send(state.pid, :pipeline_tick)
{{:ok, stop_timer: :timer}, state}
end
end
test "Stopping timer from handle_tick" do
{:ok, pipeline} =
Testing.Pipeline.start_link(
module: Pipeline,
custom_args: self()
)
assert_pipeline_playback_changed(pipeline, _, :playing)
assert_pipeline_notified(pipeline, :element, :tick)
assert_pipeline_notified(pipeline, :bin, :tick)
assert_receive :pipeline_tick
Testing.Pipeline.execute_actions(pipeline, playback: :stopped)
assert_pipeline_playback_changed(pipeline, _, :stopped)
end
end
| 25.123288 | 67 | 0.660851 |
73c9eb9390f2bac7b64b473580d0dba71dc912f5 | 2,601 | ex | Elixir | clients/content/lib/google_api/content/v2/model/datafeedstatuses_custom_batch_request_entry.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/datafeedstatuses_custom_batch_request_entry.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/datafeedstatuses_custom_batch_request_entry.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequestEntry do
@moduledoc """
A batch entry encoding a single non-batch datafeedstatuses request.
## Attributes
* `batchId` (*type:* `integer()`, *default:* `nil`) - An entry ID, unique within the batch request.
* `country` (*type:* `String.t`, *default:* `nil`) - The country for which to get the datafeed status. If this parameter is provided then language must also be provided. Note that for multi-target datafeeds this parameter is required.
* `datafeedId` (*type:* `String.t`, *default:* `nil`) - The ID of the data feed to get.
* `language` (*type:* `String.t`, *default:* `nil`) - The language for which to get the datafeed status. If this parameter is provided then country must also be provided. Note that for multi-target datafeeds this parameter is required.
* `merchantId` (*type:* `String.t`, *default:* `nil`) - The ID of the managing account.
* `method` (*type:* `String.t`, *default:* `nil`) - The method of the batch entry.
Acceptable values are:
- "`get`"
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:batchId => integer(),
:country => String.t(),
:datafeedId => String.t(),
:language => String.t(),
:merchantId => String.t(),
:method => String.t()
}
field(:batchId)
field(:country)
field(:datafeedId)
field(:language)
field(:merchantId)
field(:method)
end
# Poison decoding delegates to the model's own decode/2 (presumably
# generated by GoogleApi.Gax.ModelBase — confirm) so nested fields are
# materialized into their model structs.
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequestEntry do
  def decode(value, options) do
    GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequestEntry.decode(value, options)
  end
end

# Poison encoding reuses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequestEntry do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.015385 | 239 | 0.704729 |
73ca0805f36207718dfbf370a13dd6f92a8ab1ec | 629 | ex | Elixir | lib/firmata/protocol/modes.ex | smedegaard/firmata | 080ef4f3be84c5e84a840da5e406cac8d9a92d72 | [
"ISC"
] | 30 | 2017-08-17T23:06:42.000Z | 2021-06-22T04:42:22.000Z | lib/firmata/protocol/modes.ex | smedegaard/firmata | 080ef4f3be84c5e84a840da5e406cac8d9a92d72 | [
"ISC"
] | 2 | 2017-08-17T23:11:22.000Z | 2020-09-17T14:21:48.000Z | lib/firmata/protocol/modes.ex | smedegaard/firmata | 080ef4f3be84c5e84a840da5e406cac8d9a92d72 | [
"ISC"
] | 7 | 2016-05-09T16:53:26.000Z | 2017-03-13T04:46:56.000Z | defmodule Firmata.Protocol.Modes do
defmacro __using__(_) do
quote location: :keep do
@input 0x00
@output 0x01
@analog 0x02
@pwm 0x03
@servo 0x04
@shift 0x05
@i2c 0x06
@onewire 0x07
@stepper 0x08
@serial 0x0a
@ignore 0x7f
@ping_read 0x75
@sonar 0x0B
@unknown 0x10
@modes [
@input,
@output,
@analog,
@pwm,
@servo,
@shift,
@i2c,
@onewire,
@stepper,
@serial,
@ignore,
@ping_read,
@sonar,
@unknown
]
end
end
end
| 16.552632 | 35 | 0.476948 |
73ca08ff19778800879a0fe01accef66adfc2c68 | 5,926 | exs | Elixir | exercises/practice/state-of-tic-tac-toe/test/state_of_tic_tac_toe_test.exs | kwchang0831/elixir | 1b21ae1ca610de97db79e76db890503ba75ce466 | [
"MIT"
] | null | null | null | exercises/practice/state-of-tic-tac-toe/test/state_of_tic_tac_toe_test.exs | kwchang0831/elixir | 1b21ae1ca610de97db79e76db890503ba75ce466 | [
"MIT"
] | null | null | null | exercises/practice/state-of-tic-tac-toe/test/state_of_tic_tac_toe_test.exs | kwchang0831/elixir | 1b21ae1ca610de97db79e76db890503ba75ce466 | [
"MIT"
] | null | null | null | defmodule StateOfTicTacToeTest do
use ExUnit.Case
describe "Won games" do
# @tag :pending
test "Finished game where X won via column victory (1)" do
board = """
XOO
X..
X..
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via column victory (2)" do
board = """
OXO
.X.
.X.
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via column victory (3)" do
board = """
OOX
..X
..X
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where O won via column victory (1)" do
board = """
OXX
OX.
O..
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where O won via column victory (2)" do
board = """
XOX
.OX
.O.
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where O won via column victory (3)" do
board = """
XXO
.XO
..O
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via row victory (1)" do
board = """
XXX
XOO
O..
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via row victory (2)" do
board = """
O.O
XXX
.O.
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via row victory (3)" do
board = """
.OO
O.X
XXX
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where O won via row victory (1)" do
board = """
OOO
XXO
XX.
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where O won via row victory (2)" do
board = """
XX.
OOO
X..
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where O won via row victory (3)" do
board = """
XOX
.XX
OOO
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via diagonal victory (1)" do
board = """
XOO
.X.
..X
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via diagonal victory (2)" do
board = """
O.X
OX.
X..
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where O won via diagonal victory (3)" do
board = """
OXX
OOX
X.O
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where O won via diagonal victory (1)" do
board = """
..O
.OX
OXX
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via a row and a column victory" do
board = """
XXX
XOO
XOO
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
@tag :pending
test "Finished game where X won via two diagonal victories" do
board = """
XOX
OXO
XOX
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :win}
end
end
describe "Drawn games" do
@tag :pending
test "Draw (1)" do
board = """
XOX
XXO
OXO
"""
assert StateOfTicTacToe.game_state(board) == {:ok, :draw}
end
@tag :pending
test "Draw (2)" do
board = """
XXO
OXX
XOO
"""
{:ok, :draw}
assert StateOfTicTacToe.game_state(board) == {:ok, :draw}
end
end
  describe "Ongoing games" do
    # Legal, unfinished positions must be reported as {:ok, :ongoing}.
    @tag :pending
    test "Ongoing game (1)" do
      board = """
      ...
      X..
      ...
      """
      assert StateOfTicTacToe.game_state(board) == {:ok, :ongoing}
    end
    @tag :pending
    test "Ongoing game (2)" do
      board = """
      O..
      .X.
      ...
      """
      assert StateOfTicTacToe.game_state(board) == {:ok, :ongoing}
    end
    @tag :pending
    test "Ongoing game (3)" do
      board = """
      X..
      .XO
      OX.
      """
      assert StateOfTicTacToe.game_state(board) == {:ok, :ongoing}
    end
  end
  describe "Invalid boards" do
    # Boards that cannot arise from legal play: wrong turn order (X always
    # moves first, players alternate) or moves made after a win.
    @tag :pending
    test "Invalid board (1)" do
      board = """
      XX.
      ...
      ...
      """
      assert StateOfTicTacToe.game_state(board) == {:error, "Wrong turn order: X went twice"}
    end
    @tag :pending
    test "Invalid board (2)" do
      board = """
      OOX
      ...
      ...
      """
      assert StateOfTicTacToe.game_state(board) == {:error, "Wrong turn order: O started"}
    end
    @tag :pending
    test "Invalid board: X won and O kept playing" do
      board = """
      XXX
      OOO
      ...
      """
      assert StateOfTicTacToe.game_state(board) ==
               {:error, "Impossible board: game should have ended after the game was won"}
    end
    @tag :pending
    test "Invalid board: players kept playing after a win" do
      board = """
      XXX
      OOO
      XOX
      """
      assert StateOfTicTacToe.game_state(board) ==
               {:error, "Impossible board: game should have ended after the game was won"}
    end
  end
end
| 18.932907 | 93 | 0.520756 |
73ca178969bebe46f9bdebb67c8367c6d48ef7cc | 4,530 | ex | Elixir | clients/spanner/lib/google_api/spanner/v1/model/backup.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/spanner/lib/google_api/spanner/v1/model/backup.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/spanner/lib/google_api/spanner/v1/model/backup.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Spanner.V1.Model.Backup do
  @moduledoc """
  A backup of a Cloud Spanner database.

  ## Attributes

  *   `createTime` - Output only. Time the CreateBackup request was received;
      when the request does not specify `version_time`, the backup's
      `version_time` equals this value.
  *   `database` - Required for CreateBackup. Name of the source database,
      which must live in the same instance as the backup
      (`projects//instances//databases/`).
  *   `encryptionInfo` - Output only. Encryption information for the backup.
  *   `expireTime` - Required for CreateBackup. Expiration time (at least
      6 hours and at most 366 days after the request is processed); once
      passed, the backup is eligible for automatic deletion.
  *   `name` - Output only for CreateBackup, required for UpdateBackup.
      Globally unique, immutable backup identifier of the form
      `projects//instances//backups/a-z*[a-z0-9]` (final segment 2-60 chars).
  *   `referencingDatabases` - Output only. Restored databases that still
      reference this backup; any such reference blocks deletion.
  *   `sizeBytes` - Output only. Size of the backup in bytes.
  *   `state` - Output only. Current state of the backup.
  *   `versionTime` - Timestamp of the externally consistent copy of the
      database; defaults to `create_time` when unspecified.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :createTime => DateTime.t() | nil,
          :database => String.t() | nil,
          :encryptionInfo => GoogleApi.Spanner.V1.Model.EncryptionInfo.t() | nil,
          :expireTime => DateTime.t() | nil,
          :name => String.t() | nil,
          :referencingDatabases => list(String.t()) | nil,
          :sizeBytes => String.t() | nil,
          :state => String.t() | nil,
          :versionTime => DateTime.t() | nil
        }

  field(:createTime, as: DateTime)
  field(:database)
  field(:encryptionInfo, as: GoogleApi.Spanner.V1.Model.EncryptionInfo)
  field(:expireTime, as: DateTime)
  field(:name)
  field(:referencingDatabases, type: :list)
  field(:sizeBytes)
  field(:state)
  field(:versionTime, as: DateTime)
end

defimpl Poison.Decoder, for: GoogleApi.Spanner.V1.Model.Backup do
  alias GoogleApi.Spanner.V1.Model.Backup, as: Model

  def decode(value, options), do: Model.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.Spanner.V1.Model.Backup do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 63.802817 | 550 | 0.721854 |
73ca1ce4895fe0a0f887ced750bdc200607e8cd1 | 2,461 | ex | Elixir | lib/docusign/model/checkbox.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 4 | 2020-12-21T12:50:13.000Z | 2022-01-12T16:50:43.000Z | lib/docusign/model/checkbox.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 12 | 2018-09-18T15:26:34.000Z | 2019-09-28T15:29:39.000Z | lib/docusign/model/checkbox.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 15 | 2020-04-29T21:50:16.000Z | 2022-02-11T18:01:51.000Z | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule DocuSign.Model.Checkbox do
  @moduledoc """
  A tab that allows the recipient to select a yes/no (on/off) option.
  """

  @derive [Poison.Encoder]
  defstruct [
    :anchorCaseSensitive,
    :anchorHorizontalAlignment,
    :anchorIgnoreIfNotPresent,
    :anchorMatchWholeWord,
    :anchorString,
    :anchorUnits,
    :anchorXOffset,
    :anchorYOffset,
    :conditionalParentLabel,
    :conditionalParentValue,
    :customTabId,
    :documentId,
    :errorDetails,
    :locked,
    :mergeField,
    :name,
    :pageNumber,
    :recipientId,
    :required,
    :requireInitialOnSharedChange,
    :selected,
    :shared,
    :status,
    :tabId,
    :tabLabel,
    :tabOrder,
    :templateLocked,
    :templateRequired,
    :xPosition,
    :yPosition
  ]

  # All scalar fields are transported by the DocuSign API as strings.
  # The two nested fields now use remote struct types (`Module.t()`);
  # the previous bare aliases (`ErrorDetails`, `MergeField`) denoted atom
  # literal types, which is not what these fields hold (see the decoder
  # defimpl, which deserializes them into those structs).
  @type t :: %__MODULE__{
          :anchorCaseSensitive => String.t(),
          :anchorHorizontalAlignment => String.t(),
          :anchorIgnoreIfNotPresent => String.t(),
          :anchorMatchWholeWord => String.t(),
          :anchorString => String.t(),
          :anchorUnits => String.t(),
          :anchorXOffset => String.t(),
          :anchorYOffset => String.t(),
          :conditionalParentLabel => String.t(),
          :conditionalParentValue => String.t(),
          :customTabId => String.t(),
          :documentId => String.t(),
          :errorDetails => DocuSign.Model.ErrorDetails.t(),
          :locked => String.t(),
          :mergeField => DocuSign.Model.MergeField.t(),
          :name => String.t(),
          :pageNumber => String.t(),
          :recipientId => String.t(),
          :required => String.t(),
          :requireInitialOnSharedChange => String.t(),
          :selected => String.t(),
          :shared => String.t(),
          :status => String.t(),
          :tabId => String.t(),
          :tabLabel => String.t(),
          :tabOrder => String.t(),
          :templateLocked => String.t(),
          :templateRequired => String.t(),
          :xPosition => String.t(),
          :yPosition => String.t()
        }
end
# Custom Poison decoder: materialises the nested `errorDetails` and
# `mergeField` maps into their corresponding DocuSign model structs.
defimpl Poison.Decoder, for: DocuSign.Model.Checkbox do
import DocuSign.Deserializer
def decode(value, options) do
value
|> deserialize(:errorDetails, :struct, DocuSign.Model.ErrorDetails, options)
|> deserialize(:mergeField, :struct, DocuSign.Model.MergeField, options)
end
end
| 28.287356 | 80 | 0.595693 |
73ca4123ee613bfd0024d97723e2445f84f6c3c8 | 2,177 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1_human_review_status.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1_human_review_status.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1_human_review_status.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1HumanReviewStatus do
  @moduledoc """
  The status of human review on a processed document.

  ## Attributes

  *   `humanReviewOperation` - Name of the operation triggered by the processed
      document; populated only while the state is `HUMAN_REVIEW_IN_PROGRESS`.
      It shares the response type and metadata of the long-running operation
      returned by the `ReviewDocument` method.
  *   `state` - The state of human review on the processing request.
  *   `stateMessage` - A message providing more detail about the human review
      state.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :humanReviewOperation => String.t() | nil,
          :state => String.t() | nil,
          :stateMessage => String.t() | nil
        }

  field(:humanReviewOperation)
  field(:state)
  field(:stateMessage)
end

defimpl Poison.Decoder,
  for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1HumanReviewStatus do
  alias GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1HumanReviewStatus, as: Model

  def decode(value, options), do: Model.decode(value, options)
end

defimpl Poison.Encoder,
  for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1HumanReviewStatus do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.534483 | 319 | 0.735875 |
73ca7d488961582bbd9585515ffe1067eb4c3b76 | 2,540 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/evaluate_annotation_store_response.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/evaluate_annotation_store_response.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/evaluate_annotation_store_response.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.EvaluateAnnotationStoreResponse do
  @moduledoc """
  Response for successful Annotation store evaluation operations; this
  structure is included in the response upon operation completion.

  ## Attributes

  *   `evalStore` - The evaluated Annotation store, in the format
      `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/annotationStores/{annotation_store_id}`.
  *   `goldenCount` - Number of Annotations in the ground truth Annotation
      store that were successfully processed.
  *   `goldenStore` - The ground truth Annotation store (same name format as
      `evalStore`).
  *   `matchedCount` - Number of Annotations in the eval store that match
      corresponding annotations in the ground truth store; two matched
      annotations both annotate the same resource defined in AnnotationSource.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :evalStore => String.t(),
          :goldenCount => String.t(),
          :goldenStore => String.t(),
          :matchedCount => String.t()
        }

  field(:evalStore)
  field(:goldenCount)
  field(:goldenStore)
  field(:matchedCount)
end

defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.EvaluateAnnotationStoreResponse do
  alias GoogleApi.HealthCare.V1beta1.Model.EvaluateAnnotationStoreResponse, as: Model

  def decode(value, options), do: Model.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.EvaluateAnnotationStoreResponse do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 45.357143 | 270 | 0.74685 |
73ca8ed104c8dd2cba2dae0ea7849ee7f2cb9549 | 509 | exs | Elixir | config/config.exs | ivosequeros/minimal-graphql-elixir | 455996d230dd065fd2182f0950c191a2d880f4bf | [
"Apache-2.0"
] | null | null | null | config/config.exs | ivosequeros/minimal-graphql-elixir | 455996d230dd065fd2182f0950c191a2d880f4bf | [
"Apache-2.0"
] | null | null | null | config/config.exs | ivosequeros/minimal-graphql-elixir | 455996d230dd065fd2182f0950c191a2d880f4bf | [
"Apache-2.0"
] | null | null | null | use Mix.Config
config :outbound_engine,
ecto_repos: [BasicApp.Repo]
config :outbound_engine, BasicApp.API.Endpoint,
url: [host: "localhost"],
secret_key_base: "wOlxWYUmU0dJEoK+e89vPWiGaNJjHN9pxv3bk0zYqFD0Hh1I8TwaP3Eg5p9h/AWx",
render_errors: [view: BasicApp.API.ErrorView, accepts: ~w(json)],
pubsub: [name: BasicApp.PubSub,
adapter: Phoenix.PubSub.PG2]
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:user_id]
import_config "#{Mix.env}.exs"
| 28.277778 | 86 | 0.72888 |
73caa23510efa38007b2dffeaabd2848fb91ac6a | 253 | exs | Elixir | exercises/02-higher-order-functions/02-goodbye-loops/16-piping-3/solution.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
] | 1 | 2021-09-22T09:52:11.000Z | 2021-09-22T09:52:11.000Z | exercises/02-higher-order-functions/02-goodbye-loops/16-piping-3/solution.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
] | 22 | 2019-06-19T18:58:13.000Z | 2020-03-16T14:43:06.000Z | exercises/02-higher-order-functions/02-goodbye-loops/16-piping-3/solution.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
defmodule Bank do
  @doc """
  Returns the 0-based index of the period with the largest expense, i.e. the
  position of the most negative balance-to-balance change in `balance_history`.
  Ties resolve to the earliest period.
  """
  def largest_expense_index(balance_history) do
    next_balances = tl(balance_history)

    deltas =
      balance_history
      |> Enum.zip(next_balances)
      |> Enum.map(fn {earlier, later} -> later - earlier end)

    {_smallest_delta, index} =
      deltas
      |> Enum.with_index()
      |> Enum.min_by(fn {delta, _index} -> delta end)

    index
  end
end
| 23 | 47 | 0.612648 |
73cab55f15addb58c453aaf67e1bfed95236b4a9 | 1,199 | ex | Elixir | lib/dagex/operations/remove_edge.ex | jwilger/dagex | b6c0b660c6897ee1109e1d72937e993b7bfebb4f | [
"Apache-2.0"
] | null | null | null | lib/dagex/operations/remove_edge.ex | jwilger/dagex | b6c0b660c6897ee1109e1d72937e993b7bfebb4f | [
"Apache-2.0"
] | null | null | null | lib/dagex/operations/remove_edge.ex | jwilger/dagex | b6c0b660c6897ee1109e1d72937e993b7bfebb4f | [
"Apache-2.0"
defmodule Dagex.Operations.RemoveEdge do
  @moduledoc """
  Represents a database operation to remove an edge between two nodes.

  See `c:Dagex.remove_edge/2`
  """

  @type t() :: %__MODULE__{
          node_type: String.t(),
          parent: struct(),
          parent_id: String.t(),
          child: struct(),
          child_id: String.t()
        }

  @type result() :: {:edge_removed, {parent :: struct(), child :: struct()}}

  defstruct [:node_type, :parent, :parent_id, :child, :child_id]

  @doc false
  @spec new(struct(), struct()) :: t() | {:error, term()}
  def new(parent, child) do
    # The parent's Ecto schema supplies both the node table name and the
    # primary-key field used for both endpoints of the edge.
    pk_field = List.first(parent.__struct__.__schema__(:primary_key))

    %__MODULE__{
      node_type: parent.__meta__.source,
      parent: parent,
      parent_id: stringify_pk(parent, pk_field),
      child: child,
      child_id: stringify_pk(child, pk_field)
    }
  end

  @doc false
  @spec process_result(:ok, t()) :: {:edge_removed, {parent :: struct(), child :: struct()}}
  def process_result(:ok, %__MODULE__{parent: parent, child: child}) do
    {:edge_removed, {parent, child}}
  end

  # String form of the node's primary-key value.
  defp stringify_pk(node, pk_field), do: to_string(Map.get(node, pk_field))
end
| 27.883721 | 92 | 0.622185 |
73cac7fb12dd7dd86ada9e22a0fcef1b7685d5c8 | 495 | ex | Elixir | lib/yggdrasil_web/views/error_view.ex | first-tree/yggdrasil | dfe1417822a04c8d8da24912d0b4a8271c24e7c9 | [
"MIT"
] | null | null | null | lib/yggdrasil_web/views/error_view.ex | first-tree/yggdrasil | dfe1417822a04c8d8da24912d0b4a8271c24e7c9 | [
"MIT"
] | 1 | 2018-05-31T02:43:11.000Z | 2018-05-31T02:43:11.000Z | lib/yggdrasil_web/views/error_view.ex | first-tree/yggdrasil | dfe1417822a04c8d8da24912d0b4a8271c24e7c9 | [
"MIT"
defmodule YggdrasilWeb.ErrorView do
  use YggdrasilWeb, :view

  # A particular status code/format can be customised by adding an explicit
  # render clause, for example:
  #
  #     def render("500.html", _assigns) do
  #       "Internal Server Error"
  #     end

  # Fallback: derive the response body from the HTTP status message encoded
  # in the template name ("404.html" becomes "Not Found").
  def template_not_found(template, _assigns),
    do: Phoenix.Controller.status_message_from_template(template)
end
| 29.117647 | 61 | 0.737374 |
73caf6d0ec58e9c82db94f670a2f3c325a71f83d | 3,383 | ex | Elixir | lib/mix/lib/mix/deps/converger.ex | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | [
"Apache-2.0"
] | 4 | 2016-04-05T05:51:36.000Z | 2019-10-31T06:46:35.000Z | lib/mix/lib/mix/deps/converger.ex | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/deps/converger.ex | joearms/elixir | 9a0f8107bd8bbd089acb96fe0041d61a05e88a9b | [
"Apache-2.0"
] | 5 | 2015-02-01T06:01:19.000Z | 2019-08-29T09:02:35.000Z | # This module is the one responsible for converging
# dependencies in a recursive fashion. This
# module and its functions are private to Mix.
defmodule Mix.Deps.Converger do
@moduledoc false
@doc """
Clear up the mixfile cache.
"""
def clear_cache do
Mix.Server.cast(:clear_mixfile_cache)
end
@doc """
Returns all dependencies from the current project,
including nested dependencies. There is a callback
that is invoked for each dependency and must return
an updated dependency in case some processing is done.
"""
def all(rest, callback) do
config = [ deps_path: Path.expand(Mix.project[:deps_path]),
root_lockfile: Path.expand(Mix.project[:lockfile]) ]
# Fetch the project's direct dependencies (with their children attached),
# then walk them breadth-first starting from the reversed main list.
{ main, rest } = Mix.Deps.Retriever.all(rest, config,callback)
{ all(Enum.reverse(main), [], [], main), rest }
end
# We traverse the tree of dependencies in a breadth-
# first fashion. The reason for this is that we converge
# dependencies, but allow the parent to override any
# dependency in the child. Consider this tree with
# dependencies `a`, `b`, etc and the order they are
# converged:
#
# * project
# 1) a
# 2) b
# 4) d
# 3) c
# 5) e
# 6) f
# 7) d
#
# Notice that the `d` dependency exists as a child of `g`
# and child of `f`. In case the dependency is the same,
# we proceed. However, if there is a conflict, for instance
# different git repositories is used as source in each, we
# raise an exception.
#
# In order to solve such dependencies, we allow the project
# to specify the same dependency, but it will be considered
# to have higher priority:
#
# * project
# 1) a
# 2) b
# 5) d
# 3) c
# 6) e
# 7) f
# 8) d
# 4) d
#
# Now, since `d` was specified in a parent project, no
# exception is going to be raised since d is considered
# to be the authorative source.
defp all([dep|t], acc, upper_breadths, current_breadths) do
cond do
# A parent (upper breadth) already declares this dep: the parent's
# declaration is authoritative, so this occurrence is dropped.
contains_dep?(upper_breadths, dep) ->
all(t, acc, upper_breadths, current_breadths)
# NOTE(review): this relies on pre-1.0 Elixir scoping in which bindings
# made inside `match?`/`case` leaked into the enclosing scope, making
# `diverged_acc` visible on the following line. On modern Elixir this
# would not compile — confirm the compiler version this file targets.
match?({ diverged_acc, true }, diverged_dep?(acc, dep)) ->
all(t, diverged_acc, upper_breadths, current_breadths)
true ->
deps = dep.deps
if deps != [] do
# Finish converging the current breadth first, then descend into
# the children with the widened breadth list.
acc = all(t, [dep|acc], upper_breadths, current_breadths)
all(deps, acc, current_breadths, deps ++ current_breadths)
else
all(t, [dep|acc], upper_breadths, current_breadths)
end
end
end
defp all([], acc, _upper, _current) do
acc
end
# Does the list contain the given dependency?
defp contains_dep?(list, Mix.Dep[app: app]) do
Enum.any?(list, match?(Mix.Dep[app: ^app], &1))
end
# Check the list for matching dependencies.
# In case dependencies are found, check if their
# scm info match. If not, mark the dependencies
# as diverged.
# Returns { possibly_updated_list, diverged? }.
def diverged_dep?(list, dep) do
Mix.Dep[app: app, scm: scm, opts: opts] = dep
Enum.map_reduce list, false, fn(other, diverged) ->
Mix.Dep[app: other_app, scm: other_scm, opts: other_opts] = other
if app != other_app || scm == other_scm && scm.equals?(opts, other_opts) do
{ other, diverged }
else
{ other.status({ :diverged, dep }), true }
end
end
end
end
| 30.754545 | 81 | 0.625185 |
73cb1a178662041d6fb2a3c2d74c76bb70cebfca | 321 | ex | Elixir | lib/idefix/refactor/extract_variable.ex | arjan/idefix | d6a2f074ed2b18b77c7058ce82ab73eed62feb26 | [
"MIT"
] | 1 | 2020-03-26T16:46:05.000Z | 2020-03-26T16:46:05.000Z | lib/idefix/refactor/extract_variable.ex | arjan/idefix | d6a2f074ed2b18b77c7058ce82ab73eed62feb26 | [
"MIT"
] | null | null | null | lib/idefix/refactor/extract_variable.ex | arjan/idefix | d6a2f074ed2b18b77c7058ce82ab73eed62feb26 | [
"MIT"
defmodule Idefix.Refactor.ExtractVariable do
  @moduledoc """
  Extract the current expression into a variable
  """

  alias Idefix.AST

  @doc """
  Parses `input`, locates the node at `{line, col}` and returns the nearest
  enclosing `do` block of that node.

  NOTE(review): the refactoring itself appears unimplemented — `newname`
  (the would-be variable name) is accepted but unused, so it is underscored
  here to silence the compiler warning; the unused `block` binding was also
  dropped. Confirm the intended final behavior before extending.
  """
  def extract_variable(input, {line, col}, _newname) do
    ast = AST.parse(input)
    node = AST.find_node(ast, line, col)
    AST.find_nearest_do_block(ast, node)
  end
end
| 21.4 | 54 | 0.707165 |
73cb4133bb9a8db750a8e9e9f92d88a5bed9d0a5 | 1,454 | exs | Elixir | mix.exs | BleuShan/absinthe_phoenix | afdaf8b65a96aed6e9d64b9b11b10944e08bfc8a | [
"MIT"
] | null | null | null | mix.exs | BleuShan/absinthe_phoenix | afdaf8b65a96aed6e9d64b9b11b10944e08bfc8a | [
"MIT"
] | null | null | null | mix.exs | BleuShan/absinthe_phoenix | afdaf8b65a96aed6e9d64b9b11b10944e08bfc8a | [
"MIT"
defmodule Absinthe.Phoenix.Mixfile do
  use Mix.Project

  @version "1.4.3"

  # Mix project definition.
  def project do
    prod? = Mix.env() == :prod

    [
      app: :absinthe_phoenix,
      version: @version,
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env()),
      build_embedded: prod?,
      start_permanent: prod?,
      docs: [source_ref: "v#{@version}"],
      package: package(),
      deps: deps()
    ]
  end

  # Hex package metadata.
  defp package do
    [
      description:
        "Subscription support via Phoenix for Absinthe, the GraphQL implementation for Elixir.",
      files: ["lib", "mix.exs", "README*"],
      maintainers: ["Ben Wilson", "Bruce Williams"],
      licenses: ["MIT"],
      links: %{
        Website: "https://absinthe-graphql.org",
        Changelog:
          "https://github.com/absinthe-graphql/absinthe_phoenix/blob/master/CHANGELOG.md",
        GitHub: "https://github.com/absinthe-graphql/absinthe_phoenix"
      }
    ]
  end

  # Test-support sources are compiled only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_env), do: ["lib"]

  # OTP application configuration.
  def application do
    [extra_applications: [:logger]]
  end

  # Library dependencies.
  defp deps do
    [
      {:absinthe_plug, "~> 1.4.0"},
      {:absinthe, "~> 1.4.0"},
      {:decimal, "~> 1.0"},
      {:phoenix, "~> 1.2"},
      {:phoenix_pubsub, "~> 1.0"},
      {:phoenix_html, "~> 2.10.5 or ~> 2.11", optional: true},
      {:ex_doc, "~> 0.14", only: :dev},
      {:poison, "~> 2.0 or ~> 3.0", only: [:dev, :test]}
    ]
  end
end
| 25.964286 | 96 | 0.559835 |
73cb79b4d8840530e97d295687869da3cd22b660 | 2,123 | exs | Elixir | proficionym_api/test/controllers/synonyms_controller_test.exs | appropont/proficionym-api-elixir | 37519691e04176490154d7d8205c7bf1f90ff8b3 | [
"Apache-2.0"
] | null | null | null | proficionym_api/test/controllers/synonyms_controller_test.exs | appropont/proficionym-api-elixir | 37519691e04176490154d7d8205c7bf1f90ff8b3 | [
"Apache-2.0"
] | null | null | null | proficionym_api/test/controllers/synonyms_controller_test.exs | appropont/proficionym-api-elixir | 37519691e04176490154d7d8205c7bf1f90ff8b3 | [
"Apache-2.0"
defmodule ProficionymApi.SynonymsControllerTest do
  use ProficionymApi.ConnCase

  alias ProficionymApi.Synonyms

  @valid_attrs %{}
  @invalid_attrs %{}

  # Every request in this suite negotiates a JSON response.
  setup %{conn: conn} do
    {:ok, conn: put_req_header(conn, "accept", "application/json")}
  end

  test "lists all entries on index", %{conn: conn} do
    conn = get(conn, synonyms_path(conn, :index))

    assert json_response(conn, 200)["data"] == []
  end

  test "shows chosen resource", %{conn: conn} do
    synonyms = Repo.insert!(%Synonyms{})
    conn = get(conn, synonyms_path(conn, :show, synonyms))

    assert json_response(conn, 200)["data"] == %{"id" => synonyms.id}
  end

  test "renders page not found when id is nonexistent", %{conn: conn} do
    assert_error_sent 404, fn ->
      get(conn, synonyms_path(conn, :show, -1))
    end
  end

  test "creates and renders resource when data is valid", %{conn: conn} do
    conn = post(conn, synonyms_path(conn, :create), synonyms: @valid_attrs)

    assert json_response(conn, 201)["data"]["id"]
    assert Repo.get_by(Synonyms, @valid_attrs)
  end

  test "does not create resource and renders errors when data is invalid", %{conn: conn} do
    conn = post(conn, synonyms_path(conn, :create), synonyms: @invalid_attrs)

    assert json_response(conn, 422)["errors"] != %{}
  end

  test "updates and renders chosen resource when data is valid", %{conn: conn} do
    synonyms = Repo.insert!(%Synonyms{})
    conn = put(conn, synonyms_path(conn, :update, synonyms), synonyms: @valid_attrs)

    assert json_response(conn, 200)["data"]["id"]
    assert Repo.get_by(Synonyms, @valid_attrs)
  end

  test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
    synonyms = Repo.insert!(%Synonyms{})
    conn = put(conn, synonyms_path(conn, :update, synonyms), synonyms: @invalid_attrs)

    assert json_response(conn, 422)["errors"] != %{}
  end

  test "deletes chosen resource", %{conn: conn} do
    synonyms = Repo.insert!(%Synonyms{})
    conn = delete(conn, synonyms_path(conn, :delete, synonyms))

    assert response(conn, 204)
    refute Repo.get(Synonyms, synonyms.id)
  end
end
| 35.383333 | 98 | 0.686293 |
73cb7be0aaaea7fee4e434d58f8792c850aad271 | 1,670 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/placement_tag.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/placement_tag.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/placement_tag.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Model.PlacementTag do
  @moduledoc """
  Placement Tag.

  ## Attributes

  *   `placementId` - Placement ID.
  *   `tagDatas` - Tags generated for this placement.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :placementId => String.t() | nil,
          :tagDatas => list(GoogleApi.DFAReporting.V34.Model.TagData.t()) | nil
        }

  field(:placementId)
  field(:tagDatas, as: GoogleApi.DFAReporting.V34.Model.TagData, type: :list)
end

defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V34.Model.PlacementTag do
  alias GoogleApi.DFAReporting.V34.Model.PlacementTag, as: Model

  def decode(value, options), do: Model.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V34.Model.PlacementTag do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 33.4 | 132 | 0.728743 |
73cb872a8203a9f30dc712d621391658866468f8 | 1,926 | exs | Elixir | mix.exs | smartrent/harald | 158a69bc2b70b3f51d67bd935d223a42a3633d68 | [
"MIT"
] | 3 | 2020-08-07T02:09:09.000Z | 2020-08-28T12:25:48.000Z | mix.exs | smartrent/harald | 158a69bc2b70b3f51d67bd935d223a42a3633d68 | [
"MIT"
] | null | null | null | mix.exs | smartrent/harald | 158a69bc2b70b3f51d67bd935d223a42a3633d68 | [
"MIT"
defmodule Harald.MixProject do
  use Mix.Project

  @source_url "https://github.com/verypossible/harald"

  # OTP application configuration.
  def application do
    [extra_applications: [:logger]]
  end

  # Mix project definition.
  def project do
    [
      app: :harald,
      deps: deps(),
      description: description(),
      dialyzer: [
        flags: [:unmatched_returns, :error_handling, :race_conditions, :underspecs, :overspecs],
        ignore_warnings: "dialyzer_ignore.exs"
      ],
      docs: docs(),
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      package: package(),
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      source_url: @source_url,
      start_permanent: Mix.env() == :prod,
      test_coverage: [tool: ExCoveralls],
      version: "0.2.0"
    ]
  end

  # Library and tooling dependencies.
  defp deps do
    [
      {:circuits_uart, "~> 1.4", optional: true},
      {:credo, "~> 1.4", runtime: false, only: [:test]},
      {:dialyxir, "~> 1.0", runtime: false, only: [:test]},
      {:ex_bin, "~> 0.4"},
      {:ex_doc, "~> 0.22", only: [:dev], runtime: false},
      {:excoveralls, "~> 0.13", only: [:test], runtime: false},
      {:mix_test_watch, "~> 1.0", only: [:test], runtime: false},
      {:stream_data, "~> 0.5", only: [:test]}
    ]
  end

  # Package description shown on Hex.
  defp description do
    """
    An Elixir Bluetooth Host library.
    """
  end

  # ExDoc configuration.
  defp docs do
    [
      main: "readme",
      extras: [
        "README.md",
        "docs/faq.md",
        "docs/getting_started.md",
        "docs/testing/testing.md"
      ],
      extra_section: "DOCS",
      groups_for_extras: [
        Testing: ~r/docs\/testing\/.?/
      ]
    ]
  end

  # Test-support sources are compiled only in the :test environment.
  defp elixirc_paths(:test), do: ["test/support", "lib"]
  defp elixirc_paths(_env), do: ["lib"]

  # Hex package metadata.
  defp package do
    [
      licenses: ["MIT"],
      links: %{"GitHub" => @source_url}
    ]
  end
end
| 23.487805 | 96 | 0.541537 |
73cb8e3acfed206cfd2065e9841ed410921646b4 | 1,273 | ex | Elixir | example_app/web/controllers/user_management_controller.ex | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 750 | 2015-01-18T23:00:36.000Z | 2021-03-24T22:11:09.000Z | example_app/web/controllers/user_management_controller.ex | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 130 | 2015-01-19T12:39:42.000Z | 2021-09-28T22:40:52.000Z | example_app/web/controllers/user_management_controller.ex | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 151 | 2015-01-19T09:24:44.000Z | 2020-09-21T13:52:46.000Z | defmodule ExampleApp.UserManagementController do
use ExampleApp.Web, :controller
alias ExampleApp.User
# GET / — renders the list of all users.
def index(conn, _params) do
  render(conn, "index.html", users: Repo.all(User))
end

# GET /register — blank registration form.
def register(conn, _params) do
  conn |> render("register.html", csrf_token: csrf_token(conn))
end

# GET /login — login form.
def login(conn, _params) do
  conn |> render("login.html", csrf_token: csrf_token(conn))
end

# GET /send_reset_password_link — form requesting a password-reset email.
def send_reset_password_link(conn, _params) do
  conn |> render("send_reset_password_link.html", csrf_token: csrf_token(conn))
end

# GET /reset_password — reset form; token/signature come from the emailed link.
def reset_password(conn, params) do
  render(conn, "reset_password.html",
    token: params["token"],
    signature: params["signature"],
    csrf_token: csrf_token(conn)
  )
end
# CSRF token generation is currently disabled: every caller receives `nil`
# and the templates render without a token. The original implementation is
# kept below for reference:
#
#   csrf_token = Plug.CSRFProtection.get_csrf_token
#   Plug.Conn.put_session(conn, :_csrf_token, csrf_token)
#   csrf_token
#
# `_conn` is underscored because the stubbed body no longer uses it
# (silences the unused-variable compiler warning); the explicit `nil`
# makes the previously-implicit return value visible.
def csrf_token(_conn) do
  nil
end
# DELETE /:id — removes the user and redirects back to the index.
def delete(conn, %{"id" => id}) do
  # Here we use delete! (with a bang) because we expect
  # it to always work (and if it does not, it will raise).
  User
  |> Repo.get!(id)
  |> Repo.delete!()

  conn
  |> put_flash(:info, "User deleted successfully.")
  |> redirect(to: user_management_path(conn, :index))
end
end
| 27.085106 | 105 | 0.696779 |
73cb991a2168df240a66a6a9595653961d5731ca | 2,402 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_update_entities_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_update_entities_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_update_entities_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2021-03-04T13:43:47.000Z | 2021-03-04T13:43:47.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchUpdateEntitiesRequest do
  @moduledoc """
  The request message for EntityTypes.BatchUpdateEntities.

  ## Attributes

  *   `entities` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2EntityTypeEntity.t)`, *default:* `nil`) - Required. The entities to update or create.
  *   `languageCode` (*type:* `String.t`, *default:* `nil`) - Optional. The language used to access language-specific data. If not specified, the agent's default language is used. For more information, see [Multilingual intent and entity data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
  *   `updateMask` (*type:* `String.t`, *default:* `nil`) - Optional. The mask to control which fields get updated.
  """

  use GoogleApi.Gax.ModelBase

  # Struct type mirroring the API JSON payload; every field defaults to nil.
  @type t :: %__MODULE__{
          :entities =>
            list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2EntityTypeEntity.t()),
          :languageCode => String.t(),
          :updateMask => String.t()
        }

  # `field/1,3` comes from the GoogleApi.Gax.ModelBase `use` above and
  # declares each attribute; :entities is decoded as a list of
  # EntityTypeEntity models.
  field(:entities,
    as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2EntityTypeEntity,
    type: :list
  )

  field(:languageCode)
  field(:updateMask)
end
# Poison protocol glue: decoding delegates to the model's own decode/2,
# encoding goes through the shared Gax model encoder.
defimpl Poison.Decoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchUpdateEntitiesRequest do
  def decode(value, options) do
    GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchUpdateEntitiesRequest.decode(value, options)
  end
end

defimpl Poison.Encoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchUpdateEntitiesRequest do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 38.126984 | 320 | 0.74438 |
73cbaba29e1ba993b8f1befe3fb146aa8552923c | 2,781 | exs | Elixir | test/apns/notification_test.exs | Frameio/pigeon | 7de765179f74dbf13fbcc9cbe9ae1de59fe953e2 | [
"MIT"
] | 1 | 2021-09-05T13:34:21.000Z | 2021-09-05T13:34:21.000Z | test/apns/notification_test.exs | Frameio/pigeon | 7de765179f74dbf13fbcc9cbe9ae1de59fe953e2 | [
"MIT"
] | null | null | null | test/apns/notification_test.exs | Frameio/pigeon | 7de765179f74dbf13fbcc9cbe9ae1de59fe953e2 | [
"MIT"
] | null | null | null | defmodule Pigeon.APNS.NotificationTest do
use ExUnit.Case
# Shared fixture values used by every test below.
# Stand-in APNS device token.
def test_device_token, do: "test1234"
# Default alert message body.
def test_msg, do: "test"
# APNS topic (normally the app's bundle identifier).
def test_topic, do: "topic.Test"
# new/3 builds a notification with the alert nested under "aps".
test "new" do
  expected_result = %Pigeon.APNS.Notification{
    device_token: test_device_token(),
    topic: test_topic(),
    payload: %{"aps" => %{"alert" => test_msg()}},
    expiration: nil
  }

  built = Pigeon.APNS.Notification.new(test_msg(), test_device_token(), test_topic())

  assert built == expected_result
end

# put_alert/2 replaces the "alert" value with the given map.
test "put_alert" do
  alert = %{"title" => "Test Title", "body" => "test body"}

  notification =
    test_msg()
    |> Pigeon.APNS.Notification.new(test_device_token(), test_topic())
    |> Pigeon.APNS.Notification.put_alert(alert)

  assert notification.payload == %{"aps" => %{"alert" => alert}}
end

# put_badge/2 adds a "badge" entry next to the alert.
test "put_badge" do
  notification =
    test_msg()
    |> Pigeon.APNS.Notification.new(test_device_token(), test_topic())
    |> Pigeon.APNS.Notification.put_badge(5)

  assert notification.payload == %{"aps" => %{"alert" => test_msg(), "badge" => 5}}
end

# put_sound/2 adds a "sound" entry next to the alert.
test "put_sound" do
  notification =
    test_msg()
    |> Pigeon.APNS.Notification.new(test_device_token(), test_topic())
    |> Pigeon.APNS.Notification.put_sound("default")

  assert notification.payload == %{"aps" => %{"alert" => test_msg(), "sound" => "default"}}
end
# put_content_available/1 sets "content-available" => 1 inside "aps".
test "put_content_available" do
  notification =
    test_msg()
    |> Pigeon.APNS.Notification.new(test_device_token(), test_topic())
    |> Pigeon.APNS.Notification.put_content_available()

  assert notification.payload == %{"aps" => %{"alert" => test_msg(), "content-available" => 1}}
end

# put_category/2 adds a "category" entry inside "aps".
test "put_category" do
  notification =
    test_msg()
    |> Pigeon.APNS.Notification.new(test_device_token(), test_topic())
    |> Pigeon.APNS.Notification.put_category("test-category")

  assert notification.payload == %{"aps" => %{"alert" => test_msg(), "category" => "test-category"}}
end

# put_mutable_content/1 sets "mutable-content" => 1 inside "aps".
test "put_mutable_content" do
  notification =
    test_msg()
    |> Pigeon.APNS.Notification.new(test_device_token(), test_topic())
    |> Pigeon.APNS.Notification.put_mutable_content()

  assert notification.payload == %{"aps" => %{"alert" => test_msg(), "mutable-content" => 1}}
end

# put_custom/2 merges the custom map at the top level of the payload,
# outside of "aps".
test "put_custom" do
  custom = %{"custom-key" => %{"custom-value" => 500}}

  notification =
    test_msg()
    |> Pigeon.APNS.Notification.new(test_device_token(), test_topic())
    |> Pigeon.APNS.Notification.put_custom(custom)

  assert notification.payload == %{"aps" => %{"alert" => test_msg()}, "custom-key" => %{"custom-value" => 500}}
end
end
| 25.75 | 78 | 0.574973 |
73cc12f49d96599c46fdd14cbe64385dbb7b0d11 | 393 | ex | Elixir | lib/cgrates_web_jsonapi/jsonapi_resource.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 2 | 2018-10-03T07:41:32.000Z | 2021-03-21T11:27:27.000Z | lib/cgrates_web_jsonapi/jsonapi_resource.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 1 | 2018-10-31T04:55:59.000Z | 2018-10-31T04:55:59.000Z | lib/cgrates_web_jsonapi/jsonapi_resource.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 5 | 2018-09-27T11:30:44.000Z | 2021-01-16T08:28:58.000Z | defmodule CgratesWebJsonapi.JSONAPIResource do
@moduledoc """
Utils for building JSONAPI responses and handling requests.
"""

# Builds the standard pagination `meta` object from a Scrivener page struct.
@spec pagination_meta(Scrivener.Page.t()) :: %{
        "total-count": integer(),
        "total-pages": integer()
      }
def pagination_meta(page) do
  %{
    "total-count": page.total_entries,
    "total-pages": page.total_pages
  }
end
end
| 23.117647 | 57 | 0.633588 |
73cc5069ecf1adbbbd73512871aa1af14bb93a58 | 419 | exs | Elixir | config/test.exs | enter-haken/storm | 9730508c119763bcf5efe852387603dcc7eeb648 | [
"MIT"
] | null | null | null | config/test.exs | enter-haken/storm | 9730508c119763bcf5efe852387603dcc7eeb648 | [
"MIT"
] | 1 | 2021-03-28T22:57:30.000Z | 2021-04-10T23:05:04.000Z | config/test.exs | enter-haken/storm | 9730508c119763bcf5efe852387603dcc7eeb648 | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :storm, StormWeb.Endpoint,
  http: [port: 4002],
  server: false

# Print only warnings and errors during test
config :logger, :console, format: "[$level] $message\n"

# Database connection used by the test suite; credentials match a default
# local postgres setup with a dedicated storm_test database.
config :storm, :pg_config,
  hostname: "localhost",
  username: "postgres",
  password: "postgres",
  database: "storm_test"
| 22.052632 | 56 | 0.706444 |
73cc55f1207f7e9e75f9dd5762244a7a554bb132 | 1,542 | ex | Elixir | lib/bank_account_web/endpoint.ex | gapfranco/bank_account | ffa60fd76b70b9243e7eb575d52f22bdc31d196e | [
"MIT"
] | null | null | null | lib/bank_account_web/endpoint.ex | gapfranco/bank_account | ffa60fd76b70b9243e7eb575d52f22bdc31d196e | [
"MIT"
] | null | null | null | lib/bank_account_web/endpoint.ex | gapfranco/bank_account | ffa60fd76b70b9243e7eb575d52f22bdc31d196e | [
"MIT"
] | null | null | null | defmodule BankAccountWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :bank_account

# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
  store: :cookie,
  key: "_bank_account_key",
  signing_salt: "J/uI2xF6"
]

# Transport sockets: the app's user socket and the LiveView socket, which
# reuses the session options above via connect_info.
socket "/socket", BankAccountWeb.UserSocket,
  websocket: true,
  longpoll: false

socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]

# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
  at: "/",
  from: :bank_account,
  gzip: false,
  only: ~w(css fonts images js favicon.ico robots.txt)

# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
  plug Phoenix.CodeReloader
  plug Phoenix.Ecto.CheckRepoStatus, otp_app: :bank_account
end

# LiveDashboard request logger; the param/cookie keys let the dashboard
# toggle request logging per client.
plug Phoenix.LiveDashboard.RequestLogger,
  param_key: "request_logger",
  cookie_key: "request_logger"

plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

# Request-body parsing. NOTE: plugs run in the order they are declared,
# so keep this before Plug.Session and the router.
plug Plug.Parsers,
  parsers: [:urlencoded, :multipart, :json],
  pass: ["*/*"],
  json_decoder: Phoenix.json_library()

plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug BankAccountWeb.Router
end
| 29.09434 | 97 | 0.719844 |
73cc6263a487d9cf66b6c9b12ccd01c8861a5cc2 | 11,484 | ex | Elixir | lib/soap/request/params.ex | Liferenko/soap | 57a88052b7696d565f5597b2a50658bf20d01d3e | [
"MIT"
] | null | null | null | lib/soap/request/params.ex | Liferenko/soap | 57a88052b7696d565f5597b2a50658bf20d01d3e | [
"MIT"
] | null | null | null | lib/soap/request/params.ex | Liferenko/soap | 57a88052b7696d565f5597b2a50658bf20d01d3e | [
"MIT"
] | null | null | null | defmodule Soap.Request.Params do
@moduledoc """
Documentation for Soap.Request.Params.

Builds the XML body/header for outgoing SOAP requests and validates the
request parameters against the WSDL's validation types.
"""

import XmlBuilder, only: [element: 3, document: 1, generate: 2]

# TODO Remove before flight
# Temporary hard-coded envelope attributes used in place of the dynamically
# built ones (see add_envelope_tag_wrapper/3 below).
@our_hardcoded_schema_types %{
  "xmlns:SOAP-ENV" => "http://schemas.xmlsoap.org/soap/envelope/",
  "xmlns:SOAP-ENC" => "http://schemas.xmlsoap.org/soap/encoding/",
  # Remove before flight - unused "xmlns:ns1" => "urn:riamethods-make-order",
  "xmlns:ns2" => "http://credit.ria.com/types",
  "xmlns:xsd" => "http://www.w3.org/2001/XMLSchema",
  "xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance",
  "SOAP-ENV:encodingStyle" => "http://schemas.xmlsoap.org/soap/encoding/"
}

# Standard XML Schema namespaces added to every envelope.
@schema_types %{
  "xmlns:xsd" => "http://www.w3.org/2001/XMLSchema",
  "xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance"
}

# Envelope namespace URI per SOAP protocol version.
@soap_version_namespaces %{
  "1.1" => "http://schemas.xmlsoap.org/soap/envelope/",
  "1.2" => "http://www.w3.org/2003/05/soap-envelope"
}

# Regexes (as strings) used to validate xsd:date / xsd:dateTime values.
@date_type_regex "[0-9]{4}-[0-9]{2}-[0-9]{2}"
@date_time_type_regex "[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}"
@doc """
Parses the parameters map and generates the body XML for the given SOAP
operation, body params and headers.

Returns an XML-like string on success, or the error message(s) produced by
the body/header builders on failure.
"""
@spec build_body(wsdl :: map(), operation :: String.t() | atom(), params :: map(), headers :: map()) :: String.t()
def build_body(wsdl, operation, params, headers) do
  # The header is built (and validated) but intentionally not emitted:
  # including it produced a broken <env:Header /> element.
  # TODO Remove before flight: restore [header, body] once that is fixed.
  # `_header` is underscored to silence the unused-variable warning while
  # still requiring build_soap_header/3 to succeed.
  with {:ok, body} <- build_soap_body(wsdl, operation, params),
       {:ok, _header} <- build_soap_header(wsdl, operation, headers) do
    [body]
    |> add_envelope_tag_wrapper(wsdl, operation)
    |> document
    |> generate(format: :none)
    |> String.replace(["\n", "\t"], "")
  else
    {:error, message} -> message
  end
end
@spec validate_params(params :: any(), wsdl :: map(), operation :: String.t()) :: any()
def validate_params(params, _wsdl, _operation) when is_binary(params), do: params

# Validates every parameter against the WSDL validation types; returns the
# params unchanged when valid, or {:error, messages} otherwise.
def validate_params(params, wsdl, operation) do
  messages = Enum.map(params, &validate_param(&1, wsdl, operation))

  case Enum.reject(messages, &is_nil/1) do
    [] -> params
    errors -> {:error, errors}
  end
end

# Validates a single {tag, attributes, value} tuple; returns nil on success
# or an error message string.
@spec validate_param(param :: tuple(), wsdl :: map(), operation :: String.t()) :: String.t() | nil
defp validate_param(param, wsdl, operation) do
  {name, _, value} = param

  case wsdl.validation_types[String.downcase(operation)] do
    nil ->
      # No validation schema registered for this operation.
      nil

    val_map ->
      if Map.has_key?(val_map, name) do
        validate_param_attributes(val_map, name, value)
      else
        "Invalid SOAP message:Invalid content was found starting with element '#{name}'. One of {#{Enum.join(Map.keys(val_map), ", ")}} is expected."
      end
  end
end

@spec validate_param_attributes(val_map :: map(), k :: String.t(), v :: String.t()) :: String.t() | nil
defp validate_param_attributes(val_map, k, v) do
  # The declared type is namespace-qualified, e.g. "xsd:string".
  [_namespace, type] = String.split(val_map[k].type, ":")

  # Values arrive as strings; pure-integer strings are validated as numbers.
  case Integer.parse(v) do
    {number, ""} -> validate_type(k, number, type)
    _not_an_integer -> validate_type(k, v, type)
  end
end
# Type checks for individual SOAP parameter values. Each clause returns
# `nil` on success or an error-message string on failure.
defp validate_type(_k, v, "string") when is_binary(v), do: nil
defp validate_type(k, _v, type = "string"), do: type_error_message(k, type)

defp validate_type(_k, v, "decimal") when is_number(v), do: nil
defp validate_type(k, _v, type = "decimal"), do: type_error_message(k, type)

defp validate_type(k, v, "date") when is_binary(v) do
  case Regex.match?(~r/#{@date_type_regex}/, v) do
    true -> nil
    _ -> format_error_message(k, @date_type_regex)
  end
end

defp validate_type(k, _v, type = "date"), do: type_error_message(k, type)

defp validate_type(k, v, "dateTime") when is_binary(v) do
  # Bug fix: a stray trailing `nil` used to discard this case result, so
  # invalid dateTime values were silently accepted.
  case Regex.match?(~r/#{@date_time_type_regex}/, v) do
    true -> nil
    _ -> format_error_message(k, @date_time_type_regex)
  end
end

defp validate_type(k, _v, type = "dateTime"), do: type_error_message(k, type)
# Builds the <Body> fragment: converts `params` into XML tuples, validates
# them against the WSDL, then wraps them in the action and body tags.
defp build_soap_body(wsdl, operation, params) do
  validated =
    params
    |> construct_xml_request_body()
    |> validate_params(wsdl, operation)

  case validated do
    {:error, _messages} = error ->
      error

    validated_params ->
      {:ok,
       validated_params
       |> add_action_tag_wrapper(wsdl, operation)
       |> add_body_tag_wrapper()}
  end
end

# Builds the header part fragment for the request.
defp build_soap_header(wsdl, operation, headers) do
  case construct_xml_request_header(headers) do
    {:error, _messages} = error ->
      error

    validated_params ->
      # TODO remove before flight: |> add_header_tag_wrapper is disabled
      # because the emitted <env:Header /> was broken.
      {:ok, add_header_part_tag_wrapper(validated_params, wsdl, operation)}
  end
end

# Error message for a value whose type does not match the WSDL declaration.
defp type_error_message(k, type), do: "Element #{k} has wrong type. Expects #{type} type."

# Error message for a value that fails the expected regex format.
defp format_error_message(k, regex), do: "Element #{k} has wrong format. Expects #{regex} format."
# Recursively converts nested maps / keyword lists / tuples into the
# {tag, attributes, content} tuples expected by XmlBuilder.
@spec construct_xml_request_body(params :: map() | list()) :: list()
defp construct_xml_request_body(params) when is_map(params) or is_list(params) do
  Enum.map(params, &construct_xml_request_body/1)
end

@spec construct_xml_request_body(params :: tuple()) :: tuple()
defp construct_xml_request_body(params) when is_tuple(params) do
  params
  |> Tuple.to_list()
  |> Enum.map(&construct_xml_request_body/1)
  |> insert_tag_parameters()
  |> List.to_tuple()
end

@spec construct_xml_request_body(params :: String.t() | atom() | number()) :: String.t()
defp construct_xml_request_body(params) when is_atom(params) or is_number(params), do: to_string(params)
defp construct_xml_request_body(params) when is_binary(params), do: params

# Same conversion for the header part of the request.
@spec construct_xml_request_header(params :: map() | list()) :: list()
defp construct_xml_request_header(params) when is_map(params) or is_list(params) do
  Enum.map(params, &construct_xml_request_header/1)
end

@spec construct_xml_request_header(params :: tuple()) :: tuple()
defp construct_xml_request_header(params) when is_tuple(params) do
  params
  |> Tuple.to_list()
  |> Enum.map(&construct_xml_request_header/1)
  |> insert_tag_parameters()
  |> List.to_tuple()
end

@spec construct_xml_request_header(params :: String.t() | atom() | number()) :: String.t()
defp construct_xml_request_header(params) when is_atom(params) or is_number(params), do: to_string(params)
defp construct_xml_request_header(params) when is_binary(params), do: params

# Inserts the (empty) attributes slot between tag name and content:
# [tag, content] -> [tag, nil, content].
@spec insert_tag_parameters(params :: list()) :: list()
defp insert_tag_parameters(params) when is_list(params), do: List.insert_at(params, 1, nil)
# Wraps `body` in the operation's action tag (e.g. <tns:SomeOperation>).
@spec add_action_tag_wrapper(list(), map(), String.t()) :: list()
defp add_action_tag_wrapper(body, wsdl, operation) do
  attributes = handle_element_form_default(wsdl[:schema_attributes])

  tag =
    wsdl
    |> get_action_with_namespace(operation)
    |> prepare_action_tag(operation)

  [element(tag, attributes, body)]
end

# Wraps `body` in the header part tag, or returns nil when the operation
# declares no header part in the WSDL.
@spec add_header_part_tag_wrapper(list(), map(), String.t()) :: list()
defp add_header_part_tag_wrapper(body, wsdl, operation) do
  attributes = handle_element_form_default(wsdl[:schema_attributes])

  case get_header_with_namespace(wsdl, operation) do
    nil -> nil
    tag -> [element(tag, attributes, body)]
  end
end

# "qualified" schemas carry the target namespace on the action element.
defp handle_element_form_default(%{target_namespace: ns, element_form_default: "qualified"}), do: %{xmlns: ns}
defp handle_element_form_default(_schema_attributes), do: %{}

# Falls back to the bare operation name when no namespaced tag was found.
defp prepare_action_tag("", operation), do: operation
defp prepare_action_tag(action_tag, _operation), do: action_tag
# Looks up the namespaced action type for `operation` in the WSDL's
# complex types; returns "" when the WSDL declares no complex types.
@spec get_action_with_namespace(wsdl :: map(), operation :: String.t()) :: String.t()
defp get_action_with_namespace(wsdl, operation) do
  case wsdl[:complex_types] do
    [] ->
      "" # TODO REMOVE BEFORE FLIGHT: complex types may need to be hard-coded here
    _ ->
      wsdl[:complex_types]
      |> Enum.find(fn x -> x[:name] == operation end)
      |> handle_action_extractor_result(wsdl, operation)
  end
end

# Resolves the header-part element name for `operation`, or nil when the
# operation's input declares no header (any non-matching step falls through
# the `with` and returns nil).
@spec get_header_with_namespace(wsdl :: map(), operation :: String.t()) :: String.t()
defp get_header_with_namespace(wsdl, operation) do
  with %{input: %{header: %{message: message, part: part}}} <-
         Enum.find(wsdl[:operations], &(&1[:name] == operation)),
       %{name: name} <- get_message_part(wsdl, message, part) do
    name
  else
    _ -> nil
  end
end

# Finds the named part of a "tns:"-qualified message in the WSDL.
# NOTE(review): raises if the message is unknown (Map.get on nil) — callers
# rely on get_header_with_namespace/2 matching first.
defp get_message_part(wsdl, message, part) do
  wsdl[:messages]
  |> Enum.find(&("tns:#{&1[:name]}" == message))
  |> Map.get(:parts)
  |> Enum.find(&(&1[:name] == part))
end

# When no exact-name match was found, retries with the camelized name
# before extracting the :type field.
defp handle_action_extractor_result(nil, wsdl, operation) do
  wsdl[:complex_types]
  |> Enum.find(fn x -> Macro.camelize(x[:name]) == operation end)
  |> Map.get(:type)
end

defp handle_action_extractor_result(result, _wsdl, _operation), do: Map.get(result, :type)

# Namespace prefix of the action type, e.g. "tns" from "tns:SomeOperation".
@spec get_action_namespace(wsdl :: map(), operation :: String.t()) :: String.t()
defp get_action_namespace(wsdl, operation) do
  wsdl
  |> get_action_with_namespace(operation)
  |> String.split(":")
  |> List.first()
end
# Wraps content in the SOAP <Body> / <Header> elements of the configured
# envelope namespace.
@spec add_body_tag_wrapper(list()) :: list()
defp add_body_tag_wrapper(body), do: [element(:"#{env_namespace()}:Body", nil, body)]

@spec add_header_tag_wrapper(list()) :: list()
defp add_header_tag_wrapper(body), do: [element(:"#{env_namespace()}:Header", nil, body)]

# Wraps the whole document in the SOAP <Envelope> element.
@spec add_envelope_tag_wrapper(body :: any(), wsdl :: map(), operation :: String.t()) :: any()
defp add_envelope_tag_wrapper(body, wsdl, operation) do
  # TODO Remove before flight: envelope attributes are temporarily taken
  # from @our_hardcoded_schema_types. The dynamic construction below is
  # disabled; it is kept here as a comment for reference. (Previously it
  # sat in a dead heredoc string that was built and discarded on every
  # call — converted to comments so no dead expression is evaluated.)
  #
  #   envelop_attributes =
  #     @schema_types
  #     |> Map.merge(build_soap_version_attribute(wsdl))
  #     |> Map.merge(build_action_attribute(wsdl, operation))
  #     |> Map.merge(custom_namespaces())
  #
  envelop_attributes = @our_hardcoded_schema_types

  [element(:"#{env_namespace()}:Envelope", envelop_attributes, body)]
end
# xmlns attribute for the envelope namespace, keyed by SOAP version.
@spec build_soap_version_attribute(Map.t()) :: map()
defp build_soap_version_attribute(wsdl) do
  version = wsdl |> soap_version() |> to_string()
  %{"xmlns:#{env_namespace()}" => @soap_version_namespaces[version]}
end

# xmlns attribute for the operation's own namespace, looked up in the WSDL.
@spec build_action_attribute(map(), String.t()) :: map()
defp build_action_attribute(wsdl, operation) do
  namespace = get_action_namespace(wsdl, operation)
  prepare_action_attribute(namespace, wsdl[:namespaces][namespace][:value])
end

# No namespace value found -> no extra attribute.
defp prepare_action_attribute(_action_attribute_namespace, nil), do: %{}

defp prepare_action_attribute(action_attribute_namespace, action_attribute_value) do
  %{"xmlns:#{action_attribute_namespace}" => action_attribute_value}
end

# SOAP version: per-request override in the WSDL map, else app config.
defp soap_version(wsdl) do
  Map.get(wsdl, :soap_version, Application.fetch_env!(:soap, :globals)[:version])
end

# Envelope namespace prefix (defaults to :env) and user-supplied namespaces.
defp env_namespace, do: Application.fetch_env!(:soap, :globals)[:env_namespace] || :env
defp custom_namespaces, do: Application.fetch_env!(:soap, :globals)[:custom_namespaces] || %{}
end
| 35.226994 | 116 | 0.670759 |
73cc7f6c351c7b7b5b317022a9b69357bca4c7d7 | 71 | ex | Elixir | lib/siwapp_web/views/user_registration_view.ex | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | null | null | null | lib/siwapp_web/views/user_registration_view.ex | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | null | null | null | lib/siwapp_web/views/user_registration_view.ex | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | null | null | null | defmodule SiwappWeb.UserRegistrationView do
use SiwappWeb, :view
end
| 17.75 | 43 | 0.830986 |
73cc882890199ebe2159c5399d1fd4cdee2c61be | 1,742 | exs | Elixir | lib/mix/test/mix/tasks/compile_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/compile_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/compile_test.exs | sunaku/elixir | 8aa43eaedd76be8ac0d495049eb9ecd56971f4fe | [
"Apache-2.0"
] | 1 | 2020-12-07T08:04:16.000Z | 2020-12-07T08:04:16.000Z | Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.CompileTest do
  use MixTest.Case

  # Fixture project declaring a custom compiler list.
  defmodule CustomCompilers do
    def project do
      [compilers: [:elixir, :app, :custom]]
    end
  end

  setup do
    Mix.Project.push(MixTest.Case.Sample)
    :ok
  end

  test "mix compile --list with mixfile" do
    Mix.Tasks.Compile.run(["--list"])

    assert_received {:mix_shell, :info, ["\nEnabled compilers: yecc, leex, erlang, elixir, app"]}
    assert_received {:mix_shell, :info, ["mix compile.elixir # " <> _]}
  end

  test "mix compile --list with custom mixfile" do
    Mix.Project.push(CustomCompilers)
    Mix.Tasks.Compile.run(["--list"])

    assert_received {:mix_shell, :info, ["\nEnabled compilers: elixir, app, custom"]}
  end

  test "compile a project with mixfile" do
    in_fixture("no_mixfile", fn ->
      Mix.Tasks.Compile.run([])

      assert File.regular?("_build/dev/lib/sample/ebin/Elixir.A.beam")
      assert File.regular?("_build/dev/lib/sample/ebin/sample.app")
      assert_received {:mix_shell, :info, ["Compiled lib/a.ex"]}
      assert_received {:mix_shell, :info, ["Generated sample.app"]}
    end)
  end

  test "compile a project with multiple compilers and a syntax error in an erlang file" do
    in_fixture("no_mixfile", fn ->
      import ExUnit.CaptureIO

      File.mkdir!("src")

      # Deliberately invalid Erlang source to trigger a compile error.
      File.write!("src/a.erl", """
      -module(b).
      def b(), do: b
      """)

      assert File.regular?("src/a.erl")

      assert_raise CompileError, fn ->
        capture_io(fn -> Mix.Tasks.Compile.run(["--force"]) end)
      end

      refute File.regular?("ebin/Elixir.A.beam")
      refute File.regular?("ebin/Elixir.B.beam")
      refute File.regular?("ebin/Elixir.C.beam")
    end)
  end
end
| 29.033333 | 97 | 0.647532 |
73cc8ff54b33f1179a718002363c9b4c1f622fcd | 785 | exs | Elixir | mix.exs | cwbriones/stache | 51eb1435c81975a2ce680eca8263cd5a146d80f8 | [
"MIT"
] | 1 | 2016-05-02T10:47:45.000Z | 2016-05-02T10:47:45.000Z | mix.exs | cwbriones/stache | 51eb1435c81975a2ce680eca8263cd5a146d80f8 | [
"MIT"
] | null | null | null | mix.exs | cwbriones/stache | 51eb1435c81975a2ce680eca8263cd5a146d80f8 | [
"MIT"
] | null | null | null | defmodule Stache.Mixfile do
use Mix.Project
# Mix project definition. Fix: `deps()`, `description()` and `package()` are
# written as explicit calls — a bare `deps` is ambiguous between a variable
# and a zero-arity call, which the Elixir compiler deprecates/warns about.
def project do
  [app: :stache,
   version: "0.2.1",
   elixir: "~> 1.2",
   deps: deps(),
   description: description(),
   package: package()
  ]
end
# Hex summary line.
defp description, do: "Mustache templates in Elixir."

# Hex package metadata: shipped files, maintainers, license and links.
def package do
  [
    files: ["lib", "mix.exs", "README.md", "LICENSE"],
    maintainers: ["Christian Briones"],
    licenses: ["MIT"],
    links: %{"GitHub" => "https://github.com/cwbriones/stache"}
  ]
end

# No extra OTP applications are started by this library.
def application do
  [applications: []]
end
# Development/test dependencies only; the library itself has no runtime deps.
# Bug fix: removed the trailing comma after the :earmark entry — Elixir does
# not allow trailing commas in list literals.
defp deps do
  [
    {:yamler, git: "https://github.com/goertzenator/yamler", tag: "16ebac5c", only: :test},
    {:ex_doc, "~> 0.11.4", only: :dev},
    {:earmark, "~> 0.2.1", only: :dev}
  ]
end
end
| 19.146341 | 93 | 0.549045 |
73ccd45fd53484c3b0b471c268aeda8b5ed1151a | 343 | exs | Elixir | priv/repo/seeds.exs | sudokid-software/12-day-mmo | 8b3038e754078a3361de6c9b516cb192f2ef0202 | [
"BSD-2-Clause"
] | 1 | 2021-01-04T21:25:35.000Z | 2021-01-04T21:25:35.000Z | priv/repo/seeds.exs | visheshc14/Prisonic-Fairytale | 91152fbf3662952146e29ff36a45ad20bebe4558 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | visheshc14/Prisonic-Fairytale | 91152fbf3662952146e29ff36a45ad20bebe4558 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Mmo.Repo.insert!(%Mmo.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 28.583333 | 61 | 0.699708 |
73ccd511f75af55429b3ade16a19e1c62136e0cc | 5,156 | ex | Elixir | clients/logging/lib/google_api/logging/v2/model/log_sink.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/logging/lib/google_api/logging/v2/model/log_sink.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/logging/lib/google_api/logging/v2/model/log_sink.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Logging.V2.Model.LogSink do
  @moduledoc """
  Describes a sink used to export log entries to one of the following destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter controls which log entries are exported. The sink must be created within a project, organization, billing account, or folder.

  ## Attributes

  *   `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The creation timestamp of the sink.This field may not be present for older sinks.
  *   `destination` (*type:* `String.t`, *default:* `nil`) - Required. The export destination:
      "storage.googleapis.com/[GCS_BUCKET]"
      "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]"
      "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]"
      The sink's writer_identity, set when the sink is created, must have permission to write to the destination or else the log entries are not exported. For more information, see Exporting Logs with Sinks.
  *   `filter` (*type:* `String.t`, *default:* `nil`) - Optional. An advanced logs filter. The only exported log entries are those that are in the resource owning the sink and that match the filter. For example:
      logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR
  *   `includeChildren` (*type:* `boolean()`, *default:* `nil`) - Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, only the logs owned by the sink's parent resource are available for export. If the field is true, then logs from all the projects, folders, and billing accounts contained in the sink's parent resource are also available for export. Whether a particular log entry from the children is exported depends on the sink's filter expression. For example, if this field is true, then the filter resource.type=gce_instance would export all Compute Engine VM instance log entries from all projects in the sink's parent. To only export entries from certain child projects, filter on the project part of the log name:
      logName:("projects/test-project1/" OR "projects/test-project2/") AND
      resource.type=gce_instance
  *   `name` (*type:* `String.t`, *default:* `nil`) - Required. The client-assigned sink identifier, unique within the project. Example: "my-syslog-errors-to-pubsub". Sink identifiers are limited to 100 characters and can include only the following characters: upper and lower-case alphanumeric characters, underscores, hyphens, and periods.
  *   `outputVersionFormat` (*type:* `String.t`, *default:* `nil`) - Deprecated. The log entry format to use for this sink's exported log entries. The v2 format is used by default and cannot be changed.
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The last update timestamp of the sink.This field may not be present for older sinks.
  *   `writerIdentity` (*type:* `String.t`, *default:* `nil`) - Output only. An IAM identity—a service account or group—under which Logging writes the exported log entries to the sink's destination. This field is set by sinks.create and sinks.update based on the value of unique_writer_identity in those methods.Until you grant this identity write-access to the destination, log entry exports from this sink will fail. For more information, see Granting Access for a Resource. Consult the destination service's documentation to determine the appropriate IAM roles to assign to the identity.
  """

  use GoogleApi.Gax.ModelBase

  # Struct type mirroring the API JSON payload; every field defaults to nil.
  @type t :: %__MODULE__{
          :createTime => DateTime.t(),
          :destination => String.t(),
          :filter => String.t(),
          :includeChildren => boolean(),
          :name => String.t(),
          :outputVersionFormat => String.t(),
          :updateTime => DateTime.t(),
          :writerIdentity => String.t()
        }

  # `field/1,2` comes from the GoogleApi.Gax.ModelBase `use` above and
  # declares each attribute; timestamps decode as DateTime structs.
  field(:createTime, as: DateTime)
  field(:destination)
  field(:filter)
  field(:includeChildren)
  field(:name)
  field(:outputVersionFormat)
  field(:updateTime, as: DateTime)
  field(:writerIdentity)
end
# Poison protocol glue: decoding delegates to the model's generated
# decode/2, encoding goes through the shared Gax model encoder.
defimpl Poison.Decoder, for: GoogleApi.Logging.V2.Model.LogSink do
  def decode(value, options), do: GoogleApi.Logging.V2.Model.LogSink.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.Logging.V2.Model.LogSink do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 66.961039 | 785 | 0.736811 |
73ccdbcca17f43c70374733f32e32339219d9c31 | 2,279 | exs | Elixir | test/transaction_refund_test.exs | cas27/ex_taxjar | 507d474dbd7e72a21b2e14194b39170b535913bc | [
"MIT"
] | 6 | 2018-04-13T17:50:57.000Z | 2019-09-08T01:25:56.000Z | test/transaction_refund_test.exs | cas27/ex_taxjar | 507d474dbd7e72a21b2e14194b39170b535913bc | [
"MIT"
] | null | null | null | test/transaction_refund_test.exs | cas27/ex_taxjar | 507d474dbd7e72a21b2e14194b39170b535913bc | [
"MIT"
] | 1 | 2021-06-24T20:11:16.000Z | 2021-06-24T20:11:16.000Z | defmodule ExTaxjar.TransactionRefundTest do
use ExUnit.Case
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
alias ExTaxjar.{TransactionRefund, Refund}
describe "ExTaxjar.TransactionRefund.list/1" do
test "with date range" do
use_cassette "transactions_refund#list-range" do
refunds = TransactionRefund.list(%{from_date: "2017/01/01", to_date: "2017/01/31"})
assert refunds == ["with_sales_tax_refund", "default"]
end
end
test "with specific day" do
use_cassette "transactions_refund#list" do
refunds = TransactionRefund.list(%{on_date: "2017/01/31"})
assert refunds == ["with_sales_tax_refund", "default"]
end
end
end
describe "ExTaxjar.TransactionRefund.show/1" do
test "get existing refund" do
use_cassette "transactions#refund" do
refund = TransactionRefund.show("321")
assert refund["amount"] == "-29.94"
end
end
end
describe "ExTaxjar.TransactionRefund.create/1" do
test "creates a new refund transaction" do
use_cassette "transactions#create_refund" do
refund = %Refund{
transaction_id: "1999",
transaction_reference_id: "1999",
transaction_date: "1999/01/01",
to_country: "US",
to_zip: "90210",
to_state: "CA",
amount: 19.99,
shipping: 4.99,
sales_tax: 1.19
}
resp = TransactionRefund.create(refund)
assert resp["transaction_id"] == "1999"
end
end
end
describe "ExTaxjar.TransactionRefund.update/1" do
test "update transaction refund" do
use_cassette "transactions#update_refund" do
refund = %Refund.Update{
transaction_id: "321",
transaction_reference_id: "1999",
amount: 39.99
}
resp = TransactionRefund.update(refund)
assert resp["amount"] == "-39.99"
end
end
end
describe "ExTaxjar.TransactionRefund.delete/1" do
test "delete transaction refund" do
use_cassette "transactions#delete_refund" do
resp = TransactionRefund.delete("321")
assert resp["amount"] == nil
assert resp["to_country"] == nil
assert resp["transaction_id"] == "321"
end
end
end
end
| 28.848101 | 91 | 0.632295 |
73ccddc67b8fbd224519826c5150a3a4c5bd3de5 | 956 | exs | Elixir | test/elir_test.exs | azohra/elir | 45a7fcc051949d77a791cdad0b8f0b04576afd8b | [
"MIT"
] | 5 | 2017-09-23T19:35:30.000Z | 2017-09-27T20:00:02.000Z | test/elir_test.exs | azohra/elir | 45a7fcc051949d77a791cdad0b8f0b04576afd8b | [
"MIT"
] | 4 | 2017-10-02T15:43:21.000Z | 2017-10-03T01:21:26.000Z | test/elir_test.exs | azohra/elir | 45a7fcc051949d77a791cdad0b8f0b04576afd8b | [
"MIT"
] | null | null | null | defmodule ElirTest do
use ExUnit.Case
@elir_demo_project_path "rspec_demo"
@env %{"devices" => ["desktop", "mobile"], "languages" => ["en", "babylonian"]}
test "Elir can be configured from a YAML file in a relative path" do
assert {:ok, file} = Elir.configure(@elir_demo_project_path)
assert %{"elir" => %{"env" => _env, "cmd" => _cmd}} = file
end
test "Elir can be configured from a YAML file given in a fully specified path" do
fqp = File.cwd! |> Path.join(@elir_demo_project_path)
assert {:ok, file} = Elir.configure(fqp)
assert %{"elir" => %{"env" => _env, "cmd" => _cmd}} = file
end
test "poor-man cartesian values" do
assert [
[{"device", "desktop"}, {"language", "en"}],
[{"device", "desktop"}, {"language", "babylonian"}],
[{"device", "mobile"}, {"language", "en"}],
[{"device", "mobile"}, {"language", "babylonian"}]
] == Elir.Utils.cartesian(@env, true)
end
end
| 34.142857 | 83 | 0.599372 |
73ccec57abc25d089c66a4fd291502396e5a3604 | 1,549 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_cloud_storage_path.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_cloud_storage_path.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_cloud_storage_path.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2CloudStoragePath do
  @moduledoc """
  Message representing a single file or path in Cloud Storage.

  ## Attributes

  *   `path` (*type:* `String.t`, *default:* `nil`) - A url representing a file or path (no wildcards) in Cloud Storage.
      Example: gs://[BUCKET_NAME]/dictionary.txt
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :path => String.t()
        }

  # Generated field accessor/metadata for JSON (de)serialization via ModelBase.
  field(:path)
end
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2CloudStoragePath do
  # Delegate JSON decoding to the model's own generated decode/2.
  def decode(value, options) do
    GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2CloudStoragePath.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2CloudStoragePath do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 32.270833 | 120 | 0.742414 |
73ccff67c06777d6ff48965894914c838f93445c | 673 | ex | Elixir | fixtures/module_with_nested.ex | stephanos/rewire | 0dbbec223aa3fa667c9fe428f3f59c8c24a38b39 | [
"Apache-2.0"
] | 54 | 2020-10-06T09:11:06.000Z | 2022-03-14T12:47:49.000Z | fixtures/module_with_nested.ex | stephanos/rewire | 0dbbec223aa3fa667c9fe428f3f59c8c24a38b39 | [
"Apache-2.0"
] | 6 | 2020-10-07T04:15:37.000Z | 2021-11-15T18:46:10.000Z | fixtures/module_with_nested.ex | stephanos/rewire | 0dbbec223aa3fa667c9fe428f3f59c8c24a38b39 | [
"Apache-2.0"
] | 1 | 2020-10-05T19:36:19.000Z | 2020-10-05T19:36:19.000Z | defmodule Rewire.ModuleWithNested do
defmodule RedHerring do
end
defmodule Nested do
alias Rewire.ModuleWithNested.Nested.NestedNested
@nested Rewire.ModuleWithNested.Nested.NestedNested
def hello(), do: NestedNested.hello()
def hello_with_property(), do: @nested.hello_with_property()
defmodule NestedNested do
@hello Rewire.Application.compile_env!(:rewire, :hello)
def hello(), do: Rewire.Hello.hello()
def hello_with_property(), do: @hello.hello()
end
end
defmodule AnotherRedHerring do
end
@nested Nested
def hello(), do: Nested.hello()
def hello_with_property(), do: @nested.hello_with_property()
end
| 24.925926 | 64 | 0.728083 |
73cd0ebd1fe813d20f9df01fba95a9c6110d8288 | 2,888 | exs | Elixir | mix.exs | gissandrogama/find_my_personal | 12f23b37e967949af9f3e511cf1ce684452d8f66 | [
"MIT"
] | null | null | null | mix.exs | gissandrogama/find_my_personal | 12f23b37e967949af9f3e511cf1ce684452d8f66 | [
"MIT"
] | 11 | 2022-01-12T02:39:42.000Z | 2022-02-09T02:17:04.000Z | mix.exs | gissandrogama/find_my_personal | 12f23b37e967949af9f3e511cf1ce684452d8f66 | [
"MIT"
] | null | null | null | defmodule FindMyPersonal.MixProject do
@moduledoc """
this mix
"""
use Mix.Project
@github_url "https://github.com/gissandrogama/find_my_personal.git"
def project do
[
app: :find_my_personal,
version: "0.1.0",
elixir: "~> 1.12",
description: "Aplicação find my personal, que liga pessoas com personal",
source_url: @github_url,
homepage_url: @github_url,
files: ~w(mix.exs lib LICENSE.md README.md CHANGELOG.md),
package: [
maintainers: ["Gissandro Gama"],
licenses: ["MIT"],
links: %{
"Github" => @github_url
}
],
docs: [
main: "readme",
extras: ["README.md", "CHANGELOG.MD"]
],
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.json": :test,
"coveralls.html": :test
]
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {FindMyPersonal.Application, []},
extra_applications: [:logger, :runtime_tools, :bamboo, :bamboo_smtp]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.5.13"},
{:phoenix_ecto, "~> 4.4"},
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_dashboard, "~> 0.4"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.5"},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:sobelow, "~> 0.11.0", only: :dev},
{:excoveralls, "~> 0.14.3", only: :test},
{:bamboo, "~> 2.2"},
{:bamboo_smtp, "~> 4.1"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
]
end
end
| 29.171717 | 84 | 0.567867 |
73cd2d3b993d39f69747ccfaba6448861bf45b07 | 625 | ex | Elixir | lib/stages/stage_munged_ops.ex | cyberpunk-ventures/ex_golos | 2a8f32488b84590ddbb58fd12aaa1cbb9f48efd1 | [
"MIT"
] | 5 | 2016-12-06T17:04:30.000Z | 2017-02-26T19:08:35.000Z | lib/stages/stage_munged_ops.ex | cyberpunk-ventures/ex_golos | 2a8f32488b84590ddbb58fd12aaa1cbb9f48efd1 | [
"MIT"
] | null | null | null | lib/stages/stage_munged_ops.ex | cyberpunk-ventures/ex_golos | 2a8f32488b84590ddbb58fd12aaa1cbb9f48efd1 | [
"MIT"
] | null | null | null | defmodule Golos.Stage.MungedOps do
use GenStage
require Logger
alias Golos.RawOps
def start_link(args, options) do
GenStage.start_link(__MODULE__, args, options)
end
def init(state) do
Logger.info("Golos structured ops producer initializing...")
{:producer_consumer, state,
subscribe_to: state[:subscribe_to], dispatcher: GenStage.BroadcastDispatcher}
end
def handle_events(events, _from, number) do
structured_events =
for event <- List.flatten(events) do
Map.update!(event, :data, &RawOps.Munger.parse/1)
end
{:noreply, structured_events, number}
end
end
| 24.038462 | 82 | 0.712 |
73cd2e362da09e676b5323f5562d73a35014a76f | 2,580 | ex | Elixir | lib/pundit/default_policy.ex | nonrational/pundit-elixir | 5ca084c5457fa4f97ad1fd7ab9b2024b9900d805 | [
"MIT"
] | 24 | 2019-03-24T21:04:09.000Z | 2022-01-03T02:59:58.000Z | lib/pundit/default_policy.ex | nonrational/pundit-elixir | 5ca084c5457fa4f97ad1fd7ab9b2024b9900d805 | [
"MIT"
] | 1 | 2021-03-24T13:27:54.000Z | 2021-03-24T13:27:54.000Z | lib/pundit/default_policy.ex | nonrational/pundit-elixir | 5ca084c5457fa4f97ad1fd7ab9b2024b9900d805 | [
"MIT"
] | 4 | 2019-10-14T19:59:28.000Z | 2021-03-24T12:30:06.000Z | defmodule Pundit.DefaultPolicy do
@moduledoc """
Default access policies for a given type.
All of the functions here are named for actions in a [Phoenix controller](https://hexdocs.pm/phoenix/controllers.html#actions).
If you `use` this module, then default implementations will be added in your module that all
return `false` by default (default safe, nothing is permitted). All are overrideable.
"""
@doc """
Returns true only if the user should be allowed to see an index (list) of the given things.
"""
@callback index?(thing :: struct() | module(), user :: term()) :: boolean()
@doc """
Returns true only if the user should be allowed to see the given thing.
"""
@callback show?(thing :: struct() | module(), user :: term()) :: boolean()
@doc """
Returns true only if the user should be allowed to create a new kind of thing.
"""
@callback create?(thing :: struct() | module(), user :: term()) :: boolean()
@doc """
Returns true only if the user should be allowed to see a form to create a new thing.
See [the page on Phoenix controllers](https://hexdocs.pm/phoenix/controllers.html#actions) for more details on the
purpose of this action.
"""
@callback new?(thing :: struct() | module(), user :: term()) :: boolean()
@doc """
Returns true only if the user should be allowed to update the attributes of a thing.
"""
@callback update?(thing :: struct() | module(), user :: term()) :: boolean()
@doc """
Returns true only if the user should be allowed to see a form for updating the thing.
See [the page on Phoenix controllers](https://hexdocs.pm/phoenix/controllers.html#actions) for more details on the
purpose of this action.
"""
@callback edit?(thing :: struct() | module(), user :: term()) :: boolean()
@doc """
Returns true only if the user should be allowed to delete a thing.
"""
@callback delete?(thing :: struct() | module(), user :: term()) :: boolean()
defmacro __using__(_) do
quote do
@behaviour Pundit.DefaultPolicy
def index?(_thing, _user), do: false
def show?(_thing, _user), do: false
def create?(_thing, _user), do: false
def new?(_thing, _user), do: false
def update?(_thing, _user), do: false
def edit?(_thing, _user), do: false
def delete?(_thing, _user), do: false
defoverridable index?: 2,
show?: 2,
create?: 2,
new?: 2,
update?: 2,
edit?: 2,
delete?: 2
end
end
end
| 34.864865 | 129 | 0.627132 |
73cd3a2e01561d4ff6cccd5727ac0518c9ba4b38 | 612 | ex | Elixir | lib/banned_book_report/release.ex | LannyBose/banned-book-report | 38de7d6399609c104f4821dc06672e8f583ecaed | [
"MIT"
] | null | null | null | lib/banned_book_report/release.ex | LannyBose/banned-book-report | 38de7d6399609c104f4821dc06672e8f583ecaed | [
"MIT"
] | null | null | null | lib/banned_book_report/release.ex | LannyBose/banned-book-report | 38de7d6399609c104f4821dc06672e8f583ecaed | [
"MIT"
] | null | null | null | defmodule BannedBookReport.Release do
@moduledoc """
Used for executing DB release tasks when run in production without Mix
installed.
"""
@app :banned_book_report
def migrate do
load_app()
for repo <- repos() do
{:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :up, all: true))
end
end
def rollback(repo, version) do
load_app()
{:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
end
defp repos do
Application.fetch_env!(@app, :ecto_repos)
end
defp load_app do
Application.load(@app)
end
end
| 21.103448 | 91 | 0.660131 |
73cd6ce54985a269912e3691b4432259920e682e | 2,210 | exs | Elixir | test/server_tests/node_addition_deletion_test.exs | appoks/opcua-elixir-telemetry | 7f8d1f8190d4b647ced4b2a788f7fd20e87a767f | [
"MIT"
] | null | null | null | test/server_tests/node_addition_deletion_test.exs | appoks/opcua-elixir-telemetry | 7f8d1f8190d4b647ced4b2a788f7fd20e87a767f | [
"MIT"
] | null | null | null | test/server_tests/node_addition_deletion_test.exs | appoks/opcua-elixir-telemetry | 7f8d1f8190d4b647ced4b2a788f7fd20e87a767f | [
"MIT"
] | null | null | null | defmodule ServerNodeAdditionDeletionTest do
use ExUnit.Case
alias OpcUA.{NodeId, Server, QualifiedName}
setup do
{:ok, pid} = OpcUA.Server.start_link()
Server.set_default_config(pid)
%{pid: pid}
end
test "Add a namespace", state do
{:ok, ns_index} = OpcUA.Server.add_namespace(state.pid, "Room")
assert is_integer(ns_index)
end
test "Add object and variable nodes ", state do
{:ok, ns_index} = OpcUA.Server.add_namespace(state.pid, "Room")
# Object Node
requested_new_node_id =
NodeId.new(ns_index: ns_index, identifier_type: "string", identifier: "R1_TS1_VendorName")
parent_node_id = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 85)
reference_type_node_id = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 35)
browse_name = QualifiedName.new(ns_index: ns_index, name: "Temperature sensor")
type_definition = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 58)
resp =
Server.add_object_node(state.pid,
requested_new_node_id: requested_new_node_id,
parent_node_id: parent_node_id,
reference_type_node_id: reference_type_node_id,
browse_name: browse_name,
type_definition: type_definition
)
assert resp == :ok
# Variable Node
requested_new_node_id =
NodeId.new(ns_index: ns_index, identifier_type: "string", identifier: "R1_TS1_Temperature")
parent_node_id =
NodeId.new(ns_index: ns_index, identifier_type: "string", identifier: "R1_TS1_VendorName")
reference_type_node_id = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 47)
browse_name = QualifiedName.new(ns_index: ns_index, name: "Temperature")
type_definition = NodeId.new(ns_index: 0, identifier_type: "integer", identifier: 63)
resp =
Server.add_variable_node(state.pid,
requested_new_node_id: requested_new_node_id,
parent_node_id: parent_node_id,
reference_type_node_id: reference_type_node_id,
browse_name: browse_name,
type_definition: type_definition
)
assert resp == :ok
end
end
| 35.079365 | 98 | 0.69457 |
73cd725984ac67349dfc650be65bd856bc43a350 | 1,278 | ex | Elixir | lib/policr_mini_bot/plugs/handle_new_chat_photo_plug.ex | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | 487 | 2020-06-08T03:04:21.000Z | 2022-03-31T14:51:36.000Z | lib/policr_mini_bot/plugs/handle_new_chat_photo_plug.ex | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | 141 | 2020-06-11T01:03:29.000Z | 2022-03-30T20:23:32.000Z | lib/policr_mini_bot/plugs/handle_new_chat_photo_plug.ex | WOCyo/policr-mini | 23e5e9f927d083cba5993f39e9f122e3a0ad1be2 | [
"MIT"
] | 61 | 2020-06-10T05:25:03.000Z | 2022-03-23T15:54:26.000Z | defmodule PolicrMiniBot.HandleNewChatPhotoPlug do
@moduledoc """
新群组头像处理器。
"""
use PolicrMiniBot, plug: :message_handler
alias PolicrMini.Instances
alias PolicrMini.Instances.Chat
@doc """
匹配消息是否包含群组头像修改。
消息中的 `new_chat_photo` 为 `nil` 时,表示不匹配。否则匹配。
"""
@impl true
def match(%{new_chat_photo: nil} = _message, state), do: {:nomatch, state}
@impl true
def match(_message, state), do: {:match, state}
@doc """
处理群组头像修改。
更新数据库中对应的群组记录的头像数据。
"""
@impl true
def handle(message, state) do
%{chat: %{id: chat_id}} = message
# 获取照片中的小图和大图,并更新相应数据
case Chat.get(chat_id) do
{:ok, chat} ->
Instances.update_chat(chat, %{
small_photo_id: small_photo_id(message),
big_photo_id: big_photo_id(message)
})
{:ok, state}
_ ->
{:ok, state}
end
{:ok, state}
end
# 从消息中获取小尺寸头像的文件 id
@spec small_photo_id(Message.t()) :: String.t()
defp small_photo_id(%{new_chat_photo: []}), do: nil
defp small_photo_id(%{new_chat_photo: [photo | _]}), do: photo.file_id
# 从消息中获取大尺寸头像的文件 id
@spec big_photo_id(Message.t()) :: String.t()
defp big_photo_id(%{new_chat_photo: []}), do: nil
defp big_photo_id(%{new_chat_photo: [_, _, photo]}), do: photo.file_id
end
| 22.421053 | 76 | 0.638498 |
73cd825f21efbc5adadcde66594a7d85e95bab30 | 5,970 | ex | Elixir | lib/ecto/adapters/mysql.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:05:52.000Z | 2019-05-07T15:05:52.000Z | lib/ecto/adapters/mysql.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/mysql.ex | mschae/ecto | 00f85444c4f61080617179232c0d528381de5ec3 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Adapters.MySQL do
@moduledoc """
Adapter module for MySQL.
It handles and pools the connections to the MySQL
database using `mariaex` and a connection pool,
such as `poolboy`.
## Options
MySQL options split in different categories described
below. All options should be given via the repository
configuration.
### Compile time options
Those options should be set in the config file and require
recompilation in order to make an effect.
* `:adapter` - The adapter name, in this case, `Ecto.Adapters.MySQL`
* `:pool` - The connection pool module, defaults to `Ecto.Pools.Poolboy`
* `:pool_timeout` - The default timeout to use on pool calls, defaults to `5000`
* `:timeout` - The default timeout to use on queries, defaults to `15000`
* `:log_level` - The level to use when logging queries (default: `:debug`)
### Connection options
* `:hostname` - Server hostname
* `:port` - Server port (default: 3306)
* `:username` - Username
* `:password` - User password
* `:parameters` - Keyword list of connection parameters
* `:ssl` - Set to true if ssl should be used (default: false)
* `:ssl_opts` - A list of ssl options, see ssl docs
* `:connect_timeout` - The timeout in miliseconds for establishing new connections (default: 5000)
### Storage options
* `:charset` - the database encoding (default: "utf8")
* `:collation` - the collation order
## Limitations
There are some limitations when using Ecto with MySQL that one
needs to be aware of.
### Engine
Since Ecto uses transactions, MySQL users running old versions
(5.1 and before) must ensure their tables use the InnoDB engine
as the default (MyISAM) does not support transactions.
Tables created by Ecto are guaranteed to use InnoDB, regardless
of the MySQL version.
### UUIDs
MySQL does not support UUID types. Ecto emulates them by using
`binary(16)`.
### Read after writes
Because MySQL does not support RETURNING clauses in INSERT and
UPDATE, it does not support the `:read_after_writes` option of
`Ecto.Schema.field/3`.
### DDL Transaction
MySQL does not support migrations inside transactions as it
automatically commits after some commands like CREATE TABLE.
Therefore MySQL migrations does not run inside transactions.
### usec in datetime
Old MySQL versions did not support usec in datetime while
more recent versions would round or truncate the usec value.
Therefore, in case the user decides to use microseconds in
datetimes and timestamps with MySQL, be aware of such
differences and consult the documentation for your MySQL
version.
"""
# Inherit all behaviour from Ecto.Adapters.SQL
use Ecto.Adapters.SQL, :mariaex
# And provide a custom storage implementation
@behaviour Ecto.Adapter.Storage
## Custom MySQL types
def load({:embed, _} = type, binary) when is_binary(binary),
do: super(type, json_library.decode!(binary))
def load(:map, binary) when is_binary(binary),
do: super(:map, json_library.decode!(binary))
def load(:boolean, 0), do: {:ok, false}
def load(:boolean, 1), do: {:ok, true}
def load(type, value), do: super(type, value)
defp json_library, do: Application.get_env(:ecto, :json_library)
## Storage API
@doc false
def storage_up(opts) do
database = Keyword.fetch!(opts, :database)
charset = Keyword.get(opts, :charset, "utf8")
extra = ""
if collation = Keyword.get(opts, :collation) do
extra = extra <> " DEFAULT COLLATE = #{collation}"
end
{output, status} =
run_with_mysql opts, "CREATE DATABASE `" <> database <>
"` DEFAULT CHARACTER SET = #{charset} " <> extra
cond do
status == 0 -> :ok
String.contains?(output, "database exists") -> {:error, :already_up}
true -> {:error, output}
end
end
@doc false
def storage_down(opts) do
{output, status} = run_with_mysql(opts, "DROP DATABASE `#{opts[:database]}`")
cond do
status == 0 -> :ok
String.contains?(output, "doesn't exist") -> {:error, :already_down}
true -> {:error, output}
end
end
defp run_with_mysql(database, sql_command) do
unless System.find_executable("mysql") do
raise "could not find executable `mysql` in path, " <>
"please guarantee it is available before running ecto commands"
end
env = []
if password = database[:password] do
env = [{"MYSQL_PWD", password}|env]
end
host = database[:hostname] || System.get_env("MYSQL_HOST") || "localhost"
port = database[:port] || System.get_env("MYSQL_TCP_PORT") || "3306"
args = ["--silent", "-u", database[:username], "-h", host, "-P", to_string(port), "-e", sql_command]
System.cmd("mysql", args, env: env, stderr_to_stdout: true)
end
@doc false
def supports_ddl_transaction? do
false
end
@doc false
def insert(_repo, %{model: model}, _params, _autogen, [_|_] = returning, _opts) do
raise ArgumentError, "MySQL does not support :read_after_writes in models. " <>
"The following fields in #{inspect model} are tagged as such: #{inspect returning}"
end
def insert(repo, %{source: {prefix, source}}, params, {pk, :id, nil}, [], opts) do
{fields, values} = :lists.unzip(params)
sql = @conn.insert(prefix, source, fields, [])
case Ecto.Adapters.SQL.query(repo, sql, values, opts) do
{:ok, %{num_rows: 1, last_insert_id: last_insert_id}} ->
{:ok, [{pk, last_insert_id}]}
{:error, err} ->
case @conn.to_constraints(err) do
[] -> raise err
constraints -> {:invalid, constraints}
end
end
end
def insert(repo, model_meta, params, autogen, [], opts) do
super(repo, model_meta, params, autogen, [], opts)
end
end
| 32.445652 | 108 | 0.653769 |
73cd856d42a9da869a6e3ecec4d5bf7be9129143 | 85 | exs | Elixir | astreu_k8s/test/astreu_k8s_test.exs | wesleimp/Astreu | 4d430733e7ecc8b3eba8e27811a152aa2c6d79c1 | [
"Apache-2.0"
] | null | null | null | astreu_k8s/test/astreu_k8s_test.exs | wesleimp/Astreu | 4d430733e7ecc8b3eba8e27811a152aa2c6d79c1 | [
"Apache-2.0"
] | null | null | null | astreu_k8s/test/astreu_k8s_test.exs | wesleimp/Astreu | 4d430733e7ecc8b3eba8e27811a152aa2c6d79c1 | [
"Apache-2.0"
] | null | null | null | defmodule AstreuK8sTest do
use ExUnit.Case
test "greets the world" do
end
end
| 12.142857 | 28 | 0.741176 |
73cd90dc3d1a5baeaec381ef3dc2b14aca92bfdb | 77 | ex | Elixir | lib/find_my_personal_web/views/member_view.ex | romenigld/find_my_personal | d3545d29cdf80c6e2a90b0bb784579bbd488120c | [
"MIT"
] | null | null | null | lib/find_my_personal_web/views/member_view.ex | romenigld/find_my_personal | d3545d29cdf80c6e2a90b0bb784579bbd488120c | [
"MIT"
] | 11 | 2022-01-12T02:39:42.000Z | 2022-02-09T02:17:04.000Z | lib/find_my_personal_web/views/member_view.ex | gissandrogama/find_my_personal | 12f23b37e967949af9f3e511cf1ce684452d8f66 | [
"MIT"
] | null | null | null | defmodule FindMyPersonalWeb.MemberView do
use FindMyPersonalWeb, :view
end
| 19.25 | 41 | 0.844156 |
73cd9bbd29d90eb386158bf8fb19109a71ad85ec | 2,298 | ex | Elixir | lib/winner_web/live/winner_resources_live.ex | manojsamanta/winner | 63293ff2f53e7dd271470bfb36498a303a2394b0 | [
"MIT"
] | null | null | null | lib/winner_web/live/winner_resources_live.ex | manojsamanta/winner | 63293ff2f53e7dd271470bfb36498a303a2394b0 | [
"MIT"
] | null | null | null | lib/winner_web/live/winner_resources_live.ex | manojsamanta/winner | 63293ff2f53e7dd271470bfb36498a303a2394b0 | [
"MIT"
] | null | null | null | defmodule WinnerWeb.WinnerResourcesLive do
use Phoenix.LiveView
alias WinnerWeb.RaffleState
def render(assigns) do
WinnerWeb.WinnerView.render("index.html", assigns)
end
def mount(session, socket) do
socket =
socket
|> new_raffle()
if connected?(socket) do
{:ok, schedule_tick(socket)}
else
{:ok, socket}
end
end
defp new_raffle(socket) do
%{resources: resources}=RaffleState.list_resources()
assign(socket, resources: resources, shuffles: %{}, is_match: false)
end
defp schedule_tick(socket) do
Process.send_after(self(), :tick, 1000)
socket
end
#
# add new resource
#
def handle_event("add_offer", path, socket) do
new_resource=path["offer"]["next"]
if not Enum.member?(socket.assigns.resources, new_resource) do
RaffleState.add_resource(new_resource)
end
%{resources: resources}=RaffleState.list_resources()
{:noreply, assign(socket, resources: resources)}
end
#
# sort out resource/member combo
#
def handle_info(:tick, socket) do
%{
resources: resources,
members: members,
member_resources: member_resources
} = RaffleState.list_resources()
# contributed by @dhedlund, now replaced with invert_map
# matches=Enum.flat_map(member_resources, fn {k,v} -> Enum.map(v, &{&1,k}) end) |> Enum.group_by(fn {k,_} -> k end) |> Map.new(fn {k,vs} -> {k,Enum.map(vs, fn {_, v} -> v end)} end)
matches=invert_map(member_resources)
socket=assign(socket, resources: resources, matches: matches, is_match: true)
new_socket = schedule_tick(socket)
{:noreply, new_socket}
end
def handle_event("auction", path, socket) do
resource=path["resource"]
shuffles = socket.assigns.shuffles
case socket.assigns.matches[resource] do
nil -> {:noreply, socket}
_ -> curr = socket.assigns.matches[resource] |> Enum.shuffle
shuffles= Map.put(shuffles, resource, curr)
{:noreply, assign(socket, shuffles: shuffles)}
end
end
def handle_event(_, _, socket), do: {:noreply, socket}
# Contributed by @dhedlund
defp invert_map(map) do
pairs = for {key, values} <- map, value <- values, do: {key, value}
Enum.group_by(pairs, &elem(&1, 1), &elem(&1, 0))
end
end
| 26.113636 | 185 | 0.660139 |
73cda9db5bc0072488a7f2cffaec6c18c665996a | 598 | exs | Elixir | test/vec3/vec3_creation_test.exs | crertel/graphmath | 798e51d21d509e397a86d9ca855ef18a92243583 | [
"Unlicense"
] | 70 | 2015-01-07T10:13:38.000Z | 2021-09-29T05:06:14.000Z | test/vec3/vec3_creation_test.exs | crertel/graphmath | 798e51d21d509e397a86d9ca855ef18a92243583 | [
"Unlicense"
] | 28 | 2015-01-22T22:38:24.000Z | 2021-06-24T06:15:38.000Z | test/vec3/vec3_creation_test.exs | crertel/graphmath | 798e51d21d509e397a86d9ca855ef18a92243583 | [
"Unlicense"
] | 17 | 2015-01-22T18:35:41.000Z | 2020-12-24T22:42:40.000Z | defmodule GraphmathTest.Vec3.CreateVec3 do
use ExUnit.Case
@tag :vec3
@tag :create
test "create returns {0,0,0}" do
assert {0, 0, 0} == Graphmath.Vec3.create()
end
@tag :vec3
@tag :create
test "create returns {x,y,z} given (x,y,z)" do
assert {4, 5, 6} == Graphmath.Vec3.create(4, 5, 6)
end
@tag :vec3
@tag :create
test "create returns {x,y,z} given vec3" do
assert {1, 2, 3} == Graphmath.Vec3.create([1, 2, 3])
end
@tag :vec3
@tag :create
test "create return {x,y} given vecN" do
assert {6, 7, 8} == Graphmath.Vec3.create([6, 7, 8, 9])
end
end
| 21.357143 | 59 | 0.608696 |
73cdb8bcc8734ee0facfc9ef313edee31ea87bd9 | 4 | ex | Elixir | testData/org/elixir_lang/formatting/without_space_around_match_operator.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 21,008 | 2017-04-01T04:06:55.000Z | 2022-03-31T23:11:05.000Z | ecmascript/codegen/tests/test262-min/b5bc1ffd90912fb1.js | sventschui/swc | cd2a2777d9459ba0f67774ed8a37e2b070b51e81 | [
"Apache-2.0",
"MIT"
] | 2,309 | 2018-01-14T05:54:44.000Z | 2022-03-31T15:48:40.000Z | ecmascript/codegen/tests/test262-min/b5bc1ffd90912fb1.js | sventschui/swc | cd2a2777d9459ba0f67774ed8a37e2b070b51e81 | [
"Apache-2.0",
"MIT"
] | 768 | 2018-01-14T05:15:43.000Z | 2022-03-30T11:29:42.000Z | a=1
| 2 | 3 | 0.5 |
73ce006821c411bc4256b42865df30d4e82e5ae9 | 898 | exs | Elixir | test/open_exchange_rates/client_test.exs | venndr/open_exchange_rates | 1e679ac05d8ac924a7329543ec61bd92a0a646ba | [
"MIT"
] | 5 | 2016-08-07T13:57:56.000Z | 2020-10-05T17:17:17.000Z | test/open_exchange_rates/client_test.exs | venndr/open_exchange_rates | 1e679ac05d8ac924a7329543ec61bd92a0a646ba | [
"MIT"
] | null | null | null | test/open_exchange_rates/client_test.exs | venndr/open_exchange_rates | 1e679ac05d8ac924a7329543ec61bd92a0a646ba | [
"MIT"
] | 5 | 2018-07-01T17:41:40.000Z | 2021-03-31T19:06:47.000Z | defmodule OpenExchangeRates.ClientTest do
use ExUnit.Case
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
  # Happy path: the recorded cassette returns a successful payload whose
  # base currency is USD. Only the tagged-tuple shape is pattern-matched.
  test "it should get exchange rates" do
    use_cassette "client/get_latest" do
      assert {:ok, %{"base" => "USD"}} = OpenExchangeRates.Client.get_latest
    end
  end
  # The API reported an error; the client surfaces the error message verbatim.
  test "it should handle api errors" do
    use_cassette "client/api_error" do
      assert {:error, "Something went horribly wrong!"} == OpenExchangeRates.Client.get_latest
    end
  end
  # Malformed JSON in the response body is reported as a parse error that
  # includes the offending fragment.
  test "it should handle corrupt json" do
    use_cassette "client/corrupt" do
      assert {:error, "Could not parse the JSON : \"{\\n \\\"corrupt\""} == OpenExchangeRates.Client.get_latest
    end
  end
  # Transport-level failures propagate as an HTTPoison error struct.
  test "it should handle no connection" do
    use_cassette "client/no_connection" do
      assert {:error, %HTTPoison.Error{id: nil, reason: "econnrefused"}} == OpenExchangeRates.Client.get_latest
    end
  end
end
| 29.933333 | 112 | 0.699332 |
73ce088ce5a3f274f97b40f191039b72138534b0 | 1,742 | ex | Elixir | clients/plus/lib/google_api/plus/v1/model/comment_actor_client_specific_actor_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/plus/lib/google_api/plus/v1/model/comment_actor_client_specific_actor_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/plus/lib/google_api/plus/v1/model/comment_actor_client_specific_actor_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfo do
  @moduledoc """
  Actor info specific to particular clients.
  ## Attributes
  * `youtubeActorInfo` (*type:* `GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfoYoutubeActorInfo.t`, *default:* `nil`) - Actor info specific to YouTube clients.
  """
  # ModelBase supplies the `field/2` macro plus JSON (de)serialization helpers.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :youtubeActorInfo =>
            GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfoYoutubeActorInfo.t()
        }
  # Declares the single attribute and the model used to decode its nested JSON.
  field(
    :youtubeActorInfo,
    as: GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfoYoutubeActorInfo
  )
end
defimpl Poison.Decoder, for: GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfo do
  # JSON decoding is delegated to the generated model module.
  def decode(value, options),
    do: GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfo.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Plus.V1.Model.CommentActorClientSpecificActorInfo do
  # Encoding uses the shared ModelBase encoder for all generated models.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.156863 | 174 | 0.764064 |
73ce0ebd9cc94984af7058e5a751c41c03d00e9c | 1,694 | ex | Elixir | lib/segment/hispas.ex | Ablu/fintex | 3dd2088c01a4e035478d09df1e09ec244b54cf87 | [
"MIT"
] | 27 | 2015-08-09T16:36:31.000Z | 2020-01-26T17:47:17.000Z | lib/segment/hispas.ex | Ablu/fintex | 3dd2088c01a4e035478d09df1e09ec244b54cf87 | [
"MIT"
] | 14 | 2015-07-31T07:28:07.000Z | 2018-09-06T18:32:37.000Z | lib/segment/hispas.ex | Ablu/fintex | 3dd2088c01a4e035478d09df1e09ec244b54cf87 | [
"MIT"
] | 19 | 2015-07-31T01:25:17.000Z | 2019-06-29T16:28:42.000Z | defmodule FinTex.Segment.HISPAS do
@moduledoc false
defstruct [segment: nil]
def new(segment = [["HISPAS", _, 1, _] | _]) do
%__MODULE__{
segment:
[
segment |> Enum.at(0),
segment |> Enum.at(1),
segment |> Enum.at(2),
segment |> Enum.at(3),
[
segment |> Enum.at(4) |> Enum.at(0),
segment |> Enum.at(4) |> Enum.at(1),
segment |> Enum.at(4) |> Enum.at(2),
segment |> Enum.at(4) |> Enum.drop(3)
]
]
}
end
def new(segment = [["HISPAS", _, 2, _] | _]) do
%__MODULE__{
segment:
[
segment |> Enum.at(0),
segment |> Enum.at(1),
segment |> Enum.at(2),
segment |> Enum.at(3),
[
segment |> Enum.at(4) |> Enum.at(0),
segment |> Enum.at(4) |> Enum.at(1),
segment |> Enum.at(4) |> Enum.at(2),
segment |> Enum.at(4) |> Enum.at(3),
segment |> Enum.at(4) |> Enum.drop(4)
]
]
}
end
def new(segment = [["HISPAS", _, 3, _] | _]) do
%__MODULE__{
segment:
[
segment |> Enum.at(0),
segment |> Enum.at(1),
segment |> Enum.at(2),
segment |> Enum.at(3),
[
segment |> Enum.at(4) |> Enum.at(0),
segment |> Enum.at(4) |> Enum.at(1),
segment |> Enum.at(4) |> Enum.at(2),
segment |> Enum.at(4) |> Enum.at(3),
segment |> Enum.at(4) |> Enum.at(4),
segment |> Enum.at(4) |> Enum.drop(5)
]
]
}
end
end
# Delegates `inspect/2` formatting to the shared FinTex helper macro.
defimpl Inspect, for: FinTex.Segment.HISPAS do
  use FinTex.Helper.Inspect
end
| 23.527778 | 49 | 0.44451 |
73ce26936d77cff844f40fc0311aea0cc3b851ee | 22,069 | ex | Elixir | clients/compute/lib/google_api/compute/v1/api/target_tcp_proxies.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/target_tcp_proxies.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/target_tcp_proxies.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Api.TargetTcpProxies do
@moduledoc """
API calls for all endpoints tagged `TargetTcpProxies`.
"""
alias GoogleApi.Compute.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Deletes the specified TargetTcpProxy resource.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `target_tcp_proxy` (*type:* `String.t`) - Name of the TargetTcpProxy resource to delete.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_target_tcp_proxies_delete(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def compute_target_tcp_proxies_delete(
connection,
project,
target_tcp_proxy,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:requestId => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/{project}/global/targetTcpProxies/{targetTcpProxy}", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"targetTcpProxy" => URI.encode(target_tcp_proxy, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Returns the specified TargetTcpProxy resource. Gets a list of available target TCP proxies by making a list() request.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `target_tcp_proxy` (*type:* `String.t`) - Name of the TargetTcpProxy resource to return.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.TargetTcpProxy{}}` on success
* `{:error, info}` on failure
"""
@spec compute_target_tcp_proxies_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.TargetTcpProxy.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def compute_target_tcp_proxies_get(
connection,
project,
target_tcp_proxy,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/{project}/global/targetTcpProxies/{targetTcpProxy}", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"targetTcpProxy" => URI.encode(target_tcp_proxy, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.TargetTcpProxy{}])
end
@doc """
Creates a TargetTcpProxy resource in the specified project using the data included in the request.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.TargetTcpProxy.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_target_tcp_proxies_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def compute_target_tcp_proxies_insert(connection, project, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/{project}/global/targetTcpProxies", %{
"project" => URI.encode(project, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Retrieves the list of TargetTcpProxy resources available to the specified project.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
* `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
* `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
Currently, only sorting by `name` or `creationTimestamp desc` is supported.
* `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.TargetTcpProxyList{}}` on success
* `{:error, info}` on failure
"""
@spec compute_target_tcp_proxies_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Compute.V1.Model.TargetTcpProxyList.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def compute_target_tcp_proxies_list(connection, project, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:filter => :query,
:maxResults => :query,
:orderBy => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/{project}/global/targetTcpProxies", %{
"project" => URI.encode(project, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.TargetTcpProxyList{}])
end
@doc """
Changes the BackendService for TargetTcpProxy.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `target_tcp_proxy` (*type:* `String.t`) - Name of the TargetTcpProxy resource whose BackendService resource is to be set.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.TargetTcpProxiesSetBackendServiceRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_target_tcp_proxies_set_backend_service(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def compute_target_tcp_proxies_set_backend_service(
connection,
project,
target_tcp_proxy,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/{project}/global/targetTcpProxies/{targetTcpProxy}/setBackendService", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"targetTcpProxy" => URI.encode(target_tcp_proxy, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Changes the ProxyHeaderType for TargetTcpProxy.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `target_tcp_proxy` (*type:* `String.t`) - Name of the TargetTcpProxy resource whose ProxyHeader is to be set.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.TargetTcpProxiesSetProxyHeaderRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_target_tcp_proxies_set_proxy_header(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def compute_target_tcp_proxies_set_proxy_header(
connection,
project,
target_tcp_proxy,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/{project}/global/targetTcpProxies/{targetTcpProxy}/setProxyHeader", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"targetTcpProxy" => URI.encode(target_tcp_proxy, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
end
| 50.385845 | 434 | 0.655308 |
73ce2eded3974fde878d97a300e008d3028cc165 | 268 | exs | Elixir | test/list_length_test.exs | felipe-jm/ignite-elixir-challenge-01 | b16a1728a76485bc8850b24797d52a483bdfa1db | [
"MIT"
] | null | null | null | test/list_length_test.exs | felipe-jm/ignite-elixir-challenge-01 | b16a1728a76485bc8850b24797d52a483bdfa1db | [
"MIT"
] | null | null | null | test/list_length_test.exs | felipe-jm/ignite-elixir-challenge-01 | b16a1728a76485bc8850b24797d52a483bdfa1db | [
"MIT"
] | null | null | null | defmodule ListLengthTest do
use ExUnit.Case
describe "call/1" do
test "returns the list length" do
list = [1, 2, 3, 5, 7]
response = ListLength.call(list)
expected_response = 5
assert response == expected_response
end
end
end
| 16.75 | 42 | 0.641791 |
73ce58cce38101c37de8a762e0b3b264d6079ce4 | 48,669 | ex | Elixir | lib/rummage_ecto/services/build_search_query.ex | acolin/rummage_ecto | 69b83579437c9d88de6b1951b6ee831424cfaec3 | [
"MIT"
] | 99 | 2018-08-19T10:31:42.000Z | 2021-10-31T03:36:53.000Z | lib/rummage_ecto/services/build_search_query.ex | acolin/rummage_ecto | 69b83579437c9d88de6b1951b6ee831424cfaec3 | [
"MIT"
] | 43 | 2017-03-14T02:00:41.000Z | 2018-07-05T16:28:39.000Z | lib/rummage_ecto/services/build_search_query.ex | acolin/rummage_ecto | 69b83579437c9d88de6b1951b6ee831424cfaec3 | [
"MIT"
] | 26 | 2018-08-23T06:13:42.000Z | 2021-10-19T06:57:18.000Z | defmodule Rummage.Ecto.Services.BuildSearchQuery do
@moduledoc """
`Rummage.Ecto.Services.BuildSearchQuery` is a service module which serves the
default search hook, `Rummage.Ecto.Hook.Search` that comes shipped with `Rummage.Ecto`.
## Module Attributes
```elixir
@search_types ~w{like ilike eq gt lt gteq lteq is_nil}a
@search_exprs ~w{where or_where not_where}a
```
`@search_types` is a collection of all the 8 valid `search_types` that come shipped with
`Rummage.Ecto`'s default search hook. The types are:
* `like`: Searches for a `term` in a given `field` of a `queryable`.
* `ilike`: Searches for a `term` in a given `field` of a `queryable`, in a case insensitive fashion.
* `eq`: Searches for a `term` to be equal to a given `field` of a `queryable`.
* `gt`: Searches for a `term` to be greater than to a given `field` of a `queryable`.
* `lt`: Searches for a `term` to be less than to a given `field` of a `queryable`.
* `gteq`: Searches for a `term` to be greater than or equal to to a given `field` of a `queryable`.
* `lteq`: Searches for a `term` to be less than or equal to a given `field` of a `queryable`.
* `is_nil`: Searches for a null value when `term` is true, or not null when `term` is false.
* `in`: Searches for a given `field` in a collection of `terms` in a `queryable`.
`@search_exprs` is a collection of 3 valid `search_exprs` that are used to
apply a `search_type` to a `Ecto.Queryable`. Those expressions are:
* `where` (DEFAULT): An AND where query expression.
* `or_where`: An OR where query expression. Behaves exactly like where but
combines the previous expression with an `OR` operation. Useful
for optional searches.
* `not_where`: A NOT where query expression. This can be used while excluding
a list of entries based on where query.
Feel free to use this module on a custom search hook that you write.
"""
import Ecto.Query
@typedoc ~s(TODO: Finish)
@type search_expr :: :where | :or_where | :not_where
@typedoc ~s(TODO: Finish)
@type search_type :: :like | :ilike | :eq | :gt | :lt | :gteq | :lteq | :is_nil
@search_types ~w{like ilike eq gt lt gteq lteq is_nil in}a
@search_exprs ~w{where or_where not_where}a
# Only for Postgres (only one or two interpolations are supported)
# TODO: Fix this once Ecto 3.0 comes out with `unsafe_fragment`
@supported_fragments_one [
"date_part('day', ?)",
"date_part('month', ?)",
"date_part('year', ?)",
"date_part('hour', ?)",
"lower(?)",
"upper(?)"
]
@supported_fragments_two ["concat(?, ?)", "coalesce(?, ?)"]
@doc """
Builds a searched `queryable` on top of the given `queryable` using `field`, `search_type`
and `search_term`.
## Examples
When `search_type` is `where`:
When `field`, `search_type` and `queryable` are passed with `search_type` of `like`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:where, :like}, "field_!")
#Ecto.Query<from p0 in "parents", where: like(p0.field_1, ^"%field_!%")>
When `field`, `search_type` and `queryable` are passed with `search_type` of `ilike`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:where, :ilike}, "field_!")
#Ecto.Query<from p0 in "parents", where: ilike(p0.field_1, ^"%field_!%")>
When `field`, `search_type` and `queryable` are passed with `search_type` of `eq`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:where, :eq}, "field_!")
#Ecto.Query<from p0 in "parents", where: p0.field_1 == ^"field_!">
When `field`, `search_type` and `queryable` are passed with `search_type` of `gt`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:where, :gt}, "field_!")
#Ecto.Query<from p0 in "parents", where: p0.field_1 > ^"field_!">
When `field`, `search_type` and `queryable` are passed with `search_type` of `lt`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:where, :lt}, "field_!")
#Ecto.Query<from p0 in "parents", where: p0.field_1 < ^"field_!">
When `field`, `search_type` and `queryable` are passed with `search_type` of `gteq`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:where, :gteq}, "field_!")
#Ecto.Query<from p0 in "parents", where: p0.field_1 >= ^"field_!">
When `field`, `search_type` and `queryable` are passed with `search_type` of `lteq`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:where, :lteq}, "field_!")
#Ecto.Query<from p0 in "parents", where: p0.field_1 <= ^"field_!">
When `search_type` is `or_where`:
When `field`, `search_type` and `queryable` are passed with `search_type` of `like`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:or_where, :like}, "field_!")
#Ecto.Query<from p0 in "parents", or_where: like(p0.field_1, ^"%field_!%")>
When `field`, `search_type` and `queryable` are passed with `search_type` of `ilike`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:or_where, :ilike}, "field_!")
#Ecto.Query<from p0 in "parents", or_where: ilike(p0.field_1, ^"%field_!%")>
When `field`, `search_type` and `queryable` are passed with `search_type` of `eq`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:or_where, :eq}, "field_!")
#Ecto.Query<from p0 in "parents", or_where: p0.field_1 == ^"field_!">
When `field`, `search_type` and `queryable` are passed with `search_type` of `gt`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:or_where, :gt}, "field_!")
#Ecto.Query<from p0 in "parents", or_where: p0.field_1 > ^"field_!">
When `field`, `search_type` and `queryable` are passed with `search_type` of `lt`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:or_where, :lt}, "field_!")
#Ecto.Query<from p0 in "parents", or_where: p0.field_1 < ^"field_!">
When `field`, `search_type` and `queryable` are passed with `search_type` of `gteq`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:or_where, :gteq}, "field_!")
#Ecto.Query<from p0 in "parents", or_where: p0.field_1 >= ^"field_!">
When `field`, `search_type` and `queryable` are passed with `search_type` of `lteq`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:or_where, :lteq}, "field_!")
#Ecto.Query<from p0 in "parents", or_where: p0.field_1 <= ^"field_!">
When `field`, `search_type` and `queryable` are passed with an invalid `search_type`
and `search_expr`:
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.run(queryable, :field_1, {:pizza, :cheese}, "field_!")
** (RuntimeError) Unknown {search_expr, search_type}, {:pizza, :cheese}
search_type should be one of #{inspect(@search_types)}
search_expr should be one of #{inspect(@search_exprs)}
"""
# Dispatches to the `handle_<search_type>/4` clause for the given search type,
# e.g. `{:where, :like}` -> `handle_like(queryable, field, search_term, :where)`.
# NOTE: `String.to_atom/1` is safe here — `search_type` is guarded to be a
# member of the compile-time whitelist `@search_types`, so no unbounded atoms
# can be created from caller input.
#
# Spec fixed: arguments now match the head (queryable, field, {expr, type},
# term) and the return is `Ecto.Query.t()`, not a 1-tuple.
@spec run(
        Ecto.Query.t(),
        atom() | tuple(),
        {__MODULE__.search_expr(), __MODULE__.search_type()},
        term()
      ) :: Ecto.Query.t()
def run(queryable, field, {search_expr, search_type}, search_term)
    when search_type in @search_types and search_expr in @search_exprs do
  apply(__MODULE__, String.to_atom("handle_" <> to_string(search_type)), [
    queryable,
    field,
    search_term,
    search_expr
  ])
end

# Any other {search_expr, search_type} combination is a caller error: raise,
# listing the supported values.
def run(_, _, search_tuple, _) do
  raise "Unknown {search_expr, search_type}, #{inspect(search_tuple)}\n" <>
          "search_type should be one of #{inspect(@search_types)}\n" <>
          "search_expr should be one of #{inspect(@search_exprs)}"
end
@doc """
Builds a searched `queryable` on top of the given `queryable` using
`field`, `search_term` and `search_expr` when the `search_type` is `like`.
Checkout [Ecto.Query.API.like/2](https://hexdocs.pm/ecto/Ecto.Query.API.html#like/2)
for more info.
NOTE: Be careful of [Like Injections](https://githubengineering.com/like-injection/)
Assumes that `search_expr` is in #{inspect(@search_exprs)}.
## Examples
When `search_expr` is `:where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_like(queryable, :field_1, "field_!", :where)
#Ecto.Query<from p0 in "parents", where: like(p0.field_1, ^"%field_!%")>
When `search_expr` is `:or_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_like(queryable, :field_1, "field_!", :or_where)
#Ecto.Query<from p0 in "parents", or_where: like(p0.field_1, ^"%field_!%")>
When `search_expr` is `:not_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_like(queryable, :field_1, "field_!", :not_where)
#Ecto.Query<from p0 in "parents", where: not(like(p0.field_1, ^"%field_!%"))>
"""
@spec handle_like(Ecto.Query.t(), atom() | tuple(), String.t(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
# The clauses below are generated at compile time, one per whitelisted
# fragment format string. `%` in `search_term` is escaped via String.replace/3
# so caller input cannot widen the LIKE pattern (`_` is NOT escaped — see the
# "Like Injections" link in the @doc above). `[..., b]` binds the query's
# last binding, so the condition applies to the most recent join.
for fragment <- @supported_fragments_one do
  # :where — fragment interpolating one schema field.
  def handle_like(queryable, {:fragment, unquote(fragment), field}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      like(
        fragment(unquote(fragment), field(b, ^field)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

for fragment <- @supported_fragments_two do
  # :where — fragment interpolating two schema fields.
  def handle_like(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :where
      ) do
    queryable
    |> where(
      [..., b],
      like(
        fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

# :where — plain schema field (no fragment).
def handle_like(queryable, field, search_term, :where) do
  queryable
  |> where(
    [..., b],
    like(field(b, ^field), ^"%#{String.replace(search_term, "%", "\\%")}%")
  )
end

for fragment <- @supported_fragments_one do
  # :or_where — fragment interpolating one schema field.
  def handle_like(queryable, {:fragment, unquote(fragment), field}, search_term, :or_where) do
    queryable
    |> or_where(
      [..., b],
      like(
        fragment(unquote(fragment), field(b, ^field)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

for fragment <- @supported_fragments_two do
  # :or_where — fragment interpolating two schema fields.
  def handle_like(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :or_where
      ) do
    queryable
    |> or_where(
      [..., b],
      like(
        fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

# :or_where — plain schema field.
def handle_like(queryable, field, search_term, :or_where) do
  queryable
  |> or_where(
    [..., b],
    like(field(b, ^field), ^"%#{String.replace(search_term, "%", "\\%")}%")
  )
end

for fragment <- @supported_fragments_one do
  # :not_where — negated LIKE (a regular `where` wrapping `not like(...)`).
  def handle_like(queryable, {:fragment, unquote(fragment), field}, search_term, :not_where) do
    queryable
    |> where(
      [..., b],
      not like(
        fragment(unquote(fragment), field(b, ^field)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

for fragment <- @supported_fragments_two do
  # :not_where — negated LIKE over a two-field fragment.
  def handle_like(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :not_where
      ) do
    queryable
    |> where(
      [..., b],
      not like(
        fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

# :not_where — negated LIKE over a plain schema field.
def handle_like(queryable, field, search_term, :not_where) do
  queryable
  |> where(
    [..., b],
    not like(field(b, ^field), ^"%#{String.replace(search_term, "%", "\\%")}%")
  )
end
@doc """
Builds a searched `queryable` on top of the given `queryable` using
`field`, `search_term` and `search_expr` when the `search_type` is `ilike`.
Checkout [Ecto.Query.API.ilike/2](https://hexdocs.pm/ecto/Ecto.Query.API.html#ilike/2)
for more info.
Assumes that `search_expr` is in #{inspect(@search_exprs)}.
## Examples
When `search_expr` is `:where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_ilike(queryable, :field_1, "field_!", :where)
#Ecto.Query<from p0 in "parents", where: ilike(p0.field_1, ^"%field_!%")>
When `search_expr` is `:or_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_ilike(queryable, :field_1, "field_!", :or_where)
#Ecto.Query<from p0 in "parents", or_where: ilike(p0.field_1, ^"%field_!%")>
When `search_expr` is `:not_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_ilike(queryable, :field_1, "field_!", :not_where)
#Ecto.Query<from p0 in "parents", where: not(ilike(p0.field_1, ^"%field_!%"))>
"""
# Spec widened from `atom()` to `atom() | tuple()`: the generated clauses also
# accept `{:fragment, format, field(s)}` tuples, matching handle_like/4.
@spec handle_ilike(Ecto.Query.t(), atom() | tuple(), String.t(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
# `%` in `search_term` is escaped so caller input cannot widen the ILIKE
# pattern (`_` is not escaped). `[..., b]` binds the query's last binding.
for fragment <- @supported_fragments_one do
  # :where — fragment interpolating one schema field.
  def handle_ilike(queryable, {:fragment, unquote(fragment), field}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      ilike(
        fragment(unquote(fragment), field(b, ^field)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

for fragment <- @supported_fragments_two do
  # :where — fragment interpolating two schema fields.
  def handle_ilike(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :where
      ) do
    queryable
    |> where(
      [..., b],
      ilike(
        fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

# :where — plain schema field; guarded so non-binary terms fall through to the
# ArgumentError clause at the bottom.
def handle_ilike(queryable, field, search_term, :where) when is_binary(search_term) do
  queryable
  |> where(
    [..., b],
    ilike(field(b, ^field), ^"%#{String.replace(search_term, "%", "\\%")}%")
  )
end

for fragment <- @supported_fragments_one do
  # :or_where — fragment interpolating one schema field.
  def handle_ilike(queryable, {:fragment, unquote(fragment), field}, search_term, :or_where) do
    queryable
    |> or_where(
      [..., b],
      ilike(
        fragment(unquote(fragment), field(b, ^field)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

for fragment <- @supported_fragments_two do
  # :or_where — fragment interpolating two schema fields.
  def handle_ilike(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :or_where
      ) do
    queryable
    |> or_where(
      [..., b],
      ilike(
        fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

# :or_where — plain schema field.
def handle_ilike(queryable, field, search_term, :or_where) do
  queryable
  |> or_where(
    [..., b],
    ilike(field(b, ^field), ^"%#{String.replace(search_term, "%", "\\%")}%")
  )
end

for fragment <- @supported_fragments_one do
  # :not_where — negated ILIKE (regular `where` wrapping `not ilike(...)`).
  def handle_ilike(queryable, {:fragment, unquote(fragment), field}, search_term, :not_where) do
    queryable
    |> where(
      [..., b],
      not ilike(
        fragment(unquote(fragment), field(b, ^field)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

for fragment <- @supported_fragments_two do
  # :not_where — negated ILIKE over a two-field fragment.
  def handle_ilike(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :not_where
      ) do
    queryable
    |> where(
      [..., b],
      not ilike(
        fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)),
        ^"%#{String.replace(search_term, "%", "\\%")}%"
      )
    )
  end
end

# :not_where — negated ILIKE over a plain schema field.
def handle_ilike(queryable, field, search_term, :not_where) do
  queryable
  |> where(
    [..., b],
    not ilike(field(b, ^field), ^"%#{String.replace(search_term, "%", "\\%")}%")
  )
end

# Catch-all: anything not matched above (e.g. non-binary `search_term` with
# :where, or an unsupported search_expr) is a caller error.
def handle_ilike(_queryable, _field, _search_term, _),
  do: raise(ArgumentError, message: "argument error")
@doc """
Builds a searched `queryable` on top of the given `queryable` using
`field`, `search_term` and `search_expr` when the `search_type` is `eq`.
Assumes that `search_expr` is in #{inspect(@search_exprs)}.
## Examples
When `search_expr` is `:where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_eq(queryable, :field_1, "field_!", :where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 == ^\"field_!\">
When `search_expr` is `:or_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_eq(queryable, :field_1, "field_!", :or_where)
#Ecto.Query<from p0 in \"parents\", or_where: p0.field_1 == ^\"field_!\">
When `search_expr` is `:not_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_eq(queryable, :field_1, "field_!", :not_where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 != ^\"field_!\">
"""
# Spec widened from `atom()` to `atom() | tuple()`: the generated clauses also
# accept `{:fragment, format, field(s)}` tuples, matching handle_like/4.
@spec handle_eq(Ecto.Query.t(), atom() | tuple(), term(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
# :where clauses — equality; `[..., b]` binds the query's last binding.
for fragment <- @supported_fragments_one do
  def handle_eq(queryable, {:fragment, unquote(fragment), field}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) == ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_eq(queryable, {:fragment, unquote(fragment), field1, field2}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) == ^search_term
    )
  end
end

def handle_eq(queryable, field, search_term, :where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) == ^search_term
  )
end

# :or_where clauses — equality OR-ed onto the existing conditions.
for fragment <- @supported_fragments_one do
  def handle_eq(queryable, {:fragment, unquote(fragment), field}, search_term, :or_where) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) == ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_eq(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :or_where
      ) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) == ^search_term
    )
  end
end

def handle_eq(queryable, field, search_term, :or_where) do
  queryable
  |> or_where(
    [..., b],
    field(b, ^field) == ^search_term
  )
end

# :not_where clauses — negation expressed as `!=` inside a regular where.
for fragment <- @supported_fragments_one do
  def handle_eq(queryable, {:fragment, unquote(fragment), field}, search_term, :not_where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) != ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_eq(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :not_where
      ) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) != ^search_term
    )
  end
end

def handle_eq(queryable, field, search_term, :not_where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) != ^search_term
  )
end
@doc """
Builds a searched `queryable` on top of the given `queryable` using
`field`, `search_term` and `search_expr` when the `search_type` is `gt`.
Assumes that `search_expr` is in #{inspect(@search_exprs)}.
## Examples
When `search_expr` is `:where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_gt(queryable, :field_1, "field_!", :where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 > ^\"field_!\">
When `search_expr` is `:or_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_gt(queryable, :field_1, "field_!", :or_where)
#Ecto.Query<from p0 in \"parents\", or_where: p0.field_1 > ^\"field_!\">
When `search_expr` is `:not_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_gt(queryable, :field_1, "field_!", :not_where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 <= ^\"field_!\">
"""
# Spec widened from `atom()` to `atom() | tuple()`: the generated clauses also
# accept `{:fragment, format, field(s)}` tuples, matching handle_like/4.
@spec handle_gt(Ecto.Query.t(), atom() | tuple(), term(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
# :where clauses — strictly greater than.
for fragment <- @supported_fragments_one do
  def handle_gt(queryable, {:fragment, unquote(fragment), field}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) > ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_gt(queryable, {:fragment, unquote(fragment), field1, field2}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) > ^search_term
    )
  end
end

def handle_gt(queryable, field, search_term, :where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) > ^search_term
  )
end

# :or_where clauses.
for fragment <- @supported_fragments_one do
  def handle_gt(queryable, {:fragment, unquote(fragment), field}, search_term, :or_where) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) > ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_gt(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :or_where
      ) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) > ^search_term
    )
  end
end

def handle_gt(queryable, field, search_term, :or_where) do
  queryable
  |> or_where(
    [..., b],
    field(b, ^field) > ^search_term
  )
end

# :not_where clauses — NOT (>) expressed as `<=` inside a regular where.
for fragment <- @supported_fragments_one do
  def handle_gt(queryable, {:fragment, unquote(fragment), field}, search_term, :not_where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) <= ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_gt(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :not_where
      ) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) <= ^search_term
    )
  end
end

def handle_gt(queryable, field, search_term, :not_where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) <= ^search_term
  )
end
@doc """
Builds a searched `queryable` on top of the given `queryable` using
`field`, `search_term` and `search_expr` when the `search_type` is `lt`.
Assumes that `search_expr` is in #{inspect(@search_exprs)}.
## Examples
When `search_expr` is `:where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_lt(queryable, :field_1, "field_!", :where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 < ^\"field_!\">
When `search_expr` is `:or_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_lt(queryable, :field_1, "field_!", :or_where)
#Ecto.Query<from p0 in \"parents\", or_where: p0.field_1 < ^\"field_!\">
When `search_expr` is `:not_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_lt(queryable, :field_1, "field_!", :not_where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 >= ^\"field_!\">
"""
# Spec widened from `atom()` to `atom() | tuple()`: the generated clauses also
# accept `{:fragment, format, field(s)}` tuples, matching handle_like/4.
@spec handle_lt(Ecto.Query.t(), atom() | tuple(), term(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
# :where clauses — strictly less than.
for fragment <- @supported_fragments_one do
  def handle_lt(queryable, {:fragment, unquote(fragment), field}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) < ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_lt(queryable, {:fragment, unquote(fragment), field1, field2}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) < ^search_term
    )
  end
end

def handle_lt(queryable, field, search_term, :where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) < ^search_term
  )
end

# :or_where clauses.
for fragment <- @supported_fragments_one do
  def handle_lt(queryable, {:fragment, unquote(fragment), field}, search_term, :or_where) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) < ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_lt(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :or_where
      ) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) < ^search_term
    )
  end
end

def handle_lt(queryable, field, search_term, :or_where) do
  queryable
  |> or_where(
    [..., b],
    field(b, ^field) < ^search_term
  )
end

# :not_where clauses — NOT (<) expressed as `>=` inside a regular where.
for fragment <- @supported_fragments_one do
  def handle_lt(queryable, {:fragment, unquote(fragment), field}, search_term, :not_where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) >= ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_lt(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :not_where
      ) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) >= ^search_term
    )
  end
end

def handle_lt(queryable, field, search_term, :not_where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) >= ^search_term
  )
end
@doc """
Builds a searched `queryable` on top of the given `queryable` using
`field`, `search_term` and `search_expr` when the `search_type` is `gteq`.
Assumes that `search_expr` is in #{inspect(@search_exprs)}.
## Examples
When `search_expr` is `:where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_gteq(queryable, :field_1, "field_!", :where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 >= ^\"field_!\">
When `search_expr` is `:or_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_gteq(queryable, :field_1, "field_!", :or_where)
#Ecto.Query<from p0 in \"parents\", or_where: p0.field_1 >= ^\"field_!\">
When `search_expr` is `:not_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_gteq(queryable, :field_1, "field_!", :not_where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 < ^\"field_!\">
"""
# Spec widened from `atom()` to `atom() | tuple()`: the generated clauses also
# accept `{:fragment, format, field(s)}` tuples, matching handle_like/4.
@spec handle_gteq(Ecto.Query.t(), atom() | tuple(), term(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
# :where clauses — greater than or equal.
for fragment <- @supported_fragments_one do
  def handle_gteq(queryable, {:fragment, unquote(fragment), field}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) >= ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_gteq(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :where
      ) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) >= ^search_term
    )
  end
end

def handle_gteq(queryable, field, search_term, :where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) >= ^search_term
  )
end

# :or_where clauses.
for fragment <- @supported_fragments_one do
  def handle_gteq(queryable, {:fragment, unquote(fragment), field}, search_term, :or_where) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) >= ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_gteq(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :or_where
      ) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) >= ^search_term
    )
  end
end

def handle_gteq(queryable, field, search_term, :or_where) do
  queryable
  |> or_where(
    [..., b],
    field(b, ^field) >= ^search_term
  )
end

# :not_where clauses — NOT (>=) expressed as `<` inside a regular where.
for fragment <- @supported_fragments_one do
  def handle_gteq(queryable, {:fragment, unquote(fragment), field}, search_term, :not_where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) < ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_gteq(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :not_where
      ) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) < ^search_term
    )
  end
end

def handle_gteq(queryable, field, search_term, :not_where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) < ^search_term
  )
end
@doc """
Builds a searched `queryable` on top of the given `queryable` using
`field`, `search_term` and `search_expr` when the `search_type` is `lteq`.
Assumes that `search_expr` is in #{inspect(@search_exprs)}.
## Examples
When `search_expr` is `:where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_lteq(queryable, :field_1, "field_!", :where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 <= ^\"field_!\">
When `search_expr` is `:or_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_lteq(queryable, :field_1, "field_!", :or_where)
#Ecto.Query<from p0 in \"parents\", or_where: p0.field_1 <= ^\"field_!\">
When `search_expr` is `:not_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_lteq(queryable, :field_1, "field_!", :not_where)
#Ecto.Query<from p0 in \"parents\", where: p0.field_1 > ^\"field_!\">
"""
# Spec widened from `atom()` to `atom() | tuple()`: the generated clauses also
# accept `{:fragment, format, field(s)}` tuples, matching handle_like/4.
@spec handle_lteq(Ecto.Query.t(), atom() | tuple(), term(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
# :where clauses — less than or equal.
for fragment <- @supported_fragments_one do
  def handle_lteq(queryable, {:fragment, unquote(fragment), field}, search_term, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) <= ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_lteq(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :where
      ) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) <= ^search_term
    )
  end
end

def handle_lteq(queryable, field, search_term, :where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) <= ^search_term
  )
end

# :or_where clauses.
for fragment <- @supported_fragments_one do
  def handle_lteq(queryable, {:fragment, unquote(fragment), field}, search_term, :or_where) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) <= ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_lteq(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :or_where
      ) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) <= ^search_term
    )
  end
end

def handle_lteq(queryable, field, search_term, :or_where) do
  queryable
  |> or_where(
    [..., b],
    field(b, ^field) <= ^search_term
  )
end

# :not_where clauses — NOT (<=) expressed as `>` inside a regular where.
for fragment <- @supported_fragments_one do
  def handle_lteq(queryable, {:fragment, unquote(fragment), field}, search_term, :not_where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) > ^search_term
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_lteq(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        search_term,
        :not_where
      ) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) > ^search_term
    )
  end
end

def handle_lteq(queryable, field, search_term, :not_where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) > ^search_term
  )
end
@doc """
Builds a searched `queryable` on `field` is_nil (when `term` is true),
or not is_nil (when `term` is false), based on `search_expr` given.
Checkout [Ecto.Query.API.is_nil/1](https://hexdocs.pm/ecto/Ecto.Query.API.html#is_nil/1)
for more info.
Assumes that `search_expr` is in #{inspect(@search_exprs)}.
## Examples
When `search_expr` is `:where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_is_nil(queryable, :field_1, true, :where)
#Ecto.Query<from p0 in "parents", where: is_nil(p0.field_1)>
iex> BuildSearchQuery.handle_is_nil(queryable, :field_1, false, :where)
#Ecto.Query<from p0 in "parents", where: not(is_nil(p0.field_1))>
When `search_expr` is `:or_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_is_nil(queryable, :field_1, true, :or_where)
#Ecto.Query<from p0 in "parents", or_where: is_nil(p0.field_1)>
iex> BuildSearchQuery.handle_is_nil(queryable, :field_1, false, :or_where)
#Ecto.Query<from p0 in "parents", or_where: not(is_nil(p0.field_1))>
When `search_expr` is `:not_where`
iex> alias Rummage.Ecto.Services.BuildSearchQuery
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p0 in "parents">
iex> BuildSearchQuery.handle_is_nil(queryable, :field_1, true, :not_where)
#Ecto.Query<from p0 in "parents", where: not(is_nil(p0.field_1))>
iex> BuildSearchQuery.handle_is_nil(queryable, :field_1, false, :not_where)
#Ecto.Query<from p0 in "parents", where: is_nil(p0.field_1)>
"""
# Spec widened from `atom()` to `atom() | tuple()`: the generated clauses also
# accept `{:fragment, format, field(s)}` tuples, matching handle_like/4.
@spec handle_is_nil(Ecto.Query.t(), atom() | tuple(), boolean(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
# term = true, :where — keep rows where the field IS NULL.
for fragment <- @supported_fragments_one do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field}, true, :where) do
    queryable
    |> where(
      [..., b],
      is_nil(fragment(unquote(fragment), field(b, ^field)))
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field1, field2}, true, :where) do
    queryable
    |> where(
      [..., b],
      is_nil(fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)))
    )
  end
end

def handle_is_nil(queryable, field, true, :where) do
  queryable
  |> where(
    [..., b],
    is_nil(field(b, ^field))
  )
end

# term = true, :or_where — OR in an IS NULL condition.
for fragment <- @supported_fragments_one do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field}, true, :or_where) do
    queryable
    |> or_where(
      [..., b],
      is_nil(fragment(unquote(fragment), field(b, ^field)))
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field1, field2}, true, :or_where) do
    queryable
    |> or_where(
      [..., b],
      is_nil(fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)))
    )
  end
end

def handle_is_nil(queryable, field, true, :or_where) do
  queryable
  |> or_where(
    [..., b],
    is_nil(field(b, ^field))
  )
end

# term = true, :not_where — negated: keep rows where the field is NOT NULL.
for fragment <- @supported_fragments_one do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field}, true, :not_where) do
    queryable
    |> where(
      [..., b],
      not is_nil(fragment(unquote(fragment), field(b, ^field)))
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field1, field2}, true, :not_where) do
    queryable
    |> where(
      [..., b],
      not is_nil(fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)))
    )
  end
end

def handle_is_nil(queryable, field, true, :not_where) do
  queryable
  |> where(
    [..., b],
    not is_nil(field(b, ^field))
  )
end

# term = false, :where — keep rows where the field is NOT NULL.
for fragment <- @supported_fragments_one do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field}, false, :where) do
    queryable
    |> where(
      [..., b],
      not is_nil(fragment(unquote(fragment), field(b, ^field)))
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field1, field2}, false, :where) do
    queryable
    |> where(
      [..., b],
      not is_nil(fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)))
    )
  end
end

def handle_is_nil(queryable, field, false, :where) do
  queryable
  |> where(
    [..., b],
    not is_nil(field(b, ^field))
  )
end

# term = false, :or_where — OR in a NOT NULL condition.
for fragment <- @supported_fragments_one do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field}, false, :or_where) do
    queryable
    |> or_where(
      [..., b],
      not is_nil(fragment(unquote(fragment), field(b, ^field)))
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field1, field2}, false, :or_where) do
    queryable
    |> or_where(
      [..., b],
      not is_nil(fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)))
    )
  end
end

def handle_is_nil(queryable, field, false, :or_where) do
  queryable
  |> or_where(
    [..., b],
    not is_nil(field(b, ^field))
  )
end

# term = false, :not_where — double negation: keep rows where the field IS NULL.
for fragment <- @supported_fragments_one do
  def handle_is_nil(queryable, {:fragment, unquote(fragment), field}, false, :not_where) do
    queryable
    |> where(
      [..., b],
      is_nil(fragment(unquote(fragment), field(b, ^field)))
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_is_nil(
        queryable,
        {:fragment, unquote(fragment), field1, field2},
        false,
        :not_where
      ) do
    queryable
    |> where(
      [..., b],
      is_nil(fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)))
    )
  end
end

def handle_is_nil(queryable, field, false, :not_where) do
  queryable
  |> where(
    [..., b],
    is_nil(field(b, ^field))
  )
end
@doc """
Builds a searched `queryable` on `field` based on whether it exists in
a given collection, based on `search_expr` given.

Checkout [Ecto.Query.API.in/2](https://hexdocs.pm/ecto/Ecto.Query.API.html#in/2)
for more info.

Assumes that `search_expr` is in #{inspect(@search_exprs)}.

## Examples

When `search_expr` is `:where`

    iex> alias Rummage.Ecto.Services.BuildSearchQuery
    iex> import Ecto.Query
    iex> queryable = from u in "parents"
    #Ecto.Query<from p0 in "parents">
    iex> BuildSearchQuery.handle_in(queryable, :field_1, ["a", "b"], :where)
    #Ecto.Query<from p0 in "parents", where: p0.field_1 in ^["a", "b"]>

When `search_expr` is `:or_where`

    iex> alias Rummage.Ecto.Services.BuildSearchQuery
    iex> import Ecto.Query
    iex> queryable = from u in "parents"
    #Ecto.Query<from p0 in "parents">
    iex> BuildSearchQuery.handle_in(queryable, :field_1, ["a", "b"], :or_where)
    #Ecto.Query<from p0 in "parents", or_where: p0.field_1 in ^["a", "b"]>

When `search_expr` is `:not_where`

    iex> alias Rummage.Ecto.Services.BuildSearchQuery
    iex> import Ecto.Query
    iex> queryable = from u in "parents"
    #Ecto.Query<from p0 in "parents">
    iex> BuildSearchQuery.handle_in(queryable, :field_1, ["a", "b"], :not_where)
    #Ecto.Query<from p0 in "parents", where: p0.field_1 not in ^["a", "b"]>

"""
# Fixed: this spec was previously declared for `handle_is_nil/4` with a
# `boolean()` third argument — a copy-paste slip. It belongs to
# `handle_in/4`, whose third argument is the list of candidate values.
@spec handle_in(Ecto.Query.t(), atom(), list(), __MODULE__.search_expr()) ::
        Ecto.Query.t()
for fragment <- @supported_fragments_one do
  # :where, field wrapped in a one-argument whitelisted fragment.
  def handle_in(queryable, {:fragment, unquote(fragment), field}, list, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) in ^list
    )
  end
end

for fragment <- @supported_fragments_two do
  # :where, two-argument fragment.
  def handle_in(queryable, {:fragment, unquote(fragment), field1, field2}, list, :where) do
    queryable
    |> where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) in ^list
    )
  end
end

# :where, bare field with no fragment wrapping.
def handle_in(queryable, field, list, :where) do
  queryable
  |> where(
    [..., b],
    field(b, ^field) in ^list
  )
end
# handle_in/4, :or_where variants — same membership condition, attached
# with OR to the existing where clauses.
for fragment <- @supported_fragments_one do
  def handle_in(queryable, {:fragment, unquote(fragment), field}, list, :or_where) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field)) in ^list
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_in(queryable, {:fragment, unquote(fragment), field1, field2}, list, :or_where) do
    queryable
    |> or_where(
      [..., b],
      fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) in ^list
    )
  end
end

# :or_where, bare field fallback.
def handle_in(queryable, field, list, :or_where) do
  queryable
  |> or_where(
    [..., b],
    field(b, ^field) in ^list
  )
end

# handle_in/4, :not_where variants — negated membership (NOT IN).
for fragment <- @supported_fragments_one do
  def handle_in(queryable, {:fragment, unquote(fragment), field}, list, :not_where) do
    queryable
    |> where(
      [..., b],
      not (fragment(unquote(fragment), field(b, ^field)) in ^list)
    )
  end
end

for fragment <- @supported_fragments_two do
  def handle_in(queryable, {:fragment, unquote(fragment), field1, field2}, list, :not_where) do
    queryable
    |> where(
      [..., b],
      not (fragment(unquote(fragment), field(b, ^field1), field(b, ^field2)) in ^list)
    )
  end
end

# :not_where, bare field fallback.
def handle_in(queryable, field, list, :not_where) do
  queryable
  |> where(
    [..., b],
    not (field(b, ^field) in ^list)
  )
end
end
| 31.098403 | 102 | 0.609443 |
73ceb240245a95ddb9970cd19d38795eb09a52ed | 3,253 | ex | Elixir | lib/ex_admin/themes/admin_lte2/layout.ex | andriybohdan/ex_admin | e31c725078ac4e7390204a87d96360a21ffe7b90 | [
"MIT"
] | 1 | 2018-08-30T20:20:56.000Z | 2018-08-30T20:20:56.000Z | lib/ex_admin/themes/admin_lte2/layout.ex | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
] | null | null | null | lib/ex_admin/themes/admin_lte2/layout.ex | 8thlight/ex_admin | 314d4068270c47799ec54f719073a565222bcfad | [
"MIT"
] | 2 | 2018-07-12T07:44:50.000Z | 2018-07-19T11:45:09.000Z | defmodule ExAdmin.Theme.AdminLte2.Layout do
@moduledoc false
import ExAdmin.Navigation
import Phoenix.HTML.Tag, only: [content_tag: 2, content_tag: 3]
use Xain
require Logger
@doc """
Renders a sidebar navigation entry for `path`.

The entry is an anchor with an icon (a dashboard glyph for the dashboard
page, otherwise the first letter of `name` as a label) and the link text.
Options:

  * `:wrapper` - element to wrap the anchor in (default `:li`, `:none`
    returns the bare anchor)
  * `:active_class` - CSS class applied when `path` matches the current
    request (default `"active"`)
"""
def link_to_active(conn, name, path, id, opts \\ []) do
  wrapper = Keyword.get(opts, :wrapper, :li)

  # Only carry the active class when the link matches the current page.
  class =
    case link_active?(conn, path) do
      true -> Keyword.get(opts, :active_class, "active")
      false -> ""
    end

  icon =
    if path == ExAdmin.Utils.admin_path(:page, [:dashboard]) do
      content_tag(:i, "", class: "fa fa-dashboard")
    else
      content_tag(:i, String.at(name, 0), class: "nav-label label label-info")
    end

  anchor = content_tag(:a, [icon, content_tag(:span, name)], href: path)

  case wrapper do
    :none -> anchor
    tag -> content_tag(tag, anchor, id: id, class: class)
  end
end
@doc """
Returns `true` when a theme selector list is configured under the
`:ex_admin, :theme_selector` application environment key.
"""
def theme_selector? do
  Application.get_env(:ex_admin, :theme_selector) != nil
end
@doc """
Renders the configured theme selector entries.

Reads the theme list from the `:ex_admin, :theme_selector` application
environment and renders one entry per theme. Returns `""` when no
selector is configured.
"""
def theme_selector do
  # Application.get_env/2 returns nil when no selector is configured.
  # Guard before Enum.with_index/1, which raises Protocol.UndefinedError
  # on nil — previously that made the `theme_selector(nil)` fallback
  # clause unreachable through this path.
  case Application.get_env(:ex_admin, :theme_selector) do
    nil -> ""
    themes -> themes |> Enum.with_index() |> theme_selector()
  end
end
# Renders one <li> per configured theme; no markup for a missing config.
defp theme_selector(nil), do: ""

defp theme_selector(options) do
  # `options` is the configured {name, theme} list tagged with its index
  # by Enum.with_index/1; the index is what the front-end sends back via
  # the "data-theme" attribute.
  current = Application.get_env(:ex_admin, :theme)

  for {{name, theme}, inx} <- options do
    # Highlight the entry matching the currently active theme.
    active = if current == theme, do: "active", else: ""

    content_tag :li, class: active do
      content_tag :a, name, href: "#", "data-theme": "#{inx}", class: "theme-selector"
    end
  end
end
# Renders a "Switch User" dropdown when the connection carries a
# :switch_users assign of the shape [current_id | candidates], where each
# candidate is a {name, id, path} tuple. The entry whose id matches
# current_id is marked active. Returns nil when the assign is absent.
def switch_user(%{assigns: %{switch_users: [current_id | users]}}) do
  content_tag :li, class: "dropdown", style: "width: 155px" do
    [
      content_tag :a, href: "#", class: "dropdown-toggle", "data-toggle": "dropdown" do
        [
          Phoenix.HTML.raw("Switch User"),
          content_tag(:span, nil, class: "caret")
        ]
      end,
      content_tag :ul, class: "dropdown-menu", role: "menu" do
        for {name, id, path} <- users do
          active = if id == current_id, do: "active", else: ""

          content_tag :li, class: active do
            # The "data-path" attribute is picked up client-side to
            # perform the actual switch.
            content_tag :a, name, href: "#", "data-path": path, class: "switch-user"
          end
        end
      end
    ]
  end
end

# No :switch_users assign -> render nothing.
def switch_user(_conn), do: nil
# No markup for an empty breadcrumb trail.
def render_breadcrumbs([]), do: nil

# Renders a breadcrumb <ol> from {url, label} tuples using the Xain
# markup DSL (ol/li/a are Xain macros writing into the markup buffer).
def render_breadcrumbs(list) do
  ol(".breadcrumb") do
    Enum.each list, fn({link, name}) ->
      li do
        a(name, href: link)
      end
    end
  end
end
# Wraps the output of `fun` in the theme's title-bar <section> element
# (Xain `section` macro; "#title_bar.content-header" sets id and class).
def wrap_title_bar(fun) do
  section("#title_bar.content-header") do
    fun.()
  end
end
# Renders a sidebar panel ("box") for the given resource.
#
# `name` is the panel title and `{mod, fun}` is invoked as
# `mod.fun(conn, resource)`; the callback may return a string, a
# {_, iodata} tuple, or :ok (rendered as empty). The box/header/body
# CSS selectors can be customised through `opts`.
def sidebar_view(conn, {name, opts, {mod, fun}}, resource) do
  box_attributes = Keyword.get(opts, :box_attributes, ".box.box-primary")
  header_attributes = Keyword.get(opts, :header_attributes, ".box-header.with-border")
  body_attributes = Keyword.get(opts, :body_attributes, ".box-body")

  markup safe: true do
    div box_attributes do
      div header_attributes do
        h3 ".box-title #{name}"
      end

      div body_attributes do
        # Normalize the callback's return into markup text.
        case apply mod, fun, [conn, resource] do
          {_, rest} -> text rest
          :ok -> ""
          other -> text other
        end
      end
    end
  end
end
end
| 30.401869 | 89 | 0.599139 |
73cebd7728ff2505e1cdba2148df6269e3e97671 | 17,853 | ex | Elixir | lib/elixir/lib/kernel/parallel_compiler.ex | AsharDweedar/elixir | 2c5b3c8c362e301b9f074db38b62470050fe243d | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/parallel_compiler.ex | AsharDweedar/elixir | 2c5b3c8c362e301b9f074db38b62470050fe243d | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/elixir/lib/kernel/parallel_compiler.ex | AsharDweedar/elixir | 2c5b3c8c362e301b9f074db38b62470050fe243d | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | defmodule Kernel.ParallelCompiler do
@moduledoc """
A module responsible for compiling and requiring files in parallel.
"""

@doc """
Starts a task for parallel compilation.

If you have a file that needs to compile other modules in parallel,
the spawned processes need to be aware of the compiler environment.
This function allows a developer to create a task that is aware of
those environments.

See `Task.async/1` for more information. The task spawned must be
always awaited on by calling `Task.await/1`
"""
@doc since: "1.6.0"
def async(fun) when is_function(fun) do
  if parent = :erlang.get(:elixir_compiler_pid) do
    # Capture the compiler bookkeeping stored in this process's
    # dictionary so the spawned task is treated as part of the same
    # compilation unit.
    file = :erlang.get(:elixir_compiler_file)
    dest = :erlang.get(:elixir_compiler_dest)
    {:error_handler, error_handler} = :erlang.process_info(self(), :error_handler)

    Task.async(fn ->
      :erlang.put(:elixir_compiler_pid, parent)
      :erlang.put(:elixir_compiler_file, file)
      # :undefined means no dest was set; don't propagate it.
      dest != :undefined and :erlang.put(:elixir_compiler_dest, dest)
      # Inherit the compiler's error handler so missing modules feed the
      # parallel-compilation machinery instead of crashing outright.
      :erlang.process_flag(:error_handler, error_handler)
      fun.()
    end)
  else
    raise ArgumentError,
          "cannot spawn parallel compiler task because " <>
            "the current file is not being compiled/required"
  end
end
@doc """
Compiles the given files.

Those files are compiled in parallel and can automatically
detect dependencies between them. Once a dependency is found,
the current file stops being compiled until the dependency is
resolved.

It returns `{:ok, modules, warnings}` or `{:error, errors, warnings}`.
Both errors and warnings are a list of three element tuples containing
the file, line and the formatted error/warning.

## Options

  * `:each_file` - for each file compiled, invokes the callback passing the
    file

  * `:each_long_compilation` - for each file that takes more than a given
    timeout (see the `:long_compilation_threshold` option) to compile, invoke
    this callback passing the file as its argument

  * `:each_module` - for each module compiled, invokes the callback passing
    the file, module and the module bytecode

  * `:each_cycle` - after the given files are compiled, invokes this function
    that return a list with potentially more files to compile

  * `:long_compilation_threshold` - the timeout (in seconds) after the
    `:each_long_compilation` callback is invoked; defaults to `15`

  * `:dest` - the destination directory for the BEAM files. When using `files/2`,
    this information is only used to properly annotate the BEAM files before
    they are loaded into memory. If you want a file to actually be written to
    `dest`, use `compile_to_path/3` instead.

"""
@doc since: "1.6.0"
def compile(files, options \\ []) when is_list(options) do
  spawn_workers(files, :compile, options)
end

# Same as compile/2, but BEAM files are written to `path`.
@doc since: "1.6.0"
def compile_to_path(files, path, options \\ []) when is_binary(path) and is_list(options) do
  spawn_workers(files, {:compile, path}, options)
end
@doc """
Requires the given files in parallel.

Opposite to compile, dependencies are not attempted to be
automatically solved between files.

It returns `{:ok, modules, warnings}` or `{:error, errors, warnings}`.
Both errors and warnings are a list of three element tuples containing
the file, line and the formatted error/warning.

## Options

  * `:each_file` - for each file compiled, invokes the callback passing the
    file

  * `:each_module` - for each module compiled, invokes the callback passing
    the file, module and the module bytecode

"""
@doc since: "1.6.0"
def require(files, options \\ []) when is_list(options) do
  spawn_workers(files, :require, options)
end
# TODO: Remove on 2.0
@doc false
@deprecated "Use Kernel.ParallelCompiler.compile/2 instead"
def files(files, options \\ []) when is_list(options) do
  files
  |> spawn_workers(:compile, options)
  |> modules_or_shutdown()
end

# TODO: Remove on 2.0
@doc false
@deprecated "Use Kernel.ParallelCompiler.compile_to_path/2 instead"
def files_to_path(files, path, options \\ []) when is_binary(path) and is_list(options) do
  files
  |> spawn_workers({:compile, path}, options)
  |> modules_or_shutdown()
end

# Legacy result handling shared by the deprecated entry points: return
# the module list on success, abort with a non-zero status on failure.
defp modules_or_shutdown({:ok, modules, _warnings}), do: modules
defp modules_or_shutdown({:error, _errors, _warnings}), do: exit({:shutdown, 1})
# Entry point of the compilation loop: normalizes `options` into the
# state map threaded through spawn_workers/6 and post-processes the
# result against the code server's warnings-as-errors status.
defp spawn_workers(files, output, options) do
  {:module, _} = :code.ensure_loaded(Kernel.ErrorHandler)
  compiler_pid = self()
  :elixir_code_server.cast({:reset_warnings, compiler_pid})
  # At least two workers so dependent files can make progress.
  schedulers = max(:erlang.system_info(:schedulers_online), 2)

  result =
    spawn_workers(files, [], [], [], [], %{
      dest: Keyword.get(options, :dest),
      each_cycle: Keyword.get(options, :each_cycle, fn -> [] end),
      each_file: Keyword.get(options, :each_file, fn _file -> :ok end),
      each_long_compilation: Keyword.get(options, :each_long_compilation, fn _file -> :ok end),
      each_module: Keyword.get(options, :each_module, fn _file, _module, _binary -> :ok end),
      output: output,
      long_compilation_threshold: Keyword.get(options, :long_compilation_threshold, 15),
      schedulers: schedulers
    })

  # In case --warning-as-errors is enabled and there was a warning,
  # compilation status will be set to error.
  compilation_status = :elixir_code_server.call({:compilation_status, compiler_pid})

  case {result, compilation_status} do
    {{:ok, _, warnings}, :error} ->
      message = "Compilation failed due to warnings while using the --warnings-as-errors option"
      IO.puts(:stderr, message)
      {:error, warnings, []}

    {{:error, errors, warnings}, :error} ->
      {:error, errors ++ warnings, []}

    _ ->
      result
  end
end
# spawn_workers/6 drives the compilation state machine. Arguments:
# files still to process, processes waiting on a module/struct, queued
# (spawned) workers, accumulated results, accumulated warnings, state.

# We already have n=schedulers currently running, don't spawn new ones
defp spawn_workers(files, waiting, queued, result, warnings, %{schedulers: schedulers} = state)
     when length(queued) - length(waiting) >= schedulers do
  wait_for_messages(files, waiting, queued, result, warnings, state)
end

# Release waiting processes
defp spawn_workers([{ref, found} | t], waiting, queued, result, warnings, state) do
  waiting =
    case List.keytake(waiting, ref, 2) do
      {{_kind, pid, ^ref, _on, _defining}, waiting} ->
        send(pid, {ref, found})
        waiting

      nil ->
        waiting
    end

  spawn_workers(t, waiting, queued, result, warnings, state)
end

# Spawn a monitored worker for the next file and queue it with a
# long-compilation timer.
defp spawn_workers([file | files], waiting, queued, result, warnings, state) do
  %{output: output, long_compilation_threshold: threshold, dest: dest} = state
  parent = self()

  {pid, ref} =
    :erlang.spawn_monitor(fn ->
      :erlang.put(:elixir_compiler_pid, parent)
      :erlang.put(:elixir_compiler_file, file)

      result =
        try do
          _ =
            case output do
              {:compile, path} ->
                :erlang.process_flag(:error_handler, Kernel.ErrorHandler)
                :erlang.put(:elixir_compiler_dest, path)
                :elixir_compiler.file_to_path(Path.expand(file), path)

              :compile ->
                :erlang.process_flag(:error_handler, Kernel.ErrorHandler)
                :erlang.put(:elixir_compiler_dest, dest)
                Code.compile_file(file)

              :require ->
                Code.require_file(file)
            end

          :ok
        catch
          kind, reason ->
            {kind, reason, __STACKTRACE__}
        end

      send(parent, {:file_done, self(), file, result})
      exit(:shutdown)
    end)

  timer_ref = Process.send_after(self(), {:timed_out, pid}, threshold * 1000)
  queued = [{pid, ref, file, timer_ref} | queued]
  spawn_workers(files, waiting, queued, result, warnings, state)
end

# No more files, nothing waiting, queue is empty, this cycle is done
defp spawn_workers([], [], [], result, warnings, state) do
  case state.each_cycle.() do
    [] ->
      modules = for {:module, mod} <- result, do: mod
      warnings = Enum.reverse(warnings)
      {:ok, modules, warnings}

    more ->
      spawn_workers(more, [], [], result, warnings, state)
  end
end

# Queued x, waiting for x: POSSIBLE ERROR! Release processes so we get the failures
# Single entry, just release it.
defp spawn_workers([], [_] = waiting, [_] = queued, result, warnings, state) do
  [{_, _, ref, _, _}] = waiting
  spawn_workers([{ref, :not_found}], waiting, queued, result, warnings, state)
end

# Multiple entries, try to release modules.
defp spawn_workers([], waiting, queued, result, warnings, state)
     when length(waiting) == length(queued) do
  # The goal of this function is to find leaves in the dependency graph,
  # i.e. to find code that depends on code that we know is not being defined.
  without_definition =
    for {pid, _, _, _} <- queued,
        entry = waiting_on_without_definition(waiting, pid),
        do: entry

  # Note we only release modules because those can be rescued. A missing
  # struct is a guaranteed compile error, so we never release it and treat
  # it exclusively a missing entry/deadlock.
  pending =
    for {:module, _, ref, on, _} <- without_definition,
        do: {on, {ref, :not_found}}

  # Instead of releasing all files at once, we release them in groups
  # based on the module they are waiting on. We pick the module being
  # depended on with less edges, as it is the mostly likely source of
  # error (for example, someone made a typo). This may not always be
  # true though. For example, if there is a macro injecting code into
  # multiple modules and such code becomes faulty, now multiple modules
  # are waiting on the same module required by the faulty code. However,
  # since we need to pick something to be first, the one with fewer edges
  # sounds like a sane choice.
  pending
  |> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
  |> Enum.sort_by(&length(elem(&1, 1)))
  |> case do
    [{_on, refs} | _] ->
      spawn_workers(refs, waiting, queued, result, warnings, state)

    [] ->
      # There is a deadlock. Instead of printing a deadlock, let's release
      # structs, as a missing struct error is clearer than a deadlock one.
      structs = for {:struct, _, ref, _, _} <- without_definition, do: {ref, :not_found}

      if structs != [] do
        spawn_workers(structs, waiting, queued, result, warnings, state)
      else
        errors = handle_deadlock(waiting, queued)
        {:error, errors, warnings}
      end
  end
end

# No more files, but queue and waiting are not full or do not match
defp spawn_workers([], waiting, queued, result, warnings, state) do
  wait_for_messages([], waiting, queued, result, warnings, state)
end
# Returns the waiting entry for `pid` when the module/struct it waits on
# is not being defined by any other waiting process; nil otherwise.
defp waiting_on_without_definition(waiting, pid) do
  {_kind, ^pid, _ref, on, _defining} = entry = List.keyfind(waiting, pid, 1)

  defined_elsewhere? =
    Enum.any?(waiting, fn {_, _, _, _, defining} -> on in defining end)

  if defined_elsewhere?, do: nil, else: entry
end
# Wait for messages from child processes
#
# Central receive loop: reacts to worker notifications (module/struct
# available, waiting on a dependency, warnings, completion, timeouts and
# monitor DOWNs) and loops back into spawn_workers/6 or returns the
# final {:error, ...} tuple.
defp wait_for_messages(files, waiting, queued, result, warnings, state) do
  %{output: output} = state

  receive do
    {:struct_available, module} ->
      # Wake up every process waiting on this struct.
      available =
        for {:struct, _, ref, waiting_module, _defining} <- waiting,
            module == waiting_module,
            do: {ref, :found}

      result = [{:struct, module} | result]
      spawn_workers(available ++ files, waiting, queued, result, warnings, state)

    {:module_available, child, ref, file, module, binary} ->
      state.each_module.(file, module, binary)

      # Release the module loader which is waiting for an ack
      send(child, {ref, :ack})

      available =
        for {:module, _, ref, waiting_module, _defining} <- waiting,
            module == waiting_module,
            do: {ref, :found}

      cancel_waiting_timer(queued, child)
      result = [{:module, module} | result]
      spawn_workers(available ++ files, waiting, queued, result, warnings, state)

    # If we are simply requiring files, we do not add to waiting.
    {:waiting, _kind, child, ref, _on, _defining} when output == :require ->
      send(child, {ref, :not_found})
      spawn_workers(files, waiting, queued, result, warnings, state)

    {:waiting, kind, child, ref, on, defining} ->
      # Oops, we already got it, do not put it on waiting.
      # Alternatively, we're waiting on ourselves,
      # send :found so that we can crash with a better error.
      waiting =
        if :lists.any(&match?({^kind, ^on}, &1), result) or on in defining do
          send(child, {ref, :found})
          waiting
        else
          [{kind, child, ref, on, defining} | waiting]
        end

      spawn_workers(files, waiting, queued, result, warnings, state)

    {:timed_out, child} ->
      # Long-compilation timer fired; notify only if the child is still
      # queued (it may have finished meanwhile).
      case List.keyfind(queued, child, 0) do
        {^child, _, file, _} ->
          state.each_long_compilation.(file)

        _ ->
          :ok
      end

      spawn_workers(files, waiting, queued, result, warnings, state)

    {:warning, file, line, message} ->
      file = file && Path.absname(file)
      message = :unicode.characters_to_binary(message)
      warning = {file, line, message}
      wait_for_messages(files, waiting, queued, result, [warning | warnings], state)

    {:file_done, child_pid, file, :ok} ->
      discard_down(child_pid)
      state.each_file.(file)
      cancel_waiting_timer(queued, child_pid)

      # Sometimes we may have spurious entries in the waiting
      # list because someone invoked try/rescue UndefinedFunctionError
      new_files = List.delete(files, child_pid)
      new_queued = List.keydelete(queued, child_pid, 0)
      new_waiting = List.keydelete(waiting, child_pid, 1)
      spawn_workers(new_files, new_waiting, new_queued, result, warnings, state)

    {:file_done, child_pid, file, {kind, reason, stack}} ->
      # A worker crashed: report, kill everything still queued and bail.
      discard_down(child_pid)
      print_error(file, kind, reason, stack)
      cancel_waiting_timer(queued, child_pid)
      terminate(queued)
      {:error, [to_error(file, kind, reason, stack)], warnings}

    {:DOWN, ref, :process, _pid, reason} ->
      case handle_down(queued, ref, reason) do
        :ok -> wait_for_messages(files, waiting, queued, result, warnings, state)
        {:error, errors} -> {:error, errors, warnings}
      end
  end
end
# Flushes the monitor :DOWN message of an already-handled worker so it
# does not linger in the mailbox (every worker is spawn_monitor'ed).
defp discard_down(pid) do
  receive do
    {:DOWN, _, :process, ^pid, _} -> :ok
  end
end
# Normal exits are expected (workers exit :shutdown after reporting).
defp handle_down(_queued, _ref, :normal) do
  :ok
end

# An abnormal exit from a queued worker aborts the whole compilation:
# print the error, kill the remaining workers and return the error tuple.
defp handle_down(queued, ref, reason) do
  case List.keyfind(queued, ref, 1) do
    {_child, ^ref, file, _timer_ref} ->
      print_error(file, :exit, reason, [])
      terminate(queued)
      {:error, [to_error(file, :exit, reason, [])]}

    _ ->
      # DOWN from something we are not tracking; ignore.
      :ok
  end
end
# Every queued worker is waiting on something nobody will define: a
# deadlock. Kill each worker, print a per-file error plus a summary
# table, and return the error triples for the final result.
defp handle_deadlock(waiting, queued) do
  deadlock =
    for {pid, _, file, _} <- queued do
      # Grab the stacktrace before killing so the report shows where
      # each file was stuck.
      {:current_stacktrace, stacktrace} = Process.info(pid, :current_stacktrace)
      Process.exit(pid, :kill)

      {kind, ^pid, _, on, _} = List.keyfind(waiting, pid, 1)
      description = "deadlocked waiting on #{kind} #{inspect(on)}"
      error = CompileError.exception(description: description, file: nil, line: nil)
      print_error(file, :error, error, stacktrace)
      {file, on, description}
    end

  IO.puts("""
  Compilation failed because of a deadlock between files.
  The following files depended on the following modules:
  """)

  # Pad file names so the "file => module" table lines up.
  max =
    deadlock
    |> Enum.map(&(&1 |> elem(0) |> String.length()))
    |> Enum.max()

  for {file, mod, _} <- deadlock do
    IO.puts([" ", String.pad_leading(file, max), " => " | inspect(mod)])
  end

  IO.puts(
    "\nEnsure there are no compile-time dependencies between those files " <>
      "and that the modules they reference exist and are correctly named\n"
  )

  for {file, _, description} <- deadlock, do: {Path.absname(file), nil, description}
end
# Kills every still-queued worker; used when aborting a compilation run.
defp terminate(queued) do
  for {pid, _ref, _file, _timer_ref} <- queued, do: Process.exit(pid, :kill)
end
# Writes a banner plus the CLI-formatted error for a failed file.
defp print_error(file, kind, reason, stack) do
  IO.write([
    "\n== Compilation error in file #{Path.relative_to_cwd(file)} ==\n",
    Kernel.CLI.format_error(kind, reason, stack)
  ])
end
# Cancels the long-compilation timer of a finished worker, if any.
defp cancel_waiting_timer(queued, child_pid) do
  case List.keyfind(queued, child_pid, 0) do
    {^child_pid, _ref, _file, timer_ref} ->
      Process.cancel_timer(timer_ref)

      # Let's flush the message in case it arrived before we canceled the
      # timeout.
      receive do
        {:timed_out, ^child_pid} -> :ok
      after
        0 -> :ok
      end

    nil ->
      :ok
  end
end
# Normalizes a caught error into the {file, line, message} triple used
# in the :error result. The line is derived before the file is made
# absolute, since get_line/3 compares against the relative path.
defp to_error(file, kind, reason, stack) do
  formatted = Kernel.CLI.format_error(kind, reason, stack)
  line = get_line(file, reason, stack)
  {Path.absname(file), line, :unicode.characters_to_binary(formatted)}
end
# Extracts the best-effort error line. Clause order matters:

# 1. The exception itself carries a positive line.
defp get_line(_file, %{line: line}, _stack) when is_integer(line) and line > 0 do
  line
end

# 2. Undefined-function error: the second stack frame's metadata points
#    at the failing call site when it belongs to the file being compiled.
defp get_line(file, :undef, [{_, _, _, []}, {_, _, _, info} | _]) do
  if Keyword.get(info, :file) == to_charlist(Path.relative_to_cwd(file)) do
    Keyword.get(info, :line)
  end
end

# 3. Otherwise use the topmost frame, again only if it is in this file.
defp get_line(file, _reason, [{_, _, _, info} | _]) do
  if Keyword.get(info, :file) == to_charlist(Path.relative_to_cwd(file)) do
    Keyword.get(info, :line)
  end
end

# 4. No line information available.
defp get_line(_, _, _) do
  nil
end
end
| 34.266795 | 98 | 0.634963 |
73cef6ec36f36c0492c96d0f1fa6106afda60ffb | 567 | ex | Elixir | lib/extenant_web/controllers/fallback_controller.ex | ThinkForge/extenant | 70a73aed9a5db42a178919562d60861f9aa09bcb | [
"Apache-2.0"
] | null | null | null | lib/extenant_web/controllers/fallback_controller.ex | ThinkForge/extenant | 70a73aed9a5db42a178919562d60861f9aa09bcb | [
"Apache-2.0"
] | 1 | 2018-11-24T15:09:30.000Z | 2018-11-24T15:09:30.000Z | lib/extenant_web/controllers/fallback_controller.ex | ThinkForge/extenant | 70a73aed9a5db42a178919562d60861f9aa09bcb | [
"Apache-2.0"
] | null | null | null | defmodule ExtenantWeb.FallbackController do
@moduledoc """
Translates controller action results into valid `Plug.Conn` responses.

See `Phoenix.Controller.action_fallback/1` for more details.
"""
use ExtenantWeb, :controller

# Ecto changeset errors become a 422 rendering the changeset errors.
def call(conn, {:error, %Ecto.Changeset{} = changeset}) do
  conn
  |> put_status(:unprocessable_entity)
  |> render(ExtenantWeb.ChangesetView, "error.json", changeset: changeset)
end

# Missing resources become a plain 404.
def call(conn, {:error, :not_found}) do
  conn
  |> put_status(:not_found)
  |> render(ExtenantWeb.ErrorView, :"404")
end
end
| 27 | 76 | 0.707231 |
73cf16d6d81f97d75027648f9c7e51cabb451db9 | 20,341 | exs | Elixir | test/remote_ip_test.exs | ajvondrak/remote_ip | 4826fc437976d7e8dca1ac0ed1ebc7459c6b1a33 | [
"MIT"
] | 178 | 2016-10-17T09:47:02.000Z | 2022-03-22T09:39:02.000Z | test/remote_ip_test.exs | ajvondrak/remote_ip | 4826fc437976d7e8dca1ac0ed1ebc7459c6b1a33 | [
"MIT"
] | 28 | 2016-12-14T09:01:56.000Z | 2022-03-22T16:16:46.000Z | test/remote_ip_test.exs | ajvondrak/remote_ip | 4826fc437976d7e8dca1ac0ed1ebc7459c6b1a33 | [
"MIT"
] | 35 | 2017-01-24T23:06:43.000Z | 2021-11-17T14:43:30.000Z | defmodule RemoteIpTest do
use ExUnit.Case, async: true
use Plug.Test
doctest RemoteIp
# Header fixtures, one {header, value} pair per supported header.

# Values that do not parse as an IP at all and should be ignored.
@unknown [
  {"forwarded", "for=unknown"},
  {"x-forwarded-for", "not_an_ip"},
  {"x-client-ip", "_obf"},
  {"x-real-ip", "1.2.3"},
  {"custom", "::g"}
]

# Loopback addresses — never reported as the client.
@loopback [
  {"forwarded", "for=127.0.0.1"},
  {"x-forwarded-for", "::1"},
  {"x-client-ip", "127.0.0.2"},
  {"x-real-ip", "::::::1"},
  {"custom", "127.127.127.127"}
]

# Private-range addresses — treated as proxies by default.
@private [
  {"forwarded", "for=10.0.0.1"},
  {"x-forwarded-for", "172.16.0.1"},
  {"x-client-ip", "fd00::"},
  {"x-real-ip", "192.168.10.10"},
  {"custom", "172.31.41.59"}
]

# The same public IPv4 address (2.71.82.8) in every header.
@public_v4 [
  {"forwarded", "for=2.71.82.8"},
  {"x-forwarded-for", "2.71.82.8"},
  {"x-client-ip", "2.71.82.8"},
  {"x-real-ip", "2.71.82.8"},
  {"custom", "2.71.82.8"}
]

# The same public address spelled in assorted IPv6 notations.
@public_v6 [
  {"forwarded", "for=\"[::2.71.82.8]\""},
  {"x-forwarded-for", "::247:5208"},
  {"x-client-ip", "0:0:0:0:0:0:2.71.82.8"},
  {"x-real-ip", "0::0:247:5208"},
  {"custom", "0:0::2.71.82.8"}
]
# Runs the RemoteIp plug against `conn` with the given options, exactly
# as it would run in a plug pipeline (init/1 then call/2).
def call(conn, opts \\ []) do
  plug_opts = RemoteIp.init(opts)
  RemoteIp.call(conn, plug_opts)
end
describe "call/2" do
  test "no headers" do
    peer = {86, 75, 30, 9}
    head = []
    conn = %Plug.Conn{remote_ip: peer, req_headers: head}

    # With nothing to go on, the peer address is kept as-is.
    assert call(conn).remote_ip == peer
    assert Logger.metadata()[:remote_ip] == "86.75.30.9"
  end

  # One test is generated per fixture pair via unquote/1; unparseable
  # header values must leave the peer address untouched.
  for {header, value} <- @unknown do
    test "#{header} header from unknown IP" do
      peer = {1, 2, 3, 4}
      head = [{unquote(header), unquote(value)}]
      conn = %Plug.Conn{remote_ip: peer, req_headers: head}
      opts = [headers: [unquote(header)]]

      assert call(conn, opts).remote_ip == peer
      assert Logger.metadata()[:remote_ip] == "1.2.3.4"
    end
  end

  # Loopback header values are skipped, keeping the (IPv6) peer.
  for {header, value} <- @loopback do
    test "#{header} header from loopback IP" do
      peer = {0xD, 0xE, 0xA, 0xD, 0xB, 0xE, 0xE, 0xF}
      head = [{unquote(header), unquote(value)}]
      conn = %Plug.Conn{remote_ip: peer, req_headers: head}
      opts = [headers: [unquote(header)]]

      assert call(conn, opts).remote_ip == peer
      assert Logger.metadata()[:remote_ip] == "d:e:a:d:b:e:e:f"
    end
  end

  # Private-range header values are also skipped by default.
  for {header, value} <- @private do
    test "#{header} header from private IP" do
      peer = {0xDE, 0xAD, 0, 0, 0, 0, 0xBE, 0xEF}
      head = [{unquote(header), unquote(value)}]
      conn = %Plug.Conn{remote_ip: peer, req_headers: head}
      opts = [headers: [unquote(header)]]

      assert call(conn, opts).remote_ip == peer
      assert Logger.metadata()[:remote_ip] == "de:ad::be:ef"
    end
  end

  # Public IPv4 header values replace the peer address.
  for {header, value} <- @public_v4 do
    test "#{header} header from public IP (v4)" do
      peer = {3, 141, 59, 27}
      head = [{unquote(header), unquote(value)}]
      conn = %Plug.Conn{remote_ip: peer, req_headers: head}
      opts = [headers: [unquote(header)]]

      assert call(conn, opts).remote_ip == {2, 71, 82, 8}
      assert Logger.metadata()[:remote_ip] == "2.71.82.8"
    end
  end

  # Public IPv6 notations all parse to the same 8-tuple.
  for {header, value} <- @public_v6 do
    test "#{header} header from public IP (v6)" do
      peer = {3, 141, 59, 27}
      head = [{unquote(header), unquote(value)}]
      conn = %Plug.Conn{remote_ip: peer, req_headers: head}
      opts = [headers: [unquote(header)]]

      assert call(conn, opts).remote_ip == {0, 0, 0, 0, 0, 0, 583, 21000}
      assert Logger.metadata()[:remote_ip] == "::2.71.82.8"
    end
  end
end
# from/2 inspects headers directly (no conn); it returns the client IP
# tuple or nil and never touches the Logger metadata.
describe "from/2" do
  test "no headers" do
    head = []
    assert RemoteIp.from(head) == nil
    assert Logger.metadata()[:remote_ip] == nil
  end

  for {header, value} <- @unknown do
    test "#{header} header from unknown IP" do
      head = [{unquote(header), unquote(value)}]
      opts = [headers: [unquote(header)]]
      assert RemoteIp.from(head, opts) == nil
      assert Logger.metadata()[:remote_ip] == nil
    end
  end

  for {header, value} <- @loopback do
    test "#{header} header from loopback IP" do
      head = [{unquote(header), unquote(value)}]
      opts = [headers: [unquote(header)]]
      assert RemoteIp.from(head, opts) == nil
      assert Logger.metadata()[:remote_ip] == nil
    end
  end

  for {header, value} <- @private do
    test "#{header} header from private IP" do
      head = [{unquote(header), unquote(value)}]
      opts = [headers: [unquote(header)]]
      assert RemoteIp.from(head, opts) == nil
      assert Logger.metadata()[:remote_ip] == nil
    end
  end

  for {header, value} <- @public_v4 do
    test "#{header} header from public IP (v4)" do
      head = [{unquote(header), unquote(value)}]
      opts = [headers: [unquote(header)]]
      assert RemoteIp.from(head, opts) == {2, 71, 82, 8}
      assert Logger.metadata()[:remote_ip] == nil
    end
  end

  for {header, value} <- @public_v6 do
    test "#{header} header from public IP (v6)" do
      head = [{unquote(header), unquote(value)}]
      opts = [headers: [unquote(header)]]
      assert RemoteIp.from(head, opts) == {0, 0, 0, 0, 0, 0, 583, 21000}
      assert Logger.metadata()[:remote_ip] == nil
    end
  end
end
# One otherwise-routable address per header, used to exercise :proxies.
@proxies [
  {"forwarded", "for=1.2.3.4"},
  {"x-forwarded-for", "::a"},
  {"x-client-ip", "1:2:3:4:5:6:7:8"},
  {"x-real-ip", "4.4.4.4"}
]

describe ":proxies option" do
  test "can block presumed clients" do
    head = @proxies
    # Every fixture address falls in one of these CIDR ranges.
    opts = [proxies: ~w[1.2.0.0/16 ::a/128 4.0.0.0/8 1::/30]]
    assert RemoteIp.from(head, opts) == nil
  end

  test "cannot block known clients" do
    head = @proxies
    # :clients takes precedence over :proxies, even over catch-all CIDRs.
    opts = [proxies: ~w[0.0.0.0/0 ::/0], clients: ~w[1.2.0.0/16]]
    assert RemoteIp.from(head, opts) == {1, 2, 3, 4}
  end

  test "always includes reserved IPs" do
    # Reserved (loopback/private) addresses are proxies regardless of the
    # configured list.
    head = @proxies ++ @loopback ++ @private
    opts = [proxies: ~w[1.2.0.0/16 ::a/128 4.0.0.0/8 1::/30 8.8.8.8/32]]
    assert RemoteIp.from(head, opts) == nil
  end

  test "can be an MFA" do
    # The proxy list may be an {m, f, a} evaluated per call, so runtime
    # config changes take effect immediately.
    head = [{"x-forwarded-for", "1.2.3.4, 2.3.4.5"}]
    opts = [proxies: {Application, :get_env, [:remote_ip_test, :proxies]}]

    Application.put_env(:remote_ip_test, :proxies, [])
    assert RemoteIp.from(head, opts) == {2, 3, 4, 5}

    Application.put_env(:remote_ip_test, :proxies, ~w[2.0.0.0/8])
    assert RemoteIp.from(head, opts) == {1, 2, 3, 4}
  end
end
# One public address per header, used to exercise :clients.
@clients [
  {"forwarded", "for=2.71.82.81"},
  {"x-forwarded-for", "82.84.59.0"},
  {"x-client-ip", "45.235.36.0"},
  {"x-real-ip", "28.74.71.35"}
]

describe ":clients option" do
  test "can allow reserved IPs" do
    # :clients can whitelist addresses that would otherwise be skipped
    # as reserved.
    head = @loopback ++ @private
    opts = [clients: ~w[192.168.10.0/24]]
    assert RemoteIp.from(head, opts) == {192, 168, 10, 10}
  end

  test "can allow known proxies" do
    # An address inside a :proxies range is still accepted when it also
    # matches a :clients range.
    head = @clients

    opts = [
      proxies: ~w[2.0.0.0/8 82.84.0.0/16 45.235.36.0/24 28.74.71.35/32],
      clients: ~w[2.71.0.0/16]
    ]

    assert RemoteIp.from(head, opts) == {2, 71, 82, 81}
  end

  test "doesn't impact presumed clients" do
    # Without a matching :proxies entry the rightmost non-proxy address
    # still wins; :clients alone changes nothing here.
    head = @clients
    opts = [clients: ~w[2.0.0.0/8 82.84.0.0/16 45.235.36.0/24 28.74.71.35/32]]
    assert RemoteIp.from(head, opts) == {28, 74, 71, 35}
  end

  test "can be an MFA" do
    # The client list may also be an {m, f, a} evaluated per call.
    head = [{"x-forwarded-for", "1.2.3.4, 127.0.0.1"}]
    opts = [clients: {Application, :get_env, [:remote_ip_test, :clients]}]

    Application.put_env(:remote_ip_test, :clients, [])
    assert RemoteIp.from(head, opts) == {1, 2, 3, 4}

    Application.put_env(:remote_ip_test, :clients, ~w[127.0.0.0/8])
    assert RemoteIp.from(head, opts) == {127, 0, 0, 1}
  end
end
  # One entry per header RemoteIp understands out of the box, each carrying
  # the same client address so any single header should yield {1, 2, 3, 4}.
  @headers [
    {"forwarded", "for=1.2.3.4"},
    {"x-forwarded-for", "1.2.3.4"},
    {"x-client-ip", "1.2.3.4"},
    {"x-real-ip", "1.2.3.4"}
  ]
  describe ":headers option" do
    # Only headers named in :headers are parsed; the asserted values show the
    # later request header among the allowed ones determines the result.
    test "specifies which headers to use" do
      head = [{"a", "1.2.3.4"}, {"b", "2.3.4.5"}, {"c", "3.4.5.6"}]
      assert RemoteIp.from(head, headers: ~w[a b]) == {2, 3, 4, 5}
      assert RemoteIp.from(head, headers: ~w[a c]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[b a]) == {2, 3, 4, 5}
      assert RemoteIp.from(head, headers: ~w[b c]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[c a]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[c b]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[a]) == {1, 2, 3, 4}
      assert RemoteIp.from(head, headers: ~w[b]) == {2, 3, 4, 5}
      assert RemoteIp.from(head, headers: ~w[c]) == {3, 4, 5, 6}
    end
    # Compile-time loop: generates one test per default header; unquote/1
    # injects the header name and value captured at compile time.
    for {header, value} <- @headers do
      test "includes #{header} by default" do
        head = [{unquote(header), unquote(value)}]
        assert RemoteIp.from(head) == {1, 2, 3, 4}
      end
    end
    # Supplying :headers replaces (not extends) the default header set.
    test "overrides the defaults when specified" do
      head = @headers
      opts = [headers: ~w[custom]]
      fail = "default headers are still being parsed"
      refute RemoteIp.from(head, opts) == {1, 2, 3, 4}, fail
    end
    # The ordering of names inside the :headers list itself is irrelevant:
    # all permutations of ~w[a b c] produce the same result.
    test "doesn't care about order" do
      head = [{"a", "1.2.3.4"}, {"b", "2.3.4.5"}, {"c", "3.4.5.6"}]
      assert RemoteIp.from(head, headers: ~w[a b c]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[a c b]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[b a c]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[b c a]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[c a b]) == {3, 4, 5, 6}
      assert RemoteIp.from(head, headers: ~w[c b a]) == {3, 4, 5, 6}
    end
    # :headers can also be an {m, f, a}, re-read on every call.
    test "can be an MFA" do
      head = [{"a", "1.2.3.4"}, {"b", "2.3.4.5"}]
      opts = [headers: {Application, :get_env, [:remote_ip_test, :headers]}]
      Application.put_env(:remote_ip_test, :headers, ~w[a])
      assert RemoteIp.from(head, opts) == {1, 2, 3, 4}
      Application.put_env(:remote_ip_test, :headers, ~w[b])
      assert RemoteIp.from(head, opts) == {2, 3, 4, 5}
    end
  end
describe "multiple headers" do
test "from unknown to unknown" do
head = [{"forwarded", "for=unknown,for=_obf"}]
opts = []
assert RemoteIp.from(head, opts) == nil
end
test "from unknown to loopback" do
head = [{"x-forwarded-for", "unknown,::1"}]
opts = []
assert RemoteIp.from(head, opts) == nil
end
test "from unknown to private" do
head = [{"x-client-ip", "_obf, fc00:ABCD"}]
opts = []
assert RemoteIp.from(head, opts) == nil
end
test "from unknown to proxy" do
head = [{"x-real-ip", "not_an_ip , 1.2.3.4"}]
opts = [proxies: ~w[1.0.0.0/12]]
assert RemoteIp.from(head, opts) == nil
end
test "from unknown to client" do
head = [{"custom", "unknown ,1.2.3.4"}]
opts = [headers: ~w[custom]]
assert RemoteIp.from(head, opts) == {1, 2, 3, 4}
end
test "from loopback to unknown" do
head = [{"forwarded", "for=\"[::1]\""}, {"x-forwarded-for", "_bogus"}]
opts = []
assert RemoteIp.from(head, opts) == nil
end
test "from loopback to loopback" do
head = [{"x-client-ip", "127.0.0.1"}, {"x-real-ip", "127.0.0.1"}]
opts = []
assert RemoteIp.from(head, opts) == nil
end
test "from loopback to private" do
head = [{"custom", "127.0.0.10"}, {"forwarded", "for=\"[fc00::1]\""}]
opts = [headers: ~w[forwarded custom]]
assert RemoteIp.from(head, opts) == nil
end
test "from loopback to proxy" do
head = [{"forwarded", "for=127.0.0.1"}, {"forwarded", "for=1.2.3.4"}]
opts = [proxies: ~w[1.2.3.4/32]]
assert RemoteIp.from(head, opts) == nil
end
test "from loopback to client" do
head = [{"x-forwarded-for", "127.0.0.1"}, {"x-forwarded-for", "1.2.3.4"}]
opts = []
assert RemoteIp.from(head, opts) == {1, 2, 3, 4}
end
test "from private to unknown" do
head = [{"x-client-ip", "fc00::ABCD"}, {"x-client-ip", "_obf"}]
opts = []
assert RemoteIp.from(head, opts) == nil
end
test "from private to loopback" do
head = [{"x-real-ip", "192.168.1.2"}, {"x-real-ip", "::1"}]
opts = []
assert RemoteIp.from(head, opts) == nil
end
test "from private to private" do
head = [{"custom", "10.0.0.1"}, {"custom", "10.0.0.2"}]
opts = [headers: ~w[custom]]
assert RemoteIp.from(head, opts) == nil
end
test "from private to proxy" do
head = [{"forwarded", "for=10.0.10.0, for=\"[::1.2.3.4]\""}]
opts = [proxies: ~w[::/64]]
assert RemoteIp.from(head, opts) == nil
end
test "from private to client" do
head = [{"x-forwarded-for", "10.0.10.0, ::1.2.3.4"}]
opts = [proxies: ~w[255.0.0.0/8]]
assert RemoteIp.from(head, opts) == {0, 0, 0, 0, 0, 0, 258, 772}
end
test "from proxy to unknown" do
head = [{"x-client-ip", "a:b:c:d:e:f::,unknown"}]
opts = [proxies: ~w[::/0]]
assert RemoteIp.from(head, opts) == nil
end
test "from proxy to loopback" do
head = [
{"x-real-ip", "2001:0db8:85a3:0000:0000:8A2E:0370:7334"},
{"x-real-ip", "127.0.0.2"}
]
opts = [proxies: ~w[2001:0db8:85a3::8A2E:0370:7334/128]]
assert RemoteIp.from(head, opts) == nil
end
test "from proxy to private" do
head = [{"custom", "3.4.5.6 , 172.16.1.2"}]
opts = [headers: ~w[custom], proxies: ~w[3.0.0.0/8]]
assert RemoteIp.from(head, opts) == nil
end
test "from proxy to proxy" do
head = [{"forwarded", "for=1.2.3.4, for=1.2.3.5"}]
opts = [proxies: ~w[1.2.3.0/24]]
assert RemoteIp.from(head, opts) == nil
end
test "from proxy to client" do
head = [{"x-forwarded-for", "::1:2:3:4, ::3:4:5:6"}]
opts = [proxies: ~w[::1:2:3:4/128]]
assert RemoteIp.from(head, opts) == {0, 0, 0, 0, 3, 4, 5, 6}
end
test "from client to unknown" do
head = [{"x-client-ip", "a:b:c:d:e:f::,unknown"}]
opts = [proxies: ~w[b::/64]]
assert RemoteIp.from(head, opts) == {10, 11, 12, 13, 14, 15, 0, 0}
end
test "from client to loopback" do
head = [{"x-real-ip", "127.0.0.1"}, {"x-real-ip", "127.0.0.2"}]
opts = [clients: ~w[127.0.0.1/32]]
assert RemoteIp.from(head, opts) == {127, 0, 0, 1}
end
test "from client to private" do
head = [{"custom", "::1.2.3.4, 10.0.10.0"}]
opts = [proxies: ~w[1:2:3:4::/64], headers: ~w[custom]]
assert RemoteIp.from(head, opts) == {0, 0, 0, 0, 0, 0, 258, 772}
end
test "from client to proxy" do
head = [{"forwarded", "for=1.2.3.4,for=3.4.5.6"}]
opts = [proxies: ~w[3.4.5.0/24]]
assert RemoteIp.from(head, opts) == {1, 2, 3, 4}
end
test "from client to client" do
head = [{"x-forwarded-for", "1.2.3.4"}, {"x-forwarded-for", "10.45.0.1"}]
opts = [clients: ~w[10.45.0.0/16]]
assert RemoteIp.from(head, opts) == {10, 45, 0, 1}
end
test "more than two hops" do
head = [
{"forwarded", "for=\"[fe80::0202:b3ff:fe1e:8329]\""},
{"forwarded", "for=1.2.3.4"},
{"x-forwarded-for", "172.16.0.10"},
{"x-client-ip", "::1, ::1"},
{"x-real-ip", "2.3.4.5, fc00::1, 2.4.6.8"}
]
opts = [proxies: ~w[2.0.0.0/8]]
assert RemoteIp.from(head, opts) == {1, 2, 3, 4}
end
end
defmodule ParserA do
@behaviour RemoteIp.Parser
@impl RemoteIp.Parser
def parse(value) do
ips = RemoteIp.Parsers.Generic.parse(value)
ips |> Enum.map(fn {a, b, c, d} -> {10 + a, 10 + b, 10 + c, 10 + d} end)
end
end
defmodule ParserB do
@behaviour RemoteIp.Parser
@impl RemoteIp.Parser
def parse(value) do
ips = RemoteIp.Parsers.Generic.parse(value)
ips |> Enum.map(fn {a, b, c, d} -> {20 + a, 20 + b, 20 + c, 20 + d} end)
end
end
defmodule ParserC do
@behaviour RemoteIp.Parser
@impl RemoteIp.Parser
def parse(value) do
ips = RemoteIp.Parsers.Generic.parse(value)
ips |> Enum.map(fn {a, b, c, d} -> {30 + a, 30 + b, 30 + c, 30 + d} end)
end
end
describe ":parsers option" do
test "can customize parsers for specific headers" do
headers = [{"a", "1.2.3.4"}, {"b", "2.3.4.5"}, {"c", "3.4.5.6"}]
parsers = %{"a" => ParserA, "b" => ParserB, "c" => ParserC}
assert RemoteIp.from(headers, parsers: parsers, headers: ~w[a]) == {11, 12, 13, 14}
assert RemoteIp.from(headers, parsers: parsers, headers: ~w[b]) == {22, 23, 24, 25}
assert RemoteIp.from(headers, parsers: parsers, headers: ~w[c]) == {33, 34, 35, 36}
end
test "doesn't clobber generic parser on other headers" do
headers = [{"a", "1.2.3.4"}, {"b", "2.3.4.5"}, {"c", "3.4.5.6"}]
parsers = %{"a" => ParserA, "c" => ParserC}
assert RemoteIp.from(headers, parsers: parsers, headers: ~w[a]) == {11, 12, 13, 14}
assert RemoteIp.from(headers, parsers: parsers, headers: ~w[b]) == {2, 3, 4, 5}
assert RemoteIp.from(headers, parsers: parsers, headers: ~w[c]) == {33, 34, 35, 36}
end
test "doesn't clobber Forwarded parser by default" do
headers = [{"forwarded", "for=1.2.3.4"}]
parsers = %{"a" => ParserA, "b" => ParserB, "c" => ParserC}
options = [parsers: parsers, headers: ~w[forwarded a b c]]
assert RemoteIp.from(headers, options) == {1, 2, 3, 4}
end
test "can clobber Forwarded parser" do
headers = [{"forwarded", "1.2.3.4"}]
parsers = %{"forwarded" => ParserA}
options = [parsers: parsers, headers: ~w[forwarded]]
assert RemoteIp.from(headers, options) == {11, 12, 13, 14}
end
test "can be an MFA" do
headers = [{"x", "1.2.3.4"}]
parsers = {Application, :get_env, [:remote_ip_test, :parsers]}
options = [parsers: parsers, headers: ~w[x]]
Application.put_env(:remote_ip_test, :parsers, %{"x" => ParserA})
assert RemoteIp.from(headers, options) == {11, 12, 13, 14}
Application.put_env(:remote_ip_test, :parsers, %{"x" => ParserC})
assert RemoteIp.from(headers, options) == {31, 32, 33, 34}
end
end
defmodule App do
use Plug.Router
plug RemoteIp,
parsers: {__MODULE__, :parsers, []},
headers: {__MODULE__, :config, ["HEADERS"]},
proxies: {__MODULE__, :config, ["PROXIES"]},
clients: {__MODULE__, :config, ["CLIENTS"]}
plug :match
plug :dispatch
get "/ip" do
send_resp(conn, 200, :inet.ntoa(conn.remote_ip))
end
def config(var) do
System.get_env() |> Map.get(var, "") |> String.split(",", trim: true)
end
def parsers do
Enum.into(config("PARSERS"), %{}, fn spec ->
[header, parser] = String.split(spec, ":")
{header, :"Elixir.RemoteIpTest.#{parser}"}
end)
end
end
test "runtime configuration" do
try do
conn = conn(:get, "/ip")
conn = conn |> put_req_header("a", "1.2.3.4, 192.168.0.1, 2.3.4.5")
conn = conn |> put_req_header("b", "3.4.5.6, 192.168.0.1, 4.5.6.7")
assert App.call(conn, App.init([])).resp_body == "127.0.0.1"
System.put_env("HEADERS", "a")
assert App.call(conn, App.init([])).resp_body == "2.3.4.5"
System.put_env("PARSERS", "a:ParserA")
assert App.call(conn, App.init([])).resp_body == "12.13.14.15"
System.put_env("PARSERS", "a:ParserB,c:ParserC")
assert App.call(conn, App.init([])).resp_body == "22.23.24.25"
System.put_env("PROXIES", "22.0.0.0/8,212.188.0.0/16")
assert App.call(conn, App.init([])).resp_body == "21.22.23.24"
System.delete_env("PARSERS")
System.put_env("PROXIES", "2.1.0.0/16,2.2.0.0/16,2.3.0.0/16")
assert App.call(conn, App.init([])).resp_body == "1.2.3.4"
System.put_env("CLIENTS", "192.0.0.0/8,1.2.3.4/32")
assert App.call(conn, App.init([])).resp_body == "192.168.0.1"
System.put_env("HEADERS", "b,c,d")
assert App.call(conn, App.init([])).resp_body == "4.5.6.7"
System.put_env("PROXIES", "4.5.6.0/24")
assert App.call(conn, App.init([])).resp_body == "192.168.0.1"
System.put_env("CLIENTS", "4.5.7.0/24")
assert App.call(conn, App.init([])).resp_body == "3.4.5.6"
after
System.delete_env("PARSERS")
System.delete_env("HEADERS")
System.delete_env("PROXIES")
System.delete_env("CLIENTS")
end
end
end
| 32.65008 | 89 | 0.550366 |
73cf38a90b336f0b58910a9b5cfc4a8f4230d070 | 1,412 | exs | Elixir | test/data_serialize_test/invertible_test.exs | thomas9911/data_serialize | d8c7bf962ba8aa4ef54bca4d720108df5fe9b2b9 | [
"Unlicense"
] | 2 | 2020-01-18T05:39:30.000Z | 2020-08-09T09:51:12.000Z | test/data_serialize_test/invertible_test.exs | thomas9911/data_serialize | d8c7bf962ba8aa4ef54bca4d720108df5fe9b2b9 | [
"Unlicense"
] | 1 | 2019-11-11T12:15:01.000Z | 2019-11-11T12:15:01.000Z | test/data_serialize_test/invertible_test.exs | thomas9911/data_serialize | d8c7bf962ba8aa4ef54bca4d720108df5fe9b2b9 | [
"Unlicense"
] | null | null | null | defmodule DataSerializeTest.Invertible do
use ExUnit.Case
test "cbor" do
start_map = %{
"nested" => %{
"nesting" => "oke",
"other" => "one"
},
"list" => [1, 2, 3, 4],
"key" => "value"
}
assert {:ok, cbor} = DataSerialize.map_to_cbor_hex(start_map)
assert {:ok, end_map} = DataSerialize.cbor_hex_to_map(cbor)
assert start_map == end_map
end
test "json" do
start_map = %{
"nested" => %{
"nesting" => "oke",
"other" => "one"
},
"list" => [1, 2, 3, 4],
"key" => "value"
}
assert {:ok, json} = DataSerialize.map_to_json(start_map)
assert {:ok, end_map} = DataSerialize.json_to_map(json)
assert start_map == end_map
end
test "toml" do
start_map = %{
"nested" => %{
"nesting" => "oke",
"other" => "one"
},
"list" => [1, 2, 3, 4]
}
assert {:ok, toml} = DataSerialize.map_to_toml(start_map)
assert {:ok, end_map} = DataSerialize.toml_to_map(toml)
assert start_map == end_map
end
test "yaml" do
start_map = %{
"nested" => %{
"nesting" => "oke",
"other" => "one"
},
"list" => [1, 2, 3, 4],
"key" => "value"
}
assert {:ok, yaml} = DataSerialize.map_to_yaml(start_map)
assert {:ok, end_map} = DataSerialize.yaml_to_map(yaml)
assert start_map == end_map
end
end
| 21.074627 | 65 | 0.525496 |
73cf634ce16eab453ba9c359b723425728e5e669 | 781 | ex | Elixir | lib/ex338_web/views/draft_pick_view.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 17 | 2016-12-22T06:39:26.000Z | 2021-01-20T13:51:13.000Z | lib/ex338_web/views/draft_pick_view.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 608 | 2016-08-06T18:57:58.000Z | 2022-03-01T02:48:17.000Z | lib/ex338_web/views/draft_pick_view.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 6 | 2017-11-21T22:35:45.000Z | 2022-01-11T21:37:40.000Z | defmodule Ex338Web.DraftPickView do
use Ex338Web, :view
def current_picks(draft_picks, amount) when amount >= 0 do
next_pick_index = Enum.find_index(draft_picks, &(&1.fantasy_player_id == nil))
get_current_picks(draft_picks, next_pick_index, amount)
end
defp get_current_picks(draft_picks, nil, amount) do
Enum.take(draft_picks, -div(amount, 2))
end
defp get_current_picks(draft_picks, index, amount) do
start_index = index - div(amount, 2)
start_index =
if start_index < 0 do
0
else
start_index
end
Enum.slice(draft_picks, start_index, amount)
end
def seconds_to_hours(seconds) do
Float.floor(seconds / 3600, 2)
end
def seconds_to_mins(seconds) do
Float.floor(seconds / 60, 2)
end
end
| 22.970588 | 82 | 0.696543 |
73cf6b5cc6fe4580c3a4ef3c5e866110a13b30bd | 2,868 | exs | Elixir | test/components/form/submit_test.exs | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | null | null | null | test/components/form/submit_test.exs | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | null | null | null | test/components/form/submit_test.exs | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | null | null | null | defmodule Surface.Components.Form.SubmitTest do
use Surface.ConnCase, async: true
alias Surface.Components.Form.Submit
test "label only" do
html =
render_surface do
~H"""
<Submit label="Submit" />
"""
end
assert html =~ """
<button type="submit">Submit</button>
"""
end
test "with class" do
html =
render_surface do
~H"""
<Submit label="Submit" class="button" />
"""
end
assert html =~ ~r/class="button"/
end
test "with multiple classes" do
html =
render_surface do
~H"""
<Submit label="Submit" class="button primary" />
"""
end
assert html =~ ~r/class="button primary"/
end
test "with options" do
html =
render_surface do
~H"""
<Submit label="Submit" class="btn" opts={{ id: "submit-btn" }} />
"""
end
assert html =~ """
<button class="btn" id="submit-btn" type="submit">Submit</button>
"""
end
test "with children" do
html =
render_surface do
~H"""
<Submit class="btn">
<span>Submit</span>
</Submit>
"""
end
assert html =~ """
<button class="btn" type="submit">
<span>Submit</span>
</button>
"""
end
test "blur event with parent live view as target" do
html =
render_surface do
~H"""
<Submit label="Submit" blur="my_blur" />
"""
end
assert html =~ """
<button phx-blur="my_blur" type="submit">Submit</button>
"""
end
test "focus event with parent live view as target" do
html =
render_surface do
~H"""
<Submit label="Submit" focus="my_focus" />
"""
end
assert html =~ """
<button phx-focus="my_focus" type="submit">Submit</button>
"""
end
test "capture click event with parent live view as target" do
html =
render_surface do
~H"""
<Submit label="Submit" capture_click="my_click" />
"""
end
assert html =~ """
<button phx-capture-click="my_click" type="submit">Submit</button>
"""
end
test "keydown event with parent live view as target" do
html =
render_surface do
~H"""
<Submit label="Submit" keydown="my_keydown" />
"""
end
assert html =~ """
<button phx-keydown="my_keydown" type="submit">Submit</button>
"""
end
test "keyup event with parent live view as target" do
html =
render_surface do
~H"""
<Submit label="Submit" keyup="my_keyup" />
"""
end
assert html =~ """
<button phx-keyup="my_keyup" type="submit">Submit</button>
"""
end
end
| 21.088235 | 77 | 0.514296 |
73cf7b8a1ae7401b44c8697af4ac4f486103f83f | 6,167 | ex | Elixir | clients/network_management/lib/google_api/network_management/v1/model/step.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/network_management/lib/google_api/network_management/v1/model/step.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/network_management/lib/google_api/network_management/v1/model/step.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.NetworkManagement.V1.Model.Step do
  @moduledoc """
  A simulated forwarding path is composed of multiple steps. Each step has a well-defined state and an associated configuration.

  ## Attributes

  *   `abort` (*type:* `GoogleApi.NetworkManagement.V1.Model.AbortInfo.t`, *default:* `nil`) - Display info of the final state "abort" and reason.
  *   `causesDrop` (*type:* `boolean()`, *default:* `nil`) - This is a step that leads to the final state Drop.
  *   `deliver` (*type:* `GoogleApi.NetworkManagement.V1.Model.DeliverInfo.t`, *default:* `nil`) - Display info of the final state "deliver" and reason.
  *   `description` (*type:* `String.t`, *default:* `nil`) - A description of the step. Usually this is a summary of the state.
  *   `drop` (*type:* `GoogleApi.NetworkManagement.V1.Model.DropInfo.t`, *default:* `nil`) - Display info of the final state "drop" and reason.
  *   `endpoint` (*type:* `GoogleApi.NetworkManagement.V1.Model.EndpointInfo.t`, *default:* `nil`) - Display info of the source and destination under analysis. The endpoint info in an intermediate state may differ with the initial input, as it might be modified by state like NAT, or Connection Proxy.
  *   `firewall` (*type:* `GoogleApi.NetworkManagement.V1.Model.FirewallInfo.t`, *default:* `nil`) - Display info of a Compute Engine firewall rule.
  *   `forward` (*type:* `GoogleApi.NetworkManagement.V1.Model.ForwardInfo.t`, *default:* `nil`) - Display info of the final state "forward" and reason.
  *   `forwardingRule` (*type:* `GoogleApi.NetworkManagement.V1.Model.ForwardingRuleInfo.t`, *default:* `nil`) - Display info of a Compute Engine forwarding rule.
  *   `instance` (*type:* `GoogleApi.NetworkManagement.V1.Model.InstanceInfo.t`, *default:* `nil`) - Display info of a Compute Engine instance.
  *   `loadBalancer` (*type:* `GoogleApi.NetworkManagement.V1.Model.LoadBalancerInfo.t`, *default:* `nil`) - Display info of the load balancers.
  *   `network` (*type:* `GoogleApi.NetworkManagement.V1.Model.NetworkInfo.t`, *default:* `nil`) - Display info of a GCP network.
  *   `projectId` (*type:* `String.t`, *default:* `nil`) - Project ID that contains the configuration this step is validating.
  *   `route` (*type:* `GoogleApi.NetworkManagement.V1.Model.RouteInfo.t`, *default:* `nil`) - Display info of a Compute Engine route.
  *   `state` (*type:* `String.t`, *default:* `nil`) - Each step is in one of the pre-defined states.
  *   `vpnGateway` (*type:* `GoogleApi.NetworkManagement.V1.Model.VpnGatewayInfo.t`, *default:* `nil`) - Display info of a Compute Engine VPN gateway.
  *   `vpnTunnel` (*type:* `GoogleApi.NetworkManagement.V1.Model.VpnTunnelInfo.t`, *default:* `nil`) - Display info of a Compute Engine VPN tunnel.
  """

  use GoogleApi.Gax.ModelBase

  # Struct typespec mirrors the JSON resource; every field is optional (nil).
  @type t :: %__MODULE__{
          :abort => GoogleApi.NetworkManagement.V1.Model.AbortInfo.t() | nil,
          :causesDrop => boolean() | nil,
          :deliver => GoogleApi.NetworkManagement.V1.Model.DeliverInfo.t() | nil,
          :description => String.t() | nil,
          :drop => GoogleApi.NetworkManagement.V1.Model.DropInfo.t() | nil,
          :endpoint => GoogleApi.NetworkManagement.V1.Model.EndpointInfo.t() | nil,
          :firewall => GoogleApi.NetworkManagement.V1.Model.FirewallInfo.t() | nil,
          :forward => GoogleApi.NetworkManagement.V1.Model.ForwardInfo.t() | nil,
          :forwardingRule => GoogleApi.NetworkManagement.V1.Model.ForwardingRuleInfo.t() | nil,
          :instance => GoogleApi.NetworkManagement.V1.Model.InstanceInfo.t() | nil,
          :loadBalancer => GoogleApi.NetworkManagement.V1.Model.LoadBalancerInfo.t() | nil,
          :network => GoogleApi.NetworkManagement.V1.Model.NetworkInfo.t() | nil,
          :projectId => String.t() | nil,
          :route => GoogleApi.NetworkManagement.V1.Model.RouteInfo.t() | nil,
          :state => String.t() | nil,
          :vpnGateway => GoogleApi.NetworkManagement.V1.Model.VpnGatewayInfo.t() | nil,
          :vpnTunnel => GoogleApi.NetworkManagement.V1.Model.VpnTunnelInfo.t() | nil
        }

  # field/1,2 is provided by GoogleApi.Gax.ModelBase; the :as option names
  # the nested model module used when decoding that field.
  field(:abort, as: GoogleApi.NetworkManagement.V1.Model.AbortInfo)
  field(:causesDrop)
  field(:deliver, as: GoogleApi.NetworkManagement.V1.Model.DeliverInfo)
  field(:description)
  field(:drop, as: GoogleApi.NetworkManagement.V1.Model.DropInfo)
  field(:endpoint, as: GoogleApi.NetworkManagement.V1.Model.EndpointInfo)
  field(:firewall, as: GoogleApi.NetworkManagement.V1.Model.FirewallInfo)
  field(:forward, as: GoogleApi.NetworkManagement.V1.Model.ForwardInfo)
  field(:forwardingRule, as: GoogleApi.NetworkManagement.V1.Model.ForwardingRuleInfo)
  field(:instance, as: GoogleApi.NetworkManagement.V1.Model.InstanceInfo)
  field(:loadBalancer, as: GoogleApi.NetworkManagement.V1.Model.LoadBalancerInfo)
  field(:network, as: GoogleApi.NetworkManagement.V1.Model.NetworkInfo)
  field(:projectId)
  field(:route, as: GoogleApi.NetworkManagement.V1.Model.RouteInfo)
  field(:state)
  field(:vpnGateway, as: GoogleApi.NetworkManagement.V1.Model.VpnGatewayInfo)
  field(:vpnTunnel, as: GoogleApi.NetworkManagement.V1.Model.VpnTunnelInfo)
end
defimpl Poison.Decoder, for: GoogleApi.NetworkManagement.V1.Model.Step do
def decode(value, options) do
GoogleApi.NetworkManagement.V1.Model.Step.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.NetworkManagement.V1.Model.Step do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 64.915789 | 301 | 0.721258 |
73d0017e8fb7767ed9c71f730807f4ef2dc2bc37 | 16,304 | ex | Elixir | lib/aws/generated/qldb.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/qldb.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/qldb.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.QLDB do
@moduledoc """
The control plane for Amazon QLDB
"""
alias AWS.Client
alias AWS.Request
  # Static service descriptor consumed by AWS.Request when signing and
  # dispatching requests for this service (rest-json protocol, SigV4).
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2019-01-02",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "qldb",
      global?: false,
      protocol: "rest-json",
      service_id: "QLDB",
      signature_version: "v4",
      signing_name: "qldb",
      target_prefix: nil
    }
  end
@doc """
Ends a given Amazon QLDB journal stream.
Before a stream can be canceled, its current status must be `ACTIVE`.
You can't restart a stream after you cancel it. Canceled QLDB stream resources
are subject to a 7-day retention period, so they are automatically deleted after
this limit expires.
"""
def cancel_journal_kinesis_stream(
%Client{} = client,
ledger_name,
stream_id,
input,
options \\ []
) do
url_path =
"/ledgers/#{AWS.Util.encode_uri(ledger_name)}/journal-kinesis-streams/#{AWS.Util.encode_uri(stream_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new ledger in your account in the current Region.
"""
def create_ledger(%Client{} = client, input, options \\ []) do
url_path = "/ledgers"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a ledger and all of its contents.
This action is irreversible.
If deletion protection is enabled, you must first disable it before you can
delete the ledger. You can disable it by calling the `UpdateLedger` operation to
set the flag to `false`.
"""
def delete_ledger(%Client{} = client, name, input, options \\ []) do
url_path = "/ledgers/#{AWS.Util.encode_uri(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns detailed information about a given Amazon QLDB journal stream.
The output includes the Amazon Resource Name (ARN), stream name, current status,
creation time, and the parameters of the original stream creation request.
This action does not return any expired journal streams. For more information,
see [Expiration for terminal streams](https://docs.aws.amazon.com/qldb/latest/developerguide/streams.create.html#streams.create.states.expiration)
in the *Amazon QLDB Developer Guide*.
"""
def describe_journal_kinesis_stream(%Client{} = client, ledger_name, stream_id, options \\ []) do
url_path =
"/ledgers/#{AWS.Util.encode_uri(ledger_name)}/journal-kinesis-streams/#{AWS.Util.encode_uri(stream_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns information about a journal export job, including the ledger name,
export ID, creation time, current status, and the parameters of the original
export creation request.
This action does not return any expired export jobs. For more information, see
[Export job expiration](https://docs.aws.amazon.com/qldb/latest/developerguide/export-journal.request.html#export-journal.request.expiration)
in the *Amazon QLDB Developer Guide*.
If the export job with the given `ExportId` doesn't exist, then throws
`ResourceNotFoundException`.
If the ledger with the given `Name` doesn't exist, then throws
`ResourceNotFoundException`.
"""
def describe_journal_s3_export(%Client{} = client, export_id, name, options \\ []) do
url_path =
"/ledgers/#{AWS.Util.encode_uri(name)}/journal-s3-exports/#{AWS.Util.encode_uri(export_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns information about a ledger, including its state, permissions mode,
encryption at rest settings, and when it was created.
"""
def describe_ledger(%Client{} = client, name, options \\ []) do
url_path = "/ledgers/#{AWS.Util.encode_uri(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Exports journal contents within a date and time range from a ledger into a
specified Amazon Simple Storage Service (Amazon S3) bucket.
The data is written as files in Amazon Ion format.
If the ledger with the given `Name` doesn't exist, then throws
`ResourceNotFoundException`.
If the ledger with the given `Name` is in `CREATING` status, then throws
`ResourcePreconditionNotMetException`.
You can initiate up to two concurrent journal export requests for each ledger.
Beyond this limit, journal export requests throw `LimitExceededException`.
"""
def export_journal_to_s3(%Client{} = client, name, input, options \\ []) do
url_path = "/ledgers/#{AWS.Util.encode_uri(name)}/journal-s3-exports"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns a block object at a specified address in a journal.
Also returns a proof of the specified block for verification if
`DigestTipAddress` is provided.
For information about the data contents in a block, see [Journal contents](https://docs.aws.amazon.com/qldb/latest/developerguide/journal-contents.html)
in the *Amazon QLDB Developer Guide*.
If the specified ledger doesn't exist or is in `DELETING` status, then throws
`ResourceNotFoundException`.
If the specified ledger is in `CREATING` status, then throws
`ResourcePreconditionNotMetException`.
If no block exists with the specified address, then throws
`InvalidParameterException`.
"""
def get_block(%Client{} = client, name, input, options \\ []) do
url_path = "/ledgers/#{AWS.Util.encode_uri(name)}/block"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the digest of a ledger at the latest committed block in the journal.
The response includes a 256-bit hash value and a block address.
"""
def get_digest(%Client{} = client, name, input, options \\ []) do
url_path = "/ledgers/#{AWS.Util.encode_uri(name)}/digest"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns a revision data object for a specified document ID and block address.
Also returns a proof of the specified revision for verification if
`DigestTipAddress` is provided.
"""
def get_revision(%Client{} = client, name, input, options \\ []) do
url_path = "/ledgers/#{AWS.Util.encode_uri(name)}/revision"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns an array of all Amazon QLDB journal stream descriptors for a given
ledger.
The output of each stream descriptor includes the same details that are returned
by `DescribeJournalKinesisStream`.
This action does not return any expired journal streams. For more information,
see [Expiration for terminal streams](https://docs.aws.amazon.com/qldb/latest/developerguide/streams.create.html#streams.create.states.expiration)
in the *Amazon QLDB Developer Guide*.
This action returns a maximum of `MaxResults` items. It is paginated so that you
can retrieve all the items by calling `ListJournalKinesisStreamsForLedger`
multiple times.
"""
def list_journal_kinesis_streams_for_ledger(
%Client{} = client,
ledger_name,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/ledgers/#{AWS.Util.encode_uri(ledger_name)}/journal-kinesis-streams"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next_token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max_results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns an array of journal export job descriptions for all ledgers that are
associated with the current account and Region.

Results are paginated (at most `MaxResults` items per call); call
`ListJournalS3Exports` repeatedly to retrieve every item. Expired export jobs
are not returned — see [Export job expiration](https://docs.aws.amazon.com/qldb/latest/developerguide/export-journal.request.html#export-journal.request.expiration)
in the *Amazon QLDB Developer Guide*.
"""
def list_journal_s3_exports(
      %Client{} = client,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  url_path = "/journal-s3-exports"

  # Forward only the pagination parameters the caller actually supplied.
  query_params =
    [{"max_results", max_results}, {"next_token", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, url_path, query_params, [], nil, options, nil)
end
@doc """
Returns an array of journal export job descriptions for a specified ledger.

Results are paginated (at most `MaxResults` items per call); call
`ListJournalS3ExportsForLedger` repeatedly to retrieve every item. Expired
export jobs are not returned — see [Export job expiration](https://docs.aws.amazon.com/qldb/latest/developerguide/export-journal.request.html#export-journal.request.expiration)
in the *Amazon QLDB Developer Guide*.
"""
def list_journal_s3_exports_for_ledger(
      %Client{} = client,
      name,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  url_path = "/ledgers/#{AWS.Util.encode_uri(name)}/journal-s3-exports"

  # Forward only the pagination parameters the caller actually supplied.
  query_params =
    [{"max_results", max_results}, {"next_token", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, url_path, query_params, [], nil, options, nil)
end
@doc """
Returns an array of ledger summaries that are associated with the current
account and Region.

This action returns a maximum of 100 items and is paginated so that you can
retrieve all the items by calling `ListLedgers` multiple times.
"""
def list_ledgers(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
  url_path = "/ledgers"

  # Forward only the pagination parameters the caller actually supplied.
  query_params =
    [{"max_results", max_results}, {"next_token", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, url_path, query_params, [], nil, options, nil)
end
@doc """
Returns all tags for a specified Amazon QLDB resource.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
  # GET /tags/{resourceArn} — no query parameters, headers, or body.
  url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"

  Request.request_rest(client, metadata(), :get, url_path, [], [], nil, options, nil)
end
@doc """
Creates a journal stream for a given Amazon QLDB ledger.

The stream captures every document revision that is committed to the ledger's
journal and delivers the data to a specified Amazon Kinesis Data Streams
resource.
"""
def stream_journal_to_kinesis(%Client{} = client, ledger_name, input, options \\ []) do
  # POST /ledgers/{ledgerName}/journal-kinesis-streams — the stream
  # configuration travels in the request body.
  url_path = "/ledgers/#{AWS.Util.encode_uri(ledger_name)}/journal-kinesis-streams"

  Request.request_rest(client, metadata(), :post, url_path, [], [], input, options, nil)
end
@doc """
Adds one or more tags to a specified Amazon QLDB resource.

A resource can have up to 50 tags. If you try to create more than 50 tags for
a resource, your request fails and returns an error.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
  # POST /tags/{resourceArn} — the tag map travels in the request body.
  url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"

  Request.request_rest(client, metadata(), :post, url_path, [], [], input, options, nil)
end
@doc """
Removes one or more tags from a specified Amazon QLDB resource.

You can specify up to 50 tag keys to remove.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
  url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"

  # Lift the "TagKeys" entry out of `input` into the "tagKeys" query parameter.
  {query_params, input} = Request.build_params([{"TagKeys", "tagKeys"}], input)

  Request.request_rest(client, metadata(), :delete, url_path, query_params, [], input, options, nil)
end
@doc """
Updates properties on a ledger.
"""
def update_ledger(%Client{} = client, name, input, options \\ []) do
  # PATCH /ledgers/{name} — changed properties travel in the request body.
  url_path = "/ledgers/#{AWS.Util.encode_uri(name)}"

  Request.request_rest(client, metadata(), :patch, url_path, [], [], input, options, nil)
end
@doc """
Updates the permissions mode of a ledger.

Before you switch to the `STANDARD` permissions mode, you must first create all
required IAM policies and table tags to avoid disruption to your users. To learn
more, see [Migrating to the standard permissions mode](https://docs.aws.amazon.com/qldb/latest/developerguide/ledger-management.basics.html#ledger-mgmt.basics.update-permissions.migrating)
in the *Amazon QLDB Developer Guide*.
"""
def update_ledger_permissions_mode(%Client{} = client, name, input, options \\ []) do
  # PATCH /ledgers/{name}/permissions-mode — the new mode travels in the body.
  url_path = "/ledgers/#{AWS.Util.encode_uri(name)}/permissions-mode"

  Request.request_rest(client, metadata(), :patch, url_path, [], [], input, options, nil)
end
end
| 25.160494 | 190 | 0.641438 |
73d004a921c9bea87e5e128b10d34d9e140b7648 | 139 | ex | Elixir | examples/nfq/lib/nfq/application.ex | shun159/brahman | dfa04d757c4e4422b00bdc97a694d2d6637708f9 | [
"Beerware"
] | null | null | null | examples/nfq/lib/nfq/application.ex | shun159/brahman | dfa04d757c4e4422b00bdc97a694d2d6637708f9 | [
"Beerware"
] | null | null | null | examples/nfq/lib/nfq/application.ex | shun159/brahman | dfa04d757c4e4422b00bdc97a694d2d6637708f9 | [
"Beerware"
] | null | null | null | defmodule NFQ.Application do
@moduledoc false
use Application
  # Application callback: all supervision is delegated to NFQ.Supervisor.
  def start(_type, _args) do
    # Returns the supervisor's {:ok, pid} (or error tuple) unchanged.
    NFQ.Supervisor.start_link()
  end
end
| 13.9 | 31 | 0.733813 |
73d0193cf2e42aa48b57176d9085c796b597649e | 1,111 | exs | Elixir | test/doctor_schedule/appointments/repositories/provider_repository_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 2 | 2022-03-11T12:15:01.000Z | 2022-03-11T13:53:21.000Z | test/doctor_schedule/appointments/repositories/provider_repository_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 3 | 2020-12-12T22:10:17.000Z | 2021-04-05T12:53:12.000Z | test/doctor_schedule/appointments/repositories/provider_repository_test.exs | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 1 | 2021-02-26T04:24:34.000Z | 2021-02-26T04:24:34.000Z | defmodule DoctorSchedule.Appointments.Repositories.ProviderRepositoryTest do
use DoctorSchedule.DataCase
alias DoctorSchedule.AppointmentFixture
alias DoctorSchedule.Appointments.Repositories.ProviderRepository
alias DoctorSchedule.Repo
describe "Tests Provider in Appointments" do
test "all_day_fom_provider/2 returns all appointments from provider in a month" do
appointment = AppointmentFixture.appointment_fixture()
appointment = appointment |> Repo.preload(:provider)
date = NaiveDateTime.to_date(appointment.date)
assert ProviderRepository.all_day_fom_provider(appointment.provider.id, date)
|> Enum.count() == 1
end
test "all_month_from_provider/3 returns all appointments from provider in a month" do
appointment = AppointmentFixture.appointment_fixture()
appointment = appointment |> Repo.preload(:provider)
year = appointment.date.year
month = appointment.date.month
assert ProviderRepository.all_month_from_provider(appointment.provider.id, year, month)
|> Enum.count() == 1
end
end
end
| 38.310345 | 93 | 0.754275 |
73d0722d8394dd6fc891fef2a63ae344bdda508e | 147 | ex | Elixir | Chapter02/apps/elixir_drip_web/lib/elixir_drip_web/controllers/page_controller.ex | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 28 | 2018-08-09T05:05:29.000Z | 2022-03-14T06:59:07.000Z | Chapter02/apps/elixir_drip_web/lib/elixir_drip_web/controllers/page_controller.ex | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 1 | 2019-02-11T09:11:33.000Z | 2019-05-06T06:40:19.000Z | Chapter02/apps/elixir_drip_web/lib/elixir_drip_web/controllers/page_controller.ex | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 8 | 2018-08-09T14:53:02.000Z | 2020-12-14T19:31:21.000Z | defmodule ElixirDripWeb.PageController do
use ElixirDripWeb, :controller
  # Renders the static landing page (index.html template); params are unused.
  def index(conn, _params) do
    render(conn, "index.html")
  end
end
| 18.375 | 41 | 0.748299 |
73d081016b788ffd852f8a41eeed53f883ce6b6c | 989 | exs | Elixir | mix.exs | lowks/currently | f5d884a242522a044907ab10947aa00f3dea4d83 | [
"MIT"
] | null | null | null | mix.exs | lowks/currently | f5d884a242522a044907ab10947aa00f3dea4d83 | [
"MIT"
] | null | null | null | mix.exs | lowks/currently | f5d884a242522a044907ab10947aa00f3dea4d83 | [
"MIT"
] | null | null | null | Code.ensure_loaded?(Hex) and Hex.start
defmodule Currently.Mixfile do
  use Mix.Project

  @moduledoc """
  Mix project definition for the `currently` escript.
  """

  def project do
    [ app: :currently,
      version: "0.0.4",
      elixir: "~> 1.0.0",
      # Parentheses make these unambiguous zero-arity function calls;
      # bare references like `escript` are deprecated since Elixir 1.4.
      escript: escript(),
      escript_embed_elixir: true,
      description: description(),
      package: package(),
      deps: deps() ]
  end

  # Configuration for the OTP application
  def application do
    [applications: [:httpotion]]
  end

  # Returns the list of dependencies in the format:
  # { :foobar, "0.1", git: "https://github.com/elixir-lang/foobar.git" }
  defp deps do
    [
      {:httpotion, github: "myfreeweb/httpotion"},
      {:jsex, "~> 2.0" }
    ]
  end

  # Escript entry point configuration (main module invoked by the binary).
  defp escript do
    [main_module: Currently]
  end

  # Package description shown on Hex.
  defp description do
    """
    Currently is a tool to display cards currently assigns on Trello
    """
  end

  # Hex package metadata.
  defp package do
    [ contributors: ["chatgris"],
      licenses: ["MIT"],
      links: [ { "Github", "https://github.com/chatgris/currently" }]]
  end
end
| 21.042553 | 72 | 0.615774 |
73d0ce3c7854afec4855b67fef677539f57e5952 | 744 | ex | Elixir | lib/magirator_app_gui_vue_web/gettext.ex | Devotu/magirator_app_gui_vue | a2e3fddc99538df5988b87abd78d016e10a98c22 | [
"MIT"
] | null | null | null | lib/magirator_app_gui_vue_web/gettext.ex | Devotu/magirator_app_gui_vue | a2e3fddc99538df5988b87abd78d016e10a98c22 | [
"MIT"
] | null | null | null | lib/magirator_app_gui_vue_web/gettext.ex | Devotu/magirator_app_gui_vue | a2e3fddc99538df5988b87abd78d016e10a98c22 | [
"MIT"
] | null | null | null | defmodule MagiratorAppGuiVueWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import MagiratorAppGuiVueWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :magirator_app_gui_vue
end
| 29.76 | 72 | 0.696237 |
73d0cf502ce040798cdea9abf763d893c0a5da8d | 1,597 | exs | Elixir | bob/example.exs | wobh/xelixir | 63e5beec40d5c441cf6cf87f30c29848f481bb1b | [
"MIT"
] | null | null | null | bob/example.exs | wobh/xelixir | 63e5beec40d5c441cf6cf87f30c29848f481bb1b | [
"MIT"
] | null | null | null | bob/example.exs | wobh/xelixir | 63e5beec40d5c441cf6cf87f30c29848f481bb1b | [
"MIT"
] | null | null | null | defmodule Teenager do
@doc """
Answers to `hey` like a teenager.
## Examples
iex> Teenager.hey("")
"Fine. Be that way!"
iex> Teenager.hey("Do you like math?")
"Sure."
iex> Teenager.hey("HELLO!")
"Whoa, chill out!"
iex> Teenager.hey("Coding is cool.")
"Whatever."
"""
def hey(input) do
cond do
silent?(input) -> "Fine. Be that way!"
shouting?(input) -> "Whoa, chill out!"
question?(input) -> "Sure."
true -> "Whatever."
end
end
defp silent?(input), do: "" == String.strip(input)
defp shouting?(input), do: input == String.upcase(input) && letters?(input)
defp question?(input), do: String.ends_with?(input, "?")
defp letters?(input), do: Regex.match?(~r/\p{L}+/, input)
end
# Another approach which abstracts knowing about string categories
# away from Teenager and into a single responsibility module.
# (This has been commented out to avoid raising a needless "redefinition"
# warning)
# defmodule Message do
# def silent?(input), do: "" == String.strip(input)
# def shouting?(input), do: input == String.upcase(input) && letters?(input)
# def question?(input), do: String.ends_with?(input, "?")
# defp letters?(input), do: Regex.match?(~r/\p{L}+/, input)
# end
#
# defmodule Teenager do
# import Message, only: [silent?: 1, shouting?: 1, question?: 1]
#
# def hey(input) do
# cond do
# silent?(input) -> "Fine. Be that way!"
# shouting?(input) -> "Whoa, chill out!"
# question?(input) -> "Sure."
# true -> "Whatever."
# end
# end
# end
| 27.067797 | 78 | 0.603006 |
73d0d933f90de9afc20c4f8b9d7dac926401b013 | 456 | ex | Elixir | lib/web/plugs/assign_current_user.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | lib/web/plugs/assign_current_user.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | lib/web/plugs/assign_current_user.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | defmodule Accent.Plugs.AssignCurrentUser do
import Plug.Conn
alias Accent.UserAuthFetcher
def init(_), do: nil
@doc """
Takes a Plug.Conn and fetch the associated user giving the Authorization header.
It assigns nil if any of the steps fails.
"""
def call(conn, _opts) do
user =
conn
|> get_req_header("authorization")
|> Enum.at(0)
|> UserAuthFetcher.fetch()
assign(conn, :current_user, user)
end
end
| 20.727273 | 82 | 0.671053 |
73d0dc5276a6537cd017f58125e3ed70f81f1765 | 498 | ex | Elixir | backend/lib/kakeibo/accounts/balance.ex | makoto-engineer/kakeibo | d6882ccfa56d376a39dd307d28417d956ddc8584 | [
"MIT"
] | null | null | null | backend/lib/kakeibo/accounts/balance.ex | makoto-engineer/kakeibo | d6882ccfa56d376a39dd307d28417d956ddc8584 | [
"MIT"
] | null | null | null | backend/lib/kakeibo/accounts/balance.ex | makoto-engineer/kakeibo | d6882ccfa56d376a39dd307d28417d956ddc8584 | [
"MIT"
] | null | null | null | defmodule Kakeibo.Accounts.Balance do
use Ecto.Schema
import Ecto.Changeset
  # Ecto schema for the "balance" table: one ledger entry per row.
  # NOTE(review): both expense and income live on the same row — presumably
  # one of them is zero/nil per entry; confirm against the callers.
  schema "balance" do
    field :day, :date
    field :expense, :integer
    field :income, :integer
    field :method, :string
    field :title, :string
    field :who, :string
    timestamps()
  end
@doc false
def changeset(balance, attrs) do
balance
|> cast(attrs, [:day, :title, :expense, :income, :method, :who])
|> validate_required([:day, :title, :expense, :income, :method, :who])
end
end
| 21.652174 | 74 | 0.646586 |
73d0e1e2dbc92ccc2a0f1144abcf04857b7e1269 | 6,070 | ex | Elixir | apps/mishka_html/lib/mishka_html_web/live/admin_notifs.ex | mojtaba-naseri/mishka-cms | 1fb35b49177b9b27f5e68c1b0bf9d72dc0ff9935 | [
"Apache-2.0"
] | null | null | null | apps/mishka_html/lib/mishka_html_web/live/admin_notifs.ex | mojtaba-naseri/mishka-cms | 1fb35b49177b9b27f5e68c1b0bf9d72dc0ff9935 | [
"Apache-2.0"
] | null | null | null | apps/mishka_html/lib/mishka_html_web/live/admin_notifs.ex | mojtaba-naseri/mishka-cms | 1fb35b49177b9b27f5e68c1b0bf9d72dc0ff9935 | [
"Apache-2.0"
] | null | null | null | defmodule MishkaHtmlWeb.AdminBlogNotifsLive do
use MishkaHtmlWeb, :live_view
alias MishkaContent.General.Notif
@section_title MishkaTranslator.Gettext.dgettext("html_live", "مدیریت اعلانات")
use MishkaHtml.Helpers.LiveCRUD,
module: MishkaContent.General.Notif,
redirect: __MODULE__,
router: Routes
  @impl true
  # LiveView render callback: the whole page is delegated to the shared
  # ListContainerComponent; this module is passed as the pagination URL target.
  def render(assigns) do
    ~H"""
    <.live_component
      module={MishkaHtml.Helpers.ListContainerComponent}
      id={:list_container}
      flash={@flash}
      section_info={section_info(assigns, @socket)}
      filters={@filters}
      list={@notifs}
      url={MishkaHtmlWeb.AdminBlogNotifsLive}
      page_size={@page_size}
      parent_assigns={assigns}
      admin_menu={live_render(@socket, AdminMenu, id: :admin_menu)}
      left_header_side=""
    />
    """
  end
  @impl true
  # Mount: subscribes to notification broadcasts (connected sockets only),
  # schedules the admin-menu highlight message, and seeds the default assigns.
  def mount(_params, session, socket) do
    if connected?(socket), do: Notif.subscribe()
    # :menu is handled by the selected_menue/1 macro-generated handler.
    Process.send_after(self(), :menu, 100)
    socket =
      assign(socket,
        page_size: 10,
        filters: %{},
        page: 1,
        open_modal: false,
        component: nil,
        user_id: Map.get(session, "user_id"),
        body_color: "#a29ac3cf",
        page_title: MishkaTranslator.Gettext.dgettext("html_live", "مدیریت اعلانات"),
        notifs: Notif.notifs(conditions: {1, 10}, filters: %{})
      )
    # notifs is a temporary assign: the list is emptied after each render.
    {:ok, socket, temporary_assigns: [notifs: []]}
  end
# Live CRUD and Paginate
paginate(:notifs, user_id: false)
list_search_and_action()
delete_list_item(:notifs, MishkaHtmlWeb.AdminBlogNotifsLive, false)
selected_menue("MishkaHtmlWeb.AdminBlogNotifsLive")
update_list(:notifs, false)
  # Column definitions for the notifications list: title, status, section,
  # type, target, owning user, and insertion time. The {_, _, _} tuples are
  # per-field display/filter/search flags consumed by ListItemComponent.
  def section_fields() do
    [
      ListItemComponent.text_field("title", [1], "col-sm-3 header1", MishkaTranslator.Gettext.dgettext("html_live", "تیتر اعلان"),
      {true, true, true}, &MishkaHtml.title_sanitize/1),
      ListItemComponent.select_field("status", [1, 4], "col header2", MishkaTranslator.Gettext.dgettext("html_live", "وضعیت"),
      [
        {MishkaTranslator.Gettext.dgettext("html_live", "غیر فعال"), "inactive"},
        {MishkaTranslator.Gettext.dgettext("html_live", "فعال"), "active"},
        {MishkaTranslator.Gettext.dgettext("html_live", "آرشیو شده"), "archived"},
        {MishkaTranslator.Gettext.dgettext("html_live", "حذف با پرچم"), "soft_delete"}
      ],
      {true, true, true}),
      ListItemComponent.select_field("section", [1, 4], "col header3", MishkaTranslator.Gettext.dgettext("html_live_component", "بخش"),
      [
        {MishkaTranslator.Gettext.dgettext("html_live", "مطلب بلاگ"), "blog_post"},
        {MishkaTranslator.Gettext.dgettext("html_live", "مدیریت"), "admin"},
        {MishkaTranslator.Gettext.dgettext("html_live", "عمومی"), "public"},
        {MishkaTranslator.Gettext.dgettext("html_live", "کاربر خاص"), "user_only"}
      ],
      {true, true, true}),
      ListItemComponent.select_field("type", [1, 4], "col header4", MishkaTranslator.Gettext.dgettext("html_live_component", "نوع"),
      [
        {MishkaTranslator.Gettext.dgettext("html_live", "کاربری"), "client"},
        {MishkaTranslator.Gettext.dgettext("html_live", "مدیریت"), "admin"}
      ],
      {true, true, true}),
      ListItemComponent.select_field("target", [1, 4], "col header5", MishkaTranslator.Gettext.dgettext("html_live_component", "هدف"),
      [
        {MishkaTranslator.Gettext.dgettext("html_live", "همه"), "all"},
        {MishkaTranslator.Gettext.dgettext("html_live", "موبایل"), "mobile"},
        {MishkaTranslator.Gettext.dgettext("html_live", "اندروید"), "android"},
        {MishkaTranslator.Gettext.dgettext("html_live", "iOS"), "ios"},
        {MishkaTranslator.Gettext.dgettext("html_live", "cli"), "cli"}
      ],
      {true, true, true}),
      ListItemComponent.link_field("full_name", [1], "col header1", MishkaTranslator.Gettext.dgettext("html_live", "کاربر"),
      {MishkaHtmlWeb.AdminUserLive, :user_id},
      {true, false, false}, &MishkaHtml.full_name_sanitize/1),
      ListItemComponent.time_field("inserted_at", [1], "col header7", MishkaTranslator.Gettext.dgettext("html_live", "ثبت"), false,
      {true, false, false})
    ]
  end
  # Builds the map the ListContainerComponent expects: header/per-row action
  # buttons, section title, activity-log metadata, and a HEEx description.
  def section_info(assigns, socket) do
    %{
      section_btns: %{
        # Single header button: navigate to the "send notification" form.
        header: [
          %{
            title: MishkaTranslator.Gettext.dgettext("html_live_templates", "ارسال اعلان"),
            router: Routes.live_path(socket, MishkaHtmlWeb.AdminBlogNotifLive),
            class: "btn btn-outline-danger"
          }
        ],
        # Per-row buttons: delete, edit, and show (the latter two redirect
        # with the row id plus a :type key).
        list_item: [
          %{
            method: :delete,
            router: nil,
            title: MishkaTranslator.Gettext.dgettext("html_live", "حذف"),
            class: "btn btn-outline-primary vazir"
          },
          %{
            method: :redirect_keys,
            router: MishkaHtmlWeb.AdminBlogNotifLive,
            title: MishkaTranslator.Gettext.dgettext("html_live", "ویرایش"),
            class: "btn btn-outline-danger vazir",
            keys: [
              {:id, :id},
              {:type, "edit"},
            ]
          },
          %{
            method: :redirect_keys,
            router: MishkaHtmlWeb.AdminBlogNotifLive,
            title: MishkaTranslator.Gettext.dgettext("html_live", "نمایش"),
            class: "btn btn-outline-info vazir",
            keys: [
              {:id, :id},
              {:type, "show"},
            ]
          }
        ]
      },
      title: @section_title,
      activities_info: %{
        title: MishkaTranslator.Gettext.dgettext("html_live_templates", "اعلانات"),
        section_type: MishkaTranslator.Gettext.dgettext("html_live_component", "اعلان"),
        action: :section,
        action_by: :section,
      },
      custom_operations: nil,
      description:
        ~H"""
        <%= MishkaTranslator.Gettext.dgettext("html_live_templates", "شما در این بخش می توانید اعلانات ارسال شده چه به وسیله سیستم و چه به وسیله مدیریت که به صورت انبوه ارسال می شود را مدیریت کنید.") %>
        <div class="space30"></div>
        """
    }
  end
end
| 38.176101 | 202 | 0.619934 |
73d0fb732c943fab51ba4531648acbfacda759d0 | 5,575 | ex | Elixir | clients/partners/lib/google_api/partners/v2/api/analytics.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/partners/lib/google_api/partners/v2/api/analytics.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/partners/lib/google_api/partners/v2/api/analytics.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Partners.V2.Api.Analytics do
  @moduledoc """
  API calls for all endpoints tagged `Analytics`.
  """

  alias GoogleApi.Partners.V2.Connection
  import GoogleApi.Partners.V2.RequestBuilder

  @doc """
  Lists analytics data for a user's associated company. Should only be called
  within the context of an authorized logged in user.

  ## Parameters

  - connection (GoogleApi.Partners.V2.Connection): Connection to server
  - opts (KeywordList): [optional] Optional parameters — standard Google API
    query flags (`:alt`, `:fields`, `:key`, `:quotaUser`, OAuth tokens, …),
    request metadata overrides (`:"requestMetadata.locale"`,
    `:"requestMetadata.experimentIds"`, traffic source / user override keys),
    and pagination controls:
    - :pageToken (String): Token identifying a page of results; typically the
      `next_page_token` of the previous ListAnalytics call. A `YYYY-MM-DD`
      date string representing the end of the requested date range.
    - :pageSize (Integer): Number of days in the date range (default 30); the
      start date is computed as `pageSize` days before the end date.

  ## Returns

  {:ok, %GoogleApi.Partners.V2.Model.ListAnalyticsResponse{}} on success
  {:error, info} on failure
  """
  @spec partners_analytics_list(Tesla.Env.client, keyword()) :: {:ok, GoogleApi.Partners.V2.Model.ListAnalyticsResponse.t} | {:error, Tesla.Env.t}
  def partners_analytics_list(connection, opts \\ []) do
    # Maps each supported option to where it is placed on the request.
    optional_params = %{
      :"$.xgafv" => :query,
      :"callback" => :query,
      :"alt" => :query,
      :"access_token" => :query,
      :"key" => :query,
      :"quotaUser" => :query,
      :"pp" => :query,
      :"bearer_token" => :query,
      :"oauth_token" => :query,
      :"upload_protocol" => :query,
      :"prettyPrint" => :query,
      :"uploadType" => :query,
      :"fields" => :query,
      :"requestMetadata.trafficSource.trafficSubId" => :query,
      :"requestMetadata.userOverrides.userId" => :query,
      :"requestMetadata.partnersSessionId" => :query,
      :"pageToken" => :query,
      :"pageSize" => :query,
      :"requestMetadata.trafficSource.trafficSourceId" => :query,
      :"requestMetadata.locale" => :query,
      :"requestMetadata.userOverrides.ipAddress" => :query,
      :"requestMetadata.experimentIds" => :query
    }

    request =
      %{}
      |> method(:get)
      |> url("/v2/analytics")
      |> add_optional_params(optional_params, opts)
      |> Enum.into([])

    connection
    |> Connection.request(request)
    |> decode(%GoogleApi.Partners.V2.Model.ListAnalyticsResponse{})
  end
end
| 56.313131 | 428 | 0.715336 |
73d10d9ed7ff3fc62d11ebd13f54ff6218bccd06 | 102 | ex | Elixir | lib/bible/repo.ex | wbotelhos/crud-in-5-minutes-with-phoenix-and-elixir | f3218507d5c2ea7c23170d4316b41979beaa9aa6 | [
"MIT"
] | 2 | 2021-05-28T11:32:22.000Z | 2021-05-28T19:39:03.000Z | lib/bible/repo.ex | wbotelhos/crud-in-5-minutes-with-phoenix-and-elixir | f3218507d5c2ea7c23170d4316b41979beaa9aa6 | [
"MIT"
] | null | null | null | lib/bible/repo.ex | wbotelhos/crud-in-5-minutes-with-phoenix-and-elixir | f3218507d5c2ea7c23170d4316b41979beaa9aa6 | [
"MIT"
] | null | null | null | defmodule Bible.Repo do
use Ecto.Repo,
otp_app: :bible,
adapter: Ecto.Adapters.Postgres
end
| 17 | 35 | 0.715686 |
73d1133a21369ac65a879ecc7bd084d70d62582a | 283 | exs | Elixir | priv/config_storage/migrations/20171025231226_add_farmware_table.exs | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | 2 | 2018-08-01T23:07:52.000Z | 2018-10-17T12:49:21.000Z | priv/config_storage/migrations/20171025231226_add_farmware_table.exs | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | null | null | null | priv/config_storage/migrations/20171025231226_add_farmware_table.exs | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | 1 | 2017-07-22T21:51:14.000Z | 2017-07-22T21:51:14.000Z | defmodule Farmbot.System.ConfigStorage.Migrations.AddDevicesTable do
use Ecto.Migration
  # NOTE(review): the module is named AddDevicesTable but this migration
  # creates farmware_repositories — confirm the intended name (deployed
  # migrations cannot be renamed safely).
  def change do
    # manifests holds serialized farmware manifest data; url must be unique.
    create table("farmware_repositories") do
      add(:manifests, :text)
      add(:url, :string)
    end
    create(unique_index("farmware_repositories", [:url]))
  end
end
| 21.769231 | 68 | 0.713781 |
73d11f46d1aa0b5a81e158ce56a7cbc89206407d | 1,632 | exs | Elixir | bench/storage/subscribe_to_stream_bench.exs | MichalDolata/eventstore | 2a6725fcb4c398d81a74455b6077901189b75386 | [
"MIT"
] | null | null | null | bench/storage/subscribe_to_stream_bench.exs | MichalDolata/eventstore | 2a6725fcb4c398d81a74455b6077901189b75386 | [
"MIT"
] | null | null | null | bench/storage/subscribe_to_stream_bench.exs | MichalDolata/eventstore | 2a6725fcb4c398d81a74455b6077901189b75386 | [
"MIT"
] | null | null | null | defmodule SubscribeToStreamBench do
use Benchfella
alias EventStore.{EventFactory, ProcessHelper, StorageInitializer}
alias TestEventStore, as: EventStore
@await_timeout_ms 100_000
  # Runs before every benchmark: resets storage and boots a fresh event store.
  before_each_bench(_) do
    StorageInitializer.reset_storage!()
    {:ok, pid} = TestEventStore.start_link()
    # The context (100 prepared events + the store pid) is handed to each
    # bench via bench_context and to after_each_bench/1 for cleanup.
    context = [events: EventFactory.create_events(100), pid: pid]
    {:ok, context}
  end
  # Stops the event store process started in before_each_bench/1.
  after_each_bench(context) do
    pid = Keyword.fetch!(context, :pid)
    ProcessHelper.shutdown(pid)
  end
  # Each scenario measures appending 100 events to a fresh stream while N
  # concurrent subscriptions receive their first delivery and unsubscribe.
  bench "subscribe to stream, 1 subscription" do
    subscribe_to_stream(bench_context, 1)
  end
  bench "subscribe to stream, 10 subscriptions" do
    subscribe_to_stream(bench_context, 10)
  end
  bench "subscribe to stream, 20 subscriptions" do
    subscribe_to_stream(bench_context, 20)
  end
  bench "subscribe to stream, 50 subscriptions" do
    subscribe_to_stream(bench_context, 50)
  end
defp subscribe_to_stream(context, concurrency) do
events = Keyword.fetch!(context, :events)
stream_uuid = UUID.uuid4()
tasks =
Enum.map(1..concurrency, fn index ->
Task.async(fn ->
{:ok, _subscription} =
EventStore.subscribe_to_stream(stream_uuid, "subscription-#{index}", self())
receive do
{:events, _events} ->
:ok = EventStore.unsubscribe_from_stream(stream_uuid, "subscription-#{index}")
end
end)
end)
append_task =
Task.async(fn ->
:ok = EventStore.append_to_stream(stream_uuid, 0, events)
end)
Enum.each([append_task | tasks], &Task.await(&1, @await_timeout_ms))
end
end
| 24.727273 | 92 | 0.678922 |
73d12a13b9d065e9e00ef108543fe340a497b029 | 1,024 | exs | Elixir | test/lucidboard_web/view_helper_test.exs | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 86 | 2019-01-07T20:49:04.000Z | 2021-10-02T21:15:42.000Z | test/lucidboard_web/view_helper_test.exs | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 26 | 2019-03-27T12:06:52.000Z | 2020-09-20T05:21:09.000Z | test/lucidboard_web/view_helper_test.exs | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 19 | 2015-01-06T19:02:49.000Z | 2020-05-25T08:54:00.000Z | defmodule LucidboardWeb.ViewHelperTest do
use ExUnit.Case
alias Lucidboard.BoardRole
alias LucidboardWeb.ViewHelper
test "more_than_one_owner?" do
assert false == ViewHelper.more_than_one_owner?([])
assert true ==
ViewHelper.more_than_one_owner?([
%BoardRole{board_id: 1, user_id: 1, role: :observer},
%BoardRole{board_id: 1, user_id: 2, role: :owner},
%BoardRole{board_id: 1, user_id: 3, role: :owner}
])
assert false ==
ViewHelper.more_than_one_owner?([
%BoardRole{board_id: 1, user_id: 1, role: :observer},
%BoardRole{board_id: 1, user_id: 2, role: :owner}
])
end
# @spec more_than_one_owner([BoardRole.t()]) :: boolean
# def more_than_one_owner(roles) do
# true ==
# Enum.reduce_while(roles, 0, fn
# %{type: :owner}, 1 -> {:halt, true}
# %{type: :owner}, acc -> {:cont, acc + 1}
# _, acc -> {:cont, acc}
# end)
# end
end
| 31.030303 | 68 | 0.572266 |
73d134f0d7084ae4177e9a375bdf7f65eacb38bd | 207 | ex | Elixir | wabanex/lib/wabanex_web/schema.ex | shonorio/nlwt_elixir | 029b731c747e4b4954bc0197881b325fb80b4ab6 | [
"MIT"
] | null | null | null | wabanex/lib/wabanex_web/schema.ex | shonorio/nlwt_elixir | 029b731c747e4b4954bc0197881b325fb80b4ab6 | [
"MIT"
] | null | null | null | wabanex/lib/wabanex_web/schema.ex | shonorio/nlwt_elixir | 029b731c747e4b4954bc0197881b325fb80b4ab6 | [
"MIT"
] | null | null | null | defmodule WabanexWeb.Schema do
use Absinthe.Schema
import_types(WabanexWeb.Schema.Types.Root)
  # Root GraphQL query type; fields come from WabanexWeb.Schema.Types.Root.
  query do
    import_fields(:root_query)
  end
  # Root GraphQL mutation type, likewise imported from the Root types module.
  mutation do
    import_fields(:root_mutation)
  end
end
| 14.785714 | 44 | 0.753623 |
73d16dd6e432e5c6594f32fc5717d0f472214853 | 1,519 | exs | Elixir | test/scale/ggity_scale_size_discrete_test.exs | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | null | null | null | test/scale/ggity_scale_size_discrete_test.exs | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | null | null | null | test/scale/ggity_scale_size_discrete_test.exs | kianmeng/ggity | 75f0097464eae4086f8c70e4bea995d60571eba9 | [
"MIT"
] | null | null | null | defmodule GGityScaleSizeDiscreteTest do
use ExUnit.Case
import SweetXml
alias GGity.Scale.Size
  # Builds a discrete size scale trained on four levels; exposed to every
  # test in this module as `scale` via the test context.
  setup do
    %{scale: Size.Discrete.new() |> Size.Discrete.train(["beef", "chicken", "fish", "lamb"])}
  end
  describe "new/2" do
    # The trained transform maps the first and last levels to sizes 3 and 10;
    # intermediate levels fall at sqrt-spaced values in between.
    test "returns a proper scale for discrete values", %{scale: scale} do
      assert_in_delta scale.transform.("beef"), 3, 0.000001
      assert_in_delta scale.transform.("chicken"), :math.sqrt(39.3333333), 0.000001
      assert_in_delta scale.transform.("fish"), :math.sqrt(69.66666667), 0.000001
      assert_in_delta scale.transform.("lamb"), 10, 0.000001
    end
  end
  describe "draw_legend/2" do
    test "returns an empty list if scale has one level" do
      # A single-level scale has nothing to discriminate, so no legend markup
      # is produced at all.
      assert [] ==
               Size.Discrete.new()
               |> Size.Discrete.train(["fish"])
               |> Size.Discrete.draw_legend("Nothing Here", :point, 15)
    end
    test "returns a legend if scale has two or more levels", %{scale: scale} do
      # Wrap the emitted iodata in an <svg> element so SweetXml can parse it.
      legend =
        Size.Discrete.draw_legend(scale, "Fine Meats", :point, 15)
        |> IO.chardata_to_string()
        |> String.replace_prefix("", "<svg>")
        |> String.replace_suffix("", "</svg>")
      # Legend text is the title followed by one label per level, in order.
      assert xpath(legend, ~x"//text/text()"ls) == [
               "Fine Meats",
               "beef",
               "chicken",
               "fish",
               "lamb"
             ]
      # Each legend key circle has radius transform(level) / 2.
      assert xpath(legend, ~x"//circle/@r"lf) ==
               Enum.map(scale.levels, fn value -> scale.transform.(value) / 2 end)
    end
  end
end
| 31 | 93 | 0.581303 |
73d18a575b54f43ade0f4a7b68b88c1dfe6b0535 | 1,194 | ex | Elixir | proficionym_api/lib/proficionym_api.ex | appropont/proficionym-api-elixir | 37519691e04176490154d7d8205c7bf1f90ff8b3 | [
"Apache-2.0"
] | null | null | null | proficionym_api/lib/proficionym_api.ex | appropont/proficionym-api-elixir | 37519691e04176490154d7d8205c7bf1f90ff8b3 | [
"Apache-2.0"
] | null | null | null | proficionym_api/lib/proficionym_api.ex | appropont/proficionym-api-elixir | 37519691e04176490154d7d8205c7bf1f90ff8b3 | [
"Apache-2.0"
] | null | null | null | defmodule ProficionymApi do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the endpoint when the application starts
supervisor(ProficionymApi.Endpoint, []),
# Start your own worker by calling: ProficionymApi.Worker.start_link(arg1, arg2, arg3)
# worker(ProficionymApi.Worker, [arg1, arg2, arg3]),
]
pool_size = 5
redix_workers = for i <- 0..(pool_size - 1) do
worker(Redix, [[], [name: :"redix_#{i}"]], id: {Redix, i})
end
children = children ++ redix_workers
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: ProficionymApi.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
  # Phoenix hot-upgrade hook: forwards runtime configuration changes to the
  # endpoint. Returns :ok as required by the Application behaviour.
  def config_change(changed, _new, removed) do
    ProficionymApi.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 32.27027 | 92 | 0.703518 |
73d1ad822a60ac26d3d5ef67fe1440659011cffd | 1,117 | ex | Elixir | apps/cashtrail_web/lib/cashtrail_web/channels/user_socket.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | 6 | 2020-05-02T01:12:24.000Z | 2020-06-23T12:31:32.000Z | apps/cashtrail_web/lib/cashtrail_web/channels/user_socket.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | 16 | 2020-05-02T00:07:51.000Z | 2021-06-11T01:54:11.000Z | apps/cashtrail_web/lib/cashtrail_web/channels/user_socket.ex | maxmaccari/cashtrail | 74ec7ff1f8a41299a6086dc98719a32cbfbe9ee7 | [
"MIT"
] | null | null | null | defmodule CashtrailWeb.UserSocket do
@moduledoc false
use Phoenix.Socket
## Channels
# channel "room:*", CashtrailWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
  # Accepts every connection without verifying params; combined with id/1
  # returning nil below, all sockets are anonymous.
  @impl true
  def connect(_params, socket, _connect_info) do
    {:ok, socket}
  end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# CashtrailWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
  # nil socket id: sockets are anonymous and cannot be targeted for a
  # broadcast disconnect.
  @impl true
  def id(_socket), do: nil
end
| 29.394737 | 84 | 0.696509 |
73d1b3e106e4df03afa3c9609f2e225c2c135a2a | 237 | exs | Elixir | priv/repo/migrations/20151106202916_add_bundle_id_to_commands.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | priv/repo/migrations/20151106202916_add_bundle_id_to_commands.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | priv/repo/migrations/20151106202916_add_bundle_id_to_commands.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Bishop.Repo.Migrations.AddBundleIdToCommands do
use Ecto.Migration
  # Adds a required bundle_id foreign key to commands; deleting a bundle
  # cascades to its commands. Using `change` keeps the migration reversible.
  def change do
    alter table(:commands) do
      add :bundle_id, references(:bundles, type: :uuid, on_delete: :delete_all), null: false
    end
  end
end
| 23.7 | 92 | 0.729958 |
73d203f5d07987b3456597b84def2592ff750b51 | 116 | exs | Elixir | test/mb_tools_test.exs | burmajam/mb_tools | c254184d12cf8416fee98de514dd2d6bbba1a2d7 | [
"MIT"
] | null | null | null | test/mb_tools_test.exs | burmajam/mb_tools | c254184d12cf8416fee98de514dd2d6bbba1a2d7 | [
"MIT"
] | null | null | null | test/mb_tools_test.exs | burmajam/mb_tools | c254184d12cf8416fee98de514dd2d6bbba1a2d7 | [
"MIT"
] | null | null | null | defmodule MbToolsTest do
use ExUnit.Case
doctest MbTools
  # Placeholder sanity test generated by `mix new`.
  test "the truth" do
    assert 1 + 1 == 2
  end
end
| 12.888889 | 24 | 0.672414 |
73d22e59d2141deb48e65d30483c5ac7751ba802 | 1,599 | ex | Elixir | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/empty.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/empty.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/big_query_connection/lib/google_api/big_query_connection/v1beta1/model/empty.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQueryConnection.V1beta1.Model.Empty do
  @moduledoc """
  A generic empty message that you can re-use to avoid defining duplicated
  empty messages in your APIs. A typical example is to use it as the request
  or the response type of an API method. For instance:
      service Foo {
        rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
      }
  The JSON representation for `Empty` is empty JSON object `{}`.
  ## Attributes
  """
  use GoogleApi.Gax.ModelBase
  # Intentionally field-less: this struct carries no data.
  @type t :: %__MODULE__{}
end
# Poison JSON decoding delegates to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.BigQueryConnection.V1beta1.Model.Empty do
  def decode(value, options) do
    GoogleApi.BigQueryConnection.V1beta1.Model.Empty.decode(value, options)
  end
end
# Poison JSON encoding delegates to the shared Gax model base.
defimpl Poison.Encoder, for: GoogleApi.BigQueryConnection.V1beta1.Model.Empty do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.98 | 80 | 0.751094 |
73d287458ace7d772b4dfe130dbf81a2868eb285 | 1,146 | ex | Elixir | lib/analyser.ex | thomasvolk/stressman | 7c1b62d262ea8f2c38854ce053fe2888aaacd5b6 | [
"Apache-2.0"
] | null | null | null | lib/analyser.ex | thomasvolk/stressman | 7c1b62d262ea8f2c38854ce053fe2888aaacd5b6 | [
"Apache-2.0"
] | null | null | null | lib/analyser.ex | thomasvolk/stressman | 7c1b62d262ea8f2c38854ce053fe2888aaacd5b6 | [
"Apache-2.0"
] | null | null | null |
defmodule StressMan.Analyser do
  @moduledoc """
  Aggregates per-request stress-test results into a running tally.
  State is the tuple `{success_count, error_count, start_time, end_time}`;
  the process is registered under `"analyser"` in
  `:stress_man_process_registry` so `add/1` and `get/0` can reach it.
  """
  use GenServer
  require Logger
  def start_link() do
    now = StressMan.Time.now()
    # Initial state: no results yet; start and end both set to "now".
    GenServer.start_link(__MODULE__, {0,0,now,now}, name: via_tuple())
  end
  defp via_tuple, do: {:via, Registry, {:stress_man_process_registry, "analyser"}}
  # Record one result; fire-and-forget (cast), so callers never block.
  def add({_duration, {_status, _message} } = record) do
    GenServer.cast(via_tuple(), {:add, record})
  end
  # Returns {success_count, error_count, elapsed} — see result/1.
  def get() do
    GenServer.call(via_tuple(), :get)
  end
  def init(state) do
    {:ok, state}
  end
  # Collapse the state tuple into {successes, errors, total_duration}.
  defp result({success_count, error_count, start_time, end_time}) do
    {success_count, error_count, end_time - start_time}
  end
  def handle_call(:get, _from, state) do
    {:reply, result(state) , state}
  end
  def handle_cast({:add, { _duration, {status, _message} } }, {success_count, error_count, start_time, _end_time}) do
    now = StressMan.Time.now()
    # Any status other than :success counts as an error; end_time advances
    # to "now" on every recorded result.
    new_state = case status do
      :success -> {success_count + 1, error_count, start_time, now}
      _ -> {success_count, error_count + 1, start_time, now}
    end
    Logger.info("analyser #{node()}-#{inspect self()}: #{inspect new_state}")
    {:noreply, new_state}
  end
end
| 26.651163 | 117 | 0.670157 |
73d2a7d07a9619d067d807ecd452369ee8b26921 | 2,239 | ex | Elixir | clients/display_video/lib/google_api/display_video/v1/model/carrier_and_isp_assigned_targeting_option_details.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/display_video/lib/google_api/display_video/v1/model/carrier_and_isp_assigned_targeting_option_details.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/display_video/lib/google_api/display_video/v1/model/carrier_and_isp_assigned_targeting_option_details.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.CarrierAndIspAssignedTargetingOptionDetails do
  @moduledoc """
  Details for assigned carrier and ISP targeting option. This will be populated in the details field of an AssignedTargetingOption when targeting_type is `TARGETING_TYPE_CARRIER_AND_ISP`.
  ## Attributes
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - Output only. The display name of the carrier or ISP.
  *   `negative` (*type:* `boolean()`, *default:* `nil`) - Indicates if this option is being negatively targeted. All assigned carrier and ISP targeting options on the same line item must have the same value for this field.
  *   `targetingOptionId` (*type:* `String.t`, *default:* `nil`) - Required. The targeting_option_id of a TargetingOption of type `TARGETING_TYPE_CARRIER_AND_ISP`.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :displayName => String.t(),
          :negative => boolean(),
          :targetingOptionId => String.t()
        }
  # JSON field mappings registered via GoogleApi.Gax.ModelBase.
  field(:displayName)
  field(:negative)
  field(:targetingOptionId)
end
# Poison JSON decoding delegates to the generated model's decode/2.
defimpl Poison.Decoder,
  for: GoogleApi.DisplayVideo.V1.Model.CarrierAndIspAssignedTargetingOptionDetails do
  def decode(value, options) do
    GoogleApi.DisplayVideo.V1.Model.CarrierAndIspAssignedTargetingOptionDetails.decode(
      value,
      options
    )
  end
end
# Poison JSON encoding delegates to the shared Gax model base.
defimpl Poison.Encoder,
  for: GoogleApi.DisplayVideo.V1.Model.CarrierAndIspAssignedTargetingOptionDetails do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.603448 | 223 | 0.744975 |
73d2ae18ab209f589c4614957218fe1b023562b2 | 1,476 | ex | Elixir | lib/yamm_web/live/user_live/form_component.ex | yammine/YAMM | c118eab5029b86c2caf24890dfffa8437684f5d1 | [
"MIT"
] | null | null | null | lib/yamm_web/live/user_live/form_component.ex | yammine/YAMM | c118eab5029b86c2caf24890dfffa8437684f5d1 | [
"MIT"
] | null | null | null | lib/yamm_web/live/user_live/form_component.ex | yammine/YAMM | c118eab5029b86c2caf24890dfffa8437684f5d1 | [
"MIT"
] | null | null | null | defmodule YAMMWeb.UserLive.FormComponent do
use YAMMWeb, :live_component
alias YAMM.Money
  @impl true
  # Seeds the component with its assigns and a pristine changeset for the
  # given user, ready for the form to render.
  def update(%{user: user} = assigns, socket) do
    changeset = Money.change_user(user)
    {:ok,
     socket
     |> assign(assigns)
     |> assign(:changeset, changeset)}
  end
  @impl true
  # Live-validates the form as the user types; setting the :validate action
  # makes errors render without attempting a save.
  def handle_event("validate", %{"user" => user_params}, socket) do
    changeset =
      socket.assigns.user
      |> Money.change_user(user_params)
      |> Map.put(:action, :validate)
    {:noreply, assign(socket, :changeset, changeset)}
  end
  # Dispatches to save_user/3 on the current action (:edit or :new).
  def handle_event("save", %{"user" => user_params}, socket) do
    save_user(socket, socket.assigns.action, user_params)
  end
defp save_user(socket, :edit, user_params) do
case Money.update_user(socket.assigns.user, user_params) do
{:ok, _user} ->
{:noreply,
socket
|> put_flash(:info, "User updated successfully")
|> push_redirect(to: socket.assigns.return_to)}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, :changeset, changeset)}
end
end
defp save_user(socket, :new, user_params) do
case Money.create_user(user_params) do
{:ok, _user} ->
{:noreply,
socket
|> put_flash(:info, "User created successfully")
|> push_redirect(to: socket.assigns.return_to)}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, changeset: changeset)}
end
end
end
| 26.357143 | 67 | 0.632791 |
73d2af5200dbed4dc06727ba79597fb1f494390f | 1,463 | ex | Elixir | lib/white_bread/runners/scenario_runner.ex | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 209 | 2015-03-03T14:14:28.000Z | 2020-10-26T03:23:48.000Z | lib/white_bread/runners/scenario_runner.ex | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 83 | 2015-03-23T11:46:51.000Z | 2020-11-04T09:47:06.000Z | lib/white_bread/runners/scenario_runner.ex | ejscunha/white-bread | 1c2eed1c98545beeb70b590426ce9026a8455e97 | [
"MIT"
] | 46 | 2015-06-12T17:37:21.000Z | 2020-10-30T09:52:45.000Z | defmodule WhiteBread.Runners.ScenarioRunner do
import WhiteBread.Runners.Utilities
alias WhiteBread.Runners.Setup
alias WhiteBread.Runners.StepsRunner
  # Runs one scenario: traps exits for the duration, threads the starting
  # state through the background and scenario steps, folds any trapped
  # exit/down message into the result, and reports the outcome.
  def run(scenario, context, %Setup{} = setup \\ Setup.new) do
    start_trapping_exits()
    starting_state = setup.starting_state
    |> apply_scenario_starting_state(context)
    {scenario, scenario.steps}
    |> StepsRunner.run(context, setup.background_steps, starting_state)
    |> update_result_with_exits
    |> stop_trapping_exits
    |> build_result_tuple(scenario)
    |> output_result(scenario)
  end
defp build_result_tuple(result, scenario) do
case result do
{:ok, _} -> {:ok, scenario.name}
error_data -> {:failed, error_data}
end
end
  # Side effect: report the scenario result to the outputer, then return the
  # tuple unchanged so it can terminate the pipeline in run/3.
  defp output_result(result_tuple, scenario) do
    WhiteBread.Outputer.report({:scenario_result, result_tuple, scenario})
    result_tuple
  end
  # Trap exits so crashes in linked step processes arrive as {:EXIT, ...}
  # messages instead of killing this runner; stop_trapping_exits restores
  # the flag and passes its argument through for pipeline use.
  defp start_trapping_exits, do: Process.flag(:trap_exit, true)
  defp stop_trapping_exits(pass_through) do
    Process.flag(:trap_exit, false)
    pass_through
  end
defp update_result_with_exits(result = {:other_failure, _, _}), do: result
defp update_result_with_exits(result) do
receive do
{'DOWN', _ref, _process, _pid2, _reason} = exit_message ->
{:exit_recieved, exit_message}
{:EXIT, _pid, reason} = exit_message when reason != :normal ->
{:exit_recieved, exit_message}
after 0 ->
result
end
end
end
| 26.6 | 76 | 0.702666 |
73d2d26b39a1770b400f50688bbf2f67c0b0557f | 1,082 | ex | Elixir | lib/reservation_book/application.ex | chargio/reservation_book_phoenix | 87f07e390f0da2bed162d55b26d3c73c0f3aea4a | [
"MIT"
] | null | null | null | lib/reservation_book/application.ex | chargio/reservation_book_phoenix | 87f07e390f0da2bed162d55b26d3c73c0f3aea4a | [
"MIT"
] | null | null | null | lib/reservation_book/application.ex | chargio/reservation_book_phoenix | 87f07e390f0da2bed162d55b26d3c73c0f3aea4a | [
"MIT"
] | null | null | null | defmodule ReservationBook.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
  # Boots the supervision tree; child order matters — the repo, telemetry,
  # and PubSub must be running before the endpoint accepts requests.
  def start(_type, _args) do
    children = [
      # Start the Ecto repository
      ReservationBook.Repo,
      # Start the Telemetry supervisor
      ReservationBookWeb.Telemetry,
      # Start the PubSub system
      {Phoenix.PubSub, name: ReservationBook.PubSub},
      # Start the Endpoint (http/https)
      ReservationBookWeb.Endpoint
      # Start a worker by calling: ReservationBook.Worker.start_link(arg)
      # {ReservationBook.Worker, arg}
    ]
    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: ReservationBook.Supervisor]
    Supervisor.start_link(children, opts)
  end
  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  # Invoked on hot code upgrades; returns :ok per the Application behaviour.
  def config_change(changed, _new, removed) do
    ReservationBookWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 30.914286 | 73 | 0.721811 |
73d2d9bb6a5ff715f75ee3851c208686823647a0 | 7,305 | ex | Elixir | lib/ex_config.ex | djthread/ex_config | 6900ffe3c0cdc089a224621749bfd5d32cced562 | [
"MIT"
] | null | null | null | lib/ex_config.ex | djthread/ex_config | 6900ffe3c0cdc089a224621749bfd5d32cced562 | [
"MIT"
] | null | null | null | lib/ex_config.ex | djthread/ex_config | 6900ffe3c0cdc089a224621749bfd5d32cced562 | [
"MIT"
] | null | null | null | defmodule ExConfig do
@moduledoc """
Module enhancer for creating a nice place to get configuration data for
your application
To use, create a new module with something like
defmodule MyApp.Config do
use ExConfig
end
Configs under `:my_app` can be had via `MyApp.Config`'s `&fetch/2`,
`&fetch!/2`, and `&get/3`.
## `use` Options
The following options can be passed as a keyword list to the `use ExConfig`
statement:
* `:app` - The app atom. If undefined, this is assumed to be the first part
of the using module's namespace, transformed with `&Macro.underscore/1`.
* `:env_prefix` - A string which should be combined with "_ENV" to form the
prefix of environment variables that are looked up. This is assumed to be
first part of the using module's namespace, transformed with
`&String.upcase/1`.
* `:valid_environments` - A list of atoms which are the environment settings
that can be set (in string form, of course) in the application environment
variable (eg. `SKYLAB_ENV`)
* `:sections` - A list of atoms which are the config sections that should
have functions of the same names dynamically added to the module. (if
:foo_service is in the list, then `&foo_service/1` and `&foo_service!/1`
will be defined.)
* `:data_sources` - A list of data source modules, in the order that they
should be evaluated to resolve any config values. By default, the order is:
* `ExConfig.EnvironmentDataSource`
* `ExConfig.EnvConfigDataSource`
* `ExConfig.ApplicationEnvironmentDataSource`
## Macros
Inside your config module, you may use the following macros.
* `section(atom)` - Shortcut functions will be defined to allow easier access
to the section by the name of the given atom. For instance, if the module
has `section(:thing)`, then:
* `&thing/1` will be defined. Call it with `:base_url` to get the same
value as `get(:thing, :base_url)`.
* `&thing!/1` will be defined. Call it with `:base_url` to get the same
value as `fetch!(:thing, :base_url)`.
## Application Environment
This library adds the concept of the application's (runtime) environment.
The `:valid_environments` list has all the possible values and the first
entry will be the default environment if none is set. In order to set one,
simply define the relevant environment variable. For instance, if your
`:env_prefix` is `"SKYLAB"` (and `:prod` is included in your
`:valid_environments` list) then setting your `SKYLAB_ENV` environment
variable to `"prod"` would set the application environment to `:prod`.
This value is used when finding the needed value in the
`ExConfig.EnvConfigDataSource` step of the cascading logic.
## Cascading Logic
The cascading logic for finding a config value with the `section`
`:some_service` and `key` `:base_url` would be as follows. If any step comes
back with a value, the rest of the steps will be skipped and the value
returned.
* Look in the `SOME_SERVICE_BASE_URL` environment variable
* Look in the application environment under `:my_app`, `:some_service`,
`:env_configs` for a keyword list. Use the application environment atom as
the key to find a keyword list which should then include the `key`.
* Look in the application environment under `:my_app`, `:some_service`,
`:base_url`
"""
alias ExConfig.OptionNormalizer
  defmacro __using__(opts \\ []) do
    # Evaluate the opts AST in the caller's context, record the caller
    # module, and normalize/validate all options before generating code.
    opts =
      __CALLER__
      |> Module.eval_quoted(opts)
      |> elem(0)
      |> Keyword.put(:module, __CALLER__.module)
      |> OptionNormalizer.normalize_opts!()
    quote do
      import ExConfig, only: [section: 1]
      @doc "Get the configured data sources"
      @spec data_sources :: [module]
      def data_sources do
        unquote(Keyword.get(opts, :data_sources))
      end
      @doc "Get the application environment"
      @spec env :: String.t()
      def env do
        ExConfig.get_env(
          unquote(Keyword.get(opts, :env_prefix)),
          unquote(Keyword.get(opts, :valid_environments))
        )
      end
      @doc "Get the atom for the app's config namespace"
      @spec app :: atom
      def app, do: unquote(Keyword.get(opts, :app))
      @doc "Get the all-caps string for the environment variables' prefix"
      @spec env_prefix :: String.t()
      def env_prefix, do: unquote(Keyword.get(opts, :env_prefix))
      @doc "Fetch a configuration key; raise if unset"
      @spec fetch!(atom, atom) :: any | no_return
      def fetch!(section, key),
        do: ExConfig.fetch!(__MODULE__, app(), section, key)
      @doc "Fetch a configuration key"
      @spec fetch(atom, atom) :: {:ok, any} | :error
      def fetch(section, key),
        do: ExConfig.fetch(__MODULE__, app(), section, key)
      @doc "Get a configuration key"
      @spec get(atom, atom, any) :: any
      def get(section, key, default \\ nil),
        do: ExConfig.get(__MODULE__, app(), section, key, default)
      # Inject the per-section shortcut functions (&section/1, &section!/1)
      # declared via the :sections option.
      Module.eval_quoted(
        __ENV__,
        Enum.map(
          unquote(Keyword.get(opts, :sections)),
          &ExConfig.section_fn_generator/1
        )
      )
    end
  end
  @doc """
  Create some shortcut functions for a given section
  """
  defmacro section(section) do
    # Delegates to section_fn_generator/1, which returns the quoted defs.
    section_fn_generator(section)
  end
@doc "Get the application (runtime) environment"
@spec get_env(String.t(), [atom]) :: atom
def get_env(prefix, valid) do
case System.get_env("#{prefix}_ENV") do
nil ->
hd(valid)
val ->
if val in Enum.map(valid, &to_string/1),
do: String.to_atom(val),
else:
raise("""
Invalid #{prefix}_ENV (#{val}). Add `:#{val}` to the \
`:valid_environments` option.\
""")
end
end
@doc "Get a configuration value, raise if unset"
@spec fetch!(module, atom, atom, atom) :: any | no_return
def fetch!(mod, app, section, key) do
case fetch(mod, app, section, key) do
:error ->
raise RuntimeError,
"Couldn't get #{inspect(section)} config: #{inspect(key)}"
{:ok, val} ->
val
end
end
@doc "Get a configuration value"
@spec get(module, atom, atom, atom) :: any
def get(mod, app, section, key, default \\ nil) do
case fetch(mod, app, section, key) do
{:ok, val} -> val
:error -> default
end
end
@doc "Fetch a configuration value"
@spec fetch(module, atom, atom, atom) :: any
def fetch(mod, app, section, key) do
do_fetch(mod.data_sources(), mod, app, section, key)
end
@doc "Generates some section-specific functions"
def section_fn_generator(sec) do
quote do
@doc "get a key from the `:#{unquote(sec)}` section"
@spec unquote(sec)(atom) :: any
def unquote(sec)(key, default \\ nil),
do: get(unquote(sec), key, default)
@doc "fetch a key from the `:#{unquote(sec)}` section"
@spec unquote(:"#{sec}!")(atom) :: {:ok, any} | :error
def unquote(:"#{sec}!")(key), do: fetch!(unquote(sec), key)
end
end
defp do_fetch([source | tail], mod, app, section, key) do
with :error <- source.fetch(mod, app, section, key) do
do_fetch(tail, mod, app, section, key)
end
end
defp do_fetch([], _, _, _, _) do
:error
end
end
| 33.663594 | 79 | 0.650513 |
73d2f278b84aafd9e962a173870faee2b567a555 | 2,067 | ex | Elixir | clients/classroom/lib/google_api/classroom/v1/model/invitation.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/classroom/lib/google_api/classroom/v1/model/invitation.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/classroom/lib/google_api/classroom/v1/model/invitation.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Classroom.V1.Model.Invitation do
  @moduledoc """
  An invitation to join a course.
  ## Attributes
  *   `courseId` (*type:* `String.t`, *default:* `nil`) - Identifier of the course to invite the user to.
  *   `id` (*type:* `String.t`, *default:* `nil`) - Identifier assigned by Classroom. Read-only.
  *   `role` (*type:* `String.t`, *default:* `nil`) - Role to invite the user to have. Must not be `COURSE_ROLE_UNSPECIFIED`.
  *   `userId` (*type:* `String.t`, *default:* `nil`) - Identifier of the invited user. When specified as a parameter of a request, this identifier can be set to one of the following: * the numeric identifier for the user * the email address of the user * the string literal `"me"`, indicating the requesting user
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :courseId => String.t(),
          :id => String.t(),
          :role => String.t(),
          :userId => String.t()
        }
  # JSON field mappings registered via GoogleApi.Gax.ModelBase.
  field(:courseId)
  field(:id)
  field(:role)
  field(:userId)
end
# Poison JSON decoding delegates to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Classroom.V1.Model.Invitation do
  def decode(value, options) do
    GoogleApi.Classroom.V1.Model.Invitation.decode(value, options)
  end
end
# Poison JSON encoding delegates to the shared Gax model base.
defimpl Poison.Encoder, for: GoogleApi.Classroom.V1.Model.Invitation do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.910714 | 313 | 0.700532 |
73d3352b671235103e59e9a6b88aff378249f0db | 1,959 | ex | Elixir | lib/absinthe/plug/types.ex | gialib/absinthe_plug | c5aa394a588823cf6b4eb39e347faa98bc373c11 | [
"MIT"
] | null | null | null | lib/absinthe/plug/types.ex | gialib/absinthe_plug | c5aa394a588823cf6b4eb39e347faa98bc373c11 | [
"MIT"
] | null | null | null | lib/absinthe/plug/types.ex | gialib/absinthe_plug | c5aa394a588823cf6b4eb39e347faa98bc373c11 | [
"MIT"
] | null | null | null | defmodule Absinthe.Plug.Types do
@moduledoc """
This module provides GraphQL types that may be useful in Absinthe schema
and type modules.
## `:upload`
Absinthe.Plug can be used to upload files via GraphQL.
For example, the following schema includes a mutation field that accepts
multiple uploaded files as arguments (`:users` and `:metadata`):
```elixir
defmodule MyAppWeb.Schema do
use Absinthe.Schema
# Important: Needed to get the `:upload` type
import_types Absinthe.Plug.Types
mutation do
field :upload_file, :string do
arg :users, non_null(:upload)
arg :metadata, :upload
resolve fn args, _ ->
args.users # this is a `%Plug.Upload{}` struct.
{:ok, "success"}
end
end
end
end
```
To send a mutation that includes a file upload, you need to
use the `multipart/form-data` content type. For example, using `cURL`:
```shell
$ curl -X POST \
-F query='mutation { uploadFile(users: "users_csv", metadata: "metadata_json") }' \
-F users_csv=@users.csv \
-F metadata_json=@metadata.json \
localhost:4000/graphql
```
Note how there is a correspondance between the value of the `:users` argument
and the `-F` option indicating the associated file.
By treating uploads as regular arguments we get all the usual GraphQL argument
benefits (such as validation and documentation), something we wouldn't get if
we were merely putting them in the context.
"""
use Absinthe.Schema.Notation
alias Absinthe.Blueprint
  @desc """
  Represents an uploaded file.
  """
  scalar :upload do
    # Input coercion: the string value names a key in the uploads map that
    # Absinthe.Plug stores in the context; resolving yields {:ok, upload}
    # or :error (Map.fetch) when no such upload was posted.
    parse(fn
      %Blueprint.Input.String{value: value}, context ->
        Map.fetch(context[:__absinthe_plug__][:uploads] || %{}, value)
      %Blueprint.Input.Null{}, _ ->
        {:ok, nil}
      _, _ ->
        :error
    end)
    # Uploads are input-only; serializing one back out is a programmer error.
    serialize(fn _ ->
      raise "The `:upload` scalar cannot be returned!"
    end)
  end
end
| 25.115385 | 85 | 0.65952 |
73d338786c1dcb1585dd50c3db996e0d679bf66f | 9,769 | exs | Elixir | apps/theta/test/theta/cms_test.exs | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | null | null | null | apps/theta/test/theta/cms_test.exs | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | 11 | 2020-07-21T09:34:54.000Z | 2021-08-29T07:38:02.000Z | apps/theta/test/theta/cms_test.exs | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | null | null | null | defmodule Theta.CMSTest do
use Theta.DataCase
alias Theta.CMS
  describe "taxonomy" do
    alias Theta.CMS.Taxonomy
    # Canonical attribute sets shared by the CRUD tests below.
    @valid_attrs %{title: "some title"}
    @update_attrs %{title: "some updated title"}
    @invalid_attrs %{title: nil}
    # Inserts a taxonomy built from @valid_attrs merged with any overrides.
    def taxonomy_fixture(attrs \\ %{}) do
      {:ok, taxonomy} =
        attrs
        |> Enum.into(@valid_attrs)
        |> CMS.create_taxonomy()
      taxonomy
    end
    test "list_taxonomy/0 returns all taxonomy" do
      taxonomy = taxonomy_fixture()
      assert CMS.list_taxonomy() == [taxonomy]
    end
    test "get_taxonomy!/1 returns the taxonomy with given id" do
      taxonomy = taxonomy_fixture()
      assert CMS.get_taxonomy!(taxonomy.id) == taxonomy
    end
    test "create_taxonomy/1 with valid data creates a taxonomy" do
      assert {:ok, %Taxonomy{} = taxonomy} = CMS.create_taxonomy(@valid_attrs)
      assert taxonomy.title == "some title"
    end
    test "create_taxonomy/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = CMS.create_taxonomy(@invalid_attrs)
    end
    test "update_taxonomy/2 with valid data updates the taxonomy" do
      taxonomy = taxonomy_fixture()
      assert {:ok, %Taxonomy{} = taxonomy} = CMS.update_taxonomy(taxonomy, @update_attrs)
      assert taxonomy.title == "some updated title"
    end
    test "update_taxonomy/2 with invalid data returns error changeset" do
      taxonomy = taxonomy_fixture()
      assert {:error, %Ecto.Changeset{}} = CMS.update_taxonomy(taxonomy, @invalid_attrs)
      # A failed update must leave the stored record untouched.
      assert taxonomy == CMS.get_taxonomy!(taxonomy.id)
    end
    test "delete_taxonomy/1 deletes the taxonomy" do
      taxonomy = taxonomy_fixture()
      assert {:ok, %Taxonomy{}} = CMS.delete_taxonomy(taxonomy)
      assert_raise Ecto.NoResultsError, fn -> CMS.get_taxonomy!(taxonomy.id) end
    end
    test "change_taxonomy/1 returns a taxonomy changeset" do
      taxonomy = taxonomy_fixture()
      assert %Ecto.Changeset{} = CMS.change_taxonomy(taxonomy)
    end
  end
  describe "term" do
    alias Theta.CMS.Term
    # Canonical attribute sets shared by the CRUD tests below.
    @valid_attrs %{title: "some title"}
    @update_attrs %{title: "some updated title"}
    @invalid_attrs %{title: nil}
    # Inserts a term built from @valid_attrs merged with any overrides.
    def term_fixture(attrs \\ %{}) do
      {:ok, term} =
        attrs
        |> Enum.into(@valid_attrs)
        |> CMS.create_term()
      term
    end
    test "list_term/0 returns all term" do
      term = term_fixture()
      assert CMS.list_term() == [term]
    end
    test "get_term!/1 returns the term with given id" do
      term = term_fixture()
      assert CMS.get_term!(term.id) == term
    end
    test "create_term/1 with valid data creates a term" do
      assert {:ok, %Term{} = term} = CMS.create_term(@valid_attrs)
      assert term.title == "some title"
    end
    test "create_term/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = CMS.create_term(@invalid_attrs)
    end
    test "update_term/2 with valid data updates the term" do
      term = term_fixture()
      assert {:ok, %Term{} = term} = CMS.update_term(term, @update_attrs)
      assert term.title == "some updated title"
    end
    test "update_term/2 with invalid data returns error changeset" do
      term = term_fixture()
      assert {:error, %Ecto.Changeset{}} = CMS.update_term(term, @invalid_attrs)
      # A failed update must leave the stored record untouched.
      assert term == CMS.get_term!(term.id)
    end
    test "delete_term/1 deletes the term" do
      term = term_fixture()
      assert {:ok, %Term{}} = CMS.delete_term(term)
      assert_raise Ecto.NoResultsError, fn -> CMS.get_term!(term.id) end
    end
    test "change_term/1 returns a term changeset" do
      term = term_fixture()
      assert %Ecto.Changeset{} = CMS.change_term(term)
    end
  end
describe "author" do
  alias Theta.CMS.Author

  @valid_attrs %{role: "some role"}
  @update_attrs %{role: "some updated role"}
  @invalid_attrs %{role: nil}

  # Builds and persists an Author, merging any overrides into @valid_attrs.
  def author_fixture(attrs \\ %{}) do
    merged = Enum.into(attrs, @valid_attrs)
    {:ok, record} = CMS.create_author(merged)
    record
  end

  test "list_author/0 returns all author" do
    record = author_fixture()
    assert [record] == CMS.list_author()
  end

  test "get_author!/1 returns the author with given id" do
    record = author_fixture()
    assert record == CMS.get_author!(record.id)
  end

  test "create_author/1 with valid data creates a author" do
    assert {:ok, %Author{} = created} = CMS.create_author(@valid_attrs)
    assert created.role == "some role"
  end

  test "create_author/1 with invalid data returns error changeset" do
    assert {:error, %Ecto.Changeset{}} = CMS.create_author(@invalid_attrs)
  end

  test "update_author/2 with valid data updates the author" do
    record = author_fixture()
    assert {:ok, %Author{} = updated} = CMS.update_author(record, @update_attrs)
    assert updated.role == "some updated role"
  end

  test "update_author/2 with invalid data returns error changeset" do
    record = author_fixture()
    assert {:error, %Ecto.Changeset{}} = CMS.update_author(record, @invalid_attrs)
    # The stored row must be untouched after a failed update.
    assert CMS.get_author!(record.id) == record
  end

  test "delete_author/1 deletes the author" do
    record = author_fixture()
    assert {:ok, %Author{}} = CMS.delete_author(record)
    assert_raise Ecto.NoResultsError, fn -> CMS.get_author!(record.id) end
  end

  test "change_author/1 returns a author changeset" do
    record = author_fixture()
    assert %Ecto.Changeset{} = CMS.change_author(record)
  end
end
describe "article" do
  alias Theta.CMS.Article

  @valid_attrs %{body: "some body", summary: "some summary", title: "some title"}
  @update_attrs %{
    body: "some updated body",
    summary: "some updated summary",
    title: "some updated title"
  }
  @invalid_attrs %{body: nil, summary: nil, title: nil}

  # Builds and persists an Article, merging any overrides into @valid_attrs.
  def article_fixture(attrs \\ %{}) do
    merged = Enum.into(attrs, @valid_attrs)
    {:ok, record} = CMS.create_article(merged)
    record
  end

  test "list_article/0 returns all article" do
    record = article_fixture()
    assert [record] == CMS.list_article()
  end

  test "get_article!/1 returns the article with given id" do
    record = article_fixture()
    assert record == CMS.get_article!(record.id)
  end

  test "create_article/1 with valid data creates a article" do
    assert {:ok, %Article{} = created} = CMS.create_article(@valid_attrs)
    assert created.body == "some body"
    assert created.summary == "some summary"
    assert created.title == "some title"
  end

  test "create_article/1 with invalid data returns error changeset" do
    assert {:error, %Ecto.Changeset{}} = CMS.create_article(@invalid_attrs)
  end

  test "update_article/2 with valid data updates the article" do
    record = article_fixture()
    assert {:ok, %Article{} = updated} = CMS.update_article(record, @update_attrs)
    assert updated.body == "some updated body"
    assert updated.summary == "some updated summary"
    assert updated.title == "some updated title"
  end

  test "update_article/2 with invalid data returns error changeset" do
    record = article_fixture()
    assert {:error, %Ecto.Changeset{}} = CMS.update_article(record, @invalid_attrs)
    # The stored row must be untouched after a failed update.
    assert CMS.get_article!(record.id) == record
  end

  test "delete_article/1 deletes the article" do
    record = article_fixture()
    assert {:ok, %Article{}} = CMS.delete_article(record)
    assert_raise Ecto.NoResultsError, fn -> CMS.get_article!(record.id) end
  end

  test "change_article/1 returns a article changeset" do
    record = article_fixture()
    assert %Ecto.Changeset{} = CMS.change_article(record)
  end
end
describe "qa" do
  # FIX(review): this block previously aliased `Theta.Cms.Qa` and called
  # `Cms.create_qa/1` etc., but `Cms` is never aliased in this file — only
  # `Theta.CMS` is (as `CMS`, at the top of the module) — so every `Cms.*`
  # call would raise an undefined-module error at runtime. Aligned the alias
  # and all context calls with the `Theta.CMS` convention used by the other
  # describe blocks. NOTE: confirm the schema module really is `Theta.CMS.Qa`
  # and not a separately-namespaced `Theta.Cms.Qa`.
  alias Theta.CMS.Qa

  @valid_attrs %{answer: "some answer", question: "some question", tag: "some tag"}
  @update_attrs %{
    answer: "some updated answer",
    question: "some updated question",
    tag: "some updated tag"
  }
  @invalid_attrs %{answer: nil, question: nil, tag: nil}

  # Builds and persists a Qa, merging any overrides into @valid_attrs.
  def qa_fixture(attrs \\ %{}) do
    {:ok, qa} =
      attrs
      |> Enum.into(@valid_attrs)
      |> CMS.create_qa()

    qa
  end

  test "list_qa/0 returns all qa" do
    qa = qa_fixture()
    assert CMS.list_qa() == [qa]
  end

  test "get_qa!/1 returns the qa with given id" do
    qa = qa_fixture()
    assert CMS.get_qa!(qa.id) == qa
  end

  test "create_qa/1 with valid data creates a qa" do
    assert {:ok, %Qa{} = qa} = CMS.create_qa(@valid_attrs)
    assert qa.answer == "some answer"
    assert qa.question == "some question"
    assert qa.tag == "some tag"
  end

  test "create_qa/1 with invalid data returns error changeset" do
    assert {:error, %Ecto.Changeset{}} = CMS.create_qa(@invalid_attrs)
  end

  test "update_qa/2 with valid data updates the qa" do
    qa = qa_fixture()
    assert {:ok, %Qa{} = qa} = CMS.update_qa(qa, @update_attrs)
    assert qa.answer == "some updated answer"
    assert qa.question == "some updated question"
    assert qa.tag == "some updated tag"
  end

  test "update_qa/2 with invalid data returns error changeset" do
    qa = qa_fixture()
    assert {:error, %Ecto.Changeset{}} = CMS.update_qa(qa, @invalid_attrs)
    # The stored row must be untouched after a failed update.
    assert qa == CMS.get_qa!(qa.id)
  end

  test "delete_qa/1 deletes the qa" do
    qa = qa_fixture()
    assert {:ok, %Qa{}} = CMS.delete_qa(qa)
    assert_raise Ecto.NoResultsError, fn -> CMS.get_qa!(qa.id) end
  end

  test "change_qa/1 returns a qa changeset" do
    qa = qa_fixture()
    assert %Ecto.Changeset{} = CMS.change_qa(qa)
  end
end
end
| 30.817035 | 89 | 0.646125 |
73d35430aa52617ef24cd0f17b64d2878c34f125 | 350 | exs | Elixir | priv/repo/seeds.exs | Lugghawk/GameDex | 680d67a925e16ee5ba357d529542d4be4d4c1b99 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | Lugghawk/GameDex | 680d67a925e16ee5ba357d529542d4be4d4c1b99 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | Lugghawk/GameDex | 680d67a925e16ee5ba357d529542d4be4d4c1b99 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Gamedex.Repo.insert!(%Gamedex.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.166667 | 61 | 0.705714 |