# File: lib/elixir/lib/kernel/typespec.ex (repo: bruce/elixir, license: Apache-2.0)
defmodule Kernel.Typespec do
@moduledoc """
Provides macros and functions for working with typespecs.
The attributes `@type`, `@opaque`, `@typep`, `@spec` and
`@callback` available in modules are handled by the equivalent
macros defined by this module.
## Defining a type
@type type_name :: type
@typep type_name :: type
@opaque type_name :: type
For more details, see documentation for `deftype`, `deftypep` and `defopaque`
below.
## Defining a specification
@spec function_name(type, type) :: type
@callback function_name(type, type) :: type
For more details, see documentation for `defspec` and `defcallback` below.
## Types
The type syntax provided by Elixir is fairly similar to the one
in Erlang.
Most of the built-in types provided in Erlang (for example, `pid()`)
are expressed the same way: `pid()` or simply `pid`. Parametrized types
are also supported (`list(integer())`) and so are remote types (`Enum.t`).
Certain data type shortcuts (`[...]`, `<<>>` and `{...}`) are supported as
well.
Main differences lie in how bit strings and functions are defined:
### Bit Strings
Bit string with a base size of 3:
<<_ :: 3>>
Bit string with a unit size of 8:
<<_ :: _ * 8>>
### Anonymous functions
Any anonymous function:
((...) -> any)
or
(... -> any)
Anonymous function with arity of zero:
(() -> type)
Anonymous function with some arity:
((type, type) -> type)
or
(type, type -> type)
## Notes
Elixir discourages the use of type `string()` as it might be confused
with binaries which are referred to as "strings" in Elixir (as opposed to
character lists). In order to use the type that is called `string()` in Erlang,
one has to use the `char_list()` type which is a synonym for `string()`. If you
use `string()`, you'll get a warning from the compiler.
If you want to refer to the "string" type (the one operated by functions in the
String module), use `String.t()` type instead.
See http://www.erlang.org/doc/reference_manual/typespec.html
for more information.
"""
@doc """
Defines a type.
This macro is the one responsible for handling the attribute `@type`.
## Examples
@type my_type :: atom
"""
defmacro deftype(type) do
quote do
Kernel.Typespec.deftype(:type, unquote(Macro.escape type), __ENV__)
end
end
@doc """
Defines an opaque type.
This macro is the one responsible for handling the attribute `@opaque`.
## Examples
@opaque my_type :: atom
"""
defmacro defopaque(type) do
quote do
Kernel.Typespec.deftype(:opaque, unquote(Macro.escape type), __ENV__)
end
end
@doc """
Defines a private type.
This macro is the one responsible for handling the attribute `@typep`.
## Examples
@typep my_type :: atom
"""
defmacro deftypep(type) do
quote do
Kernel.Typespec.deftype(:typep, unquote(Macro.escape type), __ENV__)
end
end
@doc """
Defines a spec.
This macro is the one responsible for handling the attribute `@spec`.
## Examples
@spec add(number, number) :: number
"""
defmacro defspec(spec) do
quote do
Kernel.Typespec.defspec(:spec, unquote(Macro.escape spec), __ENV__)
end
end
@doc """
Defines a callback.
This macro is the one responsible for handling the attribute `@callback`.
## Examples
@callback add(number, number) :: number
"""
defmacro defcallback(spec) do
quote do
Kernel.Typespec.defspec(:callback, unquote(Macro.escape spec), __ENV__)
end
end
## Helpers
@doc """
Defines a `type`, `typep` or `opaque` by receiving Erlang's typespec.
"""
def define_type(module, kind, { name, _, vars } = type) when kind in [:type, :typep, :opaque] do
{ kind, export } =
case kind do
:type -> { :type, true }
:typep -> { :type, false }
:opaque -> { :opaque, true }
end
Module.compile_typespec module, kind, type
if export, do:
Module.compile_typespec(module, :export_type, [{ name, length(vars) }])
type
end
@doc """
Defines a `spec` by receiving Erlang's typespec.
"""
def define_spec(module, tuple, definition) do
Module.compile_typespec module, :spec, { tuple, definition }
end
@doc """
Defines a `callback` by receiving Erlang's typespec.
"""
def define_callback(module, tuple, definition) do
Module.compile_typespec module, :callback, { tuple, definition }
end
@doc """
Returns `true` if the current module defines a given type
(private, opaque or not). This function is only available
for modules being compiled.
"""
def defines_type?(module, name, arity) do
finder = match?({ ^name, _, vars } when length(vars) == arity, &1)
:lists.any(finder, Module.get_attribute(module, :type)) or
:lists.any(finder, Module.get_attribute(module, :opaque))
end
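# For example (illustrative), inside a module that has already declared
# `@type t :: term`:
#
#     Kernel.Typespec.defines_type?(__MODULE__, :t, 0) #=> true
#
# The check only works while the module is still being compiled.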
@doc """
Returns `true` if the current module defines a given spec.
This function is only available for modules being compiled.
"""
def defines_spec?(module, name, arity) do
tuple = { name, arity }
:lists.any(match?(^tuple, &1), Module.get_attribute(module, :spec))
end
@doc """
Returns `true` if the current module defines a callback.
This function is only available for modules being compiled.
"""
def defines_callback?(module, name, arity) do
tuple = { name, arity }
:lists.any(match?(^tuple, &1), Module.get_attribute(module, :callback))
end
@doc """
Converts a spec clause back to Elixir AST.
"""
def spec_to_ast(name, { :type, line, :fun, [{:type, _, :product, args}, result] }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
{ :::, [line: line], [{ name, [line: line], args }, typespec_to_ast(result)] }
end
def spec_to_ast(name, { :type, line, :fun, [] }) do
{ :::, [line: line], [{ name, [line: line], [] }, quote(do: term)] }
end
def spec_to_ast(name, { :type, line, :bounded_fun, [{ :type, _, :fun, [{ :type, _, :product, args }, result] }, constraints] }) do
[h|t] =
lc {:type, line, :constraint, [{:atom, _, :is_subtype}, [var, type]]} inlist constraints do
{ :is_subtype, [line: line], [typespec_to_ast(var), typespec_to_ast(type)] }
end
args = lc arg inlist args, do: typespec_to_ast(arg)
guards = Enum.reduce t, h, fn(x, acc) -> { :and, line, [acc, x] } end
{ :::, [line: line], [{ :when, [line: line], [{ name, [line: line], args }, guards] }, typespec_to_ast(result)] }
end
@doc """
Converts a type clause back to Elixir AST.
"""
def type_to_ast({ { :record, record }, fields, args }) when is_atom(record) do
fields = lc field inlist fields, do: typespec_to_ast(field)
args = lc arg inlist args, do: typespec_to_ast(arg)
type = { :{}, [], [record|fields] }
quote do: unquote(record)(unquote_splicing(args)) :: unquote(type)
end
def type_to_ast({ name, type, args }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
quote do: unquote(name)(unquote_splicing(args)) :: unquote(typespec_to_ast(type))
end
@doc """
Returns all types available from the module's beam code.
It is returned as a list of tuples where the first
element is the type (`:typep`, `:type` and `:opaque`).
The module has to have a corresponding beam file on the disk which can be
located by the runtime system.
"""
def beam_types(module) do
case abstract_code(module) do
{ :ok, abstract_code } ->
exported_types = lc { :attribute, _, :export_type, types } inlist abstract_code, do: types
exported_types = List.flatten(exported_types)
lc { :attribute, _, kind, { name, _, args } = type } inlist abstract_code, kind in [:opaque, :type] do
cond do
kind == :opaque -> { :opaque, type }
:lists.member({ name, length(args) }, exported_types) -> { :type, type }
true -> { :typep, type }
end
end
_ ->
[]
end
end
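# For example, `Kernel.Typespec.beam_types(Enum)` returns `{ kind, type }`
# tuples read from Enum's beam file, and `[]` when the abstract code chunk
# cannot be located.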
@doc """
Returns all specs available from the module's beam code.
It is returned as a list of tuples where the first
element is spec name and arity and the second is the spec.
The module has to have a corresponding beam file on the disk which can be
located by the runtime system.
"""
def beam_specs(module) do
from_abstract_code(module, :spec)
end
@doc """
Returns all callbacks available from the module's beam code.
It is returned as a list of tuples where the first
element is spec name and arity and the second is the spec.
The module has to have a corresponding beam file on the disk which can be
located by the runtime system.
"""
def beam_callbacks(module) do
from_abstract_code(module, :callback)
end
defp from_abstract_code(module, kind) do
case abstract_code(module) do
{ :ok, abstract_code } ->
lc { :attribute, _, abs_kind, value } inlist abstract_code, kind == abs_kind, do: value
_ ->
[]
end
end
defp abstract_code(module) do
case :beam_lib.chunks(abstract_code_beam(module), [:abstract_code]) do
{:ok, { _, [{ :abstract_code, { _raw_abstract_v1, abstract_code } }] } } ->
{ :ok, abstract_code }
_ ->
[]
end
end
defp abstract_code_beam(module) when is_atom(module) do
case :code.get_object_code(module) do
{ ^module, beam, _filename } -> beam
:error -> module
end
end
defp abstract_code_beam(binary) when is_binary(binary) do
binary
end
## Macro callbacks
@doc false
def deftype(kind, { :::, _, [type, definition] }, caller) do
do_deftype(kind, type, definition, caller)
end
def deftype(kind, {name, _meta, args} = type, caller)
when is_atom(name) and not is_list(args) do
do_deftype(kind, type, { :term, [line: caller.line], nil }, caller)
end
def deftype(_kind, other, caller) do
type_spec = Macro.to_string(other)
compile_error caller, "invalid type specification #{type_spec}"
end
defp do_deftype(kind, { name, _, args }, definition, caller) do
args =
if is_atom(args) do
[]
else
lc(arg inlist args, do: variable(arg))
end
vars = lc { :var, _, var } inlist args, do: var
spec = typespec(definition, vars, caller)
vars = lc { :var, _, _ } = var inlist args, do: var
type = { name, spec, vars }
define_type(caller.module, kind, type)
end
@doc false
def defspec(type, {:::, _, [{ :when, _, [{ name, meta, args }, constraints_guard] }, return] }, caller) do
if is_atom(args), do: args = []
constraints = guard_to_constraints(constraints_guard, caller)
spec = { :type, line(meta), :fun, fn_args(meta, args, return, Keyword.keys(constraints), caller) }
spec = { :type, line(meta), :bounded_fun, [spec, Keyword.values(constraints)] }
code = { { name, Kernel.length(args) }, spec }
Module.compile_typespec(caller.module, type, code)
code
end
def defspec(type, {:::, _, [{ name, meta, args }, return]}, caller) do
if is_atom(args), do: args = []
spec = { :type, line(meta), :fun, fn_args(meta, args, return, [], caller) }
code = { { name, Kernel.length(args) }, spec }
Module.compile_typespec(caller.module, type, code)
code
end
def defspec(_type, other, caller) do
spec = Macro.to_string(other)
compile_error caller, "invalid function type specification #{spec}"
end
defp guard_to_constraints({ :is_subtype, meta, [{ name, _, _ }, type] }, caller) do
line = line(meta)
contraints = [{ :atom, line, :is_subtype }, [{:var, line, name}, typespec(type, [], caller)]]
[{ name, { :type, line, :constraint, contraints } }]
end
defp guard_to_constraints({ :and, _, [left, right] }, caller) do
guard_to_constraints(left, caller) ++ guard_to_constraints(right, caller)
end
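# Note: defspec/3 above accepts specs whose call carries a `when` guard made of
# is_subtype/2 calls joined with `and`; the two clauses here turn that guard
# into the :constraint entries of the resulting :bounded_fun form.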
## To AST conversion
defp typespec_to_ast({ :type, line, :tuple, :any }) do
typespec_to_ast({:type, line, :tuple, []})
end
defp typespec_to_ast({ :type, line, :tuple, args }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
{ :{}, [line: line], args }
end
defp typespec_to_ast({ :type, _line, :list, [arg] }) do
case unpack_typespec_kw(arg, []) do
{ :ok, ast } -> ast
:error -> [typespec_to_ast(arg)]
end
end
defp typespec_to_ast({ :type, _line, :list, args }) do
lc arg inlist args, do: typespec_to_ast(arg)
end
defp typespec_to_ast({ :type, line, :binary, [arg1, arg2] }) do
[arg1, arg2] = lc arg inlist [arg1, arg2], do: typespec_to_ast(arg)
cond do
arg2 == 0 ->
quote line: line, do: <<_ :: unquote(arg1)>>
arg1 == 0 ->
quote line: line, do: <<_ :: _ * unquote(arg2)>>
true ->
quote line: line, do: <<_ :: unquote(arg1) * unquote(arg2)>>
end
end
defp typespec_to_ast({ :type, line, :union, args }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
Enum.reduce tl(args), hd(args),
fn(arg, expr) -> { :|, [line: line], [expr, arg] } end
end
defp typespec_to_ast({ :type, line, :fun, [{:type, _, :product, args}, result] }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
{ :->, [line: line], [{args, [line: line], typespec_to_ast(result)}] }
end
defp typespec_to_ast({ :type, line, :fun, [args, result] }) do
{ :->, [line: line], [{[typespec_to_ast(args)], [line: line], typespec_to_ast(result)}] }
end
defp typespec_to_ast({ :type, line, :fun, [] }) do
typespec_to_ast({ :type, line, :fun, [{:type, line, :any}, {:type, line, :any, []} ] })
end
defp typespec_to_ast({ :type, line, :range, [left, right] }) do
{ :"..", [line: line], [typespec_to_ast(left), typespec_to_ast(right)] }
end
defp typespec_to_ast({ :type, line, name, args }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
{ name, [line: line], args }
end
defp typespec_to_ast({ :var, line, var }) do
var =
case atom_to_binary(var) do
<<"_", c :: [binary, size(1)], rest :: binary>> ->
binary_to_atom("_#{String.downcase(c)}#{rest}")
<<c :: [binary, size(1)], rest :: binary>> ->
binary_to_atom("#{String.downcase(c)}#{rest}")
end
{ var, line, nil }
end
# Special shortcut(s)
defp typespec_to_ast({ :remote_type, line, [{:atom, _, :elixir}, {:atom, _, :char_list}, []] }) do
typespec_to_ast({:type, line, :char_list, []})
end
defp typespec_to_ast({ :remote_type, line, [{:atom, _, :elixir}, {:atom, _, :as_boolean}, [arg]] }) do
typespec_to_ast({:type, line, :as_boolean, [arg]})
end
defp typespec_to_ast({ :remote_type, line, [mod, name, args] }) do
args = lc arg inlist args, do: typespec_to_ast(arg)
dot = { :., [line: line], [typespec_to_ast(mod), typespec_to_ast(name)] }
{ dot, [line: line], args }
end
defp typespec_to_ast({ :ann_type, line, [var, type] }) do
{ :::, [line: line], [typespec_to_ast(var), typespec_to_ast(type)] }
end
defp typespec_to_ast({ :typed_record_field,
{ :record_field, line, { :atom, line1, name }},
type }) do
typespec_to_ast({ :ann_type, line, [{ :var, line1, name }, type] })
end
defp typespec_to_ast({:type, _, :any}) do
quote do: ...
end
defp typespec_to_ast({:paren_type, _, [type]}) do
typespec_to_ast(type)
end
defp typespec_to_ast({ t, _line, atom }) when is_atom(t) do
atom
end
defp typespec_to_ast(other), do: other
## From AST conversion
defp line(meta) do
case :lists.keyfind(:line, 1, meta) do
{ :line, line } -> line
false -> 0
end
end
# Handle unions
defp typespec({ :|, meta, [_, _] } = exprs, vars, caller) do
exprs = Enum.reverse(collect_union(exprs))
union = lc e inlist exprs, do: typespec(e, vars, caller)
{ :type, line(meta), :union, union }
end
# Handle binaries
defp typespec({:<<>>, meta, []}, _, _) do
{:type, line(meta), :binary, [{:integer, line(meta), 0}, {:integer, line(meta), 0}]}
end
defp typespec({:<<>>, meta, [{:::, _, [{:_, meta1, atom}, {:*, _, [{:_, meta2, atom}, unit]}]}]}, _, _) when is_atom(atom) do
{:type, line(meta), :binary, [{:integer, line(meta1), 0}, {:integer, line(meta2), unit}]}
end
defp typespec({:<<>>, meta, [{:::, meta1, [{:_, meta2, atom}, base]}]}, _, _) when is_atom(atom) do
{:type, line(meta), :binary, [{:integer, line(meta1), base}, {:integer, line(meta2), 0}]}
end
# Handle ranges
defp typespec({:"..", meta, args}, vars, caller) do
typespec({:range, meta, args}, vars, caller)
end
# Handle special forms
defp typespec({:__MODULE__, _, atom}, vars, caller) when is_atom(atom) do
typespec(caller.module, vars, caller)
end
defp typespec({:__aliases__, _, _} = alias, vars, caller) do
atom = Macro.expand alias, caller
typespec(atom, vars, caller)
end
# Handle funs
defp typespec({:->, meta, [{[{:fun, _, arguments}], cmeta, return}]}, vars, caller) when is_list(arguments) do
typespec({:->, meta, [{arguments, cmeta, return}]}, vars, caller)
end
defp typespec({:->, meta, [{arguments, _, return}]}, vars, caller) when is_list(arguments) do
args = fn_args(meta, arguments, return, vars, caller)
{ :type, line(meta), :fun, args }
end
# Handle type operator
defp typespec({:"::", meta, [var, expr] }, vars, caller) do
left = typespec(var, [elem(var, 0)|vars], caller)
right = typespec(expr, vars, caller)
{ :ann_type, line(meta), [left, right] }
end
# Handle unary ops
defp typespec({op, meta, [integer]}, _, _) when op in [:+, :-] and is_integer(integer) do
{ :op, line(meta), op, {:integer, line(meta), integer} }
end
# Handle access macro
defp typespec({{:., meta, [Kernel, :access]}, meta1, [target, args]}, vars, caller) do
access = {{:., meta, [Kernel, :access]}, meta1,
[target, args ++ [_: { :any, [], [] }]]}
typespec(Macro.expand(access, caller), vars, caller)
end
# Handle remote calls
defp typespec({{:., meta, [remote, name]}, _, args} = orig, vars, caller) do
remote = Macro.expand remote, caller
unless is_atom(remote) do
compile_error(caller, "invalid remote in typespec: #{Macro.to_string(orig)}")
end
remote_type({typespec(remote, vars, caller), meta, typespec(name, vars, caller), args}, vars, caller)
end
# Handle tuples
defp typespec({:tuple, meta, atom}, vars, caller) when is_atom(atom) do
typespec({:{}, meta, []}, vars, caller)
end
defp typespec({:{}, meta, []}, _, _) do
{ :type, line(meta), :tuple, :any }
end
defp typespec({:{}, meta, t}, vars, caller) when is_list(t) do
args = lc e inlist t, do: typespec(e, vars, caller)
{ :type, line(meta), :tuple, args }
end
# Handle blocks
defp typespec({:__block__, _meta, [arg]}, vars, caller) do
typespec(arg, vars, caller)
end
# Handle variables or local calls
defp typespec({name, meta, atom}, vars, caller) when is_atom(atom) do
if :lists.member(name, vars) do
{ :var, line(meta), name }
else
typespec({name, meta, []}, vars, caller)
end
end
# Handle local calls
defp typespec({:string, meta, arguments}, vars, caller) do
IO.write "warning: string() type use is discouraged. For character lists, use " <>
"char_list() type, for strings, String.t()\n#{Exception.format_stacktrace(caller.stacktrace)}"
arguments = lc arg inlist arguments, do: typespec(arg, vars, caller)
{ :type, line(meta), :string, arguments }
end
defp typespec({:char_list, _meta, arguments}, vars, caller) do
typespec((quote do: :elixir.char_list(unquote_splicing(arguments))), vars, caller)
end
defp typespec({:as_boolean, _meta, arguments}, vars, caller) do
typespec((quote do: :elixir.as_boolean(unquote_splicing(arguments))), vars, caller)
end
defp typespec({name, meta, arguments}, vars, caller) do
arguments = lc arg inlist arguments, do: typespec(arg, vars, caller)
{ :type, line(meta), name, arguments }
end
# Handle literals
defp typespec(atom, _, _) when is_atom(atom) do
{ :atom, 0, atom }
end
defp typespec(integer, _, _) when is_integer(integer) do
{ :integer, 0, integer }
end
defp typespec([], vars, caller) do
typespec({ nil, [], [] }, vars, caller)
end
defp typespec([spec], vars, caller) do
typespec({ :list, [], [spec] }, vars, caller)
end
defp typespec([spec, {:"...", _, quoted}], vars, caller) when is_atom(quoted) do
typespec({ :nonempty_list, [], [spec] }, vars, caller)
end
defp typespec([h|t] = l, vars, caller) do
union = Enum.reduce(t, validate_kw(h, l, caller), fn(x, acc) ->
{ :|, [], [acc, validate_kw(x, l, caller)] }
end)
typespec({ :list, [], [union] }, vars, caller)
end
defp typespec(t, vars, caller) when is_tuple(t) do
args = lc e inlist tuple_to_list(t), do: typespec(e, vars, caller)
{ :type, 0, :tuple, args }
end
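# Note: the literal clauses above cover keyword and tuple shorthands, e.g.
# `[foo: integer]` becomes a list() whose element type is the `{ :foo, integer }`
# tuple, and a bare two-element tuple falls through to the final is_tuple clause.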
## Helpers
defp compile_error(caller, desc) do
raise CompileError, file: caller.file, line: caller.line, description: desc
end
defp remote_type({remote, meta, name, arguments}, vars, caller) do
arguments = lc arg inlist arguments, do: typespec(arg, vars, caller)
{ :remote_type, line(meta), [ remote, name, arguments ] }
end
defp collect_union({ :|, _, [a, b] }), do: [b|collect_union(a)]
defp collect_union(v), do: [v]
defp validate_kw({ key, _ } = t, _, _caller) when is_atom(key), do: t
defp validate_kw(_, original, caller) do
compile_error(caller, "unexpected list #{Macro.to_string original} in typespec")
end
defp fn_args(meta, args, return, vars, caller) do
case [fn_args(meta, args, vars, caller), typespec(return, vars, caller)] do
[{:type, _, :any}, {:type, _, :any, []}] -> []
x -> x
end
end
defp fn_args(meta, [{:"...", _, _}], _vars, _caller) do
{ :type, line(meta), :any }
end
defp fn_args(meta, args, vars, caller) do
args = lc arg inlist args, do: typespec(arg, vars, caller)
{ :type, line(meta), :product, args }
end
defp variable({name, meta, _}) do
{:var, line(meta), name}
end
defp unpack_typespec_kw({ :type, _, :union, [
next,
{ :type, _, :tuple, [{ :atom, _, atom }, type] }
] }, acc) do
unpack_typespec_kw(next, [{atom, typespec_to_ast(type)}|acc])
end
defp unpack_typespec_kw({ :type, _, :tuple, [{ :atom, _, atom }, type] }, acc) do
{ :ok, [{atom, typespec_to_ast(type)}|acc] }
end
defp unpack_typespec_kw(_, _acc) do
:error
end
end
# File: test/lib/services/github/comments_test.exs (repo: Librecov/librecov, license: MIT)
defmodule Librecov.Services.Github.CommentsTests do
use ExUnit.Case
import Tesla.Mock
alias Librecov.Services.Github.Comments
alias Librecov.Build
alias Librecov.Services.Github.AuthData
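# Fixture: a single pull request payload in the shape GitHub returns for
# GET /repos/{owner}/{repo}/pulls.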
@github_pulls [
%{
url: "https://api.github.com/repos/octocat/Hello-World/pulls/1347",
id: 1,
node_id: "MDExOlB1bGxSZXF1ZXN0MQ==",
html_url: "https://github.com/octocat/Hello-World/pull/1347",
diff_url: "https://github.com/octocat/Hello-World/pull/1347.diff",
patch_url: "https://github.com/octocat/Hello-World/pull/1347.patch",
issue_url: "https://api.github.com/repos/octocat/Hello-World/issues/1347",
commits_url: "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits",
review_comments_url: "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments",
review_comment_url:
"https://api.github.com/repos/octocat/Hello-World/pulls/comments%{/number}",
comments_url: "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments",
statuses_url:
"https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e",
number: 1347,
state: "open",
locked: true,
title: "Amazing new feature",
user: %{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
body: "Please pull these awesome changes in!",
labels: [
%{
id: 208_045_946,
node_id: "MDU6TGFiZWwyMDgwNDU5NDY=",
url: "https://api.github.com/repos/octocat/Hello-World/labels/bug",
name: "bug",
description: "Something isn't working",
color: "f29513",
default: true
}
],
milestone: %{
url: "https://api.github.com/repos/octocat/Hello-World/milestones/1",
html_url: "https://github.com/octocat/Hello-World/milestones/v1.0",
labels_url: "https://api.github.com/repos/octocat/Hello-World/milestones/1/labels",
id: 1_002_604,
node_id: "MDk6TWlsZXN0b25lMTAwMjYwNA==",
number: 1,
state: "open",
title: "v1.0",
description: "Tracking milestone for version 1.0",
creator: %{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
open_issues: 4,
closed_issues: 8,
created_at: "2011-04-10T20:09:31Z",
updated_at: "2014-03-03T18:58:10Z",
closed_at: "2013-02-12T13:22:01Z",
due_on: "2012-10-09T23:39:01Z"
},
active_lock_reason: "too heated",
created_at: "2011-01-26T19:01:12Z",
updated_at: "2011-01-26T19:01:12Z",
closed_at: "2011-01-26T19:01:12Z",
merged_at: "2011-01-26T19:01:12Z",
merge_commit_sha: "e5bd3914e2e596debea16f433f57875b5b90bcd6",
assignee: %{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
assignees: [
%{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
%{
login: "hubot",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/hubot_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/hubot",
html_url: "https://github.com/hubot",
followers_url: "https://api.github.com/users/hubot/followers",
following_url: "https://api.github.com/users/hubot/following%{/other_user}",
gists_url: "https://api.github.com/users/hubot/gists%{/gist_id}",
starred_url: "https://api.github.com/users/hubot/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/hubot/subscriptions",
organizations_url: "https://api.github.com/users/hubot/orgs",
repos_url: "https://api.github.com/users/hubot/repos",
events_url: "https://api.github.com/users/hubot/events%{/privacy}",
received_events_url: "https://api.github.com/users/hubot/received_events",
type: "User",
site_admin: true
}
],
requested_reviewers: [
%{
login: "other_user",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/other_user_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/other_user",
html_url: "https://github.com/other_user",
followers_url: "https://api.github.com/users/other_user/followers",
following_url: "https://api.github.com/users/other_user/following%{/other_user}",
gists_url: "https://api.github.com/users/other_user/gists%{/gist_id}",
starred_url: "https://api.github.com/users/other_user/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/other_user/subscriptions",
organizations_url: "https://api.github.com/users/other_user/orgs",
repos_url: "https://api.github.com/users/other_user/repos",
events_url: "https://api.github.com/users/other_user/events%{/privacy}",
received_events_url: "https://api.github.com/users/other_user/received_events",
type: "User",
site_admin: false
}
],
requested_teams: [
%{
id: 1,
node_id: "MDQ6VGVhbTE=",
url: "https://api.github.com/teams/1",
html_url: "https://github.com/orgs/github/teams/justice-league",
name: "Justice League",
slug: "justice-league",
description: "A great team.",
privacy: "closed",
permission: "admin",
members_url: "https://api.github.com/teams/1/members%{/member}",
repositories_url: "https://api.github.com/teams/1/repos",
parent: nil
}
],
head: %{
label: "octocat:new-topic",
ref: "new-topic",
sha: "6dcb09b5b57875f334f61aebed695e2e4193db5e",
user: %{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
repo: %{
id: 1_296_269,
node_id: "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
name: "Hello-World",
full_name: "octocat/Hello-World",
owner: %{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
private: false,
html_url: "https://github.com/octocat/Hello-World",
description: "This your first repo!",
fork: false,
url: "https://api.github.com/repos/octocat/Hello-World",
archive_url:
"https://api.github.com/repos/octocat/Hello-World/%{archive_format}%{/ref}",
assignees_url: "https://api.github.com/repos/octocat/Hello-World/assignees%{/user}",
blobs_url: "https://api.github.com/repos/octocat/Hello-World/git/blobs%{/sha}",
branches_url: "https://api.github.com/repos/octocat/Hello-World/branches%{/branch}",
collaborators_url:
"https://api.github.com/repos/octocat/Hello-World/collaborators%{/collaborator}",
comments_url: "https://api.github.com/repos/octocat/Hello-World/comments%{/number}",
commits_url: "https://api.github.com/repos/octocat/Hello-World/commits%{/sha}",
compare_url:
"https://api.github.com/repos/octocat/Hello-World/compare/%{base}...%{head}",
contents_url: "https://api.github.com/repos/octocat/Hello-World/contents/%{+path}",
contributors_url: "https://api.github.com/repos/octocat/Hello-World/contributors",
deployments_url: "https://api.github.com/repos/octocat/Hello-World/deployments",
downloads_url: "https://api.github.com/repos/octocat/Hello-World/downloads",
events_url: "https://api.github.com/repos/octocat/Hello-World/events",
forks_url: "https://api.github.com/repos/octocat/Hello-World/forks",
git_commits_url: "https://api.github.com/repos/octocat/Hello-World/git/commits%{/sha}",
git_refs_url: "https://api.github.com/repos/octocat/Hello-World/git/refs%{/sha}",
git_tags_url: "https://api.github.com/repos/octocat/Hello-World/git/tags%{/sha}",
git_url: "git:github.com/octocat/Hello-World.git",
issue_comment_url:
"https://api.github.com/repos/octocat/Hello-World/issues/comments%{/number}",
issue_events_url:
"https://api.github.com/repos/octocat/Hello-World/issues/events%{/number}",
issues_url: "https://api.github.com/repos/octocat/Hello-World/issues%{/number}",
keys_url: "https://api.github.com/repos/octocat/Hello-World/keys%{/key_id}",
labels_url: "https://api.github.com/repos/octocat/Hello-World/labels%{/name}",
languages_url: "https://api.github.com/repos/octocat/Hello-World/languages",
merges_url: "https://api.github.com/repos/octocat/Hello-World/merges",
milestones_url: "https://api.github.com/repos/octocat/Hello-World/milestones%{/number}",
notifications_url:
"https://api.github.com/repos/octocat/Hello-World/notifications%{?since,all,participating}",
pulls_url: "https://api.github.com/repos/octocat/Hello-World/pulls%{/number}",
releases_url: "https://api.github.com/repos/octocat/Hello-World/releases%{/id}",
ssh_url: "git@github.com:octocat/Hello-World.git",
stargazers_url: "https://api.github.com/repos/octocat/Hello-World/stargazers",
statuses_url: "https://api.github.com/repos/octocat/Hello-World/statuses/%{sha}",
subscribers_url: "https://api.github.com/repos/octocat/Hello-World/subscribers",
subscription_url: "https://api.github.com/repos/octocat/Hello-World/subscription",
tags_url: "https://api.github.com/repos/octocat/Hello-World/tags",
teams_url: "https://api.github.com/repos/octocat/Hello-World/teams",
trees_url: "https://api.github.com/repos/octocat/Hello-World/git/trees%{/sha}",
clone_url: "https://github.com/octocat/Hello-World.git",
mirror_url: "git:git.example.com/octocat/Hello-World",
hooks_url: "https://api.github.com/repos/octocat/Hello-World/hooks",
svn_url: "https://svn.github.com/octocat/Hello-World",
homepage: "https://github.com",
language: nil,
forks_count: 9,
stargazers_count: 80,
watchers_count: 80,
size: 108,
default_branch: "master",
open_issues_count: 0,
is_template: true,
topics: [
"octocat",
"atom",
"electron",
"api"
],
has_issues: true,
has_projects: true,
has_wiki: true,
has_pages: false,
has_downloads: true,
archived: false,
disabled: false,
visibility: "public",
pushed_at: "2011-01-26T19:06:43Z",
created_at: "2011-01-26T19:01:12Z",
updated_at: "2011-01-26T19:14:43Z",
permissions: %{
admin: false,
push: false,
pull: true
},
allow_rebase_merge: true,
template_repository: nil,
temp_clone_token: "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
allow_squash_merge: true,
allow_auto_merge: false,
delete_branch_on_merge: true,
allow_merge_commit: true,
subscribers_count: 42,
network_count: 0,
license: %{
key: "mit",
name: "MIT License",
url: "https://api.github.com/licenses/mit",
spdx_id: "MIT",
node_id: "MDc6TGljZW5zZW1pdA==",
html_url: "https://github.com/licenses/mit"
},
forks: 1,
open_issues: 1,
watchers: 1
}
},
base: %{
label: "octocat:master",
ref: "master",
sha: "6dcb09b5b57875f334f61aebed695e2e4193db5e",
user: %{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
repo: %{
id: 1_296_269,
node_id: "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
name: "Hello-World",
full_name: "octocat/Hello-World",
owner: %{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
private: false,
html_url: "https://github.com/octocat/Hello-World",
description: "This your first repo!",
fork: false,
url: "https://api.github.com/repos/octocat/Hello-World",
archive_url:
"https://api.github.com/repos/octocat/Hello-World/%{archive_format}%{/ref}",
assignees_url: "https://api.github.com/repos/octocat/Hello-World/assignees%{/user}",
blobs_url: "https://api.github.com/repos/octocat/Hello-World/git/blobs%{/sha}",
branches_url: "https://api.github.com/repos/octocat/Hello-World/branches%{/branch}",
collaborators_url:
"https://api.github.com/repos/octocat/Hello-World/collaborators%{/collaborator}",
comments_url: "https://api.github.com/repos/octocat/Hello-World/comments%{/number}",
commits_url: "https://api.github.com/repos/octocat/Hello-World/commits%{/sha}",
compare_url:
"https://api.github.com/repos/octocat/Hello-World/compare/%{base}...%{head}",
contents_url: "https://api.github.com/repos/octocat/Hello-World/contents/%{+path}",
contributors_url: "https://api.github.com/repos/octocat/Hello-World/contributors",
deployments_url: "https://api.github.com/repos/octocat/Hello-World/deployments",
downloads_url: "https://api.github.com/repos/octocat/Hello-World/downloads",
events_url: "https://api.github.com/repos/octocat/Hello-World/events",
forks_url: "https://api.github.com/repos/octocat/Hello-World/forks",
git_commits_url: "https://api.github.com/repos/octocat/Hello-World/git/commits%{/sha}",
git_refs_url: "https://api.github.com/repos/octocat/Hello-World/git/refs%{/sha}",
git_tags_url: "https://api.github.com/repos/octocat/Hello-World/git/tags%{/sha}",
git_url: "git:github.com/octocat/Hello-World.git",
issue_comment_url:
"https://api.github.com/repos/octocat/Hello-World/issues/comments%{/number}",
issue_events_url:
"https://api.github.com/repos/octocat/Hello-World/issues/events%{/number}",
issues_url: "https://api.github.com/repos/octocat/Hello-World/issues%{/number}",
keys_url: "https://api.github.com/repos/octocat/Hello-World/keys%{/key_id}",
labels_url: "https://api.github.com/repos/octocat/Hello-World/labels%{/name}",
languages_url: "https://api.github.com/repos/octocat/Hello-World/languages",
merges_url: "https://api.github.com/repos/octocat/Hello-World/merges",
milestones_url: "https://api.github.com/repos/octocat/Hello-World/milestones%{/number}",
notifications_url:
"https://api.github.com/repos/octocat/Hello-World/notifications%{?since,all,participating}",
pulls_url: "https://api.github.com/repos/octocat/Hello-World/pulls%{/number}",
releases_url: "https://api.github.com/repos/octocat/Hello-World/releases%{/id}",
ssh_url: "git@github.com:octocat/Hello-World.git",
stargazers_url: "https://api.github.com/repos/octocat/Hello-World/stargazers",
statuses_url: "https://api.github.com/repos/octocat/Hello-World/statuses/%{sha}",
subscribers_url: "https://api.github.com/repos/octocat/Hello-World/subscribers",
subscription_url: "https://api.github.com/repos/octocat/Hello-World/subscription",
tags_url: "https://api.github.com/repos/octocat/Hello-World/tags",
teams_url: "https://api.github.com/repos/octocat/Hello-World/teams",
trees_url: "https://api.github.com/repos/octocat/Hello-World/git/trees%{/sha}",
clone_url: "https://github.com/octocat/Hello-World.git",
mirror_url: "git:git.example.com/octocat/Hello-World",
hooks_url: "https://api.github.com/repos/octocat/Hello-World/hooks",
svn_url: "https://svn.github.com/octocat/Hello-World",
homepage: "https://github.com",
language: nil,
forks_count: 9,
stargazers_count: 80,
watchers_count: 80,
size: 108,
default_branch: "master",
open_issues_count: 0,
is_template: true,
topics: [
"octocat",
"atom",
"electron",
"api"
],
has_issues: true,
has_projects: true,
has_wiki: true,
has_pages: false,
has_downloads: true,
archived: false,
disabled: false,
visibility: "public",
pushed_at: "2011-01-26T19:06:43Z",
created_at: "2011-01-26T19:01:12Z",
updated_at: "2011-01-26T19:14:43Z",
permissions: %{
admin: false,
push: false,
pull: true
},
allow_rebase_merge: true,
template_repository: nil,
temp_clone_token: "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
allow_squash_merge: true,
allow_auto_merge: false,
delete_branch_on_merge: true,
allow_merge_commit: true,
subscribers_count: 42,
network_count: 0,
license: %{
key: "mit",
name: "MIT License",
url: "https://api.github.com/licenses/mit",
spdx_id: "MIT",
node_id: "MDc6TGljZW5zZW1pdA==",
html_url: "https://github.com/licenses/mit"
},
forks: 1,
open_issues: 1,
watchers: 1
}
},
_links: %{
self: %{
href: "https://api.github.com/repos/octocat/Hello-World/pulls/1347"
},
html: %{
href: "https://github.com/octocat/Hello-World/pull/1347"
},
issue: %{
href: "https://api.github.com/repos/octocat/Hello-World/issues/1347"
},
comments: %{
href: "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments"
},
review_comments: %{
href: "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments"
},
review_comment: %{
href: "https://api.github.com/repos/octocat/Hello-World/pulls/comments%{/number}"
},
commits: %{
href: "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits"
},
statuses: %{
href:
"https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e"
}
},
author_association: "OWNER",
auto_merge: nil,
draft: false
}
]
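# Fixture: the issue comment payload GitHub returns after
# POST /repos/{owner}/{repo}/issues/{issue_number}/comments.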
@comment_response %{
id: 1,
node_id: "MDEyOklzc3VlQ29tbWVudDE=",
url: "https://api.github.com/repos/octocat/Hello-World/issues/comments/1",
html_url: "https://github.com/octocat/Hello-World/issues/1347#issuecomment-1",
body: "Me too",
user: %{
login: "octocat",
id: 1,
node_id: "MDQ6VXNlcjE=",
avatar_url: "https://github.com/images/error/octocat_happy.gif",
gravatar_id: "",
url: "https://api.github.com/users/octocat",
html_url: "https://github.com/octocat",
followers_url: "https://api.github.com/users/octocat/followers",
following_url: "https://api.github.com/users/octocat/following%{/other_user}",
gists_url: "https://api.github.com/users/octocat/gists%{/gist_id}",
starred_url: "https://api.github.com/users/octocat/starred%{/owner}%{/repo}",
subscriptions_url: "https://api.github.com/users/octocat/subscriptions",
organizations_url: "https://api.github.com/users/octocat/orgs",
repos_url: "https://api.github.com/users/octocat/repos",
events_url: "https://api.github.com/users/octocat/events%{/privacy}",
received_events_url: "https://api.github.com/users/octocat/received_events",
type: "User",
site_admin: false
},
created_at: "2011-04-14T16:00:49Z",
updated_at: "2011-04-14T16:00:49Z",
issue_url: "https://api.github.com/repos/octocat/Hello-World/issues/1347",
author_association: "COLLABORATOR"
}
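# This setup stubs every GitHub endpoint the tests below touch: listing open
# pull requests for a branch, listing existing issue comments, and creating a
# new comment.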
setup do
mock(fn
%{
method: :get,
url: "https://api.github.com/repos/github/hello-world/pulls",
query: [state: "open", sort: "updated", head: "github:new-commit"]
} ->
json(@github_pulls, status: 200)
%{
method: :get,
url: "https://api.github.com/repos/github/hello-world/pulls",
query: [state: "open", sort: "updated", head: "github:invalid-branch"]
} ->
json([], status: 200)
%{
method: :get,
url: "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments",
query: [per_page: 100]
} ->
json([], status: 200)
%{
method: :post,
url: "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments"
} ->
json(@comment_response, status: 201)
end)
:ok
end
@base_auth %AuthData{
owner: "github",
repo: "hello-world",
token: "qwerqwer"
}
setup do
Application.put_env(:tesla, :adapter, Tesla.Mock)
end
test "it adds a pr comment when pr is found" do
{:ok, [comment]} =
Comments.add_pr_comment(
"Me too",
@base_auth,
%Build{branch: "new-commit"}
)
assert comment.body == "Me too"
assert Map.has_key?(comment, :id)
end
test "it adds nothing when pr is not found" do
{:error, :pr_not_found} =
Comments.add_pr_comment(
"Me too",
@base_auth,
%Build{branch: "invalid-branch"}
)
end
end
# File: test/test_helper.exs (repo: blueshift-labs/redis_cluster_multilock, license: MIT)
ExUnit.start()
Mox.defmock(RedisClusterMock, for: RedisClusterMultilock.Behaviours.RedisCluster)
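# Mox.defmock/2 defines RedisClusterMock, a mock module implementing the
# RedisClusterMultilock.Behaviours.RedisCluster behaviour so tests can set
# expectations on it with Mox.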
# File: config/config.exs (repo: marciorasf/next-level-week-5-Elixir, license: MIT)
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :inmana,
ecto_repos: [Inmana.Repo]
config :inmana, Inmana.Repo,
migration_primary_key: [type: :binary_id],
migration_foreign_key: [type: :binary_id]
config :inmana, Inmana.Mailer, adapter: Bamboo.LocalAdapter
# Configures the endpoint
config :inmana, InmanaWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "lYsGPuofDsq6ZIZBimCewh/GTM1J67CkcgUfYTRbLRYCXWHgFuupXUJfzEf1MD5o",
render_errors: [view: InmanaWeb.ErrorView, accepts: ~w(json), layout: false],
pubsub_server: Inmana.PubSub,
live_view: [signing_salt: "z2FEDEu7"]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
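# Editorial sketch: the import above loads an environment-specific file such as
# `config/dev.exs` (assumed to exist alongside this one), which may override any
# value set here, e.g.
#
#     use Mix.Config
#     config :logger, :console, format: "[$level] $message\n"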
# File: clients/compute/lib/google_api/compute/v1/model/snapshot.ex (repo: kyleVsteger/elixir-google-api, license: Apache-2.0)
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.Snapshot do
@moduledoc """
Represents a Persistent Disk Snapshot resource.
You can use snapshots to back up data on a regular interval. For more information, read Creating persistent disk snapshots. (== resource_for {$api_version}.snapshots ==)
## Attributes
* `autoCreated` (*type:* `boolean()`, *default:* `nil`) - [Output Only] Set to true if snapshots are automatically created by applying resource policy on the target disk.
* `chainName` (*type:* `String.t`, *default:* `nil`) - Creates the new snapshot in the snapshot chain labeled with the specified name. The chain name must be 1-63 characters long and comply with RFC1035. This is an uncommon option only for advanced service owners who needs to create separate snapshot chains, for example, for chargeback tracking. When you describe your snapshot resource, this field is visible only if it has a non-empty value.
* `creationTimestamp` (*type:* `String.t`, *default:* `nil`) - [Output Only] Creation timestamp in RFC3339 text format.
* `description` (*type:* `String.t`, *default:* `nil`) - An optional description of this resource. Provide this property when you create the resource.
* `diskSizeGb` (*type:* `String.t`, *default:* `nil`) - [Output Only] Size of the source disk, specified in GB.
* `downloadBytes` (*type:* `String.t`, *default:* `nil`) - [Output Only] Number of bytes downloaded to restore a snapshot to a disk.
* `id` (*type:* `String.t`, *default:* `nil`) - [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* `kind` (*type:* `String.t`, *default:* `compute#snapshot`) - [Output Only] Type of the resource. Always compute#snapshot for Snapshot resources.
* `labelFingerprint` (*type:* `String.t`, *default:* `nil`) - A fingerprint for the labels being applied to this snapshot, which is essentially a hash of the labels set used for optimistic locking. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels, otherwise the request will fail with error 412 conditionNotMet.
To see the latest fingerprint, make a get() request to retrieve a snapshot.
* `labels` (*type:* `map()`, *default:* `nil`) - Labels to apply to this snapshot. These can be later modified by the setLabels method. Label values may be empty.
* `licenseCodes` (*type:* `list(String.t)`, *default:* `nil`) - [Output Only] Integer license codes indicating which licenses are attached to this snapshot.
* `licenses` (*type:* `list(String.t)`, *default:* `nil`) - [Output Only] A list of public visible licenses that apply to this snapshot. This can be because the original image had licenses attached (such as a Windows image).
* `locationHint` (*type:* `String.t`, *default:* `nil`) - An opaque location hint used to place the snapshot close to other resources. This field is for use by internal tools that use the public API.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
* `satisfiesPzs` (*type:* `boolean()`, *default:* `nil`) - [Output Only] Reserved for future use.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output Only] Server-defined URL for the resource.
* `snapshotEncryptionKey` (*type:* `GoogleApi.Compute.V1.Model.CustomerEncryptionKey.t`, *default:* `nil`) - Encrypts the snapshot using a customer-supplied encryption key.
After you encrypt a snapshot using a customer-supplied key, you must provide the same key if you use the snapshot later. For example, you must provide the encryption key when you create a disk from the encrypted snapshot in a future request.
Customer-supplied encryption keys do not protect access to metadata of the snapshot.
If you do not provide an encryption key when creating the snapshot, then the snapshot will be encrypted using an automatically generated key and you do not need to provide a key to use the snapshot later.
* `sourceDisk` (*type:* `String.t`, *default:* `nil`) - The source disk used to create this snapshot.
* `sourceDiskEncryptionKey` (*type:* `GoogleApi.Compute.V1.Model.CustomerEncryptionKey.t`, *default:* `nil`) - The customer-supplied encryption key of the source disk. Required if the source disk is protected by a customer-supplied encryption key.
* `sourceDiskId` (*type:* `String.t`, *default:* `nil`) - [Output Only] The ID value of the disk used to create this snapshot. This value may be used to determine whether the snapshot was taken from the current or a previous instance of a given disk name.
* `status` (*type:* `String.t`, *default:* `nil`) - [Output Only] The status of the snapshot. This can be CREATING, DELETING, FAILED, READY, or UPLOADING.
* `storageBytes` (*type:* `String.t`, *default:* `nil`) - [Output Only] A size of the storage used by the snapshot. As snapshots share storage, this number is expected to change with snapshot creation/deletion.
* `storageBytesStatus` (*type:* `String.t`, *default:* `nil`) - [Output Only] An indicator whether storageBytes is in a stable state or it is being adjusted as a result of shared storage reallocation. This status can either be UPDATING, meaning the size of the snapshot is being updated, or UP_TO_DATE, meaning the size of the snapshot is up-to-date.
* `storageLocations` (*type:* `list(String.t)`, *default:* `nil`) - Cloud Storage bucket storage location of the snapshot (regional or multi-regional).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:autoCreated => boolean() | nil,
:chainName => String.t() | nil,
:creationTimestamp => String.t() | nil,
:description => String.t() | nil,
:diskSizeGb => String.t() | nil,
:downloadBytes => String.t() | nil,
:id => String.t() | nil,
:kind => String.t() | nil,
:labelFingerprint => String.t() | nil,
:labels => map() | nil,
:licenseCodes => list(String.t()) | nil,
:licenses => list(String.t()) | nil,
:locationHint => String.t() | nil,
:name => String.t() | nil,
:satisfiesPzs => boolean() | nil,
:selfLink => String.t() | nil,
:snapshotEncryptionKey => GoogleApi.Compute.V1.Model.CustomerEncryptionKey.t() | nil,
:sourceDisk => String.t() | nil,
:sourceDiskEncryptionKey => GoogleApi.Compute.V1.Model.CustomerEncryptionKey.t() | nil,
:sourceDiskId => String.t() | nil,
:status => String.t() | nil,
:storageBytes => String.t() | nil,
:storageBytesStatus => String.t() | nil,
:storageLocations => list(String.t()) | nil
}
field(:autoCreated)
field(:chainName)
field(:creationTimestamp)
field(:description)
field(:diskSizeGb)
field(:downloadBytes)
field(:id)
field(:kind)
field(:labelFingerprint)
field(:labels, type: :map)
field(:licenseCodes, type: :list)
field(:licenses, type: :list)
field(:locationHint)
field(:name)
field(:satisfiesPzs)
field(:selfLink)
field(:snapshotEncryptionKey, as: GoogleApi.Compute.V1.Model.CustomerEncryptionKey)
field(:sourceDisk)
field(:sourceDiskEncryptionKey, as: GoogleApi.Compute.V1.Model.CustomerEncryptionKey)
field(:sourceDiskId)
field(:status)
field(:storageBytes)
field(:storageBytesStatus)
field(:storageLocations, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.Snapshot do
def decode(value, options) do
GoogleApi.Compute.V1.Model.Snapshot.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.Snapshot do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 71.206349 | 490 | 0.70564 |
f70b7531cd0ca77b735bedd35a1451586c4731bd | 3,878 | exs | Elixir | lib/elixir/test/elixir/version_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/version_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/version_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null |
Code.require_file "test_helper.exs", __DIR__
defmodule VersionTest do
use ExUnit.Case, async: true
alias Version.Parser, as: P
alias Version, as: V
test "lexes specifications properly" do
assert P.lexer("== != > >= < <= ~>", []) == [:'==', :'!=', :'>', :'>=', :'<', :'<=', :'~>']
assert P.lexer("2.3", []) == [:'==', "2.3"]
assert P.lexer("!2.3", []) == [:'!=', "2.3"]
assert P.lexer(">>=", []) == [:'>', :'>=']
assert P.lexer(">2.4", []) == [:'>', "2.4"]
assert P.lexer(" > 2.4", []) == [:'>', "2.4"]
end
test "lexer gets verified properly" do
assert P.valid_requirement?(P.lexer("2.3", []))
refute P.valid_requirement?(P.lexer("> >= 2.3", []))
refute P.valid_requirement?(P.lexer("> 2.3 and", []))
refute P.valid_requirement?(P.lexer("> 2.3 or and 4.3", []))
assert P.valid_requirement?(P.lexer("> 2.4 and 4.5", []))
refute P.valid_requirement?(P.lexer("& 1.0.0", []))
end
test :parse do
assert V.Schema[major: 1, minor: 0, patch: 0] = V.parse("1")
assert V.Schema[major: 1, minor: 2, patch: 0] = V.parse("1.2")
assert V.Schema[major: 1, minor: 2, patch: 3] = V.parse("1.2.3")
assert V.Schema[major: 1, minor: 4, patch: 0, pre: "5-g3318bd5"] = V.parse("1.4-5-g3318bd5")
end
test :== do
assert V.match?("2.3", "2.3")
refute V.match?("2.4", "2.3")
assert V.match?("2.3", "== 2.3")
refute V.match?("2.4", "== 2.3")
assert V.match?("1.0.0", "1.0.0")
assert V.match?("1.0.0", "1.0")
assert V.match?("1.2.3-alpha", "1.2.3-alpha")
assert V.match?("iliketrains", "iliketrains")
assert V.match?("1.2.3.4", "1.2.3.4")
assert V.match?("0.9.3", "== 0.9.3+dev")
end
test :!= do
assert V.match?("2.4", "!2.3")
refute V.match?("2.3", "!2.3")
assert V.match?("2.4", "!= 2.3")
refute V.match?("2.3", "!= 2.3")
end
test :> do
assert V.match?("2.4", "> 2.3")
refute V.match?("2.2", "> 2.3")
refute V.match?("2.3", "> 2.3")
assert V.match?("1.2.3", "> 1.2.3-alpha")
assert V.match?("1.2.3-alpha.1", "> 1.2.3-alpha")
assert V.match?("1.2.3-alpha.beta.sigma", "> 1.2.3-alpha.beta")
refute V.match?("1.2.3-alpha.10", "< 1.2.3-alpha.1")
refute V.match?("0.10.2-dev", "> 0.10.2")
end
test :>= do
assert V.match?("2.4", ">= 2.3")
refute V.match?("2.2", ">= 2.3")
assert V.match?("2.3", ">= 2.3")
assert V.match?("2.0", ">= 1.0")
assert V.match?("1.0.0", ">= 1.0")
end
test :< do
assert V.match?("2.2", "< 2.3")
refute V.match?("2.4", "< 2.3")
refute V.match?("2.3", "< 2.3")
assert V.match?("0.10.2-dev", "< 0.10.2")
end
test :<= do
assert V.match?("2.2", "<= 2.3")
refute V.match?("2.4", "<= 2.3")
assert V.match?("2.3", "<= 2.3")
end
test :'~>' do
assert V.match?("3.0", "~> 3.0")
assert V.match?("3.2", "~> 3.0")
refute V.match?("4.0", "~> 3.0")
refute V.match?("4.4", "~> 3.0")
assert V.match?("3.0.2", "~> 3.0.0")
assert V.match?("3.0.0", "~> 3.0.0")
refute V.match?("3.1", "~> 3.0.0")
refute V.match?("3.4", "~> 3.0.0")
assert V.match?("3.6", "~> 3.5")
assert V.match?("3.5", "~> 3.5")
refute V.match?("4.0", "~> 3.5")
refute V.match?("5.0", "~> 3.5")
assert V.match?("3.5.2", "~> 3.5.0")
assert V.match?("3.5.4", "~> 3.5.0")
refute V.match?("3.6", "~> 3.5.0")
refute V.match?("3.6.3", "~> 3.5.0")
assert V.match?("0.9.3", "~> 0.9.3-dev")
refute V.match?("0.10.0", "~> 0.9.3-dev")
refute V.match?("0.3.0-dev", "~> 0.2.0")
end
test :and do
assert V.match?("0.9.3", "> 0.9 and < 0.10")
refute V.match?("0.10.2", "> 0.9 and < 0.10")
end
test :or do
assert V.match?("0.9.1", "0.9.1 or 0.9.3 or 0.9.5")
assert V.match?("0.9.3", "0.9.1 or 0.9.3 or 0.9.5")
assert V.match?("0.9.5", "0.9.1 or 0.9.3 or 0.9.5")
end
end
| 29.378788 | 96 | 0.486849 |
f70b79743d13aa3f360c93bf020ba4529c78ac6a | 5,494 | exs | Elixir | lib/mix/test/mix/tasks/new_test.exs | Gazler/elixir | e934e3c92edbc9c83da7795ec3a028ff86218c4b | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/new_test.exs | Gazler/elixir | e934e3c92edbc9c83da7795ec3a028ff86218c4b | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/new_test.exs | Gazler/elixir | e934e3c92edbc9c83da7795ec3a028ff86218c4b | [
"Apache-2.0"
] | null | null | null |
Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.NewTest do
use MixTest.Case
test "new" do
in_tmp "new", fn ->
Mix.Tasks.New.run ["hello_world", "--bare"]
assert_file "hello_world/mix.exs", fn(file) ->
assert file =~ "app: :hello_world"
assert file =~ "version: \"0.0.1\""
end
assert_file "hello_world/README.md", ~r/HelloWorld\n==========\n\n/
assert_file "hello_world/.gitignore"
assert_file "hello_world/lib/hello_world.ex", ~r/defmodule HelloWorld do/
assert_file "hello_world/test/test_helper.exs", ~r/ExUnit.start()/
assert_file "hello_world/test/hello_world_test.exs", ~r/defmodule HelloWorldTest do/
assert_received {:mix_shell, :info, ["* creating mix.exs"]}
assert_received {:mix_shell, :info, ["* creating lib/hello_world.ex"]}
end
end
test "new with --sup" do
in_tmp "new sup", fn ->
Mix.Tasks.New.run ["hello_world", "--sup"]
assert_file "hello_world/mix.exs", fn(file) ->
assert file =~ "app: :hello_world"
assert file =~ "version: \"0.0.1\""
assert file =~ "mod: {HelloWorld, []}"
end
assert_file "hello_world/README.md", ~r/HelloWorld\n==========\n\n/
assert_file "hello_world/.gitignore"
assert_file "hello_world/lib/hello_world.ex", fn(file) ->
assert file =~ "defmodule HelloWorld do"
assert file =~ "use Application"
assert file =~ "Supervisor.start_link(children, opts)"
end
assert_file "hello_world/test/test_helper.exs", ~r/ExUnit.start()/
assert_file "hello_world/test/hello_world_test.exs", ~r/defmodule HelloWorldTest do/
assert_received {:mix_shell, :info, ["* creating mix.exs"]}
assert_received {:mix_shell, :info, ["* creating lib/hello_world.ex"]}
end
end
test "new with --app" do
in_tmp "new app", fn ->
Mix.Tasks.New.run ["HELLO_WORLD", "--app", "hello_world"]
assert_file "HELLO_WORLD/mix.exs", fn(file) ->
assert file =~ "app: :hello_world"
assert file =~ "version: \"0.0.1\""
end
assert_file "HELLO_WORLD/README.md", ~r/HelloWorld\n==========\n\n/
assert_file "HELLO_WORLD/.gitignore"
assert_file "HELLO_WORLD/lib/hello_world.ex", ~r/defmodule HelloWorld do/
assert_file "HELLO_WORLD/test/test_helper.exs", ~r/ExUnit.start()/
assert_file "HELLO_WORLD/test/hello_world_test.exs", ~r/defmodule HelloWorldTest do/
assert_received {:mix_shell, :info, ["* creating mix.exs"]}
assert_received {:mix_shell, :info, ["* creating lib/hello_world.ex"]}
end
end
test "new with --umbrella" do
in_tmp "new umbrella", fn ->
Mix.Tasks.New.run ["hello_world", "--umbrella"]
assert_file "hello_world/mix.exs", fn(file) ->
assert file =~ "apps_path: \"apps\""
end
assert_file "hello_world/README.md", ~r/HelloWorld\n==========\n\n/
assert_file "hello_world/.gitignore"
assert_received {:mix_shell, :info, ["* creating mix.exs"]}
end
end
test "new inside umbrella" do
in_fixture "umbrella_dep/deps/umbrella", fn ->
File.cd! "apps", fn ->
Mix.Tasks.New.run ["hello_world"]
assert_file "hello_world/mix.exs", fn(file) ->
assert file =~ "deps_path: \"../../deps\""
assert file =~ "lockfile: \"../../mix.lock\""
end
end
end
end
test "new with dot" do
in_tmp "new_with_dot", fn ->
Mix.Tasks.New.run ["."]
assert_file "lib/new_with_dot.ex", ~r/defmodule NewWithDot do/
end
end
test "new with invalid args" do
in_tmp "new with an invalid application name", fn ->
assert_raise Mix.Error, ~r"Application name must start with a letter and ", fn ->
Mix.Tasks.New.run ["007invalid"]
end
end
in_tmp "new with an invalid application name from the app option", fn ->
assert_raise Mix.Error, ~r"Application name must start with a letter and ", fn ->
Mix.Tasks.New.run ["valid", "--app", "007invalid"]
end
end
in_tmp "new with an invalid module name from the module options", fn ->
assert_raise Mix.Error, ~r"Module name must be a valid Elixir alias", fn ->
Mix.Tasks.New.run ["valid", "--module", "not.valid"]
end
end
in_tmp "new with an already taken application name", fn ->
assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
Mix.Tasks.New.run ["mix"]
end
end
in_tmp "new with an already taken application name from the app option", fn ->
assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
Mix.Tasks.New.run ["valid", "--app", "mix"]
end
end
in_tmp "new with an already taken module name from the module options", fn ->
assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
Mix.Tasks.New.run ["valid", "--module", "Mix"]
end
end
in_tmp "new without a specified path", fn ->
assert_raise Mix.Error, "Expected PATH to be given, please use `mix new PATH`", fn ->
Mix.Tasks.New.run []
end
end
end
defp assert_file(file) do
assert File.regular?(file), "Expected #{file} to exist, but does not"
end
defp assert_file(file, match) do
cond do
Regex.regex?(match) ->
assert_file file, &(assert &1 =~ match)
is_function(match, 1) ->
assert_file(file)
match.(File.read!(file))
end
end
end
| 32.317647 | 91 | 0.619585 |
f70bc6cca800493cac1a342c50b1749f04b44883 | 1,115 | ex | Elixir | test/support/channel_case.ex | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null |
defmodule MbankingWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use MbankingWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
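
  A minimal channel test sketch (the socket, channel, and topic names below are
  hypothetical and must match modules defined in this application; the exact
  `socket/3` arity also depends on the Phoenix version in use):

      defmodule MbankingWeb.DemoChannelTest do
        use MbankingWeb.ChannelCase

        test "ping is acknowledged" do
          {:ok, _reply, socket} =
            MbankingWeb.UserSocket
            |> socket("user_id", %{})
            |> subscribe_and_join(MbankingWeb.DemoChannel, "demo:lobby")

          ref = push(socket, "ping", %{})
          assert_reply ref, :ok
        end
      end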
"""
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import MbankingWeb.ChannelCase
# The default endpoint for testing
@endpoint MbankingWeb.Endpoint
end
end
setup tags do
:ok = Sandbox.checkout(Mbanking.Repo)
unless tags[:async] do
Sandbox.mode(Mbanking.Repo, {:shared, self()})
end
:ok
end
end
| 26.547619 | 65 | 0.727354 |
f70bd0d9b0718e3cb653929d8e70c32bd920eb6d | 317 | ex | Elixir | lib/hl7/2.5/segments/css.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/segments/css.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5/segments/css.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null |
defmodule HL7.V2_5.Segments.CSS do
@moduledoc false
require Logger
alias HL7.V2_5.{DataTypes}
use HL7.Segment,
fields: [
segment: nil,
study_scheduled_time_point: DataTypes.Ce,
study_scheduled_patient_time_point: DataTypes.Ts,
study_quality_control_codes: DataTypes.Ce
]
end
| 21.133333 | 55 | 0.722397 |
f70be8d60f2ff86e250040f49459cd12667ee405 | 7,228 | exs | Elixir | test/rolodex/route_test.exs | hauleth/rolodex | 405749d2e845a2c4259b12ebc266680039aa1cef | [
"MIT"
] | null | null | null | test/rolodex/route_test.exs | hauleth/rolodex | 405749d2e845a2c4259b12ebc266680039aa1cef | [
"MIT"
] | null | null | null | test/rolodex/route_test.exs | hauleth/rolodex | 405749d2e845a2c4259b12ebc266680039aa1cef | [
"MIT"
] | null | null | null |
defmodule Rolodex.RouteTest do
use ExUnit.Case
alias Phoenix.Router
alias Rolodex.Mocks.{TestController, TestRouter, User}
alias Rolodex.{Config, Route}
describe "#matches_filter?/2" do
setup [:setup_config]
test "Always returns false when no filters provided", %{config: config} do
routes =
TestRouter.__routes__()
|> Enum.map(&Route.new(&1, config))
assert routes |> Enum.at(0) |> Route.matches_filter?(config) == false
assert routes |> Enum.at(1) |> Route.matches_filter?(config) == false
end
test "Returns true when for a route that matches a filter map", %{config: config} do
config = %Config{config | filters: [%{path: "/api/demo", verb: :get}]}
routes =
TestRouter.__routes__()
|> Enum.map(&Route.new(&1, config))
assert routes |> Enum.at(0) |> Route.matches_filter?(config) == true
assert routes |> Enum.at(1) |> Route.matches_filter?(config) == false
end
test "Returns true for a route that matches a filter function", %{config: config} do
config = %Config{
config
| filters: [
fn
%Route{path: "/api/demo/:id", verb: :post} ->
true
_ ->
false
end
]
}
routes =
TestRouter.__routes__()
|> Enum.map(&Route.new(&1, config))
assert routes |> Enum.at(0) |> Route.matches_filter?(config) == false
assert routes |> Enum.at(1) |> Route.matches_filter?(config) == true
end
end
describe "#new/2" do
setup [:setup_config]
test "It builds a new Rolodex.Route for the specified controller action", %{config: config} do
phoenix_route = %Router.Route{
plug: TestController,
opts: :index,
path: "/v2/test",
pipe_through: [],
verb: :get
}
result = Route.new(phoenix_route, config)
assert result == %Route{
desc: "It's a test!",
headers: %{
"X-Request-Id" => %{type: :uuid, required: true}
},
body: %{
type: :object,
properties: %{
id: %{type: :uuid},
name: %{type: :string, desc: "The name"}
}
},
query_params: %{
id: %{
type: :string,
maximum: 10,
minimum: 0,
required: false,
default: 2
},
update: %{type: :boolean}
},
path_params: %{
account_id: %{type: :uuid}
},
responses: %{
200 => %{type: :ref, ref: User},
201 => %{
type: :list,
of: [%{type: :ref, ref: User}]
},
404 => %{
type: :object,
properties: %{
status: %{type: :integer},
message: %{type: :string}
}
}
},
metadata: %{public: true},
tags: ["foo", "bar"],
path: "/v2/test",
pipe_through: [],
verb: :get
}
end
test "It merges controller action params into pipeline params", %{config: config} do
phoenix_route = %Router.Route{
plug: TestController,
opts: :index,
path: "/v2/test",
pipe_through: [:web],
verb: :get
}
result = Route.new(phoenix_route, config)
assert result == %Route{
desc: "It's a test!",
headers: %{
"X-Request-Id" => %{type: :uuid, required: true}
},
body: %{
type: :object,
properties: %{
id: %{type: :uuid},
name: %{type: :string, desc: "The name"},
foo: %{type: :string}
}
},
query_params: %{
id: %{
type: :string,
maximum: 10,
minimum: 0,
required: false,
default: 2
},
update: %{type: :boolean},
foo: %{type: :string},
bar: %{type: :boolean}
},
path_params: %{
account_id: %{type: :uuid}
},
responses: %{
200 => %{type: :ref, ref: User},
201 => %{
type: :list,
of: [%{type: :ref, ref: User}]
},
404 => %{
type: :object,
properties: %{
status: %{type: :integer},
message: %{type: :string}
}
}
},
metadata: %{public: true},
tags: ["foo", "bar"],
path: "/v2/test",
pipe_through: [:web],
verb: :get
}
end
test "Controller action params will win if in conflict with pipeline params", %{
config: config
} do
phoenix_route = %Router.Route{
plug: TestController,
opts: :conflicted,
path: "/v2/test",
pipe_through: [:api],
verb: :get
}
%Route{headers: headers} = Route.new(phoenix_route, config)
assert headers == %{"X-Request-Id" => %{type: :string, required: true}}
end
test "It handles an undocumented route" do
phoenix_route = %Router.Route{
plug: TestController,
opts: :undocumented,
path: "/v2/test",
pipe_through: [],
verb: :post
}
assert Route.new(phoenix_route, Config.new()) == %Route{
desc: "",
headers: %{},
body: %{},
query_params: %{},
responses: %{},
metadata: %{},
tags: [],
path: "/v2/test",
pipe_through: [],
verb: :post
}
end
test "It handles a missing controller action" do
phoenix_route = %Router.Route{
plug: TestController,
opts: :does_not_exist,
path: "/v2/test",
pipe_through: [],
verb: :post
}
assert Route.new(phoenix_route, Config.new()) == nil
end
end
def setup_config(_) do
config =
Config.new(%{
pipelines: %{
api: %{
headers: %{"X-Request-Id" => %{type: :uuid, required: true}},
query_params: %{foo: :string}
},
web: %{
body: %{
type: :object,
properties: %{foo: :string}
},
headers: %{"X-Request-Id" => %{type: :uuid, required: true}},
query_params: %{foo: :string, bar: :boolean}
},
socket: %{
headers: %{bar: :baz}
}
}
})
[config: config]
end
end
| 28.56917 | 98 | 0.425844 |
f70bf23fbf186e42c770acd3e7b95407d91fa61c | 2,321 | ex | Elixir | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/email_link_signin_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/email_link_signin_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/email_link_signin_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.IdentityToolkit.V3.Model.EmailLinkSigninResponse do
@moduledoc """
Response of email signIn.
## Attributes
- email (String.t): The user's email. Defaults to: `null`.
- expiresIn (String.t): Expiration time of STS id token in seconds. Defaults to: `null`.
- idToken (String.t): The STS id token to login the newly signed in user. Defaults to: `null`.
- isNewUser (boolean()): Whether the user is new. Defaults to: `null`.
- kind (String.t): The fixed string \"identitytoolkit#EmailLinkSigninResponse\". Defaults to: `null`.
- localId (String.t): The RP local ID of the user. Defaults to: `null`.
- refreshToken (String.t): The refresh token for the signed in user. Defaults to: `null`.
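
  ## Example

  A short sketch of reading fields from a decoded response (values are hypothetical;
  assumes Poison, which the decoder implementation below relies on):

      response =
        Poison.decode!(
          ~s({"email": "user@example.com", "isNewUser": false}),
          as: %GoogleApi.IdentityToolkit.V3.Model.EmailLinkSigninResponse{}
        )

      response.email      #=> "user@example.com"
      response.isNewUser  #=> false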
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:email => any(),
:expiresIn => any(),
:idToken => any(),
:isNewUser => any(),
:kind => any(),
:localId => any(),
:refreshToken => any()
}
field(:email)
field(:expiresIn)
field(:idToken)
field(:isNewUser)
field(:kind)
field(:localId)
field(:refreshToken)
end
defimpl Poison.Decoder, for: GoogleApi.IdentityToolkit.V3.Model.EmailLinkSigninResponse do
def decode(value, options) do
GoogleApi.IdentityToolkit.V3.Model.EmailLinkSigninResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.IdentityToolkit.V3.Model.EmailLinkSigninResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.166667 | 113 | 0.706161 |
f70c27e830ab98f59a54cefd79b7eb7a0df006f3 | 1,039 | ex | Elixir | apps/re/lib/developments/job_queue.ex | ruby2elixir/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | 4 | 2019-11-01T16:29:31.000Z | 2020-10-10T21:20:12.000Z | apps/re/lib/developments/job_queue.ex | eduardomartines/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | null | null | null | apps/re/lib/developments/job_queue.ex | eduardomartines/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | [
"MIT"
] | 5 | 2019-11-04T21:25:45.000Z | 2020-02-13T23:49:36.000Z | defmodule Re.Developments.JobQueue do
@moduledoc """
  Module for processing jobs related to the developments domain.
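
  Jobs are enqueued by inserting a row built with the `new/1` function that
  `EctoJob.JobQueue` generates for this module. A sketch (the params mirror the
  clauses of `perform/2` below; `unit_uuid` is a placeholder):

      %{"type" => "mirror_new_unit_to_listing", "uuid" => unit_uuid}
      |> Re.Developments.JobQueue.new()
      |> Re.Repo.insert()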
"""
use EctoJob.JobQueue, table_name: "units_jobs"
alias Re.{
Developments.Mirror,
Repo
}
alias Ecto.Multi
def perform(%Multi{} = multi, %{"type" => "mirror_new_unit_to_listing", "uuid" => uuid}) do
multi
|> Multi.run(:mirror_unit, fn _repo, _changes ->
Mirror.mirror_unit_insert_to_listing(uuid)
end)
|> Repo.transaction()
end
def perform(%Multi{} = multi, %{"type" => "mirror_update_unit_to_listing", "uuid" => uuid}) do
multi
|> Multi.run(:mirror_unit, fn _repo, _changes ->
Mirror.mirror_unit_update_to_listing(uuid)
end)
|> Repo.transaction()
end
def perform(%Multi{} = multi, %{
"type" => "mirror_update_development_to_listings",
"uuid" => uuid
}) do
multi
|> Multi.run(:mirror_unit, fn _repo, _changes ->
Mirror.mirror_development_update_to_listings(uuid)
end)
|> Repo.transaction()
end
end
| 25.341463 | 96 | 0.651588 |
f70c6c967c2e36ac2923f0836106538792f773e7 | 2,515 | ex | Elixir | lib/mint/http2/hpack/huffman.ex | vrcca/mint | ba347326a7e507bdadb8a00ec1f3bd24f2817ade | [
"Apache-2.0"
] | null | null | null | lib/mint/http2/hpack/huffman.ex | vrcca/mint | ba347326a7e507bdadb8a00ec1f3bd24f2817ade | [
"Apache-2.0"
] | null | null | null | lib/mint/http2/hpack/huffman.ex | vrcca/mint | ba347326a7e507bdadb8a00ec1f3bd24f2817ade | [
"Apache-2.0"
] | null | null | null |
defmodule Mint.HTTP2.HPACK.Huffman do
@moduledoc false
use Bitwise
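  # Illustrative round trip (a sketch only; this is a private module, so the calls
  # below are not part of Mint's public API):
  #
  #     "www.example.com"
  #     |> Mint.HTTP2.HPACK.Huffman.encode()
  #     |> Mint.HTTP2.HPACK.Huffman.decode()
  #     #=> "www.example.com"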
# This file is downloaded from the spec directly.
# http://httpwg.org/specs/rfc7541.html#huffman.code
table_file = Path.absname("huffman_table", __DIR__)
@external_resource table_file
entries =
Enum.map(File.stream!(table_file), fn line ->
[byte_value, bits, _hex, bit_count] =
line
|> case do
<<?', _, ?', ?\s, rest::binary>> -> rest
"EOS " <> rest -> rest
_other -> line
end
|> String.replace(["|", "(", ")", "[", "]"], "")
|> String.split()
byte_value = String.to_integer(byte_value)
bits = String.to_integer(bits, 2)
bit_count = String.to_integer(bit_count)
{byte_value, bits, bit_count}
end)
{regular_entries, [eos_entry]} = Enum.split(entries, -1)
{_eos_byte_value, eos_bits, eos_bit_count} = eos_entry
## Encoding
@spec encode(binary()) :: binary()
def encode(binary) do
encode(binary, _acc = <<>>)
end
for {byte_value, bits, bit_count} <- regular_entries do
defp encode(<<unquote(byte_value), rest::binary>>, acc) do
encode(rest, <<acc::bitstring, unquote(bits)::size(unquote(bit_count))>>)
end
end
defp encode(<<>>, acc) do
overflowing_bits = rem(bit_size(acc), 8)
if overflowing_bits == 0 do
acc
else
bits_to_add = 8 - overflowing_bits
value_of_bits_to_add =
take_significant_bits(unquote(eos_bits), unquote(eos_bit_count), bits_to_add)
<<acc::bitstring, value_of_bits_to_add::size(bits_to_add)>>
end
end
## Decoding
@spec decode(binary()) :: binary()
def decode(binary)
for {byte_value, bits, bit_count} <- regular_entries do
def decode(<<unquote(bits)::size(unquote(bit_count)), rest::bitstring>>) do
<<unquote(byte_value), decode(rest)::binary>>
end
end
def decode(<<>>) do
<<>>
end
# Use binary syntax for single match context optimization.
def decode(<<padding::bitstring>>) when bit_size(padding) in 1..7 do
padding_size = bit_size(padding)
<<padding::size(padding_size)>> = padding
if take_significant_bits(unquote(eos_bits), unquote(eos_bit_count), padding_size) == padding do
<<>>
else
throw({:mint, {:protocol_error, :invalid_huffman_encoding}})
end
end
## Helpers
@compile {:inline, take_significant_bits: 3}
defp take_significant_bits(value, bit_count, bits_to_take) do
value >>> (bit_count - bits_to_take)
end
end
| 26.473684 | 99 | 0.642942 |
f70c6fcd248270754b9f0361af71da4d05dd3258 | 596 | exs | Elixir | test/graph_test.exs | Lakret/aoc2018 | a09bfe7b9bc5faaada4a411d4d666f0626cd6562 | [
"MIT"
] | 1 | 2021-04-21T16:16:59.000Z | 2021-04-21T16:16:59.000Z | test/graph_test.exs | Lakret/aoc2018 | a09bfe7b9bc5faaada4a411d4d666f0626cd6562 | [
"MIT"
] | null | null | null | test/graph_test.exs | Lakret/aoc2018 | a09bfe7b9bc5faaada4a411d4d666f0626cd6562 | [
"MIT"
] | null | null | null |
defmodule GraphTest do
use ExUnit.Case
test "Graph.topological_sort/2 works" do
deps = [
{"C", "A"},
{"C", "F"},
{"A", "B"},
{"A", "D"},
{"B", "E"},
{"D", "E"},
{"F", "E"}
]
graph =
Enum.reduce(deps, Graph.new(), fn {from, to}, graph ->
Graph.add_vertex(graph, to, edge_from: from)
end)
topological_sort_result =
Graph.topological_sort(graph, fn no_deps_vertices ->
no_deps_vertices |> Enum.sort() |> hd()
end)
assert topological_sort_result == ["C", "A", "B", "D", "F", "E"]
end
end
| 21.285714 | 68 | 0.501678 |
f70c80990f0cb6740b5fd9c4d35c999378d3d427 | 1,137 | exs | Elixir | clients/url_shortener/config/config.exs | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/url_shortener/config/config.exs | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/url_shortener/config/config.exs | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :url_shortener_api, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:url_shortener_api, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.677419 | 73 | 0.755497 |
f70c8801bee6e4d5dd98925a3444bb0e5bff8b85 | 6,120 | ex | Elixir | lib/glimesh_web/live/user_live/components/subscribe_button.ex | Drehd/glimesh.tv | 5db047063edb918cd687d4022b87b0958244af40 | [
"MIT"
] | null | null | null | lib/glimesh_web/live/user_live/components/subscribe_button.ex | Drehd/glimesh.tv | 5db047063edb918cd687d4022b87b0958244af40 | [
"MIT"
] | null | null | null | lib/glimesh_web/live/user_live/components/subscribe_button.ex | Drehd/glimesh.tv | 5db047063edb918cd687d4022b87b0958244af40 | [
"MIT"
] | null | null | null | defmodule GlimeshWeb.UserLive.Components.SubscribeButton do
use GlimeshWeb, :live_view
alias Glimesh.Accounts
alias Glimesh.Payments
@impl true
def render(assigns) do
~L"""
<div id="subscription-magic">
<%= if @user do %>
<%= if @can_subscribe do %>
<%= if @subscribed do %>
<button class="btn btn-secondary btn-responsive" phx-click="unsubscribe" phx-throttle="5000"><span class="d-none d-lg-block"><%= gettext("Unsubscribe") %></span><span class="d-lg-none"><i class="fas fa-star"></i></span></button>
<% else %>
          <button class="btn btn-secondary btn-responsive" phx-click="show_modal" phx-throttle="5000"><span class="d-none d-lg-block"><%= gettext("Subscribe") %></span><span class="d-lg-none"><i class="fas fa-star"></i></span></button>
<% end %>
<% else %>
<button class="btn btn-secondary btn-responsive disabled"><span class="d-none d-lg-block"><%= gettext("Subscribe") %></span><span class="d-lg-none"><i class="fas fa-star"></i></span></button>
<% end %>
<% else %>
<span class="d-none d-lg-block"><%= link gettext("Subscribe"), to: Routes.user_registration_path(@socket, :new), class: "btn btn-secondary btn-responsive" %></span><span class="d-lg-none"><i class="fas fa-star"></i></span>
<% end %>
<%= if @show_subscription do %>
<div id="paymentModal2" class="live-modal"
phx-capture-click="hide_modal"
phx-window-keydown="hide_modal"
phx-key="escape"
phx-target="#paymentModal2"
phx-page-loading>
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title"><%= gettext("Payment Details") %></h5>
<button type="button" class="close" phx-click="hide_modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<%= if @stripe_error do %>
<div class="alert alert-danger" role="alert">
<%= @stripe_error %>
</div>
<% end %>
<%= live_component @socket, GlimeshWeb.SubscriptionComponent, id: "subscription-component", type: :channel, user: @user, streamer: @streamer, product_id: @product_id, price_id: @price_id, price: @price %>
<img src="/images/stripe-badge-white.png" alt="We use Stripe as our payment provider."
class="img-fluid mt-4 mx-auto d-block">
</div>
</div>
</div>
</div>
<% end %>
</div>
"""
end
@impl true
def mount(_params, %{"streamer" => streamer, "user" => nil}, socket) do
{:ok,
socket
|> assign(:streamer, streamer)
|> assign(:can_subscribe, false)
|> assign(:user, nil)
|> assign(:subscribed, false)
|> assign(:show_subscription, false)}
end
@impl true
def mount(_params, %{"streamer" => streamer, "user" => user}, socket) do
subscribed = Glimesh.Payments.has_channel_subscription?(user, streamer)
can_subscribe = if Accounts.can_use_payments?(user), do: user.id != streamer.id, else: false
{:ok,
socket
|> assign(:stripe_public_key, Application.get_env(:stripity_stripe, :public_api_key))
|> assign(:stripe_customer_id, Accounts.get_stripe_customer_id(user))
|> assign(:stripe_payment_method, user.stripe_payment_method)
|> assign(:stripe_error, nil)
|> assign(:product_id, Payments.get_channel_sub_base_product_id())
|> assign(:price_id, Payments.get_channel_sub_base_price_id())
|> assign(:price, Payments.get_channel_sub_base_price())
|> assign(:show_subscription, false)
|> assign(:streamer, streamer)
|> assign(:user, user)
|> assign(:can_subscribe, can_subscribe)
|> assign(:subscribed, subscribed)}
end
@impl true
def handle_event("subscriptions.subscribe", %{"paymentMethodId" => payment_method}, socket) do
streamer = socket.assigns.streamer
user = socket.assigns.user
with {:ok, user} <- Payments.set_payment_method(user, payment_method),
{:ok, subscription} <-
Payments.subscribe_to_channel(
user,
streamer,
socket.assigns.product_id,
socket.assigns.price_id
) do
{:reply, subscription,
socket
|> assign(:user, Accounts.get_user!(user.id))
|> assign(:show_subscription, false)
|> assign(
:subscribed,
Payments.has_channel_subscription?(socket.assigns.user, socket.assigns.streamer)
)}
else
# {:pending_requires_action, error_msg} ->
# {:noreply, socket |> assign(:stripe_error, error_msg)}
# {:pending_requires_payment_method, error_msg} ->
# {:noreply, socket |> assign(:stripe_error, error_msg)}
{:error, error_msg} ->
{:noreply,
socket |> assign(:user, Accounts.get_user!(user.id)) |> assign(:stripe_error, error_msg)}
end
end
@impl true
def handle_event("unsubscribe", _value, socket) do
streamer = socket.assigns.streamer
user = socket.assigns.user
subscription = Payments.get_channel_subscription!(user, streamer)
case Payments.unsubscribe(subscription) do
{:ok, _} ->
{:noreply,
socket |> assign(:subscribed, Payments.has_channel_subscription?(user, streamer))}
{:error, error_msg} ->
{:noreply, socket |> assign(:stripe_error, error_msg)}
end
end
@impl true
def handle_event("show_modal", _value, socket) do
{:noreply, socket |> assign(:show_subscription, true)}
end
@impl true
def handle_event("hide_modal", _value, socket) do
{:noreply, socket |> assign(:show_subscription, false)}
end
end
| 39.74026 | 248 | 0.586928 |
f70c8f53e2126db1b1f8660f2989896c3e68a351 | 910 | ex | Elixir | elixir_demo/lib/elixir_demo/application.ex | vanvoljg/KafkaDemo | 25c735111dd44c5df46bb7f2b7495d330fffc903 | [
"MIT"
] | null | null | null | elixir_demo/lib/elixir_demo/application.ex | vanvoljg/KafkaDemo | 25c735111dd44c5df46bb7f2b7495d330fffc903 | [
"MIT"
] | null | null | null | elixir_demo/lib/elixir_demo/application.ex | vanvoljg/KafkaDemo | 25c735111dd44c5df46bb7f2b7495d330fffc903 | [
"MIT"
] | null | null | null |
defmodule ElixirDemo.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
@impl true
def start(_type, [:producer]) do
children = [
{ElixirDemo.Producer, []}
]
opts = [strategy: :one_for_one, name: ElixirDemo.Supervisor]
Supervisor.start_link(children, opts)
end
def start(_type, _args) do
children = [
# Starts a worker by calling: ElixirDemo.Worker.start_link(arg)
# {ElixirDemo.Worker, arg}
%{
id: Kaffe.GroupMemberSupervisor,
start: {Kaffe.GroupMemberSupervisor, :start_link, []},
type: :supervisor
}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: ElixirDemo.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 26 | 69 | 0.672527 |
f70cb2474004d0e81c273e45f93b69b373c09a9c | 1,200 | exs | Elixir | apps/teller/config/config.exs | fschuindt/amethyst | 0e8d60f83693b3bdb2c05d461cc3a74fa8b88b30 | [
"MIT"
] | 2 | 2018-06-23T11:43:34.000Z | 2019-01-07T10:52:42.000Z | apps/teller/config/config.exs | fschuindt/amethyst | 0e8d60f83693b3bdb2c05d461cc3a74fa8b88b30 | [
"MIT"
] | 1 | 2018-06-21T01:20:04.000Z | 2018-06-21T01:20:04.000Z | apps/teller/config/config.exs | fschuindt/amethyst | 0e8d60f83693b3bdb2c05d461cc3a74fa8b88b30 | [
"MIT"
] | 1 | 2018-06-30T21:40:51.000Z | 2018-06-30T21:40:51.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :absinthe,
schema: Teller.API.Schema
import_config "#{Mix.env}.exs"
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :teller, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:teller, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 33.333333 | 73 | 0.749167 |
f70d087be74944f59309e60a2b32f7ff958cc2b1 | 3,934 | ex | Elixir | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/job_status.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/job_status.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/job_status.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudRun.V1alpha1.Model.JobStatus do
@moduledoc """
JobStatus represents the current state of a Job.
## Attributes
* `active` (*type:* `integer()`, *default:* `nil`) - Optional. The number of actively running instances. +optional
* `completionTime` (*type:* `DateTime.t`, *default:* `nil`) - Optional. Represents time when the job was completed. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC. +optional
* `conditions` (*type:* `list(GoogleApi.CloudRun.V1alpha1.Model.JobCondition.t)`, *default:* `nil`) - Optional. The latest available observations of a job's current state. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/ +optional
* `failed` (*type:* `integer()`, *default:* `nil`) - Optional. The number of instances which reached phase Failed. +optional
* `imageDigest` (*type:* `String.t`, *default:* `nil`) - Optional. ImageDigest holds the resolved digest for the image specified within .Spec.Template.Spec.Container.Image. The digest is resolved during the creation of the Job. This field holds the digest value regardless of whether a tag or digest was originally specified in the Container object.
* `instances` (*type:* `list(GoogleApi.CloudRun.V1alpha1.Model.InstanceStatus.t)`, *default:* `nil`) - Optional. Status of completed, failed, and running instances. +optional
* `observedGeneration` (*type:* `integer()`, *default:* `nil`) - Optional. The 'generation' of the job that was last processed by the controller.
* `startTime` (*type:* `DateTime.t`, *default:* `nil`) - Optional. Represents time when the job was acknowledged by the job controller. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC. +optional
* `succeeded` (*type:* `integer()`, *default:* `nil`) - Optional. The number of instances which reached phase Succeeded. +optional
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:active => integer(),
:completionTime => DateTime.t(),
:conditions => list(GoogleApi.CloudRun.V1alpha1.Model.JobCondition.t()),
:failed => integer(),
:imageDigest => String.t(),
:instances => list(GoogleApi.CloudRun.V1alpha1.Model.InstanceStatus.t()),
:observedGeneration => integer(),
:startTime => DateTime.t(),
:succeeded => integer()
}
field(:active)
field(:completionTime, as: DateTime)
field(:conditions, as: GoogleApi.CloudRun.V1alpha1.Model.JobCondition, type: :list)
field(:failed)
field(:imageDigest)
field(:instances, as: GoogleApi.CloudRun.V1alpha1.Model.InstanceStatus, type: :list)
field(:observedGeneration)
field(:startTime, as: DateTime)
field(:succeeded)
end
defimpl Poison.Decoder, for: GoogleApi.CloudRun.V1alpha1.Model.JobStatus do
def decode(value, options) do
GoogleApi.CloudRun.V1alpha1.Model.JobStatus.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudRun.V1alpha1.Model.JobStatus do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 55.408451 | 353 | 0.721657 |
f70d11444fb1c09f0bf4061bcd410a05c7d77680 | 628 | exs | Elixir | test/models/board_test.exs | KazuCocoa/ex_torello | 187d814094f79a3d99bca2746683804333d40dfe | [
"MIT"
] | null | null | null | test/models/board_test.exs | KazuCocoa/ex_torello | 187d814094f79a3d99bca2746683804333d40dfe | [
"MIT"
] | null | null | null | test/models/board_test.exs | KazuCocoa/ex_torello | 187d814094f79a3d99bca2746683804333d40dfe | [
"MIT"
] | null | null | null | defmodule ExTrello.BoardTest do
use ExTrello.ModelCase, async: true
import ExTrello.Factory
alias ExTrello.Board
@valid_attrs %{name: "some content"}
@invalid_attrs %{}
test "changeset with valid attributes" do
user = create(:user)
attributes = @valid_attrs
|> Map.put(:user_id, user.id)
changeset = Board.changeset(build(:board), attributes)
assert changeset.valid?
%{slug: slug} = changeset.changes
assert slug == "some-content"
end
test "changeset with invalid attributes" do
changeset = Board.changeset(%Board{}, @invalid_attrs)
refute changeset.valid?
end
end
| 22.428571 | 58 | 0.69586 |
f70d11c3036f4844ef7d785a995217392b28b15b | 900 | ex | Elixir | clients/network_management/lib/google_api/network_management/v1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/network_management/lib/google_api/network_management/v1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/network_management/lib/google_api/network_management/v1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.NetworkManagement.V1 do
@moduledoc """
API client metadata for GoogleApi.NetworkManagement.V1.
"""
@discovery_revision "20200910"
def discovery_revision(), do: @discovery_revision
end
| 33.333333 | 74 | 0.763333 |
f70d24b127850e29048b59dcf58620229e92b2f1 | 2,491 | exs | Elixir | rel/config.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | rel/config.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | rel/config.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | # Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
Path.join(["rel", "plugins", "*.exs"])
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))
use Mix.Releases.Config,
# This sets the default release built by `mix release`
default_release: :default,
# This sets the default environment used by `mix release`
default_environment: Mix.env()
# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/configuration.html
# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile
environment :dev do
# If you are running Phoenix, you should make sure that
# server: true is set and the code reloader is disabled,
# even in dev mode.
# It is recommended that you build with MIX_ENV=prod and pass
# the --env flag to Distillery explicitly if you want to use
# dev mode.
set dev_mode: true
set include_erts: false
set cookie: :"9GI|sFj<F{R2~xr06]xAtZd:Q*E/0X*G9eS].1$(?.J_ftwD%tlt<N{*(Sdf%w%_"
plugin Conform.ReleasePlugin
end
environment :prod do
set include_erts: true
set include_src: false
set cookie: :"2$2Hk*FzVY@3XSuNdt>gGBCmCpFxt&WX(=Job*VMCxmhrA&5phabB{^/fxJCEQ<f"
plugin Conform.ReleasePlugin
end
# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default
release :exred do
set version: "0.1.5"
set applications: [
exred: :permanent,
exred_ui: :permanent,
exred_scheduler: :permanent,
]
end
#
# :runtime_tools,
#
# exred_node_aws_iot_daemon: :permanent,
# exred_node_aws_iot_thingshadow_in: :permanent,
# exred_node_aws_iot_thingshadow_out: :permanent,
# exred_node_debug: :permanent,
# exred_node_gpio_in: :permanent,
# exred_node_gpio_out: :permanent,
# exred_node_redis_daemon: :permanent,
# exred_node_redis_in: :permanent,
# exred_node_redis_out: :permanent,
# exred_node_suppress: :permanent,
# exred_node_trigger: :permanent,
# exred_node_multiply: :permanent,
# exred_node_shell: :permanent,
# exred_node_rpiphoto: :permanent,
#
# ]
# end
#
| 31.935897 | 81 | 0.726214 |
f70d31e56a8f832cf61d006a6ab44e43f5250edc | 1,961 | ex | Elixir | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_list_firebase_links_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_list_firebase_links_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_list_firebase_links_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaListFirebaseLinksResponse do
@moduledoc """
Response message for ListFirebaseLinks RPC
## Attributes
* `firebaseLinks` (*type:* `list(GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaFirebaseLink.t)`, *default:* `nil`) - List of FirebaseLinks. This will have at most one value.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:firebaseLinks =>
list(
GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaFirebaseLink.t()
)
}
field(:firebaseLinks,
as: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaFirebaseLink,
type: :list
)
end
defimpl Poison.Decoder,
for: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaListFirebaseLinksResponse do
def decode(value, options) do
GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaListFirebaseLinksResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaListFirebaseLinksResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.810345 | 197 | 0.759306 |
f70d37088eb24e3f2561cc41cebba5ba569d255e | 1,162 | ex | Elixir | lib/security.ex | LouisMT/Phoenix-API-Toolkit | fda2be3abe253c95ba30fd4334dda88c705aa4ec | [
"Apache-2.0"
] | 5 | 2020-06-23T18:35:44.000Z | 2022-01-13T21:20:41.000Z | lib/security.ex | LouisMT/Phoenix-API-Toolkit | fda2be3abe253c95ba30fd4334dda88c705aa4ec | [
"Apache-2.0"
] | 11 | 2019-12-11T13:15:28.000Z | 2021-06-01T07:37:25.000Z | lib/security.ex | LouisMT/Phoenix-API-Toolkit | fda2be3abe253c95ba30fd4334dda88c705aa4ec | [
"Apache-2.0"
] | 4 | 2020-08-05T09:23:22.000Z | 2021-01-03T12:16:36.000Z | defmodule PhoenixApiToolkit.Security do
@moduledoc false
defmodule AjaxCSRFError do
@moduledoc """
Error raised when a state-changing request does not have a "x-csrf-token" header.
"""
defexception message: "missing 'x-csrf-token' header", plug_status: 403
end
defmodule MissingContentTypeError do
@moduledoc "Error raised when a content-carrying request does not have a content-type header."
defexception message: "missing 'content-type' header", plug_status: 415
end
defmodule Oauth2TokenVerificationError do
@moduledoc "Error raised when an Oauth2 token is invalid"
defexception message: "Oauth2 token invalid", plug_status: 401
def exception([]), do: %Oauth2TokenVerificationError{}
def exception(message) do
%Oauth2TokenVerificationError{message: message}
end
end
defmodule HmacVerificationError do
@moduledoc "Error raised the HMAC used to sign a request body is invalid"
defexception message: "HMAC invalid", plug_status: 401
def exception([]), do: %HmacVerificationError{}
def exception(message) do
%HmacVerificationError{message: message}
end
end
end
| 30.578947 | 98 | 0.737522 |
f70d68cbda8117ff9262d6daa776edb3696a57f5 | 1,213 | ex | Elixir | lib/delivery_web/views/error_helpers.ex | joaoeliandro/delivery | f8f2ea20a0fa595ccd754a725cfadf35a8c89c69 | [
"MIT"
] | null | null | null | lib/delivery_web/views/error_helpers.ex | joaoeliandro/delivery | f8f2ea20a0fa595ccd754a725cfadf35a8c89c69 | [
"MIT"
] | null | null | null | lib/delivery_web/views/error_helpers.ex | joaoeliandro/delivery | f8f2ea20a0fa595ccd754a725cfadf35a8c89c69 | [
"MIT"
] | null | null | null | defmodule DeliveryWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(DeliveryWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(DeliveryWeb.Gettext, "errors", msg, opts)
end
end
end
| 35.676471 | 77 | 0.671888 |
f70d6ccb15ec4ca14b005a0224764f796c4b3026 | 2,596 | ex | Elixir | lib/open_pantry/web/models/user.ex | openpantry/open_pantry | 27d898a65dd6f44b325f48d41bc448bb486d9c6f | [
"MIT"
] | 41 | 2017-10-04T00:33:46.000Z | 2021-04-09T01:33:34.000Z | lib/open_pantry/web/models/user.ex | openpantry/open_pantry | 27d898a65dd6f44b325f48d41bc448bb486d9c6f | [
"MIT"
] | 74 | 2017-09-20T03:36:17.000Z | 2018-11-20T20:46:16.000Z | lib/open_pantry/web/models/user.ex | openpantry/open_pantry | 27d898a65dd6f44b325f48d41bc448bb486d9c6f | [
"MIT"
] | 12 | 2017-10-04T10:02:49.000Z | 2021-12-28T22:57:20.000Z | defmodule OpenPantry.User do
use OpenPantry.Web, :model
alias OpenPantry.Stock
schema "users" do
field :email, :string
field :name, :string
field :phone, :string
field :ok_to_text, :boolean, default: false
field :family_members, :integer
field :role, UserRoleEnum
field :hashed_password, :string
field :password, :string, virtual: true, default: ""
field :password_confirmation, :string, virtual: true, default: ""
many_to_many :managed_facilities, OpenPantry.Facility, join_through: OpenPantry.UserManagedFacility, on_replace: :delete
belongs_to :facility, OpenPantry.Facility
has_many :foods, through: [:facility, :food]
many_to_many :languages, OpenPantry.Language, join_through: "user_languages"
has_many :user_orders, OpenPantry.UserOrder
has_many :stock_distributions, through: [:user_orders, :stock_distributions]
belongs_to :primary_language, OpenPantry.Language
has_many :user_credits, OpenPantry.UserCredit
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:email, :name, :phone, :ok_to_text, :family_members, :primary_language_id, :facility_id, :password, :password_confirmation, :role])
|> unique_constraint(:email)
|> validate_password
|> validate_required([:name, :family_members, :primary_language_id, :facility_id])
end
def credits(user_id) when is_integer(user_id), do: find(user_id) |> credits
def credits(user) do
credits = Repo.preload(user, :user_credits).user_credits
|> Repo.preload(:credit_type)
food_credits = Enum.map(credits, fn credit ->
{credit.credit_type.name, credit.balance}
end)
|> Map.new
Map.put(food_credits, "Meals", meal_points(food_credits))
end
def meal_points(map) do
Map.values(map)
|> Enum.min
end
def guest() do
from(user in User, where: user.role == ^:guest)
|> Repo.one!
end
def facility_stocks(user) do
Repo.preload(user, :facility).facility
|> Repo.preload(:stocks)
|> (&(&1.stocks)).()
|> Enum.map(&Stock.stockable/1)
end
defp validate_password(changeset) do
case get_change(changeset, :password) do
nil -> changeset
password ->
if password == get_change(changeset, :password_confirmation) do
put_change(changeset, :hashed_password, Comeonin.Bcrypt.hashpwsalt(password))
else
add_error(changeset, :password, "The password and password confirmation don't match.")
end
end
end
end
| 32.45 | 152 | 0.694915 |
f70d76f2e286c9e3199da707d9be83ca3441f61c | 1,161 | ex | Elixir | lib/ex_dadata/httpoison_http_adapter.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | 1 | 2021-08-23T08:24:50.000Z | 2021-08-23T08:24:50.000Z | lib/ex_dadata/httpoison_http_adapter.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | null | null | null | lib/ex_dadata/httpoison_http_adapter.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(HTTPoison) do
defmodule ExDadata.HTTPoisonHTTPAdapter do
@moduledoc """
Default HTTP Adapter for this library.
"""
alias ExDadata.Client
alias ExDadata.HTTPAdapter
alias ExDadata.HTTPAdapter.Response
require Logger
@behaviour HTTPAdapter
@impl HTTPAdapter
def request(client, method, url, headers, body, opts) do
adapter = Client.json_adapter(client)
bin_body = adapter.encode!(body)
bin_headers =
Enum.map(headers, fn {k, v} ->
{String.to_charlist(k), String.to_charlist(v)}
end)
with {:ok, response} <-
HTTPoison.request(method, url, bin_body, bin_headers, opts) do
{:ok, wrap_response(response, adapter)}
end
end
defp wrap_response(response, json) do
%HTTPoison.Response{
status_code: status,
headers: headers,
body: body
} = response
try do
erl_body = json.decode!(body)
%Response{status: status, headers: headers, body: erl_body}
rescue
_ -> %Response{status: status, headers: headers, body: body}
end
end
end
end
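# Illustrative usage sketch (added for clarity; `client`, `url`, and `headers`
# are placeholders, and the exact call site is an assumption — the adapter is
# normally invoked through ExDadata's HTTPAdapter behaviour):
#
#   {:ok, %ExDadata.HTTPAdapter.Response{status: 200, body: decoded}} =
#     ExDadata.HTTPoisonHTTPAdapter.request(client, :post, url, headers, %{query: "msk"}, [])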
| 24.702128 | 75 | 0.627046 |
f70d81fd9229c78131311f558d7ad61fd3665cb2 | 3,131 | ex | Elixir | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/pose.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | ["Apache-2.0"] | null | null | null | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/pose.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | ["Apache-2.0"] | null | null | null | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/pose.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | ["Apache-2.0"] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.StreetViewPublish.V1.Model.Pose do
@moduledoc """
Raw pose measurement for an entity.
## Attributes
- altitude (float()): Altitude of the pose in meters above WGS84 ellipsoid. NaN indicates an unmeasured quantity. Defaults to: `null`.
- heading (float()): Compass heading, measured at the center of the photo in degrees clockwise from North. Value must be >=0 and <360. NaN indicates an unmeasured quantity. Defaults to: `null`.
- latLngPair (LatLng): Latitude and longitude pair of the pose, as explained here: https://cloud.google.com/datastore/docs/reference/rest/Shared.Types/LatLng When creating a Photo, if the latitude and longitude pair are not provided here, the geolocation from the exif header will be used. If the latitude and longitude pair is not provided and cannot be found in the exif header, the create photo process will fail. Defaults to: `null`.
- level (Level): Level (the floor in a building) used to configure vertical navigation. Defaults to: `null`.
- pitch (float()): Pitch, measured at the center of the photo in degrees. Value must be >=-90 and <= 90. A value of -90 means looking directly down, and a value of 90 means looking directly up. NaN indicates an unmeasured quantity. Defaults to: `null`.
- roll (float()): Roll, measured in degrees. Value must be >= 0 and <360. A value of 0 means level with the horizon. NaN indicates an unmeasured quantity. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:altitude => any(),
:heading => any(),
:latLngPair => GoogleApi.StreetViewPublish.V1.Model.LatLng.t(),
:level => GoogleApi.StreetViewPublish.V1.Model.Level.t(),
:pitch => any(),
:roll => any()
}
field(:altitude)
field(:heading)
field(:latLngPair, as: GoogleApi.StreetViewPublish.V1.Model.LatLng)
field(:level, as: GoogleApi.StreetViewPublish.V1.Model.Level)
field(:pitch)
field(:roll)
end
defimpl Poison.Decoder, for: GoogleApi.StreetViewPublish.V1.Model.Pose do
def decode(value, options) do
GoogleApi.StreetViewPublish.V1.Model.Pose.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.StreetViewPublish.V1.Model.Pose do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 49.698413 | 439 | 0.731076 |
f70d84348b2bb8b4a487a38508f0921d5d8e205f | 967 | ex | Elixir | kousa/lib/broth/message/user/update.ex | asgarovf/dogehouse | 12b81d2008562373591fb55ab8179a3fa8a5db18 | ["MIT"] | 1 | 2021-05-18T15:21:25.000Z | 2021-05-18T15:21:25.000Z | kousa/lib/broth/message/user/update.ex | asgarovf/dogehouse | 12b81d2008562373591fb55ab8179a3fa8a5db18 | ["MIT"] | null | null | null | kousa/lib/broth/message/user/update.ex | asgarovf/dogehouse | 12b81d2008562373591fb55ab8179a3fa8a5db18 | ["MIT"] | null | null | null |
defmodule Broth.Message.User.Update do
use Broth.Message.Call,
reply: __MODULE__
alias Beef.Repo
@derive {Jason.Encoder, only: ~w(
username
muted
deafened
)a}
@primary_key {:id, :binary_id, []}
schema "users" do
field(:username, :string)
field(:muted, :boolean, virtual: true)
field(:deafened, :boolean, virtual: true)
end
def initialize(state) do
Repo.get(__MODULE__, state.user_id)
end
  def changeset(initializer \\ %__MODULE__{}, data) do
    initializer
    |> cast(data, [:muted, :deafened, :username])
    |> validate_required([:username])
  end
def execute(changeset, state) do
# TODO: make this a proper changeset-mediated alteration.
with {:ok, update} <- apply_action(changeset, :validate),
{:ok, user} <- Kousa.User.update(state.user_id, Map.from_struct(update)) do
{:reply, struct(__MODULE__, Map.from_struct(user)), state}
end
end
end
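# Rough usage sketch (assumption, not from the original source): Broth's call
# pipeline loads the row with initialize/1, applies the payload with
# changeset/2, and then runs execute/2 against the socket state, e.g.
#
#   state
#   |> Broth.Message.User.Update.initialize()
#   |> Broth.Message.User.Update.changeset(%{"username" => "doge", "muted" => true})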
| 24.794872 | 84 | 0.658738 |
f70da49d53036535bffb261b621e8e9e9e0b9e99 | 1,681 | ex | Elixir | lib/postoffice_web/router.ex | lonamiaec/postoffice | 3a6f28ea0db94887faf427bf06df2b1504c00d02 | ["Apache-2.0"] | 15 | 2020-01-24T10:33:57.000Z | 2020-10-24T07:57:14.000Z | lib/postoffice_web/router.ex | lonamiaec/postoffice | 3a6f28ea0db94887faf427bf06df2b1504c00d02 | ["Apache-2.0"] | 24 | 2020-01-24T09:52:56.000Z | 2021-02-19T09:15:12.000Z | lib/postoffice_web/router.ex | mercadona/postoffice | 3a6f28ea0db94887faf427bf06df2b1504c00d02 | ["Apache-2.0"] | 5 | 2020-01-25T18:03:44.000Z | 2021-02-23T10:07:03.000Z |
defmodule PostofficeWeb.Router do
use PostofficeWeb, :router
import Phoenix.LiveDashboard.Router
alias Api.MessageController, as: ApiMessageController
alias Api.BulkMessageController, as: ApiBulkMessageController
alias Api.TopicController, as: ApiTopicController
alias Api.PublisherController, as: ApiPublisherController
alias Api.HealthController, as: ApiHealthController
alias Api.ScheduleMessageController, as: ApiScheduleMessageController
alias MessageController, as: MessageController
alias IndexController
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", PostofficeWeb do
pipe_through :browser
get "/", IndexController, :index, as: :dashboard
resources "/topics", TopicController, only: [:index, :new, :create]
resources "/publishers", PublisherController, only: [:index, :new, :create, :edit, :update, :delete]
resources "/messages", MessageController, only: [:index]
live_dashboard "/dashboard"
end
scope "/api", PostofficeWeb, as: :api do
pipe_through :api
resources "/messages", ApiMessageController, only: [:create, :show]
resources "/bulk_messages", ApiBulkMessageController, only: [:create, :show]
resources "/schedule_messages", ApiScheduleMessageController, only: [:create, :show]
resources "/topics", ApiTopicController, only: [:create, :show]
resources "/publishers", ApiPublisherController, only: [:create, :delete]
resources "/health", ApiHealthController, only: [:index]
end
end
| 32.960784 | 104 | 0.739441 |
f70da65c545acad4d70a2166243f7faca72d85e6 | 80 | exs | Elixir | apps/artemis_web/test/artemis_web/views/page_view_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | ["MIT"] | 10 | 2019-07-05T19:59:20.000Z | 2021-05-23T07:36:11.000Z | apps/artemis_web/test/artemis_web/views/page_view_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | ["MIT"] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis_web/test/artemis_web/views/page_view_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | ["MIT"] | 4 | 2019-07-05T20:04:08.000Z | 2021-05-13T16:28:33.000Z |
defmodule ArtemisWeb.PageViewTest do
use ArtemisWeb.ConnCase, async: true
end
| 20 | 38 | 0.825 |
f70dd19d415344914eb69a28bc65a4ba09e9d0f7 | 2,097 | ex | Elixir | clients/android_management/lib/google_api/android_management/v1/model/proxy_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/android_management/lib/google_api/android_management/v1/model/proxy_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/android_management/lib/google_api/android_management/v1/model/proxy_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AndroidManagement.V1.Model.ProxyInfo do
@moduledoc """
Configuration info for an HTTP proxy. For a direct proxy, set the host, port, and excluded_hosts fields. For a PAC script proxy, set the pac_uri field.
## Attributes
* `excludedHosts` (*type:* `list(String.t)`, *default:* `nil`) - For a direct proxy, the hosts for which the proxy is bypassed. The host names may contain wildcards such as *.example.com.
* `host` (*type:* `String.t`, *default:* `nil`) - The host of the direct proxy.
* `pacUri` (*type:* `String.t`, *default:* `nil`) - The URI of the PAC script used to configure the proxy.
* `port` (*type:* `integer()`, *default:* `nil`) - The port of the direct proxy.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:excludedHosts => list(String.t()),
:host => String.t(),
:pacUri => String.t(),
:port => integer()
}
field(:excludedHosts, type: :list)
field(:host)
field(:pacUri)
field(:port)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidManagement.V1.Model.ProxyInfo do
def decode(value, options) do
GoogleApi.AndroidManagement.V1.Model.ProxyInfo.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidManagement.V1.Model.ProxyInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.446429 | 191 | 0.703863 |
f70dee591c3b3ff1cc877f8670e93d44f522ed0d | 1,263 | exs | Elixir | chapter-23/more-cool-stuff-3/line-sigil.exs | carlos4ndre/elixir-exercises | 3673cf69d08a6c7de53224f8956f0db21e7c5dad | ["MIT"] | 55 | 2015-12-09T09:23:50.000Z | 2022-01-31T21:14:59.000Z | chapter-23/more-cool-stuff-3/line-sigil.exs | carlos4ndre/elixir-exercises | 3673cf69d08a6c7de53224f8956f0db21e7c5dad | ["MIT"] | 3 | 2016-06-15T12:53:39.000Z | 2017-08-21T18:20:42.000Z | chapter-23/more-cool-stuff-3/line-sigil.exs | carlos4ndre/elixir-exercises | 3673cf69d08a6c7de53224f8956f0db21e7c5dad | ["MIT"] | 18 | 2016-01-01T18:18:23.000Z | 2021-05-24T02:22:42.000Z |
defmodule LineSigil do
def sigil_v(content, _opts) do
    lines = String.split(content, "\n", trim: true)
column_names = get_column_names(lines)
remaining_lines = get_remaining_lines(lines)
remaining_lines
|> Enum.map(&parse_line/1)
|> Enum.map(&add_column_names_to_line(&1, column_names))
end
defp get_column_names(lines) do
lines
|> Enum.at(0)
|> String.split(",")
    |> Enum.map(&String.to_atom/1)
end
defp get_remaining_lines(lines) do
{_column_names, remaining_lines} = Enum.split(lines, 1)
remaining_lines
end
defp parse_line(line) do
line
|> String.split(",")
|> Enum.map(&parse_cell/1)
end
defp parse_cell(value) do
case Float.parse(value) do
{num, ""} -> num
{num, _r} -> num
:error -> value
end
end
defp add_column_names_to_line(line, column_names) do
Enum.zip(column_names, line)
end
end
defmodule Example do
import LineSigil
def csv do
~v"""
Item,Qty,Price
Teddy bear,4,34.95
Milk,1,2.99
Battery,6,8.00
"""
end
end
IO.inspect Example.csv
"""
$ elixir -r line-sigil.exs
[[Item: "Teddy bear", Qty: 4.0, Price: 34.95],
[Item: "Milk", Qty: 1.0, Price: 2.99], [Item: "Battery", Qty: 6.0, Price: 8.0]]
"""
| 20.047619 | 80 | 0.631829 |
f70e09046c528f780d087828663aec5478a35c78 | 15,261 | ex | Elixir | lib/elixir/lib/version.ex | knewter/elixir | 8310d62499e292d78d5c9d79d5d15a64e32fb738 | ["Apache-2.0"] | null | null | null | lib/elixir/lib/version.ex | knewter/elixir | 8310d62499e292d78d5c9d79d5d15a64e32fb738 | ["Apache-2.0"] | null | null | null | lib/elixir/lib/version.ex | knewter/elixir | 8310d62499e292d78d5c9d79d5d15a64e32fb738 | ["Apache-2.0"] | null | null | null |
defmodule Version do
@moduledoc %S"""
Functions for parsing and matching versions against requirements.
A version is a string in a specific format or a `Version.Schema`
generated after parsing via `Version.parse/1`.
`Version` parsing and requirements follow
[SemVer 2.0 schema](http://semver.org/) and you will get
the most of Mix's version system by following it. In order
to support integration with projects that may
follow different versioning schemas, Elixir won't choke
on unknown versions, however you won't be able to use
Mix requirements with such unformatted versions.
## Versions
In a nutshell, a version is given by three numbers:
MAJOR.MINOR.PATCH
Pre-releases are supported by appending `-[0-9A-Za-z-\.]`:
"1.0.0-alpha.3"
Build information can be added by appending `+[0-9A-Za-z-\.]`:
"1.0.0-alpha.3+20130417140000"
## Requirements
Requirements allow you to specify which versions of a given
dependency you are willing to work against. It supports common
operators like `>=`, `<=`, `>`, `==` and friends that
work as one would expect:
# Only version 2.0.0
"== 2.0.0"
# Anything later than 2.0.0
"> 2.0.0"
Requirements also support `and` and `or` for complex conditions:
# 2.0.0 and later until 2.1.0
">= 2.0.0 and < 2.1.0"
Since the example above is such a common requirement, it can
be expressed as:
"~> 2.0.0"
"""
@type t :: String.t | Version.Schema.t
@type requirement :: String.t | Version.Requirement.t
@type matchable :: { major :: String.t | non_neg_integer,
minor :: non_neg_integer | nil,
patch :: non_neg_integer | nil,
pre :: [String.t] }
import Kernel, except: [match?: 2]
defrecord Schema, major: 0, minor: 0, patch: 0, pre: nil, build: nil, source: nil
defrecord Requirement, source: nil, matchspec: nil
defexception InvalidRequirement, reason: :invalid_requirement do
def message(InvalidRequirement[reason: reason]) when is_binary(reason) do
{ first, rest } = String.next_grapheme(reason)
String.downcase(first) <> rest
end
def message(InvalidRequirement[]) do
"invalid version specification"
end
end
@doc """
Check if the given version matches the specification.
Returns `true` if `version` satisfies `requirement`, `false` otherwise.
Raises a `Version.InvalidRequirement` exception if `requirement` is not parseable.
## Examples
iex> Version.match?("2.0", ">1.0")
true
iex> Version.match?("2.0", "==1.0")
false
"""
@spec match?(t, requirement) :: boolean
def match?(version, requirement) when is_binary(requirement) do
case Version.Parser.parse_requirement(requirement) do
{ :ok, req } ->
match?(version, req)
{ :error, reason } ->
raise InvalidRequirement, reason: reason
end
end
def match?(version, requirement) when is_binary(version) do
match?(parse(version), requirement)
end
def match?(Schema[] = version, Requirement[matchspec: spec]) do
case :ets.test_ms(to_matchable(version), spec) do
{ :ok, result } ->
result != false
{ :error, reason } ->
raise InvalidRequirement, reason: reason
end
end
@doc """
Check if a version string is compatible with [semver](http://semver.org/).
## Examples
iex> Version.valid?("2.0")
true
iex> Version.valid?("invalid")
false
"""
@spec valid?(String.t | Schema.t) :: boolean
def valid?(string) when is_binary(string) do
Version.Parser.valid_version?(string)
end
def valid?(Version.Schema[major: nil]), do: false
def valid?(Version.Schema[]), do: true
@doc """
Parse a version string into a `Version.Schema`.
## Examples
> Version.parse("2.0.1-alpha1")
#Version.Schema<2.0.1-alpha1>
"""
@spec parse(String.t) :: Schema.t
def parse(string) when is_binary(string) do
case Version.Parser.parse_version(string) do
{ :ok, matchable } -> from_matchable(matchable).source(string).build(get_build(string))
{ :error, _ } -> Version.Schema[source: string]
end
end
defp get_build(string) do
case Regex.run(%r/\+([^\s]+)$/, string) do
nil ->
nil
[_, build] ->
build
end
end
@doc """
Convert a version to a `Version.matchable`
## Examples
iex> Version.to_matchable("2.0.1-alpha.1")
{2, 0, 1, ["alpha", 1]}
"""
@spec to_matchable(String.t | Schema.t) :: Version.matchable
def to_matchable(Schema[major: nil, source: source]) do
{ source, nil, nil, [] }
end
def to_matchable(Version.Schema[major: major, minor: minor, patch: patch, pre: nil]) do
{ major, minor, patch, [] }
end
def to_matchable(Version.Schema[major: major, minor: minor, patch: patch, pre: pre]) do
{ major, minor, patch, Version.Parser.parse_pre(pre) }
end
def to_matchable(string) do
to_matchable(parse(string))
end
@doc """
Convert a matchable to a `Version.Schema`.
## Examples
> Version.from_matchable({2, 0, 1, ["alpha", 1]})
#Version.Schema<2.0.1-alpha.1>
"""
@spec from_matchable(Version.matchable) :: Schema.t
def from_matchable({ source, nil, nil, nil }) when is_binary(source) do
Version.Schema[source: source]
end
def from_matchable({ major, minor, patch, pre }) do
source = "#{major}"
if minor do
source = "#{source}.#{minor}"
if patch do
source = "#{source}.#{patch}"
case pre do
[] ->
pre = nil
list ->
pre = Enum.join(list, ".")
source = "#{source}-#{pre}"
end
end
end
Version.Schema[major: major, minor: minor, patch: patch, pre: pre, source: source]
end
defmodule Parser.DSL do
@moduledoc false
defmacro deflexer(match, do: body) when is_binary(match) do
quote do
def lexer(unquote(match) <> rest, acc) do
lexer(rest, [unquote(body) | acc])
end
end
end
defmacro deflexer(acc, do: body) do
quote do
def lexer("", unquote(acc)) do
unquote(body)
end
end
end
defmacro deflexer(char, acc, do: body) do
quote do
def lexer(<< unquote(char) :: utf8, rest :: binary >>, unquote(acc)) do
unquote(char) = << unquote(char) :: utf8 >>
lexer(rest, unquote(body))
end
end
end
end
defmodule Parser do
@moduledoc false
import Parser.DSL
deflexer ">=", do: :'>='
deflexer "<=", do: :'<='
deflexer "~>", do: :'~>'
deflexer ">", do: :'>'
deflexer "<", do: :'<'
deflexer "==", do: :'=='
deflexer "!=", do: :'!='
deflexer "!", do: :'!='
deflexer " or ", do: :'||'
deflexer " and ", do: :'&&'
deflexer " ", do: :' '
deflexer x, [] do
[x, :'==']
end
deflexer x, [h | acc] do
cond do
is_binary h ->
[h <> x | acc]
h in [:'||', :'&&'] ->
[x, :'==', h | acc]
true ->
[x, h | acc]
end
end
deflexer acc do
Enum.filter(Enum.reverse(acc), &(&1 != :' '))
end
@version_regex %r/^(\d+)(?:\.(\d+)(?:\.(\d+))?)?(?:\-([^\s]+))?(?:\+[^\d]+)?$/
@spec parse_requirement(String.t) :: { :ok, Version.Requirement.t } | { :error, binary | atom }
def parse_requirement(source) do
lexed = lexer(source, [])
if valid_requirement?(lexed) do
spec = to_matchspec(lexed)
case :ets.test_ms({}, spec) do
{ :ok, _ } ->
{ :ok, Requirement[source: source, matchspec: spec] }
{ :error, errors } ->
{ :error, Enum.map(errors, fn { :error, reason } ->
to_string(reason)
end) }
end
else
{ :error, :invalid_requirement }
end
end
defp nillify(""), do: nil
defp nillify(o), do: o
@spec parse_version(String.t) :: { :ok, Version.matchable } | { :error, :invalid_version }
def parse_version(string) when is_binary(string) do
if valid_version?(string) do
destructure [_, major, minor, patch, pre], Regex.run(@version_regex, string)
major = binary_to_integer(major)
minor = binary_to_integer(minor |> nillify || "0")
patch = binary_to_integer(patch |> nillify || "0")
pre = pre && parse_pre(pre) || []
{ :ok, { major, minor, patch, pre } }
else
{ :error, :invalid_version }
end
end
@doc false
def parse_pre(pre) do
String.split(pre, ".") |> Enum.map fn piece ->
if piece =~ %r/^(0|[1-9][0-9]*)$/ do
binary_to_integer(piece)
else
piece
end
end
end
@spec valid_requirement?(list) :: boolean
def valid_requirement?([]) do
false
end
def valid_requirement?([a | next]) do
valid_requirement?(a, next)
end
# it must finish with a version
defp valid_requirement?(a, []) when is_binary(a) do
true
end
# version version
defp valid_requirement?(a, [b | _]) when is_binary(a) and is_binary(b) do
false
end
# or <op> | and <op>
defp valid_requirement?(a, [b | next]) when is_atom(a) and is_atom(b) and a in [:'||', :'&&'] do
valid_requirement?(b, next)
end
# <version> or | <version> and
defp valid_requirement?(a, [b | next]) when is_binary(a) and is_atom(b) and b in [:'||', :'&&'] do
valid_requirement?(b, next)
end
# or <version> | and <version>
defp valid_requirement?(a, [b | next]) when is_atom(a) and is_binary(b) and a in [:'||', :'&&'] do
valid_requirement?(b, next)
end
# <op> <version>; also checks operators work on valid versions
defp valid_requirement?(a, [b | next]) when is_atom(a) and is_binary(b) do
if valid_version?(b) do
valid_requirement?(b, next)
else
if a in [:'==', :'!='] and Regex.match? %r/^\w/, b do
valid_requirement?(b, next)
else
false
end
end
end
defp valid_requirement?(_, _) do
false
end
@spec valid_version?(String.t) :: boolean
def valid_version?(string) do
Regex.match? %r/^\d+(\.\d+(\.\d+)?)?(\-[^\s]+)?(?:\+[^\s]+)?$/, string
end
defp approximate(version) do
Version.from_matchable(case Regex.run(@version_regex, version) do
[_, major] ->
{ binary_to_integer(major) + 1, 0, 0, [] }
[_, major, _] ->
{ binary_to_integer(major) + 1, 0, 0, [] }
[_, major, minor, _] ->
{ binary_to_integer(major), binary_to_integer(minor) + 1, 0, [] }
[_, major, minor, _, _] ->
{ binary_to_integer(major), binary_to_integer(minor) + 1, 0, [] }
end)
end
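    # Worked examples of the "~>" upper bound computed by approximate/1 (added
    # for clarity, not in the original source):
    #
    #   approximate("2.1.3") #=> 2.2.0, so "~> 2.1.3" allows >= 2.1.3 and < 2.2.0
    #   approximate("2.1")   #=> 3.0.0, so "~> 2.1"   allows >= 2.1.0 and < 3.0.0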
defp to_matchspec(lexed) do
first = to_condition(lexed)
rest = Enum.drop(lexed, 2)
[{{ :'$1', :'$2', :'$3', :'$4' }, [to_condition(first, rest)], [:'$_'] }]
end
defp to_condition([:'==', version | _]) do
version = Version.to_matchable(version)
{ :'==', :'$_', { :const, version } }
end
defp to_condition([:'!=', version | _]) do
version = Version.to_matchable(version)
{ :'/=', :'$_', { :const, version } }
end
defp to_condition([:'~>', version | _]) do
from = Version.parse(version)
to = approximate(version)
{ :andalso, to_condition([:'>=', to_string(from)]),
to_condition([:'<', to_string(to)]) }
end
defp to_condition([:'>', version | _]) do
{ major, minor, patch, pre } = Version.to_matchable(version)
{ :andalso, { :not, { :is_binary, :'$1' } },
{ :orelse, { :'>', {{ :'$1', :'$2', :'$3' }},
{ :const, { major, minor, patch } } },
{ :andalso, { :'==', {{ :'$1', :'$2', :'$3' }},
{ :const, { major, minor, patch } } },
{ :orelse, { :andalso, { :'==', { :length, :'$4' }, 0 },
{ :'/=', length(pre), 0 } },
{ :andalso, { :'/=', length(pre), 0 },
{ :orelse, { :'>', { :length, :'$4' }, length(pre) },
{ :andalso, { :'==', { :length, :'$4' }, length(pre) },
{ :'>', :'$4', { :const, pre } } } } } } } } }
end
defp to_condition([:'>=', version | _]) do
matchable = Version.to_matchable(version)
{ :orelse, { :andalso, { :not, { :is_binary, :'$1' } },
{ :'==', :'$_', { :const, matchable } } },
to_condition([:'>', version]) }
end
defp to_condition([:'<', version | _]) do
{ major, minor, patch, pre } = Version.to_matchable(version)
{ :andalso, { :not, { :is_binary, :'$1' } },
{ :orelse, { :'<', {{ :'$1', :'$2', :'$3' }},
{ :const, { major, minor, patch } } },
{ :andalso, { :'==', {{ :'$1', :'$2', :'$3' }},
{ :const, { major, minor, patch } } },
{ :orelse, { :andalso, { :'/=', { :length, :'$4' }, 0 },
{ :'==', length(pre), 0 } },
{ :andalso, { :'/=', { :length, :'$4' }, 0 },
{ :orelse, { :'<', { :length, :'$4' }, length(pre) },
{ :andalso, { :'==', { :length, :'$4' }, length(pre) },
{ :'<', :'$4', { :const, pre } } } } } } } } }
end
defp to_condition([:'<=', version | _]) do
matchable = Version.to_matchable(version)
{ :orelse, { :andalso, { :not, { :is_binary, :'$1' } },
{ :'==', :'$_', { :const, matchable } } },
to_condition([:'<', version]) }
end
defp to_condition(current, []) do
current
end
defp to_condition(current, [:'&&', operator, version | rest]) do
to_condition({ :andalso, current, to_condition([operator, version]) }, rest)
end
defp to_condition(current, [:'||', operator, version | rest]) do
to_condition({ :orelse, current, to_condition([operator, version]) }, rest)
end
end
end
defimpl String.Chars, for: Version.Schema do
def to_string(Version.Schema[source: source]) do
source
end
end
defimpl Inspect, for: Version.Schema do
def inspect(self, _opts) do
"#Version.Schema<" <> to_string(self) <> ">"
end
end
defimpl String.Chars, for: Version.Requirement do
def to_string({ _, source, _ }) do
source
end
end
defimpl Inspect, for: Version.Requirement do
def inspect({ _, source, _ }, _opts) do
"#Version.Requirement<" <> source <> ">"
end
end
| 28.848771 | 121 | 0.523622 |
f70e14537efe1db525ae33f93d07e023787072f3 | 430 | ex | Elixir | test/support/comment.ex | devonestes/arbor | d8a608f09977ae6904008f01acc1c9971b00c381 | ["MIT"] | null | null | null | test/support/comment.ex | devonestes/arbor | d8a608f09977ae6904008f01acc1c9971b00c381 | ["MIT"] | null | null | null | test/support/comment.ex | devonestes/arbor | d8a608f09977ae6904008f01acc1c9971b00c381 | ["MIT"] | null | null | null |
defmodule Arbor.Comment do
@moduledoc false
use Ecto.Schema
use Arbor.Tree,
foreign_key: :parent_id,
foreign_key_type: :integer,
prefixes: ["private"]
import Ecto.Query
schema "comments" do
field(:body, :string)
belongs_to(:parent, Arbor.Comment)
timestamps()
end
def by_inserted_at(query \\ __MODULE__) do
from(
c in query,
order_by: [asc: :inserted_at]
)
end
end
| 16.538462 | 44 | 0.653488 |
f70e2b8b35a2993f8834c7bee5b6ded7773b1ab1 | 3,400 | ex | Elixir | clients/content/lib/google_api/content/v21/model/orders_in_store_refund_line_item_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/content/lib/google_api/content/v21/model/orders_in_store_refund_line_item_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | null | null | null | clients/content/lib/google_api/content/v21/model/orders_in_store_refund_line_item_request.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.OrdersInStoreRefundLineItemRequest do
@moduledoc """
## Attributes
* `lineItemId` (*type:* `String.t`, *default:* `nil`) - The ID of the line item to return. Either lineItemId or productId is required.
* `operationId` (*type:* `String.t`, *default:* `nil`) - The ID of the operation. Unique across all operations for a given order.
* `priceAmount` (*type:* `GoogleApi.Content.V21.Model.Price.t`, *default:* `nil`) - The amount to be refunded. This may be pre-tax or post-tax depending on the location of the order. Required.
* `productId` (*type:* `String.t`, *default:* `nil`) - The ID of the product to return. This is the REST ID used in the products service. Either lineItemId or productId is required.
* `quantity` (*type:* `integer()`, *default:* `nil`) - The quantity to return and refund.
* `reason` (*type:* `String.t`, *default:* `nil`) - The reason for the return.
Acceptable values are:
- "`customerDiscretionaryReturn`"
- "`customerInitiatedMerchantCancel`"
- "`deliveredTooLate`"
- "`expiredItem`"
- "`invalidCoupon`"
- "`malformedShippingAddress`"
- "`other`"
- "`productArrivedDamaged`"
- "`productNotAsDescribed`"
- "`qualityNotAsExpected`"
- "`undeliverableShippingAddress`"
- "`unsupportedPoBoxAddress`"
- "`wrongProductShipped`"
* `reasonText` (*type:* `String.t`, *default:* `nil`) - The explanation of the reason.
* `taxAmount` (*type:* `GoogleApi.Content.V21.Model.Price.t`, *default:* `nil`) - The amount of tax to be refunded. Required.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:lineItemId => String.t(),
:operationId => String.t(),
:priceAmount => GoogleApi.Content.V21.Model.Price.t(),
:productId => String.t(),
:quantity => integer(),
:reason => String.t(),
:reasonText => String.t(),
:taxAmount => GoogleApi.Content.V21.Model.Price.t()
}
field(:lineItemId)
field(:operationId)
field(:priceAmount, as: GoogleApi.Content.V21.Model.Price)
field(:productId)
field(:quantity)
field(:reason)
field(:reasonText)
field(:taxAmount, as: GoogleApi.Content.V21.Model.Price)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.OrdersInStoreRefundLineItemRequest do
def decode(value, options) do
GoogleApi.Content.V21.Model.OrdersInStoreRefundLineItemRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.OrdersInStoreRefundLineItemRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.963855 | 196 | 0.683235 |
f70e59ffa3b720aec3989ba353295d3f76fc3b24 | 1,729 | ex | Elixir | clients/search_console/lib/google_api/search_console/v1/model/sites_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/search_console/lib/google_api/search_console/v1/model/sites_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/search_console/lib/google_api/search_console/v1/model/sites_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SearchConsole.V1.Model.SitesListResponse do
@moduledoc """
List of sites with access level information.
## Attributes
* `siteEntry` (*type:* `list(GoogleApi.SearchConsole.V1.Model.WmxSite.t)`, *default:* `nil`) - Contains permission level information about a Search Console site. For more information, see [Permissions in Search Console](https://support.google.com/webmasters/answer/2451999).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:siteEntry => list(GoogleApi.SearchConsole.V1.Model.WmxSite.t()) | nil
}
field(:siteEntry, as: GoogleApi.SearchConsole.V1.Model.WmxSite, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.SearchConsole.V1.Model.SitesListResponse do
def decode(value, options) do
GoogleApi.SearchConsole.V1.Model.SitesListResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.SearchConsole.V1.Model.SitesListResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.787234 | 278 | 0.754193 |
f70eb73d02021a217ff23f048d22ac0f32e3bbb1 | 325 | ex | Elixir | lib/messaging_status_service/calls/call_sids/call_sids.ex | ciroque/messaging_status_service | 0d32873ac6e0a78c92a5cf08da373ba4aaf22da4 | ["MIT"] | null | null | null | lib/messaging_status_service/calls/call_sids/call_sids.ex | ciroque/messaging_status_service | 0d32873ac6e0a78c92a5cf08da373ba4aaf22da4 | ["MIT"] | null | null | null | lib/messaging_status_service/calls/call_sids/call_sids.ex | ciroque/messaging_status_service | 0d32873ac6e0a78c92a5cf08da373ba4aaf22da4 | ["MIT"] | null | null | null |
defmodule MessagingStatusService.Calls.CallSids do
import Ecto.Query, warn: false
alias MessagingStatusService.Calls.CallSid
alias MessagingStatusService.Repo
def create(attrs \\ %{}) do
%CallSid{}
|> CallSid.changeset(attrs)
|> Repo.insert
end
def get!(id) do
Repo.get!(CallSid, id)
end
end
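# Illustrative usage sketch (assumption — the CallSid schema's fields are not
# shown in this file, so :sid below is a hypothetical attribute):
#
#   {:ok, call_sid} = MessagingStatusService.Calls.CallSids.create(%{sid: "CA1234..."})
#   MessagingStatusService.Calls.CallSids.get!(call_sid.id)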
| 19.117647 | 50 | 0.704615 |
f70ed2a3bfb89418c0d0f65b89a8d222ec527b6e | 922 | ex | Elixir | lib/rocketpay/accounts/transaction.ex | luizDorval/Rocketpay | b8cafbbb8b65b53596a350897aeee3dc4dd474ed | ["MIT"] | 2 | 2021-04-22T10:54:55.000Z | 2021-06-17T03:09:14.000Z | lib/rocketpay/accounts/transaction.ex | luizDorval/Rocketpay | b8cafbbb8b65b53596a350897aeee3dc4dd474ed | ["MIT"] | null | null | null | lib/rocketpay/accounts/transaction.ex | luizDorval/Rocketpay | b8cafbbb8b65b53596a350897aeee3dc4dd474ed | ["MIT"] | null | null | null |
defmodule Rocketpay.Accounts.Transaction do
alias Ecto.Multi
alias Rocketpay.Accounts.Operation
alias Rocketpay.Accounts.Transactions.Response, as: TransactionResponse
alias Rocketpay.Repo
def call(%{"from" => from_id, "to" => to_id, "value" => value}) do
withdraw_params = build_params(from_id, value)
deposit_params = build_params(to_id, value)
Multi.new()
|> Multi.merge(fn _changes -> Operation.call(withdraw_params, :withdraw) end)
|> Multi.merge(fn _changes -> Operation.call(deposit_params, :deposit) end)
|> run_transaction()
end
defp build_params(id, value), do: %{"id" => id, "value" => value}
defp run_transaction(multi) do
case Repo.transaction(multi) do
{:error, _operation, reason, _changes} -> {:error, reason}
{:ok, %{deposit: to_account, withdraw: from_account}} -> {:ok, TransactionResponse.build(from_account, to_account)}
end
end
end
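# Illustrative usage sketch (added for clarity, not in the original source;
# the UUIDs are placeholders): both operations run inside one Ecto.Multi, so
# the withdraw and deposit either both commit or both roll back.
#
#   Rocketpay.Accounts.Transaction.call(%{
#     "from" => "11111111-1111-1111-1111-111111111111",
#     "to" => "22222222-2222-2222-2222-222222222222",
#     "value" => "50.00"
#   })
#   #=> {:ok, %Rocketpay.Accounts.Transactions.Response{}} or {:error, reason}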
| 32.928571 | 121 | 0.700651 |
f70eed110557fc53e4ee4b234e38030782895703 | 26,104 | exs | Elixir | test/floki/html/generated/tokenizer/unicodeChars_part1_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | ["MIT"] | 1,778 | 2015-01-07T14:12:31.000Z | 2022-03-29T22:42:48.000Z | test/floki/html/generated/tokenizer/unicodeChars_part1_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | ["MIT"] | 279 | 2015-01-01T15:54:50.000Z | 2022-03-28T18:06:03.000Z | test/floki/html/generated/tokenizer/unicodeChars_part1_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | ["MIT"] | 166 | 2015-04-24T20:48:02.000Z | 2022-03-28T17:29:05.000Z |
defmodule Floki.HTML.Generated.Tokenizer.UnicodecharsPart1Test do
use ExUnit.Case, async: true
# NOTE: This file was generated by "mix generate_tokenizer_tests unicodeChars.test".
# html5lib-tests rev: e52ff68cc7113a6ef3687747fa82691079bf9cc5
alias Floki.HTML.Tokenizer
test "tokenize/1 Invalid Unicode character U+0001" do
input = <<1>>
output = [["Character", <<1>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0002" do
input = <<2>>
output = [["Character", <<2>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0003" do
input = <<3>>
output = [["Character", <<3>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0004" do
input = <<4>>
output = [["Character", <<4>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0005" do
input = <<5>>
output = [["Character", <<5>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0006" do
input = <<6>>
output = [["Character", <<6>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0007" do
input = "\a"
output = [["Character", "\a"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0008" do
input = "\b"
output = [["Character", "\b"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+000B" do
input = "\v"
output = [["Character", "\v"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+000E" do
input = <<14>>
output = [["Character", <<14>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+000F" do
input = <<15>>
output = [["Character", <<15>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0010" do
input = <<16>>
output = [["Character", <<16>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0011" do
input = <<17>>
output = [["Character", <<17>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0012" do
input = <<18>>
output = [["Character", <<18>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0013" do
input = <<19>>
output = [["Character", <<19>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0014" do
input = <<20>>
output = [["Character", <<20>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0015" do
input = <<21>>
output = [["Character", <<21>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0016" do
input = <<22>>
output = [["Character", <<22>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0017" do
input = <<23>>
output = [["Character", <<23>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0018" do
input = <<24>>
output = [["Character", <<24>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+0019" do
input = <<25>>
output = [["Character", <<25>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+001A" do
input = <<26>>
output = [["Character", <<26>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+001B" do
input = "\e"
output = [["Character", "\e"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+001C" do
input = <<28>>
output = [["Character", <<28>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+001D" do
input = <<29>>
output = [["Character", <<29>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+001E" do
input = <<30>>
output = [["Character", <<30>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+001F" do
input = <<31>>
output = [["Character", <<31>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+007F" do
input = "\d"
output = [["Character", "\d"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+10FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+10FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+1FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+1FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+2FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+2FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+3FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+3FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+4FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+4FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+5FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+5FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+6FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+6FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+7FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+7FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+8FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+8FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+9FFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+9FFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+AFFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+AFFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+BFFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+BFFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+CFFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+CFFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+DFFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+DFFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+EFFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+EFFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD0" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD1" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD2" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD3" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD4" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD5" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD6" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD7" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD8" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDD9" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDDA" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDDB" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDDC" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDDD" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDDE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDDF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE0" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE1" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE2" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE3" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE4" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE5" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE6" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE7" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE8" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDE9" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDEA" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDEB" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDEC" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDED" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDEE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FDEF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FFFE" do
input = <<239, 191, 190>>
output = [["Character", <<239, 191, 190>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FFFF" do
input = <<239, 191, 191>>
output = [["Character", <<239, 191, 191>>]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FFFFE" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Invalid Unicode character U+FFFFF" do
input = ""
output = [["Character", ""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Valid Unicode character U+0009" do
input = "\t"
output = [["Character", "\t"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Valid Unicode character U+000A" do
input = "\n"
output = [["Character", "\n"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Valid Unicode character U+0020" do
input = " "
output = [["Character", " "]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Valid Unicode character U+0021" do
input = "!"
output = [["Character", "!"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Valid Unicode character U+0022" do
input = "\""
output = [["Character", "\""]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
test "tokenize/1 Valid Unicode character U+0023" do
input = "#"
output = [["Character", "#"]]
result =
input
|> Tokenizer.tokenize()
|> TokenizerTestLoader.tokenization_result()
assert result.tokens == output
end
end
test/gcloud_speech_to_text/integration_test.exs | membraneframework/membrane-element-gcloud-speech-to-text | ["Apache-2.0"]
defmodule Membrane.Element.GCloud.SpeechToText.IntegrationTest do
use ExUnit.Case
import Membrane.Testing.Assertions
alias Google.Cloud.Speech.V1.{
SpeechRecognitionAlternative,
StreamingRecognitionResult,
StreamingRecognizeResponse,
WordInfo
}
alias Membrane.{FLACParser, GCloud}
alias Membrane.Testing
alias Membrane.Time
@moduletag :external
@fixture_path "../fixtures/sample.flac" |> Path.expand(__DIR__)
@fixture_duration 7_270 |> Time.milliseconds() |> Time.to_nanoseconds()
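  # The helper below builds a three-element pipeline (file source -> FLAC parser ->
  # SpeechToText sink); `recognition_opts` are appended to the sink's options so
  # individual tests can tweak the streaming behaviour.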
defp testing_pipeline(recognition_opts) do
children = [
src: %Membrane.File.Source{location: @fixture_path},
parser: FLACParser,
sink:
struct!(
GCloud.SpeechToText,
[
language_code: "en-GB",
word_time_offsets: true,
interim_results: false
] ++ recognition_opts
)
]
Testing.Pipeline.start_link(links: Membrane.ParentSpec.link_linear(children))
end
test "recognition pipeline provides transcription of short file" do
assert {:ok, pid} = testing_pipeline([])
assert_end_of_stream(pid, :sink, :input, 10_000)
assert_pipeline_notified(pid, :sink, %StreamingRecognizeResponse{} = response, 10_000)
assert response.error == nil
assert [%StreamingRecognitionResult{} = res] = response.results
assert res.is_final == true
assert res.result_end_time == 7_270_000_000
assert [%SpeechRecognitionAlternative{} = alt] = res.alternatives
assert alt.confidence > 0.95
assert alt.transcript ==
"Adventure 1 a scandal in Bohemia from the Adventures of Sherlock Holmes by Sir Arthur Conan Doyle"
first_word = alt.words |> hd()
last_word = alt.words |> Enum.reverse() |> hd()
assert first_word == %WordInfo{
start_time: 100_000_000,
end_time: 1_400_000_000,
word: "Adventure"
}
assert last_word == %WordInfo{
start_time: 6_900_000_000,
end_time: 7_200_000_000,
word: "Doyle"
}
end
test "recognition pipeline uses overlap when reconnecting" do
streaming_time_limit = 6 |> Time.seconds()
assert {:ok, pid} =
testing_pipeline(
streaming_time_limit: streaming_time_limit,
reconnection_overlap_time: 2 |> Time.seconds()
)
assert_end_of_stream(pid, :sink, :input, 10_000)
assert_pipeline_notified(pid, :sink, %StreamingRecognizeResponse{} = response, 10_000)
assert response.error == nil
assert [%StreamingRecognitionResult{} = res] = response.results
assert res.is_final == true
delta = 150 |> Time.milliseconds() |> Time.to_nanoseconds()
assert_in_delta res.result_end_time, streaming_time_limit |> Time.to_nanoseconds(), delta
assert [%SpeechRecognitionAlternative{} = alt] = res.alternatives
assert alt.transcript ==
"Adventure 1 a scandal in Bohemia from the Adventures of Sherlock Holmes"
sherlock_word = alt.words |> Enum.find(fn %{word: word} -> word == "Sherlock" end)
assert %WordInfo{
start_time: start_time,
end_time: end_time,
word: "Sherlock"
} = sherlock_word
assert_in_delta start_time, 4_900_000_000, delta
assert_in_delta end_time, 5_200_000_000, delta
assert_pipeline_notified(pid, :sink, %StreamingRecognizeResponse{} = response, 10_000)
assert response.error == nil
assert [%StreamingRecognitionResult{} = res] = response.results
assert res.is_final == true
assert_in_delta res.result_end_time, @fixture_duration |> Time.to_nanoseconds(), delta
assert [%SpeechRecognitionAlternative{} = alt] = res.alternatives
assert alt.transcript =~ "of Sherlock Holmes by Sir Arthur Conan Doyle"
sherlock_word = alt.words |> Enum.find(fn %{word: word} -> word == "Sherlock" end)
assert %WordInfo{
start_time: start_time,
end_time: end_time,
word: "Sherlock"
} = sherlock_word
assert_in_delta start_time, 4_900_000_000, delta
assert_in_delta end_time, 5_200_000_000, delta
end
end
lib/model/statement.ex | fbettag/mt940 | ["MIT"]
defmodule MT940.Statement do
@moduledoc ~S"""
## Statement Number / Sequence Number
Sequential number of the statement, optionally followed by the sequence
number of the message within that statement when more than one message is
sent for one statement.
"""
defstruct [
:modifier,
:content,
:number,
:sheet
]
@type t :: %__MODULE__{}
use MT940.Field
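  # Illustrative examples (not from the original source): a field content of
  # "123/2" parses to %MT940.Statement{number: 123, sheet: 2}, while "1"
  # parses to %MT940.Statement{number: nil, sheet: 1}.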
defp parse_content(result = %__MODULE__{content: content}) do
matches = ~r/^(\d+)\/?(\d+)?$/
|> Regex.run(content, capture: :all_but_first)
|> Enum.map(&String.to_integer/1)
case matches do
[number, sheet] -> %__MODULE__{result | number: number, sheet: sheet}
[sheet] -> %__MODULE__{result | sheet: sheet}
end
end
end
test/temple/link_test.exs | exit9/temple | ["MIT"]
defmodule Temple.LinkTest do
use ExUnit.Case, async: true
use Temple
describe "phx_link" do
test "emits a link" do
{:safe, actual} =
temple do
phx_link("hi", to: "/hello")
end
assert actual =~ ~s{<a}
assert actual =~ ~s{href="/hello"}
assert actual =~ ~s{hi}
end
test "emits a link when passed block that has text" do
{:safe, actual} =
temple do
phx_link to: "/hello" do
text "hi"
end
end
assert String.starts_with?(actual, ~s{<a})
assert actual =~ ~s{href="/hello"}
assert actual =~ ~s{hi}
assert String.ends_with?(actual, ~s{</a>})
end
test "emits a link when passed block that has more markup" do
{:safe, actual} =
temple do
phx_link to: "/hello" do
div do
div "hi"
end
end
end
assert String.starts_with?(actual, ~s{<a})
assert actual =~ ~s{href="/hello"}
assert actual =~ ~s{<div><div>}
assert actual =~ ~s{hi}
assert actual =~ ~s{</div></div>}
assert String.ends_with?(actual, ~s{</a>})
end
test "emits a link with additional html attributes" do
{:safe, actual} =
temple do
phx_link("hi",
to: "/hello",
class: "phoenix",
id: "legendary",
data: [confirm: "Really?"],
method: :delete
)
end
assert actual =~ ~s{<a}
assert actual =~ ~s{href="/hello"}
assert actual =~ ~s{class="phoenix"}
assert actual =~ ~s{id="legendary"}
assert actual =~ ~s{data-confirm="Really?"}
assert actual =~ ~s{hi}
end
test "emits a link with a non GET method" do
{:safe, actual} =
temple do
phx_link("hi",
to: "/hello",
method: :delete
)
end
assert actual =~ ~s{<a}
assert actual =~ ~s{data-csrf="}
assert actual =~ ~s{data-method="delete"}
assert actual =~ ~s{data-to="/hello"}
assert actual =~ ~s{hi}
end
end
describe "phx_button" do
test "emits a button" do
{:safe, actual} =
temple do
phx_button("hi", to: "/hello")
end
assert actual =~ ~s{<button}
assert actual =~ ~s{data-to="/hello"}
assert actual =~ ~s{data-method="post"}
assert actual =~ ~s{hi}
end
test "emits a button when passed block that has text" do
{:safe, actual} =
temple do
phx_button to: "/hello" do
text "hi"
end
end
assert String.starts_with?(actual, ~s{<button})
assert actual =~ ~s{hi}
assert actual =~ ~s{data-to="/hello"}
assert actual =~ ~s{data-method="post"}
assert String.ends_with?(actual, ~s{</button>})
end
test "emits a button when passed block that has more markup" do
{:safe, actual} =
temple do
phx_button to: "/hello" do
div do
div "hi"
end
end
end
assert String.starts_with?(actual, ~s{<button})
assert actual =~ ~s{data-to="/hello"}
assert actual =~ ~s{data-method="post"}
assert actual =~ ~s{<div><div>}
assert actual =~ ~s{hi}
assert actual =~ ~s{</div></div>}
assert String.ends_with?(actual, ~s{</button>})
end
test "emits a button with additional html attributes" do
{:safe, actual} =
temple do
phx_button("hi",
to: "/hello",
class: "phoenix",
id: "legendary",
data: [confirm: "Really?"],
method: :delete
)
end
assert String.starts_with?(actual, ~s{<button})
assert actual =~ ~s{class="phoenix"}
assert actual =~ ~s{id="legendary"}
assert actual =~ ~s{data-confirm="Really?"}
assert actual =~ ~s{hi}
assert String.ends_with?(actual, ~s{</button>})
end
test "emits a button with a non GET method" do
{:safe, actual} =
temple do
phx_button("hi",
to: "/hello",
method: :delete
)
end
assert String.starts_with?(actual, ~s{<button})
assert actual =~ ~s{data-csrf="}
assert actual =~ ~s{data-method="delete"}
assert actual =~ ~s{data-to="/hello"}
assert actual =~ ~s{hi}
assert String.ends_with?(actual, ~s{</button>})
end
end
end
lib/model/refund_payment_request.ex | Chatatata/iyzico | ["MIT"]
defmodule Iyzico.RefundPaymentRequest do
@moduledoc false
@doc false
@enforce_keys ~w(conversation_id transaction_id price ip currency)a
defstruct [
:conversation_id,
:transaction_id,
:price,
:ip,
:currency
]
@typedoc false
@type currency :: :try
@typedoc false
@type t :: %__MODULE__{
conversation_id: binary,
transaction_id: binary,
price: binary,
ip: tuple,
currency: currency
}
end
defimpl Iyzico.IOListConvertible, for: Iyzico.RefundPaymentRequest do
@default_locale Keyword.get(Application.get_env(:iyzico, Iyzico), :locale, "en")
def to_iolist(data) do
[{"locale", @default_locale},
{"conversationId", data.conversation_id},
{"paymentTransactionId", data.transaction_id},
{"price", data.price},
{"ip", Enum.join(Tuple.to_list(data.ip), ".")},
{"currency", Atom.to_string(data.currency) |> String.upcase()}]
end
end
test/support/channel_case.ex | Dhall777/signinsheet | ["BSD-3-Clause"]
defmodule SigninsheetWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use SigninsheetWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import SigninsheetWeb.ChannelCase
# The default endpoint for testing
@endpoint SigninsheetWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Signinsheet.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Signinsheet.Repo, {:shared, self()})
end
:ok
end
end
architect/lib/git/commit.ex | VJftw/velocity | ["Apache-2.0"]
defmodule Git.Commit do
@keys [:sha, :author, :gpg_fingerprint, :message]
@enforce_keys @keys
defstruct @keys
defmodule(Author, do: defstruct([:email, :name, :date]))
@doc ~S"""
The format passed to the Git CLI for a commit
see: https://git-scm.com/docs/pretty-formats
%H: commit hash
%aI: author date, strict ISO 8601 format
%aE: author email (respecting .mailmap, see git-shortlog[1] or git-blame[1])
%aN: author name (respecting .mailmap, see git-shortlog[1] or git-blame[1])
%GF: show the fingerprint of the key used to sign a signed commit
%s: subject
"""
def format(), do: "%H%n%aI%n%aE%n%aN%n%GF%n%s"
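  # For illustration only (hypothetical values), `git log --format=#{format()}` emits
  # six lines per commit, which `parse/1` below splits and chunks back into structs:
  #
  #     b86ab391f2eee95f71f3e46498e32f97c1a7058e
  #     2019-01-01T12:00:00+00:00
  #     jane@example.com
  #     Jane Doe
  #
  #     Initial commit
  #
  # (the fifth line is empty when the commit is not GPG-signed).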
@doc ~S"""
Parses commit output into a list of Commit structs
"""
def parse({:ok, stdout}), do: parse(stdout)
def parse({:error, error}), do: {:error, error}
def parse(stdout) when is_binary(stdout) do
stdout
|> String.split("\n")
|> Enum.chunk_every(6)
|> Enum.filter(fn x -> x != [""] end)
|> Enum.map(&parse_commit_lines/1)
end
@doc ~S"""
Parses commit output into a single Commit struct
"""
def parse_show({:ok, stdout}), do: parse_show(stdout)
def parse_show({:error, error}), do: {:error, error}
def parse_show(stdout) when is_binary(stdout) do
stdout
|> String.split("\n")
|> parse_commit_lines()
end
defp parse_commit_lines(l) do
{:ok, dt, _} = DateTime.from_iso8601(Enum.at(l, 1))
%__MODULE__{
sha: Enum.at(l, 0),
author: %__MODULE__.Author{
date: dt,
email: Enum.at(l, 2),
name: Enum.at(l, 3)
},
gpg_fingerprint: if(Enum.at(l, 4) != "", do: Enum.at(l, 4), else: nil),
message: Enum.at(l, 5)
}
end
@doc ~S"""
## Examples
      iex> Git.Commit.parse_count("932\n\n")
932
"""
def parse_count({:ok, stdout}), do: parse_count(stdout)
def parse_count({:error, error}), do: {:error, error}
def parse_count(stdout) when is_binary(stdout) do
stdout
|> String.split("\n")
|> parse_count()
end
def parse_count([line | _]) do
{count, _} = Integer.parse(line)
count
end
def list_for_ref(dir, ref) do
{_out, 0} = System.cmd("git", ["checkout", "--force", ref], cd: dir)
{out, 0} = System.cmd("git", ["log", "--format=#{format()}"], cd: dir)
parse(out)
end
def get_by_sha(dir, sha) do
{out, 0} = System.cmd("git", ["show", "-s", "--format=#{format()}", sha], cd: dir)
out
|> parse_show
end
def count_for_branch(dir, branch) do
{_out, 0} = System.cmd("git", ["checkout", "--force", branch], cd: dir)
{out, 0} = System.cmd("git", ["rev-list", "--count", branch], cd: dir)
out
|> parse_count()
end
def count(dir) do
{out, 0} = System.cmd("git", ["rev-list", "--count", "--all"], cd: dir)
out
|> parse_count()
end
end
testData/org/elixir_lang/reference/module/multiple_alias/nested/reference.ex | keyno63/intellij-elixir | ["Apache-2.0"]
defmodule Prefix.Reference do
alias Prefix.{MultipleAliasAye, MultipleAliasBee}
MultipleAliasAye.Nested<caret>
@a 1
end
lab1/lib/lab1.ex | AlexandruBurlacu/NetworkProgrammingLabs | ["MIT"]
defmodule Lab1 do
@moduledoc """
Documentation for Lab1.
"""
@doc """
Hello world.
## Examples
iex> Lab1.hello
:world
"""
def hello do
:world
end
end
lib/twirp/protoc/generator.ex | shanna/twirp-elixir | ["Apache-2.0"]
defmodule Twirp.Protoc.Generator do
@moduledoc false
  # Builds a CodeGeneratorResponse.File ("<name>_twirp.ex") for each .proto descriptor
  # handed over by protoc, containing the Twirp service definitions emitted by
  # ServiceGenerator.
alias Twirp.Protoc.Generator.Service, as: ServiceGenerator
def generate(ctx, desc) do
name = new_file_name(desc.name)
Google.Protobuf.Compiler.CodeGeneratorResponse.File.new(
name: name,
content: generate_content(ctx, desc)
)
end
defp new_file_name(name) do
String.replace_suffix(name, ".proto", "_twirp.ex")
end
def generate_content(ctx, desc) do
ctx = %{
ctx
| package: desc.package || "",
syntax: syntax(desc.syntax),
module_prefix: (desc.package || "")
}
ctx = %{ctx | dep_type_mapping: get_dep_type_mapping(ctx, desc.dependency, desc.name)}
list = ServiceGenerator.generate_list(ctx, desc.service)
list
|> List.flatten()
|> Enum.join("\n")
|> format_code()
end
@doc false
def get_dep_pkgs(%{pkg_mapping: mapping, package: pkg}, deps) do
pkgs = deps |> Enum.map(fn dep -> mapping[dep] end)
pkgs = if pkg && String.length(pkg) > 0, do: [pkg | pkgs], else: pkgs
Enum.sort(pkgs, &(byte_size(&2) <= byte_size(&1)))
end
def get_dep_type_mapping(%{global_type_mapping: global_mapping}, deps, file_name) do
mapping =
Enum.reduce(deps, %{}, fn dep, acc ->
Map.merge(acc, global_mapping[dep])
end)
Map.merge(mapping, global_mapping[file_name])
end
defp syntax("proto3"), do: :proto3
defp syntax(_), do: :proto2
def format_code(code) do
    formatted =
      if Code.ensure_loaded?(Code) && function_exported?(Code, :format_string!, 2) do
        code
        |> Code.format_string!(locals_without_parens: [rpc: 4, package: 1, service: 1])
        |> IO.iodata_to_binary()
      else
        code
      end
    if formatted == "" do
      formatted
    else
      formatted <> "\n"
    end
end
end
test/skout/yaml/encoder_test.exs | marcelotto/skout | ["MIT"]
defmodule Skout.YAML.EncoderTest do
use Skout.Test.Case
doctest Skout.YAML.Encoder
import Skout.YAML.Encoder, only: [encode: 1]
@example_document %Skout.Document{
manifest: ex_manifest(),
skos: ex_skos()
}
test "empty Skout document" do
document = %Skout.Document{
manifest: ex_manifest(),
skos: RDF.Graph.new()
}
assert encode(document) ==
{:ok,
"""
base_iri: #{document.manifest.base_iri}
iri_normalization: #{document.manifest.iri_normalization}
label_type: #{document.manifest.label_type}
---
"""}
end
test "non-empty Skout document" do
assert encode(@example_document) ==
{:ok,
"""
base_iri: #{@example_document.manifest.base_iri}
iri_normalization: #{@example_document.manifest.iri_normalization}
label_type: #{@example_document.manifest.label_type}
---
Foo:
- Bar:
- baz baz:
- qux:
- quux:
"""}
end
test "Skout document with descriptions" do
assert encode(
@example_document
|> Skout.Document.update_graph(fn skos ->
skos
|> Graph.add([
{EX.Foo, SKOS.related(), EX.qux()},
{EX.qux(), SKOS.related(), EX.Foo}
])
|> Graph.add(
EX.Foo
|> SKOS.altLabel(42, 3.14, true, false)
)
|> Graph.add(
EX.Bar
|> RDF.type(EX.Type, EX.Foo)
|> RDFS.seeAlso(
~I<http://example.com/other/Bar>,
~I<http://example.com/another/Bar>,
~I<http://example.com/yet_another/Bar>
)
)
end)
) ==
{:ok,
"""
base_iri: #{@example_document.manifest.base_iri}
iri_normalization: #{@example_document.manifest.iri_normalization}
label_type: #{@example_document.manifest.label_type}
---
Foo:
- :altLabel: [false, true, 3.14, 42]
- :related: qux
- Bar:
- :a: [:Foo, <http://example.com/Type>]
- :seeAlso:
- <http://example.com/another/Bar>
- <http://example.com/other/Bar>
- <http://example.com/yet_another/Bar>
- baz baz:
- qux:
- :related: Foo
- quux:
"""}
end
test "Skout document with non-default label_type" do
assert encode(
Skout.Document.new!(ex_manifest(label_type: :notation))
|> Skout.Document.update_graph(fn skos ->
skos
|> Graph.add(
EX.Foo
|> RDF.type(SKOS.Concept)
|> SKOS.notation(~L"Foo")
|> SKOS.prefLabel(~L"FooBar")
|> SKOS.inScheme(ex_base_iri())
|> SKOS.topConceptOf(ex_base_iri())
)
|> Graph.add(
ex_base_iri()
|> RDF.type(SKOS.ConceptScheme)
|> SKOS.hasTopConcept(EX.Foo)
)
end)
) ==
{:ok,
"""
base_iri: #{@example_document.manifest.base_iri}
iri_normalization: #{@example_document.manifest.iri_normalization}
label_type: notation
---
Foo:
- :prefLabel: FooBar
"""}
end
test "Skout document with circles" do
assert_raise RuntimeError, ~r/concept scheme contains a circle/, fn ->
encode(document_with_circle())
end
end
describe "preamble" do
test "concept_scheme" do
document = %Skout.Document{
manifest: ex_manifest(concept_scheme: "http://example.com/foo#"),
skos: RDF.Graph.new()
}
assert encode(document) ==
{:ok,
"""
base_iri: #{document.manifest.base_iri}
concept_scheme: http://example.com/foo#
iri_normalization: #{document.manifest.iri_normalization}
label_type: #{document.manifest.label_type}
---
"""}
end
test "suppressed concept_scheme" do
document = %Skout.Document{
manifest: ex_manifest(concept_scheme: false),
skos: RDF.Graph.new()
}
assert encode(document) ==
{:ok,
"""
base_iri: #{document.manifest.base_iri}
iri_normalization: #{document.manifest.iri_normalization}
label_type: #{document.manifest.label_type}
---
"""}
end
test "concept scheme with descriptions" do
document = %Skout.Document{
manifest: ex_manifest(concept_scheme: "http://example.com/foo#"),
skos:
RDF.Graph.new(
~I<http://example.com/foo#>
|> RDF.type(SKOS.ConceptScheme)
|> DC.title(~L"An example concept scheme")
|> SKOS.definition(~L"A description of a concept scheme")
|> DC.creator(~L"John Doe")
|> DC.created(XSD.integer(2019))
# This is an unknown property and should be ignored.
|> EX.foo(42)
)
}
assert encode(document) ==
{:ok,
"""
base_iri: #{document.manifest.base_iri}
concept_scheme:
id: http://example.com/foo#
title: An example concept scheme
creator: John Doe
created: 2019
definition: A description of a concept scheme
iri_normalization: #{document.manifest.iri_normalization}
label_type: #{document.manifest.label_type}
---
"""}
end
test "label_type" do
document = %Skout.Document{
manifest: ex_manifest(label_type: :notation),
skos:
Graph.new()
|> Graph.add(
ex_base_iri()
|> SKOS.prefLabel(~L"Foo")
|> SKOS.notation(~L"bar")
)
}
assert encode(document) ==
{:ok,
"""
base_iri: #{document.manifest.base_iri}
iri_normalization: #{document.manifest.iri_normalization}
label_type: notation
---
"""}
end
test "labels for the concept scheme" do
document = %Skout.Document{
manifest: ex_manifest(label_type: :notation, concept_scheme: ex_base_iri()),
skos:
Graph.new()
|> Graph.add(
ex_base_iri()
|> SKOS.prefLabel(~L"Foo")
|> SKOS.notation(~L"bar")
)
}
assert encode(document) ==
{:ok,
"""
base_iri: #{document.manifest.base_iri}
concept_scheme:
id: #{document.manifest.base_iri}
prefLabel: Foo
notation: bar
iri_normalization: #{document.manifest.iri_normalization}
label_type: notation
---
"""}
end
test "default_language" do
document = %Skout.Document{
manifest: ex_manifest(default_language: "en"),
skos: RDF.Graph.new()
}
assert encode(document) ==
{:ok,
"""
base_iri: #{document.manifest.base_iri}
default_language: en
iri_normalization: #{document.manifest.iri_normalization}
label_type: #{document.manifest.label_type}
---
"""}
end
end
end
| 29.977695 | 84 | 0.474578 |
f71058e21d777629bab767994aaf8713de2977c6 | 1,637 | ex | Elixir | apps/neoscan_web/lib/neoscan_web/endpoint.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 75 | 2017-07-23T02:45:32.000Z | 2021-12-13T11:04:17.000Z | apps/neoscan_web/lib/neoscan_web/endpoint.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 252 | 2017-07-13T19:36:00.000Z | 2021-07-28T18:40:00.000Z | apps/neoscan_web/lib/neoscan_web/endpoint.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 87 | 2017-07-23T02:45:34.000Z | 2022-03-02T14:54:27.000Z | defmodule NeoscanWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :neoscan_web
plug(Plug.UrlRewrite)
socket("/socket", NeoscanWeb.UserSocket)
socket("/wobserver", Wobserver.Web.PhoenixSocket)
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug(
Plug.Static,
at: "/",
from: :neoscan_web,
gzip: true,
only: ~w(css fonts images js favicon.ico robots.txt docs)
)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket)
plug(Phoenix.LiveReloader)
plug(Phoenix.CodeReloader)
end
plug(Plug.RequestId)
plug(Plug.Logger, log: :debug)
plug(
Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
)
plug(Plug.MethodOverride)
plug(Plug.Head)
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug(
Plug.Session,
store: :cookie,
key: "_neoscan_web_key",
signing_salt: "tPiwvjiW"
)
plug(CORSPlug)
plug(NeoscanWeb.Router)
def init(_key, config) do
if config[:load_from_system_env] do
port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
else
{:ok, config}
end
end
end
| 25.578125 | 95 | 0.6854 |
f7107e75599b11e33a766e32f08db2842588ce6b | 706 | ex | Elixir | lib/kdf/pbkdf2/result.ex | impl/ex_crypto | 587aa8a63bd1768a361da207339b4a8d68560c22 | [
"Apache-2.0"
] | 2 | 2016-05-02T18:19:13.000Z | 2016-05-03T16:11:58.000Z | lib/kdf/pbkdf2/result.ex | impl/ex_crypto | 587aa8a63bd1768a361da207339b4a8d68560c22 | [
"Apache-2.0"
] | null | null | null | lib/kdf/pbkdf2/result.ex | impl/ex_crypto | 587aa8a63bd1768a361da207339b4a8d68560c22 | [
"Apache-2.0"
] | null | null | null | defmodule Cryptex.Kdf.Pbkdf2.Result do
alias Cryptex.Kdf.Pbkdf2.Prf
alias Cryptex.Kdf.Pbkdf2.Result
defstruct prf: nil, rounds: nil, salt: nil, digest: nil
@type t :: %__MODULE__{prf: Prf.t, rounds: integer, salt: binary, digest: binary}
@spec new(Prf.t, integer, binary, binary) :: t
def new(prf, rounds, salt, digest) do
%Result{prf: prf, rounds: rounds, salt: salt, digest: digest}
end
@spec prf(t) :: Prf.t
def prf(%Result{prf: prf}), do: prf
@spec rounds(t) :: integer
def rounds(%Result{rounds: rounds}), do: rounds
@spec salt(t) :: binary
def salt(%Result{salt: salt}), do: salt
@spec digest(t) :: binary
def digest(%Result{digest: digest}), do: digest
end
| 26.148148 | 83 | 0.667139 |
f710d280771a15b3319429037adf6dd1dcece725 | 959 | ex | Elixir | lib/game/room/supervisor.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | lib/game/room/supervisor.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | lib/game/room/supervisor.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | defmodule Game.Room.Supervisor do
@moduledoc """
Supervisor for Rooms
"""
use Supervisor
alias Game.Room
alias Game.Zone
def start_link(zone) do
Supervisor.start_link(__MODULE__, zone, id: zone.id)
end
@doc """
Return all rooms that are currently online
"""
@spec rooms(pid) :: [pid]
def rooms(pid) do
pid
|> Supervisor.which_children()
|> Enum.map(&elem(&1, 1))
end
@doc """
Start a newly created room in the zone
"""
@spec start_child(pid, Room.t()) :: :ok
def start_child(pid, room) do
child_spec = worker(Room, [room.id], id: room.id, restart: :permanent)
Supervisor.start_child(pid, child_spec)
end
def init(zone) do
children =
zone.id
|> Room.for_zone()
|> Enum.map(fn room_id ->
worker(Room, [room_id], id: room_id, restart: :permanent)
end)
Zone.room_supervisor(zone.id, self())
supervise(children, strategy: :one_for_one)
end
end
| 20.404255 | 74 | 0.635036 |
f710d5ceab528431b4658822521d89fcad84fa25 | 542 | exs | Elixir | lib/ex_unit/mix.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/mix.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/mix.exs | liveforeverx/elixir | cf3cf0bd5443b59206e5733602244bc3543f0a53 | [
"Apache-2.0"
] | null | null | null | defmodule ExUnit.Mixfile do
use Mix.Project
def project do
[app: :ex_unit,
version: System.version,
build_per_environment: false]
end
def application do
[registered: [ExUnit.Server],
mod: {ExUnit, []},
env: [
# Calculated on demand
# max_cases: :erlang.system_info(:schedulers_online),
# seed: rand(),
# timeout: 30_000
autorun: true,
colors: [],
exclude: [],
include: [],
formatters: [ExUnit.CLIFormatter],
trace: false]]
end
end
lib/todo_app/models/base.ex | Angarsk8/todoapp_cowboy_elixir | ["MIT"]
defmodule TodoApp.BaseModel do
defmacro __using__(_opts) do
quote do
use Ecto.Schema
import Ecto
import Ecto.Changeset
import Ecto.Query
end
end
end | 16.818182 | 30 | 0.67027 |
f71104e054852f9969a9e869262d6a5f55fb4812 | 1,815 | ex | Elixir | clients/civic_info/lib/google_api/civic_info/v2/model/division_search_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/civic_info/lib/google_api/civic_info/v2/model/division_search_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/civic_info/lib/google_api/civic_info/v2/model/division_search_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.CivicInfo.V2.Model.DivisionSearchResponse do
@moduledoc """
The result of a division search query.
## Attributes
* `kind` (*type:* `String.t`, *default:* `civicinfo#divisionSearchResponse`) - Identifies what kind of resource this is. Value: the fixed string "civicinfo#divisionSearchResponse".
* `results` (*type:* `list(GoogleApi.CivicInfo.V2.Model.DivisionSearchResult.t)`, *default:* `nil`) -
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:kind => String.t(),
:results => list(GoogleApi.CivicInfo.V2.Model.DivisionSearchResult.t())
}
field(:kind)
field(:results, as: GoogleApi.CivicInfo.V2.Model.DivisionSearchResult, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CivicInfo.V2.Model.DivisionSearchResponse do
def decode(value, options) do
GoogleApi.CivicInfo.V2.Model.DivisionSearchResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CivicInfo.V2.Model.DivisionSearchResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
lib/group_manager/supervisor.ex | dbeck/groupman_ex | ["MIT"]
defmodule GroupManager.Supervisor do
use Supervisor
alias GroupManager.TopologyDB
alias GroupManager.Receiver
def start_link(opts \\ []) do
case opts do
[name: name] ->
Supervisor.start_link(__MODULE__, :no_args, opts)
_ ->
Supervisor.start_link(__MODULE__, :no_args, [name: id_atom()] ++ opts)
end
end
def init(:no_args) do
children = [
supervisor(Receiver, [[name: Receiver.id_atom()]]),
supervisor(TopologyDB, [[name: TopologyDB.id_atom()]])
]
{:ok, pid} = supervise(children, strategy: :one_for_one)
end
def locate, do: Process.whereis(id_atom())
def locate! do
case Process.whereis(id_atom()) do
pid when is_pid(pid) ->
pid
end
end
def id_atom, do: __MODULE__
end
implementations/elixir/test/ockam/channel_test.exs | hollyfeld/ockam | ["Apache-2.0"]
defmodule Ockam.Channel.Tests do
use ExUnit.Case, async: true
doctest Ockam.Channel
alias Ockam.Channel
alias Ockam.Vault
describe "Ockam.Channel" do
test "well known" do
{:ok, responder_vault} = Vault.create()
responder_s =
to_keypair(
responder_vault,
"0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"
)
responder_e =
to_keypair(
responder_vault,
"4142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f60"
)
{:ok, responder} =
Channel.create(
role: :responder,
vault: responder_vault,
identity_keypair: responder_s,
e: responder_e
)
{:ok, initiator_vault} = Vault.create()
initiator_s =
to_keypair(
initiator_vault,
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"
)
initiator_e =
to_keypair(
initiator_vault,
"202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f"
)
{:ok, _initiator} =
Channel.create(
role: :initiator,
vault: initiator_vault,
identity_keypair: initiator_s,
e: initiator_e,
onward_route: [responder.address]
)
end
end
def to_keypair(vault, hex) do
bin = Base.decode16!(hex, case: :lower)
{:ok, private_key} = Ockam.Vault.import_secret(vault, bin, type: :curve25519_private)
{:ok, public_key} = Ockam.Vault.get_public_key(vault, private_key)
%{private: private_key, public: public_key}
end
end
clients/android_management/lib/google_api/android_management/v1/model/power_management_event.ex | nuxlli/elixir-google-api | ["Apache-2.0"]
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AndroidManagement.V1.Model.PowerManagementEvent do
@moduledoc """
A power management event.
## Attributes
- batteryLevel (float()): For BATTERY_LEVEL_COLLECTED events, the battery level as a percentage. Defaults to: `null`.
- createTime (DateTime.t): The creation time of the event. Defaults to: `null`.
- eventType (String.t): Event type. Defaults to: `null`.
- Enum - one of [POWER_MANAGEMENT_EVENT_TYPE_UNSPECIFIED, BATTERY_LEVEL_COLLECTED, POWER_CONNECTED, POWER_DISCONNECTED, BATTERY_LOW, BATTERY_OKAY, BOOT_COMPLETED, SHUTDOWN]
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:batteryLevel => any(),
:createTime => DateTime.t(),
:eventType => any()
}
field(:batteryLevel)
field(:createTime, as: DateTime)
field(:eventType)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidManagement.V1.Model.PowerManagementEvent do
def decode(value, options) do
GoogleApi.AndroidManagement.V1.Model.PowerManagementEvent.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidManagement.V1.Model.PowerManagementEvent do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
lib/ex_platform_web/live/page_live.ex | joseph-lozano/ex_platform | ["MIT"]
defmodule ExPlatformWeb.PageLive do
@moduledoc """
homepage
"""
use ExPlatformWeb, :live_view
@impl true
def mount(_params, _session, socket) do
{:ok, socket}
end
@impl true
def handle_params(_param, _url, socket) do
{:noreply, socket}
end
end
lib/tint.ex | tlux/tint | ["MIT"]
defmodule Tint do
@moduledoc """
A library allowing calculations with colors and conversions between different
colorspaces.
"""
alias Tint.{CMYK, DIN99, HSV, Lab, RGB, XYZ}
@typedoc """
A type representing a color.
"""
@type color ::
CMYK.t()
| DIN99.t()
| HSV.t()
| Lab.t()
| RGB.t()
| XYZ.t()
@colorspace_aliases %{
cmyk: Tint.CMYK,
din99: Tint.DIN99,
hsv: Tint.HSV,
lab: Tint.Lab,
rgb: Tint.RGB,
xyz: Tint.XYZ
}
@typedoc """
A type representing a colorspace.
"""
@type colorspace :: atom | module
@doc """
Gets the converted module for the given colorspace atom or module.
"""
@doc since: "1.0.0"
@spec converter_for(colorspace) :: {:ok, module} | :error
def converter_for(colorspace) do
colorspace_mod = Map.get(@colorspace_aliases, colorspace, colorspace)
convertible_mod = Module.concat(colorspace_mod, Convertible)
if Code.ensure_loaded?(convertible_mod) &&
function_exported?(convertible_mod, :convert, 1) do
{:ok, convertible_mod}
else
:error
end
end
@doc """
Converts the given color to another colorspace.
## Examples
iex> Tint.convert(Tint.RGB.new(40, 66, 67), :cmyk)
{:ok, %Tint.CMYK{cyan: 0.403, magenta: 0.0149, yellow: 0.0, key: 0.7373}}
iex> Tint.convert(Tint.RGB.new(255, 127, 30), Tint.HSV)
{:ok, %Tint.HSV{hue: 25.9, saturation: 0.8824, value: 1.0}}
iex> Tint.convert(Tint.RGB.new(255, 127, 30), :invalid)
:error
"""
@doc since: "1.0.0"
@spec convert(color, colorspace) :: {:ok, color} | :error
def convert(color, colorspace) do
with {:ok, convertible_mod} <- converter_for(colorspace) do
{:ok, convertible_mod.convert(color)}
end
end
@doc """
Converts the given color to another colorspace. Raises when the colorspace
is invalid.
## Examples
iex> Tint.convert!(Tint.RGB.new(40, 66, 67), :cmyk)
%Tint.CMYK{cyan: 0.403, magenta: 0.0149, yellow: 0.0, key: 0.7373}
iex> Tint.convert!(Tint.RGB.new(255, 127, 30), Tint.HSV)
%Tint.HSV{hue: 25.9, saturation: 0.8824, value: 1.0}
iex> Tint.convert!(Tint.RGB.new(255, 127, 30), :foo)
** (ArgumentError) Unknown colorspace: :foo
"""
@doc since: "1.0.0"
@spec convert!(color, colorspace) :: color
def convert!(color, colorspace) do
case convert(color, colorspace) do
{:ok, color} ->
color
:error ->
raise ArgumentError, "Unknown colorspace: #{inspect(colorspace)}"
end
end
@doc """
Converts the given color to the CMYK colorspace.
## Example
iex> Tint.to_cmyk(Tint.RGB.new(40, 66, 67))
#Tint.CMYK<40.3%,1.49%,0.0%,73.73%>
"""
@doc since: "0.3.0"
@spec to_cmyk(color) :: CMYK.t()
defdelegate to_cmyk(color), to: CMYK.Convertible, as: :convert
@doc """
Converts the given color to the DIN99 colorspace.
"""
@doc since: "1.0.0"
@spec to_din99(color) :: DIN99.t()
defdelegate to_din99(color), to: DIN99.Convertible, as: :convert
@doc """
Converts the given color to the HSV colorspace.
## Example
iex> Tint.to_hsv(Tint.RGB.new(255, 127, 30))
#Tint.HSV<25.9°,88.24%,100.0%>
"""
@spec to_hsv(color) :: HSV.t()
defdelegate to_hsv(color), to: HSV.Convertible, as: :convert
@doc """
Converts the given color to the CIELAB colorspace.
"""
@doc since: "1.0.0"
@spec to_lab(color) :: Lab.t()
defdelegate to_lab(color), to: Lab.Convertible, as: :convert
@doc """
Converts the given color to the RGB colorspace.
## Example
iex> Tint.to_rgb(Tint.HSV.new(25.8, 0.882, 1))
#Tint.RGB<255,127,30 (#FF7F1E)>
"""
@spec to_rgb(color) :: RGB.t()
defdelegate to_rgb(color), to: RGB.Convertible, as: :convert
@doc """
Converts the given color to the XYZ (CIE 1931) colorspace.
"""
@doc since: "1.0.0"
@spec to_xyz(color) :: XYZ.t()
defdelegate to_xyz(color), to: XYZ.Convertible, as: :convert
end
lib/logger_sentry/sentry.ex | RiverFinancial/logger_sentry | ["Apache-2.0"]
defmodule LoggerSentry.Sentry do
@moduledoc """
Generate output and options for sentry.
"""
@doc """
Generate output.
"""
@spec generate_output(atom, Keyword.t(), list()) :: {Exception.t(), Keyword.t()}
def generate_output(level, metadata, message) do
case Keyword.get(metadata, :crash_reason) do
{reason, stacktrace} -> {reason, Keyword.put(metadata, :stacktrace, stacktrace)}
_ -> generate_output_without_crash_reason(level, metadata, message)
end
end
@doc false
defp generate_output_without_crash_reason(level, metadata, message) do
case Keyword.get(metadata, :exception) do
nil ->
{output, _} =
Exception.blame(
level,
:erlang.iolist_to_binary(message),
Keyword.get(metadata, :stacktrace, [])
)
{output, metadata}
exception ->
{exception, metadata}
end
end
@doc """
Generate options for sentry.
"""
@spec generate_opts(Keyword.t(), list()) :: Keyword.t()
def generate_opts(metadata, message) do
metadata
|> generate_opts_extra(message)
|> generate_opts_fingerprints(message)
end
@doc false
defp generate_opts_extra(metadata, msg) do
%{
application: Keyword.get(metadata, :application),
module: Keyword.get(metadata, :module),
function: Keyword.get(metadata, :function),
file: Keyword.get(metadata, :file),
line: Keyword.get(metadata, :line),
log_message: :erlang.iolist_to_binary(msg)
}
|> Enum.reject(fn {_, v} -> is_nil(v) end)
|> Map.new()
|> Map.merge(Keyword.get(metadata, :extra, %{}))
|> case do
empty when empty == %{} -> metadata
other -> Keyword.put(metadata, :extra, other)
end
end
@doc false
defp generate_opts_fingerprints(metadata, msg) do
case generate_fingerprints(metadata, msg) do
[] -> metadata
other -> Keyword.put(metadata, :fingerprint, other)
end
end
@doc false
defp generate_fingerprints(metadata, msg) do
:logger_sentry
|> Application.get_env(:fingerprints_mods, [])
|> LoggerSentry.Fingerprint.fingerprints(metadata, msg)
|> Kernel.++(Keyword.get(metadata, :fingerprint, []))
|> case do
[] -> []
tmp -> Enum.uniq(tmp)
end
end
# __end_of_module__
end
api_sample/test/views/page_view_test.exs | saiidalhalawi/phoenix-ansible | ["Apache-2.0"]
defmodule ApiSample.PageViewTest do
use ApiSample.ConnCase, async: true
end
clients/dataflow/lib/google_api/dataflow/v1b3/model/read_instruction.ex | GoNZooo/elixir-google-api | ["Apache-2.0"]
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.ReadInstruction do
@moduledoc """
An instruction that reads records. Takes no inputs, produces one output.
## Attributes
- source (Source): The source to read from. Defaults to: `null`.
"""
defstruct [
:"source"
]
end
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.ReadInstruction do
import GoogleApi.Dataflow.V1b3.Deserializer
def decode(value, options) do
value
|> deserialize(:"source", :struct, GoogleApi.Dataflow.V1b3.Model.Source, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.ReadInstruction do
def encode(value, options) do
GoogleApi.Dataflow.V1b3.Deserializer.serialize_non_nil(value, options)
end
end
lib/penelope/nlp/pos_tagger.ex | pylon/penelope | ["Apache-2.0"]
defmodule Penelope.NLP.POSTagger do
@moduledoc """
The part-of-speech tagger transforms a tokenized sentence into a list of
`{token, pos_tag}` tuples. The tagger takes no responsibility for
tokenization; this means that callers must be careful to maintain the same
tokenization scheme between training and evaluating to ensure the best
results.
As this tagger does not ship with a pretrained model, it is both
language- and tagset-agnostic, though the default feature set used
(see `POSFeaturizer`) was designed for English.
See `POSTaggerTrainer.train/2` for an example
of how to train a new POS tagger model.
"""
alias Penelope.ML.Pipeline
@type model :: %{pos_tagger: [{atom, any}]}
@doc """
Fits the tagger model. Custom featurizers may be supplied.
"""
@spec fit(
context :: map,
x :: [tokens :: [String.t()]],
y :: [tags :: [String.t()]],
featurizers :: [{atom | String.t(), [any]}]
) :: model
def fit(context, x, y, featurizers \\ [{:pos_featurizer, []}]) do
pipeline = featurizers ++ [{:crf_tagger, []}]
%{pos_tagger: Pipeline.fit(context, x, y, pipeline)}
end
@doc """
Attaches part of speech tags to a list of tokens.
Example:
```
iex> POSTagger.tag(model, %{}, ["Judy", "saw", "her"])
[{"Judy", "NNP"}, {"saw", "VBD"}, {"her", "PRP$"}]
```
"""
@spec tag(model :: model, context :: map, tokens :: [String.t()]) :: [
{String.t(), String.t()}
]
def tag(model, context, tokens) do
[{tags, _probability}] =
Pipeline.predict_sequence(model.pos_tagger, context, [tokens])
Enum.zip(tokens, tags)
end
@doc """
Imports parameters from a serialized model.
"""
@spec compile(params :: map) :: model
def compile(params),
do: %{pos_tagger: Pipeline.compile(params["pos_tagger"])}
@doc """
Exports a runtime model to a serializable data structure.
"""
@spec export(model :: model) :: map
def export(model),
do: %{"pos_tagger" => Pipeline.export(model.pos_tagger)}
end
lib/mapa_celulas_web/router.ex | sntpiraquara/mapa_celulas | ["MIT"]
defmodule MapaCelulasWeb.Router do
use MapaCelulasWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/api", MapaCelulasWeb do
pipe_through :api
get "/cells", CellsController, :list_all
end
scope "/", MapaCelulasWeb do
pipe_through :browser
get "/*path", PageController, :index
end
end
.credo.exs | mkarbowiak/sitemap | ["MIT"]
%{
configs: [
%{
name: "default",
files: %{
included: ["lib/", "src/", "web/", "apps/"],
excluded: []
},
checks: [
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 120},
{Credo.Check.Readability.ModuleDoc, false},
{Credo.Check.Refactor.Nesting, false},
{Credo.Check.Refactor.PipeChainStart, false}
]
}
]
}
clients/remote_build_execution/lib/google_api/remote_build_execution/v2/model/build_bazel_remote_execution_v2_find_missing_blobs_request.ex | medikent/elixir-google-api | ["Apache-2.0"]
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RemoteBuildExecution.V2.Model.BuildBazelRemoteExecutionV2FindMissingBlobsRequest do
@moduledoc """
A request message for
ContentAddressableStorage.FindMissingBlobs.
## Attributes
* `blobDigests` (*type:* `list(GoogleApi.RemoteBuildExecution.V2.Model.BuildBazelRemoteExecutionV2Digest.t)`, *default:* `nil`) - A list of the blobs to check.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:blobDigests =>
list(GoogleApi.RemoteBuildExecution.V2.Model.BuildBazelRemoteExecutionV2Digest.t())
}
field(:blobDigests,
as: GoogleApi.RemoteBuildExecution.V2.Model.BuildBazelRemoteExecutionV2Digest,
type: :list
)
end
defimpl Poison.Decoder,
for: GoogleApi.RemoteBuildExecution.V2.Model.BuildBazelRemoteExecutionV2FindMissingBlobsRequest do
def decode(value, options) do
GoogleApi.RemoteBuildExecution.V2.Model.BuildBazelRemoteExecutionV2FindMissingBlobsRequest.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.RemoteBuildExecution.V2.Model.BuildBazelRemoteExecutionV2FindMissingBlobsRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
lib/nerves/artifact/providers/docker/volume.ex | konstantinzolotarev/nerves | ["Apache-2.0"]
defmodule Nerves.Artifact.Providers.Docker.Volume do
alias Nerves.Artifact
alias Nerves.Artifact.Providers.Docker
import Docker.Utils
def name(pkg) do
if id = id(pkg) do
"#{pkg.app}-#{id}"
end
end
def id(pkg) do
id_file = id_file(pkg)
if File.exists?(id_file) do
File.read!(id_file)
else
create_id(pkg)
id(pkg)
end
end
def id_file(pkg) do
Artifact.build_path(pkg)
|> Path.join(".docker_id")
end
def create_id(pkg) do
id_file = id_file(pkg)
id = Nerves.Utils.random_alpha_num(16)
Path.dirname(id_file)
|> File.mkdir_p!()
File.write!(id_file, id)
end
def delete(volume_name) do
shell_info("Deleting build volume #{volume_name}")
args = ["volume", "rm", volume_name]
case Mix.Nerves.Utils.shell("docker", args) do
{_result, 0} ->
:ok
{_result, _} ->
Mix.raise("""
Nerves Docker provider encountered an error while deleting volume #{volume_name}
""")
end
end
def exists?(volume_name) do
cmd = "docker"
args = ["volume", "ls", "-f", "name=#{volume_name}", "-q"]
case System.cmd(cmd, args, stderr_to_stdout: true) do
{<<^volume_name, _tail::binary>>, 0} ->
true
{<<"Cannot connect to the Docker daemon", _tail::binary>>, _} ->
Mix.raise("Nerves Docker provider is unable to connect to docker daemon")
_ ->
false
end
end
def create(volume_name) do
cmd = "docker"
args = ["volume", "create", "--name", volume_name]
case System.cmd(cmd, args) do
{_, 0} -> :noop
_ -> Mix.raise("Nerves Docker provider could not create docker volume #{volume_name}")
end
end
end
| 21.822785 | 92 | 0.603828 |
f712e0a57237c4729f0951f31dbdbe6d3bd0b321 | 9,882 | ex | Elixir | lib/elixir/lib/module/locals_tracker.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | 1 | 2021-05-20T13:08:37.000Z | 2021-05-20T13:08:37.000Z | lib/elixir/lib/module/locals_tracker.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/locals_tracker.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | # This is an Elixir module responsible for tracking
# calls in order to extract Elixir modules' behaviour
# during compilation time.
#
# ## Implementation
#
# The implementation uses the digraph module to track
# all dependencies. The graph starts with one main vertex:
#
# * `:local` - points to local functions
#
# We can also have the following vertices:
#
# * `Module` - a module that was invoked via an import
# * `{name, arity}` - a local function/arity pair
# * `{:import, name, arity}` - an invoked function/arity import
#
# Each of those vertices can associate to other vertices
# as described below:
#
# * `Module`
# * in neighbours: `{:import, name, arity}`
#
# * `{name, arity}`
# * in neighbours: `:local`, `{name, arity}`
# * out neighbours: `{:import, name, arity}`
#
# * `{:import, name, arity}`
# * in neighbours: `{name, arity}`
# * out neighbours: `Module`
#
# Note that since this is required for bootstrap, we can't use
# any of the `GenServer` conveniences.
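#
# As an illustrative sketch (hypothetical module, not part of the tracker),
# compiling:
#
#     defmodule Sample do
#       import List, only: [flatten: 1]
#
#       def pub(x), do: priv(flatten(x))
#       defp priv(x), do: x
#     end
#
# would roughly yield the edges:
#
#     :local -> {:pub, 1}
#     {:pub, 1} -> {:priv, 1}
#     {:pub, 1} -> {:import, :flatten, 1}
#     {:import, :flatten, 1} -> List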
defmodule Module.LocalsTracker do
@moduledoc false
@timeout 30000
@behaviour :gen_server
@type ref :: pid | module
@type name :: atom
@type name_arity :: {name, arity}
@type local :: {name, arity}
@type import :: {:import, name, arity}
# Public API
@doc """
Returns all imported modules that had the given
`{name, arity}` invoked.
"""
@spec imports_with_dispatch(ref, name_arity) :: [module]
def imports_with_dispatch(ref, {name, arity}) do
d = :gen_server.call(to_pid(ref), :digraph, @timeout)
:digraph.out_neighbours(d, {:import, name, arity})
end
@doc """
Returns all locals that are reachable.
By default, all public functions are reachable.
A private function is only reachable if it has
a public function that it invokes directly.
"""
@spec reachable(ref) :: [local]
def reachable(ref) do
ref
|> to_pid()
|> :gen_server.call(:digraph, @timeout)
|> reachable_from(:local)
|> :sets.to_list()
end
defp reachable_from(d, starting) do
reduce_reachable(d, starting, :sets.new())
end
defp reduce_reachable(d, vertex, vertices) do
neighbours = :digraph.out_neighbours(d, vertex)
neighbours = for({_, _} = t <- neighbours, do: t) |> :sets.from_list()
remaining = :sets.subtract(neighbours, vertices)
vertices = :sets.union(neighbours, vertices)
:sets.fold(&reduce_reachable(d, &1, &2), vertices, remaining)
end
defp to_pid(pid) when is_pid(pid), do: pid
defp to_pid(mod) when is_atom(mod) do
table = :elixir_module.data_table(mod)
:ets.lookup_element(table, {:elixir, :locals_tracker}, 2)
end
# Internal API
# Starts the tracker and returns its PID.
@doc false
def start_link do
:gen_server.start_link(__MODULE__, [], [])
end
  # Adds a definition into the tracker. A public
  # definition is connected with the :local node
  # while a private one is left unreachable until
  # a call is made to it.
@doc false
def add_definition(pid, kind, tuple) when kind in [:def, :defp, :defmacro, :defmacrop] do
:gen_server.cast(pid, {:add_definition, kind, tuple})
end
# Adds and tracks defaults for a definition into the tracker.
@doc false
def add_defaults(pid, kind, tuple, defaults) when kind in [:def, :defp, :defmacro, :defmacrop] do
:gen_server.cast(pid, {:add_defaults, kind, tuple, defaults})
end
# Adds a local dispatch to the given target.
def add_local(pid, to) when is_tuple(to) do
:gen_server.cast(pid, {:add_local, :local, to})
end
# Adds a local dispatch from-to the given target.
@doc false
def add_local(pid, from, to) when is_tuple(from) and is_tuple(to) do
:gen_server.cast(pid, {:add_local, from, to})
end
# Adds an import dispatch to the given target.
@doc false
def add_import(pid, function, module, target) when is_atom(module) and is_tuple(target) do
:gen_server.cast(pid, {:add_import, function, module, target})
end
# Yanks a local node. Returns its in and out vertices in a tuple.
@doc false
def yank(pid, local) do
:gen_server.call(to_pid(pid), {:yank, local}, @timeout)
end
# Reattach a previously yanked node
@doc false
def reattach(pid, tuple, kind, function, neighbours) do
:gen_server.cast(to_pid(pid), {:reattach, tuple, kind, function, neighbours})
end
# Collecting all conflicting imports with the given functions
@doc false
def collect_imports_conflicts(pid, all_defined) do
d = :gen_server.call(pid, :digraph, @timeout)
for {{name, arity}, _, meta, _} <- all_defined,
:digraph.in_neighbours(d, {:import, name, arity}) != [],
n = :digraph.out_neighbours(d, {:import, name, arity}),
n != [] do
{meta, {n, name, arity}}
end
end
  # Collect all unused definitions based on the private
  # functions given, also accounting for the expected number
  # of default clauses a private function has.
@doc false
def collect_unused_locals(ref, private) do
d = :gen_server.call(to_pid(ref), :digraph, @timeout)
reachable = reachable_from(d, :local)
reattached = :digraph.out_neighbours(d, :reattach)
{unreachable(reachable, reattached, private), collect_warnings(reachable, private)}
end
defp unreachable(reachable, reattached, private) do
for {tuple, kind, _, _} <- private,
not reachable?(tuple, kind, reachable, reattached),
do: tuple
end
defp reachable?(tuple, :defmacrop, reachable, reattached) do
    # All private macros are unreachable unless they have been
    # reattached and they are reachable.
:lists.member(tuple, reattached) and :sets.is_element(tuple, reachable)
end
defp reachable?(tuple, :defp, reachable, _reattached) do
:sets.is_element(tuple, reachable)
end
defp collect_warnings(reachable, private) do
:lists.foldl(&collect_warnings(&1, &2, reachable), [], private)
end
defp collect_warnings({_, _, false, _}, acc, _reachable) do
acc
end
defp collect_warnings({tuple, kind, meta, 0}, acc, reachable) do
if :sets.is_element(tuple, reachable) do
acc
else
[{meta, {:unused_def, tuple, kind}} | acc]
end
end
defp collect_warnings({tuple, kind, meta, default}, acc, reachable) when default > 0 do
{name, arity} = tuple
min = arity - default
max = arity
case min_reachable_default(max, min, :none, name, reachable) do
:none -> [{meta, {:unused_def, tuple, kind}} | acc]
^min -> acc
^max -> [{meta, {:unused_args, tuple}} | acc]
diff -> [{meta, {:unused_args, tuple, diff}} | acc]
end
end
defp min_reachable_default(max, min, last, name, reachable) when max >= min do
case :sets.is_element({name, max}, reachable) do
true -> min_reachable_default(max - 1, min, max, name, reachable)
false -> min_reachable_default(max - 1, min, last, name, reachable)
end
end
defp min_reachable_default(_max, _min, last, _name, _reachable) do
last
end
# Stops the gen server
@doc false
def stop(pid) do
:gen_server.cast(pid, :stop)
end
# Callbacks
def init([]) do
d = :digraph.new([:protected])
:digraph.add_vertex(d, :local)
:digraph.add_vertex(d, :reattach)
{:ok, d}
end
def handle_call({:yank, local}, _from, d) do
out_vertices = :digraph.out_neighbours(d, local)
:digraph.del_edges(d, :digraph.out_edges(d, local))
{:reply, {[], out_vertices}, d}
end
def handle_call(:digraph, _from, d) do
{:reply, d, d}
end
@doc false
def handle_info(_msg, d) do
{:noreply, d}
end
def handle_cast({:add_local, from, to}, d) do
handle_add_local(d, from, to)
{:noreply, d}
end
def handle_cast({:add_import, function, module, {name, arity}}, d) do
handle_import(d, function, module, name, arity)
{:noreply, d}
end
def handle_cast({:add_definition, kind, tuple}, d) do
handle_add_definition(d, kind, tuple)
{:noreply, d}
end
def handle_cast({:add_defaults, kind, {name, arity}, defaults}, d) do
for i <- :lists.seq(arity - defaults, arity - 1) do
handle_add_definition(d, kind, {name, i})
handle_add_local(d, {name, i}, {name, arity})
end
{:noreply, d}
end
def handle_cast({:reattach, tuple, kind, function, {in_neigh, out_neigh}}, d) do
# Reattach the old function
for from <- in_neigh do
:digraph.add_vertex(d, from)
replace_edge!(d, from, function)
end
for to <- out_neigh do
:digraph.add_vertex(d, to)
replace_edge!(d, function, to)
end
# Add the new definition
handle_add_definition(d, kind, tuple)
# Make a call from the old function to the new one
if function != tuple do
handle_add_local(d, function, tuple)
end
# Finally marked the new one as reattached
replace_edge!(d, :reattach, tuple)
{:noreply, d}
end
def handle_cast(:stop, d) do
{:stop, :normal, d}
end
@doc false
def terminate(_reason, _state) do
:ok
end
@doc false
def code_change(_old, state, _extra) do
{:ok, state}
end
defp handle_import(d, function, module, name, arity) do
:digraph.add_vertex(d, module)
tuple = {:import, name, arity}
:digraph.add_vertex(d, tuple)
replace_edge!(d, tuple, module)
if function != nil do
replace_edge!(d, function, tuple)
end
:ok
end
defp handle_add_local(d, from, to) do
:digraph.add_vertex(d, to)
replace_edge!(d, from, to)
end
defp handle_add_definition(d, public, tuple) when public in [:def, :defmacro] do
:digraph.add_vertex(d, tuple)
replace_edge!(d, :local, tuple)
end
defp handle_add_definition(d, private, tuple) when private in [:defp, :defmacrop] do
:digraph.add_vertex(d, tuple)
end
defp replace_edge!(d, from, to) do
unless :lists.member(to, :digraph.out_neighbours(d, from)) do
[:"$e" | _] = :digraph.add_edge(d, from, to)
end
:ok
end
end
| 27.758427 | 99 | 0.663428 |
f712ef6fde894564aaff88e0eb791f5a442d3bca | 41,774 | ex | Elixir | lib/ash/actions/managed_relationships.ex | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | lib/ash/actions/managed_relationships.ex | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | lib/ash/actions/managed_relationships.ex | ChristianTovar/ash | 66435322786c5d0b90a34051da969b68dcc8a045 | [
"MIT"
] | null | null | null | defmodule Ash.Actions.ManagedRelationships do
@moduledoc false
alias Ash.Error.Changes.InvalidRelationship
alias Ash.Error.Query.NotFound
require Ash.Query
def load(_api, created, %{relationships: rels}, _) when rels == %{},
do: {:ok, created}
def load(_api, created, %{relationships: nil}, _), do: {:ok, created}
def load(api, created, changeset, engine_opts) do
Enum.reduce_while(changeset.relationships, {:ok, created}, fn {key, value}, {:ok, acc} ->
relationship = Ash.Resource.Info.relationship(changeset.resource, key)
case Enum.filter(value, fn {_, opts} ->
opts = Ash.Changeset.ManagedRelationshipHelpers.sanitize_opts(relationship, opts)
Ash.Changeset.ManagedRelationshipHelpers.must_load?(opts)
end) do
[] ->
{:cont, {:ok, acc}}
relationships ->
authorize? =
engine_opts[:authorize?] &&
Enum.any?(relationships, fn {_, opts} -> opts[:authorize?] end)
actor = engine_opts[:actor]
case api.load(acc, key, authorize?: authorize?, actor: actor) do
{:ok, loaded} -> {:cont, {:ok, loaded}}
{:error, error} -> {:halt, {:error, error}}
end
end
end)
end
def setup_managed_belongs_to_relationships(changeset, actor, engine_opts) do
changeset.relationships
|> Enum.map(fn {relationship, val} ->
{Ash.Resource.Info.relationship(changeset.resource, relationship), val}
end)
|> Enum.filter(fn {relationship, _val} ->
relationship.type == :belongs_to
end)
|> Enum.flat_map(fn {relationship, inputs} ->
inputs
|> Enum.with_index()
|> Enum.map(fn {{input, opts}, index} ->
{{relationship, {input, opts}}, index}
end)
end)
|> Enum.reject(fn {{_relationship, {input, _opts}}, _index} ->
is_nil(input) || input == []
end)
|> Enum.map(fn
{{relationship, {[input], opts}}, index} ->
{{relationship, {input, opts}}, index}
{{relationship, {other, opts}}, index} ->
{{relationship, {other, opts}}, index}
end)
|> Enum.reduce_while({changeset, %{notifications: []}}, fn {{relationship, {input, opts}},
index},
{changeset, instructions} ->
pkeys = pkeys(relationship)
opts = Ash.Changeset.ManagedRelationshipHelpers.sanitize_opts(relationship, opts)
opts = Keyword.put(opts, :authorize?, engine_opts[:authorize?] && opts[:authorize?])
current_value =
case Map.get(changeset.data, relationship.name) do
%Ash.NotLoaded{} ->
case relationship.cardinality do
:many -> []
:one -> nil
end
other ->
other
end
case find_match(List.wrap(current_value), input, pkeys, relationship) do
nil ->
case opts[:on_lookup] do
:ignore ->
create_belongs_to_record(
changeset,
instructions,
relationship,
input,
actor,
index,
opts
)
{_key, _create_or_update, read} ->
if is_struct(input) do
changeset =
changeset
|> Ash.Changeset.set_context(%{
belongs_to_manage_found: %{relationship.name => %{index => input}}
})
|> Ash.Changeset.force_change_attribute(
relationship.source_field,
Map.get(input, relationship.destination_field)
)
{:cont, {changeset, instructions}}
else
case Ash.Filter.get_filter(relationship.destination, input) do
{:ok, keys} ->
relationship.destination
|> Ash.Query.for_read(read, input, actor: actor)
|> Ash.Query.filter(^keys)
|> Ash.Query.do_filter(relationship.filter)
|> Ash.Query.set_context(relationship.context)
|> Ash.Query.limit(1)
|> Ash.Query.set_tenant(changeset.tenant)
|> changeset.api.read_one(
authorize?: opts[:authorize?],
actor: actor
)
|> case do
{:ok, nil} ->
create_belongs_to_record(
changeset,
instructions,
relationship,
input,
actor,
index,
opts
)
{:ok, found} ->
changeset =
changeset
|> Ash.Changeset.set_context(%{
private: %{
belongs_to_manage_found: %{relationship.name => %{index => found}}
}
})
|> Ash.Changeset.force_change_attribute(
relationship.source_field,
Map.get(found, relationship.destination_field)
)
{:cont, {changeset, instructions}}
{:error, error} ->
{:halt, {Ash.Changeset.add_error(changeset, error), instructions}}
end
_ ->
create_belongs_to_record(
changeset,
instructions,
relationship,
input,
actor,
index,
opts
)
end
end
end
_value ->
{:cont, {changeset, instructions}}
end
end)
|> validate_required_belongs_to()
end
defp validate_required_belongs_to({changeset, instructions}) do
changeset.resource
|> Ash.Resource.Info.relationships()
|> Enum.filter(&(&1.type == :belongs_to))
|> Enum.filter(& &1.required?)
|> Enum.reject(fn relationship ->
changeset.context[:private][:error][relationship.name]
end)
|> Enum.reduce({changeset, instructions}, fn required_relationship,
{changeset, instructions} ->
changeset =
case Ash.Changeset.get_attribute(changeset, required_relationship.source_field) do
nil ->
Ash.Changeset.add_error(
changeset,
Ash.Error.Changes.Required.exception(
field: required_relationship.name,
type: :relationship
)
)
_ ->
changeset
end
{changeset, instructions}
end)
end
defp create_belongs_to_record(
changeset,
instructions,
relationship,
input,
actor,
index,
opts
) do
case opts[:on_no_match] do
:ignore ->
{:cont, {changeset, instructions}}
:error ->
if opts[:on_lookup] != :ignore do
changeset =
changeset
|> Ash.Changeset.add_error(
NotFound.exception(
primary_key: input,
resource: relationship.destination
)
)
|> Ash.Changeset.put_context(:private, %{error: %{relationship.name => true}})
{:halt, {changeset, instructions}}
else
changeset =
changeset
|> Ash.Changeset.add_error(
InvalidRelationship.exception(
relationship: relationship.name,
message: "Changes would create a new related record"
)
)
|> Ash.Changeset.put_context(:private, %{error: %{relationship.name => true}})
{:halt, {changeset, instructions}}
end
{:create, action_name} ->
do_create_belongs_to_record(
relationship,
action_name,
input,
changeset,
actor,
opts,
instructions,
index
)
end
end
defp do_create_belongs_to_record(
relationship,
action_name,
input,
changeset,
actor,
opts,
instructions,
index
) do
relationship.destination
|> Ash.Changeset.for_create(action_name, input,
require?: false,
actor: actor,
relationships: opts[:relationships] || []
)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.create(
actor: actor,
authorize?: opts[:authorize?],
return_notifications?: true
)
|> case do
{:ok, created, notifications} ->
changeset =
changeset
|> Ash.Changeset.set_context(%{
private: %{
belongs_to_manage_created: %{relationship.name => %{index => created}}
}
})
|> Ash.Changeset.force_change_attribute(
relationship.source_field,
Map.get(created, relationship.destination_field)
)
{:cont,
{changeset, %{instructions | notifications: instructions.notifications ++ notifications}}}
{:error, error} ->
{:halt, {Ash.Changeset.add_error(changeset, error), instructions}}
end
end
def manage_relationships(record, changeset, actor, engine_opts) do
changeset.relationships
|> Enum.map(fn {relationship, val} ->
{Ash.Resource.Info.relationship(changeset.resource, relationship), val}
end)
|> Enum.reject(fn {relationship, _} ->
relationship.type == :belongs_to
end)
|> Enum.flat_map(fn {key, batches} ->
batches
|> Enum.with_index()
|> Enum.map(fn {{batch, opts}, index} ->
opts = Keyword.put(opts, :authorize?, engine_opts[:authorize?] && opts[:authorize?])
{key, batch, opts, index}
end)
end)
|> Enum.reduce_while({:ok, record, []}, fn {relationship, inputs, opts, index},
{:ok, record, all_notifications} ->
inputs =
if relationship.cardinality == :many do
List.wrap(inputs)
else
inputs
end
case manage_relationship(record, relationship, inputs, changeset, actor, index, opts) do
{:ok, record, notifications} ->
record =
if relationship.type == :many_to_many do
Map.put(
record,
relationship.join_relationship,
Map.get(record.__struct__.__struct__, relationship.join_relationship)
)
else
record
end
{:cont, {:ok, record, notifications ++ all_notifications}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
defp pkeys(relationship) do
identities =
relationship.destination
|> Ash.Resource.Info.identities()
|> Enum.map(& &1.keys)
[Ash.Resource.Info.primary_key(relationship.destination) | identities]
end
defp manage_relationship(
record,
%{cardinality: :many} = relationship,
inputs,
changeset,
actor,
index,
opts
) do
inputs = List.wrap(inputs)
opts = Ash.Changeset.ManagedRelationshipHelpers.sanitize_opts(relationship, opts)
pkeys = pkeys(relationship)
original_value =
case Map.get(record, relationship.name) do
%Ash.NotLoaded{} -> []
value -> value
end
inputs
|> Enum.reduce_while(
{:ok, [], [], []},
fn input, {:ok, current_value, all_notifications, all_used} ->
case handle_input(
record,
current_value,
original_value,
relationship,
input,
pkeys,
changeset,
actor,
index,
opts
) do
{:ok, new_value, notifications, used} ->
{:cont, {:ok, new_value, all_notifications ++ notifications, all_used ++ used}}
{:error, error} ->
{:halt, {:error, error}}
end
end
)
|> case do
{:ok, new_value, all_notifications, all_used} ->
case delete_unused(
record,
original_value,
relationship,
new_value,
all_used,
changeset,
actor,
opts
) do
{:ok, new_value, notifications} ->
{:ok, Map.put(record, relationship.name, new_value),
all_notifications ++ notifications}
{:error, error} ->
{:error, error}
end
{:error, error} ->
{:error, error}
end
end
defp manage_relationship(
record,
%{cardinality: :one} = relationship,
inputs,
changeset,
actor,
index,
opts
) do
opts = Ash.Changeset.ManagedRelationshipHelpers.sanitize_opts(relationship, opts)
identities =
relationship.destination
|> Ash.Resource.Info.identities()
|> Enum.map(& &1.keys)
pkeys = [Ash.Resource.Info.primary_key(relationship.destination) | identities]
original_value =
case Map.get(record, relationship.name) do
%Ash.NotLoaded{} -> []
value -> value
end
inputs = List.wrap(inputs)
inputs
|> Enum.reduce_while(
{:ok, original_value, [], []},
fn input, {:ok, current_value, all_notifications, all_used} ->
case handle_input(
record,
current_value,
original_value,
relationship,
input,
pkeys,
changeset,
actor,
index,
opts
) do
{:ok, new_value, notifications, used} ->
{:cont, {:ok, new_value, all_notifications ++ notifications, all_used ++ used}}
{:error, error} ->
{:halt, {:error, error}}
end
end
)
|> case do
{:ok, new_value, all_notifications, all_used} ->
case delete_unused(
record,
original_value,
relationship,
new_value,
all_used,
changeset,
actor,
opts
) do
{:ok, new_value, notifications} ->
{:ok, Map.put(record, relationship.name, Enum.at(List.wrap(new_value), 0)),
all_notifications ++ notifications}
{:error, error} ->
{:error, error}
end
{:error, error} ->
{:error, error}
end
end
defp handle_input(
record,
current_value,
original_value,
relationship,
input,
pkeys,
changeset,
actor,
index,
opts
) do
match = find_match(List.wrap(original_value), input, pkeys, relationship)
if is_nil(match) || opts[:on_match] == :no_match do
case handle_create(
record,
current_value,
relationship,
input,
changeset,
actor,
index,
opts
) do
{:ok, current_value, notifications, used} ->
{:ok, current_value, notifications, used}
{:error, error} ->
{:error, error}
end
else
handle_update(record, current_value, relationship, match, input, changeset, actor, opts)
end
end
defp handle_create(record, current_value, relationship, input, changeset, actor, index, opts) do
api = changeset.api
case opts[:on_lookup] do
:ignore ->
do_handle_create(
record,
current_value,
relationship,
input,
changeset,
actor,
index,
opts
)
other ->
case Map.fetch(
changeset.context[:private][:belongs_to_manage_found][relationship.name] || %{},
index
) do
:error ->
{key, create_or_update, read, join_keys} =
case other do
{key, create_or_update, read} -> {key, create_or_update, read, []}
{key, create_or_update, read, keys} -> {key, create_or_update, read, keys}
end
case Ash.Filter.get_filter(relationship.destination, input) do
{:ok, keys} ->
if is_struct(input) do
{:ok, input}
else
relationship.destination
|> Ash.Query.for_read(read, input, actor: actor)
|> Ash.Query.filter(^keys)
|> Ash.Query.do_filter(relationship.filter)
|> Ash.Query.set_context(relationship.context)
|> Ash.Query.set_tenant(changeset.tenant)
|> Ash.Query.limit(1)
|> changeset.api.read_one(
authorize?: opts[:authorize?],
actor: actor
)
end
|> case do
{:ok, found} when not is_nil(found) ->
do_handle_found(
relationship,
join_keys,
input,
api,
opts,
found,
current_value,
create_or_update,
actor,
key,
record,
changeset
)
{:ok, _} ->
do_handle_create(
record,
current_value,
relationship,
input,
changeset,
actor,
index,
opts
)
{:error, error} ->
{:error, error}
end
{:error, _error} ->
do_handle_create(
record,
current_value,
relationship,
input,
changeset,
actor,
index,
opts
)
end
{:ok, found} ->
{:ok, [found | current_value], [], [found]}
end
end
end
defp do_handle_found(
relationship,
join_keys,
input,
api,
opts,
found,
current_value,
create_or_update,
actor,
key,
record,
changeset
) do
case relationship.type do
:many_to_many ->
input =
if is_map(input) do
input
else
Enum.into(input, %{})
end
{join_input, input} = split_join_keys(input, join_keys)
join_relationship =
Ash.Resource.Info.relationship(
relationship.source,
relationship.join_relationship
)
relationship.through
|> Ash.Changeset.new()
|> Ash.Changeset.for_create(create_or_update, join_input, actor: actor)
|> Ash.Changeset.force_change_attribute(
relationship.source_field_on_join_table,
Map.get(record, relationship.source_field)
)
|> Ash.Changeset.force_change_attribute(
relationship.destination_field_on_join_table,
Map.get(found, relationship.destination_field)
)
|> Ash.Changeset.set_context(join_relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> api.create(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
|> case do
{:ok, _created, notifications} ->
case key do
:relate ->
{:ok, [found | current_value], notifications, [found]}
:relate_and_update ->
case handle_update(
record,
current_value,
relationship,
found,
input,
changeset,
actor,
opts
) do
{:ok, new_value, update_notifications, used} ->
{:ok, new_value, update_notifications ++ notifications, used}
{:error, error} ->
{:error, error}
end
end
{:error, error} ->
{:error, error}
end
type when type in [:has_many, :has_one] ->
{found, input} =
if is_struct(input) do
{input, %{}}
else
{found, input}
end
found
|> Ash.Changeset.for_update(create_or_update, input,
relationships: opts[:relationships] || [],
actor: actor
)
|> Ash.Changeset.force_change_attribute(
relationship.destination_field,
Map.get(record, relationship.source_field)
)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> api.update(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
|> case do
{:ok, updated, notifications} ->
{:ok, [updated | current_value], notifications, [updated]}
{:error, error} ->
{:error, error}
end
:belongs_to ->
{:ok, [found | current_value], [], [found]}
end
end
defp do_handle_create(record, current_value, relationship, input, changeset, actor, index, opts) do
case opts[:on_no_match] do
:error ->
if opts[:on_lookup] != :ignore do
{:error,
NotFound.exception(
primary_key: input,
resource: relationship.destination
)}
else
{:error,
InvalidRelationship.exception(
relationship: relationship.name,
message: "Changes would create a new related record"
)}
end
{:create, action_name} ->
case changeset.context[:private][:belongs_to_manage_created][relationship.name][index] do
nil ->
created =
if is_struct(input) do
{:ok, input, [], []}
else
relationship.destination
|> Ash.Changeset.for_create(action_name, input,
require?: false,
actor: actor,
relationships: opts[:relationships]
)
|> Ash.Changeset.force_change_attribute(
relationship.destination_field,
Map.get(record, relationship.source_field)
)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.create(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
end
case created do
{:ok, created, notifications} ->
{:ok, [created | current_value], notifications, []}
{:error, error} ->
{:error, error}
end
created ->
{:ok, [created | current_value], [], []}
end
{:create, action_name, join_action_name, params} ->
join_keys = params ++ Enum.map(params, &to_string/1)
input =
if is_map(input) do
input
else
Enum.into(input, %{})
end
{join_params, regular_params} = split_join_keys(input, join_keys)
created =
if is_struct(input) do
{:ok, input, []}
else
relationship.destination
|> Ash.Changeset.for_create(action_name, regular_params,
require?: false,
relationships: opts[:relationships],
actor: actor
)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.create(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
end
case created do
{:ok, created, regular_notifications} ->
join_relationship =
Ash.Resource.Info.relationship(relationship.source, relationship.join_relationship)
relationship.through
|> Ash.Changeset.new()
|> Ash.Changeset.for_create(join_action_name, join_params,
require?: false,
actor: actor
)
|> Ash.Changeset.force_change_attribute(
relationship.source_field_on_join_table,
Map.get(record, relationship.source_field)
)
|> Ash.Changeset.force_change_attribute(
relationship.destination_field_on_join_table,
Map.get(created, relationship.destination_field)
)
|> Ash.Changeset.set_context(join_relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> changeset.api.create(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
|> case do
{:ok, _join_row, notifications} ->
{:ok, [created | current_value], regular_notifications ++ notifications, []}
{:error, error} ->
{:error, error}
end
{:error, error} ->
{:error, error}
end
:ignore ->
{:ok, current_value, [], []}
end
end
# credo:disable-for-next-line Credo.Check.Refactor.Nesting
defp handle_update(
source_record,
current_value,
relationship,
match,
input,
changeset,
actor,
opts
) do
api = changeset.api
case opts[:on_match] do
# :create case is handled when determining updates/creates
:error ->
{:error,
InvalidRelationship.exception(
relationship: relationship.name,
message: "Changes would update a record"
)}
:ignore ->
{:ok, [match | current_value], [], [match]}
:missing ->
{:ok, current_value, [], []}
{:unrelate, action_name} ->
case unrelate_data(
source_record,
match,
api,
actor,
opts,
action_name,
changeset.tenant,
relationship
) do
{:ok, notifications} ->
{:ok, current_value, notifications, []}
{:error, error} ->
{:error, error}
end
{:update, action_name} ->
{match, input} =
if is_struct(input) do
{input, %{}}
else
{match, input}
end
match
|> Ash.Changeset.for_update(action_name, input,
actor: actor,
relationships: opts[:relationships] || []
)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> api.update(actor: actor, authorize?: opts[:authorize?], return_notifications?: true)
|> case do
{:ok, updated, update_notifications} ->
{:ok, [updated | current_value], update_notifications, [match]}
{:error, error} ->
{:error, error}
end
{:update, action_name, join_action_name, params} ->
join_keys = params ++ Enum.map(params, &to_string/1)
{join_params, regular_params} = split_join_keys(input, join_keys)
{match, regular_params} =
if is_struct(regular_params) do
{regular_params, %{}}
else
{match, regular_params}
end
source_value = Map.get(source_record, relationship.source_field)
match
|> Ash.Changeset.for_update(action_name, regular_params,
actor: actor,
relationships: opts[:relationships]
)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> api.update(actor: actor, authorize?: opts[:authorize?], return_notifications?: true)
|> case do
{:ok, updated, update_notifications} ->
destination_value = Map.get(updated, relationship.destination_field)
join_relationship =
Ash.Resource.Info.relationship(relationship.source, relationship.join_relationship)
relationship.through
|> Ash.Query.filter(ref(^relationship.source_field_on_join_table) == ^source_value)
|> Ash.Query.filter(
ref(^relationship.destination_field_on_join_table) == ^destination_value
)
|> Ash.Query.set_context(join_relationship.context)
|> Ash.Query.limit(1)
|> Ash.Query.set_tenant(changeset.tenant)
|> changeset.api.read_one(
authorize?: opts[:authorize?],
actor: actor
)
|> case do
{:ok, result} ->
if join_params == %{} do
{:ok, [updated | current_value], update_notifications, [match]}
else
join_relationship =
Ash.Resource.Info.relationship(
relationship.source,
relationship.join_relationship
)
result
|> Ash.Changeset.for_update(join_action_name, join_params, actor: actor)
|> Ash.Changeset.set_context(join_relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> api.update(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
# credo:disable-for-next-line Credo.Check.Refactor.Nesting
|> case do
{:ok, _updated_join, join_update_notifications} ->
{:ok, [updated | current_value],
update_notifications ++ join_update_notifications, [updated]}
{:error, error} ->
{:error, error}
end
end
{:error, error} ->
{:error, error}
end
{:error, error} ->
{:error, error}
end
end
end
defp find_match(current_value, input, pkeys, relationship \\ nil)
defp find_match(%Ash.NotLoaded{}, _input, _pkeys, _relationship) do
nil
end
defp find_match(current_value, input, pkeys, relationship) do
Enum.find(current_value, fn current_value ->
Enum.any?(pkeys, fn pkey ->
matches?(current_value, input, pkey, relationship)
end)
end)
end
defp matches?(current_value, input, pkey, relationship) do
if relationship && relationship.type in [:has_one, :has_many] &&
relationship.destination_field in pkey do
Enum.all?(pkey, fn field ->
if field == relationship.destination_field do
if is_struct(input) do
do_matches?(current_value, input, field)
else
# We know that it will be the same as all other records in this relationship
            # (because that's how has_one and has_many relationships work), so we
            # can assume it's the same as the current value
true
end
else
do_matches?(current_value, input, field)
end
end)
else
Enum.all?(pkey, fn field ->
do_matches?(current_value, input, field)
end)
end
end
defp do_matches?(current_value, input, field) do
with {:ok, current_val} when not is_nil(current_val) <- Map.fetch(current_value, field),
{:ok, input_val} when not is_nil(input_val) <- fetch_field(input, field) do
current_val == input_val
else
_ ->
false
end
end
defp split_join_keys(%_{__metadata__: metadata} = input, _join_keys) do
{metadata[:join_keys] || %{}, input}
end
defp split_join_keys(input, :all) do
{input, %{}}
end
defp split_join_keys(input, join_keys) do
Map.split(input, join_keys ++ Enum.map(join_keys, &to_string/1))
end
defp fetch_field(input, field) do
case Map.fetch(input, field) do
{:ok, value} ->
{:ok, value}
:error ->
Map.fetch(input, to_string(field))
end
end
defp delete_unused(
source_record,
original_value,
relationship,
current_value,
all_used,
changeset,
actor,
opts
) do
api = changeset.api
pkey = Ash.Resource.Info.primary_key(relationship.destination)
original_value
|> List.wrap()
|> Enum.reject(&find_match(all_used, &1, [pkey]))
|> Enum.reduce_while(
{:ok, current_value, []},
fn record, {:ok, current_value, all_notifications} ->
case opts[:on_missing] do
:ignore ->
{:cont, {:ok, [record | current_value], []}}
{:destroy, action_name, join_action_name} ->
source_value = Map.get(source_record, relationship.source_field)
destination_value = Map.get(record, relationship.destination_field)
join_relationship =
Ash.Resource.Info.relationship(relationship.source, relationship.join_relationship)
relationship.through
|> Ash.Query.filter(ref(^relationship.source_field_on_join_table) == ^source_value)
|> Ash.Query.filter(
ref(^relationship.destination_field_on_join_table) == ^destination_value
)
|> Ash.Query.limit(1)
|> Ash.Query.set_tenant(changeset.tenant)
|> Ash.Query.set_context(join_relationship.context)
|> Ash.Query.do_filter(relationship.filter)
|> api.read_one(
authorize?: opts[:authorize?],
actor: actor
)
|> case do
{:ok, result} ->
join_relationship =
Ash.Resource.Info.relationship(
relationship.source,
relationship.join_relationship
)
result
|> Ash.Changeset.for_destroy(
join_action_name,
%{},
actor: actor
)
|> Ash.Changeset.set_context(join_relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> api.destroy(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
|> case do
{:ok, join_notifications} ->
notifications = join_notifications ++ all_notifications
record
|> Ash.Changeset.for_destroy(action_name, %{}, actor: actor)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> api.destroy(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
# credo:disable-for-next-line Credo.Check.Refactor.Nesting
|> case do
{:ok, destroy_destination_notifications} ->
{:cont,
{:ok, current_value,
notifications ++
all_notifications ++ destroy_destination_notifications}}
{:error, error} ->
{:halt, {:error, error}}
end
{:error, error} ->
{:halt, {:error, error}}
end
{:error, error} ->
{:halt, {:error, error}}
end
{:destroy, action_name} ->
record
|> Ash.Changeset.for_destroy(action_name, %{}, actor: actor)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(changeset.tenant)
|> api.destroy(
authorize?: opts[:authorize?],
actor: actor,
return_notifications?: true
)
|> case do
{:ok, notifications} ->
{:cont, {:ok, current_value, notifications ++ all_notifications}}
{:error, error} ->
{:halt, {:error, error}}
end
:error ->
{:halt,
{:error,
InvalidRelationship.exception(
relationship: relationship.name,
message: "Changes would destroy a record"
)}}
{:unrelate, action_name} ->
case unrelate_data(
source_record,
record,
api,
actor,
opts,
action_name,
changeset.tenant,
relationship
) do
{:ok, notifications} ->
{:cont, {:ok, current_value, notifications}}
{:error, error} ->
{:halt, {:error, error}}
end
end
end
)
end
defp unrelate_data(
source_record,
record,
api,
actor,
opts,
action_name,
tenant,
%{type: :many_to_many} = relationship
) do
action_name =
action_name || Ash.Resource.Info.primary_action(relationship.through, :destroy).name
source_value = Map.get(source_record, relationship.source_field)
destination_value = Map.get(record, relationship.destination_field)
relationship.through
|> Ash.Query.filter(ref(^relationship.source_field_on_join_table) == ^source_value)
|> Ash.Query.filter(ref(^relationship.destination_field_on_join_table) == ^destination_value)
|> Ash.Query.limit(1)
|> Ash.Query.set_tenant(tenant)
|> api.read_one(authorize?: opts[:authorize?], actor: actor)
|> case do
{:ok, result} ->
result
|> Ash.Changeset.for_destroy(action_name, %{}, actor: actor)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(tenant)
|> api.destroy(
return_notifications?: true,
authorize?: opts[:authorize?],
actor: actor
)
|> case do
{:ok, notifications} ->
{:ok, notifications}
{:error, error} ->
{:error, error}
end
{:error, error} ->
{:error, error}
end
end
defp unrelate_data(
_source_record,
record,
api,
actor,
opts,
action_name,
tenant,
%{type: type} = relationship
)
when type in [:has_many, :has_one] do
action_name =
action_name || Ash.Resource.Info.primary_action(relationship.destination, :update).name
record
|> Ash.Changeset.for_update(action_name, %{},
relationships: opts[:relationships] || [],
actor: actor
)
|> Ash.Changeset.force_change_attribute(relationship.destination_field, nil)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(tenant)
|> api.update(return_notifications?: true, actor: actor, authorize?: opts[:authorize?])
|> case do
{:ok, _unrelated, notifications} ->
{:ok, notifications}
{:error, error} ->
{:error, error}
end
end
defp unrelate_data(
source_record,
_record,
api,
actor,
opts,
action_name,
tenant,
%{type: :belongs_to} = relationship
) do
action_name =
action_name || Ash.Resource.Info.primary_action(relationship.source, :update).name
source_record
|> Ash.Changeset.for_update(action_name, %{},
relationships: opts[:relationships] || [],
actor: actor
)
|> Ash.Changeset.force_change_attribute(relationship.source_field, nil)
|> Ash.Changeset.set_context(relationship.context)
|> Ash.Changeset.set_tenant(tenant)
|> api.update(return_notifications?: true, actor: actor, authorize?: opts[:authorize?])
|> case do
{:ok, _unrelated, notifications} ->
{:ok, notifications}
{:error, error} ->
{:error, error}
end
end
end
| 30.784083 | 101 | 0.505817 |
f71303551314b0717a9d23de94474dd81550fd54 | 320 | ex | Elixir | lib/team_budget/members/core/members_repo.ex | AkioCode/elxpro4-teambudget | a7e67d5e1ec538df6cc369cc4f385d005bf60eda | [
"MIT"
] | null | null | null | lib/team_budget/members/core/members_repo.ex | AkioCode/elxpro4-teambudget | a7e67d5e1ec538df6cc369cc4f385d005bf60eda | [
"MIT"
] | null | null | null | lib/team_budget/members/core/members_repo.ex | AkioCode/elxpro4-teambudget | a7e67d5e1ec538df6cc369cc4f385d005bf60eda | [
"MIT"
] | null | null | null | defmodule TeamBudget.Members.Core.MembersRepo do
import Ecto.Query
alias TeamBudget.Repo
alias TeamBudget.Members.Data.Member
def is_member_of?(user_id, team_id) do
from(
m in Member,
where:
m.user_id == ^user_id and
m.team_id == ^team_id
)
|> Repo.exists?()
end
end
| 20 | 48 | 0.646875 |
f713236083734592906a67c38e5565104bb1bcbc | 754 | ex | Elixir | lib/ex_payrexx/auth_middleware.ex | falti/ex_payrexx | 6775c782f4612456f9edbad1ef4f59c9be7f424e | [
"MIT"
] | null | null | null | lib/ex_payrexx/auth_middleware.ex | falti/ex_payrexx | 6775c782f4612456f9edbad1ef4f59c9be7f424e | [
"MIT"
] | null | null | null | lib/ex_payrexx/auth_middleware.ex | falti/ex_payrexx | 6775c782f4612456f9edbad1ef4f59c9be7f424e | [
"MIT"
] | null | null | null | defmodule ExPayrexx.AuthMiddleware do
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, options) do
env
|> authorize(options)
|> Tesla.run(next)
end
defp authorize(env, _opts) do
body =
case env.body do
nil -> %{}
_ -> Map.from_struct(env.body)
end
request_to_be_signed = URI.encode_query(body)
sig = signature(request_to_be_signed)
body_with_sigature = Map.merge(body, sig)
env
|> Map.put(:body, body_with_sigature)
end
defp signature(request_to_be_signed) do
secret = Application.get_env(:ex_payrexx, :secret)
signature = :crypto.mac(:hmac, :sha256, secret, request_to_be_signed) |> Base.encode64()
%{ApiSignature: signature}
end
end
| 23.5625 | 92 | 0.67374 |
f713355c6034fb9255a251f5557f349643d0a0dc | 6,029 | ex | Elixir | lib/yml_reader/gitlab_ci.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 2 | 2019-03-05T16:29:10.000Z | 2020-01-17T14:11:48.000Z | lib/yml_reader/gitlab_ci.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 3 | 2019-03-18T20:26:48.000Z | 2020-06-25T14:31:13.000Z | lib/yml_reader/gitlab_ci.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 1 | 2018-06-16T15:32:25.000Z | 2018-06-16T15:32:25.000Z | require IEx;
require Logger;
defmodule CncfDashboardApi.YmlReader.GitlabCi do
use Retry
def get do
Application.ensure_all_started :inets
retry with: exp_backoff |> randomize |> cap(1_000) |> expiry(8_000), rescue_only: [MatchError] do
{:ok, resp} = :httpc.request(:get, {System.get_env("GITLAB_CI_YML") |> to_charlist, []}, [], [body_format: :binary])
{{_, 200, 'OK'}, _headers, body} = resp
# Logger.info fn ->
# "cross-cloud body #{body}"
# end
body
end
end
# Convention:
# 1. cross-cloud ci has all the projects (listed under projects)
# 2. cncfci.yml has project specific attributes (such as logo-url)
# -- lives in the {project}-configuration repo
# 3. We need to get the url/location of the project-configuration repo in order to
# get the project specific attributes
# 4. Using convention we can derive the name of the project-configuration repos from a list of valid project names.
# -- if the project name doesn't match the name of the project repo we will fail
# -- see https://en.wikipedia.org/wiki/Connascence
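  #
  # Illustrative sketch (hypothetical project entry; names and URLs are assumed,
  # not taken from the real cross-cloud yml):
  #
  #     projects:
  #       coredns:
  #         configuration_repo: https://raw.githubusercontent.com/example/coredns-configuration
  #
  # With PROJECT_SEGMENT_ENV=master, configuration_repo_path/1 below builds
  # https://raw.githubusercontent.com/example/coredns-configuration/master/cncfci.yml
  # and getcncfci/1 fetches that file to read the project-specific attributes
  # (logo_url, display_name, stable_ref, ...).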
def getcncfci(configuration_repo) do
Application.ensure_all_started :inets
try do
retry with: exp_backoff |> randomize |> cap(1_000) |> expiry(6_000), rescue_only: [MatchError] do
if is_nil(configuration_repo) == false do
Logger.info fn ->
"Trying getcncfci http get on #{inspect(configuration_repo)}"
end
{:ok, {{_, 200, 'OK'}, _headers, body}} = :httpc.request(:get, {configuration_repo |> to_charlist, []}, [], [body_format: :binary])
body
else
{:error, :not_found}
end
end
rescue
e in MatchError ->
Logger.error fn ->
"failed at gitlab_ci http get on #{inspect(configuration_repo)}"
end
{:error, :not_found}
end
end
def cloud_list do
yml = CncfDashboardApi.YmlReader.GitlabCi.get() |> YamlElixir.read_from_string
yml["clouds"]
|> Stream.with_index
|> Enum.reduce([], fn ({{k, v}, idx}, acc) ->
# [%{"id" => (idx + 1),
[%{"id" => 0,
"cloud_name" => k,
"active" => v["active"],
"display_name" => v["display_name"],
# "order" => (idx + 1)} | acc]
"order" => v["order"]} | acc]
end)
end
def cncf_relations_list do
yml = CncfDashboardApi.YmlReader.GitlabCi.get() |> YamlElixir.read_from_string
yml["cncf_relations"]
|> Stream.with_index
|> Enum.reduce([], fn ({v, idx}, acc) ->
[%{"order" => (idx + 1),
"name" => v} | acc]
end)
end
def projects_with_yml do
yml = CncfDashboardApi.YmlReader.GitlabCi.get() |> YamlElixir.read_from_string
yml["projects"]
|> Stream.with_index
|> Enum.reduce([], fn ({{k, v}, idx}, acc) ->
case configuration_repo_path(v["configuration_repo"]) |> getcncfci() do
{:error, :not_found} ->
acc
_ ->
[%{"project_name" => k} | acc]
end
end)
end
def configuration_repo_path(configuration_repo) do
# Logger.info fn ->
# "env variable: #{inspect(System.get_env("PROJECT_SEGMENT_ENV"))}"
# end
"#{configuration_repo}/#{System.get_env("PROJECT_SEGMENT_ENV")}/cncfci.yml"
end
def project_list do
project_names = CncfDashboardApi.YmlReader.GitlabCi.projects_with_yml()
yml = CncfDashboardApi.YmlReader.GitlabCi.get() |> YamlElixir.read_from_string
yml["projects"]
|> Stream.with_index
    |> Enum.reduce([], fn {{k, v}, _idx}, acc ->
      # Project attributes come from the project's own cncfci.yml when one is
      # published, otherwise from the global cross-cloud yml entry.
      {display_name, subtitle, project_url, logo_url, stable_ref, head_ref} =
        case Enum.find_value(project_names, fn x -> x["project_name"] == k end) do
          true ->
            Logger.info fn ->
              "env variable: #{inspect(System.get_env("PROJECT_SEGMENT_ENV"))}"
            end
            cncfci_yml = configuration_repo_path(v["configuration_repo"]) |> getcncfci() |> YamlElixir.read_from_string
            Logger.info fn ->
              "cncfciyml: #{inspect(cncfci_yml)}"
            end
            {cncfci_yml["project"]["display_name"], cncfci_yml["project"]["sub_title"],
             cncfci_yml["project"]["project_url"], cncfci_yml["project"]["logo_url"],
             cncfci_yml["project"]["stable_ref"], cncfci_yml["project"]["head_ref"]}
          _ ->
            {v["display_name"], v["sub_title"], v["project_url"], v["logo_url"],
             v["stable_ref"], v["head_ref"]}
        end
      # global config overwrites the project config
      display_name = v["display_name"] || display_name
      subtitle = v["sub_title"] || subtitle
      project_url = v["project_url"] || project_url
      logo_url = v["logo_url"] || logo_url
      stable_ref = v["stable_ref"] || stable_ref
      head_ref = v["head_ref"] || head_ref
      [%{"id" => 0,
        "yml_name" => k,
        "active" => v["active"],
        "logo_url" => logo_url,
        "display_name" => display_name,
        "sub_title" => subtitle,
        "yml_gitlab_name" => v["gitlab_name"],
        "project_url" => project_url,
        "repository_url" => v["repository_url"],
        "configuration_repo" => v["configuration_repo"],
        "timeout" => v["timeout"],
        "cncf_relation" => v["cncf_relation"],
        "stable_ref" => stable_ref,
        "head_ref" => head_ref,
        "order" => v["order"]} | acc]
    end)
end
def gitlab_pipeline_config do
yml = CncfDashboardApi.YmlReader.GitlabCi.get() |> YamlElixir.read_from_string
yml["gitlab_pipeline"]
|> Stream.with_index
|> Enum.reduce([], fn ({{k, v}, idx}, acc) ->
# [%{"id" => (idx + 1),
[%{"id" => 0,
"pipeline_name" => k,
"timeout" => v["timeout"],
"status_jobs" => v["status_jobs"],
} | acc]
end)
end
end
| 36.319277 | 141 | 0.590148 |
f713372154d97768d524dadaceec4b204cc06b30 | 7,309 | ex | Elixir | lib/ueberauth/strategy/facebook.ex | ChannexIO/ueberauth_facebook | 8c9e30dedb17318932d8811bac6ac4dc04d9ffb0 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/facebook.ex | ChannexIO/ueberauth_facebook | 8c9e30dedb17318932d8811bac6ac4dc04d9ffb0 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/facebook.ex | ChannexIO/ueberauth_facebook | 8c9e30dedb17318932d8811bac6ac4dc04d9ffb0 | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.Facebook do
@moduledoc """
Facebook Strategy for Überauth.
"""
use Ueberauth.Strategy,
default_scope: "email,public_profile",
profile_fields: "id,email,gender,link,locale,name,timezone,updated_time,verified",
uid_field: :id,
allowed_request_params: [
:auth_type,
:scope,
:locale,
:state,
:display
]
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
@doc """
Handles initial request for Facebook authentication.
"""
def handle_request!(conn) do
allowed_params =
conn
|> option(:allowed_request_params)
|> Enum.map(&to_string/1)
opts = oauth_client_options_from_conn(conn)
authorize_url =
conn.params
|> maybe_replace_param(conn, "auth_type", :auth_type)
|> maybe_replace_param(conn, "scope", :default_scope)
|> maybe_replace_param(conn, "state", :state)
|> maybe_replace_param(conn, "display", :display)
|> Enum.filter(fn {k, _v} -> Enum.member?(allowed_params, k) end)
|> Enum.map(fn {k, v} -> {String.to_existing_atom(k), v} end)
|> Keyword.put(
:redirect_uri,
conn |> callback_url() |> String.replace_leading("http://", "https://")
)
|> Ueberauth.Strategy.Facebook.OAuth.authorize_url!(opts)
redirect!(conn, authorize_url)
end
@doc """
Handles the callback from Facebook.
"""
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
opts = oauth_client_options_from_conn(conn)
config =
:ueberauth
|> Application.get_env(Ueberauth.Strategy.Facebook.OAuth, [])
|> Keyword.merge(opts)
try do
client = Ueberauth.Strategy.Facebook.OAuth.get_token!([code: code], opts)
token = client.token
if token.access_token == nil do
err = token.other_params["error"]
desc = token.other_params["error_description"]
set_errors!(conn, [error(err, desc)])
else
fetch_user(conn, client, config)
end
rescue
OAuth2.Error ->
set_errors!(conn, [error("invalid_code", "The code has been used or has expired")])
end
end
def handle_callback!(%Plug.Conn{params: %{"id_token" => access_token}} = conn) do
opts = oauth_client_options_from_conn(conn)
config =
:ueberauth
|> Application.get_env(Ueberauth.Strategy.Facebook.OAuth, [])
|> Keyword.merge(opts)
client = Ueberauth.Strategy.Facebook.OAuth.client()
token = OAuth2.AccessToken.new(access_token)
if check_access_token(conn, client, token) do
fetch_user(conn, %{client | token: token}, config)
else
set_errors!(conn, [error("token", "Token verification failed")])
end
end
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
@doc false
def handle_cleanup!(conn) do
conn
|> put_private(:facebook_user, nil)
|> put_private(:facebook_token, nil)
end
@doc """
Fetches the uid field from the response.
"""
def uid(conn) do
uid_field =
conn
|> option(:uid_field)
|> to_string
conn.private.facebook_user[uid_field]
end
@doc """
Includes the credentials from the facebook response.
"""
def credentials(conn) do
token = conn.private.facebook_token
scopes = token.other_params["scope"] || ""
scopes = String.split(scopes, ",")
%Credentials{
expires: !!token.expires_at,
expires_at: token.expires_at,
scopes: scopes,
token: token.access_token
}
end
@doc """
Fetches the fields to populate the info section of the
`Ueberauth.Auth` struct.
"""
def info(conn) do
user = conn.private.facebook_user
%Info{
description: user["bio"],
email: user["email"],
first_name: user["first_name"],
image: fetch_image(user["id"]),
last_name: user["last_name"],
name: user["name"],
urls: %{
facebook: user["link"],
website: user["website"]
}
}
end
@doc """
Stores the raw information (including the token) obtained from
the facebook callback.
"""
def extra(conn) do
%Extra{
raw_info: %{
token: conn.private.facebook_token,
user: conn.private.facebook_user
}
}
end
defp fetch_image(uid) do
"https://graph.facebook.com/#{uid}/picture?type=large"
end
defp fetch_user(conn, client, config) do
conn = put_private(conn, :facebook_token, client.token)
query = user_query(conn, client.token, config)
path = "/me?#{query}"
case OAuth2.Client.get(client, path) do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: user}}
when status_code in 200..399 ->
put_private(conn, :facebook_user, user)
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
defp user_query(conn, token, config) do
%{"appsecret_proof" => appsecret_proof(token, config)}
|> Map.merge(query_params(conn, :locale))
|> Map.merge(query_params(conn, :profile))
|> URI.encode_query()
end
defp appsecret_proof(token, config) do
client_secret = Keyword.get(config, :client_secret)
token.access_token
|> hmac(:sha256, client_secret)
|> Base.encode16(case: :lower)
end
defp hmac(data, type, key) do
:crypto.hmac(type, key, data)
end
defp query_params(conn, :profile) do
%{"fields" => option(conn, :profile_fields)}
end
defp query_params(conn, :locale) do
case option(conn, :locale) do
nil -> %{}
locale -> %{"locale" => locale}
end
end
defp option(conn, key) do
default = Keyword.get(default_options(), key)
conn
|> options
|> Keyword.get(key, default)
end
defp option(nil, conn, key), do: option(conn, key)
defp option(value, _conn, _key), do: value
defp maybe_replace_param(params, conn, name, config_key) do
if params[name] || is_nil(option(params[name], conn, config_key)) do
params
else
Map.put(
params,
name,
option(params[name], conn, config_key)
)
end
end
defp oauth_client_options_from_conn(conn) do
base_options = [
redirect_uri: conn |> callback_url() |> String.replace_leading("http://", "https://")
]
request_options = conn.private[:ueberauth_request_options].options
case {request_options[:client_id], request_options[:client_secret]} do
{nil, _} -> base_options
{_, nil} -> base_options
{id, secret} -> [client_id: id, client_secret: secret] ++ base_options
end
end
def check_access_token(_conn, client, token) do
app_id = client.client_id
app_secret = client.client_secret
query =
URI.encode_query(%{
"input_token" => token.access_token,
"access_token" => "#{app_id}|#{app_secret}"
})
path = "/debug_token?#{query}"
case OAuth2.Client.get(client, path) do
{:ok,
%OAuth2.Response{
status_code: 200,
body: %{"data" => %{"is_valid" => true, "app_id" => ^app_id}}
}} ->
true
_ ->
false
end
end
end
| 25.826855 | 91 | 0.630182 |
f7138104f7c3770823066bf21c9ab05686bc9cd2 | 1,726 | exs | Elixir | apps/nerves_hub_device/config/release.exs | nerves-hub/beamware | 5c239b7125a82d8fcb2b84ff1ad22d3bb0af01eb | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_device/config/release.exs | nerves-hub/beamware | 5c239b7125a82d8fcb2b84ff1ad22d3bb0af01eb | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_device/config/release.exs | nerves-hub/beamware | 5c239b7125a82d8fcb2b84ff1ad22d3bb0af01eb | [
"Apache-2.0"
] | null | null | null | import Config
logger_level = System.get_env("LOG_LEVEL", "warn") |> String.to_atom()
config :logger, level: logger_level
sync_nodes_optional =
case System.fetch_env("SYNC_NODES_OPTIONAL") do
{:ok, sync_nodes_optional} ->
sync_nodes_optional
|> String.split(" ", trim: true)
|> Enum.map(&String.to_atom/1)
:error ->
[]
end
config :kernel,
sync_nodes_optional: sync_nodes_optional,
sync_nodes_timeout: 5000,
inet_dist_listen_min: 9100,
inet_dist_listen_max: 9155
if rollbar_access_token = System.get_env("ROLLBAR_ACCESS_TOKEN") do
config :rollbax, access_token: rollbar_access_token
else
config :rollbax, enabled: false
end
config :nerves_hub_web_core,
from_email: System.get_env("FROM_EMAIL", "no-reply@nerves-hub.org")
config :nerves_hub_web_core, NervesHubWebCore.Firmwares.Upload.S3,
bucket: System.fetch_env!("S3_BUCKET_NAME")
config :nerves_hub_web_core, NervesHubWebCore.Workers.FirmwaresTransferS3Ingress,
bucket: System.fetch_env!("S3_LOG_BUCKET_NAME")
config :nerves_hub_device, NervesHubDeviceWeb.Endpoint, server: true
config :nerves_hub_web_core, NervesHubWebCore.Mailer,
adapter: Bamboo.SMTPAdapter,
server: System.fetch_env!("SES_SERVER"),
port: System.fetch_env!("SES_PORT"),
username: System.fetch_env!("SMTP_USERNAME"),
password: System.fetch_env!("SMTP_PASSWORD")
host = System.fetch_env!("HOST")
config :nerves_hub_device, NervesHubDeviceWeb.Endpoint,
url: [host: host],
https: [
port: 443,
otp_app: :nerves_hub_device,
# Enable client SSL
verify: :verify_peer,
fail_if_no_peer_cert: true,
keyfile: "/etc/ssl/#{host}-key.pem",
certfile: "/etc/ssl/#{host}.pem",
cacertfile: "/etc/ssl/ca.pem"
]
| 27.83871 | 81 | 0.739282 |
f71395980dcf27e71cf0e8da965041c1e6a649af | 2,991 | exs | Elixir | apps/tai/test/tai/iex/commands/advisors_test.exs | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 276 | 2018-01-16T06:36:06.000Z | 2021-03-20T21:48:01.000Z | apps/tai/test/tai/iex/commands/advisors_test.exs | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 73 | 2018-10-05T18:45:06.000Z | 2021-02-08T05:46:33.000Z | apps/tai/test/tai/iex/commands/advisors_test.exs | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 43 | 2018-06-09T09:54:51.000Z | 2021-03-07T07:35:17.000Z | defmodule Tai.IEx.Commands.AdvisorsTest do
use Tai.TestSupport.DataCase, async: false
import ExUnit.CaptureIO
test "shows all advisors in all fleets ordered fleet id, advisor id by default" do
mock_advisor_config(%{fleet_id: :log_spread, advisor_id: :a})
mock_advisor_config(%{fleet_id: :log_spread, advisor_id: :b})
mock_advisor_config(%{fleet_id: :trade_spread, advisor_id: :a})
assert capture_io(&Tai.IEx.advisors/0) == """
+--------------+------------+-----------+-----+--------+
| Fleet ID | Advisor ID | Status | PID | Config |
+--------------+------------+-----------+-----+--------+
| log_spread | a | unstarted | - | %{} |
| log_spread | b | unstarted | - | %{} |
| trade_spread | a | unstarted | - | %{} |
+--------------+------------+-----------+-----+--------+\n
"""
end
test "shows an empty table when there are no advisors" do
assert capture_io(&Tai.IEx.advisors/0) == """
+----------+------------+--------+-----+--------+
| Fleet ID | Advisor ID | Status | PID | Config |
+----------+------------+--------+-----+--------+
| - | - | - | - | - |
+----------+------------+--------+-----+--------+\n
"""
end
test "can filter by struct attributes" do
mock_advisor_config(%{fleet_id: :log_spread, advisor_id: :a})
mock_advisor_config(%{fleet_id: :log_spread, advisor_id: :b})
mock_advisor_config(%{fleet_id: :trade_spread, advisor_id: :a})
assert capture_io(fn -> Tai.IEx.advisors(where: [fleet_id: :log_spread]) end) == """
+------------+------------+-----------+-----+--------+
| Fleet ID | Advisor ID | Status | PID | Config |
+------------+------------+-----------+-----+--------+
| log_spread | a | unstarted | - | %{} |
| log_spread | b | unstarted | - | %{} |
+------------+------------+-----------+-----+--------+\n
"""
end
test "can order ascending by struct attributes" do
mock_advisor_config(%{fleet_id: :log_spread, advisor_id: :a})
mock_advisor_config(%{fleet_id: :log_spread, advisor_id: :b})
mock_advisor_config(%{fleet_id: :trade_spread, advisor_id: :a})
assert capture_io(fn -> Tai.IEx.advisors(order: [:advisor_id, :fleet_id]) end) == """
+--------------+------------+-----------+-----+--------+
| Fleet ID | Advisor ID | Status | PID | Config |
+--------------+------------+-----------+-----+--------+
| log_spread | a | unstarted | - | %{} |
| trade_spread | a | unstarted | - | %{} |
| log_spread | b | unstarted | - | %{} |
+--------------+------------+-----------+-----+--------+\n
"""
end
end
| 48.241935 | 89 | 0.401204 |
f71432edfdb3b5107268646fef304aa982f09fda | 2,626 | exs | Elixir | test/controllers/teacher_controller_test.exs | deerob4/salop-teaching-school | 5c4c4a15232859b5b3bdc84ec9874689891ca9d8 | [
"MIT"
] | null | null | null | test/controllers/teacher_controller_test.exs | deerob4/salop-teaching-school | 5c4c4a15232859b5b3bdc84ec9874689891ca9d8 | [
"MIT"
] | null | null | null | test/controllers/teacher_controller_test.exs | deerob4/salop-teaching-school | 5c4c4a15232859b5b3bdc84ec9874689891ca9d8 | [
"MIT"
] | null | null | null | defmodule TeachingSchool.TeacherControllerTest do
use TeachingSchool.ConnCase
alias TeachingSchool.Teacher
@valid_attrs %{additional_contact: "some content", email: "some content", family_name: "some content", forename: "some content", school_type: "some content", subject: "some content", title: "some content"}
@invalid_attrs %{}
test "lists all entries on index", %{conn: conn} do
conn = get conn, teacher_path(conn, :index)
assert html_response(conn, 200) =~ "Listing teachers"
end
test "renders form for new resources", %{conn: conn} do
conn = get conn, teacher_path(conn, :new)
assert html_response(conn, 200) =~ "New teacher"
end
test "creates resource and redirects when data is valid", %{conn: conn} do
conn = post conn, teacher_path(conn, :create), teacher: @valid_attrs
assert redirected_to(conn) == teacher_path(conn, :index)
assert Repo.get_by(Teacher, @valid_attrs)
end
test "does not create resource and renders errors when data is invalid", %{conn: conn} do
conn = post conn, teacher_path(conn, :create), teacher: @invalid_attrs
assert html_response(conn, 200) =~ "New teacher"
end
test "shows chosen resource", %{conn: conn} do
teacher = Repo.insert! %Teacher{}
conn = get conn, teacher_path(conn, :show, teacher)
assert html_response(conn, 200) =~ "Show teacher"
end
test "renders page not found when id is nonexistent", %{conn: conn} do
assert_error_sent 404, fn ->
get conn, teacher_path(conn, :show, -1)
end
end
test "renders form for editing chosen resource", %{conn: conn} do
teacher = Repo.insert! %Teacher{}
conn = get conn, teacher_path(conn, :edit, teacher)
assert html_response(conn, 200) =~ "Edit teacher"
end
test "updates chosen resource and redirects when data is valid", %{conn: conn} do
teacher = Repo.insert! %Teacher{}
conn = put conn, teacher_path(conn, :update, teacher), teacher: @valid_attrs
assert redirected_to(conn) == teacher_path(conn, :show, teacher)
assert Repo.get_by(Teacher, @valid_attrs)
end
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
teacher = Repo.insert! %Teacher{}
conn = put conn, teacher_path(conn, :update, teacher), teacher: @invalid_attrs
assert html_response(conn, 200) =~ "Edit teacher"
end
test "deletes chosen resource", %{conn: conn} do
teacher = Repo.insert! %Teacher{}
conn = delete conn, teacher_path(conn, :delete, teacher)
assert redirected_to(conn) == teacher_path(conn, :index)
refute Repo.get(Teacher, teacher.id)
end
end
| 39.19403 | 207 | 0.699924 |
f7144dec8edccef57e02239d5930f6d42fd3e9cc | 4,333 | ex | Elixir | kousa/lib/routes/Auth.ex | maskeynihal/dogehouse | 915a9648bfb7a368caea5d10551385ec5ab6b04e | [
"MIT"
] | 1 | 2021-05-15T07:05:44.000Z | 2021-05-15T07:05:44.000Z | kousa/lib/routes/Auth.ex | maskeynihal/dogehouse | 915a9648bfb7a368caea5d10551385ec5ab6b04e | [
"MIT"
] | null | null | null | kousa/lib/routes/Auth.ex | maskeynihal/dogehouse | 915a9648bfb7a368caea5d10551385ec5ab6b04e | [
"MIT"
] | null | null | null | defmodule Kousa.Auth do
import Plug.Conn
use Plug.Router
plug(:match)
plug(:dispatch)
get "/web" do
url =
"https://github.com/login/oauth/authorize?client_id=" <>
Application.get_env(:kousa, :client_id) <>
"&state=web" <>
"&redirect_uri=" <>
Application.get_env(:kousa, :api_url) <>
"/auth/github/callback&scope=read:user,user:email"
Kousa.Redirect.redirect(conn, url)
end
get "/" do
url =
"https://github.com/login/oauth/authorize?client_id=" <>
Application.get_env(:kousa, :client_id) <>
"&redirect_uri=" <>
Application.get_env(:kousa, :api_url) <>
"/auth/github/callback&scope=read:user,user:email"
Kousa.Redirect.redirect(conn, url)
end
get "/callback" do
conn_with_qp = fetch_query_params(conn)
code = conn_with_qp.query_params["code"]
base_url =
if Map.get(conn_with_qp.query_params, "state", "") == "web",
do: Application.fetch_env!(:kousa, :web_url),
else: "http://localhost:54321"
case HTTPoison.post(
"https://github.com/login/oauth/access_token",
Poison.encode!(%{
"code" => code,
"client_id" => Application.get_env(:kousa, :client_id),
"client_secret" => Application.get_env(:kousa, :client_secret)
}),
[
{"Content-Type", "application/json"},
{"Accept", "application/json"}
]
) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
json = Poison.decode!(body)
case json do
%{"error" => "bad_verification_code"} ->
conn
|> put_resp_content_type("application/json")
|> send_resp(
500,
Poison.encode!(%{
"error" => "code expired, try to login again"
})
)
%{"access_token" => accessToken} ->
user = Kousa.Github.get_user(accessToken)
if user do
try do
db_user =
case Kousa.Data.User.find_or_create(user, accessToken) do
{:find, uu} ->
uu
{:create, uu} ->
Kousa.BL.User.load_followers(accessToken, uu.id)
uu
end
if not is_nil(db_user.reasonForBan) do
conn
|> Kousa.Redirect.redirect(
base_url <>
"/?error=" <>
URI.encode(
"your account got banned, if you think this was a mistake, please send me an email at benawadapps@gmail.com"
)
)
else
conn
|> Kousa.Redirect.redirect(
base_url <>
"/?accessToken=" <>
Kousa.AccessToken.generate_and_sign!(%{"userId" => db_user.id}) <>
"&refreshToken=" <>
Kousa.RefreshToken.generate_and_sign!(%{
"userId" => db_user.id,
"tokenVersion" => db_user.tokenVersion
})
)
end
rescue
e in RuntimeError ->
conn
|> Kousa.Redirect.redirect(
base_url <>
"/?error=" <>
URI.encode(e.message)
)
end
else
conn
|> Kousa.Redirect.redirect(
base_url <>
"/?error=" <>
URI.encode(
"something went wrong fetching the user, tell ben to check the server logs"
)
)
end
resp ->
conn
|> Kousa.Redirect.redirect(
base_url <>
"/?error=" <>
URI.encode(inspect(resp))
)
end
x ->
IO.inspect(x)
conn
|> Kousa.Redirect.redirect(
base_url <>
"/?error=" <>
URI.encode("something went wrong, tell ben to check the server logs")
)
end
end
end
| 30.090278 | 132 | 0.452342 |
f71494a74ef24e550cd02a8de8c5e441ac102147 | 393 | ex | Elixir | lib/oli/delivery/activity_provider.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | lib/oli/delivery/activity_provider.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | lib/oli/delivery/activity_provider.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | defmodule Oli.Delivery.ActivityProvider do
alias Oli.Activities.Realizer
alias Oli.Publishing.DeliveryResolver
alias Oli.Resources.Revision
@doc """
Realizes and resolves activities.
"""
def provide(section_slug, %Revision{} = revision) do
case Realizer.realize(revision) do
[] -> []
ids -> DeliveryResolver.from_resource_id(section_slug, ids)
end
end
end
| 24.5625 | 65 | 0.720102 |
f714b6cbdc3a34d8370973aa77144b62ea1c2500 | 46,353 | ex | Elixir | lib/enum.ex | sabiwara/aja | cde91e4263e54a11a1685a777dbffd4912fe3864 | [
"MIT"
] | 95 | 2020-10-18T09:27:46.000Z | 2022-03-29T20:03:16.000Z | lib/enum.ex | sabiwara/aja | cde91e4263e54a11a1685a777dbffd4912fe3864 | [
"MIT"
] | 1 | 2021-09-22T20:30:08.000Z | 2021-10-13T23:55:34.000Z | lib/enum.ex | sabiwara/aja | cde91e4263e54a11a1685a777dbffd4912fe3864 | [
"MIT"
] | 1 | 2020-12-15T12:36:16.000Z | 2020-12-15T12:36:16.000Z | defmodule Aja.Enum do
@moduledoc """
Drop-in replacement for the `Enum` module, optimized to work with Aja's data structures such as `Aja.Vector`.
It currently only covers a subset of `Enum`, but `Aja.Enum` aims to completely mirror the API of `Enum`,
and should behave exactly the same for any type of `Enumerable`.
The only expected difference should be a significant increase in performance for Aja structures.
## Rationale
Structures such as `Aja.Vector` or `Aja.OrdMap` implement the `Enumerable` protocol, which means they can be
used directly with the `Enum` module. The `Enumerable` protocol, however, comes with its own overhead and is strongly
limited in terms of performance.
On the other hand, `Aja.Enum` provides hand-crafted, highly-optimized functions that fully take advantage of
immutable vectors. The speedup can easily exceed a factor of 10 compared to `Enum` used on non-list
structures, and can sometimes even be noticeably faster than `Enum` used over lists.
One of the main reasons to adopt a specific data structure is performance.
Using vectors with `Enum` would defeat the purpose, hence the introduction of `Aja.Enum`.
iex> vector = Aja.Vector.new(1..10000)
iex> Enum.sum(vector) # slow
50005000
iex> Aja.Enum.sum(vector) # same result, much faster
50005000
"""
require Aja.Vector.Raw, as: RawVector
alias Aja.EnumHelper, as: H
@compile :inline_list_funcs
@dialyzer :no_opaque
@type index :: integer
@type value :: any
@type t(value) :: Aja.Vector.t(value) | [value] | Enumerable.t()
@empty_vector RawVector.empty()
# TODO optimize ranges (sum, random...)
@doc """
Converts `enumerable` to a list.
Mirrors `Enum.to_list/1` with higher performance for Aja structures.
"""
@spec to_list(t(val)) :: [val] when val: value
defdelegate to_list(enumerable), to: H
@doc """
Returns the size of the `enumerable`.
Mirrors `Enum.count/1` with higher performance for Aja structures.
"""
@spec count(t(any)) :: non_neg_integer
def count(enumerable) do
case enumerable do
list when is_list(list) -> length(list)
%Aja.Vector{__vector__: vector} -> RawVector.size(vector)
%Aja.OrdMap{__ord_map__: map} -> map_size(map)
%MapSet{} -> MapSet.size(enumerable)
start..stop -> abs(start - stop) + 1
_ -> Enum.count(enumerable)
end
end
@doc """
Returns the count of elements in the `enumerable` for which `fun` returns
a truthy value.
Mirrors `Enum.count/2` with higher performance for Aja structures.
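
  For example, counting the even elements of a vector:

      iex> Aja.Enum.count(Aja.Vector.new([1, 2, 3, 4, 5]), &(rem(&1, 2) == 0))
      2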
"""
@spec count(t(val), (val -> as_boolean(term))) :: non_neg_integer when val: value
def count(enumerable, fun) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.count(enumerable, fun)
list when is_list(list) ->
count_list(list, fun, 0)
vector ->
RawVector.count(vector, fun)
end
end
defp count_list([], _fun, acc), do: acc
defp count_list([head | tail], fun, acc) do
new_acc =
if fun.(head) do
acc + 1
else
acc
end
count_list(tail, fun, new_acc)
end
@doc """
Returns `true` if `enumerable` is empty, otherwise `false`.
Mirrors `Enum.empty?/1` with higher performance for Aja structures.
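
  ## Examples

      iex> Aja.Enum.empty?(Aja.Vector.new([]))
      true
      iex> Aja.Enum.empty?(Aja.Vector.new([1]))
      false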
"""
@spec empty?(t(any)) :: boolean
def empty?(enumerable) do
case enumerable do
list when is_list(list) -> list == []
%Aja.Vector{__vector__: vector} -> vector === @empty_vector
%Aja.OrdMap{__ord_map__: map} -> map == %{}
%MapSet{} -> MapSet.size(enumerable) == 0
%Range{} -> false
_ -> Enum.empty?(enumerable)
end
end
# Note: Could not optimize it noticeably for vectors
@doc """
Checks if `element` exists within the `enumerable`.
Just an alias for `Enum.member?/2`, does not improve performance.
"""
@spec member?(t(val), val) :: boolean when val: value
defdelegate member?(enumerable, value), to: Enum
# TODO optimize for vector
@doc """
Returns a subset list of the given `enumerable` by `index_range`.
Mirrors `Enum.slice/2` with higher performance for Aja structures.
"""
@spec slice(t(val), Range.t()) :: [val] when val: value
defdelegate slice(enumerable, index_range), to: Enum
@doc """
Returns a subset list of the given `enumerable`, from `start_index` (zero-based)
with `amount` number of elements if available.
Mirrors `Enum.slice/3`.
"""
@spec slice(t(val), index, non_neg_integer) :: [val] when val: value
defdelegate slice(enumerable, start_index, amount), to: Enum
@doc """
Inserts the given `enumerable` into a `collectable`.
Mirrors `Enum.into/2` with higher performance for Aja structures.
"""
@spec into(t(val), Collectable.t()) :: Collectable.t() when val: value
def into(enumerable, collectable)
def into(enumerable, %Aja.Vector{} = vector) do
# TODO improve when this is the empty vector/ord_map
Aja.Vector.concat(vector, enumerable)
end
def into(enumerable, %Aja.OrdMap{} = ord_map) do
Aja.OrdMap.merge_list(ord_map, H.to_list(enumerable))
end
def into(enumerable, collectable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> enumerable
list when is_list(list) -> list
vector -> RawVector.to_list(vector)
end
|> Enum.into(collectable)
end
@doc """
Inserts the given `enumerable` into a `collectable` according to the `transform` function.
Mirrors `Enum.into/3` with higher performance for Aja structures.
"""
def into(enumerable, collectable, transform)
def into(enumerable, %Aja.Vector{} = vector, transform) do
# TODO we can probably improve this with the builder
Aja.Vector.concat(vector, H.map(enumerable, transform))
end
def into(enumerable, %Aja.OrdMap{} = ord_map, transform) do
Aja.OrdMap.merge_list(ord_map, H.map(enumerable, transform))
end
def into(enumerable, collectable, transform) when is_function(transform, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> enumerable
list when is_list(list) -> list
vector -> RawVector.to_list(vector)
end
|> Enum.into(collectable, transform)
end
@doc """
Given an enumerable of enumerables, concatenates the `enumerables` into
a single list.
Mirrors `Enum.concat/1` with higher performance for Aja structures.
"""
@spec concat(t(t(val))) :: t(val) when val: value
def concat(enumerables) do
case H.try_get_raw_vec_or_list(enumerables) do
nil -> Enum.reverse(enumerables) |> concat_wrap([])
list when is_list(list) -> :lists.reverse(list) |> concat_wrap([])
vector -> RawVector.foldr(vector, [], &concat/2)
end
end
defp concat_wrap(_reversed = [], acc), do: acc
defp concat_wrap([head | tail], acc) do
concat_wrap(tail, concat(head, acc))
end
@doc """
Concatenates the enumerable on the `right` with the enumerable on the `left`.
Mirrors `Enum.concat/2` with higher performance for Aja structures.
"""
@spec concat(t(val), t(val)) :: t(val) when val: value
def concat(left, right)
def concat(left, right) when is_list(left) and is_list(right) do
left ++ right
end
def concat(left, right) do
case H.try_get_raw_vec_or_list(left) do
nil -> Enum.concat(left, right)
list when is_list(list) -> list ++ to_list(right)
vector -> RawVector.to_list(vector, to_list(right))
end
end
@doc """
Finds the element at the given `index` (zero-based).
Mirrors `Enum.at/3` with higher performance for Aja structures.
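
  ## Examples

      iex> Aja.Enum.at(Aja.Vector.new([:a, :b, :c]), 1)
      :b
      iex> Aja.Enum.at(Aja.Vector.new([:a, :b, :c]), 10, :none)
      :none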
"""
@spec at(t(val), integer, default) :: val | default when val: value, default: any
def at(enumerable, index, default \\ nil) when is_integer(index) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.at(enumerable, index, default)
list when is_list(list) ->
Enum.at(list, index, default)
vector ->
size = RawVector.size(vector)
case RawVector.actual_index(index, size) do
nil -> default
actual_index -> RawVector.fetch_positive!(vector, actual_index)
end
end
end
@doc """
Finds the element at the given `index` (zero-based).
Mirrors `Enum.fetch/2` with higher performance for Aja structures.
"""
@spec fetch(t(val), integer) :: {:ok, val} | :error when val: value
def fetch(enumerable, index) when is_integer(index) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.fetch(enumerable, index)
list when is_list(list) ->
Enum.fetch(list, index)
vector ->
size = RawVector.size(vector)
case RawVector.actual_index(index, size) do
nil -> :error
actual_index -> {:ok, RawVector.fetch_positive!(vector, actual_index)}
end
end
end
@doc """
Finds the element at the given `index` (zero-based).
Mirrors `Enum.fetch!/2` with higher performance for Aja structures.
"""
@spec fetch!(t(val), integer) :: val when val: value
def fetch!(enumerable, index) when is_integer(index) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.fetch!(enumerable, index)
list when is_list(list) ->
Enum.fetch!(list, index)
vector ->
size = RawVector.size(vector)
case RawVector.actual_index(index, size) do
nil -> raise Enum.OutOfBoundsError
actual_index -> RawVector.fetch_positive!(vector, actual_index)
end
end
end
@doc """
Returns a list of elements in `enumerable` in reverse order.
Mirrors `Enum.reverse/1` with higher performance for Aja structures.
"""
@spec reverse(t(val)) :: [val] when val: value
def reverse(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.reverse(enumerable)
list when is_list(list) -> :lists.reverse(list)
vector -> RawVector.reverse_to_list(vector, [])
end
end
@doc """
Reverses the elements in `enumerable`, concatenates the `tail`,
and returns it as a list.
Mirrors `Enum.reverse/2` with higher performance for Aja structures.
"""
@spec reverse(t(val), t(val)) :: [val] when val: value
def reverse(enumerable, tail) do
tail = H.to_list(tail)
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.reverse(enumerable, tail)
list when is_list(list) -> :lists.reverse(list, tail)
vector -> RawVector.reverse_to_list(vector, tail)
end
end
@doc """
Returns a list where each element is the result of invoking
`fun` on each corresponding element of `enumerable`.
Mirrors `Enum.map/2` with higher performance for Aja structures.
"""
@spec map(t(v1), (v1 -> v2)) :: [v2] when v1: value, v2: value
defdelegate map(enumerable, fun), to: H
@doc """
Filters the `enumerable`, i.e. returns only those elements
for which `fun` returns a truthy value.
Mirrors `Enum.filter/2` with higher performance for Aja structures.
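
  ## Examples

      iex> Aja.Enum.filter(Aja.Vector.new(1..6), &(rem(&1, 2) == 0))
      [2, 4, 6]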
"""
@spec filter(t(val), (val -> as_boolean(term))) :: [val] when val: value
def filter(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.filter(enumerable, fun)
list when is_list(list) -> filter_list(list, fun, [])
vector -> RawVector.filter_to_list(vector, fun)
end
end
defp filter_list([], _fun, acc), do: :lists.reverse(acc)
defp filter_list([head | tail], fun, acc) do
acc =
if fun.(head) do
[head | acc]
else
acc
end
filter_list(tail, fun, acc)
end
@doc """
Returns a list of elements in `enumerable` excluding those for which the function `fun` returns
a truthy value.
Mirrors `Enum.reject/2` with higher performance for Aja structures.
"""
@spec reject(t(val), (val -> as_boolean(term))) :: [val] when val: value
def reject(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.reject(enumerable, fun)
list when is_list(list) -> Enum.reject(list, fun)
vector -> RawVector.reject_to_list(vector, fun)
end
end
@doc """
Splits the `enumerable` in two lists according to the given function `fun`.
Mirrors `Enum.split_with/2` with higher performance for Aja structures.
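
  ## Examples

      iex> Aja.Enum.split_with(Aja.Vector.new(1..6), &(rem(&1, 2) == 0))
      {[2, 4, 6], [1, 3, 5]}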
"""
@spec split_with(t(val), (val -> as_boolean(term))) :: {[val], [val]} when val: value
def split_with(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.split_with(enumerable, fun)
list when is_list(list) -> Enum.split_with(list, fun)
vector -> vector |> RawVector.to_list() |> Enum.split_with(fun)
end
end
@doc """
Invokes `fun` for each element in the `enumerable` with the
accumulator.
Mirrors `Enum.reduce/2` with higher performance for Aja structures.
"""
@spec reduce(t(val), (val, val -> val)) :: val when val: value
def reduce(enumerable, fun) when is_function(fun, 2) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.reduce(enumerable, fun)
list when is_list(list) -> Enum.reduce(list, fun)
vector -> RawVector.reduce(vector, fun)
end
end
@doc """
Invokes `fun` for each element in the `enumerable` with the accumulator.
Mirrors `Enum.reduce/3` with higher performance for Aja structures.
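
  ## Examples

      iex> Aja.Enum.reduce(Aja.Vector.new(1..4), 0, fn x, acc -> x + acc end)
      10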
"""
@spec reduce(t(val), acc, (val, acc -> acc)) :: acc when val: value, acc: term
def reduce(enumerable, acc, fun) when is_function(fun, 2) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.reduce(enumerable, acc, fun)
list when is_list(list) -> :lists.foldl(fun, acc, list)
vector -> RawVector.foldl(vector, acc, fun)
end
end
# FINDS
@doc """
Returns `true` if at least one element in `enumerable` is truthy.
When an element has a truthy value (neither `false` nor `nil`) iteration stops
immediately and `true` is returned. In all other cases `false` is returned.
## Examples
iex> Aja.Enum.any?([false, false, false])
false
iex> Aja.Enum.any?([false, true, false])
true
iex> Aja.Enum.any?([])
false
"""
@spec any?(t(as_boolean(val))) :: boolean when val: value
def any?(enumerable) do
case enumerable do
%Aja.Vector{__vector__: vector} -> RawVector.any?(vector)
_ -> Enum.any?(enumerable)
end
end
@doc """
Returns `true` if `fun.(element)` is truthy for at least one element in `enumerable`.
Iterates over the `enumerable` and invokes `fun` on each element. When an invocation
of `fun` returns a truthy value (neither `false` nor `nil`) iteration stops
immediately and `true` is returned. In all other cases `false` is returned.
## Examples
iex> Aja.Enum.any?([2, 4, 6], fn x -> rem(x, 2) == 1 end)
false
iex> Aja.Enum.any?([2, 3, 4], fn x -> rem(x, 2) == 1 end)
true
iex> Aja.Enum.any?([], fn x -> x > 0 end)
false
"""
# TODO: once we only support Elixir 1.12+
# @doc copy_doc_for.(:any?, 2)
@spec any?(t(val), (val -> as_boolean(term))) :: boolean when val: value
def any?(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.any?(enumerable, fun)
list when is_list(list) -> Enum.any?(list, fun)
vector -> RawVector.any?(vector, fun)
end
end
@doc """
Returns `true` if all elements in `enumerable` are truthy.
When an element has a falsy value (`false` or `nil`) iteration stops immediately
and `false` is returned. In all other cases `true` is returned.
## Examples
iex> Aja.Enum.all?([1, 2, 3])
true
iex> Aja.Enum.all?([1, nil, 3])
false
iex> Aja.Enum.all?([])
true
"""
@spec all?(t(as_boolean(val))) :: boolean when val: value
def all?(enumerable) do
case enumerable do
%Aja.Vector{__vector__: vector} -> RawVector.all?(vector)
_ -> Enum.all?(enumerable)
end
end
@doc """
Returns `true` if `fun.(element)` is truthy for all elements in `enumerable`.
Iterates over `enumerable` and invokes `fun` on each element. If `fun` ever
returns a falsy value (`false` or `nil`), iteration stops immediately and
`false` is returned. Otherwise, `true` is returned.
## Examples
iex> Aja.Enum.all?([2, 4, 6], fn x -> rem(x, 2) == 0 end)
true
iex> Aja.Enum.all?([2, 3, 4], fn x -> rem(x, 2) == 0 end)
false
iex> Aja.Enum.all?([], fn _ -> nil end)
true
"""
# TODO: once we only support Elixir 1.12+
# @doc copy_doc_for.(:all?, 2)
@spec all?(t(val), (val -> as_boolean(term))) :: boolean when val: value
def all?(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.all?(enumerable, fun)
list when is_list(list) -> Enum.all?(list, fun)
vector -> RawVector.all?(vector, fun)
end
end
@doc """
Returns the first element for which `fun` returns a truthy value.
If no such element is found, returns `default`.
Mirrors `Enum.find/3` with higher performance for Aja structures.
"""
@spec find(t(val), default, (val -> as_boolean(term))) :: val | default
when val: value, default: value
def find(enumerable, default \\ nil, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.find(enumerable, default, fun)
list when is_list(list) -> Enum.find(list, default, fun)
vector -> RawVector.find(vector, default, fun)
end
end
@doc """
Similar to `find/3`, but returns the value of the function
invocation instead of the element itself.
Mirrors `Enum.find_value/3` with higher performance for Aja structures.
"""
@spec find_value(t(val), default, (val -> new_val)) :: new_val | default
when val: value, new_val: value, default: value
def find_value(enumerable, default \\ nil, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.find_value(enumerable, default, fun)
list when is_list(list) -> Enum.find_value(list, default, fun)
vector -> RawVector.find_value(vector, fun) || default
end
end
@doc """
Similar to `find/3`, but returns the index (zero-based)
of the element instead of the element itself.
Mirrors `Enum.find_index/2` with higher performance for Aja structures.
"""
@spec find_index(t(val), (val -> as_boolean(term))) :: non_neg_integer | nil when val: value
def find_index(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.find_index(enumerable, fun)
list when is_list(list) -> Enum.find_index(list, fun)
vector -> RawVector.find_index(vector, fun)
end
end
## FOLDS
@doc """
Returns the sum of all elements.
Mirrors `Enum.sum/1` with higher performance for Aja structures.
"""
@spec sum(t(num)) :: num when num: number
def sum(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.sum(enumerable)
list when is_list(list) ->
:lists.sum(list)
vector ->
RawVector.sum(vector)
end
end
@doc """
Returns the product of all elements in the `enumerable`.
Mirrors `Enum.product/1` from Elixir 1.12.
Raises `ArithmeticError` if `enumerable` contains a non-numeric value.
## Examples
iex> 1..5 |> Aja.Enum.product()
120
iex> [] |> Aja.Enum.product()
1
"""
@spec product(t(num)) :: num when num: number
def product(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
# TODO use Enum.product/1 for Elixir 1.11
reduce(enumerable, 1, &*/2)
list when is_list(list) ->
product_list(list, 1)
vector ->
RawVector.product(vector)
end
end
defp product_list([], acc), do: acc
defp product_list([head | rest], acc) do
product_list(rest, head * acc)
end
@doc """
Joins the given `enumerable` into a string using `joiner` as a
separator.
Mirrors `Enum.join/2` with higher performance for Aja structures.
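
  ## Examples

      iex> Aja.Enum.join(Aja.Vector.new(1..5), "-")
      "1-2-3-4-5"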
"""
@spec join(t(val), String.t()) :: String.t() when val: String.Chars.t()
def join(enumerable, joiner \\ "") when is_binary(joiner) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.join(enumerable, joiner)
list when is_list(list) ->
Enum.join(list, joiner)
vector ->
# TODO add join_as_iodata
RawVector.join_as_iodata(vector, joiner) |> IO.iodata_to_binary()
end
end
@doc """
Maps and joins the given `enumerable` in one pass.
Mirrors `Enum.map_join/3` with higher performance for Aja structures.
"""
@spec map_join(t(val), String.t(), (val -> String.Chars.t())) :: String.t()
when val: value
def map_join(enumerable, joiner \\ "", mapper)
when is_binary(joiner) and is_function(mapper, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.map_join(enumerable, joiner, mapper)
list when is_list(list) ->
Enum.map_join(list, joiner, mapper)
# TODO do this in one pass
vector ->
vector
|> RawVector.map(mapper)
|> RawVector.join_as_iodata(joiner)
|> IO.iodata_to_binary()
end
end
@doc """
Intersperses `separator` between each element of the given `enumerable`.
Mirrors `Enum.intersperse/2` with higher performance for Aja structures.
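
  ## Examples

      iex> Aja.Enum.intersperse(Aja.Vector.new([1, 2, 3]), 0)
      [1, 0, 2, 0, 3]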
"""
@spec intersperse(t(val), separator) :: [val | separator] when val: value, separator: value
def intersperse(enumerable, separator) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.intersperse(enumerable, separator)
list when is_list(list) -> Enum.intersperse(list, separator)
vector -> RawVector.intersperse_to_list(vector, separator)
end
end
@doc """
Maps and intersperses the given `enumerable` in one pass.
Mirrors `Enum.map_intersperse/3` with higher performance for Aja structures.
"""
@spec map_intersperse(t(val), separator, (val -> mapped_val)) :: [mapped_val | separator]
when val: value, separator: value, mapped_val: value
def map_intersperse(enumerable, separator, mapper)
when is_function(mapper, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.map_intersperse(enumerable, separator, mapper)
list when is_list(list) -> Enum.map_intersperse(list, separator, mapper)
vector -> RawVector.map_intersperse_to_list(vector, separator, mapper)
end
end
@doc """
Maps the given `fun` over `enumerable` and flattens the result.
Mirrors `Enum.flat_map/2` with higher performance for Aja structures.
"""
@spec flat_map(t(val), (val -> t(mapped_val))) :: [mapped_val]
when val: value, mapped_val: value
defdelegate flat_map(enumerable, fun), to: H
@doc """
Returns a map with keys as unique elements of `enumerable` and values
as the count of every element.
Mirrors `Enum.frequencies/1` with higher performance for Aja structures.
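
  ## Examples

      iex> Aja.Enum.frequencies(Aja.Vector.new(["a", "b", "a"]))
      %{"a" => 2, "b" => 1}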
"""
@spec frequencies(t(val)) :: %{optional(val) => non_neg_integer} when val: value
def frequencies(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.frequencies(enumerable)
list when is_list(list) -> Enum.frequencies(list)
vector -> RawVector.frequencies(vector)
end
end
@doc """
Returns a map with keys as unique elements given by `key_fun` and values
as the count of every element.
Mirrors `Enum.frequencies_by/2` with higher performance for Aja structures.
"""
@spec frequencies_by(t(val), (val -> key)) :: %{optional(key) => non_neg_integer}
when val: value, key: any
def frequencies_by(enumerable, key_fun) when is_function(key_fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.frequencies_by(enumerable, key_fun)
list when is_list(list) -> Enum.frequencies_by(list, key_fun)
vector -> RawVector.frequencies_by(vector, key_fun)
end
end
@doc """
Splits the `enumerable` into groups based on `key_fun`.
Mirrors `Enum.group_by/3` with higher performance for Aja structures.
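
  Like `Enum.group_by/3`, the order of elements within each group is preserved:

      iex> Aja.Enum.group_by(Aja.Vector.new(1..6), &rem(&1, 2))
      %{0 => [2, 4, 6], 1 => [1, 3, 5]}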
"""
@spec group_by(t(val), (val -> key), (val -> mapped_val)) :: %{optional(key) => [mapped_val]}
when val: value, key: any, mapped_val: any
def group_by(enumerable, key_fun, value_fun \\ fn x -> x end)
when is_function(key_fun, 1) and is_function(value_fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.group_by(enumerable, key_fun, value_fun)
list when is_list(list) -> Enum.group_by(list, key_fun, value_fun)
vector -> RawVector.group_by(vector, key_fun, value_fun)
end
end
@doc """
Invokes the given `fun` for each element in the `enumerable`.
Mirrors `Enum.each/2` with higher performance for Aja structures.
"""
@spec each(t(val), (val -> term)) :: :ok when val: value
def each(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.each(enumerable, fun)
list when is_list(list) -> :lists.foreach(fun, list)
vector -> RawVector.each(vector, fun)
end
end
## RANDOM
@doc """
Returns a random element of an `enumerable`.
Mirrors `Enum.random/1` with higher performance for Aja structures.
"""
@spec random(t(val)) :: val when val: value
def random(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.random(enumerable)
list when is_list(list) -> Enum.random(list)
vector -> RawVector.random(vector)
end
end
@doc """
Takes `count` random elements from `enumerable`.
Mirrors `Enum.take_random/2` with higher performance for Aja structures.
"""
@spec take_random(t(val), non_neg_integer) :: [val] when val: value
def take_random(enumerable, count)
def take_random(_enumerable, 0), do: []
# TODO: optimize 1 for non-empty vectors
def take_random(enumerable, count) do
enumerable
|> H.to_list()
|> Enum.take_random(count)
end
@doc """
Returns a list with the elements of `enumerable` shuffled.
Mirrors `Enum.shuffle/1` with higher performance for Aja structures.
"""
@spec shuffle(t(val)) :: [val] when val: value
def shuffle(enumerable) do
enumerable
|> H.to_list()
|> Enum.shuffle()
end
# UNIQ
@doc """
Enumerates the `enumerable`, returning a list where all consecutive
duplicated elements are collapsed to a single element.
Mirrors `Enum.dedup/1` with higher performance for Aja structures.
"""
@spec dedup(t(val)) :: [val] when val: value
def dedup(enumerable)
def dedup(%MapSet{} = set) do
MapSet.to_list(set)
end
def dedup(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.dedup(enumerable)
list when is_list(list) -> dedup_list(list)
vector -> RawVector.dedup_list(vector)
end
end
@doc """
Enumerates the `enumerable`, returning a list where all consecutive
duplicated elements are collapsed to a single element.
Mirrors `Enum.dedup_by/2` with higher performance for Aja structures.
"""
@spec dedup_by(t(val), (val -> term)) :: [val] when val: value
def dedup_by(enumerable, fun) when is_function(fun, 1) do
enumerable
|> H.to_list()
|> Enum.dedup_by(fun)
end
@doc """
Enumerates the `enumerable`, removing all duplicated elements.
Mirrors `Enum.uniq/1` with higher performance for Aja structures.
"""
@spec uniq(t(val)) :: [val] when val: value
def uniq(enumerable)
def uniq(%MapSet{} = set) do
MapSet.to_list(set)
end
def uniq(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.uniq(enumerable)
list when is_list(list) -> Enum.uniq(list)
vector -> RawVector.uniq_list(vector)
end
end
@doc """
Enumerates the `enumerable`, by removing the elements for which
function `fun` returned duplicate elements.
Mirrors `Enum.uniq_by/2` with higher performance for Aja structures.
"""
@spec uniq_by(t(val), (val -> term)) :: [val] when val: value
def uniq_by(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.uniq_by(enumerable, fun)
list when is_list(list) -> Enum.uniq_by(list, fun)
vector -> RawVector.uniq_by_list(vector, fun)
end
end
# ## MIN-MAX
defguardp is_list_or_struct(enumerable)
when is_list(enumerable) or :erlang.map_get(:__struct__, enumerable) |> is_atom()
defguardp is_empty_list_or_vec(list_or_vec)
when list_or_vec === [] or list_or_vec === @empty_vector
@doc false
def min(enumerable) when is_list_or_struct(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.min(enumerable)
empty when is_empty_list_or_vec(empty) -> raise Enum.EmptyError
list when is_list(list) -> :lists.min(list)
vector -> RawVector.min(vector)
end
end
@doc false
@spec min(t(val), (() -> empty_result)) :: val | empty_result when val: value, empty_result: any
def min(enumerable, empty_fallback) when is_function(empty_fallback, 0) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.min(enumerable, empty_fallback)
empty when is_empty_list_or_vec(empty) -> empty_fallback.()
list when is_list(list) -> :lists.min(list)
vector -> RawVector.min(vector)
end
end
@doc """
Returns the minimal element in the `enumerable` according
to Erlang's term ordering.
Mirrors `Enum.min/3` with higher performance for Aja structures.
"""
@spec min(t(val), (val, val -> boolean) | module, (() -> empty_result)) :: val | empty_result
when val: value, empty_result: any
def min(enumerable, sorter \\ &<=/2, empty_fallback \\ fn -> raise Enum.EmptyError end)
when is_function(empty_fallback, 0) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.min(enumerable, sorter, empty_fallback)
@empty_vector -> empty_fallback.()
list when is_list(list) -> Enum.min(list, sorter, empty_fallback)
vector -> RawVector.custom_min_max(vector, min_sort_fun(sorter))
end
end
@doc false
def max(enumerable) when is_list_or_struct(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.max(enumerable)
empty when is_empty_list_or_vec(empty) -> raise Enum.EmptyError
list when is_list(list) -> :lists.max(list)
vector -> RawVector.max(vector)
end
end
@doc false
@spec max(t(val), (() -> empty_result)) :: val | empty_result when val: value, empty_result: any
def max(enumerable, empty_fallback) when is_function(empty_fallback, 0) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.max(enumerable, empty_fallback)
empty when is_empty_list_or_vec(empty) -> empty_fallback.()
list when is_list(list) -> :lists.max(list)
vector -> RawVector.max(vector)
end
end
@doc """
Returns the maximal element in the `enumerable` according
to Erlang's term ordering.
Mirrors `Enum.max/3` with higher performance for Aja structures.
"""
@spec max(t(val), (val, val -> boolean) | module, (() -> empty_result)) :: val | empty_result
when val: value, empty_result: any
def max(enumerable, sorter \\ &>=/2, empty_fallback \\ fn -> raise Enum.EmptyError end)
when is_function(empty_fallback, 0) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.max(enumerable, sorter, empty_fallback)
@empty_vector -> empty_fallback.()
list when is_list(list) -> Enum.max(list, sorter, empty_fallback)
vector -> RawVector.custom_min_max(vector, max_sort_fun(sorter))
end
end
@doc false
def min_by(enumerable, fun, empty_fallback)
when is_function(fun, 1) and is_function(empty_fallback, 0) do
min_by(enumerable, fun, &<=/2, empty_fallback)
end
@doc """
Returns the minimal element in the `enumerable` as calculated
by the given `fun`.
Mirrors `Enum.min_by/4` with higher performance for Aja structures.
"""
@spec min_by(t(val), (val -> key), (key, key -> boolean) | module, (() -> empty_result)) ::
val | empty_result
when val: value, key: term, empty_result: any
def min_by(enumerable, fun, sorter \\ &<=/2, empty_fallback \\ fn -> raise Enum.EmptyError end)
when is_function(fun, 1) and is_function(empty_fallback, 0) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.min_by(enumerable, fun, sorter, empty_fallback)
list when is_list(list) -> Enum.min_by(list, fun, sorter, empty_fallback)
@empty_vector -> empty_fallback.()
vector -> RawVector.custom_min_max_by(vector, fun, min_sort_fun(sorter))
end
end
@doc false
def max_by(enumerable, fun, empty_fallback)
when is_function(fun, 1) and is_function(empty_fallback, 0) do
max_by(enumerable, fun, &>=/2, empty_fallback)
end
@doc """
Returns the maximal element in the `enumerable` as calculated
by the given `fun`.
Mirrors `Enum.max_by/4` with higher performance for Aja structures.
"""
@spec max_by(t(val), (val -> key), (key, key -> boolean) | module, (() -> empty_result)) ::
val | empty_result
when val: value, key: term, empty_result: any
def max_by(enumerable, fun, sorter \\ &>=/2, empty_fallback \\ fn -> raise Enum.EmptyError end)
when is_function(fun, 1) and is_function(empty_fallback, 0) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.max_by(enumerable, fun, sorter, empty_fallback)
list when is_list(list) -> Enum.max_by(list, fun, sorter, empty_fallback)
@empty_vector -> empty_fallback.()
vector -> RawVector.custom_min_max_by(vector, fun, max_sort_fun(sorter))
end
end
defp max_sort_fun(sorter) when is_function(sorter, 2), do: sorter
defp max_sort_fun(module) when is_atom(module), do: &(module.compare(&1, &2) != :lt)
defp min_sort_fun(sorter) when is_function(sorter, 2), do: sorter
defp min_sort_fun(module) when is_atom(module), do: &(module.compare(&1, &2) != :gt)
## MAP-REDUCE
@doc ~S"""
Returns a list with each element of `enumerable` wrapped in a tuple alongside its index.
Mirrors `Enum.with_index/2` (Elixir 1.12 version): may receive a function or an integer offset.
If an integer `offset` is given, it will index from the given `offset` instead of from zero.
If a `function` is given, it will index by invoking the function for each
element and index (zero-based) of the `enumerable`.
## Examples
iex> Aja.Enum.with_index([:a, :b, :c])
[a: 0, b: 1, c: 2]
iex> Aja.Enum.with_index([:a, :b, :c], 3)
[a: 3, b: 4, c: 5]
iex> Aja.Enum.with_index([:a, :b, :c], fn element, index -> {index, element} end)
[{0, :a}, {1, :b}, {2, :c}]
"""
@spec with_index(t(val), index) :: [{val, index}] when val: value
@spec with_index(t(val), (val, index -> mapped_val)) :: [mapped_val]
when val: value, mapped_val: value
def with_index(enumerable, offset_or_fun \\ 0)
def with_index(enumerable, offset) when is_integer(offset) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.with_index(enumerable, offset)
list when is_list(list) -> with_index_list_offset(list, offset, [])
vector -> RawVector.with_index(vector, offset) |> RawVector.to_list()
end
end
def with_index(enumerable, fun) when is_function(fun, 2) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
enumerable
|> Enum.map_reduce(0, fn x, i -> {fun.(x, i), i + 1} end)
|> elem(0)
list when is_list(list) ->
with_index_list_fun(list, 0, fun, [])
vector ->
RawVector.with_index(vector, 0, fun) |> RawVector.to_list()
end
end
defp with_index_list_offset([], _offset, acc), do: :lists.reverse(acc)
defp with_index_list_offset([head | tail], offset, acc) do
with_index_list_offset(tail, offset + 1, [{head, offset} | acc])
end
defp with_index_list_fun([], _offset, _fun, acc), do: :lists.reverse(acc)
defp with_index_list_fun([head | tail], offset, fun, acc) do
with_index_list_fun(tail, offset + 1, fun, [fun.(head, offset) | acc])
end
@doc """
Invokes the given function to each element in the `enumerable` to reduce
it to a single element, while keeping an accumulator.
Mirrors `Enum.map_reduce/3` with higher performance for Aja structures.
"""
@spec map_reduce(t(val), acc, (val, acc -> {mapped_val, acc})) :: {t(mapped_val), acc}
when val: value, mapped_val: value, acc: any
def map_reduce(enumerable, acc, fun) when is_function(fun, 2) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.map_reduce(enumerable, acc, fun)
list when is_list(list) ->
:lists.mapfoldl(fun, acc, list)
vector ->
{new_vector, new_acc} = RawVector.map_reduce(vector, acc, fun)
{RawVector.to_list(new_vector), new_acc}
end
end
@doc """
Applies the given function to each element in the `enumerable`,
storing the result in a list and passing it as the accumulator
for the next computation. Uses the first element in the `enumerable`
as the starting value.
Mirrors `Enum.scan/2` with higher performance for Aja structures.
"""
@spec scan(t(val), (val, val -> val)) :: val when val: value
def scan(enumerable, fun) when is_function(fun, 2) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.scan(enumerable, fun)
list when is_list(list) -> Enum.scan(list, fun)
vector -> RawVector.scan(vector, fun) |> RawVector.to_list()
end
end
@doc """
Applies the given function to each element in the `enumerable`,
storing the result in a list and passing it as the accumulator
for the next computation. Uses the given `acc` as the starting value.
Mirrors `Enum.scan/3` with higher performance for Aja structures.
"""
@spec scan(t(val), acc, (val, acc -> acc)) :: acc when val: value, acc: term
def scan(enumerable, acc, fun) when is_function(fun, 2) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.scan(enumerable, acc, fun)
list when is_list(list) -> Enum.scan(list, acc, fun)
vector -> RawVector.scan(vector, acc, fun) |> RawVector.to_list()
end
end
## SLICING
@doc """
Takes an `amount` of elements from the beginning or the end of the `enumerable`.
Mirrors `Enum.take/2` with higher performance for Aja structures.
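
  ## Examples

      iex> Aja.Enum.take(Aja.Vector.new(1..10), 3)
      [1, 2, 3]
      iex> Aja.Enum.take(Aja.Vector.new(1..10), -2)
      [9, 10]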
"""
@spec take(t(val), integer) :: [val] when val: value
def take(enumerable, amount) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.take(enumerable, amount)
list when is_list(list) -> Enum.take(list, amount)
vector -> do_take_vector(vector, amount)
end
end
defp do_take_vector(_vector, 0), do: []
defp do_take_vector(vector, amount) when amount > 0 do
size = RawVector.size(vector)
if amount < size do
RawVector.slice(vector, 0, amount - 1)
else
RawVector.to_list(vector)
end
end
defp do_take_vector(vector, amount) do
size = RawVector.size(vector)
start = amount + size
if start > 0 do
RawVector.slice(vector, start, size - 1)
else
RawVector.to_list(vector)
end
end
@doc """
Drops the `amount` of elements from the `enumerable`.
Mirrors `Enum.drop/2` with higher performance for Aja structures.
"""
@spec drop(t(val), integer) :: [val] when val: value
def drop(enumerable, amount) do
case H.try_get_raw_vec_or_list(enumerable) do
nil -> Enum.drop(enumerable, amount)
list when is_list(list) -> Enum.drop(list, amount)
vector -> do_drop_vector(vector, amount)
end
end
defp do_drop_vector(vector, 0), do: RawVector.to_list(vector)
defp do_drop_vector(vector, amount) when amount > 0 do
size = RawVector.size(vector)
if amount < size do
RawVector.slice(vector, amount, size - 1)
else
[]
end
end
defp do_drop_vector(vector, amount) do
size = RawVector.size(vector)
last = amount + size
if last > 0 do
RawVector.slice(vector, 0, last - 1)
else
[]
end
end
@doc """
Splits the `enumerable` into two enumerables, leaving `count` elements in the first one.
Mirrors `Enum.split/2` with higher performance for Aja structures.
"""
@spec split(t(val), integer) :: {[val], [val]} when val: value
def split(enumerable, amount) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.split(enumerable, amount)
list when is_list(list) ->
Enum.split(list, amount)
vector ->
if amount >= 0 do
{do_take_vector(vector, amount), do_drop_vector(vector, amount)}
else
{do_drop_vector(vector, amount), do_take_vector(vector, amount)}
end
end
end
@doc """
Takes the elements from the beginning of the `enumerable` while `fun` returns a truthy value.
Mirrors `Enum.take_while/2` with higher performance for Aja structures.
"""
@spec take_while(t(val), (val -> as_boolean(term()))) :: [val] when val: value
def take_while(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.take_while(enumerable, fun)
list when is_list(list) ->
Enum.take_while(list, fun)
vector ->
case RawVector.find_falsy_index(vector, fun) do
nil -> RawVector.to_list(vector)
index -> do_take_vector(vector, index)
end
end
end
@doc """
Drops elements at the beginning of the `enumerable` while `fun` returns a truthy value.
Mirrors `Enum.drop_while/2` with higher performance for Aja structures.
"""
@spec drop_while(t(val), (val -> as_boolean(term()))) :: [val] when val: value
def drop_while(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.drop_while(enumerable, fun)
list when is_list(list) ->
Enum.drop_while(list, fun)
vector ->
case RawVector.find_falsy_index(vector, fun) do
nil -> []
index -> do_drop_vector(vector, index)
end
end
end
@doc """
Splits `enumerable` in two at the position of the element for which `fun` returns a falsy value
(`false` or `nil`) for the first time.
Mirrors `Enum.split_while/2` with higher performance for Aja structures.
"""
@spec split_while(t(val), (val -> as_boolean(term()))) :: {[val], [val]} when val: value
def split_while(enumerable, fun) when is_function(fun, 1) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.split_while(enumerable, fun)
list when is_list(list) ->
Enum.split_while(list, fun)
vector ->
case RawVector.find_falsy_index(vector, fun) do
nil -> {RawVector.to_list(vector), []}
index -> {do_take_vector(vector, index), do_drop_vector(vector, index)}
end
end
end
## SORT
@doc """
Sorts the `enumerable` according to Erlang's term ordering.
Mirrors `Enum.sort/1` with higher performance for Aja structures.
"""
@spec sort(t(val)) :: [val] when val: value
def sort(enumerable) do
enumerable
|> H.to_list()
|> Enum.sort()
end
@doc """
Sorts the `enumerable` by the given function.
Mirrors `Enum.sort/2` with higher performance for Aja structures.
"""
@spec sort(
t(val),
(val, val -> boolean)
| :asc
| :desc
| module
| {:asc | :desc, module}
) :: [val]
when val: value
def sort(enumerable, fun) do
enumerable
|> H.to_list()
|> Enum.sort(fun)
end
@doc """
Sorts the mapped results of the `enumerable` according to the provided `sorter`
function.
Mirrors `Enum.sort_by/3` with higher performance for Aja structures.
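
  ## Examples

      iex> Aja.Enum.sort_by(Aja.Vector.new(["some", "kind", "of", "monster"]), &byte_size/1)
      ["of", "some", "kind", "monster"]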
"""
@spec sort_by(
t(val),
(val -> mapped_val),
(val, val -> boolean)
| :asc
| :desc
| module
| {:asc | :desc, module}
) :: [val]
when val: value, mapped_val: value
def sort_by(enumerable, mapper, sorter \\ &<=/2) do
enumerable
|> H.to_list()
|> Enum.sort_by(mapper, sorter)
end
@doc """
Zips corresponding elements from two enumerables into one list of tuples.
Mirrors `Enum.zip/2` with higher performance for Aja structures.
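
  ## Examples

      iex> Aja.Enum.zip(Aja.Vector.new([1, 2, 3]), Aja.Vector.new([:a, :b, :c]))
      [{1, :a}, {2, :b}, {3, :c}]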
"""
@spec zip(t(val1), t(val2)) :: list({val1, val2}) when val1: value, val2: value
def zip(enumerable1, enumerable2) do
case {H.try_get_raw_vec_or_list(enumerable1), H.try_get_raw_vec_or_list(enumerable2)} do
{vector1, vector2} when is_tuple(vector1) and is_tuple(vector2) ->
RawVector.zip(vector1, vector2) |> RawVector.to_list()
{list1, list2} when is_list(list1) and is_list(list2) ->
zip_lists(list1, list2, [])
{result1, result2} ->
list_or_enum1 = zip_try_get_list(result1, enumerable1)
list_or_enum2 = zip_try_get_list(result2, enumerable2)
Enum.zip(list_or_enum1, list_or_enum2)
end
end
defp zip_try_get_list(list, _enumerable) when is_list(list), do: list
defp zip_try_get_list(nil, enumerable), do: enumerable
defp zip_try_get_list(vector, _enumerable), do: RawVector.to_list(vector)
defp zip_lists(list1, list2, acc) when list1 == [] or list2 == [] do
:lists.reverse(acc)
end
defp zip_lists([head1 | tail1], [head2 | tail2], acc) do
zip_lists(tail1, tail2, [{head1, head2} | acc])
end
@doc """
Opposite of `zip/2`. Extracts two-element tuples from the given `enumerable`
and groups them together.
Mirrors `Enum.unzip/1` with higher performance for Aja structures.
"""
@spec unzip(t({val1, val2})) :: {list(val1), list(val2)} when val1: value, val2: value
def unzip(enumerable) do
case H.try_get_raw_vec_or_list(enumerable) do
nil ->
Enum.unzip(enumerable)
list when is_list(list) ->
Enum.unzip(list)
vector ->
{vector1, vector2} = RawVector.unzip(vector)
{RawVector.to_list(vector1), RawVector.to_list(vector2)}
end
end
# Private functions
defp dedup_list([]), do: []
defp dedup_list([elem, elem | rest]), do: dedup_list([elem | rest])
defp dedup_list([elem | rest]), do: [elem | dedup_list(rest)]
end
| 31.74863 | 117 | 0.662999 |
f714c0bab323e652b7f9375314d74dec88f7a81a | 3,519 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3_synthesize_speech_config.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3_synthesize_speech_config.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dialogflow/lib/google_api/dialogflow/v3/model/google_cloud_dialogflow_cx_v3_synthesize_speech_config.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3SynthesizeSpeechConfig do
@moduledoc """
Configuration of how speech should be synthesized.
## Attributes
* `effectsProfileId` (*type:* `list(String.t)`, *default:* `nil`) - Optional. An identifier which selects 'audio effects' profiles that are applied on (post synthesized) text to speech. Effects are applied on top of each other in the order they are given.
* `pitch` (*type:* `float()`, *default:* `nil`) - Optional. Speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 semitones from the original pitch. -20 means decrease 20 semitones from the original pitch.
* `speakingRate` (*type:* `float()`, *default:* `nil`) - Optional. Speaking rate/speed, in the range [0.25, 4.0]. 1.0 is the normal native speed supported by the specific voice. 2.0 is twice as fast, and 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 speed. Any other values < 0.25 or > 4.0 will return an error.
* `voice` (*type:* `GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3VoiceSelectionParams.t`, *default:* `nil`) - Optional. The desired voice of the synthesized audio.
* `volumeGainDb` (*type:* `float()`, *default:* `nil`) - Optional. Volume gain (in dB) of the normal native volume supported by the specific voice, in the range [-96.0, 16.0]. If unset, or set to a value of 0.0 (dB), will play at normal native signal amplitude. A value of -6.0 (dB) will play at approximately half the amplitude of the normal native signal amplitude. A value of +6.0 (dB) will play at approximately twice the amplitude of the normal native signal amplitude. We strongly recommend not to exceed +10 (dB) as there's usually no effective increase in loudness for any value greater than that.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:effectsProfileId => list(String.t()) | nil,
:pitch => float() | nil,
:speakingRate => float() | nil,
:voice =>
GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3VoiceSelectionParams.t() | nil,
:volumeGainDb => float() | nil
}
field(:effectsProfileId, type: :list)
field(:pitch)
field(:speakingRate)
field(:voice, as: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3VoiceSelectionParams)
field(:volumeGainDb)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3SynthesizeSpeechConfig do
def decode(value, options) do
GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3SynthesizeSpeechConfig.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V3.Model.GoogleCloudDialogflowCxV3SynthesizeSpeechConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 54.138462 | 609 | 0.730605 |
f714d4414a63bb099152baaa2865acf485405f31 | 935 | exs | Elixir | test/api/kommander_api_test.exs | JediLuke/flamelex | b38d1171b8f93375d8dc59f1710442860b6c8580 | [
"Apache-2.0"
] | 10 | 2021-03-02T20:05:13.000Z | 2022-03-14T21:10:39.000Z | test/api/kommander_api_test.exs | JediLuke/franklin | 8eb77a342547de3eb43d28dcf9f835ff443ad489 | [
"Apache-2.0"
] | 2 | 2021-12-14T18:29:44.000Z | 2021-12-23T20:38:27.000Z | test/api/kommander_api_test.exs | JediLuke/franklin | 8eb77a342547de3eb43d28dcf9f835ff443ad489 | [
"Apache-2.0"
] | 2 | 2021-12-05T20:41:26.000Z | 2021-12-26T01:46:42.000Z | defmodule Test.Flamelex.API.Kommander do
use ExUnit.Case
# here's my rough script
# - show the command buffer
# - enter some text
# - assert that the contents is equal to what we would expect
# - execute that text, and asswert correct side effects happened
# - assert that after execute, we have changed mode, and the Kommander isn't visible
# - open kommander, use it to open a buffer
# - assert buffer is open, kommander isn't visible
# - open kommander, assert kommander is visible
# - close the buffer, assert kommander isn't visible
# - open the buffer, insert some text, assert the contents are correct
# - clear the buffer, then assert the contents are empty again, the cursor has reset, but the buffer is still visible
# - test out deactivate, which does clear and hides
# - test backspace when inputting text
#TODO vim test - test the keybinding <space>k, calls Kommander.open()
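  # A sketch of the first step above as an actual test. Apart from
  # Kommander.open/0 (named in the note above), the calls and assertions are
  # assumed placeholders, not the real Flamelex API:
  #
  #   test "typed text shows up in the kommander buffer" do
  #     Kommander.open()
  #     # ...send keystrokes here, then assert on the buffer contents and the
  #     # active mode, mirroring the script above
  #   end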
end
| 29.21875 | 119 | 0.724064 |
f715200af20de872a9f3a402246ed0b5d65a1ca0 | 2,823 | ex | Elixir | lib/magritte.ex | hauleth/magritte | 95fbcd02bd61255a54085e371735ce6a1377c46e | [
"MIT"
] | 18 | 2020-08-10T20:28:08.000Z | 2021-11-04T21:55:00.000Z | lib/magritte.ex | hauleth/magritte | 95fbcd02bd61255a54085e371735ce6a1377c46e | [
"MIT"
] | 1 | 2020-08-11T14:39:06.000Z | 2021-05-07T08:01:45.000Z | lib/magritte.ex | hauleth/magritte | 95fbcd02bd61255a54085e371735ce6a1377c46e | [
"MIT"
] | 1 | 2021-01-13T23:25:42.000Z | 2021-01-13T23:25:42.000Z | defmodule Magritte do
@moduledoc """
Alternative pipe operator definition.
## Usage
Just add `use Magritte` to the top of your module and then
  follow the documentation for `Magritte.|>/2` below.
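  For example, a minimal sketch (the module and function names are illustrative):
  ```elixir
  defmodule MyPipeline do
    use Magritte
    # `base` is piped into the position marked by `...`, i.e. the second
    # argument of Integer.to_string/2, so ten_in_base(2) returns "1010".
    def ten_in_base(base), do: base |> Integer.to_string(10, ...)
  end
  ```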
"""
defmacro __using__(_) do
quote do
import Kernel, except: [|>: 2]
import unquote(__MODULE__), only: [|>: 2]
end
end
@doc """
Enhanced pipe operator.
This operator introduces the expression on the left-hand side as an argument
to the function call on the right-hand side. The `...` placeholder operator
determines the position of the given argument on the right-hand side.
If the `...` operator is not present, the first position is used by default.
## Examples
```elixir
iex> [1, [2], 3] |> List.flatten()
[1, 2, 3]
```
The example above is the same as calling `List.flatten([1, [2], 3])`.
Using `...` you can pick the position where the result of the left side will
be inserted:
```elixir
iex> 2 |> Integer.to_string(10, ...)
"1010"
```
The example above is the same as calling `Integer.to_string(10, 2)`.
You can also join these into longer chains:
```elixir
iex> 2 |> Integer.to_string(10, ...) |> Integer.parse
{1010, ""}
```
The operator `...` can be used only once in the pipeline, otherwise
  it will raise a compile-time error:
```elixir
2 |> Integer.to_string(..., ...)
** (ArgumentError) Repeated placeholder in Integer.to_string(..., ...)
```
"""
defmacro left |> right do
[{h, _} | t] = unpipe({:|>, [], [left, right]})
fun = fn {x, pos}, acc ->
Macro.pipe(acc, x, pos)
end
:lists.foldl(fun, h, t)
end
defp unpipe(ast), do: :lists.reverse(unpipe(ast, []))
defp unpipe({:|>, _, [left, right]}, acc) do
unpipe(right, unpipe(left, acc))
end
defp unpipe(ast, acc) do
case find_pos(ast) do
{:ok, new_ast, pos} ->
[{new_ast, pos} | acc]
{:error, {:already_found, _, _}} ->
raise ArgumentError,
message: "Repeated placeholder in #{Macro.to_string(ast)}"
end
end
defguardp is_empty(a) when a == [] or not is_list(a)
defp find_pos({fun, env, args}) when not is_empty(args) do
with {:ok, found, new_args} <- locate(args, 0, nil, []),
do: {:ok, {fun, env, new_args}, found}
end
defp find_pos(ast), do: {:ok, ast, 0}
pattern = quote do: {:..., _, var!(args)}
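  # `pattern` above matches a bare `...` placeholder node in the argument AST.
  # `locate/4` scans the argument list for it and returns
  # {:ok, position, args_without_placeholder} (position defaults to 0 when no
  # placeholder is present) or {:error, {:already_found, first, second}} when
  # `...` appears more than once.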
defp locate([unquote(pattern) | rest], pos, nil, acc) when is_empty(args),
do: locate(rest, pos + 1, pos, acc)
defp locate([unquote(pattern) | _], pos, found, _acc) when is_empty(args),
do: {:error, {:already_found, found, pos}}
defp locate([arg | rest], pos, found, args),
do: locate(rest, pos + 1, found, [arg | args])
defp locate([], _, found, args),
do: {:ok, found || 0, :lists.reverse(args)}
end
| 25.432432 | 79 | 0.608218 |
f715c45e3394bfeda0630468b81276b3d0dd6f58 | 1,580 | exs | Elixir | mix.exs | florius0/witchcraft | 6c61c3ecd5b431c52e8b60aafb05596d9182205e | [
"MIT"
] | null | null | null | mix.exs | florius0/witchcraft | 6c61c3ecd5b431c52e8b60aafb05596d9182205e | [
"MIT"
] | null | null | null | mix.exs | florius0/witchcraft | 6c61c3ecd5b431c52e8b60aafb05596d9182205e | [
"MIT"
] | null | null | null | defmodule Witchcraft.Mixfile do
use Mix.Project
def project do
[
app: :witchcraft,
aliases: aliases(),
deps: deps(),
preferred_cli_env: [quality: :test],
# Versions
version: "1.0.4",
elixir: "~> 1.9",
# Docs
name: "Witchcraft",
docs: docs(),
# Hex
description: "Monads and other dark magic (monoids, functors, traversables, &c)",
package: package()
]
end
defp aliases do
[
quality: [
"test",
"credo --strict"
]
]
end
defp deps do
[
{:benchfella, "~> 0.3", only: [:dev, :test]},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:inch_ex, "~> 2.0", only: [:dev, :docs, :test], runtime: false},
{:dialyxir, "~> 1.1", only: :dev, runtime: false},
{:earmark, "~> 1.4", only: :dev, runtime: false},
{:ex_doc, "~> 0.23", only: :dev, runtime: false},
{:exceptional, "~> 2.1"},
{:operator, "~> 0.2"},
{:quark, "~> 2.2"},
{:type_class, "~> 1.2"}
]
end
defp docs do
[
extras: ["README.md"],
logo: "./brand/Icon/PNG/WC-icon-sml@2x-circle.png",
main: "readme",
source_url: "https://github.com/witchcrafters/witchcraft"
]
end
defp package do
[
licenses: ["Apache-2.0"],
links: %{"GitHub" => "https://github.com/witchcrafters/witchcraft"},
maintainers: ["Brooklyn Zelenka", "Steven Vandevelde"]
]
end
end
| 22.571429 | 87 | 0.482278 |
f715ee6f42ccc5df80166ff4db2e05731378ff33 | 1,485 | ex | Elixir | clients/plus/lib/google_api/plus/v1/model/comment_in_reply_to.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/plus/lib/google_api/plus/v1/model/comment_in_reply_to.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/plus/lib/google_api/plus/v1/model/comment_in_reply_to.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Plus.V1.Model.CommentInReplyTo do
@moduledoc """
## Attributes
- id (String.t): The ID of the activity. Defaults to: `null`.
- url (String.t): The URL of the activity. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:id => any(),
:url => any()
}
field(:id)
field(:url)
end
defimpl Poison.Decoder, for: GoogleApi.Plus.V1.Model.CommentInReplyTo do
def decode(value, options) do
GoogleApi.Plus.V1.Model.CommentInReplyTo.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Plus.V1.Model.CommentInReplyTo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 29.117647 | 77 | 0.721212 |
f715f55c4f00fcb660899f1a9b69983e5e6038aa | 1,815 | ex | Elixir | lib/exenv/encryption/secrets.ex | nsweeting/exenv | 88a9863fd055aa4d99ab9bf416c0b35bc86eadac | [
"MIT"
] | 35 | 2019-03-10T05:16:16.000Z | 2021-12-05T00:12:55.000Z | lib/exenv/encryption/secrets.ex | nsweeting/exenv | 88a9863fd055aa4d99ab9bf416c0b35bc86eadac | [
"MIT"
] | 2 | 2019-03-08T17:01:50.000Z | 2019-03-14T09:20:22.000Z | lib/exenv/encryption/secrets.ex | nsweeting/exenv | 88a9863fd055aa4d99ab9bf416c0b35bc86eadac | [
"MIT"
] | null | null | null | defmodule Exenv.Encryption.Secrets do
@moduledoc false
alias Exenv.Utils
@aes_block_size 16
@secrets_file_type ".enc"
@spec encrypted_path(binary()) :: binary()
def encrypted_path(path) do
path <> @secrets_file_type
end
@spec encrypt!(binary(), binary() | mfa()) :: binary() | no_return()
def encrypt!(key, path_or_mfa) do
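    # Encrypts the plaintext secrets file with AES-256-CBC under a fresh random IV,
    # writes "<encoded IV>|<encoded ciphertext>" to the ".enc" path, and returns
    # that path. Encoding/decoding of the key and parts is delegated to Exenv.Utils.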
key = Utils.decode(key)
path = Utils.build_path(path_or_mfa)
secrets = File.read!(path)
encrypted_path = encrypted_path(path)
init_vector = :crypto.strong_rand_bytes(16)
secrets = pad(secrets, @aes_block_size)
case :crypto.block_encrypt(:aes_cbc256, key, init_vector, secrets) do
<<cipher_text::binary>> ->
init_vector = Utils.encode(init_vector)
cipher_text = Utils.encode(cipher_text)
File.write!(encrypted_path, "#{init_vector}|#{cipher_text}")
encrypted_path
_x ->
raise Exenv.Error, "encryption failed"
end
end
@spec decrypt!(binary(), binary() | mfa()) :: binary() | no_return()
def decrypt!(key, path_or_mfa) do
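    # Reads "<encoded IV>|<encoded ciphertext>" from the secrets file, decodes both
    # parts, decrypts with AES-256-CBC, and strips the padding. Any failure raises
    # Exenv.Error.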
path_or_mfa
|> Utils.build_path()
|> File.read!()
|> String.split("|")
|> Enum.map(&String.trim/1)
|> Enum.map(&Utils.decode/1)
|> case do
[init_vector, cipher_text] ->
key = Utils.decode(key)
plain_text = :crypto.block_decrypt(:aes_cbc256, key, init_vector, cipher_text)
unpad(plain_text)
_ ->
raise Exenv.Error, "decryption failed"
end
rescue
_ -> raise Exenv.Error, "decryption failed"
end
defp pad(data, block_size) do
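    # PKCS#7-style padding: appends `to_add` bytes (1..block_size), each holding
    # the value `to_add`, so the data length becomes a multiple of the block size.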
to_add = block_size - rem(byte_size(data), block_size)
data <> to_string(:string.chars(to_add, to_add))
end
defp unpad(data) do
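    # Reverses pad/2: the last byte records how many padding bytes to strip.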
to_remove = :binary.last(data)
:binary.part(data, 0, byte_size(data) - to_remove)
end
end
| 26.691176 | 86 | 0.649036 |
f715f64c1457d170ce783100ac54722ce69d66bb | 192 | ex | Elixir | merkle-tree/elixir/lib/app/application.ex | zemuldo/data_structures | 414400d64679bffd719968b43e7ee8d69aca275f | [
"MIT"
] | null | null | null | merkle-tree/elixir/lib/app/application.ex | zemuldo/data_structures | 414400d64679bffd719968b43e7ee8d69aca275f | [
"MIT"
] | null | null | null | merkle-tree/elixir/lib/app/application.ex | zemuldo/data_structures | 414400d64679bffd719968b43e7ee8d69aca275f | [
"MIT"
] | null | null | null | defmodule App.Application do
use Application
def start(_type, _args) do
children = [{App.TransactionsStore, []}]
Supervisor.start_link(children, strategy: :one_for_one)
end
end
| 21.333333 | 59 | 0.729167 |
f71604103477825a1e2db9fd47958a298e5afce9 | 1,156 | exs | Elixir | mix.exs | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | [
"BSD-3-Clause",
"MIT"
] | null | null | null | mix.exs | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | [
"BSD-3-Clause",
"MIT"
] | null | null | null | mix.exs | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | [
"BSD-3-Clause",
"MIT"
] | null | null | null | defmodule PhoenixDSK3LO.Mixfile do
use Mix.Project
def project do
[app: :phoenixDSK3LO,
version: "0.0.1",
elixir: "~> 1.2",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[mod: {PhoenixDSK3LO, []},
applications: [:phoenix, :phoenix_pubsub, :phoenix_html, :cowboy, :logger, :gettext, :httpotion]]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[{:phoenix, "~> 1.2.1"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_html, "~> 2.6"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:poison, "~> 2.0"},
{:httpotion, "~> 3.0.2"},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"}]
end
end
| 27.52381 | 102 | 0.601211 |
f716158875162ff97924791cdb4bcc52eb983578 | 3,864 | exs | Elixir | test/exop_data/generators/float_test.exs | madeinussr/exop_data | 71904bdd778d51b2054938f23ccdf6ee27c83a60 | [
"MIT"
] | 9 | 2019-01-30T17:48:54.000Z | 2021-01-25T14:48:21.000Z | test/exop_data/generators/float_test.exs | madeinussr/exop_data | 71904bdd778d51b2054938f23ccdf6ee27c83a60 | [
"MIT"
] | 4 | 2018-11-07T09:15:29.000Z | 2018-11-14T14:58:36.000Z | test/exop_data/generators/float_test.exs | madeinussr/exop_data | 71904bdd778d51b2054938f23ccdf6ee27c83a60 | [
"MIT"
] | 1 | 2021-01-11T13:19:57.000Z | 2021-01-11T13:19:57.000Z | defmodule ExopData.Generators.FloatTest do
use ExUnit.Case, async: true
use ExUnitProperties
import ExopData.Generators.Float, only: [generate: 1]
property "generates float generator" do
check all value <- generate(%{}) do
assert is_float(value)
end
end
describe "with :numericality option" do
property "equal_to" do
generator = generate(%{numericality: %{equal_to: 12.3}})
check all value <- generator do
assert value == 12.3
end
end
property "equals" do
generator = generate(%{numericality: %{equals: 12.3}})
check all value <- generator do
assert value == 12.3
end
end
property "is" do
generator = generate(%{numericality: %{is: 12.3}})
check all value <- generator do
assert value == 12.3
end
end
property "greater_than" do
generator = generate(%{numericality: %{greater_than: 1.0}})
check all value <- generator do
assert value > 1.0
end
end
property "gt" do
generator = generate(%{numericality: %{gt: 1.0}})
check all value <- generator do
assert value > 1.0
end
end
property "greater_than_or_equal_to" do
generator = generate(%{numericality: %{greater_than_or_equal_to: 1.0}})
check all value <- generator do
assert value >= 1.0
end
end
property "gte" do
generator = generate(%{numericality: %{gte: 1.0}})
check all value <- generator do
assert value >= 1.0
end
end
property "min" do
generator = generate(%{numericality: %{min: 1.0}})
check all value <- generator do
assert value >= 1.0
end
end
property "less_than" do
generator = generate(%{numericality: %{less_than: 1.0}})
check all value <- generator do
assert value < 1.0
end
end
property "lt" do
generator = generate(%{numericality: %{lt: 1.0}})
check all value <- generator do
assert value < 1.0
end
end
property "less_than_or_equal_to" do
generator = generate(%{numericality: %{less_than_or_equal_to: 1.0}})
check all value <- generator do
assert value <= 1.0
end
end
property "lte" do
generator = generate(%{numericality: %{lte: 1.0}})
check all value <- generator do
assert value <= 1.0
end
end
property "max" do
generator = generate(%{numericality: %{max: 1.0}})
check all value <- generator do
assert value <= 1.0
end
end
property "equal_to & greater_than" do
generator = generate(%{numericality: %{equal_to: 12.3, greater_than: 1.0}})
check all value <- generator do
assert value == 12.3
end
end
property "greater_than & less_than" do
generator = generate(%{numericality: %{greater_than: 1.0, less_than: 3.0}})
check all value <- generator do
assert value > 1.0
assert value < 3.0
end
end
property "greater_than_or_equal_to & less_than" do
generator = generate(%{numericality: %{greater_than_or_equal_to: 1.0, less_than: 3.0}})
check all value <- generator do
assert value >= 1.0
assert value < 3.0
end
end
property "greater_than_or_equal_to & less_than_or_equal_to" do
generator =
generate(%{numericality: %{greater_than_or_equal_to: 1.0, less_than_or_equal_to: 3.0}})
check all value <- generator do
assert value >= 1.0
assert value <= 3.0
end
end
property "greater_than & less_than_or_equal_to" do
generator = generate(%{numericality: %{greater_than: 1.0, less_than_or_equal_to: 3.0}})
check all value <- generator do
assert value > 1.0
assert value <= 3.0
end
end
end
end
| 23.560976 | 95 | 0.600932 |
f71625c40993b2705a569848c1bd925fd7bc25cc | 884 | ex | Elixir | lib/alambic/waitable.ex | sdanzan/alambic | fdde3d1568e24dac95453337c7e50c12d2cce348 | [
"Apache-2.0"
] | null | null | null | lib/alambic/waitable.ex | sdanzan/alambic | fdde3d1568e24dac95453337c7e50c12d2cce348 | [
"Apache-2.0"
] | null | null | null | lib/alambic/waitable.ex | sdanzan/alambic | fdde3d1568e24dac95453337c7e50c12d2cce348 | [
"Apache-2.0"
] | 1 | 2018-04-19T03:25:31.000Z | 2018-04-19T03:25:31.000Z | # Copyright 2015 Serge Danzanvilliers <serge.danzanvilliers@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defprotocol Alambic.Waitable do
@moduledoc ~S"""
A generic interface for "waitable" objects.
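  For example, a hypothetical implementation (`MyResource` and its functions are
  not part of this library):
  ```elixir
  defimpl Alambic.Waitable, for: MyResource do
    def wait(resource), do: MyResource.block_until_ready(resource)
    def free?(resource), do: MyResource.ready?(resource)
  end
  ```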
"""
@vsn 1
@doc "Wait for the resource to be available"
def wait(waited)
@doc "Check if the resource is free (wait would not block)"
def free?(waited)
end
| 31.571429 | 74 | 0.745475 |
f71634f143609ae3e7c288bb14e62239aba47f01 | 3,169 | exs | Elixir | test/lifx_client_test.exs | axelson/lifx | 9ac02474d181001efc6bc08d7d39a6f6e3bb0d2a | [
"Apache-2.0"
] | null | null | null | test/lifx_client_test.exs | axelson/lifx | 9ac02474d181001efc6bc08d7d39a6f6e3bb0d2a | [
"Apache-2.0"
] | null | null | null | test/lifx_client_test.exs | axelson/lifx | 9ac02474d181001efc6bc08d7d39a6f6e3bb0d2a | [
"Apache-2.0"
] | null | null | null | defmodule LifxTest do
use ExUnit.Case, async: false
use Lifx.Protocol.Types
require Logger
doctest Lifx
import Mox
setup :set_mox_global
setup :verify_on_exit!
alias Lifx.Protocol
alias Lifx.Protocol.{FrameHeader, FrameAddress, ProtocolHeader}
alias Lifx.Protocol.{Packet}
alias Lifx.Device
@discovery_packet %Packet{
frame_header: %FrameHeader{
addressable: 1,
origin: 0,
protocol: 1024,
size: 36,
source: 4_102_800_990,
tagged: 1
},
frame_address: %FrameAddress{
ack_required: 0,
res_required: 1,
sequence: 0,
target: :all
},
protocol_header: %ProtocolHeader{
type: 2
},
payload: %{}
}
@discovery_response_packet %Packet{
frame_header: %FrameHeader{
addressable: 1,
origin: 0,
protocol: 1024,
size: 36,
source: 4_102_800_990,
tagged: 1
},
frame_address: %FrameAddress{
ack_required: 0,
res_required: 1,
sequence: 0,
target: :"99"
},
protocol_header: %ProtocolHeader{
type: 3
},
payload: %{}
}
test "discovery packet creation" do
data = "240000345EC68BF400000000000000000000000000000100000000000000000002000000"
{:ok, bin} = Base.decode16(data, case: :upper)
assert Protocol.create_packet(@discovery_packet) == bin
end
test "discovery packet parsing" do
data = "240000345EC68BF400000000000000000000000000000100000000000000000002000000"
{:ok, bin} = Base.decode16(data, case: :upper)
assert Protocol.parse_packet(bin) == @discovery_packet
end
test "Send Discovery" do
pid = self()
Mox.expect(Lifx.UdpMock, :open, 1, fn _port, _options -> {:ok, nil} end)
Mox.expect(Lifx.UdpMock, :send, 1, fn _socket, _host, _port, payload ->
packet = Protocol.parse_packet(payload)
      if packet.protocol_header.type == 2, do: send(pid, :sent_packet)
end)
start_supervised!(Lifx.Supervisor, start: {Lifx.Supervisor, :start_link, []})
assert_receive(:sent_packet)
end
test "Receive Discovery" do
pid = self()
Mox.expect(Lifx.UdpMock, :open, 1, fn _port, _options ->
send(pid, {:pid, self()})
{:ok, nil}
end)
Mox.expect(Lifx.UdpMock, :send, 1, fn _socket, _host, _port, _payload -> nil end)
start_supervised!(Lifx.Supervisor, start: {Lifx.Supervisor, :start_link, []})
client_pid =
receive do
{:pid, client_pid} -> client_pid
end
payload = <<
1::little-integer-size(8),
1234::little-integer-size(32)
>>
Lifx.Client.add_handler(Lifx.Handler)
fake_response = Protocol.create_packet(@discovery_response_packet, payload)
send(client_pid, {:udp, nil, "1.2.3.4", nil, fake_response})
assert_receive({:updated, %Device{}})
devices = Lifx.Client.devices()
assert Enum.count(devices) > 0
Mox.expect(Lifx.UdpMock, :send, 2, fn _socket, _host, _port, _payload -> nil end)
Mox.expect(Lifx.UdpMock, :send, 1, fn _socket, _host, _port, _payload ->
send(pid, :sent_3rd_retry)
end)
assert_receive(:sent_3rd_retry, 5000)
end
end
| 24.952756 | 85 | 0.64563 |
f7164892521f250144f7d1a44ef00547068936fa | 1,850 | ex | Elixir | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/list_buyers_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/list_buyers_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/model/list_buyers_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RealTimeBidding.V1.Model.ListBuyersResponse do
@moduledoc """
A response containing buyer account information.
## Attributes
* `buyers` (*type:* `list(GoogleApi.RealTimeBidding.V1.Model.Buyer.t)`, *default:* `nil`) - List of buyers.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - A token which can be passed to a subsequent call to the `ListBuyers` method to retrieve the next page of results in ListBuyersRequest.pageToken.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:buyers => list(GoogleApi.RealTimeBidding.V1.Model.Buyer.t()) | nil,
:nextPageToken => String.t() | nil
}
field(:buyers, as: GoogleApi.RealTimeBidding.V1.Model.Buyer, type: :list)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.RealTimeBidding.V1.Model.ListBuyersResponse do
def decode(value, options) do
GoogleApi.RealTimeBidding.V1.Model.ListBuyersResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.RealTimeBidding.V1.Model.ListBuyersResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37 | 207 | 0.74 |
f7164ff914232cd0818082c9144073d6007be799 | 6,259 | exs | Elixir | test/utility_test.exs | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
] | null | null | null | test/utility_test.exs | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
] | null | null | null | test/utility_test.exs | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
] | 1 | 2021-04-29T22:14:28.000Z | 2021-04-29T22:14:28.000Z | defmodule Talib.UtilityTest do
use ExUnit.Case
alias Talib.Utility
doctest Talib.Utility
defmodule Fixtures do
def numbers do
[
89,
77,
53,
64,
78,
67,
30,
6,
24,
53,
46,
30,
100,
48,
34,
69,
40,
44,
66,
89
]
end
def numbers_change do
[
0,
-12,
-24,
11,
14,
-11,
-37,
-24,
18,
29,
-7,
-16,
70,
-52,
-14,
35,
-29,
4,
22,
23
]
end
def numbers_gain do
[
0,
0,
0,
11,
14,
0,
0,
0,
18,
29,
0,
0,
70,
0,
0,
35,
0,
4,
22,
23
]
end
def numbers_loss do
[
0,
12,
24,
0,
0,
11,
37,
24,
0,
0,
7,
16,
0,
52,
14,
0,
29,
0,
0,
0
]
end
def numbers_high, do: 100
def numbers_low, do: 6
def numbers_occur do
%{
89 => 2,
77 => 1,
53 => 2,
64 => 1,
78 => 1,
67 => 1,
30 => 2,
6 => 1,
24 => 1,
46 => 1,
100 => 1,
48 => 1,
34 => 1,
69 => 1,
40 => 1,
44 => 1,
66 => 1
}
end
end
test "change/2" do
assert Utility.change(Fixtures.numbers()) === {:ok, Fixtures.numbers_change()}
assert Utility.change([nil, 3, nil]) === {:ok, [0, 0, 0]}
assert Utility.change([nil, 3]) === {:ok, [0, 0]}
assert Utility.change([nil]) === {:ok, [0]}
assert Utility.change([3]) === {:ok, [0]}
assert Utility.change([]) === {:error, :no_data}
end
test "gain/1" do
assert Utility.gain(Fixtures.numbers()) === {:ok, Fixtures.numbers_gain()}
assert Utility.gain([nil, 3, nil]) === {:ok, [0, 0, 0]}
assert Utility.gain([nil, 3]) === {:ok, [0, 0]}
assert Utility.gain([nil]) === {:ok, [0]}
assert Utility.gain([3]) === {:ok, [0]}
assert Utility.gain([]) === {:error, :no_data}
end
test "high/1" do
assert Utility.high(Fixtures.numbers()) === {:ok, Fixtures.numbers_high()}
assert Utility.high([nil, 3, nil]) === {:ok, 3}
assert Utility.high([nil, 3]) === {:ok, 3}
assert Utility.high([nil]) === {:ok, nil}
assert Utility.high([3]) === {:ok, 3}
assert Utility.high([]) === {:error, :no_data}
end
test "loss/1" do
assert Utility.loss(Fixtures.numbers()) === {:ok, Fixtures.numbers_loss()}
assert Utility.loss([nil, 3, nil]) === {:ok, [0, 0, 0]}
assert Utility.loss([nil, 3]) === {:ok, [0, 0]}
assert Utility.loss([nil]) === {:ok, [0]}
assert Utility.loss([3]) === {:ok, [0]}
assert Utility.loss([]) === {:error, :no_data}
end
test "low/1" do
assert Utility.low(Fixtures.numbers()) === {:ok, Fixtures.numbers_low()}
assert Utility.low([nil, 3, nil]) === {:ok, 3}
assert Utility.low([nil, 3]) === {:ok, 3}
assert Utility.low([nil]) === {:ok, nil}
assert Utility.low([3]) === {:ok, 3}
assert Utility.low([]) === {:error, :no_data}
end
test "occur/1" do
assert Utility.occur(Fixtures.numbers()) === {:ok, Fixtures.numbers_occur()}
assert Utility.occur([nil, 3, nil]) === {:ok, %{3 => 1, nil => 2}}
assert Utility.occur([3]) === {:ok, %{3 => 1}}
assert Utility.occur([]) === {:error, :no_data}
end
test "change!/2" do
assert Utility.change!(Fixtures.numbers()) === Fixtures.numbers_change()
assert Utility.change!([nil, 3, nil]) === [0, 0, 0]
assert Utility.change!([nil, 3]) === [0, 0]
assert Utility.change!([nil]) === [0]
assert Utility.change!([3]) === [0]
assert_raise NoDataError, fn -> Utility.change!([]) end
end
test "gain!/1" do
assert Utility.gain!(Fixtures.numbers()) === Fixtures.numbers_gain()
assert Utility.gain!([nil, 3, nil]) === [0, 0, 0]
assert Utility.gain!([nil, 3]) === [0, 0]
assert Utility.gain!([nil]) === [0]
assert Utility.gain!([3]) === [0]
assert_raise NoDataError, fn -> Utility.gain!([]) end
end
test "high!/1" do
assert Utility.high!(Fixtures.numbers()) === Fixtures.numbers_high()
assert Utility.high!([nil, 3, nil]) === 3
assert Utility.high!([nil, 3]) === 3
assert Utility.high!([nil]) === nil
assert Utility.high!([3]) === 3
assert_raise NoDataError, fn -> Utility.high!([]) end
end
test "loss!/1" do
assert Utility.loss!(Fixtures.numbers()) === Fixtures.numbers_loss()
assert Utility.loss!([nil, 3, nil]) === [0, 0, 0]
assert Utility.loss!([nil, 3]) === [0, 0]
assert Utility.loss!([nil]) === [0]
assert Utility.loss!([3]) === [0]
assert_raise NoDataError, fn -> Utility.loss!([]) end
end
test "low!/1" do
assert Utility.low!(Fixtures.numbers()) === Fixtures.numbers_low()
assert Utility.low!([nil, 3, nil]) === 3
assert Utility.low!([nil, 3]) === 3
assert Utility.low!([nil]) === nil
assert Utility.low!([3]) === 3
assert_raise NoDataError, fn -> Utility.low!([]) end
end
test "occur!/1" do
assert Utility.occur!(Fixtures.numbers()) === Fixtures.numbers_occur()
assert Utility.occur!([nil, 3, nil]) === %{3 => 1, nil => 2}
assert Utility.occur!([3]) === %{3 => 1}
assert_raise NoDataError, fn -> Utility.occur!([]) end
end
test "filter_nil/1" do
assert Utility.filter_nil(Fixtures.numbers()) === Fixtures.numbers()
assert Utility.filter_nil([nil, 3, nil]) === [3]
assert Utility.filter_nil([nil, 3]) === [3]
assert Utility.filter_nil([nil]) === []
assert Utility.filter_nil([3]) === [3]
end
test "to_bang_function/1" do
assert Talib.Utility.to_bang_function({:ok, [1, nil, 5]}) === [1, nil, 5]
assert_raise BadPeriodError, fn ->
Talib.Utility.to_bang_function({:error, :bad_period})
end
assert_raise NoDataError, fn ->
Talib.Utility.to_bang_function({:error, :no_data})
end
end
end
| 24.25969 | 82 | 0.494967 |
f716603748b17536e496fc5ca4a3a0bf852fa2b8 | 1,936 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_uiv1beta3_import_documents_metadata.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_uiv1beta3_import_documents_metadata.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_uiv1beta3_import_documents_metadata.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiUiv1beta3ImportDocumentsMetadata do
@moduledoc """
Metadata of the import document operation.
## Attributes
* `commonMetadata` (*type:* `GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiUiv1beta3CommonOperationMetadata.t`, *default:* `nil`) - The basic metadata of the long running operation.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:commonMetadata =>
GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiUiv1beta3CommonOperationMetadata.t()
| nil
}
field(:commonMetadata,
as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiUiv1beta3CommonOperationMetadata
)
end
defimpl Poison.Decoder,
for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiUiv1beta3ImportDocumentsMetadata do
def decode(value, options) do
GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiUiv1beta3ImportDocumentsMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiUiv1beta3ImportDocumentsMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.571429 | 195 | 0.770145 |
f71664825fd8ed0ab21c185aa687b44ed4089c85 | 591 | ex | Elixir | web/views/helpers/bulma.ex | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | 10 | 2016-07-15T15:57:33.000Z | 2018-06-09T00:40:46.000Z | web/views/helpers/bulma.ex | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | null | null | null | web/views/helpers/bulma.ex | marick/eecrit | 50b1ebeadc5cf21ea9f9df6add65e4d7037e2482 | [
"MIT"
] | 6 | 2016-07-15T15:57:41.000Z | 2018-03-22T16:38:00.000Z | defmodule Eecrit.Helpers.Bulma do
use Phoenix.HTML
use Eecrit.Helpers.Tags
@tab_item_class "nav-item is-tab"
def tab_item_class(on_page_signified_by_tab: true),
do: @tab_item_class <> " is-active"
def tab_item_class(_),
do: @tab_item_class
def tab_item(name, path, opts \\ []) do
link(name, to: path, class: tab_item_class(opts))
end
def tab_button(button_text, path, extra_attributes \\ []) do
link(button_text,
[form: [class: @tab_item_class],
to: path,
class: "button button-primary nav-item"]
++ extra_attributes)
end
end
| 24.625 | 63 | 0.676819 |
f7169e15ed966dd79e785ad2c0e69fa66ffa9c74 | 11,591 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/version.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/version.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/version.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AppEngine.V1.Model.Version do
@moduledoc """
A Version resource is a specific set of source code and configuration files that are deployed into a service.
## Attributes
- apiConfig (ApiConfigHandler): Serving configuration for Google Cloud Endpoints (https://cloud.google.com/appengine/docs/python/endpoints/).Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- automaticScaling (AutomaticScaling): Automatic scaling is based on request rate, response latencies, and other application metrics. Defaults to: `null`.
- basicScaling (BasicScaling): A service with basic scaling will create an instance when the application receives a request. The instance will be turned down when the app becomes idle. Basic scaling is ideal for work that is intermittent or driven by user activity. Defaults to: `null`.
- betaSettings (%{optional(String.t) => String.t}): Metadata settings that are supplied to this version to enable beta runtime features. Defaults to: `null`.
- createTime (DateTime.t): Time that this version was created.@OutputOnly Defaults to: `null`.
- createdBy (String.t): Email address of the user who created this version.@OutputOnly Defaults to: `null`.
- defaultExpiration (String.t): Duration that static files should be cached by web proxies and browsers. Only applicable if the corresponding StaticFilesHandler (https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#StaticFilesHandler) does not specify its own expiration time.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- deployment (Deployment): Code and application artifacts that make up this version.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- diskUsageBytes (String.t): Total size in bytes of all the files that are included in this version and currently hosted on the App Engine disk.@OutputOnly Defaults to: `null`.
- endpointsApiService (EndpointsApiService): Cloud Endpoints configuration.If endpoints_api_service is set, the Cloud Endpoints Extensible Service Proxy will be provided to serve the API implemented by the app. Defaults to: `null`.
- entrypoint (Entrypoint): The entrypoint for the application. Defaults to: `null`.
- env (String.t): App Engine execution environment for this version.Defaults to standard. Defaults to: `null`.
- envVariables (%{optional(String.t) => String.t}): Environment variables available to the application.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- errorHandlers ([ErrorHandler]): Custom static error pages. Limited to 10KB per page.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- handlers ([UrlMap]): An ordered list of URL-matching patterns that should be applied to incoming requests. The first matching URL handles the request and other request handlers are not attempted.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- healthCheck (HealthCheck): Configures health checking for instances. Unhealthy instances are stopped and replaced with new instances. Only applicable in the App Engine flexible environment.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- id (String.t): Relative name of the version within the service. Example: v1. Version names can contain only lowercase letters, numbers, or hyphens. Reserved names: \"default\", \"latest\", and any name with the prefix \"ah-\". Defaults to: `null`.
- inboundServices ([String.t]): Before an application can receive email or XMPP messages, the application must be configured to enable the service. Defaults to: `null`.
- Enum - one of
- instanceClass (String.t): Instance class that is used to run this version. Valid values are: AutomaticScaling: F1, F2, F4, F4_1G ManualScaling or BasicScaling: B1, B2, B4, B8, B4_1GDefaults to F1 for AutomaticScaling and B1 for ManualScaling or BasicScaling. Defaults to: `null`.
- libraries ([Library]): Configuration for third-party Python runtime libraries that are required by the application.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- livenessCheck (LivenessCheck): Configures liveness health checking for instances. Unhealthy instances are stopped and replaced with new instancesOnly returned in GET requests if view=FULL is set. Defaults to: `null`.
- manualScaling (ManualScaling): A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time. Defaults to: `null`.
- name (String.t): Full path to the Version resource in the API. Example: apps/myapp/services/default/versions/v1.@OutputOnly Defaults to: `null`.
- network (Network): Extra network settings. Only applicable in the App Engine flexible environment. Defaults to: `null`.
- nobuildFilesRegex (String.t): Files that match this pattern will not be built into this version. Only applicable for Go runtimes.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- readinessCheck (ReadinessCheck): Configures readiness health checking for instances. Unhealthy instances are not put into the backend traffic rotation.Only returned in GET requests if view=FULL is set. Defaults to: `null`.
- resources (Resources): Machine resources for this version. Only applicable in the App Engine flexible environment. Defaults to: `null`.
- runtime (String.t): Desired runtime. Example: python27. Defaults to: `null`.
- runtimeApiVersion (String.t): The version of the API in the given runtime environment. Please see the app.yaml reference for valid values at https://cloud.google.com/appengine/docs/standard/<language>/config/appref Defaults to: `null`.
- runtimeChannel (String.t): The channel of the runtime to use. Only available for some runtimes. Defaults to the default channel. Defaults to: `null`.
- runtimeMainExecutablePath (String.t): The path or name of the app's main executable. Defaults to: `null`.
- servingStatus (String.t): Current serving status of this version. Only the versions with a SERVING status create instances and can be billed.SERVING_STATUS_UNSPECIFIED is an invalid value. Defaults to SERVING. Defaults to: `null`.
- Enum - one of [SERVING_STATUS_UNSPECIFIED, SERVING, STOPPED]
- threadsafe (boolean()): Whether multiple requests can be dispatched to this version at once. Defaults to: `null`.
- versionUrl (String.t): Serving URL for this version. Example: \"https://myversion-dot-myservice-dot-myapp.appspot.com\"@OutputOnly Defaults to: `null`.
- vm (boolean()): Whether to deploy this version in a container on a virtual machine. Defaults to: `null`.
- zones ([String.t]): The Google Compute Engine zones that are supported by this version in the App Engine flexible environment. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:apiConfig => GoogleApi.AppEngine.V1.Model.ApiConfigHandler.t(),
:automaticScaling => GoogleApi.AppEngine.V1.Model.AutomaticScaling.t(),
:basicScaling => GoogleApi.AppEngine.V1.Model.BasicScaling.t(),
:betaSettings => map(),
:createTime => DateTime.t(),
:createdBy => any(),
:defaultExpiration => any(),
:deployment => GoogleApi.AppEngine.V1.Model.Deployment.t(),
:diskUsageBytes => any(),
:endpointsApiService => GoogleApi.AppEngine.V1.Model.EndpointsApiService.t(),
:entrypoint => GoogleApi.AppEngine.V1.Model.Entrypoint.t(),
:env => any(),
:envVariables => map(),
:errorHandlers => list(GoogleApi.AppEngine.V1.Model.ErrorHandler.t()),
:handlers => list(GoogleApi.AppEngine.V1.Model.UrlMap.t()),
:healthCheck => GoogleApi.AppEngine.V1.Model.HealthCheck.t(),
:id => any(),
:inboundServices => list(any()),
:instanceClass => any(),
:libraries => list(GoogleApi.AppEngine.V1.Model.Library.t()),
:livenessCheck => GoogleApi.AppEngine.V1.Model.LivenessCheck.t(),
:manualScaling => GoogleApi.AppEngine.V1.Model.ManualScaling.t(),
:name => any(),
:network => GoogleApi.AppEngine.V1.Model.Network.t(),
:nobuildFilesRegex => any(),
:readinessCheck => GoogleApi.AppEngine.V1.Model.ReadinessCheck.t(),
:resources => GoogleApi.AppEngine.V1.Model.Resources.t(),
:runtime => any(),
:runtimeApiVersion => any(),
:runtimeChannel => any(),
:runtimeMainExecutablePath => any(),
:servingStatus => any(),
:threadsafe => any(),
:versionUrl => any(),
:vm => any(),
:zones => list(any())
}
field(:apiConfig, as: GoogleApi.AppEngine.V1.Model.ApiConfigHandler)
field(:automaticScaling, as: GoogleApi.AppEngine.V1.Model.AutomaticScaling)
field(:basicScaling, as: GoogleApi.AppEngine.V1.Model.BasicScaling)
field(:betaSettings, type: :map)
field(:createTime, as: DateTime)
field(:createdBy)
field(:defaultExpiration)
field(:deployment, as: GoogleApi.AppEngine.V1.Model.Deployment)
field(:diskUsageBytes)
field(:endpointsApiService, as: GoogleApi.AppEngine.V1.Model.EndpointsApiService)
field(:entrypoint, as: GoogleApi.AppEngine.V1.Model.Entrypoint)
field(:env)
field(:envVariables, type: :map)
field(:errorHandlers, as: GoogleApi.AppEngine.V1.Model.ErrorHandler, type: :list)
field(:handlers, as: GoogleApi.AppEngine.V1.Model.UrlMap, type: :list)
field(:healthCheck, as: GoogleApi.AppEngine.V1.Model.HealthCheck)
field(:id)
field(:inboundServices, type: :list)
field(:instanceClass)
field(:libraries, as: GoogleApi.AppEngine.V1.Model.Library, type: :list)
field(:livenessCheck, as: GoogleApi.AppEngine.V1.Model.LivenessCheck)
field(:manualScaling, as: GoogleApi.AppEngine.V1.Model.ManualScaling)
field(:name)
field(:network, as: GoogleApi.AppEngine.V1.Model.Network)
field(:nobuildFilesRegex)
field(:readinessCheck, as: GoogleApi.AppEngine.V1.Model.ReadinessCheck)
field(:resources, as: GoogleApi.AppEngine.V1.Model.Resources)
field(:runtime)
field(:runtimeApiVersion)
field(:runtimeChannel)
field(:runtimeMainExecutablePath)
field(:servingStatus)
field(:threadsafe)
field(:versionUrl)
field(:vm)
field(:zones, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.Version do
def decode(value, options) do
GoogleApi.AppEngine.V1.Model.Version.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.Version do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 74.780645 | 392 | 0.738504 |
f716a224ef2ebec9fc0f378b1092eb86938107e5 | 3,274 | exs | Elixir | lib/ex_unit/test/ex_unit/describe_test.exs | moogle19/elixir | 91fa0e3adad1654a4fe2c8bd218e9e6d42b92b47 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/describe_test.exs | moogle19/elixir | 91fa0e3adad1654a4fe2c8bd218e9e6d42b92b47 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/describe_test.exs | moogle19/elixir | 91fa0e3adad1654a4fe2c8bd218e9e6d42b92b47 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule ExUnit.DescribeTest do
use ExUnit.Case, async: true
@moduletag [attribute_tag: :from_module]
setup _ do
[setup_tag: :from_module]
end
describe "tags" do
@describetag attribute_tag: :from_describe
test "from describe have higher precedence", context do
assert context.attribute_tag == :from_describe
end
@tag attribute_tag: :from_test
test "from test have higher precedence", context do
assert context.attribute_tag == :from_test
end
end
describe "setup" do
setup _ do
[setup_tag: :from_describe]
end
test "from describe runs later", context do
assert context.setup_tag == :from_describe
end
end
describe "setup from import" do
import Map
setup :to_list
test "is expanded within describe block", context do
assert context.setup_tag == :from_module
end
end
describe "failures" do
test "when using setup_all inside describe" do
assert_raise RuntimeError, ~r"cannot invoke setup_all/1-2 inside describe", fn ->
defmodule Sample do
use ExUnit.Case
describe "hello" do
setup_all do
[hello: "world"]
end
end
end
end
end
test "when using describe inside describe" do
regex = ~r{cannot call "describe" inside another "describe"}
assert_raise RuntimeError, regex, fn ->
defmodule Sample do
use ExUnit.Case
describe "hello" do
describe "another" do
end
end
end
end
end
test "when using non-string describe name" do
assert_raise ArgumentError, ~r"describe name must be a string, got: :not_allowed", fn ->
defmodule Sample do
use ExUnit.Case
describe :not_allowed do
end
end
end
end
test "when using the same name for two describe blocks" do
message =
~s(describe "some tests" is already defined in ExUnit.DescribeTest.DescribeWithSameNames)
assert_raise ExUnit.DuplicateDescribeError, message, fn ->
defmodule DescribeWithSameNames do
use ExUnit.Case
describe "some tests" do
end
describe "some tests" do
end
end
end
end
end
test "when @describetag is used outside of a describe block" do
message = ~s(@describetag must be set inside describe/2 blocks)
assert_raise RuntimeError, message, fn ->
defmodule DescribetagOutsideOfDescribeBlock do
use ExUnit.Case
@describetag :integration
describe "some tests" do
end
end
end
end
describe "test names" do
test "merge describe information", context do
assert context.test == :"test test names merge describe information"
end
end
test "attributes from outside describe", context do
assert context.attribute_tag == :from_module
assert context.setup_tag == :from_module
assert context.test == :"test attributes from outside describe"
end
describe "describe block" do
test "sets describe_line", context do
assert context.describe_line == __ENV__.line - 2
end
end
end
| 24.073529 | 97 | 0.642944 |
f716d029681665fccee1d97fc52f1aa063611025 | 2,568 | exs | Elixir | config/prod.exs | peillis/siwapp | f8c11ad2660574395d636674aa449c959f0f87f1 | [
"MIT"
] | 4 | 2015-02-12T09:23:47.000Z | 2022-03-09T18:11:06.000Z | config/prod.exs | peillis/siwapp | f8c11ad2660574395d636674aa449c959f0f87f1 | [
"MIT"
] | 254 | 2021-12-09T14:40:41.000Z | 2022-03-31T08:09:37.000Z | config/prod.exs | peillis/siwapp | f8c11ad2660574395d636674aa449c959f0f87f1 | [
"MIT"
] | 1 | 2022-03-07T10:25:49.000Z | 2022-03-07T10:25:49.000Z | import Config
config :siwapp,
env: :prod
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :siwapp, SiwappWeb.Endpoint,
url: [scheme: "https", host: System.get_env("APP_HOST"), port: 443],
force_ssl: [rewrite_on: [:x_forwarded_proto]],
cache_static_manifest: "priv/static/cache_manifest.json"
database_url =
System.get_env("DATABASE_URL") ||
raise """
environment variable DATABASE_URL is missing.
For example: ecto://USER:PASS@HOST/DATABASE
"""
config :siwapp, Siwapp.Repo,
ssl: true,
# socket_options: [:inet6],
url: database_url,
pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10"),
database: "siwapp_prod",
show_sensitive_data_on_connection_error: true
# Do not print debug messages in production
config :logger, level: :info
config :sentry,
dsn: System.get_env("SENTRY_DSN"),
environment_name: :prod,
enable_source_code_context: true,
root_source_code_path: File.cwd!(),
tags: %{
env: "production"
},
included_environments: [:prod]
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :siwapp, SiwappWeb.Endpoint,
# ...,
# url: [host: "example.com", port: 443],
# https: [
# ...,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :siwapp, SiwappWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
| 31.703704 | 70 | 0.70366 |
f716e0d4a6af23a59483856a86a0ba89bb3674b0 | 326 | ex | Elixir | lib/logz/jiffy_codec.ex | rciorba/logz | a31250884b703a5e69e40691b075ad56ab0c4fc1 | [
"Unlicense"
] | null | null | null | lib/logz/jiffy_codec.ex | rciorba/logz | a31250884b703a5e69e40691b075ad56ab0c4fc1 | [
"Unlicense"
] | null | null | null | lib/logz/jiffy_codec.ex | rciorba/logz | a31250884b703a5e69e40691b075ad56ab0c4fc1 | [
"Unlicense"
] | null | null | null | defmodule Logz.JiffyCodec do
@behaviour Elastix.JSON.Codec
def encode!(data) do
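    # Encodes `data` with jiffy; if jiffy raises or throws, the error and the
    # input are printed with IO.inspect for debugging and the error is re-thrown.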
try do
:jiffy.encode(data)
catch
err, value ->
IO.inspect({err, value})
IO.inspect(data)
throw({err, value})
end
end
def decode(json, opts \\ []), do: {:ok, :jiffy.decode(json, opts)}
end
| 19.176471 | 68 | 0.582822 |