hexsha (stringlengths 40..40) | size (int64 2..991k) | ext (stringclasses 2 values) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 4..208) | max_stars_repo_name (stringlengths 6..106) | max_stars_repo_head_hexsha (stringlengths 40..40) | max_stars_repo_licenses (sequence) | max_stars_count (int64 1..33.5k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24..24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24..24 ⌀) | max_issues_repo_path (stringlengths 4..208) | max_issues_repo_name (stringlengths 6..106) | max_issues_repo_head_hexsha (stringlengths 40..40) | max_issues_repo_licenses (sequence) | max_issues_count (int64 1..16.3k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24..24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24..24 ⌀) | max_forks_repo_path (stringlengths 4..208) | max_forks_repo_name (stringlengths 6..106) | max_forks_repo_head_hexsha (stringlengths 40..40) | max_forks_repo_licenses (sequence) | max_forks_count (int64 1..6.91k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24..24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24..24 ⌀) | content (stringlengths 2..991k) | avg_line_length (float64 1..36k) | max_line_length (int64 1..977k) | alphanum_fraction (float64 0..1)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
73f7b508b8e6a84e8ce6b235a182c83e44b59b3d | 2,055 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1_entity.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | ["Apache-2.0"] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1_entity.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | ["Apache-2.0"] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1_entity.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | ["Apache-2.0"] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1Entity do
@moduledoc """
Detected entity from video analysis.
## Attributes
- description (String.t): Textual description, e.g. `Fixed-gear bicycle`. Defaults to: `null`.
- entityId (String.t): Opaque entity ID. Some IDs may be available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/). Defaults to: `null`.
- languageCode (String.t): Language code for `description` in BCP-47 format. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => any(),
:entityId => any(),
:languageCode => any()
}
field(:description)
field(:entityId)
field(:languageCode)
end
defimpl Poison.Decoder,
for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1Entity do
def decode(value, options) do
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1Entity.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1Entity do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.830508 | 177 | 0.741119 |
73f7bd52fe16420e62d215fc406e5f62ecd2cfcf | 4,540 | ex | Elixir | lib/jsonrpc2.ex | InoMurko/jsonrpc2-elixir | 3edb92061231ddc9edc1a69531f7248d3d00d8f3 | ["Apache-2.0"] | null | null | null | lib/jsonrpc2.ex | InoMurko/jsonrpc2-elixir | 3edb92061231ddc9edc1a69531f7248d3d00d8f3 | ["Apache-2.0"] | null | null | null | lib/jsonrpc2.ex | InoMurko/jsonrpc2-elixir | 3edb92061231ddc9edc1a69531f7248d3d00d8f3 | ["Apache-2.0"] | null | null | null |
defmodule JSONRPC2 do
@moduledoc ~S"""
`JSONRPC2` is an Elixir library for JSON-RPC 2.0.
It includes request and response utility modules, a transport-agnostic server handler, a
line-based TCP server and client, which are based on [Ranch](https://github.com/ninenines/ranch)
and [shackle](https://github.com/lpgauth/shackle), respectively, and a JSON-in-the-body HTTP(S)
server and client, based on [Plug](https://github.com/elixir-lang/plug) and
[hackney](https://github.com/benoitc/hackney), respectively.
## TCP Example
# Define a handler
defmodule Handler do
use JSONRPC2.Server.Handler
def handle_request("hello", [name]) do
"Hello, #{name}!"
end
def handle_request("hello2", %{"name" => name}) do
"Hello again, #{name}!"
end
def handle_request("subtract", [minuend, subtrahend]) do
minuend - subtrahend
end
def handle_request("notify", [name]) do
IO.puts "You have been notified, #{name}!"
end
end
# Start the server (this will usually go in your OTP application's start/2)
JSONRPC2.Servers.TCP.start_listener(Handler, 8000)
# Define the client
defmodule Client do
alias JSONRPC2.Clients.TCP
def start(host, port) do
TCP.start(host, port, __MODULE__)
end
def hello(name) do
TCP.call(__MODULE__, "hello", [name])
end
def hello2(args) do
TCP.call(__MODULE__, "hello2", Map.new(args))
end
def subtract(minuend, subtrahend) do
TCP.cast(__MODULE__, "subtract", [minuend, subtrahend])
end
def notify(name) do
TCP.notify(__MODULE__, "notify", [name])
end
end
# Start the client pool (this will also usually go in your OTP application's start/2)
Client.start("localhost", 8000)
# Make a call with the client to the server
IO.inspect Client.hello("Elixir")
#=> {:ok, "Hello, Elixir!"}
# Make a call with the client to the server, using named args
IO.inspect Client.hello2(name: "Elixir")
#=> {:ok, "Hello again, Elixir!"}
# Make a call with the client to the server asynchronously
{:ok, request_id} = Client.subtract(2, 1)
IO.puts "non-blocking!"
#=> non-blocking!
IO.inspect JSONRPC2.Clients.TCP.receive_response(request_id)
#=> {:ok, 1}
# Notifications
Client.notify("Elixir")
#=> You have been notified, Elixir!
## HTTP Example
# Define a handler
defmodule Handler do
use JSONRPC2.Server.Handler
def handle_request("hello", [name]) do
"Hello, #{name}!"
end
def handle_request("hello2", %{"name" => name}) do
"Hello again, #{name}!"
end
def handle_request("notify", [name]) do
IO.puts "You have been notified, #{name}!"
end
end
# Start the server (this will usually go in your OTP application's start/2)
JSONRPC2.Servers.HTTP.http(Handler)
# Define the client
defmodule Client do
alias JSONRPC2.Clients.HTTP
@url "http://localhost:4000/"
def hello(name) do
HTTP.call(@url, "hello", [name])
end
def hello2(args) do
HTTP.call(@url, "hello2", Map.new(args))
end
def notify(name) do
HTTP.notify(@url, "notify", [name])
end
end
# Make a call with the client to the server
IO.inspect Client.hello("Elixir")
#=> {:ok, "Hello, Elixir!"}
# Make a call with the client to the server, using named args
IO.inspect Client.hello2(name: "Elixir")
#=> {:ok, "Hello again, Elixir!"}
# Notifications
Client.notify("Elixir")
#=> You have been notified, Elixir!
## Serializers
Any module which conforms to the same API as Poison's `Poison.encode/1` and `Poison.decode/1` can
be provided as a serializer to the functions which accept them.
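  For example, a minimal serializer sketch (assuming the Jason library is available as a
  dependency) only needs to expose `encode/1` and `decode/1` returning ok/error tuples:

      defmodule MySerializer do
        def encode(data), do: Jason.encode(data)
        def decode(binary), do: Jason.decode(binary)
      end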
"""
@typedoc "A JSON-RPC 2.0 method."
@type method :: String.t()
@typedoc "A decoded JSON object."
@type json ::
nil
| true
| false
| float
| integer
| String.t()
| [json]
| %{optional(String.t()) => json}
@typedoc "A JSON-RPC 2.0 params value."
@type params :: [json] | %{optional(String.t()) => json}
@typedoc "A JSON-RPC 2.0 request ID."
@type id :: String.t() | number
end
| 27.852761 | 99 | 0.590088 |
73f7e3ca51ec77829024f6e10ff77f1d033f4e03 | 1,320 | ex | Elixir | lib/ecto/query/lock_builder.ex | scrogson/ecto | fdcf807901683c73c1117d89751e0d5a7a318154 | ["Apache-2.0"] | null | null | null | lib/ecto/query/lock_builder.ex | scrogson/ecto | fdcf807901683c73c1117d89751e0d5a7a318154 | ["Apache-2.0"] | null | null | null | lib/ecto/query/lock_builder.ex | scrogson/ecto | fdcf807901683c73c1117d89751e0d5a7a318154 | ["Apache-2.0"] | null | null | null |
defmodule Ecto.Query.LockBuilder do
@moduledoc false
alias Ecto.Query.BuilderUtil
@doc """
Validates the expression is an integer or raise.
"""
@spec validate(Macro.t) :: Macro.t | no_return
def validate(expr) when is_boolean(expr) or is_binary(expr), do: expr
def validate(expr) do
raise Ecto.QueryError, reason: "lock expression must be a boolean value" <>
" or a string containing the database-specific locking" <>
" clause, got: #{inspect expr}"
end
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(:lock, Macro.t, Macro.t, Macro.Env.t) :: Macro.t
def build(type, query, expr, env) do
expr =
case is_boolean(expr) or is_binary(expr) do
true -> expr
false -> quote do: unquote(__MODULE__).validate(unquote(expr))
end
BuilderUtil.apply_query(query, __MODULE__, [type, expr], env)
end
@doc """
The callback applied by `build/4` to build the query.
"""
@spec apply(Ecto.Queryable.t, :lock, term) :: Ecto.Query.t
def apply(query, :lock, value) do
query = Ecto.Queryable.to_query(query)
%{query | lock: value}
end
end
| 29.333333 | 87 | 0.648485 |
73f7f73614a5daadffaedaeb6fcb16ac49d96487 | 845 | ex | Elixir | lib/subscribex/test_subscriber.ex | neggertz/subscribex | 5f40b06a514108dbdc42b4d7a166b7f4aa19e693 | [
"Unlicense"
] | 22 | 2016-08-11T11:51:13.000Z | 2020-01-12T17:22:11.000Z | lib/subscribex/test_subscriber.ex | neggertz/subscribex | 5f40b06a514108dbdc42b4d7a166b7f4aa19e693 | [
"Unlicense"
] | 7 | 2016-10-04T19:26:14.000Z | 2018-12-15T19:18:43.000Z | lib/subscribex/test_subscriber.ex | neggertz/subscribex | 5f40b06a514108dbdc42b4d7a166b7f4aa19e693 | [
"Unlicense"
] | 14 | 2016-08-09T14:27:57.000Z | 2021-04-01T20:18:43.000Z | defmodule Subscribex.TestSubscriber do
@moduledoc false
use Subscribex.Subscriber
@preprocessors &__MODULE__.deserialize/1
def start_link(broker) do
Subscribex.Subscriber.start_link(__MODULE__, broker)
end
def init(broker) do
config = %Config{
auto_ack: false,
broker: broker,
queue: "test-queue",
prefetch_count: 1000,
exchange: "test-exchange",
exchange_type: :topic,
exchange_opts: [durable: true],
binding_opts: [routing_key: "routing_key"]
}
{:ok, config}
end
def deserialize(payload) do
IO.inspect("Deserializing #{payload}")
:hello
end
def second(:hello) do
# IO.inspect("Second!")
:hi
end
def handle_payload(_payload, channel, delivery_tag, _redelivered) do
# IO.inspect(payload)
ack(channel, delivery_tag)
end
end
| 20.119048 | 70 | 0.669822 |
73f7f86e56e4c9619cad3003f90693abcd383447 | 2,200 | ex | Elixir | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/list_functions_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/list_functions_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/list_functions_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudFunctions.V1.Model.ListFunctionsResponse do
@moduledoc """
Response for the `ListFunctions` method.
## Attributes
* `functions` (*type:* `list(GoogleApi.CloudFunctions.V1.Model.CloudFunction.t)`, *default:* `nil`) - The functions that match the request.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - If not empty, indicates that there may be more functions that match the request; this value should be passed in a new google.cloud.functions.v1.ListFunctionsRequest to get more functions.
* `unreachable` (*type:* `list(String.t)`, *default:* `nil`) - Locations that could not be reached. The response does not include any functions from these locations.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:functions => list(GoogleApi.CloudFunctions.V1.Model.CloudFunction.t()) | nil,
:nextPageToken => String.t() | nil,
:unreachable => list(String.t()) | nil
}
field(:functions, as: GoogleApi.CloudFunctions.V1.Model.CloudFunction, type: :list)
field(:nextPageToken)
field(:unreachable, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CloudFunctions.V1.Model.ListFunctionsResponse do
def decode(value, options) do
GoogleApi.CloudFunctions.V1.Model.ListFunctionsResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudFunctions.V1.Model.ListFunctionsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.509434 | 250 | 0.737727 |
73f8038d25ce212f429de75b71f8eef82049272f | 363 | ex | Elixir | exercises/practice/resistor-color-duo/.meta/example.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/resistor-color-duo/.meta/example.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/resistor-color-duo/.meta/example.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule ResistorColorDuo do
@colors %{
black: 0,
brown: 1,
red: 2,
orange: 3,
yellow: 4,
green: 5,
blue: 6,
violet: 7,
grey: 8,
white: 9
}
@doc """
Calculate a resistance value from two colors
"""
@spec value(colors :: [atom]) :: integer
def value([a, b | _]) do
10 * @colors[a] + @colors[b]
end
end
| 15.782609 | 46 | 0.534435 |
73f8344c6fd1c5da6ba4463ec2e03a38fec1285d | 4,669 | ex | Elixir | implementations/elixir/ockam/ockam/lib/ockam/mini_cbor.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/lib/ockam/mini_cbor.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/lib/ockam/mini_cbor.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | defmodule MiniCBOR do
@moduledoc """
Wrapper for CBOR encoding library to work with values encoded using structures optimised by Rust
https://twittner.gitlab.io/minicbor/minicbor_derive/index.html library
Changes map keys to integers for encoded values.
Encodes atoms as index integers.
Map keys optimization:
Given data map `%{field1: 100, field2: "hi"}`
and schema `{:map, [:field1, :field2]}` // same as `{:map, [{:field1, :noschema}, {:field2, :noschema}]}`
optimizes keys as `%{1 => 100, 2 => "hi"}`
Enum atoms optimization:
Given atom value `:other_thing`
and schema `{:enum, [:one_thing, :other_thing, :another_thing]}`
optimizes value as `1`
Supports nested schemas in map key mapping:
With data map `%{path: "/resource", method: :get}`
and schema `{:map, [:path, {:method, {:enum, [:get, :post]}}]}`
optimizes map as `%{1 => "/resource", 2 => 0}`
When encoding a map or struct, the field `0` is reserved for use of type-tags (the tag feature is currently disabled on rust,
and not implemented on elixir)
"""
@type schema() :: {:map, [atom() | {atom(), schema()}]} | {:enum, [atom()]} | :noschema
@reserved_tag_field :minicbor_tag_reserved
@deprecated "Use Ockam.TypedCBOR instead"
def encode(struct, schema) do
schema_map = struct_schema(schema)
optimized = rekey_struct(struct, schema_map)
CBOR.encode(optimized)
end
@deprecated "Use Ockam.TypedCBOR instead"
def decode(binary, schema) do
with {:ok, optimized, rest} <- CBOR.decode(binary) do
schema_map = optimized_schema(schema)
struct = rekey_optimized(optimized, schema_map)
{:ok, struct, rest}
end
end
defp reserve_tag_field(keys) when is_list(keys) do
# As a workaround, set this unused field at position 0.
# Latter we will use position 0 to carry tag information.
[@reserved_tag_field | keys]
end
def struct_schema({:map, keys}) when is_list(keys) do
mapping =
reserve_tag_field(keys)
|> Enum.with_index(fn
{key, inner_schema}, index -> {key, {index, struct_schema(inner_schema)}}
key, index -> {key, index}
end)
|> Map.new()
{:map, mapping}
end
def struct_schema({:enum, options}) when is_list(options) do
mapping =
options
|> Enum.with_index()
|> Map.new()
{:enum, mapping}
end
def struct_schema({:list, schema}) do
{:list, struct_schema(schema)}
end
def optimized_schema({:map, keys}) when is_list(keys) do
mapping =
reserve_tag_field(keys)
|> Enum.with_index(fn
{key, inner_schema}, index -> {index, {key, optimized_schema(inner_schema)}}
key, index -> {index, key}
end)
|> Map.new()
{:map, mapping}
end
def optimized_schema({:enum, options}) when is_list(options) do
mapping =
options
|> Enum.with_index(fn key, index -> {index, key} end)
|> Map.new()
{:enum, mapping}
end
def optimized_schema({:list, schema}) do
{:list, optimized_schema(schema)}
end
def rekey_struct(struct, :noschema) do
struct
end
def rekey_struct(struct, {:list, schema}) do
Enum.map(struct, fn val ->
rekey_struct(val, schema)
end)
end
def rekey_struct(struct, {:map, schema_map}) do
struct
# because enum is not implemented for structs
|> as_map()
# Just in case
|> Map.delete(@reserved_tag_field)
|> Enum.flat_map(fn {key, val} ->
case Map.get(schema_map, key) do
nil ->
[]
index when is_integer(index) ->
[{index, val}]
{index, inner_schema} when is_integer(index) ->
[{index, rekey_struct(val, inner_schema)}]
end
end)
|> Map.new()
end
def rekey_struct(atom, {:enum, option_map}) when is_atom(atom) do
Map.fetch!(option_map, atom)
end
def rekey_optimized(optimized, :noschema) do
optimized
end
def rekey_optimized(optimized, {:list, schema}) do
Enum.map(optimized, fn val ->
rekey_optimized(val, schema)
end)
end
def rekey_optimized(optimized, {:map, schema_map}) do
Enum.flat_map(optimized, fn {index, val} ->
case Map.get(schema_map, index) do
nil ->
[]
{key, inner_schema} ->
[{key, rekey_optimized(val, inner_schema)}]
key ->
[{key, val}]
end
end)
|> Map.new()
end
def rekey_optimized(index, {:enum, option_map}) when is_integer(index) do
Map.fetch!(option_map, index)
end
defp as_map(map) when is_struct(map) do
Map.from_struct(map)
end
defp as_map(map) when is_map(map) do
map
end
end
| 26.083799 | 129 | 0.629471 |
73f84274ecfdc6ca9abe5c440569ebd58f03fb61 | 5,045 | ex | Elixir | web/controllers/membership_controller.ex | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | ["BSD-3-Clause", "MIT"] | null | null | null | web/controllers/membership_controller.ex | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | ["BSD-3-Clause", "MIT"] | null | null | null | web/controllers/membership_controller.ex | mark-b-kauffman/phoenixDSK3LO | 999d7f66515a3bf1974d25c3d7ff3b439266452c | ["BSD-3-Clause", "MIT"] | null | null | null |
defmodule PhoenixDSK3LO.MembershipController do
# For managing one membership. 2017.12.12
# TODO: Modify the following to manage one membership.
use PhoenixDSK3LO.Web, :controller
require Logger
alias PhoenixDSK3LO.Lms, as: Lms
@doc """
Notes:
Regarding Availability and Row Status
%{"availability" => %{"available" => "No"}, ... }
%{"availability" => %{"available" => "Yes"}, ... }
%{"availability" => %{"available" => "Disabled"}, ... }
Regarding the Course data structure
iex(3)> {:ok, course} = PhoenixDSK3LO.Lms.get(fqdn, Learn.Course, "mbk-course-a")
{:ok,
%Learn.Course{availability: %{"available" => "Yes",
"duration" => %{"type" => "Continuous"}}, courseId: "mbk-course-a",
dataSourceId: "_2_1", description: "Test course A.",
externalId: "mbk-course-a", id: "_3_1", name: "mbk-course-a",
organization: false}}
Regarding the dskMap
iex(4)> dskMap = LearnRestClient.get(fqdnAtom, "dskMap")
%{"_10_1" => %{"externalId" => "MicrosoftAzureAD", "id" => "_10_1"},
"_17_1" => %{"description" => "Data source for Google",
"externalId" => "DS_GG", "id" => "_17_1"},
"_19_1" => %{"description" => "Accounts from MH test IdP",
"externalId" => "mh_shib", "id" => "_19_1"},
"_1_1" => %{"description" => "Internal data source used for associating records that are created for use by the Bb system.",
"externalId" => "INTERNAL", "id" => "_1_1"}, ... }
"""
def init() do
# Now the compiler won't complain about the documentation above being re-defined for the following function.
end
@doc """
From router: get "/membership/:courseId/:userName", MembershipController, :show
"""
def show(conn, %{"courseId" => courseId, "userName" => userName }) do
fqdn = Application.get_env(:phoenixDSK3LO, PhoenixDSK3LO.Endpoint)[:learnserver]
{:ok, course} = Lms.get(fqdn, Learn.Course, courseId) # course as struct
{:ok, user} = Lms.get(fqdn, Learn.User, userName)
{:ok, intentionallyUnused, dskMap} = LearnRestClient.get_data_sources(fqdn)
# dskMap = LearnRestClient.get(String.to_atom(fqdn), "dskMap")
# dskList = [%{"id" => "_2_1", "externalId" => "SYSTEM"}, %{"id" => "_1_1", "externalId" => "INTERNAL"}]
# here we need a util method that takes the dskMap and returns a list in the above form....
# What do you know, Elixir lets us do this witha one-liner! No need for a util method!
# dsk_list = Enum.map(dskMap, fn {k, v} -> %{"id" => k, "externalId"=>v["externalId"] } end)
# Now that we do the following we have to change how the template accesses the data.
# The keys are no longer strings so we have to use the . notation.
{:ok, dsk_list} = Lms.all(fqdn, Learn.Dsk, "allpages")
# dsk_list is a list of maps
# [ %Learn.Dsk{description: "blah.", externalId: "INTERNAL", id: "_1_1" }, %Learn.Dsk ... ]
mapout = %{}
dsk_map = LearnRestUtil.listofstructs_to_mapofstructs( dsk_list, mapout, :id )
#dsk_map is a map of structs
{:ok, membership} = Lms.get(fqdn, Learn.Membership, courseId, userName)
render conn, "show.html", courseId: courseId, course: course, userName: userName, user: user, membership: membership, dskMap: dsk_map, dskList: dsk_list
end
@doc """
From router: post "/membership/:courseId/:userName", MembershipController, :update
"""
def update(conn, %{"courseId" => courseId, "userName" => userName, "session" => session}) do
fqdn = Application.get_env(:phoenixDSK3LO, PhoenixDSK3LO.Endpoint)[:learnserver]
{:ok, course} = LearnRestClient.get_course_with_courseId(fqdn, courseId)
{:ok, membership} = LearnRestClient.get_membership(fqdn, courseId, userName)
# Update the membership in the LMS with this line.
Logger.info "DSK value selected #{session["selected_dsk"]}"
Logger.info "'available' value selected #{session["selected_avail"]}"
# Why do we need newCourse before we create the thing we update?
# What was the reasoning behind the hidden inputs? Ensure valid post?
Logger.info "newCourse:#{session["newCourse"]}"
Logger.info "courseId:#{courseId}"
newCourse = session["newCourse"]
# if not(String.equivalent?(newCourse, courseId)) do # TODO: REMOVE
courseId = newCourse
# end
newUser = session["newUser"]
# if not(String.equivalent?(newUser, userName)) do # TODO: REMOVE
userName = newUser
# end
new_avail = session["selected_avail"]
new_dsk = session["selected_dsk"]
Logger.info course["id"]
# Create a new membership with the selected values.
# Elixir values are immutable so create a new membership
temp = %{membership | "availability" => %{"available" => "#{new_avail}"}, "dataSourceId" => "#{new_dsk}"}
newMembership = Map.delete(temp, "created")
# Call the REST APIs to update the membership.
{:ok} = LearnRestClient.update_membership(fqdn, courseId, userName, newMembership)
# Now show.
show(conn, %{"courseId" => courseId, "userName" => userName})
end #update
end
| 47.59434 | 158 | 0.656095 |
73f85f7a3a14c771feedd46aca27d61eff32de91 | 247 | ex | Elixir | lib/deuce.ex | fremantle-industries/deuce | b0b95333bc43d55e25f1c6a3a033bc053223a64c | ["MIT"] | 2 | 2022-01-16T23:58:45.000Z | 2022-03-16T09:29:16.000Z | lib/deuce.ex | fremantle-industries/deuce | b0b95333bc43d55e25f1c6a3a033bc053223a64c | ["MIT"] | null | null | null | lib/deuce.ex | fremantle-industries/deuce | b0b95333bc43d55e25f1c6a3a033bc053223a64c | ["MIT"] | null | null | null |
defmodule Deuce do
@moduledoc """
Deuce keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 24.7 | 66 | 0.748988 |
73f89d3003cdd780269b78ae61e5e30604b0c26f | 640 | ex | Elixir | deps/postgrex/lib/postgrex/extensions/int8.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | ["MIT"] | null | null | null | deps/postgrex/lib/postgrex/extensions/int8.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | ["MIT"] | null | null | null | deps/postgrex/lib/postgrex/extensions/int8.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | ["MIT"] | null | null | null |
defmodule Postgrex.Extensions.Int8 do
@moduledoc false
import Postgrex.BinaryUtils, warn: false
use Postgrex.BinaryExtension, send: "int8send"
@int8_range -9223372036854775808..9223372036854775807
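  # Wire format note (descriptive sketch): Postgres int8send/int8recv use a 4-byte length
  # header (always 8) followed by the 64-bit big-endian integer payload, i.e.
  # <<8::int32, int::int64>> as built and matched by the quoted clauses below.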
def encode(_) do
range = Macro.escape(@int8_range)
quote location: :keep do
int when is_integer(int) and int in unquote(range) ->
<<8 :: int32, int :: int64>>
other ->
raise DBConnection.EncodeError, Postgrex.Utils.encode_msg(other, unquote(range))
end
end
def decode(_) do
quote location: :keep do
<<8 :: int32, int :: int64>> -> int
end
end
end
| 26.666667 | 89 | 0.646875 |
73f8a051a8aaed0eb8d050f23d71dfc38dc3799f | 13,473 | ex | Elixir | lib/fxnk/map.ex | matthewsecrist/fxnk | 3f489f6069ba09b959bb3e8f22c7b9f1fa1d5e45 | [
"MIT"
] | 2 | 2020-10-16T19:05:07.000Z | 2020-12-30T19:59:40.000Z | lib/fxnk/map.ex | matthewsecrist/fxnk | 3f489f6069ba09b959bb3e8f22c7b9f1fa1d5e45 | [
"MIT"
] | 1 | 2020-10-16T19:09:55.000Z | 2020-10-16T21:31:30.000Z | lib/fxnk/map.ex | matthewsecrist/fxnk | 3f489f6069ba09b959bb3e8f22c7b9f1fa1d5e45 | [
"MIT"
] | null | null | null | defmodule Fxnk.Map do
@moduledoc """
`Fxnk.Map` are functions that work with maps.
"""
import Fxnk.Functions, only: [curry: 1]
@doc """
Curried `assemble/2`
## Examples
iex> map = %{red: "red", green: "green", blue: "blue" }
iex> fnmap = %{
...> red: Fxnk.Flow.compose([&String.upcase/1, Fxnk.Map.prop(:red)]),
...> blue: Fxnk.Flow.compose([&String.reverse/1, Fxnk.Map.prop(:blue)])
...> }
iex> assembler = Fxnk.Map.assemble(fnmap)
iex> assembler.(map)
%{red: "RED", blue: "eulb"}
"""
@spec assemble(%{any() => function()}) :: (map() -> map())
def assemble(fn_map) do
fn map -> assemble(map, fn_map) end
end
@doc """
Takes an initial map and a "builder" map where each value is a function. Builds a new map by setting the keys in the function map to
the values returned by the function applied to the original map.
## Examples
iex> map = %{red: "red", green: "green", blue: "blue" }
iex> fnmap = %{
...> red: Fxnk.Flow.compose([&String.upcase/1, Fxnk.Map.prop(:red)]),
...> blue: Fxnk.Flow.compose([&String.reverse/1, Fxnk.Map.prop(:blue)])
...> }
iex> Fxnk.Map.assemble(map, fnmap)
%{red: "RED", blue: "eulb"}
"""
@spec assemble(map(), %{any() => function()}) :: any()
def assemble(map, fn_map) do
fn_map
|> Map.to_list()
|> Enum.reduce(%{}, fn {key, function}, acc ->
Map.put_new(acc, key, function.(map))
end)
end
@doc """
Takes a map and a function that accepts a map and returns a map. Runs the map against the function and merges the initial map into the result.
## Examples
iex> map = %{red: "red", green: "green", blue: "blue"}
iex> colorCombiner = Fxnk.Map.combine(fn %{red: red, blue: blue} -> %{purple: red <> blue} end)
iex> colorCombiner.(map)
%{red: "red", green: "green", blue: "blue", purple: "redblue"}
"""
@spec combine((map() -> map())) :: (map() -> map())
def combine(function) do
fn map -> combine(map, function) end
end
@doc """
Takes a map and a function that accepts a map and returns a map. Runs the map against the function and merges the initial map into the result.
## Examples
iex> map = %{red: "red", green: "green", blue: "blue"}
iex> colorCombiner = Fxnk.Functions.always(%{purple: "purple"})
iex> Fxnk.Map.combine(map, colorCombiner)
%{red: "red", green: "green", blue: "blue", purple: "purple"}
"""
@spec combine(map(), (map() -> map())) :: map()
def combine(map, function) do
Map.merge(function.(map), map)
end
@doc """
`combine/2` but also accepts a combining function as the last argument.
## Examples
iex> map = %{colors: %{red: "red", green: "green", blue: "blue"}}
iex> colorCombiner = Fxnk.Functions.always(%{colors: %{red: "fire red", purple: "purple"}})
iex> Fxnk.Map.combine_with(map, colorCombiner, &Fxnk.Map.merge_deep_right/2)
%{colors: %{red: "fire red", green: "green", blue: "blue", purple: "purple"}}
"""
@spec combine_with(map(), (map() -> map()), (map(), map() -> map())) :: map()
def combine_with(map, function, combining_function) do
apply(combining_function, [map, function.(map)])
end
@doc """
Returns a specific element in a nested map. If the path does not exist, returns the original map.
## Examples
iex> map = %{one: %{two: %{three: "three" }}}
iex> Fxnk.Map.path(map, [:one, :two, :three])
"three"
iex> Fxnk.Map.path(map, [:one, :two])
%{three: "three"}
iex> Fxnk.Map.path(map, [:one, :four])
%{one: %{two: %{three: "three" }}}
"""
@spec path(map(), [binary() | atom()]) :: map() | any()
def path(map, path_array) do
do_path(map, path_array, map)
end
@doc """
Like `path/2`, but returns the `or_value` when the path is not found.
## Examples
iex> map = %{one: %{two: %{three: "three" }}}
iex> Fxnk.Map.path_or(map, [:one, :two, :three], :foo)
"three"
iex> Fxnk.Map.path_or(map, [:one, :two], :foo)
%{three: "three"}
iex> Fxnk.Map.path_or(map, [:one, :four], :foo)
:foo
"""
@spec path_or(map(), [binary() | atom()], any()) :: map() | any()
def path_or(map, path_array, or_value) do
do_path_or(map, path_array, or_value)
end
@doc """
Accepts a string or atom `key` and returns a function that takes a `map`. Returns the map's value at `key` or `nil`.
## Examples
iex> getProp = Fxnk.Map.prop("foo")
iex> getProp.(%{"foo" => "foo", "bar" => "bar"})
"foo"
iex> getProp2 = Fxnk.Map.prop(:foo)
iex> getProp2.(%{foo: "foo", bar: "bar"})
"foo"
"""
@spec prop(atom() | binary()) :: (map() -> any())
def prop(key) when is_binary(key) or is_atom(key) do
curry(fn map -> prop(map, key) end)
end
@doc """
Accepts a map and a key. Returns the map's value at `key` or `nil`
## Examples
iex> Fxnk.Map.prop(%{"foo" => "foo", "bar" => "bar"}, "foo")
"foo"
iex> Fxnk.Map.prop(%{foo: "foo", bar: "bar"}, :foo)
"foo"
"""
@spec prop(map(), atom() | binary()) :: any()
def prop(map, key) when is_map(map) and (is_binary(key) or is_atom(key)) do
map[key]
end
@doc """
Accepts a list of keys and returns a function that takes a map. Returns a list of the values associated with the keys in the map.
## Examples
iex> getProps = Fxnk.Map.props(["foo", "bar"])
iex> getProps.(%{"foo" => "foo", "bar" => "bar", "baz" => "baz"})
["foo", "bar"]
iex> getProps2 = Fxnk.Map.props([:foo, :bar])
iex> getProps2.(%{foo: "foo", bar: "bar", baz: "baz"})
["foo", "bar"]
"""
@spec props([atom() | binary(), ...]) :: (map() -> [any(), ...])
def props(keys) when is_list(keys) do
curry(fn map -> props(map, keys) end)
end
@doc """
Accepts a map and a list of keys and returns a list of the values associated with the keys in the map.
## Examples
iex> Fxnk.Map.props(%{"foo" => "foo", "bar" => "bar", "baz" => "baz"}, ["foo", "bar"])
["foo", "bar"]
iex> Fxnk.Map.props(%{foo: "foo", bar: "bar", baz: "baz"}, [:foo, :bar])
["foo", "bar"]
"""
@spec props(map(), [atom() | binary(), ...]) :: [any(), ...]
def props(map, keys) when is_list(keys) and is_map(map) do
for key <- keys, do: prop(map, key)
end
@doc """
Curried `prop_equals/3`, takes a value, returns a function that accepts a map and a key.
## Examples
iex> isFoo = Fxnk.Map.prop_equals("foo")
iex> isFoo.(%{foo: "foo"}, :foo)
true
"""
@spec prop_equals(any()) :: (map(), atom() | String.t() -> boolean())
def prop_equals(value) do
fn map, key -> prop_equals(map, key, value) end
end
@doc """
Curried `prop_equals/3`, takes a key and a value. Returns a function that accepts a map.
## Examples
iex> isKeyFoo = Fxnk.Map.prop_equals(:foo, "foo")
iex> isKeyFoo.(%{foo: "foo"})
true
"""
@spec prop_equals(atom | binary, any) :: (map() -> boolean())
def prop_equals(key, value) when is_atom(key) or is_binary(key) do
fn map -> prop_equals(map, key, value) end
end
@doc """
Accepts a map, a key, and a value. Checks whether the map's value at the key is equal to the given value.
## Examples
iex> Fxnk.Map.prop_equals(%{foo: "foo"}, :foo, "foo")
true
iex> Fxnk.Map.prop_equals(%{foo: "bar"}, :foo, "foo")
false
"""
@spec prop_equals(map(), atom() | binary(), any()) :: boolean()
def prop_equals(map, key, value) when is_map(map) and (is_binary(key) or is_atom(key)) do
map[key] === value
end
@doc """
Accepts a list of args, returns a curried `pick/2`.
## Examples
iex> pickArgs = Fxnk.Map.pick([:red, :blue, :orange])
iex> pickArgs.(%{ red: "RED", green: "GREEN", blue: "BLUE", yellow: "YELLOW" })
%{red: "RED", blue: "BLUE"}
"""
@spec pick([atom(), ...]) :: (map() -> map())
def pick(args) when is_list(args) do
curry(fn map -> pick(map, args) end)
end
@doc """
`pick/2` takes a `Map` and a `List` of atoms, and returns a map of only the selected keys that exist. It will
return an empty map if passed an empty map or an empty list.
## Examples
iex> Fxnk.Map.pick(%{ red: "RED", green: "GREEN", blue: "BLUE", yellow: "YELLOW" }, [:red, :blue, :orange])
%{red: "RED", blue: "BLUE"}
"""
@spec pick(map(), [atom(), ...]) :: map()
def pick(map, _) when map_size(map) == 0, do: map
def pick(_, []), do: %{}
def pick(map, args) when is_map(map) and is_list(args) do
do_pick(map, args, %{})
end
@doc """
Curried `has_prop?/2`
## Examples
iex> hasFoo = Fxnk.Map.has_prop?(:foo)
iex> hasFoo.(%{foo: 'foo'})
true
iex> hasFoo.(%{bar: 'bar'})
false
"""
@spec has_prop?(atom() | String.t()) :: (map() -> boolean())
def has_prop?(property) when is_binary(property) or is_atom(property) do
curry(fn map -> has_prop?(map, property) end)
end
@doc """
Takes a map and a property, returns `true` if the property has a value in the map, `false` otherwise.
## Examples
iex> Fxnk.Map.has_prop?(%{foo: "foo"}, :foo)
true
iex> Fxnk.Map.has_prop?(%{foo: "foo"}, :bar)
false
"""
@spec has_prop?(map(), atom() | String.t()) :: boolean()
def has_prop?(map, property) when is_map(map) and (is_binary(property) or is_atom(property)) do
prop(map, property) !== nil
end
@doc """
Merges two maps together, if both maps have the same key, the value on the right will be used.
## Example
iex> Fxnk.Map.merge_right(%{red: "red", blue: "blue"}, %{red: "orange", green: "green"})
%{red: "orange", blue: "blue", green: "green"}
"""
@spec merge_right(map(), map()) :: map()
def merge_right(map1, map2) do
Map.merge(map1, map2)
end
@doc """
Merges two maps together deeply. If both maps have the same key, the value on the right will be used.
If both keys are a map, the maps will be merged together recursively, preferring values on the right.
## Example
iex> map1 = %{red: "red", green: %{green: "green", yellowish: "greenish", with_blue: %{turqoise: "blueish green"}}, blue: "blue"}
iex> map2 = %{red: "orange", green: %{green: "blue and yellow", yellowish: "more yellow than green"}}
iex> Fxnk.Map.merge_deep_right(map1, map2)
%{red: "orange", green: %{green: "blue and yellow", yellowish: "more yellow than green", with_blue: %{turqoise: "blueish green"}}, blue: "blue"}
"""
@spec merge_deep_right(map(), map()) :: map()
def merge_deep_right(map1, map2) do
Map.merge(map1, map2, fn _, v1, v2 ->
if is_map(v1) and is_map(v2) do
merge_deep_right(v1, v2)
else
v2
end
end)
end
@doc """
Merges two maps together, if both maps have the same key, the value on the left will be used.
## Example
iex> Fxnk.Map.merge_left(%{red: "red", blue: "blue"}, %{red: "orange", green: "green"})
%{red: "red", blue: "blue", green: "green"}
"""
@spec merge_left(map(), map()) :: map()
def merge_left(map1, map2) do
Map.merge(map2, map1)
end
@doc """
Merges two maps together deeply. If both maps have the same key, the value on the left will be used.
If both keys are a map, the maps will be merged together recursively, preferring values on the left.
## Example
iex> map1 = %{red: "red", green: %{green: "green", yellowish: "greenish", with_blue: %{turqoise: "blueish green"}}, blue: "blue"}
iex> map2 = %{red: "orange", green: %{green: "blue and yellow", yellowish: "more yellow than green"}}
iex> Fxnk.Map.merge_deep_left(map1, map2)
%{red: "red", green: %{green: "green", yellowish: "greenish", with_blue: %{turqoise: "blueish green"}}, blue: "blue"}
"""
@spec merge_deep_left(map(), map()) :: map()
def merge_deep_left(map1, map2) do
merge_deep_right(map2, map1)
end
@doc """
Rename a key in a map, takes the map, current key and replacement key. Returns the original map with the updated key.
## Example
iex> Fxnk.Map.rename(%{id: "1234"}, :id, :user_id)
%{user_id: "1234"}
iex> Fxnk.Map.rename(%{hello: "world", foo: "foo" }, :foo, :bar)
%{hello: "world", bar: "foo"}
"""
@spec rename(map(), String.t() | atom(), String.t() | atom()) :: map()
def rename(map, key, new_key) do
{value, popped_map} = Access.pop(map, key)
Map.merge(popped_map, %{new_key => value})
end
@doc """
Rename multiple keys in a map. Takes the original map and a map where the key is the original key and the value is the replacement key.
## Example
iex> Fxnk.Map.rename_all(%{user_id: "1234", foo: "foo", bar: "bar"}, %{user_id: :id, bar: :baz})
%{id: "1234", foo: "foo", baz: "bar"}
"""
@spec rename_all(map(), map()) :: map()
def rename_all(map, renames) do
renames
|> Map.to_list()
|> Enum.reduce(map, fn {old, new}, acc -> rename(acc, old, new) end)
end
defp do_pick(_, [], acc), do: acc
defp do_pick(map, [hd | tl], acc) do
case Map.fetch(map, hd) do
{:ok, val} -> do_pick(map, tl, Map.put(acc, hd, val))
_ -> do_pick(map, tl, acc)
end
end
defp do_path(map, _, nil), do: map
defp do_path(_, [], acc), do: acc
defp do_path(map, [hd | tl], acc), do: do_path(map, tl, prop(acc, hd))
defp do_path_or(nil, _, default_to), do: default_to
defp do_path_or(map, [], _), do: map
defp do_path_or(map, [hd | tl], default_to), do: do_path_or(prop(map, hd), tl, default_to)
end
| 34.724227 | 150 | 0.588139 |
73f8a06fed5a517e77f9b3f6c66ff11c4901f0d0 | 5,199 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/target_http_proxy.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_http_proxy.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_http_proxy.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.TargetHttpProxy do
@moduledoc """
Represents a Target HTTP Proxy resource.
Google Compute Engine has two Target HTTP Proxy resources:
* [Global](/compute/docs/reference/rest/{$api_version}/targetHttpProxies) * [Regional](/compute/docs/reference/rest/{$api_version}/regionTargetHttpProxies)
A target HTTP proxy is a component of GCP HTTP load balancers.
* targetHttpProxies are used by external HTTP load balancers and Traffic Director. * regionTargetHttpProxies are used by internal HTTP load balancers.
Forwarding rules reference a target HTTP proxy, and the target proxy then references a URL map. For more information, read Using Target Proxies and Forwarding rule concepts. (== resource_for {$api_version}.targetHttpProxies ==) (== resource_for {$api_version}.regionTargetHttpProxies ==)
## Attributes
* `creationTimestamp` (*type:* `String.t`, *default:* `nil`) - [Output Only] Creation timestamp in RFC3339 text format.
* `description` (*type:* `String.t`, *default:* `nil`) - An optional description of this resource. Provide this property when you create the resource.
* `fingerprint` (*type:* `String.t`, *default:* `nil`) - Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. This field will be ignored when inserting a TargetHttpProxy. An up-to-date fingerprint must be provided in order to patch/update the TargetHttpProxy; otherwise, the request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve the TargetHttpProxy.
* `id` (*type:* `String.t`, *default:* `nil`) - [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* `kind` (*type:* `String.t`, *default:* `compute#targetHttpProxy`) - [Output Only] Type of resource. Always compute#targetHttpProxy for target HTTP proxies.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
* `proxyBind` (*type:* `boolean()`, *default:* `nil`) - This field only applies when the forwarding rule that references this target proxy has a loadBalancingScheme set to INTERNAL_SELF_MANAGED.
When this field is set to true, Envoy proxies set up inbound traffic interception and bind to the IP address and port specified in the forwarding rule. This is generally useful when using Traffic Director to configure Envoy as a gateway or middle proxy (in other words, not a sidecar proxy). The Envoy proxy listens for inbound requests and handles requests when it receives them.
The default is false.
* `region` (*type:* `String.t`, *default:* `nil`) - [Output Only] URL of the region where the regional Target HTTP Proxy resides. This field is not applicable to global Target HTTP Proxies.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output Only] Server-defined URL for the resource.
* `urlMap` (*type:* `String.t`, *default:* `nil`) - URL to the UrlMap resource that defines the mapping from URL to the BackendService.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:creationTimestamp => String.t() | nil,
:description => String.t() | nil,
:fingerprint => String.t() | nil,
:id => String.t() | nil,
:kind => String.t() | nil,
:name => String.t() | nil,
:proxyBind => boolean() | nil,
:region => String.t() | nil,
:selfLink => String.t() | nil,
:urlMap => String.t() | nil
}
field(:creationTimestamp)
field(:description)
field(:fingerprint)
field(:id)
field(:kind)
field(:name)
field(:proxyBind)
field(:region)
field(:selfLink)
field(:urlMap)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.TargetHttpProxy do
def decode(value, options) do
GoogleApi.Compute.V1.Model.TargetHttpProxy.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.TargetHttpProxy do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 59.079545 | 490 | 0.721485 |
73f8be384747c96f2b64d035c9d75ce95d0f62c4 | 1,772 | ex | Elixir | test/support/model_case.ex | xelcer/phoenix_demo | 84dc516441dd715db7e24b1384c2fea3adffe0f7 | ["MIT"] | null | null | null | test/support/model_case.ex | xelcer/phoenix_demo | 84dc516441dd715db7e24b1384c2fea3adffe0f7 | ["MIT"] | null | null | null | test/support/model_case.ex | xelcer/phoenix_demo | 84dc516441dd715db7e24b1384c2fea3adffe0f7 | ["MIT"] | null | null | null |
defmodule Demo.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Demo.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Demo.ModelCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Demo.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Demo.Repo, {:shared, self()})
end
:ok
end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&Demo.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
end
| 26.848485 | 84 | 0.681716 |
73f90178b11c772c224b4ac86e3c1272f33a01af | 1,476 | ex | Elixir | lib/live_sup_web/live/admin/team_live/form_component.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | ["Apache-2.0", "MIT"] | null | null | null | lib/live_sup_web/live/admin/team_live/form_component.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | ["Apache-2.0", "MIT"] | 3 | 2022-02-23T15:51:48.000Z | 2022-03-14T22:52:43.000Z | lib/live_sup_web/live/admin/team_live/form_component.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | ["Apache-2.0", "MIT"] | null | null | null |
defmodule LiveSupWeb.Admin.TeamLive.FormComponent do
use LiveSupWeb, :live_component
alias LiveSup.Core.Teams
@impl true
def update(%{team: team} = assigns, socket) do
changeset = Teams.change(team)
{:ok,
socket
|> assign(assigns)
|> assign(:changeset, changeset)}
end
@impl true
def handle_event("validate", %{"team" => team_params}, socket) do
changeset =
socket.assigns.team
|> Teams.change(team_params)
|> Map.put(:action, :validate)
{:noreply, assign(socket, :changeset, changeset)}
end
def handle_event("save", %{"team" => team_params}, socket) do
save_team(socket, socket.assigns.action, team_params)
end
defp save_team(socket, :edit, team_params) do
case Teams.update(socket.assigns.team, team_params) do
{:ok, _team} ->
{:noreply,
socket
|> put_flash(:info, "Team updated successfully")
|> push_redirect(to: socket.assigns.return_to)}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, :changeset, changeset)}
end
end
defp save_team(socket, :new, team_params) do
case Teams.create(team_params) do
{:ok, _team} ->
{:noreply,
socket
|> put_flash(:info, "Team created successfully")
|> push_redirect(to: socket.assigns.return_to)}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, changeset: changeset)}
end
end
end
| 26.357143 | 67 | 0.631436 |
73f9191d7689ad5786eb5a29da9bb4973d30dd96 | 237 | exs | Elixir | config/test.exs | sger/ex_rabbitmq_rpc | 7e46b471f9bcd2b9a850d783d34c7c806ca5cab6 | ["MIT"] | null | null | null | config/test.exs | sger/ex_rabbitmq_rpc | 7e46b471f9bcd2b9a850d783d34c7c806ca5cab6 | ["MIT"] | null | null | null | config/test.exs | sger/ex_rabbitmq_rpc | 7e46b471f9bcd2b9a850d783d34c7c806ca5cab6 | ["MIT"] | null | null | null |
use Mix.Config
config :logger, level: :error
config :exrabbitmq, :test,
username: "guest",
password: "guest",
host: "localhost",
queue: "ex_rabbitmq_rpc_test",
queue_opts: [auto_delete: true],
consume_opts: [no_ack: false]
| 19.75 | 34 | 0.700422 |
73f93bc1f6084aa128b33b83067d9562fa103c73 | 18,612 | exs | Elixir | test/teslamate_web/live/settings_test.exs | AlwindB/teslamate | 59295ce8cf5c737ff26be4ea999206eb131ba38a | ["MIT"] | null | null | null | test/teslamate_web/live/settings_test.exs | AlwindB/teslamate | 59295ce8cf5c737ff26be4ea999206eb131ba38a | ["MIT"] | null | null | null | test/teslamate_web/live/settings_test.exs | AlwindB/teslamate | 59295ce8cf5c737ff26be4ea999206eb131ba38a | ["MIT"] | 1 | 2019-10-24T13:17:57.000Z | 2019-10-24T13:17:57.000Z |
defmodule TeslaMateWeb.SettingsLiveTest do
use TeslaMateWeb.ConnCase
alias TeslaMate.{Settings, Locations, Repo}
describe "global settings" do
test "shows km and C by default", %{conn: conn} do
assert {:ok, _view, html} = live(conn, "/settings")
assert [
{"select", _,
[
{"option", [{"value", "km"}, {"selected", "selected"}], ["km"]},
{"option", [{"value", "mi"}], ["mi"]}
]}
] =
html
|> Floki.parse_document!()
|> Floki.find("#global_settings_unit_of_length")
assert [
{"select", _,
[
{"option", [{"value", "C"}, {"selected", "selected"}], ["°C"]},
{"option", [{"value", "F"}], ["°F"]}
]}
] =
html
|> Floki.parse_document!()
|> Floki.find("#global_settings_unit_of_temperature")
end
test "shows :ideal by default", %{conn: conn} do
assert {:ok, _view, html} = live(conn, "/settings")
assert [
{"select", _,
[
{"option", [{"value", "ideal"}, {"selected", "selected"}], ["ideal"]},
{"option", [{"value", "rated"}], ["rated"]}
]}
] =
html
|> Floki.parse_document!()
|> Floki.find("#global_settings_preferred_range")
end
test "changes base_url", %{conn: conn} do
assert {:ok, view, _html} = live(conn, "/settings")
assert render_change(view, :change, %{global_settings: %{base_url: nil}})
|> Floki.parse_document!()
|> Floki.find("#global_settings_base_url")
|> Floki.attribute("value") == []
assert Settings.get_global_settings!().base_url == nil
assert render_change(view, :change, %{
global_settings: %{base_url: " https://example.com/ "}
})
|> Floki.parse_document!()
|> Floki.find("#global_settings_base_url")
|> Floki.attribute("value") == ["https://example.com"]
assert Settings.get_global_settings!().base_url == "https://example.com"
end
test "changes grafana_url", %{conn: conn} do
assert {:ok, view, _html} = live(conn, "/settings")
assert render_change(view, :change, %{global_settings: %{grafana_url: nil}})
|> Floki.parse_document!()
|> Floki.find("#global_settings_grafana_url")
|> Floki.attribute("value") == []
assert Settings.get_global_settings!().grafana_url == nil
assert render_change(view, :change, %{
global_settings: %{grafana_url: " https://example.com/ "}
})
|> Floki.parse_document!()
|> Floki.find("#global_settings_grafana_url")
|> Floki.attribute("value") == ["https://example.com"]
assert Settings.get_global_settings!().grafana_url == "https://example.com"
end
test "reacts to change events", %{conn: conn} do
assert {:ok, view, _html} = live(conn, "/settings")
assert [
{"select", _,
[
{"option", [{"value", "km"}], ["km"]},
{"option", [{"value", "mi"}, {"selected", "selected"}], ["mi"]}
]}
] =
render_change(view, :change, %{global_settings: %{unit_of_length: :mi}})
|> Floki.parse_document!()
|> Floki.find("#global_settings_unit_of_length")
assert settings = Settings.get_global_settings!()
assert settings.unit_of_length == :mi
assert [
{"select", _,
[
{"option", [{"value", "C"}], ["°C"]},
{"option", [{"value", "F"}, {"selected", "selected"}], ["°F"]}
]}
] =
render_change(view, :change, %{global_settings: %{unit_of_temperature: :F}})
|> Floki.parse_document!()
|> Floki.find("#global_settings_unit_of_temperature")
assert settings = Settings.get_global_settings!()
assert settings.unit_of_temperature == :F
end
end
describe "language" do
alias Locations.Address
test "changes language", %{conn: conn} do
{:ok, %Address{id: address_id}} =
Locations.create_address(%{
display_name: "foo",
name: "bar",
latitude: 0,
longitude: 0,
osm_id: 0,
osm_type: "way",
raw: %{}
})
assert {:ok, view, html} = live(conn, "/settings")
assert [{"option", [{"value", "en"}, {"selected", "selected"}], ["English"]}] =
html
|> Floki.parse_document!()
|> Floki.find("#global_settings_language option[selected]")
render_change(view, :change, %{global_settings: %{language: "de"}})
TestHelper.eventually(fn ->
assert [{"option", [{"value", "de"}, {"selected", "selected"}], ["German"]}] =
render(view)
|> Floki.parse_document!()
|> Floki.find("#global_settings_language option[selected]")
assert %Address{country: "de"} = Repo.get(Address, address_id)
end)
end
@tag :capture_log
test "shows error", %{conn: conn} do
{:ok, %Address{id: address_id}} =
Locations.create_address(%{
display_name: "error",
name: "bar",
latitude: 0,
longitude: 0,
osm_id: 0,
osm_type: "way",
raw: %{}
})
assert {:ok, view, html} = live(conn, "/settings")
assert [{"option", [{"value", "en"}, {"selected", "selected"}], ["English"]}] =
html
|> Floki.parse_document!()
|> Floki.find("#global_settings_language option[selected]")
render_change(view, :change, %{global_settings: %{language: "de"}})
TestHelper.eventually(fn ->
html = render(view)
assert "There was a problem retrieving data from OpenStreetMap. Please try again later." =
html
|> Floki.parse_document!()
|> Floki.find("form .field-body")
|> Enum.find(
&match?(
{"div", _,
[
{_, _,
[
{_, _,
[{_, _, [{"select", [{"id", "global_settings_language"}, _], _}]}]},
_
]}
]},
&1
)
)
|> Floki.find("p.help")
|> Floki.text()
assert [{"option", [{"value", "en"}, {"selected", "selected"}], ["English"]}] =
html
|> Floki.parse_document!()
|> Floki.find("#global_settings_language option[selected]")
assert %Address{
display_name: "error",
name: "bar",
latitude: 0.0,
longitude: 0.0,
osm_id: 0,
osm_type: "way",
raw: %{}
} = Repo.get(Address, address_id)
end)
end
end
describe "car settings" do
alias TeslaMate.Log
defp car_fixture(attrs \\ %{}) do
{:ok, car} =
attrs
|> Enum.into(%{
efficiency: 0.153,
eid: 42,
model: "S",
vid: 42,
name: "foo",
trim_badging: "P100D",
vin: "12345F"
})
|> Log.create_car()
car
end
test "Greys out input fields if sleep mode is disabled", %{conn: conn} do
car = car_fixture()
ids = [
"#car_settings_#{car.id}_suspend_min",
"#car_settings_#{car.id}_suspend_after_idle_min",
"#car_settings_#{car.id}_req_no_shift_state_reading",
"#car_settings_#{car.id}_req_no_temp_reading",
"#car_settings_#{car.id}_req_not_unlocked"
]
assert {:ok, view, html} = live(conn, "/settings")
assert ["checked"] ==
html
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_sleep_mode_enabled")
|> Floki.attribute("checked")
html =
render_change(view, :change, %{"car_settings_#{car.id}" => %{sleep_mode_enabled: false}})
assert [] =
html
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_sleep_mode_enabled")
|> Floki.attribute("checked")
for id <- ids do
assert ["disabled"] =
html
|> Floki.parse_document!()
|> Floki.find(id)
|> Floki.attribute("disabled")
end
html =
render_change(view, :change, %{"car_settings_#{car.id}" => %{sleep_mode_enabled: true}})
|> Floki.parse_document!()
assert ["checked"] =
html
|> Floki.find("#car_settings_#{car.id}_sleep_mode_enabled")
|> Floki.attribute("checked")
for id <- ids do
assert [] = html |> Floki.find(id) |> Floki.attribute("disabled")
end
end
test "shows 21 and 15 minutes by default", %{conn: conn} do
car = car_fixture()
assert {:ok, _view, html} = live(conn, "/settings")
html = Floki.parse_document!(html)
assert [
{"select", _,
[
{"option", [{"value", "12"}], ["12 min"]},
{"option", [{"value", "15"}], ["15 min"]},
{"option", [{"value", "18"}], ["18 min"]},
{"option", [{"value", "21"}, {"selected", "selected"}], ["21 min"]},
{"option", [{"value", "24"}], ["24 min"]},
{"option", [{"value", "27"}], ["27 min"]},
{"option", [{"value", "30"}], ["30 min"]},
{"option", [{"value", "35"}], ["35 min"]},
{"option", [{"value", "40"}], ["40 min"]},
{"option", [{"value", "45"}], ["45 min"]},
{"option", [{"value", "50"}], ["50 min"]},
{"option", [{"value", "55"}], ["55 min"]},
{"option", [{"value", "60"}], ["60 min"]},
{"option", [{"value", "65"}], ["65 min"]},
{"option", [{"value", "70"}], ["70 min"]},
{"option", [{"value", "75"}], ["75 min"]},
{"option", [{"value", "80"}], ["80 min"]},
{"option", [{"value", "85"}], ["85 min"]},
{"option", [{"value", "90"}], ["90 min"]}
]}
] = Floki.find(html, "#car_settings_#{car.id}_suspend_min")
assert [
{"select", _,
[
{"option", [{"value", "5"}], ["5 min"]},
{"option", [{"value", "10"}], ["10 min"]},
{"option", [{"value", "15"}, {"selected", "selected"}], ["15 min"]},
{"option", [{"value", "20"}], ["20 min"]},
{"option", [{"value", "25"}], ["25 min"]},
{"option", [{"value", "30"}], ["30 min"]},
{"option", [{"value", "35"}], ["35 min"]},
{"option", [{"value", "40"}], ["40 min"]},
{"option", [{"value", "45"}], ["45 min"]},
{"option", [{"value", "50"}], ["50 min"]},
{"option", [{"value", "55"}], ["55 min"]},
{"option", [{"value", "60"}], ["60 min"]}
]}
] = Floki.find(html, "#car_settings_#{car.id}_suspend_after_idle_min")
end
test "shows false, false, true by default", %{conn: conn} do
car =
car_fixture(
settings: %{
req_no_shift_state_reading: false,
req_no_temp_reading: false,
req_not_unlocked: true
}
)
assert {:ok, _view, html} = live(conn, "/settings")
html = Floki.parse_document!(html)
assert [] =
html
|> Floki.find("#car_settings_#{car.id}_req_no_shift_state_reading")
|> Floki.attribute("checked")
assert [] =
html
|> Floki.find("#car_settings_#{car.id}_req_no_temp_reading")
|> Floki.attribute("checked")
assert ["checked"] =
html
|> Floki.find("#car_settings_#{car.id}_req_not_unlocked")
|> Floki.attribute("checked")
end
test "reacts to change events", %{conn: conn} do
car =
car_fixture(
settings: %{
suspend_min: 21,
suspend_after_idle_min: 15,
req_no_shift_state_reading: false,
req_no_temp_reading: false,
req_not_unlocked: true,
free_supercharging: false
}
)
assert {:ok, view, html} = live(conn, "/settings")
assert [{"option", [{"value", "90"}, {"selected", "selected"}], ["90 min"]}] =
render_change(view, :change, %{"car_settings_#{car.id}" => %{suspend_min: 90}})
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_suspend_min option")
|> Enum.filter(&match?({_, [_, {"selected", "selected"}], _}, &1))
assert [settings] = Settings.get_car_settings()
assert settings.suspend_min == 90
assert [{"option", [{"value", "30"}, {"selected", "selected"}], ["30 min"]}] =
render_change(view, :change, %{
"car_settings_#{car.id}" => %{suspend_after_idle_min: 30}
})
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_suspend_after_idle_min option")
|> Enum.filter(&match?({_, [_, {"selected", "selected"}], _}, &1))
assert [settings] = Settings.get_car_settings()
assert settings.suspend_after_idle_min == 30
assert ["checked"] =
render_change(view, :change, %{
"car_settings_#{car.id}" => %{req_no_shift_state_reading: true}
})
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_req_no_shift_state_reading")
|> Floki.attribute("checked")
assert [settings] = Settings.get_car_settings()
assert settings.req_no_shift_state_reading == true
assert ["checked"] =
render_change(view, :change, %{
"car_settings_#{car.id}" => %{req_no_temp_reading: true}
})
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_req_no_temp_reading")
|> Floki.attribute("checked")
assert [settings] = Settings.get_car_settings()
assert settings.req_no_temp_reading == true
assert [] =
render_change(view, :change, %{
"car_settings_#{car.id}" => %{req_not_unlocked: false}
})
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_req_not_unlocked")
|> Floki.attribute("checked")
assert [settings] = Settings.get_car_settings()
assert settings.req_not_unlocked == false
## Charge cost
assert [] ==
html
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_free_supercharging")
|> Floki.attribute("checked")
assert ["checked"] ==
render_change(view, :change, %{
"car_settings_#{car.id}" => %{free_supercharging: true}
})
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{car.id}_free_supercharging")
|> Floki.attribute("checked")
assert [settings] = Settings.get_car_settings()
assert settings.free_supercharging == true
end
test "changes between cars", %{conn: conn} do
one = car_fixture(id: 10001, name: "one", eid: 10001, vid: 1001, vin: "10001")
two = car_fixture(id: 10002, name: "two", eid: 10002, vid: 1002, vin: "10002")
assert {:ok, view, html} = live(conn, "/settings")
assert one.name ==
html
|> Floki.parse_document!()
|> Floki.find(".tabs .is-active")
|> Floki.text()
# change settings of car "one"
assert [{"option", [{"value", "90"}, {"selected", "selected"}], ["90 min"]}] =
render_change(view, :change, %{"car_settings_#{one.id}" => %{suspend_min: 90}})
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{one.id}_suspend_min option")
|> Enum.filter(&match?({_, [_, {"selected", "selected"}], _}, &1))
assert [settings, _] = Settings.get_car_settings()
assert settings.suspend_min == 90
# change car
assert html = render_click(view, :car, %{id: two.id})
assert two.name ==
html
|> Floki.parse_document!()
|> Floki.find(".tabs .is-active")
|> Floki.text()
assert [{"option", [{"value", "21"}, {"selected", "selected"}], ["21 min"]}] =
html
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{two.id}_suspend_min option")
|> Enum.filter(&match?({_, [_, {"selected", "selected"}], _}, &1))
# change settings of car "two"
assert [{"option", [{"value", "60"}, {"selected", "selected"}], ["60 min"]}] =
render_click(view, :change, %{"car_settings_#{two.id}" => %{suspend_min: 60}})
|> Floki.parse_document!()
|> Floki.find("#car_settings_#{two.id}_suspend_min option")
|> Enum.filter(&match?({_, [_, {"selected", "selected"}], _}, &1))
# change back
assert html =
render_click(view, :car, %{id: one.id})
|> Floki.parse_document!()
assert one.name ==
html
|> Floki.find(".tabs .is-active")
|> Floki.text()
assert [{"option", [{"value", "90"}, {"selected", "selected"}], ["90 min"]}] =
html
|> Floki.find("#car_settings_#{one.id}_suspend_min option")
|> Enum.filter(&match?({_, [_, {"selected", "selected"}], _}, &1))
end
end
end
| 35.723608 | 98 | 0.475016 |
73f93cf27d3858bc46fec330de4918267ba2ffad | 1,337 | ex | Elixir | apps/dtask/lib/dtask/resource_usage/extractor/mem_info.ex | fehu/elixir-dtask | 93b39a1acb616cdc7b4fffb4950e82021ef5b0f6 | [
"MIT"
] | null | null | null | apps/dtask/lib/dtask/resource_usage/extractor/mem_info.ex | fehu/elixir-dtask | 93b39a1acb616cdc7b4fffb4950e82021ef5b0f6 | [
"MIT"
] | null | null | null | apps/dtask/lib/dtask/resource_usage/extractor/mem_info.ex | fehu/elixir-dtask | 93b39a1acb616cdc7b4fffb4950e82021ef5b0f6 | [
"MIT"
] | null | null | null | defmodule DTask.ResourceUsage.Extractor.MemInfo do
@moduledoc false
@behaviour DTask.ResourceUsage.Extractor
@typep value :: number | :nan
@type usage :: %{ram: value, swap: value}
@typep ignored :: term
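  # Illustrative usage (a sketch: the argument is ignored and the fractions
  # depend on the host's current /proc/meminfo contents):
  #
  #     {:ok, %{ram: ram_used_fraction, swap: swap_used_fraction}} = query_usage(nil)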
@regex_mem_total ~r"MemTotal:\s*(\d+) kB"
@regex_mem_avail ~r"MemAvailable:\s*(\d+) kB"
@regex_swap_total ~r"SwapTotal:\s*(\d+) kB"
@regex_swap_free ~r"SwapFree:\s*(\d+) kB"
@impl true
@spec query_usage(ignored) :: {:ok, usage} | {:error, term}
def query_usage(_) do
with {:ok, data} <- File.read("/proc/meminfo"),
[_, s_mem_total] <- Regex.run(@regex_mem_total, data),
[_, s_mem_avail] <- Regex.run(@regex_mem_avail, data),
[_, s_swap_total] <- Regex.run(@regex_swap_total, data),
[_, s_swap_free] <- Regex.run(@regex_swap_free, data),
{mem_total, ""} <- Integer.parse(s_mem_total),
{mem_avail, ""} <- Integer.parse(s_mem_avail),
{swap_total, ""} <- Integer.parse(s_swap_total),
{swap_free, ""} <- Integer.parse(s_swap_free)
do {:ok, %{
ram: 1 - mem_avail / mem_total,
swap: (if swap_total == 0, do: :nan, else: 1 - swap_free / swap_total)
}}
else
{:error, e} -> {:error, e}
_ -> {:error, "Failed to parse /proc/meminfo"}
end
end
end
| 34.282051 | 81 | 0.584144 |
73f94677e3e2f2d8a31a956a9e238a6116064bdc | 1,382 | ex | Elixir | lib/mix/tasks/tailwind.install.ex | kingdomcoding/tailwind | 0bc48d3b0cf433ad38894ff2eef78dab563640ba | [
"MIT"
] | 311 | 2021-12-20T22:11:07.000Z | 2022-03-31T16:03:30.000Z | lib/mix/tasks/tailwind.install.ex | kingdomcoding/tailwind | 0bc48d3b0cf433ad38894ff2eef78dab563640ba | [
"MIT"
] | 32 | 2021-12-21T05:21:57.000Z | 2022-03-30T13:04:20.000Z | lib/mix/tasks/tailwind.install.ex | kingdomcoding/tailwind | 0bc48d3b0cf433ad38894ff2eef78dab563640ba | [
"MIT"
] | 26 | 2021-12-21T18:49:48.000Z | 2022-03-16T12:53:32.000Z | defmodule Mix.Tasks.Tailwind.Install do
@moduledoc """
Installs tailwind under `_build`.
```bash
$ mix tailwind.install
$ mix tailwind.install --if-missing
```
By default, it installs #{Tailwind.latest_version()} but you
can configure it in your config files, such as:
config :tailwind, :version, "#{Tailwind.latest_version()}"
## Options
* `--runtime-config` - load the runtime configuration
before executing command
* `--if-missing` - install only if the given version
does not exist
"""
@shortdoc "Installs tailwind under _build"
use Mix.Task
@impl true
def run(args) do
valid_options = [runtime_config: :boolean, if_missing: :boolean]
case OptionParser.parse_head!(args, strict: valid_options) do
{opts, []} ->
if opts[:runtime_config], do: Mix.Task.run("app.config")
if opts[:if_missing] && latest_version?() do
:ok
else
Tailwind.install()
end
{_, _} ->
Mix.raise("""
Invalid arguments to tailwind.install, expected one of:
mix tailwind.install
mix tailwind.install --runtime-config
mix tailwind.install --if-missing
""")
end
end
defp latest_version?() do
version = Tailwind.configured_version()
match?({:ok, ^version}, Tailwind.bin_version())
end
end
| 24.245614 | 68 | 0.623734 |
73f95126044383d0ef7a78deb7027ac18aba1560 | 761 | ex | Elixir | test/support/channel_case.ex | joakimk/livecoding_workspace | 2d4a30c80e61503323bccf6a1f097d83242171da | [
"MIT",
"Unlicense"
] | 2 | 2016-07-02T21:53:17.000Z | 2016-07-07T16:35:58.000Z | test/support/channel_case.ex | joakimk/livecoding_workspace | 2d4a30c80e61503323bccf6a1f097d83242171da | [
"MIT",
"Unlicense"
] | null | null | null | test/support/channel_case.ex | joakimk/livecoding_workspace | 2d4a30c80e61503323bccf6a1f097d83242171da | [
"MIT",
"Unlicense"
] | null | null | null | defmodule LivecodingWorkspace.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint LivecodingWorkspace.Endpoint
end
end
  setup _tags do
:ok
end
end
| 22.382353 | 56 | 0.726675 |
73f957040e65f37b3b9bafee1b2b075601f74193 | 700 | ex | Elixir | documentation/examples/publisher.ex | VanHelmont/gen_rmq | ae6958c7f5d61b64e075020a9760cc5b64d749c3 | [
"MIT"
] | 164 | 2018-03-25T10:02:09.000Z | 2022-03-15T12:39:24.000Z | documentation/examples/publisher.ex | VanHelmont/gen_rmq | ae6958c7f5d61b64e075020a9760cc5b64d749c3 | [
"MIT"
] | 239 | 2018-03-14T13:08:42.000Z | 2022-03-20T08:59:16.000Z | documentation/examples/publisher.ex | VanHelmont/gen_rmq | ae6958c7f5d61b64e075020a9760cc5b64d749c3 | [
"MIT"
] | 41 | 2018-05-15T10:07:13.000Z | 2022-02-11T14:36:27.000Z | defmodule ExamplePublisher do
@moduledoc """
Example GenRMQ.Publisher implementation
Sample usage:
```
MIX_ENV=test iex -S mix
iex(1)> ExamplePublisher.start_link()
iex(2)> ExamplePublisher.publish_message("test", "routing_key")
```
"""
@behaviour GenRMQ.Publisher
require Logger
def start_link() do
GenRMQ.Publisher.start_link(__MODULE__, name: __MODULE__)
end
def publish_message(message, routing_key) do
Logger.info("Publishing message #{inspect(message)}")
GenRMQ.Publisher.publish(__MODULE__, message, routing_key)
end
def init() do
[
exchange: "example_exchange",
connection: "amqp://guest:guest@localhost:5672"
]
end
end
| 21.212121 | 65 | 0.704286 |
73f9760ebc9e3fadceb2bf455f4888863c079e25 | 954 | ex | Elixir | web/views/field_view.ex | snocorp/meansweepx | 585cb25ad22dd2231ab01edd840b0613d6abef77 | [
"MIT"
] | null | null | null | web/views/field_view.ex | snocorp/meansweepx | 585cb25ad22dd2231ab01edd840b0613d6abef77 | [
"MIT"
] | null | null | null | web/views/field_view.ex | snocorp/meansweepx | 585cb25ad22dd2231ab01edd840b0613d6abef77 | [
"MIT"
] | null | null | null | defmodule Meansweepx.FieldView do
use Meansweepx.Web, :view
require Ecto.DateTime
def render("show.json", %{field: field}) do
%{data: render_one(field, Meansweepx.FieldView, "field.json")}
end
def render("field.json", %{field: field}) do
grid_matrix = Enum.map(1..field.height, fn(y) ->
Enum.map(1..field.width, fn(x) ->
v = Map.get(field.grid, "#{x-1},#{y-1}")
value = if v["swept"] do v["value"] else -2 end
%{v: value, f: v["flagged"], s: v["swept"]}
end)
end)
finished = if field.finished_at == nil do
nil
else
(Ecto.DateTime.to_iso8601 field.finished_at) <> "Z"
end
%{
id: field.id,
width: field.width,
height: field.height,
count: field.count,
active: field.active,
grid: grid_matrix,
result: field.result,
started: (Ecto.DateTime.to_iso8601 field.inserted_at) <> "Z",
finished: finished
}
end
end
| 25.783784 | 67 | 0.589099 |
73f97de0d4e59acdc4dd0336a2eb7b0e92759260 | 628 | exs | Elixir | test/unit/strong_migrations/classifiers/add_index_not_concurrently_test.exs | maximemenager/strong_migrations | b7e091d2cfed73098d3bf683c7ce5c8ceee3159b | [
"MIT"
] | 23 | 2021-10-29T19:58:35.000Z | 2021-11-13T21:42:45.000Z | test/unit/strong_migrations/classifiers/add_index_not_concurrently_test.exs | maximemenager/strong_migrations | b7e091d2cfed73098d3bf683c7ce5c8ceee3159b | [
"MIT"
] | 1 | 2021-10-31T03:57:47.000Z | 2021-10-31T14:33:45.000Z | test/unit/strong_migrations/classifiers/add_index_not_concurrently_test.exs | surgeventures/strong_migrations | 3c82e34a6e7a372c6de17ba7a0b07da7664baa26 | [
"MIT"
] | 3 | 2021-10-31T02:14:10.000Z | 2021-11-09T08:07:22.000Z | defmodule StrongMigrations.Classifiers.AddIndexNotConcurrentlyTest do
use ExUnit.Case, async: true
alias StrongMigrations.Classifiers.AddIndexNotConcurrently
alias StrongMigrations.Migration
test "it has failed when creating an index not concurrently" do
migration = %{Migration.new("test.exs") | create_index: true}
assert {:error, :add_index_not_concurrently} == AddIndexNotConcurrently.classify(migration)
end
test "it has passed when not creating an index" do
migration = %{Migration.new("test.exs") | create_index: false}
assert :ok == AddIndexNotConcurrently.classify(migration)
end
end
| 33.052632 | 95 | 0.772293 |
73f97e0571e012c8619e270557858c167162c323 | 2,148 | exs | Elixir | config/prod.exs | bcoop713/rumbl | 831982b86f8f4e6540b6d481e36e2e3c3470b5b1 | [
"MIT"
] | 144 | 2015-04-30T15:12:46.000Z | 2022-01-07T03:40:26.000Z | config/prod.exs | bcoop713/rumbl | 831982b86f8f4e6540b6d481e36e2e3c3470b5b1 | [
"MIT"
] | 57 | 2015-04-29T11:26:54.000Z | 2022-01-05T13:35:10.000Z | config/prod.exs | bcoop713/rumbl | 831982b86f8f4e6540b6d481e36e2e3c3470b5b1 | [
"MIT"
] | 46 | 2015-06-12T11:26:15.000Z | 2021-12-15T00:38:26.000Z | use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :rumbl, Rumbl.Endpoint,
http: [port: {:system, "PORT"}],
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :rumbl, Rumbl.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :rumbl, Rumbl.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :rumbl, Rumbl.Endpoint, server: true
#
# You will also need to set the application root to `.` in order
# for the new static assets to be served after a hot upgrade:
#
# config :rumbl, Rumbl.Endpoint, root: "."
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 32.545455 | 67 | 0.709497 |
73f9a9b00807826e598b2e5dc7a0bf1fec05e4c6 | 1,683 | ex | Elixir | lib/vapor/planner.ex | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | [
"MIT"
] | 533 | 2018-05-27T17:54:58.000Z | 2021-09-26T12:21:20.000Z | lib/vapor/planner.ex | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | [
"MIT"
] | 68 | 2018-05-28T14:26:52.000Z | 2021-09-11T23:11:34.000Z | lib/vapor/planner.ex | autodidaddict/vapor | 319853bfce86c9b164adfc6c5ccf2455d4e1d771 | [
"MIT"
] | 37 | 2018-06-22T00:08:38.000Z | 2021-10-06T17:14:19.000Z | defmodule Vapor.Planner do
@moduledoc """
This module provides a DSL for building configuration plans.
```elixir
defmodule MyApp.Config do
use Vapor.Planner
dotenv()
config :env, env([
foo: "FOO",
bar: "BAR",
])
config :file, file("test/support/settings.json", [
foo: "foo",
baz: "baz",
boz: ["biz", "boz"],
])
config :kafka, MyApp.KafkaWorker
end
```
"""
alias Vapor.Provider.{Dotenv, Env, File, Group}
defmacro __using__(_opts) do
quote do
@before_compile unquote(__MODULE__)
Module.register_attribute(__MODULE__, :config_plan, accumulate: true)
@behaviour Vapor.Plan
import unquote(__MODULE__)
end
end
defmacro __before_compile__(%{module: mod}) do
plan = Module.get_attribute(mod, :config_plan)
plan =
plan
|> Enum.reverse
|> Enum.map(fn p -> Macro.escape(p) end)
quote do
@impl Vapor.Plan
def config_plan do
unquote(plan)
|> Enum.map(fn p -> __vapor_config__(p) end)
end
defoverridable config_plan: 0
end
end
defmacro dotenv do
quote do
@config_plan :dotenv
defp __vapor_config__(:dotenv) do
%Dotenv{}
end
end
end
defmacro config(name, provider) do
quote do
@config_plan unquote(name)
defp __vapor_config__(unquote(name)) do
%Group{
name: unquote(name),
providers: [unquote(provider)]
}
end
end
end
def env(bindings) do
%Env{
bindings: bindings
}
end
def file(path, bindings) do
%File{
path: path,
bindings: bindings
}
end
end
| 18.096774 | 75 | 0.592395 |
73f9eee006da386752204b79effd6caf516c3a2b | 887 | ex | Elixir | lib/web/game.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 107 | 2018-10-05T18:20:32.000Z | 2022-02-28T04:02:50.000Z | lib/web/game.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 33 | 2018-10-05T14:11:18.000Z | 2022-02-10T22:19:18.000Z | lib/web/game.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 18 | 2019-02-03T03:08:20.000Z | 2021-12-28T04:29:36.000Z | defmodule Web.Game do
@moduledoc """
Helpers for games in the web view
"""
@doc """
Chose a random game that is online and has a home page
"""
@spec highlighted_game([Game.t()]) :: Game.t()
def highlighted_game(games) do
games
|> Enum.map(& &1.game)
|> Enum.filter(& &1.display)
|> Enum.filter(&(&1.homepage_url != nil))
|> Enum.shuffle()
|> List.first()
end
@doc """
  Checks whether the client is allowed to load.
  If anonymous users are not allowed, a user must be present in `assigns` under `user_key`.
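  For example (a sketch; the maps and the `:current_user` key are illustrative):
  ```
  client_allowed?(%{allow_anonymous_client: true}, %{}, :current_user)
  #=> {:ok, :allowed}
  client_allowed?(%{allow_anonymous_client: false}, %{current_user: nil}, :current_user)
  #=> {:error, :not_signed_in}
  ```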
"""
def client_allowed?(game, assigns, user_key) do
case game.allow_anonymous_client do
true ->
{:ok, :allowed}
false ->
case Map.has_key?(assigns, user_key) && !is_nil(Map.get(assigns, user_key)) do
true ->
{:ok, :allowed}
false ->
{:error, :not_signed_in}
end
end
end
end
| 22.175 | 86 | 0.580609 |
73fa17fbbf0e0462bfbcf1f8ef5c214c91d83288 | 9,293 | ex | Elixir | lib/osrs_api.ex | norbert-k/ex_osrs_api | 7884bf2b28628464812269a77e20b1f552a57f38 | [
"MIT"
] | 1 | 2021-04-15T22:26:54.000Z | 2021-04-15T22:26:54.000Z | lib/osrs_api.ex | norbert-k/ex_osrs_api | 7884bf2b28628464812269a77e20b1f552a57f38 | [
"MIT"
] | 1 | 2021-04-19T00:35:24.000Z | 2021-04-19T11:04:19.000Z | lib/osrs_api.ex | norbert-k/ex_osrs_api | 7884bf2b28628464812269a77e20b1f552a57f38 | [
"MIT"
] | null | null | null | defmodule ExOsrsApi.OsrsApi do
@moduledoc """
### OsrsApi
Main module for API requests
"""
use Tesla, only: ~w(get)a, docs: false
alias ExOsrsApi.Ratelimit
alias ExOsrsApi.PlayerHighscores
alias ExOsrsApi.Errors.Error
alias ExOsrsApi.Errors.HttpErrorMetadata
alias ExOsrsApi.PlayerRequest
alias ExOsrsApi.Models.Activities
@highscore_types ~w(regular ironman hardcore_ironman ultimate_ironman deadman seasonal tournament)a
@default_ratelimiter Ratelimit.new_default()
adapter(Tesla.Adapter.Hackney)
plug(Tesla.Middleware.Timeout, timeout: 20_000)
plug(Tesla.Middleware.BaseUrl, "https://secure.runescape.com/")
plug(Tesla.Middleware.Compression, format: "gzip")
plug(Tesla.Middleware.Fuse,
opts: {{:standard, 10, 10_000}, {:reset, 30_000}},
keep_original_error: true,
should_melt: fn
{:ok, %{status: status}} when status in [428, 500, 504] -> true
{:ok, _} -> false
{:error, _} -> true
end
)
@typedoc """
Supported highscore types
"""
@type highscore_type ::
:deadman
| :hardcore_ironman
| :ironman
| :regular
| :seasonal
| :tournament
| :ultimate_ironman
@spec get_highscores(
String.t(),
highscore_type(),
Ratelimit.t(),
list(String.t())
) :: {:error, Error.t()} | {:ok, PlayerHighscores.t()}
@doc """
Get player highscores by player username and highscore type
"""
def get_highscores(
username,
type,
ratelimit \\ @default_ratelimiter,
supported_activities \\ Activities.get_all_default_activities()
)
when is_bitstring(username) and type in @highscore_types do
case Ratelimit.check_ratelimit(ratelimit, type) do
{:ok, _} ->
case create_url(type, username) |> get() do
{:ok, %Tesla.Env{body: body, status: 200}} ->
PlayerHighscores.new_from_bitstring(username, type, body, supported_activities)
{:ok, %Tesla.Env{status: 404, headers: headers}} ->
{:error,
Error.new(
:http_error,
"404 not found (username: #{username}, type: #{type})",
HttpErrorMetadata.new(
404,
"404 not found (username: #{username}, type: #{type})",
headers,
type
)
)}
{:ok, %Tesla.Env{status: status, headers: headers}} when status in [428, 500, 504] ->
{:error,
Error.new(
:http_error,
"Service offline or ratelimiter has kicked in",
HttpErrorMetadata.new(
status,
"Service offline or ratelimiter has kicked in",
headers,
type
)
)}
{:ok, %Tesla.Env{status: status, headers: headers}} ->
{:error,
Error.new(
:http_error,
"Unsupported API response",
HttpErrorMetadata.new(
status,
"Unsupported API response",
headers,
type
)
)}
{:error, error} ->
{:error,
Error.new(
:http_error,
error,
HttpErrorMetadata.new(
nil,
error,
[],
type
)
)}
end
{:error, error} ->
{:error, error}
end
end
@doc """
  Get highscores for a list of player usernames and a single highscore type
"""
@spec get_multiple_highscores(
list(String.t()),
highscore_type(),
Ratelimit.t(),
list(String.t())
) :: list(PlayerHighscores.t() | {:error, Error.t()})
def get_multiple_highscores(
usernames,
type,
ratelimit \\ @default_ratelimiter,
supported_activities \\ Activities.get_all_default_activities()
)
when is_list(usernames) and type in @highscore_types do
tasks =
usernames
|> Enum.uniq()
|> Enum.map(fn username ->
Task.async(fn -> get_highscores(username, type, ratelimit, supported_activities) end)
end)
    Task.yield_many(tasks, 30_000)
|> Enum.map(fn {task, result} ->
case result do
nil ->
Task.shutdown(task, :brutal_kill)
Error.new(:task_error, "Task timed out")
{:exit, reason} ->
Error.new(:task_error, reason)
{:ok, result} ->
result
end
end)
end
@doc """
  Get a single player's highscores across every highscore type
"""
@spec get_all_highscores(String.t(), Ratelimit.t(), list(String.t())) ::
list({:ok, PlayerHighscores.t()} | {:error, Error.t()})
def get_all_highscores(
username,
ratelimit \\ @default_ratelimiter,
supported_activities \\ Activities.get_all_default_activities()
)
when is_bitstring(username) do
tasks =
@highscore_types
|> Enum.map(fn type ->
Task.async(fn -> get_highscores(username, type, ratelimit, supported_activities) end)
end)
Task.yield_many(tasks, 30_000)
|> Enum.map(fn {task, result} ->
case result do
nil ->
Task.shutdown(task, :brutal_kill)
Error.new(:task_error, "Task timed out")
{:exit, reason} ->
Error.new(:task_error, reason)
{:ok, result} ->
result
end
end)
end
@doc """
  Get highscores for a list of player usernames across every highscore type
"""
@spec get_multiple_all_highscores(list(String.t()), Ratelimit.t(), list(String.t())) ::
list(PlayerHighscores.t() | {:error, Error.t()})
def get_multiple_all_highscores(
usernames,
ratelimit \\ @default_ratelimiter,
supported_activities \\ Activities.get_all_default_activities()
)
when is_list(usernames) do
tasks =
usernames
|> Enum.uniq()
|> Enum.map(fn username ->
Task.async(fn -> get_all_highscores(username, ratelimit, supported_activities) end)
end)
Task.yield_many(tasks, 30_000)
|> Enum.flat_map(fn {task, result} ->
case result do
nil ->
Task.shutdown(task, :brutal_kill)
Error.new(:task_error, "Task timed out")
{:exit, reason} ->
Error.new(:task_error, reason)
{:ok, result} ->
result
end
end)
end
@doc """
  Get player highscores described by an `ExOsrsApi.PlayerRequest` struct
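  For example (a sketch; the username and types are illustrative):
  ```
  %ExOsrsApi.PlayerRequest{username: "Zezima", types: [:regular, :ironman]}
  |> ExOsrsApi.OsrsApi.get_player_request()
  ```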
"""
@spec get_player_request(ExOsrsApi.PlayerRequest.t(), Ratelimit.t(), list(String.t())) ::
list(PlayerHighscores.t() | {:error, Error.t()})
def get_player_request(
%PlayerRequest{username: username, types: types},
ratelimit \\ @default_ratelimiter,
supported_activities \\ Activities.get_all_default_activities()
) do
tasks =
types
|> Enum.map(fn type ->
Task.async(fn -> get_highscores(username, type, ratelimit, supported_activities) end)
end)
Task.yield_many(tasks, 30_000)
|> Enum.map(fn {task, result} ->
case result do
nil ->
Task.shutdown(task, :brutal_kill)
Error.new(:task_error, "Task timed out")
{:exit, reason} ->
Error.new(:task_error, reason)
{:ok, result} ->
result
end
end)
end
@doc """
  Get player highscores for a list of `ExOsrsApi.PlayerRequest` structs
"""
@spec get_multiple_player_request(list(PlayerRequest.t()), Ratelimit.t(), list(String.t())) ::
list(PlayerHighscores.t() | {:error, Error.t()})
def get_multiple_player_request(
player_requests,
ratelimit \\ @default_ratelimiter,
supported_activities \\ Activities.get_all_default_activities()
)
when is_list(player_requests) do
tasks =
player_requests
|> Enum.uniq()
|> Enum.map(fn player_request ->
Task.async(fn -> get_player_request(player_request, ratelimit, supported_activities) end)
end)
Task.yield_many(tasks, 30_000)
|> Enum.flat_map(fn {task, result} ->
case result do
nil ->
Task.shutdown(task, :brutal_kill)
Error.new(:task_error, "Task timed out")
{:exit, reason} ->
Error.new(:task_error, reason)
{:ok, result} ->
result
end
end)
end
@spec create_url(
highscore_type(),
String.t()
) :: String.t()
defp create_url(type, username) when is_atom(type) and is_bitstring(username) do
"m=#{type_transform(type)}/index_lite.ws?player=#{username}"
end
@spec type_transform(highscore_type()) :: String.t()
defp type_transform(type) when is_atom(type) do
case type do
:regular -> "hiscore_oldschool"
:ironman -> "hiscore_oldschool_ironman"
:hardcore_ironman -> "hiscore_oldschool_hardcore_ironman"
:ultimate_ironman -> "hiscore_oldschool_ultimate"
:deadman -> "hiscore_oldschool_deadman"
:seasonal -> "hiscore_oldschool_seasonal"
:tournament -> "hiscore_oldschool_tournament"
end
end
end
| 29.040625 | 101 | 0.577854 |
73fa31e045bbe5a64c8d2292776f947335b1430d | 1,863 | ex | Elixir | lib/level/schemas/space.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 928 | 2018-04-03T16:18:11.000Z | 2019-09-09T17:59:55.000Z | lib/level/schemas/space.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 74 | 2018-04-03T00:46:50.000Z | 2019-03-10T18:57:27.000Z | lib/level/schemas/space.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 89 | 2018-04-03T17:33:20.000Z | 2019-08-19T03:40:20.000Z | defmodule Level.Schemas.Space do
@moduledoc """
The Space schema.
"""
use Ecto.Schema
import Ecto.Changeset
import Level.Gettext
alias Level.Schemas.Group
alias Level.Schemas.SpaceUser
@type t :: %__MODULE__{}
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
schema "spaces" do
field :state, :string, read_after_writes: true
field :is_demo, :boolean, read_after_writes: true
field :name, :string, default: ""
field :slug, :string, default: ""
field :avatar, :string
field :postbot_key, :string
has_many :space_users, SpaceUser
has_many :groups, Group
timestamps()
end
@doc false
def create_changeset(struct, attrs \\ %{}) do
struct
|> cast(attrs, [:name, :slug, :avatar, :is_demo])
|> validate_required([:name, :slug])
|> validate_format(
:slug,
slug_format(),
message: dgettext("errors", "contains invalid characters")
)
|> set_postbot_key()
|> unique_constraint(:slug, name: :spaces_lower_slug_index)
end
@doc false
def update_changeset(struct, attrs \\ %{}) do
struct
|> cast(attrs, [:name, :slug, :avatar])
|> validate_required([:name, :slug])
|> validate_format(
:slug,
slug_format(),
message: dgettext("errors", "contains invalid characters")
)
|> unique_constraint(:slug, name: :spaces_lower_slug_index)
end
@doc """
The regex format for a slug.
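  For example:
  ```
  Regex.match?(Level.Schemas.Space.slug_format(), "level-app")
  #=> true
  Regex.match?(Level.Schemas.Space.slug_format(), "-level")
  #=> false
  ```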
"""
def slug_format do
~r/^(?>[A-Za-z][A-Za-z0-9-\.]*[A-Za-z0-9])$/
end
defp set_postbot_key(changeset) do
key =
16
|> :crypto.strong_rand_bytes()
|> Base.encode16()
|> String.downcase()
changeset
|> Ecto.Changeset.change(postbot_key: key)
end
end
defimpl Phoenix.Param, for: Level.Schemas.Space do
def to_param(%{slug: slug}) do
slug
end
end
| 22.719512 | 64 | 0.638755 |
73fa6723ae3e61f52130dc95e07749d7d44e95bd | 895 | exs | Elixir | apps/omg_watcher/priv/repo/migrations/20180813143343_create_txoutput_table.exs | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | 1 | 2020-05-01T12:30:09.000Z | 2020-05-01T12:30:09.000Z | apps/omg_watcher/priv/repo/migrations/20180813143343_create_txoutput_table.exs | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/priv/repo/migrations/20180813143343_create_txoutput_table.exs | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | 2 | 2020-06-07T11:14:54.000Z | 2020-08-02T07:36:32.000Z | defmodule OMG.Watcher.Repo.Migrations.CreateTxoutputTable do
use Ecto.Migration
def change do
create table(:txoutputs, primary_key: false) do
add :blknum, :bigint, null: false, primary_key: true
add :txindex, :integer, null: false, primary_key: true
add :oindex, :integer, null: false, primary_key: true
add :creating_txhash, references(:transactions, column: :txhash, type: :binary)
add :creating_deposit, references(:ethevents, column: :hash, type: :binary)
add :spending_txhash, references(:transactions, column: :txhash, type: :binary)
add :spending_exit, references(:ethevents, column: :hash, type: :binary)
add :spending_tx_oindex, :integer
add :owner, :binary, null: false
add :amount, :decimal, precision: 81, scale: 0, null: false
add :currency, :binary, null: false
add :proof, :binary
end
end
end
| 42.619048 | 85 | 0.690503 |
73fa6d32cf274bf9468b90d3755cc2c3c9d325ad | 218 | ex | Elixir | apps/cronitex_web/lib/cronitex_web/views/cron_monitor_view.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | 1 | 2020-11-05T15:38:53.000Z | 2020-11-05T15:38:53.000Z | apps/cronitex_web/lib/cronitex_web/views/cron_monitor_view.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | 36 | 2020-10-24T01:28:42.000Z | 2022-02-07T11:11:37.000Z | apps/cronitex_web/lib/cronitex_web/views/cron_monitor_view.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | null | null | null | defmodule CronitexWeb.CronMonitorView do
use CronitexWeb, :view
alias Crontab.CronExpression.Composer
def cron_str(%Crontab.CronExpression{} = cron_expression) do
Composer.compose(cron_expression)
end
end
| 24.222222 | 62 | 0.798165 |
73fabdd4e79812cb5c84c71367fae657339e6338 | 27,728 | exs | Elixir | test/screens/v2/widget_instance/alert_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 3 | 2021-07-27T14:11:00.000Z | 2022-01-03T14:16:43.000Z | test/screens/v2/widget_instance/alert_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 444 | 2021-03-10T20:57:17.000Z | 2022-03-31T16:00:35.000Z | test/screens/v2/widget_instance/alert_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | null | null | null | defmodule Screens.V2.WidgetInstance.AlertTest do
use ExUnit.Case, async: true
alias Screens.Alerts.Alert
alias Screens.Config.Screen
alias Screens.Config.V2.{BusEink, BusShelter, GlEink, Solari}
alias Screens.RouteType
alias Screens.V2.WidgetInstance.Alert, as: AlertWidget
setup :setup_base
defp setup_base(_context) do
%{
widget: %AlertWidget{
alert: %Alert{id: "123"},
screen: %Screen{app_params: nil, vendor: nil, device_id: nil, name: nil, app_id: nil}
}
}
end
defp put_active_period(widget, ap) do
%{widget | alert: %{widget.alert | active_period: ap}}
end
defp put_home_stop(widget, app_config_module, stop_id) do
alias Screens.Config.V2.Alerts
%{
widget
| screen: %{
widget.screen
| app_params: struct(app_config_module, %{alerts: %Alerts{stop_id: stop_id}})
}
}
end
defp put_informed_entities(widget, ies) do
%{widget | alert: %{widget.alert | informed_entities: ies}}
end
defp put_stop_sequences(widget, sequences) do
%{widget | stop_sequences: sequences}
end
defp put_routes_at_stop(widget, routes) do
%{widget | routes_at_stop: routes}
end
defp put_app_id(widget, app_id) do
%{widget | screen: %{widget.screen | app_id: app_id}}
end
defp put_effect(widget, effect) do
%{widget | alert: %{widget.alert | effect: effect}}
end
defp put_now(widget, now) do
%{widget | now: now}
end
defp ie(opts \\ []) do
%{stop: opts[:stop], route: opts[:route], route_type: opts[:route_type]}
end
defp setup_home_stop(%{widget: widget}) do
home_stop = "5"
%{widget: put_home_stop(widget, BusShelter, home_stop)}
end
defp setup_stop_sequences(%{widget: widget}) do
stop_sequences = [
~w[0 1 2 3 4 5 6 7 8 9],
~w[10 20 30 4 5 7],
~w[ 5 6 90],
~w[200 40 5],
~w[111 222 333]
]
%{widget: put_stop_sequences(widget, stop_sequences)}
end
defp setup_routes(%{widget: widget}) do
routes = [
%{route_id: "a", active?: true},
%{route_id: "b", active?: false},
%{route_id: "c", active?: true}
]
%{widget: put_routes_at_stop(widget, routes)}
end
defp setup_screen_config(%{widget: widget}) do
%{widget: put_app_id(widget, :bus_shelter_v2)}
end
defp setup_now(%{widget: widget}) do
%{widget: put_now(widget, ~U[2021-01-01T00:00:00Z])}
end
defp setup_informed_entities(%{widget: widget}) do
%{widget: put_informed_entities(widget, [ie(stop: "5")])}
end
defp setup_active_period(%{widget: widget}) do
active_period = [
{~U[2021-01-01T00:00:00Z], ~U[2021-01-01T22:00:00Z]},
{~U[2021-01-02T00:00:00Z], ~U[2021-01-02T22:00:00Z]}
]
%{widget: put_active_period(widget, active_period)}
end
defp setup_effect(%{widget: widget}) do
%{widget: put_effect(widget, :stop_closure)}
end
# Pass this to `setup` to set up "context" data on the alert widget, without setting up the API alert itself.
@alert_widget_context_setup_group [
:setup_home_stop,
:setup_stop_sequences,
:setup_routes,
:setup_screen_config,
:setup_now
]
# Pass this to `setup` to set up a stop_closure alert that is currently active (just started) and affects the home stop.
@valid_alert_setup_group @alert_widget_context_setup_group ++
[
:setup_informed_entities,
:setup_active_period,
:setup_effect
]
describe "priority/1" do
setup @valid_alert_setup_group
test "returns [1, 2] when slot_names(widget) == [:full_body]", %{widget: widget} do
assert [1, 2] == AlertWidget.priority(widget)
end
test "returns a list of tiebreaker values when widget should be considered for placement", %{
widget: widget
} do
widget = put_effect(widget, :snow_route)
assert [2 | _] = AlertWidget.priority(widget)
end
test "returns :no_render if any of the tiebreaker functions returns :no_render", %{
widget: widget
} do
# Currently active, but happening long enough that we don't want to show it anymore
active_period = [
{~U[2020-01-01T00:00:00Z], ~U[2020-01-01T20:00:00Z]},
{~U[2021-01-01T00:00:00Z], ~U[2021-01-01T20:00:00Z]}
]
widget = put_active_period(widget, active_period)
assert :no_render == AlertWidget.priority(widget)
end
end
describe "serialize/1" do
setup @valid_alert_setup_group ++ [:setup_display_values]
defp setup_display_values(%{widget: widget}) do
widget = %{widget | alert: %{widget.alert | header: "Stop is closed."}}
%{widget: widget}
end
test "serializes an alert widget", %{widget: widget} do
widget = put_informed_entities(widget, [ie(route: "a"), ie(route: "b"), ie(route: "c")])
expected_json_map = %{
route_pills: [
%{type: :text, text: "a", color: :yellow},
%{type: :text, text: "b", color: :yellow},
%{type: :text, text: "c", color: :yellow}
],
icon: :x,
header: "Stop Closed",
body: "Stop is closed.",
url: "mbta.com/alerts"
}
assert expected_json_map == AlertWidget.serialize(widget)
end
test "converts non-route informed entities to route pills as expected", %{widget: widget} do
# widget has informed_entities: [%{stop: "5", route: nil, route_type: nil}]
expected_json_map = %{
route_pills: [
%{type: :text, text: "a", color: :yellow},
%{type: :text, text: "b", color: :yellow},
%{type: :text, text: "c", color: :yellow}
],
icon: :x,
header: "Stop Closed",
body: "Stop is closed.",
url: "mbta.com/alerts"
}
assert expected_json_map == AlertWidget.serialize(widget)
end
test "collapses more than 3 route pills to a single mode pill", %{widget: widget} do
widget =
widget
|> put_routes_at_stop([
%{route_id: "a", active?: true},
%{route_id: "b", active?: false},
%{route_id: "c", active?: true},
%{route_id: "d", active?: true},
%{route_id: "e", active?: true}
])
|> put_informed_entities([ie(route: "a"), ie(route: "b"), ie(route: "c"), ie(route: "d")])
expected_json_map = %{
route_pills: [%{type: :icon, icon: :bus, color: :yellow}],
icon: :x,
header: "Stop Closed",
body: "Stop is closed.",
url: "mbta.com/alerts"
}
assert expected_json_map == AlertWidget.serialize(widget)
end
end
describe "slot_names/1 for bus apps (Bus Shelter and Bus E-Ink)" do
setup @alert_widget_context_setup_group
# active | high-impact | informs all routes || full-screen?
# n | n | n || n
# y | n | n || n
# n | y | n || n
# y | y | n || n
# n | n | y || n
# y | n | y || n
# n | y | y || n
# y | y | y || y
@bus_slot_names_cases %{
{false, false, false} => [:medium_left, :medium_right],
{true, false, false} => [:medium_left, :medium_right],
{false, true, false} => [:medium_left, :medium_right],
{true, true, false} => [:medium_left, :medium_right],
{false, false, true} => [:medium_left, :medium_right],
{true, false, true} => [:medium_left, :medium_right],
{false, true, true} => [:medium_left, :medium_right],
{true, true, true} => [:full_body]
}
for {{set_active?, set_high_impact_effect?, set_informs_all_active_routes?},
expected_slot_names} <- @bus_slot_names_cases do
false_to_not = fn
true -> ""
false -> "not "
end
test_description =
"returns #{inspect(expected_slot_names)} if alert is " <>
false_to_not.(set_active?) <>
"active and does " <>
false_to_not.(set_high_impact_effect?) <>
"have a high-impact effect and does " <>
false_to_not.(set_informs_all_active_routes?) <>
"inform all active routes at home stop"
test test_description, %{widget: widget} do
active_period =
if(unquote(set_active?),
do: [{~U[2021-01-01T00:00:00Z], ~U[2021-01-01T22:00:00Z]}],
else: [{~U[2021-01-02T00:00:00Z], ~U[2021-01-02T22:00:00Z]}]
)
effect = if(unquote(set_high_impact_effect?), do: :stop_closure, else: :snow_route)
informed_entities =
if(unquote(set_informs_all_active_routes?),
do: [ie(route: "a"), ie(route: "c")],
else: [ie(route: "a"), ie(route: "b")]
)
widget =
widget
|> put_active_period(active_period)
|> put_effect(effect)
|> put_informed_entities(informed_entities)
assert unquote(expected_slot_names) == AlertWidget.slot_names(widget)
end
end
test "returns [:medium] for a non-full-screen alert on Bus E-Ink", %{widget: widget} do
widget =
widget
|> put_app_id(:bus_eink_v2)
|> put_home_stop(BusEink, "5")
|> put_active_period([{~U[2021-01-02T00:00:00Z], ~U[2021-01-02T22:00:00Z]}])
|> put_effect(:snow_route)
|> put_informed_entities([ie(route: "a"), ie(route: "b")])
assert [:medium] == AlertWidget.slot_names(widget)
end
end
describe "slot_names/1 for Green Line E-Ink app" do
setup @alert_widget_context_setup_group ++ [:setup_gl_eink_config]
defp setup_gl_eink_config(%{widget: widget}) do
widget =
widget
|> put_app_id(:gl_eink_v2)
|> put_home_stop(GlEink, "5")
%{widget: widget}
end
# active | high-impact | location :inside || full-screen?
# n | n | n || n
# y | n | n || n
# n | y | n || n
# y | y | n || n
# n | n | y || n
# y | n | y || n
# n | y | y || n
# y | y | y || y
@gl_slot_names_cases %{
{false, false, false} => [:medium],
{true, false, false} => [:medium],
{false, true, false} => [:medium],
{true, true, false} => [:medium],
{false, false, true} => [:medium],
{true, false, true} => [:medium],
{false, true, true} => [:medium],
{true, true, true} => [:full_body_top_screen]
}
for {{set_active?, set_high_impact_effect?, set_location_inside?}, expected_slot_names} <-
@gl_slot_names_cases do
false_to_not = fn
true -> " "
false -> " not "
end
test_description =
"returns #{inspect(expected_slot_names)} if alert is " <>
false_to_not.(set_active?) <>
"active and does" <>
false_to_not.(set_high_impact_effect?) <>
"have a high-impact effect and does" <>
false_to_not.(set_location_inside?) <>
"contain home stop in informed region"
test test_description, %{widget: widget} do
active_period =
if(unquote(set_active?),
do: [{~U[2021-01-01T00:00:00Z], ~U[2021-01-01T22:00:00Z]}],
else: [{~U[2021-01-02T00:00:00Z], ~U[2021-01-02T22:00:00Z]}]
)
effect =
if(unquote(set_high_impact_effect?), do: :station_closure, else: :elevator_closure)
informed_entities =
if(unquote(set_location_inside?),
do: [ie(stop: "4"), ie(stop: "5"), ie(stop: "6")],
else: [ie(stop: "5"), ie(stop: "6")]
)
widget =
widget
|> put_active_period(active_period)
|> put_effect(effect)
|> put_informed_entities(informed_entities)
assert unquote(expected_slot_names) == AlertWidget.slot_names(widget)
end
end
end
describe "active?/2" do
test "simply calls Alert.happening_now?/1 on the widget's alert", %{widget: widget} do
yes_happening_now = fn %Alert{id: "123"}, _ -> true end
not_happening_now = fn %Alert{id: "123"}, _ -> false end
assert AlertWidget.active?(widget, yes_happening_now)
assert not AlertWidget.active?(widget, not_happening_now)
end
end
describe "seconds_from_onset/2" do
test "returns difference in seconds between now and first active period's start time", %{
widget: widget
} do
start = ~U[2021-01-01T00:00:00Z]
now = ~U[2021-01-01T01:00:00Z]
widget =
widget
|> put_active_period([{start, nil}])
|> put_now(now)
expected_seconds_elapsed = 3600
assert expected_seconds_elapsed == AlertWidget.seconds_from_onset(widget)
end
test "returns a negative value if current time is before first active period", %{
widget: widget
} do
start = ~U[2021-01-01T01:00:00Z]
now = ~U[2021-01-01T00:00:00Z]
widget =
widget
|> put_active_period([{start, nil}])
|> put_now(now)
expected_seconds_elapsed = -3600
assert expected_seconds_elapsed == AlertWidget.seconds_from_onset(widget)
end
end
describe "seconds_to_next_active_period/2" do
test "returns seconds to start of first active period after current time, if it exists", %{
widget: widget
} do
now = ~U[2021-01-02T01:00:00Z]
next_start = ~U[2021-01-03T00:00:01Z]
widget =
widget
|> put_active_period([
{~U[2021-01-01T00:00:00Z], ~U[2021-01-01T23:00:00Z]},
{~U[2021-01-02T00:00:00Z], ~U[2021-01-02T23:00:00Z]},
{next_start, ~U[2021-01-03T23:00:00Z]}
])
|> put_now(now)
expected_seconds_to_next_active_period = 23 * 60 * 60 + 1
assert expected_seconds_to_next_active_period ==
AlertWidget.seconds_to_next_active_period(widget)
end
test "returns :infinity if no active period starting after current time exists", %{
widget: widget
} do
widget = put_now(widget, ~U[2021-01-02T01:00:00Z])
# no active period at all
assert :infinity == AlertWidget.seconds_to_next_active_period(widget)
# no start date after current time
widget =
put_active_period(widget, [
{nil, ~U[2021-01-01T23:00:00Z]},
{~U[2021-01-02T00:00:00Z], ~U[2021-01-02T23:00:00Z]}
])
assert :infinity == AlertWidget.seconds_to_next_active_period(widget)
end
end
describe "home_stop_id/1" do
test "returns stop ID from config for screen types that use only one stop ID", %{
widget: widget
} do
widget = put_home_stop(widget, BusShelter, "123")
assert "123" == AlertWidget.home_stop_id(widget)
end
test "fails for other screen types", %{widget: widget} do
widget = put_home_stop(widget, Solari, "123")
assert_raise FunctionClauseError, fn -> AlertWidget.home_stop_id(widget) end
end
test "fails when config is not correct shape", %{widget: widget} do
assert_raise FunctionClauseError, fn -> AlertWidget.home_stop_id(widget) end
end
end
describe "informed_entities/1" do
test "returns informed entities list from the widget's alert", %{widget: widget} do
ies = [ie(stop: "123"), ie(stop: "1129", route: "39")]
widget = put_informed_entities(widget, ies)
assert ies == AlertWidget.informed_entities(widget)
end
end
describe "upstream_stop_id_set/1" do
setup @alert_widget_context_setup_group
test "collects all stops upstream of the home stop into a set", %{widget: widget} do
expected_upstream_stops = MapSet.new(~w[0 1 2 3 4] ++ ~w[10 20 30 4] ++ ~w[200 40])
assert MapSet.equal?(expected_upstream_stops, AlertWidget.upstream_stop_id_set(widget))
end
end
describe "downstream_stop_id_set/1" do
setup @alert_widget_context_setup_group
test "collects all stops downstream of the home stop into a set", %{widget: widget} do
expected_downstream_stops = MapSet.new(~w[6 7 8 9] ++ ~w[7] ++ ~w[6 90])
assert MapSet.equal?(expected_downstream_stops, AlertWidget.downstream_stop_id_set(widget))
end
end
describe "location/1" do
setup @alert_widget_context_setup_group
test "handles empty informed entities", %{widget: widget} do
widget = put_informed_entities(widget, [])
assert :elsewhere == AlertWidget.location(widget)
end
test "handles all-nil informed entities", %{widget: widget} do
widget = put_informed_entities(widget, [ie()])
assert :elsewhere == AlertWidget.location(widget)
end
test "returns :elsewhere if an alert's informed entities only apply to routes not serving this stop",
%{widget: widget} do
widget = put_informed_entities(widget, [ie(route: "x"), ie(route: "y")])
assert :elsewhere == AlertWidget.location(widget)
end
test "returns :inside if any of an alert's informed entities is %{route_type: <route type of this screen>}",
%{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "0"),
ie(route_type: RouteType.to_id(:bus)),
ie(route: "x"),
ie(stop: "20", route: "a"),
ie()
])
assert :inside == AlertWidget.location(widget)
end
test "ignores route type if paired with any other specifier", %{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "1", route_type: RouteType.to_id(:bus)),
ie(route: "x", route_type: RouteType.to_id(:bus)),
ie(stop: "1", route: "x", route_type: RouteType.to_id(:bus))
])
assert :upstream == AlertWidget.location(widget)
end
test "ignores route type if it doesn't match this screen's route type", %{widget: widget} do
widget = put_informed_entities(widget, [ie(route_type: RouteType.to_id(:light_rail))])
assert :elsewhere == AlertWidget.location(widget)
end
test "returns :inside if any of an alert's informed entities is %{route: <route that serves this stop>}",
%{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "0"),
ie(route: "b"),
ie(stop: "20", route: "a")
])
assert :inside == AlertWidget.location(widget)
end
test "treats active and inactive (not running on the current day) routes the same", %{
widget: widget
} do
widget = put_informed_entities(widget, [ie(route: "a")])
assert :inside == AlertWidget.location(widget)
widget = put_informed_entities(widget, [ie(route: "b")])
assert :inside == AlertWidget.location(widget)
end
test "ignores route if it doesn't serve this stop", %{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "1"),
ie(route: "x")
])
assert :upstream == AlertWidget.location(widget)
end
test "returns :upstream for an alert that only affects upstream stops", %{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "0"),
ie(stop: "20", route: "a")
])
assert :upstream == AlertWidget.location(widget)
end
test "returns :boundary_upstream for an alert that affects upstream stops and this stop", %{
widget: widget
} do
widget =
put_informed_entities(widget, [
ie(stop: "0"),
ie(stop: "5"),
ie(stop: "20", route: "a")
])
assert :boundary_upstream == AlertWidget.location(widget)
end
test "returns :inside for an alert that only affects this stop", %{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "5"),
ie(stop: "5", route_type: RouteType.to_id(:bus)),
ie(stop: "5", route: "a")
])
assert :inside == AlertWidget.location(widget)
end
test "returns :inside for an alert that affects upstream stops, downstream stops, and this stop",
%{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "4"),
ie(stop: "5"),
ie(stop: "6")
])
assert :inside == AlertWidget.location(widget)
end
test "returns :boundary_downstream for an alert that affects downstream stops and this stop",
%{
widget: widget
} do
widget =
put_informed_entities(widget, [
ie(stop: "6"),
ie(stop: "5"),
ie(stop: "90", route: "a")
])
assert :boundary_downstream == AlertWidget.location(widget)
end
test "returns :downstream for an alert that only affects downstream stops", %{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "6"),
ie(stop: "90", route: "a")
])
assert :downstream == AlertWidget.location(widget)
end
test "returns :downstream for an alert that affects upstream and downstream stops, but not this stop",
%{widget: widget} do
widget =
put_informed_entities(widget, [
ie(stop: "4"),
ie(stop: "6")
])
assert :downstream == AlertWidget.location(widget)
end
end
describe "effect/1" do
test "returns effect from the widget's alert", %{widget: widget} do
effect = :detour
widget = put_effect(widget, effect)
assert effect == AlertWidget.effect(widget)
end
end
describe "tiebreaker_primary_timeframe/1" do
setup @valid_alert_setup_group
test "returns 1 for alerts that are active and started less than 4 weeks ago", %{
widget: widget
} do
widget = put_active_period(widget, [{~U[2021-01-01T00:00:00Z], nil}])
assert 1 == AlertWidget.tiebreaker_primary_timeframe(widget)
end
test "returns 2 for alerts that are active and started 4-12 weeks ago", %{widget: widget} do
widget =
put_active_period(widget, [
{~U[2020-11-01T00:00:00Z], ~U[2020-11-01T20:00:00Z]},
{~U[2021-01-01T00:00:00Z], nil}
])
assert 2 == AlertWidget.tiebreaker_primary_timeframe(widget)
end
test "returns 2 for alerts that are inactive and next active period starts in less than 36 hours",
%{widget: widget} do
widget =
put_active_period(widget, [
{~U[2021-01-02T00:00:00Z], nil}
])
assert 2 == AlertWidget.tiebreaker_primary_timeframe(widget)
end
test "returns 3 for alerts that are inactive and next active period starts in 36 hours or more",
%{widget: widget} do
widget = put_active_period(widget, [{~U[2021-01-10T00:00:00Z], nil}])
assert 3 == AlertWidget.tiebreaker_primary_timeframe(widget)
end
test "returns 4 for alerts that are active and started 12-24 weeks ago", %{widget: widget} do
widget =
put_active_period(widget, [
{~U[2020-10-01T00:00:00Z], ~U[2020-10-01T20:00:00Z]},
{~U[2021-01-01T00:00:00Z], nil}
])
assert 4 == AlertWidget.tiebreaker_primary_timeframe(widget)
end
test "returns :no_render for active alerts older than 24 weeks", %{widget: widget} do
widget =
put_active_period(widget, [
{~U[2020-05-01T00:00:00Z], ~U[2020-05-01T20:00:00Z]},
{~U[2021-01-01T00:00:00Z], nil}
])
assert :no_render == AlertWidget.tiebreaker_primary_timeframe(widget)
end
end
describe "tiebreaker_location" do
setup @valid_alert_setup_group
test "returns 1 if home stop is inside informed region", %{widget: widget} do
widget = put_informed_entities(widget, [ie(stop: "5")])
assert 1 == AlertWidget.tiebreaker_location(widget)
end
test "returns 2 if home stop is at the boundary of informed region", %{widget: widget} do
upstream_boundary_widget = put_informed_entities(widget, [ie(stop: "5"), ie(stop: "4")])
assert 2 == AlertWidget.tiebreaker_location(upstream_boundary_widget)
downstream_boundary_widget = put_informed_entities(widget, [ie(stop: "5"), ie(stop: "6")])
assert 2 == AlertWidget.tiebreaker_location(downstream_boundary_widget)
end
test "returns 3 if informed region is downstream of home stop", %{widget: widget} do
widget = put_informed_entities(widget, [ie(stop: "6")])
assert 3 == AlertWidget.tiebreaker_location(widget)
end
test "returns :no_render if informed region is upstream of home stop or elsewhere", %{
widget: widget
} do
upstream_widget = put_informed_entities(widget, [ie(stop: "4")])
assert :no_render == AlertWidget.tiebreaker_location(upstream_widget)
elsewhere_widget = put_informed_entities(widget, [ie(route: "doesnt_serve_this_stop")])
assert :no_render == AlertWidget.tiebreaker_location(elsewhere_widget)
end
end
describe "tiebreaker_secondary_timeframe/1" do
setup @valid_alert_setup_group
test "returns 1 for alerts that are inactive and next active period starts in less than 36 hours",
%{widget: widget} do
widget =
put_active_period(widget, [
{~U[2021-01-02T00:00:00Z], nil}
])
assert 1 == AlertWidget.tiebreaker_secondary_timeframe(widget)
end
test "returns 2 for alerts that are active and started 4-12 weeks ago", %{widget: widget} do
widget =
put_active_period(widget, [
{~U[2020-11-01T00:00:00Z], ~U[2020-11-01T20:00:00Z]},
{~U[2021-01-01T00:00:00Z], nil}
])
assert 2 == AlertWidget.tiebreaker_secondary_timeframe(widget)
end
test "returns 3 in all other cases", %{widget: widget} do
active_now_widget = put_active_period(widget, [{~U[2021-01-01T00:00:00Z], nil}])
assert 3 == AlertWidget.tiebreaker_secondary_timeframe(active_now_widget)
inactive_for_a_while_widget = put_active_period(widget, [{~U[2021-01-10T00:00:00Z], nil}])
assert 3 == AlertWidget.tiebreaker_secondary_timeframe(inactive_for_a_while_widget)
end
end
describe "tiebreaker_effect" do
setup @valid_alert_setup_group
test "returns priority value corresponding to effect, if supported", %{widget: widget} do
# base widget has stop_closure effect
assert is_integer(AlertWidget.tiebreaker_effect(widget))
shuttle_widget = put_effect(widget, :shuttle)
assert is_integer(AlertWidget.tiebreaker_effect(shuttle_widget))
end
test "returns :no_render for unsupported alert effects", %{widget: widget} do
widget = put_effect(widget, :service_change)
assert :no_render == AlertWidget.tiebreaker_effect(widget)
end
end
describe "audio_serialize/1" do
test "returns empty string", %{widget: widget} do
assert %{} == AlertWidget.audio_serialize(widget)
end
end
describe "audio_sort_key/1" do
test "returns 0", %{widget: widget} do
assert 0 == AlertWidget.audio_sort_key(widget)
end
end
describe "audio_valid_candidate?/1" do
test "returns false", %{widget: widget} do
refute AlertWidget.audio_valid_candidate?(widget)
end
end
describe "audio_view/1" do
test "returns AlertView", %{widget: widget} do
assert ScreensWeb.V2.Audio.AlertView == AlertWidget.audio_view(widget)
end
end
end
| 31.7254 | 122 | 0.607184 |
73fad2eed2910519bb988184889bfcb2b452cd6f | 270 | ex | Elixir | lib/socializer/guardian.ex | yosufali/socializer | abbd75cf1cedbf1a398fc72b64f4129e39d3e373 | [
"MIT"
] | null | null | null | lib/socializer/guardian.ex | yosufali/socializer | abbd75cf1cedbf1a398fc72b64f4129e39d3e373 | [
"MIT"
] | null | null | null | lib/socializer/guardian.ex | yosufali/socializer | abbd75cf1cedbf1a398fc72b64f4129e39d3e373 | [
"MIT"
] | null | null | null | defmodule Socializer.Guardian do
use Guardian, otp_app: :socializer
alias Socializer.User
def subject_for_token(resource, _claims) do
{:ok, to_string(resource.id)}
end
def resource_from_claims(claims) do
{:ok, User.find(claims["sub"])}
end
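# Illustrative round trip (a sketch; assumes Guardian is configured for the
# :socializer app and `user` is a persisted %User{}):
#
#     {:ok, token, _claims} = Socializer.Guardian.encode_and_sign(user)
#     {:ok, user, _claims} = Socializer.Guardian.resource_from_token(token)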
end
| 19.285714 | 45 | 0.714815 |
73fae79d3dfe4afbe69492ad42f02e5747e7ef08 | 340 | ex | Elixir | lib/strava/model/time_stream.ex | rkorzeniec/strava | aa99040355f72ff2766c080d5a919c66a53ac44b | [
"MIT"
] | 39 | 2016-04-09T21:50:34.000Z | 2022-03-04T09:16:25.000Z | lib/strava/model/time_stream.ex | rkorzeniec/strava | aa99040355f72ff2766c080d5a919c66a53ac44b | [
"MIT"
] | 24 | 2016-05-29T15:49:07.000Z | 2022-01-17T11:57:05.000Z | lib/strava/model/time_stream.ex | rkorzeniec/strava | aa99040355f72ff2766c080d5a919c66a53ac44b | [
"MIT"
] | 21 | 2016-02-02T01:19:23.000Z | 2022-02-06T23:29:32.000Z | defmodule Strava.TimeStream do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:original_size,
:resolution,
:series_type,
:data
]
@type t :: %__MODULE__{
original_size: integer(),
resolution: String.t(),
series_type: String.t(),
data: [integer()]
}
end
| 16.190476 | 35 | 0.547059 |
73fb0a1c82ca688e96b513b74dcc575f2a947099 | 503 | ex | Elixir | lib/mix/tasks/phoenix.server.ex | knewter/phoenix | 191909d97511ab99f9bbcc776cba9988c8b07596 | [
"MIT"
] | null | null | null | lib/mix/tasks/phoenix.server.ex | knewter/phoenix | 191909d97511ab99f9bbcc776cba9988c8b07596 | [
"MIT"
] | null | null | null | lib/mix/tasks/phoenix.server.ex | knewter/phoenix | 191909d97511ab99f9bbcc776cba9988c8b07596 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Phoenix.Server do
use Mix.Task
@shortdoc "Starts applications and their servers"
@recursive true
@moduledoc """
Starts the application by configuring all endpoints servers to run.
"""
def run(_args) do
Application.put_env :phoenix, :serve_endpoints, true
Mix.Task.run "app.start", []
no_halt
end
defp no_halt do
unless iex_running?, do: :timer.sleep(:infinity)
end
defp iex_running? do
Code.ensure_loaded?(IEx) && IEx.started?
end
end
| 20.958333 | 69 | 0.701789 |
73fb1d36c861e07f2846b3afafd5a0642c09910b | 1,975 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/xpn_host_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/xpn_host_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/compute/lib/google_api/compute/v1/model/xpn_host_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.XpnHostListWarningData do
@moduledoc """
## Attributes
* `key` (*type:* `String.t`, *default:* `nil`) - [Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).
* `value` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning data value corresponding to the key.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:key => String.t(),
:value => String.t()
}
field(:key)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.XpnHostListWarningData do
def decode(value, options) do
GoogleApi.Compute.V1.Model.XpnHostListWarningData.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.XpnHostListWarningData do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.5 | 527 | 0.738228 |
73fbe58e9dc698d456f1dafbbd3865b02e65e2d6 | 1,822 | ex | Elixir | clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__study_config_parameter_spec__discrete_value_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__study_config_parameter_spec__discrete_value_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/machine_learning/lib/google_api/machine_learning/v1/model/google_cloud_ml_v1__study_config_parameter_spec__discrete_value_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_StudyConfigParameterSpec_DiscreteValueSpec do
@moduledoc """
## Attributes
* `values` (*type:* `list(float())`, *default:* `nil`) - Must be specified if type is `DISCRETE`. A list of feasible points. The list should be in strictly increasing order. For instance, this parameter might have possible settings of 1.5, 2.5, and 4.0. This list should not contain more than 1,000 values.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:values => list(float())
}
field(:values, type: :list)
end
defimpl Poison.Decoder,
for:
GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_StudyConfigParameterSpec_DiscreteValueSpec do
def decode(value, options) do
GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_StudyConfigParameterSpec_DiscreteValueSpec.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.MachineLearning.V1.Model.GoogleCloudMlV1_StudyConfigParameterSpec_DiscreteValueSpec do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.740741 | 310 | 0.75247 |
73fc0c85201cab46d2e3d36819f34b2ee07232a2 | 583 | exs | Elixir | mix.exs | Stratus3D/ex_note | 21a54c1a39cfe8395eadec7d2249c345c493f4ea | [
"MIT"
] | null | null | null | mix.exs | Stratus3D/ex_note | 21a54c1a39cfe8395eadec7d2249c345c493f4ea | [
"MIT"
] | null | null | null | mix.exs | Stratus3D/ex_note | 21a54c1a39cfe8395eadec7d2249c345c493f4ea | [
"MIT"
] | null | null | null | defmodule ExNote.Mixfile do
use Mix.Project
def project do
[app: :ex_note,
version: "0.0.1",
elixir: "~> 1.0.5",
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[applications: []]
end
# Dependencies can be hex.pm packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1"}
#
# Type `mix help deps` for more examples and options
defp deps do
[]
end
end
| 18.806452 | 75 | 0.603774 |
73fc31c20df3150d0d9da86c8b038361a67f4b2c | 826 | ex | Elixir | test/support/fixtures/events_fixture.ex | davemenninger/ohio_elixir | 9472b71fa906e30a2a5fdc013256a5e80caedc2f | [
"MIT"
] | 7 | 2021-01-22T00:20:04.000Z | 2022-03-30T22:07:32.000Z | test/support/fixtures/events_fixture.ex | davemenninger/ohio_elixir | 9472b71fa906e30a2a5fdc013256a5e80caedc2f | [
"MIT"
] | 11 | 2021-10-05T03:59:28.000Z | 2022-03-20T21:54:44.000Z | test/support/fixtures/events_fixture.ex | davemenninger/ohio_elixir | 9472b71fa906e30a2a5fdc013256a5e80caedc2f | [
"MIT"
] | 3 | 2021-06-10T02:48:54.000Z | 2021-10-09T03:43:06.000Z | defmodule OhioElixir.EventsFixture do
@moduledoc """
This module defines test helpers for creating
entities via the `OhioElixir.Events` context.
"""
alias OhioElixir.Events
alias OhioElixir.Events.Meeting
alias OhioElixir.Repo
def speaker_fixture(attrs \\ %{}) do
valid_attrs = %{
name: "some name",
social_link: "test_url"
}
{:ok, speaker} =
attrs
|> Enum.into(valid_attrs)
|> Events.create_speaker()
speaker
end
def meeting_fixture(attrs \\ %{}) do
valid_attrs = %{date: "2010-04-17T14:00:00Z", title: "some title", event_brite_id: 12_345}
attrs = Enum.into(attrs, valid_attrs)
{:ok, meeting} =
%Meeting{}
|> Meeting.changeset(attrs)
|> Meeting.change_active(attrs[:active])
|> Repo.insert()
meeting
end
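# Illustrative use from a test (attribute values are hypothetical):
#
#     speaker = speaker_fixture(%{name: "Jane Doe"})
#     meeting = meeting_fixture(%{title: "January meetup", active: true})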
end
| 21.179487 | 94 | 0.638015 |
73fc8aeb2572dc1c1fbecced088431153b092065 | 7,512 | ex | Elixir | lib/credo/code/strings.ex | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | 1 | 2021-12-01T13:37:43.000Z | 2021-12-01T13:37:43.000Z | lib/credo/code/strings.ex | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | lib/credo/code/strings.ex | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | 1 | 2020-09-25T11:48:49.000Z | 2020-09-25T11:48:49.000Z | defmodule Credo.Code.Strings do
@moduledoc """
This module lets you strip strings from source code.
"""
alias Credo.Code.InterpolationHelper
alias Credo.SourceFile
@string_sigil_delimiters [
{"(", ")"},
{"[", "]"},
{"{", "}"},
{"<", ">"},
{"|", "|"},
{"\"", "\""},
{"'", "'"}
]
@heredocs_sigil_delimiters [
{"'''", "'''"},
{~s("""), ~s(""")}
]
@all_string_sigils Enum.flat_map(@string_sigil_delimiters, fn {b, e} ->
[{"~s#{b}", e}, {"~S#{b}", e}]
end)
@all_heredocs_sigils Enum.flat_map(@heredocs_sigil_delimiters, fn {b, e} ->
[{"~s#{b}", e}, {"~S#{b}", e}]
end)
# TODO v1.0: this should not remove heredocs, since
# there is a separate module for that
@doc """
Replaces all characters inside string literals and string sigils
with the equivalent amount of white-space.
"""
def replace_with_spaces(
source_file,
replacement \\ " ",
interpolation_replacement \\ " ",
filename \\ "nofilename"
) do
{source, filename} = SourceFile.source_and_filename(source_file, filename)
source
|> InterpolationHelper.replace_interpolations(interpolation_replacement, filename)
|> parse_code("", replacement)
end
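# Illustrative call (a sketch; Credo's own tests pass raw source strings, which
# `SourceFile.source_and_filename/2` is assumed to accept here in addition to
# %SourceFile{} structs):
#
#     Credo.Code.Strings.replace_with_spaces(~s[x = "abc"])
#     #=> ~s[x = "   "]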
defp parse_code("", acc, _replacement) do
acc
end
for {sigil_start, sigil_end} <- @all_heredocs_sigils do
defp parse_code(<<unquote(sigil_start)::utf8, t::binary>>, acc, replacement) do
parse_heredoc(
t,
acc <> unquote(sigil_start),
replacement,
unquote(sigil_end)
)
end
end
defp parse_code(<<"\"\"\""::utf8, t::binary>>, acc, replacement) do
parse_heredoc(t, acc <> ~s("""), replacement, ~s("""))
end
defp parse_code(<<"\'\'\'"::utf8, t::binary>>, acc, replacement) do
parse_heredoc(t, acc <> ~s('''), replacement, ~s('''))
end
for {sigil_start, sigil_end} <- @all_string_sigils do
defp parse_code(<<unquote(sigil_start)::utf8, t::binary>>, acc, replacement) do
parse_string_sigil(
t,
acc <> unquote(sigil_start),
unquote(sigil_end),
replacement
)
end
end
defp parse_code(<<"\\\""::utf8, t::binary>>, acc, replacement) do
parse_code(t, acc <> "\\\"", replacement)
end
defp parse_code(<<"\\\'"::utf8, t::binary>>, acc, replacement) do
parse_code(t, acc <> "\\\'", replacement)
end
defp parse_code(<<"?'"::utf8, t::binary>>, acc, replacement) do
parse_code(t, acc <> "?'", replacement)
end
defp parse_code(<<"'"::utf8, t::binary>>, acc, replacement) do
parse_charlist(t, acc <> "'", replacement)
end
defp parse_code(<<"?\""::utf8, t::binary>>, acc, replacement) do
parse_code(t, acc <> "?\"", replacement)
end
defp parse_code(<<"#"::utf8, t::binary>>, acc, replacement) do
parse_comment(t, acc <> "#", replacement)
end
defp parse_code(<<"\""::utf8, t::binary>>, acc, replacement) do
parse_string_literal(t, acc <> "\"", replacement)
end
defp parse_code(<<h::utf8, t::binary>>, acc, replacement) do
parse_code(t, acc <> <<h::utf8>>, replacement)
end
defp parse_code(str, acc, replacement) when is_binary(str) do
{h, t} = String.next_codepoint(str)
parse_code(t, acc <> h, replacement)
end
#
# Charlists
#
defp parse_charlist("", acc, _replacement) do
acc
end
defp parse_charlist(<<"\\\\"::utf8, t::binary>>, acc, replacement) do
parse_charlist(t, acc <> "\\\\", replacement)
end
defp parse_charlist(<<"\\\'"::utf8, t::binary>>, acc, replacement) do
parse_charlist(t, acc <> "\\\'", replacement)
end
defp parse_charlist(<<"\'"::utf8, t::binary>>, acc, replacement) do
parse_code(t, acc <> "'", replacement)
end
defp parse_charlist(<<"\n"::utf8, t::binary>>, acc, replacement) do
parse_charlist(t, acc <> "\n", replacement)
end
defp parse_charlist(str, acc, replacement) when is_binary(str) do
{h, t} = String.next_codepoint(str)
parse_charlist(t, acc <> h, replacement)
end
#
# Comments
#
defp parse_comment("", acc, _replacement) do
acc
end
defp parse_comment(<<"\n"::utf8, t::binary>>, acc, replacement) do
parse_code(t, acc <> "\n", replacement)
end
defp parse_comment(str, acc, replacement) when is_binary(str) do
{h, t} = String.next_codepoint(str)
parse_comment(t, acc <> h, replacement)
end
#
# String Literals
#
defp parse_string_literal("", acc, _replacement) do
acc
end
defp parse_string_literal(<<"\\\\"::utf8, t::binary>>, acc, replacement) do
parse_string_literal(t, acc, replacement)
end
defp parse_string_literal(<<"\\\""::utf8, t::binary>>, acc, replacement) do
parse_string_literal(t, acc, replacement)
end
defp parse_string_literal(<<"\""::utf8, t::binary>>, acc, replacement) do
parse_code(t, acc <> ~s("), replacement)
end
defp parse_string_literal(<<"\n"::utf8, t::binary>>, acc, replacement) do
parse_string_literal(t, acc <> "\n", replacement)
end
defp parse_string_literal(<<_::utf8, t::binary>>, acc, replacement) do
parse_string_literal(t, acc <> replacement, replacement)
end
#
# Sigils
#
for {_sigil_start, sigil_end} <- @all_string_sigils do
defp parse_string_sigil("", acc, unquote(sigil_end), _replacement) do
acc
end
defp parse_string_sigil(
<<"\\\\"::utf8, t::binary>>,
acc,
unquote(sigil_end),
replacement
) do
parse_string_sigil(t, acc, unquote(sigil_end), replacement)
end
defp parse_string_sigil(
<<"\\\""::utf8, t::binary>>,
acc,
unquote(sigil_end),
replacement
) do
parse_string_sigil(t, acc, unquote(sigil_end), replacement)
end
defp parse_string_sigil(
<<unquote(sigil_end)::utf8, t::binary>>,
acc,
unquote(sigil_end),
replacement
) do
parse_code(t, acc <> unquote(sigil_end), replacement)
end
defp parse_string_sigil(
<<"\n"::utf8, t::binary>>,
acc,
unquote(sigil_end),
replacement
) do
parse_string_sigil(t, acc <> "\n", unquote(sigil_end), replacement)
end
defp parse_string_sigil(
<<_::utf8, t::binary>>,
acc,
unquote(sigil_end),
replacement
) do
parse_string_sigil(t, acc <> replacement, unquote(sigil_end), replacement)
end
end
#
# Heredocs
#
defp parse_heredoc(<<"\"\"\""::utf8, t::binary>>, acc, replacement, "\"\"\"") do
parse_code(t, acc <> ~s("""), replacement)
end
defp parse_heredoc(<<"\'\'\'"::utf8, t::binary>>, acc, replacement, "\'\'\'") do
parse_code(t, acc <> ~s('''), replacement)
end
defp parse_heredoc("", acc, _replacement, _delimiter) do
acc
end
defp parse_heredoc(<<"\\\\"::utf8, t::binary>>, acc, replacement, delimiter) do
parse_heredoc(t, acc, replacement, delimiter)
end
defp parse_heredoc(<<"\\\""::utf8, t::binary>>, acc, replacement, delimiter) do
parse_heredoc(t, acc, replacement, delimiter)
end
defp parse_heredoc(<<"\n"::utf8, t::binary>>, acc, replacement, delimiter) do
parse_heredoc(t, acc <> "\n", replacement, delimiter)
end
defp parse_heredoc(<<_::utf8, t::binary>>, acc, replacement, delimiter) do
parse_heredoc(t, acc <> replacement, replacement, delimiter)
end
end
| 26.450704 | 86 | 0.594249 |
73fca08bc7907fcf9ff624922d9678c19abaf91a | 1,419 | exs | Elixir | mix.exs | kommitters/seedex | 5c9a4ce70288488724a6059930b1b31f0f9dd5ed | [
"MIT"
] | null | null | null | mix.exs | kommitters/seedex | 5c9a4ce70288488724a6059930b1b31f0f9dd5ed | [
"MIT"
] | null | null | null | mix.exs | kommitters/seedex | 5c9a4ce70288488724a6059930b1b31f0f9dd5ed | [
"MIT"
] | null | null | null | defmodule Seedex.Mixfile do
use Mix.Project
@version "0.3.1"
def project do
[
app: :seedex,
version: @version,
elixir: "~> 1.13",
description: "Seed data generation for Ecto",
source_url: "https://github.com/danhper/seedex",
elixirc_paths: elixirc_paths(Mix.env()),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
package: package(),
deps: deps(),
docs: [source_ref: "#{@version}", extras: ["README.md"], main: "readme"]
]
end
def application do
[
extra_applications: [:ecto],
applications: applications(Mix.env()),
description: 'Seed data generation for Ecto'
]
end
defp applications(:test), do: applications(:all) ++ [:ecto, :postgrex]
defp applications(_all), do: [:logger]
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_all), do: ["lib"]
defp deps do
[
{:ecto, "~> 3.7.1"},
{:ecto_sql, "~> 3.7.2"},
{:postgrex, "~> 0.16.1", only: [:test]},
{:earmark, "~> 1.4.20", only: :docs},
{:ex_doc, "~> 0.14", only: :docs}
]
end
defp package do
[
maintainers: ["Daniel Perez"],
files: ["lib", "mix.exs", "README.md"],
licenses: ["MIT"],
links: %{
"GitHub" => "https://github.com/danhper/seedex",
"Docs" => "http://hexdocs.pm/seedex/"
}
]
end
end
| 24.465517 | 78 | 0.552502 |
73fcaa0bfcf7ae2c6fa4217628b430d8a1e1f301 | 662 | exs | Elixir | weather/mix.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | weather/mix.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | weather/mix.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | defmodule Weather.Mixfile do
use Mix.Project
def project do
[
app: :weather,
escript: [ main_module: Weather.CLI ],
version: "0.1.0",
name: "Weather",
elixir: "~> 1.5",
start_permanent: Mix.env == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [
:logger,
:httpoison
]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:httpoison, "~> 1.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
]
end
end
| 19.470588 | 88 | 0.555891 |
73fcaea98fbd998e39d19ef19bb8748f6ec5499f | 314 | ex | Elixir | test/support/test_helpers.ex | lenra-io/ex_component_schema | a051ba94057cdd3218c4625e24f56a834a586aa6 | [
"MIT"
] | 2 | 2022-03-18T08:52:29.000Z | 2022-03-18T08:52:33.000Z | test/support/test_helpers.ex | lenra-io/ex_component_schema | a051ba94057cdd3218c4625e24f56a834a586aa6 | [
"MIT"
] | 8 | 2021-09-15T11:52:45.000Z | 2022-01-10T13:13:53.000Z | test/support/test_helpers.ex | lenra-io/ex_component_schema | a051ba94057cdd3218c4625e24f56a834a586aa6 | [
"MIT"
] | null | null | null | defmodule ExComponentSchema.Test.Support.TestHelpers do
@spec load_schema_test(name :: String.t(), schema_tests_path :: String.t()) :: map | no_return
def load_schema_test(name, schema_tests_path) do
schema_tests_path
|> Path.join(name <> ".json")
|> File.read!()
|> Poison.decode!()
end
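# Illustrative call (the schema name and directory are hypothetical):
#
#     load_schema_test("draft7/minimum", "test/fixtures/schema_tests")
#     #=> map decoded from test/fixtures/schema_tests/draft7/minimum.json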
end
| 31.4 | 96 | 0.694268 |
73fcb3f2ce83b6d2e390b172ea05d785ef2f15d0 | 184 | exs | Elixir | test/fixtures/has_hex_dep/mix.exs | sudix/hex | f739a57d8829ea0b0f7759c164dc9149c3340e49 | [
"Apache-2.0"
] | 1 | 2019-04-29T12:35:49.000Z | 2019-04-29T12:35:49.000Z | test/fixtures/has_hex_dep/mix.exs | starbelly/hex | d84d2558812c454fbc9c26eea3b25e7648d345f1 | [
"Apache-2.0"
] | 1 | 2021-06-25T15:19:59.000Z | 2021-06-25T15:19:59.000Z | test/fixtures/has_hex_dep/mix.exs | sudix/hex | f739a57d8829ea0b0f7759c164dc9149c3340e49 | [
"Apache-2.0"
] | null | null | null | defmodule HasHexDep.Fixture.MixProject do
use Mix.Project
def project do
[app: :has_hex_dep, version: "0.0.1", deps: deps()]
end
defp deps do
[{:ecto, []}]
end
end
| 15.333333 | 55 | 0.63587 |
73fcc13c6328e08ef14066421d95b054f1ac4c35 | 1,628 | ex | Elixir | apps/ewallet/lib/ewallet/web/v1/serializers/key_serializer.ex | enyan94/ewallet | e938e686319867d133b21cd0eb5496e213ae7620 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/lib/ewallet/web/v1/serializers/key_serializer.ex | enyan94/ewallet | e938e686319867d133b21cd0eb5496e213ae7620 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/lib/ewallet/web/v1/serializers/key_serializer.ex | enyan94/ewallet | e938e686319867d133b21cd0eb5496e213ae7620 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Web.V1.KeySerializer do
@moduledoc """
Serializes key(s) into V1 JSON response format.
"""
alias Ecto.Association.NotLoaded
alias EWallet.Web.Paginator
alias EWallet.Web.V1.PaginatorSerializer
alias EWalletDB.Key
alias Utils.Helpers.DateFormatter
def serialize(%Paginator{} = paginator) do
PaginatorSerializer.serialize(paginator, &serialize/1)
end
def serialize(%Key{} = key) do
%{
object: "key",
id: key.id,
name: key.name,
access_key: key.access_key,
secret_key: key.secret_key,
account_id: nil,
enabled: key.enabled,
created_at: DateFormatter.to_iso8601(key.inserted_at),
updated_at: DateFormatter.to_iso8601(key.updated_at),
deleted_at: DateFormatter.to_iso8601(key.deleted_at),
# Attributes below are DEPRECATED and will be removed in the future:
# "expired" has been replaced by "enabled" in PR #535
expired: !key.enabled
}
end
def serialize(%NotLoaded{}), do: nil
def serialize(nil), do: nil
end
| 32.56 | 74 | 0.719287 |
73fce62171017db0694516f5a83ba1f0fe3304fa | 539 | ex | Elixir | web/router.ex | aortbals/chatter | a0ac7af7a8bca66d183b0f51320f269066171199 | [
"MIT"
] | null | null | null | web/router.ex | aortbals/chatter | a0ac7af7a8bca66d183b0f51320f269066171199 | [
"MIT"
] | null | null | null | web/router.ex | aortbals/chatter | a0ac7af7a8bca66d183b0f51320f269066171199 | [
"MIT"
] | null | null | null | defmodule Chatter.Router do
use Phoenix.Router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", Chatter do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
end
socket "/ws", Chatter do
channel "rooms:*", RoomChannel
end
# Other scopes may use custom stacks.
# scope "/api", Chatter do
# pipe_through :api
# end
end
| 17.966667 | 57 | 0.658627 |
73fcefc5b6df584a92e7959d656d34b3da6d32a4 | 86 | exs | Elixir | .formatter.exs | mneudert/lindel | 8b1dee8932edbe9bce80d39fce71c6b8d0ea7c03 | [
"WTFPL"
] | 4 | 2016-03-14T13:08:27.000Z | 2020-01-14T00:02:17.000Z | .formatter.exs | mneudert/plug_pagecache | d7d8a6e7fd61186b74e1c9151d955becf0d0e023 | [
"Apache-2.0"
] | null | null | null | .formatter.exs | mneudert/plug_pagecache | d7d8a6e7fd61186b74e1c9151d955becf0d0e023 | [
"Apache-2.0"
] | null | null | null | [
inputs: [
"{config,lib,test}/**/*.{ex,exs}",
"{.formatter,mix}.exs"
]
]
| 12.285714 | 38 | 0.453488 |
73fcfcb541c2d68cef59ce73b3963a646a918a65 | 1,366 | exs | Elixir | mix.exs | topaxi/google-api-elixir-client | 61348309772992b12e9d3069b61e215795866501 | [
"MIT"
] | 10 | 2017-02-08T08:42:37.000Z | 2018-04-27T01:04:34.000Z | mix.exs | topaxi/google-api-elixir-client | 61348309772992b12e9d3069b61e215795866501 | [
"MIT"
] | 7 | 2017-02-08T09:02:43.000Z | 2017-07-14T05:12:01.000Z | mix.exs | topaxi/google-api-elixir-client | 61348309772992b12e9d3069b61e215795866501 | [
"MIT"
] | 10 | 2017-02-08T08:42:40.000Z | 2020-07-20T09:56:21.000Z | defmodule Google.Mixfile do
use Mix.Project
def project do
[app: :google_api_client,
version: "1.1.0",
description: "Google API Client for Elixir",
package: package(),
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
elixirc_paths: elixirc_paths(Mix.env),
preferred_cli_env: [
vcr: :test, "vcr.delete": :test, "vcr.check": :test, "vcr.show": :test
],
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger, :httpoison]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:httpoison, "~> 0.8"},
{:poison, "~> 1.5 or ~> 2.0 or ~> 3.0"},
{:ex_doc, ">= 0.0.0", only: :dev},
{:exvcr, "~> 0.8", only: :test},
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp package do
[
maintainers: ["Sean Abrahams"],
licenses: ["BSD"],
links: %{"GitHub" => "https://github.com/seanabrahams/google-api-elixir-client"},
]
end
end
| 24.836364 | 87 | 0.579063 |
73fd268cc7f23f52dab6f7313e05ad38d21577a2 | 2,419 | ex | Elixir | lib/sftp_client/operations/open_file.ex | zoten/sftp_client | 814d06e36915f60bf98149d790051d03bee189f5 | [
"MIT"
] | 21 | 2019-08-06T01:39:15.000Z | 2021-04-15T20:08:04.000Z | lib/sftp_client/operations/open_file.ex | zoten/sftp_client | 814d06e36915f60bf98149d790051d03bee189f5 | [
"MIT"
] | 19 | 2019-07-10T17:10:34.000Z | 2021-04-18T11:16:58.000Z | lib/sftp_client/operations/open_file.ex | zoten/sftp_client | 814d06e36915f60bf98149d790051d03bee189f5 | [
"MIT"
] | 11 | 2019-07-04T14:56:37.000Z | 2021-04-26T19:09:22.000Z | defmodule SFTPClient.Operations.OpenFile do
@moduledoc """
A module that provides functions to open a file on an SFTP server in order to
read their contents.
"""
import SFTPClient.OperationUtil
alias SFTPClient.Conn
alias SFTPClient.Handle
alias SFTPClient.Operations.CloseHandle
@doc """
Opens a file on the server and returns a handle, which can be used for reading
or writing.
"""
@spec open_file(Conn.t(), Path.t(), [SFTPClient.access_mode()]) ::
{:ok, Handle.t()} | {:error, SFTPClient.error()}
def open_file(%Conn{} = conn, path, modes) do
conn.channel_pid
|> sftp_adapter().open(
to_charlist(path),
modes,
conn.config.operation_timeout
)
|> case do
{:ok, handle} ->
{:ok, %Handle{id: handle, conn: conn, path: to_string(path)}}
{:error, error} ->
{:error, handle_error(error)}
end
end
@doc """
Opens a file on the server and returns a handle, which can be used for reading
or writing, then runs the function and closes the handle when finished.
"""
@spec open_file(
Conn.t(),
Path.t(),
[SFTPClient.access_mode()],
(Handle.t() -> res)
) :: {:ok, res} | {:error, SFTPClient.error()}
when res: var
def open_file(%Conn{} = conn, path, modes, fun) do
with {:ok, handle} <- open_file(conn, path, modes) do
{:ok, run_callback(handle, fun)}
end
end
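# Illustrative use of the callback variant (a sketch; `conn` is assumed to be an
# established %SFTPClient.Conn{} and the remote path is hypothetical):
#
#     {:ok, path} =
#       open_file(conn, "readme.txt", [:read], fn handle -> handle.path end)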
@doc """
Opens a file on the server and returns a handle, which can be used for reading
or writing. Raises when the operation fails.
"""
@spec open_file!(Conn.t(), Path.t(), [SFTPClient.access_mode()]) ::
Handle.t() | no_return
def open_file!(%Conn{} = conn, path, modes) do
conn |> open_file(path, modes) |> may_bang!()
end
@doc """
Opens a file on the server and returns a handle, which can be used for reading
or writing, then runs the function and closes the handle when finished. Raises
when the operation fails.
"""
@spec open_file!(
Conn.t(),
Path.t(),
[SFTPClient.access_mode()],
(Handle.t() -> res)
) :: res | no_return
when res: var
def open_file!(%Conn{} = conn, path, modes, fun) do
conn
|> open_file!(path, modes)
|> run_callback(fun)
end
defp run_callback(handle, fun) do
fun.(handle)
after
CloseHandle.close_handle(handle)
end
end
| 28.127907 | 80 | 0.617197 |
73fd4e28c13eb2a1e33fe8b9c70fd47a91437d1e | 1,894 | ex | Elixir | clients/content/lib/google_api/content/v21/model/custom_attribute.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/custom_attribute.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/content/lib/google_api/content/v21/model/custom_attribute.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.CustomAttribute do
@moduledoc """
## Attributes
* `groupValues` (*type:* `list(GoogleApi.Content.V21.Model.CustomAttribute.t)`, *default:* `nil`) - Subattributes within this attribute group. Exactly one of value or groupValues must be provided.
* `name` (*type:* `String.t`, *default:* `nil`) - The name of the attribute. Underscores will be replaced by spaces upon insertion.
* `value` (*type:* `String.t`, *default:* `nil`) - The value of the attribute.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:groupValues => list(GoogleApi.Content.V21.Model.CustomAttribute.t()),
:name => String.t(),
:value => String.t()
}
field(:groupValues, as: GoogleApi.Content.V21.Model.CustomAttribute, type: :list)
field(:name)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.CustomAttribute do
def decode(value, options) do
GoogleApi.Content.V21.Model.CustomAttribute.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.CustomAttribute do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.735849 | 200 | 0.721225 |
73fdcb359529d96862a38eadae649088022a8b07 | 2,314 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1/model/location.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1/model/location.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1/model/location.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1.Model.Location do
@moduledoc """
A resource that represents Google Cloud Platform location.
## Attributes
* `displayName` (*type:* `String.t`, *default:* `nil`) - The friendly name for this location, typically a nearby city name.
For example, "Tokyo".
* `labels` (*type:* `map()`, *default:* `nil`) - Cross-service attributes for the location. For example
{"cloud.googleapis.com/region": "us-east1"}
* `locationId` (*type:* `String.t`, *default:* `nil`) - The canonical id for this location. For example: `"us-east1"`.
* `metadata` (*type:* `map()`, *default:* `nil`) - Service-specific metadata. For example the available capacity at the given
location.
* `name` (*type:* `String.t`, *default:* `nil`) - Resource name for the location, which may vary between implementations.
For example: `"projects/example-project/locations/us-east1"`
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t(),
:labels => map(),
:locationId => String.t(),
:metadata => map(),
:name => String.t()
}
field(:displayName)
field(:labels, type: :map)
field(:locationId)
field(:metadata, type: :map)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1.Model.Location do
def decode(value, options) do
GoogleApi.Firestore.V1.Model.Location.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1.Model.Location do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.15625 | 129 | 0.687554 |
73fde2854da9d504ea93db1e6c614c81d313b491 | 2,294 | exs | Elixir | test/plug/adapters/test/conn_test.exs | fishcakez/plug | 9fafa1527536b0b1895cad9fb464bc62e6e80123 | [
"Apache-2.0"
] | null | null | null | test/plug/adapters/test/conn_test.exs | fishcakez/plug | 9fafa1527536b0b1895cad9fb464bc62e6e80123 | [
"Apache-2.0"
] | null | null | null | test/plug/adapters/test/conn_test.exs | fishcakez/plug | 9fafa1527536b0b1895cad9fb464bc62e6e80123 | [
"Apache-2.0"
] | null | null | null | defmodule Plug.Adapters.Test.ConnTest do
use ExUnit.Case, async: true
import Plug.Test
test "read_req_body/2" do
conn = conn(:get, "/", "abcdefghij", headers: [{"content-type", "text/plain"}])
{adapter, state} = conn.adapter
assert {:more, "abcde", state} = adapter.read_req_body(state, length: 5)
assert {:more, "f", state} = adapter.read_req_body(state, length: 1)
assert {:more, "gh", state} = adapter.read_req_body(state, length: 2)
assert {:ok, "ij", state} = adapter.read_req_body(state, length: 5)
assert {:ok, "", _state} = adapter.read_req_body(state, length: 5)
end
test "no body or params" do
conn = conn(:get, "/")
{adapter, state} = conn.adapter
assert conn.req_headers == []
assert {:ok, "", _state} = adapter.read_req_body(state, length: 10)
end
test "custom body requires content-type" do
assert_raise ArgumentError, fn ->
conn(:get, "/", "abcdefgh")
end
end
test "custom params sets content-type to multipart/mixed" do
conn = conn(:get, "/", foo: "bar")
assert conn.req_headers == [{"content-type", "multipart/mixed; charset: utf-8"}]
end
test "parse_req_multipart/4" do
conn = conn(:get, "/", a: "b", c: [%{d: "e"}, "f"])
{adapter, state} = conn.adapter
assert {:ok, params, _} = adapter.parse_req_multipart(state, 1_000_000, fn _ -> end)
assert params == %{"a" => "b", "c" => [%{"d" => "e"}, "f"]}
end
test "recycle/2" do
conn = conn(:get, "/foo", a: "b", c: [%{d: "e"}, "f"], headers: [{"content-type", "text/plain"}])
|> put_req_cookie("req_cookie", "req_cookie")
|> put_req_cookie("del_cookie", "del_cookie")
|> put_req_cookie("over_cookie", "pre_cookie")
|> Plug.Conn.put_resp_cookie("over_cookie", "pos_cookie")
|> Plug.Conn.put_resp_cookie("resp_cookie", "resp_cookie")
|> Plug.Conn.delete_resp_cookie("del_cookie")
conn = recycle(conn(:get, "/"), conn)
assert conn.path_info == []
conn = conn |> Plug.Conn.fetch_params |> Plug.Conn.fetch_cookies
assert conn.params == %{}
assert conn.cookies == %{"req_cookie" => "req_cookie",
"over_cookie" => "pos_cookie",
"resp_cookie" => "resp_cookie"}
end
end
| 37.606557 | 101 | 0.59721 |
73fdff6a98667a7bd159a5fd76eb4b48ca782df4 | 1,116 | ex | Elixir | apps/omg_watcher/lib/omg_watcher/eventer/measure.ex | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | 1 | 2020-05-01T12:30:09.000Z | 2020-05-01T12:30:09.000Z | apps/omg_watcher/lib/omg_watcher/eventer/measure.ex | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/lib/omg_watcher/eventer/measure.ex | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | 1 | 2021-12-04T00:37:46.000Z | 2021-12-04T00:37:46.000Z | # Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.Eventer.Measure do
@moduledoc """
Counting business metrics sent to Datadog
"""
import OMG.Status.Metric.Event, only: [name: 1]
alias OMG.Status.Metric.Datadog
@supported_events [[:process, OMG.Watcher.Eventer]]
def supported_events, do: @supported_events
def handle_event([:process, OMG.Watcher.Eventer], _state, _metadata, _config) do
value =
self()
|> Process.info(:message_queue_len)
|> elem(1)
_ = Datadog.gauge(name(:eventer_message_queue_len), value)
end
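# Illustrative attachment of this handler (a sketch; the handler id is
# hypothetical and attachment normally happens in the app's telemetry setup):
#
#     :telemetry.attach(
#       "measure-eventer",
#       [:process, OMG.Watcher.Eventer],
#       &OMG.Watcher.Eventer.Measure.handle_event/4,
#       nil
#     )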
end
| 31.885714 | 82 | 0.733871 |
73fe0612ff948ae2a874334150b54242e2e6334c | 120 | ex | Elixir | lib/console/repo.ex | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | 6 | 2021-11-17T21:10:49.000Z | 2022-02-16T19:45:28.000Z | lib/console/repo.ex | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | 18 | 2021-11-25T04:31:06.000Z | 2022-03-27T04:54:00.000Z | lib/console/repo.ex | pluralsh/console | 38a446ce1bc2f7bc3e904fcacb102d3d57835ada | [
"Apache-2.0"
] | null | null | null | defmodule Console.Repo do
use Ecto.Repo,
otp_app: :console,
adapter: Ecto.Adapters.Postgres
use Bourne
end
| 15 | 35 | 0.716667 |
73fe0d2962a59726a6743be4fbfec2bca8c41f66 | 1,314 | exs | Elixir | mix.exs | pragdave/quixir | 20381d1b9e7ec5854388dde1569e34a369bfe502 | [
"Apache-2.0"
] | 280 | 2016-07-27T06:58:40.000Z | 2022-03-18T21:27:33.000Z | mix.exs | pragdave/quixir | 20381d1b9e7ec5854388dde1569e34a369bfe502 | [
"Apache-2.0"
] | 17 | 2016-11-30T07:26:05.000Z | 2020-06-04T22:15:23.000Z | mix.exs | pragdave/quixir | 20381d1b9e7ec5854388dde1569e34a369bfe502 | [
"Apache-2.0"
] | 14 | 2016-08-05T14:05:13.000Z | 2018-03-14T15:26:23.000Z | defmodule Quixir.Mixfile do
use Mix.Project
@version "0.9.4"
@package [
licenses: [ "apache 2.0" ],
maintainers: [ "Dave Thomas (pragdave) <dave@pragdave.me>" ],
links: %{
"Github" => "https://github.com/pragdave/quixir",
},
]
@deps (if File.dir?("../pollution") do
[
{ :pollution, [ path: "../pollution" ] },
{ :ex_doc, ">= 0.0.0", only: [ :dev, :test ] },
]
else
[
{ :pollution, "~> 0.9.2" },
{ :ex_doc, ">= 0.0.0", only: [ :dev, :test ] },
]
end)
@docs [
extras: [ "README.md" ],
main: "Quixir"
]
@if_production Mix.env == :prod
@elixirc_paths (case Mix.env do
:prod -> ["lib"]
_ -> ["lib", "scripts"]
end)
@project [
app: :quixir,
version: @version,
elixir: ">= 1.3.0",
elixirc_paths: @elixirc_paths,
build_embedded: @if_production,
start_permanent: @if_production,
deps: @deps,
description: """
A simple property-based testing framework written in pure Elixir.
""",
package: @package,
docs: @docs
]
@application []
# ------------------------------------------------------------
def project, do: @project
def application, do: @application
end
| 21.540984 | 69 | 0.479452 |
73fe2a5899c941b2d32b0b8d9732d7214d336fd2 | 236 | ex | Elixir | lib/phone/es/vi.ex | ajmath/phone | 0c7c7033ea93d028d3bd2a9e445d3aa93a6bc2fa | [
"Apache-2.0"
] | null | null | null | lib/phone/es/vi.ex | ajmath/phone | 0c7c7033ea93d028d3bd2a9e445d3aa93a6bc2fa | [
"Apache-2.0"
] | null | null | null | lib/phone/es/vi.ex | ajmath/phone | 0c7c7033ea93d028d3bd2a9e445d3aa93a6bc2fa | [
"Apache-2.0"
] | null | null | null | defmodule Phone.ES.VI do
@moduledoc false
use Helper.Area
def regex, do: ~r/^(34)(945|845)(.{6})/
def area_name, do: "Álava"
def area_type, do: "province"
def area_abbreviation, do: "VI"
matcher(["34945", "34845"])
end
| 18.153846 | 41 | 0.644068 |
73fe42b7cda42a34c7d8348bec12774809022a78 | 718 | exs | Elixir | mix.exs | craigspaeth/elixir-api | 6b89274ef90af6b1255c263321c31c3630b14944 | [
"MIT"
] | null | null | null | mix.exs | craigspaeth/elixir-api | 6b89274ef90af6b1255c263321c31c3630b14944 | [
"MIT"
] | null | null | null | mix.exs | craigspaeth/elixir-api | 6b89274ef90af6b1255c263321c31c3630b14944 | [
"MIT"
] | null | null | null | defmodule Myapp.Mixfile do
use Mix.Project
def project do
[app: :myapp,
version: "0.0.1",
elixir: "~> 1.1",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[ applications: [:maru] ]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:maru, "~> 0.8"},
{:mongo, "~> 0.5"}
]
end
end
| 19.944444 | 77 | 0.575209 |
73fe49493c0ae414ba44ed256ec41bbcaeb9e482 | 933 | ex | Elixir | lib/codes/codes_n87.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_n87.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_n87.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_N87 do
alias IcdCode.ICDCode
def _N870 do
%ICDCode{full_code: "N870",
category_code: "N87",
short_code: "0",
full_name: "Mild cervical dysplasia",
short_name: "Mild cervical dysplasia",
category_name: "Mild cervical dysplasia"
}
end
def _N871 do
%ICDCode{full_code: "N871",
category_code: "N87",
short_code: "1",
full_name: "Moderate cervical dysplasia",
short_name: "Moderate cervical dysplasia",
category_name: "Moderate cervical dysplasia"
}
end
def _N879 do
%ICDCode{full_code: "N879",
category_code: "N87",
short_code: "9",
full_name: "Dysplasia of cervix uteri, unspecified",
short_name: "Dysplasia of cervix uteri, unspecified",
category_name: "Dysplasia of cervix uteri, unspecified"
}
end
end
| 27.441176 | 65 | 0.609861 |
73fe5de781f44ab9d4db824d977ed0f8b492fcfc | 183 | ex | Elixir | lib/jason.ex | thedelchop/monetized | 6366b62738573516340e5dd5a46c4edec0c2f61b | [
"MIT"
] | null | null | null | lib/jason.ex | thedelchop/monetized | 6366b62738573516340e5dd5a46c4edec0c2f61b | [
"MIT"
] | null | null | null | lib/jason.ex | thedelchop/monetized | 6366b62738573516340e5dd5a46c4edec0c2f61b | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(Jason) do
defimpl Jason.Encoder, for: Monetized.Money do
def encode(%Monetized.Money{} = money, _options), do: Monetized.Money.to_string(money)
end
end
| 30.5 | 90 | 0.743169 |
73fe82ae62edb8255abe2ebdffa6ee5450cf03d3 | 497 | ex | Elixir | test/helpers/database_supervisor.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | test/helpers/database_supervisor.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | test/helpers/database_supervisor.ex | coladarci/geolix | 0a0508db410732fa8a24cbcd28e44f89b1b30afa | [
"Apache-2.0"
] | null | null | null | defmodule Geolix.TestHelpers.DatabaseSupervisor do
@moduledoc false
@doc """
Restarts the database supervisor.
"""
@spec restart() :: :ok
def restart do
:ok =
case Process.whereis(Geolix.Database.Supervisor) do
nil ->
:ok
_pid ->
Supervisor.stop(Geolix.Database.Supervisor, :normal)
:timer.sleep(50)
end
_ = Application.ensure_all_started(:geolix)
:ok = Geolix.reload_databases()
:timer.sleep(50)
end
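# Typical use from a test setup (a sketch):
#
#     setup do
#       :ok = Geolix.TestHelpers.DatabaseSupervisor.restart()
#     end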
end
| 19.88 | 62 | 0.62173 |
73feaa956e1b4c7ae72f56466cff0e90edcaac1e | 259 | ex | Elixir | lib/web_driver_client/w3c_wire_protocol_client/response.ex | fimassuda/web_driver_client | 09d373c9a8a923c5e2860f107f84b16565e338f7 | [
"MIT"
] | 8 | 2019-11-24T18:33:12.000Z | 2020-12-09T10:20:09.000Z | lib/web_driver_client/w3c_wire_protocol_client/response.ex | fimassuda/web_driver_client | 09d373c9a8a923c5e2860f107f84b16565e338f7 | [
"MIT"
] | 67 | 2019-12-20T16:33:30.000Z | 2021-09-14T03:50:10.000Z | lib/web_driver_client/w3c_wire_protocol_client/response.ex | fimassuda/web_driver_client | 09d373c9a8a923c5e2860f107f84b16565e338f7 | [
"MIT"
] | 10 | 2020-06-19T16:15:03.000Z | 2021-09-13T17:56:25.000Z | defmodule WebDriverClient.W3CWireProtocolClient.Response do
@moduledoc false
alias WebDriverClient.HTTPResponse
defstruct [:body, :http_response]
@type t :: %__MODULE__{
body: term,
http_response: HTTPResponse.t()
}
end
| 19.923077 | 59 | 0.69112 |
73feb71b553bd845fee00a323e732cd8879100de | 1,487 | ex | Elixir | apps/chex/lib/chex_web.ex | chrisfishwood/chex | 04d3446f3d25c8c3c0badd282b50abccc59cc41d | [
"MIT"
] | null | null | null | apps/chex/lib/chex_web.ex | chrisfishwood/chex | 04d3446f3d25c8c3c0badd282b50abccc59cc41d | [
"MIT"
] | null | null | null | apps/chex/lib/chex_web.ex | chrisfishwood/chex | 04d3446f3d25c8c3c0badd282b50abccc59cc41d | [
"MIT"
] | null | null | null | defmodule ChexWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use ChexWeb, :controller
use ChexWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: ChexWeb
import Plug.Conn
import ChexWeb.Router.Helpers
import ChexWeb.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "lib/chex_web/templates",
namespace: ChexWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
import ChexWeb.Router.Helpers
import ChexWeb.ErrorHelpers
import ChexWeb.Gettext
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import ChexWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.876923 | 69 | 0.68191 |
73fec3abf4430700c9254aa62b44a40e55272539 | 122 | exs | Elixir | test/httpdigest_test.exs | Hermanverschooten/httpdigest | 30369b9b40ef8600be527a1b3233ff8d5b2461e3 | [
"MIT"
] | null | null | null | test/httpdigest_test.exs | Hermanverschooten/httpdigest | 30369b9b40ef8600be527a1b3233ff8d5b2461e3 | [
"MIT"
] | null | null | null | test/httpdigest_test.exs | Hermanverschooten/httpdigest | 30369b9b40ef8600be527a1b3233ff8d5b2461e3 | [
"MIT"
] | null | null | null | defmodule HttpdigestTest do
use ExUnit.Case
doctest Httpdigest
test "the truth" do
assert 1 + 1 == 2
end
end
| 13.555556 | 27 | 0.688525 |
73fedcb04907adcaca5c2342c2cf6ff744556951 | 18,857 | ex | Elixir | lib/domo.ex | zetaron/Domo | 2159163378f1ad8dea5cbc31dea2ed827c9024ab | [
"MIT"
] | null | null | null | lib/domo.ex | zetaron/Domo | 2159163378f1ad8dea5cbc31dea2ed827c9024ab | [
"MIT"
] | null | null | null | lib/domo.ex | zetaron/Domo | 2159163378f1ad8dea5cbc31dea2ed827c9024ab | [
"MIT"
] | null | null | null | defmodule Domo do
@moduledoc Domo.Doc.readme_doc("<!-- Documentation -->")
@using_options Domo.Doc.readme_doc("<!-- using_options -->")
@new_raise_doc Domo.Doc.readme_doc("<!-- new!/1 -->")
@new_ok_doc Domo.Doc.readme_doc("<!-- new/2 -->")
@ensure_type_raise_doc Domo.Doc.readme_doc("<!-- ensure_type!/1 -->")
@ensure_type_ok_doc Domo.Doc.readme_doc("<!-- ensure_type/2 -->")
@typed_fields_doc Domo.Doc.readme_doc("<!-- typed_fields/1 -->")
@required_fields_doc Domo.Doc.readme_doc("<!-- required_fields/1 -->")
@callback new!() :: struct()
@doc @new_raise_doc
@callback new!(enumerable :: Enumerable.t()) :: struct()
@callback new() :: {:ok, struct()} | {:error, any()}
@callback new(enumerable :: Enumerable.t()) :: {:ok, struct()} | {:error, any()}
@doc @new_ok_doc
@callback new(enumerable :: Enumerable.t(), opts :: keyword()) :: {:ok, struct()} | {:error, any()}
@doc @ensure_type_raise_doc
@callback ensure_type!(struct :: struct()) :: struct()
@callback ensure_type(struct :: struct()) :: {:ok, struct()} | {:error, any()}
@doc @ensure_type_ok_doc
@callback ensure_type(struct :: struct(), opts :: keyword()) :: {:ok, struct()} | {:error, any()}
@callback typed_fields() :: [atom()]
@doc @typed_fields_doc
@callback typed_fields(opts :: keyword()) :: [atom()]
@callback required_fields() :: [atom()]
@doc @required_fields_doc
@callback required_fields(opts :: keyword()) :: [atom()]
@mix_project Application.compile_env(:domo, :mix_project, Mix.Project)
alias Domo.ErrorBuilder
alias Domo.CodeEvaluation
alias Domo.Raises
alias Domo.TypeEnsurerFactory
alias Domo.TypeEnsurerFactory.Error
alias Mix.Tasks.Compile.DomoCompiler, as: DomoMixTask
@doc """
Uses Domo in the current struct's module to add constructor, validation,
and reflection functions.
defmodule Model do
use Domo
defstruct [:first_field, :second_field]
@type t :: %__MODULE__{first_field: atom() | nil, second_field: any() | nil}
# have added:
# new!/1
# new/2
# ensure_type!/1
# ensure_type/2
# typed_fields/1
# required_fields/1
end
`use Domo` can be called only within a struct module that has a `t()` type
defined, because that type is used to generate `__MODULE__.TypeEnsurer`
with validation functions for each field in the definition.
See details about the `t()` type definition in the Elixir
[TypeSpecs](https://hexdocs.pm/elixir/typespecs.html) document.
The macro collects `t()` type definitions for the `:domo_compiler`, which
generates `TypeEnsurer` modules during the second pass of the compilation
of the project. The generated validation functions rely on guards appropriate
for the field types.
The generated code of each `TypeEnsurer` module can be found in the
`_build/MIX_ENV/domo_generated_code` folder. It is kept for information
purposes only; the next compilation overwrites any changes made there.
The macro adds the following functions to the current module, which act as a
facade for the generated `TypeEnsurer` module:
`new!/1`, `new/2`, `ensure_type!/1`, `ensure_type/2`, `typed_fields/1`,
`required_fields/1`.
## Options
#{@using_options}
"""
# credo:disable-for-lines:332
defmacro __using__(opts) do
Raises.raise_use_domo_out_of_module!(__CALLER__)
in_mix_compile? = CodeEvaluation.in_mix_compile?()
config = @mix_project.config()
if in_mix_compile? do
Raises.maybe_raise_absence_of_domo_compiler!(config, __CALLER__)
else
do_test_env_check =
case Application.fetch_env(:domo, :skip_test_env_check) do
{:ok, true} -> false
_ -> true
end
if do_test_env_check and CodeEvaluation.in_mix_test?() do
Raises.raise_cant_build_in_test_environment(__CALLER__.module)
end
# We consider to be in interactive mode
opts = [verbose?: Application.get_env(:domo, :verbose_in_iex, false)]
TypeEnsurerFactory.start_resolve_planner(:in_memory, :in_memory, opts)
end
maybe_build_type_ensurer_after_compile =
unless in_mix_compile? do
quote do
@after_compile {Domo, :_build_in_memory_type_ensurer}
end
end
global_anys =
if global_anys = Application.get_env(:domo, :remote_types_as_any) do
Raises.raise_incorrect_remote_types_as_any_format!(global_anys)
global_anys
end
local_anys =
if local_anys = Keyword.get(opts, :remote_types_as_any) do
Raises.raise_incorrect_remote_types_as_any_format!(local_anys)
Enum.map(local_anys, fn {module, types} -> {Macro.expand_once(module, __CALLER__), types} end)
end
plan_path =
if in_mix_compile? do
DomoMixTask.manifest_path(@mix_project, :plan)
else
:in_memory
end
unless is_nil(global_anys) and is_nil(local_anys) do
TypeEnsurerFactory.collect_types_to_treat_as_any(plan_path, __CALLER__.module, global_anys, local_anys)
end
global_new_func_name = Application.get_env(:domo, :name_of_new_function, :new)
new_ok_fun_name = Keyword.get(opts, :name_of_new_function, global_new_func_name)
new_raise_fun_name =
new_ok_fun_name
|> Atom.to_string()
|> List.wrap()
|> Enum.concat(["!"])
|> Enum.join()
|> String.to_atom()
long_module = TypeEnsurerFactory.module_name_string(__CALLER__.module)
short_module = long_module |> String.split(".") |> List.last()
type_ensurer = TypeEnsurerFactory.type_ensurer(__CALLER__.module)
quote do
Module.register_attribute(__MODULE__, :domo_options, accumulate: false)
Module.put_attribute(__MODULE__, :domo_options, unquote(opts))
Module.register_attribute(__MODULE__, :domo_plan_path, accumulate: false)
Module.put_attribute(__MODULE__, :domo_plan_path, unquote(plan_path))
@compile {:no_warn_undefined, unquote(type_ensurer)}
import Domo, only: [precond: 1]
@doc """
#{unquote(@new_raise_doc)}
## Examples
alias #{unquote(long_module)}
#{unquote(short_module)}.#{unquote(new_raise_fun_name)}(first_field: value1, second_field: value2, ...)
"""
def unquote(new_raise_fun_name)(enumerable \\ []) do
skip_ensurance? =
if CodeEvaluation.in_plan_collection?() do
Domo._plan_struct_integrity_ensurance(__MODULE__, enumerable)
true
else
false
end
struct = struct!(__MODULE__, enumerable)
unless skip_ensurance? do
{errors, t_precondition_error} = Domo._do_validate_fields(unquote(type_ensurer), struct, :pretty_error)
unless Enum.empty?(errors) do
Raises.raise_or_warn_values_should_have_expected_types(unquote(opts), __MODULE__, errors)
end
unless is_nil(t_precondition_error) do
Raises.raise_or_warn_struct_precondition_should_be_true(unquote(opts), t_precondition_error)
end
end
struct
end
@doc """
#{unquote(@new_ok_doc)}
## Examples
alias #{unquote(long_module)}
#{unquote(short_module)}.#{unquote(new_ok_fun_name)}(first_field: value1, second_field: value2, ...)
"""
def unquote(new_ok_fun_name)(enumerable \\ [], opts \\ []) do
skip_ensurance? =
if CodeEvaluation.in_plan_collection?() do
Domo._plan_struct_integrity_ensurance(__MODULE__, enumerable)
true
else
false
end
struct = struct(__MODULE__, enumerable)
if skip_ensurance? do
{:ok, struct}
else
{errors, t_precondition_error} = Domo._do_validate_fields(unquote(type_ensurer), struct, :pretty_error_by_key, opts)
cond do
not Enum.empty?(errors) -> {:error, errors}
not is_nil(t_precondition_error) -> {:error, [t_precondition_error]}
true -> {:ok, struct}
end
end
end
@doc """
#{unquote(@ensure_type_raise_doc)}
## Examples
alias #{unquote(long_module)}
struct = #{unquote(short_module)}.#{unquote(new_raise_fun_name)}(first_field: value1, second_field: value2, ...)
#{unquote(short_module)}.ensure_type!(%{struct | first_field: new_value})
struct
|> Map.put(:first_field, new_value1)
|> Map.put(:second_field, new_value2)
|> #{unquote(short_module)}.ensure_type!()
"""
def ensure_type!(struct) do
%name{} = struct
unless name == __MODULE__ do
Raises.raise_struct_should_be_passed(__MODULE__, instead_of: name)
end
skip_ensurance? =
if CodeEvaluation.in_plan_collection?() do
Domo._plan_struct_integrity_ensurance(__MODULE__, Map.from_struct(struct))
true
else
false
end
unless skip_ensurance? do
{errors, t_precondition_error} = Domo._do_validate_fields(unquote(type_ensurer), struct, :pretty_error)
unless Enum.empty?(errors) do
Raises.raise_or_warn_values_should_have_expected_types(unquote(opts), __MODULE__, errors)
end
unless is_nil(t_precondition_error) do
Raises.raise_or_warn_struct_precondition_should_be_true(unquote(opts), t_precondition_error)
end
end
struct
end
@doc """
#{unquote(@ensure_type_ok_doc)}
Options are the same as for `#{unquote(new_ok_fun_name)}/2`.
## Examples
alias #{unquote(long_module)}
struct = #{unquote(short_module)}.#{unquote(new_raise_fun_name)}(first_field: value1, second_field: value2, ...)
{:ok, _updated_struct} =
#{unquote(short_module)}.ensure_type(%{struct | first_field: new_value})
{:ok, _updated_struct} =
struct
|> Map.put(:first_field, new_value1)
|> Map.put(:second_field, new_value2)
|> #{unquote(short_module)}.ensure_type()
"""
def ensure_type(struct, opts \\ []) do
%name{} = struct
unless name == __MODULE__ do
Raises.raise_struct_should_be_passed(__MODULE__, instead_of: name)
end
skip_ensurance? =
if CodeEvaluation.in_plan_collection?() do
Domo._plan_struct_integrity_ensurance(__MODULE__, Map.from_struct(struct))
true
else
false
end
if skip_ensurance? do
{:ok, struct}
else
Domo._validate_fields_ok(unquote(type_ensurer), struct, opts)
end
end
@doc unquote(@typed_fields_doc)
def typed_fields(opts \\ []) do
field_kind =
cond do
opts[:include_any_typed] && opts[:include_meta] -> :typed_with_meta_with_any
opts[:include_meta] -> :typed_with_meta_no_any
opts[:include_any_typed] -> :typed_no_meta_with_any
true -> :typed_no_meta_no_any
end
unquote(type_ensurer).fields(field_kind)
end
@doc unquote(@required_fields_doc)
def required_fields(opts \\ []) do
field_kind = if opts[:include_meta], do: :required_with_meta, else: :required_no_meta
unquote(type_ensurer).fields(field_kind)
end
@before_compile {Raises, :raise_not_in_a_struct_module!}
@before_compile {Raises, :raise_no_type_t_defined!}
@before_compile {Domo, :_plan_struct_defaults_ensurance}
@after_compile {Domo, :_collect_types_for_domo_compiler}
unquote(maybe_build_type_ensurer_after_compile)
end
end
@doc false
def _plan_struct_defaults_ensurance(env) do
plan_path = Module.get_attribute(env.module, :domo_plan_path)
TypeEnsurerFactory.plan_struct_defaults_ensurance(plan_path, env)
end
@doc false
def _collect_types_for_domo_compiler(env, bytecode) do
plan_path = Module.get_attribute(env.module, :domo_plan_path)
TypeEnsurerFactory.collect_types_for_domo_compiler(plan_path, env, bytecode)
end
@doc false
def _build_in_memory_type_ensurer(env, bytecode) do
verbose? = Application.get_env(:domo, :verbose_in_iex, false)
TypeEnsurerFactory.register_in_memory_types(env.module, bytecode)
    # struct's types are collected with a separate _collect_types_for_domo_compiler call
TypeEnsurerFactory.maybe_collect_lib_structs_to_treat_as_any_to_existing_plan(:in_memory)
{:ok, plan, preconds} = TypeEnsurerFactory.get_plan_state(:in_memory)
    with {:ok, module_field_types, dependencies_by_module} <- TypeEnsurerFactory.resolve_plan(plan, preconds, verbose?),
         TypeEnsurerFactory.build_type_ensurers(module_field_types, verbose?),
:ok <- TypeEnsurerFactory.ensure_struct_defaults(plan, verbose?) do
{:ok, dependants} = TypeEnsurerFactory.get_dependants(:in_memory, env.module)
unless dependants == [] do
TypeEnsurerFactory.invalidate_type_ensurers(dependants)
Raises.warn_invalidated_type_ensurers(env.module, dependants)
end
TypeEnsurerFactory.register_dependants_from(:in_memory, dependencies_by_module)
TypeEnsurerFactory.clean_plan(:in_memory)
:ok
else
{:error, [%Error{message: {:no_types_registered, _} = error}]} -> Raises.raise_cant_find_type_in_memory(error)
{:error, {:batch_ensurer, _details} = message} -> Raises.raise_incorrect_defaults(message)
end
end
@doc false
def _plan_struct_integrity_ensurance(module, enumerable) do
plan_path = DomoMixTask.manifest_path(@mix_project, :plan)
TypeEnsurerFactory.plan_struct_integrity_ensurance(plan_path, module, enumerable)
end
@doc false
def _validate_fields_ok(type_ensurer, struct, opts) do
{errors, t_precondition_error} = Domo._do_validate_fields(type_ensurer, struct, :pretty_error_by_key, opts)
cond do
not Enum.empty?(errors) -> {:error, errors}
not is_nil(t_precondition_error) -> {:error, [t_precondition_error]}
true -> {:ok, struct}
end
end
def _do_validate_fields(type_ensurer, struct, err_fun, opts \\ []) do
maybe_filter_precond_errors = Keyword.get(opts, :maybe_filter_precond_errors, false)
maybe_bypass_precond_errors = Keyword.get(opts, :maybe_bypass_precond_errors, false)
typed_no_any_fields = type_ensurer.fields(:typed_with_meta_no_any)
errors =
Enum.reduce(typed_no_any_fields, [], fn field, errors ->
field_value = {field, Map.get(struct, field)}
case type_ensurer.ensure_field_type(field_value, opts) do
{:error, _} = error ->
[apply(ErrorBuilder, err_fun, [error, maybe_filter_precond_errors, maybe_bypass_precond_errors]) | errors]
_ ->
errors
end
end)
t_precondition_error =
if Enum.empty?(errors) do
case type_ensurer.t_precondition(struct) do
{:error, _} = error -> apply(ErrorBuilder, err_fun, [error, maybe_filter_precond_errors, maybe_bypass_precond_errors])
:ok -> nil
end
end
{errors, t_precondition_error}
end
@doc """
Defines a precondition function for a field's type or the struct's type.
  The `type_fun` argument is a one-element `[type: fun]` keyword list where
  `type` is the name of the type defined with the `@type` attribute
  and `fun` is a single-argument user-defined precondition function.
  The precondition function validates the value of the given type to match
  a specific format or to fulfil a set of invariants for the field's type
  or the struct's type, respectively.
  The macro should be called with a type in the same module where the `@type`
  definition is located. If that is not fulfilled, e.g. when the previously
  defined type has been renamed, the macro raises an `ArgumentError`.
defstruct [id: "I-000", amount: 0, limit: 15]
@type id :: String.t()
precond id: &validate_id/1
defp validate_id(id), do: match?(<<"I-", _::8*3>>, id)
@type t :: %__MODULE__{id: id(), amount: integer(), limit: integer()}
precond t: &validate_invariants/1
defp validate_invariants(s) do
cond do
s.amount >= s.limit ->
{:error, "Amount \#{s.amount} should be less then limit \#{s.limit}."}
true ->
:ok
end
end
  The `TypeEnsurer` module generated by Domo calls the precondition function
  with a value of the valid type. The precondition function should return one
  of the following values: `true | false | :ok | {:error, any()}`.
  For a `true` or `:ok` return value, `TypeEnsurer` finishes
  the validation of the field successfully.
  For a `false` return value, `TypeEnsurer` generates an error message
  referencing the failed precondition function. For an `{:error, message}`
  return value, it passes the `message` as one of the errors for the field value.
`message` can be of any shape.
  The macro adds the `__precond__/2` function to the current module that routes
  calls to the user-defined function. The added function should be called
  only by Domo modules.
Attaching a precondition function to the type via this macro can be helpful
to keep the same level of consistency across the domains modelled
with structs sharing the given type.
"""
defmacro precond([{type_name, {fn?, _, _} = fun}] = _type_fun)
when is_atom(type_name) and fn? in [:&, :fn] do
module = __CALLER__.module
unless Module.has_attribute?(module, :domo_precond) do
Module.register_attribute(module, :domo_precond, accumulate: true)
Module.put_attribute(module, :after_compile, {Domo, :_plan_precond_checks})
end
fun_as_string = Macro.to_string(fun) |> Code.format_string!() |> to_string()
precond_name_description = {type_name, fun_as_string}
Module.put_attribute(module, :domo_precond, precond_name_description)
quote do
def __precond__(unquote(type_name), value) do
unquote(fun).(value)
end
end
end
defmacro precond(_arg) do
Raises.raise_precond_arguments()
end
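  # Illustrative sketch only (the :age type and functions below are hypothetical,
  # not part of this module): given the contract above, a precondition can be
  # attached in the same module that defines the @type. `true`/`:ok` accept the
  # value, `false` produces a generic error, `{:error, message}` passes `message`.
  #
  #     @type age :: non_neg_integer()
  #     precond age: &(&1 <= 150)
  #
  #     # or with a custom error message:
  #     precond age: fn age ->
  #       if age <= 150, do: :ok, else: {:error, "age #{age} is out of range"}
  #     end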
@doc false
def _plan_precond_checks(env, bytecode) do
in_mix_compile? = CodeEvaluation.in_mix_compile?()
if in_mix_compile? do
config = @mix_project.config()
Raises.maybe_raise_absence_of_domo_compiler!(config, env)
end
plan_path =
if in_mix_compile? do
DomoMixTask.manifest_path(@mix_project, :plan)
else
:in_memory
end
TypeEnsurerFactory.plan_precond_checks(plan_path, env, bytecode)
end
@doc """
Checks whether the `TypeEnsurer` module exists for the given struct module.
Structs having `TypeEnsurer` can be validated with `Domo` generated callbacks.
"""
defdelegate has_type_ensurer?(struct_module), to: TypeEnsurerFactory
end
| 34.855823 | 128 | 0.676566 |
73fef35b46292a1cf9b133fcb20174134d3ea050 | 1,794 | exs | Elixir | config/dev.exs | zgohr/distillery-aws-example | 543daeae2d5b0d13a875414fea4f19b40ba68358 | [
"Apache-2.0"
] | null | null | null | config/dev.exs | zgohr/distillery-aws-example | 543daeae2d5b0d13a875414fea4f19b40ba68358 | [
"Apache-2.0"
] | null | null | null | config/dev.exs | zgohr/distillery-aws-example | 543daeae2d5b0d13a875414fea4f19b40ba68358 | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers for your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :distillery_example, ExampleWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [],
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/example_web/views/.*(ex)$},
~r{lib/example_web/controllers/.*(ex)$},
~r{lib/example_web/templates/.*(eex)$}
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace depth during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
{username, 0} = System.cmd("whoami", [])
config :distillery_example, Example.Repo,
username: String.trim(username),
database: "distillery_example_dev",
hostname: "localhost",
pool_size: 10
| 32.035714 | 170 | 0.707358 |
73fef6ce40d36ace36cd4d77c606d83ee0849cad | 44,456 | exs | Elixir | lib/elixir/test/elixir/kernel/errors_test.exs | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | 1 | 2020-01-14T18:44:56.000Z | 2020-01-14T18:44:56.000Z | lib/elixir/test/elixir/kernel/errors_test.exs | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/errors_test.exs | DmitryKakurin/elixir | a5df6a5a830d4cff8b7c8da54342b66cab999e0f | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.ErrorsTest do
use ExUnit.Case, async: true
import ExUnit.CaptureIO
defmacro hello do
quote location: :keep do
def hello, do: :world
end
end
test "no optional arguments in fn" do
assert_eval_raise CompileError,
"nofile:1: anonymous functions cannot have optional arguments",
'fn x \\\\ 1 -> x end'
assert_eval_raise CompileError,
"nofile:1: anonymous functions cannot have optional arguments",
'fn x, y \\\\ 1 -> x + y end'
end
test "invalid fn" do
assert_eval_raise SyntaxError,
"nofile:1: expected anonymous functions to be defined with -> inside: 'fn'",
'fn 1 end'
assert_eval_raise SyntaxError,
~r"nofile:2: unexpected operator ->. If you want to define multiple clauses, ",
'fn 1\n2 -> 3 end'
end
test "invalid token" do
assert_eval_raise SyntaxError,
"nofile:1: unexpected token: \"\u200B\" (column 7, code point U+200B)",
'[foo: \u200B]\noops'
end
test "reserved tokens" do
assert_eval_raise SyntaxError, "nofile:1: reserved token: __aliases__", '__aliases__'
assert_eval_raise SyntaxError, "nofile:1: reserved token: __block__", '__block__'
end
test "invalid alias terminator" do
assert_eval_raise SyntaxError, ~r"nofile:1: unexpected \( after alias Foo", 'Foo()'
end
test "invalid __CALLER__" do
assert_eval_raise CompileError,
"nofile:1: __CALLER__ is available only inside defmacro and defmacrop",
'defmodule Sample do def hello do __CALLER__ end end'
end
test "invalid __STACKTRACE__" do
assert_eval_raise CompileError,
"nofile:1: __STACKTRACE__ is available only inside catch and rescue clauses of try expressions",
'defmodule Sample do def hello do __STACKTRACE__ end end'
assert_eval_raise CompileError,
"nofile:1: __STACKTRACE__ is available only inside catch and rescue clauses of try expressions",
'defmodule Sample do try do raise "oops" rescue _ -> def hello do __STACKTRACE__ end end end'
end
test "invalid quoted token" do
assert_eval_raise SyntaxError,
"nofile:1: syntax error before: \"world\"",
'"hello" "world"'
assert_eval_raise SyntaxError,
"nofile:1: syntax error before: 'Foobar'",
'1 Foobar'
assert_eval_raise SyntaxError,
"nofile:1: syntax error before: foo",
'Foo.:foo'
assert_eval_raise SyntaxError,
"nofile:1: syntax error before: \"foo\"",
'Foo.:"foo\#{:bar}"'
assert_eval_raise SyntaxError,
"nofile:1: syntax error before: \"",
'Foo.:"\#{:bar}"'
end
test "invalid identifier" do
message = fn name ->
"nofile:1: invalid character \"@\" (code point U+0040) in identifier: #{name}"
end
assert_eval_raise SyntaxError, message.("foo@"), 'foo@'
assert_eval_raise SyntaxError, message.("foo@"), 'foo@ '
assert_eval_raise SyntaxError, message.("foo@bar"), 'foo@bar'
message = fn name ->
"nofile:1: invalid character \"@\" (code point U+0040) in alias: #{name}"
end
assert_eval_raise SyntaxError, message.("Foo@"), 'Foo@'
assert_eval_raise SyntaxError, message.("Foo@bar"), 'Foo@bar'
message = "nofile:1: invalid character \"!\" (code point U+0021) in alias: Foo!"
assert_eval_raise SyntaxError, message, 'Foo!'
message = "nofile:1: invalid character \"?\" (code point U+003F) in alias: Foo?"
assert_eval_raise SyntaxError, message, 'Foo?'
message =
"nofile:1: invalid character \"ó\" (code point U+00F3) in alias (only ASCII characters are allowed): Foó"
assert_eval_raise SyntaxError, message, 'Foó'
message = ~r"""
Elixir expects unquoted Unicode atoms, variables, and calls to be in NFC form.
Got:
"foó" \(code points 0x0066 0x006F 0x006F 0x0301\)
Expected:
"foó" \(code points 0x0066 0x006F 0x00F3\)
"""
assert_eval_raise SyntaxError, message, :unicode.characters_to_nfd_list("foó")
end
test "kw missing space" do
msg = "nofile:1: keyword argument must be followed by space after: foo:"
assert_eval_raise SyntaxError, msg, "foo:bar"
assert_eval_raise SyntaxError, msg, "foo:+"
assert_eval_raise SyntaxError, msg, "foo:+1"
end
test "invalid map start" do
assert_eval_raise SyntaxError,
"nofile:1: expected %{ to define a map, got: %[",
"{:ok, %[], %{}}"
end
test "sigil terminator" do
assert_eval_raise TokenMissingError,
"nofile:3: missing terminator: \" (for sigil ~r\" starting at line 1)",
'~r"foo\n\n'
assert_eval_raise TokenMissingError,
"nofile:3: missing terminator: } (for sigil ~r{ starting at line 1)",
'~r{foo\n\n'
end
test "dot terminator" do
assert_eval_raise TokenMissingError,
"nofile:1: missing terminator: \" (for function name starting at line 1)",
'foo."bar'
end
test "string terminator" do
assert_eval_raise TokenMissingError,
"nofile:1: missing terminator: \" (for string starting at line 1)",
'"bar'
end
test "heredoc start" do
assert_eval_raise SyntaxError,
"nofile:1: heredoc allows only zero or more whitespace characters followed by a new line after \"\"\"",
'"""bar\n"""'
end
test "heredoc with incomplete interpolation" do
assert_eval_raise TokenMissingError,
"nofile:2: missing interpolation terminator: \"}\" (for heredoc starting at line 1)",
'"""\n\#{\n"""'
end
test "heredoc terminator" do
assert_eval_raise TokenMissingError,
"nofile:2: missing terminator: \"\"\" (for heredoc starting at line 1)",
'"""\nbar'
assert_eval_raise SyntaxError,
"nofile:2: invalid location for heredoc terminator, please escape token or move it to its own line: \"\"\"",
'"""\nbar"""'
end
test "unexpected end" do
assert_eval_raise SyntaxError, "nofile:1: unexpected token: end", '1 end'
assert_eval_raise SyntaxError,
~r" HINT: it looks like the \"end\" on line 2 does not have a matching \"do\" defined before it",
'''
defmodule MyApp do
def one end
def two do end
end
'''
assert_eval_raise SyntaxError,
~r" HINT: it looks like the \"end\" on line 3 does not have a matching \"do\" defined before it",
'''
defmodule MyApp do
def one
end
def two do
end
end
'''
assert_eval_raise SyntaxError,
~r" HINT: it looks like the \"end\" on line 6 does not have a matching \"do\" defined before it",
'''
defmodule MyApp do
def one do
end
def two
end
end
'''
end
test "missing end" do
assert_eval_raise TokenMissingError,
"nofile:1: missing terminator: end (for \"do\" starting at line 1)",
'foo do 1'
assert_eval_raise TokenMissingError,
~r"HINT: it looks like the \"do\" on line 2 does not have a matching \"end\"",
'''
defmodule MyApp do
def one do
# end
def two do
end
end
'''
assert_eval_raise SyntaxError,
~r"HINT: it looks like the \"do\" on line 3 does not have a matching \"end\"",
'''
defmodule MyApp do
(
def one do
# end
def two do
end
)
end
'''
end
test "syntax error" do
assert_eval_raise SyntaxError,
"nofile:1: syntax error before: '.'",
'+.foo'
assert_eval_raise SyntaxError,
~r"nofile:1: syntax error before: after. \"after\" is a keyword",
'after = 1'
end
test "syntax error before sigil" do
msg = fn x -> "nofile:1: syntax error before: sigil ~s starting with content '#{x}'" end
assert_eval_raise SyntaxError, msg.("bar baz"), '~s(foo) ~s(bar baz)'
assert_eval_raise SyntaxError, msg.(""), '~s(foo) ~s()'
assert_eval_raise SyntaxError, msg.("bar "), '~s(foo) ~s(bar \#{:baz})'
assert_eval_raise SyntaxError, msg.(""), '~s(foo) ~s(\#{:bar} baz)'
end
test "op ambiguity" do
max = 1
assert max == 1
assert max(1, 2) == 2
end
test "syntax error with do" do
assert_eval_raise SyntaxError, ~r/nofile:1: unexpected token: do./, 'if true, do\n'
assert_eval_raise SyntaxError, ~r/nofile:1: unexpected keyword: do:./, 'if true do:\n'
end
test "syntax error on parens call" do
msg =
"nofile:1: unexpected parentheses. If you are making a function call, do not " <>
"insert spaces between the function name and the opening parentheses. " <>
"Syntax error before: '('"
assert_eval_raise SyntaxError, msg, 'foo (hello, world)'
end
test "syntax error on nested no parens call" do
msg = ~r"nofile:1: unexpected comma. Parentheses are required to solve ambiguity"
assert_eval_raise SyntaxError, msg, '[foo 1, 2]'
assert_eval_raise SyntaxError, msg, '[foo bar 1, 2]'
assert_eval_raise SyntaxError, msg, '[do: foo 1, 2]'
assert_eval_raise SyntaxError, msg, 'foo(do: bar 1, 2)'
assert_eval_raise SyntaxError, msg, '{foo 1, 2}'
assert_eval_raise SyntaxError, msg, '{foo bar 1, 2}'
assert_eval_raise SyntaxError, msg, 'foo 1, foo 2, 3'
assert_eval_raise SyntaxError, msg, 'foo 1, @bar 3, 4'
assert_eval_raise SyntaxError, msg, 'foo 1, 2 + bar 3, 4'
assert_eval_raise SyntaxError, msg, 'foo(1, foo 2, 3)'
assert is_list(List.flatten([1]))
assert is_list(Enum.reverse([3, 2, 1], [4, 5, 6]))
assert is_list(Enum.reverse([3, 2, 1], [4, 5, 6]))
assert false || is_list(Enum.reverse([3, 2, 1], [4, 5, 6]))
assert [List.flatten(List.flatten([1]))] == [[1]]
interpret = fn x -> Macro.to_string(Code.string_to_quoted!(x)) end
assert interpret.("f 1 + g h 2, 3") == "f(1 + g(h(2, 3)))"
assert interpret.("assert [] = TestRepo.all from p in Post, where: p.title in ^[]") ==
"assert([] = TestRepo.all(from(p in Post, where: p.title in ^[])))"
end
test "syntax error on atom dot alias" do
msg =
"nofile:1: atom cannot be followed by an alias. If the '.' was meant to be " <>
"part of the atom's name, the atom name must be quoted. Syntax error before: '.'"
assert_eval_raise SyntaxError, msg, ':foo.Bar'
assert_eval_raise SyntaxError, msg, ':"+".Bar'
end
test "syntax error with no token" do
assert_eval_raise TokenMissingError,
"nofile:1: missing terminator: ) (for \"(\" starting at line 1)",
'case 1 ('
end
test "clause with defaults" do
message = ~r"nofile:3: def hello/1 defines defaults multiple times"
assert_eval_raise CompileError,
message,
~C'''
defmodule Kernel.ErrorsTest.ClauseWithDefaults do
def hello(_arg \\ 0)
def hello(_arg \\ 1)
end
'''
assert_eval_raise CompileError,
message,
~C'''
defmodule Kernel.ErrorsTest.ClauseWithDefaults do
def hello(_arg \\ 0), do: nil
def hello(_arg \\ 1), do: nil
end
'''
assert_eval_raise CompileError,
message,
~C'''
defmodule Kernel.ErrorsTest.ClauseWithDefaults do
def hello(_arg \\ 0)
def hello(_arg \\ 1), do: nil
end
'''
assert_eval_raise CompileError,
message,
~C'''
defmodule Kernel.ErrorsTest.ClauseWithDefaults do
def hello(_arg \\ 0), do: nil
def hello(_arg \\ 1)
end
'''
assert_eval_raise CompileError,
~r"nofile:4: undefined function foo/0",
~C'''
defmodule Kernel.ErrorsTest.ClauseWithDefaults5 do
def hello(
foo,
bar \\ foo()
)
def hello(foo, bar), do: foo + bar
end
'''
end
test "different defs with defaults" do
assert_eval_raise CompileError, "nofile:3: def hello/3 defaults conflicts with hello/2", ~C'''
defmodule Kernel.ErrorsTest.DifferentDefsWithDefaults1 do
def hello(a, b \\ nil), do: a + b
def hello(a, b \\ nil, c \\ nil), do: a + b + c
end
'''
assert_eval_raise CompileError,
"nofile:3: def hello/2 conflicts with defaults from hello/3",
~C'''
defmodule Kernel.ErrorsTest.DifferentDefsWithDefaults2 do
def hello(a, b \\ nil, c \\ nil), do: a + b + c
def hello(a, b \\ nil), do: a + b
end
'''
end
test "undefined function" do
assert_eval_raise CompileError,
~r"hello.ex:4: undefined function bar/0",
'''
defmodule Kernel.ErrorsTest.BadForm do
@file "hello.ex"
def foo do
bar()
end
end
'''
assert capture_io(:stderr, fn ->
assert_eval_raise CompileError,
~r"nofile:3: undefined function bar/1",
'''
defmodule Kernel.ErrorsTest.BadForm do
def foo do
bar(
baz(1, 2)
)
end
end
'''
end) =~ "undefined function baz/2"
assert_eval_raise CompileError, ~r"nofile:8: undefined function baz/0", '''
defmodule Sample do
def foo do
bar()
end
defoverridable [foo: 0]
def foo do
baz()
end
end
'''
end
test "undefined non-local function" do
assert_eval_raise CompileError, "nofile:1: undefined function call/2", 'call foo, do: :foo'
end
test "function without definition" do
assert_eval_raise CompileError,
"nofile:2: implementation not provided for predefined def foo/0",
'''
                      defmodule Kernel.ErrorsTest.FunctionWithoutDefinition do
def foo
end
'''
end
test "guard without definition" do
assert_eval_raise CompileError,
"nofile:2: implementation not provided for predefined defmacro foo/1",
'''
                      defmodule Kernel.ErrorsTest.GuardWithoutDefinition do
defguard foo(bar)
end
'''
end
test "literal on map and struct" do
assert_eval_raise SyntaxError, "nofile:1: syntax error before: '}'", '%{:a}'
assert_eval_raise SyntaxError, "nofile:1: syntax error before: '}'", '%{{:a, :b}}'
assert_eval_raise SyntaxError, "nofile:1: syntax error before: '{'", '%{a, b}{a: :b}'
assert_eval_raise CompileError,
"nofile:1: expected key-value pairs in a map, got: put_in(foo.bar.baz, nil)",
'foo = 1; %{put_in(foo.bar.baz, nil), foo}'
end
test "struct fields on defstruct" do
assert_eval_raise ArgumentError, "struct field names must be atoms, got: 1", '''
defmodule Kernel.ErrorsTest.StructFieldsOnDefstruct do
defstruct [1, 2, 3]
end
'''
end
test "struct access on body" do
assert_eval_raise CompileError,
"nofile:3: cannot access struct Kernel.ErrorsTest.StructAccessOnBody, " <>
"the struct was not yet defined or the struct " <>
"is being accessed in the same context that defines it",
'''
defmodule Kernel.ErrorsTest.StructAccessOnBody do
defstruct %{name: "Brasilia"}
%Kernel.ErrorsTest.StructAccessOnBody{}
end
'''
end
test "struct errors" do
assert_eval_raise CompileError,
~r"nofile:1: BadStruct.__struct__/1 is undefined, cannot expand struct BadStruct",
'%BadStruct{}'
assert_eval_raise CompileError,
~r"nofile:1: BadStruct.__struct__/0 is undefined, cannot expand struct BadStruct",
'%BadStruct{} = %{}'
bad_struct_type_error =
~r"expected Kernel.ErrorsTest.BadStructType.__struct__/(0|1) to return a map.*, got: :invalid"
defmodule BadStructType do
def __struct__, do: :invalid
def __struct__(_), do: :invalid
assert_raise CompileError, bad_struct_type_error, fn ->
Macro.struct!(__MODULE__, __ENV__)
end
end
assert_eval_raise CompileError,
bad_struct_type_error,
'%#{BadStructType}{} = %{}'
assert_eval_raise CompileError,
bad_struct_type_error,
'%#{BadStructType}{}'
assert_raise ArgumentError, bad_struct_type_error, fn ->
struct(BadStructType)
end
assert_raise ArgumentError, bad_struct_type_error, fn ->
struct(BadStructType, foo: 1)
end
missing_struct_key_error =
~r"expected Kernel.ErrorsTest.MissingStructKey.__struct__/(0|1) to return a map.*, got: %\{\}"
defmodule MissingStructKey do
def __struct__, do: %{}
def __struct__(_), do: %{}
assert_raise CompileError, missing_struct_key_error, fn ->
Macro.struct!(__MODULE__, __ENV__)
end
end
assert_eval_raise CompileError,
missing_struct_key_error,
'%#{MissingStructKey}{} = %{}'
assert_eval_raise CompileError,
missing_struct_key_error,
'%#{MissingStructKey}{}'
assert_raise ArgumentError, missing_struct_key_error, fn ->
struct(MissingStructKey)
end
assert_raise ArgumentError, missing_struct_key_error, fn ->
struct(MissingStructKey, foo: 1)
end
invalid_struct_key_error =
~r"expected Kernel.ErrorsTest.InvalidStructKey.__struct__/(0|1) to return a map.*, got: %\{__struct__: 1\}"
defmodule InvalidStructKey do
def __struct__, do: %{__struct__: 1}
def __struct__(_), do: %{__struct__: 1}
assert_raise CompileError, invalid_struct_key_error, fn ->
Macro.struct!(__MODULE__, __ENV__)
end
end
assert_eval_raise CompileError,
invalid_struct_key_error,
'%#{InvalidStructKey}{} = %{}'
assert_eval_raise CompileError,
invalid_struct_key_error,
'%#{InvalidStructKey}{}'
assert_raise ArgumentError, invalid_struct_key_error, fn ->
struct(InvalidStructKey)
end
assert_raise ArgumentError, invalid_struct_key_error, fn ->
struct(InvalidStructKey, foo: 1)
end
invalid_struct_name_error =
~r"expected struct name returned by Kernel.ErrorsTest.InvalidStructName.__struct__/(0|1) to be Kernel.ErrorsTest.InvalidStructName, got: InvalidName"
defmodule InvalidStructName do
def __struct__, do: %{__struct__: InvalidName}
def __struct__(_), do: %{__struct__: InvalidName}
assert_raise CompileError, invalid_struct_name_error, fn ->
Macro.struct!(__MODULE__, __ENV__)
end
end
assert_eval_raise CompileError,
invalid_struct_name_error,
'%#{InvalidStructName}{} = %{}'
assert_eval_raise CompileError,
invalid_struct_name_error,
'%#{InvalidStructName}{}'
assert_raise ArgumentError, invalid_struct_name_error, fn ->
struct(InvalidStructName)
end
assert_raise ArgumentError, invalid_struct_name_error, fn ->
struct(InvalidStructName, foo: 1)
end
defmodule GoodStruct do
defstruct name: "john"
end
assert_eval_raise KeyError,
"key :age not found",
'%#{GoodStruct}{age: 27}'
assert_eval_raise CompileError,
"nofile:1: unknown key :age for struct Kernel.ErrorsTest.GoodStruct",
'%#{GoodStruct}{age: 27} = %{}'
end
test "name for defmodule" do
assert_eval_raise CompileError, "nofile:1: invalid module name: 3", 'defmodule 1 + 2, do: 3'
end
test "invalid unquote" do
assert_eval_raise CompileError, "nofile:1: unquote called outside quote", 'unquote 1'
end
test "invalid unquote splicing in oneliners" do
assert_eval_raise ArgumentError,
"unquote_splicing only works inside arguments and block contexts, " <>
"wrap it in parens if you want it to work with one-liners",
'''
defmodule Kernel.ErrorsTest.InvalidUnquoteSplicingInOneliners do
defmacro oneliner2 do
quote do: unquote_splicing 1
end
def callme do
oneliner2
end
end
'''
end
test "invalid attribute" do
msg = ~r"cannot inject attribute @foo into function/macro because cannot escape "
assert_raise ArgumentError, msg, fn ->
defmodule InvalidAttribute do
@foo fn -> nil end
def bar, do: @foo
end
end
end
test "typespec attributes set via Module.put_attribute/4" do
message =
"attributes type, typep, opaque, spec, callback, and macrocallback " <>
"must be set directly via the @ notation"
for kind <- [:type, :typep, :opaque, :spec, :callback, :macrocallback] do
assert_eval_raise ArgumentError,
message,
"""
defmodule PutTypespecAttribute do
Module.put_attribute(__MODULE__, #{inspect(kind)}, {})
end
"""
end
end
test "invalid struct field value" do
msg = ~r"invalid value for struct field baz, cannot escape "
assert_raise ArgumentError, msg, fn ->
defmodule InvalidStructFieldValue do
defstruct baz: fn -> nil end
end
end
end
test "match attribute in module" do
msg = "invalid write attribute syntax, you probably meant to use: @foo expression"
assert_raise ArgumentError, msg, fn ->
defmodule MatchAttributeInModule do
@foo = 42
end
end
end
test "invalid case clauses" do
assert_eval_raise CompileError,
"nofile:1: expected one argument for :do clauses (->) in \"case\"",
'case nil do 0, z when not is_nil(z) -> z end'
end
test "invalid fn args" do
assert_eval_raise TokenMissingError,
"nofile:1: missing terminator: end (for \"fn\" starting at line 1)",
'fn 1'
end
test "invalid escape" do
assert_eval_raise TokenMissingError, "nofile:1: invalid escape \\ at end of file", '1 \\'
end
test "function local conflict" do
assert_eval_raise CompileError,
"nofile:3: imported Kernel.&&/2 conflicts with local function",
'''
defmodule Kernel.ErrorsTest.FunctionLocalConflict do
def other, do: 1 && 2
def _ && _, do: :error
end
'''
end
test "macro local conflict" do
assert_eval_raise CompileError,
"nofile:6: call to local macro &&/2 conflicts with imported Kernel.&&/2, " <>
"please rename the local macro or remove the conflicting import",
'''
defmodule Kernel.ErrorsTest.MacroLocalConflict do
def hello, do: 1 || 2
defmacro _ || _, do: :ok
defmacro _ && _, do: :error
def world, do: 1 && 2
end
'''
end
test "macro with undefined local" do
assert_eval_raise UndefinedFunctionError,
"function Kernel.ErrorsTest.MacroWithUndefinedLocal.unknown/1" <>
" is undefined (function not available)",
'''
defmodule Kernel.ErrorsTest.MacroWithUndefinedLocal do
defmacrop bar, do: unknown(1)
def baz, do: bar()
end
'''
end
test "private macro" do
assert_eval_raise UndefinedFunctionError,
"function Kernel.ErrorsTest.PrivateMacro.foo/0 is undefined (function not available)",
'''
defmodule Kernel.ErrorsTest.PrivateMacro do
defmacrop foo, do: 1
defmacro bar, do: __MODULE__.foo()
defmacro baz, do: bar()
end
'''
end
test "macro invoked before its definition" do
assert_eval_raise CompileError,
~r"nofile:2: cannot invoke macro bar/0 before its definition",
'''
defmodule Kernel.ErrorsTest.IncorrectMacroDispatch do
def foo, do: bar()
defmacro bar, do: :bar
end
'''
assert_eval_raise CompileError,
~r"nofile:2: cannot invoke macro bar/0 before its definition",
'''
defmodule Kernel.ErrorsTest.IncorrectMacropDispatch do
def foo, do: bar()
defmacrop bar, do: :ok
end
'''
assert_eval_raise CompileError,
~r"nofile:2: cannot invoke macro bar/1 before its definition",
'''
defmodule Kernel.ErrorsTest.IncorrectMacroDispatch do
defmacro bar(a) when is_atom(a), do: bar([a])
end
'''
end
test "macro captured before its definition" do
assert_eval_raise CompileError,
~r"nofile:3: cannot invoke macro is_ok/1 before its definition",
'''
defmodule Kernel.ErrorsTest.IncorrectMacroDispatch.Capture do
def foo do
predicate = &is_ok/1
Enum.any?([:ok, :error, :foo], predicate)
end
defmacro is_ok(atom), do: atom == :ok
end
'''
end
test "function definition with alias" do
assert_eval_raise CompileError,
"nofile:2: function names should start with lowercase characters or underscore, invalid name Bar",
'''
defmodule Kernel.ErrorsTest.FunctionDefinitionWithAlias do
def Bar do
:baz
end
end
'''
end
test "function import conflict" do
assert_eval_raise CompileError,
"nofile:3: function exit/1 imported from both :erlang and Kernel, call is ambiguous",
'''
defmodule Kernel.ErrorsTest.FunctionImportConflict do
import :erlang, warn: false
def foo, do: exit(:test)
end
'''
end
test "duplicated function on import options" do
assert_eval_raise CompileError,
"nofile:2: invalid :only option for import, flatten/1 is duplicated",
'''
defmodule Kernel.ErrorsTest.DuplicatedFunctionOnImportOnly do
import List, only: [flatten: 1, keyfind: 4, flatten: 1]
end
'''
assert_eval_raise CompileError,
"nofile:2: invalid :except option for import, flatten/1 is duplicated",
'''
defmodule Kernel.ErrorsTest.DuplicatedFunctionOnImportExcept do
import List, except: [flatten: 1, keyfind: 4, flatten: 1]
end
'''
end
test "ensure valid import :only option" do
assert_eval_raise CompileError,
"nofile:3: invalid :only option for import, expected value to be an atom " <>
":functions, :macros, or a list literal, got: x",
'''
defmodule Kernel.ErrorsTest.Only do
x = [flatten: 1]
import List, only: x
end
'''
end
test "ensure valid import :except option" do
assert_eval_raise CompileError,
"nofile:3: invalid :except option for import, expected value to be an atom " <>
":functions, :macros, or a list literal, got: " <>
"Module.__get_attribute__(Kernel.ErrorsTest.Only, :x, 3)",
'''
defmodule Kernel.ErrorsTest.Only do
@x [flatten: 1]
import List, except: @x
end
'''
end
test "unrequired macro" do
assert_eval_raise CompileError,
"nofile:2: you must require Kernel.ErrorsTest before invoking " <>
"the macro Kernel.ErrorsTest.hello/0",
'''
defmodule Kernel.ErrorsTest.UnrequiredMacro do
Kernel.ErrorsTest.hello()
end
'''
end
test "def defmacro clause change" do
assert_eval_raise CompileError, "nofile:3: defmacro foo/1 already defined as def", '''
defmodule Kernel.ErrorsTest.DefDefmacroClauseChange do
def foo(1), do: 1
defmacro foo(x), do: x
end
'''
end
test "def defp clause change from another file" do
assert_eval_raise CompileError, ~r"nofile:4: def hello/0 already defined as defp", '''
defmodule Kernel.ErrorsTest.DefDefmacroClauseChange do
require Kernel.ErrorsTest
defp hello, do: :world
Kernel.ErrorsTest.hello()
end
'''
end
test "internal function overridden" do
assert_eval_raise CompileError,
"nofile:2: cannot define def __info__/1 as it is automatically defined by Elixir",
'''
defmodule Kernel.ErrorsTest.InternalFunctionOverridden do
def __info__(_), do: []
end
'''
end
test "no macros" do
assert_eval_raise CompileError, "nofile:2: could not load macros from module :lists", '''
defmodule Kernel.ErrorsTest.NoMacros do
import :lists, only: :macros
end
'''
end
test "invalid macro" do
assert_eval_raise CompileError,
~r"nofile: invalid quoted expression: {:foo, :bar, :baz, :bat}",
'''
defmodule Kernel.ErrorsTest.InvalidMacro do
defmacrop oops do
{:foo, :bar, :baz, :bat}
end
def test, do: oops()
end
'''
end
test "unloaded module" do
assert_eval_raise CompileError,
"nofile:1: module Certainly.Doesnt.Exist is not loaded and could not be found",
'import Certainly.Doesnt.Exist'
end
test "module imported from the context it was defined in" do
assert_eval_raise CompileError,
~r"nofile:4: module Kernel.ErrorsTest.ScheduledModule.Hygiene is not loaded but was defined.",
'''
defmodule Kernel.ErrorsTest.ScheduledModule do
defmodule Hygiene do
end
import Kernel.ErrorsTest.ScheduledModule.Hygiene
end
'''
end
test "module imported from the same module" do
assert_eval_raise CompileError,
~r"nofile:3: you are trying to use the module Kernel.ErrorsTest.ScheduledModule.Hygiene which is currently being defined",
'''
defmodule Kernel.ErrorsTest.ScheduledModule do
defmodule Hygiene do
import Kernel.ErrorsTest.ScheduledModule.Hygiene
end
end
'''
end
test "already compiled module" do
assert_eval_raise ArgumentError,
"could not call Module.eval_quoted/4 because the module Record is already compiled",
'Module.eval_quoted Record, quote(do: 1), [], file: __ENV__.file'
end
test "@compile inline with undefined function" do
assert_eval_raise CompileError,
"nofile:1: inlined function foo/1 undefined",
'defmodule Test do @compile {:inline, foo: 1} end'
end
test "@on_load attribute format" do
assert_raise ArgumentError, ~r/should be an atom or a {atom, 0} tuple/, fn ->
defmodule BadOnLoadAttribute do
Module.put_attribute(__MODULE__, :on_load, "not an atom")
end
end
end
test "duplicated @on_load attribute" do
assert_raise ArgumentError, "the @on_load attribute can only be set once per module", fn ->
defmodule DuplicatedOnLoadAttribute do
@on_load :foo
@on_load :bar
end
end
end
test "@on_load attribute with undefined function" do
assert_eval_raise CompileError,
"nofile:1: @on_load function foo/0 is undefined",
'defmodule UndefinedOnLoadFunction do @on_load :foo end'
end
test "wrong kind for @on_load attribute" do
assert_eval_raise CompileError,
"nofile:1: expected @on_load function foo/0 to be defined as \"def\", " <>
"got \"defp\"",
'''
defmodule PrivateOnLoadFunction do
@on_load :foo
defp foo do
:ok
end
# To avoid warning: function foo/0 is unused
def bar do
foo()
end
end
'''
end
test "interpolation error" do
assert_eval_raise SyntaxError,
"nofile:1: unexpected token: ). The \"do\" at line 1 is missing terminator \"end\"",
'"foo\#{case 1 do )}bar"'
end
test "in definition module" do
assert_eval_raise CompileError,
"nofile:2: cannot define module Kernel.ErrorsTest.InDefinitionModule " <>
"because it is currently being defined in nofile:1",
'''
defmodule Kernel.ErrorsTest.InDefinitionModule do
defmodule Elixir.Kernel.ErrorsTest.InDefinitionModule, do: true
end
'''
end
test "invalid definition" do
assert_eval_raise CompileError,
"nofile:1: invalid syntax in def 1.(hello)",
'defmodule Kernel.ErrorsTest.InvalidDefinition, do: (def 1.(hello), do: true)'
end
test "invalid size in bitstrings" do
assert_eval_raise CompileError,
"nofile:1: cannot use ^x outside of match clauses",
'x = 8; <<a, b::size(^x)>> = <<?a, ?b>>'
end
test "end of expression" do
# All valid examples
Code.eval_quoted('''
1;
2;
3
(;)
(;1)
(1;)
(1; 2)
fn -> 1; 2 end
fn -> ; end
if true do
;
end
try do
;
catch
_, _ -> ;
after
;
end
''')
# All invalid examples
assert_eval_raise SyntaxError, "nofile:1: syntax error before: ';'", '1+;\n2'
assert_eval_raise SyntaxError, "nofile:1: syntax error before: ';'", 'max(1, ;2)'
end
test "new line error" do
assert_eval_raise SyntaxError,
"nofile:3: unexpectedly reached end of line. The current expression is invalid or incomplete",
'if true do\n foo = [],\n baz\nend'
end
test "characters literal are printed correctly in syntax errors" do
assert_eval_raise SyntaxError, "nofile:1: syntax error before: ?a", ':ok ?a'
assert_eval_raise SyntaxError, "nofile:1: syntax error before: ?\\s", ':ok ?\\s'
assert_eval_raise SyntaxError, "nofile:1: syntax error before: ?す", ':ok ?す'
end
test "numbers are printed correctly in syntax errors" do
assert_eval_raise SyntaxError, "nofile:1: syntax error before: \"12\"", ':ok 12'
assert_eval_raise SyntaxError, "nofile:1: syntax error before: \"0b1\"", ':ok 0b1'
assert_eval_raise SyntaxError, "nofile:1: syntax error before: \"12.3\"", ':ok 12.3'
end
test "invalid \"fn do expr end\"" do
assert_eval_raise SyntaxError,
"nofile:1: unexpected token: do. Anonymous functions are written as:\n\n fn pattern -> expression end",
'fn do :ok end'
end
test "bodyless function with guard" do
assert_eval_raise CompileError, "nofile:2: missing :do option in \"def\"", '''
    defmodule Kernel.ErrorsTest.BodylessFunctionWithGuard do
def foo(n) when is_number(n)
end
'''
assert_eval_raise CompileError, "nofile:2: missing :do option in \"def\"", '''
    defmodule Kernel.ErrorsTest.BodylessFunctionWithGuard do
def foo(n) when is_number(n), true
end
'''
end
test "invalid args for function head" do
assert_eval_raise CompileError,
~r"nofile:2: only variables and \\\\ are allowed as arguments in function head.",
'''
defmodule Kernel.ErrorsTest.InvalidArgsForBodylessClause do
def foo(nil)
def foo(_), do: :ok
end
'''
end
test "bad multi-call" do
assert_eval_raise CompileError,
"nofile:1: invalid argument for alias, expected a compile time atom or alias, got: 42",
'alias IO.{ANSI, 42}'
assert_eval_raise CompileError,
"nofile:1: :as option is not supported by multi-alias call",
'alias Elixir.{Map}, as: Dict'
assert_eval_raise UndefinedFunctionError,
"function List.\"{}\"/1 is undefined or private",
'[List.{Chars}, "one"]'
end
test "macros error stacktrace" do
assert [
{:erlang, :+, [1, :foo], _},
{Kernel.ErrorsTest.MacrosErrorStacktrace, :sample, 1, _} | _
] =
rescue_stacktrace("""
defmodule Kernel.ErrorsTest.MacrosErrorStacktrace do
defmacro sample(num), do: num + :foo
def other, do: sample(1)
end
""")
end
test "macros function clause stacktrace" do
assert [{__MODULE__, :sample, 1, _} | _] =
rescue_stacktrace("""
defmodule Kernel.ErrorsTest.MacrosFunctionClauseStacktrace do
import Kernel.ErrorsTest
sample(1)
end
""")
end
test "macros interpreted function clause stacktrace" do
assert [{Kernel.ErrorsTest.MacrosInterpretedFunctionClauseStacktrace, :sample, 1, _} | _] =
rescue_stacktrace("""
defmodule Kernel.ErrorsTest.MacrosInterpretedFunctionClauseStacktrace do
defmacro sample(0), do: 0
def other, do: sample(1)
end
""")
end
test "macros compiled callback" do
assert [{Kernel.ErrorsTest, :__before_compile__, [env], _} | _] =
rescue_stacktrace("""
defmodule Kernel.ErrorsTest.MacrosCompiledCallback do
Module.put_attribute(__MODULE__, :before_compile, Kernel.ErrorsTest)
end
""")
assert %Macro.Env{module: Kernel.ErrorsTest.MacrosCompiledCallback} = env
end
test "failed remote call stacktrace includes file/line info" do
try do
bad_remote_call(1)
rescue
ArgumentError ->
assert [
{:erlang, :apply, [1, :foo, []], []},
{__MODULE__, :bad_remote_call, 1, [file: _, line: _]} | _
] = __STACKTRACE__
end
end
test "def fails when rescue, else or catch don't have clauses" do
assert_eval_raise CompileError, ~r"expected -> clauses for :rescue in \"def\"", """
defmodule Example do
def foo do
bar()
rescue
baz()
end
end
"""
end
test "duplicate map keys" do
assert_eval_raise CompileError, "nofile:1: key :a will be overridden in map", """
%{a: :b, a: :c}
"""
assert_eval_raise CompileError, "nofile:1: key :a will be overridden in map", """
%{a: :b, a: :c} = %{a: :c}
"""
assert_eval_raise CompileError, "nofile:1: key :m will be overridden in map", """
%{m: :n, m: :o, m: :p}
"""
assert_eval_raise CompileError, "nofile:1: key 1 will be overridden in map", """
%{1 => 2, 1 => 3}
"""
assert map_size(%{System.unique_integer() => 1, System.unique_integer() => 2}) == 2
end
defp bad_remote_call(x), do: x.foo
defmacro sample(0), do: 0
defmacro before_compile(_) do
quote(do: _)
end
## Helpers
defp assert_eval_raise(given_exception, given_message, string) do
assert_raise given_exception, given_message, fn ->
Code.eval_string(string)
end
end
defp rescue_stacktrace(string) do
try do
Code.eval_string(string)
nil
rescue
_ -> __STACKTRACE__
else
_ -> flunk("Expected expression to fail")
end
end
end
| 34.867451 | 155 | 0.536643 |
73ff2bcde5470e8b80e2fc5105697268e3b124d2 | 1,215 | ex | Elixir | lib/groupher_server/delivery/delegates/postman.ex | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | lib/groupher_server/delivery/delegates/postman.ex | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | lib/groupher_server/delivery/delegates/postman.ex | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Delivery.Delegate.Postman do
@moduledoc """
  Delegates delivery of mentions and notifications to their handler modules.
"""
alias GroupherServer.Delivery.Delegate.{Mention, Notification}
def send(:mention, artiment, mentions, from_user) do
Mention.handle(artiment, mentions, from_user)
end
def send(:notify, attrs, from_user), do: Notification.handle(attrs, from_user)
def revoke(:notify, attrs, from_user), do: Notification.revoke(attrs, from_user)
def fetch(:mention, user, filter), do: Mention.paged_mentions(user, filter)
def fetch(:notification, user, filter), do: Notification.paged_notifications(user, filter)
def unread_count(:mention, user_id), do: Mention.unread_count(user_id)
def unread_count(:notification, user_id), do: Notification.unread_count(user_id)
def mark_read(:mention, ids, user), do: Mention.mark_read(ids, user)
def mark_read(:notification, ids, user), do: Notification.mark_read(ids, user)
def mark_read_all(:mention, user), do: Mention.mark_read_all(user)
def mark_read_all(:notification, user), do: Notification.mark_read_all(user)
# def send(_, _, _), do: {:error, "delivery, not such service"}
# def send(_, _, _, _), do: {:error, "delivery, not such service"}
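  # Illustrative usage only (the argument values are hypothetical and must be
  # shaped as the Mention/Notification delegates expect):
  #
  #     Postman.send(:mention, artiment, mentions, from_user)
  #     Postman.fetch(:notification, user, filter)
  #     Postman.mark_read_all(:mention, user)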
end
| 40.5 | 92 | 0.740741 |
73ff41bb3c1d919526037268843b98fbf41b5434 | 217 | ex | Elixir | lib/graphql_with_absinthe_on_phoenix/graphql/mutations.ex | wbotelhos/graphql-with-absinthe-on-phoenix | f831d4c78bbba821ef2dd6deee5e3640aff864a5 | [
"MIT"
] | 1 | 2021-08-03T20:30:49.000Z | 2021-08-03T20:30:49.000Z | lib/graphql_with_absinthe_on_phoenix/graphql/mutations.ex | wbotelhos/graphql-with-absinthe-on-phoenix | f831d4c78bbba821ef2dd6deee5e3640aff864a5 | [
"MIT"
] | null | null | null | lib/graphql_with_absinthe_on_phoenix/graphql/mutations.ex | wbotelhos/graphql-with-absinthe-on-phoenix | f831d4c78bbba821ef2dd6deee5e3640aff864a5 | [
"MIT"
] | null | null | null | defmodule GraphqlWithAbsintheOnPhoenix.GraphQL.Mutations do
use Absinthe.Schema.Notation
alias GraphqlWithAbsintheOnPhoenix.GraphQL.Mutations
import_types(Mutations.Book)
import_types(Mutations.Session)
end
| 24.111111 | 59 | 0.852535 |
73ff654d8edc8e7c8dcde1b71f4744d5b4a8e404 | 903 | ex | Elixir | priv/catalogue/breadcrumb/example01.ex | dclausen/surface_bootstrap | f282b7c653160fb4b4ca687cf8fb13e68937884f | [
"MIT"
] | 17 | 2021-02-18T20:10:52.000Z | 2021-12-30T14:41:00.000Z | priv/catalogue/breadcrumb/example01.ex | dclausen/surface_bootstrap | f282b7c653160fb4b4ca687cf8fb13e68937884f | [
"MIT"
] | 9 | 2021-03-21T20:15:48.000Z | 2021-06-23T07:45:20.000Z | priv/catalogue/breadcrumb/example01.ex | dclausen/surface_bootstrap | f282b7c653160fb4b4ca687cf8fb13e68937884f | [
"MIT"
] | 10 | 2021-03-21T20:10:37.000Z | 2021-12-28T00:06:06.000Z | defmodule SurfaceBootstrap.Catalogue.Breadcrumb.Example01 do
use Surface.Catalogue.Example,
subject: SurfaceBootstrap.Breadcrumb,
catalogue: SurfaceBootstrap.Catalogue,
title: "As proplist",
height: "90px",
direction: "vertical"
@moduledoc """
Done in assigns:
```
breadcrumbs = [
%{
url: "",
text: "home"
},
%{
url: "#users",
text: "users"
},
%{
url: "#users/1234",
text: "John Doe",
active: true
}
]
```
"""
def render(assigns) do
breadcrumbs = [
%{
url: "",
text: "home"
},
%{
url: "#users",
text: "users"
},
%{
url: "#/users/1234",
text: "John Doe",
active: true
}
]
~F"""
<Breadcrumb link_type="link" breadcrumbs={breadcrumbs} />
"""
end
end
| 17.705882 | 61 | 0.471761 |
73ff6f9707791152c05bfe11958ac36f7a99eb2b | 847 | ex | Elixir | apps/admin_api/lib/admin_api/router.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/admin_api/lib/admin_api/router.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/admin_api/lib/admin_api/router.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule AdminAPI.Router do
use AdminAPI, :router
use EWallet.Web.APIDocs, scope: "/api/admin"
alias AdminAPI.{StatusController, VersionedRouter}
scope "/api/admin" do
get("/", StatusController, :status)
forward("/", VersionedRouter)
end
end
| 33.88 | 74 | 0.747344 |
73ff879ed5287c50058c20d67a900736c00f15ea | 7,391 | ex | Elixir | lib/glimesh/accounts/user.ex | FROSADO/glimesh.tv | d395e89bcf8ab33ccb723ad603c0a6fb6d4ca139 | [
"MIT"
] | null | null | null | lib/glimesh/accounts/user.ex | FROSADO/glimesh.tv | d395e89bcf8ab33ccb723ad603c0a6fb6d4ca139 | [
"MIT"
] | null | null | null | lib/glimesh/accounts/user.ex | FROSADO/glimesh.tv | d395e89bcf8ab33ccb723ad603c0a6fb6d4ca139 | [
"MIT"
] | null | null | null | defmodule Glimesh.Accounts.User do
use Ecto.Schema
use Waffle.Ecto.Schema
import Ecto.Changeset
@derive {Inspect, except: [:password]}
schema "users" do
field :username, :string
field :displayname, :string
field :email, :string
field :password, :string, virtual: true
field :hashed_password, :string
field :confirmed_at, :naive_datetime
field :can_stream, :boolean, default: false
field :is_admin, :boolean, default: false
field :avatar, Glimesh.Avatar.Type
field :social_twitter, :string
field :social_youtube, :string
field :social_instagram, :string
field :social_discord, :string
field :stripe_user_id, :string
field :stripe_customer_id, :string
field :stripe_payment_method, :string
field :youtube_intro_url, :string
field :profile_content_md, :string
field :profile_content_html, :string
timestamps()
end
@doc """
A user changeset for registration.
It is important to validate the length of both e-mail and password.
Otherwise databases may truncate the e-mail without warnings, which
could lead to unpredictable or insecure behaviour. Long passwords may
also be very expensive to hash for certain algorithms.
"""
def registration_changeset(user, attrs) do
user
|> cast(attrs, [:username, :email, :password, :displayname, :is_admin])
|> validate_username()
|> validate_email()
|> validate_password()
end
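  # Illustrative usage (attrs values and the Repo call are hypothetical):
  #
  #     %__MODULE__{}
  #     |> registration_changeset(%{
  #       "username" => "jane_doe",
  #       "email" => "jane@example.com",
  #       "password" => "a long enough password"
  #     })
  #     |> Glimesh.Repo.insert()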
defp validate_username(changeset) do
changeset
|> validate_required([:username])
|> validate_format(:username, ~r/^(?![_.])(?!.*[_.]{2})[a-zA-Z0-9._]+(?<![_.])$/i)
|> validate_length(:username, min: 3, max: 50)
|> unsafe_validate_unique(:username, Glimesh.Repo)
|> unique_constraint(:username)
|> validate_username_reserved_words(:username)
|> validate_username_no_bad_words(:username)
# Disabled for now
# |> validate_username_contains_no_bad_words(:username)
end
def validate_username_reserved_words(changeset, field) when is_atom(field) do
validate_change(changeset, field, fn current_field, value ->
if Enum.member?(Application.get_env(:glimesh, :reserved_words), value) do
[{current_field, "This username is reserved"}]
else
[]
end
end)
end
def validate_username_no_bad_words(changeset, field) when is_atom(field) do
validate_change(changeset, field, fn current_field, value ->
if Enum.member?(Application.get_env(:glimesh, :bad_words), value) do
[{current_field, "This username contains a bad word"}]
else
[]
end
end)
end
def validate_username_contains_no_bad_words(changeset, field) when is_atom(field) do
validate_change(changeset, field, fn current_field, value ->
if Enum.any?(Application.get_env(:glimesh, :bad_words), fn w ->
String.contains?(value, w)
end) do
[{current_field, "This username contains a bad word"}]
else
[]
end
end)
end
defp validate_email(changeset) do
changeset
|> validate_required([:email])
|> validate_format(:email, ~r/^[^\s]+@[^\s]+$/, message: "must have the @ sign and no spaces")
|> validate_length(:email, max: 160)
|> unsafe_validate_unique(:email, Glimesh.Repo)
|> unique_constraint(:email)
end
defp validate_password(changeset) do
changeset
|> validate_required([:password])
|> validate_length(:password, min: 8, max: 80)
# |> validate_format(:password, ~r/[a-z]/, message: "at least one lower case character")
# |> validate_format(:password, ~r/[A-Z]/, message: "at least one upper case character")
# |> validate_format(:password, ~r/[!?@#$%^&*_0-9]/, message: "at least one digit or punctuation character")
|> prepare_changes(&hash_password/1)
end
defp hash_password(changeset) do
password = get_change(changeset, :password)
changeset
|> put_change(:hashed_password, Bcrypt.hash_pwd_salt(password))
|> delete_change(:password)
end
def validate_displayname(changeset) do
validate_change(changeset, :displayname, fn current_field, value ->
if String.downcase(value) !== get_field(changeset, :username) do
[{current_field, "Display name must match Username"}]
else
[]
end
end)
end
@doc """
A user changeset for changing the e-mail.
It requires the e-mail to change otherwise an error is added.
"""
def email_changeset(user, attrs) do
user
|> cast(attrs, [:email])
|> validate_email()
|> case do
%{changes: %{email: _}} = changeset -> changeset
%{} = changeset -> add_error(changeset, :email, "Email is the same")
end
end
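  # Illustrative: passing an unchanged e-mail produces an invalid changeset with
  # an "Email is the same" error rather than a no-op (values are hypothetical):
  #
  #     email_changeset(user, %{"email" => user.email})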
@doc """
A user changeset for changing the password.
"""
def password_changeset(user, attrs) do
user
|> cast(attrs, [:password])
|> validate_confirmation(:password, message: "Password does not match")
|> validate_password()
end
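  # Note: validate_confirmation/3 checks the "password_confirmation" param, so
  # callers are expected to pass both (hypothetical values):
  #
  #     password_changeset(user, %{
  #       "password" => "new password 123",
  #       "password_confirmation" => "new password 123"
  #     })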
@doc """
  A user changeset for updating the user's public profile.
"""
def profile_changeset(user, attrs) do
user
|> cast(attrs, [
:displayname,
:social_twitter,
:social_youtube,
:social_instagram,
:social_discord,
:youtube_intro_url,
:profile_content_md
])
|> validate_length(:profile_content_md, max: 8192)
|> validate_youtube_url(:youtube_intro_url)
|> validate_displayname()
|> set_profile_content_html()
|> cast_attachments(attrs, [:avatar])
end
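  # Illustrative (hypothetical value): when the changeset is valid,
  # set_profile_content_html/1 below renders profile_content_md to HTML.
  #
  #     profile_changeset(user, %{"profile_content_md" => "**hello**"})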
@doc """
  A user changeset for updating Stripe-related fields on the user.
"""
def stripe_changeset(user, attrs) do
user
|> cast(attrs, [:stripe_customer_id, :stripe_user_id, :stripe_payment_method])
end
@doc """
Confirms the account by setting `confirmed_at`.
"""
def confirm_changeset(user) do
now = NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second)
change(user, confirmed_at: now)
end
@doc """
Verifies the password.
If there is no user or the user doesn't have a password, we call
`Bcrypt.no_user_verify/0` to avoid timing attacks.
"""
def valid_password?(%Glimesh.Accounts.User{hashed_password: hashed_password}, password)
when is_binary(hashed_password) and byte_size(password) > 0 do
Bcrypt.verify_pass(password, hashed_password)
end
def valid_password?(_, _) do
Bcrypt.no_user_verify()
false
end
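  # Both clauses in one sketch (hypothetical values): a wrong password and a
  # missing user take comparable time because the fallback still runs Bcrypt.
  #
  #     valid_password?(user, "wrong guess") #=> false, after verifying the hash
  #     valid_password?(nil, "wrong guess")  #=> false, via Bcrypt.no_user_verify/0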
@doc """
Validates the current password otherwise adds an error to the changeset.
"""
def validate_current_password(changeset, password) do
if valid_password?(changeset.data, password) do
changeset
else
add_error(changeset, :current_password, "Invalid Password")
end
end
def validate_youtube_url(changeset, field) when is_atom(field) do
validate_change(changeset, field, fn current_field, value ->
matches = Regex.run(~r/.*(?:youtu.be\/|v\/|u\/\w\/|embed\/|watch\?v=)([^#\&\?]*).*/, value)
      if is_nil(matches) or length(matches) < 2 do
[{current_field, "Incorrect YouTube URL format"}]
else
[]
end
end)
end
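  # For example (illustrative URL), "https://youtu.be/abc123" yields
  # ["https://youtu.be/abc123", "abc123"] from Regex.run/2 and passes, while a
  # URL without any of the recognized segments yields nil and the error above.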
def set_profile_content_html(changeset) do
case changeset do
%Ecto.Changeset{valid?: true, changes: %{profile_content_md: profile_content_md}} ->
put_change(
changeset,
:profile_content_html,
Glimesh.Accounts.Profile.safe_user_markdown_to_html(profile_content_md)
)
_ ->
changeset
end
end
end
| 29.682731 | 112 | 0.672034 |
73ff8ac9731c9631a5c01a56c548cb64a917689d | 1,026 | ex | Elixir | test/support/data_case.ex | victorprs/ecto-job-scheduler | 233e312f24ab3edf7b53e6da3d6667925450d60d | [
"MIT"
] | null | null | null | test/support/data_case.ex | victorprs/ecto-job-scheduler | 233e312f24ab3edf7b53e6da3d6667925450d60d | [
"MIT"
] | null | null | null | test/support/data_case.ex | victorprs/ecto-job-scheduler | 233e312f24ab3edf7b53e6da3d6667925450d60d | [
"MIT"
] | null | null | null | defmodule EctoJobScheduler.DataBase do
@moduledoc false
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
alias Ecto.Changeset
using do
quote do
alias EctoJobScheduler.Test.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
end
end
setup tags do
:ok = Sandbox.checkout(EctoJobScheduler.Test.Repo)
unless tags[:async] do
Sandbox.mode(EctoJobScheduler.Test.Repo, {:shared, self()})
end
:ok
end
@doc """
  A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 23.860465 | 77 | 0.668616 |
73ff935ad9c98f68f8ca6980f6ea541ac0ff002f | 1,106 | ex | Elixir | lib/mix/lib/mix/tasks/deps.check.ex | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | 1 | 2017-09-09T20:59:04.000Z | 2017-09-09T20:59:04.000Z | lib/mix/lib/mix/tasks/deps.check.ex | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/deps.check.ex | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Deps.Check do
use Mix.Task
import Mix.Deps, only: [all: 0, format_dep: 1, format_status: 1, check_lock: 2, out_of_date?: 1]
@hidden true
@shortdoc "Check if all dependencies are valid"
@recursive :both
@moduledoc """
Checks if all dependencies are valid and, if not, aborts.
Prints the invalid dependencies' status before aborting.
This task is not shown in `mix help` but it is part
of the `mix` public API and can be depended on.
"""
def run(_) do
lock = Mix.Deps.Lock.read
all = Enum.map all, check_lock(&1, lock)
case Enum.partition all, ok?(&1) do
{ _, [] } -> :ok
{ _, not_ok } ->
shell = Mix.shell
shell.error "Unchecked dependencies for environment #{Mix.env}:"
Enum.each not_ok, fn(dep) ->
shell.error "* #{format_dep(dep)}"
shell.error " #{format_status dep}"
end
raise Mix.Error, message: "Can't continue due to errors on dependencies"
end
end
defp ok?(Mix.Dep[status: { :ok, _ }]), do: true
defp ok?(_), do: false
end
| 28.358974 | 98 | 0.610307 |
73ff95613551ac86f98a3a953b4e17f952d1bf43 | 1,220 | ex | Elixir | 2020/otp/dash/test/support/conn_case.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | 2020/otp/dash/test/support/conn_case.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | 2020/otp/dash/test/support/conn_case.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | defmodule DashWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use DashWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import DashWeb.ConnCase
alias DashWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint DashWeb.Endpoint
end
end
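# Illustrative usage sketch (route and response status are assumed): a
# controller test built on this case receives the %Plug.Conn{} created in the
# setup below.
#
#     defmodule DashWeb.PageControllerTest do
#       use DashWeb.ConnCase, async: true
#
#       test "GET /", %{conn: conn} do
#         conn = get(conn, "/")
#         assert html_response(conn, 200)
#       end
#     end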
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Dash.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Dash.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27.727273 | 66 | 0.719672 |
73ff9c922684ecfe253b0f44d4f7499d06aa05c4 | 1,619 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/fhir_filter.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/fhir_filter.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/fhir_filter.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.FhirFilter do
@moduledoc """
Filter configuration.
## Attributes
* `resources` (*type:* `GoogleApi.HealthCare.V1beta1.Model.Resources.t`, *default:* `nil`) - List of resources to include in the output. If this list is empty or
not specified, all resources are included in the output.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:resources => GoogleApi.HealthCare.V1beta1.Model.Resources.t()
}
field(:resources, as: GoogleApi.HealthCare.V1beta1.Model.Resources)
end
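# Illustrative construction sketch (the `resources` field on the nested
# Resources struct is assumed from the generated client, not verified here):
#
#     %GoogleApi.HealthCare.V1beta1.Model.FhirFilter{
#       resources: %GoogleApi.HealthCare.V1beta1.Model.Resources{
#         resources: ["Patient", "Observation"]
#       }
#     }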
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.FhirFilter do
def decode(value, options) do
GoogleApi.HealthCare.V1beta1.Model.FhirFilter.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.FhirFilter do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.729167 | 165 | 0.747993 |
73ffd3e01d81335b9275087c200cc8ae0cb97779 | 499 | ex | Elixir | projects/sirena_email/lib/sirena_email_web/views/error_view.ex | idfumg/ElixirSynopsis | 74c668d84300812dd41eb18772aecfb89bc7628b | [
"MIT"
] | null | null | null | projects/sirena_email/lib/sirena_email_web/views/error_view.ex | idfumg/ElixirSynopsis | 74c668d84300812dd41eb18772aecfb89bc7628b | [
"MIT"
] | null | null | null | projects/sirena_email/lib/sirena_email_web/views/error_view.ex | idfumg/ElixirSynopsis | 74c668d84300812dd41eb18772aecfb89bc7628b | [
"MIT"
] | null | null | null | defmodule SirenaEmailWeb.ErrorView do
use SirenaEmailWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
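# Illustrative behavior sketch (assumes no 404.html/500.html templates are
# compiled into this view, as in a default Phoenix app): rendering falls back
# to the status message derived from the template name.
#
#     Phoenix.View.render_to_string(SirenaEmailWeb.ErrorView, "404.html", [])
#     #=> "Not Found"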
end
| 29.352941 | 61 | 0.739479 |