hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e38794edb26954874096a38181251e63b0ebe58
| 8,134
|
ex
|
Elixir
|
lib/elixir/lib/code/identifier.ex
|
kenichi/elixir
|
8c27da88c70623cbe516d5310c885943395a82a2
|
[
"Apache-2.0"
] | 2
|
2018-11-15T06:38:14.000Z
|
2018-11-17T18:03:14.000Z
|
lib/elixir/lib/code/identifier.ex
|
kenichi/elixir
|
8c27da88c70623cbe516d5310c885943395a82a2
|
[
"Apache-2.0"
] | 1
|
2018-09-10T23:36:45.000Z
|
2018-09-10T23:36:45.000Z
|
lib/elixir/lib/code/identifier.ex
|
kenichi/elixir
|
8c27da88c70623cbe516d5310c885943395a82a2
|
[
"Apache-2.0"
] | 1
|
2018-09-10T23:32:56.000Z
|
2018-09-10T23:32:56.000Z
|
defmodule Code.Identifier do
  # Internal helpers used by the compiler and the Inspect protocol to decide
  # how operators, atoms and aliases are classified and printed. Deliberately
  # undocumented (not a public API).
  @moduledoc false

  @doc """
  Checks if the given identifier is a unary op.

  Returns the operator's associativity and precedence, or `:error`
  when the atom is not a unary operator.

  ## Examples

      iex> Code.Identifier.unary_op(:+)
      {:non_associative, 300}

  """
  @spec unary_op(atom) :: {:non_associative, precedence :: pos_integer} | :error
  def unary_op(op) do
    cond do
      op in [:&] -> {:non_associative, 90}
      op in [:!, :^, :not, :+, :-, :~~~] -> {:non_associative, 300}
      op in [:@] -> {:non_associative, 320}
      true -> :error
    end
  end

  @doc """
  Checks if the given identifier is a binary op.

  Returns the operator's associativity and precedence, or `:error`
  when the atom is not a binary operator.

  ## Examples

      iex> Code.Identifier.binary_op(:+)
      {:left, 210}

  """
  @spec binary_op(atom) :: {:left | :right, precedence :: pos_integer} | :error
  def binary_op(op) do
    cond do
      op in [:<-, :\\] -> {:left, 40}
      op in [:when] -> {:right, 50}
      op in [:::] -> {:right, 60}
      op in [:|] -> {:right, 70}
      op in [:=] -> {:right, 100}
      op in [:||, :|||, :or] -> {:left, 130}
      op in [:&&, :&&&, :and] -> {:left, 140}
      op in [:==, :!=, :=~, :===, :!==] -> {:left, 150}
      op in [:<, :<=, :>=, :>] -> {:left, 160}
      op in [:|>, :<<<, :>>>, :<~, :~>, :<<~, :~>>, :<~>, :<|>] -> {:left, 170}
      op in [:in] -> {:left, 180}
      op in [:^^^] -> {:left, 190}
      op in [:++, :--, :.., :<>] -> {:right, 200}
      op in [:+, :-] -> {:left, 210}
      op in [:*, :/] -> {:left, 220}
      op in [:.] -> {:left, 310}
      true -> :error
    end
  end

  @doc """
  Classifies the given atom into one of the following categories:

    * `:alias` - a valid Elixir alias, like `Foo`, `Foo.Bar` and so on

    * `:callable_local` - an atom that can be used as a local call;
      this category includes identifiers like `:foo`

    * `:callable_operator` - all callable operators, such as `:<>`. Note
      operators such as `:..` are not callable because of ambiguity

    * `:not_callable` - an atom that cannot be used as a function call after the
      `.` operator (for example, `:<<>>` is not callable because `Foo.<<>>` is a
      syntax error); this category includes atoms like `:Foo`, since they are
      valid identifiers but they need quotes to be used in function calls
      (`Foo."Bar"`)

    * `:other` - any other atom (these are usually escaped when inspected, like
      `:"foo and bar"`)

  """
  def classify(atom) when is_atom(atom) do
    charlist = Atom.to_charlist(atom)

    cond do
      # Special forms that parse as atoms but can never follow a dot call.
      atom in [:%, :%{}, :{}, :<<>>, :..., :.., :., :->] ->
        :not_callable

      unary_op(atom) != :error or binary_op(atom) != :error ->
        :callable_operator

      valid_alias?(charlist) ->
        :alias

      true ->
        # Ask the configured tokenizer whether the atom spells a plain
        # identifier (fully consumed, hence the [] for the rest).
        case :elixir_config.get(:identifier_tokenizer, String.Tokenizer).tokenize(charlist) do
          {kind, _acc, [], _, _, special} ->
            # Identifiers whose special-character set includes ?@ are not
            # callable without quoting.
            if kind == :identifier and not :lists.member(?@, special) do
              :callable_local
            else
              :not_callable
            end

          _ ->
            :other
        end
    end
  end

  # A valid alias is the charlist 'Elixir' followed by zero or more
  # dot-separated pieces, each starting with an uppercase ASCII letter.
  defp valid_alias?('Elixir' ++ rest), do: valid_alias_piece?(rest)
  defp valid_alias?(_other), do: false

  defp valid_alias_piece?([?., char | rest]) when char >= ?A and char <= ?Z,
    do: valid_alias_piece?(trim_leading_while_valid_identifier(rest))

  defp valid_alias_piece?([]), do: true
  defp valid_alias_piece?(_other), do: false

  # Drops leading [a-zA-Z0-9_] characters and returns the remaining charlist.
  defp trim_leading_while_valid_identifier([char | rest])
       when char >= ?a and char <= ?z
       when char >= ?A and char <= ?Z
       when char >= ?0 and char <= ?9
       when char == ?_ do
    trim_leading_while_valid_identifier(rest)
  end

  defp trim_leading_while_valid_identifier(other) do
    other
  end

  @doc """
  Inspects the identifier as an atom.
  """
  # nil, true and false are printed without a leading colon.
  def inspect_as_atom(atom) when is_nil(atom) or is_boolean(atom) do
    Atom.to_string(atom)
  end

  def inspect_as_atom(atom) when is_atom(atom) do
    binary = Atom.to_string(atom)

    case classify(atom) do
      :alias ->
        case binary do
          # "Elixir" itself (and "Elixir.Elixir") must keep the prefix;
          # for any other alias the "Elixir." prefix is stripped.
          binary when binary in ["Elixir", "Elixir.Elixir"] -> binary
          "Elixir.Elixir." <> _rest -> binary
          "Elixir." <> rest -> rest
        end

      type when type in [:callable_local, :callable_operator, :not_callable] ->
        ":" <> binary

      :other ->
        # Atoms that are not plain identifiers are printed quoted and escaped.
        {escaped, _} = escape(binary, ?")
        IO.iodata_to_binary([?:, ?", escaped, ?"])
    end
  end

  @doc """
  Inspects the given identifier as a key.

  Plain identifiers/operators render as `key:`; anything else is
  quoted and escaped, as in `"some key":`.
  """
  def inspect_as_key(atom) when is_atom(atom) do
    binary = Atom.to_string(atom)

    case classify(atom) do
      type when type in [:callable_local, :callable_operator, :not_callable] ->
        IO.iodata_to_binary([binary, ?:])

      _ ->
        {escaped, _} = escape(binary, ?")
        IO.iodata_to_binary([?", escaped, ?", ?:])
    end
  end

  @doc """
  Inspects the given identifier as a function name.

  Callable identifiers render bare; all others are wrapped in quotes,
  escaping the content only when it is neither `:not_callable` nor an alias.
  """
  def inspect_as_function(atom) when is_atom(atom) do
    binary = Atom.to_string(atom)

    case classify(atom) do
      type when type in [:callable_local, :callable_operator] ->
        binary

      type ->
        escaped =
          if type in [:not_callable, :alias] do
            binary
          else
            elem(escape(binary, ?"), 0)
          end

        IO.iodata_to_binary([?", escaped, ?"])
    end
  end

  @doc """
  Extracts the name and arity of the parent from the anonymous function identifier.

  Returns `{name_atom, arity_string}` on success, `:error` otherwise.
  """
  # Example of this format: -NAME/ARITY-fun-COUNT-
  def extract_anonymous_fun_parent(atom) when is_atom(atom) do
    # NAME may itself contain "/" characters, so the string is split on "/"
    # and only the trailing segment is parsed as ARITY-fun-COUNT-.
    with "-" <> rest <- Atom.to_string(atom),
         [trailing | reversed] = rest |> String.split("/") |> Enum.reverse(),
         [arity, _inner, _count, ""] <- String.split(trailing, "-") do
      {reversed |> Enum.reverse() |> Enum.join("/") |> String.to_atom(), arity}
    else
      _ -> :error
    end
  end

  @doc """
  Escapes the given identifier.

  Escapes occurrences of `char` and of `\#{` in the binary, applying `fun`
  (defaults to the standard escape map) to each codepoint. Stops after
  `count` characters (`:infinity` by default) and returns a tuple of the
  escaped iodata and the unprocessed remainder.
  """
  def escape(other, char, count \\ :infinity, fun \\ &escape_map/1) do
    escape(other, char, count, [], fun)
  end

  # Budget exhausted with input remaining: return what was escaped so far.
  defp escape(<<_, _::binary>> = binary, _char, 0, acc, _fun) do
    {acc, binary}
  end

  # The head byte equals the escape character itself: emit a backslash escape.
  defp escape(<<char, t::binary>>, char, count, acc, fun) do
    escape(t, char, decrement(count), [acc | [?\\, char]], fun)
  end

  # Escape interpolation starts ("#{") so the output cannot interpolate.
  defp escape(<<?#, ?{, t::binary>>, char, count, acc, fun) do
    escape(t, char, decrement(count), [acc | '\\\#{'], fun)
  end

  # Valid UTF-8 codepoint: let `fun` map it, falling back to escape_char/1.
  defp escape(<<h::utf8, t::binary>>, char, count, acc, fun) do
    escaped = if value = fun.(h), do: value, else: escape_char(h)
    escape(t, char, decrement(count), [acc | escaped], fun)
  end

  # Byte that is not valid UTF-8 (previous clause failed): emit \xNN.
  defp escape(<<a::4, b::4, t::binary>>, char, count, acc, fun) do
    escape(t, char, decrement(count), [acc | ['\\x', to_hex(a), to_hex(b)]], fun)
  end

  defp escape(<<>>, _char, _count, acc, _fun) do
    {acc, <<>>}
  end

  defp escape_char(0), do: '\\0'

  # 65279 is U+FEFF (byte order mark / zero width no-break space).
  defp escape_char(65279), do: '\\uFEFF'

  # Printable/assigned ranges pass through encoded as UTF-8.
  defp escape_char(char)
       when char in 0x20..0x7E
       when char in 0xA0..0xD7FF
       when char in 0xE000..0xFFFD
       when char in 0x10000..0x10FFFF do
    <<char::utf8>>
  end

  defp escape_char(char) when char < 0x100 do
    <<a::4, b::4>> = <<char::8>>
    ['\\x', to_hex(a), to_hex(b)]
  end

  defp escape_char(char) when char < 0x10000 do
    <<a::4, b::4, c::4, d::4>> = <<char::16>>
    ['\\x{', to_hex(a), to_hex(b), to_hex(c), to_hex(d), ?}]
  end

  defp escape_char(char) when char < 0x1000000 do
    <<a::4, b::4, c::4, d::4, e::4, f::4>> = <<char::24>>
    ['\\x{', to_hex(a), to_hex(b), to_hex(c), to_hex(d), to_hex(e), to_hex(f), ?}]
  end

  # Standard single-character escapes; returns false when no mapping applies,
  # which makes escape/5 fall back to escape_char/1.
  defp escape_map(?\a), do: '\\a'
  defp escape_map(?\b), do: '\\b'
  defp escape_map(?\d), do: '\\d'
  defp escape_map(?\e), do: '\\e'
  defp escape_map(?\f), do: '\\f'
  defp escape_map(?\n), do: '\\n'
  defp escape_map(?\r), do: '\\r'
  defp escape_map(?\t), do: '\\t'
  defp escape_map(?\v), do: '\\v'
  defp escape_map(?\\), do: '\\\\'
  defp escape_map(_), do: false

  @compile {:inline, to_hex: 1, decrement: 1}

  # Nibble (0..15) to uppercase hex digit.
  defp to_hex(c) when c in 0..9, do: ?0 + c
  defp to_hex(c) when c in 10..15, do: ?A + c - 10

  # :infinity means "no budget": never decremented.
  defp decrement(:infinity), do: :infinity
  defp decrement(counter), do: counter - 1
end
| 28.843972
| 94
| 0.559012
|
9e387a183b175d765805cb069eb283342aad7322
| 2,773
|
exs
|
Elixir
|
mix.exs
|
ZPVIP/mipha
|
a7df054f72eec7de88b60d94c501488375bdff6a
|
[
"MIT"
] | 1
|
2019-06-11T20:20:26.000Z
|
2019-06-11T20:20:26.000Z
|
mix.exs
|
ZPVIP/mipha
|
a7df054f72eec7de88b60d94c501488375bdff6a
|
[
"MIT"
] | null | null | null |
mix.exs
|
ZPVIP/mipha
|
a7df054f72eec7de88b60d94c501488375bdff6a
|
[
"MIT"
] | null | null | null |
defmodule Mipha.Mixfile do
  use Mix.Project

  # Project definition: app name/version, required Elixir, compiler chain
  # (Phoenix + Gettext before the defaults) and ExCoveralls test coverage.
  def project do
    [
      app: :mipha,
      version: "0.0.1",
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      # Force the coveralls tasks to run in the :test environment.
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ]
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {Mipha.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Specifies which paths to compile per environment.
  # Test builds additionally compile helpers under test/support.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.4.0"},
      {:phoenix_pubsub, "~> 1.1"},
      {:phoenix_ecto, "~> 4.0"},
      {:ecto_sql, "~> 3.0"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_html, "~> 2.10"},
      {:phoenix_live_reload, "~> 1.0", only: :dev},
      {:gettext, "~> 0.11"},
      {:plug_cowboy, "~> 2.0"},
      {:plug, "~> 1.7"},
      {:credo, "~> 0.10.2", only: [:dev, :test], runtime: false},
      {:comeonin, "~> 4.1"},
      {:bcrypt_elixir, "~> 1.0"},
      {:ueberauth, "~> 0.5.0"},
      {:ueberauth_identity, "~> 0.2.3"},
      {:ueberauth_github, "~> 0.7.0"},
      {:faker, "~> 0.10.0"},
      {:earmark, "~> 1.3.0"},
      {:html_sanitize_ex, "~> 1.3"},
      {:timex, "~> 3.3"},
      {:ecto_enum, "~> 1.1"},
      {:qiniu, "~> 0.4.0"},
      {:exmoji, "~> 0.2.2"},
      {:bamboo, "~> 1.0"},
      {:bamboo_smtp, "~> 1.5"},
      {:cachex, "~> 3.1.1"},
      {:jason, "~> 1.1"},
      # Poison is overridden so transitive deps agree on a single version.
      {:poison, "~> 3.0", override: true},
      {:captcha, github: "zven21/elixir-captcha"},
      {:turbo_ecto, github: "zven21/turbo_ecto"},
      {:turbo_html, github: "zven21/turbo_html"},
      {:remote_ip, "~> 0.1.4"},
      {:plug_attack, "~> 0.3.1"},
      {:sentry, "~> 6.4"},
      {:ex_machina, "~> 2.2.2"},
      {:excoveralls, "~> 0.10", only: :test}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      # `mix test` recreates and migrates the database before running tests.
      test: ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 28.885417
| 79
| 0.524702
|
9e38b63ab0316e469f4e5481862b678dc532bd53
| 3,501
|
ex
|
Elixir
|
lib/oban/plugins/stager.ex
|
polvalente/oban
|
7267f8f37f8257b735d498d81cc1e43147cefc49
|
[
"Apache-2.0"
] | null | null | null |
lib/oban/plugins/stager.ex
|
polvalente/oban
|
7267f8f37f8257b735d498d81cc1e43147cefc49
|
[
"Apache-2.0"
] | null | null | null |
lib/oban/plugins/stager.ex
|
polvalente/oban
|
7267f8f37f8257b735d498d81cc1e43147cefc49
|
[
"Apache-2.0"
] | null | null | null |
defmodule Oban.Plugins.Stager do
  @moduledoc """
  Transition jobs to the `available` state when they reach their scheduled time.

  This module is necessary for the execution of scheduled and retryable jobs.

  ## Options

    * `:interval` - the number of milliseconds between database updates. This is directly tied to
      the resolution of _scheduled_ jobs. For example, with an `interval` of `5_000ms`, scheduled
      jobs are checked every 5 seconds. The default is `1_000ms`.

    * `:limit` — the number of jobs that will be staged each time the plugin runs. Defaults to
      `5,000`, which you can increase if staging can't keep up with your insertion rate or decrease
      if you're experiencing staging timeouts.

  ## Instrumenting with Telemetry

  The `Oban.Plugins.Stager` plugin adds the following metadata to the `[:oban, :plugin, :stop]` event:

  * :staged_count - the number of jobs that were staged in the database
  """

  use GenServer

  import Ecto.Query,
    only: [
      distinct: 2,
      join: 5,
      limit: 2,
      order_by: 2,
      select: 3,
      where: 3
    ]

  alias Oban.{Config, Job, Notifier, Peer, Repo}

  @type option :: {:conf, Config.t()} | {:name, GenServer.name()} | {:interval, pos_integer()}

  defmodule State do
    @moduledoc false

    # Internal plugin state: the Oban config, registered name, the pending
    # staging timer reference, plus the staging limit and polling interval.
    defstruct [
      :conf,
      :name,
      :timer,
      limit: 5_000,
      interval: :timer.seconds(1)
    ]
  end

  @doc false
  @spec start_link([option()]) :: GenServer.on_start()
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: opts[:name])
  end

  @impl GenServer
  def init(opts) do
    # Trap exits so terminate/2 runs and can cancel the pending timer.
    Process.flag(:trap_exit, true)

    state =
      State
      |> struct!(opts)
      |> schedule_staging()

    {:ok, state}
  end

  @impl GenServer
  def terminate(_reason, %State{timer: timer}) do
    if is_reference(timer), do: Process.cancel_timer(timer)
    :ok
  end

  @impl GenServer
  def handle_info(:stage, %State{} = state) do
    meta = %{conf: state.conf, plugin: __MODULE__}

    # Wrap each staging pass in a telemetry span. Successful passes report
    # :staged_count in the stop metadata; failures attach the error term.
    :telemetry.span([:oban, :plugin], meta, fn ->
      case check_leadership_and_stage(state) do
        {:ok, staged_count} when is_integer(staged_count) ->
          {:ok, Map.put(meta, :staged_count, staged_count)}

        error ->
          {:error, Map.put(meta, :error, error)}
      end
    end)

    # Always re-arm the timer for the next pass, regardless of outcome.
    {:noreply, schedule_staging(state)}
  end

  # Only the cluster leader stages jobs; non-leaders report zero staged jobs.
  # Staging and queue notification run inside a single transaction.
  defp check_leadership_and_stage(state) do
    if Peer.leader?(state.conf) do
      Repo.transaction(state.conf, fn ->
        # Repo.update_all/3 yields {count, nil}; assert that shape here.
        {sched_count, nil} = stage_scheduled(state)
        notify_queues(state)
        sched_count
      end)
    else
      {:ok, 0}
    end
  end

  # Flip up to `state.limit` due "scheduled"/"retryable" jobs (oldest ids
  # first) to "available". Returns the {count, nil} from Repo.update_all/3.
  defp stage_scheduled(state) do
    subquery =
      Job
      |> where([j], j.state in ["scheduled", "retryable"])
      |> where([j], not is_nil(j.queue))
      |> where([j], j.scheduled_at <= ^DateTime.utc_now())
      |> order_by(asc: :id)
      |> limit(^state.limit)

    # The limit/order must live in a subquery because UPDATE itself cannot
    # take them; the outer update joins back on id.
    Repo.update_all(
      state.conf,
      join(Job, :inner, [j], x in subquery(subquery), on: j.id == x.id),
      set: [state: "available"]
    )
  end

  # Broadcast an :insert notification listing every distinct queue that
  # currently has available jobs.
  defp notify_queues(state) do
    query =
      Job
      |> where([j], j.state == "available")
      |> where([j], not is_nil(j.queue))
      |> select([j], %{queue: j.queue})
      |> distinct(true)

    payload = Repo.all(state.conf, query)

    Notifier.notify(state.conf, :insert, payload)
  end

  # (Re)arm the one-shot timer that delivers the next :stage message.
  defp schedule_staging(state) do
    timer = Process.send_after(self(), :stage, state.interval)

    %{state | timer: timer}
  end
end
| 24.65493
| 102
| 0.627249
|
9e38c21ecf47ae46e608b13fdf54b221dc91c07d
| 1,446
|
ex
|
Elixir
|
lib/ex_locale.ex
|
dinarly/ex_locale
|
29597e1f2b24623e1c9c64352f0a992784ffc77f
|
[
"MIT"
] | null | null | null |
lib/ex_locale.ex
|
dinarly/ex_locale
|
29597e1f2b24623e1c9c64352f0a992784ffc77f
|
[
"MIT"
] | null | null | null |
lib/ex_locale.ex
|
dinarly/ex_locale
|
29597e1f2b24623e1c9c64352f0a992784ffc77f
|
[
"MIT"
] | null | null | null |
defmodule ExLocale do
  @moduledoc """
  Documentation for ExLocale.

  Provides a small hard-coded list of locales together with helpers to
  shuffle them, print their ids, and parse/count entries in a locale CSV.
  """

  alias ExLocale.Locale
  alias NimbleCSV.RFC4180, as: CSV

  @doc """
  Returns the hard-coded list of supported locales as `ExLocale.Locale` structs.

  Each locale carries its id, an English display name, and a map of
  translated display names keyed by locale code.
  """
  def list_locales do
    [
      %Locale{
        id: :en_GB,
        name: "English (United Kingdom)",
        translations: %{
          "fr_FR" => %{name: "anglais (Royaume-Uni)"},
          "ar_SY" => %{name: "الإنجليزية (المملكة المتحدة)"}
        }
      },
      %Locale{
        id: :fr_FR,
        name: "French (France)",
        translations: %{
          "fr_FR" => %{name: "français (France)"},
          "ar_SY" => %{name: "الفرنسية (فرنسا)"}
        }
      },
      %Locale{
        id: :ar_SY,
        name: "Arabic (Syria)",
        translations: %{
          "fr_FR" => %{name: "arabe (Syrie)"},
          "ar_SY" => %{name: "العربية (سوريا)"}
        }
      }
    ]
  end

  @doc """
  Returns the given locales in random order.
  """
  def shuffle(locales) do
    Enum.shuffle(locales)
  end

  @doc """
  Prints the id of every known locale, one per line.

  Returns `:ok` (the result of `Enum.each/2`).
  """
  def get_locale_ids do
    # Parenthesized call: bare `list_locales` for a 0-arity local call is
    # deprecated; parentheses also added to the Enum.each/IO.puts calls.
    list_locales()
    |> Enum.each(fn locale ->
      locale
      |> Map.get(:id)
      |> IO.puts()
    end)
  end

  @doc """
  Counts the rows in `locale-list/data/<locale>/locales.csv`.

  ## Examples

      ExLocale.create_locales("en_GB")
      #=> 563

  """
  def create_locales(locale) do
    load_csv("locale-list/data/#{locale}/locales.csv")
    |> Enum.count()
  end

  @doc """
  Lazily parses the CSV at `filename` into `%{id: id, name: name}` maps,
  printing each row as it is consumed.
  """
  def load_csv(filename) do
    filename
    |> File.stream!()
    |> CSV.parse_stream()
    |> Stream.map(fn [id, name] ->
      # Debug print kept from the original; the parsed row is now actually
      # returned (the original built this map and then discarded it,
      # yielding a stream of :ok values instead of data).
      IO.puts(inspect("#{id}: #{name}"))
      %{id: id, name: name}
    end)
  end
end
| 19.540541
| 60
| 0.514523
|
9e38c4b5d6d9183f287e0629ea553cf899742d6e
| 2,823
|
ex
|
Elixir
|
apps/itest/lib/transactions/currency.ex
|
omgnetwork/specs
|
4028b2b9a6a4d5f0057b6ac9e3298c66e32a9deb
|
[
"Apache-2.0"
] | 4
|
2020-10-31T15:16:16.000Z
|
2021-02-06T22:44:19.000Z
|
apps/itest/lib/transactions/currency.ex
|
omgnetwork/specs
|
4028b2b9a6a4d5f0057b6ac9e3298c66e32a9deb
|
[
"Apache-2.0"
] | 4
|
2020-11-02T17:12:09.000Z
|
2021-02-10T20:35:19.000Z
|
apps/itest/lib/transactions/currency.ex
|
omgnetwork/specs
|
4028b2b9a6a4d5f0057b6ac9e3298c66e32a9deb
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Itest.Transactions.Currency do
  @moduledoc false

  import Itest.Poller, only: [wait_on_receipt_confirmed: 1]

  alias Itest.Transactions.Encoding

  # Twenty zero bytes: the conventional marker address for Ether.
  @ether <<0::160>>
  @approve_gas 50_000

  #
  # ETH
  #

  @doc false
  def ether(), do: @ether

  # Converts an ether amount (integer or decimal-free string) to wei.
  def to_wei(ether) when is_binary(ether) do
    ether
    |> String.to_integer()
    |> to_wei()
  end

  def to_wei(ether) when is_integer(ether), do: ether * 1_000_000_000_000_000_000

  #
  # ERC-20
  #

  # Binary address of the mintable ERC-20 contract from the deployment env file.
  def erc20() do
    contracts = parse_contracts()

    Encoding.to_binary(contracts["CONTRACT_ERC20_MINTABLE"])
  end

  # Mints `amount` ERC-20 tokens to `to_addr` using the node's first account
  # as the faucet, waiting for the receipt before returning.
  def mint_erc20(to_addr, amount) do
    {:ok, [faucet | _]} = Ethereumex.HttpClient.eth_accounts()
    data = ABI.encode("mint(address,uint256)", [Encoding.to_binary(to_addr), amount])

    txmap = %{
      from: faucet,
      to: Encoding.to_hex(erc20()),
      data: Encoding.to_hex(data),
      gas: Encoding.to_hex(80_000)
    }

    {:ok, receipt_hash} = Ethereumex.HttpClient.eth_send_transaction(txmap)
    wait_on_receipt_confirmed(receipt_hash)
    {:ok, receipt_hash}
  end

  # Approves `spender_address` to spend `amount_in_wei` of the ERC-20 token on
  # behalf of `owner_address`, waiting for the receipt before returning.
  def approve_erc20(owner_address, amount_in_wei, spender_address) do
    data = ABI.encode("approve(address,uint256)", [spender_address, amount_in_wei])

    txmap = %{
      from: owner_address,
      to: Encoding.to_hex(erc20()),
      data: Encoding.to_hex(data),
      gas: Encoding.to_hex(@approve_gas)
    }

    {:ok, receipt_hash} = Ethereumex.HttpClient.eth_send_transaction(txmap)
    wait_on_receipt_confirmed(receipt_hash)
    {:ok, receipt_hash}
  end

  # taken from the plasma-contracts deployment snapshot
  # this parsing occurs in several places around the codebase
  #
  # Parses KEY=VALUE lines from the contract-addresses env file into a map.
  defp parse_contracts() do
    local_umbrella_path = Application.get_env(:itest, :localchain_contract_env_path)

    contract_addresses_path =
      if File.exists?(local_umbrella_path) do
        local_umbrella_path
      else
        # CI/CD
        Path.join([File.cwd!(), "localchain_contract_addresses.env"])
      end

    # String.split/3 already returns a flat list, so the original
    # List.flatten/1 was redundant; `parts: 2` keeps values intact even if
    # they themselves contain "=".
    contract_addresses_path
    |> File.read!()
    |> String.split("\n", trim: true)
    |> Map.new(fn line ->
      [key, value] = String.split(line, "=", parts: 2)
      {key, value}
    end)
  end
end
| 27.144231
| 85
| 0.686149
|
9e38c8c9426cf8aee1385fb2fdf18883610e41c9
| 29,284
|
exs
|
Elixir
|
lib/elixir/test/elixir/task_test.exs
|
spencerdcarlson/elixir
|
23d75ecdf58df80969e12f4420282238e19219a1
|
[
"Apache-2.0"
] | 2
|
2020-06-02T18:00:28.000Z
|
2021-12-10T03:21:42.000Z
|
lib/elixir/test/elixir/task_test.exs
|
spencerdcarlson/elixir
|
23d75ecdf58df80969e12f4420282238e19219a1
|
[
"Apache-2.0"
] | 1
|
2020-09-14T16:23:33.000Z
|
2021-03-25T17:38:59.000Z
|
lib/elixir/test/elixir/task_test.exs
|
spencerdcarlson/elixir
|
23d75ecdf58df80969e12f4420282238e19219a1
|
[
"Apache-2.0"
] | 1
|
2020-11-25T02:22:55.000Z
|
2020-11-25T02:22:55.000Z
|
Code.require_file("test_helper.exs", __DIR__)
defmodule TaskTest do
use ExUnit.Case
doctest Task
@moduletag :capture_log
def wait_and_send(caller, atom) do
send(caller, :ready)
receive do: (true -> true)
send(caller, atom)
end
defp create_task_in_other_process do
caller = self()
spawn(fn -> send(caller, Task.async(fn -> nil end)) end)
receive do: (task -> task)
end
defp create_dummy_task(reason) do
{pid, ref} = spawn_monitor(Kernel, :exit, [reason])
receive do
{:DOWN, ^ref, _, _, _} ->
%Task{ref: ref, pid: pid, owner: self()}
end
end
def sleep(number) do
Process.sleep(number)
number
end
test "can be supervised directly" do
assert {:ok, _} = Supervisor.start_link([{Task, fn -> :ok end}], strategy: :one_for_one)
end
test "generates child_spec/1" do
defmodule MyTask do
use Task
end
assert MyTask.child_spec([:hello]) == %{
id: MyTask,
restart: :temporary,
start: {MyTask, :start_link, [[:hello]]}
}
defmodule CustomTask do
use Task, id: :id, restart: :permanent, shutdown: :infinity, start: {:foo, :bar, []}
end
assert CustomTask.child_spec([:hello]) == %{
id: :id,
restart: :permanent,
shutdown: :infinity,
start: {:foo, :bar, []}
}
end
test "async/1" do
parent = self()
fun = fn -> wait_and_send(parent, :done) end
task = Task.async(fun)
# Assert the struct
assert task.__struct__ == Task
assert is_pid(task.pid)
assert is_reference(task.ref)
# Assert the link
{:links, links} = Process.info(self(), :links)
assert task.pid in links
receive do: (:ready -> :ok)
# Assert the initial call
{:name, fun_name} = Function.info(fun, :name)
assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(task.pid)
# Run the task
send(task.pid, true)
# Assert response and monitoring messages
ref = task.ref
assert_receive {^ref, :done}
assert_receive {:DOWN, ^ref, _, _, :normal}
end
test "async/3" do
task = Task.async(__MODULE__, :wait_and_send, [self(), :done])
assert task.__struct__ == Task
{:links, links} = Process.info(self(), :links)
assert task.pid in links
receive do: (:ready -> :ok)
assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(task.pid)
send(task.pid, true)
assert Task.await(task) === :done
assert_receive :done
end
test "async with $callers" do
grandparent = self()
Task.async(fn ->
parent = self()
assert Process.get(:"$callers") == [grandparent]
Task.async(fn ->
assert Process.get(:"$callers") == [parent, grandparent]
end)
|> Task.await()
end)
|> Task.await()
end
test "start/1" do
parent = self()
fun = fn -> wait_and_send(parent, :done) end
{:ok, pid} = Task.start(fun)
{:links, links} = Process.info(self(), :links)
refute pid in links
receive do: (:ready -> :ok)
{:name, fun_name} = Function.info(fun, :name)
assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(pid)
send(pid, true)
assert_receive :done
end
test "start/3" do
{:ok, pid} = Task.start(__MODULE__, :wait_and_send, [self(), :done])
{:links, links} = Process.info(self(), :links)
refute pid in links
receive do: (:ready -> :ok)
assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(pid)
send(pid, true)
assert_receive :done
end
test "start_link/1" do
parent = self()
fun = fn -> wait_and_send(parent, :done) end
{:ok, pid} = Task.start_link(fun)
{:links, links} = Process.info(self(), :links)
assert pid in links
receive do: (:ready -> :ok)
{:name, fun_name} = Function.info(fun, :name)
assert {__MODULE__, fun_name, 0} === :proc_lib.translate_initial_call(pid)
send(pid, true)
assert_receive :done
end
test "start_link/3" do
{:ok, pid} = Task.start_link(__MODULE__, :wait_and_send, [self(), :done])
{:links, links} = Process.info(self(), :links)
assert pid in links
receive do: (:ready -> :ok)
assert {__MODULE__, :wait_and_send, 2} === :proc_lib.translate_initial_call(pid)
send(pid, true)
assert_receive :done
end
test "start_link with $callers" do
grandparent = self()
Task.start_link(fn ->
parent = self()
assert Process.get(:"$callers") == [grandparent]
Task.start_link(fn ->
assert Process.get(:"$callers") == [parent, grandparent]
send(grandparent, :done)
end)
end)
assert_receive :done
end
# Task.await/2 exit semantics: timeouts, task exits/errors, node-down
# monitor messages, and the owner-only query contract.
describe "await/2" do
test "exits on timeout" do
# pid: nil is fine — await only consumes the reply/:DOWN for task.ref
task = %Task{ref: make_ref(), owner: self(), pid: nil}
assert catch_exit(Task.await(task, 0)) == {:timeout, {Task, :await, [task, 0]}}
end
test "exits on normal exit" do
task = Task.async(fn -> exit(:normal) end)
assert catch_exit(Task.await(task)) == {:normal, {Task, :await, [task, 5000]}}
end
test "exits on task throw" do
Process.flag(:trap_exit, true)
task = Task.async(fn -> throw(:unknown) end)
# an uncaught throw surfaces as {:nocatch, term} in the exit reason
assert {{{:nocatch, :unknown}, _}, {Task, :await, [^task, 5000]}} =
catch_exit(Task.await(task))
end
test "exits on task error" do
Process.flag(:trap_exit, true)
task = Task.async(fn -> raise "oops" end)
assert {{%RuntimeError{}, _}, {Task, :await, [^task, 5000]}} = catch_exit(Task.await(task))
end
@compile {:no_warn_undefined, :module_does_not_exist}
test "exits on task undef module error" do
Process.flag(:trap_exit, true)
task = Task.async(&:module_does_not_exist.undef/0)
assert {exit_status, mfa} = catch_exit(Task.await(task))
assert {:undef, [{:module_does_not_exist, :undef, _, _} | _]} = exit_status
assert {Task, :await, [^task, 5000]} = mfa
end
@compile {:no_warn_undefined, {TaskTest, :undef, 0}}
test "exits on task undef function error" do
Process.flag(:trap_exit, true)
task = Task.async(&TaskTest.undef/0)
assert {{:undef, [{TaskTest, :undef, _, _} | _]}, {Task, :await, [^task, 5000]}} =
catch_exit(Task.await(task))
end
test "exits on task exit" do
Process.flag(:trap_exit, true)
task = Task.async(fn -> exit(:unknown) end)
assert {:unknown, {Task, :await, [^task, 5000]}} = catch_exit(Task.await(task))
end
test "exits on :noconnection" do
# fake the monitor :DOWN message await would receive on a net split
ref = make_ref()
task = %Task{ref: ref, pid: self(), owner: self()}
send(self(), {:DOWN, ref, :process, self(), :noconnection})
assert catch_exit(Task.await(task)) |> elem(0) == {:nodedown, :nonode@nohost}
end
test "exits on :noconnection from named monitor" do
ref = make_ref()
task = %Task{ref: ref, owner: self(), pid: nil}
# {:name, :node} mimics a monitor on a registered name at a remote node
send(self(), {:DOWN, ref, :process, {:name, :node}, :noconnection})
assert catch_exit(Task.await(task)) |> elem(0) == {:nodedown, :node}
end
test "raises when invoked from a non-owner process" do
task = create_task_in_other_process()
message =
"task #{inspect(task)} must be queried from the owner " <>
"but was queried from #{inspect(self())}"
assert_raise ArgumentError, message, fn -> Task.await(task, 1) end
end
end
# Task.await_many/2: result ordering, message hygiene, and fail-fast exit
# behavior when any task in the list fails.
describe "await_many/2" do
test "returns list of replies" do
tasks = for val <- [1, 3, 9], do: Task.async(fn -> val end)
assert Task.await_many(tasks) == [1, 3, 9]
end
test "returns replies in input order ignoring response order" do
# replies are delivered out of order; results must follow input order
refs = [ref_1 = make_ref(), ref_2 = make_ref(), ref_3 = make_ref()]
tasks = Enum.map(refs, fn ref -> %Task{ref: ref, owner: self(), pid: nil} end)
send(self(), {ref_2, 3})
send(self(), {ref_3, 9})
send(self(), {ref_1, 1})
assert Task.await_many(tasks) == [1, 3, 9]
end
test "returns an empty list immediately" do
assert Task.await_many([]) == []
end
test "ignores messages from other processes" do
# unrelated messages must be left untouched in the mailbox
other_ref = make_ref()
tasks = for val <- [:a, :b], do: Task.async(fn -> val end)
send(self(), other_ref)
send(self(), {other_ref, :z})
send(self(), {:DOWN, other_ref, :process, 1, :goodbye})
assert Task.await_many(tasks) == [:a, :b]
assert_received ^other_ref
assert_received {^other_ref, :z}
assert_received {:DOWN, ^other_ref, :process, 1, :goodbye}
end
test "ignores additional messages after reply" do
refs = [ref_1 = make_ref(), ref_2 = make_ref()]
tasks = Enum.map(refs, fn ref -> %Task{ref: ref, owner: self(), pid: nil} end)
send(self(), {ref_2, :b})
send(self(), {ref_2, :other})
send(self(), {ref_1, :a})
assert Task.await_many(tasks) == [:a, :b]
# only the first reply per ref is consumed
assert_received {^ref_2, :other}
end
test "exits on timeout" do
tasks = [Task.async(fn -> Process.sleep(:infinity) end)]
assert catch_exit(Task.await_many(tasks, 0)) == {:timeout, {Task, :await_many, [tasks, 0]}}
end
test "exits with same reason when task exits" do
tasks = [Task.async(fn -> exit(:normal) end)]
assert catch_exit(Task.await_many(tasks)) == {:normal, {Task, :await_many, [tasks, 5000]}}
end
test "exits immediately when any task exits" do
tasks = [
Task.async(fn -> Process.sleep(:infinity) end),
Task.async(fn -> exit(:normal) end)
]
assert catch_exit(Task.await_many(tasks)) == {:normal, {Task, :await_many, [tasks, 5000]}}
end
test "exits immediately when any task crashes" do
Process.flag(:trap_exit, true)
tasks = [
Task.async(fn -> Process.sleep(:infinity) end),
Task.async(fn -> exit(:unknown) end)
]
assert catch_exit(Task.await_many(tasks)) == {:unknown, {Task, :await_many, [tasks, 5000]}}
# Make sure all monitors are cleared up afterwards too
Enum.each(tasks, &Process.exit(&1.pid, :kill))
refute_received {:DOWN, _, _, _, _}
end
test "exits immediately when any task throws" do
Process.flag(:trap_exit, true)
tasks = [
Task.async(fn -> Process.sleep(:infinity) end),
Task.async(fn -> throw(:unknown) end)
]
assert {{{:nocatch, :unknown}, _}, {Task, :await_many, [^tasks, 5000]}} =
catch_exit(Task.await_many(tasks))
end
test "exits immediately on any task error" do
Process.flag(:trap_exit, true)
tasks = [
Task.async(fn -> Process.sleep(:infinity) end),
Task.async(fn -> raise "oops" end)
]
assert {{%RuntimeError{}, _}, {Task, :await_many, [^tasks, 5000]}} =
catch_exit(Task.await_many(tasks))
end
test "exits immediately on :noconnection" do
# fake a node-down :DOWN for the second (hand-built) task
tasks = [
Task.async(fn -> Process.sleep(:infinity) end),
%Task{ref: ref = make_ref(), owner: self(), pid: self()}
]
send(self(), {:DOWN, ref, :process, self(), :noconnection})
assert catch_exit(Task.await_many(tasks)) |> elem(0) == {:nodedown, :nonode@nohost}
end
test "exits immediately on :noconnection from named monitor" do
tasks = [
Task.async(fn -> Process.sleep(:infinity) end),
%Task{ref: ref = make_ref(), owner: self(), pid: nil}
]
send(self(), {:DOWN, ref, :process, {:name, :node}, :noconnection})
assert catch_exit(Task.await_many(tasks)) |> elem(0) == {:nodedown, :node}
end
test "raises when invoked from a non-owner process" do
tasks = [
Task.async(fn -> Process.sleep(:infinity) end),
bad_task = create_task_in_other_process()
]
message =
"task #{inspect(bad_task)} must be queried from the owner " <>
"but was queried from #{inspect(self())}"
assert_raise ArgumentError, message, fn -> Task.await_many(tasks, 1) end
end
end
# Task.yield/2: non-raising polling of a task — {:ok, result}, {:exit, reason},
# or nil on timeout; still exits hard on node-down monitor messages.
describe "yield/2" do
  test "returns {:ok, result} when reply and :DOWN in message queue" do
    task = %Task{ref: make_ref(), owner: self(), pid: nil}
    send(self(), {task.ref, :result})
    send(self(), {:DOWN, task.ref, :process, self(), :abnormal})
    assert Task.yield(task, 0) == {:ok, :result}
    # the paired :DOWN must be consumed alongside the reply
    refute_received {:DOWN, _, _, _, _}
  end

  test "returns nil on timeout" do
    task = %Task{ref: make_ref(), pid: nil, owner: self()}
    assert Task.yield(task, 0) == nil
  end

  test "return exit on normal exit" do
    task = Task.async(fn -> exit(:normal) end)
    assert Task.yield(task) == {:exit, :normal}
  end

  test "exits on :noconnection" do
    ref = make_ref()
    task = %Task{ref: ref, pid: self(), owner: self()}
    # Fix: the third tuple element of a monitor :DOWN message is the atom
    # :process (as in every sibling test), not the monitoring pid.
    send(self(), {:DOWN, ref, :process, self(), :noconnection})
    assert catch_exit(Task.yield(task)) |> elem(0) == {:nodedown, :nonode@nohost}
  end

  test "raises when invoked from a non-owner process" do
    task = create_task_in_other_process()

    message =
      "task #{inspect(task)} must be queried from the owner " <>
        "but was queried from #{inspect(self())}"

    assert_raise ArgumentError, message, fn -> Task.yield(task, 1) end
  end
end
# Task.yield_many/2: list form of yield/2 — each task pairs with
# {:ok, _}, {:exit, _} or nil, preserving input order.
describe "yield_many/2" do
test "returns {:ok, result} when reply and :DOWN in message queue" do
task = %Task{ref: make_ref(), owner: self(), pid: nil}
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, self(), :abnormal})
assert Task.yield_many([task], 0) == [{task, {:ok, :result}}]
refute_received {:DOWN, _, _, _, _}
end
test "returns nil on timeout" do
task = %Task{ref: make_ref(), owner: self(), pid: nil}
assert Task.yield_many([task], 0) == [{task, nil}]
end
test "return exit on normal exit" do
task = Task.async(fn -> exit(:normal) end)
assert Task.yield_many([task]) == [{task, {:exit, :normal}}]
end
test "exits on :noconnection" do
ref = make_ref()
task = %Task{ref: ref, pid: self(), owner: self()}
send(self(), {:DOWN, ref, :process, self(), :noconnection})
assert catch_exit(Task.yield_many([task])) |> elem(0) == {:nodedown, :nonode@nohost}
end
test "raises when invoked from a non-owner process" do
task = create_task_in_other_process()
message =
"task #{inspect(task)} must be queried from the owner " <>
"but was queried from #{inspect(self())}"
assert_raise ArgumentError, message, fn -> Task.yield_many([task], 1) end
end
test "returns results from multiple tasks" do
# task1 replied, task2 still pending, task3 exited normally
task1 = %Task{ref: make_ref(), owner: self(), pid: nil}
task2 = %Task{ref: make_ref(), owner: self(), pid: nil}
task3 = %Task{ref: make_ref(), owner: self(), pid: nil}
send(self(), {task1.ref, :result})
send(self(), {:DOWN, task3.ref, :process, self(), :normal})
assert Task.yield_many([task1, task2, task3], 0) ==
[{task1, {:ok, :result}}, {task2, nil}, {task3, {:exit, :normal}}]
end
test "returns results on infinity timeout" do
task1 = %Task{ref: make_ref(), owner: self(), pid: nil}
task2 = %Task{ref: make_ref(), owner: self(), pid: nil}
task3 = %Task{ref: make_ref(), owner: self(), pid: nil}
send(self(), {task1.ref, :result})
send(self(), {task2.ref, :result})
send(self(), {:DOWN, task3.ref, :process, self(), :normal})
assert Task.yield_many([task1, task2, task3], :infinity) ==
[{task1, {:ok, :result}}, {task2, {:ok, :result}}, {task3, {:exit, :normal}}]
end
end
# Task.shutdown/2 (default :shutdown strategy): reply consumption, exit
# reason classification, and validation of owner/pid invariants.
# create_dummy_task/1 is a module helper building a task whose :DOWN
# reason is the given atom.
describe "shutdown/2" do
test "returns {:ok, result} when reply and abnormal :DOWN in message queue" do
task = create_dummy_task(:abnormal)
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal})
assert Task.shutdown(task) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "returns {:ok, result} when reply and normal :DOWN in message queue" do
task = create_dummy_task(:normal)
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :normal})
assert Task.shutdown(task) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "returns {:ok, result} when reply and shut down :DOWN in message queue" do
task = create_dummy_task(:shutdown)
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown})
assert Task.shutdown(task) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "returns nil on shutting down task" do
task = Task.async(:timer, :sleep, [:infinity])
# no reply yet and a clean :shutdown -> nil
assert Task.shutdown(task) == nil
end
test "returns exit on abnormal :DOWN in message queue" do
task = create_dummy_task(:abnormal)
send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal})
assert Task.shutdown(task) == {:exit, :abnormal}
end
test "returns exit on normal :DOWN in message queue" do
task = create_dummy_task(:normal)
send(self(), {:DOWN, task.ref, :process, task.pid, :normal})
assert Task.shutdown(task) == {:exit, :normal}
end
test "returns nil on shutdown :DOWN in message queue" do
# a :shutdown reason is the expected outcome, not an error
task = create_dummy_task(:shutdown)
send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown})
assert Task.shutdown(task) == nil
end
test "returns exit on killed :DOWN in message queue" do
task = create_dummy_task(:killed)
send(self(), {:DOWN, task.ref, :process, task.pid, :killed})
assert Task.shutdown(task) == {:exit, :killed}
end
test "exits on noconnection :DOWN in message queue" do
task = create_dummy_task(:noconnection)
send(self(), {:DOWN, task.ref, :process, task.pid, :noconnection})
assert catch_exit(Task.shutdown(task)) ==
{{:nodedown, node()}, {Task, :shutdown, [task, 5000]}}
end
test "raises if task PID is nil" do
task = %Task{ref: make_ref(), owner: nil, pid: nil}
message = "task #{inspect(task)} does not have an associated task process"
assert_raise ArgumentError, message, fn -> Task.shutdown(task) end
end
test "raises when invoked from a non-owner process" do
task = create_task_in_other_process()
message =
"task #{inspect(task)} must be queried from the owner " <>
"but was queried from #{inspect(self())}"
assert_raise ArgumentError, message, fn -> Task.shutdown(task) end
end
test "returns nil on killing task" do
caller = self()
# trap exits so the task survives the :shutdown exit signal until killed
task =
Task.async(fn ->
Process.flag(:trap_exit, true)
wait_and_send(caller, :ready)
Process.sleep(:infinity)
end)
receive do: (:ready -> :ok)
assert Task.shutdown(task, :brutal_kill) == nil
refute_received {:DOWN, _, _, _, _}
end
test "returns {:exit, :noproc} if task handled" do
task = create_dummy_task(:noproc)
assert Task.shutdown(task) == {:exit, :noproc}
end
end
# Task.shutdown/2 with :brutal_kill: same reply handling as the default
# strategy, but :killed is the expected reason (-> nil) and :shutdown is
# reported as an exit.
describe "shutdown/2 with :brutal_kill" do
test "returns {:ok, result} when reply and abnormal :DOWN in message queue" do
task = create_dummy_task(:abnormal)
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal})
assert Task.shutdown(task, :brutal_kill) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "returns {:ok, result} when reply and normal :DOWN in message queue" do
task = create_dummy_task(:normal)
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :normal})
assert Task.shutdown(task, :brutal_kill) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "returns {:ok, result} when reply and shut down :DOWN in message queue" do
task = create_dummy_task(:shutdown)
send(self(), {task.ref, :result})
send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown})
assert Task.shutdown(task, :brutal_kill) == {:ok, :result}
refute_received {:DOWN, _, _, _, _}
end
test "returns nil on killed :DOWN in message queue" do
# :killed is the expected result of a brutal kill
task = create_dummy_task(:killed)
send(self(), {:DOWN, task.ref, :process, task.pid, :killed})
assert Task.shutdown(task, :brutal_kill) == nil
end
test "returns exit on abnormal :DOWN in message queue" do
task = create_dummy_task(:abnormal)
send(self(), {:DOWN, task.ref, :process, task.pid, :abnormal})
assert Task.shutdown(task, :brutal_kill) == {:exit, :abnormal}
end
test "returns exit on normal :DOWN in message queue" do
task = create_dummy_task(:normal)
send(self(), {:DOWN, task.ref, :process, task.pid, :normal})
assert Task.shutdown(task, :brutal_kill) == {:exit, :normal}
end
test "returns exit on shutdown :DOWN in message queue" do
# under :brutal_kill a :shutdown reason is NOT the expected outcome
task = create_dummy_task(:shutdown)
send(self(), {:DOWN, task.ref, :process, task.pid, :shutdown})
assert Task.shutdown(task, :brutal_kill) == {:exit, :shutdown}
end
test "exits on noconnection :DOWN in message queue" do
task = create_dummy_task(:noconnection)
send(self(), {:DOWN, task.ref, :process, task.pid, :noconnection})
assert catch_exit(Task.shutdown(task, :brutal_kill)) ==
{{:nodedown, node()}, {Task, :shutdown, [task, :brutal_kill]}}
end
test "returns exit on killing task after shutdown timeout" do
caller = self()
# task traps exits so the graceful shutdown times out and escalates to kill
task =
Task.async(fn ->
Process.flag(:trap_exit, true)
wait_and_send(caller, :ready)
Process.sleep(:infinity)
end)
receive do: (:ready -> :ok)
assert Task.shutdown(task, 1) == {:exit, :killed}
end
test "returns {:exit, :noproc} if task handled" do
task = create_dummy_task(:noproc)
assert Task.shutdown(task, :brutal_kill) == {:exit, :noproc}
end
end
# Task.async_stream/2 behavior independent of max_concurrency: timeouts,
# ordered: false, :kill_task, spoofed :DOWN messages, and $callers.
describe "async_stream/2" do
test "timeout" do
assert catch_exit([:infinity] |> Task.async_stream(&sleep/1, timeout: 0) |> Enum.to_list()) ==
{:timeout, {Task.Supervised, :stream, [0]}}
# no stray messages may leak into the caller's mailbox
refute_received _
end
test "streams an enumerable with ordered: false" do
# max_concurrency: 1 serializes, so completion order follows input order
opts = [max_concurrency: 1, ordered: false]
assert 4..1
|> Task.async_stream(&sleep(&1 * 100), opts)
|> Enum.to_list() == [ok: 400, ok: 300, ok: 200, ok: 100]
# with full concurrency, results arrive shortest-sleep first
opts = [max_concurrency: 4, ordered: false]
assert 4..1
|> Task.async_stream(&sleep(&1 * 100), opts)
|> Enum.to_list() == [ok: 100, ok: 200, ok: 300, ok: 400]
end
test "streams an enumerable with ordered: false, on_timeout: :kill_task" do
opts = [max_concurrency: 4, ordered: false, on_timeout: :kill_task, timeout: 50]
assert [100, 1, 100, 1]
|> Task.async_stream(&sleep/1, opts)
|> Enum.to_list() == [ok: 1, ok: 1, exit: :timeout, exit: :timeout]
refute_received _
end
test "streams an enumerable with infinite timeout" do
[ok: :ok] = Task.async_stream([1], fn _ -> :ok end, timeout: :infinity) |> Enum.to_list()
end
test "streams with fake down messages on the inbox" do
# spoofed :DOWN messages with unknown refs must not confuse the stream
parent = self()
assert Task.async_stream([:ok], fn :ok ->
{:links, links} = Process.info(self(), :links)
for link <- links do
send(link, {:DOWN, make_ref(), :process, parent, :oops})
end
:ok
end)
|> Enum.to_list() == [ok: :ok]
end
test "with $callers" do
grandparent = self()
Task.async_stream([1], fn 1 ->
parent = self()
assert Process.get(:"$callers") == [grandparent]
Task.async_stream([1], fn 1 ->
assert Process.get(:"$callers") == [parent, grandparent]
send(grandparent, :done)
end)
|> Stream.run()
end)
|> Stream.run()
assert_receive :done
end
end
# Compile-time matrix: the same describe block is generated three times with
# max_concurrency equal to (4), below (2), and above (8) the input size.
for {desc, concurrency} <- [==: 4, <: 2, >: 8] do
describe "async_stream with max_concurrency #{desc} tasks" do
@opts [max_concurrency: concurrency]
test "streams an enumerable with fun" do
assert 1..4
|> Task.async_stream(&sleep/1, @opts)
|> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
end
test "streams an enumerable with mfa" do
assert 1..4
|> Task.async_stream(__MODULE__, :sleep, [], @opts)
|> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
end
test "streams an enumerable without leaking tasks" do
assert 1..4
|> Task.async_stream(&sleep/1, @opts)
|> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
refute_received _
end
test "streams an enumerable with slowest first" do
Process.flag(:trap_exit, true)
# ordered (default) output keeps input order even when later items finish first
assert 4..1
|> Task.async_stream(&sleep/1, @opts)
|> Enum.to_list() == [ok: 4, ok: 3, ok: 2, ok: 1]
end
test "streams an enumerable with exits" do
Process.flag(:trap_exit, true)
assert 1..4
|> Task.async_stream(&exit/1, @opts)
|> Enum.to_list() == [exit: 1, exit: 2, exit: 3, exit: 4]
refute_received {:EXIT, _, _}
end
test "shuts down unused tasks" do
assert [0, :infinity, :infinity, :infinity]
|> Task.async_stream(&sleep/1, @opts)
|> Enum.take(1) == [ok: 0]
# all task links must be gone once the stream is abandoned
assert Process.info(self(), :links) == {:links, []}
end
test "shuts down unused tasks without leaking messages" do
assert [0, :infinity, :infinity, :infinity]
|> Task.async_stream(&sleep/1, @opts)
|> Enum.take(1) == [ok: 0]
refute_received _
end
test "is zippable on success" do
# zipping enumerates the stream twice; each pass must work independently
task = 1..4 |> Task.async_stream(&sleep/1, @opts) |> Stream.map(&elem(&1, 1))
assert Enum.zip(task, task) == [{1, 1}, {2, 2}, {3, 3}, {4, 4}]
end
test "is zippable on failure" do
Process.flag(:trap_exit, true)
task = 1..4 |> Task.async_stream(&exit/1, @opts) |> Stream.map(&elem(&1, 1))
assert Enum.zip(task, task) == [{1, 1}, {2, 2}, {3, 3}, {4, 4}]
end
test "is zippable with slowest first" do
task = 4..1 |> Task.async_stream(&sleep/1, @opts) |> Stream.map(&elem(&1, 1))
assert Enum.zip(task, task) == [{4, 4}, {3, 3}, {2, 2}, {1, 1}]
end
test "with inner halt on success" do
assert 1..8
|> Stream.take(4)
|> Task.async_stream(&sleep/1, @opts)
|> Enum.to_list() == [ok: 1, ok: 2, ok: 3, ok: 4]
end
test "with inner halt on failure" do
Process.flag(:trap_exit, true)
assert 1..8
|> Stream.take(4)
|> Task.async_stream(&exit/1, @opts)
|> Enum.to_list() == [exit: 1, exit: 2, exit: 3, exit: 4]
end
test "with inner halt and slowest first" do
assert 8..1
|> Stream.take(4)
|> Task.async_stream(&sleep/1, @opts)
|> Enum.to_list() == [ok: 8, ok: 7, ok: 6, ok: 5]
end
test "with outer halt on success" do
assert 1..8
|> Task.async_stream(&sleep/1, @opts)
|> Enum.take(4) == [ok: 1, ok: 2, ok: 3, ok: 4]
end
test "with outer halt on failure" do
Process.flag(:trap_exit, true)
assert 1..8
|> Task.async_stream(&exit/1, @opts)
|> Enum.take(4) == [exit: 1, exit: 2, exit: 3, exit: 4]
end
test "with outer halt and slowest first" do
assert 8..1
|> Task.async_stream(&sleep/1, @opts)
|> Enum.take(4) == [ok: 8, ok: 7, ok: 6, ok: 5]
end
test "terminates inner effect" do
# the after-fun of a downstream Stream.transform must still run
stream =
1..4
|> Task.async_stream(&sleep/1, @opts)
|> Stream.transform(fn -> :ok end, fn x, acc -> {[x], acc} end, fn _ ->
Process.put(:stream_transform, true)
end)
Process.put(:stream_transform, false)
assert Enum.to_list(stream) == [ok: 1, ok: 2, ok: 3, ok: 4]
assert Process.get(:stream_transform)
end
test "terminates outer effect" do
# same, when the transform sits upstream of async_stream
stream =
1..4
|> Stream.transform(fn -> :ok end, fn x, acc -> {[x], acc} end, fn _ ->
Process.put(:stream_transform, true)
end)
|> Task.async_stream(&sleep/1, @opts)
Process.put(:stream_transform, false)
assert Enum.to_list(stream) == [ok: 1, ok: 2, ok: 3, ok: 4]
assert Process.get(:stream_transform)
end
test "with :on_timeout set to :kill_task" do
opts = Keyword.merge(@opts, on_timeout: :kill_task, timeout: 50)
assert [100, 1, 100, 1]
|> Task.async_stream(&sleep/1, opts)
|> Enum.to_list() == [exit: :timeout, ok: 1, exit: :timeout, ok: 1]
refute_received _
end
end
end
end
| 32.215622
| 100
| 0.582298
|
9e38dd61f06ed97503299963f3095ae80c9c28d3
| 1,371
|
ex
|
Elixir
|
lib/slack/rtm.ex
|
motionless/Elixir-Slack
|
7a2e8fc889fd60fdb26e0c6c6cb3ec3d7704597e
|
[
"MIT"
] | null | null | null |
lib/slack/rtm.ex
|
motionless/Elixir-Slack
|
7a2e8fc889fd60fdb26e0c6c6cb3ec3d7704597e
|
[
"MIT"
] | null | null | null |
lib/slack/rtm.ex
|
motionless/Elixir-Slack
|
7a2e8fc889fd60fdb26e0c6c6cb3ec3d7704597e
|
[
"MIT"
] | null | null | null |
defmodule Slack.JsonDecodeError do
  @moduledoc false
  # Raised/returned when the Slack RTM response body cannot be decoded as JSON.
  # :reason - decode failure detail, :string - the offending payload.
  defexception [:reason, :string]

  # Fix: the decoding is done with Jason (see Slack.Rtm.handle_response/1),
  # so the error message should not blame Poison.
  def message(%Slack.JsonDecodeError{reason: reason, string: string}) do
    "Jason could not decode string for reason: `:#{reason}`, string given:\n#{string}"
  end
end
defmodule Slack.Rtm do
  @moduledoc false

  # Requests an RTM connection URL from the Slack API for the given token.
  # Returns {:ok, json_map} on success, {:error, reason} otherwise.
  #
  # Fix: the previous `with`/`<-` clauses matched plain bindings that can
  # never fail, which is a `with` anti-pattern — plain assignments suffice.
  def start(token) do
    url = slack_url(token)

    headers = [
      Authorization: "Bearer #{token}",
      "Content-type": "application/x-www-form-urlencoded"
    ]

    options = Application.get_env(:slack, :web_http_client_opts, [])

    url
    |> HTTPoison.get(headers, options)
    |> handle_response()
  end

  # Decodes the HTTP response body and classifies the Slack reply.
  # Fix: uses the non-raising Jason.decode/2 instead of decode!/2 + rescue,
  # so the error path is explicit rather than exception-driven. Also fixes
  # the unbalanced backtick in the API error message.
  # NOTE(review): `keys: :atoms` creates atoms from an external payload;
  # Slack responses are trusted/bounded here, but `keys: :atoms!` would be
  # safer if all expected keys already exist as atoms.
  defp handle_response({:ok, %HTTPoison.Response{body: body}}) do
    case Jason.decode(body, keys: :atoms) do
      {:ok, %{ok: true} = json} ->
        {:ok, json}

      {:ok, %{error: reason}} ->
        {:error, "Slack API returned an error `#{reason}`.\n Response: #{body}"}

      {:ok, _} ->
        {:error, "Invalid RTM response"}

      {:error, %Jason.DecodeError{data: reason}} ->
        {:error, %Slack.JsonDecodeError{reason: reason, string: body}}
    end
  end

  # Transport-level failures ({:error, %HTTPoison.Error{}}) pass through.
  defp handle_response(error), do: error

  # Base URL is configurable (e.g. for tests); the token travels in the
  # Authorization header, not the URL.
  defp slack_url(_token) do
    Application.get_env(:slack, :url, "https://slack.com") <>
      "/api/rtm.connect?batch_presence_aware=true&presence_sub=true"
  end
end
| 28.5625
| 110
| 0.646244
|
9e390c130ad427d7248efbb115bf36d396b50715
| 18,233
|
ex
|
Elixir
|
lib/phoenix/channel.ex
|
jshahs/magiclogo
|
5b18cb4efff04c3547d167bab194d2806ec20d8a
|
[
"MIT"
] | null | null | null |
lib/phoenix/channel.ex
|
jshahs/magiclogo
|
5b18cb4efff04c3547d167bab194d2806ec20d8a
|
[
"MIT"
] | null | null | null |
lib/phoenix/channel.ex
|
jshahs/magiclogo
|
5b18cb4efff04c3547d167bab194d2806ec20d8a
|
[
"MIT"
] | null | null | null |
defmodule Phoenix.Channel do
@moduledoc ~S"""
Defines a Phoenix Channel.
Channels provide a means for bidirectional communication from clients that
integrate with the `Phoenix.PubSub` layer for soft-realtime functionality.
## Topics & Callbacks
Every time you join a channel, you need to choose which particular topic you
want to listen to. The topic is just an identifier, but by convention it is
often made of two parts: `"topic:subtopic"`. Using the `"topic:subtopic"`
approach pairs nicely with the `Phoenix.Socket.channel/2` allowing you to
match on all topics starting with a given prefix:
channel "room:*", MyApp.RoomChannel
Any topic coming into the router with the `"room:"` prefix would dispatch
to `MyApp.RoomChannel` in the above example. Topics can also be pattern
matched in your channels' `join/3` callback to pluck out the scoped pattern:
# handles the special `"lobby"` subtopic
def join("room:lobby", _auth_message, socket) do
{:ok, socket}
end
# handles any other subtopic as the room ID, for example `"room:12"`, `"room:34"`
def join("room:" <> room_id, auth_message, socket) do
{:ok, socket}
end
## Authorization
Clients must join a channel to send and receive PubSub events on that channel.
Your channels must implement a `join/3` callback that authorizes the socket
for the given topic. For example, you could check if the user is allowed to
join that particular room.
To authorize a socket in `join/3`, return `{:ok, socket}`.
To refuse authorization in `join/3`, return `{:error, reply}`.
## Incoming Events
After a client has successfully joined a channel, incoming events from the
client are routed through the channel's `handle_in/3` callbacks. Within these
callbacks, you can perform any action. Typically you'll either forward a
message to all listeners with `broadcast!/3`, or push a message directly down
the socket with `push/3`. Incoming callbacks must return the `socket` to
maintain ephemeral state.
Here's an example of receiving an incoming `"new_msg"` event from one client,
and broadcasting the message to all topic subscribers for this socket.
def handle_in("new_msg", %{"uid" => uid, "body" => body}, socket) do
broadcast! socket, "new_msg", %{uid: uid, body: body}
{:noreply, socket}
end
You can also push a message directly down the socket:
# client asks for their current rank, push sent directly as a new event.
def handle_in("current_rank", socket) do
push socket, "current_rank", %{val: Game.get_rank(socket.assigns[:user])}
{:noreply, socket}
end
## Replies
In addition to pushing messages out when you receive a `handle_in` event,
you can also reply directly to a client event for request/response style
messaging. This is useful when a client must know the result of an operation
or to simply ack messages.
For example, imagine creating a resource and replying with the created record:
def handle_in("create:post", attrs, socket) do
changeset = Post.changeset(%Post{}, attrs)
if changeset.valid? do
Repo.insert!(changeset)
{:reply, {:ok, changeset}, socket}
else
{:reply, {:error, MyApp.ChangesetView.render("errors.json",
%{changeset: changeset})}, socket}
end
end
Alternatively, you may just want to ack the status of the operation:
def handle_in("create:post", attrs, socket) do
changeset = Post.changeset(%Post{}, attrs)
if changeset.valid? do
Repo.insert!(changeset)
{:reply, :ok, socket}
else
{:reply, :error, socket}
end
end
## Intercepting Outgoing Events
When an event is broadcasted with `broadcast/3`, each channel subscriber can
choose to intercept the event and have their `handle_out/3` callback triggered.
This allows the event's payload to be customized on a socket by socket basis
to append extra information, or conditionally filter the message from being
delivered. If the event is not intercepted with `Phoenix.Channel.intercept/1`,
then the message is pushed directly to the client:
intercept ["new_msg", "user_joined"]
# for every socket subscribing to this topic, append an `is_editable`
# value for client metadata.
def handle_out("new_msg", msg, socket) do
push socket, "new_msg", Map.merge(msg,
%{is_editable: User.can_edit_message?(socket.assigns[:user], msg)}
)
{:noreply, socket}
end
# do not send broadcasted `"user_joined"` events if this socket's user
# is ignoring the user who joined.
def handle_out("user_joined", msg, socket) do
unless User.ignoring?(socket.assigns[:user], msg.user_id) do
push socket, "user_joined", msg
end
{:noreply, socket}
end
## Broadcasting to an external topic
In some cases, you will want to broadcast messages without the context of
a `socket`. This could be for broadcasting from within your channel to an
external topic, or broadcasting from elsewhere in your application like a
controller or another process. Such can be done via your endpoint:
# within channel
def handle_in("new_msg", %{"uid" => uid, "body" => body}, socket) do
...
broadcast_from! socket, "new_msg", %{uid: uid, body: body}
MyApp.Endpoint.broadcast_from! self(), "room:superadmin",
"new_msg", %{uid: uid, body: body}
{:noreply, socket}
end
# within controller
def create(conn, params) do
...
MyApp.Endpoint.broadcast! "room:" <> rid, "new_msg", %{uid: uid, body: body}
MyApp.Endpoint.broadcast! "room:superadmin", "new_msg", %{uid: uid, body: body}
redirect conn, to: "/"
end
## Terminate
On termination, the channel callback `terminate/2` will be invoked with
the error reason and the socket.
If we are terminating because the client left, the reason will be
`{:shutdown, :left}`. Similarly, if we are terminating because the
client connection was closed, the reason will be `{:shutdown, :closed}`.
If any of the callbacks return a `:stop` tuple, it will also
trigger terminate with the reason given in the tuple.
`terminate/2`, however, won't be invoked in case of errors nor in
case of exits. This is the same behaviour as you find in Elixir
abstractions like `GenServer` and others. Typically speaking, if you
want to clean something up, it is better to monitor your channel
process and do the clean up from another process. Similar to GenServer,
it would also be possible `:trap_exit` to guarantee that `terminate/2`
is invoked. This practice is not encouraged though.
## Exit reasons when stopping a channel
When the channel callbacks return a `:stop` tuple, such as:
{:stop, :shutdown, socket}
{:stop, {:error, :enoent}, socket}
the second argument is the exit reason, which follows the same behaviour as
standard `GenServer` exits.
You have three options to choose from when shutting down a channel:
* `:normal` - in such cases, the exit won't be logged, there is no restart
in transient mode, and linked processes do not exit
* `:shutdown` or `{:shutdown, term}` - in such cases, the exit won't be
logged, there is no restart in transient mode, and linked processes exit
with the same reason unless they're trapping exits
* any other term - in such cases, the exit will be logged, there are
restarts in transient mode, and linked processes exit with the same reason
unless they're trapping exits
## Subscribing to external topics
Sometimes you may need to programmatically subscribe a socket to external
topics in addition to the internal `socket.topic`. For example,
imagine you have a bidding system where a remote client dynamically sets
preferences on products they want to receive bidding notifications on.
Instead of requiring a unique channel process and topic per
preference, a more efficient and simple approach would be to subscribe a
single channel to relevant notifications via your endpoint. For example:
defmodule MyApp.Endpoint.NotificationChannel do
use Phoenix.Channel
def join("notification:" <> user_id, %{"ids" => ids}, socket) do
topics = for product_id <- ids, do: "product:#{product_id}"
{:ok, socket
|> assign(:topics, [])
|> put_new_topics(topics)}
end
def handle_in("watch", %{"product_id" => id}, socket) do
{:reply, :ok, put_new_topics(socket, ["product:#{id}"])}
end
def handle_in("unwatch", %{"product_id" => id}, socket) do
{:reply, :ok, MyApp.Endpoint.unsubscribe("product:#{id}")}
end
defp put_new_topics(socket, topics) do
Enum.reduce(topics, socket, fn topic, acc ->
topics = acc.assigns.topics
if topic in topics do
acc
else
:ok = MyApp.Endpoint.subscribe(topic)
assign(acc, :topics, [topic | topics])
end
end)
end
end
Note: the caller must be responsible for preventing duplicate subscriptions.
After calling `subscribe/1` from your endpoint, the same flow applies to
handling regular Elixir messages within your channel. Most often, you'll
simply relay the `%Phoenix.Socket.Broadcast{}` event and payload:
alias Phoenix.Socket.Broadcast
def handle_info(%Broadcast{topic: _, event: ev, payload: payload}, socket) do
push socket, ev, payload
{:noreply, socket}
end
## Logging
By default, channel `"join"` and `"handle_in"` events are logged, using
the level `:info` and `:debug`, respectively. Logs can be customized per
event type or disabled by setting the `:log_join` and `:log_handle_in`
options when using `Phoenix.Channel`. For example, the following
configuration logs join events as `:info`, but disables logging for
incoming events:
use Phoenix.Channel, log_join: :info, log_handle_in: false
"""
alias Phoenix.Socket
alias Phoenix.Channel.Server

# A reply is either a bare status atom or a status plus a response payload.
@type reply :: status :: atom | {status :: atom, response :: map}

# Fix: `Pid` in a typespec denotes the atom alias `Elixir.Pid`, not a process
# identifier — the builtin type is lowercase `pid`.
@type socket_ref :: {transport_pid :: pid, serializer :: module,
                     topic :: binary, ref :: binary}

@callback code_change(old_vsn, Socket.t, extra :: term) ::
            {:ok, Socket.t} |
            {:error, reason :: term} when old_vsn: term | {:down, term}

@callback join(topic :: binary, auth_msg :: map, Socket.t) ::
            {:ok, Socket.t} |
            {:ok, map, Socket.t} |
            {:error, map}

@callback handle_in(event :: String.t, msg :: map, Socket.t) ::
            {:noreply, Socket.t} |
            {:reply, reply, Socket.t} |
            {:stop, reason :: term, Socket.t} |
            {:stop, reason :: term, reply, Socket.t}

@callback handle_info(term, Socket.t) ::
            {:noreply, Socket.t} |
            {:stop, reason :: term, Socket.t}

@callback terminate(msg :: map, Socket.t) ::
            {:shutdown, :left | :closed} |
            term
# Injects the channel behaviour, compile hooks, intercept bookkeeping and
# default callback implementations into the using module.
# Options: :log_join (default :info) and :log_handle_in (default :debug);
# pass `false` to disable that log.
defmacro __using__(opts \\ []) do
quote do
opts = unquote(opts)
@behaviour unquote(__MODULE__)
# __on_definition__/6 warns on handle_out/3 clauses without an intercept
@on_definition unquote(__MODULE__)
@before_compile unquote(__MODULE__)
@phoenix_intercepts []
@phoenix_log_join Keyword.get(opts, :log_join, :info)
@phoenix_log_handle_in Keyword.get(opts, :log_handle_in, :debug)
import unquote(__MODULE__)
import Phoenix.Socket, only: [assign: 3]
# exposes the configured log levels to the channel server
def __socket__(:private) do
%{log_join: @phoenix_log_join,
log_handle_in: @phoenix_log_handle_in}
end
def code_change(_old, socket, _extra), do: {:ok, socket}
# default: ignore unhandled incoming events
def handle_in(_event, _message, socket) do
{:noreply, socket}
end
# default: log/handle unexpected process messages via the channel server
def handle_info(message, state) do
Phoenix.Channel.Server.unhandled_handle_info(message, state)
end
def terminate(_reason, _socket), do: :ok
defoverridable code_change: 3, handle_info: 2, handle_in: 3, terminate: 2
end
end
# Injected at the end of the channel module: exposes the final list of
# intercepted events accumulated by `intercept/1`.
defmacro __before_compile__(_) do
  quote do
    def __intercepts__, do: @phoenix_intercepts
  end
end
@doc """
Defines which Channel events to intercept for `handle_out/3` callbacks.

By default, broadcasted events are pushed directly to the client, but
intercepting events gives your channel a chance to customize the event
for the client to append extra information or filter the message from being
delivered.

*Note*: intercepting events can introduce significantly more overhead if a
large number of subscribers must customize a message since the broadcast will
be encoded N times instead of a single shared encoding across all subscribers.

## Examples

    intercept ["new_msg"]

    def handle_out("new_msg", payload, socket) do
      push socket, "new_msg", Map.merge(payload,
        is_editable: User.can_edit_message?(socket.assigns[:user], payload)
      )
      {:noreply, socket}
    end

`handle_out/3` callbacks must return one of:

    {:noreply, Socket.t} |
    {:stop, reason :: term, Socket.t}

"""
defmacro intercept(events) do
  # Records the event list at compile time; `__on_definition__/6` warns
  # about handle_out/3 clauses whose event is not registered here.
  quote do
    @phoenix_intercepts unquote(events)
  end
end
@doc false
# Compile-time hook: warns when a `handle_out/3` clause is defined for an
# event that was not registered with `intercept/1` (such a clause never runs,
# because only intercepted events are routed to handle_out/3).
def __on_definition__(env, :def, :handle_out, [event, _payload, _socket], _, _)
    when is_binary(event) do
  unless event in Module.get_attribute(env.module, :phoenix_intercepts) do
    # NOTE(review): `IO.write` prints to stdout with no trailing newline;
    # `IO.warn/1` would be the conventional choice — confirm before changing.
    IO.write "#{Path.relative_to(env.file, File.cwd!)}:#{env.line}: [warning] " <>
      "An intercept for event \"#{event}\" has not yet been defined in #{env.module}.handle_out/3. " <>
      "Add \"#{event}\" to your list of intercepted events with intercept/1"
  end
end

# Catch-all: every other definition compiles without checks.
def __on_definition__(_env, _kind, _name, _args, _guards, _body) do
end
@doc """
Broadcast an event to all subscribers of the socket topic.

The event's message must be a serializable map.

## Examples

    iex> broadcast socket, "new_message", %{id: 1, content: "hello"}
    :ok

"""
def broadcast(socket, event, message) do
  %{pubsub_server: pubsub, topic: topic} = assert_joined!(socket)
  Server.broadcast(pubsub, topic, event, message)
end
@doc """
Same as `broadcast/3`, but raises if broadcast fails.
"""
def broadcast!(socket, event, message) do
  %{pubsub_server: pubsub, topic: topic} = assert_joined!(socket)
  Server.broadcast!(pubsub, topic, event, message)
end
@doc """
Broadcast event from pid to all subscribers of the socket topic.

The channel that owns the socket will not receive the published
message. The event's message must be a serializable map.

## Examples

    iex> broadcast_from socket, "new_message", %{id: 1, content: "hello"}
    :ok

"""
def broadcast_from(socket, event, message) do
  %{pubsub_server: pubsub, topic: topic, channel_pid: from_pid} = assert_joined!(socket)
  Server.broadcast_from(pubsub, from_pid, topic, event, message)
end
@doc """
Same as `broadcast_from/3`, but raises if broadcast fails.
"""
def broadcast_from!(socket, event, message) do
  %{pubsub_server: pubsub, topic: topic, channel_pid: from_pid} = assert_joined!(socket)
  Server.broadcast_from!(pubsub, from_pid, topic, event, message)
end
@doc """
Sends event to the socket.

The event's message must be a serializable map.

## Examples

    iex> push socket, "new_message", %{id: 1, content: "hello"}
    :ok

"""
def push(socket, event, message) do
  %{transport_pid: transport, topic: topic} = assert_joined!(socket)
  Server.push(transport, topic, event, message, socket.serializer)
end
@doc """
Replies asynchronously to a socket push.

Useful when you need to reply to a push that can't otherwise be handled using
the `{:reply, {status, payload}, socket}` return from your `handle_in`
callbacks. `reply/2` will be used in the rare cases you need to perform work in
another process and reply when finished by generating a reference to the push
with `socket_ref/1`.

*Note*: In such cases, a `socket_ref` should be generated and
passed to the external process, so the `socket` itself is not leaked outside
the channel. The `socket` holds information such as assigns and transport
configuration, so it's important to not copy this information outside of the
channel that owns it.

## Examples

    def handle_in("work", payload, socket) do
      Worker.perform(payload, socket_ref(socket))
      {:noreply, socket}
    end

    def handle_info({:work_complete, result, ref}, socket) do
      reply ref, {:ok, result}
      {:noreply, socket}
    end

"""
@spec reply(socket_ref, reply) :: :ok
def reply({pid, serializer, topic, ref}, {status, payload}) do
  Server.reply(pid, ref, topic, {status, payload}, serializer)
end
@doc """
Generates a `socket_ref` for an async reply.

See `reply/2` for example usage.
"""
@spec socket_ref(Socket.t) :: socket_ref
def socket_ref(%Socket{joined: true, ref: ref, transport_pid: pid,
                       serializer: serializer, topic: topic})
    when not is_nil(ref) do
  {pid, serializer, topic, ref}
end

def socket_ref(_socket) do
  raise ArgumentError, """
  Socket refs can only be generated for a socket that has joined with a push ref
  """
end
# Returns the socket once the channel has finished its join handshake;
# raises with usage guidance otherwise. Used by push/broadcast/reply helpers.
defp assert_joined!(%Socket{joined: true} = socket) do
  socket
end

defp assert_joined!(%Socket{joined: false}) do
  raise """
  `push`, `reply`, and `broadcast` can only be called after the socket has finished joining.
  To push a message on join, send to self and handle in handle_info/2, ie:
  def join(topic, auth_msg, socket) do
  ...
  send(self, :after_join)
  {:ok, socket}
  end
  def handle_info(:after_join, socket) do
  push socket, "feed", %{list: feed_items(socket)}
  {:noreply, socket}
  end
  """
end
end
| 35.335271
| 112
| 0.669665
|
9e392555b368c72b7ee07102438391bb3f512962
| 6,629
|
ex
|
Elixir
|
lib/chart/pie_chart.ex
|
shadowRR/contex
|
86730b38a8ec06726e9233ef1f58661ca85cb704
|
[
"MIT"
] | 455
|
2020-01-15T22:21:40.000Z
|
2022-03-29T23:20:45.000Z
|
lib/chart/pie_chart.ex
|
shadowRR/contex
|
86730b38a8ec06726e9233ef1f58661ca85cb704
|
[
"MIT"
] | 48
|
2020-02-10T06:19:17.000Z
|
2022-03-29T03:02:52.000Z
|
lib/chart/pie_chart.ex
|
shadowRR/contex
|
86730b38a8ec06726e9233ef1f58661ca85cb704
|
[
"MIT"
] | 30
|
2020-01-15T22:21:35.000Z
|
2022-03-10T18:11:51.000Z
|
defmodule Contex.PieChart do
  @moduledoc """
  A Pie Chart that displays data in a circular graph.

  The pieces of the graph are proportional to the fraction of the whole in each category.
  Each slice of the pie is relative to the size of that category in the group as a whole.
  The entire “pie” represents 100 percent of a whole, while the pie “slices” represent portions of the whole.

  Fill colours for each slice can be specified with `colour_palette` parameter in chart options.
  """

  alias __MODULE__
  alias Contex.{Dataset, Mapping, CategoryColourScale}

  # Chart state: the source dataset, the validated column mapping, and a
  # keyword list of presentation options.
  defstruct [
    :dataset,
    :mapping,
    :options
  ]

  @type t() :: %__MODULE__{}

  # Mapping spec handed to `Contex.Mapping.new/3`; both columns may be
  # omitted (:zero_or_one) and resolved from the dataset.
  @required_mappings [
    category_col: :zero_or_one,
    value_col: :zero_or_one
  ]

  @default_options [
    width: 600,
    height: 400,
    colour_palette: :default,
    data_labels: true
  ]

  @doc """
  Create a new PieChart struct from Dataset.

  Options may be passed to control the settings for the barchart. Options available are:

    - `:data_labels` : `true` (default) or false - display labels for each slice value
    - `:colour_palette` : `:default` (default) or colour palette - see `colours/2`

  An example:

      data = [
        ["Cat", 10.0],
        ["Dog", 20.0],
        ["Hamster", 5.0]
      ]

      dataset = DataSet.new(data, ["Pet", "Preference"])

      opts = [
        mapping: %{category_col: "Pet", value_col: "Preference"},
        colour_palette: ["fbb4ae", "b3cde3", "ccebc5"],
        legend_setting: :legend_right,
        data_labels: false,
        title: "Why dogs are better than cats"
      ]

      Contex.Plot.new(dataset, Contex.PieChart, 600, 400, opts)
  """
  def new(%Dataset{} = dataset, options \\ []) when is_list(options) do
    # Caller options override defaults; the mapping is validated against the dataset.
    options = Keyword.merge(@default_options, options)
    mapping = Mapping.new(@required_mappings, Keyword.get(options, :mapping), dataset)

    %PieChart{
      dataset: dataset,
      mapping: mapping,
      options: options
    }
  end

  @doc false
  # Propagates the plot dimensions into the chart options.
  def set_size(%PieChart{} = chart, width, height) do
    chart
    |> set_option(:width, width)
    |> set_option(:height, height)
  end

  @doc false
  # Renders the legend for the chart's category colour scale.
  def get_svg_legend(%PieChart{} = chart) do
    get_colour_palette(chart)
    |> Contex.Legend.to_svg()
  end

  @doc """
  Overrides the default colours.

  Colours can either be a named palette defined in `Contex.CategoryColourScale` or a list of strings representing hex code
  of the colour as per CSS colour hex codes, but without the #. For example:

  ```
  barchart = BarChart.colours(barchart, ["fbb4ae", "b3cde3", "ccebc5"])
  ```

  The colours will be applied to the data series in the same order as the columns are specified in `set_val_col_names/2`
  """
  @deprecated "Set in new/2 options"
  @spec colours(PieChart.t(), Contex.CategoryColourScale.colour_palette()) ::
          PieChart.t()
  def colours(%PieChart{} = chart, colour_palette) when is_list(colour_palette) do
    set_option(chart, :colour_palette, colour_palette)
  end

  def colours(%PieChart{} = chart, colour_palette) when is_atom(colour_palette) do
    set_option(chart, :colour_palette, colour_palette)
  end

  # Anything that is neither a list nor an atom falls back to the default palette.
  def colours(%PieChart{} = chart, _) do
    set_option(chart, :colour_palette, :default)
  end

  @doc """
  Renders the PieChart to svg, including the svg wrapper, as a string or improper string list that
  is marked safe.
  """
  def to_svg(%PieChart{} = chart) do
    [
      "<g>",
      generate_slices(chart),
      "</g>"
    ]
  end

  # Returns the category value of every data row, in dataset order.
  def get_categories(%PieChart{dataset: dataset, mapping: mapping}) do
    cat_accessor = dataset |> Dataset.value_fn(mapping.column_map[:category_col])

    dataset.data
    |> Enum.map(&cat_accessor.(&1))
  end

  defp set_option(%PieChart{options: options} = plot, key, value) do
    options = Keyword.put(options, key, value)
    %{plot | options: options}
  end

  defp get_option(%PieChart{options: options}, key) do
    Keyword.get(options, key)
  end

  # Builds a colour scale keyed by category and applies the configured palette.
  defp get_colour_palette(%PieChart{} = chart),
    do:
      get_categories(chart)
      |> CategoryColourScale.new()
      |> CategoryColourScale.set_palette(get_option(chart, :colour_palette))

  # Draws each slice with the stroke-dasharray technique: a circle of radius
  # r/2 with stroke-width r renders as a filled disc, and the dash
  # array/offset carve out each slice's arc. Percentages come from
  # scale_values/1; `offset` accumulates the percent already drawn.
  defp generate_slices(%PieChart{} = chart) do
    height = get_option(chart, :height)
    with_labels? = get_option(chart, :data_labels)
    colour_palette = get_colour_palette(chart)

    r = height / 2
    stroke_circumference = 2 * :math.pi() * r / 2

    scale_values(chart)
    |> Enum.map_reduce({0, 0}, fn {value, category}, {idx, offset} ->
      # Rotation to the middle of this slice (3.6 degrees per percent).
      text_rotation = rotate_for(value, offset)

      label =
        if with_labels? do
          ~s"""
          <text x="#{negate_if_flipped(r, text_rotation)}"
            y="#{negate_if_flipped(r, text_rotation)}"
            text-anchor="middle"
            fill="white"
            stroke-width="1"
            transform="rotate(#{text_rotation},#{r},#{r})
              translate(#{r / 2}, #{negate_if_flipped(5, text_rotation)})
              #{if need_flip?(text_rotation), do: "scale(-1,-1)"}"
          >
            #{Float.round(value, 2)}%
          </text>
          """
        else
          ""
        end

      {
        ~s"""
        <circle r="#{r / 2}" cx="#{r}" cy="#{r}" fill="transparent"
          stroke="##{CategoryColourScale.colour_for_value(colour_palette, category)}"
          stroke-width="#{r}"
          stroke-dasharray="#{slice_value(value, stroke_circumference)} #{stroke_circumference}"
          stroke-dashoffset="-#{slice_value(offset, stroke_circumference)}">
        </circle>
        #{label}
        """,
        # NOTE(review): idx is threaded through the reduce but never used.
        {idx + 1, offset + value}
      }
    end)
    |> elem(0)
    |> Enum.join()
  end

  # Arc length (in dash units) covered by `value` percent of the circumference.
  defp slice_value(value, stroke_circumference) do
    value * stroke_circumference / 100
  end

  # Rotation in degrees to the midpoint of a slice of `n` percent starting
  # at `offset` percent (100% == 360 degrees, hence the 3.6 factor).
  defp rotate_for(n, offset) do
    n / 2 * 3.6 + offset * 3.6
  end

  # Labels on the left half of the pie would render upside down; flip them.
  defp need_flip?(rotation) do
    90 < rotation and rotation < 270
  end

  defp negate_if_flipped(number, rotation) do
    if need_flip?(rotation),
      do: -number,
      else: number
  end

  # Converts each data row into {percentage_of_total, category}.
  @spec scale_values(PieChart.t()) :: [{value :: number(), label :: any()}]
  defp scale_values(%PieChart{dataset: dataset, mapping: mapping}) do
    val_accessor = dataset |> Dataset.value_fn(mapping.column_map[:value_col])
    cat_accessor = dataset |> Dataset.value_fn(mapping.column_map[:category_col])

    # Total of the value column; the accumulator of map_reduce just carries it.
    sum = dataset.data |> Enum.reduce(0, fn col, acc -> val_accessor.(col) + acc end)

    dataset.data
    |> Enum.map_reduce(sum, &{{val_accessor.(&1) / &2 * 100, cat_accessor.(&1)}, &2})
    |> elem(0)
  end
end
| 29.331858
| 122
| 0.627696
|
9e39583e056fc74228371f714f90f24b70f943c7
| 1,864
|
exs
|
Elixir
|
test/processlru_test.exs
|
diodechain/diode_server
|
1692788bd92cc17654965878abd059d13b5e236c
|
[
"Apache-2.0"
] | 8
|
2021-03-12T15:35:09.000Z
|
2022-03-06T06:37:49.000Z
|
test/processlru_test.exs
|
diodechain/diode_server_ex
|
5cf47e5253a0caafd335d0af4dba711d4dcad42d
|
[
"Apache-2.0"
] | 15
|
2019-09-06T07:58:01.000Z
|
2021-03-06T17:04:46.000Z
|
test/processlru_test.exs
|
diodechain/diode_server
|
1692788bd92cc17654965878abd059d13b5e236c
|
[
"Apache-2.0"
] | 5
|
2021-10-01T12:52:28.000Z
|
2022-02-02T19:29:56.000Z
|
# Diode Server
# Copyright 2021 Diode
# Licensed under the Diode License, Version 1.1
defmodule ProcessLruTest do
  use ExUnit.Case

  test "base" do
    cache = ProcessLru.new(10)
    assert 0 == ProcessLru.size(cache)

    ProcessLru.put(cache, "key", "value")
    assert 1 == ProcessLru.size(cache)
    assert "value" == ProcessLru.get(cache, "key")

    # ProcessLru should not cache nil return values
    assert nil == ProcessLru.fetch(cache, "nothing", fn -> nil end)
    assert "yay" == ProcessLru.fetch(cache, "nothing", fn -> "yay" end)
    assert "yay" == ProcessLru.get(cache, "nothing")
  end

  test "limit" do
    cache = ProcessLru.new(3)
    assert 0 == ProcessLru.size(cache)

    ProcessLru.put(cache, "a", "avalue")
    ProcessLru.put(cache, "b", "bvalue")
    ProcessLru.put(cache, "c", "cvalue")
    assert 3 == ProcessLru.size(cache)
    assert "avalue" == ProcessLru.get(cache, "a")
    assert "bvalue" == ProcessLru.get(cache, "b")
    assert "cvalue" == ProcessLru.get(cache, "c")

    # A fourth insert evicts the least recently used entry ("a").
    ProcessLru.put(cache, "d", "dvalue")
    assert 3 == ProcessLru.size(cache)
    assert nil == ProcessLru.get(cache, "a")
    assert "bvalue" == ProcessLru.get(cache, "b")
    assert "cvalue" == ProcessLru.get(cache, "c")
    assert "dvalue" == ProcessLru.get(cache, "d")
  end

  test "repeat" do
    cache = ProcessLru.new(3)
    assert 0 == ProcessLru.size(cache)

    ProcessLru.put(cache, "a", "avalue")
    ProcessLru.put(cache, "b", "bvalue")
    ProcessLru.put(cache, "c", "cvalue")
    assert 3 == ProcessLru.size(cache)
    assert "avalue" == ProcessLru.get(cache, "a")
    assert "bvalue" == ProcessLru.get(cache, "b")
    assert "cvalue" == ProcessLru.get(cache, "c")

    # Re-putting an existing key replaces its value without growing the cache.
    ProcessLru.put(cache, "a", "avalue2")
    assert 3 == ProcessLru.size(cache)
    assert "avalue2" == ProcessLru.get(cache, "a")
    assert "bvalue" == ProcessLru.get(cache, "b")
    assert "cvalue" == ProcessLru.get(cache, "c")
  end
end
| 28.676923
| 69
| 0.623391
|
9e3967a94ee2aeb84e913300e7d59aaa7502502d
| 1,902
|
ex
|
Elixir
|
clients/health_care/lib/google_api/health_care/v1/model/list_consent_revisions_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/health_care/lib/google_api/health_care/v1/model/list_consent_revisions_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/health_care/lib/google_api/health_care/v1/model/list_consent_revisions_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1.Model.ListConsentRevisionsResponse do
  @moduledoc """
  Response message for listing revisions of a Consent resource.

  ## Attributes

  * `consents` (*type:* `list(GoogleApi.HealthCare.V1.Model.Consent.t)`, *default:* `nil`) - The returned Consent revisions. The maximum number of revisions returned is determined by the value of `page_size` in the ListConsentRevisionsRequest.
  * `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token to retrieve the next page of results, or empty if there are no more results in the list.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :consents => list(GoogleApi.HealthCare.V1.Model.Consent.t()) | nil,
          :nextPageToken => String.t() | nil
        }

  # `field/1,2` come from GoogleApi.Gax.ModelBase and define the struct
  # fields plus JSON (de)serialization metadata.
  field(:consents, as: GoogleApi.HealthCare.V1.Model.Consent, type: :list)
  field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1.Model.ListConsentRevisionsResponse do
  # Delegate JSON decoding to the model's generated `decode/2`.
  def decode(value, options),
    do: GoogleApi.HealthCare.V1.Model.ListConsentRevisionsResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1.Model.ListConsentRevisionsResponse do
  # All generated models share the ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 38.04
| 245
| 0.743954
|
9e399df294232f371bcbba4cd8f65111f58e35ed
| 199
|
ex
|
Elixir
|
lib/weather.ex
|
jessie-morris/elixir-weather
|
1f54cc08dca4be7d5943d278d45eee336f4706ff
|
[
"MIT"
] | null | null | null |
lib/weather.ex
|
jessie-morris/elixir-weather
|
1f54cc08dca4be7d5943d278d45eee336f4706ff
|
[
"MIT"
] | 7
|
2020-09-05T02:06:15.000Z
|
2022-02-26T12:10:05.000Z
|
current/server/lib/weather.ex
|
aosteraas/weather
|
56bbf9e3dfa811860be2dd2b6cc801574983ef7e
|
[
"CC-BY-3.0"
] | null | null | null |
defmodule Weather do
  @moduledoc """
  Documentation for Weather.
  """

  @doc """
  Hello world.

  ## Examples

      iex> Weather.hello()
      :world

  """
  def hello, do: :world
end
| 10.473684
| 28
| 0.562814
|
9e39a940b17517ee2e45f70a499a3deb43d9fd9e
| 2,726
|
exs
|
Elixir
|
mix.exs
|
carlosviana/blog
|
1dcf58c3ca40bc3a7105d75de6f51954eb44bca8
|
[
"MIT"
] | null | null | null |
mix.exs
|
carlosviana/blog
|
1dcf58c3ca40bc3a7105d75de6f51954eb44bca8
|
[
"MIT"
] | 3
|
2021-06-20T14:51:14.000Z
|
2021-06-25T00:56:11.000Z
|
mix.exs
|
carlosviana/blog
|
1dcf58c3ca40bc3a7105d75de6f51954eb44bca8
|
[
"MIT"
] | null | null | null |
defmodule Blog.MixProject do
  use Mix.Project

  @github_url "https://github.com/carlosviana/blog"

  def project do
    [
      app: :blog,
      version: "0.1.0",
      elixir: "~> 1.11",
      # Fix: key was misspelled `descrition`, so Hex never saw a description.
      description: "Aprende phoenix com um projeto de Blog",
      source_url: @github_url,
      homepage_url: @github_url,
      files: ~w[mix.exs lib LICENSE.md README.md CHANGELOG.md],
      package: [
        maintainers: ["Carlos Viana"],
        # Fix: Hex expects the `:licenses` key (was misspelled `licences`).
        licenses: ["MIT"],
        links: %{
          "Github" => @github_url
        }
      ],
      docs: [
        main: "readme",
        extras: ["README.md", "CHANGELOG.md"]
      ],
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test,
        "coveralls.json": :test
      ]
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {Blog.Application, []},
      extra_applications: [:logger, :runtime_tools, :ueberauth_google]
    ]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.5.7"},
      {:phoenix_ecto, "~> 4.1"},
      {:ecto_sql, "~> 3.4"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_html, "~> 2.11"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:phoenix_live_dashboard, "~> 0.4"},
      {:telemetry_metrics, "~> 0.4"},
      {:telemetry_poller, "~> 0.4"},
      {:gettext, "~> 0.11"},
      {:jason, "~> 1.0"},
      {:plug_cowboy, "~> 2.0"},
      {:credo, "~> 1.5", only: [:dev, :test], runtime: false},
      {:sobelow, "~> 0.8", only: :dev},
      {:excoveralls, "~> 0.10", only: :test},
      {:ueberauth_google, "~> 0.10"}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to install project dependencies and perform other setup tasks, run:
  #
  #     $ mix setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.reset --quiet", "test"]
    ]
  end
end
| 28.694737
| 84
| 0.561996
|
9e3a4f53fa6b23b5629e9481e5164d5229268c4f
| 82
|
exs
|
Elixir
|
test/dnsierge_web/views/page_view_test.exs
|
jcamenisch/dnsierge
|
cfc1654ce0b2c34c8952845aa0df6113ff3bb406
|
[
"MIT"
] | null | null | null |
test/dnsierge_web/views/page_view_test.exs
|
jcamenisch/dnsierge
|
cfc1654ce0b2c34c8952845aa0df6113ff3bb406
|
[
"MIT"
] | null | null | null |
test/dnsierge_web/views/page_view_test.exs
|
jcamenisch/dnsierge
|
cfc1654ce0b2c34c8952845aa0df6113ff3bb406
|
[
"MIT"
] | null | null | null |
defmodule DnsiergeWeb.PageViewTest do
  # Placeholder test module for PageView; `use ... ConnCase` pulls in the
  # connection test helpers. `async: true` is safe while no shared state is used.
  use DnsiergeWeb.ConnCase, async: true
end
| 20.5
| 39
| 0.829268
|
9e3a562a0a91480df2946215403d8daa89cfa297
| 589
|
ex
|
Elixir
|
lib/events_api_web/views/changeset_view.ex
|
gissandrogama/events
|
1b21b151a336ae5eef8bf1d68f2a792194f21be3
|
[
"MIT"
] | null | null | null |
lib/events_api_web/views/changeset_view.ex
|
gissandrogama/events
|
1b21b151a336ae5eef8bf1d68f2a792194f21be3
|
[
"MIT"
] | 4
|
2021-02-10T22:04:56.000Z
|
2021-02-12T22:01:42.000Z
|
lib/events_api_web/views/changeset_view.ex
|
gissandrogama/events
|
1b21b151a336ae5eef8bf1d68f2a792194f21be3
|
[
"MIT"
] | 1
|
2021-03-22T13:48:48.000Z
|
2021-03-22T13:48:48.000Z
|
defmodule EventsApiWeb.ChangesetView do
  use EventsApiWeb, :view

  @doc """
  Traverses the changeset and translates its errors.

  See `Ecto.Changeset.traverse_errors/2` and
  `EventsApiWeb.ErrorHelpers.translate_error/1` for details.
  """
  def translate_errors(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn error -> translate_error(error) end)
  end

  # When encoded, the changeset returns its errors as a JSON object,
  # so we simply pass them along.
  def render("error.json", %{changeset: changeset}) do
    %{errors: translate_errors(changeset)}
  end
end
| 29.45
| 67
| 0.745331
|
9e3a587488b92e437b162171e281c34c80dd6eac
| 110
|
ex
|
Elixir
|
lib/movement/migration.ex
|
samuelnygaard/accent
|
db753badab1d885397b48a42ac3fb43024345467
|
[
"BSD-3-Clause"
] | 1
|
2020-07-01T16:08:34.000Z
|
2020-07-01T16:08:34.000Z
|
lib/movement/migration.ex
|
samuelnygaard/accent
|
db753badab1d885397b48a42ac3fb43024345467
|
[
"BSD-3-Clause"
] | 6
|
2021-03-11T07:37:48.000Z
|
2022-02-13T21:10:33.000Z
|
lib/movement/migration.ex
|
doc-ai/accent
|
e337e16f3658cc0728364f952c0d9c13710ebb06
|
[
"BSD-3-Clause"
] | 1
|
2020-05-29T21:47:35.000Z
|
2020-05-29T21:47:35.000Z
|
defmodule Movement.Migration do
  @moduledoc """
  Behaviour contract for migration operations.
  """

  # Result of a migration step: a map on success or on failure.
  @type t :: {:ok, map} | {:error, map}

  # Implementations dispatch on an operation tag (atom) plus its payload map.
  @callback call(atom, map) :: t
end
| 18.333333
| 39
| 0.627273
|
9e3a68672946ac01d9ec4acb7300c079729f5a48
| 591
|
ex
|
Elixir
|
lib/oracleex/result.ex
|
MikeAlbertFleetSolutions/oracleex
|
71c096af28531d3e8f52a7ba6eebfa369f0759fa
|
[
"Apache-2.0"
] | 1
|
2020-02-28T15:13:32.000Z
|
2020-02-28T15:13:32.000Z
|
lib/oracleex/result.ex
|
MikeAlbertFleetSolutions/oracleex
|
71c096af28531d3e8f52a7ba6eebfa369f0759fa
|
[
"Apache-2.0"
] | null | null | null |
lib/oracleex/result.ex
|
MikeAlbertFleetSolutions/oracleex
|
71c096af28531d3e8f52a7ba6eebfa369f0759fa
|
[
"Apache-2.0"
] | null | null | null |
defmodule Oracleex.Result do
  @moduledoc """
  Result struct returned from any successful query. Its fields are:

    * `columns` - The names of each column in the result set;
    * `rows` - The result set. A list of tuples, each tuple corresponding to a
      row, each element in the tuple corresponds to a column;
    * `num_rows` - The number of fetched or affected rows;
  """

  @type t :: %__MODULE__{
          columns: [String.t] | nil,
          rows: [[term] | binary] | nil,
          num_rows: integer | :undefined
        }

  defstruct columns: nil, rows: nil, num_rows: :undefined
end
| 34.764706
| 78
| 0.65313
|
9e3a85cfd995c9944ec881f2a94596d5ef53f888
| 1,229
|
ex
|
Elixir
|
lib/ex_jenga/send_money_queries/transaction_status.ex
|
beamkenya/ex_jenga
|
03a936a04d99614043d120d0e3ee787f1b8a5b8d
|
[
"AML",
"MIT"
] | 1
|
2021-09-14T09:50:22.000Z
|
2021-09-14T09:50:22.000Z
|
lib/ex_jenga/send_money_queries/transaction_status.ex
|
beamkenya/ex_jenga
|
03a936a04d99614043d120d0e3ee787f1b8a5b8d
|
[
"AML",
"MIT"
] | 15
|
2021-04-23T11:28:49.000Z
|
2021-06-23T04:42:35.000Z
|
lib/ex_jenga/send_money_queries/transaction_status.ex
|
beamkenya/ex_jenga
|
03a936a04d99614043d120d0e3ee787f1b8a5b8d
|
[
"AML",
"MIT"
] | null | null | null |
defmodule ExJenga.SendMoneyQueries.TransactionStatus do
  @moduledoc """
  Use this API to check the status of a B2C transaction
  """

  import ExJenga.JengaBase

  @doc """
  Check the status of a B2C transaction

  ## Parameters

  attrs: - a map containing:

    - `requestId` - a string
    - `destination` - a map containing; `type`
    - `transfer` - a map containing; `date`

  Read More about the parameters' descriptions here: https://developer.jengaapi.io/reference#query-status-b2c-transactions

  ## Example

      iex> ExJenga.SendMoneyQueries.TransactionStatus.request(%{ requestId: "192108062104", destination: %{ type: "Mpesa" }, transfer: %{ date: "2020-12-17" } })
      {:ok,
       %{
         "transactionId" => "1452854",
         "status" => "SUCCESS"
       }}

  """
  @spec request(map()) :: {:error, any()} | {:ok, any()}
  # The head only asserts that the required keys are present; the whole map
  # is forwarded untouched to the Jenga API.
  def request(%{requestId: _, destination: %{type: _}, transfer: %{date: _}} = request_body) do
    make_request("/transaction/v2/b2c/status/query", request_body)
  end

  # Any other shape is rejected before any network call is attempted.
  def request(_invalid) do
    {:error, "Required Parameters missing, check your request body"}
  end
end
| 25.081633
| 162
| 0.597234
|
9e3ab060d19be7d21eaf843a16ed7ea4bd5e39d8
| 384
|
ex
|
Elixir
|
web/models/role.ex
|
melbystyle/jw_ministry_api
|
2065d628a84e829b805c71b7e73fb0bee3d0dd44
|
[
"Apache-2.0"
] | 1
|
2017-06-19T18:18:11.000Z
|
2017-06-19T18:18:11.000Z
|
web/models/role.ex
|
melbystyle/jw_ministry_api
|
2065d628a84e829b805c71b7e73fb0bee3d0dd44
|
[
"Apache-2.0"
] | null | null | null |
web/models/role.ex
|
melbystyle/jw_ministry_api
|
2065d628a84e829b805c71b7e73fb0bee3d0dd44
|
[
"Apache-2.0"
] | null | null | null |
defmodule JwMinistryApi.Role do
  use JwMinistryApi.Web, :model

  # Ecto schema for the "roles" table; a role is referenced by many publishers.
  schema "roles" do
    field :name, :string
    has_many :publisher, JwMinistryApi.Publisher

    timestamps()
  end

  @doc """
  Builds a changeset based on the `struct` and `params`.

  `:name` is the only accepted and required attribute.
  """
  def changeset(struct, params \\ %{}) do
    struct
    |> cast(params, [:name])
    |> validate_required([:name])
  end
end
| 19.2
| 56
| 0.648438
|
9e3b09f64a010c18037a243e39c0b9fb72a285ce
| 1,153
|
exs
|
Elixir
|
test/snappy_test.exs
|
cocoa-xu/snappy_elixir
|
2d67b18a4009b68edf2c301094f2083b1bde0731
|
[
"Apache-2.0"
] | null | null | null |
test/snappy_test.exs
|
cocoa-xu/snappy_elixir
|
2d67b18a4009b68edf2c301094f2083b1bde0731
|
[
"Apache-2.0"
] | null | null | null |
test/snappy_test.exs
|
cocoa-xu/snappy_elixir
|
2d67b18a4009b68edf2c301094f2083b1bde0731
|
[
"Apache-2.0"
] | null | null | null |
defmodule SnappyTest do
  use ExUnit.Case, async: true
  doctest Snappy

  # 20 x "a" and its snappy-compressed 6-byte frame, shared across tests.
  @plain "aaaaaaaaaaaaaaaaaaaa"
  @packed <<20, 0, 97, 74, 1, 0>>

  test "compress" do
    assert {:ok, @packed} = Snappy.compress(@plain)
  end

  test "uncompress" do
    assert {:ok, @plain} = Snappy.uncompress(@packed)
    assert {:error, "snappy::GetUncompressedLength failed"} = Snappy.uncompress(<<>>)
    assert {:error, "snappy::RawUncompress failed"} = Snappy.uncompress(<<1>>)
  end

  test "max_compressed_length" do
    assert {:ok, 55} = Snappy.max_compressed_length(@plain)
  end

  test "uncompressed_length" do
    assert {:ok, 20} = Snappy.uncompressed_length(@packed)
    assert {:error, "snappy::GetUncompressedLength failed"} = Snappy.uncompressed_length(<<>>)
  end

  test "valid_compressed_buffer?" do
    {:ok, compressed} = Snappy.compress(@plain)
    assert true == Snappy.valid_compressed_buffer?(compressed)
    assert false == Snappy.valid_compressed_buffer?(<<>>)
    assert false == Snappy.valid_compressed_buffer?(compressed, 1)
    assert false == Snappy.valid_compressed_buffer?(<<>>, 1)
  end
end
| 34.939394
| 94
| 0.692975
|
9e3b13c38ccfa65ba211dd8909029d9df06c2bbe
| 1,274
|
ex
|
Elixir
|
lib/ash/resource/validation/confirm.ex
|
axelson/ash
|
5992fc00f7bdc0ba0ebdb476a5191245145ef7c8
|
[
"MIT"
] | null | null | null |
lib/ash/resource/validation/confirm.ex
|
axelson/ash
|
5992fc00f7bdc0ba0ebdb476a5191245145ef7c8
|
[
"MIT"
] | null | null | null |
lib/ash/resource/validation/confirm.ex
|
axelson/ash
|
5992fc00f7bdc0ba0ebdb476a5191245145ef7c8
|
[
"MIT"
] | null | null | null |
defmodule Ash.Resource.Validation.Confirm do
  @moduledoc false
  use Ash.Resource.Validation

  alias Ash.Changeset
  alias Ash.Error.Changes.InvalidAttribute

  # Validates the options at build time: both :field and :confirmation must be atoms.
  def init(opts) do
    case opts[:field] do
      nil ->
        {:error, "Field is required"}

      field when is_atom(field) ->
        case opts[:confirmation] do
          nil ->
            {:error, "Confirmation is required"}

          confirmation when is_atom(confirmation) ->
            {:ok, [confirmation: confirmation, field: field]}

          confirmation ->
            {:error, "Expected an atom for confirmation, got: #{inspect(confirmation)}"}
        end

      field ->
        {:error, "Expected an atom for field, got: #{inspect(field)}"}
    end
  end

  # Checks that the confirmation argument/attribute equals the field's value.
  def validate(changeset, opts) do
    # Bug fix: the attribute fallback previously read `opts[:value]`, which
    # init/1 never sets (it returns only [confirmation:, field:]), so it
    # always resolved to nil. It must read `opts[:confirmation]`.
    confirmation_value =
      Changeset.get_argument(changeset, opts[:confirmation]) ||
        Changeset.get_attribute(changeset, opts[:confirmation])

    value =
      Changeset.get_argument(changeset, opts[:field]) ||
        Changeset.get_attribute(changeset, opts[:field])

    if confirmation_value == value do
      :ok
    else
      {:error,
       InvalidAttribute.exception(
         field: opts[:confirmation],
         message: "Confirmation did not match value"
       )}
    end
  end
end
| 26
| 88
| 0.620879
|
9e3b391ef4631bc67e6750238045882ec4e4e545
| 6,109
|
ex
|
Elixir
|
lib/quantity/math.ex
|
wise-home/quantity
|
62cae5d049d6909a47b37a2b9df2a93ef61461a0
|
[
"MIT"
] | 8
|
2020-01-17T08:00:51.000Z
|
2022-03-07T10:30:01.000Z
|
lib/quantity/math.ex
|
wise-home/quantity
|
62cae5d049d6909a47b37a2b9df2a93ef61461a0
|
[
"MIT"
] | 15
|
2020-01-17T10:35:06.000Z
|
2021-11-19T07:40:18.000Z
|
lib/quantity/math.ex
|
wise-home/quantity
|
62cae5d049d6909a47b37a2b9df2a93ef61461a0
|
[
"MIT"
] | null | null | null |
defmodule Quantity.Math do
@moduledoc """
Functions for doing math with Quantities
"""
import Kernel, except: [div: 2]
@doc """
Add two Quantities, keeping the unit

    iex> add(~Q[1.34 MWh], ~Q[3.49 MWh])
    {:ok, ~Q[4.83 MWh]}

    iex> add(~Q[1.234567 days], ~Q[3.5 days])
    {:ok, ~Q[4.734567 days]}

    iex> add(~Q[10 goats], ~Q[40 sheep])
    :error
"""
@spec add(Quantity.t(), Quantity.t()) :: {:ok, Quantity.t()} | :error
def add(%{unit: unit} = first, %{unit: unit} = second) do
  {:ok, Quantity.new(Decimal.add(first.value, second.value), unit)}
end

# Mismatched units cannot be added.
def add(_, _), do: :error
@doc """
Add two Quantities, but raise an ArgumentError on error

    iex> add!(~Q[50.94 kWh], ~Q[49.40 kWh])
    ~Q[100.34 kWh]
"""
@spec add!(Quantity.t(), Quantity.t()) :: Quantity.t()
def add!(first, second) do
  case add(first, second) do
    {:ok, total} -> total
    :error -> raise ArgumentError
  end
end
@doc """
Subtract two Quantities, keeping the unit

    iex> sub(~Q[99 bottles of beer], ~Q[2 bottles of beer])
    {:ok, ~Q[97 bottles of beer]}

    iex> sub(~Q[2 bananas], ~Q[1 apple])
    :error
"""
@spec sub(Quantity.t(), Quantity.t()) :: {:ok, Quantity.t()} | :error
def sub(%{unit: unit} = minuend, %{unit: unit} = subtrahend) do
  {:ok, Quantity.new(Decimal.sub(minuend.value, subtrahend.value), unit)}
end

# Mismatched units cannot be subtracted.
def sub(_, _), do: :error
@doc """
Subtract two Quantities, but raise ArgumentError on error

    iex> sub!(~Q[99 problems], ~Q[2 problems])
    ~Q[97 problems]
"""
@spec sub!(Quantity.t(), Quantity.t()) :: Quantity.t()
def sub!(minuend, subtrahend) do
  case sub(minuend, subtrahend) do
    {:ok, difference} -> difference
    :error -> raise ArgumentError
  end
end
@doc """
Sum a list of Quantities with identical units. Errors when addition fails or when the list is empty.

    iex> sum([~Q[11.11 DKK], ~Q[22.22 DKK], ~Q[33.33 DKK]])
    {:ok, ~Q[66.66 DKK]}

    iex> sum([~Q[1 EUR], ~Q[2 DKK]])
    :error

    iex> sum([])
    :error
"""
@spec sum([Quantity.t()]) :: {:ok, Quantity.t()} | :error
def sum([]), do: :error

def sum(quantities) do
  # Seed the fold with the first quantity so no unit has to be guessed.
  {first, remaining} = quantities |> List.pop_at(0)

  remaining
  # Stop folding at the first unit mismatch reported by add/2.
  |> Enum.reduce_while(first, fn quantity, acc ->
    case add(quantity, acc) do
      {:ok, result} -> {:cont, result}
      :error -> {:halt, :error}
    end
  end)
  # Normalize the bare accumulator into the {:ok, _} | :error contract.
  |> case do
    :error -> :error
    result -> {:ok, result}
  end
end
@doc """
Sum a list of Quantities with identical units. Includes a fallback value.
The exp and unit will be used to create a Quantity with value 0 if the list is empty.
iex> sum([~Q[0.11 DKK], ~Q[0.22 DKK], ~Q[0.33 DKK]], -2, "DKK")
{:ok, ~Q[0.66 DKK]}
iex> sum([], 0, "DKK")
{:ok, ~Q[0 DKK]}
iex> sum([], -2, "DKK")
{:ok, ~Q[0.00 DKK]}
iex> sum([~Q[1 EUR], ~Q[2 EUR]], -1, "DKK")
{:ok, ~Q[3 EUR]}
iex> sum([~Q[1 EUR], ~Q[2 DKK]], -2, "EUR")
:error
"""
@spec sum([Quantity.t()], integer, String.t()) :: {:ok, Quantity.t()} | :error
def sum([], exp, unit), do: {:ok, Quantity.new(0, exp, unit)}
def sum(quantities, _exp, _unit), do: quantities |> sum()
@doc """
Sum a list of Quantities with identical units, raises ArgumentError on error
iex> sum!([~Q[123 DKK], ~Q[10 DKK], ~Q[39 DKK]])
~Q[172 DKK]
"""
@spec sum!([Quantity.t()]) :: Quantity.t()
def sum!(quantities) do
case sum(quantities) do
{:ok, result} -> result
:error -> raise(ArgumentError)
end
end
@doc """
Sum a list of Quantities with identical units. Includes a fallback value.
The exp and unit will be used to create a Quantity with value 0 if the list is empty.
Raises ArgumentError on error.
iex> sum!([~Q[123 apples], ~Q[10 apples]], 0, "apples")
~Q[133 apples]
iex> sum!([], -2, "DKK")
~Q[0.00 DKK]
iex> sum!([~Q[1 apples], ~Q[2 apples]], -2, "pears")
~Q[3 apples]
"""
@spec sum!([Quantity.t()], integer, String.t()) :: Quantity.t()
def sum!(quantities, exp, unit) do
case sum(quantities, exp, unit) do
{:ok, result} -> result
:error -> raise(ArgumentError)
end
end
@doc """
Divide a Quantity by a scalar or another Quantity
iex> Quantity.div(~Q[15 $], ~Q[10 banana])
~Q[1.5 $/banana]
iex> Quantity.div(~Q[15 $], ~d[7.5])
~Q[2 $]
iex> Quantity.div(~Q[15 $], 10)
~Q[1.5 $]
iex> Quantity.div(~Q[15 $], ~Q[10 $])
~Q[1.5]
"""
@spec div(Quantity.t(), Quantity.t() | Decimal.t() | integer) :: Quantity.t()
def div(%Quantity{} = quantity, scalar) when is_integer(scalar) do
div(quantity, Quantity.new(scalar, 0, 1))
end
def div(%Quantity{} = quantity, %Decimal{} = scalar) do
div(quantity, Quantity.new(scalar, 1))
end
def div(%Quantity{} = q1, %Quantity{} = q2) do
Quantity.new(Decimal.div(q1.value, q2.value), {:div, q1.unit, q2.unit})
end
@doc """
Inverse a Quantity, similar to 1/quantity
iex> Quantity.inverse(~Q[10 DKK/m³])
~Q[0.1 m³/DKK]
"""
@spec inverse(Quantity.t()) :: Quantity.t()
def inverse(%Quantity{} = quantity) do
div(Quantity.new(Decimal.new(1), 1), quantity)
end
@doc """
Multiply a quantity by a scalar or another quantity
iex> Quantity.mult(~Q[15 $], ~d[4.5])
~Q[67.5 $]
iex> Quantity.mult(~Q[15 $], 4)
~Q[60 $]
iex> Quantity.mult(~Q[15 $], ~Q[4 banana])
~Q[60 $*banana]
iex> Quantity.mult(~Q[15 $/banana], ~Q[4 banana])
~Q[60 $]
"""
@spec mult(Quantity.t(), Quantity.t() | Decimal.t() | integer) :: Quantity.t()
def mult(%Quantity{} = quantity, scalar) when is_integer(scalar) do
mult(quantity, Quantity.new(scalar, 0, 1))
end
def mult(%Quantity{} = quantity, %Decimal{} = scalar) do
mult(quantity, Quantity.new(scalar, 1))
end
def mult(%Quantity{} = q1, %Quantity{} = q2) do
Quantity.new(Decimal.mult(q1.value, q2.value), {:mult, q1.unit, q2.unit})
end
@doc """
Round a Quantity to match a precision using the :half_up strategy
iex> Quantity.round(~Q[1.49 DKK], 1)
~Q[1.5 DKK]
iex> Quantity.round(~Q[0.5 DKK], 2)
~Q[0.50 DKK]
"""
def round(quantity, decimal_count) do
Quantity.new(Decimal.round(quantity.value, decimal_count, :half_up), quantity.unit)
end
end
| 24.833333
| 102
| 0.586348
|
9e3b4c32f74ce8a8a0e10a5cb2b313e1ca7c4baf
| 2,672
|
ex
|
Elixir
|
clients/compute/lib/google_api/compute/v1/model/ssl_certificate_list.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | null | null | null |
clients/compute/lib/google_api/compute/v1/model/ssl_certificate_list.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | null | null | null |
clients/compute/lib/google_api/compute/v1/model/ssl_certificate_list.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | 1
|
2020-11-10T16:58:27.000Z
|
2020-11-10T16:58:27.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.SslCertificateList do
  @moduledoc """
  Contains a list of SslCertificate resources.
  ## Attributes
  - id (String.t): [Output Only] Unique identifier for the resource; defined by the server. Defaults to: `null`.
  - items ([SslCertificate]): A list of SslCertificate resources. Defaults to: `null`.
  - kind (String.t): Type of resource. Defaults to: `null`.
  - nextPageToken (String.t): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. Defaults to: `null`.
  - selfLink (String.t): [Output Only] Server-defined URL for this resource. Defaults to: `null`.
  - warning (AcceleratorTypeAggregatedListWarning): Defaults to: `null`.
  """

  # ModelBase injects the struct definition, the `field` macro and decode/2.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :id => any(),
          :items => list(GoogleApi.Compute.V1.Model.SslCertificate.t()),
          :kind => any(),
          :nextPageToken => any(),
          :selfLink => any(),
          :warning => GoogleApi.Compute.V1.Model.AcceleratorTypeAggregatedListWarning.t()
        }

  # Field declarations drive JSON (de)serialization; `as:`/`type: :list`
  # tell the decoder which nested model to build for each element.
  field(:id)
  field(:items, as: GoogleApi.Compute.V1.Model.SslCertificate, type: :list)
  field(:kind)
  field(:nextPageToken)
  field(:selfLink)
  field(:warning, as: GoogleApi.Compute.V1.Model.AcceleratorTypeAggregatedListWarning)
end
# Delegate Poison decoding to the model's generated decode/2
# (provided by GoogleApi.Gax.ModelBase via `use` in the model module).
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.SslCertificateList do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.SslCertificateList.decode(value, options)
  end
end

# Encode through the shared GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.SslCertificateList do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.412698
| 381
| 0.734656
|
9e3b5e04b486c9f20f306046e3aeecd09c73f15e
| 1,823
|
exs
|
Elixir
|
clients/analytics_admin/mix.exs
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/analytics_admin/mix.exs
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/analytics_admin/mix.exs
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AnalyticsAdmin.Mixfile do
  @moduledoc false

  use Mix.Project

  # Keep in sync with the published Hex package version.
  @version "0.18.1"

  def project() do
    [
      app: :google_api_analytics_admin,
      version: @version,
      elixir: "~> 1.6",
      # Idiom fix: zero-arity remote calls take explicit parentheses
      # (`Mix.env()` is what `mix format` produces; behavior is unchanged).
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url:
        "https://github.com/googleapis/elixir-google-api/tree/master/clients/analytics_admin"
    ]
  end

  # OTP application configuration: only :logger is required at runtime.
  def application() do
    [extra_applications: [:logger]]
  end

  # Runtime dependency on the shared Gax transport; ex_doc is dev-only.
  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # One-line summary shown on Hex.
  defp description() do
    """
    Google Analytics Admin API client library.
    """
  end

  # Hex package metadata: shipped files, maintainers, license and links.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/analytics_admin",
        "Homepage" => "http://code.google.com/apis/analytics/docs/mgmt/home.html"
      }
    ]
  end
end
| 27.208955
| 106
| 0.655513
|
9e3b64403321dba77c99b42f97025b757819b769
| 2,998
|
ex
|
Elixir
|
lib/exhoedown.ex
|
nipinium/exhoedown
|
707205bcdc446282915492f55bf73a61be7df0a2
|
[
"Unlicense"
] | null | null | null |
lib/exhoedown.ex
|
nipinium/exhoedown
|
707205bcdc446282915492f55bf73a61be7df0a2
|
[
"Unlicense"
] | null | null | null |
lib/exhoedown.ex
|
nipinium/exhoedown
|
707205bcdc446282915492f55bf73a61be7df0a2
|
[
"Unlicense"
] | null | null | null |
defmodule ExHoedown do
  @moduledoc """
  Markdown to HTML conversion.
  """

  # Run init/0 automatically when this module is loaded, so the NIF
  # implementations replace the Elixir stubs below.
  @on_load { :init, 0 }
  @app Mix.Project.config[:app]

  # Loads the `exhoedown` shared object from the application's priv dir.
  # Raises (via the := match) if the NIF cannot be loaded.
  def init do
    path = :filename.join(:code.priv_dir(unquote(@app)), 'exhoedown')
    :ok = :erlang.load_nif(path, 0)
  end

  @doc ~S"""
  Converts a Markdown document to HTML:
  iex> ExHoedown.to_html "# Hello World"
  "<h1>Hello World</h1>\n"
  iex> ExHoedown.to_html "http://elixir-lang.org/", autolink: true
  "<p><a href=\"http://elixir-lang.org/\">http://elixir-lang.org/</a></p>\n"
  Available output options:
  * `:tables` - Enables Markdown Extra style tables (default: `false`)
  * `:fenced_code` - Enables fenced code blocks (default: `false`)
  * `:footnotes` - Parse footnotes. (default: `false`)
  * `:autolink` - Automatically turn URLs into links (default: `false`)
  * `:strikethrough` - Parse `~~stikethrough~~` spans. (default: `false`)
  * `:underline` - Parse `_underline_` instead of emphasis. (default: `false`)
  * `:highlight` - Parse `==highlight==` spans. (default: `false`)
  * `:quote` - Render `"quotes"` as `<q>quotes</q>`. (default: `false`)
  * `:superscript` - Parse `super^script`. (default: `false`)
  * `:no_intra_emphasis` - Disable `emphasis_between_words.` (default: `false`)
  * `:space_headers` - Require a space after '#' in headers. (default: `false`)
  * `:disable_indented_code` - Don't parse indented code blocks. (default: `false`)
  * `:strip_html` - Strip all HTML tags. (default: `false`)
  * `:escape_html` - Escape all HTML. (default: `false`)
  * `:hard_wrap` - Render each linebreak as `<br>`. (default: `false`)
  * `:xhtml` - Render XHTML. (default: `false`)
  """
  @spec to_html(doc :: String.t) :: String.t
  @spec to_html(doc :: String.t, options :: Keyword.t) :: String.t
  def to_html(doc, options \\ []) do
    # Translate keyword options into the two bitmasks the NIF expects.
    {html_flags, extension_flags} = extract_flags(options)
    to_html_nif(doc, html_flags, extension_flags)
  end

  # Stub overridden by the NIF; only reached if the NIF failed to load.
  def to_html_nif(_, _, _) do
    exit(:nif_library_not_loaded)
  end

  use Bitwise

  # Bit positions below mirror the C library's flag constants —
  # NOTE(review): confirm against the bundled hoedown headers before changing.
  @html_flags %{
    strip_html: (1 <<< 0),
    escape_html: (1 <<< 1),
    hard_wrap: (1 <<< 2),
    xhtml: (1 <<< 3),
  }
  @extension_flags %{
    # block-level extensions
    tables: (1 <<< 0),
    fenced_code: (1 <<< 1),
    footnotes: (1 <<< 2),
    # span-level extensions
    autolink: (1 <<< 3),
    strikethrough: (1 <<< 4),
    underline: (1 <<< 5),
    highlight: (1 <<< 6),
    quote: (1 <<< 7),
    superscript: (1 <<< 8),
    math: (1 <<< 9),
    # other flags
    no_intra_emphasis: (1 <<< 11),
    space_headers: (1 <<< 12),
    math_explicit: (1 <<< 13),
    # negative flags
    disable_indented_code: (1 <<< 14)
  }

  # OR together the bits for every option set to `true`; unknown keys and
  # `false` values contribute nothing.
  defp extract_flags(options) do
    Enum.reduce options, {0, 0}, fn {key, value}, {html_flags, extension_flags} ->
      case value do
        true -> {html_flags ||| Map.get(@html_flags, key, 0), extension_flags ||| Map.get(@extension_flags, key, 0)}
        false -> {html_flags, extension_flags}
      end
    end
  end
end
| 32.236559
| 116
| 0.610407
|
9e3b65eeedfb164778e6d6c515abe04b08c5bea9
| 11,174
|
ex
|
Elixir
|
clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/projects.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/projects.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/projects.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Api.Projects do
  @moduledoc """
  API calls for all endpoints tagged `Projects`.
  """

  alias GoogleApi.SQLAdmin.V1beta4.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Reported to the server for client-version telemetry; empty outside Mix builds.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Reschedules the maintenance on the given instance.
  ## Parameters
  * `connection` (*type:* `GoogleApi.SQLAdmin.V1beta4.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - ID of the project that contains the instance.
  * `instance` (*type:* `String.t`) - Cloud SQL instance ID. This does not include the project ID.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
  * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
  * `:access_token` (*type:* `String.t`) - OAuth access token.
  * `:alt` (*type:* `String.t`) - Data format for response.
  * `:callback` (*type:* `String.t`) - JSONP
  * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
  * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
  * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
  * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
  * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
  * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
  * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  * `:body` (*type:* `GoogleApi.SQLAdmin.V1beta4.Model.SqlInstancesRescheduleMaintenanceRequestBody.t`) -
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.SQLAdmin.V1beta4.Model.Operation{}}` on success
  * `{:error, info}` on failure
  """
  @spec sql_projects_instances_reschedule_maintenance(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.SQLAdmin.V1beta4.Model.Operation.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  # Builds and executes a POST to .../rescheduleMaintenance and decodes
  # the response into an Operation struct.
  def sql_projects_instances_reschedule_maintenance(
        connection,
        project,
        instance,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each allowed optional param to where it is placed in the request
    # (query string vs request body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url(
        "/sql/v1beta4/projects/{project}/instances/{instance}/rescheduleMaintenance",
        %{
          # Path segments are percent-encoded to keep the URL well-formed.
          "project" => URI.encode(project, &URI.char_unreserved?/1),
          "instance" => URI.encode(instance, &URI.char_unreserved?/1)
        }
      )
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.SQLAdmin.V1beta4.Model.Operation{}])
  end

  @doc """
  Start External master migration.
  ## Parameters
  * `connection` (*type:* `GoogleApi.SQLAdmin.V1beta4.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - ID of the project that contains the first generation instance.
  * `instance` (*type:* `String.t`) - Cloud SQL instance ID. This does not include the project ID.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
  * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
  * `:access_token` (*type:* `String.t`) - OAuth access token.
  * `:alt` (*type:* `String.t`) - Data format for response.
  * `:callback` (*type:* `String.t`) - JSONP
  * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
  * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
  * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
  * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
  * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
  * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
  * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  * `:syncMode` (*type:* `String.t`) - External sync mode
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.SQLAdmin.V1beta4.Model.Operation{}}` on success
  * `{:error, info}` on failure
  """
  @spec sql_projects_instances_start_external_sync(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.SQLAdmin.V1beta4.Model.Operation.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  # POST to .../startExternalSync; same request-building pattern as above,
  # but all optional params go in the query string (no body).
  def sql_projects_instances_start_external_sync(
        connection,
        project,
        instance,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :syncMode => :query
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/sql/v1beta4/projects/{project}/instances/{instance}/startExternalSync", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "instance" => URI.encode(instance, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.SQLAdmin.V1beta4.Model.Operation{}])
  end

  @doc """
  Verify External master external sync settings.
  ## Parameters
  * `connection` (*type:* `GoogleApi.SQLAdmin.V1beta4.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - Project ID of the project that contains the instance.
  * `instance` (*type:* `String.t`) - Cloud SQL instance ID. This does not include the project ID.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
  * `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
  * `:access_token` (*type:* `String.t`) - OAuth access token.
  * `:alt` (*type:* `String.t`) - Data format for response.
  * `:callback` (*type:* `String.t`) - JSONP
  * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
  * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
  * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
  * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
  * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
  * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
  * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  * `:syncMode` (*type:* `String.t`) - External sync mode
  * `:verifyConnectionOnly` (*type:* `boolean()`) - Flag to enable verifying connection only
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.SQLAdmin.V1beta4.Model.SqlInstancesVerifyExternalSyncSettingsResponse{}}` on success
  * `{:error, info}` on failure
  """
  @spec sql_projects_instances_verify_external_sync_settings(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok,
           GoogleApi.SQLAdmin.V1beta4.Model.SqlInstancesVerifyExternalSyncSettingsResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  # POST to .../verifyExternalSyncSettings; decodes into the dedicated
  # verify-settings response model rather than an Operation.
  def sql_projects_instances_verify_external_sync_settings(
        connection,
        project,
        instance,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :syncMode => :query,
      :verifyConnectionOnly => :query
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url(
        "/sql/v1beta4/projects/{project}/instances/{instance}/verifyExternalSyncSettings",
        %{
          "project" => URI.encode(project, &URI.char_unreserved?/1),
          "instance" => URI.encode(instance, &URI.char_unreserved?/1)
        }
      )
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++
        [
          struct:
            %GoogleApi.SQLAdmin.V1beta4.Model.SqlInstancesVerifyExternalSyncSettingsResponse{}
        ]
    )
  end
end
| 41.69403
| 196
| 0.617147
|
9e3b7baffce95f2913bfce07fa8cd75c5374d1b2
| 2,361
|
exs
|
Elixir
|
config/prod.exs
|
feihong/elixir-quickstart
|
bbdb839c3db4f1470b4172b7036dc6d9ed9c6251
|
[
"Apache-2.0"
] | null | null | null |
config/prod.exs
|
feihong/elixir-quickstart
|
bbdb839c3db4f1470b4172b7036dc6d9ed9c6251
|
[
"Apache-2.0"
] | null | null | null |
config/prod.exs
|
feihong/elixir-quickstart
|
bbdb839c3db4f1470b4172b7036dc6d9ed9c6251
|
[
"Apache-2.0"
] | null | null | null |
# NOTE(review): `use Mix.Config` is deprecated in newer Elixir in favor of
# `import Config` — confirm the project's minimum Elixir version before changing.
use Mix.Config

# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# QuickstartWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
config :quickstart, QuickstartWeb.Endpoint,
  http: [port: {:system, "PORT"}],
  url: [host: "localhost", port: {:system, "PORT"}], # This is critical for ensuring web-sockets properly authorize.
  # cache_static_manifest: "priv/static/cache_manifest.json",
  server: true,
  root: ".",
  version: Application.spec(:quickstart, :vsn)

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :quickstart, QuickstartWeb.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [:inet6,
#               port: 443,
#               keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#               certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
#     config :quickstart, QuickstartWeb.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
#     config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
#     config :quickstart, QuickstartWeb.Endpoint, server: true
#

# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 34.720588
| 116
| 0.718763
|
9e3b8ceb329a1c8c68a86140c90e4820eae82e2b
| 1,060
|
ex
|
Elixir
|
lib/soundcloud/utils.ex
|
adolfosilva/soundcloud
|
2b1671be3e8cffa8a2d4ec89f87a4b295028fba0
|
[
"MIT"
] | null | null | null |
lib/soundcloud/utils.ex
|
adolfosilva/soundcloud
|
2b1671be3e8cffa8a2d4ec89f87a4b295028fba0
|
[
"MIT"
] | null | null | null |
lib/soundcloud/utils.ex
|
adolfosilva/soundcloud
|
2b1671be3e8cffa8a2d4ec89f87a4b295028fba0
|
[
"MIT"
] | null | null | null |
defmodule Soundcloud.Utils do
  @moduledoc """
  Utilities module.
  Collection of useful functions.
  """

  @doc """
  Flattens a list of maps to a single map.
  ## Examples
  iex> Soundcloud.Utils.list_of_maps_to_map([%{"a" => 5}, %{"b" => 10}])
  %{"a" => 5, "b" => 10}
  """
  @spec list_of_maps_to_map(list(map), map) :: map
  def list_of_maps_to_map(maps, initial \\ %{}) do
    Enum.reduce(maps, initial, fn entry, merged ->
      # `nil` entries are tolerated and treated as empty maps; later entries
      # win on duplicate keys, matching Map.merge/2 semantics.
      Map.merge(merged, entry || %{})
    end)
  end

  @doc """
  Transforms a map with string for keys to a map with atoms as keys.
  ## Examples
  iex> Soundcloud.Utils.map_string_keys_to_atoms(%{"foo" => 5, "bar" => %{"tar" => 10}})
  %{foo: 5, bar: %{tar: 10}}
  """
  @spec map_string_keys_to_atoms(%{optional(binary()) => any()}) :: map
  def map_string_keys_to_atoms(map) do
    # NOTE(review): String.to_atom/1 on unbounded external input can exhaust
    # the atom table — confirm callers only feed trusted, bounded key sets.
    Map.new(map, fn {key, value} -> {String.to_atom(key), atomize(value)} end)
  end

  # Recurse into nested maps; leave every other value untouched.
  defp atomize(value) when is_map(value), do: map_string_keys_to_atoms(value)
  defp atomize(value), do: value
end
| 25.238095
| 92
| 0.606604
|
9e3bb5dc475817dfc113c5b889e3785992a8efbd
| 576
|
ex
|
Elixir
|
lib/alcsmg/queue/worker/root_supervisor.ex
|
velimir0xff/alcsmg
|
7f8845b4e4698009a44769fc593a551c5f9387ee
|
[
"MIT"
] | null | null | null |
lib/alcsmg/queue/worker/root_supervisor.ex
|
velimir0xff/alcsmg
|
7f8845b4e4698009a44769fc593a551c5f9387ee
|
[
"MIT"
] | null | null | null |
lib/alcsmg/queue/worker/root_supervisor.ex
|
velimir0xff/alcsmg
|
7f8845b4e4698009a44769fc593a551c5f9387ee
|
[
"MIT"
] | null | null | null |
defmodule Alcsmg.Queue.Worker.RootSupervisor do
  # Top-level supervisor for the queue worker subsystem: supervises the worker
  # supervisor plus the manager process that is told the initial pool size.
  use Supervisor

  # Fallback pool size when no :init_work_number is configured.
  @init_worker_number 10

  def start_link do
    Supervisor.start_link(__MODULE__, [], name: __MODULE__)
  end

  def init(_args) do
    # NOTE(review): supervisor/2, worker/2 and supervise/2 come from the
    # deprecated Supervisor.Spec API — consider migrating to child specs.
    children = [
      supervisor(Alcsmg.Queue.Worker.Supervisor, []),
      # The manager is started after the worker supervisor so it can populate it.
      worker(Alcsmg.Queue.Worker.Manager, [%{init_number: get_workers_number}])
    ]
    # TODO: check it, I bet it's not a good approach
    # (:one_for_all restarts both children if either one crashes.)
    supervise(children, strategy: :one_for_all)
  end

  # Reads the configured initial worker count, falling back to the module default.
  defp get_workers_number do
    Application.get_env(:alcsmg, :init_work_number, @init_worker_number)
  end
end
| 25.043478
| 79
| 0.722222
|
9e3be9c8ba04f243396629b80ca4455abcea74c6
| 200
|
exs
|
Elixir
|
test/fatex_web/controllers/page_controller_test.exs
|
vinicius-molina/FaTex
|
bbc05acef7a9697efe9ec0fe64511bce9f26a9b3
|
[
"Apache-2.0"
] | 3
|
2019-12-08T19:42:39.000Z
|
2020-03-17T13:02:56.000Z
|
test/fatex_web/controllers/page_controller_test.exs
|
vinicius-molina/FaTex
|
bbc05acef7a9697efe9ec0fe64511bce9f26a9b3
|
[
"Apache-2.0"
] | 1
|
2021-03-09T19:49:41.000Z
|
2021-03-09T19:49:41.000Z
|
test/fatex_web/controllers/page_controller_test.exs
|
vinicius-molina/FaTex
|
bbc05acef7a9697efe9ec0fe64511bce9f26a9b3
|
[
"Apache-2.0"
] | 1
|
2019-12-08T19:42:42.000Z
|
2019-12-08T19:42:42.000Z
|
defmodule FatexWeb.PageControllerTest do
  use FatexWeb.ConnCase

  # The stock Phoenix smoke test was left commented out by the authors —
  # presumably the root route no longer renders the default welcome page;
  # confirm the expected markup before re-enabling.
  #test "GET /", %{conn: conn} do
  #  conn = get(conn, "/")
  #  assert html_response(conn, 200) =~ "Welcome to Phoenix!"
  #end
end
| 22.222222
| 61
| 0.66
|
9e3c0473fdef2e1e4565f912e7edfa6986df600e
| 1,553
|
ex
|
Elixir
|
lib/history/funding_rates/funding_rate_history_job.ex
|
fremantle-industries/history
|
a8a33744279ff4ca62620785f9a2e9c0c99e4de7
|
[
"MIT"
] | 20
|
2021-08-06T01:09:48.000Z
|
2022-03-28T18:44:56.000Z
|
lib/history/funding_rates/funding_rate_history_job.ex
|
fremantle-industries/history
|
a8a33744279ff4ca62620785f9a2e9c0c99e4de7
|
[
"MIT"
] | 13
|
2021-08-21T21:17:02.000Z
|
2022-03-27T06:33:51.000Z
|
lib/history/funding_rates/funding_rate_history_job.ex
|
fremantle-industries/history
|
a8a33744279ff4ca62620785f9a2e9c0c99e4de7
|
[
"MIT"
] | 2
|
2021-09-23T11:31:59.000Z
|
2022-01-09T16:19:35.000Z
|
defmodule History.FundingRates.FundingRateHistoryJob do
  # Ecto schema for a funding-rate backfill job: a from/to date-time range,
  # the embedded venue/symbol products to fetch, a status, and child chunks.
  use Ecto.Schema
  import Ecto.Changeset
  alias History.FundingRates

  @type t :: %__MODULE__{}
  @type id :: integer

  defmodule Product do
    # Embedded venue/symbol pair identifying one product covered by the job.
    use Ecto.Schema
    import Ecto.Changeset

    # Expose only the two business fields when serializing to JSON.
    @derive {Jason.Encoder, only: [:venue, :symbol]}
    @primary_key false
    embedded_schema do
      field(:venue, :string)
      field(:symbol, :string)
    end

    @doc false
    def changeset(product, attrs) do
      product
      |> cast(attrs, [:venue, :symbol])
      |> validate_required([:venue, :symbol])
    end
  end

  schema "funding_rate_history_jobs" do
    # The range is persisted as separate date and time columns; the RangeJob
    # implementation below recombines them into DateTime values.
    field(:from_date, :date)
    field(:from_time, :time)
    field(:to_date, :date)
    field(:to_time, :time)
    field(:status, History.JobStatusType)
    embeds_many(:products, Product)
    has_many(:chunks, FundingRates.FundingRateHistoryChunk, foreign_key: :job_id)
    timestamps()
  end

  @doc false
  def changeset(funding_rate, attrs) do
    funding_rate
    |> cast(attrs, [:from_date, :from_time, :to_date, :to_time, :status])
    # At least one product is required for a job to make sense.
    |> cast_embed(:products, required: true)
    |> validate_required([:from_date, :from_time, :to_date, :to_time, :status, :products])
  end

  defimpl History.RangeJob do
    # Recombine the stored date + time columns into DateTime values.
    # NOTE(review): DateTime.new/2 defaults to Etc/UTC — confirm job times
    # are intended to be UTC.
    def from(job) do
      DateTime.new(job.from_date, job.from_time)
    end

    def to(job) do
      DateTime.new(job.to_date, job.to_time)
    end

    # Bang variants raise instead of returning {:ok, _}/{:error, _}.
    def from!(job) do
      DateTime.new!(job.from_date, job.from_time)
    end

    def to!(job) do
      DateTime.new!(job.to_date, job.to_time)
    end
  end
end
| 23.530303
| 90
| 0.663876
|
9e3c40275df1fc362c58428c4faaa94ef5c759c6
| 929
|
ex
|
Elixir
|
test/support/channel_case.ex
|
MatthieuSegret/yummy-phoenix
|
85b490075e3a0395b4e7cfa9f06936659e9d12b5
|
[
"MIT"
] | 5
|
2017-08-27T19:45:43.000Z
|
2019-06-28T08:12:25.000Z
|
test/support/channel_case.ex
|
MatthieuSegret/yummy-phoenix
|
85b490075e3a0395b4e7cfa9f06936659e9d12b5
|
[
"MIT"
] | null | null | null |
test/support/channel_case.ex
|
MatthieuSegret/yummy-phoenix
|
85b490075e3a0395b4e7cfa9f06936659e9d12b5
|
[
"MIT"
] | null | null | null |
defmodule YummyWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.
  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common datastructures and query the data layer.
  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  # Code injected into every test module that does `use YummyWeb.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest

      # The default endpoint for testing
      @endpoint YummyWeb.Endpoint
    end
  end

  # Per-test setup: check out a sandboxed DB connection; for non-async tests,
  # share that connection with any process spawned during the test.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Yummy.Repo)
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Yummy.Repo, {:shared, self()})
    end
    :ok
  end
end
| 24.447368
| 67
| 0.713671
|
9e3c4e4abb64614b5dcdd6f218578d02f513bcd0
| 609
|
exs
|
Elixir
|
_templates/elixir/puzzle_test.exs
|
ardhena/advent-of-code-2018
|
573f44c02f4308febdba2d09eda013a3cb205387
|
[
"MIT"
] | null | null | null |
_templates/elixir/puzzle_test.exs
|
ardhena/advent-of-code-2018
|
573f44c02f4308febdba2d09eda013a3cb205387
|
[
"MIT"
] | null | null | null |
_templates/elixir/puzzle_test.exs
|
ardhena/advent-of-code-2018
|
573f44c02f4308febdba2d09eda013a3cb205387
|
[
"MIT"
] | null | null | null |
Code.load_file("puzzle.exs", __DIR__)
ExUnit.start()
defmodule PuzzleTest do
use ExUnit.Case
describe "part one" do
test "calculates ... from input" do
# assert Puzzle.calc_part_1(input) == result
end
test "calculates ... from input file" do
# assert Puzzle.load_input() |> Puzzle.calc_part_1() == result
end
end
describe "part two" do
test "calculates ... from input" do
# assert Puzzle.calc_part_2(input) == result
end
test "calculates ... from input file" do
# assert Puzzle.load_input() |> Puzzle.calc_part_2() == result
end
end
end
| 21.75
| 68
| 0.648604
|
9e3c9f88fd82d24c4b4fdbfd8289004cbd111eeb
| 1,094
|
ex
|
Elixir
|
tests/server/lib/server_web/channels/user_socket.ex
|
Webtrekk/Webtrekk-Smart-Pixel
|
8ac5e5132d600ccd486573f88be84908b614530b
|
[
"MIT"
] | 6
|
2019-05-10T00:01:17.000Z
|
2022-03-03T12:52:51.000Z
|
tests/server/lib/server_web/channels/user_socket.ex
|
Webtrekk/Webtrekk-Smart-Pixel
|
8ac5e5132d600ccd486573f88be84908b614530b
|
[
"MIT"
] | 4
|
2020-01-20T13:19:30.000Z
|
2021-10-15T07:46:59.000Z
|
tests/server/lib/server_web/channels/user_socket.ex
|
Webtrekk/Webtrekk-Smart-Pixel
|
8ac5e5132d600ccd486573f88be84908b614530b
|
[
"MIT"
] | 2
|
2020-05-02T15:59:43.000Z
|
2020-10-21T09:00:38.000Z
|
defmodule ServerWeb.UserSocket do
use Phoenix.Socket
## Channels
channel "requests:*", ServerWeb.RequestsChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
@impl true
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# ServerWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
@impl true
def id(_socket), do: nil
end
| 30.388889
| 83
| 0.697441
|
9e3ca0b25db296249c3da99fd381b5cb3232ae92
| 401
|
ex
|
Elixir
|
takso-hw5 2/web/models/allocation.ex
|
alpdenizz/ElixirPlayground
|
58b5a16c489058e0067e4811042d96fe6a5f8d59
|
[
"MIT"
] | null | null | null |
takso-hw5 2/web/models/allocation.ex
|
alpdenizz/ElixirPlayground
|
58b5a16c489058e0067e4811042d96fe6a5f8d59
|
[
"MIT"
] | null | null | null |
takso-hw5 2/web/models/allocation.ex
|
alpdenizz/ElixirPlayground
|
58b5a16c489058e0067e4811042d96fe6a5f8d59
|
[
"MIT"
] | null | null | null |
defmodule Takso.Allocation do
use Takso.Web, :model
schema "allocations" do
field :status, :string
belongs_to :taxi, Takso.Taxi, foreign_key: :taxi_id
belongs_to :booking, Takso.Booking, foreign_key: :booking_id
timestamps()
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:status])
|> validate_required([:status])
end
end
| 22.277778
| 65
| 0.645885
|
9e3cbe7b91796e93a7559fc29e70fc4d05870a48
| 945
|
exs
|
Elixir
|
code/odds/simple_csv.exs
|
alvarocamillont/introdu-o_elixir
|
1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1
|
[
"MIT"
] | null | null | null |
code/odds/simple_csv.exs
|
alvarocamillont/introdu-o_elixir
|
1d72d4f4b01d9312c4b066ce3c0fe8d9bfaaade1
|
[
"MIT"
] | 1
|
2021-03-09T16:27:25.000Z
|
2021-03-09T16:27:25.000Z
|
programming-elixir-book/code/odds/simple_csv.exs
|
jordanhubbard/elixir-projects
|
dee341d672e83a45a17a4a85abd54a480f95c506
|
[
"BSD-2-Clause"
] | null | null | null |
#---
# Excerpted from "Programming Elixir ≥ 1.6",
# published by The Pragmatic Bookshelf.
# Copyrights apply to this code. It may not be used to create training material,
# courses, books, articles, and the like. Contact us if you are in doubt.
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/elixir16 for more book information.
#---
defmodule SimpleCsv do
def sigil_v(lines, options) do
separator = cond do
?c in options -> ","
?t in options -> "\t"
true -> ","
end
lines
|> String.rstrip
|> String.split("\n")
|> Enum.map(&String.split(&1, separator))
end
end
defmodule Example do
import SimpleCsv
def comma do
~v"""
1,2,cat
4,5,6,7
linus,lucy
"""
end
def tab do
~v"""
1\t2\tcat
4\t5\t6\t7
linus\tlucy
"""t
end
end
IO.inspect Example.comma
IO.inspect Example.tab
| 19.285714
| 85
| 0.628571
|
9e3d238bc8dbe533f5378c3f736ad72650ce8e5a
| 563
|
exs
|
Elixir
|
apps/tai/test/tai/new_orders/search_transitions_count_test.exs
|
yurikoval/tai
|
94254b45d22fa0307b01577ff7c629c7280c0295
|
[
"MIT"
] | null | null | null |
apps/tai/test/tai/new_orders/search_transitions_count_test.exs
|
yurikoval/tai
|
94254b45d22fa0307b01577ff7c629c7280c0295
|
[
"MIT"
] | 78
|
2020-10-12T06:21:43.000Z
|
2022-03-28T09:02:00.000Z
|
apps/tai/test/tai/new_orders/search_transitions_count_test.exs
|
yurikoval/tai
|
94254b45d22fa0307b01577ff7c629c7280c0295
|
[
"MIT"
] | null | null | null |
defmodule Tai.NewOrders.SearchTransitionsCountTest do
use Tai.TestSupport.DataCase, async: false
test "returns the count of order transitions matching the search query" do
{:ok, order} = create_order()
{:ok, _order_transition_1} = create_order_transition(order.client_id, %{}, :cancel)
{:ok, _order_transition_2} = create_order_transition(order.client_id, %{}, :cancel)
{:ok, _order_transition_3} = create_order_transition(order.client_id, %{}, :cancel)
assert Tai.NewOrders.search_transitions_count(order.client_id, nil) == 3
end
end
| 43.307692
| 87
| 0.749556
|
9e3d41fe2617023d27b632f2f52f22fccccf7887
| 7,540
|
ex
|
Elixir
|
apps/omg/lib/omg/state.ex
|
PinkDiamond1/elixir-omg
|
70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7
|
[
"Apache-2.0"
] | 1
|
2020-05-01T12:30:09.000Z
|
2020-05-01T12:30:09.000Z
|
apps/omg/lib/omg/state.ex
|
PinkDiamond1/elixir-omg
|
70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7
|
[
"Apache-2.0"
] | null | null | null |
apps/omg/lib/omg/state.ex
|
PinkDiamond1/elixir-omg
|
70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7
|
[
"Apache-2.0"
] | 1
|
2021-12-04T00:37:46.000Z
|
2021-12-04T00:37:46.000Z
|
# Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.State do
@moduledoc """
Imperative shell - a GenServer serving the ledger, for functional core and more info see `OMG.State.Core`.
"""
alias OMG.Block
alias OMG.DB
alias OMG.Eth
alias OMG.Fees
alias OMG.State.Core
alias OMG.State.Transaction
alias OMG.State.Transaction.Validator
alias OMG.Utxo
use GenServer
use OMG.Utils.LoggerExt
@type exec_error :: Validator.exec_error()
### Client
def start_link(_args) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
@spec exec(tx :: Transaction.Recovered.t(), fees :: Fees.fee_t()) ::
{:ok, {Transaction.tx_hash(), pos_integer, non_neg_integer}}
| {:error, exec_error()}
def exec(tx, input_fees) do
GenServer.call(__MODULE__, {:exec, tx, input_fees})
end
def form_block do
GenServer.cast(__MODULE__, :form_block)
end
@spec close_block(pos_integer) :: {:ok, list(Core.db_update())}
def close_block(eth_height) do
GenServer.call(__MODULE__, {:close_block, eth_height})
end
@spec deposit(deposits :: [Core.deposit()]) :: {:ok, list(Core.db_update())}
def deposit(deposits) do
GenServer.call(__MODULE__, {:deposits, deposits})
end
@spec exit_utxos(utxos :: Core.exiting_utxos_t()) ::
{:ok, list(Core.db_update()), Core.validities_t()}
def exit_utxos(utxos) do
GenServer.call(__MODULE__, {:exit_utxos, utxos})
end
@spec utxo_exists?(Utxo.Position.t()) :: boolean()
def utxo_exists?(utxo) do
GenServer.call(__MODULE__, {:utxo_exists, utxo})
end
@spec get_status :: {non_neg_integer(), boolean()}
def get_status do
GenServer.call(__MODULE__, :get_status)
end
### Server
@doc """
Start processing state using the database entries
"""
def init(:ok) do
# Get data essential for the State and Blockgetter. And it takes a while. TODO - measure it!
# Our approach is simply blocking the supervision boot tree
# until we've processed history.
{:ok, utxos_query_result} = DB.utxos()
{:ok, height_query_result} = DB.get_single_value(:child_top_block_number)
{:ok, last_deposit_query_result} = DB.get_single_value(:last_deposit_child_blknum)
{:ok, [utxos_query_result, height_query_result, last_deposit_query_result], {:continue, :setup}}
end
def handle_continue(:setup, [utxos_query_result, height_query_result, last_deposit_query_result]) do
{:ok, child_block_interval} = Eth.RootChain.get_child_block_interval()
{:ok, state} =
with {:ok, _data} = result <-
Core.extract_initial_state(
utxos_query_result,
height_query_result,
last_deposit_query_result,
child_block_interval
) do
_ =
Logger.info(
"Started #{inspect(__MODULE__)}, height: #{height_query_result}, deposit height: #{
last_deposit_query_result
}"
)
{:ok, _} =
:timer.send_interval(Application.fetch_env!(:omg, :metrics_collection_interval), self(), :send_metrics)
result
else
{:error, reason} = error when reason in [:top_block_number_not_found, :last_deposit_not_found] ->
_ = Logger.error("It seems that Child chain database is not initialized. Check README.md")
error
other ->
other
end
{:noreply, state}
end
def handle_info(:send_metrics, state) do
:ok = :telemetry.execute([:process, __MODULE__], %{}, state)
{:noreply, state}
end
@doc """
Checks (stateful validity) and executes a spend transaction. Assuming stateless validity!
"""
def handle_call({:exec, tx, fees}, _from, state) do
case Core.exec(state, tx, fees) do
{:ok, tx_result, new_state} ->
{:reply, {:ok, tx_result}, new_state}
{tx_result, new_state} ->
{:reply, tx_result, new_state}
end
end
@doc """
Includes a deposit done on the root chain contract (see above - not sure about this)
"""
def handle_call({:deposits, deposits}, _from, state) do
{:ok, {event_triggers, db_updates}, new_state} = Core.deposit(deposits, state)
:ok = OMG.Bus.broadcast("events", {:preprocess_emit_events, event_triggers})
{:reply, {:ok, db_updates}, new_state}
end
@doc """
Exits (spends) utxos on child chain, explicitly signals all utxos that have already been spent
"""
def handle_call({:exit_utxos, utxos}, _from, state) do
{:ok, {db_updates, validities}, new_state} = Core.exit_utxos(utxos, state)
{:reply, {:ok, db_updates, validities}, new_state}
end
@doc """
Tells if utxo exists
"""
def handle_call({:utxo_exists, utxo}, _from, state) do
{:reply, Core.utxo_exists?(utxo, state), state}
end
@doc """
Gets the current block's height and whether at the beginning of a block.
Beginning of block is true if and only if the last block has been committed
and none transaction from the next block has been executed.
"""
def handle_call(:get_status, _from, state) do
{:reply, Core.get_status(state), state}
end
@doc """
Works exactly like handle_cast(:form_block) but:
- is synchronous
- `eth_height` given is the Ethereum chain height where the block being closed got submitted, to be used with events.
- relies on the caller to handle persistence, instead of handling itself
Someday, one might want to skip some of computations done (like calculating the root hash, which is scrapped)
"""
def handle_call({:close_block, eth_height}, _from, state) do
{:ok, {block, event_triggers, db_updates}, new_state} = do_form_block(state, eth_height)
publish_block_to_event_bus(block, event_triggers)
{:reply, {:ok, db_updates}, new_state}
end
@doc """
Wraps up accumulated transactions submissions into a block, triggers db update and:
- pushes events to subscribers of `"event_triggers"` internal event bus topic
- pushes the new block to subscribers of `"blocks"` internal event bus topic
Does its on persistence!
"""
def handle_cast(:form_block, state) do
_ = Logger.debug("Forming new block...")
{:ok, {%Block{number: blknum} = block, event_triggers, db_updates}, new_state} = do_form_block(state)
_ = Logger.debug("Formed new block ##{blknum}")
# persistence is required to be here, since propagating the block onwards requires restartability including the
# new block
:ok = DB.multi_update(db_updates)
publish_block_to_event_bus(block, event_triggers)
{:noreply, new_state}
end
defp do_form_block(state, eth_height \\ nil) do
{:ok, child_block_interval} = Eth.RootChain.get_child_block_interval()
Core.form_block(child_block_interval, eth_height, state)
end
defp publish_block_to_event_bus(block, event_triggers) do
:ok = OMG.Bus.broadcast("events", {:preprocess_emit_events, event_triggers})
:ok = OMG.Bus.direct_local_broadcast("blocks", {:enqueue_block, block})
end
end
| 33.215859
| 120
| 0.688196
|
9e3d44fc9a3d890af632244e53941bde81cfc10a
| 4,071
|
exs
|
Elixir
|
test/downstream_test.exs
|
regularfellow/downstream
|
436de5d57eabc58e60e1bf43c3dab78170dbe8e3
|
[
"MIT"
] | 21
|
2018-03-23T03:40:38.000Z
|
2022-01-31T01:51:30.000Z
|
test/downstream_test.exs
|
regularfellow/downstream
|
436de5d57eabc58e60e1bf43c3dab78170dbe8e3
|
[
"MIT"
] | 3
|
2021-05-06T18:52:57.000Z
|
2021-12-23T13:07:24.000Z
|
test/downstream_test.exs
|
regularfellow/downstream
|
436de5d57eabc58e60e1bf43c3dab78170dbe8e3
|
[
"MIT"
] | 6
|
2020-02-27T00:16:22.000Z
|
2021-12-22T19:39:37.000Z
|
defmodule DownstreamTest do
use ExUnit.Case
import Mimic
doctest Downstream
alias Downstream.{Download, Error, Response}
@success_url "https://httpstat.us/200"
@error_url "https://httpstat.us/403"
describe "get/3" do
setup :verify_on_exit!
setup :set_mimic_global
setup _context do
{:ok, pid} = StringIO.open("get test")
[io_device: pid]
end
test "successfully downloads a file with a get request", context do
stub(Download, :stream, fn _ ->
{:ok, %Response{device: context.io_device, status_code: 200}}
end)
{:ok, response} = Downstream.get(@success_url, context.io_device)
assert response.device == context.io_device
assert response.status_code == 200
end
test "returns an error for an unsuccessful download", context do
stub(Download, :stream, fn _ ->
{:error, %Error{status_code: 403}}
end)
{:error, response} = Downstream.get(@error_url, context.io_device)
assert response.status_code == 403
end
test "accepts a configurable timeout", context do
stub(Download, :stream, fn _ ->
{:error, %Error{reason: :timeout}}
end)
{:error, error} = Downstream.get(@success_url, context.io_device, timeout: 0)
assert error.reason == :timeout
end
end
describe "get!/3" do
setup :verify_on_exit!
setup :set_mimic_global
setup _context do
{:ok, pid} = StringIO.open("get! test")
[io_device: pid]
end
test "successfully downloads a file with a get request", context do
stub(Download, :stream, fn _ ->
{:ok, %Response{device: context.io_device, status_code: 200}}
end)
response = Downstream.get!(@success_url, context.io_device)
assert response.device == context.io_device
assert response.status_code == 200
end
test "raises an error for an unsuccessful download", context do
stub(Download, :stream, fn _ ->
{:error, %Error{status_code: 403}}
end)
assert_raise Error, fn ->
Downstream.get!(@error_url, context.io_device)
end
end
end
describe "post/4" do
setup :verify_on_exit!
setup :set_mimic_global
setup _context do
{:ok, pid} = StringIO.open("post test")
[io_device: pid]
end
test "successfully downloads a file with a post request", context do
stub(Download, :stream, fn _ ->
{:ok, %Response{device: context.io_device, status_code: 200}}
end)
{:ok, response} = Downstream.post(@success_url, context.io_device)
assert response.device == context.io_device
assert response.status_code == 200
end
test "returns an error for an unsuccessful download", context do
stub(Download, :stream, fn _ ->
{:error, %Error{status_code: 403}}
end)
{:error, error} = Downstream.post(@error_url, context.io_device)
assert error.status_code == 403
end
test "accepts a configurable timeout", context do
stub(Download, :stream, fn _ ->
{:error, %Error{reason: :timeout}}
end)
{:error, error} = Downstream.get(@success_url, context.io_device, timeout: 0)
assert error.reason == :timeout
end
end
describe "post!/4" do
setup :verify_on_exit!
setup :set_mimic_global
setup _context do
{:ok, pid} = StringIO.open("post! test")
[io_device: pid]
end
test "successfully downloads a file with a post request", context do
stub(Download, :stream, fn _ ->
{:ok, %Response{device: context.io_device, status_code: 200}}
end)
response = Downstream.post!(@success_url, context.io_device)
assert response.device == context.io_device
assert response.status_code == 200
end
test "raises an error for an unsuccessful download", context do
stub(Download, :stream, fn _ ->
{:error, %Error{status_code: 403}}
end)
assert_raise Error, fn ->
Downstream.post!(@error_url, context.io_device)
end
end
end
end
| 25.603774
| 83
| 0.638909
|
9e3d5ad496c7ea6878d3b1b688a05a738033a551
| 5,944
|
ex
|
Elixir
|
apps/omg_watcher/test/support/exit_processor/test_helper.ex
|
boolafish/elixir-omg
|
46b568404972f6e4b4da3195d42d4fb622edb934
|
[
"Apache-2.0"
] | null | null | null |
apps/omg_watcher/test/support/exit_processor/test_helper.ex
|
boolafish/elixir-omg
|
46b568404972f6e4b4da3195d42d4fb622edb934
|
[
"Apache-2.0"
] | null | null | null |
apps/omg_watcher/test/support/exit_processor/test_helper.ex
|
boolafish/elixir-omg
|
46b568404972f6e4b4da3195d42d4fb622edb934
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.ExitProcessor.TestHelper do
@moduledoc """
Common utilities to manipulate the `ExitProcessor`
"""
import ExUnit.Assertions
alias OMG.State.Transaction
alias OMG.Utxo
alias OMG.Watcher.ExitProcessor.Core
require Utxo
# default exit_id used when starting exits using `start_se_from` and `start_ife_from`
@exit_id 9876
def start_se_from(%Core{} = processor, tx, exiting_pos, opts \\ []) do
{event, status} = se_event_status(tx, exiting_pos, opts)
{processor, _} = Core.new_exits(processor, [event], [status])
processor
end
def se_event_status(tx, exiting_pos, opts \\ []) do
Utxo.position(_, _, oindex) = exiting_pos
txbytes = Transaction.raw_txbytes(tx)
enc_pos = Utxo.Position.encode(exiting_pos)
owner = tx |> Transaction.get_outputs() |> Enum.at(oindex) |> Map.get(:owner)
eth_height = Keyword.get(opts, :eth_height, 2)
exit_id = Keyword.get(opts, :exit_id, @exit_id)
call_data = %{utxo_pos: enc_pos, output_tx: txbytes}
root_chain_txhash = <<1::256>>
block_timestamp = :os.system_time(:second)
scheduled_finalization_time = block_timestamp + 100
event = %{
owner: owner,
eth_height: eth_height,
exit_id: exit_id,
call_data: call_data,
root_chain_txhash: root_chain_txhash,
block_timestamp: block_timestamp,
scheduled_finalization_time: scheduled_finalization_time
}
exitable = not Keyword.get(opts, :inactive, false)
# those should be unused so setting to `nil`
fake_output_id = enc_pos
amount = nil
bond_size = nil
status = Keyword.get(opts, :status) || {exitable, enc_pos, fake_output_id, owner, amount, bond_size}
{event, status}
end
def start_ife_from(%Core{} = processor, tx, opts \\ []) do
exit_id = Keyword.get(opts, :exit_id, @exit_id)
status = Keyword.get(opts, :status, active_ife_status())
status = if status == :inactive, do: inactive_ife_status(), else: status
{processor, _} = Core.new_in_flight_exits(processor, [ife_event(tx, opts)], [{status, exit_id}])
processor
end
# See `OMG.Eth.RootChain.get_in_flight_exits_structs/2` for reference of where this comes from
# `nil`s are unused portions of the returns data from the contract
def active_ife_status(), do: {nil, 1, nil, nil, nil, nil, nil}
def inactive_ife_status(), do: {nil, 0, nil, nil, nil, nil, nil}
def piggyback_ife_from(%Core{} = processor, tx_hash, output_index, piggyback_type) do
{processor, _} =
Core.new_piggybacks(processor, [
%{
tx_hash: tx_hash,
output_index: output_index,
omg_data: %{piggyback_type: piggyback_type}
}
])
processor
end
def ife_event(tx, opts \\ []) do
sigs = Keyword.get(opts, :sigs) || sigs(tx)
input_utxos_pos = Transaction.get_inputs(tx) |> Enum.map(&Utxo.Position.encode/1)
input_txs = Keyword.get(opts, :input_txs) || List.duplicate("input_tx", length(input_utxos_pos))
eth_height = Keyword.get(opts, :eth_height, 2)
%{
call_data: %{
in_flight_tx: Transaction.raw_txbytes(tx),
input_txs: input_txs,
input_utxos_pos: input_utxos_pos,
in_flight_tx_sigs: sigs
},
eth_height: eth_height
}
end
def ife_response(tx, position),
do: %{tx_hash: Transaction.raw_txhash(tx), challenge_position: Utxo.Position.encode(position)}
def ife_challenge(tx, comp, opts \\ []) do
competitor_position = Keyword.get(opts, :competitor_position)
competitor_position =
if competitor_position,
do: Utxo.Position.encode(competitor_position),
else: not_included_competitor_pos()
%{
tx_hash: Transaction.raw_txhash(tx),
competitor_position: competitor_position,
call_data: %{
competing_tx: txbytes(comp),
competing_tx_input_index: Keyword.get(opts, :competing_tx_input_index, 0),
competing_tx_sig: Keyword.get(opts, :competing_tx_sig, sig(comp))
}
}
end
def txbytes(tx), do: Transaction.raw_txbytes(tx)
def sigs(tx), do: tx.signed_tx.sigs
def sig(tx, idx \\ 0), do: tx |> sigs() |> Enum.at(idx)
def assert_proof_sound(proof_bytes) do
# NOTE: checking of actual proof working up to the contract integration test
assert is_binary(proof_bytes)
# hash size * merkle tree depth
assert byte_size(proof_bytes) == 32 * 16
end
def assert_events(events, expected_events) do
assert MapSet.new(events) == MapSet.new(expected_events)
end
def check_validity_filtered(request, processor, opts) do
exclude_events = Keyword.get(opts, :exclude, [])
only_events = Keyword.get(opts, :only, [])
{result, events} = Core.check_validity(request, processor)
any? = fn filtering_events, event ->
Enum.any?(filtering_events, fn filtering_event -> event.__struct__ == filtering_event end)
end
filtered_events =
events
|> Enum.filter(fn event ->
Enum.empty?(exclude_events) or not any?.(exclude_events, event)
end)
|> Enum.filter(fn event ->
Enum.empty?(only_events) or any?.(only_events, event)
end)
{result, filtered_events}
end
defp not_included_competitor_pos() do
<<long::256>> =
List.duplicate(<<255::8>>, 32)
|> Enum.reduce(fn val, acc -> val <> acc end)
long
end
end
| 32.304348
| 104
| 0.684556
|
9e3d5fe85e0bdcedfde7b5900374aed3325b7a5b
| 43
|
exs
|
Elixir
|
v02/ch12/pm1.edit3.exs
|
oiax/elixir-primer
|
c8b89a29f108cc335b8e1341b7a1e90ec12adc66
|
[
"MIT"
] | null | null | null |
v02/ch12/pm1.edit3.exs
|
oiax/elixir-primer
|
c8b89a29f108cc335b8e1341b7a1e90ec12adc66
|
[
"MIT"
] | null | null | null |
v02/ch12/pm1.edit3.exs
|
oiax/elixir-primer
|
c8b89a29f108cc335b8e1341b7a1e90ec12adc66
|
[
"MIT"
] | null | null | null |
x = {:a, 3, 4}
x = {:a, y, 4}
IO.inspect y
| 10.75
| 14
| 0.418605
|
9e3d7a4b46272276065d90fb446b6237cac49aac
| 1,178
|
ex
|
Elixir
|
lib/chat_api/companies.ex
|
guanghuizeng/papercups
|
0d7ca893edddea0d23f0772ccc5694edd407fc63
|
[
"MIT"
] | 1
|
2021-01-18T09:57:23.000Z
|
2021-01-18T09:57:23.000Z
|
lib/chat_api/companies.ex
|
psolvy/papercups
|
723711f00b34afc2ae91b9157d81a80bf1d94009
|
[
"MIT"
] | null | null | null |
lib/chat_api/companies.ex
|
psolvy/papercups
|
723711f00b34afc2ae91b9157d81a80bf1d94009
|
[
"MIT"
] | null | null | null |
defmodule ChatApi.Companies do
@moduledoc """
The Companies context.
"""
import Ecto.Query, warn: false
alias ChatApi.Repo
alias ChatApi.Companies.Company
@spec list_companies(binary()) :: [Company.t()]
def list_companies(account_id) do
Company |> where(account_id: ^account_id) |> Repo.all()
end
@spec get_company!(binary()) :: Company.t()
def get_company!(id), do: Repo.get!(Company, id)
@spec create_company(map()) :: {:ok, Company.t()} | {:error, Ecto.Changeset.t()}
def create_company(attrs \\ %{}) do
%Company{}
|> Company.changeset(attrs)
|> Repo.insert()
end
@spec update_company(Company.t(), map()) :: {:ok, Company.t()} | {:error, Ecto.Changeset.t()}
def update_company(%Company{} = company, attrs) do
company
|> Company.changeset(attrs)
|> Repo.update()
end
@spec delete_company(Company.t()) :: {:ok, Company.t()} | {:error, Ecto.Changeset.t()}
def delete_company(%Company{} = company) do
Repo.delete(company)
end
@spec change_company(Company.t(), map()) :: Ecto.Changeset.t()
def change_company(%Company{} = company, attrs \\ %{}) do
Company.changeset(company, attrs)
end
end
| 27.395349
| 95
| 0.648557
|
9e3db3971b8f40a6f8294f2eb259b5d82efad59b
| 385
|
ex
|
Elixir
|
elixir/lib/homework_web/schemas/types.ex
|
Arthurb101/web-homework
|
d3f768a71c7d9e13e456028c491ced8e71e6cda4
|
[
"MIT"
] | 1
|
2021-01-24T06:15:39.000Z
|
2021-01-24T06:15:39.000Z
|
elixir/lib/homework_web/schemas/types.ex
|
Arthurb101/web-homework
|
d3f768a71c7d9e13e456028c491ced8e71e6cda4
|
[
"MIT"
] | null | null | null |
elixir/lib/homework_web/schemas/types.ex
|
Arthurb101/web-homework
|
d3f768a71c7d9e13e456028c491ced8e71e6cda4
|
[
"MIT"
] | null | null | null |
defmodule HomeworkWeb.Schemas.Types do
@moduledoc """
Defines the types for the Schema to use.
"""
use Absinthe.Schema.Notation
import_types(Absinthe.Type.Custom)
import_types(HomeworkWeb.Schemas.CompaniesSchema)
import_types(HomeworkWeb.Schemas.MerchantsSchema)
import_types(HomeworkWeb.Schemas.TransactionsSchema)
import_types(HomeworkWeb.Schemas.UsersSchema)
end
| 29.615385
| 54
| 0.81039
|
9e3dfeb674758db61e7b8b686f87be6e3f183bc4
| 296
|
ex
|
Elixir
|
apps/rtc/lib/rtc.ex
|
michaeljguarino/forge
|
50ee583ecb4aad5dee4ef08fce29a8eaed1a0824
|
[
"Apache-2.0"
] | null | null | null |
apps/rtc/lib/rtc.ex
|
michaeljguarino/forge
|
50ee583ecb4aad5dee4ef08fce29a8eaed1a0824
|
[
"Apache-2.0"
] | 2
|
2019-12-13T23:55:50.000Z
|
2019-12-17T05:49:58.000Z
|
apps/rtc/lib/rtc.ex
|
michaeljguarino/chartmart
|
a34c949cc29d6a1ab91c04c5e4f797e6f0daabfc
|
[
"Apache-2.0"
] | null | null | null |
defmodule Rtc do
@moduledoc """
Rtc keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
def conf(key), do: Application.get_env(:rtc, key)
end
| 24.666667
| 66
| 0.733108
|
9e3e1cb8211c2b95aae0a33160dc1414573bd0bc
| 1,211
|
ex
|
Elixir
|
lang/elixir/learning-fp-with-elixir/exercises/lib/chapter2/fourth.ex
|
Tyyagoo/studies
|
f8fcc3a539cfb6d04a149174c88bf2208e220b96
|
[
"Unlicense"
] | null | null | null |
lang/elixir/learning-fp-with-elixir/exercises/lib/chapter2/fourth.ex
|
Tyyagoo/studies
|
f8fcc3a539cfb6d04a149174c88bf2208e220b96
|
[
"Unlicense"
] | null | null | null |
lang/elixir/learning-fp-with-elixir/exercises/lib/chapter2/fourth.ex
|
Tyyagoo/studies
|
f8fcc3a539cfb6d04a149174c88bf2208e220b96
|
[
"Unlicense"
] | null | null | null |
defmodule Exercises.Chapter2.Fourth do
@moduledoc """
Create a module called MatchstickFactory and a function called boxes/1.
The factory has three types of boxes: the big ones hold fifty matchsticks,
the medium ones hold twenty, and the small ones hold five.
The boxes can’t have fewer matchstick that they can hold; they
must be full. The returning map should contain the remaining matchsticks.
"""
@big 50
@medium 20
@small 5
@doc """
Calculate the number of boxes necessary to accommodate some matchsticks.
It returns a map with the number of boxes necessary for each type of box.
Examples:
iex> alias Exercises.Chapter2.Fourth
Exercises.Chapter2.Fourth
iex> Fourth.boxes(98)
%{big: 1, medium: 2, remaining_matchsticks: 3, small: 1}
iex> Fourth.boxes(39)
%{big: 0, medium: 1, remaining_matchsticks: 4, small: 3}
"""
def boxes(sticks) do
big = div(sticks, @big)
remaining = rem(sticks, @big)
medium = div(remaining, @medium)
remaining = rem(remaining, @medium)
small = div(remaining, @small)
remaining = rem(remaining, @small)
%{big: big, medium: medium, small: small, remaining_matchsticks: remaining}
end
end
| 30.275
| 79
| 0.701073
|
9e3e370dca3edcc9537b7f33dad9824482ef0e5d
| 2,157
|
ex
|
Elixir
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/creative_asset_selection.ex
|
renovate-bot/elixir-google-api
|
1da34cd39b670c99f067011e05ab90af93fef1f6
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/creative_asset_selection.ex
|
swansoffiee/elixir-google-api
|
9ea6d39f273fb430634788c258b3189d3613dde0
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/creative_asset_selection.ex
|
dazuma/elixir-google-api
|
6a9897168008efe07a6081d2326735fe332e522c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V35.Model.CreativeAssetSelection do
@moduledoc """
Encapsulates the list of rules for asset selection and a default asset in case none of the rules match. Applicable to INSTREAM_VIDEO creatives.
## Attributes
* `defaultAssetId` (*type:* `String.t`, *default:* `nil`) - A creativeAssets[].id. This should refer to one of the parent assets in this creative, and will be served if none of the rules match. This is a required field.
* `rules` (*type:* `list(GoogleApi.DFAReporting.V35.Model.Rule.t)`, *default:* `nil`) - Rules determine which asset will be served to a viewer. Rules will be evaluated in the order in which they are stored in this list. This list must contain at least one rule. Applicable to INSTREAM_VIDEO creatives.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:defaultAssetId => String.t() | nil,
:rules => list(GoogleApi.DFAReporting.V35.Model.Rule.t()) | nil
}
field(:defaultAssetId)
field(:rules, as: GoogleApi.DFAReporting.V35.Model.Rule, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V35.Model.CreativeAssetSelection do
def decode(value, options) do
GoogleApi.DFAReporting.V35.Model.CreativeAssetSelection.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V35.Model.CreativeAssetSelection do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 43.14
| 305
| 0.747334
|
9e3e900210e4e041b1a61c61e6a176b82da6baed
| 335
|
exs
|
Elixir
|
apps/bus_detective_web/config/test.exs
|
bus-detective/bus_detective_ng
|
ef54684d4f640384bd20a4d5550ff51ab440190b
|
[
"MIT"
] | 8
|
2018-07-06T14:44:10.000Z
|
2021-08-19T17:24:25.000Z
|
apps/bus_detective_web/config/test.exs
|
bus-detective/bus_detective_ng
|
ef54684d4f640384bd20a4d5550ff51ab440190b
|
[
"MIT"
] | 12
|
2018-07-15T18:43:04.000Z
|
2022-02-10T16:07:47.000Z
|
apps/bus_detective_web/config/test.exs
|
bus-detective/bus_detective_ng
|
ef54684d4f640384bd20a4d5550ff51ab440190b
|
[
"MIT"
] | 1
|
2018-07-13T17:30:20.000Z
|
2018-07-13T17:30:20.000Z
|
use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :bus_detective_web, BusDetectiveWeb.Endpoint,
http: [port: 4001],
server: true
config :bus_detective, :sql_sandbox, true
config :wallaby,
driver: Wallaby.Experimental.Chrome,
screenshot_on_failure: true
| 23.928571
| 56
| 0.764179
|
9e3eab1e875bdf31be26dd3dfce9655b533aa6cb
| 5,388
|
exs
|
Elixir
|
test/rex_test.exs
|
vic/rex
|
04f41e7ae7be0f8325e641ba72504acf68781f6a
|
[
"Apache-2.0"
] | 7
|
2016-10-10T08:56:16.000Z
|
2020-05-19T20:11:42.000Z
|
test/rex_test.exs
|
vic/rex
|
04f41e7ae7be0f8325e641ba72504acf68781f6a
|
[
"Apache-2.0"
] | 1
|
2016-10-09T15:57:25.000Z
|
2016-10-09T15:57:45.000Z
|
test/rex_test.exs
|
vic/rex
|
04f41e7ae7be0f8325e641ba72504acf68781f6a
|
[
"Apache-2.0"
] | null | null | null |
defmodule Rex.Examples do
use Rex
drex double(a) (a * 2)
drex double_swap double swap double
drex mult Kernel.*/2
drex triple 3 ~> mult
drex puts IO.puts/1 ~> drop
drex sum 0 ~> (&Kernel.+/2) ~> List.foldr/3
drex sumr List.foldr/3 <~ (&Kernel.+/2) <~ 0
drex sum3(c, b, a) (a + b + c)
def answer, do: 42
def square(x), do: x * x
drex tatata do
triple show
swap double
end
def a(stack) do
stack
end
drex caca(a, _, c) do
^c
^a
^c ^a
end
end
defmodule Rex.ExamplesTest do
use ExUnit.Case
import ExUnit.CaptureIO
doctest Rex
doctest Rex.ReadmeExamples
use Rex
import Rex.Examples
defmacrop rex_stack(stack, expr) do
quote do
{stack, _} = {unquote(stack), []} |> unquote(Rex.Core.rex_fn(expr, __CALLER__)).()
stack
end
end
test "swap changes topmost two elements on stack" do
assert [2, 1, 3] == [1, 2, 3] |> rex_stack(swap)
end
test "double duplicates topmost value" do
assert [6, 1] == [3, 1] |> rex_stack(double)
end
test "can call a sequence of functions" do
assert [6, 2, 5] == [1, 3, 5] |> rex_stack(double ~> swap ~> double)
end
test "can place arguments as part of program" do
assert [6, 2, 5] == [] |> rex_stack(5 ~> 3 ~> 1 ~> double ~> swap ~> double)
end
test "can use drex to define a name for a program" do
assert [6, 2, 5] == [] |> rex_stack(5 ~> 3 ~> 1 ~> double_swap)
end
test "can call to native function reference" do
assert [16, 3] == [] |> rex_stack(3 ~> 2 ~> 8 ~> mult)
end
test "can call partial function" do
assert [24, 4] == [] |> rex_stack(4 ~> 8 ~> triple)
end
test "can apply kernel function directly" do
assert [3] == [] |> rex_stack(1 ~> 2 ~> Kernel.+/2)
end
test "can push function reference to stack" do
assert [[4, 5], "old"] == ["old"] |> rex_stack(["hola", "mundo"] ~> (&String.length/1) ~> Enum.map/2)
end
test "can apply partial refeference" do
assert [6] == [] |> rex_stack([1, 2, 3] ~> sum)
end
test "can apply partial refeference defined in reverse" do
assert [6] == [] |> rex_stack([1, 2, 3] ~> sumr)
end
test "can push local function reference" do
assert [&Rex.Examples.answer/0] == [] |> rex_stack(&answer/0)
end
test "can call remote function with zero arity" do
assert [42] == [] |> rex_stack(Rex.Examples.answer/0)
end
test "can call local function with zero arity" do
assert [42] == [] |> rex_stack(answer/0)
end
test "can call local function with non-zero arity" do
assert [25] == [5] |> rex_stack(square/1)
end
test "show prints the current stack" do
fun = fn ->
assert [5] == [5] |> rex_stack(show)
end
assert capture_io(fun) == "[5]\n"
end
test "quote pushes the elixir ast without changing it into the stack" do
assert [{{:~>, _, [{:~>, _, [1, 2]}, {:foo, _, nil}]}, _env}] = [] |> rex_stack(@[1 ~> 2 ~> foo])
end
test "quote pushes the definition environment alongide the quoted code" do
assert [{code, env}] = [] |> rex_stack(@[2 * answer])
assert {84, _} = Code.eval_quoted(code, [], env)
end
test "dequote executes a quoted program on top of stack with the rest of the stack" do
assert [5, 4] = [2, 3, 4] |> rex_stack(@[Kernel.+/2] ~> dequote)
end
test "ifte selects if condition is true" do
assert [:wii] = [] |> rex_stack(ifte <~ true <~ @[:wii] <~ @[:woo])
end
test "ifte selects if condition is non-true" do
assert [:woo] = [] |> rex_stack(ifte <~ nil <~ @[:wii] <~ @[:woo])
end
test "ifte executes with remainding of stack when true" do
assert [12, 15] = [4, 3, 15] |> rex_stack(ifte <~ true <~ @[Kernel.*/2] <~ @[:nop])
end
test "dequote can execute a function by binding" do
assert [12] = [3] |> rex_stack(@[4 ~> mult] ~> dequote)
end
test "ifte can execute a remote rex function" do
require Rex.Macro
assert [12] = [] |> rex_stack(ifte <~ true <~ @[Rex.Examples.mult <~ 3 <~ 4] <~ @[:noop])
end
test "ifte can execute a function by binding" do
assert [12] = [4] |> rex_stack(ifte <~ true <~ @[mult <~ 3] <~ [:noop])
end
test "rex can take a do with a line" do
assert [4, 18, 5] == [3, 4, 5] |> rex_stack(do: double triple swap)
end
test "rex can take a multiline block with one word per line" do
assert [9] == [] |> (rex_stack do
1
2
Kernel.+/2
Rex.Examples.triple
end)
end
test "defined function with do performs in order" do
fun = fn ->
assert [10, 12] == [4, 5] |> rex_stack(tatata)
end
assert capture_io(fun) == "[12, 5]\n"
end
test "word with value reference takes values from stack" do
assert [1, 3, 1, 3, 1, 2, 3] = [1, 2, 3] |> rex_stack(caca)
end
test "app calls a function with values from stack" do
assert ["22"] == [&Kernel.inspect/1, 22] |> rex_stack(app)
end
test "app doesnt calls if not enough values on stack" do
x = fn ->
[&Kernel.inspect/1] |> rex_stack(app)
end
assert_raise FunctionClauseError, x
end
test "can execute a function specifying its arity" do
assert [6] == [] |> rex_stack([1, 2, 3] ~> (&Kernel.+/2) ~> Enum.reduce/2)
end
test "calling Elixir quote doesnt modify the ast" do
assert [{:quote, _, [[do: {:+, _, [1, 2]}]]}] = [] |> rex_stack(quote(do: 1 + 2))
end
end
| 26.80597
| 105
| 0.58853
|
9e3edb156cfebbcf78863776af989f2f8cfb2e35
| 1,676
|
ex
|
Elixir
|
test/support/apps/example/lib/example_web/endpoint.ex
|
mitchellhenke/torch
|
2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71
|
[
"MIT"
] | 528
|
2019-09-13T15:10:36.000Z
|
2022-03-31T10:28:27.000Z
|
test/support/apps/example/lib/example_web/endpoint.ex
|
mitchellhenke/torch
|
2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71
|
[
"MIT"
] | 133
|
2019-09-13T17:46:59.000Z
|
2022-03-01T13:37:10.000Z
|
test/support/apps/example/lib/example_web/endpoint.ex
|
mitchellhenke/torch
|
2d0ab68f4e2d7f3bc37fbf7edbd1298b29b36e71
|
[
"MIT"
] | 38
|
2019-10-29T20:37:13.000Z
|
2022-03-03T05:19:33.000Z
|
defmodule ExampleWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :example
socket("/socket", ExampleWeb.UserSocket)
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :example,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket)
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_example_key",
signing_salt: "qwCfJR1h"
plug ExampleWeb.Router
@doc """
Callback invoked for dynamically configuring the endpoint.
It receives the endpoint configuration and checks if
configuration should be loaded from the system environment.
"""
def init(_key, config) do
if config[:load_from_system_env] do
port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
else
{:ok, config}
end
end
end
| 27.933333
| 95
| 0.703461
|
9e3edd8f760e9a92a0047928c30a138c41b4107c
| 3,599
|
ex
|
Elixir
|
lib/stripy.ex
|
moomerman/stripy
|
06ae411412d14fe48edcbaaf29774c59228d51e6
|
[
"MIT"
] | null | null | null |
lib/stripy.ex
|
moomerman/stripy
|
06ae411412d14fe48edcbaaf29774c59228d51e6
|
[
"MIT"
] | null | null | null |
lib/stripy.ex
|
moomerman/stripy
|
06ae411412d14fe48edcbaaf29774c59228d51e6
|
[
"MIT"
] | null | null | null |
defmodule Stripy do
@moduledoc """
Stripy is a micro wrapper intended to be
used for sending requests to Stripe's REST API. It is
made for developers who prefer to work directly with the
official API and provide their own abstractions on top
if such are needed.
Stripy takes care of setting headers, encoding the data,
configuration settings, etc (the usual boring boilerplate);
it also provides a `parse/1` helper function for decoding.
Some basic examples:
iex> Stripy.req(:get, "subscriptions")
{:ok, %HTTPoison.Response{...}}
iex> Stripy.req(:post, "customers", %{"email" => "a@b.c", "metadata[user_id]" => 1})
{:ok, %HTTPoison.Response{...}}
You are expected to build your business logic on top
of Stripy and abstract things such as Subscriptions
and Customers; if that's not your cup of tea,
check out "stripity_stripe" or "stripe_elixir" on Hex.
"""
@content_type_header %{"Content-Type" => "application/x-www-form-urlencoded"}
@doc "Constructs url with query params from given data."
def url(api_url, resource, data) do
api_url <> resource <> "?" <> URI.encode_query(data)
end
@doc """
Makes request to the Stripe API.
Will return an HTTPoison standard response; see `parse/1`
for decoding the response body.
You can specify custom headers to be included in the request
to Stripe, such as `Idempotency-Key`, `Stripe-Account` or any
other header. Just pass a map as the fourth argument.
See example below.
You can also provide any configuration option per-request in
the optional fifth argument.
## Examples
iex> Stripy.req(:get, "subscriptions")
{:ok, %HTTPoison.Response{...}}
iex> Stripy.req(:post, "customers", %{"email" => "a@b.c", "metadata[user_id]" => 1})
{:ok, %HTTPoison.Response{...}}
iex> Stripy.req(:post, "customers", %{"email" => "a@b.c"}, %{"Idempotency-Key" => "ABC"})
{:ok, %HTTPoison.Response{...}}
iex> Stripy.req(:post, "customers", %{"email" => "a@b.c"}, %{"Idempotency-Key" => "ABC"}, secret_key: "my-secret)
{:ok, %HTTPoison.Response{...}}
"""
def req(action, resource, data \\ %{}, headers \\ %{}, opts \\ [])
when action in [:get, :post, :delete] do
testing = Keyword.get(opts, :testing) || Application.get_env(:stripy, :testing, false)
if testing do
mock_server = Application.get_env(:stripy, :mock_server, Stripy.MockServer)
mock_server.request(action, resource, data)
else
secret_key = Keyword.get(opts, :secret_key) || Application.fetch_env!(:stripy, :secret_key)
version =
Keyword.get(opts, :version) || Application.get_env(:stripy, :version, "2020-03-02")
headers =
@content_type_header
|> Map.merge(%{"Authorization" => "Bearer #{secret_key}", "Stripe-Version" => version})
|> Map.merge(headers)
|> Map.to_list()
endpoint =
Keyword.get(opts, :endpoint) ||
Application.get_env(:stripy, :endpoint, "https://api.stripe.com/v1/")
httpoison_opts =
Keyword.get(opts, :httpoison) || Application.get_env(:stripy, :httpoison, [])
url = url(endpoint, resource, data)
HTTPoison.request(action, url, "", headers, httpoison_opts)
end
end
@doc "Parses an HTTPoison response from a Stripe API call."
def parse({:ok, %{status_code: 200, body: body}}) do
{:ok, Jason.decode!(body)}
end
def parse({:ok, %{body: body}}) do
error = Jason.decode!(body) |> Map.fetch!("error")
{:error, error}
end
def parse({:error, error}), do: {:error, error}
end
| 34.605769
| 119
| 0.648236
|
9e3efd063dbfa1f2736d2e8688aa90a170ecb15b
| 2,206
|
exs
|
Elixir
|
mix.exs
|
jeanparpaillon/ex_cast
|
fd67d0dcf9ac3794c5fbf1070a7be33925361c2b
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
jeanparpaillon/ex_cast
|
fd67d0dcf9ac3794c5fbf1070a7be33925361c2b
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
jeanparpaillon/ex_cast
|
fd67d0dcf9ac3794c5fbf1070a7be33925361c2b
|
[
"Apache-2.0"
] | null | null | null |
defmodule ExCast.MixProject do
use Mix.Project
def project do
[
app: :cast,
version: "0.1.0",
elixir: "~> 1.9",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
aliases: aliases(Mix.env()),
deps: deps(),
description: description(),
package: package(),
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:sweet_xml, "~> 0.6"},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
]
end
defp aliases(:test) do
[
compile: [&compile_examples/1, "compile"],
"deps.get": [&prereqs/1, "deps.get"],
clean: [&clean_examples/1, "clean"]
]
end
defp aliases(_), do: []
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp prereqs(_) do
[
{"castxml",
"castxml not found - install it with apt-get install castxml or by following the instructions under: https://github.com/fnchooft/CastXML"},
{"make", "make not found"}
]
|> Enum.reduce([], &check_tool/2)
|> Enum.map(&Mix.shell().info/1)
|> case do
[] -> :ok
_ -> raise "Missing pre-requisite(s)"
end
end
defp check_tool({tool, err}, errors) do
tool
|> System.find_executable()
|> case do
nil -> [err | errors]
_path -> errors
end
end
@examples ["c_example"]
defp compile_examples(_),
do:
@examples
|> Enum.each(&System.cmd("make", [], cd: Path.join("test/support", &1)))
defp clean_examples(_),
do:
@examples
|> Enum.each(&System.cmd("make", ["clean"], cd: Path.join("test/support", &1)))
defp description,
do: "C(++) AST to elixir"
defp package,
do: [
maintainers: ["Jean Parpaillon"],
licenses: ["Apache 2"],
links: %{"GitHub" => "https://github.com/jeanparpaillon/ex_cast"},
files: ~w(mix.exs README.md lib test .formatter.exs)
]
defp docs,
do: [
main: "Cast",
extras: ["README.md"]
]
end
| 22.282828
| 146
| 0.56437
|
9e3f10c93388d650e03fb77f66eb36e2521a0ec8
| 983
|
exs
|
Elixir
|
apps/core/priv/repo/migrations/20210509211102_add_service_accounts.exs
|
michaeljguarino/forge
|
50ee583ecb4aad5dee4ef08fce29a8eaed1a0824
|
[
"Apache-2.0"
] | 59
|
2021-09-16T19:29:39.000Z
|
2022-03-31T20:44:24.000Z
|
apps/core/priv/repo/migrations/20210509211102_add_service_accounts.exs
|
svilenkov/plural
|
ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026
|
[
"Apache-2.0"
] | 111
|
2021-08-15T09:56:37.000Z
|
2022-03-31T23:59:32.000Z
|
apps/core/priv/repo/migrations/20210509211102_add_service_accounts.exs
|
svilenkov/plural
|
ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026
|
[
"Apache-2.0"
] | 4
|
2021-12-13T09:43:01.000Z
|
2022-03-29T18:08:44.000Z
|
defmodule Core.Repo.Migrations.AddServiceAccounts do
use Ecto.Migration
def change do
alter table(:users) do
add :service_account, :boolean, default: false
end
create table(:impersonation_policies, primary_key: false) do
add :id, :uuid, primary_key: true
add :user_id, references(:users, type: :uuid, on_delete: :delete_all)
timestamps()
end
create table(:impersonation_policy_bindings, primary_key: false) do
add :id, :uuid, primary_key: true
add :user_id, references(:users, type: :uuid, on_delete: :delete_all)
add :group_id, references(:groups, type: :uuid, on_delete: :delete_all)
add :policy_id, references(:impersonation_policies, type: :uuid, on_delete: :delete_all)
timestamps()
end
create unique_index(:impersonation_policies, [:user_id])
create index(:impersonation_policy_bindings, [:user_id])
create index(:impersonation_policy_bindings, [:group_id])
end
end
| 31.709677
| 94
| 0.70295
|
9e3f1edd17f4b5468a4a7088c27f4d4333333f47
| 11,105
|
ex
|
Elixir
|
lib/mechanize/page.ex
|
paultannenbaum/mechanize
|
97fd54c0421689026c01b9bf38206fa74e8f7e1a
|
[
"MIT"
] | 25
|
2020-06-26T02:21:35.000Z
|
2022-03-05T18:51:46.000Z
|
lib/mechanize/page.ex
|
paultannenbaum/mechanize
|
97fd54c0421689026c01b9bf38206fa74e8f7e1a
|
[
"MIT"
] | 29
|
2019-07-02T21:50:06.000Z
|
2020-05-28T18:34:01.000Z
|
lib/mechanize/page.ex
|
paultannenbaum/mechanize
|
97fd54c0421689026c01b9bf38206fa74e8f7e1a
|
[
"MIT"
] | 4
|
2020-06-24T02:11:47.000Z
|
2022-03-06T00:50:59.000Z
|
defmodule Mechanize.Page do
@moduledoc """
The HTML Page.
This module defines `Mechanize.Page` and the main functions for working with Pages.
The Page is created as a result of a successful HTTP request.
```
alias Mechanize.{Browser, Page}
browser = Browser.new()
page = Browser.get!(browser, "https://www.example.com")
```
"""
alias Mechanize.{Response, Query, Form}
alias Mechanize.Query.BadQueryError
alias Mechanize.Page.{Link, Element}
defstruct [:response_chain, :status_code, :content, :url, :browser, :parser]
@typedoc """
The HTML Page struct.
"""
@type t :: %__MODULE__{
response_chain: [Response.t()],
status_code: integer(),
content: String.t(),
url: String.t(),
browser: Browser.t(),
parser: module()
}
@typedoc """
A fragment of a page. It is an array of `Mechanize.Page.Element` struct in most of the cases,
but it could be any struct that implements `Mechanize.Page.Elementable` protocol.
"""
@type fragment :: [any]
defmodule ClickError do
@moduledoc """
Raised when an error occurs on a click action.
"""
defexception [:message]
end
defmodule InvalidMetaRefreshError do
@moduledoc """
Raised when Mechanize cannot parse the `content` attribute of a
`<meta http-equiv="refresh" ...>` element inside the page content.
"""
defexception [:message]
end
@doc """
Returns the browser that fetched the `page`.
"""
@spec get_browser(t()) :: Browser.t()
def get_browser(nil), do: raise(ArgumentError, "page is nil")
def get_browser(%__MODULE__{} = page), do: page.browser
@doc """
Returns the `page` url.
"""
@spec get_url(t()) :: String.t()
def get_url(nil), do: raise(ArgumentError, "page is nil")
def get_url(%__MODULE__{} = page), do: page.url
@doc """
Returns the page content.
"""
@spec get_content(t()) :: String.t()
def get_content(%__MODULE__{} = page), do: page.content
@doc """
Extracts meta-refresh data from a `page`.
A two element tuple with a integer representing the delay in the first position and
the a string representing the URL in the second position will be returned if a
`<meta http-equiv="refresh" ...>` is found, otherwise `nil` will be returned.
Raises `Mechanize.Page.InvalidMetaRefreshError` if Mechanize cannot parse the `content` attribute
of the meta-refresh.
## Example
```
# <meta http-equiv="refresh" content="10; url=https://www.example.com">
{delay, url} = Page.meta_refresh(page)
delay # => 10
url # => https://www.example.com
```
"""
@spec meta_refresh(t()) :: {integer(), String.t()}
def meta_refresh(nil), do: raise(ArgumentError, "page is nil")
def meta_refresh(%__MODULE__{} = page) do
page
|> search("meta[http-equiv=refresh]")
|> List.first()
|> case do
nil ->
nil
meta ->
meta
|> Element.attr(:content)
|> parse_meta_refresh_content(page)
end
end
defp parse_meta_refresh_content(content, page) do
content =
content
|> String.split(";")
|> Enum.map(&String.trim/1)
|> Enum.join(";")
case Regex.scan(~r/^(\d+)(?:;url\s*=\s*(.*))?$/, content) do
[[_, delay, url]] -> {String.to_integer(delay), url}
[[_, delay]] -> {String.to_integer(delay), nil}
_ -> raise InvalidMetaRefreshError, "can't parse meta-refresh content of #{page.url}"
end
end
@doc """
Returns the response headers of a `page`.
In case of Mechanize Browser has followed one or more redirects when `page` was fetched,
the headers returned corresponds to the headers of the last response.
"""
@spec get_headers(t()) :: Header.headers()
def get_headers(%__MODULE__{} = page) do
page
|> get_response()
|> Response.headers()
end
@doc """
Return the response of a `page`.
In case of Mechanize Browser has followed one or more redirects when `page` was fetched,
the response returned correspond to the last respose.
"""
@spec get_response(t()) :: Response.t()
def get_response(%__MODULE__{} = page), do: List.first(page.response_chain)
@doc """
Clicks on a link that matches `query`.
Links are all elements defined by `a` and `area` html tags. In case of more than one link matches
the query, Mechanize will click on the first matched link.
Raises `Mechanize.Page.ClickError` if the matched link has no href attribute.
Raises `Mechanize.Page.BadQueryError` if no link matches with given `query`.
Raises additional exceptions from `Mechanize.Browser.request!/5`.
See `Mechanize.Query` module documentation to know all query capabilities in depth.
## Examples
Click on the first link with text equals to "Back":
```
Page.click_link!(page, "Back")
```
Click on the first link by its "href" attribute:
```
Page.click_link!(page, href: "sun.html")
```
"""
@dialyzer :no_return
@spec click_link!(t() | fragment(), Query.t()) :: t()
def click_link!(page_or_fragment, query) do
page_or_fragment
|> link_with!(query)
|> Link.click!()
end
@doc """
Returns a list containing all links from a page or fragment of a page, or an empty list in
case it has no links.
"""
@spec links(t() | fragment()) :: [Link.t()]
defdelegate links(page_or_fragment), to: __MODULE__, as: :links_with
@doc """
Return the first link matched by `query`.
Nil is returned if no link was matched.
See `Mechanize.Page.links_with/2` for more details about how to query links.
"""
@spec link_with(t() | fragment(), Query.t()) :: Link.t() | nil
def link_with(page_or_fragment, query \\ []) do
page_or_fragment
|> links_with(query)
|> List.first()
end
@doc """
Return the first link matched by `query`.
Raise `Mechanize.Query.BadQueryError` if no link was matched.
See `Mechanize.Page.links_with/2` for more details about how to query links.
"""
@spec link_with!(t() | fragment(), Query.t()) :: Link.t() | nil
def link_with!(page_or_fragment, query \\ []) do
case link_with(page_or_fragment, query) do
nil -> raise BadQueryError, "no link found with given query"
link -> link
end
end
@doc """
Return all links matched by `query`.
An empty list is returned if no link was matched.
See `Mechanize.Query` module documentation to know all query capabilities in depth.
## Examples
Retrieving all links containing "Back" text of `page`:
```
Page.links_with(page, "Back")
```
Retrieving all links by attribute:
```
Page.links_with(page, href: "sun.html")
```
"""
@spec links_with(t() | fragment(), Query.t()) :: [Link.t()]
def links_with(page_or_fragment, query \\ []) do
page_or_fragment
|> elements_with("a, area", query)
|> Enum.map(&Link.new/1)
end
@doc """
Return all links matched by `query`.
Raise `Mechanize.Query.BadQueryError` if no link was matched.
See `Mechanize.Page.links_with/2` for more details about how to query links.
"""
@spec links_with!(t() | fragment(), Query.t()) :: [Link.t()]
def links_with!(page_or_fragment, query \\ []) do
case links_with(page_or_fragment, query) do
[] -> raise BadQueryError, "no link found with given query"
link -> link
end
end
@doc """
Returns the first form in a given page or fragment or nil in case of the given page or fragment
does not have a form.
"""
@spec form(t() | fragment()) :: Form.t() | nil
def form(page_or_fragment) do
page_or_fragment
|> forms()
|> List.first()
end
@doc """
Returns a list containing all forms of a given page or fragment.
In case of a page or fragment does not have a form, returns a empty list.
"""
@spec forms(t() | fragment()) :: [Form.t()]
defdelegate forms(page_or_fragment), to: __MODULE__, as: :forms_with
@doc """
Returns the first form that matches the `query` for the given page or fragment.
In case of no form matches, returns nil instead.
See `Mechanize.Query` module documentation to know all query capabilities in depth.
## Examples
Fetch the first form which name is equal to "login".
```
%Form{} = Page.form_with(page, name: "login")
```
"""
@spec form_with(t() | fragment(), Query.t()) :: Form.t() | nil
def form_with(page_or_fragment, query \\ []) do
page_or_fragment
|> forms_with(query)
|> List.first()
end
@doc """
Returns a list containing all forms matching `query` for the given page or fragment.
In case of no form matches, returns an empty list instead.
See `Mechanize.Query` module documentation to know all query capabilities in depth.
## Examples
Fetch all forms which name is equal to "login".
```
list = Page.forms_with(page, name: "login")
```
"""
@spec forms_with(t() | fragment(), Query.t()) :: [Form.t()]
def forms_with(page_or_fragment, query \\ []) do
page_or_fragment
|> elements_with("form", query)
|> Enum.map(&Form.new(page_or_fragment, &1))
end
@doc """
Search for elements on a given page or fragment using a CSS selector.
A list of `Mechanize.Page.Element` matching the selector will be return. In case of no element
matches the selector, an empty list will be returned instead.
See also `Mechanize.Page.elements_with/3`.
## Example
Printing in console todos of a todo html unordered list:
```
page
|> Page.search("ul.todo > li")
|> Enum.map(&Element.text/1)
|> Enum.each(&IO.puts/1)
```
"""
@spec search(t() | fragment(), String.t()) :: [Element.t()]
defdelegate search(page, selector), to: Query
@doc """
Returns all elements not matching the selector.
A list of `Mechanize.Page.Element` matching the selector will be return. In case of all elements
match the selector, and empty list will be returned instead.
## Example
Removing a unordered list with "todo" class from the content of a page.
```
Page.filter_out(page, "ul.todo > li")
```
"""
@spec filter_out(t() | fragment(), String.t()) :: [Element.t()]
defdelegate filter_out(page, selector), to: Query
@doc """
Search for elements on a given page or fragment both using a CSS selector and queries.
This function is similar to `Mechanize.Page.search/2`, but you can also use the power of
queries combined. First, the function will match the page or the fragments against the
CSS selector, after it will perform a match of the remaining elements to the query. A list of
`Mechanize.Page.Element` will be return. In case of no element both matches the selector and
the query, an empty list will be returned instead.
See `Mechanize.Query` module documentation to know all query capabilities in depth.
## Example
Printing in console todos of a todo html unordered list starting with "A":
```
page
|> Page.elements_with("ul.todo > li", text: ~r/^A/i)
|> Enum.map(&Element.text/1)
|> Enum.each(&IO.puts/1)
```
"""
@spec elements_with(t() | fragment(), String.t(), Query.t()) :: [Element.t()]
defdelegate elements_with(page_or_fragment, selector, query \\ []), to: Query
end
| 29.534574
| 99
| 0.662044
|
9e3f84fac2f0fafaff121f40611b40595dc6180e
| 2,105
|
exs
|
Elixir
|
installer/templates/new/mix.exs
|
angwoontiong/phoenix-ui
|
03227b13f3c780b5626d56207bfa99c8a06525d8
|
[
"MIT"
] | null | null | null |
installer/templates/new/mix.exs
|
angwoontiong/phoenix-ui
|
03227b13f3c780b5626d56207bfa99c8a06525d8
|
[
"MIT"
] | null | null | null |
installer/templates/new/mix.exs
|
angwoontiong/phoenix-ui
|
03227b13f3c780b5626d56207bfa99c8a06525d8
|
[
"MIT"
] | null | null | null |
defmodule <%= application_module %>.Mixfile do
use Mix.Project
def project do
[app: :<%= application_name %>,
version: "0.0.1",<%= if in_umbrella do %>
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",<% end %>
elixir: "~> 1.2",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,<%= if ecto do %>
aliases: aliases,<% end %>
deps: deps]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[mod: {<%= application_module %>, []},
applications: [:phoenix<%= if html do %>, :phoenix_html<% end %>, :cowboy, :logger, :gettext<%= if ecto do %>,
:phoenix_ecto, <%= inspect adapter_app %><% end %>]]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[<%= phoenix_dep %>,<%= if ecto do %>
{:phoenix_ecto, "~> 3.0-rc"},
{<%= inspect adapter_app %>, ">= 0.0.0"},<% end %><%= if html do %>
{:phoenix_html, "~> 2.5"},
{:phoenix_live_reload, "~> 1.0", only: :dev},<% end %>
# TODO move to hex release
{:phoenix_pubsub, github: "phoenixframework/phoenix_pubsub"},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"}]
end<%= if ecto do %>
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]]
end<% end %>
end
| 35.083333
| 115
| 0.588599
|
9e3fe88772dcbf92cdbdea3907108cb37a4fa78a
| 2,122
|
exs
|
Elixir
|
mix.exs
|
fireproofsocks/pockets
|
b0e7ebd8836e22f7354f083dea9e52d6b4ce365a
|
[
"Apache-2.0"
] | 4
|
2020-09-29T17:54:47.000Z
|
2022-03-01T19:26:07.000Z
|
mix.exs
|
fireproofsocks/pockets
|
b0e7ebd8836e22f7354f083dea9e52d6b4ce365a
|
[
"Apache-2.0"
] | 3
|
2020-11-20T20:54:39.000Z
|
2021-10-09T00:39:57.000Z
|
mix.exs
|
fireproofsocks/pockets
|
b0e7ebd8836e22f7354f083dea9e52d6b4ce365a
|
[
"Apache-2.0"
] | 1
|
2020-11-04T14:09:30.000Z
|
2020-11-04T14:09:30.000Z
|
defmodule Pockets.MixProject do
  use Mix.Project

  @source_url "https://github.com/fireproofsocks/pockets"
  @version "1.2.0"

  def project do
    [
      app: :pockets,
      name: "Pockets",
      description: description(),
      version: @version,
      elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      aliases: aliases(),
      package: package(),
      elixirc_paths: elixirc_paths(Mix.env()),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [coveralls: :test, "coveralls.detail": :test],
      docs: [
        main: "readme",
        source_ref: "v#{@version}",
        source_url: @source_url,
        logo: "assets/logo.png",
        extras: ["README.md", "CHANGELOG.md"]
      ]
    ]
  end

  # Short description shown on hex.pm.
  defp description do
    """
    Pockets is an Elixir wrapper around Erlang :ets and :dets, a disk-based
    term storage. It offers a simple key/value store with a familiar interface.
    This is a simple alternative to :mnesia or Redis.
    """
  end

  # Compile test support modules only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Hex package metadata.
  # Fixes vs. previous revision:
  #   * :licenses must contain SPDX identifiers — "Apache 2.0" is not a valid
  #     SPDX id and triggers a `mix hex.publish` warning; "Apache-2.0" is.
  #   * :logo is not a recognized :package key (the docs logo is configured
  #     under :docs in project/0 above), so it was removed here.
  defp package do
    [
      maintainers: ["Everett Griffiths"],
      licenses: ["Apache-2.0"],
      links: links(),
      files: [
        "lib",
        "assets/logo.png",
        "mix.exs",
        "README*",
        "CHANGELOG*",
        "LICENSE*"
      ]
    ]
  end

  # Links rendered on the hex.pm package page; version-pinned to @version.
  def links do
    %{
      "GitHub" => @source_url,
      "Readme" => "#{@source_url}/blob/v#{@version}/README.md",
      "Changelog" => "#{@source_url}/blob/v#{@version}/CHANGELOG.md"
    }
  end

  def application do
    [
      mod: {Pockets.Application, []},
      extra_applications: [:logger]
    ]
  end

  # `mix lint` = formatter check + strict credo.
  defp aliases do
    [
      lint: ["format --check-formatted", "credo --strict"]
    ]
  end

  # Dev/test-only tooling; no runtime dependencies.
  defp deps do
    [
      {:credo, "~> 1.5.6", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.1.0", only: [:dev], runtime: false},
      {:ex_doc, "~> 0.25.3", only: :dev, runtime: false},
      {:excoveralls, "~> 0.14.3", only: [:dev, :test], runtime: false}
    ]
  end
end
| 23.842697
| 79
| 0.555137
|
9e3fffcdbbede8a487bd26d9c68e2d7406ffcb46
| 1,141
|
exs
|
Elixir
|
clients/plus_domains/config/config.exs
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/plus_domains/config/config.exs
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/plus_domains/config/config.exs
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Config module.
#
# NOTE: `use Mix.Config` has been deprecated since Elixir 1.9;
# `import Config` is the supported replacement and provides the
# same `config/2,3` macros without depending on Mix at runtime.
import Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure for your application as:
#
#     config :google+_domains_api, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:google+_domains_api, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{config_env()}.exs"
| 36.806452
| 73
| 0.754601
|
9e401b19de9429177b9cacf212ea53693612d218
| 2,819
|
ex
|
Elixir
|
lib/credo/cli/task/prepare_checks_to_run.ex
|
andyl/credo
|
4743c3e378a77177f79b5a822f38566ebcf85116
|
[
"MIT"
] | null | null | null |
lib/credo/cli/task/prepare_checks_to_run.ex
|
andyl/credo
|
4743c3e378a77177f79b5a822f38566ebcf85116
|
[
"MIT"
] | null | null | null |
lib/credo/cli/task/prepare_checks_to_run.ex
|
andyl/credo
|
4743c3e378a77177f79b5a822f38566ebcf85116
|
[
"MIT"
] | null | null | null |
defmodule Credo.CLI.Task.PrepareChecksToRun do
  @moduledoc false

  use Credo.Execution.Task

  # Pipeline entry point. Annotates the execution with the config comments
  # found in the sources, then prunes/adjusts the check list: optionally
  # re-enables disabled checks, drops low-priority checks, and drops checks
  # whose Elixir version requirement is not met.
  def call(exec, _opts \\ []) do
    source_files = Execution.get_source_files(exec)
    exec
    |> set_config_comments(source_files)
    |> enable_disabled_checks_if_applicable()
    |> exclude_low_priority_checks(exec.min_priority - 9)
    |> exclude_checks_based_on_elixir_version
  end

  # Runs ConfigCommentFinder over the source files and stores the resulting
  # pairs as a map on the execution (config_comment_map). The map's exact
  # key/value shape is defined by ConfigCommentFinder — confirm there.
  defp set_config_comments(exec, source_files) do
    config_comment_map =
      source_files
      |> Credo.Check.ConfigCommentFinder.run()
      |> Enum.into(%{})
    %Execution{exec | config_comment_map: config_comment_map}
  end

  # No --enable-disabled-checks option given: nothing to do.
  defp enable_disabled_checks_if_applicable(%Execution{enable_disabled_checks: nil} = exec) do
    exec
  end

  # Turns `{check, false}` (disabled) entries into `{check, []}` (enabled with
  # default params) when the check's module name matches one of the
  # case-insensitive patterns given via enable_disabled_checks.
  defp enable_disabled_checks_if_applicable(exec) do
    enable_disabled_checks_regexes = to_match_regexes(exec.enable_disabled_checks)
    checks =
      Enum.map(exec.checks, fn
        {check, false} ->
          if matches?(to_string(check), enable_disabled_checks_regexes) do
            {check, []}
          else
            {check, false}
          end
        {check, params} ->
          {check, params}
      end)
    %Execution{exec | checks: checks}
  end

  # Removes checks whose effective priority is below `below_priority`.
  # Note the three tuple shapes handled: the deprecated 1-tuple `{check}`
  # (uses the check's base_priority), explicitly disabled `{check, false}`
  # (always removed), and `{check, params}` (params may override priority).
  defp exclude_low_priority_checks(exec, below_priority) do
    checks =
      Enum.reject(exec.checks, fn
        # deprecated
        {check} ->
          Credo.Priority.to_integer(check.base_priority) < below_priority
        {_check, false} ->
          true
        {check, params} ->
          priority =
            params
            |> Credo.Check.Params.priority(check)
            |> Credo.Priority.to_integer()
          priority < below_priority
      end)
    %Execution{exec | checks: checks}
  end

  # Partitions checks by whether the running Elixir version satisfies each
  # check's version requirement; non-matching checks are kept around as
  # skipped_checks for reporting.
  # NOTE(review): reject + filter traverses exec.checks twice; Enum.split_with/2
  # would do it in one pass with identical results.
  defp exclude_checks_based_on_elixir_version(exec) do
    elixir_version = System.version()
    skipped_checks = Enum.reject(exec.checks, &matches_requirement?(&1, elixir_version))
    checks = Enum.filter(exec.checks, &matches_requirement?(&1, elixir_version))
    %Execution{exec | checks: checks, skipped_checks: skipped_checks}
  end

  # Normalizes the 2-tuple shape down to the 1-tuple clause below.
  defp matches_requirement?({check, _}, elixir_version) do
    matches_requirement?({check}, elixir_version)
  end

  defp matches_requirement?({check}, elixir_version) do
    Version.match?(elixir_version, check.elixir_version)
  end

  # Compiles each --enable-disabled-checks pattern into a case-insensitive regex.
  defp to_match_regexes(nil), do: []

  defp to_match_regexes(list) do
    Enum.map(list, fn match_check ->
      {:ok, match_pattern} = Regex.compile(match_check, "i")
      match_pattern
    end)
  end

  # String matching helpers: any-of for lists, regex match, or plain substring.
  defp matches?(_string, nil), do: false
  defp matches?(string, list) when is_list(list), do: Enum.any?(list, &matches?(string, &1))
  defp matches?(string, %Regex{} = regex), do: Regex.match?(regex, string)
  defp matches?(string, pattern) when is_binary(pattern), do: String.contains?(string, pattern)
end
| 27.637255
| 95
| 0.675772
|
9e4042ec2a9b3c1ba4b72ae69e21d7879f6d3ba2
| 369
|
exs
|
Elixir
|
ch8/defstruct1.exs
|
rafanoronha/prog_elixir_1_6
|
5ae2137da4ffdb6de1c9b164812ed6e43c0922d3
|
[
"MIT"
] | null | null | null |
ch8/defstruct1.exs
|
rafanoronha/prog_elixir_1_6
|
5ae2137da4ffdb6de1c9b164812ed6e43c0922d3
|
[
"MIT"
] | null | null | null |
ch8/defstruct1.exs
|
rafanoronha/prog_elixir_1_6
|
5ae2137da4ffdb6de1c9b164812ed6e43c0922d3
|
[
"MIT"
] | null | null | null |
defmodule Attendee do
  @moduledoc """
  A conference attendee with a name, a payment flag, and an age flag.
  """

  defstruct name: "", paid: false, over_18: true

  @doc "True only when the attendee has paid and is over 18."
  def may_attend_after_party(%Attendee{paid: paid, over_18: over_18}) do
    paid && over_18
  end

  @doc "Prints a badge for the attendee; raises if the name is blank."
  def print_vip_badge(%Attendee{name: ""}), do: raise("missing name for badge")
  def print_vip_badge(%Attendee{name: name}), do: IO.puts("Very cheap badge for #{name}")
end
| 24.6
| 63
| 0.696477
|
9e404679bdd27bdddcc282713ce77e027aaaf4b8
| 2,113
|
exs
|
Elixir
|
test/ada/workflow_test.exs
|
cloud8421/ada
|
384f5e5e85e95c4d5883298259e781cc0a54bd07
|
[
"MIT"
] | 7
|
2019-05-11T12:14:48.000Z
|
2021-04-02T18:42:51.000Z
|
test/ada/workflow_test.exs
|
cloud8421/ada
|
384f5e5e85e95c4d5883298259e781cc0a54bd07
|
[
"MIT"
] | 3
|
2019-05-11T08:01:47.000Z
|
2019-05-14T12:06:50.000Z
|
test/ada/workflow_test.exs
|
cloud8421/ada
|
384f5e5e85e95c4d5883298259e781cc0a54bd07
|
[
"MIT"
] | 1
|
2021-01-06T14:57:32.000Z
|
2021-01-06T14:57:32.000Z
|
defmodule Ada.Schema.WorkflowTest do
  use ExUnit.Case, async: true

  alias Ada.{TestWorkflow, Workflow}

  # The error returned whenever `name` fails the :string cast.
  @cast_error {:error, :invalid_params,
               [name: {"is invalid", [type: :string, validation: :cast]}]}

  defmodule TestEmailAdapter do
    @moduledoc false
    @behaviour Ada.Email.Adapter

    # Pretends delivery succeeded and echoes the email back for assertions.
    def send_email(%Ada.Email{} = email), do: {:ok, {:email_delivered, email}}
  end

  describe "run/4" do
    test "validates params" do
      result = Workflow.run(TestWorkflow, %{name: 1}, :email, email_adapter: TestEmailAdapter)
      assert result == @cast_error
    end

    test "it uses the transport" do
      result = Workflow.run(TestWorkflow, %{name: "Ada"}, :email, email_adapter: TestEmailAdapter)
      assert result == {:ok, {:email_delivered, %Ada.Email{subject: "ADA"}}}
    end
  end

  describe "raw_data/3" do
    test "validates params" do
      assert Workflow.raw_data(TestWorkflow, %{name: 1}, []) == @cast_error
    end

    test "it returns raw data" do
      assert Workflow.raw_data(TestWorkflow, %{name: "Ada"}, []) == {:ok, %{name: "ADA"}}
    end
  end

  describe "valid_name?/1" do
    test "it passes for valid workflows" do
      assert Workflow.valid_name?(TestWorkflow)
      refute Workflow.valid_name?(Map)
      refute Workflow.valid_name?(NonExistentWorkflow)
    end
  end

  describe "validate/2" do
    test "it casts and validate params" do
      assert Workflow.validate(TestWorkflow, %{name: "Ada"}) == {:ok, %{name: "Ada"}}
      assert {:ok, %{name: "Ada"}} = Workflow.validate(TestWorkflow, %{"name" => "Ada"})
      assert Workflow.validate(TestWorkflow, %{"name" => 1}) == @cast_error
    end
  end

  describe "normalize_name/1" do
    test "it handles atoms and strings" do
      for input <- [TestWorkflow, "Ada.TestWorkflow", "Elixir.Ada.TestWorkflow"] do
        assert Workflow.normalize_name(input) == "Ada.TestWorkflow"
      end
    end
  end
end
| 34.080645
| 101
| 0.640795
|
9e4052a2c6ea02dc360f7c7448b1154b60dbc090
| 731
|
ex
|
Elixir
|
lib/xml_parser/xml_node_finder.ex
|
paggi-com/lib-quinn
|
debbeff809e31f15156486e51226175f355223a9
|
[
"Apache-2.0"
] | null | null | null |
lib/xml_parser/xml_node_finder.ex
|
paggi-com/lib-quinn
|
debbeff809e31f15156486e51226175f355223a9
|
[
"Apache-2.0"
] | null | null | null |
lib/xml_parser/xml_node_finder.ex
|
paggi-com/lib-quinn
|
debbeff809e31f15156486e51226175f355223a9
|
[
"Apache-2.0"
] | null | null | null |
defmodule Quinn.XmlNodeFinder do
  @moduledoc """
  Recursively searches parsed XML nodes (maps with `:name` and `:value` keys)
  for nodes matching either a single name or a path of names.
  """

  # Nothing searchable: empty input, nil, or bare text content.
  def find([], _), do: []
  def find(nil, _), do: []
  def find([text], _) when is_binary(text), do: []
  def find([text | _], _) when is_binary(text), do: []

  # Wrap a single node so the list clauses below apply.
  def find(%{} = single_node, names), do: find([single_node], names)

  # Direct hit on the head node's name; keep scanning the siblings.
  def find([%{name: name} = node | siblings], wanted) when name == wanted do
    [node | find(siblings, wanted)]
  end

  # A one-element path degrades to a plain name search.
  def find(nodes, [only_name]), do: find(nodes, only_name)

  # Path search: when the head matches the first segment, descend into its
  # children with the remaining path, and keep trying the full path on siblings.
  def find([%{name: name} = node | siblings], [first | deeper] = path) when name == first do
    find(node.value, deeper) ++ find(siblings, path)
  end

  # No match at the head: search its children and the remaining siblings.
  def find([node | siblings], wanted) do
    find(node.value, wanted) ++ find(siblings, wanted)
  end
end
| 26.107143
| 101
| 0.630643
|
9e40535635ec8aed5228790abe54b9bbb65a2a9c
| 437
|
exs
|
Elixir
|
robotica_face/test/robotica_face_web/views/error_view_test.exs
|
brianmay/robotica-elixir
|
8656510e54b7e32a547e3a54bf946f0e327911c9
|
[
"RSA-MD"
] | 1
|
2019-04-23T09:16:44.000Z
|
2019-04-23T09:16:44.000Z
|
robotica_face/test/robotica_face_web/views/error_view_test.exs
|
brianmay/robotica-elixir
|
8656510e54b7e32a547e3a54bf946f0e327911c9
|
[
"RSA-MD"
] | 107
|
2019-05-26T08:03:26.000Z
|
2022-02-03T19:13:56.000Z
|
robotica_face/test/robotica_face_web/views/error_view_test.exs
|
brianmay/robotica-elixir
|
8656510e54b7e32a547e3a54bf946f0e327911c9
|
[
"RSA-MD"
] | 1
|
2019-08-10T20:44:24.000Z
|
2019-08-10T20:44:24.000Z
|
defmodule RoboticaFaceWeb.ErrorViewTest do
  use RoboticaFaceWeb.ConnCase, async: true

  # render_to_string/3 lets us exercise the error templates directly,
  # without going through a full request cycle.
  import Phoenix.View

  test "renders 404.html" do
    rendered = render_to_string(RoboticaFaceWeb.ErrorView, "404.html", [])
    assert rendered == "Not Found"
  end

  test "renders 500.html" do
    rendered = render_to_string(RoboticaFaceWeb.ErrorView, "500.html", [])
    assert rendered == "Internal Server Error"
  end
end
| 29.133333
| 97
| 0.745995
|
9e4077f3b4fa0de14761c7ac8f5942ceeb24c623
| 1,607
|
ex
|
Elixir
|
clients/composer/lib/google_api/composer/v1beta1/model/encryption_config.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | null | null | null |
clients/composer/lib/google_api/composer/v1beta1/model/encryption_config.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | null | null | null |
clients/composer/lib/google_api/composer/v1beta1/model/encryption_config.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated API model: a single optional `kmsKeyName` attribute.
defmodule GoogleApi.Composer.V1beta1.Model.EncryptionConfig do
  @moduledoc """
  The encryption options for the Cloud Composer environment and its dependencies.
  ## Attributes
  * `kmsKeyName` (*type:* `String.t`, *default:* `nil`) - Optional. Customer-managed Encryption Key available through Google's Key Management Service. Cannot be updated. If not specified, Google-managed key will be used.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kmsKeyName => String.t() | nil
        }

  # `field/1` is a GoogleApi.Gax.ModelBase macro — presumably it registers the
  # attribute for JSON (de)serialization; confirm in ModelBase.
  field(:kmsKeyName)
end
# Delegates Poison decoding to the generated model module's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Composer.V1beta1.Model.EncryptionConfig do
  def decode(value, options) do
    GoogleApi.Composer.V1beta1.Model.EncryptionConfig.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model base.
defimpl Poison.Encoder, for: GoogleApi.Composer.V1beta1.Model.EncryptionConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.191489
| 222
| 0.750467
|
9e40e8761c8b47929636dcdb9c8d41fffec25cc3
| 904
|
ex
|
Elixir
|
lib/noa_web/plugs/provider_loader.ex
|
handnot2/noa
|
c071798e2baf4649466ba37190070c3192c7c2c2
|
[
"MIT"
] | 9
|
2017-08-16T14:55:48.000Z
|
2019-03-06T15:06:49.000Z
|
lib/noa_web/plugs/provider_loader.ex
|
handnot2/noa
|
c071798e2baf4649466ba37190070c3192c7c2c2
|
[
"MIT"
] | null | null | null |
lib/noa_web/plugs/provider_loader.ex
|
handnot2/noa
|
c071798e2baf4649466ba37190070c3192c7c2c2
|
[
"MIT"
] | 1
|
2022-03-17T18:32:59.000Z
|
2022-03-17T18:32:59.000Z
|
defmodule NoaWeb.Plugs.ProviderLoader do
  @moduledoc false

  import Plug.Conn

  # NOTE: the previous revision had an unused `require Logger` here; removed.
  alias Noa.Actors.{Provider, Providers}

  # Plug init callback: options are passed through unchanged.
  def init(opts), do: opts

  # Looks up the provider named by the :provider_id path parameter and stores
  # it in the :noa_ctxt assign. An unknown provider yields a 400 with an
  # OAuth-style JSON error body; any raised exception yields a generic 500.
  def call(conn, _opts) do
    %{"provider_id" => provider_id} = conn.path_params

    case Providers.lookup(provider_id) do
      %Provider{} = provider ->
        conn |> add_to_ctxt(provider)

      _ ->
        conn
        |> put_resp_header("content-type", "application/json")
        |> send_resp(400, ~s({"error": "invalid_request", "error_description": "unknown_provider"}))
        |> halt()
    end
  rescue
    # NOTE(review): this broad rescue turns *any* exception — including the
    # MatchError raised when :provider_id is absent from path_params — into a
    # 500. Consider rescuing specific exceptions so genuine bugs are not masked.
    _ ->
      conn
      |> put_resp_header("content-type", "application/json")
      |> send_resp(500, ~s({"error": "server_error"}))
      |> halt()
  end

  # Merges the provider into the accumulated :noa_ctxt assign map.
  defp add_to_ctxt(conn, provider) do
    ctxt = Map.get(conn.assigns, :noa_ctxt, %{})
    conn |> assign(:noa_ctxt, Map.put(ctxt, :provider, provider))
  end
end
| 27.393939
| 101
| 0.627212
|
9e41078d53f357996c75cdbe4edd4d066e240f19
| 492
|
exs
|
Elixir
|
text_client/mix.exs
|
jakoubek/hangman-elixir
|
2917755bda4dca2bafb20f0ef5caad579e3e99dd
|
[
"MIT"
] | null | null | null |
text_client/mix.exs
|
jakoubek/hangman-elixir
|
2917755bda4dca2bafb20f0ef5caad579e3e99dd
|
[
"MIT"
] | null | null | null |
text_client/mix.exs
|
jakoubek/hangman-elixir
|
2917755bda4dca2bafb20f0ef5caad579e3e99dd
|
[
"MIT"
] | null | null | null |
defmodule TextClient.MixProject do
  use Mix.Project

  @version "0.1.0"

  # Text-mode client for the hangman game engine that lives in ../hangman.
  def project do
    [
      app: :text_client,
      version: @version,
      elixir: "~> 1.13",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:hangman, path: "../hangman"}
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end
end
| 17.571429
| 59
| 0.577236
|
9e410d1473a908cedcd97beaabddf96526af946a
| 2,281
|
ex
|
Elixir
|
clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/demote_master_configuration.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | 1
|
2021-10-01T09:20:41.000Z
|
2021-10-01T09:20:41.000Z
|
clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/demote_master_configuration.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | null | null | null |
clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/demote_master_configuration.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated API model: a kind tag plus a nested replica-configuration model.
defmodule GoogleApi.SQLAdmin.V1beta4.Model.DemoteMasterConfiguration do
  @moduledoc """
  Read-replica configuration for connecting to the on-premises primary instance.
  ## Attributes
  * `kind` (*type:* `String.t`, *default:* `nil`) - This is always *sql#demoteMasterConfiguration*.
  * `mysqlReplicaConfiguration` (*type:* `GoogleApi.SQLAdmin.V1beta4.Model.DemoteMasterMySqlReplicaConfiguration.t`, *default:* `nil`) - MySQL specific configuration when replicating from a MySQL on-premises primary instance. Replication configuration information such as the username, password, certificates, and keys are not stored in the instance metadata. The configuration information is used only to set up the replication connection and is stored by MySQL in a file named *master.info* in the data directory.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kind => String.t() | nil,
          :mysqlReplicaConfiguration =>
            GoogleApi.SQLAdmin.V1beta4.Model.DemoteMasterMySqlReplicaConfiguration.t() | nil
        }

  field(:kind)

  # `as:` presumably tells the ModelBase (de)serializer which nested model to
  # build for this field; confirm in GoogleApi.Gax.ModelBase.
  field(:mysqlReplicaConfiguration,
    as: GoogleApi.SQLAdmin.V1beta4.Model.DemoteMasterMySqlReplicaConfiguration
  )
end
# Delegates Poison decoding to the generated model module's decode/2.
defimpl Poison.Decoder, for: GoogleApi.SQLAdmin.V1beta4.Model.DemoteMasterConfiguration do
  def decode(value, options) do
    GoogleApi.SQLAdmin.V1beta4.Model.DemoteMasterConfiguration.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model base.
defimpl Poison.Encoder, for: GoogleApi.SQLAdmin.V1beta4.Model.DemoteMasterConfiguration do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.240741
| 517
| 0.7637
|
9e412de2f3675aff38dfe308aa8414148bbc2951
| 640
|
ex
|
Elixir
|
lib/exdns/config.ex
|
jeanparpaillon/exdns
|
53b7fc780399eda96d42052e11e03d5eb0dcd789
|
[
"MIT"
] | 16
|
2016-05-26T10:11:57.000Z
|
2021-01-08T15:09:19.000Z
|
lib/exdns/config.ex
|
jeanparpaillon/exdns
|
53b7fc780399eda96d42052e11e03d5eb0dcd789
|
[
"MIT"
] | 9
|
2016-08-11T00:48:27.000Z
|
2020-09-16T22:10:07.000Z
|
lib/exdns/config.ex
|
jeanparpaillon/exdns
|
53b7fc780399eda96d42052e11e03d5eb0dcd789
|
[
"MIT"
] | 11
|
2016-08-10T08:13:36.000Z
|
2021-04-03T10:20:11.000Z
|
defmodule Exdns.Config do
  @moduledoc """
  Configuration details for exdns.

  Each accessor reads a key from the `:exdns` application environment,
  falling back to a built-in default when the key is unset.
  """

  @app :exdns

  def catch_exceptions?, do: env(:catch_exceptions, true)
  def use_root_hints?, do: env(:use_root_hints, false)
  def zone_file, do: env(:zone_file, "zones.json")
  def storage_type, do: env(:storage_type, Exdns.Storage.EtsStorage)
  def num_workers, do: env(:num_workers, 16)
  def servers, do: env(:servers, [])
  def wildcard_fallback?, do: env(:wildcard_fallback, false)

  defp env(key, default), do: Application.get_env(@app, key, default)
end
| 30.47619
| 92
| 0.745313
|
9e412ea8efe5dd594e44e357ff2e77a7f454fbb9
| 535
|
ex
|
Elixir
|
lib/decompilerl/cli.ex
|
aerosol/decompilerl
|
aa4b3e8e9fc3542ce6baf058dd2cfdb413b4bb81
|
[
"WTFPL"
] | 27
|
2016-05-07T21:29:25.000Z
|
2022-02-03T08:38:39.000Z
|
lib/decompilerl/cli.ex
|
aerosol/decompilerl
|
aa4b3e8e9fc3542ce6baf058dd2cfdb413b4bb81
|
[
"WTFPL"
] | 2
|
2017-05-18T23:17:02.000Z
|
2018-01-24T22:55:00.000Z
|
lib/decompilerl/cli.ex
|
aerosol/decompilerl
|
aa4b3e8e9fc3542ce6baf058dd2cfdb413b4bb81
|
[
"WTFPL"
] | 4
|
2017-05-18T22:05:59.000Z
|
2021-03-23T18:57:29.000Z
|
defmodule Decompilerl.CLI do
  @moduledoc false

  # Recognized command-line switches and their single-letter aliases.
  @switches [help: :boolean, output: :string, skip_info: :boolean]
  @aliases [h: :help, o: :output]

  @doc """
  Entry point: expects exactly one positional argument (the file to
  decompile); anything else prints usage and exits with status 1.
  """
  def main(args) do
    {opts, positional} = OptionParser.parse!(args, switches: @switches, aliases: @aliases)

    case positional do
      [file] -> Decompilerl.decompile(file, opts)
      _ -> usage_and_halt()
    end
  end

  # Prints the usage banner and terminates the VM with a non-zero status.
  defp usage_and_halt do
    IO.puts("""
    Decompilerl
    usage: decompilerl <file> [-o <erl_file> | --output=<erl_file> | --skip-info]
    """)

    System.halt(1)
  end
end
| 17.258065
| 85
| 0.549533
|
9e413301c1b95ddd3dbd3e3f764a20fcf10898c4
| 799
|
ex
|
Elixir
|
lib/eb/user/loader/producer.ex
|
peterkrenn/ecto-genstage-batcher-example
|
b9f935c5db3ee6127fef518d2197d020a36840f5
|
[
"Apache-2.0"
] | 1
|
2018-11-27T09:31:06.000Z
|
2018-11-27T09:31:06.000Z
|
lib/eb/user/loader/producer.ex
|
peterkrenn/ecto-genstage-batcher-example
|
b9f935c5db3ee6127fef518d2197d020a36840f5
|
[
"Apache-2.0"
] | null | null | null |
lib/eb/user/loader/producer.ex
|
peterkrenn/ecto-genstage-batcher-example
|
b9f935c5db3ee6127fef518d2197d020a36840f5
|
[
"Apache-2.0"
] | null | null | null |
defmodule EB.User.Loader.Producer do
  use GenStage

  # Buffers `{id, from}` load requests in a FIFO queue and emits them as
  # GenStage events whenever downstream demand is available. State is the
  # tuple `{queue, pending_demand}`.

  def start_link([]) do
    GenStage.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  def init(:ok), do: {:producer, {:queue.new(), 0}}

  # Enqueue a new job, then try to satisfy any demand we already owe.
  def handle_cast({:load, id, from}, {queue, pending_demand}) do
    queue
    |> :queue.in({id, from})
    |> dispatch_jobs(pending_demand, [])
  end

  # New demand arrived: add it to whatever was outstanding and dispatch.
  def handle_demand(incoming_demand, {queue, pending_demand}) do
    dispatch_jobs(queue, incoming_demand + pending_demand, [])
  end

  # Demand satisfied: emit collected jobs, oldest first.
  defp dispatch_jobs(queue, 0, acc), do: {:noreply, Enum.reverse(acc), {queue, 0}}

  # Pop jobs one at a time until either demand or the queue runs dry.
  defp dispatch_jobs(queue, demand, acc) do
    case :queue.out(queue) do
      {{:value, job}, rest} ->
        dispatch_jobs(rest, demand - 1, [job | acc])

      {:empty, rest} ->
        {:noreply, Enum.reverse(acc), {rest, demand}}
    end
  end
end
| 24.96875
| 59
| 0.635795
|
9e415153848653da99dd3a36e5811bcae4e24f4a
| 962
|
ex
|
Elixir
|
lib/exq/adapters/queue/redis.ex
|
onpointvn/exq
|
ee4db22624fb8a00ddc01c35a135e24de31b5a66
|
[
"Apache-2.0"
] | 1,406
|
2015-01-16T03:00:32.000Z
|
2022-03-28T11:38:22.000Z
|
lib/exq/adapters/queue/redis.ex
|
onpointvn/exq
|
ee4db22624fb8a00ddc01c35a135e24de31b5a66
|
[
"Apache-2.0"
] | 372
|
2015-01-08T05:15:11.000Z
|
2022-03-18T18:05:34.000Z
|
lib/exq/adapters/queue/redis.ex
|
onpointvn/exq
|
ee4db22624fb8a00ddc01c35a135e24de31b5a66
|
[
"Apache-2.0"
] | 217
|
2015-02-01T20:21:36.000Z
|
2022-01-28T16:19:55.000Z
|
defmodule Exq.Adapters.Queue.Redis do
  @moduledoc """
  Redis based Asynchronous queue.
  Enqueue the job by using the GenServer API. Default queue. Designed to be used in production.
  """

  alias Exq.Redis.JobQueue
  alias Exq.Support.Config

  @behaviour Exq.Adapters.Queue

  def enqueue(pid, queue, worker, args, options) do
    {redis, namespace} = redis_info(pid)
    JobQueue.enqueue(redis, namespace, queue, worker, args, options)
  end

  def enqueue_at(pid, queue, time, worker, args, options) do
    {redis, namespace} = redis_info(pid)
    JobQueue.enqueue_at(redis, namespace, queue, time, worker, args, options)
  end

  def enqueue_in(pid, queue, offset, worker, args, options) do
    {redis, namespace} = redis_info(pid)
    JobQueue.enqueue_in(redis, namespace, queue, offset, worker, args, options)
  end

  # Asks the owning GenServer for its Redis connection and namespace.
  defp redis_info(pid) do
    GenServer.call(pid, :redis, Config.get(:genserver_timeout))
  end
end
| 35.62963
| 95
| 0.72869
|
9e41a992eb7a91c73f554fee5c344d95b4c6c9fc
| 2,434
|
exs
|
Elixir
|
test/shared/cache_test.exs
|
Atlas42/nebulex_redis_adapter
|
7f56aadb2fc41ca9fe54833a3d8402bc23261139
|
[
"MIT"
] | null | null | null |
test/shared/cache_test.exs
|
Atlas42/nebulex_redis_adapter
|
7f56aadb2fc41ca9fe54833a3d8402bc23261139
|
[
"MIT"
] | null | null | null |
test/shared/cache_test.exs
|
Atlas42/nebulex_redis_adapter
|
7f56aadb2fc41ca9fe54833a3d8402bc23261139
|
[
"MIT"
] | null | null | null |
defmodule NebulexRedisAdapter.CacheTest do
  @moduledoc """
  Shared Tests
  """

  # Injects a shared test suite into the using module. The `:cache` option is
  # mandatory and names the cache module under test; all tests below call it
  # through the @cache attribute. Redis glob patterns ("**name**", "a??") are
  # passed through to the adapter's all/stream queries.
  defmacro __using__(opts) do
    quote bind_quoted: [opts: opts] do
      @cache Keyword.fetch!(opts, :cache)

      use Nebulex.Cache.ObjectTest, cache: @cache
      use Nebulex.Cache.TransactionTest, cache: @cache

      test "all" do
        # Populate keys 1..100, then check all/0 returns every key.
        set1 = for x <- 1..50, do: @cache.set(x, x)
        set2 = for x <- 51..100, do: @cache.set(x, x)
        for x <- 1..100, do: assert(@cache.get(x) == x)
        expected = set1 ++ set2
        assert expected == to_int(@cache.all())
        # Deleting a middle range must shrink the key set accordingly.
        set3 = for x <- 20..60, do: @cache.delete(x, return: :key)
        expected = :lists.usort(expected -- set3)
        assert expected == to_int(@cache.all())
      end

      test "stream" do
        entries = for x <- 1..10, do: {x, x * 2}
        assert :ok == @cache.set_many(entries)
        expected = Keyword.keys(entries)
        stream = @cache.stream()
        assert expected == stream |> Enum.to_list() |> to_int()
        # NOTE(review): this relies on get_many/1 returning values in the same
        # order as Keyword.values/1 — verify that ordering guarantee holds.
        assert Keyword.values(entries) ==
                 entries
                 |> Keyword.keys()
                 |> @cache.get_many()
                 |> Map.values()
        # A fresh stream can be consumed again from the start.
        stream = @cache.stream()
        [1 | _] = stream |> Enum.to_list() |> to_int()
        # Non-string/non-pattern queries must raise.
        assert_raise Nebulex.QueryError, fn ->
          :invalid_query
          |> @cache.stream()
          |> Enum.to_list()
        end
      end

      test "all and stream with key pattern" do
        @cache.set_many(%{
          "firstname" => "Albert",
          "lastname" => "Einstein",
          "age" => 76
        })
        # Redis-style glob matching: ** wildcard and ? single-char.
        assert ["firstname", "lastname"] == "**name**" |> @cache.all() |> :lists.sort()
        assert ["age"] == "a??" |> @cache.all()
        assert ["age", "firstname", "lastname"] == :lists.sort(@cache.all())
        stream = @cache.stream("**name**")
        assert ["firstname", "lastname"] == stream |> Enum.to_list() |> :lists.sort()
        stream = @cache.stream("a??")
        assert ["age"] == stream |> Enum.to_list()
        stream = @cache.stream()
        assert ["age", "firstname", "lastname"] == stream |> Enum.to_list() |> :lists.sort()
        assert %{"firstname" => "Albert", "lastname" => "Einstein"} ==
                 "**name**" |> @cache.all() |> @cache.get_many()
      end

      ## Private Functions

      # Keys come back as binaries from Redis; sort them as unique integers.
      defp to_int(keys), do: :lists.usort(for(k <- keys, do: String.to_integer(k)))
    end
  end
end
| 29.682927
| 92
| 0.518899
|
9e41aa647479529e569c4a224d6f0978af7d3051
| 92
|
exs
|
Elixir
|
test/shiritorishi_web/views/layout_view_test.exs
|
Foo-x/Shiritorishi
|
ee9b58f74c51941f958c986578c95c26a9920816
|
[
"Apache-2.0"
] | null | null | null |
test/shiritorishi_web/views/layout_view_test.exs
|
Foo-x/Shiritorishi
|
ee9b58f74c51941f958c986578c95c26a9920816
|
[
"Apache-2.0"
] | null | null | null |
test/shiritorishi_web/views/layout_view_test.exs
|
Foo-x/Shiritorishi
|
ee9b58f74c51941f958c986578c95c26a9920816
|
[
"Apache-2.0"
] | null | null | null |
# Placeholder test module for the layout view: no view-specific behaviour is
# asserted yet; `use ConnCase` only wires up the test environment.
defmodule ShiritorishiWeb.LayoutViewTest do
  use ShiritorishiWeb.ConnCase, async: true
end
| 23
| 43
| 0.847826
|
9e41dd56da807c66b4794e5c076f19dfe5adffdd
| 154
|
ex
|
Elixir
|
apps/xee/lib/xee/repo.ex
|
xeejp/xee
|
669e5a8030948e6d37ee0d33d3b37b4abdc34001
|
[
"MIT"
] | 11
|
2015-12-24T07:51:45.000Z
|
2019-03-05T12:29:31.000Z
|
apps/xee/lib/xee/repo.ex
|
xeejp/xee
|
669e5a8030948e6d37ee0d33d3b37b4abdc34001
|
[
"MIT"
] | 62
|
2015-12-25T02:58:20.000Z
|
2020-03-04T06:52:11.000Z
|
apps/xee/lib/xee/repo.ex
|
xeejp/xee
|
669e5a8030948e6d37ee0d33d3b37b4abdc34001
|
[
"MIT"
] | 1
|
2016-07-19T03:28:08.000Z
|
2016-07-19T03:28:08.000Z
|
defmodule Xee.Repo do
  use Ecto.Repo, otp_app: :xee

  # Repo init callback: injects the DATABASE_URL environment variable into
  # the repo options at startup so the connection URL is read at runtime.
  def init(_context, opts) do
    opts_with_url = Keyword.put(opts, :url, System.get_env("DATABASE_URL"))
    {:ok, opts_with_url}
  end
end
| 19.25
| 66
| 0.668831
|
9e41ef36114a84b75640a06fa2d1b0788203a2b4
| 3,758
|
ex
|
Elixir
|
lib/koans/15_processes.ex
|
addupe/elixir-koans
|
7dad0666201e36916fa7aedd20b5f2ee6cf94fb1
|
[
"MIT"
] | null | null | null |
lib/koans/15_processes.ex
|
addupe/elixir-koans
|
7dad0666201e36916fa7aedd20b5f2ee6cf94fb1
|
[
"MIT"
] | null | null | null |
lib/koans/15_processes.ex
|
addupe/elixir-koans
|
7dad0666201e36916fa7aedd20b5f2ee6cf94fb1
|
[
"MIT"
] | null | null | null |
# Koans exercise file: the `____`/`_____` placeholders are blanks the learner
# fills in; they are intentionally left as-is here.
defmodule Processes do
  use Koans

  @intro "Processes"

  koan "You are a process" do
    assert Process.alive?(self()) == true
  end

  koan "You can ask a process to introduce itself" do
    information = Process.info(self())
    assert information[:status] == :running
  end

  koan "Processes are referenced by their process ID (pid)" do
    assert is_pid(self()) == true
  end

  koan "New processes are spawned functions" do
    # An empty `receive` blocks forever, keeping the spawned process alive.
    value =
      spawn(fn ->
        receive do
        end
      end)

    assert is_pid(value) == true
  end

  koan "Processes die when their function exits" do
    fast_process = spawn(fn -> :timer.sleep(10) end)
    slow_process = spawn(fn -> :timer.sleep(1000) end)

    # All spawned functions are executed concurrently with the current process.
    # You check back on slow_process and fast_process 50ms later. Let's
    # see if they are still alive!
    :timer.sleep(50)

    assert Process.alive?(fast_process) == false
    assert Process.alive?(slow_process) == true
  end

  koan "Processes can send and receive messages" do
    send(self(), "hola!")

    receive do
      msg -> assert msg == "hola!"
    end
  end

  koan "A process will wait forever for a message" do
    wait_forever = fn ->
      receive do
      end
    end

    pid = spawn(wait_forever)
    assert Process.alive?(pid) == true
  end

  koan "Received messages are queued, first in first out" do
    send(self(), "hola!")
    send(self(), "como se llama?")

    assert_receive "hola!"
    assert_receive "como se llama?"
  end

  koan "A common pattern is to include the sender in the message, so that it can reply" do
    greeter = fn ->
      receive do
        {:hello, sender} -> send(sender, :how_are_you?)
      end
    end

    pid = spawn(greeter)
    send(pid, {:hello, self()})
    # NOTE(review): `assert_receive pid` treats `pid` as a pattern, so it
    # matches *any* message and rebinds `pid` — likely the intended blank is
    # the reply `:how_are_you?`.
    assert_receive pid
  end

  # Echo server: replies with the upcased value, then loops tail-recursively.
  def yelling_echo_loop do
    receive do
      {caller, value} ->
        send(caller, String.upcase(value))
        yelling_echo_loop()
    end
  end

  koan "Use tail recursion to receive multiple messages" do
    pid = spawn_link(&yelling_echo_loop/0)

    send(pid, {self(), "o"})
    assert_receive ____

    send(pid, {self(), "hai"})
    assert_receive ____
  end

  # Minimal state-holder loop: :get replies with the value, :set replaces it.
  def state(value) do
    receive do
      {caller, :get} ->
        send(caller, value)
        state(value)

      # NOTE(review): `caller` is unused in this clause; `_caller` would
      # silence the compiler warning.
      {caller, :set, new_value} ->
        state(new_value)
    end
  end

  koan "Processes can be used to hold state" do
    initial_state = "foo"

    pid =
      spawn(fn ->
        state(initial_state)
      end)

    send(pid, {self(), :get})
    assert_receive "foo"

    send(pid, {self(), :set, "bar"})
    send(pid, {self(), :get})
    assert_receive "bar"
  end

  koan "Waiting for a message can get boring" do
    parent = self()

    # `after 5` fires when no message arrives within 5 ms.
    spawn(fn ->
      receive do
      after
        5 -> send(parent, {:waited_too_long, "I am impatient"})
      end
    end)

    assert_receive _____
  end

  koan "Trapping will allow you to react to someone terminating the process" do
    parent = self()

    pid =
      spawn(fn ->
        # With trap_exit, an exit signal arrives as an {:EXIT, pid, reason}
        # message instead of killing this process.
        Process.flag(:trap_exit, true)
        send(parent, :ready)

        receive do
          {:EXIT, _pid, reason} -> send(parent, {:exited, reason})
        end
      end)

    # Wait until the child has enabled trapping before sending the exit.
    receive do
      :ready -> true
    end

    Process.exit(pid, :random_reason)
    assert_receive _____
  end

  koan "Parent processes can trap exits for children they are linked to" do
    Process.flag(:trap_exit, true)
    spawn_link(fn -> Process.exit(self(), :normal) end)
    assert_receive {:EXIT, _pid, ____}
  end

  koan "If you monitor your children, you'll be automatically informed of their departure" do
    spawn_monitor(fn -> Process.exit(self(), :normal) end)
    assert_receive _____
  end
end
| 21.231638
| 93
| 0.621607
|
9e42108bf5ce4c0e7100f9aab14a9eedeb5635cd
| 593
|
exs
|
Elixir
|
apps/arbitrage/mix.exs
|
joaop21/hermes
|
2e274221ef8365edec847307231c134921db0270
|
[
"Apache-2.0"
] | null | null | null |
apps/arbitrage/mix.exs
|
joaop21/hermes
|
2e274221ef8365edec847307231c134921db0270
|
[
"Apache-2.0"
] | null | null | null |
apps/arbitrage/mix.exs
|
joaop21/hermes
|
2e274221ef8365edec847307231c134921db0270
|
[
"Apache-2.0"
] | null | null | null |
defmodule Arbitrage.MixProject do
  use Mix.Project

  @version "0.1.0"

  # Umbrella child app: build output, config, deps and lockfile all live at
  # the umbrella root (../..).
  def project do
    [
      app: :arbitrage,
      version: @version,
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.12",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  def application do
    [
      extra_applications: [:logger],
      mod: {Arbitrage.Application, []}
    ]
  end

  defp deps do
    [
      {:phoenix_pubsub, "~> 2.0"},
      {:streamer, in_umbrella: true}
    ]
  end
end
| 18.53125
| 45
| 0.527825
|
9e4229cd43fd9f5bf99bcba5b4b0da2f5104301d
| 1,612
|
ex
|
Elixir
|
lib/super_issuer_web/controllers/credential_controller.ex
|
WeLightProject/WeLight-Portal
|
6e701469423e3a62affdc415c4e8c186d603d324
|
[
"MIT"
] | 2
|
2021-02-12T09:21:56.000Z
|
2021-02-22T08:52:20.000Z
|
lib/super_issuer_web/controllers/credential_controller.ex
|
WeLightProject/WeLight-Portal
|
6e701469423e3a62affdc415c4e8c186d603d324
|
[
"MIT"
] | 4
|
2021-02-22T08:53:43.000Z
|
2021-06-09T09:24:46.000Z
|
lib/super_issuer_web/controllers/credential_controller.ex
|
WeLightProject/WeLight-Portal
|
6e701469423e3a62affdc415c4e8c186d603d324
|
[
"MIT"
] | null | null | null |
defmodule SuperIssuerWeb.CredentialController do
  use SuperIssuerWeb, :controller

  # Block-explorer URL prefix; a transaction id is appended to build tx_link.
  @explorer_prefix "https://weimang.cyberemd.com/explorer/#/transaction/transactionDetail?pkHash="

  # Reads the credential from the session, normalizes it per its CPT id, and
  # renders the matching "<cptId>.html" template.
  # NOTE(review): `params` is unused — renaming it `_params` would silence the
  # compiler warning.
  def index(conn, params) do
    # The double match also binds cptId out of the handled credential.
    credential =
      %{cptId: cptId} =
      conn
      |> get_session(:credential)
      |> StructTranslater.to_atom_struct()
      |> credential_handler()

    render(
      clean_session(conn),
      "#{cptId}.html",
      %{
        credential: credential
      })
  end

  @spec credential_handler(%{:claim => any, :cptId => 1 | 100_002, optional(any) => any}) :: %{
          :claim => any,
          :cptId => 1 | 100_002,
          optional(any) => any
        }
  @doc """
  100002: Lesson Study Cred.
  1: WeLight Node Cred.
  """
  # NOTE(review): this clause rebuilds the credential with its own claim — a
  # no-op pass-through for cptId 100002.
  def credential_handler(
        %{
          cptId: 100002,
          claim: claim
        } = credential) do
    %{credential | claim: claim}
  end

  # cptId 1: converts the string-keyed representative and evidence value maps
  # to atom-keyed maps and attaches an explorer tx_link to the evidence info.
  def credential_handler(
        %{
          cptId: 1,
          claim: claim
        } = credential) do
    repre_handled = StructTranslater.str_to_atom_map(claim.representative)

    evidence_value_handled = StructTranslater.str_to_atom_map(claim.evidence_info.value)

    evidence_handled =
      claim.evidence_info
      |> Map.put(:value, evidence_value_handled)
      |> Map.put(:tx_link, @explorer_prefix <> claim.evidence_info.tx_id)

    claim =
      claim
      |> Map.put(:representative, repre_handled)
      |> Map.put(:evidence_info, evidence_handled)

    %{credential | claim: claim}
  end

  # Currently only fetches the session; the delete of :credential is
  # deliberately commented out.
  def clean_session(conn) do
    conn
    |> fetch_session()
    # |> delete_session(:credential)
  end
end
| 24.424242
| 98
| 0.623449
|
9e4268fc006afc07e7567078476e04d3bea91340
| 8,041
|
ex
|
Elixir
|
lib/ex_aws/auth.ex
|
andrewhr/ex_aws
|
47fcd13b2767aecddd2471388263539f3d9a6851
|
[
"MIT"
] | 643
|
2017-10-17T12:55:11.000Z
|
2022-03-24T15:23:37.000Z
|
lib/ex_aws/auth.ex
|
andrewhr/ex_aws
|
47fcd13b2767aecddd2471388263539f3d9a6851
|
[
"MIT"
] | 338
|
2017-10-17T11:12:21.000Z
|
2022-03-20T09:48:23.000Z
|
lib/ex_aws/auth.ex
|
hixio-mh/ex_aws
|
26e0a7896e8f2c4bda1735ac7814f01179eb0811
|
[
"MIT"
] | 319
|
2017-10-17T17:39:44.000Z
|
2022-03-29T13:35:34.000Z
|
defmodule ExAws.Auth do
  # AWS Signature Version 4 request signing: canonical request construction,
  # Authorization-header signing, and presigned URL generation.
  import ExAws.Auth.Utils
  alias ExAws.Auth.Credentials
  alias ExAws.Auth.Signatures
  alias ExAws.Request.Url
  @moduledoc false

  # Headers that must never participate in the signature (lower- and
  # mixed-case spellings, since incoming header casing is not normalized yet).
  @unsignable_headers ["x-amzn-trace-id"]
  @unsignable_headers_multi_case ["x-amzn-trace-id", "X-Amzn-Trace-Id"]

  # Verifies that :secret_access_key and :access_key_id are present,
  # non-nil binaries. Returns {:ok, config} or {:error, message}.
  def validate_config(config) do
    with :ok <- get_key(config, :secret_access_key),
         :ok <- get_key(config, :access_key_id) do
      {:ok, config}
    end
  end

  defp get_key(config, key) do
    case Map.fetch(config, key) do
      :error ->
        {:error, "Required key: #{inspect(key)} not found in config!"}

      {:ok, nil} ->
        {:error, "Required key: #{inspect(key)} is nil in config!"}

      {:ok, val} when is_binary(val) ->
        :ok

      {:ok, val} ->
        {:error, "Required key: #{inspect(key)} must be a string, but instead is #{inspect(val)}"}
    end
  end

  # Builds the signed header list for a request, prepending host/x-amz-date
  # (and the security token when using temporary credentials) and computing
  # the SigV4 Authorization header. Returns {:ok, headers} | {:error, reason}.
  def headers(http_method, url, service, config, headers, body) do
    with {:ok, config} <- validate_config(config) do
      datetime = :calendar.universal_time()

      headers =
        [
          {"host", URI.parse(url).authority},
          {"x-amz-date", amz_date(datetime)}
          | headers
        ]
        |> handle_temp_credentials(config)

      auth_header =
        auth_header(
          http_method,
          url,
          headers,
          body,
          service |> service_override(config) |> service_name,
          datetime,
          config
        )

      {:ok, [{"Authorization", auth_header} | headers]}
    end
  end

  # Builds a presigned URL carrying the signature in X-Amz-* query params
  # instead of headers. `expires` is the validity window in seconds.
  def presigned_url(
        http_method,
        url,
        service,
        datetime,
        config,
        expires,
        query_params \\ [],
        body \\ nil,
        headers \\ []
      ) do
    with {:ok, config} <- validate_config(config) do
      service = service_name(service)
      signed_headers = presigned_url_headers(url, headers)
      uri = URI.parse(url)
      uri_query = query_from_parsed_uri(uri)

      org_query_params =
        Enum.reduce(query_params, uri_query, fn {k, v}, acc -> [{to_string(k), v} | acc] end)

      amz_query_params =
        build_amz_query_params(service, datetime, config, expires, signed_headers)

      # The signature covers original + amz params sorted together, but the
      # final URL keeps the original params first for readability.
      query_to_sign = (org_query_params ++ amz_query_params) |> canonical_query_params()
      amz_query_string = canonical_query_params(amz_query_params)

      query_for_url =
        if Enum.any?(org_query_params) do
          canonical_query_params(org_query_params) <> "&" <> amz_query_string
        else
          amz_query_string
        end

      path = url |> Url.get_path(service) |> Url.uri_encode()

      signature =
        signature(
          http_method,
          url,
          query_to_sign,
          signed_headers,
          body,
          service,
          datetime,
          config
        )

      {:ok,
       "#{uri.scheme}://#{uri.authority}#{path}?#{query_for_url}&X-Amz-Signature=#{signature}"}
    end
  end

  # Temporary (STS) credentials require the session token header.
  defp handle_temp_credentials(headers, %{security_token: token}) do
    [{"X-Amz-Security-Token", token} | headers]
  end

  defp handle_temp_credentials(headers, _), do: headers

  # Assembles the "AWS4-HMAC-SHA256 Credential=..., SignedHeaders=...,
  # Signature=..." header value as iodata, flattened once at the end.
  defp auth_header(http_method, url, headers, body, service, datetime, config) do
    query =
      url
      |> URI.parse()
      |> query_from_parsed_uri()
      |> canonical_query_params()

    signature = signature(http_method, url, query, headers, body, service, datetime, config)

    [
      "AWS4-HMAC-SHA256 Credential=",
      Credentials.generate_credential_v4(service, config, datetime),
      ",",
      "SignedHeaders=",
      signed_headers(headers),
      ",",
      "Signature=",
      signature
    ]
    |> IO.iodata_to_binary()
  end

  defp query_from_parsed_uri(%{query: nil}), do: []

  defp query_from_parsed_uri(%{query: query_string}) do
    query_string
    |> URI.decode_query()
    |> Enum.to_list()
  end

  # Canonical request -> string-to-sign -> SigV4 signature.
  defp signature(http_method, url, query, headers, body, service, datetime, config) do
    path = url |> Url.get_path(service) |> Url.uri_encode()
    request = build_canonical_request(http_method, path, query, headers, body)
    string_to_sign = string_to_sign(request, service, datetime, config)
    Signatures.generate_signature_v4(service, config, datetime, string_to_sign)
  end

  # Builds the SigV4 "canonical request" block: METHOD\npath\nquery\n
  # headers\n\nsigned-header-names\npayload-hash. A nil body signs as
  # "UNSIGNED-PAYLOAD" (used for presigned URLs / streaming uploads).
  def build_canonical_request(http_method, path, query, headers, body) do
    http_method = http_method |> method_string |> String.upcase()

    headers = headers |> canonical_headers

    header_string =
      headers
      |> Enum.map(fn {k, v} -> "#{k}:#{remove_dup_spaces(to_string(v))}" end)
      |> Enum.join("\n")

    signed_headers_list = signed_headers_value(headers)

    payload =
      case body do
        nil -> "UNSIGNED-PAYLOAD"
        _ -> ExAws.Auth.Utils.hash_sha256(body)
      end

    [
      http_method,
      "\n",
      path,
      "\n",
      query,
      "\n",
      header_string,
      "\n",
      "\n",
      signed_headers_list,
      "\n",
      payload
    ]
    |> IO.iodata_to_binary()
  end

  # Collapses runs of spaces to a single space, as required by the SigV4
  # header canonicalization. Iterates until the string stops changing.
  defp remove_dup_spaces(str), do: remove_dup_spaces(str, "")
  defp remove_dup_spaces(str, str), do: str

  defp remove_dup_spaces(str, _last),
    do: str |> String.replace("  ", " ") |> remove_dup_spaces(str)

  defp string_to_sign(request, service, datetime, config) do
    request = hash_sha256(request)

    """
    AWS4-HMAC-SHA256
    #{amz_date(datetime)}
    #{Credentials.generate_credential_scope_v4(service, config, datetime)}
    #{request}
    """
    |> String.trim_trailing()
  end

  # Lower-cased, sorted, ';'-joined header names, minus unsignable ones.
  defp signed_headers(headers) do
    headers
    |> Enum.map(fn {k, _} -> String.downcase(k) end)
    |> Kernel.--(@unsignable_headers)
    |> Enum.sort(&(&1 < &2))
    |> Enum.join(";")
  end

  defp canonical_query_params(params) do
    params
    |> Enum.sort(&compare_query_params/2)
    |> Enum.map_join("&", &pair/1)
  end

  # Sort by key first; ties broken by value (AWS requires stable ordering
  # for repeated keys).
  defp compare_query_params({key, value1}, {key, value2}), do: value1 < value2
  defp compare_query_params({key_1, _}, {key_2, _}), do: key_1 < key_2

  defp pair({k, _}) when is_list(k) do
    raise ArgumentError, "encode_query/1 keys cannot be lists, got: #{inspect(k)}"
  end

  defp pair({_, v}) when is_list(v) do
    raise ArgumentError, "encode_query/1 values cannot be lists, got: #{inspect(v)}"
  end

  defp pair({k, v}) do
    URI.encode_www_form(Kernel.to_string(k)) <> "=" <> aws_encode_www_form(Kernel.to_string(v))
  end

  # is basically the same as URI.encode_www_form
  # but doesn't use %20 instead of "+"
  def aws_encode_www_form(str) when is_binary(str) do
    import Bitwise

    for <<c <- str>>, into: "" do
      case URI.char_unreserved?(c) do
        true -> <<c>>
        false -> "%" <> hex(bsr(c, 4)) <> hex(band(c, 15))
      end
    end
  end

  defp hex(n) when n <= 9, do: <<n + ?0>>
  defp hex(n), do: <<n + ?A - 10>>

  # Drops unsignable headers, downcases names, trims binary values, and
  # sorts by name — the SigV4 canonical header form.
  defp canonical_headers(headers) do
    headers
    |> Enum.reduce([], fn
      {k, _v}, acc when k in @unsignable_headers_multi_case -> acc
      {k, v}, acc when is_binary(v) -> [{String.downcase(to_string(k)), String.trim(v)} | acc]
      {k, v}, acc -> [{String.downcase(to_string(k)), v} | acc]
    end)
    |> Enum.sort(fn {k1, _}, {k2, _} -> k1 < k2 end)
  end

  defp presigned_url_headers(url, headers) do
    uri = URI.parse(url)
    canonical_headers([{"host", uri.authority} | headers])
  end

  defp build_amz_query_params(service, datetime, config, expires, signed_headers) do
    [
      {"X-Amz-Algorithm", "AWS4-HMAC-SHA256"},
      {"X-Amz-Credential", Credentials.generate_credential_v4(service, config, datetime)},
      {"X-Amz-Date", amz_date(datetime)},
      {"X-Amz-Expires", expires},
      {"X-Amz-SignedHeaders", signed_headers_value(signed_headers)}
    ] ++
      if config[:security_token] do
        [{"X-Amz-Security-Token", config[:security_token]}]
      else
        []
      end
  end

  defp signed_headers_value(headers) do
    headers
    |> Enum.map(&elem(&1, 0))
    |> Enum.join(";")
  end

  # Allows callers to force a different service name for signing scope.
  defp service_override(service, config) do
    if config[:service_override] do
      config[:service_override]
    else
      service
    end
  end
end
| 26.625828
| 98
| 0.612113
|
9e428fe49226ae7f50addd2332a478996e342de8
| 2,498
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v2/model/orders_reject_return_line_item_request.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/orders_reject_return_line_item_request.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/orders_reject_return_line_item_request.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE(review): auto-generated model module (see file header) — structure
# must stay in sync with the generator; only comments added here.
defmodule GoogleApi.Content.V2.Model.OrdersRejectReturnLineItemRequest do
  @moduledoc """
  ## Attributes

  *   `lineItemId` (*type:* `String.t`, *default:* `nil`) - The ID of the line item to return. Either lineItemId or productId is required.
  *   `operationId` (*type:* `String.t`, *default:* `nil`) - The ID of the operation. Unique across all operations for a given order.
  *   `productId` (*type:* `String.t`, *default:* `nil`) - The ID of the product to return. This is the REST ID used in the products service. Either lineItemId or productId is required.
  *   `quantity` (*type:* `integer()`, *default:* `nil`) - The quantity to return and refund.
  *   `reason` (*type:* `String.t`, *default:* `nil`) - The reason for the return.
      Acceptable values are:
      - "damagedOrUsed"
      - "missingComponent"
      - "notEligible"
      - "other"
      - "outOfReturnWindow"
  *   `reasonText` (*type:* `String.t`, *default:* `nil`) - The explanation of the reason.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :lineItemId => String.t(),
          :operationId => String.t(),
          :productId => String.t(),
          :quantity => integer(),
          :reason => String.t(),
          :reasonText => String.t()
        }

  # field/1 registers each attribute with the ModelBase (de)serialization.
  field(:lineItemId)
  field(:operationId)
  field(:productId)
  field(:quantity)
  field(:reason)
  field(:reasonText)
end
# Delegates JSON decoding to the generated model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersRejectReturnLineItemRequest do
  def decode(value, options) do
    GoogleApi.Content.V2.Model.OrdersRejectReturnLineItemRequest.decode(value, options)
  end
end
# Delegates JSON encoding to the shared Gax ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersRejectReturnLineItemRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.202899
| 185
| 0.688151
|
9e429b37d8a05af11b3969e420cd71f72336adbc
| 822
|
ex
|
Elixir
|
lib/models/key_metadata.ex
|
tino415/u2f_ex
|
d95f52335dc85140b2b1b3d7975d29c6d71868ea
|
[
"BSD-3-Clause"
] | 13
|
2018-10-15T10:39:24.000Z
|
2019-01-15T06:42:24.000Z
|
lib/models/key_metadata.ex
|
tino415/u2f_ex
|
d95f52335dc85140b2b1b3d7975d29c6d71868ea
|
[
"BSD-3-Clause"
] | 19
|
2018-08-10T04:33:46.000Z
|
2018-12-02T04:55:48.000Z
|
lib/models/key_metadata.ex
|
tino415/u2f_ex
|
d95f52335dc85140b2b1b3d7975d29c6d71868ea
|
[
"BSD-3-Clause"
] | 4
|
2019-07-06T04:38:31.000Z
|
2021-02-01T10:45:57.000Z
|
defmodule U2FEx.KeyMetadata do
  @moduledoc """
  Metadata for a registered U2F key that the host application is expected
  to persist. All values are Base64-encoded strings and safe to store as-is.
  """

  @type t :: %__MODULE__{
          public_key: String.t(),
          key_handle: String.t(),
          app_id: String.t(),
          version: String.t()
        }

  @required_keys [:public_key, :key_handle, :app_id, :version]
  defstruct @required_keys

  @doc """
  Builds a `t:t/0` from its four required fields.
  """
  @spec new(
          public_key :: String.t(),
          key_handle :: String.t(),
          app_id :: String.t(),
          version :: String.t()
        ) :: __MODULE__.t()
  def new(public_key, key_handle, app_id, version) do
    %__MODULE__{
      public_key: public_key,
      key_handle: key_handle,
      app_id: app_id,
      version: version
    }
  end
end
| 25.6875
| 127
| 0.596107
|
9e42a5f2fef82ae855829c9d7c43db1ed50a2e9f
| 411
|
ex
|
Elixir
|
lib/sahara/randomizers/faker.ex
|
ojizero/sahara
|
ee886ed1e3434cb1193770037e3c029773f0b192
|
[
"MIT"
] | null | null | null |
lib/sahara/randomizers/faker.ex
|
ojizero/sahara
|
ee886ed1e3434cb1193770037e3c029773f0b192
|
[
"MIT"
] | null | null | null |
lib/sahara/randomizers/faker.ex
|
ojizero/sahara
|
ee886ed1e3434cb1193770037e3c029773f0b192
|
[
"MIT"
] | null | null | null |
defmodule Sahara.Randomizers.Faker do
  # Adapter that plugs Sahara's randomness sources into Faker's
  # `Faker.Random` behaviour, so Faker draws from Sahara's generators.
  @behaviour Faker.Random

  @impl Faker.Random
  def random_between(left, right), do: Random.randint(left, right)

  @impl Faker.Random
  def random_bytes(total) do
    chars = Enum.map(1..total, fn _ -> Sahara.Randomizer.random_alphanum() end)
    :erlang.iolist_to_binary(chars)
  end

  @impl Faker.Random
  def random_uniform, do: Random.uniform(0, 1)
end
| 19.571429
| 64
| 0.70073
|
9e42bb48a494db36abdbbe097c2c22338d2265b3
| 92
|
exs
|
Elixir
|
examples/bloggy/.formatter.exs
|
wojtekmach/resourceful
|
8425140aa1e89dfababcb2faa7bc4e2f59722661
|
[
"Apache-2.0"
] | 1
|
2020-01-06T00:38:43.000Z
|
2020-01-06T00:38:43.000Z
|
examples/bloggy/.formatter.exs
|
wojtekmach/resourceful
|
8425140aa1e89dfababcb2faa7bc4e2f59722661
|
[
"Apache-2.0"
] | null | null | null |
examples/bloggy/.formatter.exs
|
wojtekmach/resourceful
|
8425140aa1e89dfababcb2faa7bc4e2f59722661
|
[
"Apache-2.0"
] | null | null | null |
# Formatter config: cover mix.exs plus all source/test files, and honor
# formatter rules exported by the :resourceful dependency.
[
  inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"],
  import_deps: [:resourceful]
]
| 18.4
| 57
| 0.565217
|
9e42d43dc063da0d65149fdc215b2c56820395c6
| 384
|
exs
|
Elixir
|
clients/mirror/test/test_helper.exs
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/mirror/test/test_helper.exs
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/mirror/test/test_helper.exs
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
ExUnit.start()

# Shared test support for the generated Mirror V1 client tests.
defmodule GoogleApi.Mirror.V1.TestHelper do
  # `use`-ing this module pulls in ExUnit.Case (forwarding the caller's
  # options, e.g. async: true) and imports this helper's functions.
  defmacro __using__(opts) do
    quote do
      use ExUnit.Case, unquote(opts)
      import GoogleApi.Mirror.V1.TestHelper
    end
  end

  # Fetches an OAuth2 access token via Goth for the given scope; a list of
  # scopes is joined with spaces first. Crashes (match error) if Goth fails.
  def for_scope(scopes) when is_list(scopes), do: for_scope(Enum.join(scopes, " "))

  def for_scope(scope) do
    {:ok, token} = Goth.Token.for_scope(scope)
    token.token
  end
end
| 20.210526
| 83
| 0.695313
|
9e42d75036356aa4e170642ebe6c2461e172bfa2
| 355
|
exs
|
Elixir
|
config/test.exs
|
suranyami/eliximote
|
62d01bd6419de8289c77a32b3341605e27c1364a
|
[
"MIT"
] | 2
|
2017-02-20T19:20:16.000Z
|
2017-03-31T09:06:34.000Z
|
config/test.exs
|
suranyami/eliximote
|
62d01bd6419de8289c77a32b3341605e27c1364a
|
[
"MIT"
] | null | null | null |
config/test.exs
|
suranyami/eliximote
|
62d01bd6419de8289c77a32b3341605e27c1364a
|
[
"MIT"
] | null | null | null |
use Mix.Config

# Test-environment router settings: PORT env var wins over the 4001
# default; SSL and code reloading are off; errors are reported locally.
config :phoenix, Eliximote.Router,
  port: System.get_env("PORT") || 4001,
  ssl: false,
  code_reload: false,
  cookies: true,
  consider_all_requests_local: true,
  session_key: "_eliximote_key",
  session_secret: "O@0V3K(DI417J3!V+ZZM2ONZ*E19C125+5V13(Q$B51=FJKXG%UYZ)FBE%HWZM9O1(V3GTQSGIL"

# Keep debug-level logging in tests.
config :phoenix, :logger,
  level: :debug
| 22.1875
| 95
| 0.735211
|
9e42e898a70fe67f1a9e12c1601a0e61f09c3319
| 3,899
|
ex
|
Elixir
|
lib/sanbase/metric/behaviour.ex
|
sitedata/sanbase2
|
8da5e44a343288fbc41b68668c6c80ae8547d557
|
[
"MIT"
] | null | null | null |
lib/sanbase/metric/behaviour.ex
|
sitedata/sanbase2
|
8da5e44a343288fbc41b68668c6c80ae8547d557
|
[
"MIT"
] | 1
|
2021-07-24T16:26:03.000Z
|
2021-07-24T16:26:03.000Z
|
lib/sanbase/metric/behaviour.ex
|
sitedata/sanbase2
|
8da5e44a343288fbc41b68668c6c80ae8547d557
|
[
"MIT"
] | null | null | null |
defmodule Sanbase.Metric.Behaviour do
  @moduledoc ~s"""
  Behaviour describing a metric fetcher
  """

  @type slug :: String.t()
  @type metric :: String.t()
  @type interval :: String.t()
  @type options :: Keyword.t()
  @type available_data_types :: :timeseries | :histogram
  @type direction :: :asc | :desc

  @type operator ::
          :greater_than | :less_than | :greater_than_or_equal_to | :less_than_or_equal_to

  # A metric can be selected either by a plain slug or a richer map selector.
  @type selector :: slug | map()

  @type metadata :: %{
          metric: metric,
          min_interval: interval(),
          default_aggregation: atom(),
          available_aggregations: list(atom()),
          available_selectors: list(atom()),
          data_type: available_data_types(),
          complexity_weight: number()
        }

  @type histogram_value :: String.t() | float() | integer()
  @type histogram_label :: String.t()

  @type histogram_data_map :: %{
          range: list(float()) | list(DateTime.t()),
          value: float()
        }

  @type histogram_data :: list(histogram_data_map())

  @type aggregation :: nil | :any | :sum | :avg | :min | :max | :last | :first | :median

  # Fixed typo: `Datetime.t()` -> `DateTime.t()` (unresolvable remote type).
  @type timeseries_data_point :: %{datetime: DateTime.t(), value: float()}

  # Fetches datetime/value pairs for a metric over [from, to] at `interval`
  # resolution. (Fixed typo: `DatetTime.t()` -> `DateTime.t()`.)
  @callback timeseries_data(
              metric :: metric(),
              selector :: selector,
              from :: DateTime.t(),
              to :: DateTime.t(),
              interval :: interval(),
              aggregation :: aggregation
            ) ::
              {:ok, list(timeseries_data_point)} | {:error, String.t()}

  # Fetches bucketed (histogram) data; `limit` caps the number of buckets.
  @callback histogram_data(
              metric :: metric(),
              selector :: selector,
              from :: DateTime.t(),
              to :: DateTime.t(),
              interval :: interval(),
              limit :: non_neg_integer()
            ) :: {:ok, histogram_data} | {:error, String.t()}

  # Aggregates the whole [from, to] range into a single value per selector.
  # (Fixed typo: `DatetTime.t()` -> `DateTime.t()`.)
  @callback aggregated_timeseries_data(
              metric :: metric,
              selector :: selector,
              from :: DateTime.t(),
              to :: DateTime.t(),
              aggregation :: aggregation
            ) :: {:ok, map()} | {:error, String.t()}

  # Returns the slugs whose aggregated value satisfies `operator threshold`.
  @callback slugs_by_filter(
              metric :: metric,
              from :: DateTime.t(),
              to :: DateTime.t(),
              operator :: operator,
              threshold :: number(),
              aggregation :: aggregation
            ) :: {:ok, list(slug())} | {:error, String.t()}

  # Returns slugs ordered by their aggregated value in `direction`.
  @callback slugs_order(
              metric :: metric,
              from :: DateTime.t(),
              to :: DateTime.t(),
              direction :: direction,
              aggregation :: aggregation()
            ) :: {:ok, list(slug())} | {:error, String.t()}

  @callback has_incomplete_data?(metric :: metric) :: true | false

  @callback complexity_weight(metric :: metric) :: number

  @callback first_datetime(metric, selector) ::
              {:ok, DateTime.t()} | {:error, String.t()}

  @callback last_datetime_computed_at(metric, selector) ::
              {:ok, DateTime.t()} | {:error, String.t()}

  @callback human_readable_name(metric) :: {:ok, String.t()} | {:error, String.t()}

  @callback metadata(metric) :: {:ok, metadata()} | {:error, String.t()}

  @callback available_aggregations() :: list(aggregation)

  @callback available_slugs() :: {:ok, list(slug)} | {:error, String.t()}

  @callback available_slugs(metric) :: {:ok, list(slug)} | {:error, String.t()}

  @callback available_metrics() :: list(metric)

  @callback available_metrics(selector) :: {:ok, list(metric)} | {:error, String.t()}

  @callback available_timeseries_metrics() :: list(metric)

  @callback available_histogram_metrics() :: list(metric)

  @callback free_metrics() :: list(metric)

  @callback restricted_metrics() :: list(metric)

  @callback access_map() :: map()

  @callback min_plan_map() :: map()

  # Histogram support is optional for implementers.
  @optional_callbacks [histogram_data: 6]
end
| 31.699187
| 89
| 0.564504
|
9e42ee29ca6f37a01a7d10d8e623bb65c1eba650
| 9,934
|
ex
|
Elixir
|
lib/elixir/lib/module/locals_tracker.ex
|
elkinsd/elixir
|
810965e193cb57b82363e7c0c97b719743b7964f
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/module/locals_tracker.ex
|
elkinsd/elixir
|
810965e193cb57b82363e7c0c97b719743b7964f
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/module/locals_tracker.ex
|
elkinsd/elixir
|
810965e193cb57b82363e7c0c97b719743b7964f
|
[
"Apache-2.0"
] | null | null | null |
# This is an Elixir module responsible for tracking
# calls in order to extract Elixir modules' behaviour
# during compilation time.
#
# ## Implementation
#
# The implementation uses the digraph module to track
# all dependencies. The graph starts with one main vertex:
#
# * `:local` - points to local functions
#
# We can also have the following vertices:
#
# * `Module` - a module that was invoked via an import
# * `{name, arity}` - a local function/arity pair
# * `{:import, name, arity}` - an invoked function/arity import
#
# Each of those vertices can associate to other vertices
# as described below:
#
# * `Module`
# * in neighbours: `{:import, name, arity}`
#
# * `{name, arity}`
# * in neighbours: `:local`, `{name, arity}`
# * out neighbours: `{:import, name, arity}`
#
# * `{:import, name, arity}`
# * in neighbours: `{name, arity}`
# * out neighbours: `Module`
#
# Note that since this is required for bootstrap, we can't use
# any of the `GenServer` conveniences.
defmodule Module.LocalsTracker do
  @moduledoc false

  @timeout 30_000
  @behaviour :gen_server

  @type ref :: pid | module
  @type name :: atom
  @type name_arity :: {name, arity}

  @type local :: {name, arity}
  @type import :: {:import, name, arity}

  # Public API

  @doc """
  Returns all imported modules that had the given
  `{name, arity}` invoked.
  """
  @spec imports_with_dispatch(ref, name_arity) :: [module]
  def imports_with_dispatch(ref, {name, arity}) do
    d = :gen_server.call(to_pid(ref), :digraph, @timeout)
    :digraph.out_neighbours(d, {:import, name, arity})
  end

  @doc """
  Returns all locals that are reachable.

  By default, all public functions are reachable.
  A private function is only reachable if it has
  a public function that it invokes directly.
  """
  @spec reachable(ref) :: [local]
  def reachable(ref) do
    reachable_from(:gen_server.call(to_pid(ref), :digraph, @timeout), :local)
  end

  defp reachable_from(d, starting) do
    :sets.to_list(reduce_reachable(d, starting, :sets.new))
  end

  # Depth-first traversal over the digraph, accumulating every {name, arity}
  # vertex reachable from `vertex` (import/module vertices are filtered out).
  defp reduce_reachable(d, vertex, vertices) do
    neighbours = :digraph.out_neighbours(d, vertex)
    neighbours = (for {_, _} = t <- neighbours, do: t) |> :sets.from_list
    remaining = :sets.subtract(neighbours, vertices)
    vertices = :sets.union(neighbours, vertices)
    :sets.fold(&reduce_reachable(d, &1, &2), vertices, remaining)
  end

  defp to_pid(pid) when is_pid(pid), do: pid
  # When given a module, look the tracker pid up in the module's data table.
  defp to_pid(mod) when is_atom(mod) do
    table = :elixir_module.data_table(mod)
    [{_, val}] = :ets.lookup(table, {:elixir, :locals_tracker})
    val
  end

  # Internal API

  # Starts the tracker and returns its PID.
  @doc false
  def start_link do
    :gen_server.start_link(__MODULE__, [], [])
  end

  # Adds a definition into the tracker. A public
  # definition is connected with the :local node
  # while a private one is left unreachable until
  # a call is made to.
  @doc false
  def add_definition(pid, kind, tuple) when kind in [:def, :defp, :defmacro, :defmacrop] do
    :gen_server.cast(pid, {:add_definition, kind, tuple})
  end

  # Adds and tracks defaults for a definition into the tracker.
  @doc false
  def add_defaults(pid, kind, tuple, defaults) when kind in [:def, :defp, :defmacro, :defmacrop] do
    :gen_server.cast(pid, {:add_defaults, kind, tuple, defaults})
  end

  # Adds a local dispatch to the given target.
  def add_local(pid, to) when is_tuple(to) do
    :gen_server.cast(pid, {:add_local, :local, to})
  end

  # Adds a local dispatch from-to the given target.
  @doc false
  def add_local(pid, from, to) when is_tuple(from) and is_tuple(to) do
    :gen_server.cast(pid, {:add_local, from, to})
  end

  # Adds an import dispatch to the given target.
  @doc false
  def add_import(pid, function, module, target) when is_atom(module) and is_tuple(target) do
    :gen_server.cast(pid, {:add_import, function, module, target})
  end

  # Yanks a local node. Returns its in and out vertices in a tuple.
  @doc false
  def yank(pid, local) do
    :gen_server.call(to_pid(pid), {:yank, local}, @timeout)
  end

  # Reattach a previously yanked node
  @doc false
  def reattach(pid, kind, tuple, neighbours) do
    :gen_server.cast(to_pid(pid), {:reattach, kind, tuple, neighbours})
  end

  # Collecting all conflicting imports with the given functions
  @doc false
  def collect_imports_conflicts(pid, all_defined) do
    d = :gen_server.call(pid, :digraph, @timeout)

    # A conflict exists when a locally defined {name, arity} was also
    # dispatched as an import (the import vertex has both in and out edges).
    for {name, arity} <- all_defined,
        :digraph.in_neighbours(d, {:import, name, arity}) != [],
        n = :digraph.out_neighbours(d, {:import, name, arity}),
        n != [] do
      {n, name, arity}
    end
  end

  # Collect all unused definitions based on the private
  # given also accounting the expected amount of default
  # clauses a private function have.
  @doc false
  def collect_unused_locals(ref, private) do
    d = :gen_server.call(to_pid(ref), :digraph, @timeout)
    {unreachable(d, private), collect_warnings(d, private)}
  end

  # Iteratively removes privates whose callers are all themselves
  # unreachable; whatever remains in the set is truly unreachable.
  defp unreachable(d, private) do
    unreachable = for {tuple, _, _} <- private, do: tuple

    private =
      for {tuple, :defp, _} <- private do
        neighbours = :digraph.in_neighbours(d, tuple)
        neighbours = for {_, _} = t <- neighbours, do: t
        {tuple, :sets.from_list(neighbours)}
      end

    reduce_unreachable(private, [], :sets.from_list(unreachable))
  end

  defp reduce_unreachable([{vertex, callers} | t], acc, unreachable) do
    if :sets.is_subset(callers, unreachable) do
      reduce_unreachable(t, [{vertex, callers} | acc], unreachable)
    else
      # A reachable caller exists: mark vertex reachable and re-examine the
      # previously deferred entries, since they may depend on it.
      reduce_unreachable(acc ++ t, [], :sets.del_element(vertex, unreachable))
    end
  end

  defp reduce_unreachable([], _acc, unreachable) do
    :sets.to_list(unreachable)
  end

  defp collect_warnings(d, private) do
    reachable = reachable_from(d, :local)
    :lists.foldl(&collect_warnings(&1, &2, reachable), [], private)
  end

  # No default clauses: warn only if the definition is never reached.
  defp collect_warnings({tuple, kind, 0}, acc, reachable) do
    if :lists.member(tuple, reachable) do
      acc
    else
      [{:unused_def, tuple, kind} | acc]
    end
  end

  # With default clauses, arities min..max all exist; warn when some or all
  # defaults are never exercised by any invoked arity.
  defp collect_warnings({tuple, kind, default}, acc, reachable) when default > 0 do
    {name, arity} = tuple
    min = arity - default
    max = arity

    invoked = for {n, a} <- reachable, n == name, a in min..max, do: a

    if invoked == [] do
      [{:unused_def, tuple, kind} | acc]
    else
      case :lists.min(invoked) - min do
        0 -> acc
        ^default -> [{:unused_args, tuple} | acc]
        unused_args -> [{:unused_args, tuple, unused_args} | acc]
      end
    end
  end

  @doc false
  def cache_env(pid, env) do
    :gen_server.call(pid, {:cache_env, env}, @timeout)
  end

  @doc false
  def get_cached_env(pid, ref) do
    :gen_server.call(pid, {:get_cached_env, ref}, @timeout)
  end

  # Stops the gen server
  @doc false
  def stop(pid) do
    :gen_server.cast(pid, :stop)
  end

  # Callbacks

  # State is {digraph, env_cache}; the :local root vertex anchors all
  # public definitions.
  def init([]) do
    d = :digraph.new([:protected])
    :digraph.add_vertex(d, :local)
    {:ok, {d, []}}
  end

  @doc false
  # Returns the index of the cached env, reusing the head entry when the
  # same env is cached twice in a row.
  def handle_call({:cache_env, env}, _from, {d, cache}) do
    case cache do
      [{i, ^env} | _] ->
        {:reply, i, {d, cache}}

      t ->
        i = length(t)
        {:reply, i, {d, [{i, env} | t]}}
    end
  end

  def handle_call({:get_cached_env, ref}, _from, {_, cache} = state) do
    {^ref, env} = :lists.keyfind(ref, 1, cache)
    {:reply, env, state}
  end

  # Yanking severs the vertex's out edges and reports its out neighbours so
  # they can be reattached later (in edges are reported empty).
  def handle_call({:yank, local}, _from, {d, _} = state) do
    out_vertices = :digraph.out_neighbours(d, local)
    :digraph.del_edges(d, :digraph.out_edges(d, local))
    {:reply, {[], out_vertices}, state}
  end

  def handle_call(:digraph, _from, {d, _} = state) do
    {:reply, d, state}
  end

  @doc false
  def handle_info(_msg, state) do
    {:noreply, state}
  end

  def handle_cast({:add_local, from, to}, {d, _} = state) do
    handle_add_local(d, from, to)
    {:noreply, state}
  end

  def handle_cast({:add_import, function, module, {name, arity}}, {d, _} = state) do
    handle_import(d, function, module, name, arity)
    {:noreply, state}
  end

  def handle_cast({:add_definition, kind, tuple}, {d, _} = state) do
    handle_add_definition(d, kind, tuple)
    {:noreply, state}
  end

  # Each default clause materializes an intermediate arity that chains to
  # the next: {name, min}..{name, arity} are linked head-to-tail.
  def handle_cast({:add_defaults, kind, {name, arity}, defaults}, {d, _} = state) do
    for i <- :lists.seq(arity - defaults, arity - 1) do
      handle_add_definition(d, kind, {name, i})
      handle_add_local(d, {name, i}, {name, i + 1})
    end

    {:noreply, state}
  end

  def handle_cast({:reattach, _kind, tuple, {in_neigh, out_neigh}}, {d, _} = state) do
    for from <- in_neigh do
      :digraph.add_vertex(d, from)
      replace_edge!(d, from, tuple)
    end

    for to <- out_neigh do
      :digraph.add_vertex(d, to)
      replace_edge!(d, tuple, to)
    end

    {:noreply, state}
  end

  def handle_cast(:stop, state) do
    {:stop, :normal, state}
  end

  @doc false
  def terminate(_reason, _state) do
    :ok
  end

  @doc false
  def code_change(_old, state, _extra) do
    {:ok, state}
  end

  # Wires up: function -> {:import, name, arity} -> module. `function` may
  # be nil when the import happens outside any local definition.
  defp handle_import(d, function, module, name, arity) do
    :digraph.add_vertex(d, module)

    tuple = {:import, name, arity}
    :digraph.add_vertex(d, tuple)
    replace_edge!(d, tuple, module)

    if function != nil do
      replace_edge!(d, function, tuple)
    end

    :ok
  end

  defp handle_add_local(d, from, to) do
    :digraph.add_vertex(d, to)
    replace_edge!(d, from, to)
  end

  # Public definitions hang off :local; private ones stay detached until
  # something calls them.
  defp handle_add_definition(d, public, tuple) when public in [:def, :defmacro] do
    :digraph.add_vertex(d, tuple)
    replace_edge!(d, :local, tuple)
  end

  defp handle_add_definition(d, private, tuple) when private in [:defp, :defmacrop] do
    :digraph.add_vertex(d, tuple)
  end

  # Adds the edge only if it does not already exist, keeping the graph free
  # of duplicate edges.
  defp replace_edge!(d, from, to) do
    _ = unless :lists.member(to, :digraph.out_neighbours(d, from)) do
      [:"$e" | _] = :digraph.add_edge(d, from, to)
    end
    :ok
  end
end
| 27.518006
| 99
| 0.648983
|
9e42f196f80fad3f6940c1f0ade1a55e35278231
| 14,388
|
ex
|
Elixir
|
deps/plug/lib/plug/static.ex
|
matin360/TaksoWebApp
|
4dd8fef625ecc2364fe1d6e18e73c96c59d15349
|
[
"MIT"
] | 1
|
2019-11-11T21:48:20.000Z
|
2019-11-11T21:48:20.000Z
|
deps/plug/lib/plug/static.ex
|
matin360/TaksoWebApp
|
4dd8fef625ecc2364fe1d6e18e73c96c59d15349
|
[
"MIT"
] | 4
|
2021-03-04T13:00:52.000Z
|
2021-03-12T12:42:09.000Z
|
deps/plug/lib/plug/static.ex
|
adrianomota/blog
|
ef3b2d2ed54f038368ead8234d76c18983caa75b
|
[
"MIT"
] | null | null | null |
defmodule Plug.Static do
@moduledoc """
A plug for serving static assets.
It requires two options:
* `:at` - the request path to reach for static assets.
It must be a string.
* `:from` - the file system path to read static assets from.
It can be either: a string containing a file system path, an
atom representing the application name (where assets will
be served from `priv/static`), a tuple containing the
application name and the directory to serve assets from (besides
`priv/static`), or an MFA tuple.
The preferred form is to use `:from` with an atom or tuple, since
it will make your application independent from the starting directory.
For example, if you pass:
plug Plug.Static, from: "priv/app/path"
Plug.Static will be unable to serve assets if you build releases
or if you change the current directory. Instead do:
plug Plug.Static, from: {:app_name, "priv/app/path"}
If a static asset cannot be found, `Plug.Static` simply forwards
the connection to the rest of the pipeline.
## Cache mechanisms
`Plug.Static` uses etags for HTTP caching. This means browsers/clients
should cache assets on the first request and validate the cache on
following requests, not downloading the static asset once again if it
has not changed. The cache-control for etags is specified by the
`cache_control_for_etags` option and defaults to `"public"`.
However, `Plug.Static` also supports direct cache control by using
versioned query strings. If the request query string starts with
"?vsn=", `Plug.Static` assumes the application is versioning assets
and does not set the `ETag` header, meaning the cache behaviour will
be specified solely by the `cache_control_for_vsn_requests` config,
which defaults to `"public, max-age=31536000"`.
## Options
* `:gzip` - given a request for `FILE`, serves `FILE.gz` if it exists
in the static directory and if the `accept-encoding` header is set
to allow gzipped content (defaults to `false`).
* `:brotli` - given a request for `FILE`, serves `FILE.br` if it exists
in the static directory and if the `accept-encoding` header is set
to allow brotli-compressed content (defaults to `false`).
`FILE.br` is checked first and dominates `FILE.gz` due to the better
compression ratio.
* `:cache_control_for_etags` - sets the cache header for requests
that use etags. Defaults to `"public"`.
* `:etag_generation` - specify a `{module, function, args}` to be used
to generate an etag. The `path` of the resource will be passed to
the function, as well as the `args`. If this option is not supplied,
etags will be generated based off of file size and modification time.
Note it is [recommended for the etag value to be quoted](https://tools.ietf.org/html/rfc7232#section-2.3),
which Plug won't do automatically.
* `:cache_control_for_vsn_requests` - sets the cache header for
requests starting with "?vsn=" in the query string. Defaults to
`"public, max-age=31536000"`.
* `:only` - filters which requests to serve. This is useful to avoid
file system access on every request when this plug is mounted
at `"/"`. For example, if `only: ["images", "favicon.ico"]` is
specified, only files in the "images" directory and the
"favicon.ico" file will be served by `Plug.Static`.
Note that `Plug.Static` matches these filters against request
uri and not against the filesystem. When requesting
a file with name containing non-ascii or special characters,
you should use urlencoded form. For example, you should write
`only: ["file%20name"]` instead of `only: ["file name"]`.
Defaults to `nil` (no filtering).
* `:only_matching` - a relaxed version of `:only` that will
serve any request as long as one of the given values matches the
given path. For example, `only_matching: ["images", "favicon"]`
will match any request that starts at "images" or "favicon",
be it "/images/foo.png", "/images-high/foo.png", "/favicon.ico"
or "/favicon-high.ico". Such matches are useful when serving
digested files at the root. Defaults to `nil` (no filtering).
* `:headers` - other headers to be set when serving static assets. Specify either
an enum of key-value pairs or a `{module, function, args}` to return an enum. The
`conn` will be passed to the function, as well as the `args`.
* `:content_types` - custom MIME type mapping. As a map with filename as key
and content type as value. For example:
`content_types: %{"apple-app-site-association" => "application/json"}`.
## Examples
This plug can be mounted in a `Plug.Builder` pipeline as follows:
defmodule MyPlug do
use Plug.Builder
plug Plug.Static,
at: "/public",
from: :my_app,
only: ~w(images robots.txt)
plug :not_found
def not_found(conn, _) do
send_resp(conn, 404, "not found")
end
end
"""
  @behaviour Plug

  # Only safe, side-effect-free methods are served; everything else falls
  # through to the rest of the pipeline.
  @allowed_methods ~w(GET HEAD)

  import Plug.Conn
  alias Plug.Conn

  # In this module, the `:prim_file` Erlang module along with the `:file_info`
  # record are used instead of the more common and Elixir-y `File` module and
  # `File.Stat` struct, respectively. The reason behind this is performance: all
  # the `File` operations pass through a single process in order to support node
  # operations that we simply don't need when serving assets.
  require Record
  Record.defrecordp(:file_info, Record.extract(:file_info, from_lib: "kernel/include/file.hrl"))

  # Raised when a decoded request path contains traversal segments or
  # illegal characters; `plug_status: 400` maps it to a Bad Request.
  defmodule InvalidPathError do
    defexception message: "invalid path for static asset", plug_status: 400
  end
@impl true
def init(opts) do
from =
case Keyword.fetch!(opts, :from) do
{_, _} = from -> from
{_, _, _} = from -> from
from when is_atom(from) -> {from, "priv/static"}
from when is_binary(from) -> from
_ -> raise ArgumentError, ":from must be an atom, a binary or a tuple"
end
%{
gzip?: Keyword.get(opts, :gzip, false),
brotli?: Keyword.get(opts, :brotli, false),
only_rules: {Keyword.get(opts, :only, []), Keyword.get(opts, :only_matching, [])},
qs_cache: Keyword.get(opts, :cache_control_for_vsn_requests, "public, max-age=31536000"),
et_cache: Keyword.get(opts, :cache_control_for_etags, "public"),
et_generation: Keyword.get(opts, :etag_generation, nil),
headers: Keyword.get(opts, :headers, %{}),
content_types: Keyword.get(opts, :content_types, %{}),
from: from,
at: opts |> Keyword.fetch!(:at) |> Plug.Router.Utils.split()
}
end
  @impl true
  # Serves GET/HEAD requests whose path lies under the configured mount
  # point (:at) and passes the :only/:only_matching filters; any other
  # request is passed through untouched.
  def call(
        conn = %Conn{method: meth},
        %{at: at, only_rules: only_rules, from: from, gzip?: gzip?, brotli?: brotli?} = options
      )
      when meth in @allowed_methods do
    segments = subset(at, conn.path_info)

    if allowed?(only_rules, segments) do
      # Filters above match the raw (still URL-encoded) segments; decoding
      # and validation happen only once the request is known to be in scope.
      segments = Enum.map(segments, &uri_decode/1)

      if invalid_path?(segments) do
        raise InvalidPathError
      end

      path = path(from, segments)
      range = get_req_header(conn, "range")
      encoding = file_encoding(conn, path, range, gzip?, brotli?)
      serve_static(encoding, conn, segments, range, options)
    else
      conn
    end
  end

  # Non-GET/HEAD methods are never handled by this plug.
  def call(conn, _options) do
    conn
  end
defp uri_decode(path) do
# TODO: Remove rescue as this can't fail from Elixir v1.13
try do
URI.decode(path)
rescue
ArgumentError ->
raise InvalidPathError
end
end
defp allowed?(_only_rules, []), do: false
defp allowed?({[], []}, _list), do: true
defp allowed?({full, prefix}, [h | _]) do
h in full or (prefix != [] and match?({0, _}, :binary.match(h, prefix)))
end
  # Sends the previously stat'ed file. `content_encoding` is nil for the
  # plain file or "br"/"gzip" when a precompressed sibling was selected by
  # file_encoding/5.
  defp serve_static({content_encoding, file_info, path}, conn, segments, range, options) do
    %{
      qs_cache: qs_cache,
      et_cache: et_cache,
      et_generation: et_generation,
      headers: headers,
      content_types: types
    } = options

    case put_cache_header(conn, qs_cache, et_cache, et_generation, file_info, path) do
      {:stale, conn} ->
        # Custom :content_types mapping wins over MIME inference by extension.
        filename = List.last(segments)
        content_type = Map.get(types, filename) || MIME.from_path(filename)

        conn
        |> put_resp_header("content-type", content_type)
        |> put_resp_header("accept-ranges", "bytes")
        |> maybe_add_encoding(content_encoding)
        |> merge_headers(headers)
        |> serve_range(file_info, path, range, options)

      {:fresh, conn} ->
        # Client already holds the current etag: 304 with an empty body.
        conn
        |> maybe_add_vary(options)
        |> send_resp(304, "")
        |> halt()
    end
  end

  # No readable regular file was found: fall through so another plug can
  # handle the request (typically producing a 404 further down the pipeline).
  defp serve_static(:error, conn, _segments, _range, _options) do
    conn
  end
  # With exactly one Range header, honour a satisfiable "bytes" spec via a
  # partial response; any unparsable/unsatisfiable spec — or zero/multiple
  # Range headers — falls back to sending the whole file.
  defp serve_range(conn, file_info, path, [range], options) do
    file_info(size: file_size) = file_info

    with %{"bytes" => bytes} <- Plug.Conn.Utils.params(range),
         {range_start, range_end} <- start_and_end(bytes, file_size) do
      send_range(conn, path, range_start, range_end, file_size, options)
    else
      _ -> send_entire_file(conn, path, options)
    end
  end

  defp serve_range(conn, _file_info, path, _range, options) do
    send_entire_file(conn, path, options)
  end
defp start_and_end("-" <> rest, file_size) do
case Integer.parse(rest) do
{last, ""} when last > 0 and last <= file_size -> {file_size - last, file_size - 1}
_ -> :error
end
end
defp start_and_end(range, file_size) do
case Integer.parse(range) do
{first, "-"} when first >= 0 ->
{first, file_size - 1}
{first, "-" <> rest} when first >= 0 ->
case Integer.parse(rest) do
{last, ""} when last >= first -> {first, min(last, file_size - 1)}
_ -> :error
end
_ ->
:error
end
end
  # A range covering the entire file is served as a plain 200 response.
  defp send_range(conn, path, 0, range_end, file_size, options) when range_end == file_size - 1 do
    send_entire_file(conn, path, options)
  end

  # Sends the requested slice as a 206 with a Content-Range header; range
  # bounds are zero-based and inclusive, hence the + 1 for the length.
  defp send_range(conn, path, range_start, range_end, file_size, _options) do
    length = range_end - range_start + 1

    conn
    |> put_resp_header("content-range", "bytes #{range_start}-#{range_end}/#{file_size}")
    |> send_file(206, path, range_start, length)
    |> halt()
  end
defp send_entire_file(conn, path, options) do
conn
|> maybe_add_vary(options)
|> send_file(200, path)
|> halt()
end
defp maybe_add_encoding(conn, nil), do: conn
defp maybe_add_encoding(conn, ce), do: put_resp_header(conn, "content-encoding", ce)
  # Adds "vary: Accept-Encoding" whenever compression is enabled for this
  # mount, even on responses that are not compressed themselves.
  defp maybe_add_vary(conn, %{gzip?: gzip?, brotli?: brotli?}) do
    # If we serve gzip or brotli at any moment, we need to set the proper vary
    # header regardless of whether we are serving gzip content right now.
    # See: http://www.fastly.com/blog/best-practices-for-using-the-vary-header/
    if gzip? or brotli? do
      update_in(conn.resp_headers, &[{"vary", "Accept-Encoding"} | &1])
    else
      conn
    end
  end
  # Versioned requests ("?vsn=...") are treated as immutable: they get the
  # long-lived qs_cache value and are always served (:stale).
  defp put_cache_header(
         %Conn{query_string: "vsn=" <> _} = conn,
         qs_cache,
         _et_cache,
         _et_generation,
         _file_info,
         _path
       )
       when is_binary(qs_cache) do
    {:stale, put_resp_header(conn, "cache-control", qs_cache)}
  end

  # Unversioned requests rely on ETags: if the client already holds the
  # current etag (if-none-match), the caller may answer 304 (:fresh).
  defp put_cache_header(conn, _qs_cache, et_cache, et_generation, file_info, path)
       when is_binary(et_cache) do
    etag = etag_for_path(file_info, et_generation, path)

    conn =
      conn
      |> put_resp_header("cache-control", et_cache)
      |> put_resp_header("etag", etag)

    if etag in get_req_header(conn, "if-none-match") do
      {:fresh, conn}
    else
      {:stale, conn}
    end
  end

  # Caching disabled (nil/non-binary cache settings): always serve the file.
  defp put_cache_header(conn, _, _, _, _, _) do
    {:stale, conn}
  end
  # Computes the etag either via a user-provided {module, function, args}
  # generator (invoked with the file path prepended to args) or, by
  # default, from a hash of the file's size and mtime, double-quoted as
  # the ETag header requires.
  defp etag_for_path(file_info, et_generation, path) do
    case et_generation do
      {module, function, args} ->
        apply(module, function, [path | args])

      nil ->
        file_info(size: size, mtime: mtime) = file_info
        <<?", {size, mtime} |> :erlang.phash2() |> Integer.to_string(16)::binary, ?">>
    end
  end
  defp file_encoding(conn, path, [_range], _gzip?, _brotli?) do
    # We do not support compression for range queries.
    file_encoding(conn, path, nil, false, false)
  end

  # Picks the best on-disk variant: brotli, then gzip (each only when
  # enabled for the mount and accepted by the client), then the plain
  # file. Returns {content_encoding | nil, file_info, actual_path} or
  # :error when no regular file exists. Note `and` binds tighter than
  # `&&`, so the filesystem is only stat'ed once the cheaper checks pass.
  defp file_encoding(conn, path, _range, gzip?, brotli?) do
    cond do
      file_info = brotli? and accept_encoding?(conn, "br") && regular_file_info(path <> ".br") ->
        {"br", file_info, path <> ".br"}

      file_info = gzip? and accept_encoding?(conn, "gzip") && regular_file_info(path <> ".gz") ->
        {"gzip", file_info, path <> ".gz"}

      file_info = regular_file_info(path) ->
        {nil, file_info, path}

      true ->
        :error
    end
  end
defp regular_file_info(path) do
case :prim_file.read_file_info(path) do
{:ok, file_info(type: :regular) = file_info} ->
file_info
_ ->
nil
end
end
defp accept_encoding?(conn, encoding) do
encoding? = &String.contains?(&1, [encoding, "*"])
Enum.any?(get_req_header(conn, "accept-encoding"), fn accept ->
accept |> Plug.Conn.Utils.list() |> Enum.any?(encoding?)
end)
end
defp path({module, function, arguments}, segments)
when is_atom(module) and is_atom(function) and is_list(arguments),
do: Enum.join([apply(module, function, arguments) | segments], "/")
defp path({app, from}, segments) when is_atom(app) and is_binary(from),
do: Enum.join([Application.app_dir(app), from | segments], "/")
defp path(from, segments),
do: Enum.join([from | segments], "/")
defp subset([h | expected], [h | actual]), do: subset(expected, actual)
defp subset([], actual), do: actual
defp subset(_, _), do: []
defp invalid_path?(list) do
invalid_path?(list, :binary.compile_pattern(["/", "\\", ":", "\0"]))
end
defp invalid_path?([h | _], _match) when h in [".", "..", ""], do: true
defp invalid_path?([h | t], match), do: String.contains?(h, match) or invalid_path?(t)
defp invalid_path?([], _match), do: false
defp merge_headers(conn, {module, function, args}) do
merge_headers(conn, apply(module, function, [conn | args]))
end
defp merge_headers(conn, headers) do
merge_resp_headers(conn, headers)
end
end
| 33.774648
| 112
| 0.650959
|
9e434f4352e058c06a64c37d8c623211fa1dacff
| 6,964
|
exs
|
Elixir
|
integration_test/cases/client_test.exs
|
lukebakken/db_connection
|
aa53f2d9c78aa6b5f6a9c0615459e97ec89f0c32
|
[
"Apache-2.0"
] | 227
|
2016-06-16T13:56:02.000Z
|
2022-03-09T23:03:58.000Z
|
integration_test/cases/client_test.exs
|
lukebakken/db_connection
|
aa53f2d9c78aa6b5f6a9c0615459e97ec89f0c32
|
[
"Apache-2.0"
] | 198
|
2016-06-20T08:08:15.000Z
|
2022-03-06T17:54:37.000Z
|
integration_test/cases/client_test.exs
|
lukebakken/db_connection
|
aa53f2d9c78aa6b5f6a9c0615459e97ec89f0c32
|
[
"Apache-2.0"
] | 110
|
2016-06-20T03:50:39.000Z
|
2022-03-03T20:53:01.000Z
|
defmodule ClientTest do
  use ExUnit.Case, async: true

  alias TestPool, as: P
  alias TestAgent, as: A
  alias TestQuery, as: Q
  alias TestResult, as: R

  # Each test scripts TestAgent with a `stack` of connection-callback
  # results consumed in order, then asserts via A.record/1 the exact
  # callback sequence the pool performed.

  # Checkout holder dying mid-run must disconnect and reconnect the
  # connection, after which the pool is usable again.
  test "reconnect when client exits" do
    stack = [
      {:ok, :state},
      {:idle, :state},
      :ok,
      fn(opts) ->
        send(opts[:parent], :reconnected)
        {:ok, :state}
      end,
      {:idle, :state},
      {:idle, :state}]
    {:ok, agent} = A.start_link(stack)

    opts = [agent: agent, parent: self()]
    {:ok, pool} = P.start_link(opts)
    _ = spawn(fn() ->
      _ = Process.put(:agent, agent)
      P.run(pool, fn(_) ->
        Process.exit(self(), :shutdown)
      end)
    end)

    assert_receive :reconnected
    assert P.run(pool, fn(_) -> :result end) == :result

    assert [
      {:connect, _},
      {:handle_status, _},
      {:disconnect, _},
      {:connect, _},
      {:handle_status, _},
      {:handle_status, _}] = A.record(agent)
  end

  # A run/2 that outlives its checkout timeout loses the connection:
  # further use inside the closure errors, while a new client (gated via
  # {:go, parent}) gets the reconnected connection.
  test "reconnect when client timeout" do
    stack = [
      {:ok, :state},
      {:idle, :state},
      :ok,
      fn(opts) ->
        send(opts[:parent], :reconnected)
        {:ok, :state}
      end,
      {:idle, :state},
      {:idle, :state},
      {:idle, :state}]
    {:ok, agent} = A.start_link(stack)

    parent = self()
    opts = [agent: agent, parent: parent]
    {:ok, pool} = P.start_link(opts)
    pid = spawn_link(fn() ->
      _ = Process.put(:agent, agent)
      assert_receive {:go, ^parent}
      assert P.run(pool, fn(_) -> :result end) == :result
      send(parent, {:done, self()})
    end)

    P.run(pool, fn(conn) ->
      assert_receive :reconnected
      assert {:error, %DBConnection.ConnectionError{}} =
        P.execute(conn, %Q{}, [:first])
      send(pid, {:go, parent})
      assert_receive {:done, ^pid}
    end, [timeout: 100])

    assert [
      {:connect, _},
      {:handle_status, _},
      {:disconnect, _},
      {:connect, _},
      {:handle_status, _},
      {:handle_status, _}] = A.record(agent)
  end

  # An execute that completes (:ok) only after the timeout fired still
  # lets run/2 return normally; the connection is recycled underneath.
  test "reconnect when client timeout and then returns ok even when disconnected" do
    stack = [
      {:ok, :state},
      {:idle, :state},
      fn(_, _, _, _) ->
        assert_receive :reconnected
        {:ok, %Q{}, %R{}, :new_state}
      end,
      :ok,
      fn(opts) ->
        send(opts[:parent], :reconnected)
        {:ok, :new_state}
      end,
      {:idle, :new_state},
      {:idle, :new_state},
      {:ok, %Q{}, %R{}, :newer_state}]
    {:ok, agent} = A.start_link(stack)

    parent = self()
    opts = [agent: agent, parent: parent]
    {:ok, pool} = P.start_link(opts)

    assert P.run(pool, fn(conn) ->
      assert {:ok, %Q{}, %R{}} =
        P.execute(conn, %Q{}, [:first])
      spawn_link(fn() ->
        _ = Process.put(:agent, agent)
        assert P.run(pool, fn(_) -> :result end) == :result
        send(parent, :done)
      end)
      :result
    end, [timeout: 100]) == :result

    assert_receive :done
    assert P.execute(pool, %Q{}, [:second]) == {:ok, %Q{}, %R{}}

    assert [
      {:connect, _},
      {:handle_status, _},
      {:handle_execute, [%Q{}, [:first], _, :state]},
      {:disconnect, _},
      {:connect, _},
      {:handle_status, _},
      {:handle_status, _},
      {:handle_execute, [%Q{}, [:second], _, :new_state]}] = A.record(agent)
  end

  # Same scenario, but the slow execute itself disconnects: the caller
  # sees the pool's "connection was closed" ConnectionError message.
  test "reconnect when client timeout and then returns error when disconnected" do
    stack = [
      {:ok, :state},
      {:idle, :state},
      fn(_, _, _, _) ->
        assert_receive :reconnected
        {:disconnect, DBConnection.ConnectionError.exception("oops"), :new_state}
      end,
      :ok,
      fn(opts) ->
        send(opts[:parent], :reconnected)
        {:ok, :new_state}
      end,
      {:idle, :new_state},
      {:idle, :new_state},
      {:ok, %Q{}, %R{}, :newer_state}]
    {:ok, agent} = A.start_link(stack)

    parent = self()
    opts = [agent: agent, parent: parent]
    {:ok, pool} = P.start_link(opts)

    assert P.run(pool, fn(conn) ->
      message =
        "oops (the connection was closed by the pool, possibly due to a timeout or because the pool has been terminated)"
      assert {:error, %DBConnection.ConnectionError{message: ^message}} =
        P.execute(conn, %Q{}, [:first])
      spawn_link(fn() ->
        _ = Process.put(:agent, agent)
        assert P.run(pool, fn(_) -> :result end) == :result
        send(parent, :done)
      end)
      :result
    end, [timeout: 100]) == :result

    assert_receive :done
    assert P.execute(pool, %Q{}, [:second]) == {:ok, %Q{}, %R{}}

    assert [
      {:connect, _},
      {:handle_status, _},
      {:handle_execute, [%Q{}, [:first], _, :state]},
      {:disconnect, _},
      {:connect, _},
      {:handle_status, _},
      {:handle_status, _},
      {:handle_execute, [%Q{}, [:second], _, :new_state]}] = A.record(agent)
  end

  # A callback crash (throw) propagates to the caller but the pool still
  # reconnects; note no explicit :disconnect appears in the record.
  test "reconnect when client timeout and then crashes" do
    stack = [
      {:ok, :state},
      {:idle, :state},
      fn(_, _, _, _) ->
        throw(:oops)
      end,
      fn(opts) ->
        send(opts[:parent], :reconnected)
        {:ok, :new_state}
      end,
      {:idle, :new_state},
      {:idle, :new_state},
      {:ok, %Q{}, %R{}, :newer_state}]
    {:ok, agent} = A.start_link(stack)

    parent = self()
    opts = [agent: agent, parent: parent]
    {:ok, pool} = P.start_link(opts)

    try do
      P.run(pool, fn(conn) ->
        spawn_link(fn ->
          _ = Process.put(:agent, agent)
          assert P.run(pool, fn(_) -> :result end) == :result
          send(parent, :done)
        end)
        P.execute(conn, %Q{}, [:first])
      end, [timeout: 100])
    catch
      :throw, :oops ->
        :ok
    end

    assert_receive :done
    assert P.execute(pool, %Q{}, [:second]) == {:ok, %Q{}, %R{}}

    assert [
      {:connect, _},
      {:handle_status, _},
      {:handle_execute, [%Q{}, [:first], _, :state]},
      {:connect, _},
      {:handle_status, _},
      {:handle_status, _},
      {:handle_execute, [%Q{}, [:second], _, :new_state]}] = A.record(agent)
  end

  # A conn captured from a finished run/2 must be rejected both outside
  # and inside a later checkout.
  test "fails when using an outdated connection reference" do
    stack = [
      {:ok, :state},
      {:idle, :state},
      {:idle, :state},
      {:idle, :new_state},
      {:idle, :new_state},
    ]
    {:ok, agent} = A.start_link(stack)

    parent = self()
    opts = [agent: agent, parent: parent]
    {:ok, pool} = P.start_link(opts)

    outdated_conn = P.run(pool, fn conn -> conn end)

    assert_raise RuntimeError, ~r"an outdated connection has been given to DBConnection", fn ->
      P.execute(outdated_conn, %Q{}, [:first])
    end

    P.run(pool, fn _ ->
      assert_raise RuntimeError, ~r"an outdated connection has been given to DBConnection", fn ->
        P.execute(outdated_conn, %Q{}, [:first])
      end
    end)

    assert [
      {:connect, _},
      {:handle_status, _},
      {:handle_status, _},
      {:handle_status, _},
      {:handle_status, _}] = A.record(agent)
  end
end
| 25.602941
| 121
| 0.524986
|
9e434fc2df85514a7161cf8d81249a32c7385e8c
| 2,011
|
ex
|
Elixir
|
lib/sitemapper/index_generator.ex
|
kotsius/sitemapper
|
67807a9c4d609a86feb7fd890f5ff48c7bb08223
|
[
"MIT"
] | 23
|
2019-10-17T22:13:21.000Z
|
2021-06-26T05:25:22.000Z
|
lib/sitemapper/index_generator.ex
|
kotsius/sitemapper
|
67807a9c4d609a86feb7fd890f5ff48c7bb08223
|
[
"MIT"
] | 5
|
2020-07-17T15:20:18.000Z
|
2021-06-27T10:20:56.000Z
|
lib/sitemapper/index_generator.ex
|
kotsius/sitemapper
|
67807a9c4d609a86feb7fd890f5ff48c7bb08223
|
[
"MIT"
] | 6
|
2019-11-25T15:02:12.000Z
|
2021-06-26T05:06:59.000Z
|
defmodule Sitemapper.IndexGenerator do
  @moduledoc """
  Incrementally builds a sitemap index (`<sitemapindex>`) XML document,
  enforcing the sitemaps.org limits on entry count and byte size.
  """

  alias Sitemapper.{Encoder, File, SitemapReference}

  # Protocol limits: 50MB and 50,000 sitemap references per index file.
  @max_length 52_428_800
  @max_count 50_000

  @dec "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
  @index_start "<sitemapindex xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.sitemaps.org/schemas/sitemap/0.9 http://www.sitemaps.org/schemas/sitemap/0.9/siteindex.xsd\" xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">"
  @index_end "</sitemapindex>"
  @line_sep "\n"

  # Precomputed so add_sitemap/2 can reject any element that would push
  # the finalized document (body + closing tag) past @max_length.
  @line_sep_length String.length(@line_sep)
  @end_length String.length(@index_end) + @line_sep_length
  @max_length_offset @max_length - @end_length

  @doc """
  Returns a fresh `File` accumulator containing the XML declaration and
  the opening `<sitemapindex>` tag as iodata.
  """
  def new() do
    body = [@dec, @line_sep, @index_start, @line_sep]
    length = IO.iodata_length(body)
    %File{count: 0, length: length, body: body}
  end

  @doc """
  Appends one `<sitemap>` element for `reference`, returning the grown
  accumulator or `{:error, :over_length | :over_count}` when the addition
  would exceed the protocol limits.
  """
  def add_sitemap(
        %File{count: count, length: length, body: body},
        %SitemapReference{} = reference
      ) do
    element =
      sitemap_element(reference)
      |> XmlBuilder.generate()

    element_length = IO.iodata_length(element)
    new_length = length + element_length + @line_sep_length
    new_count = count + 1

    cond do
      new_length >= @max_length_offset ->
        {:error, :over_length}

      new_count > @max_count ->
        {:error, :over_count}

      true ->
        new_body = [body, element, @line_sep]
        %File{count: new_count, length: new_length, body: new_body}
    end
  end

  @doc """
  Closes the document with the `</sitemapindex>` tag.
  """
  def finalize(%File{count: count, length: length, body: body}) do
    new_body = [body, @index_end, @line_sep]
    new_length = length + @end_length
    %File{count: count, length: new_length, body: new_body}
  end

  # Builds the {:sitemap, [...]} element, emitting only the non-nil
  # :loc/:lastmod fields of the reference.
  defp sitemap_element(%SitemapReference{} = reference) do
    elements =
      [:loc, :lastmod]
      |> Enum.reduce([], fn k, acc ->
        case Map.get(reference, k) do
          nil ->
            acc

          v ->
            acc ++ [{k, Encoder.encode(v)}]
        end
      end)

    XmlBuilder.element(:sitemap, elements)
  end
end
| 28.728571
| 266
| 0.641472
|
9e43534cdcbc0018fa844b0557479fe3af8b27cc
| 371
|
ex
|
Elixir
|
lib/nudge_api/matches/match.ex
|
feelja-tech/feelja-api
|
03ce15430460cf2dac24a7740242c7e5ac5c5804
|
[
"MIT"
] | null | null | null |
lib/nudge_api/matches/match.ex
|
feelja-tech/feelja-api
|
03ce15430460cf2dac24a7740242c7e5ac5c5804
|
[
"MIT"
] | null | null | null |
lib/nudge_api/matches/match.ex
|
feelja-tech/feelja-api
|
03ce15430460cf2dac24a7740242c7e5ac5c5804
|
[
"MIT"
] | null | null | null |
defmodule NudgeApi.Matches.Match do
  @moduledoc """
  Ecto schema for the "matches" table. A match is linked to users through
  the associated `NudgeApi.Matches.UserMatch` join records.
  """

  use Ecto.Schema
  import Ecto.Changeset

  schema "matches" do
    # NOTE(review): presumably nil while pending and set once the match is
    # confirmed — verify against callers.
    field :finalized_at, :utc_datetime
    has_many :user_matches, NudgeApi.Matches.UserMatch

    timestamps(type: :utc_datetime)
  end

  @doc false
  def changeset(match, attrs) do
    match
    |> cast(attrs, [:finalized_at])
    # No required fields at the moment; empty validate_required is a no-op
    # kept for symmetry with other schemas.
    |> validate_required([])
  end
end
| 18.55
| 54
| 0.703504
|
9e4354bdff351a8c0cf22f8bc1f48440460365d8
| 1,112
|
ex
|
Elixir
|
lib/options_tracker_web/channels/user_socket.ex
|
mgwidmann/options_tracker
|
5520f88a9a5873842a63a23d4bcc5da82a51feba
|
[
"MIT"
] | 12
|
2020-06-25T17:25:15.000Z
|
2021-09-30T20:13:33.000Z
|
lib/options_tracker_web/channels/user_socket.ex
|
mgwidmann/options_tracker
|
5520f88a9a5873842a63a23d4bcc5da82a51feba
|
[
"MIT"
] | 5
|
2020-08-05T03:12:31.000Z
|
2021-07-15T04:59:03.000Z
|
lib/options_tracker_web/channels/user_socket.ex
|
mgwidmann/options_tracker
|
5520f88a9a5873842a63a23d4bcc5da82a51feba
|
[
"MIT"
] | 2
|
2021-07-03T17:20:15.000Z
|
2021-09-01T15:38:58.000Z
|
defmodule OptionsTrackerWeb.UserSocket do
  @moduledoc """
  Default Phoenix socket: accepts every connection anonymously and
  declares no channels yet.
  """

  use Phoenix.Socket

  ## Channels
  # channel "room:*", OptionsTrackerWeb.RoomChannel

  # Socket params are passed from the client and can
  # be used to verify and authenticate a user. After
  # verification, you can put default assigns into
  # the socket that will be set for all channels, ie
  #
  #     {:ok, assign(socket, :user_id, verified_user_id)}
  #
  # To deny connection, return `:error`.
  #
  # See `Phoenix.Token` documentation for examples in
  # performing token verification on connect.
  @impl true
  def connect(_params, socket, _connect_info) do
    # No authentication is performed: every connection is accepted.
    {:ok, socket}
  end

  # Socket id's are topics that allow you to identify all sockets for a given user:
  #
  #     def id(socket), do: "user_socket:#{socket.assigns.user_id}"
  #
  # Would allow you to broadcast a "disconnect" event and terminate
  # all active sockets and channels for a given user:
  #
  #     OptionsTrackerWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
  #
  # Returning `nil` makes this socket anonymous.
  @impl true
  def id(_socket), do: nil
end
| 30.888889
| 89
| 0.70054
|
9e43ace3d2c6cc5a31778d0fab3137bfa7a1d118
| 393
|
ex
|
Elixir
|
lib/api/auth/errors.ex
|
ARKultur/naboo
|
ab26c2e82cdc485e23d428fbb1d4798f1fb1388b
|
[
"MIT"
] | 16
|
2021-12-14T12:25:59.000Z
|
2021-12-16T21:56:27.000Z
|
lib/api/auth/errors.ex
|
ARKultur/naboo
|
ab26c2e82cdc485e23d428fbb1d4798f1fb1388b
|
[
"MIT"
] | 19
|
2021-12-06T08:35:30.000Z
|
2022-03-23T18:20:46.000Z
|
lib/api/auth/errors.ex
|
ARKultur/naboo
|
ab26c2e82cdc485e23d428fbb1d4798f1fb1388b
|
[
"MIT"
] | null | null | null |
defmodule NabooAPI.Auth.Errors do
  @moduledoc """
  Guardian error handler that renders every authentication failure as a
  JSON 401 response.
  """

  @behaviour Guardian.Plug.ErrorHandler

  import Plug.Conn

  @impl Guardian.Plug.ErrorHandler
  def auth_error(conn, {_type, _reason}, _opts) do
    # The single static JSON body is written by hand to avoid pulling in
    # an encoder; the failure type/reason are intentionally not exposed.
    conn
    |> put_resp_content_type("application/json")
    |> send_resp(401, "{\"message\": \"authentication error\"}")
    |> halt()
  end
end
| 24.5625
| 64
| 0.679389
|
9e440ae08fe911ef918cda9a0d9d26cc720efff9
| 1,619
|
ex
|
Elixir
|
apps/admin_panel/lib/admin_panel/controllers/page_controller.ex
|
vanmil/ewallet
|
6c1aca95a83e0a9d93007670a40d8c45764a8122
|
[
"Apache-2.0"
] | 1
|
2018-12-07T06:21:21.000Z
|
2018-12-07T06:21:21.000Z
|
apps/admin_panel/lib/admin_panel/controllers/page_controller.ex
|
vanmil/ewallet
|
6c1aca95a83e0a9d93007670a40d8c45764a8122
|
[
"Apache-2.0"
] | null | null | null |
apps/admin_panel/lib/admin_panel/controllers/page_controller.ex
|
vanmil/ewallet
|
6c1aca95a83e0a9d93007670a40d8c45764a8122
|
[
"Apache-2.0"
] | null | null | null |
defmodule AdminPanel.PageController do
  @moduledoc """
  Serves the pre-built admin panel single-page app, injecting the admin
  API key configuration into the page before sending it.
  """

  use AdminPanel, :controller

  import Ecto.Query
  import EWalletDB.SoftDelete

  alias EWalletDB.{APIKey, Repo}
  alias Plug.Conn

  @not_found_message """
  The assets are not available. If you think this is incorrect,
  please make sure that the front-end assets have been built.
  """

  # Reads dist/index.html, injects the API key script and sends it; a
  # missing file (File.Error from File.read!/1) becomes a plain-text 404.
  def index(conn, _params) do
    content =
      conn
      |> index_file_path()
      |> File.read!()
      |> inject_api_key()

    conn
    |> put_resp_header("content-type", "text/html; charset=utf-8")
    |> Conn.send_resp(200, content)
  rescue
    File.Error ->
      conn
      |> put_resp_header("content-type", "text/plain; charset=utf-8")
      |> Conn.send_resp(:not_found, @not_found_message)
  end

  # Tests can override the dist directory through conn.private.
  defp index_file_path(%{private: %{override_dist_path: dist_path}}) do
    Path.join(dist_path, "index.html")
  end

  defp index_file_path(_conn) do
    :admin_panel
    |> Application.get_env(:dist_path)
    |> Path.join("index.html")
  end

  # Inserts the config script right before the "app" mount node;
  # insert_replaced: 0 keeps the matched text after the insertion.
  defp inject_api_key(content) do
    String.replace(content, ~s("app"></div>), api_key_script(), insert_replaced: 0)
  end

  # Fetches one non-deleted admin_api key and renders its script tag.
  defp api_key_script do
    APIKey
    |> exclude_deleted()
    |> limit(1)
    |> Repo.get_by(%{owner_app: "admin_api"})
    |> api_key_script()
  end

  defp api_key_script(%APIKey{} = api_key) do
    """
    <script>
      var adminConfig = {};
      adminConfig.apiKeyId = "#{api_key.id}";
      adminConfig.apiKey = "#{api_key.key}";
      window.adminConfig = adminConfig;
    </script>
    """
  end

  # For troubleshooting purposes
  defp api_key_script(_), do: "<!-- No API key found -->"
end
| 23.808824
| 83
| 0.649784
|
9e442f3dbd1732b462ebf2a1c89e82316421bf7f
| 2,218
|
ex
|
Elixir
|
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1__annotate_video_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1__annotate_video_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1__annotate_video_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse do
  @moduledoc """
  Video annotation response. Included in the `response` field of the `Operation` returned by the `GetOperation` call of the `google::longrunning::Operations` service.

  ## Attributes

  *   `annotationResults` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults.t)`, *default:* `nil`) - Annotation results for all videos specified in `AnnotateVideoRequest`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :annotationResults =>
            list(
              GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults.t()
            )
            | nil
        }

  field(:annotationResults,
    as:
      GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults,
    type: :list
  )
end

# Generated protocol implementations delegating JSON decoding/encoding to
# GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder,
  for:
    GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse do
  def decode(value, options) do
    GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse.decode(
      value,
      options
    )
  end
end

defimpl Poison.Encoder,
  for:
    GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.774194
| 234
| 0.761497
|
9e443336fb83a2f92ebbaf142a0c9506cad485a6
| 8,585
|
ex
|
Elixir
|
lib/mix/lib/mix/tasks/compile.erlang.ex
|
guilleiguaran/elixir
|
952052869ff7af0e293d2a7160b1aebc68fc46be
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/lib/mix/tasks/compile.erlang.ex
|
guilleiguaran/elixir
|
952052869ff7af0e293d2a7160b1aebc68fc46be
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/lib/mix/tasks/compile.erlang.ex
|
guilleiguaran/elixir
|
952052869ff7af0e293d2a7160b1aebc68fc46be
|
[
"Apache-2.0"
] | null | null | null |
defmodule Mix.Tasks.Compile.Erlang do
  alias :epp, as: Epp
  alias :digraph, as: Graph
  alias :digraph_utils, as: GraphUtils

  use Mix.Task

  @hidden true
  @shortdoc "Compile Erlang source files"
  @recursive true
  @manifest ".compile.erlang"

  @moduledoc """
  A task to compile Erlang source files.

  When this task runs, it will first check the modification times of
  all files to be compiled and if they haven't been
  changed since the last compilation, it will not compile
  them. If any of them have changed, it compiles
  everything.

  For this reason, the task touches your `:compile_path`
  directory and sets the modification time to the current
  time and date at the end of each compilation. You can
  force compilation regardless of modification times by passing
  the `--force` option.

  ## Command line options

  * `--force` - forces compilation regardless of modification times

  ## Configuration

  * `ERL_COMPILER_OPTIONS` - can be used to give default compile options.
    The value must be a valid Erlang term. If the value is a list, it will
    be used as is. If it is not a list, it will be put into a list.

  * `:erlc_paths` - directories to find source files.
    Defaults to `["src"]`, can be configured as:

    ```
    [erlc_paths: ["src", "other"]]
    ```

  * `:erlc_include_path` - directory for adding include files.
    Defaults to `"include"`, can be configured as:

    ```
    [erlc_include_path: "other"]
    ```

  * `:erlc_options` - compilation options that apply to Erlang's
    compiler. `:debug_info` is enabled by default.

    There are many available options here:
    http://www.erlang.org/doc/man/compile.html#file-2
  """

  # Parsed representation of one .erl source: its path, module name,
  # declared behaviours, compile attributes and transitive includes.
  defrecord Erl, file: nil, module: nil, behaviours: [], compile: [],
    includes: [], mtime: nil, invalid: false

  @doc """
  Runs this task.
  """
  def run(args) do
    { opts, _, _ } = OptionParser.parse(args, switches: [force: :boolean])

    project      = Mix.project
    source_paths = project[:erlc_paths]
    include_path = to_erl_file project[:erlc_include_path]
    compile_path = to_erl_file Mix.Project.compile_path(project)
    files        = Mix.Utils.extract_files(source_paths, [:erl])

    erlc_options = project[:erlc_options] || []
    erlc_options = erlc_options ++ [{:outdir, compile_path}, {:i, include_path}, :report]
    erlc_options = Enum.map erlc_options, fn
      # Fix: the output-directory option is :outdir (was misspelled
      # :outdit), so outdir values are now converted to charlists too.
      { kind, dir } when kind in [:i, :outdir] ->
        { kind, to_erl_file(dir) }
      opt ->
        opt
    end

    # Parse sources, order them so behaviours/parse transforms compile
    # before their users, and tag each with its stale target (or nil).
    tuples = files
             |> scan_sources(include_path, source_paths)
             |> sort_dependencies
             |> Enum.map(&annotate_target(&1, compile_path, opts[:force]))

    compile_mappings(manifest(), tuples, fn
      input, _output ->
        file = to_erl_file(Path.rootname(input, ".erl"))
        :compile.file(file, erlc_options)
    end)
  end

  @doc """
  Returns Erlang manifests.
  """
  def manifests, do: [manifest]
  defp manifest, do: Path.join(Mix.Project.manifest_path, @manifest)

  @doc """
  Extracts the extensions from the mappings, automatically
  invoking the callback for each stale input and output pair
  (or for all if `force` is true) and removing files that no
  longer have a source, while keeping the manifest up
  to date.

  ## Examples

  For example, a simple compiler for Lisp Flavored Erlang
  would be implemented like:

      compile_mappings ".compile.lfe",
                       [{ "src", "ebin" }],
                       :lfe, :beam, opts[:force], fn
        input, output ->
          :lfe_comp.file(to_erl_file(input),
                         [output_dir: Path.dirname(output)])
      end

  The command above will:

  1. Look for files ending with the `lfe` extension in `src`
     and their `beam` counterpart in `ebin`;

  2. For each stale file (or for all if `force` is true),
     invoke the callback passing the calculated input
     and output;

  3. Update the manifest with the newly compiled outputs;

  4. Remove any output in the manifest that does not
     have an equivalent source;

  The callback must return `{ :ok, mod }` or `:error` in case
  of error. An error is raised at the end if any of the
  files failed to compile.
  """
  def compile_mappings(manifest, mappings, src_ext, dest_ext, force, callback) do
    files = lc { src, dest } inlist mappings do
              extract_targets(src, src_ext, dest, dest_ext, force)
            end |> Enum.concat

    compile_mappings(manifest, files, callback)
  end

  @doc """
  Converts the given file to a format accepted by
  the Erlang compilation tools.
  """
  def to_erl_file(file) do
    to_char_list(file)
  end

  ## Internal helpers

  # Parses every source with :epp, collecting behaviours, compile
  # attributes and includes; unparsable files are silently skipped.
  defp scan_sources(files, include_path, source_paths) do
    include_paths = [include_path | source_paths]
    Enum.reduce(files, [], &scan_source(&2, &1, include_paths)) |> Enum.reverse
  end

  defp scan_source(acc, file, include_paths) do
    erl_file = Erl[file: file, module: module_from_artifact(file)]

    case Epp.parse_file(to_erl_file(file), include_paths, []) do
      { :ok, forms } ->
        [List.foldl(tl(forms), erl_file, &do_form(file, &1, &2)) | acc]
      { :error, _error } ->
        acc
    end
  end

  # Folds one abstract form into the Erl record, tracking includes,
  # behaviours and compile attributes (for parse transforms).
  defp do_form(file, form, Erl[] = erl) do
    case form do
      {:attribute, _, :file, {include_file, _}} when file != include_file ->
        if File.regular?(include_file) do
          erl.update_includes &[include_file|&1]
        else
          erl
        end
      {:attribute, _, :behaviour, behaviour} ->
        erl.update_behaviours &[behaviour|&1]
      {:attribute, _, :compile, value} ->
        erl.update_compile &[value|&1]
      _ ->
        erl
    end
  end

  # Topologically sorts sources so behaviours and parse transforms are
  # compiled before the modules that use them; on a cycle the original
  # order is kept.
  defp sort_dependencies(erls) do
    graph = Graph.new

    lc erl inlist erls do
      Graph.add_vertex(graph, erl.module, erl)
    end

    lc erl inlist erls do
      lc b inlist erl.behaviours, do: Graph.add_edge(graph, b, erl.module)
      lc c inlist erl.compile do
        case c do
          {:parse_transform, transform} -> Graph.add_edge(graph, transform, erl.module)
          _ -> :ok
        end
      end
    end

    result =
      case GraphUtils.topsort(graph) do
        false -> erls
        mods  ->
          lc m inlist mods, do: elem(Graph.vertex(graph, m), 1)
      end

    Graph.delete(graph)
    result
  end

  # Pairs each source with its .beam target when stale (or forced);
  # a nil target means the file is up to date.
  defp annotate_target(erl, compile_path, force) do
    beam = Path.join(compile_path, "#{erl.module}#{:code.objfile_extension}")

    if force || Mix.Utils.stale?([erl.file|erl.includes], [beam]) do
      { erl.file, erl.module, beam }
    else
      { erl.file, erl.module, nil }
    end
  end

  defp module_from_artifact(artifact) do
    artifact |> Path.basename |> Path.rootname
  end

  defp extract_targets(dir1, src_ext, dir2, dest_ext, force) do
    files = Mix.Utils.extract_files([dir1], List.wrap(src_ext))

    lc file inlist files do
      module = module_from_artifact(file)
      target = Path.join(dir2, module <> "." <> to_string(dest_ext))

      if force || Mix.Utils.stale?([file], [target]) do
        { file, module, target }
      else
        { file, module, nil }
      end
    end
  end

  # Compiles all stale tuples, prunes orphaned outputs and keeps the
  # manifest in sync; raises CompileError if any file failed.
  defp compile_mappings(manifest, tuples, callback) do
    # Stale files are the ones with a destination
    stale = lc { src, _mod, dest } inlist tuples, dest != nil, do: { src, dest }

    # Get the previous entries from the manifest
    entries = Mix.Utils.read_manifest(manifest)

    # Files to remove are the ones in the
    # manifest but they no longer have a source
    removed = Enum.filter(entries, fn entry ->
      module = module_from_artifact(entry)
      not Enum.any?(tuples, fn { _src, mod, _dest } -> module == mod end)
    end)

    if stale == [] && removed == [] do
      :noop
    else
      # Build the project structure so we can write down compiled files.
      Mix.Project.build_structure

      # Remove manifest entries with no source
      Enum.each(removed, &File.rm/1)

      # Compile stale files and print the results
      results = lc { input, output } inlist stale do
        interpret_result(input, callback.(input, output))
      end

      # Write final entries to manifest
      entries = (entries -- removed) ++ Enum.map(stale, &elem(&1, 1))
      Mix.Utils.write_manifest(manifest, :lists.usort(entries))

      # Raise if any error, return :ok otherwise
      if :error in results, do: raise CompileError
      :ok
    end
  end

  defp interpret_result(file, result) do
    case result do
      { :ok, _ } -> Mix.shell.info "Compiled #{file}"
      :error -> :error
    end
    result
  end
end
| 29.603448
| 89
| 0.64205
|
9e443fdc6259fb65c1de468b91992c8e13a9d254
| 1,246
|
exs
|
Elixir
|
test/grizzly/zwave/commands/switch_multilevel_set_test.exs
|
jellybob/grizzly
|
290bee04cb16acbb9dc996925f5c501697b7ac94
|
[
"Apache-2.0"
] | null | null | null |
test/grizzly/zwave/commands/switch_multilevel_set_test.exs
|
jellybob/grizzly
|
290bee04cb16acbb9dc996925f5c501697b7ac94
|
[
"Apache-2.0"
] | null | null | null |
test/grizzly/zwave/commands/switch_multilevel_set_test.exs
|
jellybob/grizzly
|
290bee04cb16acbb9dc996925f5c501697b7ac94
|
[
"Apache-2.0"
] | null | null | null |
defmodule Grizzly.ZWave.Commands.SwitchMultilevelSetTest do
use ExUnit.Case, async: true
alias Grizzly.ZWave.Commands.SwitchMultilevelSet
test "creates the command and validates params" do
params = [target_value: :off]
{:ok, _command} = SwitchMultilevelSet.new(params)
end
test "encodes v1 params correctly" do
params = [target_value: 99]
{:ok, command} = SwitchMultilevelSet.new(params)
expected_binary = <<0x63>>
assert expected_binary == SwitchMultilevelSet.encode_params(command)
end
test "encodes v2 params correctly" do
params = [target_value: 99, duration: 10]
{:ok, command} = SwitchMultilevelSet.new(params)
expected_binary = <<0x63, 0x0A>>
assert expected_binary == SwitchMultilevelSet.encode_params(command)
end
test "decodes v1 params correctly" do
binary_params = <<0xFF>>
{:ok, params} = SwitchMultilevelSet.decode_params(binary_params)
assert Keyword.get(params, :target_value) == :previous
end
test "decodes v2 params correctly" do
binary_params = <<0x32, 0x0A>>
{:ok, params} = SwitchMultilevelSet.decode_params(binary_params)
assert Keyword.get(params, :target_value) == 0x32
assert Keyword.get(params, :duration) == 0x0A
end
end
| 32.789474
| 72
| 0.723917
|
9e44c564fa4f93f5ea5fe87078a335ee23421f69
| 17,954
|
ex
|
Elixir
|
lib/mix/lib/mix.ex
|
skunkwerks/elixir
|
b498c3790e570e341f24ae8e7b73b0d45eae9279
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/lib/mix.ex
|
skunkwerks/elixir
|
b498c3790e570e341f24ae8e7b73b0d45eae9279
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/lib/mix.ex
|
skunkwerks/elixir
|
b498c3790e570e341f24ae8e7b73b0d45eae9279
|
[
"Apache-2.0"
] | null | null | null |
defmodule Mix do
@moduledoc ~S"""
Mix is a build tool that provides tasks for creating, compiling,
and testing Elixir projects, managing its dependencies, and more.
## Mix.Project
The foundation of Mix is a project. A project can be defined by using
`Mix.Project` in a module, usually placed in a file named `mix.exs`:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0"
]
end
end
See the `Mix.Project` module for detailed documentation on Mix projects.
Once the project is defined, a number of default Mix tasks can be run
directly from the command line:
* `mix compile` - compiles the current project
* `mix test` - runs tests for the given project
* `mix run` - runs a particular command inside the project
Each task has its own options and sometimes specific configuration
to be defined in the `project/0` function. You can use `mix help`
to list all available tasks and `mix help NAME` to show help for
a particular task.
The best way to get started with your first project is by calling
`mix new my_project` from the command line.
## Mix.Task
Tasks are what make Mix extensible.
Projects can extend Mix behaviour by adding their own tasks. For
example, adding the task below inside your project will
make it available to everyone that uses your project:
defmodule Mix.Tasks.Hello do
use Mix.Task
def run(_) do
Mix.shell().info("Hello world")
end
end
The task can now be invoked with `mix hello`.
See the `Mix.Task` behaviour for detailed documentation on Mix tasks.
## Dependencies
Mix also manages your dependencies and integrates nicely with the [Hex package
manager](https://hex.pm).
In order to use dependencies, you need to add a `:deps` key
to your project configuration. We often extract the list of dependencies
into its own function:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
deps: deps()
]
end
defp deps do
[
{:ecto, "~> 2.0"},
{:plug, github: "elixir-lang/plug"}
]
end
end
You can run `mix help deps` to learn more about dependencies in Mix.
## Environments
Mix supports different environments. Environments allow developers
to prepare and organize their project specifically for different
scenarios. By default, Mix provides three environments:
* `:dev` - the default environment
* `:test` - the environment `mix test` runs on
* `:prod` - the environment your dependencies run on
The environment can be changed via the command line by setting
the `MIX_ENV` environment variable, for example:
$ MIX_ENV=prod mix run server.exs
You can also specify that certain dependencies are available only for
certain environments:
{:some_test_dependency, "~> 1.0", only: :test}
The environment can be read via `Mix.env/0`.
## Targets
Besides environments, Mix supports targets. Targets are useful when a
project needs to compile to different architectures and some of the
dependencies are only available to some of them. By default, the target
is `:host` but it can be set via the `MIX_TARGET` environment variable.
The target can be read via `Mix.target/0`.
## Aliases
Aliases are shortcuts or tasks specific to the current project.
In the [Mix.Task section](#module-mix-task), we have defined a task that would be
available to everyone using our project as a dependency. What if
we wanted the task to only be available for our project? Just
define an alias:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
aliases: aliases()
]
end
defp aliases do
[
c: "compile",
hello: &hello/1
]
end
defp hello(_) do
Mix.shell().info("Hello world")
end
end
In the example above, we have defined two aliases. One is `mix c`
which is a shortcut for `mix compile`. The other is named
`mix hello`, which is the equivalent to the `Mix.Tasks.Hello`
we have defined in the [Mix.Task section](#module-mix-task).
Aliases may also be lists, specifying multiple tasks to be run
consecutively:
[all: [&hello/1, "deps.get --only #{Mix.env()}", "compile"]]
In the example above, we have defined an alias named `mix all`,
that prints "Hello world", then fetches dependencies specific
to the current environment, and compiles the project.
Aliases can also be used to augment existing tasks. Let's suppose
you want to augment `mix clean` to clean another directory Mix does
not know about:
[clean: ["clean", &clean_extra/1]]
Where `&clean_extra/1` would be a function in your `mix.exs`
with extra cleanup logic.
Arguments given to the alias will be appended to the arguments of
the last task in the list. Except when overriding an existing task.
In this case, the arguments will be given to the original task,
in order to preserve semantics. For example, in the `:clean` alias
above, the arguments given to the alias will be passed to "clean"
and not to `clean_extra/1`.
Aliases defined in the current project do not affect its dependencies
and aliases defined in dependencies are not accessible from the
current project.
Aliases can be used very powerfully to also run Elixir scripts and
shell commands, for example:
# priv/hello1.exs
IO.puts("Hello One")
# priv/hello2.exs
IO.puts("Hello Two")
# priv/world.sh
#!/bin/sh
echo "world!"
# mix.exs
defp aliases do
[
some_alias: ["hex.info", "run priv/hello1.exs", "cmd priv/world.sh"]
]
end
In the example above we have created the alias `some_alias` that will
run the task `mix hex.info`, then `mix run` to run an Elixir script,
then `mix cmd` to execute a command line shell script. This shows how
powerful aliases mixed with Mix tasks can be.
Mix tasks are designed to run only once. This prevents the same task
from being executed multiple times. For example, if there are several tasks
depending on `mix compile`, the code will be compiled once. Tasks can
be executed again if they are explicitly reenabled using `Mix.Task.reenable/1`:
another_alias: [
"format --check-formatted priv/hello1.exs",
"cmd priv/world.sh",
fn _ -> Mix.Task.reenable("format") end,
"format --check-formatted priv/hello2.exs"
]
Some tasks are automatically reenabled though, as they are expected to
be invoked multiple times. They are: `mix cmd`, `mix do`, `mix loadconfig`,
`mix profile.cprof`, `mix profile.eprof`, `mix profile.fprof`, `mix run`,
and `mix xref`.
It is worth mentioning that some tasks, such as in the case of the
`mix format` command in the example above, can accept multiple files so it
could be rewritten as:
another_alias: ["format --check-formatted priv/hello1.exs priv/hello2.exs"]
## Environment variables
Several environment variables can be used to modify Mix's behaviour.
Mix responds to the following variables:
* `MIX_ARCHIVES` - specifies the directory into which the archives should be installed
(default: `~/.mix/archives`)
* `MIX_BUILD_ROOT` - sets the root directory where build artifacts
should be written to. For example, "_build". If `MIX_BUILD_PATH` is set, this
option is ignored.
* `MIX_BUILD_PATH` - sets the project `Mix.Project.build_path/0` config. This option
must always point to a subdirectory inside a temporary directory. For instance,
never "/tmp" or "_build" but "_build/PROD" or "/tmp/PROD", as required by Mix
* `MIX_DEPS_PATH` - sets the project `Mix.Project.deps_path/0` config (default: `deps`)
* `MIX_DEBUG` - outputs debug information about each task before running it
* `MIX_ENV` - specifies which environment should be used. See [Environments](#module-environments)
* `MIX_TARGET` - specifies which target should be used. See [Targets](#module-targets)
* `MIX_EXS` - changes the full path to the `mix.exs` file
* `MIX_HOME` - path to Mix's home directory, stores configuration files and scripts used by Mix
(default: `~/.mix`)
* `MIX_INSTALL_DIR` - (since v1.12.0) specifies directory where `Mix.install/2` keeps
installs cache
* `MIX_PATH` - appends extra code paths
* `MIX_QUIET` - does not print information messages to the terminal
* `MIX_REBAR` - path to rebar command that overrides the one Mix installs
(default: `~/.mix/rebar`)
* `MIX_REBAR3` - path to rebar3 command that overrides the one Mix installs
(default: `~/.mix/rebar3`)
* `MIX_XDG` - asks Mix to follow the [XDG Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
for its home directory and configuration files. This behaviour needs to
be opt-in due to backwards compatibility. `MIX_HOME` has higher preference
than `MIX_XDG`. If none of the variables are set, the default directory
`~/.mix` will be used
Environment variables that are not meant to hold a value (and act basically as
flags) should be set to either `1` or `true`, for example:
$ MIX_DEBUG=1 mix compile
"""
use Application
import Kernel, except: [raise: 2]
@doc false
def start do
{:ok, _} = Application.ensure_all_started(:mix)
:ok
end
@doc false
def start(_type, []) do
children = [Mix.State, Mix.TasksServer, Mix.ProjectStack]
opts = [strategy: :one_for_one, name: Mix.Supervisor, max_restarts: 0]
Supervisor.start_link(children, opts)
end
@doc """
Returns the current Mix environment.
This function should not be used at runtime in application code (as opposed
to infrastructure and build code like Mix tasks). Mix is a build tool and may
not be available after the code is compiled (for example in a release).
To differentiate the program behavior depending on the environment, it is
recommended to use application environment through `Application.get_env/3`.
Proper configuration can be set in config files, often per-environment
(see the `Config` module for more information).
"""
@spec env() :: atom()
def env do
# env is not available on bootstrapping, so set a :dev default
Mix.State.get(:env, :dev)
end
@doc """
Changes the current Mix environment to `env`.
Be careful when invoking this function as any project
configuration won't be reloaded.
This function should not be used at runtime in application code
(see `env/0` for more information).
"""
@spec env(atom()) :: :ok
def env(env) when is_atom(env) do
Mix.State.put(:env, env)
end
@doc """
Returns the Mix target.
"""
@spec target() :: atom()
def target do
# target is not available on bootstrapping, so set a :host default
Mix.State.get(:target, :host)
end
@doc """
Changes the current Mix target to `target`.
Be careful when invoking this function as any project
configuration won't be reloaded.
"""
@spec target(atom()) :: :ok
def target(target) when is_atom(target) do
Mix.State.put(:target, target)
end
@doc """
Returns the default compilers used by Mix.
It can be used in your `mix.exs` to prepend or
append new compilers to Mix:
def project do
[compilers: Mix.compilers() ++ [:foo, :bar]]
end
"""
@spec compilers() :: [atom()]
def compilers do
[:yecc, :leex, :erlang, :elixir, :app]
end
@doc """
Returns the current shell.
`shell/0` can be used as a wrapper for the current shell. It contains
conveniences for requesting information from the user, printing to the
shell and so forth. The Mix shell is swappable (see `shell/1`), allowing
developers to use a test shell that simply sends messages to the current
process instead of performing IO (see `Mix.Shell.Process`).
By default, this returns `Mix.Shell.IO`.
## Examples
Mix.shell().info("Preparing to do something dangerous...")
if Mix.shell().yes?("Are you sure?") do
# do something dangerous
end
"""
@spec shell() :: module
def shell do
Mix.State.get(:shell, Mix.Shell.IO)
end
@doc """
Sets the current shell.
As an argument you may pass `Mix.Shell.IO`, `Mix.Shell.Process`,
`Mix.Shell.Quiet`, or any module that implements the `Mix.Shell`
behaviour.
After calling this function, `shell` becomes the shell that is
returned by `shell/0`.
## Examples
iex> Mix.shell(Mix.Shell.IO)
:ok
You can use `shell/0` and `shell/1` to temporarily switch shells,
for example, if you want to run a Mix Task that normally produces
a lot of output:
shell = Mix.shell()
Mix.shell(Mix.Shell.Quiet)
try do
Mix.Task.run("noisy.task")
after
Mix.shell(shell)
end
"""
@spec shell(module) :: :ok
def shell(shell) do
Mix.State.put(:shell, shell)
end
@doc """
Returns `true` if Mix is in debug mode, `false` otherwise.
"""
@spec debug?() :: boolean()
def debug? do
Mix.State.get(:debug, false)
end
@doc """
Sets Mix debug mode.
"""
@spec debug(boolean()) :: :ok
def debug(debug) when is_boolean(debug) do
Mix.State.put(:debug, debug)
end
@doc """
Raises a Mix error that is nicely formatted, defaulting to exit code `1`.
"""
@spec raise(binary) :: no_return
def raise(message) do
__MODULE__.raise(message, exit_code: 1)
end
@doc """
Raises a Mix error that is nicely formatted.
## Options
* `:exit_code` - defines exit code value, defaults to `1`
"""
@doc since: "1.12.0"
@spec raise(binary, exit_code: non_neg_integer()) :: no_return
def raise(message, opts) when is_binary(message) and is_list(opts) do
Kernel.raise(Mix.Error, mix: Keyword.get(opts, :exit_code, 1), message: message)
end
@doc """
The path for local archives or escripts.
"""
@doc since: "1.10.0"
@spec path_for(:archives | :escripts) :: String.t()
def path_for(:archives) do
System.get_env("MIX_ARCHIVES") || Path.join(Mix.Utils.mix_home(), "archives")
end
def path_for(:escripts) do
Path.join(Mix.Utils.mix_home(), "escripts")
end
@doc """
Installs and starts dependencies.
The given `deps` should be in the same format as defined in a regular Mix
project. See `mix help deps` for more information. As a shortcut, an atom
can be given as dependency to mean the latest version. In other words,
specifying `:decimal` is the same as `{:decimal, ">= 0.0.0"}`.
After each successful installation, a given set of dependencies is cached
so starting another VM and calling `Mix.install/2` with the same dependencies
will avoid unnecessary downloads and compilations. The location of the cache
directory can be controlled using the `MIX_INSTALL_DIR` environment variable.
This function can only be called outside of a Mix project and only with the
same dependencies in the given VM.
**Note:** this feature is currently experimental and it may change
in future releases.
## Options
* `:force` - if `true`, removes install cache. This is useful when you want
to update your dependencies or your install got into an inconsistent state
(Default: `false`)
* `:verbose` - if `true`, prints additional debugging information
(Default: `false`)
* `:consolidate_protocols` - if `true`, runs protocol
consolidation via the `mix compile.protocols` task (Default: `true`)
## Examples
Mix.install([
:decimal,
{:jason, "~> 1.0"}
])
"""
@doc since: "1.12.0"
def install(deps, opts \\ [])
def install(deps, opts) when is_list(deps) and is_list(opts) do
Mix.start()
if Mix.Project.get() do
Mix.raise("Mix.install/2 cannot be used inside a Mix project")
end
deps =
Enum.map(deps, fn
dep when is_atom(dep) -> {dep, ">= 0.0.0"}
dep -> dep
end)
force? = !!opts[:force]
case Mix.State.get(:installed) do
nil ->
:ok
^deps when not force? ->
:ok
_ ->
Mix.raise("Mix.install/2 can only be called with the same dependencies in the given VM")
end
installs_root =
System.get_env("MIX_INSTALL_DIR") ||
Path.join(Mix.Utils.mix_cache(), "installs")
id = deps |> :erlang.term_to_binary() |> :erlang.md5() |> Base.encode16(case: :lower)
version = "elixir-#{System.version()}-erts-#{:erlang.system_info(:version)}"
dir = Path.join([installs_root, version, id])
if opts[:verbose] do
Mix.shell().info("using #{dir}")
end
if force? do
File.rm_rf!(dir)
end
config = [
version: "0.1.0",
build_per_environment: true,
build_path: "_build",
lockfile: "mix.lock",
deps_path: "deps",
deps: deps,
app: :mix_install,
erlc_paths: ["src"],
elixirc_paths: ["lib"],
compilers: [],
consolidate_protocols: Keyword.get(opts, :consolidate_protocols, true)
]
:ok = Mix.Local.append_archives()
:ok = Mix.ProjectStack.push(__MODULE__.InstallProject, config, "nofile")
try do
dir? = File.dir?(dir)
File.mkdir_p!(dir)
File.cd!(dir, fn ->
unless dir? do
Mix.Task.run("deps.get")
end
Mix.Task.run("compile")
end)
for app <- Mix.Project.deps_apps() do
Application.ensure_all_started(app)
end
Mix.State.put(:installed, deps)
:ok
after
Mix.ProjectStack.pop()
end
end
end
| 30.225589
| 148
| 0.665367
|
9e44ed0e6f5d582913903b750652bfece2aafbbb
| 1,760
|
ex
|
Elixir
|
lib/movement/mappers/operation.ex
|
charlesdemers/accent
|
eeea52feb30d16ada5023c05fef37c08c267eff0
|
[
"BSD-3-Clause"
] | null | null | null |
lib/movement/mappers/operation.ex
|
charlesdemers/accent
|
eeea52feb30d16ada5023c05fef37c08c267eff0
|
[
"BSD-3-Clause"
] | null | null | null |
lib/movement/mappers/operation.ex
|
charlesdemers/accent
|
eeea52feb30d16ada5023c05fef37c08c267eff0
|
[
"BSD-3-Clause"
] | null | null | null |
defmodule Movement.Mappers.Operation do
alias Accent.PreviousTranslation
@spec map(binary, map, map) :: Movement.Operation.t()
def map(action = "new", current_translation, suggested_translation) do
%Movement.Operation{
action: action,
text: suggested_translation.text,
key: suggested_translation.key,
file_comment: suggested_translation.file_comment,
file_index: suggested_translation.file_index,
value_type: Map.get(suggested_translation, :value_type),
revision_id: Map.get(suggested_translation, :revision_id),
document_id: Map.get(suggested_translation, :document_id),
version_id: Map.get(suggested_translation, :version_id),
previous_translation: PreviousTranslation.from_translation(current_translation)
}
end
def map(action, current_translation, suggested_translation) do
%Movement.Operation{
action: action,
text: suggested_translation.text,
key: Map.get(suggested_translation, :key, current_translation.key),
file_comment: Map.get(suggested_translation, :file_comment, current_translation.file_comment),
file_index: Map.get(suggested_translation, :file_index, current_translation.file_index),
document_id: Map.get(suggested_translation, :document_id, current_translation.document_id),
revision_id: Map.get(suggested_translation, :revision_id, current_translation.revision_id),
version_id: Map.get(suggested_translation, :version_id, current_translation.version_id),
value_type: Map.get(suggested_translation, :value_type, current_translation.value_type),
translation_id: Map.get(current_translation, :id),
previous_translation: PreviousTranslation.from_translation(current_translation)
}
end
end
| 48.888889
| 100
| 0.769886
|
9e45160175c74225580e6a97e80863afb7df5fc9
| 1,026
|
ex
|
Elixir
|
lib/exhort/sat/vars.ex
|
josejimenezjr0/exhort
|
07c9b735d82a96d6437554f51229c38cf287b39f
|
[
"Apache-2.0"
] | null | null | null |
lib/exhort/sat/vars.ex
|
josejimenezjr0/exhort
|
07c9b735d82a96d6437554f51229c38cf287b39f
|
[
"Apache-2.0"
] | null | null | null |
lib/exhort/sat/vars.ex
|
josejimenezjr0/exhort
|
07c9b735d82a96d6437554f51229c38cf287b39f
|
[
"Apache-2.0"
] | null | null | null |
defmodule Exhort.SAT.Vars do
@moduledoc """
Hold the defined varaibles and raise an error if an undefined variable is
referenced.
"""
alias __MODULE__
@type t :: %__MODULE__{}
defstruct list: [], map: %{}
@doc """
Add a variable, using the name in the struct for lookup.
Variables are kept in order so when they are resolved, referenced variables
are available.
"""
@spec add(Vars.t(), map()) :: Vars.t()
def add(%Vars{list: list, map: map} = vars, %{name: name} = var) do
%Vars{vars | list: list ++ [var], map: Map.put(map, name, var)}
end
@doc """
Get a variable by name.
"""
@spec get(Vars.t(), name :: atom() | String.t() | map()) :: nil | any()
def get(%Vars{} = vars, %{name: name}), do: get(vars, name)
def get(%Vars{map: map} = _vars, name) do
case Map.get(map, name) do
nil -> raise "Undefined variable: #{inspect(name)}"
var -> var
end
end
@doc """
Provide an ordered list of variables.
"""
def iter(%Vars{list: list}), do: list
end
| 25.02439
| 77
| 0.606238
|
9e451c8769f14b5a1e1de9699567145b79bf9ad6
| 1,818
|
ex
|
Elixir
|
lib/forecastr/renderer/colours.ex
|
densefog/forecastr
|
5b95b52ecd6a1324cac5e8616f693e929135a331
|
[
"Apache-2.0"
] | 11
|
2018-03-26T07:09:17.000Z
|
2020-01-12T21:52:03.000Z
|
lib/forecastr/renderer/colours.ex
|
densefog/forecastr
|
5b95b52ecd6a1324cac5e8616f693e929135a331
|
[
"Apache-2.0"
] | 3
|
2018-03-27T09:10:54.000Z
|
2018-04-10T19:08:03.000Z
|
lib/forecastr/renderer/colours.ex
|
densefog/forecastr
|
5b95b52ecd6a1324cac5e8616f693e929135a331
|
[
"Apache-2.0"
] | 7
|
2018-03-26T07:09:20.000Z
|
2021-03-06T14:43:44.000Z
|
defmodule Forecastr.Renderer.Colours do
@moduledoc """
Return the right colour sequence given an output_type such as (:ascii, :ansi, :png)
"""
alias IO.ANSI
def bright_yellow(:ascii), do: ""
def bright_yellow(:ansi) do
[ANSI.bright(), ANSI.yellow()]
end
def bright_yellow(:png) do
[~S(<span foreground="yellow">)]
end
def bright_yellow(:html) do
[~S(<span style="color: yellow">)]
end
def yellow(:ascii), do: ""
def yellow(:ansi) do
[ANSI.yellow()]
end
def yellow(:png) do
[~S(<span foreground="yellow">)]
end
def yellow(:html) do
[~S(<span style="color: yellow">)]
end
def magenta(:ascii), do: ""
def magenta(:ansi) do
[ANSI.light_magenta()]
end
def magenta(:png) do
[~S(<span foreground="magenta">)]
end
def white(:ascii), do: ""
def white(:ansi) do
[ANSI.white()]
end
def white(:png) do
[~S(<span foreground="white">)]
end
def white(:html) do
[~S(<span style="color: white">)]
end
def light_white(:ascii), do: ""
def light_white(:ansi) do
[ANSI.light_white()]
end
def light_white(:png) do
[~S(<span foreground="white">)]
end
def light_white(:html) do
[~S(<span style="color: white">)]
end
def blue(:ascii), do: ""
def blue(:ansi) do
[ANSI.blue()]
end
def blue(:png) do
[~S(<span foreground="blue">)]
end
def blue(:html) do
[~S(<span style="color: blue">)]
end
def normal(:ascii), do: ""
def normal(:ansi) do
[ANSI.normal()]
end
def normal(:png) do
[~S(<span foreground="gray">)]
end
def normal(:html) do
[~S(<span style="color: gray">)]
end
def reset(:ascii), do: ""
def reset(:ansi) do
ANSI.reset()
end
def reset(:png) do
"</span>"
end
def reset(:html) do
"</span>"
end
end
| 15.672414
| 85
| 0.582508
|
9e451ebaa9ab949b68011da388c75699376beeaa
| 72,302
|
ex
|
Elixir
|
lib/elixir/lib/enum.ex
|
gsphanikumar/elixir
|
6ca225da4e016200a462888348ff1c3feb625b78
|
[
"Apache-2.0"
] | 4
|
2015-12-22T02:46:39.000Z
|
2016-04-26T06:11:09.000Z
|
lib/elixir/lib/enum.ex
|
alco/elixir
|
4407170349aa12c58664cab2122374167e827f5e
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/enum.ex
|
alco/elixir
|
4407170349aa12c58664cab2122374167e827f5e
|
[
"Apache-2.0"
] | null | null | null |
defprotocol Enumerable do
@moduledoc """
Enumerable protocol used by `Enum` and `Stream` modules.
When you invoke a function in the `Enum` module, the first argument
is usually a collection that must implement this protocol.
For example, the expression:
Enum.map([1, 2, 3], &(&1 * 2))
invokes `Enumerable.reduce/3` to perform the reducing
operation that builds a mapped list by calling the mapping function
`&(&1 * 2)` on every element in the collection and consuming the
element with an accumulated list.
Internally, `Enum.map/2` is implemented as follows:
def map(enum, fun) do
reducer = fn x, acc -> {:cont, [fun.(x)|acc]} end
Enumerable.reduce(enum, {:cont, []}, reducer) |> elem(1) |> :lists.reverse()
end
Notice the user-supplied function is wrapped into a `t:reducer/0` function.
The `t:reducer/0` function must return a tagged tuple after each step,
as described in the `t:acc/0` type.
The reason the accumulator requires a tagged tuple is to allow the
`t:reducer/0` function to communicate the end of enumeration to the underlying
enumerable, allowing any open resources to be properly closed.
It also allows suspension of the enumeration, which is useful when
interleaving between many enumerables is required (as in zip).
Finally, `Enumerable.reduce/3` will return another tagged tuple,
as represented by the `t:result/0` type.
"""
@typedoc """
The accumulator value for each step.
It must be a tagged tuple with one of the following "tags":
* `:cont` - the enumeration should continue
* `:halt` - the enumeration should halt immediately
* `:suspend` - the enumeration should be suspended immediately
Depending on the accumulator value, the result returned by
`Enumerable.reduce/3` will change. Please check the `t:result/0`
type documentation for more information.
In case a `t:reducer/0` function returns a `:suspend` accumulator,
it must be explicitly handled by the caller and never leak.
"""
@type acc :: {:cont, term} | {:halt, term} | {:suspend, term}
@typedoc """
The reducer function.
Should be called with the enumerable element and the
accumulator contents.
Returns the accumulator for the next enumeration step.
"""
@type reducer :: (term, term -> acc)
@typedoc """
The result of the reduce operation.
It may be *done* when the enumeration is finished by reaching
its end, or *halted*/*suspended* when the enumeration was halted
or suspended by the `t:reducer/0` function.
In case a `t:reducer/0` function returns the `:suspend` accumulator, the
`:suspended` tuple must be explicitly handled by the caller and
never leak. In practice, this means regular enumeration functions
just need to be concerned about `:done` and `:halted` results.
Furthermore, a `:suspend` call must always be followed by another call,
eventually halting or continuing until the end.
"""
@type result :: {:done, term} |
{:halted, term} |
{:suspended, term, continuation}
@typedoc """
A partially applied reduce function.
The continuation is the closure returned as a result when
the enumeration is suspended. When invoked, it expects
a new accumulator and it returns the result.
A continuation is easily implemented as long as the reduce
function is defined in a tail recursive fashion. If the function
is tail recursive, all the state is passed as arguments, so
the continuation would simply be the reducing function partially
applied.
"""
@type continuation :: (acc -> result)
@doc """
Reduces the enumerable into an element.
Most of the operations in `Enum` are implemented in terms of reduce.
This function should apply the given `t:reducer/0` function to each
item in the enumerable and proceed as expected by the returned
accumulator.
As an example, here is the implementation of `reduce` for lists:
def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)}
def reduce([], {:cont, acc}, _fun), do: {:done, acc}
def reduce([h|t], {:cont, acc}, fun), do: reduce(t, fun.(h, acc), fun)
"""
@spec reduce(t, acc, reducer) :: result
def reduce(enumerable, acc, fun)
@doc """
Checks if an element exists within the enumerable.
It should return `{:ok, boolean}`.
If `{:error, __MODULE__}` is returned a default algorithm using
`reduce` and the match (`===`) operator is used. This algorithm runs
in linear time.
_Please force use of the default algorithm unless you can implement an
algorithm that is significantly faster._
"""
@spec member?(t, term) :: {:ok, boolean} | {:error, module}
def member?(enumerable, element)
@doc """
Retrieves the enumerable's size.
It should return `{:ok, size}`.
If `{:error, __MODULE__}` is returned a default algorithm using
`reduce` and the match (`===`) operator is used. This algorithm runs
in linear time.
_Please force use of the default algorithm unless you can implement an
algorithm that is significantly faster._
"""
@spec count(t) :: {:ok, non_neg_integer} | {:error, module}
def count(enumerable)
end
defmodule Enum do
import Kernel, except: [max: 2, min: 2]
@moduledoc """
Provides a set of algorithms that enumerate over enumerables according
to the `Enumerable` protocol.
iex> Enum.map([1, 2, 3], fn(x) -> x * 2 end)
[2, 4, 6]
Some particular types, like maps, yield a specific format on enumeration.
For example, the argument is always a `{key, value}` tuple for maps:
iex> map = %{a: 1, b: 2}
iex> Enum.map(map, fn {k, v} -> {k, v * 2} end)
[a: 2, b: 4]
Note that the functions in the `Enum` module are eager: they always
start the enumeration of the given enumerable. The `Stream` module
allows lazy enumeration of enumerables and provides infinite streams.
Since the majority of the functions in `Enum` enumerate the whole
enumerable and return a list as result, infinite streams need to
be carefully used with such functions, as they can potentially run
forever. For example:
Enum.each Stream.cycle([1, 2, 3]), &IO.puts(&1)
"""
@compile :inline_list_funcs
@type t :: Enumerable.t
@type element :: any
@type index :: non_neg_integer
@type default :: any
# Require Stream.Reducers and its callbacks
require Stream.Reducers, as: R
defmacrop skip(acc) do
acc
end
defmacrop next(_, entry, acc) do
quote do: [unquote(entry)|unquote(acc)]
end
defmacrop acc(h, n, _) do
quote do: {unquote(h), unquote(n)}
end
defmacrop next_with_acc(f, entry, h, n, _) do
quote do
{[unquote(entry)|unquote(h)], unquote(n)}
end
end
@doc """
Invokes the given `fun` for each item in the enumerable.
It stops the iteration at the first invocation that returns `false` or `nil`.
It returns `false` if at least one invocation returns `false` or `nil`.
Otherwise returns `true`.
## Examples
iex> Enum.all?([2, 4, 6], fn(x) -> rem(x, 2) == 0 end)
true
iex> Enum.all?([2, 3, 4], fn(x) -> rem(x, 2) == 0 end)
false
If no function is given, it defaults to checking if
all items in the enumerable are truthy values.
iex> Enum.all?([1, 2, 3])
true
iex> Enum.all?([1, nil, 3])
false
"""
@spec all?(t) :: boolean
@spec all?(t, (element -> as_boolean(term))) :: boolean
def all?(enumerable, fun \\ fn(x) -> x end)
def all?(enumerable, fun) when is_list(enumerable) do
do_all?(enumerable, fun)
end
def all?(enumerable, fun) do
Enumerable.reduce(enumerable, {:cont, true}, fn(entry, _) ->
if fun.(entry), do: {:cont, true}, else: {:halt, false}
end) |> elem(1)
end
@doc """
Invokes the given `fun` for each item in the enumerable.
It stops the iteration at the first invocation that returns a truthy value.
Returns `true` if at least one invocation returns a truthy value.
Otherwise returns `false`.
## Examples
iex> Enum.any?([2, 4, 6], fn(x) -> rem(x, 2) == 1 end)
false
iex> Enum.any?([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
true
If no function is given, it defaults to checking if at least one item
in the enumerable is a truthy value.
iex> Enum.any?([false, false, false])
false
iex> Enum.any?([false, true, false])
true
"""
@spec any?(t) :: boolean
@spec any?(t, (element -> as_boolean(term))) :: boolean
def any?(enumerable, fun \\ fn(x) -> x end)
def any?(enumerable, fun) when is_list(enumerable) do
do_any?(enumerable, fun)
end
def any?(enumerable, fun) do
Enumerable.reduce(enumerable, {:cont, false}, fn(entry, _) ->
if fun.(entry), do: {:halt, true}, else: {:cont, false}
end) |> elem(1)
end
@doc """
Finds the element at the given `index` (zero-based).
Returns `default` if `index` is out of bounds.
A negative `index` can be passed, which means the `enumerable` is
enumerated once and the `index` is counted from the end (e.g.
`-1` finds the last element).
Note this operation takes linear time. In order to access
the element at index `index`, it will need to traverse `index`
previous elements.
## Examples
iex> Enum.at([2, 4, 6], 0)
2
iex> Enum.at([2, 4, 6], 2)
6
iex> Enum.at([2, 4, 6], 4)
nil
iex> Enum.at([2, 4, 6], 4, :none)
:none
"""
@spec at(t, integer, default) :: element | default
def at(enumerable, index, default \\ nil) do
case fetch(enumerable, index) do
{:ok, h} -> h
:error -> default
end
end
@doc """
Shortcut to `chunk(enumerable, count, count)`.
"""
@spec chunk(t, pos_integer) :: [list]
def chunk(enumerable, count), do: chunk(enumerable, count, count, nil)
@doc """
Returns list of lists containing `count` items each, where
each new chunk starts `step` elements into the enumerable.
`step` is optional and, if not passed, defaults to `count`, i.e.
chunks do not overlap.
If the final chunk does not have `count` elements to fill the chunk,
elements are taken as necessary from `pad` if it was passed.
If `pad` is passed and does not have enough elements to fill the
chunk, then the chunk is returned anyway with less than `count`
elements.
If `pad` is not passed at all or is `nil`, then the partial chunk is
discarded from the result.
## Examples
iex> Enum.chunk([1, 2, 3, 4, 5, 6], 2)
[[1, 2], [3, 4], [5, 6]]
iex> Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2)
[[1, 2, 3], [3, 4, 5]]
iex> Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2, [7])
[[1, 2, 3], [3, 4, 5], [5, 6, 7]]
iex> Enum.chunk([1, 2, 3, 4, 5, 6], 3, 3, [])
[[1, 2, 3], [4, 5, 6]]
"""
@spec chunk(t, pos_integer, pos_integer, t | nil) :: [list]
def chunk(enumerable, count, step, pad \\ nil) when count > 0
and step > 0 do
limit = :erlang.max(count, step)
{acc, {buffer, i}} =
reduce(enumerable, {[], {[], 0}}, R.chunk(count, step, limit))
if is_nil(pad) || i == 0 do
:lists.reverse(acc)
else
buffer = :lists.reverse(buffer, take(pad, count - i))
:lists.reverse([buffer|acc])
end
end
@doc """
Splits enumerable on every element for which `fun` returns a new
value.
Returns a list of lists.
## Examples
iex> Enum.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1))
[[1], [2, 2], [3], [4, 4, 6], [7, 7]]
"""
@spec chunk_by(t, (element -> any)) :: [list]
def chunk_by(enumerable, fun) do
{acc, res} = reduce(enumerable, {[], nil}, R.chunk_by(fun))
case res do
{buffer, _} ->
:lists.reverse([:lists.reverse(buffer) | acc])
nil ->
[]
end
end
@doc """
Given an enumerable of enumerables, concatenates the enumerables into
a single list.
## Examples
iex> Enum.concat([1..3, 4..6, 7..9])
[1, 2, 3, 4, 5, 6, 7, 8, 9]
iex> Enum.concat([[1, [2], 3], [4], [5, 6]])
[1, [2], 3, 4, 5, 6]
"""
@spec concat(t) :: t
def concat(enumerables) do
do_concat(enumerables)
end
@doc """
Concatenates the enumerable on the right with the enumerable on the
left.
This function produces the same result as the `Kernel.++/2` operator
for lists.
## Examples
iex> Enum.concat(1..3, 4..6)
[1, 2, 3, 4, 5, 6]
iex> Enum.concat([1, 2, 3], [4, 5, 6])
[1, 2, 3, 4, 5, 6]
"""
@spec concat(t, t) :: t
def concat(left, right) when is_list(left) and is_list(right) do
left ++ right
end
def concat(left, right) do
do_concat([left, right])
end
defp do_concat(enumerable) do
fun = &[&1|&2]
reduce(enumerable, [], &reduce(&1, &2, fun)) |> :lists.reverse
end
@doc """
Returns the size of the enumerable.
## Examples
iex> Enum.count([1, 2, 3])
3
"""
@spec count(t) :: non_neg_integer
def count(enumerable) when is_list(enumerable) do
:erlang.length(enumerable)
end
def count(enumerable) do
case Enumerable.count(enumerable) do
{:ok, value} when is_integer(value) ->
value
{:error, module} ->
module.reduce(enumerable, {:cont, 0}, fn
_, acc -> {:cont, acc + 1}
end) |> elem(1)
end
end
@doc """
Returns the count of items in the enumerable for which `fun` returns
a truthy value.
## Examples
iex> Enum.count([1, 2, 3, 4, 5], fn(x) -> rem(x, 2) == 0 end)
2
"""
@spec count(t, (element -> as_boolean(term))) :: non_neg_integer
def count(enumerable, fun) do
Enumerable.reduce(enumerable, {:cont, 0}, fn(entry, acc) ->
{:cont, if(fun.(entry), do: acc + 1, else: acc)}
end) |> elem(1)
end
@doc """
Enumerates the `enumerable`, returning a list where all consecutive
duplicated elements are collapsed to a single element.
Elements are compared using `===`.
## Examples
iex> Enum.dedup([1, 2, 3, 3, 2, 1])
[1, 2, 3, 2, 1]
iex> Enum.dedup([1, 1, 2, 2.0, :three, :"three"])
[1, 2, 2.0, :three]
"""
@spec dedup(t) :: list
def dedup(enumerable) do
dedup_by(enumerable, fn x -> x end)
end
@doc """
Enumerates the `enumerable`, returning a list where all consecutive
duplicated elements are collapsed to a single element.
The function `fun` maps every element to a term which is used to
determine if two elements are duplicates.
## Examples
iex> Enum.dedup_by([{1, :a}, {2, :b}, {2, :c}, {1, :a}], fn {x, _} -> x end)
[{1, :a}, {2, :b}, {1, :a}]
iex> Enum.dedup_by([5, 1, 2, 3, 2, 1], fn x -> x > 2 end)
[5, 1, 3, 2]
"""
@spec dedup_by(t, (element -> term)) :: list
def dedup_by(enumerable, fun) when is_function(fun, 1) do
{list, _} = reduce(enumerable, {[], []}, R.dedup(fun))
:lists.reverse(list)
end
  @doc """
  Drops the first `n` items from the enumerable.
  If a negative value `n` is given, the last `n` values will be dropped.
  The `enumerable` is enumerated once to retrieve the proper index and
  the remaining calculation is performed from the end.
  ## Examples
      iex> Enum.drop([1, 2, 3], 2)
      [3]
      iex> Enum.drop([1, 2, 3], 10)
      []
      iex> Enum.drop([1, 2, 3], 0)
      [1, 2, 3]
      iex> Enum.drop([1, 2, 3], -1)
      [1, 2]
  """
  @spec drop(t, integer) :: list
  def drop(enumerable, n) when is_list(enumerable) and n >= 0 do
    do_drop(enumerable, n)
  end
  def drop(enumerable, n) when n >= 0 do
    # The accumulator starts as the integer countdown of elements still to
    # skip, and switches to a (reversed) result list once it hits zero.
    res =
      reduce(enumerable, n, fn
        x, acc when is_list(acc) -> [x|acc]
        x, 0 -> [x]
        _, acc when acc > 0 -> acc - 1
      end)
    # `res` is still an integer when the enumerable had fewer than `n`
    # elements, so everything was dropped.
    if is_list(res), do: :lists.reverse(res), else: []
  end
  def drop(enumerable, n) when n < 0 do
    # Dropping from the end: reverse, drop from the front, reverse back.
    do_drop(reverse(enumerable), abs(n)) |> :lists.reverse
  end
@doc """
Drops items at the beginning of the enumerable while `fun` returns a
truthy value.
## Examples
iex> Enum.drop_while([1, 2, 3, 4, 5], fn(x) -> x < 3 end)
[3, 4, 5]
"""
@spec drop_while(t, (element -> as_boolean(term))) :: list
def drop_while(enumerable, fun) when is_list(enumerable) do
do_drop_while(enumerable, fun)
end
def drop_while(enumerable, fun) do
{res, _} = reduce(enumerable, {[], true}, R.drop_while(fun))
:lists.reverse(res)
end
@doc """
Invokes the given `fun` for each item in the enumerable.
Returns `:ok`.
## Examples
Enum.each(["some", "example"], fn(x) -> IO.puts x end)
"some"
"example"
#=> :ok
"""
@spec each(t, (element -> any)) :: :ok
def each(enumerable, fun) when is_list(enumerable) do
:lists.foreach(fun, enumerable)
:ok
end
def each(enumerable, fun) do
reduce(enumerable, nil, fn(entry, _) ->
fun.(entry)
nil
end)
:ok
end
@doc """
Determines if the enumerable is empty.
Returns `true` if `enumerable` is empty, otherwise `false`.
## Examples
iex> Enum.empty?([])
true
iex> Enum.empty?([1, 2, 3])
false
"""
@spec empty?(t) :: boolean
def empty?(enumerable) when is_list(enumerable) do
enumerable == []
end
def empty?(enumerable) do
case Enumerable.count(enumerable) do
{:ok, value} when is_integer(value) ->
value == 0
{:error, module} ->
module.reduce(enumerable, {:cont, true},
fn(_, _) -> {:halt, false} end)
|> elem(1)
end
end
@doc """
Finds the element at the given `index` (zero-based).
Returns `{:ok, element}` if found, otherwise `:error`.
A negative `index` can be passed, which means the `enumerable` is
enumerated once and the `index` is counted from the end (e.g.
`-1` fetches the last element).
Note this operation takes linear time. In order to access
the element at index `index`, it will need to traverse `index`
previous elements.
## Examples
iex> Enum.fetch([2, 4, 6], 0)
{:ok, 2}
iex> Enum.fetch([2, 4, 6], 2)
{:ok, 6}
iex> Enum.fetch([2, 4, 6], 4)
:error
"""
@spec fetch(t, integer) :: {:ok, element} | :error
  # Non-negative index into a list: direct recursive walk.
  def fetch(enumerable, index) when is_list(enumerable)
      and is_integer(index) and index >= 0 do
    do_fetch(enumerable, index)
  end
  # Non-negative index into any enumerable: count entries until the
  # accumulator equals `index`, then halt with that entry.
  def fetch(enumerable, index) when is_integer(index) and index >= 0 do
    res =
      Enumerable.reduce(enumerable, {:cont, 0}, fn(entry, acc) ->
        if acc == index do
          {:halt, entry}
        else
          {:cont, acc + 1}
        end
      end)
    case res do
      # :halted means the reduction stopped at the requested position.
      {:halted, entry} -> {:ok, entry}
      # :done means the enumerable ran out before reaching `index`.
      {:done, _} -> :error
    end
  end
  # Negative index: reverse once, then fetch from the front.
  # `abs(index + 1)` maps -1 -> 0, -2 -> 1, and so on.
  def fetch(enumerable, index) when is_integer(index) and index < 0 do
    do_fetch(reverse(enumerable), abs(index + 1))
  end
@doc """
Finds the element at the given `index` (zero-based).
Raises `OutOfBoundsError` if the given `index` is outside the range of
the enumerable.
Note this operation takes linear time. In order to access the element
at index `index`, it will need to traverse `index` previous elements.
## Examples
iex> Enum.fetch!([2, 4, 6], 0)
2
iex> Enum.fetch!([2, 4, 6], 2)
6
iex> Enum.fetch!([2, 4, 6], 4)
** (Enum.OutOfBoundsError) out of bounds error
"""
@spec fetch!(t, integer) :: element | no_return
def fetch!(enumerable, index) do
case fetch(enumerable, index) do
{:ok, h} -> h
:error -> raise Enum.OutOfBoundsError
end
end
@doc """
Filters the enumerable, i.e. returns only those elements
for which `fun` returns a truthy value.
## Examples
iex> Enum.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end)
[2]
"""
@spec filter(t, (element -> as_boolean(term))) :: list
def filter(enumerable, fun) when is_list(enumerable) do
for item <- enumerable, fun.(item), do: item
end
def filter(enumerable, fun) do
reduce(enumerable, [], R.filter(fun)) |> :lists.reverse
end
@doc """
Filters the enumerable and maps its elements in one pass.
## Examples
iex> Enum.filter_map([1, 2, 3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2))
[4]
"""
@spec filter_map(t, (element -> as_boolean(term)),
(element -> element)) :: list
def filter_map(enumerable, filter, mapper) when is_list(enumerable) do
for item <- enumerable, filter.(item), do: mapper.(item)
end
def filter_map(enumerable, filter, mapper) do
reduce(enumerable, [], R.filter_map(filter, mapper))
|> :lists.reverse
end
@doc """
Returns the first item for which `fun` returns a truthy value.
If no such item is found, returns `default`.
## Examples
iex> Enum.find([2, 4, 6], fn(x) -> rem(x, 2) == 1 end)
nil
iex> Enum.find([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end)
0
iex> Enum.find([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
3
"""
@spec find(t, default, (element -> any)) :: element | default
def find(enumerable, default \\ nil, fun)
def find(enumerable, default, fun) when is_list(enumerable) do
do_find(enumerable, default, fun)
end
def find(enumerable, default, fun) do
Enumerable.reduce(enumerable, {:cont, default}, fn(entry, default) ->
if fun.(entry), do: {:halt, entry}, else: {:cont, default}
end) |> elem(1)
end
@doc """
Similar to `find/3`, but returns the value of the function
invocation instead of the element itself.
## Examples
iex> Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end)
nil
iex> Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
true
iex> Enum.find_value([1, 2, 3], "no bools!", &is_boolean/1)
"no bools!"
"""
@spec find_value(t, any, (element -> any)) :: any | :nil
def find_value(enumerable, default \\ nil, fun)
def find_value(enumerable, default, fun) when is_list(enumerable) do
do_find_value(enumerable, default, fun)
end
def find_value(enumerable, default, fun) do
Enumerable.reduce(enumerable, {:cont, default}, fn(entry, default) ->
fun_entry = fun.(entry)
if fun_entry, do: {:halt, fun_entry}, else: {:cont, default}
end) |> elem(1)
end
@doc """
Similar to `find/3`, but returns the index (zero-based)
of the element instead of the element itself.
## Examples
iex> Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end)
nil
iex> Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
1
"""
@spec find_index(t, (element -> any)) :: index | :nil
def find_index(enumerable, fun) when is_list(enumerable) do
do_find_index(enumerable, 0, fun)
end
def find_index(enumerable, fun) do
res =
Enumerable.reduce(enumerable, {:cont, 0}, fn(entry, acc) ->
if fun.(entry), do: {:halt, acc}, else: {:cont, acc + 1}
end)
case res do
{:halted, entry} -> entry
{:done, _} -> nil
end
end
@doc """
Returns a new enumerable appending the result of invoking `fun` on
each corresponding item of `enumerable`.
The given function must return an enumerable.
## Examples
iex> Enum.flat_map([:a, :b, :c], fn(x) -> [x, x] end)
[:a, :a, :b, :b, :c, :c]
iex> Enum.flat_map([{1, 3}, {4, 6}], fn({x, y}) -> x..y end)
[1, 2, 3, 4, 5, 6]
"""
@spec flat_map(t, (element -> t)) :: list
def flat_map(enumerable, fun) do
reduce(enumerable, [], fn(entry, acc) ->
reduce(fun.(entry), acc, &[&1|&2])
end) |> :lists.reverse
end
@doc """
Maps and reduces an enumerable, flattening the given results.
It expects an accumulator and a function that receives each stream
item, and must return a tuple containing a new stream (often a list)
with the new accumulator or a tuple with `:halt` as first element and
the accumulator as second.
## Examples
iex> enum = 1..100
iex> n = 3
iex> Enum.flat_map_reduce(enum, 0, fn i, acc ->
...> if acc < n, do: {[i], acc + 1}, else: {:halt, acc}
...> end)
{[1, 2, 3], 3}
"""
@spec flat_map_reduce(t, acc, fun) :: {[any], any} when
fun: (element, acc -> {t, acc} | {:halt, acc}),
acc: any
def flat_map_reduce(enumerable, acc, fun) do
{_, {list, acc}} =
Enumerable.reduce(enumerable, {:cont, {[], acc}},
fn(entry, {list, acc}) ->
case fun.(entry, acc) do
{:halt, acc} ->
{:halt, {list, acc}}
{[], acc} ->
{:cont, {list, acc}}
{[entry], acc} ->
{:cont, {[entry|list], acc}}
{entries, acc} ->
{:cont, {reduce(entries, list, &[&1|&2]), acc}}
end
end)
{:lists.reverse(list), acc}
end
@doc """
Intersperses `element` between each element of the enumeration.
Complexity: O(n).
## Examples
iex> Enum.intersperse([1, 2, 3], 0)
[1, 0, 2, 0, 3]
iex> Enum.intersperse([1], 0)
[1]
iex> Enum.intersperse([], 0)
[]
"""
@spec intersperse(t, element) :: list
def intersperse(enumerable, element) do
list =
reduce(enumerable, [], fn(x, acc) ->
[x, element | acc]
end) |> :lists.reverse()
case list do
[] -> []
[_|t] -> t # Head is a superfluous intersperser element
end
end
@doc """
Inserts the given `enumerable` into a `collectable`.
## Examples
iex> Enum.into([1, 2], [0])
[0, 1, 2]
iex> Enum.into([a: 1, b: 2], %{})
%{a: 1, b: 2}
"""
@spec into(Enumerable.t, Collectable.t) :: Collectable.t
def into(enumerable, collectable) when is_list(collectable) do
collectable ++ to_list(enumerable)
end
def into(%{__struct__: _} = enumerable, collectable) do
do_into(enumerable, collectable)
end
def into(enumerable, %{__struct__: _} = collectable) do
do_into(enumerable, collectable)
end
def into(%{} = enumerable, %{} = collectable) do
Map.merge(collectable, enumerable)
end
def into(enumerable, %{} = collectable) when is_list(enumerable) do
Map.merge(collectable, :maps.from_list(enumerable))
end
def into(enumerable, %{} = collectable) do
reduce(enumerable, collectable, fn {k, v}, acc ->
Map.put(acc, k, v)
end)
end
def into(enumerable, collectable) do
do_into(enumerable, collectable)
end
defp do_into(enumerable, collectable) do
{initial, fun} = Collectable.into(collectable)
into(enumerable, initial, fun, fn x, acc ->
fun.(acc, {:cont, x})
end)
end
@doc """
Inserts the given `enumerable` into a `collectable` according to the
transformation function.
## Examples
iex> Enum.into([2, 3], [3], fn x -> x * 3 end)
[3, 6, 9]
"""
@spec into(Enumerable.t, Collectable.t, (term -> term))
:: Collectable.t
def into(enumerable, collectable, transform) when is_list(collectable)
and is_function(transform, 1) do
collectable ++ map(enumerable, transform)
end
def into(enumerable, collectable, transform)
when is_function(transform, 1) do
{initial, fun} = Collectable.into(collectable)
into(enumerable, initial, fun, fn x, acc ->
fun.(acc, {:cont, transform.(x)})
end)
end
defp into(enumerable, initial, fun, callback) do
try do
reduce(enumerable, initial, callback)
catch
kind, reason ->
stacktrace = System.stacktrace
fun.(initial, :halt)
:erlang.raise(kind, reason, stacktrace)
else
acc -> fun.(acc, :done)
end
end
@doc """
Joins the given enumerable into a binary using `joiner` as a
separator.
If `joiner` is not passed at all, it defaults to the empty binary.
All items in the enumerable must be convertible to a binary,
otherwise an error is raised.
## Examples
iex> Enum.join([1, 2, 3])
"123"
iex> Enum.join([1, 2, 3], " = ")
"1 = 2 = 3"
"""
@spec join(t, String.t) :: String.t
  def join(enumerable, joiner \\ "")
  def join(enumerable, joiner) when is_binary(joiner) do
    # Build iodata instead of concatenating binaries on every step; the
    # :first sentinel marks "no element seen yet" so no joiner is emitted
    # before the first entry.
    reduced = reduce(enumerable, :first, fn
      entry, :first -> enum_to_string(entry)
      entry, acc -> [acc, joiner|enum_to_string(entry)]
    end)
    # A still-:first accumulator means the enumerable was empty.
    if reduced == :first do
      ""
    else
      IO.iodata_to_binary reduced
    end
  end
@doc """
Returns a list where each item is the result of invoking
`fun` on each corresponding item of `enumerable`.
For maps, the function expects a key-value tuple.
## Examples
iex> Enum.map([1, 2, 3], fn(x) -> x * 2 end)
[2, 4, 6]
iex> Enum.map([a: 1, b: 2], fn({k, v}) -> {k, -v} end)
[a: -1, b: -2]
"""
@spec map(t, (element -> any)) :: list
def map(enumerable, fun)
def map(enumerable, fun) when is_list(enumerable) do
:lists.map(fun, enumerable)
end
def map(enumerable, fun) do
reduce(enumerable, [], R.map(fun)) |> :lists.reverse
end
@doc """
Maps and joins the given enumerable in one pass.
`joiner` can be either a binary or a list and the result will be of
the same type as `joiner`.
If `joiner` is not passed at all, it defaults to an empty binary.
All items in the enumerable must be convertible to a binary,
otherwise an error is raised.
## Examples
iex> Enum.map_join([1, 2, 3], &(&1 * 2))
"246"
iex> Enum.map_join([1, 2, 3], " = ", &(&1 * 2))
"2 = 4 = 6"
"""
@spec map_join(t, String.t, (element -> any)) :: String.t
def map_join(enumerable, joiner \\ "", mapper)
def map_join(enumerable, joiner, mapper) when is_binary(joiner) do
reduced = reduce(enumerable, :first, fn
entry, :first -> enum_to_string(mapper.(entry))
entry, acc -> [acc, joiner|enum_to_string(mapper.(entry))]
end)
if reduced == :first do
""
else
IO.iodata_to_binary reduced
end
end
  @doc """
  Invokes the given function for each item in the enumerable to reduce
  it to a single element, while keeping an accumulator.
  Returns a tuple where the first element is the mapped enumerable and
  the second one is the final accumulator.
  The function, `fun`, receives two arguments: the first one is the
  element, and the second one is the accumulator. `fun` must return
  a tuple with two elements in the form of `{result, accumulator}`.
  For maps, the first tuple element must be a `{key, value}` tuple.
  ## Examples
      iex> Enum.map_reduce([1, 2, 3], 0, fn(x, acc) -> {x * 2, x + acc} end)
      {[2, 4, 6], 6}
  """
  @spec map_reduce(t, any, (element, any -> {any, any})) :: {any, any}
  def map_reduce(enumerable, acc, fun) when is_list(enumerable) do
    # Lists delegate to the equivalent :lists BIF.
    :lists.mapfoldl(fun, acc, enumerable)
  end
  def map_reduce(enumerable, acc, fun) do
    # Accumulate mapped entries in reverse (cheap prepend), then restore
    # the original order with a single reverse at the end.
    {list, acc} = reduce(enumerable, {[], acc},
      fn(entry, {list, acc}) ->
        {new_entry, acc} = fun.(entry, acc)
        {[new_entry|list], acc}
      end)
    {:lists.reverse(list), acc}
  end
@doc """
Returns the biggest of the elements in the enumerable according
to Erlang's term ordering.
If more than one elements compare equal, the first one that was found
is returned.
Raises `Enum.EmptyError` if `enumerable` is empty.
## Examples
iex> Enum.max([1, 2, 3])
3
"""
@spec max(t) :: element | no_return
def max(enumerable) do
reduce(enumerable, &Kernel.max(&1, &2))
end
@doc """
Returns the biggest of the elements in the enumerable as calculated
by the given function.
If more than one elements compare equal, the first one that was found
is returned.
Raises `Enum.EmptyError` if `enumerable` is empty.
## Examples
iex> Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end)
"aaa"
"""
@spec max_by(t, (element -> any)) :: element | no_return
  # Non-empty list: seed with the first element so `fun` runs exactly once
  # per entry.
  def max_by([h|t], fun) do
    reduce(t, {h, fun.(h)}, fn(entry, {_, fun_max} = old) ->
      fun_entry = fun.(entry)
      # Strict > keeps the first of equal-ranking elements.
      if(fun_entry > fun_max, do: {entry, fun_entry}, else: old)
    end) |> elem(0)
  end
  def max_by([], _fun) do
    raise Enum.EmptyError
  end
  # Generic enumerables: :first marks "no element seen yet".
  def max_by(enumerable, fun) do
    result =
      reduce(enumerable, :first, fn
        entry, {_, fun_max} = old ->
          fun_entry = fun.(entry)
          if(fun_entry > fun_max, do: {entry, fun_entry}, else: old)
        entry, :first ->
          {entry, fun.(entry)}
      end)
    case result do
      :first -> raise Enum.EmptyError
      {entry, _} -> entry
    end
  end
@doc """
Checks if `element` exists within the enumerable.
Membership is tested with the match (`===`) operator.
## Examples
iex> Enum.member?(1..10, 5)
true
iex> Enum.member?(1..10, 5.0)
false
iex> Enum.member?([1.0, 2.0, 3.0], 2)
false
iex> Enum.member?([1.0, 2.0, 3.0], 2.000)
true
iex> Enum.member?([:a, :b, :c], :d)
false
"""
@spec member?(t, element) :: boolean
def member?(enumerable, element) when is_list(enumerable) do
:lists.member(element, enumerable)
end
def member?(enumerable, element) do
case Enumerable.member?(enumerable, element) do
{:ok, element} when is_boolean(element) ->
element
{:error, module} ->
module.reduce(enumerable, {:cont, false}, fn
v, _ when v === element -> {:halt, true}
_, _ -> {:cont, false}
end) |> elem(1)
end
end
@doc """
Returns the smallest of the elements in the enumerable according
to Erlang's term ordering.
If more than one elements compare equal, the first one that was found
is returned.
Raises `Enum.EmptyError` if `enumerable` is empty.
## Examples
iex> Enum.min([1, 2, 3])
1
"""
@spec min(t) :: element | no_return
def min(enumerable) do
reduce(enumerable, &Kernel.min(&1, &2))
end
@doc """
Returns the smallest of the elements in the enumerable as calculated
by the given function.
If more than one elements compare equal, the first one that was found
is returned.
Raises `Enum.EmptyError` if `enumerable` is empty.
## Examples
iex> Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end)
"a"
"""
@spec min_by(t, (element -> any)) :: element | no_return
def min_by([h|t], fun) do
reduce(t, {h, fun.(h)}, fn(entry, {_, fun_min} = old) ->
fun_entry = fun.(entry)
if(fun_entry < fun_min, do: {entry, fun_entry}, else: old)
end) |> elem(0)
end
def min_by([], _fun) do
raise Enum.EmptyError
end
def min_by(enumerable, fun) do
result =
reduce(enumerable, :first, fn
entry, {_, fun_min} = old ->
fun_entry = fun.(entry)
if(fun_entry < fun_min, do: {entry, fun_entry}, else: old)
entry, :first ->
{entry, fun.(entry)}
end)
case result do
:first -> raise Enum.EmptyError
{entry, _} -> entry
end
end
@doc """
Returns a tuple with the smallest and the biggest elements in the
enumerable according to Erlang's term ordering.
If more than one elements compare equal, the first one that was found
is picked.
Raises `Enum.EmptyError` if `enumerable` is empty.
## Examples
iex> Enum.min_max([2, 3, 1])
{1, 3}
"""
@spec min_max(t) :: {element, element} | no_return
def min_max(enumerable) do
result =
Enum.reduce(enumerable, :first, fn
entry, {min_value, max_value} ->
{Kernel.min(entry, min_value), Kernel.max(entry, max_value)}
entry, :first ->
{entry, entry}
end)
case result do
:first -> raise Enum.EmptyError
result -> result
end
end
@doc """
Returns a tuple with the smallest and the biggest elements in the
enumerable as calculated by the given function.
If more than one elements compare equal, the first one that was found
is picked.
Raises `Enum.EmptyError` if `enumerable` is empty.
## Examples
iex> Enum.min_max_by(["aaa", "bb", "c"], fn(x) -> String.length(x) end)
{"c", "aaa"}
"""
@spec min_max_by(t, (element -> any)) :: {element, element} | no_return
def min_max_by(enumerable, fun) do
result =
Enum.reduce(enumerable, :first, fn
entry, {{_, fun_min} = acc_min, {_, fun_max} = acc_max} ->
fun_entry = fun.(entry)
acc_min = if fun_entry < fun_min, do: {entry, fun_entry}, else: acc_min
acc_max = if fun_entry > fun_max, do: {entry, fun_entry}, else: acc_max
{acc_min, acc_max}
entry, :first ->
fun_entry = fun.(entry)
{{entry, fun_entry}, {entry, fun_entry}}
end)
case result do
:first ->
raise Enum.EmptyError
{{min_entry, _}, {max_entry, _}} ->
{min_entry, max_entry}
end
end
@doc """
Returns the sum of all elements.
Raises `ArithmeticError` if `enumerable` contains a non-numeric value.
## Examples
iex> Enum.sum([1, 2, 3])
6
"""
@spec sum(t) :: number
def sum(enumerable) do
reduce(enumerable, 0, &+/2)
end
@doc """
Partitions `enumerable` into two enumerables, where the first one
contains elements for which `fun` returns a truthy value, and the
second one – for which `fun` returns `false` or `nil`.
## Examples
iex> Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end)
{[2], [1, 3]}
"""
@spec partition(t, (element -> any)) :: {list, list}
def partition(enumerable, fun) do
{acc1, acc2} =
reduce(enumerable, {[], []}, fn(entry, {acc1, acc2}) ->
if fun.(entry) do
{[entry|acc1], acc2}
else
{acc1, [entry|acc2]}
end
end)
{:lists.reverse(acc1), :lists.reverse(acc2)}
end
@doc """
Splits the enumerable into groups based on `fun`.
The result is a map where each key is a group and each value is
a list of elements from enumerable for which `fun` returned that
group. Ordering is preserved.
## Examples
iex> Enum.group_by(~w{ant buffalo cat dingo}, &String.length/1)
%{3 => ["ant", "cat"], 7 => ["buffalo"], 5 => ["dingo"]}
"""
@spec group_by(t, (element -> any)) :: map
def group_by(enumerable, map \\ %{}, fun)
def group_by(enumerable, %{__struct__: _} = dict, fun) do
group_by_dict(enumerable, dict, fun)
end
def group_by(enumerable, map, fun) when is_map(map) do
reduce(reverse(enumerable), map, fn entry, categories ->
Map.update(categories, fun.(entry), [entry], &[entry|&1])
end)
end
def group_by(enumerable, dict, fun) do
group_by_dict(enumerable, dict, fun)
end
defp group_by_dict(enumerable, dict, fun) do
IO.write :stderr, "warning: Enum.group_by/3 with a dictionary is deprecated, please use a map instead\n" <>
Exception.format_stacktrace
reduce(reverse(enumerable), dict, fn(entry, categories) ->
Dict.update(categories, fun.(entry), [entry], &[entry|&1])
end)
end
@doc """
Invokes `fun` for each element in the `enumerable`, passing that
element and the accumulator `acc` as arguments. `fun`'s return value
is stored in `acc`.
Returns the accumulator.
## Examples
iex> Enum.reduce([1, 2, 3], 0, fn(x, acc) -> x + acc end)
6
"""
@spec reduce(t, any, (element, any -> any)) :: any
  def reduce(enumerable, acc, fun) when is_list(enumerable) do
    # Lists delegate straight to the foldl BIF.
    :lists.foldl(fun, acc, enumerable)
  end
  # NOTE: this clause must stay before the bare-map clause below, so that
  # structs are dispatched through their Enumerable implementation instead
  # of being folded as raw maps.
  def reduce(%{__struct__: _} = enumerable, acc, fun) do
    Enumerable.reduce(enumerable, {:cont, acc},
      fn x, acc -> {:cont, fun.(x, acc)} end) |> elem(1)
  end
  # Plain maps fold as {key, value} pairs without protocol dispatch.
  def reduce(%{} = enumerable, acc, fun) do
    :maps.fold(fn k, v, acc -> fun.({k, v}, acc) end, acc, enumerable)
  end
  # Everything else goes through the Enumerable protocol with a reducer
  # that never halts or suspends.
  def reduce(enumerable, acc, fun) do
    Enumerable.reduce(enumerable, {:cont, acc},
      fn x, acc -> {:cont, fun.(x, acc)} end) |> elem(1)
  end
@doc """
Invokes `fun` for each element in the `enumerable`, passing that
element and the accumulator as arguments. `fun`'s return value
is stored in the accumulator.
The first element of the enumerable is used as the initial value of
the accumulator.
If you wish to use another value for the accumulator, use
`Enumerable.reduce/3`.
This function won't call the specified function for enumerables that
are 1-element long.
Returns the accumulator.
Note that since the first element of the enumerable is used as the
initial value of the accumulator, `fun` will only be executed `n - 1`
times where `n` is the length of the enumerable.
## Examples
iex> Enum.reduce([1, 2, 3, 4], fn(x, acc) -> x * acc end)
24
"""
@spec reduce(t, (element, any -> any)) :: any
def reduce(enumerable, fun)
  # Non-empty list: the head seeds the accumulator for reduce/3.
  def reduce([h|t], fun) do
    reduce(t, h, fun)
  end
  def reduce([], _fun) do
    raise Enum.EmptyError
  end
  def reduce(enumerable, fun) do
    # :first marks "no element seen yet"; the running value is wrapped in
    # {:acc, x} so that an enumerable whose element is the literal :first
    # cannot be confused with the sentinel.
    result =
      Enumerable.reduce(enumerable, {:cont, :first}, fn
        x, :first ->
          {:cont, {:acc, x}}
        x, {:acc, acc} ->
          {:cont, {:acc, fun.(x, acc)}}
      end) |> elem(1)
    case result do
      :first -> raise Enum.EmptyError
      {:acc, acc} -> acc
    end
  end
@doc """
Reduces the enumerable until `halt` is emitted.
The return value for `fun` is expected to be `{:cont, acc}`, return
`{:halt, acc}` to end the reduction early.
Returns the accumulator.
## Examples
iex> Enum.reduce_while(1..100, 0, fn i, acc ->
...> if i < 3, do: {:cont, acc + i}, else: {:halt, acc}
...> end)
3
"""
def reduce_while(enumerable, acc, fun) do
Enumerable.reduce(enumerable, {:cont, acc}, fun) |> elem(1)
end
@doc """
Returns elements of `enumerable` for which the function `fun` returns
`false` or `nil`.
## Examples
iex> Enum.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end)
[1, 3]
"""
@spec reject(t, (element -> as_boolean(term))) :: list
def reject(enumerable, fun) when is_list(enumerable) do
for item <- enumerable, !fun.(item), do: item
end
def reject(enumerable, fun) do
reduce(enumerable, [], R.reject(fun)) |> :lists.reverse
end
@doc """
Returns a list of elements in `enumerable` in reverse order.
## Examples
iex> Enum.reverse([1, 2, 3])
[3, 2, 1]
"""
@spec reverse(t) :: list
def reverse(enumerable) when is_list(enumerable) do
:lists.reverse(enumerable)
end
def reverse(enumerable) do
reverse(enumerable, [])
end
@doc """
Reverses the elements in `enumerable`, appends the tail, and returns
it as a list.
This is an optimization for
`Enum.concat(Enum.reverse(enumerable), tail)`.
## Examples
iex> Enum.reverse([1, 2, 3], [4, 5, 6])
[3, 2, 1, 4, 5, 6]
"""
@spec reverse(t, t) :: list
def reverse(enumerable, tail) when is_list(enumerable)
and is_list(tail) do
:lists.reverse(enumerable, tail)
end
def reverse(enumerable, tail) do
reduce(enumerable, to_list(tail), fn(entry, acc) ->
[entry|acc]
end)
end
@doc """
Reverses the enumerable in the range from initial position `start`
through `count` elements.
If `count` is greater than the size of the rest of the enumerable,
then this function will reverse the rest of the enumerable.
## Examples
iex> Enum.reverse_slice([1, 2, 3, 4, 5, 6], 2, 4)
[1, 2, 6, 5, 4, 3]
"""
@spec reverse_slice(t, non_neg_integer, non_neg_integer) :: list
def reverse_slice(enumerable, start, count) when start >= 0
and count >= 0 do
# Work on the reversed list so the slice boundary can be located by walking
# from the back; the private reverse_slice/5 un-reverses everything but the
# target window.
list = reverse(enumerable)
length = length(list)
# Clamp the window so it never extends past the end of the enumerable.
count = Kernel.min(count, length - start)
if count > 0 do
reverse_slice(list, length, start + count, count, [])
else
# Nothing to reverse inside the window: restore original order and return.
:lists.reverse(list)
end
end
@doc """
Returns a random element of an enumerable.
Raises `Enum.EmptyError` if `enumerable` is empty.
This function uses Erlang's `:rand` module to calculate
the random value. Check its documentation for setting a
different random algorithm or a different seed.
The implementation is based on the
[reservoir sampling](https://en.wikipedia.org/wiki/Reservoir_sampling#Relation_to_Fisher-Yates_shuffle)
algorithm.
It assumes that the sample being returned can fit into memory;
the input `enumerable` doesn't have to, as it is traversed just once.
## Examples
# Although not necessary, let's seed the random algorithm
iex> :rand.seed(:exsplus, {1, 2, 3})
iex> Enum.random([1, 2, 3])
2
iex> Enum.random([1, 2, 3])
1
"""
@spec random(t) :: element | no_return
# Implemented as a 1-element reservoir sample, so the enumerable is
# traversed only once and need not fit in memory.
def random(enumerable) do
case take_random(enumerable, 1) do
[] -> raise Enum.EmptyError
[e] -> e
end
end
@doc """
Applies the given function to each element in the enumerable,
storing the result in a list and passing it as the accumulator
for the next computation.
## Examples
iex> Enum.scan(1..5, &(&1 + &2))
[1, 3, 6, 10, 15]
"""
@spec scan(t, (element, any -> any)) :: list
# scan/2 seeds the accumulator with the first element (:first sentinel is
# handled inside R.scan_2, defined outside this chunk).
def scan(enumerable, fun) do
{res, _} = reduce(enumerable, {[], :first}, R.scan_2(fun))
:lists.reverse(res)
end
@doc """
Applies the given function to each element in the enumerable,
storing the result in a list and passing it as the accumulator
for the next computation. Uses the given `acc` as the starting value.
## Examples
iex> Enum.scan(1..5, 0, &(&1 + &2))
[1, 3, 6, 10, 15]
"""
@spec scan(t, any, (element, any -> any)) :: list
# scan/3 is the explicitly-seeded variant; results accumulate reversed and
# are flipped once at the end.
def scan(enumerable, acc, fun) do
{res, _} = reduce(enumerable, {[], acc}, R.scan_3(fun))
:lists.reverse(res)
end
@doc """
Returns a list with the elements of `enumerable` shuffled.
This function uses Erlang's `:rand` module to calculate
the random value. Check its documentation for setting a
different random algorithm or a different seed.
## Examples
# Although not necessary, let's seed the random algorithm
iex> :rand.seed(:exsplus, {1, 2, 3})
iex> Enum.shuffle([1, 2, 3])
[2, 1, 3]
iex> Enum.shuffle([1, 2, 3])
[2, 3, 1]
"""
@spec shuffle(t) :: list
def shuffle(enumerable) do
  # Decorate-sort-undecorate: pair each element with a random float key,
  # sort by the key, then strip the keys off with unwrap/2.
  # Note: `:rand.uniform()` is written with parentheses — the bare
  # `:rand.uniform` form is an ambiguous zero-arity call that newer Elixir
  # versions warn about.
  randomized = reduce(enumerable, [], fn x, acc ->
    [{:rand.uniform(), x} | acc]
  end)
  unwrap(:lists.keysort(1, randomized), [])
end
@doc """
Returns a subset list of the given enumerable. Drops elements
until element position `start`, then takes `count` elements.
If the count is greater than `enumerable` length, it returns as
many as possible. If zero, then it returns `[]`.
## Examples
iex> Enum.slice(1..100, 5, 10)
[6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
iex> Enum.slice(1..10, 5, 100)
[6, 7, 8, 9, 10]
iex> Enum.slice(1..10, 5, 0)
[]
"""
@spec slice(t, integer, non_neg_integer) :: list
# count == 0 short-circuits without enumerating at all.
def slice(_enumerable, start, 0) when is_integer(start), do: []
# Negative start: materialize the enumerable to learn its length, then
# re-dispatch with the normalized non-negative start.
def slice(enumerable, start, count) when is_integer(start)
and start < 0 and is_integer(count) and count >= 0 do
{list, new_start} = enumerate_and_count(enumerable, start)
if new_start >= 0 do
slice(list, new_start, count)
else
[]
end
end
# List fast path.
def slice(enumerable, start, count) when is_list(enumerable)
and is_integer(start) and start >= 0 and is_integer(count)
and count > 0 do
do_slice(enumerable, start, count)
end
# Generic path: one protocol traversal that skips `start` items, collects
# `count` items (reversed), and halts as soon as the window is full.
def slice(enumerable, start, count) when is_integer(start)
and start >= 0 and is_integer(count) and count > 0 do
{_, _, list} = Enumerable.reduce(enumerable,
{:cont, {start, count, []}}, fn
_entry, {start, count, _list} when start > 0 ->
{:cont, {start-1, count, []}}
entry, {start, count, list} when count > 1 ->
{:cont, {start, count-1, [entry|list]}}
entry, {start, count, list} ->
{:halt, {start, count, [entry|list]}}
end) |> elem(1)
:lists.reverse(list)
end
@doc """
Returns a subset list of the given enumerable. Drops elements
until element position `range.first`, then takes elements until
element position `range.last` (inclusive).
Positions are calculated by adding the number of items in the
enumerable to negative positions (e.g. position -3 in an
enumerable with count 5 becomes position 2).
The first position (after adding count to negative positions) must be
smaller or equal to the last position.
If the start of the range is not a valid offset for the given
enumerable or if the range is in reverse order, returns `[]`.
## Examples
iex> Enum.slice(1..100, 5..10)
[6, 7, 8, 9, 10]
iex> Enum.slice(1..10, 5..20)
[6, 7, 8, 9, 10]
iex> Enum.slice(1..10, 11..20)
[]
iex> Enum.slice(1..10, 6..5)
[]
"""
@spec slice(t, Range.t) :: list
def slice(enumerable, range)
# Both bounds non-negative: delegate to slice/3 without ever counting the
# enumerable, so this clause also works on infinite streams.
def slice(enumerable, first..last) when is_integer(first)
and first >= 0 and is_integer(last) and last >= 0 do
# Simple case, which works on infinite enumerables
if last - first >= 0 do
slice(enumerable, first, last - first + 1)
else
[]
end
end
# At least one negative bound: the enumerable must be counted to translate
# negative positions into absolute ones before slicing.
def slice(enumerable, first..last) when is_integer(first)
and is_integer(last) do
{list, count} = enumerate_and_count(enumerable, 0)
corr_first = if first >= 0, do: first, else: first + count
corr_last = if last >= 0, do: last, else: last + count
length = corr_last - corr_first + 1
if corr_first >= 0 and length > 0 do
slice(list, corr_first, length)
else
[]
end
end
@doc """
Sorts the enumerable according to Erlang's term ordering.
Uses the merge sort algorithm.
## Examples
iex> Enum.sort([3, 2, 1])
[1, 2, 3]
"""
@spec sort(t) :: list
# List fast path: Erlang's built-in merge sort.
def sort(enumerable) when is_list(enumerable) do
:lists.sort(enumerable)
end
def sort(enumerable) do
sort(enumerable, &(&1 <= &2))
end
@doc """
Sorts the enumerable by the given function.
This function uses the merge sort algorithm. The given function should compare
two arguments, and return `false` if the first argument follows the second one.
## Examples
iex> Enum.sort([1, 2, 3], &(&1 > &2))
[3, 2, 1]
The sorting algorithm will be stable as long as the given function
returns `true` for values considered equal:
iex> Enum.sort ["some", "kind", "of", "monster"], &(byte_size(&1) <= byte_size(&2))
["of", "some", "kind", "monster"]
If the function does not return `true` for equal values, the sorting
is not stable and the order of equal terms may be shuffled.
For example:
iex> Enum.sort ["some", "kind", "of", "monster"], &(byte_size(&1) < byte_size(&2))
["of", "kind", "some", "monster"]
"""
@spec sort(t, (element, element -> boolean)) :: list
def sort(enumerable, fun) when is_list(enumerable) do
:lists.sort(fun, enumerable)
end
# Generic path: build sorted runs while reducing (sort_reducer/3), then
# merge them all in sort_terminator/2 — a streaming merge sort.
def sort(enumerable, fun) do
reduce(enumerable, [], &sort_reducer(&1, &2, fun))
|> sort_terminator(fun)
end
@doc """
Sorts the mapped results of the enumerable according to the `sorter`
function.
This function maps each element of the enumerable using the `mapper`
function. The enumerable is then sorted by the mapped elements
using the `sorter` function, which defaults to `Kernel.<=/2`
`sort_by/3` differs from `sort/2` in that it only calculates the
comparison value for each element in the enumerable once instead of
once for each element in each comparison.
If the same function is being called on both element, it's also more
compact to use `sort_by/3`.
This technique is also known as a
_[Schwartzian Transform](https://en.wikipedia.org/wiki/Schwartzian_transform)_,
or the _Lisp decorate-sort-undecorate idiom_ as the `mapper`
is decorating the original `enumerable`; then `sorter` is sorting the
decorations; and finally the enumerable is being undecorated so only
the original elements remain, but now in sorted order.
## Examples
Using the default `sorter` of `<=/2`:
iex> Enum.sort_by ["some", "kind", "of", "monster"], &byte_size/1
["of", "some", "kind", "monster"]
Using a custom `sorter` to override the order:
iex> Enum.sort_by ["some", "kind", "of", "monster"], &byte_size/1, &>=/2
["monster", "some", "kind", "of"]
"""
@spec sort_by(t, (element -> mapped_element),
(mapped_element, mapped_element -> boolean))
:: list when mapped_element: element
def sort_by(enumerable, mapper, sorter \\ &<=/2) do
  # Decorate: pair each element with its (once-computed) sort key.
  decorated = map(enumerable, &{&1, mapper.(&1)})
  # Sort by the key half of each pair.
  sorted = sort(decorated, fn {_el1, key1}, {_el2, key2} -> sorter.(key1, key2) end)
  # Undecorate: drop the keys, keeping the original elements in sorted order.
  map(sorted, fn {element, _key} -> element end)
end
@doc """
Splits the `enumerable` into two enumerables, leaving `count`
elements in the first one. If `count` is a negative number,
it starts counting from the back to the beginning of the
enumerable.
Be aware that a negative `count` implies the `enumerable`
will be enumerated twice: once to calculate the position, and
a second time to do the actual splitting.
## Examples
iex> Enum.split([1, 2, 3], 2)
{[1, 2], [3]}
iex> Enum.split([1, 2, 3], 10)
{[1, 2, 3], []}
iex> Enum.split([1, 2, 3], 0)
{[], [1, 2, 3]}
iex> Enum.split([1, 2, 3], -1)
{[1, 2], [3]}
iex> Enum.split([1, 2, 3], -5)
{[], [1, 2, 3]}
"""
@spec split(t, integer) :: {list, list}
# List fast path.
def split(enumerable, count) when is_list(enumerable) and count >= 0 do
do_split(enumerable, count, [])
end
# Generic path: a countdown decides which of the two (reversed) accumulators
# each element lands in; both are flipped at the end.
def split(enumerable, count) when count >= 0 do
{_, list1, list2} =
reduce(enumerable, {count, [], []},
fn(entry, {counter, acc1, acc2}) ->
if counter > 0 do
{counter - 1, [entry|acc1], acc2}
else
{counter, acc1, [entry|acc2]}
end
end)
{:lists.reverse(list1), :lists.reverse(list2)}
end
# Negative count: reverse first, split |count| off the back, un-reverse.
def split(enumerable, count) when count < 0 do
do_split_reverse(reverse(enumerable), abs(count), [])
end
@doc """
Splits enumerable in two at the position of the element for which
`fun` returns `false` for the first time.
## Examples
iex> Enum.split_while([1, 2, 3, 4], fn(x) -> x < 3 end)
{[1, 2], [3, 4]}
"""
@spec split_while(t, (element -> as_boolean(term))) :: {list, list}
def split_while(enumerable, fun) when is_list(enumerable) do
do_split_while(enumerable, fun, [])
end
# Generic path: an empty second accumulator means we are still in the
# "taking" phase; once it becomes non-empty, every remaining element goes
# into it unconditionally.
def split_while(enumerable, fun) do
{list1, list2} =
reduce(enumerable, {[], []}, fn
entry, {acc1, []} ->
if(fun.(entry), do: {[entry|acc1], []}, else: {acc1, [entry]})
entry, {acc1, acc2} ->
{acc1, [entry|acc2]}
end)
{:lists.reverse(list1), :lists.reverse(list2)}
end
@doc """
Takes the first `count` items from the enumerable.
`count` must be an integer. If a negative `count` is given, the last
`count` values will be taken.
For such, the enumerable is fully enumerated keeping up
to `2 * count` elements in memory. Once the end of the enumerable is
reached, the last `count` elements are returned.
## Examples
iex> Enum.take([1, 2, 3], 2)
[1, 2]
iex> Enum.take([1, 2, 3], 10)
[1, 2, 3]
iex> Enum.take([1, 2, 3], 0)
[]
iex> Enum.take([1, 2, 3], -1)
[3]
"""
@spec take(t, integer) :: list
def take(enumerable, count)
def take(_enumerable, 0), do: []
def take([], _count), do: []
# List fast path.
def take(enumerable, count) when is_list(enumerable)
and is_integer(count) and count > 0 do
do_take(enumerable, count)
end
# Generic positive path: halt the traversal as soon as `count` elements
# have been collected (the n == 1 clause halts with the final element).
def take(enumerable, count) when is_integer(count) and count > 0 do
{_, {res, _}} =
Enumerable.reduce(enumerable, {:cont, {[], count}},
fn(entry, {list, n}) ->
case n do
0 -> {:halt, {list, n}}
1 -> {:halt, {[entry|list], n - 1}}
_ -> {:cont, {[entry|list], n - 1}}
end
end)
:lists.reverse(res)
end
# Negative count: keep a sliding window of at most 2*count elements in two
# buffers; buf2 holds the last full window, buf1 the elements seen since.
def take(enumerable, count) when is_integer(count) and count < 0 do
count = abs(count)
{_count, buf1, buf2} =
reduce(enumerable, {0, [], []}, fn entry, {n, buf1, buf2} ->
buf1 = [entry|buf1]
n = n + 1
if n == count do
{0, [], buf1}
else
{n, buf1, buf2}
end
end)
do_take_last(buf1, buf2, count, [])
end
# Merge the two window buffers back into the last `count` elements, in order.
# buf1 (most recent, reversed) is drained first; buf2 fills any remainder.
defp do_take_last(_buf1, _buf2, 0, acc),
do: acc
defp do_take_last([], [], _, acc),
do: acc
defp do_take_last([], [h|t], count, acc),
do: do_take_last([], t, count-1, [h|acc])
defp do_take_last([h|t], buf2, count, acc),
do: do_take_last(t, buf2, count-1, [h|acc])
@doc """
Returns a list of every `nth` item in the enumerable,
starting with the first element.
The first item is always included, unless `nth` is 0.
The second argument specifying every `nth` item must be a non-negative
integer, otherwise `FunctionClauseError` will be raised.
## Examples
iex> Enum.take_every(1..10, 2)
[1, 3, 5, 7, 9]
iex> Enum.take_every(1..10, 0)
[]
iex> Enum.take_every([1, 2, 3], 1)
[1, 2, 3]
"""
@spec take_every(t, non_neg_integer) :: list | no_return
def take_every(enumerable, nth)
# nth == 1 is the identity; nth == 0 and the empty list short-circuit.
def take_every(enumerable, 1), do: to_list(enumerable)
def take_every(_enumerable, 0), do: []
def take_every([], _nth), do: []
# General case delegates to the R.take_every reducer (defined outside this
# chunk), collecting in reverse.
def take_every(enumerable, nth) when is_integer(nth) and nth > 0 do
{res, _} = reduce(enumerable, {[], :first}, R.take_every(nth))
:lists.reverse(res)
end
@doc """
Takes random items from the enumerable.
Notice this function will traverse the whole enumerable to
get the random sublist of `enumerable`.
See `random/1` for notes on implementation and random seed.
## Examples
# Although not necessary, let's seed the random algorithm
iex> :rand.seed(:exsplus, {1, 2, 3})
iex> Enum.take_random(1..10, 2)
[5, 8]
iex> Enum.take_random(?a..?z, 5)
'fhjni'
"""
@spec take_random(t, integer) :: list
def take_random(_enumerable, 0), do: []
# Picking one element from a range needs no traversal: normalize the range
# direction, then draw a uniform index.
def take_random(first..last, 1) when first > last do
take_random(last..first, 1)
end
def take_random(first..last, 1) do
[random_index(last - first) + first]
end
# Large samples (> 128): reservoir sampling with a map as the reservoir,
# since building/updating a 129+-slot tuple per element would be costly.
def take_random(enumerable, count) when count > 128 do
reducer = fn(elem, {idx, sample}) ->
jdx = random_index(idx)
cond do
idx < count ->
# Still filling the reservoir: place the new element at a random slot
# and move the displaced value to the end.
value = Map.get(sample, jdx)
{idx + 1, Map.put(sample, idx, value) |> Map.put(jdx, elem)}
jdx < count ->
# Reservoir full: replace slot jdx with probability count/idx.
{idx + 1, Map.put(sample, jdx, elem)}
true ->
{idx + 1, sample}
end
end
{size, sample} = reduce(enumerable, {0, %{}}, reducer)
take_random(sample, Kernel.min(count, size), [])
end
# Small samples: same reservoir algorithm, but a tuple reservoir is faster.
def take_random(enumerable, count) when count > 0 do
sample = Tuple.duplicate(nil, count)
reducer = fn(elem, {idx, sample}) ->
jdx = random_index(idx)
cond do
idx < count ->
value = elem(sample, jdx)
{idx + 1, put_elem(sample, idx, value) |> put_elem(jdx, elem)}
jdx < count ->
{idx + 1, put_elem(sample, jdx, elem)}
true ->
{idx + 1, sample}
end
end
{size, sample} = reduce(enumerable, {0, sample}, reducer)
sample |> Tuple.to_list |> take(Kernel.min(count, size))
end
# Converts the map reservoir (keys 0..position-1) into an ordered list.
defp take_random(_sample, 0, acc), do: acc
defp take_random(sample, position, acc) do
position = position - 1
acc = [Map.get(sample, position) | acc]
take_random(sample, position, acc)
end
@doc """
Takes the items from the beginning of the enumerable while `fun` returns
a truthy value.
## Examples
iex> Enum.take_while([1, 2, 3], fn(x) -> x < 3 end)
[1, 2]
"""
@spec take_while(t, (element -> as_boolean(term))) :: list
def take_while(enumerable, fun) when is_list(enumerable) do
do_take_while(enumerable, fun)
end
# Generic path: halt the protocol traversal at the first falsy element.
def take_while(enumerable, fun) do
{_, res} =
Enumerable.reduce(enumerable, {:cont, []}, fn(entry, acc) ->
if fun.(entry) do
{:cont, [entry|acc]}
else
{:halt, acc}
end
end)
:lists.reverse(res)
end
@doc """
Converts `enumerable` to a list.
## Examples
iex> Enum.to_list(1..3)
[1, 2, 3]
"""
@spec to_list(t) :: [element]
# A list already is a list — return it untouched.
def to_list(enumerable) when is_list(enumerable), do: enumerable
# Otherwise reverse into a list and flip it back, which materializes the
# enumerable in a single traversal.
def to_list(enumerable) do
  enumerable |> reverse() |> :lists.reverse()
end
@doc """
Enumerates the `enumerable`, removing all duplicated elements.
## Examples
iex> Enum.uniq([1, 2, 3, 3, 2, 1])
[1, 2, 3]
"""
@spec uniq(t) :: list
# uniq/1 is uniq_by/2 with the identity function.
def uniq(enumerable) do
uniq_by(enumerable, fn x -> x end)
end
@doc false
# Deprecated alias for uniq_by/2, kept for backwards compatibility.
def uniq(enumerable, fun) do
# TODO: Deprecate on 1.3 or 1.4 depending on warnings on projects
# IO.write :stderr, "warning: Enum.uniq/2 is deprecated, please use Enum.uniq_by/2 instead\n" <>
# Exception.format_stacktrace
uniq_by(enumerable, fun)
end
@doc """
Enumerates the `enumerable`, by removing the elements for which
function `fun` returned duplicate items.
The function `fun` maps every element to a term which is used to
determine if two elements are duplicates.
## Example
iex> Enum.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end)
[{1, :x}, {2, :y}]
iex> Enum.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end)
[a: {:tea, 2}, c: {:coffee, 1}]
"""
@spec uniq_by(t, (element -> term)) :: list
# List fast path; a map tracks keys already seen.
def uniq_by(enumerable, fun) when is_list(enumerable) do
do_uniq(enumerable, %{}, fun)
end
# Generic path via the R.uniq reducer (defined outside this chunk).
def uniq_by(enumerable, fun) do
{list, _} = reduce(enumerable, {[], %{}}, R.uniq(fun))
:lists.reverse(list)
end
@doc """
Opposite of `Enum.zip/2`; extracts a two-element tuples from the
enumerable and groups them together.
It takes an enumerable with items being two-element tuples and returns
a tuple with two lists, each of which is formed by the first and
second element of each tuple, respectively.
This function fails unless `enumerable` is or can be converted into a
list of tuples with *exactly* two elements in each tuple.
## Examples
iex> Enum.unzip([{:a, 1}, {:b, 2}, {:c, 3}])
{[:a, :b, :c], [1, 2, 3]}
iex> Enum.unzip(%{a: 1, b: 2})
{[:a, :b], [1, 2]}
"""
@spec unzip(t) :: {[element], [element]}
def unzip(enumerable) do
  # Accumulate the two halves of each pair into separate (reversed) lists;
  # the head pattern raises if any item is not a 2-tuple, as documented.
  {firsts, seconds} =
    reduce(enumerable, {[], []}, fn {el1, el2}, {acc1, acc2} ->
      {[el1 | acc1], [el2 | acc2]}
    end)

  {:lists.reverse(firsts), :lists.reverse(seconds)}
end
@doc """
Zips corresponding elements from two enumerables into one list
of tuples.
The zipping finishes as soon as any enumerable completes.
## Examples
iex> Enum.zip([1, 2, 3], [:a, :b, :c])
[{1, :a}, {2, :b}, {3, :c}]
iex> Enum.zip([1, 2, 3, 4, 5], [:a, :b, :c])
[{1, :a}, {2, :b}, {3, :c}]
"""
@spec zip(t, t) :: [{any, any}]
def zip(enumerable1, enumerable2) when is_list(enumerable1)
and is_list(enumerable2) do
do_zip(enumerable1, enumerable2)
end
# Generic path: Stream.zip/2 returns a reducer function, which is invoked
# directly here with a cons-ing step to collect the pairs eagerly.
def zip(enumerable1, enumerable2) do
Stream.zip(enumerable1, enumerable2).({:cont, []}, &{:cont, [&1|&2]})
|> elem(1)
|> :lists.reverse
end
@doc """
Returns the enumerable with each element wrapped in a tuple
alongside its index.
## Examples
iex> Enum.with_index([:a, :b, :c])
[a: 0, b: 1, c: 2]
iex> Enum.with_index([:a, :b, :c], 3)
[a: 3, b: 4, c: 5]
"""
@spec with_index(t) :: [{element, integer}]
@spec with_index(t, integer) :: [{element, integer}]
# The running index is threaded through map_reduce as the accumulator,
# starting at `offset`.
def with_index(enumerable, offset \\ 0) do
map_reduce(enumerable, offset, fn x, acc ->
{{x, acc}, acc + 1}
end) |> elem(0)
end
## Helpers
@compile {:inline, enum_to_string: 1}
# Returns {materialized_list, length - |count|} for lists; for other
# enumerables it converts via map_reduce while counting in one pass.
defp enumerate_and_count(enumerable, count) when is_list(enumerable) do
{enumerable, length(enumerable) - abs(count)}
end
defp enumerate_and_count(enumerable, count) do
map_reduce(enumerable, -abs(count), fn(x, acc) -> {x, acc + 1} end)
end
# Binaries pass through untouched; everything else goes via String.Chars.
defp enum_to_string(entry) when is_binary(entry), do: entry
defp enum_to_string(entry), do: String.Chars.to_string(entry)
# Uniform random integer in 0..n (inclusive).
defp random_index(n) do
:rand.uniform(n + 1) - 1
end
## Implementations

## all?

# True only when `fun` is truthy for every element; short-circuits on the
# first falsy result. The empty list is vacuously true.
defp do_all?([head | rest], fun) do
  if fun.(head), do: do_all?(rest, fun), else: false
end

defp do_all?([], _fun), do: true
## any?

# True as soon as `fun` is truthy for some element; false for the empty list.
defp do_any?([head | rest], fun) do
  if fun.(head), do: true, else: do_any?(rest, fun)
end

defp do_any?([], _fun), do: false
## fetch

# Walks `index` steps into the list; {:ok, element} on hit, :error past the end.
defp do_fetch([head | _rest], 0), do: {:ok, head}
defp do_fetch([_head | rest], index), do: do_fetch(rest, index - 1)
defp do_fetch([], _index), do: :error
## drop

# Discards elements while the countdown is positive; returns the remainder.
# Dropping past the end yields [].
defp do_drop([_head | rest], remaining) when remaining > 0 do
  do_drop(rest, remaining - 1)
end

defp do_drop(rest, 0), do: rest
defp do_drop([], _remaining), do: []
## drop_while

# Skips the leading run of elements for which `fun` is truthy and returns
# the rest of the list unchanged (including the first falsy element).
defp do_drop_while([head | rest] = list, fun) do
  if fun.(head), do: do_drop_while(rest, fun), else: list
end

defp do_drop_while([], _fun), do: []
## find

# Returns the first element for which `fun` is truthy, or `default` if none.
defp do_find([head | rest], default, fun) do
  if fun.(head), do: head, else: do_find(rest, default, fun)
end

defp do_find([], default, _fun), do: default
## find_index

# Returns the running index of the first element for which `fun` is truthy,
# or nil when the list is exhausted.
defp do_find_index([head | rest], index, fun) do
  if fun.(head), do: index, else: do_find_index(rest, index + 1, fun)
end

defp do_find_index([], _index, _fun), do: nil
## find_value

# Returns the first truthy value produced by `fun` (not the element itself),
# or `default` if every result is falsy.
defp do_find_value([head | rest], default, fun) do
  fun.(head) || do_find_value(rest, default, fun)
end

defp do_find_value([], default, _fun), do: default
## shuffle

# Strips the random sort keys added by shuffle/1: turns a keysorted list of
# {key, value} pairs into a list of values (reversed onto `acc`).
defp unwrap([{_key, value} | rest], acc), do: unwrap(rest, [value | acc])
defp unwrap([], acc), do: acc
## sort
# The sort helpers implement a bottom-up merge sort that builds ascending or
# descending runs while reducing. The accumulator is either a plain list
# (fewer than 2 elements seen), a {:split, ...} tuple (current run of 2+
# elements plus completed runs `rs`), or a {:pivot, ...} tuple (a stray
# element `s` waiting to start the next run). `bool` records the comparison
# direction of the current run. Statement order is significant; do not
# reorder clauses.
defp sort_reducer(entry, {:split, y, x, r, rs, bool}, fun) do
cond do
fun.(y, entry) == bool ->
{:split, entry, y, [x|r], rs, bool}
fun.(x, entry) == bool ->
{:split, y, entry, [x|r], rs, bool}
r == [] ->
{:split, y, x, [entry], rs, bool}
true ->
{:pivot, y, x, r, rs, entry, bool}
end
end
defp sort_reducer(entry, {:pivot, y, x, r, rs, s, bool}, fun) do
cond do
fun.(y, entry) == bool ->
{:pivot, entry, y, [x | r], rs, s, bool}
fun.(x, entry) == bool ->
{:pivot, y, entry, [x | r], rs, s, bool}
fun.(s, entry) == bool ->
# The pivot and new entry start a fresh run; the old run joins `rs`.
{:split, entry, s, [], [[y, x | r] | rs], bool}
true ->
{:split, s, entry, [], [[y, x | r] | rs], bool}
end
end
# Second element seen: open the first run and fix the comparison direction.
defp sort_reducer(entry, [x], fun) do
{:split, entry, x, [], [], fun.(x, entry)}
end
# Zero or one element seen so far: accumulate as a plain list.
defp sort_reducer(entry, acc, _fun) do
[entry|acc]
end
# Flushes the in-progress run(s) and merges everything into the final list.
defp sort_terminator({:split, y, x, r, rs, bool}, fun) do
sort_merge([[y, x | r] | rs], fun, bool)
end
defp sort_terminator({:pivot, y, x, r, rs, s, bool}, fun) do
sort_merge([[s], [y, x | r] | rs], fun, bool)
end
# 0- or 1-element input never left the plain-list state; already sorted.
defp sort_terminator(acc, _fun) do
acc
end
# Pairwise run merging. `bool` tracks whether runs are currently in reversed
# order; merging flips direction each pass (sort_merge <-> reverse_sort_merge)
# until a single run remains. Clause order and the alternation between the
# two merge directions are what make the sort stable — do not reorder.
defp sort_merge(list, fun, true), do:
reverse_sort_merge(list, [], fun, true)
defp sort_merge(list, fun, false), do:
sort_merge(list, [], fun, false)
defp sort_merge([t1, [h2 | t2] | l], acc, fun, true), do:
sort_merge(l, [sort_merge_1(t1, h2, t2, [], fun, false) | acc], fun, true)
defp sort_merge([[h2 | t2], t1 | l], acc, fun, false), do:
sort_merge(l, [sort_merge_1(t1, h2, t2, [], fun, false) | acc], fun, false)
defp sort_merge([l], [], _fun, _bool), do: l
# An odd run left over: reverse it and fold it into the next pass.
defp sort_merge([l], acc, fun, bool), do:
reverse_sort_merge([:lists.reverse(l, []) | acc], [], fun, bool)
defp sort_merge([], acc, fun, bool), do:
reverse_sort_merge(acc, [], fun, bool)
defp reverse_sort_merge([[h2 | t2], t1 | l], acc, fun, true), do:
reverse_sort_merge(l, [sort_merge_1(t1, h2, t2, [], fun, true) | acc], fun, true)
defp reverse_sort_merge([t1, [h2 | t2] | l], acc, fun, false), do:
reverse_sort_merge(l, [sort_merge_1(t1, h2, t2, [], fun, true) | acc], fun, false)
defp reverse_sort_merge([l], acc, fun, bool), do:
sort_merge([:lists.reverse(l, []) | acc], [], fun, bool)
defp reverse_sort_merge([], acc, fun, bool), do:
sort_merge(acc, [], fun, bool)
# Merges two runs element by element; sort_merge_1/sort_merge_2 alternate
# depending on which run currently supplies the smaller (per `fun`/`bool`)
# head. When one run empties, the other is reversed onto the result.
defp sort_merge_1([h1 | t1], h2, t2, m, fun, bool) do
if fun.(h1, h2) == bool do
sort_merge_2(h1, t1, t2, [h2 | m], fun, bool)
else
sort_merge_1(t1, h2, t2, [h1 | m], fun, bool)
end
end
defp sort_merge_1([], h2, t2, m, _fun, _bool), do:
:lists.reverse(t2, [h2 | m])
defp sort_merge_2(h1, t1, [h2 | t2], m, fun, bool) do
if fun.(h1, h2) == bool do
sort_merge_2(h1, t1, t2, [h2 | m], fun, bool)
else
sort_merge_1(t1, h2, t2, [h1 | m], fun, bool)
end
end
defp sort_merge_2(h1, t1, [], m, _fun, _bool), do:
:lists.reverse(t1, [h1 | m])
## reverse_slice
# Operates on the already-reversed list (see public reverse_slice/3): walks
# `idx` down to `start` accumulating the tail, then re-reverses everything
# except the `count`-element window, which stays reversed — i.e. back in a
# reversed order relative to the original input.
defp reverse_slice(rest, idx, idx, count, acc) do
{slice, rest} = head_slice(rest, count, [])
:lists.reverse(rest, :lists.reverse(slice, acc))
end
defp reverse_slice([elem | rest], idx, start, count, acc) do
reverse_slice(rest, idx - 1, start, count, [elem | acc])
end
# Detaches the first `count` elements (reversed) from `rest`.
defp head_slice(rest, 0, acc), do: {acc, rest}
defp head_slice([elem | rest], count, acc) do
head_slice(rest, count - 1, [elem | acc])
end
## split

# Splits a list after `counter` elements: {taken (in order), remainder}.
defp do_split([head | rest], counter, acc) when counter > 0 do
  do_split(rest, counter - 1, [head | acc])
end

defp do_split(rest, 0, acc), do: {:lists.reverse(acc), rest}
defp do_split([], _counter, acc), do: {:lists.reverse(acc), []}

# Same split but driven from a pre-reversed list (used for negative counts):
# accumulating off a reversed list restores original order in the second
# half, and the first half is re-reversed on return.
defp do_split_reverse([head | rest], counter, acc) when counter > 0 do
  do_split_reverse(rest, counter - 1, [head | acc])
end

defp do_split_reverse(rest, 0, acc), do: {:lists.reverse(rest), acc}
defp do_split_reverse([], _counter, acc), do: {[], acc}
## split_while

# Splits at the first element for which `fun` is falsy; that element starts
# the second half. Returns {prefix (in order), rest}.
defp do_split_while([head | rest] = list, fun, acc) do
  if fun.(head) do
    do_split_while(rest, fun, [head | acc])
  else
    {:lists.reverse(acc), list}
  end
end

defp do_split_while([], _fun, acc), do: {:lists.reverse(acc), []}
## take

# Body-recursively copies the first `counter` elements; stops early when the
# list runs out.
defp do_take([head | rest], counter) when counter > 0 do
  [head | do_take(rest, counter - 1)]
end

defp do_take(_rest, 0), do: []
defp do_take([], _counter), do: []
## take_while

# Copies the leading run of elements for which `fun` is truthy; the first
# falsy element ends the result.
defp do_take_while([head | rest], fun) do
  if fun.(head), do: [head | do_take_while(rest, fun)], else: []
end

defp do_take_while([], _fun), do: []
## uniq

# Keeps the first element for each distinct key `fun.(element)`, preserving
# order; `seen` is a map used as a set of keys already emitted.
defp do_uniq([head | rest], seen, fun) do
  key = fun.(head)

  if Map.has_key?(seen, key) do
    do_uniq(rest, seen, fun)
  else
    [head | do_uniq(rest, Map.put(seen, key, true), fun)]
  end
end

defp do_uniq([], _seen, _fun), do: []
## zip

# Pairs elements positionally; stops at the end of the shorter list.
defp do_zip([head1 | rest1], [head2 | rest2]) do
  [{head1, head2} | do_zip(rest1, rest2)]
end

defp do_zip(_rest1, []), do: []
defp do_zip([], _rest2), do: []
## slice

# Skips `start` elements, then copies `count` elements. Clause order matters:
# exhaustion and count == 0 are checked before the copy/skip clauses.
defp do_slice([], _start, _count), do: []
defp do_slice(_list, _start, 0), do: []

defp do_slice([head | rest], 0, count) do
  [head | do_slice(rest, 0, count - 1)]
end

defp do_slice([_head | rest], start, count) do
  do_slice(rest, start - 1, count)
end
end
# Enumerable protocol implementation for plain lists: a direct recursive
# reducer. member?/count return {:error, __MODULE__} to tell the protocol
# dispatcher to fall back to the default reduce-based algorithms.
defimpl Enumerable, for: List do
def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
# Suspension captures the remaining list in a continuation closure.
def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)}
def reduce([], {:cont, acc}, _fun), do: {:done, acc}
def reduce([h|t], {:cont, acc}, fun), do: reduce(t, fun.(h, acc), fun)
def member?(_list, _value),
do: {:error, __MODULE__}
def count(_list),
do: {:error, __MODULE__}
end
# Enumerable protocol implementation for maps: reduce converts to a
# {key, value} list once up front, so the fun sees 2-tuples. member?/count
# have O(1) native answers, so they do not fall back to the default.
defimpl Enumerable, for: Map do
def reduce(map, acc, fun) do
do_reduce(:maps.to_list(map), acc, fun)
end
defp do_reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
defp do_reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &do_reduce(list, &1, fun)}
defp do_reduce([], {:cont, acc}, _fun), do: {:done, acc}
defp do_reduce([h|t], {:cont, acc}, fun), do: do_reduce(t, fun.(h, acc), fun)
# Membership is only defined for {key, value} pairs; the value must match
# exactly (pin) for the key.
def member?(map, {key, value}) do
{:ok, match?({:ok, ^value}, :maps.find(key, map))}
end
def member?(_map, _other) do
{:ok, false}
end
def count(map) do
{:ok, map_size(map)}
end
end
# Enumerable protocol implementation for 2-arity functions: the function IS
# the reducer (this is how streams enumerate). member?/count fall back to
# the default reduce-based algorithms via {:error, __MODULE__}.
defimpl Enumerable, for: Function do
def reduce(function, acc, fun) when is_function(function, 2),
do: function.(acc, fun)
def member?(_function, _value),
do: {:error, __MODULE__}
def count(_function),
do: {:error, __MODULE__}
end
| 25.730249
| 111
| 0.602971
|
9e4537e1193968c0455d2409aeafc3a554460e36
| 1,942
|
exs
|
Elixir
|
rel/config.exs
|
thomasvolk/lighthouse
|
566f4029a1ef1e5863e45b7f9b1b7afa914980ec
|
[
"Apache-2.0"
] | null | null | null |
rel/config.exs
|
thomasvolk/lighthouse
|
566f4029a1ef1e5863e45b7f9b1b7afa914980ec
|
[
"Apache-2.0"
] | null | null | null |
rel/config.exs
|
thomasvolk/lighthouse
|
566f4029a1ef1e5863e45b7f9b1b7afa914980ec
|
[
"Apache-2.0"
] | null | null | null |
# Distillery release configuration script.
# Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
Path.join(["rel", "plugins", "*.exs"])
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))
use Mix.Releases.Config,
# This sets the default release built by `mix release`
default_release: :default,
# This sets the default environment used by `mix release`
default_environment: Mix.env()
# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/configuration.html
# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile
environment :dev do
# If you are running Phoenix, you should make sure that
# server: true is set and the code reloader is disabled,
# even in dev mode.
# It is recommended that you build with MIX_ENV=prod and pass
# the --env flag to Distillery explicitly if you want to use
# dev mode.
set dev_mode: true
set include_erts: false
# NOTE(review): distribution cookies are committed to source control here;
# consider generating them at release time instead.
set cookie: :"~9IWj<SRR8zz)S4VoTit!qF.A=3L[/k_ldt]E^8=Yn?H[R78gAP_28M2h<(c)ql1"
end
environment :prod do
set include_erts: true
set include_src: false
set cookie: :"&)x}SSlo,;nYP[;|T=owH(rVzbr~%~Zm&Vlj7i%F_qTc@2qU9*,l/2c2D.v[Ltm:"
end
environment :integration_test do
set include_erts: true
set include_src: false
# NOTE(review): this cookie is identical to the :prod cookie, so test and
# production nodes could cluster with each other — confirm this is intended.
set cookie: :"&)x}SSlo,;nYP[;|T=owH(rVzbr~%~Zm&Vlj7i%F_qTc@2qU9*,l/2c2D.v[Ltm:"
end
# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default
release :lighthouse do
set version: current_version(:lighthouse)
set applications: [
:runtime_tools
]
end
| 32.366667
| 81
| 0.731205
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.