hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
038b04663d2e8b06ba1234474410ccd2d71dd1c3
| 1,717
|
ex
|
Elixir
|
clients/tasks/lib/google_api/tasks/v1/model/task_lists.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/tasks/lib/google_api/tasks/v1/model/task_lists.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/tasks/lib/google_api/tasks/v1/model/task_lists.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | 1
|
2018-07-28T20:50:50.000Z
|
2018-07-28T20:50:50.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Tasks.V1.Model.TaskLists do
  @moduledoc """
  A page of Google Tasks task lists, as returned by the Tasks API.

  ## Attributes

  - etag (String): ETag of the resource. Defaults to: `null`.
  - items (List[TaskList]): Collection of task lists. Defaults to: `null`.
  - kind (String): Type of the resource. This is always \"tasks#taskLists\". Defaults to: `null`.
  - nextPageToken (String): Token that can be used to request the next page of this result. Defaults to: `null`.
  """

  # Every field defaults to nil; values are filled in by the Poison decoder
  # implementation that accompanies this model.
  defstruct [:etag, :items, :kind, :nextPageToken]
end
defimpl Poison.Decoder, for: GoogleApi.Tasks.V1.Model.TaskLists do
  import GoogleApi.Tasks.V1.Deserializer

  # Post-processes the raw decoded struct: converts each element of the
  # "items" list into a GoogleApi.Tasks.V1.Model.TaskList. The remaining
  # fields (etag, kind, nextPageToken) need no conversion.
  def decode(value, options) do
    value
    |> deserialize(:"items", :list, GoogleApi.Tasks.V1.Model.TaskList, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Tasks.V1.Model.TaskLists do
  # Delegates encoding to the shared serializer helper; per its name it
  # presumably omits nil fields from the JSON output — confirm in
  # GoogleApi.Tasks.V1.Deserializer.
  def encode(value, options) do
    GoogleApi.Tasks.V1.Deserializer.serialize_non_nil(value, options)
  end
end
| 31.796296
| 112
| 0.727432
|
038b3093471fa0e7bc089bbf38f385d1e4fe7014
| 892
|
ex
|
Elixir
|
clients/composer/lib/google_api/composer/v1beta1/metadata.ex
|
jechol/elixir-google-api
|
0290b683dfc6491ca2ef755a80bc329378738d03
|
[
"Apache-2.0"
] | null | null | null |
clients/composer/lib/google_api/composer/v1beta1/metadata.ex
|
jechol/elixir-google-api
|
0290b683dfc6491ca2ef755a80bc329378738d03
|
[
"Apache-2.0"
] | null | null | null |
clients/composer/lib/google_api/composer/v1beta1/metadata.ex
|
jechol/elixir-google-api
|
0290b683dfc6491ca2ef755a80bc329378738d03
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Composer.V1beta1 do
  @moduledoc """
  API client metadata for GoogleApi.Composer.V1beta1.
  """

  # Revision of the Google discovery document this client was generated from.
  @discovery_revision "20210227"

  @doc "Returns the discovery-document revision baked in at compile time."
  def discovery_revision, do: @discovery_revision
end
| 33.037037
| 74
| 0.761211
|
038b334f155cf2cd25befb8f029aac1cf309b452
| 118
|
ex
|
Elixir
|
lib/ecto_network.ex
|
kianmeng/ecto_network
|
ed999f408310135d506abc9cb39c6907686b7ed5
|
[
"MIT"
] | 41
|
2016-06-30T09:29:35.000Z
|
2022-02-02T21:19:44.000Z
|
lib/ecto_network.ex
|
kianmeng/ecto_network
|
ed999f408310135d506abc9cb39c6907686b7ed5
|
[
"MIT"
] | 22
|
2016-09-02T02:39:35.000Z
|
2021-07-18T12:30:53.000Z
|
lib/ecto_network.ex
|
kianmeng/ecto_network
|
ed999f408310135d506abc9cb39c6907686b7ed5
|
[
"MIT"
] | 23
|
2016-08-24T03:16:02.000Z
|
2021-09-08T18:19:05.000Z
|
defmodule EctoNetwork do
  # Namespace module holding only documentation; the concrete Ecto types
  # (e.g. EctoNetwork.INET) are presumably defined in sibling files of this
  # library — TODO confirm.
  @moduledoc ~S"""
  Ecto types to support MACADDR, CIDR, and INET Postgrex types.
  """
end
| 19.666667
| 63
| 0.711864
|
038b3ac6fbc71e8a7b10e6b4427ce5871f03f46b
| 132
|
ex
|
Elixir
|
lib/rmc_web/views/screen_view.ex
|
robbyronk/race-control
|
f44922cb151c04ccf0e30fa226e49323c537ce2b
|
[
"Apache-2.0"
] | null | null | null |
lib/rmc_web/views/screen_view.ex
|
robbyronk/race-control
|
f44922cb151c04ccf0e30fa226e49323c537ce2b
|
[
"Apache-2.0"
] | null | null | null |
lib/rmc_web/views/screen_view.ex
|
robbyronk/race-control
|
f44922cb151c04ccf0e30fa226e49323c537ce2b
|
[
"Apache-2.0"
] | null | null | null |
defmodule RmcWeb.ScreenView do
  use RmcWeb, :view

  # Renders the screen payload for the index JSON endpoint, wrapping it
  # under a top-level "data" key.
  def render("index.json", %{screen_data: screen_data}) do
    %{data: screen_data}
  end
end
| 16.5
| 51
| 0.674242
|
038b4b03c5fc97cbaaccbe41fcaad3a75f7acdcd
| 922
|
ex
|
Elixir
|
lib/types/parsed/hark/unread.ex
|
mirtyl-wacdec/urbit_ex
|
82db4e96c2f3dc2a28e65c442350d7f8b228901f
|
[
"MIT"
] | 7
|
2021-05-22T12:05:41.000Z
|
2021-08-11T20:05:59.000Z
|
lib/types/parsed/hark/unread.ex
|
mirtyl-wacdec/urbit_ex
|
82db4e96c2f3dc2a28e65c442350d7f8b228901f
|
[
"MIT"
] | null | null | null |
lib/types/parsed/hark/unread.ex
|
mirtyl-wacdec/urbit_ex
|
82db4e96c2f3dc2a28e65c442350d7f8b228901f
|
[
"MIT"
] | 2
|
2021-05-29T10:10:52.000Z
|
2021-08-11T20:06:07.000Z
|
defmodule UrbitEx.Unread do
  @derive Jason.Encoder
  alias UrbitEx.Resource

  # NOTE(review): defstruct defaults are evaluated at compile time, so
  # `DateTime.utc_now()` here is the *compilation* timestamp, not the time a
  # struct is built — confirm this default is intentional.
  defstruct resource: %Resource{},
            index: "/",
            last: DateTime.utc_now(),
            count: nil,
            each: nil

  # Builds an Unread from a hark update map. Expects exactly one key under
  # map["stats"]["unreads"] and copies its value onto the matching struct
  # field (presumably :count or :each — confirm against the hark payload).
  # NOTE(review): `String.to_atom/1` on externally supplied keys can exhaust
  # the atom table; consider `String.to_existing_atom/1`.
  def new(map) do
    # Raises MatchError if "unreads" has zero or more than one key.
    [type] = map["stats"]["unreads"] |> Map.keys
    pair = [{String.to_atom(type), map["stats"]["unreads"][type]}]
    s = %UrbitEx.Unread{
      resource: Resource.from_url(map["index"]["graph"]["graph"]),
      index: map["index"]["graph"]["index"],
      # "last" arrives as unix milliseconds.
      last: map["stats"]["last"] |> DateTime.from_unix!(:millisecond)
    }
    struct(s, pair)
  end

  # Creates a count-style unread entry starting at a count of 1.
  def newcount(resource, index, timestamp) do
    %__MODULE__{
      resource: resource,
      index: index,
      last: timestamp,
      count: 1
    }
  end

  # Creates an each-style unread entry tracking the given index; the struct
  # index itself is pinned to "/".
  def neweach(resource, index, timestamp) do
    %__MODULE__{
      resource: resource,
      index: "/",
      last: timestamp,
      each: [index]
    }
  end
end
| 22.487805
| 69
| 0.570499
|
038b5c9512387a0dac5ee189d4092ef806b9fc8c
| 576
|
ex
|
Elixir
|
lib/reflux/packet.ex
|
lowlandresearch/reflux
|
1fa23e4d609bfa0a27f5f231abbfa28c8835f1f2
|
[
"MIT"
] | null | null | null |
lib/reflux/packet.ex
|
lowlandresearch/reflux
|
1fa23e4d609bfa0a27f5f231abbfa28c8835f1f2
|
[
"MIT"
] | null | null | null |
lib/reflux/packet.ex
|
lowlandresearch/reflux
|
1fa23e4d609bfa0a27f5f231abbfa28c8835f1f2
|
[
"MIT"
] | null | null | null |
defmodule PacketDecoder do
  @moduledoc """
  Given network packet data as a bitstring, this behaviour responsible
  for decoding it into the relevant data structure.
  """

  # NOTE(review): the declared return type `String.t` looks inconsistent with
  # the moduledoc, which says implementations decode into a data structure —
  # confirm whether this should be `struct` or `term` instead.
  @callback from_bitstring(bitstring) :: String.t
end
defprotocol PacketEncoder do
  @moduledoc """
  Given some packet data structure, this protocol is responsible for
  encoding that information as network-packet-ready bitstrings
  """

  @doc "Encode struct as bitstring (i.e. network data)"
  # Returns {:ok, encoded} on success or {:error, {tag, message}} on failure.
  @spec to_bitstring(map) :: {:ok, term} | {:error, {:atom, String.t}}
  def to_bitstring(packet)
end
| 26.181818
| 70
| 0.730903
|
038b63f672cce9dfac23cde21e8eba7d7d0e9995
| 1,266
|
ex
|
Elixir
|
lib/blog_api_web/views/error_helpers.ex
|
ivoferro/CSIAN_Blog_Management
|
ad95a7e479090adb04c80e1fc635a400b3aa69c2
|
[
"MIT"
] | null | null | null |
lib/blog_api_web/views/error_helpers.ex
|
ivoferro/CSIAN_Blog_Management
|
ad95a7e479090adb04c80e1fc635a400b3aa69c2
|
[
"MIT"
] | null | null | null |
lib/blog_api_web/views/error_helpers.ex
|
ivoferro/CSIAN_Blog_Management
|
ad95a7e479090adb04c80e1fc635a400b3aa69c2
|
[
"MIT"
] | null | null | null |
defmodule BlogApiWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  use Phoenix.HTML

  @doc """
  Generates tag for inlined form input errors.
  """
  def error_tag(form, field) do
    form.errors
    |> Keyword.get_values(field)
    |> Enum.map(fn error ->
      content_tag(:span, translate_error(error), class: "help-block")
    end)
  end

  @doc """
  Translates an error message using gettext.
  """
  def translate_error({msg, opts}) do
    # Because error messages were defined within Ecto, we must
    # call the Gettext module passing our Gettext backend. We
    # also use the "errors" domain as translations are placed
    # in the errors.po file.
    #
    # Ecto passes the :count keyword when the error message is meant to be
    # pluralized, so dispatch on its presence. On your own code and
    # templates this could be written simply as:
    #
    #     dngettext "errors", "1 file", "%{count} files", count
    #     dgettext "errors", "is invalid"
    #
    if count = opts[:count] do
      Gettext.dngettext(BlogApiWeb.Gettext, "errors", msg, msg, count, opts)
    else
      Gettext.dgettext(BlogApiWeb.Gettext, "errors", msg, opts)
    end
  end
end
| 30.878049
| 76
| 0.670616
|
038b68a245325dcf015bc8aacc1edd890161a034
| 1,666
|
ex
|
Elixir
|
apps/mcam_server_web/lib/mcam_server_web/image_streaming/camera_comms_socket.ex
|
paulanthonywilson/mcam
|
df9c5aaae00b568749dff22613636f5cb92f905a
|
[
"MIT"
] | null | null | null |
apps/mcam_server_web/lib/mcam_server_web/image_streaming/camera_comms_socket.ex
|
paulanthonywilson/mcam
|
df9c5aaae00b568749dff22613636f5cb92f905a
|
[
"MIT"
] | 8
|
2020-11-16T09:59:12.000Z
|
2020-11-16T10:13:07.000Z
|
apps/mcam_server_web/lib/mcam_server_web/image_streaming/camera_comms_socket.ex
|
paulanthonywilson/mcam
|
df9c5aaae00b568749dff22613636f5cb92f905a
|
[
"MIT"
] | null | null | null |
defmodule McamServerWeb.ImageStreaming.CameraCommsSocket do
  @moduledoc """
  Socket that communicates with "cameras" (Pi Zeros).

  Raw `Phoenix.Socket.Transport` implementation: cameras authenticate with a
  token, stream binary images in, and periodically receive a refreshed token
  pushed back down the socket.
  """
  @behaviour Phoenix.Socket.Transport

  require Logger

  alias McamServer.Cameras

  # Single-byte acknowledgement sent after each image frame.
  @acknowledge_image_receipt "\n"
  @ten_minutes 10 * 60 * 1_000
  # How often a fresh auth token is pushed to the connected camera.
  @token_refresh_period @ten_minutes

  # No supervised process is needed for this transport; satisfy the
  # supervisor contract with a no-op, transient Task.
  def child_spec(_opts) do
    %{id: __MODULE__, start: {Task, :start_link, [fn -> :ok end]}, restart: :transient}
  end

  # Authenticates the connecting camera from its "token" param. On success the
  # socket state carries the camera id; any non-ok result rejects the
  # connection. NOTE(review): `Monitoring` is used without an alias — it
  # presumably resolves to a top-level module in this umbrella app; confirm.
  def connect(%{params: %{"token" => token}}) do
    case Cameras.from_token(token, :camera) do
      {:ok, %{id: camera_id}} ->
        Logger.debug(fn -> "Connection from camera #{camera_id}" end)
        Monitoring.camera_connected(camera_id)
        {:ok, %{camera_id: camera_id}}

      err ->
        Logger.info(fn -> "Invalid camera connection request #{inspect(err)}" end)
        :error
    end
  end

  # Kick off the token-refresh loop as soon as the transport process starts.
  def init(state) do
    send(self(), :refresh_token)
    {:ok, state}
  end

  # Binary frames are camera images: record the event, broadcast the image to
  # subscribers, and acknowledge receipt so the camera can send the next one.
  def handle_in({image, [opcode: :binary]}, %{camera_id: camera_id} = state) do
    Monitoring.image_received(camera_id)
    Cameras.broadcast_image(camera_id, image)
    {:reply, :ok, {:binary, @acknowledge_image_receipt}, state}
  end

  # Mints a new token for the camera, pushes it down the socket as an
  # erlang-term-encoded binary, and schedules the next refresh.
  def handle_info(:refresh_token, %{camera_id: camera_id} = state) do
    Process.send_after(self(), :refresh_token, @token_refresh_period)
    refreshed_token = Cameras.token_for(camera_id, :camera)
    message = :erlang.term_to_binary({:token_refresh, refreshed_token})
    {:push, {:binary, message}, state}
  end

  # Any other message is ignored.
  def handle_info(_, state) do
    {:ok, state}
  end

  # Record the disconnect for monitoring when the socket closes.
  def terminate(_reason, %{camera_id: camera_id}) do
    Monitoring.camera_disconnected(camera_id)
    :ok
  end
end
| 27.766667
| 87
| 0.678872
|
038b89abcfc80a57d49c88d32e7e883ebd8ed9e8
| 937
|
exs
|
Elixir
|
lib/mix/test/mix/tasks/compile.erlang_test.exs
|
Nicd/elixir
|
e62ef92a4be1b562033d35b2d822cc9d6c661077
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/test/mix/tasks/compile.erlang_test.exs
|
Nicd/elixir
|
e62ef92a4be1b562033d35b2d822cc9d6c661077
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/test/mix/tasks/compile.erlang_test.exs
|
Nicd/elixir
|
e62ef92a4be1b562033d35b2d822cc9d6c661077
|
[
"Apache-2.0"
] | null | null | null |
# Load the shared Mix test helpers relative to this file.
Code.require_file "../../test_helper.exs", __DIR__

defmodule Mix.Tasks.Compile.ErlangTest do
  use MixTest.Case

  # The "compile_erlang" fixture presumably contains src/a.erl (deliberately
  # broken at line 4), plus valid src/b.erl and src/c.erl — confirm fixture
  # layout in test/fixtures.
  test "tries to compile src/a.erl" do
    in_fixture "compile_erlang", fn ->
      output = mix "compile"
      assert output =~ "src/a.erl:4: syntax error"
    end
  end

  # A plain compile should build both valid sources and emit beams into ebin/.
  test "compiles src/b.erl and src/c.erl" do
    in_fixture "compile_erlang", fn ->
      output = mix "compile"
      assert output =~ "Compiled src/b.erl"
      assert output =~ "Compiled src/c.erl"
      assert File.regular?("ebin/b.beam")
      assert File.regular?("ebin/c.beam")
    end
  end

  # --force must recompile even though the preceding `mix compile` already
  # produced up-to-date artifacts.
  test "compiles with --force src/b.erl and src/c.erl" do
    in_fixture "compile_erlang", fn ->
      mix "compile"
      output = mix "compile --force"
      assert output =~ "Compiled src/b.erl"
      assert output =~ "Compiled src/c.erl"
      assert File.regular?("ebin/b.beam")
      assert File.regular?("ebin/c.beam")
    end
  end
end
| 23.425
| 57
| 0.629669
|
038b9c1290f1cc95555295d320cf6fdda0f977af
| 5,301
|
ex
|
Elixir
|
lib/oban/telemetry.ex
|
milmazz/oban
|
a45c986d4f58870fa8dda47543f5d04e75054960
|
[
"Apache-2.0"
] | 1
|
2021-11-20T19:16:24.000Z
|
2021-11-20T19:16:24.000Z
|
lib/oban/telemetry.ex
|
benwilson512/oban
|
caccd675ee7b4537bc1e58fa49973932ad685506
|
[
"Apache-2.0"
] | null | null | null |
lib/oban/telemetry.ex
|
benwilson512/oban
|
caccd675ee7b4537bc1e58fa49973932ad685506
|
[
"Apache-2.0"
] | null | null | null |
defmodule Oban.Telemetry do
  @moduledoc """
  Telemetry integration for event metrics, logging and error reporting.

  ### Job Events

  Oban emits an event after a job executes: `[:oban, :success]` if the job succeeded or `[:oban,
  :failure]` if there was an error or the process crashed.

  All job events share the same details about the job that was executed. In addition, failed jobs
  provide the error type, the error itself, and the stacktrace. The following chart shows which
  metadata you can expect for each event:

  | event      | measures    | metadata                                                                      |
  | ---------- | ----------- | ----------------------------------------------------------------------------- |
  | `:success` | `:duration` | `:id, :args, :queue, :worker, :attempt, :max_attempts`                        |
  | `:failure` | `:duration` | `:id, :args, :queue, :worker, :attempt, :max_attempts, :kind, :error, :stack` |

  For `:failure` events the metadata includes details about what caused the failure. The `:kind`
  value is determined by how an error occurred. Here are the possible kinds:

  * `:error` — from an `{:error, error}` return value. Some Erlang functions may also throw an
    `:error` tuple, which will be reported as `:error`.
  * `:exception` — from a rescued exception
  * `:exit` — from a caught process exit
  * `:throw` — from a caught value, this doesn't necessarily mean that an error occurred and the
    error value is unpredictable

  ### Circuit Events

  All processes that interact with the database have circuit breakers to prevent errors from
  crashing the entire supervision tree. Processes emit a `[:oban, :trip_circuit]` event when a
  circuit is tripped and `[:oban, :open_circuit]` when the breaker is subsequently opened again.

  | event           | measures | metadata            |
  | --------------- | -------- | ------------------- |
  | `:trip_circuit` |          | `:name`, `:message` |
  | `:open_circuit` |          | `:name`             |

  Metadata

  * `:name` — the registered name of the process that tripped a circuit, i.e. `Oban.Notifier`
  * `:message` — a formatted error message describing what went wrong

  ## Default Logger

  A default log handler that emits structured JSON is provided, see `attach_default_logger/0` for
  usage. Otherwise, if you would prefer more control over logging or would like to instrument
  events you can write your own handler.

  ## Examples

  A handler that only logs a few details about failed jobs:

  ```elixir
  defmodule MicroLogger do
    require Logger

    def handle_event([:oban, :failure], %{duration: duration}, meta, nil) do
      Logger.warn("[#\{meta.queue}] #\{meta.worker} failed in #\{duration}")
    end
  end

  :telemetry.attach("oban-logger", [:oban, :failure], &MicroLogger.handle_event/4, nil)
  ```

  Another great use of execution data is error reporting. Here is an example of integrating with
  [Honeybadger][honey], but only reporting jobs that have failed 3 times or more:

  ```elixir
  defmodule ErrorReporter do
    def handle_event([:oban, :failure], _timing, %{attempt: attempt} = meta, nil) do
      if attempt >= 3 do
        context = Map.take(meta, [:id, :args, :queue, :worker])

        Honeybadger.notify(meta.error, context, meta.stack)
      end
    end
  end

  :telemetry.attach("oban-errors", [:oban, :failure], &ErrorReporter.handle_event/4, nil)
  ```

  [honey]: https://honeybadger.io
  """
  @moduledoc since: "0.4.0"

  require Logger

  @doc """
  Attaches a default structured JSON Telemetry handler for logging.

  This function attaches a handler that outputs logs with the following fields:

  * `source` — always "oban"
  * `event` — either `:success` or `:failure` dependening on whether the job succeeded or errored
  * `args` — a map of the job's raw arguments
  * `worker` — the job's worker module
  * `queue` — the job's queue
  * `duration` — the job's runtime duration in microseconds

  ## Examples

  Attach a logger at the default `:info` level:

      :ok = Oban.Telemetry.attach_default_logger()

  Attach a logger at the `:debug` level:

      :ok = Oban.Telemetry.attach_default_logger(:debug)
  """
  @doc since: "0.4.0"
  # FIX: the spec previously declared arity 0 (`attach_default_logger()`)
  # while the function has an optional `level` argument; write the spec for
  # the full arity so Dialyzer covers both attach_default_logger/0 and /1.
  @spec attach_default_logger(Logger.level()) :: :ok | {:error, :already_exists}
  def attach_default_logger(level \\ :info) do
    events = [
      [:oban, :success],
      [:oban, :failure],
      [:oban, :trip_circuit],
      [:oban, :open_circuit]
    ]

    # The log level is passed through as the handler config (4th argument)
    # and arrives as the last parameter of handle_event/4.
    :telemetry.attach_many("oban-default-logger", events, &handle_event/4, level)
  end

  @doc false
  @spec handle_event([atom()], map(), map(), Logger.level()) :: :ok
  def handle_event([:oban, event], measure, meta, level)
      when event in [:success, :failure] do
    log_message(
      level,
      %{
        source: "oban",
        event: event,
        args: meta[:args],
        worker: meta[:worker],
        queue: meta[:queue],
        duration: measure[:duration]
      }
    )
  end

  def handle_event([:oban, event], _measure, meta, level)
      when event in [:trip_circuit, :open_circuit] do
    log_message(level, Map.merge(meta, %{source: "oban", event: event}))
  end

  # Encode inside the lambda so JSON serialization is skipped entirely when
  # the logger level filters the message out.
  defp log_message(level, message) do
    Logger.log(level, fn -> Jason.encode!(message) end)
  end
end
| 35.10596
| 110
| 0.631956
|
038ba745e95810af6fc61b12d98d819f97a5df62
| 489
|
ex
|
Elixir
|
system5/lib/beb.ex
|
mihaivanea/broadcast_algorithms
|
124a93791c0589e4f56088fe98b0a3469b3d90c7
|
[
"MIT"
] | null | null | null |
system5/lib/beb.ex
|
mihaivanea/broadcast_algorithms
|
124a93791c0589e4f56088fe98b0a3469b3d90c7
|
[
"MIT"
] | null | null | null |
system5/lib/beb.ex
|
mihaivanea/broadcast_algorithms
|
124a93791c0589e4f56088fe98b0a3469b3d90c7
|
[
"MIT"
] | null | null | null |
# Mihail Vanea (mv1315)
defmodule BEB do
  @moduledoc false

  # Best-effort-broadcast process. Blocks until a :bind message supplies the
  # perfect-link pid, the application pid and the neighbour list, then enters
  # the main receive loop.
  def start() do
    receive do
      {:bind, pl, app, neighbours} -> next(pl, app, neighbours)
    end
  end

  # Main loop: fan outgoing broadcasts to every neighbour via the perfect
  # link, and deliver incoming beb_broadcast messages up to the application.
  defp next(pl, app, neighbours) do
    receive do
      {:broadcast, message, from} ->
        Enum.each(neighbours, fn neighbour ->
          send(pl, {:beb_broadcast, neighbour, message, from})
        end)

      {:beb_broadcast, message, source} ->
        send(app, {message, source})
    end

    next(pl, app, neighbours)
  end
end
| 21.26087
| 63
| 0.578732
|
038badeb1367cf2708f06e92a9cb1e0a712ee679
| 16,269
|
ex
|
Elixir
|
lib/sanbase/billing/subscription.ex
|
sitedata/sanbase2
|
8da5e44a343288fbc41b68668c6c80ae8547d557
|
[
"MIT"
] | null | null | null |
lib/sanbase/billing/subscription.ex
|
sitedata/sanbase2
|
8da5e44a343288fbc41b68668c6c80ae8547d557
|
[
"MIT"
] | 1
|
2021-07-24T16:26:03.000Z
|
2021-07-24T16:26:03.000Z
|
lib/sanbase/billing/subscription.ex
|
sitedata/sanbase2
|
8da5e44a343288fbc41b68668c6c80ae8547d557
|
[
"MIT"
] | null | null | null |
defmodule Sanbase.Billing.Subscription do
@moduledoc """
Module for managing user subscriptions - create, upgrade/downgrade, cancel.
Also containing some helper functions that take user subscription as argument and
return some properties of the subscription plan.
"""
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
alias Sanbase.Billing.Plan
alias Sanbase.Billing.Plan.AccessChecker
alias Sanbase.Auth.User
alias Sanbase.Repo
alias Sanbase.StripeApi
require Logger
@percent_discount_1000_san 20
@generic_error_message """
Current subscription attempt failed.
Please, contact administrator of the site for more information.
"""
@free_trial_plans Plan.Metadata.free_trial_plans()
  # DB schema mirroring the Stripe subscription object; `status` values come
  # from SubscriptionStatusEnum, timestamps are UTC.
  schema "subscriptions" do
    field(:stripe_id, :string)
    field(:current_period_end, :utc_datetime)
    field(:cancel_at_period_end, :boolean, null: false, default: false)
    field(:status, SubscriptionStatusEnum)
    field(:trial_end, :utc_datetime)

    belongs_to(:user, User)
    belongs_to(:plan, Plan)

    timestamps()
  end

  # Public accessor for the module-attribute error message shown to users.
  def generic_error_message, do: @generic_error_message

  # Casts subscription attributes. `:inserted_at` is cast explicitly so it can
  # be backfilled from Stripe's `created` timestamp on sync.
  def changeset(%__MODULE__{} = subscription, attrs \\ %{}) do
    subscription
    |> cast(attrs, [
      :plan_id,
      :user_id,
      :stripe_id,
      :current_period_end,
      :trial_end,
      :cancel_at_period_end,
      :status,
      :inserted_at
    ])
    |> foreign_key_constraint(:plan_id, name: :subscriptions_plan_id_fkey)
  end

  # Fetches a subscription by primary key with its plan preloaded; nil when
  # the id is unknown.
  def by_id(id) do
    Repo.get(__MODULE__, id)
    |> Repo.preload(:plan)
  end

  # Fetches a subscription by its Stripe id with its plan preloaded.
  def by_stripe_id(stripe_id) do
    Repo.get_by(__MODULE__, stripe_id: stripe_id)
    |> Repo.preload(:plan)
  end

  # In-memory subscription representing the free plan; never persisted.
  @spec free_subscription() :: %__MODULE__{}
  def free_subscription() do
    %__MODULE__{plan: Plan.free_plan()}
  end
  @doc """
  Subscribe user with card_token to a plan.

  - Create or update a Stripe customer with card details contained by the card_token param.
  - Create subscription record in Stripe.
  - Create a subscription record locally so we can check access control without calling Stripe.
  """
  @type string_or_nil :: String.t() | nil
  @spec subscribe(%User{}, %Plan{}, string_or_nil, string_or_nil) ::
          {:ok, %__MODULE__{}} | {atom(), String.t()}
  def subscribe(user, plan, card_token \\ nil, coupon \\ nil) do
    # The guard on the first match ensures a Stripe customer id exists before
    # subscribing; any {:error, _} from a step falls through unchanged.
    with {:ok, %User{stripe_customer_id: stripe_customer_id} = user}
         when not is_nil(stripe_customer_id) <-
           create_or_update_stripe_customer(user, card_token),
         {:ok, stripe_subscription} <- create_stripe_subscription(user, plan, coupon),
         {:ok, subscription} <- create_subscription_db(stripe_subscription, user, plan) do
      {:ok, subscription |> Repo.preload(plan: [:product])}
    end
  end

  @doc """
  Upgrade or Downgrade plan:

  - Updates subscription in Stripe with new plan.
  - Updates local subscription

  Stripe docs: https://stripe.com/docs/billing/subscriptions/upgrading-downgrading#switching
  """
  def update_subscription(subscription, plan) do
    with {:ok, item_id} <- StripeApi.get_subscription_first_item_id(subscription.stripe_id),
         # Note: that will generate dialyzer error because the spec is wrong.
         # More info here: https://github.com/code-corps/stripity_stripe/pull/499
         {:ok, stripe_subscription} <-
           StripeApi.update_subscription(subscription.stripe_id, %{
             items: [%{id: item_id, plan: plan.stripe_id}]
           }),
         {:ok, updated_subscription} <-
           sync_with_stripe_subscription(stripe_subscription, subscription) do
      # force: true so the preloaded plan reflects the just-changed plan_id.
      {:ok, updated_subscription |> Repo.preload([plan: [:product]], force: true)}
    end
  end

  @doc """
  Cancel subscription:

  Cancellation means scheduling for cancellation. It updates the `cancel_at_period_end` field which will cancel the
  subscription at `current_period_end`. That allows user to use the subscription for the time left that he has already paid for.

  https://stripe.com/docs/billing/subscriptions/canceling-pausing#canceling
  """
  def cancel_subscription(subscription) do
    with {:ok, stripe_subscription} <- StripeApi.cancel_subscription(subscription.stripe_id),
         {:ok, _} <- sync_with_stripe_subscription(stripe_subscription, subscription) do
      Sanbase.Billing.StripeEvent.send_cancel_event_to_discord(subscription)

      {:ok,
       %{
         is_scheduled_for_cancellation: true,
         scheduled_for_cancellation_at: subscription.current_period_end
       }}
    end
  end
  @doc """
  Renew cancelled subscription if `current_period_end` is not reached.

  https://stripe.com/docs/billing/subscriptions/canceling-pausing#reactivating-canceled-subscriptions
  """
  def renew_cancelled_subscription(subscription) do
    # :lt means "now" is still before current_period_end, i.e. the paid-for
    # period has not run out yet, so reactivation is still possible.
    with {:end_period_reached?, :lt} <-
           {:end_period_reached?, DateTime.compare(Timex.now(), subscription.current_period_end)},
         {:ok, stripe_subscription} <-
           StripeApi.update_subscription(subscription.stripe_id, %{cancel_at_period_end: false}),
         {:ok, updated_subscription} <-
           sync_with_stripe_subscription(stripe_subscription, subscription) do
      {:ok, updated_subscription |> Repo.preload([plan: [:product]], force: true)}
    else
      {:end_period_reached?, _} ->
        {:end_period_reached_error,
         "Cancelled subscription has already reached the end period at #{
           subscription.current_period_end
         }"}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Persists a freshly created Stripe subscription locally. `on_conflict:
  # :nothing` keeps the insert from raising on a duplicate — presumably
  # against a unique index on stripe_id; confirm in the migration.
  # `calculate_trial_end/1` is defined elsewhere in this module (not visible
  # in this chunk).
  def create_subscription_db(
        %Stripe.Subscription{
          id: stripe_id,
          current_period_end: current_period_end,
          cancel_at_period_end: cancel_at_period_end,
          status: status,
          created: created
        } = stripe_subscription,
        user,
        plan
      ) do
    %__MODULE__{}
    |> changeset(%{
      stripe_id: stripe_id,
      user_id: user.id,
      plan_id: plan.id,
      # Stripe timestamps are unix seconds.
      current_period_end: DateTime.from_unix!(current_period_end),
      cancel_at_period_end: cancel_at_period_end,
      status: status,
      trial_end: calculate_trial_end(stripe_subscription),
      inserted_at: DateTime.from_unix!(created) |> DateTime.to_naive()
    })
    |> Repo.insert(on_conflict: :nothing)
  end

  # Thin wrapper: run `params` through the changeset and update the row.
  def update_subscription_db(subscription, params) do
    subscription
    |> changeset(params)
    |> Repo.update()
  end
  # Re-syncs every local subscription against Stripe. NOTE(review): loads the
  # whole table and makes one Stripe API call per row.
  def sync_all() do
    __MODULE__
    |> Repo.all()
    |> Enum.each(&sync_with_stripe_subscription/1)
  end

  # Pulls the latest state for one subscription from Stripe and writes it to
  # the DB. Failures (unknown plan, Stripe error) are logged rather than
  # raised so sync_all/0 keeps going.
  # NOTE(review): the first log message below ends with a stray "}" —
  # harmless but probably unintended (runtime string, left unchanged here).
  def sync_with_stripe_subscription(%__MODULE__{stripe_id: stripe_id} = subscription) do
    with {:ok,
          %Stripe.Subscription{
            current_period_end: current_period_end,
            cancel_at_period_end: cancel_at_period_end,
            status: status,
            plan: %Stripe.Plan{id: stripe_plan_id},
            created: created
          } = stripe_subscription} <- StripeApi.retrieve_subscription(stripe_id),
         {:plan_not_exist?, %Plan{id: plan_id}} <-
           {:plan_not_exist?, Plan.by_stripe_id(stripe_plan_id)} do
      update_subscription_db(subscription, %{
        current_period_end: DateTime.from_unix!(current_period_end),
        cancel_at_period_end: cancel_at_period_end,
        status: status,
        plan_id: plan_id,
        trial_end: calculate_trial_end(stripe_subscription),
        inserted_at: DateTime.from_unix!(created) |> DateTime.to_naive()
      })
    else
      {:plan_not_exist?, nil} ->
        Logger.error(
          "Error while syncing subscription: #{subscription.stripe_id}, reason: plan does not exist}"
        )

      {:error, reason} ->
        Logger.error(
          "Error while syncing subscription: #{subscription.stripe_id}, reason: #{inspect(reason)}"
        )
    end
  end

  # Catch-all for anything that is not a %Subscription{} struct, so callers
  # like sync_all/0 never crash on unexpected input.
  def sync_with_stripe_subscription(_), do: :ok

  # Two-argument variant: applies an already-fetched Stripe subscription onto
  # a DB record. Keeps the existing local plan when the Stripe plan id has no
  # local counterpart.
  def sync_with_stripe_subscription(
        %Stripe.Subscription{
          current_period_end: current_period_end,
          cancel_at_period_end: cancel_at_period_end,
          status: status,
          plan: %Stripe.Plan{id: stripe_plan_id}
        } = stripe_subscription,
        db_subscription
      ) do
    plan_id =
      case Plan.by_stripe_id(stripe_plan_id) do
        %Plan{id: plan_id} -> plan_id
        nil -> db_subscription.plan_id
      end

    update_subscription_db(db_subscription, %{
      current_period_end: DateTime.from_unix!(current_period_end),
      cancel_at_period_end: cancel_at_period_end,
      status: status,
      plan_id: plan_id,
      trial_end: calculate_trial_end(stripe_subscription)
    })
  end
  @doc """
  List all active user subscriptions with plans and products.
  """
  def user_subscriptions(%User{id: user_id}) do
    user_id
    |> user_subscriptions_query()
    |> active_subscriptions_query()
    |> join_plan_and_product_query()
    |> Repo.all()
  end

  @doc """
  List active subscriptions' product ids
  """
  # NOTE(review): `select_product_id_query/1` is not visible in this chunk —
  # presumably defined further down the module.
  def user_subscriptions_product_ids(%User{id: user_id}) do
    user_id
    |> user_subscriptions_query()
    |> active_subscriptions_query()
    |> select_product_id_query()
    |> Repo.all()
  end

  @doc """
  Current subscription is the last active subscription for a product.
  """
  # `fetch_current_subscription/2` is defined elsewhere in this module (not
  # visible in this chunk).
  def current_subscription(%User{id: user_id}, product_id) do
    fetch_current_subscription(user_id, product_id)
  end

  # Same lookup when the caller already holds a bare user id.
  def current_subscription(user_id, product_id) when is_integer(user_id) do
    fetch_current_subscription(user_id, product_id)
  end

  @doc """
  How much historical days a subscription plan can access.
  """
  @spec historical_data_in_days(%__MODULE__{}, AccessChecker.query_or_metric(), non_neg_integer()) ::
          non_neg_integer() | nil
  def historical_data_in_days(%__MODULE__{plan: plan}, query_or_metric, product_id) do
    plan
    |> Plan.plan_atom_name()
    |> AccessChecker.historical_data_in_days(query_or_metric, product_id)
  end

  # Mirrors historical_data_in_days/3 for the realtime cut-off window.
  @spec realtime_data_cut_off_in_days(
          %__MODULE__{},
          AccessChecker.query_or_metric(),
          non_neg_integer()
        ) :: non_neg_integer() | nil
  def realtime_data_cut_off_in_days(%__MODULE__{plan: plan}, query_or_metric, product_id) do
    plan
    |> Plan.plan_atom_name()
    |> AccessChecker.realtime_data_cut_off_in_days(query_or_metric, product_id)
  end

  # Atom name of the subscription's plan, as defined by Plan.plan_atom_name/1.
  def plan_name(%__MODULE__{plan: plan}), do: plan |> Plan.plan_atom_name()
  # Bodiless head declaring the default for the optional card token.
  def create_or_update_stripe_customer(_, _card_token \\ nil)

  # No Stripe customer yet: create one and persist its id on the user.
  def create_or_update_stripe_customer(%User{stripe_customer_id: stripe_id} = user, card_token)
      when is_nil(stripe_id) do
    StripeApi.create_customer(user, card_token)
    |> case do
      {:ok, stripe_customer} ->
        user
        |> User.changeset(%{stripe_customer_id: stripe_customer.id})
        |> Repo.update()

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Customer already exists and no new card was supplied: nothing to do.
  def create_or_update_stripe_customer(%User{stripe_customer_id: stripe_id} = user, nil)
      when is_binary(stripe_id) do
    {:ok, user}
  end

  # Customer already exists and a new card token was supplied: update the
  # customer's card details in Stripe.
  def create_or_update_stripe_customer(%User{stripe_customer_id: stripe_id} = user, card_token)
      when is_binary(stripe_id) do
    StripeApi.update_customer(user, card_token)
    |> case do
      {:ok, _} ->
        {:ok, user}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Cancel trialing subscriptions.

  * For Sanbase PRO cancel those:
    - about to expire (in 2 hours)
    - there is no payment instrument attached
    and send an email for finished trial.
  * For other plans - cancel regardless of card presence.
  """
  # NOTE(review): `maybe_send_email_and_delete_subscription/1` is defined
  # elsewhere in this module (not visible in this chunk).
  def cancel_about_to_expire_trials() do
    now = Timex.now()
    after_2_hours = Timex.shift(now, hours: 2)

    # Trials whose end falls inside the next two hours.
    from(s in __MODULE__,
      where:
        s.status == "trialing" and
          s.trial_end >= ^now and s.trial_end <= ^after_2_hours
    )
    |> Repo.all()
    |> Enum.each(&maybe_send_email_and_delete_subscription/1)
  end
# Private functions
defp create_stripe_subscription(user, plan, nil) do
percent_off =
user
|> san_balance()
|> percent_discount()
subscription_defaults(user, plan)
|> update_subscription_with_coupon(percent_off)
|> case do
{:ok, subscription} ->
StripeApi.create_subscription(subscription)
{:error, reason} ->
{:error, reason}
end
end
defp create_stripe_subscription(user, plan, coupon) when not is_nil(coupon) do
with {:ok, stripe_coupon} <- StripeApi.retrieve_coupon(coupon) do
subscription_defaults(user, plan)
|> update_subscription_with_coupon(stripe_coupon)
|> StripeApi.create_subscription()
end
end
defp subscription_defaults(user, plan) do
%{
customer: user.stripe_customer_id,
items: [%{plan: plan.stripe_id}]
}
end
  # Attaches a discount to the subscription-creation payload.
  #
  # NOTE(review): the clauses return inconsistent shapes — the
  # `%Stripe.Coupon{}` clause returns the bare map, while the integer and
  # `nil` clauses return `{:ok, map}`. Callers compensate for this (the
  # coupon path pipes the bare map straight into
  # `StripeApi.create_subscription/1`; the nil-coupon path matches on
  # `{:ok, _}`), so do not change one clause without updating its caller.
  defp update_subscription_with_coupon(subscription, %Stripe.Coupon{id: coupon_id}) do
    Map.put(subscription, :coupon, coupon_id)
  end

  # Integer percentage: create a permanent Stripe coupon on the fly and
  # attach it to the payload.
  defp update_subscription_with_coupon(subscription, percent_off) when is_integer(percent_off) do
    with {:ok, coupon} <-
           StripeApi.create_coupon(%{percent_off: percent_off, duration: "forever"}) do
      {:ok, Map.put(subscription, :coupon, coupon.id)}
    end
  end

  # No discount to apply.
  defp update_subscription_with_coupon(subscription, nil), do: {:ok, subscription}
defp percent_discount(balance) when balance >= 1000, do: @percent_discount_1000_san
defp percent_discount(_), do: nil
defp user_subscriptions_query(user_id) do
from(s in __MODULE__,
where: s.user_id == ^user_id,
order_by: [desc: s.id]
)
end
defp active_subscriptions_query(query) do
from(s in query,
where: s.status == "active" or s.status == "trialing" or s.status == "past_due"
)
end
defp join_plan_and_product_query(query) do
from(
s in query,
join: p in assoc(s, :plan),
join: pr in assoc(p, :product),
preload: [plan: {p, product: pr}]
)
end
defp select_product_id_query(query) do
from(s in query, join: p in assoc(s, :plan), select: p.product_id)
end
  # Restricts the query to subscriptions whose plan belongs to `product_id`
  # and keeps only the first row. Callers are expected to have ordered the
  # query beforehand (`user_subscriptions_query/1` orders by id desc, so the
  # surviving row is the newest subscription).
  # NOTE(review): the plan lookup uses a raw SQL fragment instead of a join —
  # it assumes a `plans` table with `id`/`product_id` columns.
  defp last_subscription_for_product_query(query, product_id) do
    from(s in query,
      where: s.plan_id in fragment("select id from plans where product_id = ?", ^product_id),
      limit: 1
    )
  end
defp san_balance(%User{} = user) do
case User.san_balance(user) do
{:ok, balance} -> balance
_ -> 0
end
end
  # Derives the trial end datetime from a Stripe subscription.
  # Clause order matters: the promotion clause must stay first so promo
  # subscriptions fall back to `cancel_at` even when the 14-day heuristic
  # below would not apply.
  defp calculate_trial_end(%Stripe.Subscription{
         trial_end: trial_end,
         cancel_at: cancel_at,
         metadata: %{"current_promotion" => "devcon2019"}
       }) do
    format_trial_end(trial_end || cancel_at)
  end

  # Subscriptions scheduled to cancel ~14 days after creation are treated as
  # trials: fall back to `cancel_at` when Stripe reports no `trial_end`.
  defp calculate_trial_end(%Stripe.Subscription{
         trial_end: trial_end,
         cancel_at: cancel_at,
         created: created
       })
       when not is_nil(cancel_at) and not is_nil(created) do
    # set trial_end if subscription is set to end 14 days after it is created
    if ((cancel_at - created) / (3600 * 24)) |> Float.round() == 14 do
      format_trial_end(trial_end || cancel_at)
    else
      format_trial_end(trial_end)
    end
  end

  # Default: use Stripe's `trial_end` as-is (may be nil).
  defp calculate_trial_end(%Stripe.Subscription{trial_end: trial_end}) do
    format_trial_end(trial_end)
  end
  # Deletes the Stripe subscription; for free-trial plans it additionally
  # lets `SignUpTrial` decide whether to send a "trial finished" email.
  # NOTE(review): the original comment mentioned a missing credit card, but
  # the guard only checks the plan — the card check, if any, happens inside
  # `maybe_send_trial_finished_email/1`; confirm there.
  defp maybe_send_email_and_delete_subscription(
         %__MODULE__{
           user_id: user_id,
           stripe_id: stripe_id,
           plan_id: plan_id
         } = subscription
       )
       when plan_id in @free_trial_plans do
    Logger.info("Deleting subscription with id: #{stripe_id} for user: #{user_id}")
    StripeApi.delete_subscription(stripe_id)
    __MODULE__.SignUpTrial.maybe_send_trial_finished_email(subscription)
  end

  # Any other plan: just delete the Stripe subscription, no email.
  defp maybe_send_email_and_delete_subscription(%__MODULE__{stripe_id: stripe_id}) do
    Logger.info("Deleting subscription with id: #{stripe_id}")
    StripeApi.delete_subscription(stripe_id)
  end
defp fetch_current_subscription(user_id, product_id) do
user_id
|> user_subscriptions_query()
|> active_subscriptions_query()
|> last_subscription_for_product_query(product_id)
|> preload_query(plan: [:product])
|> Repo.one()
end
defp preload_query(query, preloads) do
from(s in query, preload: ^preloads)
end
defp format_trial_end(nil), do: nil
defp format_trial_end(trial_end), do: DateTime.from_unix!(trial_end)
end
| 31.346821
| 128
| 0.684922
|
038bd45d80035f85eafcb76bc3bc10866467ea42
| 2,376
|
ex
|
Elixir
|
clients/sheets/lib/google_api/sheets/v4/model/update_conditional_format_rule_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/sheets/lib/google_api/sheets/v4/model/update_conditional_format_rule_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/sheets/lib/google_api/sheets/v4/model/update_conditional_format_rule_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse do
  @moduledoc """
  The result of updating a conditional format rule.

  ## Attributes

  *   `newIndex` (*type:* `integer()`, *default:* `nil`) - The index of the new rule.
  *   `newRule` (*type:* `GoogleApi.Sheets.V4.Model.ConditionalFormatRule.t`, *default:* `nil`) - The new rule that replaced the old rule (if replacing),
      or the rule that was moved (if moved)
  *   `oldIndex` (*type:* `integer()`, *default:* `nil`) - The old index of the rule. Not set if a rule was replaced
      (because it is the same as new_index).
  *   `oldRule` (*type:* `GoogleApi.Sheets.V4.Model.ConditionalFormatRule.t`, *default:* `nil`) - The old (deleted) rule. Not set if a rule was moved
      (because it is the same as new_rule).
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :newIndex => integer(),
          :newRule => GoogleApi.Sheets.V4.Model.ConditionalFormatRule.t(),
          :oldIndex => integer(),
          :oldRule => GoogleApi.Sheets.V4.Model.ConditionalFormatRule.t()
        }

  # `field/1,2` is provided by GoogleApi.Gax.ModelBase: it registers each
  # JSON attribute on the struct, with `as:` naming the nested model used
  # for decoding. This file is generated — do not hand-edit the fields.
  field(:newIndex)
  field(:newRule, as: GoogleApi.Sheets.V4.Model.ConditionalFormatRule)
  field(:oldIndex)
  field(:oldRule, as: GoogleApi.Sheets.V4.Model.ConditionalFormatRule)
end
# Delegates Poison decoding to the generated model's own `decode/2`
# (provided by GoogleApi.Gax.ModelBase), which knows the nested field types.
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse do
  def decode(value, options) do
    GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse.decode(value, options)
  end
end
# Encodes the model through the shared ModelBase encoder, which serializes
# only the registered fields.
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.UpdateConditionalFormatRuleResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.271186
| 153
| 0.722643
|
038bea75928e921901ec9ed89cc8f81ca684fc91
| 1,262
|
exs
|
Elixir
|
test/test_helper.exs
|
AdamT/axon
|
083e1404e7cd4a3346e4728a43f2a6bded500b1f
|
[
"Apache-2.0"
] | null | null | null |
test/test_helper.exs
|
AdamT/axon
|
083e1404e7cd4a3346e4728a43f2a6bded500b1f
|
[
"Apache-2.0"
] | null | null | null |
test/test_helper.exs
|
AdamT/axon
|
083e1404e7cd4a3346e4728a43f2a6bded500b1f
|
[
"Apache-2.0"
] | null | null | null |
ExUnit.start()
defmodule AxonTestUtil do
  @moduledoc """
  Shared helpers for optimizer tests.
  """

  @doc """
  Checks that `optimizer` is a valid `{init_fn, update_fn}` pair and that
  running it for `num_steps` steps drives `loss` evaluated at the final
  parameters below `1.0e-2`, raising otherwise.
  """
  def check_optimizer!(optimizer, loss, x0, num_steps) do
    check_optimizer_functions!(optimizer)
    check_optimizer_run!(optimizer, loss, x0, num_steps)
  end

  # Raises unless the optimizer exposes init_fn/1 and update_fn/3.
  # Bug fix: the previous version computed this boolean and discarded it,
  # so a malformed optimizer tuple was never reported here.
  defp check_optimizer_functions!(optimizer) do
    {init_fn, update_fn} = optimizer

    unless is_function(init_fn, 1) and is_function(update_fn, 3) do
      raise ArgumentError,
            "expected an {init_fn/1, update_fn/3} optimizer, got: #{inspect(optimizer)}"
    end

    :ok
  end

  # Runs `num_steps` gradient steps and raises unless the final loss is
  # less than or equal to 1.0e-2 (element-wise, since some optimizers work
  # on multi-dimensional parameters).
  defp check_optimizer_run!(optimizer, loss, x0, num_steps) do
    {init_fn, update_fn} = optimizer
    opt_state = init_fn.(x0)
    state = {x0, opt_state}

    # One optimization step: gradient of the loss, optimizer update,
    # then apply the updates to the parameters.
    step_fn = fn state ->
      {params, opt_state} = state
      gradients = Nx.Defn.grad(params, loss)
      {updates, new_state} = update_fn.(gradients, opt_state, params)
      {Axon.Updates.apply_updates(updates, params), new_state}
    end

    {params, _} =
      for _ <- 1..num_steps, reduce: state do
        state ->
          Nx.Defn.jit(step_fn, [state])
      end

    lhs = loss.(params)
    rhs = 1.0e-2

    # Some optimizers require 1-D or 2-D input, so this potentially
    # could be multi-dimensional
    unless Nx.all?(Nx.less_equal(lhs, rhs)) == Nx.tensor(1, type: {:u, 8}) do
      raise """
      expected
      #{inspect(lhs)}
      to be less than or equal to
      #{inspect(rhs)}
      """
    end
  end
end
| 25.24
| 77
| 0.635499
|
038c06010e2fb8440e7a06465bc13f3bdcd52fd1
| 2,742
|
exs
|
Elixir
|
test/alfred_result_test.exs
|
lee-dohm/alfred-ex
|
109939aa8cf3e7f87be57cdc569c701fde9d1920
|
[
"MIT"
] | 5
|
2018-01-23T05:49:32.000Z
|
2020-01-26T17:54:57.000Z
|
test/alfred_result_test.exs
|
lee-dohm/alfred-ex
|
109939aa8cf3e7f87be57cdc569c701fde9d1920
|
[
"MIT"
] | 1
|
2017-11-11T18:31:18.000Z
|
2017-11-11T18:31:18.000Z
|
test/alfred_result_test.exs
|
lee-dohm/alfred-ex
|
109939aa8cf3e7f87be57cdc569c701fde9d1920
|
[
"MIT"
] | 1
|
2019-12-28T11:33:37.000Z
|
2019-12-28T11:33:37.000Z
|
defmodule Alfred.ResultTest do
  use ExUnit.Case, async: true
  doctest Alfred.Result

  alias Alfred.Result

  import Test.Helpers

  # Construction of plain results and validation of the required/optional
  # fields.
  describe "basic results" do
    test "happy path" do
      result = Result.new("title", "subtitle")

      assert %Result{} = result
      assert result.title == "title"
      assert result.subtitle == "subtitle"
    end

    test "title is required" do
      assert_raise ArgumentError, fn ->
        Result.new(nil, "subtitle")
      end
    end

    test "title cannot be blank" do
      assert_raise ArgumentError, fn ->
        Result.new("   ", "subtitle")
      end
    end

    test "subtitle is required" do
      assert_raise ArgumentError, fn ->
        Result.new("title", nil)
      end
    end

    test "subtitle cannot be blank" do
      assert_raise ArgumentError, fn ->
        Result.new("title", "   ")
      end
    end

    test "sets uid via options" do
      result = Result.new("title", "subtitle", uid: "uid")

      assert result.uid == "uid"
    end

    test "sets arg via options" do
      result = Result.new("title", "subtitle", arg: "arg")

      assert result.arg == "arg"
    end

    test "sets valid via options" do
      result = Result.new("title", "subtitle", valid: false)

      refute result.valid
    end

    test "requires valid to be a boolean value" do
      assert_raise ArgumentError, fn ->
        Result.new("title", "subtitle", valid: 5)
      end
    end

    test "requires uid to be a string value" do
      assert_raise ArgumentError, fn ->
        Result.new("title", "subtitle", uid: 5)
      end
    end
  end

  describe "URL results" do
    test "happy path" do
      result = Result.new_url("title", "http://example.com")

      assert %Result{} = result
      assert result.title == "title"
      assert result.subtitle == "http://example.com"
      assert result.arg == "http://example.com"
      assert result.uid == "http://example.com"
      assert result.autocomplete == "title"
      assert result.quicklookurl == "http://example.com"
    end
  end

  # JSON output is compared against recorded fixtures.
  describe "JSON conversion" do
    # Typo fix: test name was "hanldes a single result".
    test "handles a single result" do
      result = Result.new("title", "subtitle")
      {:ok, json} = Result.to_json(result)

      assert json == fixture("single-result.txt")
    end

    test "handles a list of results" do
      result = Result.new("title", "subtitle")
      {:ok, json} = Result.to_json([result, result, result])

      assert json == fixture("multiple-results.txt")
    end

    test "emits only keys with values that are not nil" do
      result = Result.new("title", "subtitle", uid: "foo", autocomplete: nil)
      {:ok, json} = Result.to_json(result)

      assert json == fixture("result-with-uid.txt")
    end
  end
end
| 24.702703
| 77
| 0.60868
|
038c1812f905c36442e4432474b7c57e6e1fe0f4
| 2,804
|
exs
|
Elixir
|
test/teslamate/settings_test.exs
|
normalfaults/teslamate
|
9c61150bd5614728447e21789ab6edc5169b631e
|
[
"MIT"
] | 1
|
2020-05-17T05:05:22.000Z
|
2020-05-17T05:05:22.000Z
|
test/teslamate/settings_test.exs
|
normalfaults/teslamate
|
9c61150bd5614728447e21789ab6edc5169b631e
|
[
"MIT"
] | null | null | null |
test/teslamate/settings_test.exs
|
normalfaults/teslamate
|
9c61150bd5614728447e21789ab6edc5169b631e
|
[
"MIT"
] | null | null | null |
defmodule TeslaMate.SettingsTest do
  use TeslaMate.DataCase, async: false

  alias TeslaMate.Settings.Settings, as: S
  alias TeslaMate.Settings

  describe "settings" do
    # Valid attribute set used to exercise a successful update.
    @update_attrs %{
      unit_of_length: :mi,
      unit_of_temperature: :F,
      suspend_min: 60,
      suspend_after_idle_min: 60,
      req_no_shift_state_reading: false,
      req_no_temp_reading: false,
      req_not_unlocked: true
    }

    # All-nil attributes: every field is expected to fail the "required"
    # validation.
    @invalid_attrs %{
      unit_of_length: nil,
      unit_of_temperature: nil,
      suspend_min: nil,
      suspend_after_idle_min: nil,
      req_no_shift_state_reading: nil,
      req_no_temp_reading: nil,
      req_not_unlocked: nil
    }

    # Asserts the seeded default values.
    test "get_settings!/0 returns the settings" do
      assert settings = Settings.get_settings!()
      assert settings.unit_of_length == :km
      assert settings.suspend_min == 21
      assert settings.suspend_after_idle_min == 15
      assert settings.req_no_shift_state_reading == false
      assert settings.req_no_temp_reading == false
      assert settings.req_not_unlocked == true
    end

    test "update_settings/2 with valid data updates the settings" do
      # update_settings/2 broadcasts over PubSub, so the PubSub server must
      # be running even though this test does not subscribe.
      {:ok, _pid} = start_supervised({Phoenix.PubSub.PG2, name: TeslaMate.PubSub})

      settings = Settings.get_settings!()

      assert {:ok, %S{} = settings} = Settings.update_settings(settings, @update_attrs)
      assert settings.unit_of_length == :mi
      assert settings.unit_of_temperature == :F
      assert settings.suspend_min == 60
      assert settings.suspend_after_idle_min == 60
      assert settings.req_no_shift_state_reading == false
      assert settings.req_no_temp_reading == false
      assert settings.req_not_unlocked == true
    end

    # A successful update must broadcast the new settings struct to
    # subscribers.
    test "update_settings/2 publishes the settings" do
      {:ok, _pid} = start_supervised({Phoenix.PubSub.PG2, name: TeslaMate.PubSub})
      :ok = Settings.subscribe_to_changes()

      assert {:ok, %S{} = settings} =
               Settings.get_settings!()
               |> Settings.update_settings(@update_attrs)

      assert_receive ^settings
    end

    test "update_settings/2 with invalid data returns error changeset" do
      settings = Settings.get_settings!()

      assert {:error, %Ecto.Changeset{} = changeset} =
               Settings.update_settings(settings, @invalid_attrs)

      assert errors_on(changeset) == %{
               req_no_shift_state_reading: ["can't be blank"],
               req_no_temp_reading: ["can't be blank"],
               req_not_unlocked: ["can't be blank"],
               suspend_after_idle_min: ["can't be blank"],
               suspend_min: ["can't be blank"],
               unit_of_length: ["can't be blank"],
               unit_of_temperature: ["can't be blank"]
             }

      # A failed update must leave the persisted settings untouched.
      assert ^settings = Settings.get_settings!()
    end
  end
end
| 33.783133
| 87
| 0.659415
|
038c19476ea78231cd1c338c9bf804fed7813cf6
| 6,524
|
exs
|
Elixir
|
lib/elixir/test/elixir/gen_server_test.exs
|
tmbb/exdocs_makedown_demo
|
6a0039c54d2fa10d79c080efcef8d70d359678f8
|
[
"Apache-2.0"
] | 1
|
2017-07-25T21:46:25.000Z
|
2017-07-25T21:46:25.000Z
|
lib/elixir/test/elixir/gen_server_test.exs
|
tmbb/exdocs_makedown_demo
|
6a0039c54d2fa10d79c080efcef8d70d359678f8
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/test/elixir/gen_server_test.exs
|
tmbb/exdocs_makedown_demo
|
6a0039c54d2fa10d79c080efcef8d70d359678f8
|
[
"Apache-2.0"
] | 1
|
2017-07-25T21:46:48.000Z
|
2017-07-25T21:46:48.000Z
|
Code.require_file "test_helper.exs", __DIR__
defmodule GenServerTest do
  use ExUnit.Case, async: true

  # Minimal stack server used as the subject of every test below:
  # :pop replies with the head, {:push, x} prepends, unknown messages
  # delegate to the default GenServer implementations via super/2,3.
  defmodule Stack do
    use GenServer

    def handle_call(:pop, _from, [h | t]) do
      {:reply, h, t}
    end

    # :noreply deliberately never replies — used to provoke call timeouts.
    def handle_call(:noreply, _from, h) do
      {:noreply, h}
    end

    def handle_call(request, from, state) do
      super(request, from, state)
    end

    def handle_cast({:push, item}, state) do
      {:noreply, [item | state]}
    end

    def handle_cast(request, state) do
      super(request, state)
    end

    def terminate(_reason, _state) do
      # There is a race condition if the agent is
      # restarted too fast and it is registered.
      try do
        self() |> Process.info(:registered_name) |> elem(1) |> Process.unregister
      rescue
        _ -> :ok
      end
      :ok
    end
  end

  # `use GenServer` should generate a default child_spec/1 and honor
  # overrides passed to `use`.
  test "generates child_spec/1" do
    assert Stack.child_spec([:hello]) == %{
      id: Stack,
      restart: :permanent,
      shutdown: 5000,
      start: {Stack, :start_link, [[:hello]]},
      type: :worker
    }

    defmodule CustomStack do
      use GenServer,
        id: :id,
        restart: :temporary,
        shutdown: :infinity,
        start: {:foo, :bar, []}
    end

    assert CustomStack.child_spec([:hello]) == %{
      id: :id,
      restart: :temporary,
      shutdown: :infinity,
      start: {:foo, :bar, []},
      type: :worker
    }
  end

  # Invalid :name options must raise with a helpful message.
  test "start_link/3" do
    assert_raise ArgumentError, ~r"expected :name option to be one of:", fn ->
      GenServer.start_link(Stack, [:hello], name: "my_gen_server_name")
    end

    assert_raise ArgumentError, ~r"expected :name option to be one of:", fn ->
      GenServer.start_link(Stack, [:hello], name: {:invalid_tuple, "my_gen_server_name"})
    end

    assert_raise ArgumentError, ~r"expected :name option to be one of:", fn ->
      GenServer.start_link(Stack, [:hello], name: {:via, "Via", "my_gen_server_name"})
    end

    assert_raise ArgumentError, ~r/Got: "my_gen_server_name"/, fn ->
      GenServer.start_link(Stack, [:hello], name: "my_gen_server_name")
    end
  end

  # The three supported name registries: :via, :global, and local atoms.
  test "start_link/3 with via" do
    GenServer.start_link(Stack, [:hello], name: {:via, :global, :via_stack})
    assert GenServer.call({:via, :global, :via_stack}, :pop) == :hello
  end

  test "start_link/3 with global" do
    GenServer.start_link(Stack, [:hello], name: {:global, :global_stack})
    assert GenServer.call({:global, :global_stack}, :pop) == :hello
  end

  test "start_link/3 with local" do
    GenServer.start_link(Stack, [:hello], name: :stack)
    assert GenServer.call(:stack, :pop) == :hello
  end

  test "start_link/2, call/2 and cast/2" do
    {:ok, pid} = GenServer.start_link(Stack, [:hello])
    {:links, links} = Process.info(self(), :links)
    assert pid in links
    assert GenServer.call(pid, :pop) == :hello
    assert GenServer.cast(pid, {:push, :world}) == :ok
    assert GenServer.call(pid, :pop) == :world
    assert GenServer.stop(pid) == :ok

    # cast/2 is fire-and-forget: it returns :ok even when nothing is
    # registered under the destination name.
    assert GenServer.cast({:global, :foo}, {:push, :world}) == :ok
    assert GenServer.cast({:via, :foo, :bar}, {:push, :world}) == :ok
    assert GenServer.cast(:foo, {:push, :world}) == :ok
  end

  # call/3 exits with a descriptive tuple for each failure mode:
  # calling yourself, timing out, and calling dead/unknown destinations.
  @tag capture_log: true
  test "call/3 exit messages" do
    name = :self
    Process.register self(), name
    :global.register_name name, self()
    {:ok, pid} = GenServer.start_link(Stack, [:hello])
    {:ok, stopped_pid} = GenServer.start(Stack, [:hello])
    GenServer.stop(stopped_pid)

    assert catch_exit(GenServer.call(name, :pop, 5000)) == {:calling_self, {GenServer, :call, [name, :pop, 5000]}}
    assert catch_exit(GenServer.call({:global, name}, :pop, 5000)) == {:calling_self, {GenServer, :call, [{:global, name}, :pop, 5000]}}
    assert catch_exit(GenServer.call({:via, :global, name}, :pop, 5000)) == {:calling_self, {GenServer, :call, [{:via, :global, name}, :pop, 5000]}}
    assert catch_exit(GenServer.call(self(), :pop, 5000)) == {:calling_self, {GenServer, :call, [self(), :pop, 5000]}}
    assert catch_exit(GenServer.call(pid, :noreply, 1)) == {:timeout, {GenServer, :call, [pid, :noreply, 1]}}
    assert catch_exit(GenServer.call(nil, :pop, 5000)) == {:noproc, {GenServer, :call, [nil, :pop, 5000]}}
    assert catch_exit(GenServer.call(stopped_pid, :pop, 5000)) == {:noproc, {GenServer, :call, [stopped_pid, :pop, 5000]}}
    assert catch_exit(GenServer.call({:stack, :bogus_node}, :pop, 5000)) == {{:nodedown, :bogus_node}, {GenServer, :call, [{:stack, :bogus_node}, :pop, 5000]}}
  end

  # name: nil must skip registration entirely.
  test "nil name" do
    {:ok, pid} = GenServer.start_link(Stack, [:hello], name: nil)
    assert Process.info(pid, :registered_name) == {:registered_name, []}
  end

  # start/2 (unlike start_link/2) must not link the caller.
  test "start/2" do
    {:ok, pid} = GenServer.start(Stack, [:hello])
    {:links, links} = Process.info(self(), :links)
    refute pid in links
    GenServer.stop(pid)
  end

  # abcast/3 returns :abcast regardless of unreachable nodes.
  test "abcast/3" do
    {:ok, _} = GenServer.start_link(Stack, [], name: :stack)

    assert GenServer.abcast(:stack, {:push, :hello}) == :abcast
    assert GenServer.call({:stack, node()}, :pop) == :hello

    assert GenServer.abcast([node(), :foo@bar], :stack, {:push, :world}) == :abcast
    assert GenServer.call(:stack, :pop) == :world

    GenServer.stop(:stack)
  end

  # multi_call/4 returns {replies, bad_nodes}.
  test "multi_call/4" do
    {:ok, _} = GenServer.start_link(Stack, [:hello, :world], name: :stack)

    assert GenServer.multi_call(:stack, :pop) ==
           {[{node(), :hello}], []}
    assert GenServer.multi_call([node(), :foo@bar], :stack, :pop) ==
           {[{node(), :world}], [:foo@bar]}

    GenServer.stop(:stack)
  end

  # whereis/1 resolves every supported destination form to a pid (or
  # passes through remote-name tuples / returns nil for unknown names).
  test "whereis/1" do
    name = :whereis_server

    {:ok, pid} = GenServer.start_link(Stack, [], name: name)
    assert GenServer.whereis(name) == pid
    assert GenServer.whereis({name, node()}) == pid
    assert GenServer.whereis({name, :another_node}) == {name, :another_node}
    assert GenServer.whereis(pid) == pid
    assert GenServer.whereis(:whereis_bad_server) == nil

    {:ok, pid} = GenServer.start_link(Stack, [], name: {:global, name})
    assert GenServer.whereis({:global, name}) == pid
    assert GenServer.whereis({:global, :whereis_bad_server}) == nil
    assert GenServer.whereis({:via, :global, name}) == pid
    assert GenServer.whereis({:via, :global, :whereis_bad_server}) == nil
  end

  test "stop/3" do
    {:ok, pid} = GenServer.start(Stack, [])
    assert GenServer.stop(pid, :normal) == :ok

    {:ok, _} = GenServer.start(Stack, [], name: :stack_for_stop)
    assert GenServer.stop(:stack_for_stop, :normal) == :ok
  end
end
| 33.45641
| 159
| 0.625077
|
038c4568d29c29a0095bdab09fb3aec4f3b1afad
| 10,026
|
exs
|
Elixir
|
test/course_planner_web/controllers/teacher_controller_test.exs
|
digitalnatives/course_planner
|
27b1c8067edc262685e9c4dcbfcf82633bc8b8dc
|
[
"MIT"
] | 38
|
2017-04-11T13:37:38.000Z
|
2021-05-22T19:35:36.000Z
|
test/course_planner_web/controllers/teacher_controller_test.exs
|
digitalnatives/course_planner
|
27b1c8067edc262685e9c4dcbfcf82633bc8b8dc
|
[
"MIT"
] | 226
|
2017-04-07T13:14:14.000Z
|
2018-03-08T16:50:11.000Z
|
test/course_planner_web/controllers/teacher_controller_test.exs
|
digitalnatives/course_planner
|
27b1c8067edc262685e9c4dcbfcf82633bc8b8dc
|
[
"MIT"
] | 7
|
2017-08-30T23:58:13.000Z
|
2021-03-28T11:50:45.000Z
|
defmodule CoursePlanner.TeacherControllerTest do
  use CoursePlannerWeb.ConnCase

  alias CoursePlanner.{Repo, Accounts.User}

  import CoursePlanner.Factory

  # Every test gets a conn authenticated as a coordinator by default.
  setup do
    {:ok, conn: login_as(:coordinator)}
  end

  # Inserts a user of the given role and returns a conn logged in as them.
  defp login_as(user_type) do
    user_type
    |> insert()
    |> guardian_login_html()
  end

  # For every role in `roles`: log in as that role, run `request_fun` with
  # the authenticated conn, and assert a 403 Forbidden response.
  # Replaces the copy-pasted per-role blocks of the previous version.
  defp assert_forbidden_for(roles, request_fun) do
    Enum.each(roles, fn role ->
      conn = login_as(role)
      assert html_response(request_fun.(conn), 403)
    end)
  end

  test "lists all entries on index", %{conn: conn} do
    conn = get conn, teacher_path(conn, :index)
    assert html_response(conn, 200) =~ "Teachers"
  end

  test "lists all entries on index for supervisor" do
    conn = login_as(:supervisor)
    conn = get conn, teacher_path(conn, :index)
    assert html_response(conn, 200) =~ "Teachers"
  end

  # Name fix: was the garbled "does not teacher student for coordinator user…".
  test "does not create teacher for coordinator user when data is invalid", %{conn: conn} do
    conn = post conn, teacher_path(conn, :create), %{"user" => %{"email" => ""}}
    assert html_response(conn, 200) =~ "Something went wrong."
  end

  test "create teacher for coordinator user", %{conn: conn} do
    conn = post conn, teacher_path(conn, :create), %{"user" => %{"email" => "foo@bar.com"}}
    assert redirected_to(conn) == teacher_path(conn, :index)
    assert get_flash(conn, "info") == "Teacher created and notified by."
  end

  test "shows chosen resource", %{conn: conn} do
    teacher = insert(:teacher)
    conn = get conn, teacher_path(conn, :show, teacher)
    assert html_response(conn, 200) =~ "#{teacher.name} #{teacher.family_name}"
  end

  # NOTE(review): duplicates the test above; kept to avoid changing the
  # suite's observable behavior — consider removing one copy.
  test "shows chosen resource for", %{conn: conn} do
    teacher = insert(:teacher)
    conn = get conn, teacher_path(conn, :show, teacher)
    assert html_response(conn, 200) =~ "#{teacher.name} #{teacher.family_name}"
  end

  test "shows chosen resource for supervisor" do
    conn = login_as(:supervisor)
    teacher = insert(:teacher)
    conn = get conn, teacher_path(conn, :show, teacher)
    assert html_response(conn, 200) =~ "#{teacher.name} #{teacher.family_name}"
  end

  test "renders page not found when id is nonexistent", %{conn: conn} do
    conn = get conn, teacher_path(conn, :show, -1)
    assert html_response(conn, 404)
  end

  test "renders form for editing chosen resource", %{conn: conn} do
    teacher = insert(:teacher, %{name: "Foo", family_name: "Bar"})
    conn = get conn, teacher_path(conn, :edit, teacher)
    assert html_response(conn, 200) =~ "Foo Bar"
  end

  test "renders page not found for editing inexistent resource", %{conn: conn} do
    conn = get conn, teacher_path(conn, :edit, -1)
    assert html_response(conn, 404)
  end

  test "updates chosen resource and redirects when data is valid", %{conn: conn} do
    teacher = insert(:teacher, %{})
    conn = put conn, teacher_path(conn, :update, teacher), %{"user" => %{"email" => "foo@bar.com"}}
    assert redirected_to(conn) == teacher_path(conn, :show, teacher)
    assert Repo.get_by(User, email: "foo@bar.com")
  end

  test "does not updates if the resource does not exist", %{conn: conn} do
    conn = put conn, teacher_path(conn, :update, -1), %{"user" => %{"email" => "foo@bar.com"}}
    assert html_response(conn, 404)
  end

  test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
    teacher = insert(:teacher, %{name: "Foo", family_name: "Bar"})
    conn = put conn, teacher_path(conn, :update, teacher), %{"user" => %{"email" => "not email"}}
    assert html_response(conn, 200) =~ "Foo Bar"
  end

  test "deletes chosen resource", %{conn: conn} do
    teacher = insert(:teacher)
    conn = delete conn, teacher_path(conn, :delete, teacher)
    assert redirected_to(conn) == teacher_path(conn, :index)
    refute Repo.get(User, teacher.id)
  end

  test "does not delete chosen resource when does not exist", %{conn: conn} do
    conn = delete conn, teacher_path(conn, :delete, "-1")
    assert redirected_to(conn) == teacher_path(conn, :index)
    assert get_flash(conn, "error") == "Teacher was not found."
  end

  test "renders form for new resources", %{conn: conn} do
    conn = get conn, teacher_path(conn, :new)
    assert html_response(conn, 200) =~ "New teacher"
  end

  test "does not shows chosen resource for non coordinator user", %{conn: _conn} do
    teacher = insert(:teacher)

    assert_forbidden_for([:student, :teacher, :volunteer], fn conn ->
      get conn, teacher_path(conn, :show, teacher)
    end)
  end

  test "does not list entries on index for non coordinator user", %{conn: _conn} do
    assert_forbidden_for([:student, :teacher, :volunteer], fn conn ->
      get conn, teacher_path(conn, :index)
    end)
  end

  test "does not renders form for editing chosen resource for non coordinator user", %{conn: _conn} do
    teacher = insert(:teacher)

    assert_forbidden_for([:student, :teacher, :volunteer, :supervisor], fn conn ->
      get conn, teacher_path(conn, :edit, teacher)
    end)
  end

  test "does not delete a chosen resource for non coordinator user", %{conn: _conn} do
    teacher = insert(:teacher)

    assert_forbidden_for([:student, :teacher, :volunteer, :supervisor], fn conn ->
      delete conn, teacher_path(conn, :delete, teacher.id)
    end)
  end

  test "does not render form for new teacher for non coordinator user", %{conn: _conn} do
    assert_forbidden_for([:student, :teacher, :volunteer, :supervisor], fn conn ->
      get conn, teacher_path(conn, :new)
    end)
  end

  test "does not create teacher for non coordinator user", %{conn: _conn} do
    assert_forbidden_for([:student, :teacher, :volunteer, :supervisor], fn conn ->
      post conn, teacher_path(conn, :create), %{"user" => %{"email" => "foo@bar.com"}}
    end)
  end

  test "does not update chosen teacher for non coordinator user", %{conn: _conn} do
    teacher = insert(:teacher, %{})

    assert_forbidden_for([:student, :teacher, :volunteer, :supervisor], fn conn ->
      put conn, teacher_path(conn, :update, teacher), %{"user" => %{"email" => "foo@bar.com"}}
    end)
  end

  # A teacher may view, edit, and update their own profile.
  test "show the teacher himself" do
    teacher = insert(:teacher)
    teacher_conn = guardian_login_html(teacher)
    conn = get teacher_conn, teacher_path(teacher_conn, :show, teacher)
    assert html_response(conn, 200) =~ "#{teacher.name} #{teacher.family_name}"
  end

  test "edit the teacher himself" do
    teacher = insert(:teacher, name: "Foo", family_name: "Bar")
    teacher_conn = guardian_login_html(teacher)
    conn = get teacher_conn, teacher_path(teacher_conn, :edit, teacher)
    assert html_response(conn, 200) =~ "Foo Bar"
  end

  test "update the teacher himself" do
    teacher = insert(:teacher)
    teacher_conn = guardian_login_html(teacher)
    conn = put teacher_conn, teacher_path(teacher_conn, :update, teacher), %{"user" => %{"email" => "foo@bar.com"}}
    assert redirected_to(conn) == teacher_path(conn, :show, teacher)
    assert Repo.get_by(User, email: "foo@bar.com")
  end
end
| 37.410448
| 121
| 0.691103
|
038c76c658d26e6ec234663162534df4707df968
| 509
|
ex
|
Elixir
|
fixtures/elixir_output/patch.ex
|
martinsirbe/curlconverter
|
c5324e85d2ca24ef4743fb2bb36139d23367e293
|
[
"MIT"
] | 6
|
2021-10-01T18:49:49.000Z
|
2022-01-18T22:49:39.000Z
|
fixtures/elixir_output/patch.ex
|
martinsirbe/curlconverter
|
c5324e85d2ca24ef4743fb2bb36139d23367e293
|
[
"MIT"
] | null | null | null |
fixtures/elixir_output/patch.ex
|
martinsirbe/curlconverter
|
c5324e85d2ca24ef4743fb2bb36139d23367e293
|
[
"MIT"
] | 1
|
2021-06-09T17:10:12.000Z
|
2021-06-09T17:10:12.000Z
|
# Fixture: the Elixir/HTTPoison code curlconverter is expected to emit for a
# PATCH request with basic auth, custom headers, and a JSON body.
# NOTE(review): if tests compare this file byte-for-byte against generator
# output, these comments cannot live here — confirm before merging.
request = %HTTPoison.Request{
  method: :patch,
  url: "https://ci.example.com/go/api/agents/adb9540a-b954-4571-9d9b-2f330739d4da",
  options: [hackney: [basic_auth: {~s|username|, ~s|password|}]],
  headers: [
    {~s|Accept|, ~s|application/vnd.go.cd.v4+json|},
    {~s|Content-Type|, ~s|application/json|},
  ],
  params: [],
  body: ~s|{ "hostname": "agent02.example.com", "agent_config_state": "Enabled", "resources": ["Java","Linux"], "environments": ["Dev"] }|
}

response = HTTPoison.request(request)
| 36.357143
| 138
| 0.650295
|
038c8c59126a3ee601f61a0ece06e0d93cb4adca
| 259
|
ex
|
Elixir
|
examples/echo_phoenix/lib/echo_phoenix.ex
|
wingyplus/line_ex
|
73ae52c5c07998dac55f5f11aa9ce79d751b67d6
|
[
"MIT"
] | 3
|
2021-07-27T21:29:43.000Z
|
2021-08-06T11:52:24.000Z
|
examples/echo_phoenix/lib/echo_phoenix.ex
|
wingyplus/line_ex
|
73ae52c5c07998dac55f5f11aa9ce79d751b67d6
|
[
"MIT"
] | 7
|
2021-07-26T03:18:39.000Z
|
2021-10-11T11:16:49.000Z
|
examples/echo_phoenix/lib/echo_phoenix.ex
|
wingyplus/line_ex
|
73ae52c5c07998dac55f5f11aa9ce79d751b67d6
|
[
"MIT"
] | 1
|
2021-07-29T16:13:47.000Z
|
2021-07-29T16:13:47.000Z
|
defmodule EchoPhoenix do
  @moduledoc """
  EchoPhoenix keeps the contexts that define your domain
  and business logic.

  Contexts are also responsible for managing your data, regardless
  of whether it comes from the database, an external API or others.
  """
end
| 25.9
| 66
| 0.760618
|
038ce92852ce2f6a558cc5d5b23cf3a0b1a7ac34
| 2,472
|
exs
|
Elixir
|
lib/iex/test/iex/options_test.exs
|
bruce/elixir
|
d77ccf941541959079e5f677f8717da24b486fac
|
[
"Apache-2.0"
] | 1
|
2017-09-09T20:59:04.000Z
|
2017-09-09T20:59:04.000Z
|
lib/iex/test/iex/options_test.exs
|
bruce/elixir
|
d77ccf941541959079e5f677f8717da24b486fac
|
[
"Apache-2.0"
] | null | null | null |
lib/iex/test/iex/options_test.exs
|
bruce/elixir
|
d77ccf941541959079e5f677f8717da24b486fac
|
[
"Apache-2.0"
] | null | null | null |
Code.require_file "../test_helper.exs", __DIR__
defmodule IEx.OptionsTest do
use IEx.Case
@doc """
Hello, I have %{red}ANSI%{reset} escapes.
"""
def ansi_escapes, do: :ok
unless match?({:win32,_}, :os.type) do
test "color" do
opts = [colors: [enabled: true, eval_result: "red"]]
assert capture_iex("1 + 2", opts) == "\e[31m3\e[0m"
# Sanity checks
assert capture_iex("IO.ANSI.escape(\"%{blue}hello\", true)", opts)
== "\e[31m\"\\e[34mhello\\e[0m\"\e[0m"
assert capture_iex("IO.puts IO.ANSI.escape(\"%{blue}hello\", true)", opts)
== "\e[34mhello\e[0m\n\e[31m:ok\e[0m"
assert capture_iex("IO.puts IO.ANSI.escape(\"%{blue}hello\", true)", [colors: [enabled: false]])
== "\e[34mhello\e[0m\n:ok"
# Test that ANSI escapes in the docs are left alone
opts = [colors: [enabled: true, info: "red", eval_result: "red"]]
assert capture_iex("h IEx.OptionsTest.ansi_escapes", opts)
== "\e[31m* def ansi_escapes()\n\e[0m\n\e[31mHello, I have %{red}ANSI%{reset} escapes.\n\e[0m"
# Test that ANSI escapes in iex output are left alone
assert capture_iex("\"%{red} %{blue}\"", opts) == "\e[31m\"%{red} %{blue}\"\e[0m"
assert capture_iex("IO.puts IEx.color(:info, \"%{red} %{blue}\")", opts)
== "\e[31m%{red} %{blue}\e[0m\n\e[31m:ok\e[0m"
end
end
test "inspect opts" do
opts = [inspect: [limit: 3, raw: true]]
assert capture_iex("[1,2,3,4,5]\nArgumentError[]", opts) ==
"[1, 2, 3, ...]\n{ArgumentError, :__exception__, \"argument error\"}"
opts = [inspect: [raw: false]]
assert capture_iex("ArgumentError[]", opts) == "ArgumentError[message: \"argument error\"]"
end
test "history size" do
opts = [history_size: 3]
assert capture_iex("1\n2\n3\nv(1)", opts) == "1\n2\n3\n1"
assert "1\n2\n3\n4\n** (RuntimeError) v(1) is out of bounds" <> _ = capture_iex("1\n2\n3\n4\nv(1)", opts)
assert "1\n2\n3\n4\n** (RuntimeError) v(-4) is out of bounds" <> _ = capture_iex("1\n2\n3\n4\nv(-4)", opts)
assert "1\n2\n3\n4\n2\n** (RuntimeError) v(2) is out of bounds" <> _ = capture_iex("1\n2\n3\n4\nv(2)\nv(2)", opts)
end
test "bad option" do
assert_raise ArgumentError, fn ->
IEx.Options.set :nonexistent_option, nil
end
end
test "bad key" do
assert_raise ArgumentError, fn ->
IEx.Options.set :colors, nonexistent_color_name: "red"
end
end
end
| 38.030769
| 118
| 0.596278
|
038d2e5a815ff394df6ddaf577a914a05f760087
| 4,163
|
exs
|
Elixir
|
exercism/elixir/simple-linked-list/test/linked_list_test.exs
|
Tyyagoo/studies
|
f8fcc3a539cfb6d04a149174c88bf2208e220b96
|
[
"Unlicense"
] | null | null | null |
exercism/elixir/simple-linked-list/test/linked_list_test.exs
|
Tyyagoo/studies
|
f8fcc3a539cfb6d04a149174c88bf2208e220b96
|
[
"Unlicense"
] | null | null | null |
exercism/elixir/simple-linked-list/test/linked_list_test.exs
|
Tyyagoo/studies
|
f8fcc3a539cfb6d04a149174c88bf2208e220b96
|
[
"Unlicense"
] | null | null | null |
defmodule LinkedListTest do
use ExUnit.Case
test "count/1 of new list" do
list = LinkedList.new()
assert LinkedList.count(list) == 0
end
test "empty?/1 of new list" do
list = LinkedList.new()
assert LinkedList.empty?(list)
end
test "count/1 of list of 1 datum" do
list = LinkedList.new() |> LinkedList.push(10)
assert LinkedList.count(list) == 1
end
test "empty?/1 of list of 1 datum" do
list = LinkedList.new() |> LinkedList.push(20)
refute LinkedList.empty?(list)
end
test "peek/1 of list of 1 datum" do
list = LinkedList.new() |> LinkedList.push(20)
assert LinkedList.peek(list) == {:ok, 20}
end
test "peek/1 of list of empty list" do
list = LinkedList.new()
assert LinkedList.peek(list) == {:error, :empty_list}
end
test "tail/1 of empty list" do
list = LinkedList.new()
assert {:error, :empty_list} = LinkedList.tail(list)
end
test "tail/1 of list of 1 datum" do
list = LinkedList.new() |> LinkedList.push(:hello)
assert {:ok, tail} = LinkedList.tail(list)
assert LinkedList.peek(tail) == {:error, :empty_list}
end
test "pushed items are stacked" do
list =
LinkedList.new()
|> LinkedList.push(:a)
|> LinkedList.push(:b)
assert LinkedList.peek(list) == {:ok, :b}
assert {:ok, list} = LinkedList.tail(list)
assert LinkedList.peek(list) == {:ok, :a}
assert {:ok, list} = LinkedList.tail(list)
assert LinkedList.peek(list) == {:error, :empty_list}
end
test "push 10 times" do
list = Enum.reduce(1..10, LinkedList.new(), &LinkedList.push(&2, &1))
assert LinkedList.peek(list) == {:ok, 10}
assert LinkedList.count(list) == 10
end
test "pop/1 of list of 1 datum" do
list = LinkedList.new() |> LinkedList.push(:a)
assert {:ok, :a, tail} = LinkedList.pop(list)
assert LinkedList.count(tail) == 0
end
test "popping frenzy" do
list = Enum.reduce(11..20, LinkedList.new(), &LinkedList.push(&2, &1))
assert LinkedList.count(list) == 10
assert {:ok, 20, list} = LinkedList.pop(list)
assert {:ok, 19, list} = LinkedList.pop(list)
assert {:ok, 18, list} = LinkedList.pop(list)
assert {:ok, 17, list} = LinkedList.pop(list)
assert {:ok, 16, list} = LinkedList.pop(list)
assert {:ok, 15} = LinkedList.peek(list)
assert LinkedList.count(list) == 5
end
test "from_list/1 of empty list" do
list = LinkedList.from_list([])
assert LinkedList.count(list) == 0
end
test "from_list/1 of 2 element list, keeping order" do
list = LinkedList.from_list([:a, :b])
assert LinkedList.count(list) == 2
assert {:ok, :a, list} = LinkedList.pop(list)
assert {:ok, :b, list} = LinkedList.pop(list)
assert {:error, :empty_list} = LinkedList.pop(list)
end
test "to_list/1 of empty list" do
list = LinkedList.new()
assert LinkedList.to_list(list) == []
end
test "to_list/1 of list of 1 datum" do
list = LinkedList.from_list([:mon])
assert LinkedList.to_list(list) == [:mon]
end
test "to_list/1 of list of 2 datum, keeping order" do
list = LinkedList.from_list([:mon, :tues])
assert LinkedList.to_list(list) == [:mon, :tues]
end
test "from_list/1 and successive push/2 of a list result in reversed order" do
list = [:mon, :tues]
from_list = LinkedList.from_list(list)
push_list = Enum.reduce(list, LinkedList.new(), &LinkedList.push(&2, &1))
assert LinkedList.to_list(from_list) ==
LinkedList.to_list(push_list) |> Enum.reverse()
end
test "reverse/1 of list of 2 datum" do
list = LinkedList.from_list([1, 2, 3]) |> LinkedList.reverse()
assert LinkedList.to_list(list) == [3, 2, 1]
end
test "reverse/1 of list of 200 datum" do
list = Enum.to_list(1..200)
linked_list = LinkedList.from_list(list) |> LinkedList.reverse()
assert LinkedList.to_list(linked_list) == Enum.reverse(list)
end
test "reverse/1 round trip" do
list = Enum.to_list(1..200)
linked_list =
LinkedList.from_list(list)
|> LinkedList.reverse()
|> LinkedList.reverse()
assert LinkedList.to_list(linked_list) == list
end
end
| 29.316901
| 80
| 0.644968
|
038d349a3142d0c928a396e3b4bd0288e4525bfb
| 720
|
ex
|
Elixir
|
lib/diversity_in_tech/companies/review.ex
|
dreamingechoes/diversity-in-tech
|
4eb5dadf69f82fd08e1cdd1b125264930d3b4e6f
|
[
"MIT"
] | 8
|
2018-06-22T05:43:30.000Z
|
2020-04-13T20:31:40.000Z
|
lib/diversity_in_tech/companies/review.ex
|
dreamingechoes/diversity-in-tech
|
4eb5dadf69f82fd08e1cdd1b125264930d3b4e6f
|
[
"MIT"
] | null | null | null |
lib/diversity_in_tech/companies/review.ex
|
dreamingechoes/diversity-in-tech
|
4eb5dadf69f82fd08e1cdd1b125264930d3b4e6f
|
[
"MIT"
] | null | null | null |
defmodule DiversityInTech.Companies.Review do
use Ecto.Schema
import Ecto.Changeset
alias DiversityInTech.Companies.AttributeReview
alias DiversityInTech.Companies.Review
schema "reviews" do
field(:advice, :string)
field(:cons, :string)
field(:pros, :string)
field(:status, ReviewStatusEnum)
field(:company_id, :id)
timestamps()
# Associations
has_many(:attributes_reviews, AttributeReview, on_delete: :delete_all)
end
# Changeset cast params
@params [:pros, :cons, :advice, :status, :company_id]
@required [:company_id]
@doc false
def changeset(%Review{} = review, attrs) do
review
|> cast(attrs, @params)
|> validate_required(@required)
end
end
| 23.225806
| 74
| 0.704167
|
038d51c54011b175836644ff6748d3cc7625db7f
| 693
|
ex
|
Elixir
|
lib/config_cat/constants.ex
|
kianmeng/elixir-sdk
|
89fb73f6249f82ac8415246519c17ad4ade54760
|
[
"MIT"
] | null | null | null |
lib/config_cat/constants.ex
|
kianmeng/elixir-sdk
|
89fb73f6249f82ac8415246519c17ad4ade54760
|
[
"MIT"
] | null | null | null |
lib/config_cat/constants.ex
|
kianmeng/elixir-sdk
|
89fb73f6249f82ac8415246519c17ad4ade54760
|
[
"MIT"
] | null | null | null |
defmodule ConfigCat.Constants do
@moduledoc false
defmacro base_url_global, do: "https://cdn-global.configcat.com"
defmacro base_url_eu_only, do: "https://cdn-eu.configcat.com"
defmacro base_path, do: "configuration-files"
defmacro config_filename, do: "config_v5.json"
defmacro feature_flags, do: "f"
defmacro preferences, do: "p"
defmacro preferences_base_url, do: "u"
defmacro redirect, do: "r"
defmacro comparator, do: "t"
defmacro comparison_attribute, do: "a"
defmacro comparison_value, do: "c"
defmacro rollout_rules, do: "r"
defmacro percentage_rules, do: "p"
defmacro percentage, do: "p"
defmacro value, do: "v"
defmacro variation_id, do: "i"
end
| 30.130435
| 66
| 0.730159
|
038d600fd66a353b72d30420de01b8175943b6e8
| 206
|
ex
|
Elixir
|
lib/school_house_web/views/layout_view.ex
|
fmterrorf/school_house
|
b3a1374d4b23fbc027b6bc9c95004c6556c48bf0
|
[
"Apache-2.0"
] | 90
|
2021-02-10T23:57:52.000Z
|
2022-03-17T18:36:55.000Z
|
lib/school_house_web/views/layout_view.ex
|
fmterrorf/school_house
|
b3a1374d4b23fbc027b6bc9c95004c6556c48bf0
|
[
"Apache-2.0"
] | 120
|
2021-02-11T00:32:44.000Z
|
2022-03-23T04:11:59.000Z
|
lib/school_house_web/views/layout_view.ex
|
fmterrorf/school_house
|
b3a1374d4b23fbc027b6bc9c95004c6556c48bf0
|
[
"Apache-2.0"
] | 18
|
2021-04-15T09:57:44.000Z
|
2022-03-23T02:55:26.000Z
|
defmodule SchoolHouseWeb.LayoutView do
use SchoolHouseWeb, :view
def render_dark_mode?(conn) do
case Map.get(conn.query_params, "ui", nil) do
"dark" -> "dark"
_ -> ""
end
end
end
| 18.727273
| 49
| 0.640777
|
038dbf31935dfe872bf24b8dabb7553e3df8eefa
| 628
|
ex
|
Elixir
|
lib/nudge_api/users/user.ex
|
feelja-tech/feelja-api
|
03ce15430460cf2dac24a7740242c7e5ac5c5804
|
[
"MIT"
] | null | null | null |
lib/nudge_api/users/user.ex
|
feelja-tech/feelja-api
|
03ce15430460cf2dac24a7740242c7e5ac5c5804
|
[
"MIT"
] | null | null | null |
lib/nudge_api/users/user.ex
|
feelja-tech/feelja-api
|
03ce15430460cf2dac24a7740242c7e5ac5c5804
|
[
"MIT"
] | null | null | null |
defmodule NudgeApi.Users.User do
use Ecto.Schema
import Ecto.Changeset
schema "users" do
field :phone_number, :string
field :sms_code, :string
field :state, NudgeApi.Fsms.UserStateEnum
field :otp, :string
field :otp_expires_at, :utc_datetime
has_one :user_profile, NudgeApi.Users.UserProfile
timestamps(type: :utc_datetime)
end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [
:phone_number,
:sms_code,
:state,
:otp_expires_at,
:otp
])
|> validate_required([:phone_number])
|> unique_constraint(:phone_number)
end
end
| 20.258065
| 53
| 0.664013
|
038dd6727bdedf35780772bd9b65769a0905ce74
| 2,795
|
ex
|
Elixir
|
clients/service_networking/lib/google_api/service_networking/v1/model/system_parameters.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/service_networking/lib/google_api/service_networking/v1/model/system_parameters.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/service_networking/lib/google_api/service_networking/v1/model/system_parameters.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceNetworking.V1.Model.SystemParameters do
@moduledoc """
### System parameter configuration
A system parameter is a special kind of parameter defined by the API
system, not by an individual API. It is typically mapped to an HTTP header
and/or a URL query parameter. This configuration specifies which methods
change the names of the system parameters.
## Attributes
* `rules` (*type:* `list(GoogleApi.ServiceNetworking.V1.Model.SystemParameterRule.t)`, *default:* `nil`) - Define system parameters.
The parameters defined here will override the default parameters
implemented by the system. If this field is missing from the service
config, default system parameters will be used. Default system parameters
and names is implementation-dependent.
Example: define api key for all methods
system_parameters
rules:
- selector: "*"
parameters:
- name: api_key
url_query_parameter: api_key
Example: define 2 api key names for a specific method.
system_parameters
rules:
- selector: "/ListShelves"
parameters:
- name: api_key
http_header: Api-Key1
- name: api_key
http_header: Api-Key2
**NOTE:** All service configuration rules follow "last one wins" order.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:rules => list(GoogleApi.ServiceNetworking.V1.Model.SystemParameterRule.t())
}
field(:rules, as: GoogleApi.ServiceNetworking.V1.Model.SystemParameterRule, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceNetworking.V1.Model.SystemParameters do
def decode(value, options) do
GoogleApi.ServiceNetworking.V1.Model.SystemParameters.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceNetworking.V1.Model.SystemParameters do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.9375
| 136
| 0.696601
|
038df15fafa4359d47c8786d282b932ecb1c9de8
| 3,891
|
ex
|
Elixir
|
lib/trie/destroyer.ex
|
DigixGlobal/merkle_patricia_tree
|
76e6b1bca9f7d96592350132bab37fdffcbec09b
|
[
"MIT"
] | 20
|
2017-06-20T12:46:54.000Z
|
2022-02-03T10:38:11.000Z
|
lib/trie/destroyer.ex
|
DigixGlobal/merkle_patricia_tree
|
76e6b1bca9f7d96592350132bab37fdffcbec09b
|
[
"MIT"
] | 7
|
2017-08-30T19:30:57.000Z
|
2018-05-01T20:15:14.000Z
|
lib/trie/destroyer.ex
|
DigixGlobal/merkle_patricia_tree
|
76e6b1bca9f7d96592350132bab37fdffcbec09b
|
[
"MIT"
] | 13
|
2017-08-21T13:03:14.000Z
|
2021-05-25T17:51:20.000Z
|
defmodule MerklePatriciaTree.Trie.Destroyer do
@moduledoc """
Destroyer is responsible for removing keys from a
merkle trie. To remove a key, we need to make a
delta to our trie which ends up as the canonical
form of the given tree as defined in http://gavwood.com/Paper.pdf.
Note: this algorithm is non-obvious, and hence why we have a good
number of functional and invariant tests. We should add more specific
unit tests to this module.
"""
alias MerklePatriciaTree.Trie
alias MerklePatriciaTree.Trie.Node
alias MerklePatriciaTree.ListHelper
@empty_branch <<>>
@doc """
Removes a key from a given trie, if it exists.
This may radically change the structure of the trie.
"""
@spec remove_key(Node.trie_node(), Trie.key(), Trie.t()) :: Node.trie_node()
def remove_key(trie_node, key, trie) do
trie_remove_key(trie_node, key, trie)
end
# To remove this leaf, simply remove it
defp trie_remove_key({:leaf, leaf_prefix, _value}, prefix, _trie) when prefix == leaf_prefix do
:empty
end
# This key doesn't exist, do nothing.
defp trie_remove_key({:leaf, leaf_prefix, value}, _prefix, _trie) do
{:leaf, leaf_prefix, value}
end
# Shed shared prefix and continue removal operation
defp trie_remove_key({:ext, ext_prefix, node_hash}, key_prefix, trie) do
{_matching_prefix, ext_tl, remaining_tl} = ListHelper.overlap(ext_prefix, key_prefix)
if ext_tl |> Enum.empty?() do
existing_node = Node.decode_trie(node_hash |> Trie.into(trie))
updated_node = trie_remove_key(existing_node, remaining_tl, trie)
# Handle the potential cases of children
case updated_node do
:empty ->
:empty
{:leaf, leaf_prefix, leaf_value} ->
{:leaf, ext_prefix ++ leaf_prefix, leaf_value}
{:ext, new_ext_prefix, new_ext_node_hash} ->
{:ext, ext_prefix ++ new_ext_prefix, new_ext_node_hash}
els ->
{:ext, ext_prefix, els |> Node.encode_node(trie)}
end
else
# Prefix doesn't match ext, do nothing.
{:ext, ext_prefix, node_hash}
end
end
# Remove from a branch when directly on value
defp trie_remove_key({:branch, branches}, [], _trie) when length(branches) == 17 do
{:branch, List.replace_at(branches, 16, nil)}
end
# Remove beneath a branch
defp trie_remove_key({:branch, branches}, [prefix_hd | prefix_tl], trie)
when length(branches) == 17 do
updated_branches =
List.update_at(branches, prefix_hd, fn branch ->
branch_node = branch |> Trie.into(trie) |> Node.decode_trie()
branch_node |> trie_remove_key(prefix_tl, trie) |> Node.encode_node(trie)
end)
non_blank_branches =
updated_branches
|> Enum.drop(-1)
|> Enum.with_index()
|> Enum.filter(fn {branch, _} -> branch != @empty_branch end)
final_value = List.last(updated_branches)
cond do
non_blank_branches |> Enum.empty?() ->
# We just have a final value, this will need to percolate up
{:leaf, [], final_value}
Enum.count(non_blank_branches) == 1 and final_value == "" ->
# We just have a node we need to percolate up
{branch_node, i} = List.first(non_blank_branches)
decoded_branch_node = Node.decode_trie(branch_node |> Trie.into(trie))
case decoded_branch_node do
{:leaf, leaf_prefix, leaf_value} ->
{:leaf, [i | leaf_prefix], leaf_value}
{:ext, ext_prefix, ext_node_hash} ->
{:ext, [i | ext_prefix], ext_node_hash}
# TODO: Is this illegal since ext has to have at least two nibbles?
{:branch, _branches} ->
{:ext, [i], branch_node}
end
true ->
{:branch, updated_branches}
end
end
# Merge into empty to create a leaf
defp trie_remove_key(:empty, _prefix, _trie) do
:empty
end
end
| 31.634146
| 97
| 0.659728
|
038df260dfe8f83bc039b65495efc6c444343ae7
| 2,803
|
ex
|
Elixir
|
clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1beta1_document_page_anchor_page_ref.ex
|
renovate-bot/elixir-google-api
|
1da34cd39b670c99f067011e05ab90af93fef1f6
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1beta1_document_page_anchor_page_ref.ex
|
swansoffiee/elixir-google-api
|
9ea6d39f273fb430634788c258b3189d3613dde0
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1beta1_document_page_anchor_page_ref.ex
|
dazuma/elixir-google-api
|
6a9897168008efe07a6081d2326735fe332e522c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta1DocumentPageAnchorPageRef do
@moduledoc """
Represents a weak reference to a page element within a document.
## Attributes
* `boundingPoly` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta1BoundingPoly.t`, *default:* `nil`) - Optional. Identifies the bounding polygon of a layout element on the page.
* `confidence` (*type:* `number()`, *default:* `nil`) - Optional. Confidence of detected page element, if applicable. Range [0, 1].
* `layoutId` (*type:* `String.t`, *default:* `nil`) - Optional. Deprecated. Use PageRef.bounding_poly instead.
* `layoutType` (*type:* `String.t`, *default:* `nil`) - Optional. The type of the layout element that is being referenced if any.
* `page` (*type:* `String.t`, *default:* `nil`) - Required. Index into the Document.pages element, for example using Document.pages to locate the related page element. This field is skipped when its value is the default 0. See https://developers.google.com/protocol-buffers/docs/proto3#json.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:boundingPoly =>
GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta1BoundingPoly.t() | nil,
:confidence => number() | nil,
:layoutId => String.t() | nil,
:layoutType => String.t() | nil,
:page => String.t() | nil
}
field(:boundingPoly, as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta1BoundingPoly)
field(:confidence)
field(:layoutId)
field(:layoutType)
field(:page)
end
defimpl Poison.Decoder,
for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta1DocumentPageAnchorPageRef do
def decode(value, options) do
GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta1DocumentPageAnchorPageRef.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta1DocumentPageAnchorPageRef do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 43.123077
| 295
| 0.730289
|
038dfe85b3b14dd56d828631565c9f1922de6409
| 61,259
|
ex
|
Elixir
|
lib/elixir/lib/calendar/datetime.ex
|
jmbejar/elixir
|
4a5fcbe04a5a477e27c6645a8a4bdb98332fb6eb
|
[
"Apache-2.0"
] | 1
|
2019-06-27T08:47:13.000Z
|
2019-06-27T08:47:13.000Z
|
lib/elixir/lib/calendar/datetime.ex
|
jmbejar/elixir
|
4a5fcbe04a5a477e27c6645a8a4bdb98332fb6eb
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/calendar/datetime.ex
|
jmbejar/elixir
|
4a5fcbe04a5a477e27c6645a8a4bdb98332fb6eb
|
[
"Apache-2.0"
] | 1
|
2021-12-09T11:36:07.000Z
|
2021-12-09T11:36:07.000Z
|
defmodule DateTime do
@moduledoc """
A datetime implementation with a time zone.
This datetime can be seen as a snapshot of a date and time
at a given time zone. For such purposes, it also includes both
UTC and Standard offsets, as well as the zone abbreviation
field used exclusively for formatting purposes. Note future
datetimes are not necessarily guaranteed to exist, as time
zones may change any time in the future due to geopolitical
reasons. See the "Datetimes as snapshots" section for more
information.
Remember, comparisons in Elixir using `==/2`, `>/2`, `</2` and friends
are structural and based on the DateTime struct fields. For proper
comparison between datetimes, use the `compare/2` function. The
existence of the `compare/2` function in this module also allows
using `Enum.min/2` and `Enum.max/2` functions to get the minimum and
maximum datetime of an `Enum`. For example:
iex> Enum.min([~U[2022-01-12 00:01:00.00Z], ~U[2021-01-12 00:01:00.00Z]], DateTime)
~U[2021-01-12 00:01:00.00Z]
Developers should avoid creating the `DateTime` struct directly
and instead rely on the functions provided by this module as
well as the ones in third-party calendar libraries.
## Time zone database
Many functions in this module require a time zone database.
By default, it uses the default time zone database returned by
`Calendar.get_time_zone_database/0`, which defaults to
`Calendar.UTCOnlyTimeZoneDatabase` which only handles "Etc/UTC"
datetimes and returns `{:error, :utc_only_time_zone_database}`
for any other time zone.
Other time zone databases can also be configured. Here are some
available options and libraries:
* [`tz`](https://github.com/mathieuprog/tz)
* [`tzdata`](https://github.com/lau/tzdata)
* [`zoneinfo`](https://github.com/smartrent/zoneinfo) -
recommended for embedded devices
To use them, first make sure it is added as a dependency in `mix.exs`.
It can then be configured either via configuration:
config :elixir, :time_zone_database, Tz.TimeZoneDatabase
or by calling `Calendar.put_time_zone_database/1`:
Calendar.put_time_zone_database(Tz.TimeZoneDatabase)
See the proper names in the library installation instructions.
## Datetimes as snapshots
In the first section, we described datetimes as a "snapshot of
a date and time at a given time zone". To understand precisely
what we mean, let's see an example.
Imagine someone in Poland wants to schedule a meeting with someone
in Brazil in the next year. The meeting will happen at 2:30 AM
in the Polish time zone. At what time will the meeting happen in
Brazil?
You can consult the time zone database today, one year before,
using the API in this module and it will give you an answer that
is valid right now. However, this answer may not be valid in the
future. Why? Because both Brazil and Poland may change their timezone
rules, ultimately affecting the result. For example, a country may
choose to enter or abandon "Daylight Saving Time", which is a
process where we adjust the clock one hour forward or one hour
back once per year. Whenener the rules change, the exact instant
that 2:30 AM in Polish time will be in Brazil may change.
In other words, whenever working with future DateTimes, there is
no guarantee the results you get will always be correct, until
the event actually happens. Therefore, when you ask for a future
time, the answers you get are a snapshot that reflects the current
state of the time zone rules. For datetimes in the past, this is
not a problem, because time zone rules do not change for past
events.
To make matters worse, it may be that the 2:30 AM in Polish time
does not actually even exist or it is ambiguous. If a certain
time zone observes "Daylight Saving Time", they will move their
clock forward once a year. When this happens, there is a whole
hour that does not exist. Then, when they move the clock back,
there is a certain hour that will happen twice. So if you want
to schedule a meeting when this shift back happens, you would
need to explicitly say which of the 2:30 AM you precisely mean.
Applications that are date and time sensitive, need to take
these scenarios into account and correctly communicate them to
users.
The good news is: Elixir contains all of the building blocks
necessary to tackle those problems. The default timezone database
used by Elixir, `Calendar.UTCOnlyTimeZoneDatabase`, only works
with UTC, which does not observe those issues. Once you bring
a proper time zone database, the functions in this module will
query the database and return the relevant information. For
example, look at how `DateTime.new/4` returns different results
based on the scenarios described in this section.
"""
@enforce_keys [:year, :month, :day, :hour, :minute, :second] ++
[:time_zone, :zone_abbr, :utc_offset, :std_offset]
defstruct [
:year,
:month,
:day,
:hour,
:minute,
:second,
:time_zone,
:zone_abbr,
:utc_offset,
:std_offset,
microsecond: {0, 0},
calendar: Calendar.ISO
]
@type t :: %__MODULE__{
year: Calendar.year(),
month: Calendar.month(),
day: Calendar.day(),
calendar: Calendar.calendar(),
hour: Calendar.hour(),
minute: Calendar.minute(),
second: Calendar.second(),
microsecond: Calendar.microsecond(),
time_zone: Calendar.time_zone(),
zone_abbr: Calendar.zone_abbr(),
utc_offset: Calendar.utc_offset(),
std_offset: Calendar.std_offset()
}
@unix_days :calendar.date_to_gregorian_days({1970, 1, 1})
@seconds_per_day 24 * 60 * 60
@doc """
Returns the current datetime in UTC.
## Examples
iex> datetime = DateTime.utc_now()
iex> datetime.time_zone
"Etc/UTC"
"""
@spec utc_now(Calendar.calendar()) :: t
def utc_now(calendar \\ Calendar.ISO) do
System.os_time() |> from_unix!(:native, calendar)
end
@doc """
Builds a datetime from date and time structs.
It expects a time zone to put the `DateTime` in.
If the time zone is not passed it will default to `"Etc/UTC"`,
which always succeeds. Otherwise, the `DateTime` is checked against the time zone database
given as `time_zone_database`. See the "Time zone database"
section in the module documentation.
## Examples
iex> DateTime.new(~D[2016-05-24], ~T[13:26:08.003], "Etc/UTC")
{:ok, ~U[2016-05-24 13:26:08.003Z]}
When the datetime is ambiguous - for instance during changing from summer
to winter time - the two possible valid datetimes are returned in a tuple.
The first datetime is also the one which comes first chronologically, while
the second one comes last.
iex> {:ambiguous, first_dt, second_dt} = DateTime.new(~D[2018-10-28], ~T[02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
iex> first_dt
#DateTime<2018-10-28 02:30:00+02:00 CEST Europe/Copenhagen>
iex> second_dt
#DateTime<2018-10-28 02:30:00+01:00 CET Europe/Copenhagen>
When there is a gap in wall time - for instance in spring when the clocks are
turned forward - the latest valid datetime just before the gap and the first
valid datetime just after the gap.
iex> {:gap, just_before, just_after} = DateTime.new(~D[2019-03-31], ~T[02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
iex> just_before
#DateTime<2019-03-31 01:59:59.999999+01:00 CET Europe/Copenhagen>
iex> just_after
#DateTime<2019-03-31 03:00:00+02:00 CEST Europe/Copenhagen>
Most of the time there is one, and just one, valid datetime for a certain
date and time in a certain time zone.
iex> {:ok, datetime} = DateTime.new(~D[2018-07-28], ~T[12:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
iex> datetime
#DateTime<2018-07-28 12:30:00+02:00 CEST Europe/Copenhagen>
"""
@doc since: "1.11.0"
@spec new(Date.t(), Time.t(), Calendar.time_zone(), Calendar.time_zone_database()) ::
{:ok, t}
| {:ambiguous, first_datetime :: t, second_datetime :: t}
| {:gap, t, t}
| {:error,
:incompatible_calendars | :time_zone_not_found | :utc_only_time_zone_database}
def new(
date,
time,
time_zone \\ "Etc/UTC",
time_zone_database \\ Calendar.get_time_zone_database()
)
def new(%Date{calendar: calendar} = date, %Time{calendar: calendar} = time, "Etc/UTC", _db) do
%{year: year, month: month, day: day} = date
%{hour: hour, minute: minute, second: second, microsecond: microsecond} = time
datetime = %DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
std_offset: 0,
utc_offset: 0,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}
{:ok, datetime}
end
def new(date, time, time_zone, time_zone_database) do
with {:ok, naive_datetime} <- NaiveDateTime.new(date, time) do
from_naive(naive_datetime, time_zone, time_zone_database)
end
end
@doc """
Builds a datetime from date and time structs, raising on errors.
It expects a time zone to put the `DateTime` in.
If the time zone is not passed it will default to `"Etc/UTC"`,
which always succeeds. Otherwise, the DateTime is checked against the time zone database
given as `time_zone_database`. See the "Time zone database"
section in the module documentation.
## Examples
iex> DateTime.new!(~D[2016-05-24], ~T[13:26:08.003], "Etc/UTC")
~U[2016-05-24 13:26:08.003Z]
When the datetime is ambiguous - for instance during changing from summer
to winter time - an error will be raised.
iex> DateTime.new!(~D[2018-10-28], ~T[02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
** (ArgumentError) cannot build datetime with ~D[2018-10-28] and ~T[02:30:00] because such instant is ambiguous in time zone Europe/Copenhagen as there is an overlap between #DateTime<2018-10-28 02:30:00+02:00 CEST Europe/Copenhagen> and #DateTime<2018-10-28 02:30:00+01:00 CET Europe/Copenhagen>
When there is a gap in wall time - for instance in spring when the clocks are
turned forward - an error will be raised.
iex> DateTime.new!(~D[2019-03-31], ~T[02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
** (ArgumentError) cannot build datetime with ~D[2019-03-31] and ~T[02:30:00] because such instant does not exist in time zone Europe/Copenhagen as there is a gap between #DateTime<2019-03-31 01:59:59.999999+01:00 CET Europe/Copenhagen> and #DateTime<2019-03-31 03:00:00+02:00 CEST Europe/Copenhagen>
Most of the time there is one, and just one, valid datetime for a certain
date and time in a certain time zone.
iex> datetime = DateTime.new!(~D[2018-07-28], ~T[12:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
iex> datetime
#DateTime<2018-07-28 12:30:00+02:00 CEST Europe/Copenhagen>
"""
@doc since: "1.11.0"
@spec new!(Date.t(), Time.t(), Calendar.time_zone(), Calendar.time_zone_database()) :: t
def new!(
date,
time,
time_zone \\ "Etc/UTC",
time_zone_database \\ Calendar.get_time_zone_database()
)
def new!(date, time, time_zone, time_zone_database) do
case new(date, time, time_zone, time_zone_database) do
{:ok, datetime} ->
datetime
{:ambiguous, dt1, dt2} ->
raise ArgumentError,
"cannot build datetime with #{inspect(date)} and #{inspect(time)} because such " <>
"instant is ambiguous in time zone #{time_zone} as there is an overlap " <>
"between #{inspect(dt1)} and #{inspect(dt2)}"
{:gap, dt1, dt2} ->
raise ArgumentError,
"cannot build datetime with #{inspect(date)} and #{inspect(time)} because such " <>
"instant does not exist in time zone #{time_zone} as there is a gap " <>
"between #{inspect(dt1)} and #{inspect(dt2)}"
{:error, reason} ->
raise ArgumentError,
"cannot build datetime with #{inspect(date)} and #{inspect(time)}, reason: #{inspect(reason)}"
end
end
@doc """
Converts the given Unix time to `DateTime`.
The integer can be given in different unit
according to `System.convert_time_unit/3` and it will
be converted to microseconds internally. Up to
253402300799 seconds is supported.
Unix times are always in UTC and therefore the DateTime
will be returned in UTC.
## Examples
iex> {:ok, datetime} = DateTime.from_unix(1_464_096_368)
iex> datetime
~U[2016-05-24 13:26:08Z]
iex> {:ok, datetime} = DateTime.from_unix(1_432_560_368_868_569, :microsecond)
iex> datetime
~U[2015-05-25 13:26:08.868569Z]
iex> {:ok, datetime} = DateTime.from_unix(253_402_300_799)
iex> datetime
~U[9999-12-31 23:59:59Z]
iex> {:error, :invalid_unix_time} = DateTime.from_unix(253_402_300_800)
The unit can also be an integer as in `t:System.time_unit/0`:
iex> {:ok, datetime} = DateTime.from_unix(143_256_036_886_856, 1024)
iex> datetime
~U[6403-03-17 07:05:22.320312Z]
Negative Unix times are supported up to -377705116800 seconds:
iex> {:ok, datetime} = DateTime.from_unix(-377_705_116_800)
iex> datetime
~U[-9999-01-01 00:00:00Z]
iex> {:error, :invalid_unix_time} = DateTime.from_unix(-377_705_116_801)
"""
@spec from_unix(integer, :native | System.time_unit(), Calendar.calendar()) ::
{:ok, t} | {:error, atom}
def from_unix(integer, unit \\ :second, calendar \\ Calendar.ISO) when is_integer(integer) do
case Calendar.ISO.from_unix(integer, unit) do
{:ok, {year, month, day}, {hour, minute, second}, microsecond} ->
iso_datetime = %DateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
std_offset: 0,
utc_offset: 0,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}
convert(iso_datetime, calendar)
{:error, _} = error ->
error
end
end
@doc """
Converts the given Unix time to `DateTime`.
The integer can be given in different unit
according to `System.convert_time_unit/3` and it will
be converted to microseconds internally.
Unix times are always in UTC and therefore the DateTime
will be returned in UTC.
## Examples
# An easy way to get the Unix epoch is passing 0 to this function
iex> DateTime.from_unix!(0)
~U[1970-01-01 00:00:00Z]
iex> DateTime.from_unix!(1_464_096_368)
~U[2016-05-24 13:26:08Z]
iex> DateTime.from_unix!(1_432_560_368_868_569, :microsecond)
~U[2015-05-25 13:26:08.868569Z]
iex> DateTime.from_unix!(143_256_036_886_856, 1024)
~U[6403-03-17 07:05:22.320312Z]
"""
@spec from_unix!(integer, :native | System.time_unit(), Calendar.calendar()) :: t
def from_unix!(integer, unit \\ :second, calendar \\ Calendar.ISO) do
case from_unix(integer, unit, calendar) do
{:ok, datetime} ->
datetime
{:error, :invalid_unix_time} ->
raise ArgumentError, "invalid Unix time #{integer}"
end
end
  @doc """
  Converts the given `NaiveDateTime` to `DateTime`.

  It expects a time zone to put the `NaiveDateTime` in.
  If the time zone is "Etc/UTC", it always succeeds. Otherwise,
  the NaiveDateTime is checked against the time zone database
  given as `time_zone_database`. See the "Time zone database"
  section in the module documentation.

  ## Examples

      iex> DateTime.from_naive(~N[2016-05-24 13:26:08.003], "Etc/UTC")
      {:ok, ~U[2016-05-24 13:26:08.003Z]}

  When the datetime is ambiguous - for instance during changing from summer
  to winter time - the two possible valid datetimes are returned in a tuple.
  The first datetime is also the one which comes first chronologically, while
  the second one comes last.

      iex> {:ambiguous, first_dt, second_dt} = DateTime.from_naive(~N[2018-10-28 02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
      iex> first_dt
      #DateTime<2018-10-28 02:30:00+02:00 CEST Europe/Copenhagen>
      iex> second_dt
      #DateTime<2018-10-28 02:30:00+01:00 CET Europe/Copenhagen>

  When there is a gap in wall time - for instance in spring when the clocks are
  turned forward - the latest valid datetime just before the gap and the first
  valid datetime just after the gap.

      iex> {:gap, just_before, just_after} = DateTime.from_naive(~N[2019-03-31 02:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
      iex> just_before
      #DateTime<2019-03-31 01:59:59.999999+01:00 CET Europe/Copenhagen>
      iex> just_after
      #DateTime<2019-03-31 03:00:00+02:00 CEST Europe/Copenhagen>

  Most of the time there is one, and just one, valid datetime for a certain
  date and time in a certain time zone.

      iex> {:ok, datetime} = DateTime.from_naive(~N[2018-07-28 12:30:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
      iex> datetime
      #DateTime<2018-07-28 12:30:00+02:00 CEST Europe/Copenhagen>

  This function accepts any map or struct that contains at least the same fields as a `NaiveDateTime`
  struct. The most common example of that is a `DateTime`. In this case the information about the time
  zone of that `DateTime` is completely ignored. This is the same principle as passing a `DateTime` to
  `Date.to_iso8601/2`. `Date.to_iso8601/2` extracts only the date-specific fields (calendar, year,
  month and day) of the given structure and ignores all others.

  This way if you have a `DateTime` in one time zone, you can get the same wall time in another time zone.
  For instance if you have 2018-08-24 10:00:00 in Copenhagen and want a `DateTime` for 2018-08-24 10:00:00
  in UTC you can do:

      iex> cph_datetime = DateTime.from_naive!(~N[2018-08-24 10:00:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
      iex> {:ok, utc_datetime} = DateTime.from_naive(cph_datetime, "Etc/UTC", FakeTimeZoneDatabase)
      iex> utc_datetime
      ~U[2018-08-24 10:00:00Z]

  If instead you want a `DateTime` for the same point time in a different time zone see the
  `DateTime.shift_zone/3` function which would convert 2018-08-24 10:00:00 in Copenhagen
  to 2018-08-24 08:00:00 in UTC.
  """
  @doc since: "1.4.0"
  @spec from_naive(
          Calendar.naive_datetime(),
          Calendar.time_zone(),
          Calendar.time_zone_database()
        ) ::
          {:ok, t}
          | {:ambiguous, first_datetime :: t, second_datetime :: t}
          | {:gap, t, t}
          | {:error,
             :incompatible_calendars | :time_zone_not_found | :utc_only_time_zone_database}
  def from_naive(
        naive_datetime,
        time_zone,
        time_zone_database \\ Calendar.get_time_zone_database()
      )

  # "Etc/UTC" is handled without consulting the database: its period is fixed
  # (zero offsets, "UTC" abbreviation), so this clause can never fail.
  def from_naive(naive_datetime, "Etc/UTC", _) do
    utc_period = %{std_offset: 0, utc_offset: 0, zone_abbr: "UTC"}
    {:ok, from_naive_with_period(naive_datetime, "Etc/UTC", utc_period)}
  end

  # ISO-calendar wall time: ask the database which period(s) the wall time
  # falls into and mirror its {:ok, ...} / {:ambiguous, ...} / {:gap, ...} shape.
  def from_naive(%{calendar: Calendar.ISO} = naive_datetime, time_zone, time_zone_database) do
    case time_zone_database.time_zone_periods_from_wall_datetime(naive_datetime, time_zone) do
      {:ok, period} ->
        {:ok, from_naive_with_period(naive_datetime, time_zone, period)}

      {:ambiguous, first_period, second_period} ->
        # DST overlap: the same wall time maps to two instants, one per period.
        first_datetime = from_naive_with_period(naive_datetime, time_zone, first_period)
        second_datetime = from_naive_with_period(naive_datetime, time_zone, second_period)
        {:ambiguous, first_datetime, second_datetime}

      {:gap, {first_period, first_period_until_wall}, {second_period, second_period_from_wall}} ->
        # `until_wall` is not valid, but any time just before is.
        # So by subtracting a second and adding .999999 seconds
        # we get the last microsecond just before.
        before_naive =
          first_period_until_wall
          |> Map.replace!(:microsecond, {999_999, 6})
          |> NaiveDateTime.add(-1)

        after_naive = second_period_from_wall
        latest_datetime_before = from_naive_with_period(before_naive, time_zone, first_period)
        first_datetime_after = from_naive_with_period(after_naive, time_zone, second_period)
        {:gap, latest_datetime_before, first_datetime_after}

      {:error, _} = error ->
        error
    end
  end

  def from_naive(%{calendar: calendar} = naive_datetime, time_zone, time_zone_database)
      when calendar != Calendar.ISO do
    # For non-ISO calendars, convert to ISO, create ISO DateTime, and then
    # convert to original calendar
    iso_result =
      with {:ok, in_iso} <- NaiveDateTime.convert(naive_datetime, Calendar.ISO) do
        from_naive(in_iso, time_zone, time_zone_database)
      end

    # Convert every datetime carried by the result back to the caller's
    # calendar, preserving the result's tag (:ok / :ambiguous / :gap).
    case iso_result do
      {:ok, dt} ->
        convert(dt, calendar)

      {:ambiguous, dt1, dt2} ->
        with {:ok, dt1converted} <- convert(dt1, calendar),
             {:ok, dt2converted} <- convert(dt2, calendar),
             do: {:ambiguous, dt1converted, dt2converted}

      {:gap, dt1, dt2} ->
        with {:ok, dt1converted} <- convert(dt1, calendar),
             {:ok, dt2converted} <- convert(dt2, calendar),
             do: {:gap, dt1converted, dt2converted}

      {:error, _} = error ->
        error
    end
  end
defp from_naive_with_period(naive_datetime, time_zone, period) do
%{std_offset: std_offset, utc_offset: utc_offset, zone_abbr: zone_abbr} = period
%{
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
year: year,
month: month,
day: day
} = naive_datetime
%DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
std_offset: std_offset,
utc_offset: utc_offset,
zone_abbr: zone_abbr,
time_zone: time_zone
}
end
@doc """
Converts the given `NaiveDateTime` to `DateTime`.
It expects a time zone to put the NaiveDateTime in.
If the time zone is "Etc/UTC", it always succeeds. Otherwise,
the NaiveDateTime is checked against the time zone database
given as `time_zone_database`. See the "Time zone database"
section in the module documentation.
## Examples
iex> DateTime.from_naive!(~N[2016-05-24 13:26:08.003], "Etc/UTC")
~U[2016-05-24 13:26:08.003Z]
iex> DateTime.from_naive!(~N[2018-05-24 13:26:08.003], "Europe/Copenhagen", FakeTimeZoneDatabase)
#DateTime<2018-05-24 13:26:08.003+02:00 CEST Europe/Copenhagen>
"""
@doc since: "1.4.0"
@spec from_naive!(
NaiveDateTime.t(),
Calendar.time_zone(),
Calendar.time_zone_database()
) :: t
def from_naive!(
naive_datetime,
time_zone,
time_zone_database \\ Calendar.get_time_zone_database()
) do
case from_naive(naive_datetime, time_zone, time_zone_database) do
{:ok, datetime} ->
datetime
{:ambiguous, dt1, dt2} ->
raise ArgumentError,
"cannot convert #{inspect(naive_datetime)} to datetime because such " <>
"instant is ambiguous in time zone #{time_zone} as there is an overlap " <>
"between #{inspect(dt1)} and #{inspect(dt2)}"
{:gap, dt1, dt2} ->
raise ArgumentError,
"cannot convert #{inspect(naive_datetime)} to datetime because such " <>
"instant does not exist in time zone #{time_zone} as there is a gap " <>
"between #{inspect(dt1)} and #{inspect(dt2)}"
{:error, reason} ->
raise ArgumentError,
"cannot convert #{inspect(naive_datetime)} to datetime, reason: #{inspect(reason)}"
end
end
  @doc """
  Changes the time zone of a `DateTime`.

  Returns a `DateTime` for the same point in time, but instead at
  the time zone provided. It assumes that `DateTime` is valid and
  exists in the given time zone and calendar.

  By default, it uses the default time zone database returned by
  `Calendar.get_time_zone_database/0`, which defaults to
  `Calendar.UTCOnlyTimeZoneDatabase` which only handles "Etc/UTC" datetimes.
  Other time zone databases can be passed as argument or set globally.
  See the "Time zone database" section in the module docs.

  ## Examples

      iex> {:ok, pacific_datetime} = DateTime.shift_zone(~U[2018-07-16 10:00:00Z], "America/Los_Angeles", FakeTimeZoneDatabase)
      iex> pacific_datetime
      #DateTime<2018-07-16 03:00:00-07:00 PDT America/Los_Angeles>

      iex> DateTime.shift_zone(~U[2018-07-16 10:00:00Z], "bad timezone", FakeTimeZoneDatabase)
      {:error, :time_zone_not_found}
  """
  @doc since: "1.8.0"
  @spec shift_zone(t, Calendar.time_zone(), Calendar.time_zone_database()) ::
          {:ok, t} | {:error, :time_zone_not_found | :utc_only_time_zone_database}
  def shift_zone(datetime, time_zone, time_zone_database \\ Calendar.get_time_zone_database())

  # Already in the target zone: nothing to do, no database lookup needed.
  def shift_zone(%{time_zone: time_zone} = datetime, time_zone, _) do
    {:ok, datetime}
  end

  def shift_zone(datetime, time_zone, time_zone_database) do
    %{
      std_offset: std_offset,
      utc_offset: utc_offset,
      calendar: calendar,
      # Keep only the precision; the microsecond value itself is re-derived
      # from the shifted iso_days below.
      microsecond: {_, precision}
    } = datetime

    # Subtracting the total offset turns the wall-time iso_days into UTC
    # iso_days, which is what the database lookup expects.
    datetime
    |> to_iso_days()
    |> apply_tz_offset(utc_offset + std_offset)
    |> shift_zone_for_iso_days_utc(calendar, precision, time_zone, time_zone_database)
  end
  # Resolves `iso_days_utc` (a UTC instant in iso_days form) against the target
  # `time_zone` and rebuilds a `DateTime` in that zone's wall time.
  # `precision` is the original sub-second precision, reapplied to the result.
  defp shift_zone_for_iso_days_utc(iso_days_utc, calendar, precision, time_zone, time_zone_db) do
    case time_zone_db.time_zone_period_from_utc_iso_days(iso_days_utc, time_zone) do
      {:ok, %{std_offset: std_offset, utc_offset: utc_offset, zone_abbr: zone_abbr}} ->
        # Applying the negative total offset converts UTC to the zone's wall time.
        {year, month, day, hour, minute, second, {microsecond_without_precision, _}} =
          iso_days_utc
          |> apply_tz_offset(-(utc_offset + std_offset))
          |> calendar.naive_datetime_from_iso_days()

        datetime = %DateTime{
          calendar: calendar,
          year: year,
          month: month,
          day: day,
          hour: hour,
          minute: minute,
          second: second,
          # Restore the caller's precision on the recomputed microsecond.
          microsecond: {microsecond_without_precision, precision},
          std_offset: std_offset,
          utc_offset: utc_offset,
          zone_abbr: zone_abbr,
          time_zone: time_zone
        }

        {:ok, datetime}

      {:error, _} = error ->
        error
    end
  end
@doc """
Changes the time zone of a `DateTime` or raises on errors.
See `shift_zone/3` for more information.
## Examples
iex> DateTime.shift_zone!(~U[2018-07-16 10:00:00Z], "America/Los_Angeles", FakeTimeZoneDatabase)
#DateTime<2018-07-16 03:00:00-07:00 PDT America/Los_Angeles>
iex> DateTime.shift_zone!(~U[2018-07-16 10:00:00Z], "bad timezone", FakeTimeZoneDatabase)
** (ArgumentError) cannot shift ~U[2018-07-16 10:00:00Z] to "bad timezone" time zone, reason: :time_zone_not_found
"""
@doc since: "1.10.0"
@spec shift_zone!(t, Calendar.time_zone(), Calendar.time_zone_database()) :: t
def shift_zone!(datetime, time_zone, time_zone_database \\ Calendar.get_time_zone_database()) do
case shift_zone(datetime, time_zone, time_zone_database) do
{:ok, datetime} ->
datetime
{:error, reason} ->
raise ArgumentError,
"cannot shift #{inspect(datetime)} to #{inspect(time_zone)} time zone" <>
", reason: #{inspect(reason)}"
end
end
@doc """
Returns the current datetime in the provided time zone.
By default, it uses the default time_zone returned by
`Calendar.get_time_zone_database/0`, which defaults to
`Calendar.UTCOnlyTimeZoneDatabase` which only handles "Etc/UTC" datetimes.
Other time zone databases can be passed as argument or set globally.
See the "Time zone database" section in the module docs.
## Examples
iex> {:ok, datetime} = DateTime.now("Etc/UTC")
iex> datetime.time_zone
"Etc/UTC"
iex> DateTime.now("Europe/Copenhagen")
{:error, :utc_only_time_zone_database}
iex> DateTime.now("bad timezone", FakeTimeZoneDatabase)
{:error, :time_zone_not_found}
"""
@doc since: "1.8.0"
@spec now(Calendar.time_zone(), Calendar.time_zone_database()) ::
{:ok, t} | {:error, :time_zone_not_found | :utc_only_time_zone_database}
def now(time_zone, time_zone_database \\ Calendar.get_time_zone_database())
def now("Etc/UTC", _) do
{:ok, utc_now()}
end
def now(time_zone, time_zone_database) do
shift_zone(utc_now(), time_zone, time_zone_database)
end
@doc """
Returns the current datetime in the provided time zone or raises on errors
See `now/2` for more information.
## Examples
iex> datetime = DateTime.now!("Etc/UTC")
iex> datetime.time_zone
"Etc/UTC"
iex> DateTime.now!("Europe/Copenhagen")
** (ArgumentError) cannot get current datetime in "Europe/Copenhagen" time zone, reason: :utc_only_time_zone_database
iex> DateTime.now!("bad timezone", FakeTimeZoneDatabase)
** (ArgumentError) cannot get current datetime in "bad timezone" time zone, reason: :time_zone_not_found
"""
@doc since: "1.10.0"
@spec now!(Calendar.time_zone(), Calendar.time_zone_database()) :: t
def now!(time_zone, time_zone_database \\ Calendar.get_time_zone_database()) do
case now(time_zone, time_zone_database) do
{:ok, datetime} ->
datetime
{:error, reason} ->
raise ArgumentError,
"cannot get current datetime in #{inspect(time_zone)} time zone, reason: " <>
inspect(reason)
end
end
@doc """
Converts the given `datetime` to Unix time.
The `datetime` is expected to be using the ISO calendar
with a year greater than or equal to 0.
It will return the integer with the given unit,
according to `System.convert_time_unit/3`.
If you want to get the current time in Unix seconds,
do not do `DateTime.utc_now() |> DateTime.to_unix()`.
Simply call `System.os_time(:second)` instead.
## Examples
iex> 1_464_096_368 |> DateTime.from_unix!() |> DateTime.to_unix()
1464096368
iex> dt = %DateTime{calendar: Calendar.ISO, day: 20, hour: 18, microsecond: {273806, 6},
...> minute: 58, month: 11, second: 19, time_zone: "America/Montevideo",
...> utc_offset: -10800, std_offset: 3600, year: 2014, zone_abbr: "UYST"}
iex> DateTime.to_unix(dt)
1416517099
iex> flamel = %DateTime{calendar: Calendar.ISO, day: 22, hour: 8, microsecond: {527771, 6},
...> minute: 2, month: 3, second: 25, std_offset: 0, time_zone: "Etc/UTC",
...> utc_offset: 0, year: 1418, zone_abbr: "UTC"}
iex> DateTime.to_unix(flamel)
-17412508655
"""
@spec to_unix(Calendar.datetime(), System.time_unit()) :: integer
def to_unix(datetime, unit \\ :second)
def to_unix(%{utc_offset: utc_offset, std_offset: std_offset} = datetime, unit) do
{days, fraction} = to_iso_days(datetime)
unix_units = Calendar.ISO.iso_days_to_unit({days - @unix_days, fraction}, unit)
offset_units = System.convert_time_unit(utc_offset + std_offset, :second, unit)
unix_units - offset_units
end
@doc """
Converts the given `datetime` into a `NaiveDateTime`.
Because `NaiveDateTime` does not hold time zone information,
any time zone related data will be lost during the conversion.
## Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 1},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_naive(dt)
~N[2000-02-29 23:00:07.0]
"""
@spec to_naive(Calendar.datetime()) :: NaiveDateTime.t()
def to_naive(datetime)
def to_naive(%{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: _
}) do
%NaiveDateTime{
year: year,
month: month,
day: day,
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
}
end
@doc """
Converts a `DateTime` into a `Date`.
Because `Date` does not hold time nor time zone information,
data will be lost during the conversion.
## Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_date(dt)
~D[2000-02-29]
"""
@spec to_date(Calendar.datetime()) :: Date.t()
def to_date(datetime)
def to_date(%{
year: year,
month: month,
day: day,
calendar: calendar,
hour: _,
minute: _,
second: _,
microsecond: _,
time_zone: _
}) do
%Date{year: year, month: month, day: day, calendar: calendar}
end
@doc """
Converts a `DateTime` into `Time`.
Because `Time` does not hold date nor time zone information,
data will be lost during the conversion.
## Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 1},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_time(dt)
~T[23:00:07.0]
"""
@spec to_time(Calendar.datetime()) :: Time.t()
def to_time(datetime)
def to_time(%{
year: _,
month: _,
day: _,
calendar: calendar,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: _
}) do
%Time{
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
calendar: calendar
}
end
  @doc """
  Converts the given datetime to
  [ISO 8601:2019](https://en.wikipedia.org/wiki/ISO_8601) format.

  By default, `DateTime.to_iso8601/2` returns datetimes formatted in the "extended"
  format, for human readability. It also supports the "basic" format through passing the `:basic` option.

  Only supports converting datetimes which are in the ISO calendar,
  attempting to convert datetimes from other calendars will raise.
  You can also optionally specify an offset for the formatted string.

  WARNING: the ISO 8601 datetime format does not contain the time zone nor
  its abbreviation, which means information is lost when converting to such
  format.

  ### Examples

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
      iex> DateTime.to_iso8601(dt)
      "2000-02-29T23:00:07+01:00"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
      iex> DateTime.to_iso8601(dt)
      "2000-02-29T23:00:07Z"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
      iex> DateTime.to_iso8601(dt, :extended)
      "2000-02-29T23:00:07-04:00"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
      iex> DateTime.to_iso8601(dt, :basic)
      "20000229T230007-0400"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
      iex> DateTime.to_iso8601(dt, :extended, 3600)
      "2000-03-01T04:00:07+01:00"

      iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
      ...>                hour: 23, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
      iex> DateTime.to_iso8601(dt, :extended, 0)
      "2000-03-01T03:00:07+00:00"

      iex> dt = %DateTime{year: 2000, month: 3, day: 01, zone_abbr: "UTC",
      ...>                hour: 03, minute: 0, second: 7, microsecond: {0, 0},
      ...>                utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
      iex> DateTime.to_iso8601(dt, :extended, 0)
      "2000-03-01T03:00:07Z"

      iex> {:ok, dt, offset} = DateTime.from_iso8601("2000-03-01T03:00:07Z")
      iex> "2000-03-01T03:00:07Z" = DateTime.to_iso8601(dt, :extended, offset)
  """
  @spec to_iso8601(Calendar.datetime(), :basic | :extended, nil | integer()) :: String.t()
  def to_iso8601(datetime, format \\ :extended, offset \\ nil)

  # No explicit offset requested: format the datetime as-is and let
  # offset_to_string/4 render "Z" for UTC or "+HH:MM"/"-HH:MM" otherwise.
  def to_iso8601(%{calendar: Calendar.ISO} = datetime, format, nil)
      when format in [:extended, :basic] do
    %{
      year: year,
      month: month,
      day: day,
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond,
      time_zone: time_zone,
      utc_offset: utc_offset,
      std_offset: std_offset
    } = datetime

    datetime_to_string(year, month, day, hour, minute, second, microsecond, format) <>
      Calendar.ISO.offset_to_string(utc_offset, std_offset, time_zone, format)
  end

  # Offset 0 on a UTC datetime: keep the "Z" suffix rather than "+00:00".
  def to_iso8601(
        %{calendar: Calendar.ISO, microsecond: {_, precision}, time_zone: "Etc/UTC"} = datetime,
        format,
        0
      )
      when format in [:extended, :basic] do
    # shift_by_offset recomputes microseconds; reapply the original precision.
    {year, month, day, hour, minute, second, {microsecond, _}} = shift_by_offset(datetime, 0)

    datetime_to_string(year, month, day, hour, minute, second, {microsecond, precision}, format) <>
      "Z"
  end

  # Explicit offset: shift the wall time to that offset and print it numerically.
  def to_iso8601(%{calendar: Calendar.ISO} = datetime, format, offset)
      when format in [:extended, :basic] do
    {_, precision} = datetime.microsecond
    {year, month, day, hour, minute, second, {microsecond, _}} = shift_by_offset(datetime, offset)

    datetime_to_string(year, month, day, hour, minute, second, {microsecond, precision}, format) <>
      Calendar.ISO.offset_to_string(offset, 0, nil, format)
  end

  # Non-ISO calendars: convert to ISO first (raising on failure), then format.
  def to_iso8601(%{calendar: _} = datetime, format, offset) when format in [:extended, :basic] do
    datetime
    |> convert!(Calendar.ISO)
    |> to_iso8601(format, offset)
  end
defp shift_by_offset(%{calendar: calendar} = datetime, offset) do
total_offset = datetime.utc_offset + datetime.std_offset
datetime
|> to_iso_days()
# Subtract total original offset in order to get UTC and add the new offset
|> Calendar.ISO.add_day_fraction_to_iso_days(offset - total_offset, 86400)
|> calendar.naive_datetime_from_iso_days()
end
defp datetime_to_string(year, month, day, hour, minute, second, microsecond, format) do
Calendar.ISO.date_to_string(year, month, day, format) <>
"T" <>
Calendar.ISO.time_to_string(hour, minute, second, microsecond, format)
end
@doc """
Parses the extended "Date and time of day" format described by
[ISO 8601:2019](https://en.wikipedia.org/wiki/ISO_8601).
Since ISO 8601 does not include the proper time zone, the given
string will be converted to UTC and its offset in seconds will be
returned as part of this function. Therefore offset information
must be present in the string.
As specified in the standard, the separator "T" may be omitted if
desired as there is no ambiguity within this function.
Note leap seconds are not supported by the built-in Calendar.ISO.
## Examples
iex> {:ok, datetime, 0} = DateTime.from_iso8601("2015-01-23T23:50:07Z")
iex> datetime
~U[2015-01-23 23:50:07Z]
iex> {:ok, datetime, 9000} = DateTime.from_iso8601("2015-01-23T23:50:07.123+02:30")
iex> datetime
~U[2015-01-23 21:20:07.123Z]
iex> {:ok, datetime, 9000} = DateTime.from_iso8601("2015-01-23T23:50:07,123+02:30")
iex> datetime
~U[2015-01-23 21:20:07.123Z]
iex> {:ok, datetime, 0} = DateTime.from_iso8601("-2015-01-23T23:50:07Z")
iex> datetime
~U[-2015-01-23 23:50:07Z]
iex> {:ok, datetime, 9000} = DateTime.from_iso8601("-2015-01-23T23:50:07,123+02:30")
iex> datetime
~U[-2015-01-23 21:20:07.123Z]
iex> DateTime.from_iso8601("2015-01-23P23:50:07")
{:error, :invalid_format}
iex> DateTime.from_iso8601("2015-01-23T23:50:07")
{:error, :missing_offset}
iex> DateTime.from_iso8601("2015-01-23 23:50:61")
{:error, :invalid_time}
iex> DateTime.from_iso8601("2015-01-32 23:50:07")
{:error, :invalid_date}
iex> DateTime.from_iso8601("2015-01-23T23:50:07.123-00:00")
{:error, :invalid_format}
"""
@doc since: "1.4.0"
@spec from_iso8601(String.t(), Calendar.calendar()) ::
{:ok, t, Calendar.utc_offset()} | {:error, atom}
def from_iso8601(string, calendar \\ Calendar.ISO) do
with {:ok, {year, month, day, hour, minute, second, microsecond}, offset} <-
Calendar.ISO.parse_utc_datetime(string) do
datetime = %DateTime{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
std_offset: 0,
utc_offset: 0,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}
with {:ok, converted} <- convert(datetime, calendar) do
{:ok, converted, offset}
end
end
end
@doc """
Converts a number of gregorian seconds to a `DateTime` struct.
The returned `DateTime` will have `UTC` timezone, if you want other timezone, please use
`DateTime.shift_zone/3`.
## Examples
iex> DateTime.from_gregorian_seconds(1)
~U[0000-01-01 00:00:01Z]
iex> DateTime.from_gregorian_seconds(63_755_511_991, {5000, 3})
~U[2020-05-01 00:26:31.005Z]
iex> DateTime.from_gregorian_seconds(-1)
~U[-0001-12-31 23:59:59Z]
"""
@doc since: "1.11.0"
@spec from_gregorian_seconds(integer(), Calendar.microsecond(), Calendar.calendar()) :: t
def from_gregorian_seconds(
seconds,
{microsecond, precision} \\ {0, 0},
calendar \\ Calendar.ISO
)
when is_integer(seconds) do
iso_days = Calendar.ISO.gregorian_seconds_to_iso_days(seconds, microsecond)
{year, month, day, hour, minute, second, {microsecond, _}} =
calendar.naive_datetime_from_iso_days(iso_days)
%DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: {microsecond, precision},
std_offset: 0,
utc_offset: 0,
zone_abbr: "UTC",
time_zone: "Etc/UTC"
}
end
@doc """
Converts a `DateTime` struct to a number of gregorian seconds and microseconds.
## Examples
iex> dt = %DateTime{year: 0000, month: 1, day: 1, zone_abbr: "UTC",
...> hour: 0, minute: 0, second: 1, microsecond: {0, 0},
...> utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
iex> DateTime.to_gregorian_seconds(dt)
{1, 0}
iex> dt = %DateTime{year: 2020, month: 5, day: 1, zone_abbr: "UTC",
...> hour: 0, minute: 26, second: 31, microsecond: {5000, 0},
...> utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
iex> DateTime.to_gregorian_seconds(dt)
{63_755_511_991, 5000}
iex> dt = %DateTime{year: 2020, month: 5, day: 1, zone_abbr: "CET",
...> hour: 1, minute: 26, second: 31, microsecond: {5000, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_gregorian_seconds(dt)
{63_755_511_991, 5000}
"""
@doc since: "1.11.0"
@spec to_gregorian_seconds(Calendar.datetime()) :: {integer(), non_neg_integer()}
def to_gregorian_seconds(
%{
std_offset: std_offset,
utc_offset: utc_offset,
microsecond: {microsecond, _}
} = datetime
) do
{days, day_fraction} =
datetime
|> to_iso_days()
|> apply_tz_offset(utc_offset + std_offset)
seconds_in_day = seconds_from_day_fraction(day_fraction)
{days * @seconds_per_day + seconds_in_day, microsecond}
end
@doc """
Converts the given `datetime` to a string according to its calendar.
### Examples
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.to_string(dt)
"2000-02-29 23:00:07+01:00 CET Europe/Warsaw"
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "UTC",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"}
iex> DateTime.to_string(dt)
"2000-02-29 23:00:07Z"
iex> dt = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> DateTime.to_string(dt)
"2000-02-29 23:00:07-04:00 AMT America/Manaus"
iex> dt = %DateTime{year: -100, month: 12, day: 19, zone_abbr: "CET",
...> hour: 3, minute: 20, second: 31, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Stockholm"}
iex> DateTime.to_string(dt)
"-0100-12-19 03:20:31+01:00 CET Europe/Stockholm"
"""
@spec to_string(Calendar.datetime()) :: String.t()
def to_string(%{calendar: calendar} = datetime) do
%{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
} = datetime
calendar.datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
)
end
@doc """
Compares two datetime structs.
Returns `:gt` if the first datetime is later than the second
and `:lt` for vice versa. If the two datetimes are equal
`:eq` is returned.
Note that both UTC and Standard offsets will be taken into
account when comparison is done.
## Examples
iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.compare(dt1, dt2)
:gt
"""
@doc since: "1.4.0"
@spec compare(Calendar.datetime(), Calendar.datetime()) :: :lt | :eq | :gt
def compare(
%{utc_offset: utc_offset1, std_offset: std_offset1} = datetime1,
%{utc_offset: utc_offset2, std_offset: std_offset2} = datetime2
) do
{days1, {parts1, ppd1}} =
datetime1
|> to_iso_days()
|> apply_tz_offset(utc_offset1 + std_offset1)
{days2, {parts2, ppd2}} =
datetime2
|> to_iso_days()
|> apply_tz_offset(utc_offset2 + std_offset2)
# Ensure fraction tuples have same denominator.
first = {days1, parts1 * ppd2}
second = {days2, parts2 * ppd1}
cond do
first > second -> :gt
first < second -> :lt
true -> :eq
end
end
@doc """
Subtracts `datetime2` from `datetime1`.
The answer can be returned in any `unit` available from `t:System.time_unit/0`.
Leap seconds are not taken into account.
This function returns the difference in seconds where seconds are measured
according to `Calendar.ISO`.
## Examples
iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> dt2 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "CET",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Warsaw"}
iex> DateTime.diff(dt1, dt2)
18000
iex> DateTime.diff(dt2, dt1)
-18000
"""
@doc since: "1.5.0"
@spec diff(Calendar.datetime(), Calendar.datetime(), System.time_unit()) :: integer()
def diff(
%{utc_offset: utc_offset1, std_offset: std_offset1} = datetime1,
%{utc_offset: utc_offset2, std_offset: std_offset2} = datetime2,
unit \\ :second
) do
naive_diff =
(datetime1 |> to_iso_days() |> Calendar.ISO.iso_days_to_unit(unit)) -
(datetime2 |> to_iso_days() |> Calendar.ISO.iso_days_to_unit(unit))
offset_diff = utc_offset2 + std_offset2 - (utc_offset1 + std_offset1)
naive_diff + System.convert_time_unit(offset_diff, :second, unit)
end
@doc """
Adds a specified amount of time to a `DateTime`.
Accepts an `amount_to_add` in any `unit` available from `t:System.time_unit/0`.
Negative values will move backwards in time.
Takes changes such as summer time/DST into account. This means that adding time
can cause the wall time to "go backwards" during "fall back" during autumn.
Adding just a few seconds to a datetime just before "spring forward" can cause wall
time to increase by more than an hour.
Fractional second precision stays the same in a similar way to `NaiveDateTime.add/2`.
### Examples
iex> dt = DateTime.from_naive!(~N[2018-11-15 10:00:00], "Europe/Copenhagen", FakeTimeZoneDatabase)
iex> dt |> DateTime.add(3600, :second, FakeTimeZoneDatabase)
#DateTime<2018-11-15 11:00:00+01:00 CET Europe/Copenhagen>
iex> DateTime.add(~U[2018-11-15 10:00:00Z], 3600, :second)
~U[2018-11-15 11:00:00Z]
When adding 3 seconds just before "spring forward" we go from 1:59:59 to 3:00:02
iex> dt = DateTime.from_naive!(~N[2019-03-31 01:59:59.123], "Europe/Copenhagen", FakeTimeZoneDatabase)
iex> dt |> DateTime.add(3, :second, FakeTimeZoneDatabase)
#DateTime<2019-03-31 03:00:02.123+02:00 CEST Europe/Copenhagen>
"""
@doc since: "1.8.0"
@spec add(Calendar.datetime(), integer, System.time_unit(), Calendar.time_zone_database()) ::
t()
def add(
datetime,
amount_to_add,
unit \\ :second,
time_zone_database \\ Calendar.get_time_zone_database()
)
when is_integer(amount_to_add) do
%{
utc_offset: utc_offset,
std_offset: std_offset,
calendar: calendar,
microsecond: {_, precision}
} = datetime
ppd = System.convert_time_unit(86400, :second, unit)
total_offset = System.convert_time_unit(utc_offset + std_offset, :second, unit)
result =
datetime
|> to_iso_days()
# Subtract total offset in order to get UTC and add the integer for the addition
|> Calendar.ISO.add_day_fraction_to_iso_days(amount_to_add - total_offset, ppd)
|> shift_zone_for_iso_days_utc(calendar, precision, datetime.time_zone, time_zone_database)
case result do
{:ok, result_datetime} ->
result_datetime
{:error, error} ->
raise ArgumentError,
"cannot add #{amount_to_add} #{unit} to #{inspect(datetime)} (with time zone " <>
"database #{inspect(time_zone_database)}), reason: #{inspect(error)}"
end
end
@doc """
Returns the given datetime with the microsecond field truncated to the given
precision (`:microsecond`, `:millisecond` or `:second`).
The given datetime is returned unchanged if it already has lower precision than
the given precision.
## Examples
iex> dt1 = %DateTime{year: 2017, month: 11, day: 7, zone_abbr: "CET",
...> hour: 11, minute: 45, second: 18, microsecond: {123456, 6},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Paris"}
iex> DateTime.truncate(dt1, :microsecond)
#DateTime<2017-11-07 11:45:18.123456+01:00 CET Europe/Paris>
iex> dt2 = %DateTime{year: 2017, month: 11, day: 7, zone_abbr: "CET",
...> hour: 11, minute: 45, second: 18, microsecond: {123456, 6},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Paris"}
iex> DateTime.truncate(dt2, :millisecond)
#DateTime<2017-11-07 11:45:18.123+01:00 CET Europe/Paris>
iex> dt3 = %DateTime{year: 2017, month: 11, day: 7, zone_abbr: "CET",
...> hour: 11, minute: 45, second: 18, microsecond: {123456, 6},
...> utc_offset: 3600, std_offset: 0, time_zone: "Europe/Paris"}
iex> DateTime.truncate(dt3, :second)
#DateTime<2017-11-07 11:45:18+01:00 CET Europe/Paris>
"""
@doc since: "1.6.0"
@spec truncate(Calendar.datetime(), :microsecond | :millisecond | :second) :: t()
def truncate(%DateTime{microsecond: microsecond} = datetime, precision) do
%{datetime | microsecond: Calendar.truncate(microsecond, precision)}
end
def truncate(%{} = datetime_map, precision) do
truncate(from_map(datetime_map), precision)
end
@doc """
Converts a given `datetime` from one calendar to another.
If it is not possible to convert unambiguously between the calendars
(see `Calendar.compatible_calendars?/2`), an `{:error, :incompatible_calendars}` tuple
is returned.
## Examples
Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:
iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> DateTime.convert(dt1, Calendar.Holocene)
{:ok, %DateTime{calendar: Calendar.Holocene, day: 29, hour: 23,
microsecond: {0, 0}, minute: 0, month: 2, second: 7, std_offset: 0,
time_zone: "America/Manaus", utc_offset: -14400, year: 12000,
zone_abbr: "AMT"}}
"""
@doc since: "1.5.0"
@spec convert(Calendar.datetime(), Calendar.calendar()) ::
{:ok, t} | {:error, :incompatible_calendars}
def convert(%DateTime{calendar: calendar} = datetime, calendar) do
{:ok, datetime}
end
def convert(%{calendar: calendar} = datetime, calendar) do
{:ok, from_map(datetime)}
end
def convert(%{calendar: dt_calendar, microsecond: {_, precision}} = datetime, calendar) do
if Calendar.compatible_calendars?(dt_calendar, calendar) do
result_datetime =
datetime
|> to_iso_days
|> from_iso_days(datetime, calendar, precision)
{:ok, result_datetime}
else
{:error, :incompatible_calendars}
end
end
@doc """
Converts a given `datetime` from one calendar to another.
If it is not possible to convert unambiguously between the calendars
(see `Calendar.compatible_calendars?/2`), an ArgumentError is raised.
## Examples
Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:
iex> dt1 = %DateTime{year: 2000, month: 2, day: 29, zone_abbr: "AMT",
...> hour: 23, minute: 0, second: 7, microsecond: {0, 0},
...> utc_offset: -14400, std_offset: 0, time_zone: "America/Manaus"}
iex> DateTime.convert!(dt1, Calendar.Holocene)
%DateTime{calendar: Calendar.Holocene, day: 29, hour: 23,
microsecond: {0, 0}, minute: 0, month: 2, second: 7, std_offset: 0,
time_zone: "America/Manaus", utc_offset: -14400, year: 12000,
zone_abbr: "AMT"}
"""
@doc since: "1.5.0"
@spec convert!(Calendar.datetime(), Calendar.calendar()) :: t
def convert!(datetime, calendar) do
case convert(datetime, calendar) do
{:ok, value} ->
value
{:error, :incompatible_calendars} ->
raise ArgumentError,
"cannot convert #{inspect(datetime)} to target calendar #{inspect(calendar)}, " <>
"reason: #{inspect(datetime.calendar)} and #{inspect(calendar)} have different " <>
"day rollover moments, making this conversion ambiguous"
end
end
# Keep it multiline for proper function clause errors.
defp to_iso_days(%{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond
}) do
calendar.naive_datetime_to_iso_days(year, month, day, hour, minute, second, microsecond)
end
defp from_iso_days(iso_days, datetime, calendar, precision) do
%{time_zone: time_zone, zone_abbr: zone_abbr, utc_offset: utc_offset, std_offset: std_offset} =
datetime
{year, month, day, hour, minute, second, {microsecond, _}} =
calendar.naive_datetime_from_iso_days(iso_days)
%DateTime{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: {microsecond, precision},
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
}
end
defp apply_tz_offset(iso_days, 0) do
iso_days
end
defp apply_tz_offset(iso_days, offset) do
Calendar.ISO.add_day_fraction_to_iso_days(iso_days, -offset, 86400)
end
defp from_map(%{} = datetime_map) do
%DateTime{
year: datetime_map.year,
month: datetime_map.month,
day: datetime_map.day,
hour: datetime_map.hour,
minute: datetime_map.minute,
second: datetime_map.second,
microsecond: datetime_map.microsecond,
time_zone: datetime_map.time_zone,
zone_abbr: datetime_map.zone_abbr,
utc_offset: datetime_map.utc_offset,
std_offset: datetime_map.std_offset
}
end
defp seconds_from_day_fraction({parts_in_day, @seconds_per_day}),
do: parts_in_day
defp seconds_from_day_fraction({parts_in_day, parts_per_day}),
do: div(parts_in_day * @seconds_per_day, parts_per_day)
defimpl String.Chars do
def to_string(datetime) do
%{
calendar: calendar,
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset
} = datetime
calendar.datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
)
end
end
defimpl Inspect do
def inspect(datetime, _) do
%{
year: year,
month: month,
day: day,
hour: hour,
minute: minute,
second: second,
microsecond: microsecond,
time_zone: time_zone,
zone_abbr: zone_abbr,
utc_offset: utc_offset,
std_offset: std_offset,
calendar: calendar
} = datetime
formatted =
calendar.datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
)
case datetime do
%{utc_offset: 0, std_offset: 0, time_zone: "Etc/UTC"} ->
"~U[" <> formatted <> suffix(calendar) <> "]"
_ ->
"#DateTime<" <> formatted <> suffix(calendar) <> ">"
end
end
defp suffix(Calendar.ISO), do: ""
defp suffix(calendar), do: " " <> inspect(calendar)
end
end
| 35.368938
| 306
| 0.642975
|
038e1259b1530ff4548f4ba9991ea2ce30fce692
| 2,533
|
exs
|
Elixir
|
farmbot_os/test/farmbot_os/syscalls/pin_control_test.exs
|
adamswsk/farmbot_os
|
d177d3b74888c1e7bcbf8f8595818708ee97f73b
|
[
"MIT"
] | 1
|
2021-04-22T10:18:50.000Z
|
2021-04-22T10:18:50.000Z
|
farmbot_os/test/farmbot_os/syscalls/pin_control_test.exs
|
adamswsk/farmbot_os
|
d177d3b74888c1e7bcbf8f8595818708ee97f73b
|
[
"MIT"
] | null | null | null |
farmbot_os/test/farmbot_os/syscalls/pin_control_test.exs
|
adamswsk/farmbot_os
|
d177d3b74888c1e7bcbf8f8595818708ee97f73b
|
[
"MIT"
] | null | null | null |
defmodule FarmbotOS.SysCalls.PinControlTest do
use ExUnit.Case
use Mimic
setup :verify_on_exit!
alias FarmbotOS.SysCalls.PinControl
alias FarmbotCore.Asset.Peripheral
@digital 0
@tag :capture_log
test "read_pin with %Peripheral{}, pin is 1" do
expect(FarmbotFirmware, :request, 1, fn
{:pin_read, [p: 13, m: 0]} ->
{:ok, {:qqq, {:report_pin_value, [p: 13, v: 1]}}}
end)
peripheral = %Peripheral{
pin: 13,
label: "xyz"
}
assert 1 == PinControl.read_pin(peripheral, @digital)
end
@tag :capture_log
test "read_pin with %Peripheral{}, pin is 0" do
expect(FarmbotFirmware, :request, 1, fn
{:pin_read, [p: 13, m: 0]} ->
{:ok, {:qqq, {:report_pin_value, [p: 13, v: 0]}}}
end)
peripheral = %Peripheral{pin: 13, label: "xyz"}
assert 0 == PinControl.read_pin(peripheral, @digital)
end
@tag :capture_log
test "toggle_pin, 1 => 0" do
expect(FarmbotCore.Asset, :get_peripheral_by_pin, 1, fn 12 ->
nil
end)
expect(FarmbotFirmware, :command, 2, fn
{:pin_mode_write, [p: 12, m: 1]} -> :ok
{:pin_write, [p: 12, v: _, m: _]} -> :ok
end)
expect(FarmbotFirmware, :request, 2, fn
{:pin_read, [p: 12, m: 0]} ->
{:ok, {:qqq, {:report_pin_value, [p: 12, v: 1]}}}
end)
assert :ok = PinControl.toggle_pin(12)
end
@tag :capture_log
test "toggle_pin, 0 => 1" do
expect(FarmbotCore.Asset, :get_peripheral_by_pin, 1, fn 12 ->
nil
end)
expect(FarmbotFirmware, :command, 2, fn
{:pin_mode_write, [p: 12, m: 1]} -> :ok
{:pin_write, [p: 12, v: _, m: _]} -> :ok
end)
expect(FarmbotFirmware, :request, 2, fn
{:pin_read, [p: 12, m: 0]} ->
{:ok, {:qqq, {:report_pin_value, [p: 12, v: 0]}}}
end)
assert :ok = PinControl.toggle_pin(12)
end
test "toggle_pin, unknown" do
assert {:error, "Unknown pin data: :x"} == PinControl.toggle_pin(:x)
end
test "set_servo_angle" do
expect(FarmbotFirmware, :command, 2, fn
{:servo_write, [p: 20, v: 90]} -> {:error, "opps"}
{:servo_write, [p: 40, v: 180]} -> :ok
end)
assert :ok = PinControl.set_servo_angle(40, 180)
message = "Firmware error @ \"set_servo_angle\": \"opps\""
assert {:error, ^message} = PinControl.set_servo_angle(20, 90)
end
test "read_cached_pin" do
expect(FarmbotCore.BotState, :fetch, 1, fn ->
%FarmbotCore.BotStateNG{pins: %{4 => %{value: 6}}}
end)
assert 6 == PinControl.read_cached_pin(4)
end
end
| 25.846939
| 72
| 0.596131
|
038e3cd4f3db6644fe121763109252bb03eff664
| 236
|
ex
|
Elixir
|
lib/benchee/output/profile_printer.ex
|
mayel/benchee
|
faf3c85c241a4c7eeaab8edfc85094bbbb10c44d
|
[
"MIT"
] | 636
|
2016-06-06T07:58:36.000Z
|
2019-03-19T15:27:37.000Z
|
lib/benchee/output/profile_printer.ex
|
mayel/benchee
|
faf3c85c241a4c7eeaab8edfc85094bbbb10c44d
|
[
"MIT"
] | 198
|
2016-06-18T08:19:15.000Z
|
2019-03-19T15:32:37.000Z
|
lib/benchee/output/profile_printer.ex
|
mayel/benchee
|
faf3c85c241a4c7eeaab8edfc85094bbbb10c44d
|
[
"MIT"
] | 43
|
2016-06-08T08:04:30.000Z
|
2019-02-13T17:10:24.000Z
|
defmodule Benchee.Output.ProfilePrinter do
@moduledoc false
@doc """
Prints a notification of which job is being profiled.
"""
def profiling(name, profiler) do
IO.puts("\nProfiling #{name} with #{profiler}...")
end
end
| 21.454545
| 55
| 0.690678
|
038e5e211cbfe902a2d1443eae628950f9bf0977
| 136
|
ex
|
Elixir
|
lib/seren_web/controllers/page_controller.ex
|
allen-garvey/seren
|
f61cb7edcd7d3f927d2929db14b2a4a1578a3925
|
[
"MIT"
] | 4
|
2019-10-04T16:11:15.000Z
|
2021-08-18T21:00:13.000Z
|
apps/seren/lib/seren_web/controllers/page_controller.ex
|
allen-garvey/phoenix-umbrella
|
1d444bbd62a5e7b5f51d317ce2be71ee994125d5
|
[
"MIT"
] | 5
|
2020-03-16T23:52:25.000Z
|
2021-09-03T16:52:17.000Z
|
lib/seren_web/controllers/page_controller.ex
|
allen-garvey/seren
|
f61cb7edcd7d3f927d2929db14b2a4a1578a3925
|
[
"MIT"
] | null | null | null |
defmodule SerenWeb.PageController do
use SerenWeb, :controller
def index(conn, _params) do
render conn, "index.html"
end
end
| 17
| 36
| 0.735294
|
038eb135bbfcb5bb86572c264c25d0216d4b1257
| 1,305
|
ex
|
Elixir
|
lib/roger/queue.ex
|
jnylen/roger
|
074338eceae4783221088e8b235a635452708ef1
|
[
"MIT"
] | null | null | null |
lib/roger/queue.ex
|
jnylen/roger
|
074338eceae4783221088e8b235a635452708ef1
|
[
"MIT"
] | null | null | null |
lib/roger/queue.ex
|
jnylen/roger
|
074338eceae4783221088e8b235a635452708ef1
|
[
"MIT"
] | null | null | null |
defmodule Roger.Queue do
@moduledoc """
Functions related to queues.
"""
alias Roger.{Queue, AMQPClient}
@type t :: %__MODULE__{}
defstruct type: nil, max_workers: nil, consumer_tag: nil, channel: nil, confirmed: false
def define({type, max_workers}) do
define(type, max_workers)
end
def define(type, max_workers) do
%__MODULE__{type: type, max_workers: max_workers}
end
@doc """
Setup channel with the queue options.
This makes sure the channel prefetch count follows the queue max worker size.
"""
@spec setup_channel(queue :: t) :: {atom, t}
def setup_channel(%Queue{} = queue) do
{:ok, channel} = Roger.AMQPClient.open_channel()
:ok = AMQP.Basic.qos(channel, prefetch_count: queue.max_workers)
{:ok, %Queue{queue | channel: channel}}
end
@doc """
Given a partition and a queue type, construct the Queue's name for RabbitMQ.
"""
def make_name(partition_id, type, postfix \\ "") do
"#{partition_id}-#{type}#{postfix}"
end
@doc """
Flushes all messages on the given queue.
"""
def purge(partition_id, queue_type) do
{:ok, channel} = AMQPClient.open_channel()
queue = make_name(partition_id, queue_type)
result = AMQP.Queue.purge(channel, queue)
:ok = AMQP.Channel.close(channel)
result
end
end
| 24.622642
| 90
| 0.676628
|
038ec3802186ccf58e93d837d219987161c572c7
| 237
|
exs
|
Elixir
|
list_length.exs
|
hvnsweeting/elixir-hackerrank-fp
|
1b4c259c7e335b272b0bbc50d3ba92ddb724566f
|
[
"MIT"
] | null | null | null |
list_length.exs
|
hvnsweeting/elixir-hackerrank-fp
|
1b4c259c7e335b272b0bbc50d3ba92ddb724566f
|
[
"MIT"
] | null | null | null |
list_length.exs
|
hvnsweeting/elixir-hackerrank-fp
|
1b4c259c7e335b272b0bbc50d3ba92ddb724566f
|
[
"MIT"
] | null | null | null |
defmodule Solution do
def len([_|tail]) do
1 + len(tail)
end
def len([]) do
0
end
def main() do
data = IO.read(:stdio, :all) |> String.trim |> String.split([" ", "\n"])
IO.puts len(data)
end
end
Solution.main()
| 15.8
| 76
| 0.56962
|
038ee0b2aa56dc091d9b39fc039d5c97d2093ec2
| 16,704
|
exs
|
Elixir
|
test/graphql/helpers/authorization_test.exs
|
isshindev/accent
|
ae4c13139b0a0dfd64ff536b94c940a4e2862150
|
[
"BSD-3-Clause"
] | 806
|
2018-04-07T20:40:33.000Z
|
2022-03-30T01:39:57.000Z
|
test/graphql/helpers/authorization_test.exs
|
isshindev/accent
|
ae4c13139b0a0dfd64ff536b94c940a4e2862150
|
[
"BSD-3-Clause"
] | 194
|
2018-04-07T13:49:37.000Z
|
2022-03-30T19:58:45.000Z
|
test/graphql/helpers/authorization_test.exs
|
doc-ai/accent
|
e337e16f3658cc0728364f952c0d9c13710ebb06
|
[
"BSD-3-Clause"
] | 89
|
2018-04-09T13:55:49.000Z
|
2022-03-24T07:09:31.000Z
|
defmodule AccentTest.GraphQL.Helpers.Authorization do
use Accent.RepoCase
alias Accent.{
Collaborator,
Document,
GraphQL.Helpers.Authorization,
Integration,
Language,
Operation,
ProjectCreator,
Repo,
Translation,
TranslationCommentsSubscription,
User,
Version
}
@user %User{email: "test@test.com"}
setup do
user = Repo.insert!(@user)
language = Repo.insert!(%Language{name: "English", slug: Ecto.UUID.generate()})
{:ok, project} = ProjectCreator.create(params: %{main_color: "#f00", name: "My project", language_id: language.id}, user: user)
revision = project |> Repo.preload(:revisions) |> Map.get(:revisions) |> hd()
document = Repo.insert!(%Document{project_id: project.id, path: "test", format: "json"})
version = Repo.insert!(%Version{project_id: project.id, name: "test", tag: "v1.0", user_id: user.id})
translation = Repo.insert!(%Translation{revision_id: revision.id, key: "test", corrected_text: "bar"})
collaborator = Repo.insert!(%Collaborator{project_id: project.id, user_id: user.id, role: "owner"})
integration = Repo.insert!(%Integration{project_id: project.id, user_id: user.id, service: "slack", data: %{url: "http://example.com"}})
translation_comments_subscription = Repo.insert!(%TranslationCommentsSubscription{translation_id: translation.id, user_id: user.id})
{:ok,
[
project: project,
document: document,
revision: revision,
user: user,
version: version,
translation: translation,
collaborator: collaborator,
integration: integration,
translation_comments_subscription: translation_comments_subscription
]}
end
test "authorized viewer", %{user: user} do
root = %{user: user}
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.viewer_authorize(:index_permissions, resolver).(root, args, context)
assert_receive :ok
end
test "authorized viewer to create project", %{user: user} do
root = %{user: user}
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.viewer_authorize(:create_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized viewer" do
root = %{user: nil}
args = %{}
context = %{conn: %{}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.viewer_authorize(:index_permissions, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized project root", %{user: user, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = project
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.project_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "authorized project args", %{user: user, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: project.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.project_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized project role", %{user: user, project: project} do
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = project
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.project_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "unauthorized project root", %{project: project} do
user = %User{email: "test+2@test.com"} |> Repo.insert!()
user = Map.put(user, :permissions, %{})
root = project
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.project_authorize(:show_project, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized revision root", %{user: user, revision: revision, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = revision
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.revision_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "authorized revision args", %{user: user, revision: revision, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: revision.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.revision_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized revision role", %{user: user, revision: revision, project: project} do
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = revision
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.revision_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "unauthorized revision root", %{revision: revision} do
user = %User{email: "test+2@test.com"} |> Repo.insert!()
user = Map.put(user, :permissions, %{})
root = revision
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.revision_authorize(:show_project, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized version root", %{user: user, version: version, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = version
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.version_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "authorized version args", %{user: user, version: version, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: version.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.version_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized version role", %{user: user, version: version, project: project} do
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = version
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.version_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "unauthorized version root", %{version: version} do
user = %User{email: "test+2@test.com"} |> Repo.insert!()
user = Map.put(user, :permissions, %{})
root = version
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.version_authorize(:show_project, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized translation root", %{user: user, translation: translation, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = translation
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.translation_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "authorized translation revision preloaded root", %{user: user, revision: revision, translation: translation, project: project} do
translation = %{translation | revision: revision}
user = Map.put(user, :permissions, %{project.id => "owner"})
root = translation
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.translation_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "authorized translation args", %{user: user, translation: translation, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: translation.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.translation_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized translation role", %{user: user, translation: translation, project: project} do
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = translation
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.translation_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "unauthorized translation root", %{translation: translation} do
user = %User{email: "test+2@test.com"} |> Repo.insert!()
user = Map.put(user, :permissions, %{})
root = translation
args = %{}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.translation_authorize(:show_project, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized document args", %{user: user, document: document, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: document.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.document_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized document role", %{user: user, document: document, project: project} do
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = nil
args = %{id: document.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.document_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized collaborator args", %{user: user, collaborator: collaborator, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: collaborator.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.collaborator_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized collaborator role", %{user: user, collaborator: collaborator, project: project} do
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = nil
args = %{id: collaborator.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.collaborator_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized integration args", %{user: user, integration: integration, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: integration.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.integration_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized integration role", %{user: user, integration: integration, project: project} do
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = nil
args = %{id: integration.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.integration_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized operation revision args", %{user: user, revision: revision, project: project} do
operation = Repo.insert!(%Operation{revision_id: revision.id, user_id: user.id, key: "test", text: "bar"})
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: operation.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.operation_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "authorized operation translation args", %{user: user, translation: translation, project: project} do
operation = Repo.insert!(%Operation{translation_id: translation.id, user_id: user.id, key: "test", text: "bar"})
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: operation.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.operation_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "authorized operation project args", %{user: user, project: project} do
operation = Repo.insert!(%Operation{project_id: project.id, user_id: user.id, key: "test", text: "bar"})
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: operation.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.operation_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized operation role", %{user: user, revision: revision, project: project} do
operation = Repo.insert!(%Operation{revision_id: revision.id, user_id: user.id, key: "test", text: "bar"})
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = nil
args = %{id: operation.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.operation_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
test "authorized translation_comments_subscription args", %{user: user, translation_comments_subscription: translation_comments_subscription, project: project} do
user = Map.put(user, :permissions, %{project.id => "owner"})
root = nil
args = %{id: translation_comments_subscription.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
Authorization.translation_comment_subscription_authorize(:show_project, resolver).(root, args, context)
assert_receive :ok
end
test "unauthorized translation_comments_subscription role", %{user: user, translation_comments_subscription: translation_comments_subscription, project: project} do
user = Map.put(user, :permissions, %{project.id => "reviewer"})
root = nil
args = %{id: translation_comments_subscription.id}
context = %{context: %{conn: %{assigns: %{current_user: user}}}}
resolver = fn _, _, _ -> send(self(), :ok) end
result = Authorization.translation_comment_subscription_authorize(:create_slave, resolver).(root, args, context)
assert result == {:ok, nil}
refute_receive :ok
end
end
| 36.712088
| 166
| 0.650862
|
038ee0fef1b2142bdada0941162f330f1f7445d9
| 1,076
|
ex
|
Elixir
|
lib/badges_web/live/group_live/index.ex
|
TomGrozev/SAD-Badges
|
a94331433ea21a0d719216ac8473e706166d6004
|
[
"MIT"
] | null | null | null |
lib/badges_web/live/group_live/index.ex
|
TomGrozev/SAD-Badges
|
a94331433ea21a0d719216ac8473e706166d6004
|
[
"MIT"
] | null | null | null |
lib/badges_web/live/group_live/index.ex
|
TomGrozev/SAD-Badges
|
a94331433ea21a0d719216ac8473e706166d6004
|
[
"MIT"
] | null | null | null |
defmodule BadgesWeb.GroupLive.Index do
  @moduledoc """
  LiveView listing all student groups, with modal-driven create/edit
  (via `live_action`) and inline delete.
  """

  use BadgesWeb, :live_view

  alias Badges.Students
  alias Badges.Students.Group

  @impl true
  def mount(_params, _session, socket) do
    {:ok, assign(socket, :groups, fetch_groups())}
  end

  @impl true
  def handle_params(params, _url, socket) do
    {:noreply, apply_action(socket, socket.assigns.live_action, params)}
  end

  # :edit — load the requested group into the modal form.
  defp apply_action(socket, :edit, %{"id" => id}) do
    assign(socket, page_title: "Edit Group", group: Students.get_group!(id))
  end

  # :new — present an empty changeset-backed form.
  defp apply_action(socket, :new, _params) do
    assign(socket, page_title: "New Group", group: %Group{})
  end

  # :index — plain listing, no modal open.
  defp apply_action(socket, :index, _params) do
    assign(socket, page_title: "Listing Groups", group: nil)
  end

  @impl true
  def handle_event("delete", %{"id" => id}, socket) do
    # get_group!/1 raises if the id is stale; delete is asserted to succeed.
    {:ok, _deleted} =
      id
      |> Students.get_group!()
      |> Students.delete_group()

    {:noreply, assign(socket, :groups, fetch_groups())}
  end

  defp fetch_groups do
    Students.list_groups()
  end
end
| 22.893617
| 72
| 0.66171
|
038eee5d4167ee2937d76ad50a1dfa8948ec8d7b
| 1,539
|
ex
|
Elixir
|
clients/ad_sense/lib/google_api/ad_sense/v2/model/list_alerts_response.ex
|
renovate-bot/elixir-google-api
|
1da34cd39b670c99f067011e05ab90af93fef1f6
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/ad_sense/lib/google_api/ad_sense/v2/model/list_alerts_response.ex
|
swansoffiee/elixir-google-api
|
9ea6d39f273fb430634788c258b3189d3613dde0
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/ad_sense/lib/google_api/ad_sense/v2/model/list_alerts_response.ex
|
dazuma/elixir-google-api
|
6a9897168008efe07a6081d2326735fe332e522c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdSense.V2.Model.ListAlertsResponse do
  @moduledoc """
  Response definition for the alerts list rpc.

  ## Attributes

  *   `alerts` (*type:* `list(GoogleApi.AdSense.V2.Model.Alert.t)`, *default:* `nil`) - The alerts returned in this list response.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :alerts => list(GoogleApi.AdSense.V2.Model.Alert.t()) | nil
        }
  # Each element of the JSON "alerts" array decodes into an Alert model.
  field(:alerts, as: GoogleApi.AdSense.V2.Model.Alert, type: :list)
end
# Delegates JSON decoding to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.AdSense.V2.Model.ListAlertsResponse do
  def decode(value, options) do
    GoogleApi.AdSense.V2.Model.ListAlertsResponse.decode(value, options)
  end
end
# Encodes via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.AdSense.V2.Model.ListAlertsResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.744681
| 130
| 0.739441
|
038f1569a014aa2803e11ba7c808429b416ae07d
| 1,854
|
exs
|
Elixir
|
clients/cloud_scheduler/mix.exs
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_scheduler/mix.exs
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/cloud_scheduler/mix.exs
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudScheduler.Mixfile do
  use Mix.Project

  @version "0.6.1"

  # Mix project definition for the Cloud Scheduler API client.
  def project() do
    [
      app: :google_api_cloud_scheduler,
      version: @version,
      elixir: "~> 1.6",
      # Mix.env/0 is called with parentheses: bare zero-arity calls are
      # ambiguous and trigger a compiler warning on recent Elixir releases.
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/cloud_scheduler"
    ]
  end

  def application() do
    [extra_applications: [:logger]]
  end

  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # One-line package summary shown on Hex.
  defp description() do
    """
    Cloud Scheduler API client library. Creates and manages jobs run on a regular recurring schedule.
    """
  end

  # Hex package metadata (files shipped, maintainers, license, links).
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/cloud_scheduler",
        "Homepage" => "https://cloud.google.com/scheduler/"
      }
    ]
  end
end
| 27.671642
| 106
| 0.658576
|
038f3053171d8d9406864e720fd50497df0195c5
| 1,997
|
exs
|
Elixir
|
config/prod.exs
|
cdegroot/clueless
|
c7f575159a42f55eb843df0e37802e778826c845
|
[
"MIT"
] | null | null | null |
config/prod.exs
|
cdegroot/clueless
|
c7f575159a42f55eb843df0e37802e778826c845
|
[
"MIT"
] | 4
|
2015-12-18T20:35:49.000Z
|
2015-12-18T20:36:43.000Z
|
config/prod.exs
|
cdegroot/clueless
|
c7f575159a42f55eb843df0e37802e778826c845
|
[
"MIT"
] | null | null | null |
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` — switch once the project targets 1.9+ (TODO confirm the
# minimum supported Elixir version before changing).
use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :clueless, Clueless.Endpoint,
  http: [port: {:system, "PORT"}],
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#  config :clueless, Clueless.Endpoint,
#    ...
#    url: [host: "example.com", port: 443],
#    https: [port: 443,
#            keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#            certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
#  config :clueless, Clueless.Endpoint,
#    force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
#     config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
#     config :clueless, Clueless.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 31.698413
| 67
| 0.714071
|
038f4454dc86d5a775ae3db454af2bbb86884bcf
| 1,791
|
ex
|
Elixir
|
clients/display_video/lib/google_api/display_video/v1/model/digital_content_label_targeting_option_details.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/display_video/lib/google_api/display_video/v1/model/digital_content_label_targeting_option_details.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/display_video/lib/google_api/display_video/v1/model/digital_content_label_targeting_option_details.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.DigitalContentLabelTargetingOptionDetails do
  @moduledoc """
  Represents a targetable digital content label rating tier. This will be
  populated in the
  digital_content_label_details
  field of the TargetingOption when
  targeting_type is
  `TARGETING_TYPE_DIGITAL_CONTENT_LABEL_EXCLUSION`.

  ## Attributes

  *   `contentRatingTier` (*type:* `String.t`, *default:* `nil`) - Output only. An enum for the content label brand safety tiers.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :contentRatingTier => String.t()
        }
  # Plain scalar field; decoded straight from the JSON key of the same name.
  field(:contentRatingTier)
end
# Delegates JSON decoding to the model's generated decode/2.
defimpl Poison.Decoder,
  for: GoogleApi.DisplayVideo.V1.Model.DigitalContentLabelTargetingOptionDetails do
  def decode(value, options) do
    GoogleApi.DisplayVideo.V1.Model.DigitalContentLabelTargetingOptionDetails.decode(
      value,
      options
    )
  end
end
# Encodes via the shared Gax model encoder.
defimpl Poison.Encoder,
  for: GoogleApi.DisplayVideo.V1.Model.DigitalContentLabelTargetingOptionDetails do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.421053
| 129
| 0.760469
|
038f6b136255af5adb91b261c7ca841e3ccf822b
| 1,013
|
ex
|
Elixir
|
lib/cocktail/rule.ex
|
gadabout/cocktail
|
3dd3b6ee60d6f99b640f092c974d067338dd589b
|
[
"MIT"
] | 117
|
2017-09-09T00:02:32.000Z
|
2022-02-10T15:36:28.000Z
|
lib/cocktail/rule.ex
|
gadabout/cocktail
|
3dd3b6ee60d6f99b640f092c974d067338dd589b
|
[
"MIT"
] | 193
|
2017-09-09T22:49:18.000Z
|
2022-03-30T13:05:25.000Z
|
lib/cocktail/rule.ex
|
gadabout/cocktail
|
3dd3b6ee60d6f99b640f092c974d067338dd589b
|
[
"MIT"
] | 20
|
2018-01-04T14:58:42.000Z
|
2021-11-25T13:59:10.000Z
|
defmodule Cocktail.Rule do
  @moduledoc """
  Represent a recurrence rule (RRULE).
  """

  alias Cocktail.{Builder, Rule, Validation}

  @type t :: %__MODULE__{
          count: pos_integer | nil,
          until: Cocktail.time() | nil,
          validations: Validation.validations_map()
        }

  @enforce_keys [:validations]
  defstruct count: nil,
            until: nil,
            validations: %{}

  @doc """
  Builds a rule from `options`, separating the optional `:count` and
  `:until` bounds from the remaining options, which are compiled into
  the rule's validations.
  """
  @spec new(Cocktail.rule_options()) :: t
  def new(options) do
    {bounds, validation_options} = Keyword.split(options, [:count, :until])

    %Rule{
      count: bounds[:count],
      until: bounds[:until],
      validations: Validation.build_validations(validation_options)
    }
  end

  @doc "Replaces the rule's `:until` end time."
  @spec set_until(t, Cocktail.time()) :: t
  def set_until(%__MODULE__{} = rule, end_time), do: %Rule{rule | until: end_time}

  defimpl Inspect, for: __MODULE__ do
    import Inspect.Algebra

    # Render as #Cocktail.Rule<...> using the RRULE string builder.
    def inspect(rule, _opts) do
      concat(["#Cocktail.Rule<", Builder.String.build_rule(rule), ">"])
    end
  end
end
| 25.974359
| 78
| 0.640671
|
038f72fe23cccd0ff612042c0f52611f452f0c76
| 3,629
|
ex
|
Elixir
|
lib/nerves_network/if_supervisor.ex
|
mattneel/nerves_network
|
192a6f7c0cc59696ad82c95c2ac6c1e979994d21
|
[
"Apache-2.0"
] | 1
|
2020-04-25T11:48:00.000Z
|
2020-04-25T11:48:00.000Z
|
lib/nerves_network/if_supervisor.ex
|
mattneel/nerves_network
|
192a6f7c0cc59696ad82c95c2ac6c1e979994d21
|
[
"Apache-2.0"
] | null | null | null |
lib/nerves_network/if_supervisor.ex
|
mattneel/nerves_network
|
192a6f7c0cc59696ad82c95c2ac6c1e979994d21
|
[
"Apache-2.0"
] | null | null | null |
defmodule Nerves.Network.IFSupervisor do
  use Supervisor
  alias Nerves.Network.Types
  import Nerves.Network.Utils, only: [log_atomized_iface_error: 1]
  @moduledoc false

  # Supervises one "manager" process per network interface. Children are
  # attached/detached at runtime by setup/2 and teardown/1; the manager
  # module is picked from the interface type (wired/wireless) and settings.

  @spec start_link(GenServer.options()) :: GenServer.on_start()
  def start_link(options \\ []) do
    Supervisor.start_link(__MODULE__, [], options)
  end

  def init([]) do
    # Start with no children; interfaces are added on demand via setup/2.
    {:ok, {{:one_for_one, 10, 3600}, []}}
  end

  @spec setup(Types.ifname() | atom, Nerves.Network.setup_settings()) ::
          Supervisor.on_start_child()
  # Atom interface names are accepted but deprecated: log and retry with
  # the string form.
  def setup(ifname, settings) when is_atom(ifname) do
    log_atomized_iface_error(ifname)
    setup(to_string(ifname), settings)
  end

  def setup(ifname, settings) do
    pidname = pname(ifname)

    if !Process.whereis(pidname) do
      manager_module = manager(if_type(ifname), settings)
      # NOTE(review): worker/3 comes from the legacy Supervisor.Spec API
      # (deprecated in newer Elixir) — kept for compatibility here.
      child = worker(manager_module, [ifname, settings, [name: pidname]], id: pidname)
      Supervisor.start_child(__MODULE__, child)
    else
      {:error, :already_added}
    end
  end

  @spec teardown(Types.ifname()) :: :ok | {:error, :not_started}
  def teardown(ifname) do
    pidname = pname(ifname)

    if Process.whereis(pidname) do
      # Terminate first, then delete the child spec so the id can be reused.
      Supervisor.terminate_child(__MODULE__, pidname)
      Supervisor.delete_child(__MODULE__, pidname)
    else
      {:error, :not_started}
    end
  end

  # Support atom interface names to avoid breaking some existing
  # code. This is a deprecated use of the API.
  @spec scan(Types.ifname() | atom) :: [String.t()] | {:error, any}
  def scan(ifname) when is_atom(ifname) do
    log_atomized_iface_error(ifname)
    scan(to_string(ifname))
  end

  def scan(ifname) when is_binary(ifname) do
    # Only running, wireless interfaces can scan; the generous 30s timeout
    # covers slow hardware scans.
    with pid when is_pid(pid) <- Process.whereis(pname(ifname)),
         :wireless <- if_type(ifname) do
      GenServer.call(pid, :scan, 30_000)
    else
      # If there is no pid.
      nil ->
        {:error, :not_started}

      # if the interface was wired.
      :wired ->
        {:error, :not_wireless}
    end
  end

  # Registered process name for an interface's manager.
  @spec pname(Types.ifname()) :: atom
  defp pname(ifname) do
    String.to_atom("Nerves.Network.Interface." <> ifname)
  end

  # Return the appropriate interface manager based on the interface's type
  # and settings
  @spec manager(:wired | :wireless, Nerves.Network.setup_settings()) ::
          Nerves.Network.StaticManager
          | Nerves.Network.LinkLocalManager
          | Nerves.Network.DHCPManager
          | Nerves.Network.WiFiManager
  defp manager(:wired, settings) do
    case Keyword.get(settings, :ipv4_address_method) do
      :static ->
        Nerves.Network.StaticManager

      :linklocal ->
        Nerves.Network.LinkLocalManager

      :dhcp ->
        Nerves.Network.DHCPManager

      # Default to DHCP if unset; crash if anything else.
      nil ->
        Nerves.Network.DHCPManager
    end
  end

  defp manager(:wireless, _settings) do
    Nerves.Network.WiFiManager
  end

  @spec if_type(Types.ifname()) :: :wired | :wireless
  # Categorize networks into wired and wireless based on their if names.
  # Clause order matters: more specific prefixes (e.g. "wlan") must be
  # matched before broader ones would (e.g. "wl").
  defp if_type(<<"eth", _rest::binary>>), do: :wired
  defp if_type(<<"usb", _rest::binary>>), do: :wired
  # Localhost
  defp if_type(<<"lo", _rest::binary>>), do: :wired
  defp if_type(<<"wlan", _rest::binary>>), do: :wireless
  # Ralink
  defp if_type(<<"ra", _rest::binary>>), do: :wireless
  # systemd predictable names
  defp if_type(<<"en", _rest::binary>>), do: :wired
  # SLIP
  defp if_type(<<"sl", _rest::binary>>), do: :wired
  defp if_type(<<"wl", _rest::binary>>), do: :wireless
  # wwan (not really supported)
  defp if_type(<<"ww", _rest::binary>>), do: :wired
  # Unknown prefixes are treated as wired.
  defp if_type(_ifname), do: :wired
end
| 29.266129
| 86
| 0.661615
|
038f752da229027a0f3e78eda71f89cf1c17f86f
| 3,794
|
exs
|
Elixir
|
test/lib/canvas_api/github_trackback_test.exs
|
usecanvas/api-v2
|
59214db3a2cf12eb939f22fed320fd10cb47cdfe
|
[
"Apache-2.0"
] | 123
|
2017-04-04T18:15:48.000Z
|
2021-04-26T08:04:22.000Z
|
test/lib/canvas_api/github_trackback_test.exs
|
usecanvas/api-v2
|
59214db3a2cf12eb939f22fed320fd10cb47cdfe
|
[
"Apache-2.0"
] | null | null | null |
test/lib/canvas_api/github_trackback_test.exs
|
usecanvas/api-v2
|
59214db3a2cf12eb939f22fed320fd10cb47cdfe
|
[
"Apache-2.0"
] | 17
|
2017-04-04T18:58:29.000Z
|
2021-05-10T21:39:16.000Z
|
defmodule CanvasAPI.GitHubTrackbackTest do
  use CanvasAPI.ModelCase

  # Fixed malformed alias list (missing space before Referencer).
  alias CanvasAPI.{Canvas, GitHubTrackback, PulseEvent, Referencer, Repo}

  import CanvasAPI.Factory

  setup do
    {:ok, canvas: insert(:canvas)}
  end

  test ".add adds an event for a created comment", %{canvas: canvas} do
    comment = github_event(canvas)

    {:ok, event} =
      GitHubTrackback.add(%{"action" => "created", "comment" => comment})

    assert event.url == comment["html_url"]
    assert event.type == "reference_added"
    assert event.referencer == expected_referencer(comment)
    assert event.canvas.id == canvas.id
  end

  test ".add adds an event for a submitted review", %{canvas: canvas} do
    review = github_event(canvas)

    {:ok, event} =
      GitHubTrackback.add(%{"action" => "submitted", "review" => review})

    assert event.url == review["html_url"]
    assert event.type == "reference_added"
    assert event.referencer == expected_referencer(review)
    assert event.canvas.id == canvas.id
  end

  test ".add adds an event for an opened issue", %{canvas: canvas} do
    # Issues reference the canvas in the title rather than the body.
    issue = github_event(canvas) |> Map.merge(%{
      "title" => Canvas.web_url(canvas),
      "body" => "",
    })

    {:ok, event} =
      GitHubTrackback.add(%{"action" => "opened", "issue" => issue})

    assert event.url == issue["html_url"]
    assert event.type == "reference_added"
    assert event.referencer == expected_referencer(issue)
    assert event.canvas.id == canvas.id
  end

  test ".add adds an event for an opened pull request", %{canvas: canvas} do
    pr = github_event(canvas) |> Map.merge(%{
      "title" => Canvas.web_url(canvas),
      "body" => "",
    })

    {:ok, event} =
      GitHubTrackback.add(%{"action" => "opened", "pull_request" => pr})

    assert event.url == pr["html_url"]
    assert event.type == "reference_added"
    assert event.referencer == expected_referencer(pr)
    assert event.canvas.id == canvas.id
  end

  test ".add adds events for commits", %{canvas: canvas} do
    # First commit has no canvas reference and must produce no event.
    commits = [
      %{
        "message" => "Foo"
      },
      %{
        "message" => Canvas.web_url(canvas),
        "url" => "commit-url",
        "author" => %{
          "email" => "auth-email",
          "name" => "auth-name"
        }
      },
      %{
        "message" => Canvas.web_url(canvas),
        "url" => "commit-url",
        "author" => %{
          "email" => "auth-email",
          "name" => "auth-name"
        }
      }
    ]

    :ok = GitHubTrackback.add(%{"commits" => commits})
    events = Repo.all(PulseEvent)
    assert Enum.map(events, & &1.type) |> Enum.dedup == ["mentioned"]
    assert Enum.map(events, & &1.url) |> Enum.dedup == ["commit-url"]
    assert Enum.map(events, & &1.referencer.id) |> Enum.dedup == ["auth-email"]
  end

  # Base webhook payload shared by all event types: a body referencing the
  # canvas plus a GitHub-style "user" map.
  defp github_event(canvas) do
    %{
      "body" => Canvas.web_url(canvas),
      "html_url" => "comment-url",
      "user" => %{
        "id" => "user-id",
        "avatar_url" => "user-avatar_url",
        "login" => "user-login",
        "html_url" => "user-html_url"
      }
    }
  end

  # The Referencer struct GitHubTrackback should derive from the payload's
  # "user" map. Extracted to remove the four copies of this expectation.
  defp expected_referencer(payload) do
    %Referencer{
      id: get_in(payload, ~w(user id)),
      avatar_url: get_in(payload, ~w(user avatar_url)),
      name: "@#{get_in(payload, ~w(user login))}",
      url: get_in(payload, ~w(user html_url))
    }
  end
end
| 27.897059
| 79
| 0.573537
|
038f8cb23d6efd25448b8dbe0cef3f69106b75b4
| 2,761
|
ex
|
Elixir
|
clients/os_config/lib/google_api/os_config/v1/model/inventory_item.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/os_config/lib/google_api/os_config/v1/model/inventory_item.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/os_config/lib/google_api/os_config/v1/model/inventory_item.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.OSConfig.V1.Model.InventoryItem do
  @moduledoc """
  A single piece of inventory on a VM.

  ## Attributes

  *   `availablePackage` (*type:* `GoogleApi.OSConfig.V1.Model.InventorySoftwarePackage.t`, *default:* `nil`) - Software package available to be installed on the VM instance.
  *   `createTime` (*type:* `DateTime.t`, *default:* `nil`) - When this inventory item was first detected.
  *   `id` (*type:* `String.t`, *default:* `nil`) - Identifier for this item, unique across items for this VM.
  *   `installedPackage` (*type:* `GoogleApi.OSConfig.V1.Model.InventorySoftwarePackage.t`, *default:* `nil`) - Software package present on the VM instance.
  *   `originType` (*type:* `String.t`, *default:* `nil`) - The origin of this inventory item.
  *   `type` (*type:* `String.t`, *default:* `nil`) - The specific type of inventory, correlating to its specific details.
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - When this inventory item was last modified.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :availablePackage => GoogleApi.OSConfig.V1.Model.InventorySoftwarePackage.t(),
          :createTime => DateTime.t(),
          :id => String.t(),
          :installedPackage => GoogleApi.OSConfig.V1.Model.InventorySoftwarePackage.t(),
          :originType => String.t(),
          :type => String.t(),
          :updateTime => DateTime.t()
        }
  # Field declarations map JSON keys to decoded types; scalar fields decode
  # as-is, `as:` fields decode into the named model.
  field(:availablePackage, as: GoogleApi.OSConfig.V1.Model.InventorySoftwarePackage)
  field(:createTime, as: DateTime)
  field(:id)
  field(:installedPackage, as: GoogleApi.OSConfig.V1.Model.InventorySoftwarePackage)
  field(:originType)
  field(:type)
  field(:updateTime, as: DateTime)
end
# Delegates JSON decoding to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.OSConfig.V1.Model.InventoryItem do
  def decode(value, options) do
    GoogleApi.OSConfig.V1.Model.InventoryItem.decode(value, options)
  end
end
# Encodes via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.OSConfig.V1.Model.InventoryItem do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.476923
| 174
| 0.709526
|
038fb727f3a3f39dee6b85caadd80cf1b174c16e
| 2,261
|
exs
|
Elixir
|
test/loki/file_manipulation_test.exs
|
Selvaticus/loki
|
58006dcf5752595ed7b875bee265423074895ae4
|
[
"MIT"
] | 83
|
2016-11-27T11:15:58.000Z
|
2021-12-24T21:04:04.000Z
|
test/loki/file_manipulation_test.exs
|
Selvaticus/loki
|
58006dcf5752595ed7b875bee265423074895ae4
|
[
"MIT"
] | 28
|
2016-12-08T14:18:32.000Z
|
2019-01-08T10:57:54.000Z
|
test/loki/file_manipulation_test.exs
|
Selvaticus/loki
|
58006dcf5752595ed7b875bee265423074895ae4
|
[
"MIT"
] | 9
|
2017-02-08T21:56:18.000Z
|
2019-06-09T02:14:07.000Z
|
defmodule Loki.FileManipulationTest do
  use ExUnit.Case, async: false

  import Loki.File
  import Loki.FileManipulation
  import ExUnit.CaptureIO

  # Each test creates a scratch file under temp/ (creation output is
  # swallowed) and asserts the colored status line the helper prints.
  describe "FileManipulation" do
    test "#append_to_file" do
      capture_io(fn -> create_file("temp/append") end)
      assert capture_io(fn ->
        append_to_file("temp/append", "appended")
      end) == "\e[32m * append \e[0mtemp/append\e[0m\n"
    end
    test "#prepend_to_file" do
      capture_io(fn -> create_file("temp/prepend") end)
      assert capture_io(fn ->
        prepend_to_file("temp/prepend", "prepended")
      end) == "\e[32m * prepend \e[0mtemp/prepend\e[0m\n"
    end
    test "#remove_from_file" do
      capture_io(fn -> create_file("temp/remove", "remove") end)
      assert capture_io(fn ->
        remove_from_file("temp/remove", "remove")
      end) == "\e[32m * remove \e[0mtemp/remove\e[0m\n"
    end
    test "#inject_into_file" do
      capture_io(fn -> create_file("temp/inject", "line") end)
      assert capture_io(fn ->
        inject_into_file("temp/inject", "injected", :after, "line")
      end) == "\e[32m * inject \e[0mtemp/inject\e[0m\n"
    end
    test "#replace_in_file" do
      capture_io(fn -> create_file("temp/replace", "replace") end)
      assert capture_io(fn ->
        replace_in_file("temp/replace", "replaced", "replace")
      end) == "\e[32m * replace \e[0mtemp/replace\e[0m\n"
    end
    test "#comment_in_file" do
      capture_io(fn -> create_file("temp/comment", "comment") end)
      assert capture_io(fn ->
        comment_in_file("temp/comment", "comment")
      end) == "\e[32m * comment \e[0mtemp/comment\e[0m\n"
    end
    test "#uncomment_in_file" do
      capture_io(fn -> create_file("temp/uncomment", "# uncomment") end)
      # BUG FIX: this test previously called comment_in_file/2, so
      # uncomment_in_file/2 was never exercised. The expected status line
      # follows the per-verb pattern of the other helpers —
      # NOTE(review): confirm against Loki.FileManipulation's actual output.
      assert capture_io(fn ->
        uncomment_in_file("temp/uncomment", "# uncomment")
      end) == "\e[32m * uncomment \e[0mtemp/uncomment\e[0m\n"
    end
    test "#remove_comments_in_file" do
      capture_io(fn -> create_file("temp/remove_all_comments", "# comment\n # comment") end)
      assert capture_io(fn ->
        remove_comments_in_file("temp/remove_all_comments")
      end) == "\e[32m * uncomment \e[0m in file temp/remove_all_comments\e[0m\n"
    end
  end
end
| 29.75
| 92
| 0.62981
|
038fd7222ee97dad4461b12ea77862a3f6ed67ff
| 1,005
|
ex
|
Elixir
|
lib/cineplex/queue/job/extract.ex
|
upmaru/cineplex
|
7d1d516d3e3d3683b2ad4425b61517a8f556f721
|
[
"MIT"
] | null | null | null |
lib/cineplex/queue/job/extract.ex
|
upmaru/cineplex
|
7d1d516d3e3d3683b2ad4425b61517a8f556f721
|
[
"MIT"
] | null | null | null |
lib/cineplex/queue/job/extract.ex
|
upmaru/cineplex
|
7d1d516d3e3d3683b2ad4425b61517a8f556f721
|
[
"MIT"
] | null | null | null |
defmodule Cineplex.Queue.Job.Extract do
  @moduledoc """
  Expands an extract job into one `Job.Entry` per preset configured on the
  job's source, linking each new entry to the parent entry for the same
  preset when one exists.
  """

  alias Cineplex.{
    Queue,
    Repo
  }

  alias Queue.Job

  @doc """
  Creates the entries for `job` in a single `insert_all`.

  Returns `{:ok, %{job: job, entries: entries}}` with the rows reported by
  `Repo.insert_all/2`.
  """
  @spec perform(Job.t()) :: {:ok, %{job: Job.t(), entries: [Job.Entry.t()]}}
  def perform(%Job{source: source} = job) do
    source = Repo.preload(source, [:presets])
    job = Repo.preload(job, [:parent_entries])

    entries = Enum.map(source.presets, &entry_from_preset(&1, job))
    {_count, created_entries} = Repo.insert_all(Job.Entry, entries)

    {:ok, %{job: job, entries: created_entries}}
  end

  # Builds the insert_all attrs for one preset, attaching the id of the
  # matching parent entry (same preset) when present.
  defp entry_from_preset(preset, job) do
    # insert_all does not autofill timestamps; truncate to the second since
    # default Ecto datetime columns reject microseconds.
    timestamp = DateTime.truncate(DateTime.utc_now(), :second)

    # Enum.find/2 replaces the previous filter |> List.first pipeline:
    # same result, but stops at the first match.
    parent_entry = Enum.find(job.parent_entries, fn e -> e.preset_id == preset.id end)

    base_entry = %{
      preset_id: preset.id,
      job_id: job.id,
      inserted_at: timestamp,
      updated_at: timestamp
    }

    if parent_entry do
      Map.put(base_entry, :parent_id, parent_entry.id)
    else
      base_entry
    end
  end
end
| 23.372093
| 76
| 0.634826
|
03900444a745073d09009d7f13daa1d97db1510b
| 1,847
|
exs
|
Elixir
|
clients/books/mix.exs
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/books/mix.exs
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/books/mix.exs
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.Mixfile do
  use Mix.Project

  @version "0.20.0"

  # Mix project definition for the Books API client.
  def project() do
    [
      app: :google_api_books,
      version: @version,
      elixir: "~> 1.6",
      # Mix.env/0 is called with parentheses: bare zero-arity calls are
      # ambiguous and trigger a compiler warning on recent Elixir releases.
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/books"
    ]
  end

  def application() do
    [extra_applications: [:logger]]
  end

  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # One-line package summary shown on Hex.
  defp description() do
    """
    Books API client library. The Google Books API allows clients to access the Google Books repository.
    """
  end

  # Hex package metadata (files shipped, maintainers, license, links).
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/books",
        "Homepage" => "https://code.google.com/apis/books/docs/v1/getting_started.html"
      }
    ]
  end
end
| 27.567164
| 104
| 0.654575
|
0390157e536abba1d6df977091549960e09bde53
| 359
|
ex
|
Elixir
|
lib/message_check/listen.ex
|
Celeo/gandalf_bot
|
f8e48b9a0b403fac5e29e514ce6c684d1bebeecc
|
[
"MIT"
] | null | null | null |
lib/message_check/listen.ex
|
Celeo/gandalf_bot
|
f8e48b9a0b403fac5e29e514ce6c684d1bebeecc
|
[
"MIT"
] | null | null | null |
lib/message_check/listen.ex
|
Celeo/gandalf_bot
|
f8e48b9a0b403fac5e29e514ce6c684d1bebeecc
|
[
"MIT"
] | null | null | null |
defmodule Bot.MessageCheck.HeyListen do
  require Logger

  # A message that is exactly the word "listen", optionally followed by
  # exclamation marks or spaces, case-insensitive.
  @pattern ~r/^listen[! ]*$/i

  @doc """
  Returns `true` when the trimmed message — after Discord formatting and
  punctuation are stripped — matches the "listen" pattern.
  """
  def is_match!(content) do
    normalized =
      content
      |> String.trim()
      |> Bot.Util.Message.strip_formatting()
      |> Bot.Util.Message.strip_punctuation()

    String.match?(normalized, @pattern)
  end

  @doc "Reaction emoji used when a match is found."
  def emoji(), do: %Nostrum.Struct.Emoji{name: "👂"}
end
| 19.944444
| 51
| 0.640669
|
03901b441c38f16c265bcfd278bc1fdbde7578ea
| 4,259
|
ex
|
Elixir
|
lib/beats/conductor.ex
|
mtrudel/beats
|
6edf532bc02f2625190cbcb7f99a09fc6f58c5fd
|
[
"MIT"
] | 37
|
2018-05-19T17:45:46.000Z
|
2022-01-18T11:03:36.000Z
|
lib/beats/conductor.ex
|
mtrudel/beats
|
6edf532bc02f2625190cbcb7f99a09fc6f58c5fd
|
[
"MIT"
] | 1
|
2020-10-08T09:53:15.000Z
|
2020-10-08T09:53:15.000Z
|
lib/beats/conductor.ex
|
mtrudel/beats
|
6edf532bc02f2625190cbcb7f99a09fc6f58c5fd
|
[
"MIT"
] | 2
|
2018-11-20T18:16:08.000Z
|
2021-02-09T15:14:28.000Z
|
defmodule Beats.Conductor do
use GenServer
# Client API
def start_link(arg) do
GenServer.start_link(__MODULE__, arg, name: __MODULE__)
end
def do_tick() do
GenServer.call(__MODULE__, :do_tick)
end
def reset_tick(to \\ 1) do
GenServer.call(__MODULE__, {:reset_tick, to})
end
def play_fill(num) do
GenServer.call(__MODULE__, {:play_fill, num})
end
def toggle_update_display do
GenServer.call(__MODULE__, :toggle_update_display)
end
# Server API
def init(arg) do
Beats.FileWatcher.subscribe()
score =
arg
|> Keyword.get(:filename, "default.json")
|> Beats.Score.score_from_file()
|> load_score()
{:ok,
%{
tick: 0,
score: score,
current_score: score,
pending_score: nil,
pending_fill: nil,
update_display: true
}}
end
def handle_call(
:do_tick,
_from,
%{
score: score,
current_score: %Beats.Score{channel: channel} = current_score,
pending_score: pending_score,
pending_fill: pending_fill,
update_display: update_display,
tick: tick
} = state
) do
# Map the tick into a musical measure and direct everyone to play it
measure = div(tick, 16)
sixteenth = rem(tick, 16)
# Collect all the notes to play from the current score and play them
score.parts
|> Enum.map(&Beats.Part.note_for(&1, measure, sixteenth))
|> Enum.filter(& &1)
|> Beats.Output.play(channel)
if update_display do
# Update the display
Beats.Display.set_tick(tick)
end
# Update our state according to whether we're at the end of a measure or not
cond do
sixteenth == 15 && pending_score ->
# New score coming our way
load_score(pending_score)
{:reply, tick,
%{
state
| tick: 0,
score: pending_score,
current_score: pending_score,
pending_score: nil,
pending_fill: nil
}}
sixteenth == 15 && pending_fill ->
# Fill request enqueued
Beats.Display.set_score(pending_fill)
{:reply, tick,
%{
state
| tick: tick + 1,
score: pending_fill,
current_score: current_score,
pending_score: nil,
pending_fill: nil
}}
sixteenth == 15 && score != current_score ->
# Restoring after a fill
Beats.Display.set_score(current_score)
Beats.Display.puts(" ")
{:reply, tick,
%{
state
| tick: tick + 1,
score: current_score,
current_score: current_score,
pending_score: nil,
pending_fill: nil
}}
true ->
{:reply, tick, %{state | tick: tick + 1}}
end
end
def handle_call({:reset_tick, to}, _from, state) do
Beats.Display.puts("Resetting tick to #{to}")
{:reply, to, %{state | tick: to}}
end
def handle_call({:play_fill, num}, _from, %{current_score: %{fills: fills}} = state) do
if length(fills) >= num do
Beats.Display.puts("Playing fill #{num}")
{:reply, :ok, %{state | pending_fill: Enum.at(fills, num - 1)}}
else
Beats.Display.puts("Fill #{num} not defined")
{:reply, :no_such_fill, state}
end
end
def handle_call(:toggle_update_display, _from, %{update_display: update_display} = state) do
Beats.Display.puts("Updating Display: #{!update_display}")
{:reply, :ok, %{state | update_display: !update_display}}
end
def handle_info({:file_event, _watcher_pid, {path, _events}}, state) do
if String.ends_with?(path, ".json") do
case Beats.Score.score_from_file(path) do
%Beats.Score{} = score ->
{:noreply, %{state | pending_score: score}}
_ ->
Beats.Display.puts("Could not parse #{Path.basename(path)}")
{:noreply, state}
end
else
{:noreply, state}
end
end
defp load_score(score) do
Beats.Display.set_score(score)
Beats.Metronome.set_bpm(score.desired_bpm)
Beats.Metronome.set_swing(score.swing)
score
end
end
| 25.502994
| 94
| 0.582296
|
03905dc4397cf97d42d502ffa8995cd1d1a80539
| 1,921
|
ex
|
Elixir
|
clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/list_queues_response.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/list_queues_response.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/list_queues_response.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudTasks.V2beta2.Model.ListQueuesResponse do
  @moduledoc """
  Response message for ListQueues.
  ## Attributes
  - nextPageToken (String.t): A token to retrieve next page of results. To return the next page of results, call ListQueues with this value as the page_token. If the next_page_token is empty, there are no more results. The page token is valid for only 2 hours. Defaults to: `null`.
  - queues ([Queue]): The list of queues. Defaults to: `null`.
  """
  # Generated-model plumbing: provides the field/1,2 macros and the
  # decode/2 used by the Poison protocol implementations below.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :nextPageToken => any(),
          :queues => list(GoogleApi.CloudTasks.V2beta2.Model.Queue.t())
        }
  field(:nextPageToken)
  # Each element of "queues" is decoded into a Queue struct.
  field(:queues, as: GoogleApi.CloudTasks.V2beta2.Model.Queue, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CloudTasks.V2beta2.Model.ListQueuesResponse do
  alias GoogleApi.CloudTasks.V2beta2.Model.ListQueuesResponse

  # Delegate decoding to the model's generated decode/2.
  def decode(value, options), do: ListQueuesResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudTasks.V2beta2.Model.ListQueuesResponse do
  # All generated models share ModelBase's generic struct-to-JSON encoding.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.666667
| 284
| 0.747527
|
039080ba662f2761072e00c909e503dcd68d4735
| 2,323
|
ex
|
Elixir
|
lib/plug/session/ets.ex
|
gjaldon/plug
|
bfe88530b429c7b9b29b69b737772ef7c6aa2f6b
|
[
"Apache-2.0"
] | 1
|
2019-05-07T15:05:52.000Z
|
2019-05-07T15:05:52.000Z
|
lib/plug/session/ets.ex
|
gjaldon/plug
|
bfe88530b429c7b9b29b69b737772ef7c6aa2f6b
|
[
"Apache-2.0"
] | null | null | null |
lib/plug/session/ets.ex
|
gjaldon/plug
|
bfe88530b429c7b9b29b69b737772ef7c6aa2f6b
|
[
"Apache-2.0"
] | 1
|
2019-11-23T12:09:14.000Z
|
2019-11-23T12:09:14.000Z
|
defmodule Plug.Session.ETS do
  @moduledoc """
  Stores the session in an in-memory ETS table.

  This store does not create the ETS table; it expects that an
  existing named table with public properties is passed as an
  argument.

  We don't recommend using this store in production as every
  session will be stored in ETS and never cleaned until you
  create a task responsible for cleaning up old entries.

  Also, since the store is in-memory, it means sessions are
  not shared between servers. If you deploy to more than one
  machine, using this store is again not recommended.

  This store, however, can be used as an example for creating
  custom storages, based on Redis, Memcached, or a database
  itself.

  ## Options

    * `:table` - ETS table name (required)

  For more information on ETS tables, visit the Erlang documentation at
  http://www.erlang.org/doc/man/ets.html.

  ## Storage

  The data is stored in ETS in the following format:

      {sid :: String.t, data :: map, timestamp :: :erlang.timestamp}

  The timestamp is updated whenever there is a read or write to the
  table and it may be used to detect if a session is still active.

  ## Examples

      # Create an ETS table when the application starts
      :ets.new(:session, [:named_table, :public, read_concurrency: true])

      # Use the session plug with the table name
      plug Plug.Session, store: :ets, key: "sid", table: :session
  """

  @behaviour Plug.Session.Store

  # How many fresh-id collisions we tolerate before crashing.
  @max_tries 100

  # The store configuration is just the (required) table name.
  def init(opts), do: Keyword.fetch!(opts, :table)

  # Look up a session, refreshing its timestamp on a hit. An unknown sid
  # yields `{nil, %{}}` so a brand-new session can be started.
  def get(_conn, sid, table) do
    case :ets.lookup(table, sid) do
      [{^sid, data, _timestamp}] ->
        :ets.update_element(table, sid, {3, now()})
        {sid, data}

      [] ->
        {nil, %{}}
    end
  end

  # Without a sid yet, generate a fresh one; otherwise overwrite in place.
  def put(_conn, nil, data, table), do: put_new(data, table)

  def put(_conn, sid, data, table) do
    :ets.insert(table, {sid, data, now()})
    sid
  end

  # Remove the session entry, if any.
  def delete(_conn, sid, table) do
    :ets.delete(table, sid)
    :ok
  end

  # Generate random ids until an unused one sticks. Once `counter` reaches
  # @max_tries no clause matches and the caller crashes (let-it-crash).
  defp put_new(data, table, counter \\ 0) when counter < @max_tries do
    sid = Base.encode64(:crypto.strong_rand_bytes(96))

    if :ets.insert_new(table, {sid, data, now()}) do
      sid
    else
      put_new(data, table, counter + 1)
    end
  end

  # Timestamp used for the third tuple element.
  defp now(), do: :os.timestamp()
end
| 24.712766
| 73
| 0.663366
|
03909fd21baa51374ae5f9afefe78866aec479e5
| 4,874
|
exs
|
Elixir
|
deps/ecto_sql/mix.exs
|
Abninho92/RocketseatNLW2021-Rocketpay
|
3e492bf582148202a215f6a26e6c0890ed9a0e23
|
[
"MIT"
] | 1
|
2021-02-24T13:06:18.000Z
|
2021-02-24T13:06:18.000Z
|
deps/ecto_sql/mix.exs
|
gleidsonduarte/rocketseat-rocketpay-phoenix-elixir-postgres
|
300e0b34e0179380f56a8c6cae707722d9c4d8d0
|
[
"MIT"
] | null | null | null |
deps/ecto_sql/mix.exs
|
gleidsonduarte/rocketseat-rocketpay-phoenix-elixir-postgres
|
300e0b34e0179380f56a8c6cae707722d9c4d8d0
|
[
"MIT"
] | null | null | null |
defmodule EctoSQL.MixProject do
  use Mix.Project
  @version "3.5.4"
  # Database adapters that have dedicated integration-test suites under
  # integration_test/<adapter>.
  @adapters ~w(pg myxql tds)
  def project do
    [
      app: :ecto_sql,
      version: @version,
      elixir: "~> 1.8",
      deps: deps(),
      # ECTO_ADAPTER selects which test suite `mix test` runs (see
      # test_paths/1 below); unset means the unit tests under test/.
      test_paths: test_paths(System.get_env("ECTO_ADAPTER")),
      # Driver modules are optional deps, so exclude them from xref checks.
      xref: [
        exclude: [
          MyXQL,
          Ecto.Adapters.MyXQL.Connection,
          Postgrex,
          Ecto.Adapters.Postgres.Connection,
          Tds,
          Tds.Ecto.UUID,
          Ecto.Adapters.Tds.Connection
        ]
      ],
      # Custom testing
      aliases: [
        "test.all": ["test", "test.adapters", "test.as_a_dep"],
        "test.adapters": &test_adapters/1,
        "test.as_a_dep": &test_as_a_dep/1
      ],
      preferred_cli_env: ["test.all": :test, "test.adapters": :test],
      # Hex
      description: "SQL-based adapters for Ecto and database migrations",
      package: package(),
      # Docs
      name: "Ecto SQL",
      docs: docs()
    ]
  end
  def application do
    [
      # :eex is used by the migration/code generation templates.
      extra_applications: [:logger, :eex],
      env: [postgres_map_type: "jsonb"],
      mod: {Ecto.Adapters.SQL.Application, []}
    ]
  end
  defp deps do
    [
      ecto_dep(),
      {:telemetry, "~> 0.4.0"},
      # Drivers
      {:db_connection, "~> 2.2"},
      postgrex_dep(),
      myxql_dep(),
      tds_dep(),
      # Bring something in for JSON during tests
      {:jason, ">= 0.0.0", only: [:test, :docs]},
      # Docs
      {:ex_doc, "~> 0.21", only: :docs},
      # Benchmarks
      {:benchee, "~> 0.11.0", only: :bench},
      {:benchee_json, "~> 0.4.0", only: :bench}
    ]
  end
  # The *_PATH environment variables let CI/development point a dependency
  # at a local checkout instead of the Hex release.
  defp ecto_dep do
    if path = System.get_env("ECTO_PATH") do
      {:ecto, path: path}
    else
      {:ecto, "~> 3.5.0"}
    end
  end
  defp postgrex_dep do
    if path = System.get_env("POSTGREX_PATH") do
      {:postgrex, path: path}
    else
      {:postgrex, "~> 0.15.0 or ~> 1.0", optional: true}
    end
  end
  defp myxql_dep do
    if path = System.get_env("MYXQL_PATH") do
      {:myxql, path: path}
    else
      {:myxql, "~> 0.3.0 or ~> 0.4.0", optional: true}
    end
  end
  defp tds_dep do
    if path = System.get_env("TDS_PATH") do
      {:tds, path: path}
    else
      {:tds, "~> 2.1.1", optional: true}
    end
  end
  # Known adapters run their integration suite; nil runs unit tests;
  # anything else is a configuration error.
  defp test_paths(adapter) when adapter in @adapters, do: ["integration_test/#{adapter}"]
  defp test_paths(nil), do: ["test"]
  defp test_paths(other), do: raise("unknown adapter #{inspect(other)}")
  defp package do
    [
      maintainers: ["Eric Meadows-Jönsson", "José Valim", "James Fish", "Michał Muskała"],
      licenses: ["Apache-2.0"],
      links: %{"GitHub" => "https://github.com/elixir-ecto/ecto_sql"},
      files:
        ~w(.formatter.exs mix.exs README.md CHANGELOG.md lib) ++
          ~w(integration_test/sql integration_test/support)
    ]
  end
  # Compiles ecto_sql as a path dependency of a throwaway project to catch
  # problems that only show up when consumed as a dep.
  defp test_as_a_dep(args) do
    IO.puts("==> Compiling ecto_sql from a dependency")
    File.rm_rf!("tmp/as_a_dep")
    File.mkdir_p!("tmp/as_a_dep")
    File.cd!("tmp/as_a_dep", fn ->
      File.write!("mix.exs", """
      defmodule DepsOnEctoSQL.MixProject do
        use Mix.Project
        def project do
          [
            app: :deps_on_ecto_sql,
            version: "0.0.1",
            deps: [{:ecto_sql, path: "../.."}]
          ]
        end
      end
      """)
      mix_cmd_with_status_check(["do", "deps.get,", "compile", "--force" | args])
    end)
  end
  # Runs the integration suite once per adapter in @adapters.
  defp test_adapters(args) do
    for adapter <- @adapters, do: env_run(adapter, args)
  end
  defp env_run(adapter, args) do
    IO.puts("==> Running tests for ECTO_ADAPTER=#{adapter} mix test")
    mix_cmd_with_status_check(
      ["test", ansi_option() | args],
      env: [{"ECTO_ADAPTER", adapter}]
    )
  end
  defp ansi_option do
    if IO.ANSI.enabled?(), do: "--color", else: "--no-color"
  end
  # Runs `mix` as an external command, streaming output; a non-zero exit
  # makes the whole task exit non-zero at shutdown.
  defp mix_cmd_with_status_check(args, opts \\ []) do
    {_, res} = System.cmd("mix", args, [into: IO.binstream(:stdio, :line)] ++ opts)
    if res > 0 do
      System.at_exit(fn _ -> exit({:shutdown, 1}) end)
    end
  end
  defp docs do
    [
      main: "Ecto.Adapters.SQL",
      source_ref: "v#{@version}",
      canonical: "http://hexdocs.pm/ecto_sql",
      source_url: "https://github.com/elixir-ecto/ecto_sql",
      groups_for_modules: [
        # Ecto.Adapters.SQL,
        # Ecto.Adapters.SQL.Sandbox,
        # Ecto.Migration,
        # Ecto.Migrator,
        "Built-in adapters": [
          Ecto.Adapters.MyXQL,
          Ecto.Adapters.Tds,
          Ecto.Adapters.Postgres
        ],
        "Adapter specification": [
          Ecto.Adapter.Migration,
          Ecto.Adapter.Structure,
          Ecto.Adapters.SQL.Connection,
          Ecto.Migration.Command,
          Ecto.Migration.Constraint,
          Ecto.Migration.Index,
          Ecto.Migration.Reference,
          Ecto.Migration.Table
        ]
      ]
    ]
  end
end
| 24.128713
| 90
| 0.558063
|
0390e0d7332a7d5365164117f70fe194aac2dd47
| 1,257
|
ex
|
Elixir
|
parser/lib/quenya_parser/util.ex
|
yidaoit/quenya
|
45fe42c32829a1a2499d325e592553061b876ef8
|
[
"MIT"
] | 143
|
2020-12-01T06:53:36.000Z
|
2022-03-24T02:33:01.000Z
|
parser/lib/quenya_parser/util.ex
|
yidaoit/quenya
|
45fe42c32829a1a2499d325e592553061b876ef8
|
[
"MIT"
] | 2
|
2020-11-30T05:30:42.000Z
|
2020-12-17T06:33:17.000Z
|
parser/lib/quenya_parser/util.ex
|
tyrchen/quenya
|
b9e8ef9e71e0e52b010b930eee66942e30c62ddd
|
[
"MIT"
] | 12
|
2020-12-07T01:22:17.000Z
|
2020-12-27T12:49:39.000Z
|
defmodule QuenyaParser.Util do
  @moduledoc """
  Utility functions
  """
  require Logger

  @doc """
  Update the map recursively for $ref
  """
  @spec update_map(map() | String.t(), any, boolean(), fun()) ::
          map() | {:error, String.t()}
  def update_map(context, %{"$ref" => path}, recursive, process_ref_fn) do
    # A successful resolution falls straight through; only errors are logged.
    with {:error, msg} <- process_ref_fn.(context, path, recursive) do
      Logger.warn("Failed to process #{path}. Error: #{msg}")
      {:error, msg}
    end
  end

  def update_map(context, val, recursive, process_ref_fn) when is_map(val) do
    # Rebuild the map key by key, halting on the first resolution error.
    Enum.reduce_while(val, %{}, fn {key, value}, acc ->
      case resolve(context, value, recursive, process_ref_fn) do
        {:error, msg} -> {:halt, {:error, msg}}
        resolved -> {:cont, Map.put(acc, key, resolved)}
      end
    end)
  end

  # Non-map values (strings, numbers, ...) pass through unchanged.
  def update_map(_context, val, _recursive, _process_ref_fn), do: val

  # Resolves one nested value: lists resolve element-wise, maps (including
  # "$ref" maps) recurse through update_map/4, scalars pass through.
  defp resolve(context, value, recursive, process_ref_fn) do
    cond do
      is_list(value) ->
        Enum.map(value, &update_map(context, &1, recursive, process_ref_fn))

      is_map(value) ->
        update_map(context, value, recursive, process_ref_fn)

      true ->
        value
    end
  end
end
| 25.653061
| 92
| 0.558473
|
0390e80494e5fc520f1ca58800fe18e64c576552
| 1,408
|
ex
|
Elixir
|
lib/mix/tasks/test.ex
|
KingOfRostov/exavier
|
fe8020361db6295d9ec92cc71fb5170937c22833
|
[
"MIT"
] | null | null | null |
lib/mix/tasks/test.ex
|
KingOfRostov/exavier
|
fe8020361db6295d9ec92cc71fb5170937c22833
|
[
"MIT"
] | null | null | null |
lib/mix/tasks/test.ex
|
KingOfRostov/exavier
|
fe8020361db6295d9ec92cc71fb5170937c22833
|
[
"MIT"
] | null | null | null |
defmodule Mix.Tasks.Exavier.Test do
  use Mix.Task

  @shortdoc "Runs mutation testing"

  # Entry point for `mix exavier.test`: verifies the environment, boots the
  # app and ExUnit, runs the mutation suite, then exits with the reporter's
  # status code.
  def run(_args) do
    ensure_test_env!()

    Mix.shell().print_app
    Mix.Task.run("app.start", [])

    load_ex_unit()
    configure_ex_unit()
    require_test_helper()

    # Mutated modules are recompiled repeatedly; silence redefinition noise.
    Code.compiler_options(ignore_module_conflict: true)

    {:ok, reporter} = Exavier.Reporter.start_link(name: :exavier_reporter)
    {:ok, server} = Exavier.Server.start_link()

    GenServer.call(server, :xmen, Exavier.timeout(:mutate_everything))
    {:ok, exit_code} = GenServer.call(reporter, :report, Exavier.timeout(:report))

    exit({:shutdown, exit_code})
  end

  # Raise unless we are running under MIX_ENV or the :test environment.
  defp ensure_test_env! do
    if !(System.get_env("MIX_ENV") || Mix.env() == :test) do
      Mix.raise(
        "\"mix test\" is running in the \"#{Mix.env()}\" environment. If you are " <>
          "running tests alongside another task, please set MIX_ENV explicitly"
      )
    end
  end

  # Ensure the :ex_unit application code is loaded (idempotent).
  defp load_ex_unit do
    case Application.load(:ex_unit) do
      :ok -> :ok
      {:error, {:already_loaded, :ex_unit}} -> :ok
    end
  end

  # Layer app config over ExUnit defaults and install our formatter;
  # autorun is disabled because the server drives the runs itself.
  defp configure_ex_unit do
    ExUnit.configuration()
    |> Keyword.merge(Application.get_all_env(:ex_unit))
    |> Keyword.merge(formatters: [Exavier.Formatter], autorun: false)
    |> ExUnit.configure()
  end

  # Load test/test_helper.exs, raising a helpful error when it is missing.
  defp require_test_helper do
    file = Path.join("test", "test_helper.exs")

    if File.exists?(file) do
      Code.require_file(file)
    else
      Mix.raise("Cannot run tests because test helper file #{inspect(file)} does not exist")
    end
  end
end
| 29.333333
| 92
| 0.658381
|
0390fb0618acc085c290cb2b3863014d47f229e2
| 289
|
ex
|
Elixir
|
lib/surface_bulma/navbar/brand.ex
|
justin-m-morgan/surface_bulma
|
c31faebc818c39d06250574b913096504bd6eeec
|
[
"MIT"
] | null | null | null |
lib/surface_bulma/navbar/brand.ex
|
justin-m-morgan/surface_bulma
|
c31faebc818c39d06250574b913096504bd6eeec
|
[
"MIT"
] | null | null | null |
lib/surface_bulma/navbar/brand.ex
|
justin-m-morgan/surface_bulma
|
c31faebc818c39d06250574b913096504bd6eeec
|
[
"MIT"
] | null | null | null |
defmodule SurfaceBulma.Navbar.Brand do
  @moduledoc """
  Navbar "brand" slotable component: renders its default slot inside an
  anchor carrying Bulma's `navbar-item` class, linked to `path`.
  """
  # Registers this component as the "brand" slot of the parent navbar.
  use Surface.Component, slot: "brand"
  # Destination URL for the brand link.
  prop path, :string
  @doc "The brand image or text you want displayed."
  slot default
  def render(assigns) do
    ~F"""
    <a class="navbar-item" href={@path}>
      <#slot />
    </a>
    """
  end
end
| 17
| 52
| 0.619377
|
0390fe22b68f7cecf2113a26baed3a9884836a4a
| 178
|
exs
|
Elixir
|
chapter07/my_list.exs
|
asux/programming_elixir
|
69d27404f5e929541518269a383d571184faebe8
|
[
"MIT"
] | null | null | null |
chapter07/my_list.exs
|
asux/programming_elixir
|
69d27404f5e929541518269a383d571184faebe8
|
[
"MIT"
] | null | null | null |
chapter07/my_list.exs
|
asux/programming_elixir
|
69d27404f5e929541518269a383d571184faebe8
|
[
"MIT"
] | null | null | null |
defmodule MyList do
  @doc """
  Builds the list of consecutive integers from `from` to `to`, inclusive.
  Counts downward when `from > to`, e.g. `span(3, 1) == [3, 2, 1]`.
  """
  def span(to, to), do: [to]

  def span(from, to) when from > to do
    [from | span(from - 1, to)]
  end

  def span(from, to) do
    [from | span(from + 1, to)]
  end
end
| 29.666667
| 66
| 0.623596
|
039105c45c40acbdcdd92a29c1d0f73ccfe08bbe
| 1,549
|
exs
|
Elixir
|
test_apps/dockers_phoenix/mix.exs
|
emedia-project/xrel
|
56d69ad8fbd9da5be043a6a094ca3ef0a2d1db83
|
[
"Unlicense"
] | 5
|
2015-12-14T16:02:09.000Z
|
2016-11-29T12:50:50.000Z
|
test_apps/dockers_phoenix/mix.exs
|
G-Corp/jorel
|
56d69ad8fbd9da5be043a6a094ca3ef0a2d1db83
|
[
"Unlicense"
] | 3
|
2016-02-12T14:36:50.000Z
|
2016-08-29T08:17:55.000Z
|
test_apps/dockers_phoenix/mix.exs
|
G-Corp/jorel
|
56d69ad8fbd9da5be043a6a094ca3ef0a2d1db83
|
[
"Unlicense"
] | 2
|
2016-01-02T15:19:53.000Z
|
2016-10-21T15:32:11.000Z
|
defmodule DockersPhoenix.Mixfile do
  use Mix.Project

  # Mix project definition for the dockers_phoenix test application.
  def project do
    [app: :dockers_phoenix,
     version: "0.0.1",
     elixir: "~> 1.0",
     elixirc_paths: elixirc_paths(Mix.env),
     compilers: [:phoenix, :gettext] ++ Mix.compilers,
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     # Parentheses make these zero-arity local calls explicit; the bare
     # `aliases`/`deps` forms are ambiguous with variables and produce
     # compiler warnings on Elixir >= 1.4.
     aliases: aliases(),
     deps: deps()]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [mod: {DockersPhoenix, []},
     applications: [:phoenix, :phoenix_html, :cowboy, :logger, :gettext,
                    :phoenix_ecto, :postgrex]]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
  defp elixirc_paths(_), do: ["lib", "web"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [{:phoenix, "~> 1.1.1"},
     {:phoenix_ecto, "~> 2.0"},
     {:postgrex, ">= 0.0.0"},
     {:phoenix_html, "~> 2.3"},
     {:phoenix_live_reload, "~> 1.0", only: :dev},
     {:gettext, "~> 0.9"},
     {:cowboy, "~> 1.0"}]
  end

  # Aliases are shortcut or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    ["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
     "ecto.reset": ["ecto.drop", "ecto.setup"]]
  end
end
| 29.226415
| 78
| 0.611362
|
03911cd92673658ccffe3e9d90c9085464346d99
| 397
|
exs
|
Elixir
|
priv/repo/migrations/20181023160033_create_repos_viewers.exs
|
coderplanets/coderplanets_server
|
3663e56340d6d050e974c91f7e499d8424fc25e9
|
[
"Apache-2.0"
] | 240
|
2018-11-06T09:36:54.000Z
|
2022-02-20T07:12:36.000Z
|
priv/repo/migrations/20181023160033_create_repos_viewers.exs
|
coderplanets/coderplanets_server
|
3663e56340d6d050e974c91f7e499d8424fc25e9
|
[
"Apache-2.0"
] | 363
|
2018-07-11T03:38:14.000Z
|
2021-12-14T01:42:40.000Z
|
priv/repo/migrations/20181023160033_create_repos_viewers.exs
|
mydearxym/mastani_server
|
f24034a4a5449200165cf4a547964a0961793eab
|
[
"Apache-2.0"
] | 22
|
2019-01-27T11:47:56.000Z
|
2021-02-28T13:17:52.000Z
|
defmodule GroupherServer.Repo.Migrations.CreateReposViewers do
  use Ecto.Migration
  # Join table recording which users have viewed which repos; the unique
  # index below enforces at most one row per (repo, user) pair.
  def change do
    create table(:repos_viewers) do
      # Rows vanish automatically when the referenced repo or user is deleted.
      add(:repo_id, references(:cms_repos, on_delete: :delete_all), null: false)
      add(:user_id, references(:users, on_delete: :delete_all), null: false)
      timestamps()
    end
    create(unique_index(:repos_viewers, [:repo_id, :user_id]))
  end
end
| 26.466667
| 80
| 0.715365
|
0391219541c8b38f97395d32886911c66d867afa
| 1,440
|
ex
|
Elixir
|
clients/cloud_kms/lib/google_api/cloud_kms/v1/model/asymmetric_sign_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_kms/lib/google_api/cloud_kms/v1/model/asymmetric_sign_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_kms/lib/google_api/cloud_kms/v1/model/asymmetric_sign_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudKMS.V1.Model.AsymmetricSignResponse do
  @moduledoc """
  Response message for KeyManagementService.AsymmetricSign.
  ## Attributes
  *   `signature` (*type:* `String.t`, *default:* `nil`) - The created signature.
  """
  # Generated-model plumbing: provides the field/1 macro and the decode/2
  # used by the Poison protocol implementations below.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :signature => String.t()
        }
  field(:signature)
end
defimpl Poison.Decoder, for: GoogleApi.CloudKMS.V1.Model.AsymmetricSignResponse do
  alias GoogleApi.CloudKMS.V1.Model.AsymmetricSignResponse

  # Delegate decoding to the model's generated decode/2.
  def decode(value, options), do: AsymmetricSignResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudKMS.V1.Model.AsymmetricSignResponse do
  # All generated models share ModelBase's generic struct-to-JSON encoding.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 30.638298
| 82
| 0.745139
|
0391296e7829a94ecfb1a1a6f5c69d2efab6d6d8
| 1,396
|
ex
|
Elixir
|
lib/fen_gen_web/telemetry.ex
|
imsekun/fengen
|
d24e6583c9aa662a27f8d884b261ec24d4a85807
|
[
"MIT"
] | 2
|
2021-05-28T19:33:52.000Z
|
2021-05-28T21:10:19.000Z
|
lib/fen_gen_web/telemetry.ex
|
hsek/fen_gen
|
d24e6583c9aa662a27f8d884b261ec24d4a85807
|
[
"MIT"
] | 20
|
2021-05-12T15:26:50.000Z
|
2021-05-29T14:42:47.000Z
|
lib/fen_gen_web/telemetry.ex
|
imsekun/fengen
|
d24e6583c9aa662a27f8d884b261ec24d4a85807
|
[
"MIT"
] | null | null | null |
defmodule FenGenWeb.Telemetry do
  # Supervises the telemetry poller and declares the metrics this app
  # exposes (Phoenix request timings plus BEAM VM gauges).
  use Supervisor
  import Telemetry.Metrics
  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end
  @impl true
  def init(_arg) do
    children = [
      # Telemetry poller will execute the given period measurements
      # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
      # Add reporters as children of your supervision tree.
      # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
    ]
    Supervisor.init(children, strategy: :one_for_one)
  end
  # The metric definitions consumed by reporters (e.g. LiveDashboard).
  def metrics do
    [
      # Phoenix Metrics
      summary("phoenix.endpoint.stop.duration",
        unit: {:native, :millisecond}
      ),
      summary("phoenix.router_dispatch.stop.duration",
        tags: [:route],
        unit: {:native, :millisecond}
      ),
      # VM Metrics
      summary("vm.memory.total", unit: {:byte, :kilobyte}),
      summary("vm.total_run_queue_lengths.total"),
      summary("vm.total_run_queue_lengths.cpu"),
      summary("vm.total_run_queue_lengths.io")
    ]
  end
  # Custom periodic measurements executed by the poller child above.
  defp periodic_measurements do
    [
      # A module, function and arguments to be invoked periodically.
      # This function must call :telemetry.execute/3 and a metric must be added above.
      # {FenGenWeb, :count_users, []}
    ]
  end
end
| 28.489796
| 86
| 0.669771
|
039149e59d62892a99fb9ad946c4c7a17cbb5d65
| 433
|
exs
|
Elixir
|
test/texas_holdem_web/views/error_view_test.exs
|
thelightcosine/texas_holdem-phoenix
|
31da9754082015b7562639bf2d5b8ac5212f11d0
|
[
"BSD-3-Clause"
] | null | null | null |
test/texas_holdem_web/views/error_view_test.exs
|
thelightcosine/texas_holdem-phoenix
|
31da9754082015b7562639bf2d5b8ac5212f11d0
|
[
"BSD-3-Clause"
] | null | null | null |
test/texas_holdem_web/views/error_view_test.exs
|
thelightcosine/texas_holdem-phoenix
|
31da9754082015b7562639bf2d5b8ac5212f11d0
|
[
"BSD-3-Clause"
] | null | null | null |
defmodule TexasHoldemWeb.ErrorViewTest do
  # Verifies the plain-text bodies rendered for the standard error pages.
  use TexasHoldemWeb.ConnCase, async: true
  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View
  test "renders 404.html" do
    assert render_to_string(TexasHoldemWeb.ErrorView, "404.html", []) == "Not Found"
  end
  test "renders 500.html" do
    assert render_to_string(TexasHoldemWeb.ErrorView, "500.html", []) == "Internal Server Error"
  end
end
| 28.866667
| 96
| 0.743649
|
03916f43b847fd0711b67876c3b6becf2f0d3728
| 7,799
|
ex
|
Elixir
|
apps/ex_wire/lib/ex_wire/kademlia/node.ex
|
atoulme/mana
|
cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
apps/ex_wire/lib/ex_wire/kademlia/node.ex
|
atoulme/mana
|
cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
apps/ex_wire/lib/ex_wire/kademlia/node.ex
|
atoulme/mana
|
cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
defmodule ExWire.Kademlia.Node do
  @moduledoc """
  Represents a node in Kademlia algorithm; an entity on the network.
  """
  alias ExWire.{Crypto, Message}
  alias ExWire.Handler.Params
  alias ExWire.Kademlia.XorDistance
  alias ExWire.Struct.Endpoint
  defstruct [
    :public_key,
    :key,
    :endpoint
  ]
  @type t :: %__MODULE__{
          public_key: binary(),
          key: binary(),
          endpoint: Endpoint.t()
        }
  @doc """
  Constructs a new node.
  ## Examples
      iex> endpoint = ExWire.Struct.Endpoint.decode([<<1,2,3,4>>, <<>>, <<5>>])
      iex> ExWire.Kademlia.Node.new(<<4, 108, 224, 89, 48, 199, 42, 188, 99, 44, 88, 226, 228, 50, 79,
      ...>   124, 126, 164, 120, 206, 192, 237, 79, 162, 82, 137, 130, 207, 52, 72, 48,
      ...>   148, 233, 203, 201, 33, 110, 122, 163, 73, 105, 18, 66, 87, 109, 85, 42, 42,
      ...>   86, 170, 234, 228, 38, 197, 48, 61, 237, 103, 124, 228, 85, 186, 26, 205, 157>>,
      ...>   endpoint)
      %ExWire.Kademlia.Node{
        endpoint: %ExWire.Struct.Endpoint{
          ip: [1, 2, 3, 4],
          tcp_port: 5,
          udp_port: nil
        },
        key: <<115, 3, 97, 5, 230, 214, 202, 188, 202, 118, 204, 177, 15, 72, 13, 68,
          134, 100, 145, 57, 13, 239, 13, 175, 42, 38, 147, 127, 31, 18, 27, 226>>,
        public_key: <<4, 108, 224, 89, 48, 199, 42, 188, 99, 44, 88, 226, 228, 50, 79,
          124, 126, 164, 120, 206, 192, 237, 79, 162, 82, 137, 130, 207, 52, 72, 48,
          148, 233, 203, 201, 33, 110, 122, 163, 73, 105, 18, 66, 87, 109, 85, 42, 42,
          86, 170, 234, 228, 38, 197, 48, 61, 237, 103, 124, 228, 85, 186, 26, 205,
          157>>
      }
      iex> address = "enode://20c9ad97c081d63397d7b685a412227a40e23c8bdc6688c6f37e97cfbc22d2b4d1db1510d8f61e6a8866ad7f0e17c02b14182d37ea7c3c8b9c2683aeb6b733a1@52.169.14.227:30303"
      iex> ExWire.Kademlia.Node.new(address)
      %ExWire.Kademlia.Node{
        endpoint: %ExWire.Struct.Endpoint{
          ip: [52, 169, 14, 227],
          tcp_port: nil,
          udp_port: 30303
        },
        key: <<202, 107, 222, 100, 235, 37, 246, 148, 81, 241, 131, 186, 231, 136, 53,
          244, 150, 181, 223, 94, 85, 8, 248, 17, 242, 130, 233, 242, 131, 19, 153,
          173>>,
        public_key: <<32, 201, 173, 151, 192, 129, 214, 51, 151, 215, 182, 133, 164,
          18, 34, 122, 64, 226, 60, 139, 220, 102, 136, 198, 243, 126, 151, 207, 188,
          34, 210, 180, 209, 219, 21, 16, 216, 246, 30, 106, 136, 102, 173, 127, 14,
          23, 192, 43, 20, 24, 45, 55, 234, 124, 60, 139, 156, 38, 131, 174, 182, 183,
          51, 161>>
      }
  """
  @spec new(binary(), Endpoint) :: t()
  def new(public_key, endpoint = %Endpoint{}) do
    # The Kademlia key is the hash of the node's public key.
    key = Crypto.hash(public_key)
    %__MODULE__{
      public_key: public_key,
      key: key,
      endpoint: endpoint
    }
  end
  @spec new(binary()) :: t()
  def new(enode_address) when is_binary(enode_address) do
    # enode://<hex node id>@<host>:<port> maps onto URI userinfo/host/port.
    %URI{
      scheme: _scheme,
      userinfo: remote_id,
      host: remote_host,
      port: remote_peer_port
    } = URI.parse(enode_address)
    # NOTE(review): if :inet.ip/1 cannot parse the host, the {:error, _}
    # tuple falls through the `with` and becomes the endpoint's ip field —
    # confirm callers only ever pass literal IP hosts.
    remote_ip =
      with {:ok, remote_ip} <- :inet.ip(remote_host |> String.to_charlist()) do
        remote_ip |> Tuple.to_list()
      end
    endpoint = %Endpoint{
      ip: remote_ip,
      udp_port: remote_peer_port
    }
    public_key = Crypto.hex_to_bin(remote_id)
    new(public_key, endpoint)
  end
  @doc """
  Creates a new Node struct form ExWire.Handler.Params
  ## Examples
      iex> params = %ExWire.Handler.Params{
      ...>   remote_host: %ExWire.Struct.Endpoint{ip: [1,2,3,4], udp_port: 55},
      ...>   signature: <<193, 30, 149, 122, 226, 192, 230, 158, 118, 204, 173, 80, 63,
      ...>     232, 67, 152, 216, 249, 89, 52, 162, 92, 233, 201, 177, 108, 63, 120, 152,
      ...>     134, 149, 220, 73, 198, 29, 93, 218, 123, 50, 70, 8, 202, 17, 171, 67, 245,
      ...>     70, 235, 163, 158, 201, 246, 223, 114, 168, 7, 7, 95, 9, 53, 165, 8, 177,
      ...>     13>>,
      ...>   recovery_id: 1,
      ...>   hash: <<5>>,
      ...>   data: [1, [<<1,2,3,4>>, <<>>, <<5>>], [<<5,6,7,8>>, <<6>>, <<>>], 4] |> ExRLP.encode(),
      ...>   timestamp: 123,
      ...>   type: 2
      ...> }
      iex> ExWire.Kademlia.Node.from_handler_params(params)
      %ExWire.Kademlia.Node{
        endpoint: %ExWire.Struct.Endpoint{
          ip: [1, 2, 3, 4],
          tcp_port: nil,
          udp_port: 55
        },
        key: <<82, 25, 231, 209, 101, 209, 232, 115, 33, 237, 181, 81, 181, 2, 202,
          77, 181, 78, 159, 231, 221, 144, 198, 11, 123, 132, 136, 183, 135, 31, 207,
          141>>,
        public_key: <<153, 149, 149, 167, 201, 115, 154, 11, 141, 233, 49, 71, 229,
          202, 25, 84, 59, 111, 153, 217, 57, 132, 148, 55, 195, 58, 42, 211, 227,
          178, 122, 26, 23, 85, 51, 240, 231, 4, 255, 112, 141, 5, 6, 222, 217, 181,
          49, 46, 46, 23, 149, 27, 253, 38, 20, 167, 95, 161, 175, 72, 195, 134, 234,
          158>>
      }
  """
  @spec from_handler_params(Params.t()) :: t()
  def from_handler_params(params) do
    # The sender's public key is recovered from the message signature over
    # the type byte plus payload, then converted to a node id.
    public_key =
      (<<params.type>> <> params.data)
      |> Message.recover_public_key(params.signature, params.recovery_id)
      |> Crypto.node_id_from_public_key()
    new(public_key, params.remote_host)
  end
  @doc """
  Calculates distance between two nodes.
  ## Examples
      iex> node1 = ExWire.Kademlia.Node.new(<<4, 108, 224, 89, 48, 199, 42, 188, 99, 44, 88, 226, 228, 50, 79,
      ...>   124, 126, 164, 120, 206, 192, 237, 79, 162, 82, 137, 130, 207, 52, 72, 48,
      ...>   148, 233, 203, 201, 33, 110, 122, 163, 73, 105, 18, 66, 87, 109, 85, 42, 42,
      ...>   86, 170, 234, 228, 38, 197, 48, 61, 237, 103, 124, 228, 85, 186, 26, 205,
      ...>   157>>, ExWire.Struct.Endpoint.decode([<<1,2,3,4>>, <<>>, <<5>>]))
      iex> node2 = ExWire.Kademlia.Node.new(<<4, 48, 183, 171, 48, 160, 28, 18, 74, 108, 206, 202, 54, 134,
      ...>   62, 206, 18, 196, 245, 250, 104, 227, 186, 155, 11, 81, 64, 124, 204, 0,
      ...>   46, 238, 211, 179, 16, 45, 32, 168, 143, 28, 29, 60, 49, 84, 226, 68, 147,
      ...>   23, 184, 239, 149, 9, 14, 119, 179, 18, 213, 204, 57, 53, 79, 134, 213,
      ...>   214, 6>>, ExWire.Struct.Endpoint.decode([<<5, 6, 7, 8>>, <<>>, <<5>>]))
      iex> ExWire.Kademlia.Node.distance(node1, node2)
      131
  """
  @spec distance(t(), t()) :: integer()
  def distance(%__MODULE__{key: key1}, %__MODULE__{key: key2}) do
    XorDistance.distance(key1, key2)
  end
  @doc """
  Calculates common id prefix between two peers.
  ## Examples
      iex> node1 = ExWire.Kademlia.Node.new(<<4, 108, 224, 89, 48, 199, 42, 188, 99, 44, 88, 226, 228, 50, 79,
      ...>   124, 126, 164, 120, 206, 192, 237, 79, 162, 82, 137, 130, 207, 52, 72, 48,
      ...>   148, 233, 203, 201, 33, 110, 122, 163, 73, 105, 18, 66, 87, 109, 85, 42, 42,
      ...>   86, 170, 234, 228, 38, 197, 48, 61, 237, 103, 124, 228, 85, 186, 26, 205,
      ...>   157>>, ExWire.Struct.Endpoint.decode([<<1,2,3,4>>, <<>>, <<5>>]))
      iex> node2 = ExWire.Kademlia.Node.new(<<4, 48, 183, 171, 48, 160, 28, 18, 74, 108, 206, 202, 54, 134,
      ...>   62, 206, 18, 196, 245, 250, 104, 227, 186, 155, 11, 81, 64, 124, 204, 0,
      ...>   46, 238, 211, 179, 16, 45, 32, 168, 143, 28, 29, 60, 49, 84, 226, 68, 147,
      ...>   23, 184, 239, 149, 9, 14, 119, 179, 18, 213, 204, 57, 53, 79, 134, 213,
      ...>   214, 6>>, ExWire.Struct.Endpoint.decode([<<5, 6, 7, 8>>, <<>>, <<5>>]))
      iex> ExWire.Kademlia.Node.common_prefix(node1, node2)
      0
  """
  @spec common_prefix(t(), t()) :: integer()
  def common_prefix(%__MODULE__{key: key1}, %__MODULE__{key: key2}) do
    XorDistance.common_prefix(key1, key2)
  end
end
| 40.409326
| 179
| 0.531222
|
0391753a79f34a9e1594d9d324aeedc6b8500873
| 677
|
ex
|
Elixir
|
apps/bus_detective_web/lib/bus_detective_web/views/stop_view.ex
|
bus-detective/bus_detective_ng
|
ef54684d4f640384bd20a4d5550ff51ab440190b
|
[
"MIT"
] | 8
|
2018-07-06T14:44:10.000Z
|
2021-08-19T17:24:25.000Z
|
apps/bus_detective_web/lib/bus_detective_web/views/stop_view.ex
|
bus-detective/bus_detective_ng
|
ef54684d4f640384bd20a4d5550ff51ab440190b
|
[
"MIT"
] | 12
|
2018-07-15T18:43:04.000Z
|
2022-02-10T16:07:47.000Z
|
apps/bus_detective_web/lib/bus_detective_web/views/stop_view.ex
|
bus-detective/bus_detective_ng
|
ef54684d4f640384bd20a4d5550ff51ab440190b
|
[
"MIT"
] | 1
|
2018-07-13T17:30:20.000Z
|
2018-07-13T17:30:20.000Z
|
defmodule BusDetectiveWeb.StopView do
  use BusDetectiveWeb, :view
  alias BusDetective.GTFS.Stop
  alias Geo.Point
  # With no departures there are no shapes to draw.
  def map_shapes(nil), do: "[]"
  # Builds a JS-style nested array literal ("[[a, b], [a, b], ...], ...")
  # of each departure's trip-shape coordinates for embedding in a map view.
  def map_shapes(departures) do
    departures
    |> Enum.map(fn departure ->
      coords =
        departure.trip.shape.geometry.coordinates
        # NOTE(review): latitude/1 and longitude/1 below read latitude from
        # the SECOND tuple element, but this binding names the first element
        # `lat` — confirm which coordinate order the map consumer expects.
        |> Enum.map(fn {lat, lng} -> "[#{lat}, #{lng}]" end)
        |> Enum.join(", ")
      "[" <> coords <> "]"
    end)
    |> Enum.join(", ")
  end
  # These clauses treat the coordinate tuple as {longitude, latitude}:
  # latitude comes from the second element, longitude from the first.
  def latitude(%Stop{location: %Point{coordinates: {_, latitude}}}), do: latitude
  def latitude(_), do: nil
  def longitude(%Stop{location: %Point{coordinates: {longitude, _}}}), do: longitude
  def longitude(_), do: nil
end
| 24.178571
| 84
| 0.615953
|
0391a46943a52d0e01b6e22c39aec16a4352a74e
| 1,639
|
exs
|
Elixir
|
test/still/compiler/view_helpers/safe_html_test.exs
|
mrmicahcooper/still
|
ba785b0b068d998d0343f73a1fd1795edbe9831c
|
[
"0BSD"
] | 2
|
2021-02-15T07:55:38.000Z
|
2021-03-05T18:04:53.000Z
|
test/still/compiler/view_helpers/safe_html_test.exs
|
mrmicahcooper/still
|
ba785b0b068d998d0343f73a1fd1795edbe9831c
|
[
"0BSD"
] | null | null | null |
test/still/compiler/view_helpers/safe_html_test.exs
|
mrmicahcooper/still
|
ba785b0b068d998d0343f73a1fd1795edbe9831c
|
[
"0BSD"
] | null | null | null |
# Unit tests for SafeHTML.render/1: every supported Elixir term must render
# to an HTML-escaped (or otherwise stringified) binary.
defmodule Still.Compiler.ViewHelpers.SafeHTMLTest do
  use ExUnit.Case, async: true
  alias Still.Compiler.ViewHelpers.SafeHTML
  describe "render/1" do
    # nil renders to the empty string rather than "nil".
    test "renders nil" do
      assert "" == SafeHTML.render(nil)
    end
    test "HTML escapes atoms" do
      assert "foo" = SafeHTML.render(:foo)
      assert "&lt;h1&gt;" = SafeHTML.render(:"<h1>")
    end
    test "HTML escapes strings" do
      assert "foo" = SafeHTML.render("foo")
      assert "&lt;h1&gt;" = SafeHTML.render("<h1>")
    end
    # Lists are rendered element-by-element and joined with ", ".
    test "HTML escapes lists" do
      list = [:foo, "<h1>", ~D[2021-01-01]]
      assert "foo, &lt;h1&gt;, 2021-01-01" = SafeHTML.render(list)
    end
    test "converts integers to strings" do
      assert "1" = SafeHTML.render(1)
    end
    test "converts floats to strings" do
      assert "1.0" = SafeHTML.render(1.0)
    end
    test "renders dates" do
      assert "2021-01-01" = SafeHTML.render(~D[2021-01-01])
    end
    test "renders times" do
      assert "12:00:00" = SafeHTML.render(~T[12:00:00])
    end
    test "renders naive date times" do
      assert "2021-01-01 12:00:00" = SafeHTML.render(~N[2021-01-01 12:00:00])
    end
    test "renders date times" do
      dt = DateTime.from_naive!(~N[2021-01-01 12:00:00], "Etc/UTC")
      assert "2021-01-01 12:00:00Z" = SafeHTML.render(dt)
    end
    # {:safe, iodata} tuples bypass escaping entirely.
    test "renders data marked as safe" do
      assert "<h1>" = SafeHTML.render({:safe, "<h1>"})
    end
    test "renders tuples" do
      assert ~s({"1", "2"}) = SafeHTML.render({1, 2})
    end
    # Map entries are rendered as "key: value" pairs, sorted by key.
    test "renders maps" do
      assert "bar: 2, foo: 1" = SafeHTML.render(%{foo: 1, bar: 2})
    end
  end
end
| 24.833333
| 77
| 0.600366
|
0391b477553298ae37aa8c9f8fce6924c6110f6f
| 65
|
ex
|
Elixir
|
lib/epi_locator_web/views/tr_view.ex
|
RatioPBC/epi-locator
|
58c90500c4e0071ce365d76ec9812f9051d6a9f9
|
[
"Apache-2.0"
] | null | null | null |
lib/epi_locator_web/views/tr_view.ex
|
RatioPBC/epi-locator
|
58c90500c4e0071ce365d76ec9812f9051d6a9f9
|
[
"Apache-2.0"
] | 6
|
2021-10-19T01:55:57.000Z
|
2022-02-15T01:04:19.000Z
|
lib/epi_locator_web/views/tr_view.ex
|
RatioPBC/epi-locator
|
58c90500c4e0071ce365d76ec9812f9051d6a9f9
|
[
"Apache-2.0"
] | 2
|
2022-01-21T08:38:50.000Z
|
2022-01-21T08:42:04.000Z
|
# Phoenix view for TR templates. No custom helpers are defined here; all
# rendering behaviour comes from the `use EpiLocatorWeb, :view` macro.
defmodule EpiLocatorWeb.TRView do
  use EpiLocatorWeb, :view
end
| 16.25
| 33
| 0.815385
|
0391bdfdbc1fc446aa66103a4f682b13e1966e6a
| 2,831
|
exs
|
Elixir
|
test/kwtool_web/features/keyword_page/view_keyword_page_test.exs
|
byhbt/kwtool
|
8958a160066e3e4c61806202af2563541f2261e3
|
[
"MIT"
] | 5
|
2021-12-14T08:18:24.000Z
|
2022-03-29T10:02:48.000Z
|
test/kwtool_web/features/keyword_page/view_keyword_page_test.exs
|
byhbt/kwtool
|
8958a160066e3e4c61806202af2563541f2261e3
|
[
"MIT"
] | 32
|
2021-03-21T16:32:18.000Z
|
2022-03-23T08:00:37.000Z
|
test/kwtool_web/features/keyword_page/view_keyword_page_test.exs
|
byhbt/kwtool
|
8958a160066e3e4c61806202af2563541f2261e3
|
[
"MIT"
] | 1
|
2021-06-03T17:22:16.000Z
|
2021-06-03T17:22:16.000Z
|
# Browser (Wallaby-style) feature tests for the keyword listing, search and
# detail pages, including per-user visibility of keywords.
defmodule KwtoolWeb.HomePage.ViewKeywordPageTest do
  use KwtoolWeb.FeatureCase, async: true
  # The listing page must only show keywords owned by the signed-in user.
  feature "views the keyword listing page", %{session: session} do
    created_user_1 = insert(:user)
    keyword_of_user_1 = insert(:keyword, user: created_user_1)
    created_user_2 = insert(:user)
    custom_keyword_attrs = %{phrase: "test listing per user phrase", user: created_user_2}
    keyword_of_user_2 = insert(:keyword, custom_keyword_attrs)
    session
    |> login_as(created_user_1)
    |> visit(Routes.keyword_path(KwtoolWeb.Endpoint, :index))
    |> assert_has(Query.text("Listing Keywords"))
    |> assert_has(css(".search-box"))
    |> assert_has(Query.text(keyword_of_user_1.phrase))
    |> refute_has(Query.text(keyword_of_user_2.phrase))
  end
  # Searching for an existing phrase narrows the listing to matches only.
  feature "searches for the keyword which is existing", %{session: session} do
    created_user = insert(:user)
    custom_keyword_attrs = %{phrase: "test listing per user phrase", user: created_user}
    keyword_1 = insert(:keyword, custom_keyword_attrs)
    keyword_2 = insert(:keyword, user: created_user)
    session
    |> login_as(created_user)
    |> visit(Routes.keyword_path(KwtoolWeb.Endpoint, :index))
    |> fill_in(Query.css("#query"), with: keyword_1.phrase)
    |> click(Query.button("Search"))
    |> assert_has(Query.text(keyword_1.phrase))
    |> refute_has(Query.text(keyword_2.phrase))
  end
  # A query with no matches hides all keywords and shows the empty-state text.
  feature "searches for the keyword which does NOT exist", %{session: session} do
    created_user = insert(:user)
    keyword_1 = insert(:keyword, user: created_user)
    keyword_2 = insert(:keyword, user: created_user)
    session
    |> login_as(created_user)
    |> visit(Routes.keyword_path(KwtoolWeb.Endpoint, :index))
    |> fill_in(Query.css("#query"), with: "Lorem ipsum")
    |> click(Query.button("Search"))
    |> refute_has(Query.text(keyword_1.phrase))
    |> refute_has(Query.text(keyword_2.phrase))
    |> assert_has(Query.text("No keywords found."))
  end
  feature "views the keyword details page", %{session: session} do
    created_user = insert(:user)
    keyword = insert(:keyword, user: created_user)
    session
    |> login_as(created_user)
    |> visit(Routes.keyword_path(KwtoolWeb.Endpoint, :show, keyword.id))
    |> assert_has(Query.text("Result for"))
  end
  # Requesting another user's keyword must land back on the listing page
  # (asserted via the listing heading and the current user's own keyword).
  feature "redirects to the listing keywords page when given a keyword of another user", %{
    session: session
  } do
    created_user_1 = insert(:user)
    keyword_of_user_1 = insert(:keyword, user: created_user_1)
    created_user_2 = insert(:user)
    keyword_of_user_2 = insert(:keyword, user: created_user_2)
    session
    |> login_as(created_user_1)
    |> visit(Routes.keyword_path(KwtoolWeb.Endpoint, :show, keyword_of_user_2.id))
    |> assert_has(Query.text(keyword_of_user_1.phrase))
    |> assert_has(Query.text("Listing Keywords"))
  end
end
| 36.766234
| 91
| 0.707877
|
0391df23f03d603a532adb451d6f266dd224b920
| 1,803
|
exs
|
Elixir
|
apps/tai/test/tai/iex/commands/products_test.exs
|
ccamateur/tai
|
41c4b3e09dafc77987fa3f6b300c15461d981e16
|
[
"MIT"
] | 276
|
2018-01-16T06:36:06.000Z
|
2021-03-20T21:48:01.000Z
|
apps/tai/test/tai/iex/commands/products_test.exs
|
ccamateur/tai
|
41c4b3e09dafc77987fa3f6b300c15461d981e16
|
[
"MIT"
] | 78
|
2020-10-12T06:21:43.000Z
|
2022-03-28T09:02:00.000Z
|
apps/tai/test/tai/iex/commands/products_test.exs
|
yurikoval/tai
|
94254b45d22fa0307b01577ff7c629c7280c0295
|
[
"MIT"
] | 43
|
2018-06-09T09:54:51.000Z
|
2021-03-07T07:35:17.000Z
|
# Tests the `Tai.IEx.products/0` console command by capturing its stdout and
# comparing it against the exact ASCII table it should print.
# `async: false` because the mocked product store is shared global state.
defmodule Tai.IEx.Commands.ProductsTest do
  use Tai.TestSupport.DataCase, async: false
  import ExUnit.CaptureIO
  test "show products and their trade restrictions for configured exchanges" do
    # Product with both maker and taker fees — fees render as percentages.
    mock_product(
      venue_id: :test_exchange_a,
      symbol: :btc_usd,
      venue_symbol: "BTC_USD",
      status: :trading,
      type: :spot,
      maker_fee: Decimal.new("0.001"),
      taker_fee: Decimal.new("0.002")
    )
    # Product without fees — the fee columns render blank.
    mock_product(
      venue_id: :test_exchange_b,
      symbol: :eth_usd,
      venue_symbol: "ETH_USD",
      status: :trading,
      type: :spot
    )
    assert capture_io(&Tai.IEx.products/0) == """
           +-----------------+---------+--------------+---------+------+-----------+-----------+
           |           Venue |  Symbol | Venue Symbol |  Status | Type | Maker Fee | Taker Fee |
           +-----------------+---------+--------------+---------+------+-----------+-----------+
           | test_exchange_a | btc_usd |      BTC_USD | trading | spot |      0.1% |      0.2% |
           | test_exchange_b | eth_usd |      ETH_USD | trading | spot |           |           |
           +-----------------+---------+--------------+---------+------+-----------+-----------+\n
           """
  end
  # With no products mocked, the table is printed with a single placeholder row.
  test "shows an empty table when there are no products" do
    assert capture_io(&Tai.IEx.products/0) == """
           +-------+--------+--------------+--------+------+-----------+-----------+
           | Venue | Symbol | Venue Symbol | Status | Type | Maker Fee | Taker Fee |
           +-------+--------+--------------+--------+------+-----------+-----------+
           |     - |      - |            - |      - |    - |         - |         - |
           +-------+--------+--------------+--------+------+-----------+-----------+\n
           """
  end
end
| 40.977273
| 98
| 0.37604
|
0391ffb38a309fe8f87591513ac2a0f4efa3c9b2
| 1,919
|
ex
|
Elixir
|
clients/sas_portal/lib/google_api/sas_portal/v1alpha1/model/sas_portal_list_nodes_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/sas_portal/lib/google_api/sas_portal/v1alpha1/model/sas_portal_list_nodes_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/sas_portal/lib/google_api/sas_portal/v1alpha1/model/sas_portal_list_nodes_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SASPortal.V1alpha1.Model.SasPortalListNodesResponse do
  @moduledoc """
  Response for ListNodes.
  ## Attributes
  * `nextPageToken` (*type:* `String.t`, *default:* `nil`) - A pagination token returned from a previous call to ListNodes that indicates from where listing should continue. If the field is missing or empty, it means there is no more nodes.
  * `nodes` (*type:* `list(GoogleApi.SASPortal.V1alpha1.Model.SasPortalNode.t)`, *default:* `nil`) - The nodes that match the request.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :nextPageToken => String.t(),
          :nodes => list(GoogleApi.SASPortal.V1alpha1.Model.SasPortalNode.t())
        }
  field(:nextPageToken)
  # `nodes` is decoded as a list of SasPortalNode structs.
  field(:nodes, as: GoogleApi.SASPortal.V1alpha1.Model.SasPortalNode, type: :list)
end
# Delegates Poison decoding to the ModelBase-generated decode/2 on the model.
defimpl Poison.Decoder, for: GoogleApi.SASPortal.V1alpha1.Model.SasPortalListNodesResponse do
  def decode(value, options) do
    GoogleApi.SASPortal.V1alpha1.Model.SasPortalListNodesResponse.decode(value, options)
  end
end
# Encodes the struct to JSON via the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.SASPortal.V1alpha1.Model.SasPortalListNodesResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.38
| 242
| 0.748828
|
03920189f46206b93de91e1b97cdb9b582f15f91
| 5,201
|
ex
|
Elixir
|
apps/admin_api/lib/admin_api/v1/controllers/account_controller.ex
|
amadeobrands/ewallet
|
505b7822721940a7b892a9b35c225e80cc8ac0b4
|
[
"Apache-2.0"
] | 1
|
2018-12-07T06:21:21.000Z
|
2018-12-07T06:21:21.000Z
|
apps/admin_api/lib/admin_api/v1/controllers/account_controller.ex
|
amadeobrands/ewallet
|
505b7822721940a7b892a9b35c225e80cc8ac0b4
|
[
"Apache-2.0"
] | null | null | null |
apps/admin_api/lib/admin_api/v1/controllers/account_controller.ex
|
amadeobrands/ewallet
|
505b7822721940a7b892a9b35c225e80cc8ac0b4
|
[
"Apache-2.0"
] | null | null | null |
# Admin API controller for account CRUD. Every action first authorizes the
# caller through `permit/3` (Bodyguard + AccountPolicy) and renders either the
# account(s) or a normalized error via the shared ErrorHandler.
defmodule AdminAPI.V1.AccountController do
  use AdminAPI, :controller
  import AdminAPI.V1.ErrorHandler
  alias AdminAPI.V1.AccountHelper
  alias EWallet.AccountPolicy
  alias EWallet.Web.{Orchestrator, Paginator, V1.AccountOverlay}
  alias EWalletDB.Account
  @doc """
  Retrieves a list of accounts based on current account for users.
  """
  @spec all(Plug.Conn.t(), map()) :: Plug.Conn.t()
  def all(conn, attrs) do
    with :ok <- permit(:all, conn.assigns, nil),
         account_uuids <- AccountHelper.get_accessible_account_uuids(conn.assigns) do
      # Get all the accounts the current accessor has access to
      Account
      |> Account.where_in(account_uuids)
      |> Orchestrator.query(AccountOverlay, attrs)
      |> respond(conn)
    else
      error -> respond(error, conn)
    end
  end
  # Lists the given account plus all of its descendants. Unknown account ids
  # are reported as :unauthorized (not :not_found) to avoid leaking existence.
  @spec descendants_for_account(Plug.Conn.t(), map()) :: Plug.Conn.t()
  def descendants_for_account(conn, %{"id" => account_id} = attrs) do
    with %Account{} = account <- Account.get(account_id) || {:error, :unauthorized},
         :ok <- permit(:all, conn.assigns, account.id),
         descendant_uuids <- Account.get_all_descendants_uuids(account) do
      # Get all users since everyone can access them
      Account
      |> Account.where_in(descendant_uuids)
      |> Orchestrator.query(AccountOverlay, attrs)
      |> respond(conn)
    else
      error -> respond(error, conn)
    end
  end
  # Missing "id" param is a client error.
  def descendants_for_account(conn, _), do: handle_error(conn, :invalid_parameter)
  @doc """
  Retrieves a specific account by its id.
  """
  @spec get(Plug.Conn.t(), map()) :: Plug.Conn.t()
  def get(conn, %{"id" => id} = attrs) do
    with %Account{} = account <- Account.get_by(id: id) || {:error, :unauthorized},
         :ok <- permit(:get, conn.assigns, account.id),
         {:ok, account} <- Orchestrator.one(account, AccountOverlay, attrs) do
      render(conn, :account, %{account: account})
    else
      {:error, code} ->
        handle_error(conn, code)
      # NOTE(review): this nil clause looks unreachable — the `|| {:error,
      # :unauthorized}` above already maps a nil lookup to an error tuple.
      nil ->
        handle_error(conn, :account_id_not_found)
    end
  end
  @doc """
  Creates a new account.
  The requesting user must have write permission on the given parent account.
  """
  @spec create(Plug.Conn.t(), map()) :: Plug.Conn.t()
  def create(conn, attrs) do
    # Default to the master account when no parent_id is supplied.
    # NOTE(review): if "parent_id" is present but matches no account,
    # `parent` is nil and `parent.id` below raises — confirm whether this
    # should return :unauthorized instead of crashing.
    parent =
      if attrs["parent_id"] do
        Account.get_by(id: attrs["parent_id"])
      else
        Account.get_master_account()
      end
    with :ok <- permit(:create, conn.assigns, parent.id),
         attrs <- Map.put(attrs, "parent_uuid", parent.uuid),
         {:ok, account} <- Account.insert(attrs),
         {:ok, account} <- Orchestrator.one(account, AccountOverlay, attrs) do
      render(conn, :account, %{account: account})
    else
      # Changeset errors carry field-level validation details.
      {:error, %{} = changeset} ->
        handle_error(conn, :invalid_parameter, changeset)
      {:error, code} ->
        handle_error(conn, code)
    end
  end
  @doc """
  Updates the account if all required parameters are provided.
  The requesting user must have write permission on the given account.
  """
  @spec update(Plug.Conn.t(), map()) :: Plug.Conn.t()
  def update(conn, %{"id" => account_id} = attrs) do
    with %Account{} = original <- Account.get(account_id) || {:error, :unauthorized},
         :ok <- permit(:update, conn.assigns, original.id),
         {:ok, updated} <- Account.update(original, attrs),
         {:ok, updated} <- Orchestrator.one(updated, AccountOverlay, attrs) do
      render(conn, :account, %{account: updated})
    else
      {:error, %{} = changeset} ->
        handle_error(conn, :invalid_parameter, changeset)
      {:error, code} ->
        handle_error(conn, code)
    end
  end
  def update(conn, _), do: handle_error(conn, :invalid_parameter)
  @doc """
  Uploads an image as avatar for a specific account.
  """
  @spec upload_avatar(Plug.Conn.t(), map()) :: Plug.Conn.t()
  def upload_avatar(conn, %{"id" => id, "avatar" => _} = attrs) do
    with %Account{} = account <- Account.get(id) || {:error, :unauthorized},
         :ok <- permit(:update, conn.assigns, account.id),
         %{} = saved <- Account.store_avatar(account, attrs),
         {:ok, saved} <- Orchestrator.one(saved, AccountOverlay, attrs) do
      render(conn, :account, %{account: saved})
    else
      nil ->
        handle_error(conn, :invalid_parameter)
      # store_avatar may return a bare changeset or an {:error, changeset}
      # tuple; both are treated as validation failures.
      changeset when is_map(changeset) ->
        handle_error(conn, :invalid_parameter, changeset)
      {:error, changeset} when is_map(changeset) ->
        handle_error(conn, :invalid_parameter, changeset)
      {:error, code} ->
        handle_error(conn, code)
    end
  end
  def upload_avatar(conn, _), do: handle_error(conn, :invalid_parameter)
  # Render a paginated list of accounts.
  defp respond(%Paginator{} = paginator, conn) do
    render(conn, :accounts, %{accounts: paginator})
  end
  defp respond({:error, code}, conn) do
    handle_error(conn, code)
  end
  defp respond({:error, code, description}, conn) do
    handle_error(conn, code, description)
  end
  # Authorization check delegated to EWallet.AccountPolicy via Bodyguard.
  @spec permit(:all | :create | :get | :update, map(), String.t() | nil) ::
          :ok | {:error, any()} | no_return()
  defp permit(action, params, account_id) do
    Bodyguard.permit(AccountPolicy, action, params, account_id)
  end
end
| 33.127389
| 85
| 0.637377
|
0392111b999a479468e74ac6cf1e74816f1eda22
| 177
|
exs
|
Elixir
|
priv/templates/brando.upgrade/migrations/brando_09_add_wrapper_to_fragments.exs
|
brandocms/brando
|
4198e0c0920031bd909969055064e4e2b7230d21
|
[
"MIT"
] | 4
|
2020-10-30T08:40:38.000Z
|
2022-01-07T22:21:37.000Z
|
priv/templates/brando.upgrade/migrations/brando_09_add_wrapper_to_fragments.exs
|
brandocms/brando
|
4198e0c0920031bd909969055064e4e2b7230d21
|
[
"MIT"
] | 1,162
|
2020-07-05T11:20:15.000Z
|
2022-03-31T06:01:49.000Z
|
priv/templates/brando.upgrade/migrations/brando_09_add_wrapper_to_fragments.exs
|
brandocms/brando
|
4198e0c0920031bd909969055064e4e2b7230d21
|
[
"MIT"
] | null | null | null |
# Migration: adds a nullable `wrapper` text column to the pages_fragments
# table. Reversible — Ecto infers the down step (drop the column) from `add`.
defmodule Brando.Repo.Migrations.AddWrapperToFragments do
  use Ecto.Migration
  def change do
    alter table(:pages_fragments) do
      add :wrapper, :text
    end
  end
end
| 17.7
| 57
| 0.728814
|
03926f3eb31fa467765fe6034a20508abcdee3fc
| 144
|
exs
|
Elixir
|
test/lit/views/filter_view_test.exs
|
muhammedsekerci/lit
|
b58c6135134688ed1ed16f7b2e0f9a6396b2be67
|
[
"MIT"
] | 14
|
2020-12-22T08:26:15.000Z
|
2022-01-11T16:08:14.000Z
|
test/lit/views/filter_view_test.exs
|
muhammedsekerci/lit
|
b58c6135134688ed1ed16f7b2e0f9a6396b2be67
|
[
"MIT"
] | 2
|
2021-04-06T19:22:55.000Z
|
2021-05-31T14:02:40.000Z
|
test/lit/views/filter_view_test.exs
|
muhammedsekerci/lit
|
b58c6135134688ed1ed16f7b2e0f9a6396b2be67
|
[
"MIT"
] | 7
|
2021-02-03T23:44:12.000Z
|
2021-12-27T05:09:40.000Z
|
# Runs the doctests embedded in Lit.FilterView as this module's test suite.
defmodule Lit.FilterViewTest do
  use ExUnit.Case
  # Imported for use inside the doctest examples, which execute in this
  # module's context — presumably via safe_to_string/1 calls; verify before
  # removing.
  import Phoenix.HTML, only: [safe_to_string: 1]
  doctest Lit.FilterView, import: true
end
| 18
| 48
| 0.763889
|
03927edf77d0d92e180e55e3220570a08a566c50
| 2,375
|
exs
|
Elixir
|
mix.exs
|
brianberlin/ecto_diff
|
933d4e27d300ff2b01a95233073e972770da8aa7
|
[
"MIT"
] | null | null | null |
mix.exs
|
brianberlin/ecto_diff
|
933d4e27d300ff2b01a95233073e972770da8aa7
|
[
"MIT"
] | null | null | null |
mix.exs
|
brianberlin/ecto_diff
|
933d4e27d300ff2b01a95233073e972770da8aa7
|
[
"MIT"
] | null | null | null |
defmodule EctoDiff.MixProject do
  use Mix.Project

  @version "0.3.0"
  @source_url "https://github.com/peek-travel/ecto_diff"

  # Coverage-related mix tasks that must run in the :test environment.
  @coverage_tasks [
    :coveralls,
    :"coveralls.detail",
    :"coveralls.post",
    :"coveralls.html",
    :"coveralls.json"
  ]

  # Top-level project definition consumed by Mix.
  def project do
    [
      app: :ecto_diff,
      version: @version,
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: preferred_cli_env(),
      dialyzer: dialyzer(),
      docs: docs(),
      description: description(),
      package: package(),
      aliases: aliases()
    ]
  end

  # No application callback module; nothing extra to start.
  def application, do: [extra_applications: []]

  # Test support files are only compiled in the :test environment.
  defp elixirc_paths(env) do
    case env do
      :test -> ["lib", "test/support"]
      _other -> ["lib"]
    end
  end

  # Pin each coverage task to MIX_ENV=test.
  defp preferred_cli_env do
    for task <- @coverage_tasks, do: {task, :test}
  end

  defp dialyzer do
    [
      plt_apps: [:compiler, :ecto, :elixir, :kernel, :stdlib],
      plt_file: {:no_warn, "plts/ecto_diff.plt"},
      flags: [:error_handling, :underspecs]
    ]
  end

  # ExDoc configuration.
  defp docs do
    [
      main: "EctoDiff",
      source_ref: @version,
      source_url: @source_url,
      extras: ["README.md", "CHANGELOG.md", "LICENSE.md"]
    ]
  end

  defp description do
    """
    Generates a data structure describing the difference between two ecto structs
    """
  end

  # Hex package metadata.
  defp package do
    [
      files: ["lib", ".formatter.exs", "mix.exs", "README.md", "LICENSE.md", "CHANGELOG.md"],
      maintainers: ["Peek Travel <noreply@peek.com>"],
      licenses: ["MIT"],
      links: %{
        "GitHub" => @source_url,
        "Readme" => "#{@source_url}/blob/#{@version}/README.md",
        "Changelog" => "#{@source_url}/blob/#{@version}/CHANGELOG.md"
      }
    ]
  end

  defp deps do
    [
      {:credo, "~> 1.0", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.0.0-rc.5", only: [:dev, :test], runtime: false},
      {:ecto_sql, "~> 3.0", only: [:dev, :test]},
      {:ecto, "~> 3.0"},
      {:ex_doc, "~> 0.18", only: :dev, runtime: false},
      {:excoveralls, "~> 0.10", only: :test},
      {:jason, ">= 1.0.0", only: [:dev, :test]},
      {:postgrex, ">= 0.0.0", only: [:dev, :test]}
    ]
  end

  # `mix test` prepares the database first.
  defp aliases do
    [
      test: ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 23.989899
| 93
| 0.552842
|
03929230c1e0711914bd8c1589191f17f2db14eb
| 2,129
|
ex
|
Elixir
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/language.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/language.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/language.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V33.Model.Language do
@moduledoc """
Contains information about a language that can be targeted by ads.
## Attributes
* `id` (*type:* `String.t`, *default:* `nil`) - Language ID of this language. This is the ID used for targeting and generating reports.
* `kind` (*type:* `String.t`, *default:* `dfareporting#language`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#language".
* `languageCode` (*type:* `String.t`, *default:* `nil`) - Format of language code is an ISO 639 two-letter language code optionally followed by an underscore followed by an ISO 3166 code. Examples are "en" for English or "zh_CN" for Simplified Chinese.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of this language.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:id => String.t(),
:kind => String.t(),
:languageCode => String.t(),
:name => String.t()
}
field(:id)
field(:kind)
field(:languageCode)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.Language do
def decode(value, options) do
GoogleApi.DFAReporting.V33.Model.Language.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.Language do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.017857
| 256
| 0.709723
|
0392af80bdd610bd8a30809fb1bebfec80288ee3
| 1,602
|
ex
|
Elixir
|
test/support/test_parser.ex
|
fossabot/bejo
|
7d25d68ef97bd77b7d53fc9a9546261fdc99bfba
|
[
"Apache-2.0"
] | null | null | null |
test/support/test_parser.ex
|
fossabot/bejo
|
7d25d68ef97bd77b7d53fc9a9546261fdc99bfba
|
[
"Apache-2.0"
] | null | null | null |
test/support/test_parser.ex
|
fossabot/bejo
|
7d25d68ef97bd77b7d53fc9a9546261fdc99bfba
|
[
"Apache-2.0"
] | null | null | null |
# Test-only NimbleParsec entry points for the Bejo parser, plus helpers that
# generate random whitespace strings for fuzzing the parser's space handling.
defmodule TestParser do
  import NimbleParsec
  alias Bejo.Parser.{
    Common,
    Types,
    Expressions,
    FunctionDef
  }
  # Combinators delegated to the production parser modules.
  exp = parsec({Expressions, :exp})
  exps = parsec({Expressions, :exps})
  function_def = parsec({FunctionDef, :function_def})
  allow_space = parsec({Common, :allow_space})
  parse_type = parsec({Types, :parse_type})
  @vertical_space ["\n", "\r"]
  @horizontal_space ["\s", "\t"]
  # Full whitespace set also includes a comment line, which the parser must
  # treat like space.
  @space @vertical_space ++ @horizontal_space ++ ["# Dummy comment\n"]
  # Returns a random combination of horizontal space symbols
  def h_space(length \\ 5), do: do_space(@horizontal_space, length)
  # Returns a random combination of space symbols (both horizontal and vertical)
  def space(length \\ 5), do: do_space(@space, length)
  # Returns a random combination of space symbols which is
  # guaranteed to contain at least one occurrence of each of them
  # (all symbols appear once, plus up to `length` random extras, shuffled).
  defp do_space(symbols, length) do
    n = :rand.uniform(length)
    more = for _ <- 1..n, do: Enum.random(symbols)
    symbols
    |> Kernel.++(more)
    |> Enum.shuffle()
    |> Enum.join("")
  end
  # Bang helpers: parse the whole string or raise via the match below.
  def expression!(str), do: do_parse(&expression/1, str)
  def function_def!(str), do: do_parse(&function_def/1, str)
  def type_str!(str), do: do_parse(&type_str/1, str)
  # Asserts a successful single-result parse; raises MatchError otherwise.
  defp do_parse(parser, str) do
    {:ok, [result], _rest, _context, _line, _byte_offset} = parser.(str)
    result
  end
  # Public parsers; each must consume the entire input (eos).
  defparsec :expression, exp |> concat(allow_space) |> eos()
  defparsec :function_def, function_def
  defparsec :type_str, parse_type |> concat(allow_space) |> eos()
  defparsec :exps, exps |> concat(allow_space) |> eos()
end
| 29.127273
| 80
| 0.679775
|
0392c77fea267dd0bcab9e39f987cb3d7a473eb6
| 4,903
|
exs
|
Elixir
|
test/wobserver/util/metrics/formatter_test.exs
|
coby-spotim/wobserver
|
b0b9d77fb4ff40bb417f6b370ee343b8d42de750
|
[
"MIT"
] | null | null | null |
test/wobserver/util/metrics/formatter_test.exs
|
coby-spotim/wobserver
|
b0b9d77fb4ff40bb417f6b370ee343b8d42de750
|
[
"MIT"
] | null | null | null |
test/wobserver/util/metrics/formatter_test.exs
|
coby-spotim/wobserver
|
b0b9d77fb4ff40bb417f6b370ee343b8d42de750
|
[
"MIT"
] | 1
|
2018-09-12T16:13:57.000Z
|
2018-09-12T16:13:57.000Z
|
# Tests for Formatter.format/1..5 and format_all/2, which turn metric data of
# many shapes (maps, numbers, keyword lists, strings, function references)
# into Prometheus exposition text. `async: false` — output embeds the node's
# IP, derived from global :inet state.
defmodule Wobserver.Util.Metrics.FormatterTest do
  use ExUnit.Case, async: false
  alias Wobserver.Util.Metrics.{
    Formatter,
    Prometheus
  }
  # Target for the "data as function call / capture" test cases below.
  def example_function do
    [point: 5]
  end
  # First local IPv4 address as "a.b.c.d" — the node label in expected output.
  def local_ip do
    with {:ok, ips} <- :inet.getif(),
         {ip, _, _} <- List.first(ips),
         {ip1, ip2, ip3, ip4} <- ip,
         do: "#{ip1}.#{ip2}.#{ip3}.#{ip4}"
  end
  describe "format" do
    test "returns with valid data" do
      assert Formatter.format(
               %{point: 5},
               "data"
             ) == "data{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    # An atom type adds a "# TYPE" header line.
    test "returns with valid data and type" do
      assert Formatter.format(
               %{point: 5},
               "data",
               :gauge
             ) == "# TYPE data gauge\ndata{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    # Help text adds a "# HELP" header line before the type line.
    test "returns with valid data, type, and help" do
      assert Formatter.format(
               %{point: 5},
               "data",
               :gauge,
               "help"
             ) ==
               "# HELP data help\n# TYPE data gauge\ndata{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    # Bare numbers format without a type label.
    test "returns with valid data as integer" do
      assert Formatter.format(
               5,
               "data"
             ) == "data{node=\"#{local_ip()}\"} 5\n"
    end
    test "returns with valid data as float" do
      assert Formatter.format(
               5.4,
               "data"
             ) == "data{node=\"#{local_ip()}\"} 5.4\n"
    end
    test "returns with valid data as keywords" do
      assert Formatter.format(
               [point: 5],
               "data"
             ) == "data{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    # Strings are evaluated to produce the data they describe.
    test "returns with valid data as data String" do
      assert Formatter.format(
               "[point: 5]",
               "data"
             ) == "data{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    test "returns with valid data as anon function" do
      assert Formatter.format(
               "fn -> [point: 5] end",
               "data"
             ) == "data{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    test "returns with valid data as function call" do
      assert Formatter.format(
               "Wobserver.Util.Metrics.FormatterTest.example_function",
               "data"
             ) == "data{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    test "returns with valid data as function" do
      assert Formatter.format(
               "&Wobserver.Util.Metrics.FormatterTest.example_function/0",
               "data"
             ) == "data{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    # The formatter module can be given explicitly, as a module or a string.
    test "returns with explicit formatter" do
      assert Formatter.format(
               [point: 5],
               "data",
               nil,
               nil,
               Wobserver.Util.Metrics.Prometheus
             ) == "data{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
    test "returns with explicit formatter as String" do
      assert Formatter.format(
               [point: 5],
               "data",
               nil,
               nil,
               "Wobserver.Util.Metrics.Prometheus"
             ) == "data{node=\"#{local_ip()}\",type=\"point\"} 5\n"
    end
  end
  describe "format_all" do
    # One invalid entry makes the whole batch fail with :error.
    test "returns :error with invalid entry" do
      assert Formatter.format_all(
               works: %{value: 8},
               invalid: "w{"
             ) == :error
    end
    test "returns with multiple entries" do
      assert Formatter.format_all(
               [
                 works: %{value: 8},
                 also_works: %{value: 9}
               ],
               Prometheus
             ) ==
               "works{node=\"#{local_ip()}\",type=\"value\"} 8\nalso_works{node=\"#{local_ip()}\",type=\"value\"} 9\n"
    end
    # Entries may be {data, type} tuples to emit a per-metric TYPE header.
    test "returns with multiple entries and type" do
      assert Formatter.format_all(
               [
                 works: {
                   %{value: 8},
                   :gauge
                 },
                 also_works: %{value: 9}
               ],
               Prometheus
             ) ==
               "# TYPE works gauge\nworks{node=\"#{local_ip()}\",type=\"value\"} 8\nalso_works{node=\"#{
                 local_ip()
               }\",type=\"value\"} 9\n"
    end
    # Or {data, type, help} tuples for both HELP and TYPE headers.
    test "returns with multiple entries + type & help" do
      assert Formatter.format_all(
               [
                 works: {
                   %{value: 8},
                   :gauge,
                   "Info"
                 },
                 also_works: %{value: 9}
               ],
               Prometheus
             ) ==
               "# HELP works Info\n# TYPE works gauge\nworks{node=\"#{local_ip()}\",type=\"value\"} 8\nalso_works{node=\"#{
                 local_ip()
               }\",type=\"value\"} 9\n"
    end
  end
end
| 29.011834
| 123
| 0.459311
|
0392dbabdb0fe9e7ad35cd6857fc75336ca14cfa
| 3,848
|
exs
|
Elixir
|
ch04/exmeal/test/exmeal_web/controllers/meals_controller_test.exs
|
arilsonsouza/rocketseat-ignite-elixir
|
93e32d52d589336dfd2d81e755d6dd7f05ee40b8
|
[
"MIT"
] | null | null | null |
ch04/exmeal/test/exmeal_web/controllers/meals_controller_test.exs
|
arilsonsouza/rocketseat-ignite-elixir
|
93e32d52d589336dfd2d81e755d6dd7f05ee40b8
|
[
"MIT"
] | null | null | null |
ch04/exmeal/test/exmeal_web/controllers/meals_controller_test.exs
|
arilsonsouza/rocketseat-ignite-elixir
|
93e32d52d589336dfd2d81e755d6dd7f05ee40b8
|
[
"MIT"
] | null | null | null |
# Controller tests for the meals CRUD endpoints, covering the happy path and
# the not-found / validation error responses for each action.
defmodule ExmealWeb.MealsControllerTest do
  use ExmealWeb.ConnCase, async: true
  import Exmeal.Factory
  # Every test needs an existing user; expose its id via the test context.
  setup %{} do
    user_id = Ecto.UUID.generate()
    insert(:user, %{id: user_id})
    {:ok, user_id: user_id}
  end
  describe "create/2" do
    test "should create a meal when all params are present", %{conn: conn, user_id: user_id} do
      params = build(:meal_attrs)
      response =
        conn
        |> post(Routes.meals_path(conn, :create, %{params | "user_id" => user_id}))
        |> json_response(:created)
      assert %{
               "meal" => %{
                 "calories" => 284,
                 "date" => "2021-09-12T01:23:20Z",
                 "description" => "Lasanha",
                 "id" => _
               },
               "message" => "Meal created."
             } = response
    end
    # Missing params produce a 400 with per-field changeset errors.
    test "should return an error when there are missing params", %{conn: conn} do
      response =
        conn
        |> post(Routes.meals_path(conn, :create, %{}))
        |> json_response(:bad_request)
      assert %{
               "errors" => %{
                 "calories" => ["can't be blank"],
                 "date" => ["can't be blank"],
                 "description" => ["can't be blank"]
               }
             } = response
    end
  end
  describe "update/2" do
    test "should update a meal when id exist", %{conn: conn, user_id: user_id} do
      id = Ecto.UUID.generate()
      insert(:meal, %{id: id, user_id: user_id, description: "Rice and Beans", calories: 450})
      response =
        conn
        |> put(
          Routes.meals_path(conn, :update, id, %{"description" => "Banana", "calories" => 20})
        )
        |> json_response(:ok)
      assert %{
               "meal" => %{
                 "calories" => 20,
                 "date" => "2021-09-12T01:23:20Z",
                 "description" => "Banana",
                 "id" => _
               }
             } = response
    end
    # Updating an unknown id yields a 404 with a fixed error message.
    test "should return an error when id not exist", %{conn: conn} do
      id = Ecto.UUID.generate()
      response =
        conn
        |> put(
          Routes.meals_path(conn, :update, id, %{"description" => "Banana", "calories" => 20})
        )
        |> json_response(:not_found)
      assert %{"errors" => "Meal not found."} = response
    end
  end
  describe "show/2" do
    test "should return a meal when id exist", %{conn: conn, user_id: user_id} do
      id = Ecto.UUID.generate()
      insert(:meal, %{id: id, user_id: user_id})
      response =
        conn
        |> get(Routes.meals_path(conn, :show, id))
        |> json_response(:ok)
      assert %{
               "meal" => %{
                 "calories" => 284,
                 "date" => "2021-09-12T01:23:20Z",
                 "description" => "Lasanha",
                 "id" => _
               }
             } = response
    end
    test "should return an error when id not exist", %{conn: conn} do
      id = Ecto.UUID.generate()
      response =
        conn
        |> get(Routes.meals_path(conn, :show, id))
        |> json_response(:not_found)
      assert %{"errors" => "Meal not found."} = response
    end
  end
  describe "delete/2" do
    # Successful delete returns 204 with an empty body.
    test "should delete an meal when is given a valid id", %{conn: conn, user_id: user_id} do
      id = Ecto.UUID.generate()
      insert(:meal, %{id: id, user_id: user_id})
      response =
        conn
        |> delete(Routes.meals_path(conn, :delete, id))
        |> response(:no_content)
      assert response == ""
    end
    test "should return an error when is given an invalid id", %{conn: conn} do
      id = Ecto.UUID.generate()
      response =
        conn
        |> delete(Routes.meals_path(conn, :delete, id))
        |> json_response(:not_found)
      assert response == %{"errors" => "Meal not found."}
    end
  end
end
| 27.098592
| 95
| 0.505977
|
0392e0278e2ab6e5c7691f9f1b728c68fbe054e7
| 962
|
ex
|
Elixir
|
test/support/edge_cases/multiple_def_cases/useless_spaces.ex
|
Ajwah/ex_debugger
|
44cd5b99c0b7751db052887b7a0bc3ce52a2972e
|
[
"Apache-2.0"
] | 3
|
2020-07-07T20:41:23.000Z
|
2020-07-17T09:05:37.000Z
|
test/support/edge_cases/multiple_def_cases/useless_spaces.ex
|
Ajwah/ex_debugger
|
44cd5b99c0b7751db052887b7a0bc3ce52a2972e
|
[
"Apache-2.0"
] | 1
|
2020-07-08T00:04:38.000Z
|
2020-07-08T00:04:38.000Z
|
test/support/edge_cases/multiple_def_cases/useless_spaces.ex
|
Ajwah/ex_debugger
|
44cd5b99c0b7751db052887b7a0bc3ce52a2972e
|
[
"Apache-2.0"
] | null | null | null |
defmodule Support.EdgeCases.MultipleDefCases.UselessSpaces do
  @moduledoc "In case that `mix format` reorganizes this file, then resort to `useless_spaces.txt`"
  use ExDebugger
  # NOTE: the irregular indentation/line breaks below are the point of this
  # fixture (ExDebugger edge cases) — do not reformat. All four runs compute
  # Enum.reduce(Enum.reverse(ls), 0, &(&1 * 2 + &2)) over a 4-element list.
  def run1, do: %{ls: [1, 2, 3, 4]}
  |> Map.fetch!(:ls)
  |> List.wrap
  |> Enum.reverse
  |> Enum.reduce(0, &((&1 * 2) + &2))
  def run2(%{digit: a}, %{digit: b}, %{digit: c}, %{digit: d}) do
    %{ls: [a, b, c, d]}
    |> Map.fetch!(:ls)
    |> List.wrap
    |> Enum.reverse
    |> Enum.reduce(0, &((&1 * 2) + &2))
  end
  def run3(a, b, c, d) when is_integer(a) and is_integer(b) and is_integer(c) and is_integer(d) do
    %{ls: [a, b, c, d]}
    |> Map.fetch!(:ls)
    |> List.wrap
    |> Enum.reverse
    |> Enum.reduce(0, &((&1 * 2) + &2))
  end
  def run4(a) do
    %{ls: a}
    |> Map.fetch!(:ls)
    |> List.wrap
    |> Enum.reverse
    |> Enum.reduce(0, &((&1 * 2) + &2)) end
end
| 10.808989
| 99
| 0.490644
|
0392f36ac8fdde1510c6630bff49b26d0126f0b5
| 1,028
|
ex
|
Elixir
|
lib/ex338_web/views/fantasy_league_view.ex
|
axelclark/ex338
|
3fb3c260d93bda61f7636ee1a677770d2dc1b89a
|
[
"MIT"
] | 17
|
2016-12-22T06:39:26.000Z
|
2021-01-20T13:51:13.000Z
|
lib/ex338_web/views/fantasy_league_view.ex
|
axelclark/ex338
|
3fb3c260d93bda61f7636ee1a677770d2dc1b89a
|
[
"MIT"
] | 608
|
2016-08-06T18:57:58.000Z
|
2022-03-01T02:48:17.000Z
|
lib/ex338_web/views/fantasy_league_view.ex
|
axelclark/ex338
|
3fb3c260d93bda61f7636ee1a677770d2dc1b89a
|
[
"MIT"
] | 6
|
2017-11-21T22:35:45.000Z
|
2022-01-11T21:37:40.000Z
|
defmodule Ex338Web.FantasyLeagueView do
use Ex338Web, :view
@line_colors [
"#e6194B",
"#3cb44b",
"#ffe119",
"#4363d8",
"#f58231",
"#42d4f4",
"#f032e6",
"#fabebe",
"#469990",
"#e6beff",
"#9A6324",
"#800000",
"#000075",
"#aaffc3",
"#fffac8"
]
def format_and_encode_dataset(standings_history) do
standings_history
|> format_dataset()
|> Jason.encode!()
end
def format_dataset(standings_history) do
standings_history
|> Enum.map(&update_keys(&1))
|> Enum.zip(@line_colors)
|> Enum.map(&add_colors(&1))
end
## Helpers
## format_dataset
defp update_keys(team_data) do
team_data
|> Map.put_new(:data, team_data.points)
|> Map.put_new(:label, team_data.team_name)
|> Map.put_new(:fill, false)
|> Map.delete(:points)
|> Map.delete(:team_name)
end
defp add_colors({team_data, color}) do
team_data
|> Map.put_new(:borderColor, color)
|> Map.put_new(:backgroundColor, color)
end
end
| 19.037037
| 53
| 0.611868
|
03931577dd0a7f33cf76c3d49600295165f8be9e
| 112
|
ex
|
Elixir
|
lib/component/definitions/npc_brain.ex
|
doawoo/elixir_rpg
|
4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3
|
[
"MIT"
] | 23
|
2021-10-24T00:21:13.000Z
|
2022-03-13T12:33:38.000Z
|
lib/component/definitions/npc_brain.ex
|
doawoo/elixir_rpg
|
4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3
|
[
"MIT"
] | null | null | null |
lib/component/definitions/npc_brain.ex
|
doawoo/elixir_rpg
|
4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3
|
[
"MIT"
] | 3
|
2021-11-04T02:42:25.000Z
|
2022-02-02T14:22:52.000Z
|
use ElixirRPG.DSL.Component
defcomponent NPCBrain do
member :brain_name, "flan"
member :cached_src, ""
end
| 16
| 28
| 0.758929
|
03932ef54027885cd1fc87922c5f674514813569
| 9,899
|
ex
|
Elixir
|
lib/bupe/builder.ex
|
milmazz/bupe
|
e06007980297e60fe7663969cdd202a5da578bb6
|
[
"Apache-2.0"
] | 79
|
2016-08-02T13:17:09.000Z
|
2021-12-24T21:47:28.000Z
|
lib/bupe/builder.ex
|
milmazz/bupe
|
e06007980297e60fe7663969cdd202a5da578bb6
|
[
"Apache-2.0"
] | 48
|
2019-07-22T15:00:30.000Z
|
2022-02-28T11:17:36.000Z
|
lib/bupe/builder.ex
|
milmazz/bupe
|
e06007980297e60fe7663969cdd202a5da578bb6
|
[
"Apache-2.0"
] | 4
|
2017-04-08T17:29:35.000Z
|
2021-11-29T01:33:23.000Z
|
defmodule BUPE.Builder do
@moduledoc ~S"""
Elixir EPUB generator
## Example
iex(1)> files = "~/book/*.xhtml" |> Path.expand() |> Path.wildcard()
["/Users/dev/book/bacon.xhtml", "/Users/dev/book/egg.xhtml", "/Users/dev/book/ham.xhtml"]
iex(2)> get_id = fn file -> Path.basename(file, ".xhtml") end
#Function<6.99386804/1 in :erl_eval.expr/5>
iex(3)> pages = Enum.map(files, fn file ->
...(3)> %BUPE.Item{href: file, id: get_id.(file), description: file |> get_id.() |> String.capitalize()}
...(3)> end)
[
%BUPE.Item{
description: "Bacon",
duration: nil,
fallback: nil,
href: "/Users/dev/book/bacon.xhtml",
id: "bacon",
media_overlay: nil,
media_type: nil,
properties: nil
},
%BUPE.Item{
description: "Egg",
duration: nil,
fallback: nil,
href: "/Users/dev/book/egg.xhtml",
id: "egg",
media_overlay: nil,
media_type: nil,
properties: nil
},
%BUPE.Item{
description: "Ham",
duration: nil,
fallback: nil,
href: "/Users/dev/book/ham.xhtml",
id: "ham",
media_overlay: nil,
media_type: nil,
properties: nil
}
]
iex(4)> config = %BUPE.Config{
...(4)> title: "Sample",
...(4)> language: "en",
...(4)> creator: "John Doe",
...(4)> publisher: "Sample",
...(4)> date: "2016-06-23T06:00:00Z",
...(4)> unique_identifier: "EXAMPLE",
...(4)> identifier: "http://example.com/book/jdoe/1",
...(4)> pages: pages
...(4)> }
%BUPE.Config{
audio: [],
contributor: nil,
cover: true,
coverage: nil,
creator: "John Doe",
date: "2016-06-23T06:00:00Z",
description: nil,
fonts: [],
format: nil,
identifier: "http://example.com/book/jdoe/1",
images: [],
language: "en",
logo: nil,
modified: nil,
nav: [],
pages: [
%BUPE.Item{
description: "Bacon",
duration: nil,
fallback: nil,
href: "/Users/dev/book/bacon.xhtml",
id: "bacon",
media_overlay: nil,
media_type: nil,
properties: nil
},
%BUPE.Item{
description: "Egg",
duration: nil,
fallback: nil,
href: "/Users/dev/book/egg.xhtml",
id: "egg",
media_overlay: nil,
media_type: nil,
properties: nil
},
%BUPE.Item{
description: "Ham",
duration: nil,
fallback: nil,
href: "/Users/dev/book/ham.xhtml",
id: "ham",
media_overlay: nil,
media_type: nil,
properties: nil
}
],
publisher: "Sample",
relation: nil,
rights: nil,
scripts: [],
source: nil,
styles: [],
subject: nil,
title: "Sample",
type: nil,
unique_identifier: "EXAMPLE",
version: "3.0"
}
iex(6)> BUPE.Builder.run(config, "example.epub")
{:ok, '/Users/dev/example.epub'}
"""
alias BUPE.{Builder.Templates, Config, Item, Util}
@mimetype "application/epub+zip"
@container_template File.read!(Path.expand("builder/templates/assets/container.xml", __DIR__))
@display_options File.read!(
Path.expand(
"builder/templates/assets/com.apple.ibooks.display-options.xml",
__DIR__
)
)
@stylesheet File.read!(Path.expand("builder/templates/css/stylesheet.css", __DIR__))
@doc """
Generates an EPUB v3 document
"""
@spec run(Config.t(), Path.t(), Keyword.t()) :: {:ok, String.t()} | {:error, String.t()}
def run(config, name, options \\ []) do
name = Path.expand(name)
config
|> normalize_config()
|> generate_assets(assets())
|> generate_package()
|> generate_ncx()
|> generate_nav()
|> generate_title()
|> generate_content()
|> generate_epub(name, options)
end
defp normalize_config(config) do
config =
config
|> modified_date()
|> normalize_assets()
|> check_identifier()
|> check_files_extension()
|> check_unique_identifier()
%{files: [], details: config}
end
defp normalize_assets(config) do
[pages, styles, scripts, images] =
for asset <- ~w(pages styles scripts images)a do
config
|> Map.get(asset)
|> transform_assets()
end
%{config | pages: pages, styles: styles, scripts: scripts, images: images}
end
defp transform_assets([]), do: []
defp transform_assets(assets), do: Enum.map(assets, &transform_asset/1)
defp transform_asset(%Item{} = asset) do
Item.normalize(asset)
end
defp transform_asset(asset) when is_binary(asset) do
Item.from_string(asset)
end
# Package definition builder.
#
# According to the EPUB specification, the *Package Document* carries
# bibliographic and structural metadata about an EPUB Publication, and is thus
# the primary source of information about how to process and display it.
#
# The `package` element is the root container of the Package Document and
# encapsulates Publication metadata and resource information.
defp generate_package(config) do
content = Templates.content_template(config.details)
%{config | files: [{'OEBPS/content.opf', content} | config.files]}
end
# Navigation Center eXtended definition
#
# Keep in mind that the EPUB Navigation Document supersedes this definition.
# According to the EPUB specification:
#
# > EPUB 3 Publications may include an NCX (as defined in OPF 2.0.1) for EPUB
# > 2 Reading System forwards compatibility purposes, but EPUB 3 Reading
# > Systems must ignore the NCX.
defp generate_ncx(config) do
content = Templates.ncx_template(config.details)
%{config | files: [{'OEBPS/toc.ncx', content} | config.files]}
end
# Navigation Document Definition
#
# The TOC nav element defines the primary navigation hierarchy of the document.
# It conceptually corresponds to a table of contents in a printed work.
#
# See [EPUB Navigation Document Definition][nav] for more information.
#
# [nav]: http://www.idpf.org/epub/301/spec/epub-contentdocs.html#sec-xhtml-nav-def
defp generate_nav(config) do
if config.details.version == "3.0" do
content = Templates.nav_template(config.details)
%{config | files: [{'OEBPS/nav.xhtml', content} | config.files]}
else
config
end
end
# Cover page definition for the EPUB document
defp generate_title(config) do
if config.details.cover do
content = Templates.title_template(config.details)
%{config | files: [{'OEBPS/title.xhtml', content} | config.files]}
else
config
end
end
defp generate_content(config) do
sources =
config.details.pages ++
config.details.styles ++ config.details.scripts ++ config.details.images
sources
|> Enum.map(fn source ->
content = File.read!(source.href)
path = "OEBPS/content" |> Path.join(Path.basename(source.href)) |> String.to_charlist()
{path, content}
end)
|> Enum.concat(config.files)
end
defp generate_epub(files, name, options) do
opts = [compress: ['.css', '.js', '.html', '.xhtml', '.ncx', '.opf', '.jpg', '.png', '.xml']]
opts = if Enum.find(options, &(&1 == :memory)), do: [:memory | opts], else: opts
:zip.create(String.to_charlist(name), [{'mimetype', @mimetype} | files], opts)
end
## Helpers
defp modified_date(%{modified: nil} = config) do
dt = DateTime.utc_now() |> Map.put(:microsecond, {0, 0}) |> DateTime.to_iso8601()
Map.put(config, :modified, dt)
end
# credo:disable-for-next-line Credo.Check.Design.TagTODO
# TODO: Check if format is compatible with ISO8601
defp modified_date(config), do: config
defp check_identifier(%{identifier: nil} = config) do
identifier = "urn:uuid:#{Util.uuid4()}"
Map.put(config, :identifier, identifier)
end
defp check_identifier(config), do: config
defp check_files_extension(%{version: "3.0"} = config) do
if invalid_files?(config.pages, [".xhtml"]) do
raise BUPE.InvalidExtensionName,
"XHTML Content Document file names should have the extension '.xhtml'."
end
config
end
defp check_files_extension(%{version: "2.0"} = config) do
if invalid_files?(config.pages, [".html", ".htm", ".xhtml"]) do
raise BUPE.InvalidExtensionName,
"invalid file extension for HTML file, expected '.html', '.htm' or '.xhtml'"
end
config
end
defp check_files_extension(_config), do: raise(BUPE.InvalidVersion)
defp check_unique_identifier(%{unique_identifier: nil} = config),
do: Map.put(config, :unique_identifier, "BUPE")
defp check_unique_identifier(config), do: config
defp invalid_files?(files, extensions) do
Enum.filter(files, &((&1.href |> Path.extname() |> String.downcase()) in extensions)) != files
end
defp assets do
[
[content: @stylesheet, dir: "OEBPS/css", filename: "stylesheet.css"],
[content: @container_template, dir: "META-INF", filename: "container.xml"],
[
content: @display_options,
dir: "META-INF",
filename: "com.apple.ibooks.display-options.xml"
]
]
end
defp generate_assets(config, assets) do
files =
Enum.into(assets, config.files, fn asset ->
{asset[:dir] |> Path.join(asset[:filename]) |> String.to_charlist(), asset[:content]}
end)
%{config | files: files}
end
end
| 29.99697
| 112
| 0.590969
|
03937c16dbc3559527cd55aa62b9333231a281ad
| 1,876
|
ex
|
Elixir
|
clients/big_query/lib/google_api/big_query/v2/model/list_routines_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/big_query/lib/google_api/big_query/v2/model/list_routines_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/big_query/lib/google_api/big_query/v2/model/list_routines_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.ListRoutinesResponse do
@moduledoc """
## Attributes
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - A token to request the next page of results.
* `routines` (*type:* `list(GoogleApi.BigQuery.V2.Model.Routine.t)`, *default:* `nil`) - Routines in the requested dataset. Unless read_mask is set in the request, only the following fields are populated: etag, project_id, dataset_id, routine_id, routine_type, creation_time, last_modified_time, and language.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:nextPageToken => String.t() | nil,
:routines => list(GoogleApi.BigQuery.V2.Model.Routine.t()) | nil
}
field(:nextPageToken)
field(:routines, as: GoogleApi.BigQuery.V2.Model.Routine, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.ListRoutinesResponse do
def decode(value, options) do
GoogleApi.BigQuery.V2.Model.ListRoutinesResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.ListRoutinesResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.52
| 313
| 0.738806
|
0393a1945f7c32e46afc7fa783c30149581766a3
| 8,307
|
ex
|
Elixir
|
clients/compute/lib/google_api/compute/v1/api/regions.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/compute/lib/google_api/compute/v1/api/regions.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/compute/lib/google_api/compute/v1/api/regions.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Api.Regions do
@moduledoc """
API calls for all endpoints tagged `Regions`.
"""
alias GoogleApi.Compute.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Returns the specified Region resource. Gets a list of available regions by making a list() request.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - Name of the region resource to return.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Region{}}` on success
* `{:error, info}` on failure
"""
@spec compute_regions_get(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Compute.V1.Model.Region.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def compute_regions_get(connection, project, region, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/projects/{project}/regions/{region}", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Region{}])
end
@doc """
Retrieves the list of region resources available to the specified project.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
* `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
* `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
Currently, only sorting by `name` or `creationTimestamp desc` is supported.
* `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
* `:returnPartialSuccess` (*type:* `boolean()`) - Opt-in for partial success behavior which provides partial results in case of failure. The default value is false.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.RegionList{}}` on success
* `{:error, info}` on failure
"""
@spec compute_regions_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Compute.V1.Model.RegionList.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def compute_regions_list(connection, project, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:filter => :query,
:maxResults => :query,
:orderBy => :query,
:pageToken => :query,
:returnPartialSuccess => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/projects/{project}/regions", %{
"project" => URI.encode(project, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.RegionList{}])
end
end
| 55.38
| 434
| 0.670398
|
039407b495a41319c159f76cf63661e57e362122
| 503
|
ex
|
Elixir
|
lib/phoenix_sample_web/views/error_view.ex
|
jonathanleang/phoenix_sample
|
2dbc65d74087fedc9eccb87436870f0927c74522
|
[
"Apache-2.0"
] | 19
|
2018-09-24T19:55:31.000Z
|
2021-12-12T20:46:17.000Z
|
lib/phoenix_sample_web/views/error_view.ex
|
jonathanleang/phoenix_sample
|
2dbc65d74087fedc9eccb87436870f0927c74522
|
[
"Apache-2.0"
] | 8
|
2018-10-17T13:48:15.000Z
|
2021-11-04T05:09:55.000Z
|
lib/phoenix_sample_web/views/error_view.ex
|
jonathanleang/phoenix_sample
|
2dbc65d74087fedc9eccb87436870f0927c74522
|
[
"Apache-2.0"
] | 6
|
2018-10-11T13:59:25.000Z
|
2022-02-14T14:04:19.000Z
|
defmodule PhoenixSampleWeb.ErrorView do
use PhoenixSampleWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 29.588235
| 61
| 0.741551
|
0394126dfd9e77a2060fdf088383d65813406aee
| 308
|
ex
|
Elixir
|
lib/yourbot/editor.ex
|
ConnorRigby/yourbot
|
eea40e63b0f93963ed14b7efab9ecbe898ab11dd
|
[
"Apache-2.0"
] | 3
|
2021-11-08T15:19:19.000Z
|
2021-11-11T03:18:35.000Z
|
lib/yourbot/editor.ex
|
ConnorRigby/yourbot
|
eea40e63b0f93963ed14b7efab9ecbe898ab11dd
|
[
"Apache-2.0"
] | null | null | null |
lib/yourbot/editor.ex
|
ConnorRigby/yourbot
|
eea40e63b0f93963ed14b7efab9ecbe898ab11dd
|
[
"Apache-2.0"
] | null | null | null |
defmodule YourBot.Editor do
def format(code) do
filename = Path.join(System.tmp_dir!(), Ecto.UUID.generate())
File.write!(filename, code)
{"", 0} = System.cmd("python3", ["-m", "black", "-q", filename])
formatted = File.read!(filename)
File.rm!(filename)
{:ok, formatted}
end
end
| 28
| 68
| 0.626623
|
039447c23aca30f71aeb402bfbd1ee69ffbf1fea
| 871
|
ex
|
Elixir
|
rocketpay/lib/rocketpay_web/controllers/accounts_controller.ex
|
alfredots/next-level-week-4-elixir
|
ea47b53a27540454efba2f3007125ee5964a760e
|
[
"MIT"
] | null | null | null |
rocketpay/lib/rocketpay_web/controllers/accounts_controller.ex
|
alfredots/next-level-week-4-elixir
|
ea47b53a27540454efba2f3007125ee5964a760e
|
[
"MIT"
] | null | null | null |
rocketpay/lib/rocketpay_web/controllers/accounts_controller.ex
|
alfredots/next-level-week-4-elixir
|
ea47b53a27540454efba2f3007125ee5964a760e
|
[
"MIT"
] | null | null | null |
defmodule RocketpayWeb.AccountsController do
use RocketpayWeb, :controller
alias Rocketpay.Accounts.Transactions.Response, as: TransactionResponse
alias Rocketpay.Account
action_fallback RocketpayWeb.FallbackController
def deposit(conn, params) do
with {:ok, %Account{} = account} <- Rocketpay.deposit(params) do
conn
|> put_status(:ok)
|> render("update.json", account: account)
end
end
def withdraw(conn, params) do
with {:ok, %Account{} = account} <- Rocketpay.withdraw(params) do
conn
|> put_status(:ok)
|> render("update.json", account: account)
end
end
def transaction(conn, params) do
with {:ok, %TransactionResponse{} = transaction} <- Rocketpay.transaction(params) do
conn
|> put_status(:ok)
|> render("transaction.json", transaction: transaction)
end
end
end
| 27.21875
| 88
| 0.67853
|
039462d66485c02ffa4027e9e1c7bc3acfbb06b3
| 149
|
ex
|
Elixir
|
final-projects/shop_website/lib/shop_website_web/controllers/page_controller.ex
|
anitabenites/elixir-and-phoenix-mentoring-sessions
|
0fa283a8d4a8884edd54c8e16d788e2ca65e4592
|
[
"MIT"
] | 3
|
2019-07-31T20:06:34.000Z
|
2021-11-16T11:19:45.000Z
|
final-projects/shop_website/lib/shop_website_web/controllers/page_controller.ex
|
anitabenites/elixir-and-phoenix-mentoring-sessions
|
0fa283a8d4a8884edd54c8e16d788e2ca65e4592
|
[
"MIT"
] | null | null | null |
final-projects/shop_website/lib/shop_website_web/controllers/page_controller.ex
|
anitabenites/elixir-and-phoenix-mentoring-sessions
|
0fa283a8d4a8884edd54c8e16d788e2ca65e4592
|
[
"MIT"
] | 1
|
2019-07-31T16:46:14.000Z
|
2019-07-31T16:46:14.000Z
|
defmodule ShopWebsiteWeb.PageController do
use ShopWebsiteWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 18.625
| 42
| 0.751678
|
03946574c37a92e5d314e89953bfacf3c6467c95
| 847
|
exs
|
Elixir
|
bank-account/account.exs
|
nlhuykhang/elixir-exercism
|
0462661cc411cb28b4bf800639b16684480a06a7
|
[
"MIT"
] | null | null | null |
bank-account/account.exs
|
nlhuykhang/elixir-exercism
|
0462661cc411cb28b4bf800639b16684480a06a7
|
[
"MIT"
] | null | null | null |
bank-account/account.exs
|
nlhuykhang/elixir-exercism
|
0462661cc411cb28b4bf800639b16684480a06a7
|
[
"MIT"
] | null | null | null |
defmodule BankAccount do
@moduledoc """
A bank account that supports access from multiple processes.
"""
@typedoc """
An account handle.
"""
@opaque account :: pid
@doc """
Open the bank. Makes the account available.
"""
@spec open_bank() :: account
def open_bank() do
%{
:balance => 0
}
end
@doc """
Close the bank. Makes the account unavailable.
"""
@spec close_bank(account) :: none
def close_bank(account) do
end
@doc """
Get the account's balance.
"""
@spec balance(account) :: integer
def balance(account) do
account.balance
end
@doc """
Update the account's balance by adding the given amount which may be negative.
"""
@spec update(account, integer) :: any
def update(account, amount) do
Map.update(account, :balance, amount, &(&1 + amount))
end
end
| 19.25
| 80
| 0.636364
|
03948b7100bd0b9e2519c52fa4b62339f7e2fef5
| 684
|
exs
|
Elixir
|
config/config.exs
|
thaisfiori/blog_da_tha
|
e860e64e5251dae03b95fdf1e339ec0a3d789a29
|
[
"Apache-2.0"
] | null | null | null |
config/config.exs
|
thaisfiori/blog_da_tha
|
e860e64e5251dae03b95fdf1e339ec0a3d789a29
|
[
"Apache-2.0"
] | null | null | null |
config/config.exs
|
thaisfiori/blog_da_tha
|
e860e64e5251dae03b95fdf1e339ec0a3d789a29
|
[
"Apache-2.0"
] | null | null | null |
import Config
config :blog_da_tha,
ecto_repos: [BlogDaTha.Repo]
config :blog_da_tha_web,
ecto_repos: [BlogDaTha.Repo],
generators: [context_app: :blog_da_tha, binary_id: true]
config :blog_da_tha_web, BlogDaThaWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "5kvRDoWr9xSP6eKTB9wYr6nlYRcfw0+HxRAth13MLHW5ayIi+UNAxxZWiZwsWA+S",
render_errors: [view: BlogDaThaWeb.ErrorView, accepts: ~w(json), layout: false],
pubsub_server: BlogDaTha.PubSub,
live_view: [signing_salt: "Y43hUYwb"]
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
config :phoenix, :json_library, Jason
import_config "#{Mix.env()}.exs"
| 24.428571
| 86
| 0.748538
|
039492c1354a2ad5c8ea57cb109d6cd511a29861
| 444
|
ex
|
Elixir
|
lib/test_that_json/parsing.ex
|
Ariel-Thomas/test_that_json
|
5c6301b77d58681a65383344d2f35d1c291fc0ff
|
[
"MIT"
] | 11
|
2016-07-19T15:33:25.000Z
|
2021-07-11T17:31:33.000Z
|
lib/test_that_json/parsing.ex
|
Ariel-Thomas/test_that_json
|
5c6301b77d58681a65383344d2f35d1c291fc0ff
|
[
"MIT"
] | 1
|
2018-05-17T16:15:23.000Z
|
2018-05-17T16:15:23.000Z
|
lib/test_that_json/parsing.ex
|
Ariel-Thomas/test_that_json
|
5c6301b77d58681a65383344d2f35d1c291fc0ff
|
[
"MIT"
] | 2
|
2016-09-20T11:14:41.000Z
|
2018-05-16T15:41:29.000Z
|
defmodule TestThatJson.Parsing do
def parse!(json) when is_binary(json) do
JSX.decode!(json)
rescue
ArgumentError -> raise TestThatJson.InvalidJsonError
end
def parse!(value), do: value
def parse(json) when is_binary(json) do
case JSX.decode(json) do
{:error, _} -> {:error, {TestThatJson.InvalidJsonError, [json], "Invalid JSON"}}
result -> result
end
end
def parse(value), do: {:ok, value}
end
| 26.117647
| 86
| 0.666667
|
0394b63178aef330bfd18088799bd0090f116d9a
| 510
|
ex
|
Elixir
|
lib/app_web/views/lead_source_view.ex
|
ThanhUong/Chronos
|
5e1b0823c585b784f5c51212513d518cab53a571
|
[
"MIT"
] | null | null | null |
lib/app_web/views/lead_source_view.ex
|
ThanhUong/Chronos
|
5e1b0823c585b784f5c51212513d518cab53a571
|
[
"MIT"
] | null | null | null |
lib/app_web/views/lead_source_view.ex
|
ThanhUong/Chronos
|
5e1b0823c585b784f5c51212513d518cab53a571
|
[
"MIT"
] | null | null | null |
defmodule AppWeb.LeadSourceView do
use AppWeb, :view
alias AppWeb.LeadSourceView
def render("index.json", %{lead_sources: lead_sources}) do
%{data: render_many(lead_sources, LeadSourceView, "lead_source.json")}
end
def render("show.json", %{lead_source: lead_source}) do
%{data: render_one(lead_source, LeadSourceView, "lead_source.json")}
end
def render("lead_source.json", %{lead_source: lead_source}) do
%{
id: lead_source.id,
name: lead_source.name
}
end
end
| 25.5
| 74
| 0.701961
|
03953776242781744635a02e8b76608f5fb8adb1
| 1,774
|
exs
|
Elixir
|
lib/mix/test/mix/tasks/loadconfig_test.exs
|
mertonium/elixir
|
74e666156906974082f6b4d34dfbe6988d6465c0
|
[
"Apache-2.0"
] | 1
|
2018-10-02T13:55:29.000Z
|
2018-10-02T13:55:29.000Z
|
lib/mix/test/mix/tasks/loadconfig_test.exs
|
mertonium/elixir
|
74e666156906974082f6b4d34dfbe6988d6465c0
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/test/mix/tasks/loadconfig_test.exs
|
mertonium/elixir
|
74e666156906974082f6b4d34dfbe6988d6465c0
|
[
"Apache-2.0"
] | null | null | null |
Code.require_file("../../test_helper.exs", __DIR__)

defmodule Mix.Tasks.LoadconfigTest do
  use MixTest.Case

  # Tag the app whose env this test mutates — presumably MixTest.Case uses
  # it to unload/reset the app afterwards; confirm in the case template.
  @tag apps: [:my_app]
  test "reads and persists project configuration", context do
    Mix.Project.push(MixTest.Case.Sample)

    in_tmp(context.test, fn ->
      # Default config path (config/config.exs) is picked up implicitly.
      write_config("""
      [my_app: [key: :project]]
      """)

      # Not set until the task runs.
      assert Application.fetch_env(:my_app, :key) == :error
      Mix.Task.run("loadconfig", [])
      assert Application.fetch_env(:my_app, :key) == {:ok, :project}

      # App configuration should have lower precedence: loading the .app
      # env afterwards must not overwrite the already-persisted value.
      :ok = :application.load({:application, :my_app, [vsn: '1.0.0', env: [key: :app]]})
      assert Application.fetch_env(:my_app, :key) == {:ok, :project}

      # loadconfig can be called multiple times
      # Later values should have higher precedence
      Mix.Task.run("loadconfig", [fixture_path("configs/good_config.exs")])
      assert Application.fetch_env(:my_app, :key) == {:ok, :value}
    end)
  end

  @tag apps: [:config_app]
  test "reads from custom config_path", context do
    # Override the project's config file location before pushing it.
    Mix.ProjectStack.post_config(config_path: "fresh.config")
    Mix.Project.push(MixTest.Case.Sample)

    in_tmp(context.test, fn ->
      write_config("fresh.config", """
      [config_app: [key: :value]]
      """)

      assert Application.fetch_env(:config_app, :key) == :error
      Mix.Task.run("loadconfig", [])
      assert Application.fetch_env(:config_app, :key) == {:ok, :value}

      # A missing config file must raise rather than be silently skipped.
      File.rm("fresh.config")

      assert_raise Mix.Config.LoadError, ~r"could not load config fresh\.config", fn ->
        Mix.Task.run("loadconfig", [])
      end
    end)
  end

  # Writes `contents` to `path` (default: the conventional config file),
  # creating parent directories as needed.
  defp write_config(path \\ "config/config.exs", contents) do
    File.mkdir_p!(Path.dirname(path))
    File.write!(path, contents)
  end
end
| 31.122807
| 88
| 0.645434
|
03954b3c2080c962e804e0ddc32973e6592b13e1
| 661
|
exs
|
Elixir
|
advent_umbrella_2016/apps/day8/mix.exs
|
lauromoura/adventofcode
|
320dc1ea7099fbc7c3ffcbc406bfc0aa236c3b78
|
[
"Unlicense"
] | null | null | null |
advent_umbrella_2016/apps/day8/mix.exs
|
lauromoura/adventofcode
|
320dc1ea7099fbc7c3ffcbc406bfc0aa236c3b78
|
[
"Unlicense"
] | null | null | null |
advent_umbrella_2016/apps/day8/mix.exs
|
lauromoura/adventofcode
|
320dc1ea7099fbc7c3ffcbc406bfc0aa236c3b78
|
[
"Unlicense"
] | null | null | null |
defmodule Day8.Mixfile do
  use Mix.Project

  # Mix project definition for the day8 app.
  def project do
    [
      app: :day8,
      version: "0.0.1",
      elixir: "~> 1.2",
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      deps: deps
    ]
  end

  # OTP application configuration; only :logger is started at runtime.
  # Run `mix help compile.app` for more information.
  def application do
    [applications: [:logger]]
  end

  # No external dependencies. Hex packages ({:mydep, "~> 0.3.0"}) or
  # git/path repositories could be listed here; see `mix help deps`.
  defp deps do
    []
  end
end
| 20.030303
| 77
| 0.605144
|
039575de23e75eea3dd442e0bb23df790760517d
| 2,363
|
exs
|
Elixir
|
mix.exs
|
zubairshokh/asciinema-server
|
b882f285a84054e94e70def8f9777cc2fc3551b1
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
zubairshokh/asciinema-server
|
b882f285a84054e94e70def8f9777cc2fc3551b1
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
zubairshokh/asciinema-server
|
b882f285a84054e94e70def8f9777cc2fc3551b1
|
[
"Apache-2.0"
] | null | null | null |
defmodule Asciinema.Mixfile do
  use Mix.Project

  # Top-level Mix project definition; see `mix help compile.app` for the
  # application/0 options and `mix help deps` for dependency formats.
  def project do
    [
      app: :asciinema,
      version: "0.0.1",
      elixir: "~> 1.6",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      build_embedded: prod?(),
      start_permanent: prod?(),
      aliases: aliases(),
      deps: deps()
    ]
  end

  # OTP application callback module plus extra applications to start.
  def application do
    [mod: {Asciinema.Application, []}, extra_applications: [:logger]]
  end

  # Test support helpers are compiled only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_env), do: ["lib"]

  # True when building for production.
  defp prod?, do: Mix.env() == :prod

  # Shortcuts for common workflows, e.g. `mix ecto.setup` creates,
  # migrates and seeds the database in one go.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end

  # Project dependencies (order preserved).
  defp deps do
    [
      {:bamboo, "~> 1.2"},
      {:bamboo_smtp, "~> 1.6"},
      {:briefly, "~> 0.3"},
      {:cowboy, "~> 1.0"},
      {:credo, "~> 0.10.0", only: [:dev, :test], runtime: false},
      {:distillery, "~> 2.0"},
      {:earmark, "~> 1.2"},
      {:ex_aws, "~> 1.0"},
      {:ex_machina, "~> 2.1", only: :test},
      {:exq, "~> 0.12.2"},
      {:exq_ui, "~> 0.9.0"},
      {:gettext, "~> 0.11"},
      {:html_sanitize_ex, "~> 1.3"},
      {:inflex, "~> 1.9"},
      {:jason, "~> 1.1"},
      {:phoenix, "~> 1.3.4"},
      {:phoenix_ecto, "~> 3.4"},
      {:phoenix_html, "~> 2.12"},
      {:phoenix_live_reload, "~> 1.1", only: :dev},
      {:phoenix_markdown, "~> 0.1"},
      {:phoenix_pubsub, "~> 1.1"},
      {:plug_cowboy, "~> 1.0"},
      {:poison, "~> 3.1"},
      {:poolboy, "~> 1.5"},
      {:postgrex, ">= 0.0.0"},
      {:quantum, "~> 2.3"},
      {:redix, ">= 0.6.1"},
      {:scrivener_ecto, "~> 1.0"},
      {:scrivener_html, "~> 1.7"},
      {:sentry, "~> 6.4"},
      {:timex, "~> 3.0"},
      {:timex_ecto, "~> 3.0"}
    ]
  end
end
| 28.46988
| 79
| 0.519255
|
03959b19a21890a6562c276a28b15ef56ebfdc4c
| 3,433
|
ex
|
Elixir
|
clients/games/lib/google_api/games/v1/model/room_create_request.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/games/lib/google_api/games/v1/model/room_create_request.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/games/lib/google_api/games/v1/model/room_create_request.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Games.V1.Model.RoomCreateRequest do
  @moduledoc """
  This is a JSON template for a room creation request.

  ## Attributes

  *   `autoMatchingCriteria` (*type:* `GoogleApi.Games.V1.Model.RoomAutoMatchingCriteria.t`, *default:* `nil`) - Criteria for auto-matching players into this room.
  *   `capabilities` (*type:* `list(String.t)`, *default:* `nil`) - The capabilities that this client supports for realtime communication.
  *   `clientAddress` (*type:* `GoogleApi.Games.V1.Model.RoomClientAddress.t`, *default:* `nil`) - Client address for the player creating the room.
  *   `invitedPlayerIds` (*type:* `list(String.t)`, *default:* `nil`) - The player IDs to invite to the room.
  *   `kind` (*type:* `String.t`, *default:* `games#roomCreateRequest`) - Uniquely identifies the type of this resource. Value is always the fixed string games#roomCreateRequest.
  *   `networkDiagnostics` (*type:* `GoogleApi.Games.V1.Model.NetworkDiagnostics.t`, *default:* `nil`) - Network diagnostics for the client creating the room.
  *   `requestId` (*type:* `String.t`, *default:* `nil`) - A randomly generated numeric ID. This number is used at the server to ensure that the request is handled correctly across retries.
  *   `variant` (*type:* `integer()`, *default:* `nil`) - The variant / mode of the application to be played. This can be any integer value, or left blank. You should use a small number of variants to keep the auto-matching pool as large as possible.
  """

  # ModelBase supplies the struct definition and the field/2 macro used
  # below; field options map JSON keys onto nested model types or lists.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :autoMatchingCriteria => GoogleApi.Games.V1.Model.RoomAutoMatchingCriteria.t(),
          :capabilities => list(String.t()),
          :clientAddress => GoogleApi.Games.V1.Model.RoomClientAddress.t(),
          :invitedPlayerIds => list(String.t()),
          :kind => String.t(),
          :networkDiagnostics => GoogleApi.Games.V1.Model.NetworkDiagnostics.t(),
          :requestId => String.t(),
          :variant => integer()
        }

  # `as:` decodes the JSON value into the given model struct; `type: :list`
  # marks plain list fields; bare field/1 is a scalar passed through as-is.
  field(:autoMatchingCriteria, as: GoogleApi.Games.V1.Model.RoomAutoMatchingCriteria)
  field(:capabilities, type: :list)
  field(:clientAddress, as: GoogleApi.Games.V1.Model.RoomClientAddress)
  field(:invitedPlayerIds, type: :list)
  field(:kind)
  field(:networkDiagnostics, as: GoogleApi.Games.V1.Model.NetworkDiagnostics)
  field(:requestId)
  field(:variant)
end
defimpl Poison.Decoder, for: GoogleApi.Games.V1.Model.RoomCreateRequest do
  # Delegate protocol decoding to the model's generated decode/2.
  def decode(struct, opts) do
    GoogleApi.Games.V1.Model.RoomCreateRequest.decode(struct, opts)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Games.V1.Model.RoomCreateRequest do
  # Encoding is shared across all generated models via ModelBase.
  def encode(struct, opts) do
    GoogleApi.Gax.ModelBase.encode(struct, opts)
  end
end
| 50.485294
| 250
| 0.7224
|
0395b575e4b69ba19c676d068f0129323e62353e
| 1,381
|
exs
|
Elixir
|
integration_test/mysql/test_helper.exs
|
timgestson/ecto
|
1c1eb6d322db04cfa48a4fc81da1332e91adbc1f
|
[
"Apache-2.0"
] | null | null | null |
integration_test/mysql/test_helper.exs
|
timgestson/ecto
|
1c1eb6d322db04cfa48a4fc81da1332e91adbc1f
|
[
"Apache-2.0"
] | null | null | null |
integration_test/mysql/test_helper.exs
|
timgestson/ecto
|
1c1eb6d322db04cfa48a4fc81da1332e91adbc1f
|
[
"Apache-2.0"
] | null | null | null |
Logger.configure(level: :info)

# Excluded tags correspond to features these MySQL integration tests skip —
# presumably unsupported by the adapter; confirm against the adapter docs.
ExUnit.start exclude: [:array_type, :read_after_writes, :case_sensitive]

# Basic test repo — env must be set before the repo module is compiled/started.
alias Ecto.Integration.TestRepo

Application.put_env(:ecto, TestRepo,
  adapter: Ecto.Adapters.MySQL,
  url: "ecto://root@localhost/ecto_test",
  size: 1,
  max_overflow: 0)

defmodule Ecto.Integration.TestRepo do
  use Ecto.Repo, otp_app: :ecto
end

# Pool repo for transaction and lock tests
alias Ecto.Integration.PoolRepo

Application.put_env(:ecto, PoolRepo,
  adapter: Ecto.Adapters.MySQL,
  url: "ecto://root@localhost/ecto_test",
  size: 10)

defmodule Ecto.Integration.PoolRepo do
  use Ecto.Repo, otp_app: :ecto
end

# Case template: wraps the whole suite in a sandbox transaction on TestRepo,
# rolled back at the end, and restarts it before each individual test.
defmodule Ecto.Integration.Case do
  use ExUnit.CaseTemplate

  setup_all do
    Ecto.Adapters.SQL.begin_test_transaction(TestRepo, [])
    on_exit fn -> Ecto.Adapters.SQL.rollback_test_transaction(TestRepo, []) end
    :ok
  end

  setup do
    Ecto.Adapters.SQL.restart_test_transaction(TestRepo, [])
    :ok
  end
end

# Load support models and migration
Code.require_file "../support/models.exs", __DIR__
Code.require_file "../support/migration.exs", __DIR__

# Load up the repository, start it, and run migrations.
# `down` may fail if the database doesn't exist yet, so its result is ignored;
# everything after it must succeed.
_ = Ecto.Storage.down(TestRepo)
:ok = Ecto.Storage.up(TestRepo)
{:ok, _pid} = TestRepo.start_link
{:ok, _pid} = PoolRepo.start_link
:ok = Ecto.Migrator.up(TestRepo, 0, Ecto.Integration.Migration, log: false)
| 24.660714
| 79
| 0.747285
|
0395d69d408ee13beb7b3ad1dac81530420c6642
| 503
|
ex
|
Elixir
|
lib/central/account/startup.ex
|
badosu/teiserver
|
19b623aeb7c2ab28756405f7486e92b714777c54
|
[
"MIT"
] | 4
|
2021-07-29T16:23:20.000Z
|
2022-02-23T05:34:36.000Z
|
lib/central/account/startup.ex
|
badosu/teiserver
|
19b623aeb7c2ab28756405f7486e92b714777c54
|
[
"MIT"
] | 14
|
2021-08-01T02:36:14.000Z
|
2022-01-30T21:15:03.000Z
|
lib/central/account/startup.ex
|
badosu/teiserver
|
19b623aeb7c2ab28756405f7486e92b714777c54
|
[
"MIT"
] | 7
|
2021-05-13T12:55:28.000Z
|
2022-01-14T06:39:06.000Z
|
defmodule Central.Account.Startup do
  @moduledoc false
  use CentralWeb, :startup

  # Registers account-related quick actions and audit types at boot.
  def startup do
    QuickAction.add_items(quick_actions())
    add_audit_types(audit_types())
  end

  # Quick-action menu entries for logout and account editing.
  defp quick_actions do
    [
      %{label: "Logout", icons: ["far fa-sign-out"], url: "/logout"},
      %{label: "Edit account", icons: ["far fa-lock", :edit], url: "/account/edit"}
    ]
  end

  # Audit-log event type names used by the account subsystem.
  defp audit_types do
    [
      "Account: User password reset",
      "Account: Failed login",
      "Account: Created user",
      "Account: Updated user",
      "Account: Updated user permissions"
    ]
  end
end
| 25.15
| 83
| 0.61829
|
0395d77bfd2ce84771c1f17cc8e56e9b3da69d4c
| 1,546
|
ex
|
Elixir
|
lib/aws/generated/entitlement_marketplace.ex
|
kw7oe/aws-elixir
|
4ba60502dde270c83143822c9964018c7770bad7
|
[
"Apache-2.0"
] | 341
|
2018-04-04T19:06:19.000Z
|
2022-03-25T21:34:23.000Z
|
lib/aws/generated/entitlement_marketplace.ex
|
kw7oe/aws-elixir
|
4ba60502dde270c83143822c9964018c7770bad7
|
[
"Apache-2.0"
] | 82
|
2018-04-04T17:32:33.000Z
|
2022-03-24T15:12:04.000Z
|
lib/aws/generated/entitlement_marketplace.ex
|
kw7oe/aws-elixir
|
4ba60502dde270c83143822c9964018c7770bad7
|
[
"Apache-2.0"
] | 76
|
2018-04-10T20:19:44.000Z
|
2022-03-15T13:49:19.000Z
|
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.Entitlement.Marketplace do
  @moduledoc """
  AWS Marketplace Entitlement Service

  This reference provides descriptions of the AWS Marketplace Entitlement Service
  API.

  AWS Marketplace Entitlement Service is used to determine the entitlement of a
  customer to a given product. An entitlement represents capacity in a product
  owned by the customer. For example, a customer might own some number of users or
  seats in an SaaS application or some amount of data capacity in a multi-tenant
  database.

  ## Getting Entitlement Records

    * *GetEntitlements*- Gets the entitlements for a Marketplace
  product.
  """

  alias AWS.Client
  alias AWS.Request

  # Static service descriptor consumed by the request layer.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2017-01-11",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "entitlement.marketplace",
      global?: false,
      protocol: "json",
      service_id: nil,
      signature_version: "v4",
      signing_name: "aws-marketplace",
      target_prefix: "AWSMPEntitlementService"
    }
  end

  @doc """
  GetEntitlements retrieves entitlement values for a given product.

  The results can be filtered based on customer identifier or product dimensions.
  """
  def get_entitlements(%Client{} = client, input, options \\ []) do
    meta = metadata()
    Request.request_post(client, meta, "GetEntitlements", input, options)
  end
end
| 30.313725
| 82
| 0.723803
|
039618306d025921b1af7cac82e550f0f83e54d6
| 240
|
ex
|
Elixir
|
apps/fz_http/lib/fz_http_web/views/root_view.ex
|
amishakov/firezone
|
cd85b0847ac1792ca00aedab99fbf0f7a520f1a6
|
[
"Apache-2.0"
] | null | null | null |
apps/fz_http/lib/fz_http_web/views/root_view.ex
|
amishakov/firezone
|
cd85b0847ac1792ca00aedab99fbf0f7a520f1a6
|
[
"Apache-2.0"
] | 1
|
2022-03-30T03:57:41.000Z
|
2022-03-30T03:57:41.000Z
|
apps/fz_http/lib/fz_http_web/views/root_view.ex
|
amishakov/firezone
|
cd85b0847ac1792ca00aedab99fbf0f7a520f1a6
|
[
"Apache-2.0"
] | null | null | null |
defmodule FzHttpWeb.RootView do
  use FzHttpWeb, :view

  alias FzCommon.FzCrypto

  # Builds the OIDC authorization URI for `provider`, attaching a freshly
  # generated random `state` parameter on every call.
  def authorization_uri(oidc, provider) do
    oidc.authorization_uri(provider, %{state: FzCrypto.rand_string()})
  end
end
| 17.142857
| 44
| 0.708333
|
0396340e388ad07fcf1bf9598e6a6df06900e283
| 2,565
|
ex
|
Elixir
|
apps/studio/lib/studio/painter.ex
|
danmarcab/deep_painting
|
860c7d02bd6b112fffa199f715e61d895cba6623
|
[
"Apache-2.0"
] | null | null | null |
apps/studio/lib/studio/painter.ex
|
danmarcab/deep_painting
|
860c7d02bd6b112fffa199f715e61d895cba6623
|
[
"Apache-2.0"
] | 11
|
2020-01-28T22:19:10.000Z
|
2022-03-11T23:18:18.000Z
|
apps/studio/lib/studio/painter.ex
|
danmarcab/deep_painting
|
860c7d02bd6b112fffa199f715e61d895cba6623
|
[
"Apache-2.0"
] | null | null | null |
defmodule Studio.Painter do
  @moduledoc """
  GenServer wrapper around an external painter process.

  The painter is attached via a `Port` (created by the configured
  `:painter` module). Each message from the port is parsed as one
  painting iteration, persisted through `Studio`, and optionally
  forwarded to a watcher process.
  """

  use GenServer
  require Logger

  alias Painting.Iteration

  # opts: :watcher (pid notified per iteration), :callback_url (forwarded
  # to the watcher), :name (optional GenServer registration name).
  @spec start_link(String.t, Keyword.t) :: GenServer.on_start
  def start_link(painting_name, opts \\ []) when is_list(opts) do
    watcher = Keyword.get(opts, :watcher)
    callback_url = Keyword.get(opts, :callback_url)

    # Only register a name when the caller asked for one.
    server_opts = case Keyword.fetch(opts, :name) do
      {:ok, name} ->
        [name: name]
      :error ->
        []
    end

    GenServer.start_link(__MODULE__, {painting_name, watcher, callback_url}, server_opts)
  end

  # Looks up the painting, marks it started and opens the port;
  # any lookup failure aborts startup with {:stop, :error}.
  def init({name, watcher, callback_url}) do
    with {:ok, painting} <- Studio.find_painting(name),
         painting <- Painting.start(painting),
         port <- start_port(painting)
    do
      {:ok, %{port: port, painting: painting, watcher: watcher, callback_url: callback_url}}
    else
      _ -> {:stop, :error}
    end
  end

  @doc """
  Asynchronously stops the painter, marking the painting complete.
  """
  def stop(painter) do
    GenServer.cast(painter, :stop)
  end

  # Persist the painting as complete, close the port, then terminate.
  def handle_cast(:stop, %{port: port, painting: painting} = state) do
    Studio.save_painting(Painting.complete(painting))
    Port.close(port)
    {:stop, :normal, state}
  end

  # One message from the port = one iteration: parse it, record it on the
  # painting and save, notify the watcher, then either ask the port to
  # continue or shut down when the painting is complete.
  def handle_info({port, {:data, response}}, %{port: port, painting: painting} = state) do
    Logger.debug("received from port:")
    Logger.debug(inspect(response))

    {:ok, iteration} = parse_iteration(response)
    new_painting = Painting.add_iteration(painting, iteration)
    Studio.save_painting(new_painting)

    # set iteration data in painting and save it
    new_state = %{state | painting: new_painting}

    # send iteration data to watcher
    if state.watcher do
      send(state.watcher, {:painter, state.callback_url, state.painting.name, iteration})
    end

    if keep_painting?(new_state) do
      # "CONT" tells the external process to produce the next iteration.
      Port.command(port, "CONT")
      {:noreply, new_state}
    else
      Port.close(port)
      {:stop, :normal, new_state}
    end
  end

  # The external process died unexpectedly — crash so a supervisor can act.
  def handle_info({port, {:exit_status, status}}, %{port: port}) do
    :erlang.error({:port_exit, status})
  end

  # Ignore any other stray messages.
  def handle_info(_, state), do: {:noreply, state}

  # The port implementation is configurable (e.g. swapped out in tests).
  defp start_port(painting) do
    painter_module = Application.get_env(:studio, :painter)
    painter_module.start(painting)
  end

  defp keep_painting?(%{painting: painting}) do
    painting.status != :complete
  end

  # Expects a JSON payload with "file_name" and "loss" (loss as a string
  # parseable to float); returns {:ok, Iteration.t} or a non-matching term.
  defp parse_iteration(data) do
    with {:ok, %{"file_name" => file_name, "loss" => loss}} <- Poison.decode(data),
         {loss, ""} <- Float.parse(loss)
    do
      {:ok, Iteration.new(file_name, loss)}
    end
  end
end
| 27.287234
| 92
| 0.659259
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.