hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
93de1bab63e7482a4578198c83d9907f665bcf4a
| 170
|
exs
|
Elixir
|
priv/repo/migrations/20171023142220_add_image_url_to_users.exs
|
lucab85/audioslides.io
|
cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9
|
[
"MIT"
] | 17
|
2017-11-14T14:03:18.000Z
|
2021-12-10T04:18:48.000Z
|
priv/repo/migrations/20171023142220_add_image_url_to_users.exs
|
lucab85/audioslides.io
|
cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9
|
[
"MIT"
] | 21
|
2017-11-19T13:38:07.000Z
|
2022-02-10T00:11:14.000Z
|
priv/repo/migrations/20171023142220_add_image_url_to_users.exs
|
lucab85/audioslides.io
|
cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9
|
[
"MIT"
] | 2
|
2019-09-03T03:32:13.000Z
|
2021-02-23T21:52:57.000Z
|
defmodule Platform.Repo.Migrations.AddImageUrlToUsers do
  # Migration: adds a nullable `image_url` string column to the `users` table.
  # Reversible automatically via Ecto's `change/0` (add <-> remove).
  use Ecto.Migration

  def change do
    alter table(:users) do
      add :image_url, :string
    end
  end
end
| 17
| 56
| 0.717647
|
93de31696163b322e65bb0036daca570268444fc
| 1,544
|
ex
|
Elixir
|
clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v11/model/filter_pair.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v11/model/filter_pair.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v11/model/filter_pair.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DoubleClickBidManager.V11.Model.FilterPair do
  @moduledoc """
  Filter used to match traffic data in your report.

  ## Attributes

  * `type` (*type:* `String.t`, *default:* `nil`) - Filter type.
  * `value` (*type:* `String.t`, *default:* `nil`) - Filter value.
  """

  # Auto-generated model (see file header): ModelBase defines the struct,
  # a `decode/2`, and the `field/1` macro used below. Do not edit manually.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :type => String.t() | nil,
          :value => String.t() | nil
        }

  field(:type)
  field(:value)
end
# Delegates Poison decoding to the model's own `decode/2` (provided by
# GoogleApi.Gax.ModelBase) so nested fields deserialize into proper structs.
defimpl Poison.Decoder, for: GoogleApi.DoubleClickBidManager.V11.Model.FilterPair do
  def decode(value, options) do
    GoogleApi.DoubleClickBidManager.V11.Model.FilterPair.decode(value, options)
  end
end
# Encodes the struct via the shared ModelBase encoder (strips nil fields etc.
# per the generator's conventions — see GoogleApi.Gax.ModelBase).
defimpl Poison.Encoder, for: GoogleApi.DoubleClickBidManager.V11.Model.FilterPair do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.88
| 84
| 0.718912
|
93de576f603809a753d67622a6729531f50aa9fb
| 2,299
|
ex
|
Elixir
|
lib/airline_api_aggregator/afkl.ex
|
SanketSapkal/airline_api_aggregator
|
fd4d5243c7ee936ab2fa07281ef1db6badec8d14
|
[
"Apache-2.0"
] | null | null | null |
lib/airline_api_aggregator/afkl.ex
|
SanketSapkal/airline_api_aggregator
|
fd4d5243c7ee936ab2fa07281ef1db6badec8d14
|
[
"Apache-2.0"
] | null | null | null |
lib/airline_api_aggregator/afkl.ex
|
SanketSapkal/airline_api_aggregator
|
fd4d5243c7ee936ab2fa07281ef1db6badec8d14
|
[
"Apache-2.0"
] | null | null | null |
defmodule AirlineAPIAggregator.AFKL do
  @moduledoc """
  AFKL airlines aggregator module. Fetches data from airlines. Currently fetches
  the flight data from an origin airport to a destination airport on the
  specified date.
  """
  import SweetXml

  @behaviour AirlineAPIAggregator.APIBehaviour

  @airline_code "AFKL"

  @doc """
  Get the direct cheapest flight (from AFKL airlines) between two airports on
  the given date.

  Returns `{"AFKL", price}` on success, or `{:error, reason}` when the HTTP
  request fails or no flights are found.
  """
  @spec get_cheapest_offer(String.t, String.t, String.t) :: tuple
  def get_cheapest_offer(origin, destination, date) do
    Application.get_env(:airline_api_aggregator, :afkl)[:body]
    |> AirlineAPIAggregator.prepare_body(origin, destination, date)
    |> get_data()
  end

  @doc """
  Get the cheapest flight from the given xml flight data. The XML is parsed using
  SweetXML library. The XML parsing is specific to AFKL airlines.
  """
  def parse_xml_and_get_cheapest_offer(xml) do
    xml
    |> xpath(~x"//AirlineOffers/Offer/TotalPrice/DetailCurrencyPrice/Total/text()"l)
    |> get_min_ticket()
  end

  #
  # Request data from airlines using http post requests. HTTPoison is used to
  # compose the http requests.
  # Status code other than 200 is considered as error.
  #
  defp get_data(body) do
    url = Application.get_env(:airline_api_aggregator, :afkl)[:url]
    headers = Application.get_env(:airline_api_aggregator, :afkl)[:headers]

    case HTTPoison.post(url, body, headers) do
      {:ok, %HTTPoison.Response{body: xml, status_code: 200}} ->
        parse_xml_and_get_cheapest_offer(xml)

      {:ok, %HTTPoison.Response{status_code: other_status_code}} ->
        IO.puts("Failed with status_code: #{other_status_code}")
        {:error, "Failed with status_code: #{other_status_code}"}

      {:error, reason} ->
        # Bug fix: `reason` here is an %HTTPoison.Error{} struct, which has no
        # String.Chars implementation — plain interpolation raised
        # Protocol.UndefinedError instead of logging the failure.
        IO.puts("Failed with reason: #{inspect(reason)}")
        {:error, reason}
    end
  end

  #
  # Case where no flights are found for the route on the specified date.
  #
  defp get_min_ticket([]) do
    {:error, "No flights found."}
  end

  #
  # Flights are found in response xml from airline
  #
  defp get_min_ticket(ticket_list) do
    # NOTE(review): String.to_float/1 raises on values without a decimal point
    # (e.g. "100"); assumes AFKL always returns prices like "123.45" — confirm.
    cheapest_ticket =
      ticket_list
      |> Enum.map(fn price ->
        price |> to_string |> String.to_float
      end)
      |> Enum.min

    {@airline_code, cheapest_ticket}
  end
end
| 29.857143
| 84
| 0.695085
|
93de5c7b521c19f7e79cc13da1e992dc656fe720
| 681
|
exs
|
Elixir
|
test/support/test_error_view.exs
|
RiverFinancial/sentry-elixir
|
5ce0a8fc1d5523755dabeb7e629c955248c0d99e
|
[
"MIT"
] | 502
|
2016-09-03T14:23:53.000Z
|
2022-03-23T17:36:37.000Z
|
test/support/test_error_view.exs
|
RiverFinancial/sentry-elixir
|
5ce0a8fc1d5523755dabeb7e629c955248c0d99e
|
[
"MIT"
] | 350
|
2016-08-29T18:53:26.000Z
|
2022-03-27T15:45:38.000Z
|
test/support/test_error_view.exs
|
RiverFinancial/sentry-elixir
|
5ce0a8fc1d5523755dabeb7e629c955248c0d99e
|
[
"MIT"
] | 147
|
2016-09-22T13:30:57.000Z
|
2022-03-14T13:24:14.000Z
|
defmodule Sentry.ErrorView do
  # Test-support error view: when the last captured Sentry event originated
  # from the Plug integration, render the Sentry user-feedback dialog snippet;
  # otherwise render the plain string "error".
  import Phoenix.HTML, only: [sigil_E: 2, raw: 1]

  # Renders regardless of template name or assigns; output depends solely on
  # the last event id/source recorded by Sentry.
  def render(_template, _assigns) do
    case Sentry.get_last_event_id_and_source() do
      {event_id, :plug} -> report_dialog(event_id)
      _ -> "error"
    end
  end

  # Builds the safe-HTML snippet that loads the Sentry browser SDK and opens
  # the report dialog for the given event.
  defp report_dialog(event_id) do
    dialog_opts = Jason.encode!(%{title: "Testing", eventId: event_id})

    ~E"""
    <script src="https://browser.sentry-cdn.com/5.9.1/bundle.min.js" integrity="sha384-/x1aHz0nKRd6zVUazsV6CbQvjJvr6zQL2CHbQZf3yoLkezyEtZUpqUNnOLW9Nt3v" crossorigin="anonymous"></script>
    <script>
      Sentry.init({ dsn: '<%= Sentry.Config.dsn() %>' });
      Sentry.showReportDialog(<%= raw dialog_opts %>)
    </script>
    """
  end
end
| 28.375
| 190
| 0.590308
|
93de60e9afb3b1f799d4523dd1c24a71d7dca322
| 1,790
|
ex
|
Elixir
|
lib/ex_check/config/generator.ex
|
gerbal/ex_check
|
1247075a64d17f69c3e6e9699bd95664cc128466
|
[
"MIT"
] | 225
|
2019-07-21T14:44:17.000Z
|
2022-03-31T11:08:07.000Z
|
lib/ex_check/config/generator.ex
|
gerbal/ex_check
|
1247075a64d17f69c3e6e9699bd95664cc128466
|
[
"MIT"
] | 23
|
2019-07-30T03:05:42.000Z
|
2022-03-06T18:11:50.000Z
|
lib/ex_check/config/generator.ex
|
gerbal/ex_check
|
1247075a64d17f69c3e6e9699bd95664cc128466
|
[
"MIT"
] | 9
|
2019-11-23T23:04:39.000Z
|
2022-03-29T00:54:34.000Z
|
defmodule ExCheck.Config.Generator do
  @moduledoc false

  alias ExCheck.Printer
  alias ExCheck.Project

  # Template written verbatim to the generated `.check.exs`; every option ships
  # commented-out so the defaults stay in effect until the user opts in.
  @generated_config """
  [
  ## don't run tools concurrently
  # parallel: false,
  ## don't print info about skipped tools
  # skipped: false,
  ## always run tools in fix mode (put it in ~/.check.exs locally, not in project config)
  # fix: true,
  ## don't retry automatically even if last run resulted in failures
  # retry: false,
  ## list of tools (see `mix check` docs for a list of default curated tools)
  tools: [
  ## curated tools may be disabled (e.g. the check for compilation warnings)
  # {:compiler, false},
  ## ...or have command & args adjusted (e.g. enable skip comments for sobelow)
  # {:sobelow, "mix sobelow --exit --skip"},
  ## ...or reordered (e.g. to see output from dialyzer before others)
  # {:dialyzer, order: -1},
  ## ...or reconfigured (e.g. disable parallel execution of ex_unit in umbrella)
  # {:ex_unit, umbrella: [parallel: false]},
  ## custom new tools may be added (Mix tasks or arbitrary commands)
  # {:my_task, "mix my_task", env: %{"MIX_ENV" => "prod"}},
  # {:my_tool, ["my_tool", "arg with spaces"]}
  ]
  ]
  """

  @config_filename ".check.exs"

  # Writes the template to `<mix root>/.check.exs` unless one already exists;
  # prints a colored status line either way.
  # sobelow_skip ["Traversal.FileModule"]
  def generate do
    target_path =
      Project.get_mix_root_dir()
      |> Path.join(@config_filename)
      |> Path.expand()

    # Path shown to the user, relative to where the task was invoked.
    formatted_path = Path.relative_to_cwd(target_path)

    if File.exists?(target_path) do
      Printer.info([:yellow, "* ", :bright, formatted_path, :normal, " already exists, skipped"])
    else
      Printer.info([:green, "* creating ", :bright, formatted_path])
      File.write!(target_path, @generated_config)
    end
  end
end
| 28.870968
| 97
| 0.636872
|
93de78252c58e78916de65929c2121e2cbd97e5e
| 371
|
exs
|
Elixir
|
market_api/priv/repo/migrations/20160515003314_create_product.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
market_api/priv/repo/migrations/20160515003314_create_product.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
market_api/priv/repo/migrations/20160515003314_create_product.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
defmodule MarketApi.Repo.Migrations.CreateProduct do
  use Ecto.Migration

  # Creates the `products` table (with a foreign key to `markets`) and an index
  # covering that foreign key for efficient per-market lookups.
  def change do
    create table(:products) do
      add :name, :string
      add :barcode, :string
      add :image, :string
      add :price, :decimal
      add :market_id, references(:markets, on_delete: :nothing)

      # Fixed: bare `timestamps` (no parens) triggers Elixir's ambiguous-call
      # deprecation warning; behavior is identical with parentheses.
      timestamps()
    end

    create index(:products, [:market_id])
  end
end
| 20.611111
| 63
| 0.660377
|
93de7ebef112972d8e1dc925e6c8b82ddfd3bc1d
| 1,732
|
ex
|
Elixir
|
lib/hn_comments_game_web/telemetry.ex
|
ldd/hn_comments_game
|
5f720621c549a3737c155f9d59fa8277491f3b16
|
[
"MIT"
] | 5
|
2020-05-15T17:06:22.000Z
|
2020-06-20T12:05:46.000Z
|
lib/hn_comments_game_web/telemetry.ex
|
ldd/hn_comments_game
|
5f720621c549a3737c155f9d59fa8277491f3b16
|
[
"MIT"
] | null | null | null |
lib/hn_comments_game_web/telemetry.ex
|
ldd/hn_comments_game
|
5f720621c549a3737c155f9d59fa8277491f3b16
|
[
"MIT"
] | null | null | null |
defmodule HnCommentsGameWeb.Telemetry do
  # Telemetry supervisor: runs a :telemetry_poller for periodic measurements
  # and declares the metric definitions consumed by reporters.
  use Supervisor
  import Telemetry.Metrics

  # Starts the supervisor, registered under this module's name.
  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end

  @impl true
  def init(_arg) do
    children = [
      # Invoke `periodic_measurements/0` every 10 seconds.
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
      # Add reporters as children of your supervision tree.
      # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  # Metric definitions handed to telemetry reporters; all durations are
  # converted from native time units to milliseconds.
  def metrics do
    [
      # Phoenix Metrics
      summary("phoenix.endpoint.stop.duration",
        unit: {:native, :millisecond}
      ),
      summary("phoenix.router_dispatch.stop.duration",
        tags: [:route],
        unit: {:native, :millisecond}
      ),

      # Database Metrics
      summary("hn_comments_game.repo.query.total_time", unit: {:native, :millisecond}),
      summary("hn_comments_game.repo.query.decode_time", unit: {:native, :millisecond}),
      summary("hn_comments_game.repo.query.query_time", unit: {:native, :millisecond}),
      summary("hn_comments_game.repo.query.queue_time", unit: {:native, :millisecond}),
      summary("hn_comments_game.repo.query.idle_time", unit: {:native, :millisecond}),

      # VM Metrics
      summary("vm.memory.total", unit: {:byte, :kilobyte}),
      summary("vm.total_run_queue_lengths.total"),
      summary("vm.total_run_queue_lengths.cpu"),
      summary("vm.total_run_queue_lengths.io")
    ]
  end

  # Extra functions polled on the period configured in init/1; empty for now.
  defp periodic_measurements do
    [
      # A module, function and arguments to be invoked periodically.
      # This function must call :telemetry.execute/3 and a metric must be added above.
      # {HnCommentsGameWeb, :count_users, []}
    ]
  end
end
| 32.074074
| 88
| 0.676674
|
93de81c48296a7f468126122dba01fcfeb389a21
| 1,520
|
ex
|
Elixir
|
lib/goldie/location.ex
|
scatterbrain/goldie
|
db649f9555d453541d01d0707d86b41f41156640
|
[
"MIT"
] | null | null | null |
lib/goldie/location.ex
|
scatterbrain/goldie
|
db649f9555d453541d01d0707d86b41f41156640
|
[
"MIT"
] | null | null | null |
lib/goldie/location.ex
|
scatterbrain/goldie
|
db649f9555d453541d01d0707d86b41f41156640
|
[
"MIT"
] | null | null | null |
defmodule Goldie.Location do
  @moduledoc """
  Entity location in the world
  """
  require Logger

  #defstruct [:from, :to, :ppos, :updated]

  @doc """
  Builds a location map for a path from `{from_x, from_y}` to `{to_x, to_y}`,
  with zero progress and the update timestamp set to now.
  """
  # Fixed: this spec was previously misnamed `xy_distance`, attaching a
  # duplicate spec to the wrong function and leaving `new/4` unspecced.
  @spec new(number, number, number, number) :: map
  def new(from_x, from_y, to_x, to_y) do
    %{
      from: %{ x: from_x, y: from_y },
      to: %{ x: to_x, y: to_y },
      ppos: 0.0, ## Current progress on the path
      updated: Goldie.Utils.timestamp_ms() ##When was the position updated (now)
    }
  end

  @doc """
  Advances `loc.from` along the path toward `loc.to` based on `velocity`
  (units per second) and the time elapsed since `loc.updated`.
  """
  @spec interpolate(map, number) :: map
  def interpolate(loc, velocity) do
    now = Goldie.Utils.timestamp_ms()
    time_passed = now - loc.updated
    distance = xy_distance(loc.from.x, loc.from.y, loc.to.x, loc.to.y)
    {x, y} = do_interpolate(loc, velocity, distance, time_passed)
    from = %{ loc.from | x: x, y: y}
    %{loc | from: from, updated: now }
  end

  @doc """
  Distance between two points
  """
  @spec xy_distance(number, number, number, number) :: number
  def xy_distance(from_x, from_y, to_x, to_y) do
    :math.sqrt(:math.pow(to_x - from_x, 2) + :math.pow(to_y - from_y, 2))
  end

  # Zero distance: nothing to interpolate, stay at the current position.
  # NOTE(review): the 0.0 clause only matches a float zero, which is what
  # :math.sqrt/1 returns — integer 0 cannot occur here.
  @spec do_interpolate(map, number, number, number) :: {number, number}
  defp do_interpolate(loc, _velocity, 0.0, _time_passed), do: {loc.from.x, loc.from.y}
  defp do_interpolate(loc, velocity, distance, time_passed) do
    step = (time_passed / 1000.0) * velocity # Velocity is given as units / second
    # Clamp progress to 1.0 so we never overshoot the destination.
    t = Goldie.Utils.min(1.0, step / distance)
    Graphmath.Vec2.lerp({loc.from.x, loc.from.y}, {loc.to.x, loc.to.y}, t)
  end
end
| 33.777778
| 86
| 0.643421
|
93de8dd53ed7f1f24922af2e3f511b6f4f52a748
| 441
|
ex
|
Elixir
|
apps/general/lib/general/router.ex
|
bornmeyer/janus_signaling
|
cbab905aaa844a2762d4647f9363370cecd3db22
|
[
"Apache-2.0"
] | null | null | null |
apps/general/lib/general/router.ex
|
bornmeyer/janus_signaling
|
cbab905aaa844a2762d4647f9363370cecd3db22
|
[
"Apache-2.0"
] | null | null | null |
apps/general/lib/general/router.ex
|
bornmeyer/janus_signaling
|
cbab905aaa844a2762d4647f9363370cecd3db22
|
[
"Apache-2.0"
] | null | null | null |
defmodule General.Router do
  # Minimal Plug router: parses JSON request bodies and currently answers 404
  # to every route (the HTML landing page below is commented out).
  use Plug.Router
  require EEx

  plug :match
  plug Plug.Parsers,
    parsers: [:json],
    pass: ["application/json"],
    json_decoder: Jason
  plug :dispatch

  #EEx.function_from_file(:defp, :application_html, "lib/application.html.eex", [])

  #get "/" do
  #  send_resp(conn, 200, application_html())
  #end

  # Catch-all: any unmatched request gets a plain 404 response.
  match _ do
    send_resp(conn, 404, "404")
  end
end
| 21
| 85
| 0.600907
|
93de94dd11cd6f3b91c481851d428627965bd850
| 1,988
|
exs
|
Elixir
|
config/prod.exs
|
JesseHerrick/phoenix_liveview_gps
|
711b0cd3eb7277e12be3bac26aa950914c4cf8e7
|
[
"MIT"
] | 1
|
2021-03-28T08:22:40.000Z
|
2021-03-28T08:22:40.000Z
|
config/prod.exs
|
JesseHerrick/phoenix_liveview_gps
|
711b0cd3eb7277e12be3bac26aa950914c4cf8e7
|
[
"MIT"
] | null | null | null |
config/prod.exs
|
JesseHerrick/phoenix_liveview_gps
|
711b0cd3eb7277e12be3bac26aa950914c4cf8e7
|
[
"MIT"
] | 1
|
2021-01-15T12:40:12.000Z
|
2021-01-15T12:40:12.000Z
|
# Production configuration for the :gps application.
# NOTE(review): `use Mix.Config` is deprecated in favor of `import Config`
# on Elixir >= 1.9 — confirm the project's minimum Elixir version before changing.
use Mix.Config

# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :gps, GpsWeb.Endpoint,
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :gps, GpsWeb.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [
#         port: 443,
#         cipher_suite: :strong,
#         keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#         certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
#         transport_options: [socket_opts: [:inet6]]
#       ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
#     config :gps, GpsWeb.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 35.5
| 66
| 0.711268
|
93deb54c58496d9265516542f36cccf071fcd702
| 1,364
|
exs
|
Elixir
|
server/config/config.exs
|
randycoulman/freedomAccount
|
c1c51a765052aa318ad3a504a396e8d07a770195
|
[
"MIT"
] | 6
|
2019-04-03T19:16:01.000Z
|
2020-08-10T09:38:24.000Z
|
server/config/config.exs
|
randycoulman/freedomAccount
|
c1c51a765052aa318ad3a504a396e8d07a770195
|
[
"MIT"
] | 32
|
2019-03-19T02:45:45.000Z
|
2021-12-05T06:58:07.000Z
|
server/config/config.exs
|
randycoulman/freedomAccount
|
c1c51a765052aa318ad3a504a396e8d07a770195
|
[
"MIT"
] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.

# General application configuration
import Config

config :freedom_account,
  ecto_repos: [FreedomAccount.Repo]

# Binary-id primary keys and UTC timestamps for all generated migrations.
config :freedom_account, FreedomAccount.Repo,
  migration_primary_key: [type: :binary_id],
  migration_timestamps: [type: :utc_datetime]

# NOTE(review): secret_key and secret_key_base are committed in plain text
# here; presumably dev-only defaults — confirm they are overridden per
# environment (e.g. in runtime/prod secret config) and rotated if exposed.
config :freedom_account, FreedomAccountWeb.Authentication,
  issuer: "freedom_account",
  secret_key: "Q6F60egwfiDH3fr1CcYR4qf9a3LQVy4RFOdyfb0pqWYrZDyznxQduKiiKLnGOWm5"

# Configures the endpoint
config :freedom_account, FreedomAccountWeb.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "odok5kW5BGc7DH/58dbxwKM7RJWjsJ7xc6kj3mY1o1LMxlBjeZMXyQYKQjRLfV6x",
  render_errors: [view: FreedomAccountWeb.ErrorView, accepts: ~w(json)],
  pubsub_server: FreedomAccount.PubSub,
  live_view: [signing_salt: "PcFHsb2S"]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 34.1
| 86
| 0.792522
|
93dec0156a0c7495af27e58465db037519569cce
| 1,319
|
exs
|
Elixir
|
mix.exs
|
kianmeng/burnex
|
bc9c475e43e60a2faf3240b1c101bde32d50e86f
|
[
"MIT"
] | null | null | null |
mix.exs
|
kianmeng/burnex
|
bc9c475e43e60a2faf3240b1c101bde32d50e86f
|
[
"MIT"
] | null | null | null |
mix.exs
|
kianmeng/burnex
|
bc9c475e43e60a2faf3240b1c101bde32d50e86f
|
[
"MIT"
] | null | null | null |
defmodule Burnex.Mixfile do
  use Mix.Project

  # Mix project definition; the version is read from the VERSION file so
  # releases (via eliver) bump a single source of truth.
  def project do
    [
      app: :burnex,
      version: String.trim(File.read!("VERSION")),
      elixir: "~> 1.7",
      description: "Elixir burner email (temporary address) detector",
      start_permanent: Mix.env() == :prod,
      package: package(),
      deps: deps(),
      docs: [main: "Burnex"],
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [coveralls: :test],
      dialyzer: [
        remove_defaults: [:unknown],
        plt_file: {:no_warn, "priv/plts/dialyzer.plt"}
      ]
    ]
  end

  # No extra applications or mod callback: the library is purely functional.
  def application do
    []
  end

  defp deps do
    [
      # Dev
      {:credo, "~> 1.5.0", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.0.0-rc.3", only: :dev, runtime: false},
      {:ex_doc, "~> 0.19", only: :dev, runtime: false},
      {:eliver, "~> 2.0.0", only: :dev},
      # Testing
      {:excoveralls, "~> 0.10", only: :test},
      {:stream_data, "~> 0.1", only: :test}
    ]
  end

  # Hex package metadata; the providers list data file ships with the package.
  defp package do
    [
      files: ["lib", "priv/burner-email-providers", "mix.exs", "README.md", "LICENSE", "VERSION"],
      maintainers: ["Benjamin Piouffle"],
      licenses: ["MIT"],
      links: %{
        "GitHub" => "https://github.com/Betree/burnex",
        "Docs" => "https://hexdocs.pm/burnex"
      }
    ]
  end
end
| 24.886792
| 98
| 0.536012
|
93def3f646cfecabe0af2760f5a0aa5816b4f0e3
| 263
|
exs
|
Elixir
|
test/test_helper.exs
|
smpallen99/ucx_chat
|
0dd98d0eb5e0537521844520ea2ba63a08fd3f19
|
[
"MIT"
] | 60
|
2017-05-09T19:08:26.000Z
|
2021-01-20T11:09:42.000Z
|
test/test_helper.exs
|
smpallen99/ucx_chat
|
0dd98d0eb5e0537521844520ea2ba63a08fd3f19
|
[
"MIT"
] | 6
|
2017-05-10T15:43:16.000Z
|
2020-07-15T07:14:41.000Z
|
test/test_helper.exs
|
smpallen99/ucx_chat
|
0dd98d0eb5e0537521844520ea2ba63a08fd3f19
|
[
"MIT"
] | 10
|
2017-05-10T04:13:54.000Z
|
2020-12-28T10:30:27.000Z
|
# Test bootstrap: start test-only applications, configure ExUnit, and put the
# SQL sandbox into manual checkout mode.

# ex_machina must be running for factories; crash loudly if it can't start.
{:ok, _} = Application.ensure_all_started(:ex_machina)

# Hound (browser automation) is best-effort: its result is deliberately
# ignored so the non-integration suite still runs without a webdriver.
Application.ensure_all_started(:hound)

# Merged the two configure calls (ExUnit.configure/1 merges options, so this
# is behavior-identical): no global timeout, and pending/integration tests
# are excluded unless explicitly included via `mix test --include`.
ExUnit.configure(timeout: :infinity, exclude: [pending: true, integration: true])

# Fixed: parenthesized `ExUnit.start()` to avoid the bare-call deprecation
# warning on modern Elixir.
ExUnit.start()

# Each test must explicitly check out its own sandboxed DB connection.
Ecto.Adapters.SQL.Sandbox.mode(UcxChat.Repo, :manual)
| 26.3
| 61
| 0.790875
|
93df0ae819ad738be4dae31ae984a09032f5bc7c
| 75,439
|
exs
|
Elixir
|
integration_test/cases/repo.exs
|
nathanl/ecto
|
774216ea4be4dbfc35836be19519a376b80fc392
|
[
"Apache-2.0"
] | null | null | null |
integration_test/cases/repo.exs
|
nathanl/ecto
|
774216ea4be4dbfc35836be19519a376b80fc392
|
[
"Apache-2.0"
] | 1
|
2021-03-09T16:43:23.000Z
|
2021-03-09T16:43:23.000Z
|
integration_test/cases/repo.exs
|
nathanl/ecto
|
774216ea4be4dbfc35836be19519a376b80fc392
|
[
"Apache-2.0"
] | null | null | null |
defmodule Ecto.Integration.RepoTest do
use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true)
alias Ecto.Integration.TestRepo
import Ecto.Query
alias Ecto.Integration.Post
alias Ecto.Integration.Order
alias Ecto.Integration.User
alias Ecto.Integration.Comment
alias Ecto.Integration.Permalink
alias Ecto.Integration.Custom
alias Ecto.Integration.Barebone
alias Ecto.Integration.CompositePk
alias Ecto.Integration.PostUserCompositePk
test "returns already started for started repos" do
assert {:error, {:already_started, _}} = TestRepo.start_link()
end
test "supports unnamed repos" do
assert {:ok, pid} = TestRepo.start_link(name: nil)
assert Ecto.Repo.Queryable.all(pid, Post, []) == []
end
test "all empty" do
assert TestRepo.all(Post) == []
assert TestRepo.all(from p in Post) == []
end
test "all with in" do
TestRepo.insert!(%Post{title: "hello"})
# Works without the query cache.
assert_raise Ecto.Query.CastError, fn ->
TestRepo.all(from p in Post, where: p.title in ^nil)
end
assert [] = TestRepo.all from p in Post, where: p.title in []
assert [] = TestRepo.all from p in Post, where: p.title in ["1", "2", "3"]
assert [] = TestRepo.all from p in Post, where: p.title in ^[]
assert [_] = TestRepo.all from p in Post, where: p.title not in []
assert [_] = TestRepo.all from p in Post, where: p.title in ["1", "hello", "3"]
assert [_] = TestRepo.all from p in Post, where: p.title in ["1", ^"hello", "3"]
assert [_] = TestRepo.all from p in Post, where: p.title in ^["1", "hello", "3"]
# Still doesn't work after the query cache.
assert_raise Ecto.Query.CastError, fn ->
TestRepo.all(from p in Post, where: p.title in ^nil)
end
end
test "all using named from" do
TestRepo.insert!(%Post{title: "hello"})
query =
from(p in Post, as: :post)
|> where([post: p], p.title == "hello")
assert [_] = TestRepo.all query
end
test "all without schema" do
%Post{} = TestRepo.insert!(%Post{title: "title1"})
%Post{} = TestRepo.insert!(%Post{title: "title2"})
assert ["title1", "title2"] =
TestRepo.all(from(p in "posts", order_by: p.title, select: p.title))
assert [_] =
TestRepo.all(from(p in "posts", where: p.title == "title1", select: p.id))
end
test "all shares metadata" do
TestRepo.insert!(%Post{title: "title1"})
TestRepo.insert!(%Post{title: "title2"})
[post1, post2] = TestRepo.all(Post)
assert :erts_debug.same(post1.__meta__, post2.__meta__)
[new_post1, new_post2] = TestRepo.all(Post)
assert :erts_debug.same(post1.__meta__, new_post1.__meta__)
assert :erts_debug.same(post2.__meta__, new_post2.__meta__)
end
@tag :invalid_prefix
test "all with invalid prefix" do
assert catch_error(TestRepo.all("posts", prefix: "oops"))
end
test "insert, update and delete" do
post = %Post{title: "insert, update, delete", visits: 1}
meta = post.__meta__
assert %Post{} = inserted = TestRepo.insert!(post)
assert %Post{} = updated = TestRepo.update!(Ecto.Changeset.change(inserted, visits: 2))
deleted_meta = put_in meta.state, :deleted
assert %Post{__meta__: ^deleted_meta} = TestRepo.delete!(updated)
loaded_meta = put_in meta.state, :loaded
assert %Post{__meta__: ^loaded_meta} = TestRepo.insert!(post)
post = TestRepo.one(Post)
assert post.__meta__.state == :loaded
assert post.inserted_at
end
test "insert, update and delete with field source" do
permalink = %Permalink{url: "url"}
assert %Permalink{url: "url"} = inserted =
TestRepo.insert!(permalink)
assert %Permalink{url: "new"} = updated =
TestRepo.update!(Ecto.Changeset.change(inserted, url: "new"))
assert %Permalink{url: "new"} =
TestRepo.delete!(updated)
end
@tag :composite_pk
test "insert, update and delete with composite pk" do
c1 = TestRepo.insert!(%CompositePk{a: 1, b: 2, name: "first"})
c2 = TestRepo.insert!(%CompositePk{a: 1, b: 3, name: "second"})
assert CompositePk |> first |> TestRepo.one == c1
assert CompositePk |> last |> TestRepo.one == c2
changeset = Ecto.Changeset.cast(c1, %{name: "first change"}, ~w(name)a)
c1 = TestRepo.update!(changeset)
assert TestRepo.get_by!(CompositePk, %{a: 1, b: 2}) == c1
TestRepo.delete!(c2)
assert TestRepo.all(CompositePk) == [c1]
assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
TestRepo.get(CompositePk, [])
end
assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
TestRepo.get!(CompositePk, [1, 2])
end
end
@tag :composite_pk
test "insert, update and delete with associated composite pk" do
user = TestRepo.insert!(%User{})
post = TestRepo.insert!(%Post{title: "post title"})
user_post = TestRepo.insert!(%PostUserCompositePk{user_id: user.id, post_id: post.id})
assert TestRepo.get_by!(PostUserCompositePk, [user_id: user.id, post_id: post.id]) == user_post
TestRepo.delete!(user_post)
assert TestRepo.all(PostUserCompositePk) == []
end
@tag :invalid_prefix
test "insert, update and delete with invalid prefix" do
post = TestRepo.insert!(%Post{})
changeset = Ecto.Changeset.change(post, title: "foo")
assert catch_error(TestRepo.insert(%Post{}, prefix: "oops"))
assert catch_error(TestRepo.update(changeset, prefix: "oops"))
assert catch_error(TestRepo.delete(changeset, prefix: "oops"))
# Check we can still insert the post after the invalid prefix attempt
assert %Post{id: _} = TestRepo.insert!(%Post{})
end
test "insert and update with changeset" do
# On insert we merge the fields and changes
changeset = Ecto.Changeset.cast(%Post{visits: 13, title: "wrong"},
%{"title" => "hello", "temp" => "unknown"}, ~w(title temp)a)
post = TestRepo.insert!(changeset)
assert %Post{visits: 13, title: "hello", temp: "unknown"} = post
assert %Post{visits: 13, title: "hello", temp: "temp"} = TestRepo.get!(Post, post.id)
# On update we merge only fields, direct schema changes are discarded
changeset = Ecto.Changeset.cast(%{post | visits: 17},
%{"title" => "world", "temp" => "unknown"}, ~w(title temp)a)
assert %Post{visits: 17, title: "world", temp: "unknown"} = TestRepo.update!(changeset)
assert %Post{visits: 13, title: "world", temp: "temp"} = TestRepo.get!(Post, post.id)
end
test "insert and update with empty changeset" do
# On insert we merge the fields and changes
changeset = Ecto.Changeset.cast(%Permalink{}, %{}, ~w())
assert %Permalink{} = permalink = TestRepo.insert!(changeset)
# Assert we can update the same value twice,
# without changes, without triggering stale errors.
changeset = Ecto.Changeset.cast(permalink, %{}, ~w())
assert TestRepo.update!(changeset) == permalink
assert TestRepo.update!(changeset) == permalink
end
@tag :no_primary_key
test "insert with no primary key" do
assert %Barebone{num: nil} = TestRepo.insert!(%Barebone{})
assert %Barebone{num: 13} = TestRepo.insert!(%Barebone{num: 13})
end
@tag :read_after_writes
test "insert and update with changeset read after writes" do
defmodule RAW do
use Ecto.Schema
schema "comments" do
field :text, :string
field :lock_version, :integer, read_after_writes: true
end
end
changeset = Ecto.Changeset.cast(struct(RAW, %{}), %{}, ~w())
# If the field is nil, we will not send it
# and read the value back from the database.
assert %{id: cid, lock_version: 1} = raw = TestRepo.insert!(changeset)
# Set the counter to 11, so we can read it soon
TestRepo.update_all from(u in RAW, where: u.id == ^cid), set: [lock_version: 11]
# We will read back on update too
changeset = Ecto.Changeset.cast(raw, %{"text" => "0"}, ~w(text)a)
assert %{id: ^cid, lock_version: 11, text: "0"} = TestRepo.update!(changeset)
end
test "insert autogenerates for custom type" do
post = TestRepo.insert!(%Post{uuid: nil})
assert byte_size(post.uuid) == 36
assert TestRepo.get_by(Post, uuid: post.uuid) == post
end
@tag :id_type
test "insert autogenerates for custom id type" do
defmodule ID do
use Ecto.Schema
@primary_key {:id, CustomPermalink, autogenerate: true}
schema "posts" do
end
end
id = TestRepo.insert!(struct(ID, id: nil))
assert id.id
assert TestRepo.get_by(ID, id: "#{id.id}-hello") == id
end
@tag :id_type
@tag :assigns_id_type
test "insert with user-assigned primary key" do
assert %Post{id: 1} = TestRepo.insert!(%Post{id: 1})
end
@tag :id_type
@tag :assigns_id_type
test "insert and update with user-assigned primary key in changeset" do
changeset = Ecto.Changeset.cast(%Post{id: 11}, %{"id" => "13"}, ~w(id)a)
assert %Post{id: 13} = post = TestRepo.insert!(changeset)
changeset = Ecto.Changeset.cast(post, %{"id" => "15"}, ~w(id)a)
assert %Post{id: 15} = TestRepo.update!(changeset)
end
test "insert and fetch a schema with utc timestamps" do
datetime = DateTime.from_unix!(System.os_time(:second), :second)
TestRepo.insert!(%User{inserted_at: datetime})
assert [%{inserted_at: ^datetime}] = TestRepo.all(User)
end
# optimistic_lock/2 filters on the current :lock_version and bumps it, so a
# second changeset built from the same pre-update (now stale) struct fails
# on both update! and delete!.
test "optimistic locking in update/delete operations" do
  import Ecto.Changeset, only: [cast: 3, optimistic_lock: 2]
  base_comment = TestRepo.insert!(%Comment{})
  changeset_ok =
    base_comment
    |> cast(%{"text" => "foo.bar"}, ~w(text)a)
    |> optimistic_lock(:lock_version)
  # First update succeeds and advances the lock version in the database.
  TestRepo.update!(changeset_ok)
  changeset_stale =
    base_comment
    |> cast(%{"text" => "foo.bat"}, ~w(text)a)
    |> optimistic_lock(:lock_version)
  # Built from the pre-update struct, so its lock version no longer matches.
  assert_raise Ecto.StaleEntryError, fn -> TestRepo.update!(changeset_stale) end
  assert_raise Ecto.StaleEntryError, fn -> TestRepo.delete!(changeset_stale) end
end
# optimistic_lock/3 with a custom incrementer handles a stored nil version:
# the incrementer maps nil to 1, so the first update succeeds and persists 1.
test "optimistic locking in update operation with nil field" do
  import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3]
  base_comment =
    %Comment{}
    |> cast(%{lock_version: nil}, [:lock_version])
    |> TestRepo.insert!()
  # Start at 1 when no version was stored yet; otherwise increment.
  incrementer =
    fn
      nil -> 1
      old_value -> old_value + 1
    end
  changeset_ok =
    base_comment
    |> cast(%{"text" => "foo.bar"}, ~w(text)a)
    |> optimistic_lock(:lock_version, incrementer)
  updated = TestRepo.update!(changeset_ok)
  assert updated.text == "foo.bar"
  assert updated.lock_version == 1
end
# delete!/1 also works through optimistic_lock/3 when the stored
# lock_version is nil, using the same nil -> 1 incrementer.
test "optimistic locking in delete operation with nil field" do
  import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3]
  base_comment =
    %Comment{}
    |> cast(%{lock_version: nil}, [:lock_version])
    |> TestRepo.insert!()
  # Start at 1 when no version was stored yet; otherwise increment.
  incrementer =
    fn
      nil -> 1
      old_value -> old_value + 1
    end
  changeset_ok = optimistic_lock(base_comment, :lock_version, incrementer)
  TestRepo.delete!(changeset_ok)
  refute TestRepo.get(Comment, base_comment.id)
end
@tag :unique_constraint
# A duplicate insert raises Ecto.ConstraintError (with a hint) when no
# constraint is declared on the changeset, still raises when declared under
# a non-matching name, and becomes a changeset error when declared correctly.
test "unique constraint" do
  changeset = Ecto.Changeset.change(%Post{}, uuid: Ecto.UUID.generate())
  {:ok, _} = TestRepo.insert(changeset)
  # No constraint declared on the changeset -> raises with a hint.
  exception =
    assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
      changeset
      |> TestRepo.insert()
    end
  assert exception.message =~ "posts_uuid_index (unique_constraint)"
  assert exception.message =~ "The changeset has not defined any constraint."
  assert exception.message =~ "call `unique_constraint/3`"
  message = ~r/constraint error when attempting to insert struct/
  # Declared under a name that does not match the violated index -> raises.
  exception =
    assert_raise Ecto.ConstraintError, message, fn ->
      changeset
      |> Ecto.Changeset.unique_constraint(:uuid, name: :posts_email_changeset)
      |> TestRepo.insert()
    end
  assert exception.message =~ "posts_email_changeset (unique_constraint)"
  # Matching declaration -> {:error, changeset} with the error attached.
  {:error, changeset} =
    changeset
    |> Ecto.Changeset.unique_constraint(:uuid)
    |> TestRepo.insert()
  assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}]
  assert changeset.data.__meta__.state == :built
end
@tag :unique_constraint
# A unique violation inside a child association surfaces as an error on the
# offending child changeset (the second post carrying the duplicated uuid).
test "unique constraint from association" do
  uuid = Ecto.UUID.generate()
  # Builds a post changeset for a given uuid with the constraint declared.
  post = & %Post{} |> Ecto.Changeset.change(uuid: &1) |> Ecto.Changeset.unique_constraint(:uuid)
  {:error, changeset} =
    TestRepo.insert %User{
      comments: [%Comment{}],
      permalink: %Permalink{},
      posts: [post.(uuid), post.(uuid), post.(Ecto.UUID.generate())]
    }
  [_, p2, _] = changeset.changes.posts
  assert p2.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}]
end
@tag :id_type
@tag :unique_constraint
# The same violation-to-changeset-error conversion works on a schema with a
# binary_id primary key (Custom).
test "unique constraint with binary_id" do
  changeset = Ecto.Changeset.change(%Custom{}, uuid: Ecto.UUID.generate())
  {:ok, _} = TestRepo.insert(changeset)
  {:error, changeset} =
    changeset
    |> Ecto.Changeset.unique_constraint(:uuid)
    |> TestRepo.insert()
  assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "customs_uuid_index"]}]
  assert changeset.data.__meta__.state == :built
end
# Putting the same user twice in the :unique_users many-assoc yields an
# {:error, changeset} whose error lands on the second, duplicated entry
# (per the test name this is a pseudo-constraint handled at the repository
# level — TODO confirm it never reaches the database).
test "unique pseudo-constraint violation error message with join table at the repository" do
  post =
    TestRepo.insert!(%Post{title: "some post"})
    |> TestRepo.preload(:unique_users)
  user =
    TestRepo.insert!(%User{name: "some user"})
  # Violate the unique composite index
  {:error, changeset} =
    post
    |> Ecto.Changeset.change
    |> Ecto.Changeset.put_assoc(:unique_users, [user, user])
    |> TestRepo.update
  errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
  assert errors == %{unique_users: [%{}, %{id: ["has already been taken"]}]}
  refute changeset.valid?
end
@tag :join
@tag :unique_constraint
# Declaring unique_constraint/3 with the composite join-table index name and
# a custom message converts the violation into that message on the second
# (duplicated) child entry.
test "unique constraint violation error message with join table in single changeset" do
  post =
    TestRepo.insert!(%Post{title: "some post"})
    |> TestRepo.preload(:constraint_users)
  user =
    TestRepo.insert!(%User{name: "some user"})
  # Violate the unique composite index
  {:error, changeset} =
    post
    |> Ecto.Changeset.change
    |> Ecto.Changeset.put_assoc(:constraint_users, [user, user])
    |> Ecto.Changeset.unique_constraint(:user,
      name: :posts_users_composite_pk_post_id_user_id_index,
      message: "has already been assigned")
    |> TestRepo.update
  errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
  assert errors == %{constraint_users: [%{}, %{user: ["has already been assigned"]}]}
  refute changeset.valid?
end
@tag :join
@tag :unique_constraint
# Same as the single-changeset case, but the duplicate comes from a prior,
# separate update: the second put_assoc of the same user violates the
# composite index and yields the declared error message.
test "unique constraint violation error message with join table and separate changesets" do
  post =
    TestRepo.insert!(%Post{title: "some post"})
    |> TestRepo.preload(:constraint_users)
  user = TestRepo.insert!(%User{name: "some user"})
  # First association succeeds and persists the join row.
  post
  |> Ecto.Changeset.change
  |> Ecto.Changeset.put_assoc(:constraint_users, [user])
  |> TestRepo.update
  # Violate the unique composite index
  {:error, changeset} =
    post
    |> Ecto.Changeset.change
    |> Ecto.Changeset.put_assoc(:constraint_users, [user])
    |> Ecto.Changeset.unique_constraint(:user,
      name: :posts_users_composite_pk_post_id_user_id_index,
      message: "has already been assigned")
    |> TestRepo.update
  errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
  assert errors == %{constraint_users: [%{user: ["has already been assigned"]}]}
  refute changeset.valid?
end
@tag :foreign_key_constraint
# Same escalation ladder as the unique-constraint test, but for a foreign
# key violation (a comment pointing at nonexistent post id 0): raise with a
# hint, raise on a non-matching name, changeset error on a match.
test "foreign key constraint" do
  changeset = Ecto.Changeset.change(%Comment{post_id: 0})
  # No constraint declared -> raises with a hint.
  exception =
    assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
      changeset
      |> TestRepo.insert()
    end
  assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)"
  assert exception.message =~ "The changeset has not defined any constraint."
  assert exception.message =~ "call `foreign_key_constraint/3`"
  message = ~r/constraint error when attempting to insert struct/
  # Declared under a non-matching name -> still raises.
  exception =
    assert_raise Ecto.ConstraintError, message, fn ->
      changeset
      |> Ecto.Changeset.foreign_key_constraint(:post_id, name: :comments_post_id_other)
      |> TestRepo.insert()
    end
  assert exception.message =~ "comments_post_id_other (foreign_key_constraint)"
  # Matching declaration -> changeset error on :post_id.
  {:error, changeset} =
    changeset
    |> Ecto.Changeset.foreign_key_constraint(:post_id)
    |> TestRepo.insert()
  assert changeset.errors == [post_id: {"does not exist", [constraint: :foreign, constraint_name: "comments_post_id_fkey"]}]
end
@tag :foreign_key_constraint
# assoc_constraint/3 behaves like foreign_key_constraint/3 but reports the
# error on the association key (:post) instead of the raw field.
test "assoc constraint" do
  changeset = Ecto.Changeset.change(%Comment{post_id: 0})
  # No constraint declared -> raises with a hint.
  exception =
    assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
      changeset
      |> TestRepo.insert()
    end
  assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)"
  assert exception.message =~ "The changeset has not defined any constraint."
  message = ~r/constraint error when attempting to insert struct/
  # Declared under a non-matching name -> still raises.
  exception =
    assert_raise Ecto.ConstraintError, message, fn ->
      changeset
      |> Ecto.Changeset.assoc_constraint(:post, name: :comments_post_id_other)
      |> TestRepo.insert()
    end
  assert exception.message =~ "comments_post_id_other (foreign_key_constraint)"
  # Matching declaration -> changeset error on :post.
  {:error, changeset} =
    changeset
    |> Ecto.Changeset.assoc_constraint(:post)
    |> TestRepo.insert()
  assert changeset.errors == [post: {"does not exist", [constraint: :assoc, constraint_name: "comments_post_id_fkey"]}]
end
@tag :foreign_key_constraint
# Deleting a user that a permalink still references raises, since no
# no_assoc_constraint was declared on the deletion.
test "no assoc constraint error" do
  user = TestRepo.insert!(%User{})
  TestRepo.insert!(%Permalink{user_id: user.id})
  exception =
    assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to delete struct/, fn ->
      TestRepo.delete!(user)
    end
  assert exception.message =~ "permalinks_user_id_fkey (foreign_key_constraint)"
  assert exception.message =~ "The changeset has not defined any constraint."
end
@tag :foreign_key_constraint
# no_assoc_constraint declared under a non-matching index name ("pther" is
# a deliberate misspelling) still raises instead of producing an error.
test "no assoc constraint with changeset mismatch" do
  user = TestRepo.insert!(%User{})
  TestRepo.insert!(%Permalink{user_id: user.id})
  message = ~r/constraint error when attempting to delete struct/
  exception =
    assert_raise Ecto.ConstraintError, message, fn ->
      user
      |> Ecto.Changeset.change
      |> Ecto.Changeset.no_assoc_constraint(:permalink, name: :permalinks_user_id_pther)
      |> TestRepo.delete()
    end
  assert exception.message =~ "permalinks_user_id_pther (foreign_key_constraint)"
end
@tag :foreign_key_constraint
# With a matching no_assoc_constraint, the failed delete returns
# {:error, changeset} with the error reported on :permalink.
test "no assoc constraint with changeset match" do
  user = TestRepo.insert!(%User{})
  TestRepo.insert!(%Permalink{user_id: user.id})
  {:error, changeset} =
    user
    |> Ecto.Changeset.change
    |> Ecto.Changeset.no_assoc_constraint(:permalink)
    |> TestRepo.delete()
  assert changeset.errors == [permalink: {"is still associated with this entry", [constraint: :no_assoc, constraint_name: "permalinks_user_id_fkey"]}]
end
@tag :foreign_key_constraint
# When the child assoc (permalink with post_id: 0) fails its foreign key on
# insert and on update, the embedded :item change must be kept as a
# changeset in the returned error rather than being discarded.
test "insert and update with embeds during failing child foreign key" do
  changeset =
    Order
    |> struct(%{})
    |> order_changeset(%{item: %{price: 10}, permalink: %{post_id: 0}})
  {:error, changeset} = TestRepo.insert(changeset)
  assert %Ecto.Changeset{} = changeset.changes.item
  # Insert a valid order first, then fail the same way on update.
  order =
    Order
    |> struct(%{})
    |> order_changeset(%{})
    |> TestRepo.insert!()
    |> TestRepo.preload([:permalink])
  changeset = order_changeset(order, %{item: %{price: 10}, permalink: %{post_id: 0}})
  assert %Ecto.Changeset{} = changeset.changes.item
  {:error, changeset} = TestRepo.update(changeset)
  assert %Ecto.Changeset{} = changeset.changes.item
end
# Builds an Order changeset casting the embedded :item and the :permalink
# association; helper for the failing-child-foreign-key scenarios.
def order_changeset(order, params) do
  order
  |> Ecto.Changeset.cast(params, [:permalink_id])
  |> Ecto.Changeset.cast_embed(:item, with: &item_changeset/2)
  |> Ecto.Changeset.cast_assoc(:permalink, with: &permalink_changeset/2)
end
# Casts only :price on the embedded item.
def item_changeset(item, params) do
  item
  |> Ecto.Changeset.cast(params, [:price])
end
# Casts :post_id and declares the assoc constraint so a foreign key
# violation becomes a changeset error instead of raising.
def permalink_changeset(comment, params) do
  comment
  |> Ecto.Changeset.cast(params, [:post_id])
  |> Ecto.Changeset.assoc_constraint(:post)
end
# unsafe_validate_unique/3 queries the repo at validation time: it flags a
# clash on [:title], passes when the full field set is not duplicated, and
# excludes the record itself when validating an update.
test "unsafe_validate_unique/3" do
  {:ok, inserted_post} = TestRepo.insert(%Post{title: "Greetings", visits: 13})
  new_post_changeset = Post.changeset(%Post{}, %{title: "Greetings", visits: 17})
  changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title], TestRepo)
  assert changeset.errors[:title] ==
         {"has already been taken", validation: :unsafe_unique, fields: [:title]}
  # The [:title, :text] combination is not duplicated, so no error.
  changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title, :text], TestRepo)
  assert changeset.errors[:title] == nil
  update_changeset = Post.changeset(inserted_post, %{visits: 17})
  changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:title], TestRepo)
  assert changeset.errors[:title] == nil # cannot conflict with itself
end
# With a composite primary key, only a record matching the FULL key is
# excluded from the uniqueness check: both a fully-different and a
# partially-matching key still conflict on :name.
test "unsafe_validate_unique/3 with composite keys" do
  {:ok, inserted_post} = TestRepo.insert(%CompositePk{a: 123, b: 456, name: "UniqueName"})
  different_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 321})
  changeset = Ecto.Changeset.unsafe_validate_unique(different_pk, [:name], TestRepo)
  assert changeset.errors[:name] ==
         {"has already been taken", validation: :unsafe_unique, fields: [:name]}
  # Only half of the composite key matches -> still treated as a conflict.
  partial_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 456})
  changeset = Ecto.Changeset.unsafe_validate_unique(partial_pk, [:name], TestRepo)
  assert changeset.errors[:name] ==
         {"has already been taken", validation: :unsafe_unique, fields: [:name]}
  update_changeset = CompositePk.changeset(inserted_post, %{name: "NewName"})
  changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:name], TestRepo)
  assert changeset.valid?
  assert changeset.errors[:name] == nil # cannot conflict with itself
end
# get/2 casts string ids, returns nil for a missing record; get!/2 raises
# Ecto.NoResultsError instead.
test "get(!)" do
  post1 = TestRepo.insert!(%Post{title: "1"})
  post2 = TestRepo.insert!(%Post{title: "2"})
  assert post1 == TestRepo.get(Post, post1.id)
  assert post2 == TestRepo.get(Post, to_string post2.id) # With casting
  assert post1 == TestRepo.get!(Post, post1.id)
  assert post2 == TestRepo.get!(Post, to_string post2.id) # With casting
  TestRepo.delete!(post1)
  assert TestRepo.get(Post, post1.id) == nil
  assert_raise Ecto.NoResultsError, fn ->
    TestRepo.get!(Post, post1.id)
  end
end
# get/2 works against a {source, schema} tuple, and the returned struct
# carries the overridden source in its metadata.
test "get(!) with custom source" do
  custom = Ecto.put_meta(%Custom{}, source: "posts")
  custom = TestRepo.insert!(custom)
  bid = custom.bid
  assert %Custom{bid: ^bid, __meta__: %{source: "posts"}} =
         TestRepo.get(from(c in {"posts", Custom}), bid)
end
# get/2 also accepts a binary_id primary key value directly.
test "get(!) with binary_id" do
  custom = TestRepo.insert!(%Custom{})
  bid = custom.bid
  assert %Custom{bid: ^bid} = TestRepo.get(Custom, bid)
end
# get_by/2 matches on one or more fields (keyword list or map), casts
# string ids, and returns nil on no match; get_by!/2 raises instead.
test "get_by(!)" do
  post1 = TestRepo.insert!(%Post{title: "1", visits: 1})
  post2 = TestRepo.insert!(%Post{title: "2", visits: 2})
  assert post1 == TestRepo.get_by(Post, id: post1.id)
  assert post1 == TestRepo.get_by(Post, title: post1.title)
  assert post1 == TestRepo.get_by(Post, id: post1.id, title: post1.title)
  assert post2 == TestRepo.get_by(Post, id: to_string(post2.id)) # With casting
  assert nil == TestRepo.get_by(Post, title: "hey")
  assert nil == TestRepo.get_by(Post, id: post2.id, visits: 3)
  assert post1 == TestRepo.get_by!(Post, id: post1.id)
  assert post1 == TestRepo.get_by!(Post, title: post1.title)
  assert post1 == TestRepo.get_by!(Post, id: post1.id, visits: 1)
  assert post2 == TestRepo.get_by!(Post, id: to_string(post2.id)) # With casting
  assert post1 == TestRepo.get_by!(Post, %{id: post1.id})
  assert_raise Ecto.NoResultsError, fn ->
    TestRepo.get_by!(Post, id: post2.id, title: "hey")
  end
end
# reload/1 accepts a struct or a list, returns nil (in place) for records
# that no longer exist, preserves the order of its arguments, and fetches
# fresh values from the database.
test "reload" do
  post1 = TestRepo.insert!(%Post{title: "1", visits: 1})
  post2 = TestRepo.insert!(%Post{title: "2", visits: 2})
  assert post1 == TestRepo.reload(post1)
  assert [post1, post2] == TestRepo.reload([post1, post2])
  assert [post1, post2, nil] == TestRepo.reload([post1, post2, %Post{id: 55}])
  assert nil == TestRepo.reload(%Post{id: 55})
  # keeps order as received in the params
  assert [post2, post1] == TestRepo.reload([post2, post1])
  TestRepo.update_all(Post, inc: [visits: 1])
  assert [%{visits: 2}, %{visits: 3}] = TestRepo.reload([post1, post2])
end
# reload/1 always goes back to the database and does not carry preloads
# over from the given struct.
test "reload ignores preloads" do
  post = TestRepo.insert!(%Post{title: "1", visits: 1}) |> TestRepo.preload(:comments)
  assert %{comments: %Ecto.Association.NotLoaded{}} = TestRepo.reload(post)
end
# Same coverage as "reload" but for reload!/1, which raises when any of
# the given structs can no longer be found.
test "reload!" do
  post1 = TestRepo.insert!(%Post{title: "1", visits: 1})
  post2 = TestRepo.insert!(%Post{title: "2", visits: 2})
  assert post1 == TestRepo.reload!(post1)
  assert [post1, post2] == TestRepo.reload!([post1, post2])
  # A missing record in a list raises rather than yielding nil.
  assert_raise RuntimeError, ~r"could not reload", fn ->
    TestRepo.reload!([post1, post2, %Post{id: 55}])
  end
  assert_raise Ecto.NoResultsError, fn ->
    TestRepo.reload!(%Post{id: 55})
  end
  # keeps order as received in the params (was calling reload/1 here; use
  # the bang variant consistently since this test targets reload!/1)
  assert [post2, post1] == TestRepo.reload!([post2, post1])
  TestRepo.update_all(Post, inc: [visits: 1])
  assert [%{visits: 2}, %{visits: 3}] = TestRepo.reload!([post1, post2])
end
# first/1 and last/1 respect the query's order_by (or its absence), return
# nothing on an empty result via one/1, and one!/1 raises instead.
test "first, last and one(!)" do
  post1 = TestRepo.insert!(%Post{title: "1"})
  post2 = TestRepo.insert!(%Post{title: "2"})
  assert post1 == Post |> first |> TestRepo.one
  assert post2 == Post |> last |> TestRepo.one
  query = from p in Post, order_by: p.title
  assert post1 == query |> first |> TestRepo.one
  assert post2 == query |> last |> TestRepo.one
  # A descending order flips which record first/last return.
  query = from p in Post, order_by: [desc: p.title], limit: 10
  assert post2 == query |> first |> TestRepo.one
  assert post1 == query |> last |> TestRepo.one
  query = from p in Post, where: is_nil(p.id)
  refute query |> first |> TestRepo.one
  refute query |> last |> TestRepo.one
  assert_raise Ecto.NoResultsError, fn -> query |> first |> TestRepo.one! end
  assert_raise Ecto.NoResultsError, fn -> query |> last |> TestRepo.one! end
end
# exists?/1 works with plain filters, discards a select, returns false when
# no rows match, and supports aggregate group_by/having queries.
# (A copy-pasted duplicate of the no-match query/assert pair was removed.)
test "exists?" do
  TestRepo.insert!(%Post{title: "1", visits: 2})
  TestRepo.insert!(%Post{title: "2", visits: 1})
  query = from p in Post, where: not is_nil(p.title), limit: 2
  assert query |> TestRepo.exists? == true
  # The select is irrelevant to existence checking.
  query = from p in Post, where: p.title == "1", select: p.title
  assert query |> TestRepo.exists? == true
  # No matching rows -> false.
  query = from p in Post, where: is_nil(p.id)
  assert query |> TestRepo.exists? == false
  # Aggregates with group_by/having are preserved by exists?.
  query = from(p in Post, select: {p.visits, avg(p.visits)}, group_by: p.visits, having: avg(p.visits) > 1)
  assert query |> TestRepo.exists? == true
end
# aggregate/3 over an empty table returns nil; otherwise max/min/count/sum
# are computed, and order_by/limit in the given query are honored (the
# limited query only sees the two smallest visit counts).
test "aggregate" do
  assert TestRepo.aggregate(Post, :max, :visits) == nil
  TestRepo.insert!(%Post{visits: 10})
  TestRepo.insert!(%Post{visits: 12})
  TestRepo.insert!(%Post{visits: 14})
  TestRepo.insert!(%Post{visits: 14})
  # Barebones
  assert TestRepo.aggregate(Post, :max, :visits) == 14
  assert TestRepo.aggregate(Post, :min, :visits) == 10
  assert TestRepo.aggregate(Post, :count, :visits) == 4
  # Compared via to_string since sum's return type varies by adapter.
  assert "50" = to_string(TestRepo.aggregate(Post, :sum, :visits))
  # With order_by
  query = from Post, order_by: [asc: :visits]
  assert TestRepo.aggregate(query, :max, :visits) == 14
  # With order_by and limit
  query = from Post, order_by: [asc: :visits], limit: 2
  assert TestRepo.aggregate(query, :max, :visits) == 12
end
@tag :decimal_precision
# avg over 10, 12, 14, 14; compared via string prefix since the numeric
# type/precision of avg varies by adapter.
test "aggregate avg" do
  TestRepo.insert!(%Post{visits: 10})
  TestRepo.insert!(%Post{visits: 12})
  TestRepo.insert!(%Post{visits: 14})
  TestRepo.insert!(%Post{visits: 14})
  assert "12.5" <> _ = to_string(TestRepo.aggregate(Post, :avg, :visits))
end
@tag :inline_order_by
# distinct: true collapses the duplicated 14 before counting.
test "aggregate with distinct" do
  TestRepo.insert!(%Post{visits: 10})
  TestRepo.insert!(%Post{visits: 12})
  TestRepo.insert!(%Post{visits: 14})
  TestRepo.insert!(%Post{visits: 14})
  query = from Post, order_by: [asc: :visits], distinct: true
  assert TestRepo.aggregate(query, :count, :visits) == 3
end
@tag :insert_cell_wise_defaults
# insert_all/2 accepts a table name, a {source, schema} tuple, or a schema;
# rows may be keyword lists or maps; empty row lists are a {0, nil} no-op.
test "insert all" do
  assert {2, nil} = TestRepo.insert_all("comments", [[text: "1"], %{text: "2", lock_version: 2}])
  assert {2, nil} = TestRepo.insert_all({"comments", Comment}, [[text: "3"], %{text: "4", lock_version: 2}])
  assert [%Comment{text: "1", lock_version: 1},
          %Comment{text: "2", lock_version: 2},
          %Comment{text: "3", lock_version: 1},
          %Comment{text: "4", lock_version: 2}] = TestRepo.all(Comment)
  # Empty rows fall back to per-cell database defaults.
  assert {2, nil} = TestRepo.insert_all(Post, [[], []])
  assert [%Post{}, %Post{}] = TestRepo.all(Post)
  assert {0, nil} = TestRepo.insert_all("posts", [])
  assert {0, nil} = TestRepo.insert_all({"posts", Post}, [])
end
@tag :insert_select
# Individual row fields may be subqueries: each row below mixes literal
# values with per-field queries reading from the seed comment.
test "insert all with query for single fields" do
  comment = TestRepo.insert!(%Comment{text: "1", lock_version: 1})
  text_query = from(c in Comment, select: c.text, where: [id: ^comment.id, lock_version: 1])
  lock_version_query = from(c in Comment, select: c.lock_version, where: [id: ^comment.id])
  rows = [
    [text: "2", lock_version: lock_version_query],
    [lock_version: lock_version_query, text: "3"],
    [text: text_query],
    [text: text_query, lock_version: lock_version_query],
    [lock_version: 6, text: "6"]
  ]
  assert {5, nil} = TestRepo.insert_all(Comment, rows, [])
  # Fetch everything except the seed comment itself.
  inserted_rows = Comment
                  |> where([c], c.id != ^comment.id)
                  |> TestRepo.all()
  assert [%Comment{text: "2", lock_version: 1},
          %Comment{text: "3", lock_version: 1},
          %Comment{text: "1"},
          %Comment{text: "1", lock_version: 1},
          %Comment{text: "6", lock_version: 6}] = inserted_rows
end
# insert_all/3 where the rows come from a select query instead of literal
# data; each test seeds one post and inserts a derived row whose title is
# built with a SQL concat fragment.
describe "insert_all with source query" do
  @tag :upsert
  @tag :with_conflict_target
  # Source-query insert combined with conflict_target/on_conflict options.
  test "insert_all with query and conflict target" do
    {:ok, %Post{id: id}} = TestRepo.insert(%Post{
      title: "A generic title"
    })
    source = from p in Post,
      select: %{
        title: fragment("concat(?, ?, ?)", p.title, type(^" suffix ", :string), p.id)
      }
    assert {1, _} = TestRepo.insert_all(Post, source, conflict_target: [:id], on_conflict: :replace_all)
    # The new row gets the next id; its title embeds the seed post's id.
    expected_id = id + 1
    expected_title = "A generic title suffix #{id}"
    assert %Post{title: ^expected_title} = TestRepo.get(Post, expected_id)
  end
  @tag :returning
  # Source-query insert returning the chosen columns as structs.
  test "insert_all with query and returning" do
    {:ok, %Post{id: id}} = TestRepo.insert(%Post{
      title: "A generic title"
    })
    source = from p in Post,
      select: %{
        title: fragment("concat(?, ?, ?)", p.title, type(^" suffix ", :string), p.id)
      }
    assert {1, returns} = TestRepo.insert_all(Post, source, returning: [:id, :title])
    expected_id = id + 1
    expected_title = "A generic title suffix #{id}"
    assert [%Post{id: ^expected_id, title: ^expected_title}] = returns
  end
  @tag :upsert
  @tag :without_conflict_target
  # Source-query insert with on_conflict but no explicit conflict target.
  test "insert_all with query and on_conflict" do
    {:ok, %Post{id: id}} = TestRepo.insert(%Post{
      title: "A generic title"
    })
    source = from p in Post,
      select: %{
        title: fragment("concat(?, ?, ?)", p.title, type(^" suffix ", :string), p.id)
      }
    assert {1, _} = TestRepo.insert_all(Post, source, on_conflict: :replace_all)
    expected_id = id + 1
    expected_title = "A generic title suffix #{id}"
    assert %Post{title: ^expected_title} = TestRepo.get(Post, expected_id)
  end
  # Plain source-query insert with no extra options.
  test "insert_all with query" do
    {:ok, %Post{id: id}} = TestRepo.insert(%Post{
      title: "A generic title"
    })
    source = from p in Post,
      select: %{
        title: fragment("concat(?, ?, ?)", p.title, type(^" suffix ", :string), p.id)
      }
    assert {1, _} = TestRepo.insert_all(Post, source)
    expected_id = id + 1
    expected_title = "A generic title suffix #{id}"
    assert %Post{title: ^expected_title} = TestRepo.get(Post, expected_id)
  end
end
@tag :invalid_prefix
@tag :insert_cell_wise_defaults
# A nonexistent schema prefix makes insert_all fail with an adapter error.
test "insert all with invalid prefix" do
  assert catch_error(TestRepo.insert_all(Post, [[], []], prefix: "oops"))
end
@tag :returning
@tag :insert_cell_wise_defaults
# returning: true/false/field-list on a schema: empty inserts return [] or
# nil; returned structs are marked :loaded in their metadata.
test "insert all with returning with schema" do
  assert {0, []} = TestRepo.insert_all(Comment, [], returning: true)
  assert {0, nil} = TestRepo.insert_all(Comment, [], returning: false)
  {2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "1"], [text: "2"]], returning: [:id, :text])
  assert %Comment{text: "1", __meta__: %{state: :loaded}} = c1
  assert %Comment{text: "2", __meta__: %{state: :loaded}} = c2
  {2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "3"], [text: "4"]], returning: true)
  assert %Comment{text: "3", __meta__: %{state: :loaded}} = c1
  assert %Comment{text: "4", __meta__: %{state: :loaded}} = c2
end
@tag :returning
@tag :insert_cell_wise_defaults
# Same, on a schema whose field has a custom :source column mapping
# (Permalink's :url).
test "insert all with returning with schema with field source" do
  assert {0, []} = TestRepo.insert_all(Permalink, [], returning: true)
  assert {0, nil} = TestRepo.insert_all(Permalink, [], returning: false)
  {2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "1"], [url: "2"]], returning: [:id, :url])
  assert %Permalink{url: "1", __meta__: %{state: :loaded}} = c1
  assert %Permalink{url: "2", __meta__: %{state: :loaded}} = c2
  {2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "3"], [url: "4"]], returning: true)
  assert %Permalink{url: "3", __meta__: %{state: :loaded}} = c1
  assert %Permalink{url: "4", __meta__: %{state: :loaded}} = c2
end
@tag :returning
@tag :insert_cell_wise_defaults
# Without a schema only explicit field lists can be returned (as maps);
# returning: true has no field list to expand and raises.
test "insert all with returning without schema" do
  {2, [c1, c2]} = TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: [:id, :text])
  assert %{id: _, text: "1"} = c1
  assert %{id: _, text: "2"} = c2
  assert_raise ArgumentError, fn ->
    TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: true)
  end
end
@tag :insert_cell_wise_defaults
# Custom-typed values (Ecto.UUID) are dumped to their storage format by
# insert_all and read back as the original value.
test "insert all with dumping" do
  uuid = Ecto.UUID.generate()
  assert {1, nil} = TestRepo.insert_all(Post, [%{uuid: uuid}])
  assert [%Post{uuid: ^uuid, title: nil}] = TestRepo.all(Post)
end
@tag :insert_cell_wise_defaults
# A nil binary_id primary key is autogenerated on both insert! and
# insert_all; an explicitly provided bid is kept as-is.
test "insert all autogenerates for binary_id type" do
  custom = TestRepo.insert!(%Custom{bid: nil})
  assert custom.bid
  assert TestRepo.get(Custom, custom.bid)
  assert TestRepo.delete!(custom)
  refute TestRepo.get(Custom, custom.bid)
  uuid = Ecto.UUID.generate()
  assert {2, nil} = TestRepo.insert_all(Custom, [%{uuid: uuid}, %{bid: custom.bid}])
  # Sorting by uuid puts the explicit-bid row (uuid: nil) first.
  assert [%Custom{bid: bid2, uuid: nil},
          %Custom{bid: bid1, uuid: ^uuid}] = Enum.sort_by(TestRepo.all(Custom), & &1.uuid)
  assert bid1 && bid2
  assert custom.bid != bid1
  assert custom.bid == bid2
end
# insert_all's :placeholders option: {:placeholder, key} cells are filled
# from the placeholders map (sent once per query instead of per row).
describe "placeholders" do
  @describetag :placeholders
  # The same placeholder value is substituted into every row that uses it.
  test "Repo.insert_all fills in placeholders" do
    placeholders = %{foo: 100, bar: "test"}
    bar_ph = {:placeholder, :bar}
    foo_ph = {:placeholder, :foo}
    entries = [
      %{intensity: 1.0, title: bar_ph, posted: ~D[2020-12-21], visits: foo_ph},
      %{intensity: 2.0, title: bar_ph, posted: ~D[2000-12-21], visits: foo_ph}
    ] |> Enum.map(&Map.put(&1, :uuid, Ecto.UUID.generate))
    TestRepo.insert_all(Post, entries, placeholders: placeholders)
    query = from(p in Post, select: {p.intensity, p.title, p.visits})
    assert [{1.0, "test", 100}, {2.0, "test", 100}] == TestRepo.all(query)
  end
  # Placeholder keys are not restricted to atoms.
  test "Repo.insert_all accepts non-atom placeholder keys" do
    placeholders = %{10 => "integer key", {:foo, :bar} => "tuple key"}
    entries = [%{text: {:placeholder, 10}}, %{text: {:placeholder, {:foo, :bar}}}]
    TestRepo.insert_all(Comment, entries, placeholders: placeholders)
    query = from(c in Comment, select: c.text)
    assert ["integer key", "tuple key"] == TestRepo.all(query)
  end
  # Placeholders also work when rows are keyword lists instead of maps.
  test "Repo.insert_all fills in placeholders with keyword list entries" do
    TestRepo.insert_all(Barebone, [[num: {:placeholder, :foo}]], placeholders: %{foo: 100})
    query = from(b in Barebone, select: b.num)
    assert [100] == TestRepo.all(query)
  end
end
# update_all/2 with a schema or a bare table name sets the given fields on
# every row and returns {count, nil} when nothing is selected.
test "update all" do
  assert post1 = TestRepo.insert!(%Post{title: "1"})
  assert post2 = TestRepo.insert!(%Post{title: "2"})
  assert post3 = TestRepo.insert!(%Post{title: "3"})
  assert {3, nil} = TestRepo.update_all(Post, set: [title: "x"])
  assert %Post{title: "x"} = TestRepo.reload(post1)
  assert %Post{title: "x"} = TestRepo.reload(post2)
  assert %Post{title: "x"} = TestRepo.reload(post3)
  # Also works against the raw table name, including setting nil.
  assert {3, nil} = TestRepo.update_all("posts", [set: [title: nil]])
  assert %Post{title: nil} = TestRepo.reload(post1)
  assert %Post{title: nil} = TestRepo.reload(post2)
  assert %Post{title: nil} = TestRepo.reload(post3)
end
@tag :invalid_prefix
# A nonexistent schema prefix makes update_all fail with an adapter error.
test "update all with invalid prefix" do
  assert catch_error(TestRepo.update_all(Post, [set: [title: "x"]], prefix: "oops"))
end
@tag :returning
# A select on the query makes update_all return the updated rows: full
# structs with select(Post, [p], p), partially-loaded structs with a field
# list (unselected fields come back nil).
test "update all with returning with schema" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
  assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
  assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
  assert {3, posts} = TestRepo.update_all(select(Post, [p], p), [set: [title: "x"]])
  [p1, p2, p3] = Enum.sort_by(posts, & &1.id)
  assert %Post{id: ^id1, title: "x"} = p1
  assert %Post{id: ^id2, title: "x"} = p2
  assert %Post{id: ^id3, title: "x"} = p3
  assert {3, posts} = TestRepo.update_all(select(Post, [:id, :visits]), [set: [visits: 11]])
  [p1, p2, p3] = Enum.sort_by(posts, & &1.id)
  assert %Post{id: ^id1, title: nil, visits: 11} = p1
  assert %Post{id: ^id2, title: nil, visits: 11} = p2
  assert %Post{id: ^id3, title: nil, visits: 11} = p3
end
@tag :returning
# Without a schema the returned rows are plain maps of the selected fields.
test "update all with returning without schema" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
  assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
  assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
  assert {3, posts} = TestRepo.update_all(select("posts", [:id, :title]), [set: [title: "x"]])
  [p1, p2, p3] = Enum.sort_by(posts, & &1.id)
  assert p1 == %{id: id1, title: "x"}
  assert p2 == %{id: id2, title: "x"}
  assert p3 == %{id: id3, title: "x"}
end
# A where clause limits which rows are updated; an in-query :update is
# merged with the :set passed to update_all (visits and title both change).
test "update all with filter" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
  assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
  assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
  query = from(p in Post, where: p.title == "1" or p.title == "2",
                          update: [set: [visits: ^17]])
  assert {2, nil} = TestRepo.update_all(query, set: [title: "x"])
  assert %Post{title: "x", visits: 17} = TestRepo.get(Post, id1)
  assert %Post{title: "x", visits: 17} = TestRepo.get(Post, id2)
  assert %Post{title: "3", visits: nil} = TestRepo.get(Post, id3)
end
# A filter matching nothing updates zero rows and leaves data untouched.
test "update all no entries" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
  assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
  assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
  query = from(p in Post, where: p.title == "4")
  assert {0, nil} = TestRepo.update_all(query, set: [title: "x"])
  assert %Post{title: "1"} = TestRepo.get(Post, id1)
  assert %Post{title: "2"} = TestRepo.get(Post, id2)
  assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
# The :inc update operator adds to the current value; a negative increment
# decrements.
test "update all increment syntax" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", visits: 0})
  assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", visits: 1})
  # Positive
  query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: 2]]
  assert {2, nil} = TestRepo.update_all(query, [])
  assert %Post{visits: 2} = TestRepo.get(Post, id1)
  assert %Post{visits: 3} = TestRepo.get(Post, id2)
  # Negative
  query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: -1]]
  assert {2, nil} = TestRepo.update_all(query, [])
  assert %Post{visits: 1} = TestRepo.get(Post, id1)
  assert %Post{visits: 2} = TestRepo.get(Post, id2)
end
@tag :id_type
# Values in :set are cast by the field type: a string id is cast to the
# integer :counter field.
test "update all with casting and dumping on id type field" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{})
  assert {1, nil} = TestRepo.update_all(Post, set: [counter: to_string(id1)])
  assert %Post{counter: ^id1} = TestRepo.get(Post, id1)
end
# Integer and NaiveDateTime values are dumped to the database format and
# read back unchanged.
test "update all with casting and dumping" do
  visits = 13
  datetime = ~N[2014-01-16 20:26:51]
  assert %Post{id: id} = TestRepo.insert!(%Post{})
  assert {1, nil} = TestRepo.update_all(Post, set: [visits: visits, inserted_at: datetime])
  assert %Post{visits: 13, inserted_at: ^datetime} = TestRepo.get(Post, id)
end
# delete_all/1 removes every row of the schema and reports the count.
test "delete all" do
  assert %Post{} = TestRepo.insert!(%Post{title: "1"})
  assert %Post{} = TestRepo.insert!(%Post{title: "2"})
  assert %Post{} = TestRepo.insert!(%Post{title: "3"})
  assert {3, nil} = TestRepo.delete_all(Post)
  assert [] = TestRepo.all(Post)
end
@tag :invalid_prefix
# A nonexistent schema prefix makes delete_all fail with an adapter error.
test "delete all with invalid prefix" do
  assert catch_error(TestRepo.delete_all(Post, prefix: "oops"))
end
@tag :returning
test "delete all with returning with schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, posts} = TestRepo.delete_all(select(Post, [p], p))
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: "1"} = p1
assert %Post{id: ^id2, title: "2"} = p2
assert %Post{id: ^id3, title: "3"} = p3
end
@tag :returning
# Deleting through a schemaless source returns plain maps, not structs.
test "delete all with returning without schema" do
  %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
  %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
  %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})

  assert {3, deleted} = TestRepo.delete_all(select("posts", [:id, :title]))

  assert Enum.sort_by(deleted, & &1.id) == [
           %{id: id1, title: "1"},
           %{id: id2, title: "2"},
           %{id: id3, title: "3"}
         ]
end
# Only the rows matching the where clause are deleted.
test "delete all with filter" do
  for title <- ~w(1 2 3) do
    assert %Post{} = TestRepo.insert!(%Post{title: title})
  end

  to_delete = from(p in Post, where: p.title == "1" or p.title == "2")
  assert {2, nil} = TestRepo.delete_all(to_delete)
  assert [%Post{}] = TestRepo.all(Post)
end
# A filter matching nothing deletes zero rows and leaves the data intact.
test "delete all no entries" do
  %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
  %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
  %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})

  assert {0, nil} = TestRepo.delete_all(from(p in Post, where: p.title == "4"))

  for {id, title} <- [{id1, "1"}, {id2, "2"}, {id3, "3"}] do
    assert %Post{title: ^title} = TestRepo.get(Post, id)
  end
end
# Virtual fields are not persisted: after a round-trip through the database
# the value read back is "temp" (presumably the struct default — not stored).
test "virtual field" do
assert %Post{id: id} = TestRepo.insert!(%Post{title: "1"})
assert TestRepo.get(Post, id).temp == "temp"
end
## Query syntax
defmodule Foo do
@moduledoc false
# Bare (non-schema) struct used to exercise `select` into a custom struct.
defstruct [:title]
end
describe "query select" do
# `select` supports tuples, lists, maps (with mixed and dynamic keys) and
# custom structs as result shapes.
test "expressions" do
%Post{} = TestRepo.insert!(%Post{title: "1", visits: 13})
assert [{"1", 13}] ==
TestRepo.all(from p in Post, select: {p.title, p.visits})
assert [["1", 13]] ==
TestRepo.all(from p in Post, select: [p.title, p.visits])
# Atom, string and integer keys may be mixed in one select map.
assert [%{:title => "1", 3 => 13, "visits" => 13}] ==
TestRepo.all(from p in Post, select: %{
:title => p.title,
"visits" => p.visits,
3 => p.visits
})
# A field reference (p.title) can itself serve as a map key.
assert [%{:title => "1", "1" => 13, "visits" => 13}] ==
TestRepo.all(from p in Post, select: %{
:title => p.title,
p.title => p.visits,
"visits" => p.visits
})
assert [%Foo{title: "1"}] ==
TestRepo.all(from p in Post, select: %Foo{title: p.title})
end
# Map-update syntax (%{p | ...}) in select, including the errors raised for
# an unknown key, a non-map value, and a mismatched struct.
test "map update" do
%Post{} = TestRepo.insert!(%Post{title: "1", visits: 13})
assert [%Post{:title => "new title", visits: 13}] =
TestRepo.all(from p in Post, select: %{p | title: "new title"})
assert [%Post{title: "new title", visits: 13}] =
TestRepo.all(from p in Post, select: %Post{p | title: "new title"})
assert_raise KeyError, fn ->
TestRepo.all(from p in Post, select: %{p | unknown: "new title"})
end
assert_raise BadMapError, fn ->
TestRepo.all(from p in Post, select: %{p.title | title: "new title"})
end
assert_raise BadStructError, fn ->
TestRepo.all(from p in Post, select: %Foo{p | title: p.title})
end
end
# `struct/2` in select restricts the loaded fields while still returning
# %Post{} structs; fields not taken stay unset (falsy).
test "take with structs" do
  %{id: pid1} = TestRepo.insert!(%Post{title: "1"})
  %{id: pid2} = TestRepo.insert!(%Post{title: "2"})
  %{id: pid3} = TestRepo.insert!(%Post{title: "3"})

  by_title = Post |> select([p], struct(p, [:title])) |> order_by([:title]) |> TestRepo.all()

  for {post, expected_title} <- Enum.zip(by_title, ~w(1 2 3)) do
    assert match?(%Post{}, post)
    refute post.id
    assert post.title == expected_title
  end

  assert [%Post{id: ^pid1}, %Post{id: ^pid2}, %Post{id: ^pid3}] =
           Post |> select([:id]) |> order_by([:id]) |> TestRepo.all()
end
# `map/2` in select against a schemaless source returns plain maps holding
# only the taken fields.
test "take with maps" do
  %{id: pid1} = TestRepo.insert!(%Post{title: "1"})
  %{id: pid2} = TestRepo.insert!(%Post{title: "2"})
  %{id: pid3} = TestRepo.insert!(%Post{title: "3"})

  by_title = "posts" |> select([p], map(p, [:title])) |> order_by([:title]) |> TestRepo.all()
  assert by_title == [%{title: "1"}, %{title: "2"}, %{title: "3"}]

  by_id = "posts" |> select([:id]) |> order_by([:id]) |> TestRepo.all()
  assert by_id == [%{id: pid1}, %{id: pid2}, %{id: pid3}]
end
# Field lists in select (bare, struct/2, map/2) also restrict preloaded
# association fields via the nested keyword syntax.
test "take with preload assocs" do
%{id: pid} = TestRepo.insert!(%Post{title: "post"})
TestRepo.insert!(%Comment{post_id: pid, text: "comment"})
fields = [:id, :title, comments: [:text, :post_id]]
[p] = Post |> preload(:comments) |> select([p], ^fields) |> TestRepo.all
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
[p] = Post |> preload(:comments) |> select([p], struct(p, ^fields)) |> TestRepo.all
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
[p] = Post |> preload(:comments) |> select([p], map(p, ^fields)) |> TestRepo.all
assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid}]}
end
# A missing belongs_to association is returned as nil in all three take forms.
test "take with nil preload assoc" do
%{id: cid} = TestRepo.insert!(%Comment{text: "comment"})
fields = [:id, :text, post: [:title]]
[c] = Comment |> preload(:post) |> select([c], ^fields) |> TestRepo.all
assert %Comment{id: ^cid, text: "comment", post: nil} = c
[c] = Comment |> preload(:post) |> select([c], struct(c, ^fields)) |> TestRepo.all
assert %Comment{id: ^cid, text: "comment", post: nil} = c
[c] = Comment |> preload(:post) |> select([c], map(c, ^fields)) |> TestRepo.all
assert c == %{id: cid, text: "comment", post: nil}
end
# Same as above but with the association loaded through a join + preload
# binding instead of a separate preload query.
test "take with join assocs" do
%{id: pid} = TestRepo.insert!(%Post{title: "post"})
%{id: cid} = TestRepo.insert!(%Comment{post_id: pid, text: "comment"})
fields = [:id, :title, comments: [:text, :post_id, :id]]
query = from p in Post, where: p.id == ^pid, join: c in assoc(p, :comments), preload: [comments: c]
p = TestRepo.one(from q in query, select: ^fields)
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
p = TestRepo.one(from q in query, select: struct(q, ^fields))
assert %Post{title: "post"} = p
assert [%Comment{text: "comment"}] = p.comments
p = TestRepo.one(from q in query, select: map(q, ^fields))
assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid, id: cid}]}
end
# Taking a single column whose stored value is nil still yields a map entry.
test "take with single nil column" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})
assert %{counter: nil} =
TestRepo.one(from p in Post, where: p.title == "1", select: [:counter])
end
# Taking a single nil column also works when the query goes through a join.
test "take with join assocs and single nil column" do
  # `counter` is a schema field, not an option to `insert!/2`. Passing it in
  # the options list was silently ignored and only passed because `counter`
  # already defaults to nil — set it on the struct instead.
  %{id: post_id} = TestRepo.insert!(%Post{title: "1", counter: nil})
  TestRepo.insert!(%Comment{post_id: post_id, text: "comment"})

  assert %{counter: nil} ==
           TestRepo.one(
             from p in Post,
               join: c in assoc(p, :comments),
               where: p.title == "1",
               select: map(p, [:counter])
           )
end
# A field with a custom `:source` column maps correctly in both plain
# selection and aggregate expressions.
test "field source" do
  TestRepo.insert!(%Permalink{url: "url"})

  assert Permalink |> select([p], p.url) |> TestRepo.all() == ["url"]
  assert Permalink |> select([p], count(p.url)) |> TestRepo.all() == [1]
end
# `merge/2` and `select_merge/3` combine a base selection (source, struct or
# map) with extra fields, including fields pulled across an outer join.
test "merge" do
date = Date.utc_today()
%Post{id: post_id} = TestRepo.insert!(%Post{title: "1", counter: nil, posted: date, public: false})
# Merge on source
assert [%Post{title: "2"}] =
Post |> select([p], merge(p, %{title: "2"})) |> TestRepo.all()
assert [%Post{title: "2"}] =
Post |> select([p], p) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
# Merge on struct
assert [%Post{title: "2"}] =
Post |> select([p], merge(%Post{title: p.title}, %{title: "2"})) |> TestRepo.all()
assert [%Post{title: "2"}] =
Post |> select([p], %Post{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
# Merge on map
assert [%{title: "2"}] =
Post |> select([p], merge(%{title: p.title}, %{title: "2"})) |> TestRepo.all()
assert [%{title: "2"}] =
Post |> select([p], %{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
# Merge on outer join with map
%Permalink{} = TestRepo.insert!(%Permalink{post_id: post_id, url: "Q", title: "Z"})
# left join record is present: joined fields overwrite the permalink's
assert [%{url: "Q", title: "1", posted: _date}] =
Permalink
|> join(:left, [l], p in Post, on: l.post_id == p.id)
|> select([l, p], merge(l, map(p, ^~w(title posted)a)))
|> TestRepo.all()
assert [%{url: "Q", title: "1", posted: _date}] =
Permalink
|> join(:left, [l], p in Post, on: l.post_id == p.id)
|> select_merge([_l, p], map(p, ^~w(title posted)a))
|> TestRepo.all()
# left join record is not present: merged fields come back nil, so the
# permalink's own title survives and posted becomes nil
assert [%{url: "Q", title: "Z", posted: nil}] =
Permalink
|> join(:left, [l], p in Post, on: l.post_id == p.id and p.public == true)
|> select([l, p], merge(l, map(p, ^~w(title posted)a)))
|> TestRepo.all()
assert [%{url: "Q", title: "Z", posted: nil}] =
Permalink
|> join(:left, [l], p in Post, on: l.post_id == p.id and p.public == true)
|> select_merge([_l, p], map(p, ^~w(title posted)a))
|> TestRepo.all()
end
# Merging a map-update of the binding itself (%{p | ...}) onto the binding.
test "merge with update on self" do
%Post{} = TestRepo.insert!(%Post{title: "1", counter: 1})
assert [%Post{title: "1", counter: 2}] =
Post |> select([p], merge(p, %{p | counter: 2})) |> TestRepo.all()
assert [%Post{title: "1", counter: 2}] =
Post |> select([p], p) |> select_merge([p], %{p | counter: 2}) |> TestRepo.all()
end
# A select_merge applied inside a subquery is preserved when the outer
# query reads from it.
test "merge within subquery" do
  %Post{} = TestRepo.insert!(%Post{title: "1", counter: 1})

  inner =
    Post
    |> select_merge([p], %{p | counter: 2})

  assert [%Post{title: "1", counter: 2}] = TestRepo.all(subquery(inner))
end
end
# `count/2` with :distinct collapses duplicate values.
test "query count distinct" do
  for title <- ~w(1 1 2) do
    TestRepo.insert!(%Post{title: title})
  end

  assert Post |> select([p], count(p.title)) |> TestRepo.all() == [3]
  assert Post |> select([p], count(p.title, :distinct)) |> TestRepo.all() == [2]
end
# Keyword filters in `where` work both as literal lists and as fully
# interpolated (^params) lists, including a nil value (post3.uuid).
test "query where interpolation" do
post1 = TestRepo.insert!(%Post{title: "hello"})
post2 = TestRepo.insert!(%Post{title: "goodbye"})
assert [post1, post2] == Post |> where([], []) |> TestRepo.all |> Enum.sort_by(& &1.id)
assert [post1] == Post |> where([], [title: "hello"]) |> TestRepo.all
assert [post1] == Post |> where([], [title: "hello", id: ^post1.id]) |> TestRepo.all
params0 = []
params1 = [title: "hello"]
params2 = [title: "hello", id: post1.id]
assert [post1, post2] == (from Post, where: ^params0) |> TestRepo.all |> Enum.sort_by(& &1.id)
assert [post1] == (from Post, where: ^params1) |> TestRepo.all
assert [post1] == (from Post, where: ^params2) |> TestRepo.all
# A nil value in the interpolated keyword list must match rows storing nil.
post3 = TestRepo.insert!(%Post{title: "goodbye", uuid: nil})
params3 = [title: "goodbye", uuid: post3.uuid]
assert [post3] == (from Post, where: ^params3) |> TestRepo.all
end
describe "upsert via insert" do
@describetag :upsert
# on_conflict: :raise (the default) errors on a duplicate primary key.
test "on conflict raise" do
{:ok, inserted} = TestRepo.insert(%Post{title: "first"}, on_conflict: :raise)
assert catch_error(TestRepo.insert(%Post{id: inserted.id, title: "second"}, on_conflict: :raise))
end
# on_conflict: :nothing skips the conflicting row; the returned struct has
# a nil id but is still marked :loaded.
test "on conflict ignore" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing)
assert inserted.id
assert inserted.__meta__.state == :loaded
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing)
assert not_inserted.id == nil
assert not_inserted.__meta__.state == :loaded
end
@tag :with_conflict_target
# Upserting a parent with child associations in the same insert.
test "on conflict and associations" do
on_conflict = [set: [title: "second"]]
post = %Post{uuid: Ecto.UUID.generate(),
title: "first", comments: [%Comment{}]}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
end
@tag :with_conflict_target
# insert_all with an `inc` update resolves a conflict by incrementing.
test "on conflict with inc" do
uuid = "6fa459ea-ee8a-3ca4-894e-db77e160355e"
post = %Post{title: "first", uuid: uuid}
{:ok, _} = TestRepo.insert(post)
post = %{title: "upsert", uuid: uuid}
TestRepo.insert_all(Post, [post], on_conflict: [inc: [visits: 1]], conflict_target: :uuid)
end
@tag :with_conflict_target
# :nothing only applies when the conflict is on the declared target column.
test "on conflict ignore and conflict target" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:id]))
# Error on conflict target
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert not_inserted.id == nil
end
@tag :without_conflict_target
# A keyword-list on_conflict updates the row in the database, but the struct
# returned from insert still carries the values that were sent (hence the
# `!= "second"` assertion followed by a fresh get).
test "on conflict keyword list" do
on_conflict = [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict)
assert inserted.id
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict)
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
# Same as above but the resolution only triggers on the :uuid target.
test "on conflict keyword list and conflict target" do
on_conflict = [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id]))
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :returning
@tag :with_conflict_target
# :returning controls whether database values (the existing uuid) are read
# back into the returned struct; returning: false keeps the local value.
test "on conflict keyword list and conflict target and returning" do
{:ok, c1} = TestRepo.insert(%Post{})
{:ok, c2} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: [:id, :uuid])
{:ok, c3} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: true)
{:ok, c4} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: false)
assert c2.uuid == c1.uuid
assert c3.uuid == c1.uuid
assert c4.uuid != c1.uuid
end
@tag :returning
@tag :with_conflict_target
# :returning honors a custom field :source mapping (Permalink.url).
test "on conflict keyword list and conflict target and returning and field source" do
TestRepo.insert!(%Permalink{url: "old"})
{:ok, c1} = TestRepo.insert(%Permalink{url: "old"},
on_conflict: [set: [url: "new1"]],
conflict_target: [:url],
returning: [:url])
TestRepo.insert!(%Permalink{url: "old"})
{:ok, c2} = TestRepo.insert(%Permalink{url: "old"},
on_conflict: [set: [url: "new2"]],
conflict_target: [:url],
returning: true)
assert c1.url == "new1"
assert c2.url == "new2"
end
@tag :returning
@tag :with_conflict_target
# With :nothing, nothing is written on conflict, so even returning: true
# yields no id for the skipped row.
test "on conflict ignore and returning" do
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
assert inserted.id
{:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid], returning: true)
assert not_inserted.id == nil
end
@tag :without_conflict_target
# on_conflict may be a full update query instead of a keyword list.
test "on conflict query" do
on_conflict = from Post, update: [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict)
assert inserted.id
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict)
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
# Query-based resolution restricted to the :uuid conflict target.
test "on conflict query and conflict target" do
on_conflict = from Post, update: [set: [title: "second"]]
post = %Post{title: "first", uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert inserted.id
# Error on non-conflict target
assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id]))
{:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
assert updated.id == inserted.id
assert updated.title != "second"
assert TestRepo.get!(Post, inserted.id).title == "second"
end
@tag :with_conflict_target
# When the on_conflict query's where clause matches nothing, the upsert is
# treated as stale and reported on the configured stale_error_field.
test "on conflict query having condition" do
post = %Post{title: "first", counter: 1, uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post)
on_conflict = from Post, where: [counter: 2], update: [set: [title: "second"]]
insert_options = [
on_conflict: on_conflict,
conflict_target: [:uuid],
stale_error_field: :counter
]
assert {:error, changeset} = TestRepo.insert(post, insert_options)
assert changeset.errors == [counter: {"is stale", [stale: true]}]
assert TestRepo.get!(Post, inserted.id).title == "first"
end
@tag :without_conflict_target
# :replace_all overwrites every column of the conflicting row, including the
# primary key (hence post.id != inserted.id), without adding a new row.
test "on conflict replace_all" do
post = %Post{title: "first", visits: 13, uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all)
assert inserted.id
post = %Post{title: "updated", visits: 17, uuid: post.uuid}
post = TestRepo.insert!(post, on_conflict: :replace_all)
assert post.id != inserted.id
assert post.title == "updated"
assert post.visits == 17
assert TestRepo.all(from p in Post, select: {p.id, p.title, p.visits}) ==
[{post.id, "updated", 17}]
assert TestRepo.all(from p in Post, select: count(p.id)) == [1]
end
@tag :with_conflict_target
# Same behavior with the conflict detected on the :uuid target.
test "on conflict replace_all and conflict target" do
post = %Post{title: "first", visits: 13, uuid: Ecto.UUID.generate()}
{:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all, conflict_target: :uuid)
assert inserted.id
post = %Post{title: "updated", visits: 17, uuid: post.uuid}
post = TestRepo.insert!(post, on_conflict: :replace_all, conflict_target: :uuid)
assert post.id != inserted.id
assert post.title == "updated"
assert post.visits == 17
assert TestRepo.all(from p in Post, select: {p.id, p.title, p.visits}) ==
[{post.id, "updated", 17}]
assert TestRepo.all(from p in Post, select: count(p.id)) == [1]
end
end
describe "upsert via insert_all" do
@describetag :upsert_all
# insert_all defaults mirror insert: a duplicate raises under :raise.
test "on conflict raise" do
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, nil} = TestRepo.insert_all(Post, [post], on_conflict: :raise)
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :raise))
end
# :nothing skips the duplicate; the reported row count differs by adapter.
test "on conflict ignore" do
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing) == {1, nil}
# PG returns 0, MySQL returns 1
{entries, nil} = TestRepo.insert_all(Post, [post], on_conflict: :nothing)
assert entries == 0 or entries == 1
assert length(TestRepo.all(Post)) == 1
end
@tag :with_conflict_target
# :nothing plus an explicit conflict target: conflicts on other columns err.
test "on conflict ignore and conflict target" do
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) ==
{1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:id]))
# Error on conflict target
assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) ==
{0, nil}
end
@tag :with_conflict_target
# Keyword-list resolution through insert_all updates the stored title.
test "on conflict keyword list and conflict target" do
on_conflict = [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, nil} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid])
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id]))
# Error on conflict target
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :with_conflict_target
@tag :returning
# :returning on insert_all reflects the row state at statement time and
# honors the custom field :source on Permalink.url.
test "on conflict keyword list and conflict target and returning and source field" do
on_conflict = [set: [url: "new"]]
permalink = [url: "old"]
assert {1, [%Permalink{url: "old"}]} =
TestRepo.insert_all(Permalink, [permalink],
on_conflict: on_conflict, conflict_target: [:url], returning: [:url])
assert {1, [%Permalink{url: "new"}]} =
TestRepo.insert_all(Permalink, [permalink],
on_conflict: on_conflict, conflict_target: [:url], returning: [:url])
end
@tag :with_conflict_target
# Query-based on_conflict through insert_all.
test "on conflict query and conflict target" do
on_conflict = from Post, update: [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id]))
# Error on conflict target
assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :returning
@tag :with_conflict_target
# Query-based resolution with selected returning columns; the second upsert
# returns the updated title for the same id.
test "on conflict query and conflict target and returning" do
on_conflict = from Post, update: [set: [title: "second"]]
post = [title: "first", uuid: Ecto.UUID.generate()]
{1, [%{id: id}]} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
conflict_target: [:uuid], returning: [:id])
# Error on non-conflict target
assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
conflict_target: [:id], returning: [:id]))
# Error on conflict target
{1, [%Post{id: ^id, title: "second"}]} =
TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
conflict_target: [:uuid], returning: [:id, :title])
end
@tag :with_conflict_target
# Schemaless (string source) insert_all needs pre-dumped values — the uuid
# is dumped to its binary form by hand.
test "source (without an Ecto schema) on conflict query and conflict target" do
on_conflict = [set: [title: "second"]]
{:ok, uuid} = Ecto.UUID.dump(Ecto.UUID.generate())
post = [title: "first", uuid: uuid]
assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
# Error on non-conflict target
assert catch_error(TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:id]))
# Error on conflict target
assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
{1, nil}
assert TestRepo.all(from p in Post, select: p.title) == ["second"]
end
@tag :without_conflict_target
# :replace_all via insert_all: every column of the conflicting rows —
# including ids — is replaced, and the row count stays at two.
test "on conflict replace_all" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
# Multiple record change value: note IDS are also replaced
changes = [%{id: post_first.id + 2, title: "first_updated",
visits: 1, uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
visits: 2, uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: :replace_all)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id + 2)
assert updated_first.title == "first_updated"
assert updated_first.visits == 1
updated_second = TestRepo.get(Post, post_second.id + 2)
assert updated_second.title == "second_updated"
assert updated_second.visits == 2
end
@tag :with_conflict_target
# :replace_all keyed on :uuid — ids are replaced as well, so lookups use
# the shifted ids.
test "on conflict replace_all and conflict_target" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all, conflict_target: :uuid)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all, conflict_target: :uuid)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
# Multiple record change value: note IDS are also replaced
changes = [%{id: post_second.id + 1, title: "first_updated",
visits: 1, uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
visits: 2, uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: :replace_all, conflict_target: :uuid)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_second.id + 1)
assert updated_first.title == "first_updated"
assert updated_first.visits == 1
updated_second = TestRepo.get(Post, post_second.id + 2)
assert updated_second.title == "second_updated"
assert updated_second.visits == 2
end
@tag :without_conflict_target
# {:replace_all_except, [:id]} replaces everything but the id column, so the
# original ids remain valid for lookup.
test "on conflict replace_all_except" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: {:replace_all_except, [:id]})
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: {:replace_all_except, [:id]})
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
# Multiple record change value: note IDS are not replaced
changes = [%{id: post_first.id + 2, title: "first_updated",
visits: 1, uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
visits: 2, uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: {:replace_all_except, [:id]})
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id)
assert updated_first.title == "first_updated"
assert updated_first.visits == 1
updated_second = TestRepo.get(Post, post_second.id)
assert updated_second.title == "second_updated"
assert updated_second.visits == 2
end
@tag :with_conflict_target
# {:replace_all_except, [:id]} keyed on :uuid — ids survive the upsert.
test "on conflict replace_all_except and conflict_target" do
post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: {:replace_all_except, [:id]}, conflict_target: :uuid)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: {:replace_all_except, [:id]}, conflict_target: :uuid)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
# Multiple record change value: note IDS are not replaced
changes = [%{id: post_first.id + 2, title: "first_updated",
visits: 1, uuid: post_first.uuid},
%{id: post_second.id + 2, title: "second_updated",
visits: 2, uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: {:replace_all_except, [:id]}, conflict_target: :uuid)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id)
assert updated_first.title == "first_updated"
assert updated_first.visits == 1
updated_second = TestRepo.get(Post, post_second.id)
assert updated_second.title == "second_updated"
assert updated_second.visits == 2
end
@tag :with_conflict_target
# {:replace, [fields]} only overwrites the listed fields; `public` keeps the
# value originally stored.
test "on conflict replace and conflict_target" do
post_first = %Post{title: "first", visits: 10, public: true, uuid: Ecto.UUID.generate()}
post_second = %Post{title: "second", visits: 20, public: false, uuid: Ecto.UUID.generate()}
{:ok, post_first} = TestRepo.insert(post_first, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)
{:ok, post_second} = TestRepo.insert(post_second, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)
assert post_first.id
assert post_second.id
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
# Multiple record change value: note `public` field is not changed
changes = [%{id: post_first.id, title: "first_updated", visits: 11, public: false, uuid: post_first.uuid},
%{id: post_second.id, title: "second_updated", visits: 21, public: true, uuid: post_second.uuid}]
TestRepo.insert_all(Post, changes, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)
assert TestRepo.all(from p in Post, select: count(p.id)) == [2]
updated_first = TestRepo.get(Post, post_first.id)
assert updated_first.title == "first_updated"
assert updated_first.visits == 11
assert updated_first.public == true
updated_second = TestRepo.get(Post, post_second.id)
assert updated_second.title == "second_updated"
assert updated_second.visits == 21
assert updated_second.public == false
end
end
end
| 37.814035
| 152
| 0.628176
|
93df0ebe9243d3c9cf1f5c768beea9e947f2fbaa
| 1,724
|
exs
|
Elixir
|
rel/config.exs
|
stevegrossi/ex_venture
|
e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa
|
[
"MIT"
] | 1
|
2019-02-10T10:22:39.000Z
|
2019-02-10T10:22:39.000Z
|
rel/config.exs
|
stevegrossi/ex_venture
|
e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa
|
[
"MIT"
] | null | null | null |
rel/config.exs
|
stevegrossi/ex_venture
|
e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa
|
[
"MIT"
] | null | null | null |
# Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
Path.join(["rel", "plugins", "*.exs"])
|> Path.wildcard()
|> Enum.map(&Code.eval_file/1)

use Mix.Releases.Config,
  # This sets the default release built by `mix release`
  default_release: :default,
  # This sets the default environment used by `mix release`
  default_environment: Mix.env()

# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/configuration.html

# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile

environment :dev do
  set dev_mode: true
  set include_erts: false
  set cookie: :"(*.VMz%3=&MrBo$r]M`&s7a~ok{eecL=/U?r_|0!Rbq5mLy[wCzsDV^j>5m%W_,S"
end

environment :prod do
  set include_erts: true
  set include_src: false
  set cookie: :"<U}DQ)|8,I)&O_eEHQ$i8TTI]FSbKc(OWHGRhhj5uv/f_mUR_!Qx%KzTK?s|YN}4"

  # Load runtime configuration from the host at boot.
  set config_providers: [
    {Mix.Releases.Config.Providers.Elixir, ["/etc/exventure.config.exs"]}
  ]
end

# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default

release :ex_venture do
  set version: current_version(:ex_venture)

  set applications: [
    :parse_trans,
    :runtime_tools,
    :unsafe
  ]

  # NOTE: the dangling comma after the last entry was removed — Elixir does
  # not allow trailing commas in lists/keyword lists and it is a syntax error.
  set commands: [
    migrate: "rel/commands/migrate.sh",
    seed: "rel/commands/seed.sh"
  ]
end
| 29.724138
| 81
| 0.722738
|
93df51ab5b84c661394d8ad589e57f941b682aa6
| 324
|
ex
|
Elixir
|
lib/clients/gravity.ex
|
artsy/aprb
|
9e93200462a76823c831b92f02bddcf1b326a451
|
[
"MIT"
] | 11
|
2016-08-18T23:18:57.000Z
|
2019-05-03T17:46:55.000Z
|
lib/clients/gravity.ex
|
artsy/aprb
|
9e93200462a76823c831b92f02bddcf1b326a451
|
[
"MIT"
] | 105
|
2016-08-17T23:36:07.000Z
|
2019-09-26T18:14:24.000Z
|
lib/clients/gravity.ex
|
artsy/aprb
|
9e93200462a76823c831b92f02bddcf1b326a451
|
[
"MIT"
] | 14
|
2016-08-17T17:23:45.000Z
|
2019-09-16T16:14:59.000Z
|
defmodule Gravity do
  @moduledoc false

  use HTTPoison.Base

  # Prefix every request path with the configured Gravity API base URL.
  def process_url(url) do
    Application.get_env(:aprb, :gravity_api_url) <> url
  end

  # Replace the outgoing headers with the app-token header Gravity expects.
  # (Incoming headers are intentionally discarded, as before.)
  def process_request_headers(_headers) do
    [{"X-XAPP-TOKEN", Application.get_env(:aprb, :gravity_api_token)}]
  end

  # Decode every response body as JSON.
  def process_response_body(body), do: Poison.decode!(body)
end
| 29.454545
| 127
| 0.734568
|
93df75b2066d8e69457512007aacfdadedd9913b
| 72
|
ex
|
Elixir
|
lib/ketbin_web/views/user_reset_password_view.ex
|
ATechnoHazard/katbin
|
20a0b45954cf7819cd9d51c401db06be0f47666b
|
[
"MIT"
] | 4
|
2020-08-05T20:05:34.000Z
|
2020-10-01T10:01:56.000Z
|
lib/ketbin_web/views/user_reset_password_view.ex
|
ATechnoHazard/katbin
|
20a0b45954cf7819cd9d51c401db06be0f47666b
|
[
"MIT"
] | 1
|
2020-07-08T05:02:12.000Z
|
2020-09-25T10:05:11.000Z
|
lib/ketbin_web/views/user_reset_password_view.ex
|
ATechnoHazard/katbin
|
20a0b45954cf7819cd9d51c401db06be0f47666b
|
[
"MIT"
] | 1
|
2020-08-30T12:59:49.000Z
|
2020-08-30T12:59:49.000Z
|
defmodule KetbinWeb.UserResetPasswordView do
@moduledoc false
# Phoenix view for the user reset-password pages; by convention it renders
# the matching `user_reset_password` templates. No custom helpers needed.
use KetbinWeb, :view
end
| 18
| 44
| 0.833333
|
93df8867106061d2e18ff75df215ee5322121106
| 1,801
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v2/model/unit_invoice_tax_line.ex
|
kaaboaye/elixir-google-api
|
1896784c4342151fd25becd089a5beb323eff567
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/unit_invoice_tax_line.ex
|
kaaboaye/elixir-google-api
|
1896784c4342151fd25becd089a5beb323eff567
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/unit_invoice_tax_line.ex
|
kaaboaye/elixir-google-api
|
1896784c4342151fd25becd089a5beb323eff567
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.UnitInvoiceTaxLine do
@moduledoc """
Model for a single tax line of a unit invoice.

## Attributes

* `taxAmount` (*type:* `GoogleApi.Content.V2.Model.Price.t`, *default:* `nil`) - [required] Tax amount for the tax type.
* `taxName` (*type:* `String.t`, *default:* `nil`) - Optional name of the tax type. This should only be provided if taxType is otherFeeTax.
* `taxType` (*type:* `String.t`, *default:* `nil`) - [required] Type of the tax.
"""
# NOTE: auto-generated file (see header) — do not hand-edit the field list.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:taxAmount => GoogleApi.Content.V2.Model.Price.t(),
:taxName => String.t(),
:taxType => String.t()
}
field(:taxAmount, as: GoogleApi.Content.V2.Model.Price)
field(:taxName)
field(:taxType)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.UnitInvoiceTaxLine do
  # Delegates to the decode/2 generated on the model by GoogleApi.Gax.ModelBase,
  # which knows how to build nested structs (e.g. the Price in taxAmount).
  def decode(value, options) do
    GoogleApi.Content.V2.Model.UnitInvoiceTaxLine.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.UnitInvoiceTaxLine do
  # All generated models share one encoding path in GoogleApi.Gax.ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.981132
| 143
| 0.712937
|
93dfa01ebc55b23ea3d78c3d84d2444303d3b5f7
| 994
|
ex
|
Elixir
|
lib/execjs/runtime.ex
|
solvedata/execjs
|
a2fc3adf2b915f2f90c0a47eb9b3553838202c1c
|
[
"CC0-1.0"
] | 126
|
2015-01-08T11:55:52.000Z
|
2022-01-06T01:26:48.000Z
|
lib/execjs/runtime.ex
|
solvedata/execjs
|
a2fc3adf2b915f2f90c0a47eb9b3553838202c1c
|
[
"CC0-1.0"
] | 7
|
2016-06-16T06:32:39.000Z
|
2020-01-30T21:09:58.000Z
|
lib/execjs/runtime.ex
|
solvedata/execjs
|
a2fc3adf2b915f2f90c0a47eb9b3553838202c1c
|
[
"CC0-1.0"
] | 25
|
2015-07-06T14:42:56.000Z
|
2021-10-05T18:14:51.000Z
|
defmodule Execjs.Runtime do
  @moduledoc "Defines the `defruntime/2` macro to define JavaScript runtimes."
  alias Mix.Project
  # Captured at compile time so runner_path/1 resolves priv/ files for THIS
  # application, regardless of which app later calls the function.
  app = Project.config()[:app]
  # Absolute path of a runner script shipped in this app's priv/ directory.
  def runner_path(runner) do
    Path.join([:code.priv_dir(unquote(app)), runner])
  end
  # Generates a runtime module. Expected options (from usage below):
  #   :executables - candidate binary names searched on $PATH
  #   :arguments   - extra CLI arguments (optional, defaults to [])
  #   :runner      - EEx template file in priv/ compiled into template/1
  defmacro defruntime(runtime, options) do
    name = Macro.to_string(runtime)
    quote do
      defmodule unquote(runtime) do
        @moduledoc "Runtime definition for #{unquote(name)}."
        require EEx
        alias Execjs.Runtime
        def executables, do: unquote(options[:executables])
        # First executable from the candidate list found on the system PATH.
        def command, do: Enum.find(executables(), &System.find_executable(&1))
        def arguments, do: unquote(options[:arguments] || [])
        def available?, do: not (command() == nil)
        # @external_resource makes the module recompile when the runner changes.
        @runner_path Runtime.runner_path(unquote(options[:runner]))
        @external_resource @runner_path
        EEx.function_from_file(:def, :template, @runner_path, [:source])
      end
      # Accumulates the defined runtime on the caller's @runtimes attribute.
      @runtimes unquote(runtime)
    end
  end
end
| 24.243902
| 78
| 0.660966
|
93dfa89b1135a0d620d739f3487b88cb304d2638
| 1,817
|
ex
|
Elixir
|
clients/tool_results/lib/google_api/tool_results/v1beta3/model/project_settings.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | null | null | null |
clients/tool_results/lib/google_api/tool_results/v1beta3/model/project_settings.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/tool_results/lib/google_api/tool_results/v1beta3/model/project_settings.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ToolResults.V1beta3.Model.ProjectSettings do
  @moduledoc """
  Per-project settings for the Tool Results service.
  ## Attributes
  * `defaultBucket` (*type:* `String.t`, *default:* `nil`) - The name of the Google Cloud Storage bucket to which results are written. By default, this is unset. In update request: optional In response: optional
  * `name` (*type:* `String.t`, *default:* `nil`) - The name of the project's settings. Always of the form: projects/{project-id}/settings In update request: never set In response: always set
  """
  # Provides the struct definition plus the field/1 macro used below.
  use GoogleApi.Gax.ModelBase
  # camelCase keys intentionally mirror the JSON wire format.
  @type t :: %__MODULE__{
          :defaultBucket => String.t(),
          :name => String.t()
        }
  field(:defaultBucket)
  field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.ToolResults.V1beta3.Model.ProjectSettings do
  # Delegates to the decode/2 generated on the model by GoogleApi.Gax.ModelBase.
  def decode(value, options) do
    GoogleApi.ToolResults.V1beta3.Model.ProjectSettings.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.ToolResults.V1beta3.Model.ProjectSettings do
  # All generated models share one encoding path in GoogleApi.Gax.ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.34
| 213
| 0.735278
|
93dfe29670e2c4a27d3d419b2b887569d1532a61
| 5,421
|
ex
|
Elixir
|
lib/ecto/model/queryable.ex
|
MSch/ecto
|
648f785b2202193921fe00d68bc334399f5edb93
|
[
"Apache-2.0"
] | 1
|
2015-08-27T13:17:10.000Z
|
2015-08-27T13:17:10.000Z
|
lib/ecto/model/queryable.ex
|
TanYewWei/ecto
|
916c6467d5f7368fa10ecd7cfcfd2d4a9924a282
|
[
"Apache-2.0"
] | null | null | null |
lib/ecto/model/queryable.ex
|
TanYewWei/ecto
|
916c6467d5f7368fa10ecd7cfcfd2d4a9924a282
|
[
"Apache-2.0"
] | null | null | null |
defmodule Ecto.Model.Queryable do
  @moduledoc """
  Defines a model as queryable.
  In order to create queries in Ecto, you need to pass a queryable
  data structure as argument. By using `Ecto.Model.Queryable` in
  your model, it imports the `queryable/2` macro.
  Assuming you have an entity named `Weather.Entity`, you can associate
  it with a model via:
      defmodule Weather do
        use Ecto.Model
        queryable "weather", Weather.Entity
      end
  Since this is a common pattern, Ecto allows developers to define an entity
  inlined in a model:
      defmodule Weather do
        use Ecto.Model
        queryable "weather" do
          field :city, :string
          field :temp_lo, :integer
          field :temp_hi, :integer
          field :prcp, :float, default: 0.0
        end
      end
  By making it queryable, three functions are added to the model:
  * `new/0` - simply delegates to `entity.new/0`
  * `new/1` - simply delegates to `entity.new/1`
  * `__model__/1` - reflection functions about the source and entity
  This module also automatically imports `from/1` and `from/2`
  from `Ecto.Query` as a convenience.
  ## Entity defaults
  When using the block syntax, the created entity uses the usual default
  of a primary key named `:id`, of type `:integer`. This can be customized
  by passing `primary_key: false` to queryable:
      queryable "weather", primary_key: false do
        ...
      end
  Or by passing a tuple in the format `{ field, type, opts }`:
      queryable "weather", primary_key: { :custom_field, :string, [] } do
        ...
      end
  Global defaults can be specified via the `@queryable_defaults` attribute.
  This is useful if you want to use a different default primary key
  through your entire application.
  The supported options are:
  * `primary_key` - either `false`, or a `{ field, type, opts }` tuple
  * `foreign_key_type` - sets the type for any belongs_to associations.
                         This can be overrided using the `:type` option
                         to the `belongs_to` statement. Defaults to
                         type `:integer`
  ## Reflection
  Any queryable model module will generate the `__model__` function that can be
  used for runtime introspection of the model.
  * `__model__(:source)` - Returns the "source" as given to `queryable/2`;
  * `__model__(:entity)` - Returns the entity module as given to or generated by
    `queryable/2`;
  ## Example
      defmodule MyApp.Model do
        defmacro __using__(_) do
          quote do
            @queryable_defaults primary_key: { :uuid, :string, [] },
                                foreign_key_type: :string
            use Ecto.Model
          end
        end
      end
      defmodule MyApp.Post do
        use MyApp.Model
        queryable "posts" do
          has_many :comments, MyApp.Comment
        end
      end
      defmodule MyApp.Comment do
        use MyApp.Model
        queryable "comments" do
          belongs_to :post, MyApp.Comment
        end
      end
  By using `MyApp.Model`, any `MyApp.Post` and `MyApp.Comment` entities
  will get the `:uuid` field, with type `:string` as the primary key.
  The `belongs_to` association on `MyApp.Comment` will also now require
  that `:post_id` be of `:string` type to reference the `:uuid` of a
  `MyApp.Post` entity.
  """
  @doc false
  defmacro __using__(_) do
    # Imports from/2 for query building plus queryable/2,3 from this module.
    quote do
      import Ecto.Query, only: [from: 2]
      import unquote(__MODULE__)
    end
  end
  @doc """
  Defines a queryable name and its entity.
  The source and entity can be accessed during the model compilation
  via `@ecto_source` and `@ecto_entity`.
  ## Example
      defmodule Post do
        use Ecto.Model
        queryable "posts", Post.Entity
      end
  """
  defmacro queryable(source, entity)
  @doc """
  Defines a queryable name and the entity definition inline. `opts` will be
  given to the `use Ecto.Entity` call, see `Ecto.Entity`.
  ## Examples
      # The two following Model definitions are equivalent
      defmodule Post do
        use Ecto.Model
        queryable "posts" do
          field :text, :string
        end
      end
      defmodule Post do
        use Ecto.Model
        defmodule Entity do
          use Ecto.Entity, model: Post
          field :text, :string
        end
        queryable "posts", Entity
      end
  """
  # Bodiless head declares the default for opts; clauses follow.
  defmacro queryable(source, opts \\ [], do: block)
  # Block form: builds an inline `Entity` submodule from the do-block, merging
  # the caller's @queryable_defaults under the explicit opts, then delegates
  # to the two-argument form below.
  defmacro queryable(source, opts, [do: block]) do
    quote do
      opts =
        (Module.get_attribute(__MODULE__, :queryable_defaults) || [])
        |> Keyword.merge(unquote(opts))
        |> Keyword.put(:model, __MODULE__)
      defmodule Entity do
        use Ecto.Entity, opts
        unquote(block)
      end
      queryable(unquote(source), Entity)
    end
  end
  # Entity form: records source/entity on module attributes and generates the
  # delegation (new/0, new/1) and reflection (__model__/1, __queryable__/0)
  # functions on the caller.
  defmacro queryable(source, [], entity) do
    quote do
      @ecto_source unquote(source)
      @ecto_entity unquote(entity)
      @doc "Delegates to #{@ecto_entity}.new/0"
      def new(), do: @ecto_entity.new()
      @doc "Delegates to #{@ecto_entity}.new/1"
      def new(params), do: @ecto_entity.new(params)
      @doc false
      def __model__(:source), do: @ecto_source
      def __model__(:entity), do: @ecto_entity
      @doc false
      def __queryable__,
        do: Ecto.Query.Query[from: { @ecto_source, @ecto_entity, __MODULE__ }]
    end
  end
end
| 26.970149
| 80
| 0.627006
|
93dffcda60deedda3fab6fe2e39b183f888d91be
| 6,067
|
ex
|
Elixir
|
apps/demon_spirit_web/lib/demon_spirit_web/live/live_game_show.ex
|
mreishus/demon_spirit_umbrella
|
1ab9161427361ac7d35132fce2aed36923896a4b
|
[
"MIT"
] | 12
|
2019-09-17T13:47:57.000Z
|
2021-12-11T17:11:11.000Z
|
apps/demon_spirit_web/lib/demon_spirit_web/live/live_game_show.ex
|
mreishus/demon_spirit_umbrella
|
1ab9161427361ac7d35132fce2aed36923896a4b
|
[
"MIT"
] | 181
|
2019-10-15T01:21:44.000Z
|
2021-08-31T19:26:54.000Z
|
apps/demon_spirit_web/lib/demon_spirit_web/live/live_game_show.ex
|
mreishus/demon_spirit_umbrella
|
1ab9161427361ac7d35132fce2aed36923896a4b
|
[
"MIT"
] | 2
|
2020-07-11T02:18:46.000Z
|
2021-05-31T10:46:39.000Z
|
defmodule DemonSpiritWeb.LiveGameShow do
  @moduledoc """
  LiveGameShow: This is the liveView of the "show" action of the game controller.
  If you are watching or playing a game, you're using this module.

  State changes are pushed to all viewers of a game over the shared
  "game-topic:<name>" PubSub topic (see `topic_for/1` / `notify/1`), and
  viewer presence is tracked with `DemonSpiritWeb.Presence`.
  """
  use Phoenix.LiveView
  require Logger
  alias DemonSpiritWeb.{Endpoint, GameUIServer, GameView, Presence}
  alias DemonSpiritWeb.Router.Helpers, as: Routes
  # Rendering is delegated to the regular view's live_show.html template.
  def render(assigns) do
    GameView.render("live_show.html", assigns)
  end
  # Mounts the view for one guest: subscribes to the game topic, registers
  # presence, seats the guest if a seat is free, and starts the 1s tick timer.
  # flip_per flips the board orientation when the guest plays black.
  def mount(_params, %{"game_name" => game_name, "guest" => guest}, socket) do
    topic = topic_for(game_name)
    if connected?(socket), do: Endpoint.subscribe(topic)
    {:ok, _} = Presence.track(self(), topic, guest.id, guest)
    state = GameUIServer.sit_down_if_possible(game_name, guest)
    tick_ref = create_tick_interval(socket, state)
    notify(topic)
    socket =
      assign(socket,
        game_name: game_name,
        topic: topic,
        state: state,
        guest: guest,
        users: [],
        flip_per: guest == state.black,
        tick_ref: tick_ref
      )
    {:ok, socket}
  end
  # Update state every 1 second. This is so we can
  # see the chess timers counting down.
  # Returns the timer ref (or nil on the initial static render).
  defp create_tick_interval(socket, _state) do
    {:ok, tick_ref} =
      if connected?(socket) do
        :timer.send_interval(1000, self(), :tick)
      else
        {:ok, nil}
      end
    tick_ref
  end
  ## Event: "click-square-3-3" (Someone clicked on square (3,3))
  def handle_event(
        "click-square-" <> coords_str,
        _value,
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    {x, y} = extract_coords(coords_str)
    Logger.info("Game #{game_name}: Clicked on piece: #{x} #{y}")
    state = GameUIServer.click(game_name, {x, y}, guest)
    notify(topic)
    # flip_per is recomputed because a click may have seated the guest as black.
    {:noreply, assign(socket, state: state, flip_per: guest == state.black)}
  end
  # Guest marked themselves ready to start.
  def handle_event(
        "click-ready",
        _value,
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    Logger.info("Game #{game_name}: Someone clicked ready")
    state = GameUIServer.ready(game_name, guest)
    notify(topic)
    {:noreply, assign(socket, state: state)}
  end
  # Guest withdrew their ready state.
  def handle_event(
        "click-not-ready",
        _value,
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    Logger.info("Game #{game_name}: Someone clicked not ready")
    state = GameUIServer.not_ready(game_name, guest)
    notify(topic)
    {:noreply, assign(socket, state: state)}
  end
  # Guest leaves: vacate the seat if seated, then redirect to the game index.
  def handle_event(
        "click-leave",
        _value,
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    Logger.info("Game #{game_name}: Someone clicked leave")
    state = GameUIServer.stand_up_if_possible(game_name, guest)
    notify(topic)
    socket =
      socket
      |> assign(state: state)
      |> redirect(to: Routes.game_path(socket, :index))
    {:stop, socket}
  end
  # Drag-and-drop: piece picked up at source square {sx, sy}.
  def handle_event(
        "drag-piece",
        %{"sx" => sx, "sy" => sy},
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    state = GameUIServer.drag_start(game_name, {sx, sy}, guest)
    notify(topic)
    {:noreply, assign(socket, state: state)}
  end
  # Drag-and-drop: drag ended without a drop.
  def handle_event(
        "drag-end",
        _val,
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    state = GameUIServer.drag_end(game_name, guest)
    notify(topic)
    {:noreply, assign(socket, state: state)}
  end
  # Drag-and-drop: piece dropped, moving from {sx, sy} to {tx, ty}.
  def handle_event(
        "drop-piece",
        %{"sx" => sx, "sy" => sy, "tx" => tx, "ty" => ty},
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    state = GameUIServer.drag_drop(game_name, {sx, sy}, {tx, ty}, guest)
    notify(topic)
    {:noreply, assign(socket, state: state)}
  end
  # When several candidate moves share a source/target, the guest picks the
  # i-th one; "i" arrives as a string from the DOM and must parse exactly.
  def handle_event(
        "clarify-move",
        %{"i" => i},
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    {i, ""} = Integer.parse(i)
    state = GameUIServer.clarify_move(game_name, i, guest)
    notify(topic)
    {:noreply, assign(socket, state: state)}
  end
  # Guest dismissed the move-clarification prompt.
  def handle_event(
        "cancel-clarify",
        _val,
        socket = %{assigns: %{game_name: game_name, guest: guest, topic: topic}}
      ) do
    state = GameUIServer.clarify_cancel(game_name, guest)
    notify(topic)
    {:noreply, assign(socket, state: state)}
  end
  # Parses "3-3" into {3, 3}; asserts both parts are clean integers.
  defp extract_coords(coords_str) do
    [{x, ""}, {y, ""}] = coords_str |> String.split("-") |> Enum.map(&Integer.parse/1)
    {x, y}
  end
  # Tells every OTHER viewer of this game to re-fetch state (the caller already
  # has the fresh state, hence broadcast_from/4 which excludes self).
  def notify(topic) do
    Endpoint.broadcast_from(self(), topic, "state_update", %{})
  end
  # PubSub topic shared by all viewers of one game.
  def topic_for(game_name) do
    "game-topic:" <> game_name
  end
  # Handle incoming "state_updates": Game state has changed
  def handle_info(
        %{event: "state_update"},
        socket = %{assigns: %{game_name: game_name}}
      ) do
    state = GameUIServer.state(game_name)
    {:noreply, assign(socket, state: state)}
  end
  # Handle "presence_diff", someone joined or left
  def handle_info(%{event: "presence_diff"}, socket = %{assigns: %{topic: topic}}) do
    users =
      Presence.list(topic)
      |> Enum.map(fn {_user_id, data} ->
        data[:metas]
        |> List.first()
      end)
    {:noreply, assign(socket, users: users)}
  end
  # Handle ":tick", a request to update game state on a timer
  def handle_info(
        :tick,
        socket = %{assigns: %{game_name: game_name, tick_ref: tick_ref}}
      ) do
    state = GameUIServer.state(game_name)
    if tick_ref != nil and stop_ticking?(state) do
      :timer.cancel(tick_ref)
    end
    {:noreply, assign(socket, state: state)}
  end
  # There's a winner or game has been alive for a long time
  defp stop_ticking?(state) do
    state.game.winner != nil or game_alive_too_long?(state)
  end
  # Game alive more than 4 hours
  defp game_alive_too_long?(game_state) do
    DateTime.diff(DateTime.utc_now(), game_state.created_at) > 60 * 60 * 4
  end
end
| 28.087963
| 86
| 0.621394
|
93e026439ab1b836a324548474e75c1d12fc104f
| 200
|
ex
|
Elixir
|
lib/phone/vu.ex
|
davidkovsky/phone
|
83108ab1042efe62778c7363f5d02ef888883408
|
[
"Apache-2.0"
] | 97
|
2016-04-05T13:08:41.000Z
|
2021-12-25T13:08:34.000Z
|
lib/phone/vu.ex
|
davidkovsky/phone
|
83108ab1042efe62778c7363f5d02ef888883408
|
[
"Apache-2.0"
] | 70
|
2016-06-14T00:56:00.000Z
|
2022-02-10T19:43:14.000Z
|
lib/phone/vu.ex
|
davidkovsky/phone
|
83108ab1042efe62778c7363f5d02ef888883408
|
[
"Apache-2.0"
] | 31
|
2016-04-21T22:26:12.000Z
|
2022-01-24T21:40:00.000Z
|
defmodule Phone.VU do
  @moduledoc false
  # Country definition for Vanuatu, built on the Helper.Country macros.
  use Helper.Country
  # Capture groups: country code "678", empty area code, 5-digit subscriber number.
  def regex, do: ~r/^(678)()(.{5})/
  def country, do: "Vanuatu"
  # ISO 3166-1 alpha-2 / alpha-3 codes.
  def a2, do: "VU"
  def a3, do: "VUT"
  # Registers this module for numbers whose leading digits are "678".
  matcher(:regex, ["678"])
end
| 15.384615
| 35
| 0.595
|
93e02710b31ed7516aab359d719a098151da0d73
| 1,624
|
exs
|
Elixir
|
test/wallet_test.exs
|
rpip/upvest-elixir
|
160177161cc4c36b39174569df5a0f68d4ba2ede
|
[
"MIT"
] | null | null | null |
test/wallet_test.exs
|
rpip/upvest-elixir
|
160177161cc4c36b39174569df5a0f68d4ba2ede
|
[
"MIT"
] | 6
|
2019-09-09T00:47:30.000Z
|
2019-10-23T14:45:13.000Z
|
test/wallet_test.exs
|
rpip/upvest-elixir
|
160177161cc4c36b39174569df5a0f68d4ba2ede
|
[
"MIT"
] | 1
|
2019-09-16T13:08:06.000Z
|
2019-09-16T13:08:06.000Z
|
defmodule Upvest.WalletTest do
  # Integration tests for Upvest.Clientele.Wallet against the live API;
  # async: false because the tests share one client and remote account state.
  use ExUnit.Case, async: false
  alias Upvest.Clientele.Wallet
  alias Upvest.Clientele.Wallet.Signature
  import Upvest.TestHelper
  doctest Upvest.Clientele.Wallet
  # OAuth-authenticated client built at compile time by the test helper.
  @client new_test_client(:oauth)
  # NOTE(review): read at compile time — changing the env var requires recompiling.
  @user_password System.get_env("UPVEST_TEST_PASSWORD")
  # Fixture whose asset_id seeds wallet creation in setup_all.
  @eth_wallet %Wallet{
    id: "8fc19cd0-8f50-4626-becb-c9e284d2315b",
    balances: [
      %{
        amount: 0,
        asset_id: "cfc59efb-3b21-5340-ae96-8cadb4ce31a8",
        name: "Example coin",
        symbol: "COIN",
        exponent: 12
      }
    ],
    protocol: "ethereum_ropsten",
    address: "0xc4a284e55ab2f1c2feb23a0bfc56fca31b0c94a3",
    status: "ACTIVE",
    index: 0
  }
  # Creates one wallet shared by all tests via the :wallet context key.
  setup_all do
    asset_id = Enum.at(@eth_wallet.balances, 0).asset_id
    {:ok, wallet} = Wallet.create(@client, @user_password, asset_id)
    {:ok, [wallet: wallet]}
  end
  test "list all wallets" do
    {:ok, wallets} = Wallet.all(@client)
    assert is_list(wallets)
    assert Enum.random(wallets).__struct__ == Wallet
  end
  test "list subset of wallets" do
    {:ok, wallets} = Wallet.list(@client, 210)
    assert is_list(wallets)
    assert Enum.random(wallets).__struct__ == Wallet
  end
  test "sign wallet", context do
    # Hex-encodes a random payload and requests a hex-in/hex-out signature.
    to_sign = random_string() |> Base.encode16(case: :lower)
    {:ok, resp} = Wallet.sign(@client, @user_password, context.wallet.id, to_sign, "hex", "hex")
    assert resp.__struct__ == Signature
  end
  test "retrieve wallet", context do
    {:ok, wallet} = Wallet.retrieve(@client, context.wallet.id)
    assert wallet.__struct__ == Wallet
    assert context.wallet.id == wallet.id
  end
end
| 26.193548
| 96
| 0.67303
|
93e03d43f63ea9a5ff1c7e09af8c9c9ae110bbf2
| 2,245
|
ex
|
Elixir
|
apps/parse/lib/parse/stop_times.ex
|
fjlanasa/api
|
c39bc393aea572bfb81754b2ea1adf9dda9ce24a
|
[
"MIT"
] | 62
|
2019-01-17T12:34:39.000Z
|
2022-03-20T21:49:47.000Z
|
apps/parse/lib/parse/stop_times.ex
|
fjlanasa/api
|
c39bc393aea572bfb81754b2ea1adf9dda9ce24a
|
[
"MIT"
] | 375
|
2019-02-13T15:30:50.000Z
|
2022-03-30T18:50:41.000Z
|
apps/parse/lib/parse/stop_times.ex
|
fjlanasa/api
|
c39bc393aea572bfb81754b2ea1adf9dda9ce24a
|
[
"MIT"
] | 14
|
2019-01-16T19:35:57.000Z
|
2022-02-26T18:55:54.000Z
|
defmodule Parse.StopTimes do
  @moduledoc """
  Parses the GTFS stop_times.txt file.

  `parse/2` streams the raw CSV blob, groups consecutive rows by trip, and
  emits `Model.Schedule` structs. When a `trip_fn` is given, rows whose trip
  cannot be resolved are dropped and the rest are enriched with the trip's
  route/direction/service.
  """
  @behaviour Parse
  import NimbleParsec
  import Parse.Helpers
  # Parses "H:MM:SS" or "HH:MM:SS" into [hours, minutes, seconds].
  # credo:disable-for-lines:4 Credo.Check.Refactor.PipeChainStart
  defparsec(
    :time,
    integer(min: 1, max: 2)
    |> ignore(string(":"))
    |> integer(2)
    |> ignore(string(":"))
    |> integer(2)
  )
  alias Model.{Schedule, Trip}
  require Logger
  @doc """
  Streams `Model.Schedule` structs out of the raw stop_times.txt `blob`.

  `trip_fn` (optional) maps a trip_id to a `%Model.Trip{}` or `nil`; when
  provided, schedules for unknown trips are dropped and the rest pick up
  `route_id`, `direction_id`, and `service_id` from the trip.
  """
  def parse(blob, trip_fn \\ nil) do
    blob
    |> BinaryLineSplit.stream!()
    |> SimpleCSV.stream()
    # Rows for the same trip are contiguous in GTFS files, so chunk_by works.
    |> Stream.chunk_by(& &1["trip_id"])
    |> Stream.flat_map(&parse_rows(&1, trip_fn))
  end
  @doc """
  Converts one CSV row (string-keyed map) into a `Model.Schedule`.
  """
  def parse_row(row) do
    %Schedule{
      trip_id: copy(row["trip_id"]),
      stop_id: copy(row["stop_id"]),
      arrival_time: convert_time(row["arrival_time"], row["drop_off_type"]),
      departure_time: convert_time(row["departure_time"], row["pickup_type"]),
      stop_sequence: String.to_integer(row["stop_sequence"]),
      stop_headsign: optional_copy(row["stop_headsign"]),
      pickup_type: pick_drop_type(row["pickup_type"]),
      drop_off_type: pick_drop_type(row["drop_off_type"]),
      # GTFS default for a blank/missing timepoint is 1 (exact), so anything
      # other than an explicit "0" counts as a timepoint.
      timepoint?: row["timepoint"] != "0"
    }
  end
  # Type 1 means "no pickup/drop off available", so there is no time to report.
  defp convert_time(_, "1"), do: nil
  # Seconds after noon-minus-12h; GTFS times can exceed 24:00:00, which the
  # parser handles because hours aren't range-checked.
  defp convert_time(binary, _) do
    {:ok, [h, m, s], _, _, _, _} = time(binary)
    3600 * h + 60 * m + s
  end
  defp pick_drop_type("0"), do: 0
  defp pick_drop_type("1"), do: 1
  defp pick_drop_type("2"), do: 2
  defp pick_drop_type("3"), do: 3
  # GTFS declares pickup_type/drop_off_type optional: a blank value (or an
  # absent column, which SimpleCSV would surface as nil) means 0, "regularly
  # scheduled". Previously these rows crashed with a FunctionClauseError.
  defp pick_drop_type(empty) when empty in ["", nil], do: 0
  defp parse_rows(rows, nil) do
    rows
    |> Enum.map(&parse_row/1)
    |> position_first_row
    |> position_last_row
  end
  defp parse_rows([%{"trip_id" => trip_id} | _] = rows, trip_fn) do
    case trip_fn.(trip_id) do
      nil ->
        # Unknown trip: drop the whole chunk rather than emit orphan schedules.
        []
      %Trip{} = trip ->
        rows
        |> parse_rows(nil)
        |> Enum.map(
          &%{
            &1
            | route_id: trip.route_id,
              direction_id: trip.direction_id,
              service_id: trip.service_id
          }
        )
    end
  end
  # Marks the first schedule of a trip with position: :first.
  defp position_first_row([first | rest]) do
    first = %{first | position: :first}
    [first | rest]
  end
  # Marks the last schedule of a trip with position: :last (O(n) via reverse).
  defp position_last_row(list) do
    [last | rest] = Enum.reverse(list)
    Enum.reverse([%{last | position: :last} | rest])
  end
end
| 24.402174
| 78
| 0.598664
|
93e047f57b92d1f27feb9a14fc1ef90003141665
| 8,340
|
ex
|
Elixir
|
apps/schema_stitch/lib/query_generator.ex
|
emeryotopalik/absinthe-schema-stitching-example
|
922c2b76c7d722c1b8a009306570e134be8854d8
|
[
"Apache-2.0"
] | null | null | null |
apps/schema_stitch/lib/query_generator.ex
|
emeryotopalik/absinthe-schema-stitching-example
|
922c2b76c7d722c1b8a009306570e134be8854d8
|
[
"Apache-2.0"
] | null | null | null |
apps/schema_stitch/lib/query_generator.ex
|
emeryotopalik/absinthe-schema-stitching-example
|
922c2b76c7d722c1b8a009306570e134be8854d8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) New Relic Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
defmodule SchemaStitch.QueryGenerator do
  @moduledoc """
  Takes an Absinthe resolution struct and generates the relevant GraphQL query and variables.
  """
  alias Absinthe.Blueprint
  # Injected into every non-leaf selection so the remote response always
  # carries __typename for downstream type resolution.
  @typename_field %Blueprint.Document.Field{name: "__typename"}
  # Entry point: renders {query_string, variables_map} for the subtree rooted
  # at the resolution's definition. Only fragments/variables actually used by
  # that subtree are emitted.
  def render(%{definition: tree, path: path, fragments: fragments}) do
    root_operation_node = List.last(path)
    used_fragments = gather_fragments(tree, fragments)
    used_variables = gather_variables(tree, used_fragments)
    query = build_query(tree, root_operation_node, used_variables, used_fragments)
    {query, used_variables}
  end
  # Keeps only fragments spread in the tree or (transitively, one level via
  # gather_nested_fragments) inside other fragments.
  defp gather_fragments(tree, fragments) do
    used_fragments = gather_tree_fragments(tree) ++ gather_nested_fragments(fragments)
    Enum.filter(fragments, fn {name, _fragment} -> name in used_fragments end)
    |> Enum.into(%{})
  end
  defp gather_tree_fragments(%{selections: selections}) do
    Enum.flat_map(selections, &gather_tree_fragments(&1))
  end
  defp gather_tree_fragments(%Blueprint.Document.Fragment.Spread{name: name}) do
    [name]
  end
  defp gather_nested_fragments(fragments) do
    Enum.flat_map(fragments, fn {_name, fragment} -> gather_tree_fragments(fragment) end)
  end
  # Collects {name, value} pairs for every variable referenced by the tree or
  # by any used fragment; later duplicates win in the resulting map.
  def gather_variables(tree, fragments) do
    (gather_tree_variables(tree) ++ gather_fragment_variables(fragments))
    |> Enum.into(%{})
  end
  defp gather_tree_variables(%Blueprint.Document.Field{arguments: arguments, selections: []}) do
    gather_arguments(arguments)
  end
  defp gather_tree_variables(%Blueprint.Document.Field{
         arguments: arguments,
         selections: selections
       }) do
    argument_names = gather_arguments(arguments)
    Enum.reduce(selections, argument_names, fn selection, acc ->
      acc ++ gather_tree_variables(selection)
    end)
  end
  defp gather_tree_variables(%{selections: selections}) do
    Enum.reduce(selections, [], fn selection, acc ->
      acc ++ gather_tree_variables(selection)
    end)
  end
  # Catch-all for nodes without selections/arguments (e.g. fragment spreads).
  defp gather_tree_variables(_) do
    []
  end
  defp gather_fragment_variables(fragments) do
    Enum.reduce(fragments, [], fn {_name, fragment}, acc ->
      acc ++ gather_tree_variables(fragment)
    end)
  end
  # Extracts variable-backed arguments (flattening nested input objects),
  # discarding literal arguments (select_variable_arguments returns nil).
  def gather_arguments(arguments) do
    arguments
    |> Enum.map(&select_variable_arguments/1)
    |> List.flatten()
    |> Enum.reject(&is_nil/1)
  end
  defp select_variable_arguments(%{
         input_value: %{
           raw: %Blueprint.Input.RawValue{content: %Blueprint.Input.Variable{name: name}},
           data: data
         }
       }) do
    {name, data}
  end
  defp select_variable_arguments(%{
         input_value: %{
           normalized: %Absinthe.Blueprint.Input.Object{fields: fields}
         }
       }) do
    Enum.map(fields, &select_variable_arguments(&1))
  end
  defp select_variable_arguments(_argument) do
    nil
  end
  @indent_increment 2
  # Assembles the final document: "query Name($vars) { ...fields }" plus any
  # fragment definitions appended after the operation.
  defp build_query(tree, root_operation_node, used_variables, fragments) do
    root_operation_type = build_operation_type(root_operation_node)
    root_operation_name = build_operation_name(root_operation_node)
    root_operation_variables = build_operation_variables(root_operation_node, used_variables)
    field_selection = build_field_selection(tree, @indent_increment)
    spread_fragments = build_fragments(fragments)
    "#{root_operation_type}#{root_operation_name}#{root_operation_variables} " <>
      "{\n#{field_selection}\n}#{spread_fragments}"
  end
  defp build_operation_type(%Blueprint.Document.Operation{type: operation_type}) do
    "#{operation_type}"
  end
  defp build_operation_name(%Blueprint.Document.Operation{name: nil}) do
    ""
  end
  defp build_operation_name(%Blueprint.Document.Operation{name: operation_name}) do
    " #{operation_name}"
  end
  # Emits "($name: Type, ...)" for variables actually used, or "" when none.
  defp build_operation_variables(
         %Blueprint.Document.Operation{variable_definitions: variable_definitions},
         used_variables
       ) do
    used_variable_names = Map.keys(used_variables)
    variable_definitions
    |> Enum.filter(&(&1.name in used_variable_names))
    |> Enum.map(&build_operation_variable_definition/1)
    |> Enum.join(", ")
    |> case do
      "" -> ""
      variables -> "(#{variables})"
    end
  end
  defp build_operation_variable_definition(%Blueprint.Document.VariableDefinition{
         name: name,
         type: type
       }) do
    "$#{name}: #{build_variable_typename(type)}"
  end
  # Recursively renders wrapped type references: NonNull -> "T!", List -> "[T]".
  defp build_variable_typename(%Blueprint.TypeReference.NonNull{of_type: of_type}) do
    build_variable_typename(of_type) <> "!"
  end
  defp build_variable_typename(%Blueprint.TypeReference.List{of_type: of_type}) do
    "[" <> build_variable_typename(of_type) <> "]"
  end
  defp build_variable_typename(%Blueprint.TypeReference.Name{name: typename}) do
    typename
  end
  # Inline fragment: "... on Type { ... }".
  defp build_field_selection(
         %Blueprint.Document.Fragment.Inline{
           selections: children,
           schema_node: %{name: type_name}
         },
         indent_level
       ) do
    indent = String.duplicate(" ", indent_level)
    subtree =
      Enum.map(children, &build_field_selection(&1, indent_level + @indent_increment))
      |> Enum.join("\n")
    "#{indent}... on #{type_name} {\n#{subtree}\n#{indent}}"
  end
  # Leaf field: just "name(args)".
  defp build_field_selection(
         %Blueprint.Document.Field{selections: []} = blueprint_node,
         indent_level
       ) do
    String.duplicate(" ", indent_level) <> build_field_name(blueprint_node)
  end
  # Composite field: prepends __typename, then recurses into children.
  defp build_field_selection(
         %Blueprint.Document.Field{selections: children} = blueprint_node,
         indent_level
       ) do
    indent = String.duplicate(" ", indent_level)
    field_name = build_field_name(blueprint_node)
    subtree =
      [@typename_field | children]
      |> Enum.map(&build_field_selection(&1, indent_level + @indent_increment))
      |> Enum.join("\n")
    "#{indent}#{field_name} {\n#{subtree}\n#{indent}}"
  end
  # Named fragment spread: "...fragmentName".
  defp build_field_selection(
         %Blueprint.Document.Fragment.Spread{name: fragment_name},
         indent_level
       ) do
    indent = String.duplicate(" ", indent_level)
    "#{indent}...#{fragment_name}"
  end
  # Field name plus "(arg: value, ...)" — arguments that only exist because of
  # schema defaults are omitted so the remote schema's own defaults apply.
  defp build_field_name(%Blueprint.Document.Field{name: name, arguments: args}) do
    args
    |> exclude_args_with_default_values
    |> case do
      [] -> name
      used_input_args -> "#{name}(#{build_input_args(used_input_args)})"
    end
  end
  defp exclude_args_with_default_values(args) do
    Enum.reject(args, fn
      %{
        input_value: %Blueprint.Input.Value{
          normalized: %Absinthe.Blueprint.Input.Generated{}
        }
      } ->
        true
      _ ->
        false
    end)
  end
  defp build_input_args(args) do
    Enum.map(args, &build_input_arg(&1)) |> Enum.join(", ")
  end
  defp build_input_arg(%{
         name: arg_name,
         input_value: input_value
       }) do
    "#{arg_name}: #{build_input_value(input_value)}"
  end
  # Literal rendering of argument values; variable references win over the
  # normalized value so the remote server does the substitution.
  defp build_input_value(%Blueprint.Input.Value{
         raw: %Blueprint.Input.RawValue{content: %Blueprint.Input.Variable{name: var_name}}
       }) do
    "$#{var_name}"
  end
  defp build_input_value(%Blueprint.Input.Value{
         normalized: %Blueprint.Input.String{value: value}
       }) do
    "\"#{value}\""
  end
  defp build_input_value(%Blueprint.Input.Value{
         normalized: %Blueprint.Input.Object{fields: sub_fields}
       }) do
    "{ #{build_input_args(sub_fields)} }"
  end
  defp build_input_value(%Blueprint.Input.Value{
         normalized: %Blueprint.Input.List{items: items}
       }) do
    list_items = Enum.map(items, &build_input_value/1) |> Enum.join(", ")
    "[ #{list_items} ]"
  end
  defp build_input_value(%Blueprint.Input.Value{
         normalized: %Blueprint.Input.Null{}
       }) do
    "null"
  end
  # Fallback for scalars whose normalized node exposes :value (ints, floats,
  # booleans, enums) — rendered via interpolation by the caller.
  defp build_input_value(%Blueprint.Input.Value{normalized: %{value: value}}) do
    value
  end
  defp build_fragments(fragments) do
    Enum.map(fragments, &build_fragment(&1))
    |> Enum.join("")
  end
  defp build_fragment(
         {fragment_name, %{type_condition: %{name: type_name}, selections: children}}
       ) do
    subtree = Enum.map(children, &build_field_selection(&1, @indent_increment)) |> Enum.join("\n")
    "\n\nfragment #{fragment_name} on #{type_name} {\n#{subtree}\n}"
  end
end
| 28.271186
| 98
| 0.680935
|
93e0708c69778296d4c20f1537a7b5196c4cf5b9
| 16,737
|
ex
|
Elixir
|
lib/aws/generated/app_stream.ex
|
benmmari/aws-elixir
|
b97477498a9e8ba0d46a09255302d88c6a1c8573
|
[
"Apache-2.0"
] | null | null | null |
lib/aws/generated/app_stream.ex
|
benmmari/aws-elixir
|
b97477498a9e8ba0d46a09255302d88c6a1c8573
|
[
"Apache-2.0"
] | null | null | null |
lib/aws/generated/app_stream.ex
|
benmmari/aws-elixir
|
b97477498a9e8ba0d46a09255302d88c6a1c8573
|
[
"Apache-2.0"
] | null | null | null |
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.AppStream do
@moduledoc """
Amazon AppStream 2.0
This is the *Amazon AppStream 2.0 API Reference*. This documentation
provides descriptions and syntax for each of the actions and data types in
AppStream 2.0. AppStream 2.0 is a fully managed, secure application
streaming service that lets you stream desktop applications to users
without rewriting applications. AppStream 2.0 manages the AWS resources
that are required to host and run your applications, scales automatically,
and provides access to your users on demand.
<note> You can call the AppStream 2.0 API operations by using an interface
VPC endpoint (interface endpoint). For more information, see [Access
AppStream 2.0 API Operations and CLI Commands Through an Interface VPC
Endpoint](https://docs.aws.amazon.com/appstream2/latest/developerguide/access-api-cli-through-interface-vpc-endpoint.html)
in the *Amazon AppStream 2.0 Administration Guide*.
</note> To learn more about AppStream 2.0, see the following resources:
<ul> <li> [Amazon AppStream 2.0 product
page](http://aws.amazon.com/appstream2)
</li> <li> [Amazon AppStream 2.0
documentation](http://aws.amazon.com/documentation/appstream2)
</li> </ul>
"""
# --- Public API: association and creation operations ---
# Each function is a thin wrapper that posts one AppStream JSON-RPC action
# via request/4; `input` is the raw request map and `options` is forwarded
# to the underlying HTTP client unchanged.

@doc """
Associates the specified fleet with the specified stack.
"""
def associate_fleet(client, input, options \\ []) do
  request(client, "AssociateFleet", input, options)
end

@doc """
Associates the specified users with the specified stacks. Users in a user
pool cannot be assigned to stacks with fleets that are joined to an Active
Directory domain.
"""
def batch_associate_user_stack(client, input, options \\ []) do
  request(client, "BatchAssociateUserStack", input, options)
end

@doc """
Disassociates the specified users from the specified stacks.
"""
def batch_disassociate_user_stack(client, input, options \\ []) do
  request(client, "BatchDisassociateUserStack", input, options)
end

@doc """
Copies the image within the same region or to a new region within the same
AWS account. Note that any tags you added to the image will not be copied.
"""
def copy_image(client, input, options \\ []) do
  request(client, "CopyImage", input, options)
end

@doc """
Creates a Directory Config object in AppStream 2.0. This object includes
the configuration information required to join fleets and image builders to
Microsoft Active Directory domains.
"""
def create_directory_config(client, input, options \\ []) do
  request(client, "CreateDirectoryConfig", input, options)
end

@doc """
Creates a fleet. A fleet consists of streaming instances that run a
specified image.
"""
def create_fleet(client, input, options \\ []) do
  request(client, "CreateFleet", input, options)
end

@doc """
Creates an image builder. An image builder is a virtual machine that is
used to create an image.

The initial state of the builder is `PENDING`. When it is ready, the state
is `RUNNING`.
"""
def create_image_builder(client, input, options \\ []) do
  request(client, "CreateImageBuilder", input, options)
end

# Function name is machine-generated from the action ("URL" -> u_r_l).
@doc """
Creates a URL to start an image builder streaming session.
"""
def create_image_builder_streaming_u_r_l(client, input, options \\ []) do
  request(client, "CreateImageBuilderStreamingURL", input, options)
end

@doc """
Creates a stack to start streaming applications to users. A stack consists
of an associated fleet, user access policies, and storage configurations.
"""
def create_stack(client, input, options \\ []) do
  request(client, "CreateStack", input, options)
end

@doc """
Creates a temporary URL to start an AppStream 2.0 streaming session for the
specified user. A streaming URL enables application streaming to be tested
without user setup.
"""
def create_streaming_u_r_l(client, input, options \\ []) do
  request(client, "CreateStreamingURL", input, options)
end

@doc """
Creates a usage report subscription. Usage reports are generated daily.
"""
def create_usage_report_subscription(client, input, options \\ []) do
  request(client, "CreateUsageReportSubscription", input, options)
end

@doc """
Creates a new user in the user pool.
"""
def create_user(client, input, options \\ []) do
  request(client, "CreateUser", input, options)
end
# --- Public API: deletion and read-only describe operations ---
# Same thin-wrapper pattern as above: one JSON-RPC action per function.

@doc """
Deletes the specified Directory Config object from AppStream 2.0. This
object includes the information required to join streaming instances to an
Active Directory domain.
"""
def delete_directory_config(client, input, options \\ []) do
  request(client, "DeleteDirectoryConfig", input, options)
end

@doc """
Deletes the specified fleet.
"""
def delete_fleet(client, input, options \\ []) do
  request(client, "DeleteFleet", input, options)
end

@doc """
Deletes the specified image. You cannot delete an image when it is in use.
After you delete an image, you cannot provision new capacity using the
image.
"""
def delete_image(client, input, options \\ []) do
  request(client, "DeleteImage", input, options)
end

@doc """
Deletes the specified image builder and releases the capacity.
"""
def delete_image_builder(client, input, options \\ []) do
  request(client, "DeleteImageBuilder", input, options)
end

@doc """
Deletes permissions for the specified private image. After you delete
permissions for an image, AWS accounts to which you previously granted
these permissions can no longer use the image.
"""
def delete_image_permissions(client, input, options \\ []) do
  request(client, "DeleteImagePermissions", input, options)
end

@doc """
Deletes the specified stack. After the stack is deleted, the application
streaming environment provided by the stack is no longer available to
users. Also, any reservations made for application streaming sessions for
the stack are released.
"""
def delete_stack(client, input, options \\ []) do
  request(client, "DeleteStack", input, options)
end

@doc """
Disables usage report generation.
"""
def delete_usage_report_subscription(client, input, options \\ []) do
  request(client, "DeleteUsageReportSubscription", input, options)
end

@doc """
Deletes a user from the user pool.
"""
def delete_user(client, input, options \\ []) do
  request(client, "DeleteUser", input, options)
end

@doc """
Retrieves a list that describes one or more specified Directory Config
objects for AppStream 2.0, if the names for these objects are provided.
Otherwise, all Directory Config objects in the account are described. These
objects include the configuration information required to join fleets and
image builders to Microsoft Active Directory domains.

Although the response syntax in this topic includes the account password,
this password is not returned in the actual response.
"""
def describe_directory_configs(client, input, options \\ []) do
  request(client, "DescribeDirectoryConfigs", input, options)
end

@doc """
Retrieves a list that describes one or more specified fleets, if the fleet
names are provided. Otherwise, all fleets in the account are described.
"""
def describe_fleets(client, input, options \\ []) do
  request(client, "DescribeFleets", input, options)
end

@doc """
Retrieves a list that describes one or more specified image builders, if
the image builder names are provided. Otherwise, all image builders in the
account are described.
"""
def describe_image_builders(client, input, options \\ []) do
  request(client, "DescribeImageBuilders", input, options)
end

@doc """
Retrieves a list that describes the permissions for shared AWS account IDs
on a private image that you own.
"""
def describe_image_permissions(client, input, options \\ []) do
  request(client, "DescribeImagePermissions", input, options)
end

@doc """
Retrieves a list that describes one or more specified images, if the image
names or image ARNs are provided. Otherwise, all images in the account are
described.
"""
def describe_images(client, input, options \\ []) do
  request(client, "DescribeImages", input, options)
end

@doc """
Retrieves a list that describes the streaming sessions for a specified
stack and fleet. If a UserId is provided for the stack and fleet, only
streaming sessions for that user are described. If an authentication type
is not provided, the default is to authenticate users using a streaming
URL.
"""
def describe_sessions(client, input, options \\ []) do
  request(client, "DescribeSessions", input, options)
end

@doc """
Retrieves a list that describes one or more specified stacks, if the stack
names are provided. Otherwise, all stacks in the account are described.
"""
def describe_stacks(client, input, options \\ []) do
  request(client, "DescribeStacks", input, options)
end

@doc """
Retrieves a list that describes one or more usage report subscriptions.
"""
def describe_usage_report_subscriptions(client, input, options \\ []) do
  request(client, "DescribeUsageReportSubscriptions", input, options)
end

@doc """
Retrieves a list that describes the UserStackAssociation objects. You must
specify either or both of the following:

<ul> <li> The stack name

</li> <li> The user name (email address of the user associated with the
stack) and the authentication type for the user

</li> </ul>
"""
def describe_user_stack_associations(client, input, options \\ []) do
  request(client, "DescribeUserStackAssociations", input, options)
end

@doc """
Retrieves a list that describes one or more specified users in the user
pool.
"""
def describe_users(client, input, options \\ []) do
  request(client, "DescribeUsers", input, options)
end
# --- Public API: user/fleet lifecycle, tagging and update operations ---
# Same thin-wrapper pattern: one JSON-RPC action per function.

@doc """
Disables the specified user in the user pool. Users can't sign in to
AppStream 2.0 until they are re-enabled. This action does not delete the
user.
"""
def disable_user(client, input, options \\ []) do
  request(client, "DisableUser", input, options)
end

@doc """
Disassociates the specified fleet from the specified stack.
"""
def disassociate_fleet(client, input, options \\ []) do
  request(client, "DisassociateFleet", input, options)
end

@doc """
Enables a user in the user pool. After being enabled, users can sign in to
AppStream 2.0 and open applications from the stacks to which they are
assigned.
"""
def enable_user(client, input, options \\ []) do
  request(client, "EnableUser", input, options)
end

@doc """
Immediately stops the specified streaming session.
"""
def expire_session(client, input, options \\ []) do
  request(client, "ExpireSession", input, options)
end

@doc """
Retrieves the name of the fleet that is associated with the specified
stack.
"""
def list_associated_fleets(client, input, options \\ []) do
  request(client, "ListAssociatedFleets", input, options)
end

@doc """
Retrieves the name of the stack with which the specified fleet is
associated.
"""
def list_associated_stacks(client, input, options \\ []) do
  request(client, "ListAssociatedStacks", input, options)
end

@doc """
Retrieves a list of all tags for the specified AppStream 2.0 resource. You
can tag AppStream 2.0 image builders, images, fleets, and stacks.

For more information about tags, see [Tagging Your
Resources](https://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html)
in the *Amazon AppStream 2.0 Administration Guide*.
"""
def list_tags_for_resource(client, input, options \\ []) do
  request(client, "ListTagsForResource", input, options)
end

@doc """
Starts the specified fleet.
"""
def start_fleet(client, input, options \\ []) do
  request(client, "StartFleet", input, options)
end

@doc """
Starts the specified image builder.
"""
def start_image_builder(client, input, options \\ []) do
  request(client, "StartImageBuilder", input, options)
end

@doc """
Stops the specified fleet.
"""
def stop_fleet(client, input, options \\ []) do
  request(client, "StopFleet", input, options)
end

@doc """
Stops the specified image builder.
"""
def stop_image_builder(client, input, options \\ []) do
  request(client, "StopImageBuilder", input, options)
end

@doc """
Adds or overwrites one or more tags for the specified AppStream 2.0
resource. You can tag AppStream 2.0 image builders, images, fleets, and
stacks.

Each tag consists of a key and an optional value. If a resource already has
a tag with the same key, this operation updates its value.

To list the current tags for your resources, use `ListTagsForResource`. To
disassociate tags from your resources, use `UntagResource`.

For more information about tags, see [Tagging Your
Resources](https://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html)
in the *Amazon AppStream 2.0 Administration Guide*.
"""
def tag_resource(client, input, options \\ []) do
  request(client, "TagResource", input, options)
end

@doc """
Disassociates one or more specified tags from the specified AppStream 2.0
resource.

To list the current tags for your resources, use `ListTagsForResource`.

For more information about tags, see [Tagging Your
Resources](https://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html)
in the *Amazon AppStream 2.0 Administration Guide*.
"""
def untag_resource(client, input, options \\ []) do
  request(client, "UntagResource", input, options)
end

@doc """
Updates the specified Directory Config object in AppStream 2.0. This object
includes the configuration information required to join fleets and image
builders to Microsoft Active Directory domains.
"""
def update_directory_config(client, input, options \\ []) do
  request(client, "UpdateDirectoryConfig", input, options)
end

@doc """
Updates the specified fleet.

If the fleet is in the `STOPPED` state, you can update any attribute except
the fleet name. If the fleet is in the `RUNNING` state, you can update the
`DisplayName`, `ComputeCapacity`, `ImageARN`, `ImageName`,
`IdleDisconnectTimeoutInSeconds`, and `DisconnectTimeoutInSeconds`
attributes. If the fleet is in the `STARTING` or `STOPPING` state, you
can't update it.
"""
def update_fleet(client, input, options \\ []) do
  request(client, "UpdateFleet", input, options)
end

@doc """
Adds or updates permissions for the specified private image.
"""
def update_image_permissions(client, input, options \\ []) do
  request(client, "UpdateImagePermissions", input, options)
end

@doc """
Updates the specified fields for the specified stack.
"""
def update_stack(client, input, options \\ []) do
  request(client, "UpdateStack", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, action, input, options) do
  # Every AppStream call is a signed JSON-RPC POST; the concrete operation
  # is selected by the X-Amz-Target header, not the URL path.
  client = %{client | service: "appstream"}
  host = build_host("appstream2", client)
  url = build_url(host, client)

  payload = encode!(client, input)

  signed_headers =
    AWS.Request.sign_v4(
      client,
      "POST",
      url,
      [
        {"Host", host},
        {"Content-Type", "application/x-amz-json-1.1"},
        {"X-Amz-Target", "PhotonAdminProxyService.#{action}"}
      ],
      payload
    )

  post(client, url, payload, signed_headers, options)
end
# Performs the HTTP POST and normalizes the result:
#   * 200 with an empty body  -> {:ok, nil, response}
#   * 200 with a JSON body    -> {:ok, decoded, response}
#   * any other status        -> {:error, {:unexpected_response, response}}
#   * transport failure       -> the {:error, reason} tuple as-is
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    {:ok, %{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %{status_code: 200, body: body} = response} ->
      {:ok, decode!(client, body), response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    {:error, _reason} = error ->
      error
  end
end
# Resolves the HTTP host for the request. The "local" region targets a
# developer endpoint (falling back to plain "localhost"); any real region
# produces the standard "<prefix>.<region>.<endpoint>" form.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  Enum.join([endpoint_prefix, region, endpoint], ".")
end
# Assembles the full request URL from the client's protocol/port settings.
defp build_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
# Serializes a request payload with the client's configured JSON codec.
defp encode!(client, payload) do
  AWS.Client.encode!(client, payload, :json)
end

# Parses a JSON response body with the client's configured JSON codec.
defp decode!(client, payload) do
  AWS.Client.decode!(client, payload, :json)
end
end
| 33.208333
| 124
| 0.709625
|
93e079682cb357dd49e180d00333f8137abc4c86
| 2,424
|
exs
|
Elixir
|
lib/elixir/test/elixir/kernel/sigils_test.exs
|
MSch/elixir
|
fc42dc9bb76ec1fdcfcbdbfb11fea6a845a62fca
|
[
"Apache-2.0"
] | 1
|
2017-09-09T20:59:04.000Z
|
2017-09-09T20:59:04.000Z
|
lib/elixir/test/elixir/kernel/sigils_test.exs
|
bruce/elixir
|
d77ccf941541959079e5f677f8717da24b486fac
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/test/elixir/kernel/sigils_test.exs
|
bruce/elixir
|
d77ccf941541959079e5f677f8717da24b486fac
|
[
"Apache-2.0"
] | null | null | null |
Code.require_file "../test_helper.exs", __DIR__
defmodule Kernel.SigilsTest do
  use ExUnit.Case, async: true

  # NOTE(review): this file predates Elixir 1.0 — sigils use the old `%`
  # prefix (modern Elixir uses `~`), and `ArgumentError[]` below is the
  # pre-1.0 record-pattern syntax. It will not compile on current Elixir.

  # %s: interpolated string sigil with escape processing.
  test :sigil_s do
    assert %s(foo) == "foo"
    assert %s(f#{:o}o) == "foo"
    assert %s(f\no) == "f\no"
  end

  test :sigil_s_with_heredoc do
    assert " foo\n\n" == %s"""
    f#{:o}o\n
    """
  end

  # %S: raw string sigil — no interpolation, no escape processing;
  # all delimiter pairs must produce the same value.
  test :sigil_S do
    assert %S(foo) == "foo"
    assert %S[foo] == "foo"
    assert %S{foo} == "foo"
    assert %S'foo' == "foo"
    assert %S"foo" == "foo"
    assert %S|foo| == "foo"
    assert %S(f#{o}o) == "f\#{o}o"
    assert %S(f\no) == "f\\no"
  end

  test :sigil_S_with_heredoc do
    assert " f\#{o}o\\n\n" == %S"""
    f#{o}o\n
    """
  end

  # %c: interpolated charlist sigil.
  test :sigil_c do
    assert %c(foo) == 'foo'
    assert %c(f#{:o}o) == 'foo'
    assert %c(f\no) == 'f\no'
  end

  # %C: raw charlist sigil.
  test :sigil_C do
    assert %C(foo) == 'foo'
    assert %C[foo] == 'foo'
    assert %C{foo} == 'foo'
    assert %C'foo' == 'foo'
    assert %C"foo" == 'foo'
    assert %C|foo| == 'foo'
    assert %C(f#{o}o) == 'f\#{o}o'
    assert %C(f\no) == 'f\\no'
  end

  # %w: interpolated word list; modifiers select element type
  # (s = strings, a = atoms, c = charlists). Unknown modifiers raise.
  test :sigil_w do
    assert %w() == []
    assert %w(foo bar baz) == ["foo", "bar", "baz"]
    assert %w(foo #{:bar} baz) == ["foo", "bar", "baz"]
    assert %w(
      foo
      bar
      baz
    ) == ["foo", "bar", "baz"]
    assert %w(foo bar baz)s == ["foo", "bar", "baz"]
    assert %w(foo bar baz)a == [:foo, :bar, :baz]
    assert %w(foo bar baz)c == ['foo', 'bar', 'baz']
    bad_modifier = quote do: %w(foo bar baz)x
    assert ArgumentError[] = catch_error(Code.eval_quoted(bad_modifier))
    assert %w(Foo Bar)a == [:"Foo", :"Bar"]
    assert %w(Foo.#{Bar}.Baz)a == [:"Foo.Elixir.Bar.Baz"]
    assert %w(Foo.Bar)s == ["Foo.Bar"]
    assert %w(Foo.#{Bar})c == ['Foo.Elixir.Bar']
    # Ensure it is fully expanded at compile time
    assert Macro.expand(quote(do: %w(a b c)a), __ENV__) == [:a, :b, :c]
  end

  # %W: raw word list — interpolation is preserved literally.
  test :sigil_W do
    assert %W(foo #{bar} baz) == ["foo", "\#{bar}", "baz"]
    assert %W(
      foo
      bar
      baz
    ) == ["foo", "bar", "baz"]
    assert %W(foo bar baz)s == ["foo", "bar", "baz"]
    assert %W(foo bar baz)a == [:foo, :bar, :baz]
    assert %W(foo bar baz)c == ['foo', 'bar', 'baz']
    bad_modifier = quote do: %W(foo bar baz)x
    assert ArgumentError[] = catch_error(Code.eval_quoted(bad_modifier))
    assert %W(Foo #{Bar})a == [:"Foo", :"\#{Bar}"]
    assert %W(Foo.Bar.Baz)a == [:"Foo.Bar.Baz"]
  end
end
| 24.484848
| 72
| 0.512376
|
93e0c01901d716af9d3629c5ebd1ad9c3d0eb462
| 6,046
|
ex
|
Elixir
|
lib/guardian_phoenix/socket.ex
|
WLSF/guardian_phoenix
|
604eefd2cda5555c1b5a935631e006bb29321f95
|
[
"MIT"
] | 32
|
2019-01-27T11:59:06.000Z
|
2022-03-14T05:16:46.000Z
|
lib/guardian_phoenix/socket.ex
|
WLSF/guardian_phoenix
|
604eefd2cda5555c1b5a935631e006bb29321f95
|
[
"MIT"
] | 10
|
2019-03-11T20:18:26.000Z
|
2022-03-03T15:49:01.000Z
|
lib/guardian_phoenix/socket.ex
|
WLSF/guardian_phoenix
|
604eefd2cda5555c1b5a935631e006bb29321f95
|
[
"MIT"
] | 8
|
2019-08-22T07:20:28.000Z
|
2021-09-24T20:21:10.000Z
|
defmodule Guardian.Phoenix.Socket do
  @moduledoc """
  Helpers for managing Guardian authentication on Phoenix sockets.

  Tokens, claims and resources are stored in the socket assigns under keys
  derived from `Guardian.Plug.Keys`, so multiple independent authentications
  can coexist on one socket (distinguished by the `key` argument, defaulting
  to `:default`).

  The functions you will usually reach for:

  * `Guardian.Phoenix.Socket.authenticated?` - check if the socket has been authenticated
  * `Guardian.Phoenix.Socket.authenticate` - sign a resource in to a socket from a token

  ### Getters

  * `Guardian.Phoenix.Socket.current_claims`
  * `Guardian.Phoenix.Socket.current_token`
  * `Guardian.Phoenix.Socket.current_resource`

  Unlike `Guardian.Plug.sign_in`, the socket variant authenticates from a
  token. Typical usage in a socket module:

  ```elixir
  defmodule MyApp.UserSocket do
    use Phoenix.Socket

    def connect(%{"token" => token}, socket) do
      case Guardian.Phoenix.Socket.authenticate(socket, MyApp.Guardian, token) do
        {:ok, authed_socket} ->
          {:ok, authed_socket}
        {:error, _} -> :error
      end
    end

    # This function will be called when there was no authentication information
    def connect(_params, socket) do
      :error
    end
  end
  ```

  To authenticate on channel join instead, import this module and call
  `authenticate/5` there. All of this is mere sugar on the underlying
  Guardian functions.
  """

  import Guardian.Plug.Keys

  alias Phoenix.Socket

  @doc """
  Stores `token` in the socket assigns for later use.

  Read it back with `current_token/2`.
  """
  @spec put_current_token(
          socket :: Socket.t(),
          token :: Guardian.Token.token() | nil,
          key :: atom | String.t() | nil
        ) :: Socket.t()
  def put_current_token(socket, token, key \\ :default) do
    socket |> Socket.assign(token_key(key), token)
  end

  @doc """
  Stores `new_claims` in the socket assigns for later use.

  Read them back with `current_claims/2`.
  """
  @spec put_current_claims(
          socket :: Socket.t(),
          new_claims :: Guardian.Token.claims() | nil,
          atom | String.t() | nil
        ) :: Socket.t()
  def put_current_claims(socket, new_claims, key \\ :default) do
    socket |> Socket.assign(claims_key(key), new_claims)
  end

  @doc """
  Stores `resource` in the socket assigns for later use.

  Read it back with `current_resource/2`.
  """
  @spec put_current_resource(
          socket :: Socket.t(),
          resource :: Guardian.Token.resource() | nil,
          key :: atom | String.t() | nil
        ) :: Socket.t()
  def put_current_resource(socket, resource, key \\ :default) do
    socket |> Socket.assign(resource_key(key), resource)
  end

  @doc """
  Fetches the claims map previously stored on the socket, or `nil`.
  """
  @spec current_claims(Socket.t(), atom | String.t()) :: Guardian.Token.claims() | nil
  def current_claims(socket, key \\ :default) do
    Map.get(socket.assigns, claims_key(key))
  end

  @doc """
  Fetches the encoded token that was provided for the initial
  authentication, or `nil`.
  """
  @spec current_token(Socket.t(), atom | String.t()) :: Guardian.Token.token() | nil
  def current_token(socket, key \\ :default) do
    Map.get(socket.assigns, token_key(key))
  end

  @doc """
  Fetches the resource previously stored on the socket, or `nil`.
  """
  @spec current_resource(Socket.t(), atom | String.t()) :: Guardian.Token.resource() | nil
  def current_resource(socket, key \\ :default) do
    Map.get(socket.assigns, resource_key(key))
  end

  @doc """
  True when a token is present on the socket, i.e. it was authenticated.
  """
  @spec authenticated?(Socket.t(), atom | String.t()) :: true | false
  def authenticated?(socket, key \\ :default) do
    not is_nil(current_token(socket, key))
  end

  @doc """
  Assigns the resource, token and claims to the socket in one call.

  Use `key` to choose a different storage location, allowing multiple
  active tokens per socket.
  """
  @spec assign_rtc(
          socket :: Socket.t(),
          resource :: Guardian.Token.resource() | nil,
          token :: Guardian.Token.token() | nil,
          claims :: Guardian.Token.claims() | nil,
          key :: atom | String.t() | nil
        ) :: Socket.t()
  def assign_rtc(socket, resource, token, claims, key \\ :default) do
    socket
    |> put_current_resource(resource, key)
    |> put_current_claims(claims, key)
    |> put_current_token(token, key)
  end

  @doc """
  Decodes and verifies `token` with the Guardian implementation `impl`,
  loads the resource, and stores resource/claims/token on the socket.

  Pass `key` in `opts` to store the information under a different location,
  allowing multiple tokens and resources on a single socket.
  """
  @spec authenticate(
          socket :: Socket.t(),
          impl :: module,
          token :: Guardian.Token.token() | nil,
          claims_to_check :: Guardian.Token.claims(),
          opts :: Guardian.options()
        ) :: {:ok, Socket.t()} | {:error, atom | any}
  def authenticate(socket, impl, token, claims_to_check \\ %{}, opts \\ [])

  def authenticate(_socket, _impl, nil, _claims_to_check, _opts), do: {:error, :no_token}

  def authenticate(socket, impl, token, claims_to_check, opts) do
    key = Keyword.get(opts, :key, Guardian.Plug.default_key())

    case Guardian.resource_from_token(impl, token, claims_to_check, opts) do
      {:ok, resource, claims} ->
        {:ok, assign_rtc(socket, resource, token, claims, key)}

      error ->
        error
    end
  end
end
| 32.858696
| 116
| 0.669368
|
93e0ce8ce606d35d2ebb1e5296c508b0ed5fae90
| 200
|
exs
|
Elixir
|
test/hT16K33_test.exs
|
Bananarchist/HT16K33
|
73a26c02d7ea36d2a2750f1d1b6f3e12cb49701a
|
[
"Apache-2.0"
] | null | null | null |
test/hT16K33_test.exs
|
Bananarchist/HT16K33
|
73a26c02d7ea36d2a2750f1d1b6f3e12cb49701a
|
[
"Apache-2.0"
] | null | null | null |
test/hT16K33_test.exs
|
Bananarchist/HT16K33
|
73a26c02d7ea36d2a2750f1d1b6f3e12cb49701a
|
[
"Apache-2.0"
] | null | null | null |
defmodule HT16K33Test do
  use ExUnit.Case

  # character_for/1 takes a single-character charlist; with_decimal_point/1
  # then produces the two-byte segment bitmap for the display.
  # NOTE(review): assumes a space glyph is all-segments-off (0x00) and the
  # decimal point is bit 0x40 of the second byte — confirm against the
  # HT16K33 module's segment map.
  test "Returns a nice decimal for a space" do
    assert HT16K33.character_for(' ')
           |> HT16K33.with_decimal_point() == <<0x00, 0x40>>
  end
end
| 22.222222
| 60
| 0.67
|
93e0d6cc982fd8a1c697df9dc60567b18fbeddec
| 979
|
exs
|
Elixir
|
test/list_tags_test.exs
|
fmcgeough/ex_aws_cloud_trail
|
30346e5c11acf9dc5d133635b8025fe659e9f957
|
[
"MIT"
] | null | null | null |
test/list_tags_test.exs
|
fmcgeough/ex_aws_cloud_trail
|
30346e5c11acf9dc5d133635b8025fe659e9f957
|
[
"MIT"
] | null | null | null |
test/list_tags_test.exs
|
fmcgeough/ex_aws_cloud_trail
|
30346e5c11acf9dc5d133635b8025fe659e9f957
|
[
"MIT"
] | null | null | null |
defmodule ListTagsTest do
  use ExUnit.Case

  # Shared fixtures: the trail ARN under test and the JSON-RPC headers
  # that every ListTags operation must carry.
  @trail_arn "arn:aws:cloudtrail:us-east-2:123456789012:trail/MyTrail"
  @expected_headers [
    {"x-amz-target", "CloudTrail_20131101.ListTags"},
    {"content-type", "application/x-amz-json-1.1"}
  ]

  test "list_tags" do
    op = ExAws.CloudTrail.list_tags(@trail_arn)

    assert op.headers == @expected_headers
    assert op.data == %{"ResourceTagList" => @trail_arn}
  end

  test "list tags with paging" do
    op = ExAws.CloudTrail.list_tags(@trail_arn, next_token: "ABCDEF")

    assert op.headers == @expected_headers

    assert op.data == %{
             "ResourceTagList" => @trail_arn,
             "NextToken" => "ABCDEF"
           }
  end
end
| 28.794118
| 94
| 0.589377
|
93e0d8e1896651184ae14985de799c25b09cf9bc
| 2,175
|
ex
|
Elixir
|
lib/r_range/ruby.ex
|
koga1020/ex-r_enum
|
9289452b85aa48bc3ecbe3c09df023a4a0f20a8e
|
[
"Apache-2.0"
] | null | null | null |
lib/r_range/ruby.ex
|
koga1020/ex-r_enum
|
9289452b85aa48bc3ecbe3c09df023a4a0f20a8e
|
[
"Apache-2.0"
] | null | null | null |
lib/r_range/ruby.ex
|
koga1020/ex-r_enum
|
9289452b85aa48bc3ecbe3c09df023a4a0f20a8e
|
[
"Apache-2.0"
] | null | null | null |
defmodule RRange.Ruby do
  @moduledoc """
  Summarized all of Ruby's Range functions.
  Functions corresponding to the following patterns are not implemented
  - When a function with the same name already exists in Elixir.
  - When a method name includes `!`.
  - %, ==, ===
  """

  # Injects all of this module's public functions into the `use`-ing module
  # (via the project's RUtils.define_all_functions!/1 helper).
  @spec __using__(any) :: list
  defmacro __using__(_opts) do
    RUtils.define_all_functions!(__MODULE__)
  end

  use RRange.RubyEnd

  # Implementation checklist against Ruby's Range API:
  # https://ruby-doc.org/core-3.1.0/Range.html
  # [:begin, :bsearch, :count, :cover?, :each, :end, :entries, :eql?, :exclude_end?, :first, :hash, :include?, :inspect, :last, :max, :member?, :min, :minmax, :size, :step, :to_a, :to_s]
  # |> RUtils.required_functions([Range, REnum])
  # ✔ begin
  # × bsearch
  # ✔ cover?
  # ✔ end
  # ✔ eql?
  # × exclude_end?
  # × hash
  # ✔ inspect
  # ✔ last
  # ✔ step
  # ✔ to_s

  # NOTE(review): the doctest examples below call RList; presumably these
  # functions are re-exported into RList via __using__/1 — confirm before
  # renaming the examples.

  @doc """
  Returns true if list1 == list2.
  ## Examples
      iex> 1..3
      iex> |> RList.eql?(1..3)
      true

      iex> 1..3
      iex> |> RList.eql?(1..4)
      false
  """
  @spec eql?(Range.t(), Range.t()) :: boolean()
  def eql?(range1, range2) do
    range1 == range2
  end

  @doc """
  Returns the first element of range.
  ## Examples
      iex> RList.begin(1..3)
      1
  """
  @spec begin(Range.t()) :: integer()
  def begin(begin.._) do
    begin
  end

  @doc """
  Returns Stream that from given range split into by given step.
  ## Examples
      iex> RList.step(1..10, 2)
      iex> |> Enum.to_list()
      [1, 3, 5, 7, 9]
  """
  @spec step(Range.t(), integer()) :: Enum.t()
  def step(begin..last, step) do
    # Stepped range wrapped lazily, mirroring Ruby's block-less Range#step
    # which returns an enumerator.
    begin..last//step
    |> REnum.Ruby.lazy()
  end

  @doc """
  Executes `Enum.each` to g given range split into by given step.
  ## Examples
      iex> RList.step(1..10, 2, &IO.inspect(&1))
      iex> |> Enum.to_list()
      # 1
      # 3
      # 5
      # 7
      # 9
      :ok
  """
  @spec step(Range.t(), integer(), function()) :: :ok
  def step(begin..last, step, func) do
    begin..last//step
    |> Enum.each(func)
  end

  # Ruby-style spellings delegated to existing Elixir functions.
  defdelegate inspect(range), to: Kernel, as: :inspect
  defdelegate to_s(range), to: Kernel, as: :inspect
  defdelegate cover?(range, n), to: Enum, as: :member?
end
| 23.387097
| 186
| 0.576092
|
93e145ae3d0b0df284a2c892c9dcb5c3327586bf
| 1,226
|
exs
|
Elixir
|
mix.exs
|
thanos/ccxtex
|
55c019a2de716813c544fb4219bda6e5d53bedcf
|
[
"MIT"
] | 1
|
2019-06-10T21:37:08.000Z
|
2019-06-10T21:37:08.000Z
|
mix.exs
|
thanos/ccxtex
|
55c019a2de716813c544fb4219bda6e5d53bedcf
|
[
"MIT"
] | null | null | null |
mix.exs
|
thanos/ccxtex
|
55c019a2de716813c544fb4219bda6e5d53bedcf
|
[
"MIT"
] | 1
|
2019-06-10T21:36:52.000Z
|
2019-06-10T21:36:52.000Z
|
defmodule Ccxtex.MixProject do
  use Mix.Project

  @version "0.3.0"

  def project do
    [
      app: :ccxtex,
      version: @version,
      elixir: "~> 1.6",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      package: package()
    ]
  end

  # OTP application configuration — run "mix help compile.app" for details.
  def application do
    [
      extra_applications: [:logger],
      mod: {Ccxtex.Application, []}
    ]
  end

  # Dependency list — run "mix help deps" for details.
  defp deps do
    [
      {:export, ">= 0.0.0"},
      {:dialyxir, "~> 1.0.0-rc.2", only: [:dev], runtime: false},
      {:nodejs, "~> 0.1"},
      {:jason, "~> 1.1"},
      {:construct, "~> 1.0"},
      {:map_keys, "~> 0.1"},
      {:ex_doc, ">= 0.0.0", only: :dev}
    ]
  end

  # Hex package metadata.
  defp package do
    links = %{
      "GitHub" => "https://github.com/metachaos-systems/ccxtex",
      "Metachaos Systems" => "http://metachaos.systems",
      "Ccxt" => "https://github.com/ccxt/ccxt"
    }

    [
      name: :ccxtex,
      files: ["lib", "mix.exs", "README*", "LICENSE*", "priv/js/dist"],
      description: "Call ccxt (cryptocurrency trading library) from Elixir/Erlang",
      maintainers: ["ontofractal"],
      licenses: ["MIT"],
      links: links
    ]
  end
end
| 24.039216
| 83
| 0.531811
|
93e149a08e2fa2a0f7bb92ac16ccd4c874594152
| 405
|
ex
|
Elixir
|
apps/artemis_api/lib/artemis_api/graphql/resolvers/session.ex
|
chrislaskey/atlas_platform
|
969aea95814f62d3471f93000ee5ad77edb9d1bf
|
[
"MIT"
] | 10
|
2019-07-05T19:59:20.000Z
|
2021-05-23T07:36:11.000Z
|
apps/artemis_api/lib/artemis_api/graphql/resolvers/session.ex
|
chrislaskey/atlas_platform
|
969aea95814f62d3471f93000ee5ad77edb9d1bf
|
[
"MIT"
] | 7
|
2019-07-12T21:41:01.000Z
|
2020-08-17T21:29:22.000Z
|
apps/artemis_api/lib/artemis_api/graphql/resolvers/session.ex
|
chrislaskey/atlas_platform
|
969aea95814f62d3471f93000ee5ad77edb9d1bf
|
[
"MIT"
] | 4
|
2019-07-05T20:04:08.000Z
|
2021-05-13T16:28:33.000Z
|
defmodule ArtemisApi.GraphQL.Resolver.Session do
  alias Artemis.Event
  alias ArtemisApi.CreateSession
  alias ArtemisApi.GetUserByAuthProvider

  @doc """
  Resolves the createSession mutation: looks up the user by auth-provider
  params, creates a session for them and broadcasts a
  "session:created:api" event. Any lookup error tuple is returned as-is.
  """
  def create_session(params, _context) do
    with {:ok, user} <- GetUserByAuthProvider.call(params) do
      user
      |> CreateSession.call()
      |> Event.broadcast("session:created:api", user)
    end
  end
end
| 22.5
| 55
| 0.679012
|
93e15428bd44bd0ca1b75c81f3d5ca2e27415a87
| 429
|
ex
|
Elixir
|
mix/support/normalize/normalize_territory_containers.ex
|
szTheory/cldr
|
30e67d2e5ff13a61c29586a7cfad79995b070e1a
|
[
"Apache-2.0"
] | null | null | null |
mix/support/normalize/normalize_territory_containers.ex
|
szTheory/cldr
|
30e67d2e5ff13a61c29586a7cfad79995b070e1a
|
[
"Apache-2.0"
] | null | null | null |
mix/support/normalize/normalize_territory_containers.ex
|
szTheory/cldr
|
30e67d2e5ff13a61c29586a7cfad79995b070e1a
|
[
"Apache-2.0"
] | null | null | null |
defmodule Cldr.Normalize.TerritoryContainers do
  @moduledoc false

  # Flattens CLDR territory containment data: drops "*-status-*" variant
  # entries and maps each remaining territory code directly to its
  # "_contains" list (nil when the source entry has no such key).
  def normalize(content) do
    normalize_territory_containers(content)
  end

  def normalize_territory_containers(content) do
    for {code, value} <- content,
        not String.contains?(code, "-status-"),
        into: %{} do
      {code, Map.get(value, "_contains")}
    end
  end
end
| 18.652174
| 48
| 0.615385
|
93e1614b32b92ff4aaea3dddfd49936849d79f60
| 2,003
|
ex
|
Elixir
|
lib/embed_chat_web/models/user.ex
|
guofei/embedchat
|
6562108acd1d488dde457f28cf01d82b4c5a9bf8
|
[
"MIT"
] | 27
|
2016-10-15T12:13:22.000Z
|
2021-02-07T20:31:41.000Z
|
lib/embed_chat_web/models/user.ex
|
guofei/embedchat
|
6562108acd1d488dde457f28cf01d82b4c5a9bf8
|
[
"MIT"
] | null | null | null |
lib/embed_chat_web/models/user.ex
|
guofei/embedchat
|
6562108acd1d488dde457f28cf01d82b4c5a9bf8
|
[
"MIT"
] | 4
|
2016-08-21T15:03:29.000Z
|
2019-11-22T13:15:29.000Z
|
defmodule EmbedChat.User do
  use EmbedChatWeb, :model

  schema "users" do
    field :email, :string
    field :name, :string
    field :password, :string, virtual: true
    field :crypted_password, :string

    has_many :addresses, EmbedChat.Address
    has_many :userprojects, EmbedChat.UserProject
    has_many :projects, through: [:userprojects, :project]
    has_many :rooms, through: [:projects, :room]
    has_many :auto_message_configs, EmbedChat.AutoMessageConfig

    timestamps()
  end

  @doc """
  Changeset for new registrations: the base `changeset/2` plus a required
  password (6..100 chars) which is hashed into `crypted_password`.
  """
  def registration_changeset(struct, params) do
    struct
    |> changeset(params)
    |> cast(params, [:password])
    |> validate_required([:password])
    |> validate_length(:password, min: 6, max: 100)
    |> put_pass_hash()
  end

  @doc """
  Creates a changeset based on the `model` and `params`.

  If no params are provided, an invalid changeset is returned
  with no validation performed.
  """
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, [:email, :name])
    |> validate_required([:email, :name])
    |> unique_constraint(:email)
    |> validate_format(:email, ~r/@/)
    |> validate_length(:email, min: 1, max: 50)
    |> validate_length(:name, min: 1, max: 20)
  end

  # Hashes the plaintext password into :crypted_password on valid
  # changesets; invalid changesets pass through untouched.
  defp put_pass_hash(%Ecto.Changeset{valid?: true, changes: %{password: pass}} = changeset) do
    put_change(changeset, :crypted_password, Comeonin.Bcrypt.hashpwsalt(pass))
  end

  defp put_pass_hash(changeset), do: changeset

  @doc """
  Display name: the user's name when present and non-empty,
  otherwise the email address.
  """
  def name(user) do
    case user.name do
      name when is_binary(name) and name != "" -> name
      _ -> user.email
    end
  end

  @doc """
  The very first account (id 1) is considered the administrator.
  """
  def admin?(struct) do
    struct.id == 1
  end

  @doc """
  Orders `query` newest-first.
  """
  def sorted(query) do
    from record in query, order_by: [desc: record.id]
  end

  @doc """
  The most recent users (default: 1) belonging to the project of `room`.
  """
  def latest_for_room(query, room, limit \\ 1) do
    from user in query,
      join: membership in EmbedChat.UserProject,
      on: user.id == membership.user_id,
      where: ^room.project_id == membership.project_id,
      order_by: [desc: user.id],
      limit: ^limit
  end
end
| 24.426829
| 82
| 0.643035
|
93e1761398f73bd37cf597ab3dfb1b3e234df92f
| 113
|
ex
|
Elixir
|
lib/forge_sdk/wallet/type/general.ex
|
ArcBlock/forge-elixir-sdk
|
61ff656a59116d3da18055d54655e8eb5ca15f63
|
[
"Apache-2.0"
] | 21
|
2019-05-08T01:25:42.000Z
|
2022-02-08T02:30:18.000Z
|
lib/forge_sdk/wallet/type/general.ex
|
ArcBlock/forge-elixir-sdk
|
61ff656a59116d3da18055d54655e8eb5ca15f63
|
[
"Apache-2.0"
] | 6
|
2019-05-27T23:07:00.000Z
|
2020-03-10T04:33:10.000Z
|
lib/forge_sdk/wallet/type/general.ex
|
ArcBlock/forge-elixir-sdk
|
61ff656a59116d3da18055d54655e8eb5ca15f63
|
[
"Apache-2.0"
] | 2
|
2019-07-21T18:12:47.000Z
|
2021-07-31T21:18:38.000Z
|
# Implements the `ForgeSdk.Wallet` protocol for `ForgeAbi.WalletType` by
# delegating the whole implementation to the shared builder macro.
defimpl ForgeSdk.Wallet, for: ForgeAbi.WalletType do
  # `use` expands ForgeSdk.Wallet.Builder, injecting every protocol
  # callback specialized for the ForgeAbi.WalletType struct.
  use ForgeSdk.Wallet.Builder, mod: ForgeAbi.WalletType
end
| 28.25
| 55
| 0.814159
|
93e1a36c75287fb630266e9c1c82afe06f4a8ab2
| 973
|
exs
|
Elixir
|
mix.exs
|
robktek/poxa
|
1c330789735f9bc1e91c3dfa7a10edb3aff50aab
|
[
"MIT"
] | null | null | null |
mix.exs
|
robktek/poxa
|
1c330789735f9bc1e91c3dfa7a10edb3aff50aab
|
[
"MIT"
] | null | null | null |
mix.exs
|
robktek/poxa
|
1c330789735f9bc1e91c3dfa7a10edb3aff50aab
|
[
"MIT"
] | null | null | null |
defmodule Poxa.Mixfile do
  @moduledoc false
  use Mix.Project

  # Single source of truth for the release version.
  @version "0.8.1"

  @doc false
  def project do
    [
      app: :poxa,
      version: @version,
      name: "Poxa",
      elixir: "~> 1.5",
      deps: deps(),
      dialyzer: dialyzer()
    ]
  end

  @doc false
  def application do
    [mod: {Poxa, []}]
  end

  # Dialyzer configuration kept out of project/0 for readability.
  defp dialyzer do
    [
      plt_add_apps: ~w(cowboy poison gproc httpoison signaturex),
      plt_file: ".local.plt",
      flags: ~w(-Wunmatched_returns -Werror_handling -Wrace_conditions -Wno_opaque --fullpath --statistics)
    ]
  end

  # Runtime and tooling dependencies; order preserved from the original.
  defp deps do
    [
      {:cowboy, "~> 1.1.2"},
      {:poison, "~> 3.0"},
      {:signaturex, "~> 1.3"},
      {:gproc, "~> 0.6"},
      {:mimic, "~> 1.0", only: :test},
      {:pusher_client, github: "edgurgel/pusher_client", only: :test},
      {:pusher, "~> 0.1", only: :test},
      {:distillery, "~> 1.5.5", runtime: false},
      {:conform, "~> 2.0"},
      {:httpoison, "~> 0.9"},
      {:ex2ms, "~> 1.5"},
      {:watcher, "~> 1.1.0"},
      {:dialyxir, "~> 0.3", only: [:dev, :test]}
    ]
  end
end
| 25.605263
| 109
| 0.500514
|
93e1b8123810d41d4ece76b03aed4b3ba0d0fff6
| 46,637
|
exs
|
Elixir
|
test/phoenix_live_view/diff_test.exs
|
ucwaldo/phoenix_live_view
|
aa192420dc3b3b4158bbc7582ce39cb149221170
|
[
"MIT"
] | 2
|
2021-05-15T05:20:19.000Z
|
2021-05-20T17:55:04.000Z
|
test/phoenix_live_view/diff_test.exs
|
ucwaldo/phoenix_live_view
|
aa192420dc3b3b4158bbc7582ce39cb149221170
|
[
"MIT"
] | 1
|
2021-05-12T12:14:27.000Z
|
2021-05-12T12:14:27.000Z
|
test/phoenix_live_view/diff_test.exs
|
ucwaldo/phoenix_live_view
|
aa192420dc3b3b4158bbc7582ce39cb149221170
|
[
"MIT"
] | null | null | null |
defmodule Phoenix.LiveView.DiffTest do
use ExUnit.Case, async: true
import Phoenix.LiveView.Helpers
alias Phoenix.LiveView.{Socket, Diff, Rendered, Component}
alias Phoenix.LiveComponent.CID
def basic_template(assigns) do
~L"""
<div>
<h2>It's <%= @time %></h2>
<%= @subtitle %>
</div>
"""
end
def literal_template(assigns) do
~L"""
<div>
<%= @title %>
<%= "<div>" %>
</div>
"""
end
def comprehension_template(assigns) do
~L"""
<div>
<h1><%= @title %></h1>
<%= for name <- @names do %>
<br/><%= name %>
<% end %>
</div>
"""
end
defp nested_rendered(changed? \\ true) do
%Rendered{
static: ["<h2>", "</h2>", "<span>", "</span>"],
dynamic: fn _ ->
[
"hi",
%Rendered{
static: ["s1", "s2", "s3"],
dynamic: fn _ -> if changed?, do: ["abc", "efg"], else: [nil, nil] end,
fingerprint: 456
},
%Rendered{
static: ["s1", "s2"],
dynamic: fn _ -> if changed?, do: ["efg"], else: [nil] end,
fingerprint: 789
}
]
end,
fingerprint: 123
}
end
defp render(
rendered,
fingerprints \\ Diff.new_fingerprints(),
components \\ Diff.new_components()
) do
socket = %Socket{endpoint: __MODULE__, fingerprints: fingerprints}
Diff.render(socket, rendered, components)
end
defp rendered_to_binary(map) do
map |> Diff.to_iodata() |> IO.iodata_to_binary()
end
describe "to_iodata" do
test "with subtrees chain" do
assert rendered_to_binary(%{
0 => %{d: [["1", 1], ["2", 2], ["3", 3]], s: ["\n", ":", ""]},
:c => %{
1 => %{0 => %{0 => "index_1", :s => ["\nIF ", ""]}, :s => ["", ""]},
2 => %{0 => %{0 => "index_2", :s => ["\nELSE ", ""]}, :s => 1},
3 => %{0 => %{0 => "index_3"}, :s => 2}
},
:s => ["<div>", "\n</div>\n"]
}) == """
<div>
1:
IF index_1
2:
ELSE index_2
3:
ELSE index_3
</div>
"""
end
test "with subtrees where a comprehension is replaced by rendered" do
assert rendered_to_binary(%{
0 => 1,
1 => 2,
:c => %{
1 => %{
0 => %{
0 => %{d: [[], [], []], s: ["ROW"]},
:s => ["\n", "\n"]
},
:s => ["<div>", "</div>"]
},
2 => %{
0 => %{
0 => %{0 => "BAR", :s => ["FOO", "BAZ"]},
:s => ["\n", "\n"]
},
:s => 1
}
},
:s => ["", "", ""]
}) == "<div>\nROWROWROW\n</div><div>\nFOOBARBAZ\n</div>"
end
end
describe "full renders without fingerprints" do
test "basic template" do
rendered = basic_template(%{time: "10:30", subtitle: "Sunny"})
{socket, full_render, _} = render(rendered)
assert full_render == %{
0 => "10:30",
1 => "Sunny",
:s => ["<div>\n <h2>It's ", "</h2>\n ", "\n</div>\n"]
}
assert rendered_to_binary(full_render) ==
"<div>\n <h2>It's 10:30</h2>\n Sunny\n</div>\n"
assert socket.fingerprints == {rendered.fingerprint, %{}}
end
test "template with literal" do
rendered = literal_template(%{title: "foo"})
{socket, full_render, _} = render(rendered)
assert full_render ==
%{0 => "foo", 1 => "<div>", :s => ["<div>\n ", "\n ", "\n</div>\n"]}
assert rendered_to_binary(full_render) ==
"<div>\n foo\n <div>\n</div>\n"
assert socket.fingerprints == {rendered.fingerprint, %{}}
end
test "nested %Rendered{}'s" do
{socket, full_render, _} = render(nested_rendered())
assert full_render ==
%{
0 => "hi",
1 => %{
0 => "abc",
1 => "efg",
:s => ["s1", "s2", "s3"]
},
2 => %{0 => "efg", :s => ["s1", "s2"]},
:s => ["<h2>", "</h2>", "<span>", "</span>"]
}
assert rendered_to_binary(full_render) ==
"<h2>hi</h2>s1abcs2efgs3<span>s1efgs2</span>"
assert socket.fingerprints == {123, %{2 => {789, %{}}, 1 => {456, %{}}}}
end
test "comprehensions" do
%{fingerprint: fingerprint} =
rendered = comprehension_template(%{title: "Users", names: ["phoenix", "elixir"]})
{socket, full_render, _} = render(rendered)
assert full_render == %{
0 => "Users",
:s => ["<div>\n <h1>", "</h1>\n ", "\n</div>\n"],
1 => %{
s: ["\n <br/>", "\n "],
d: [["phoenix"], ["elixir"]]
}
}
assert {^fingerprint, %{1 => comprehension_print}} = socket.fingerprints
assert is_integer(comprehension_print)
end
test "empty comprehensions" do
# If they are empty on first render, we don't send them
%{fingerprint: fingerprint} =
rendered = comprehension_template(%{title: "Users", names: []})
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => "Users",
:s => ["<div>\n <h1>", "</h1>\n ", "\n</div>\n"],
1 => ""
}
assert {^fingerprint, inner} = socket.fingerprints
assert inner == %{}
# Making them non-empty adds a fingerprint
rendered = comprehension_template(%{title: "Users", names: ["phoenix", "elixir"]})
{socket, full_render, components} = render(rendered, socket.fingerprints, components)
assert full_render == %{
0 => "Users",
1 => %{
d: [["phoenix"], ["elixir"]],
s: ["\n <br/>", "\n "]
}
}
assert {^fingerprint, %{1 => comprehension_print}} = socket.fingerprints
assert is_integer(comprehension_print)
# Making them empty again does not reset the fingerprint
rendered = comprehension_template(%{title: "Users", names: []})
{socket, full_render, _components} = render(rendered, socket.fingerprints, components)
assert full_render == %{
0 => "Users",
1 => %{d: []}
}
assert {^fingerprint, %{1 => ^comprehension_print}} = socket.fingerprints
end
end
describe "diffed render with fingerprints" do
test "basic template skips statics for known fingerprints" do
rendered = basic_template(%{time: "10:30", subtitle: "Sunny"})
{socket, full_render, _} = render(rendered, {rendered.fingerprint, %{}})
assert full_render == %{0 => "10:30", 1 => "Sunny"}
assert socket.fingerprints == {rendered.fingerprint, %{}}
end
test "renders nested %Rendered{}'s" do
tree = {123, %{2 => {789, %{}}, 1 => {456, %{}}}}
{socket, diffed_render, _} = render(nested_rendered(), tree)
assert diffed_render == %{0 => "hi", 1 => %{0 => "abc", 1 => "efg"}, 2 => %{0 => "efg"}}
assert socket.fingerprints == tree
end
test "does not emit nested %Rendered{}'s if they did not change" do
tree = {123, %{2 => {789, %{}}, 1 => {456, %{}}}}
{socket, diffed_render, _} = render(nested_rendered(false), tree)
assert diffed_render == %{0 => "hi"}
assert socket.fingerprints == tree
end
test "detects change in nested fingerprint" do
old_tree = {123, %{2 => {789, %{}}, 1 => {100_001, %{}}}}
{socket, diffed_render, _} = render(nested_rendered(), old_tree)
assert diffed_render ==
%{
0 => "hi",
1 => %{
0 => "abc",
1 => "efg",
:s => ["s1", "s2", "s3"]
},
2 => %{0 => "efg"}
}
assert socket.fingerprints == {123, %{2 => {789, %{}}, 1 => {456, %{}}}}
end
test "detects change in root fingerprint" do
old_tree = {99999, %{}}
{socket, diffed_render, _} = render(nested_rendered(), old_tree)
assert diffed_render == %{
0 => "hi",
1 => %{
0 => "abc",
1 => "efg",
:s => ["s1", "s2", "s3"]
},
2 => %{0 => "efg", :s => ["s1", "s2"]},
:s => ["<h2>", "</h2>", "<span>", "</span>"]
}
assert socket.fingerprints == {123, %{2 => {789, %{}}, 1 => {456, %{}}}}
end
end
defmodule MyComponent do
use Phoenix.LiveComponent
def mount(socket) do
send(self(), {:mount, socket})
{:ok, assign(socket, hello: "world")}
end
def update(assigns, socket) do
send(self(), {:update, assigns, socket})
{:ok, assign(socket, assigns)}
end
def render(assigns) do
send(self(), :render)
~L"""
FROM <%= @from %> <%= @hello %>
"""
end
end
defmodule IfComponent do
use Phoenix.LiveComponent
def mount(socket) do
{:ok, assign(socket, if: true)}
end
def render(assigns) do
~L"""
<%= if @if do %>
IF <%= @from %>
<% else %>
ELSE <%= @from %>
<% end %>
"""
end
end
defmodule TempComponent do
use Phoenix.LiveComponent
def mount(socket) do
send(self(), {:temporary_mount, socket})
{:ok, assign(socket, :first_time, true), temporary_assigns: [first_time: false]}
end
def render(assigns) do
send(self(), {:temporary_render, assigns})
~L"""
FROM <%= if @first_time, do: "WELCOME!", else: @from %>
"""
end
end
defmodule RenderOnlyComponent do
use Phoenix.LiveComponent
def render(assigns) do
~L"""
RENDER ONLY <%= @from %>
"""
end
end
defmodule BlockComponent do
use Phoenix.LiveComponent
def mount(socket) do
{:ok, assign(socket, id: "DEFAULT")}
end
def render(%{do: _}), do: raise("unexpected :do assign")
def render(assigns) do
~L"""
HELLO <%= @id %> <%= render_block(@inner_block, value: 1) %>
HELLO <%= @id %> <%= render_block(@inner_block, value: 2) %>
"""
end
end
defmodule BlockNoArgsComponent do
use Phoenix.LiveComponent
def mount(socket) do
{:ok, assign(socket, id: "DEFAULT")}
end
def render(%{do: _}), do: raise("unexpected :do assign")
def render(assigns) do
~L"""
HELLO <%= @id %> <%= render_block(@inner_block) %>
HELLO <%= @id %> <%= render_block(@inner_block) %>
"""
end
end
defmodule FunctionComponent do
def render_only(assigns) do
~L"""
RENDER ONLY <%= @from %>
"""
end
def render_with_block_no_args(assigns) do
~L"""
HELLO <%= @id %> <%= render_block(@inner_block) %>
HELLO <%= @id %> <%= render_block(@inner_block) %>
"""
end
def render_with_block(assigns) do
~L"""
HELLO <%= @id %> <%= render_block(@inner_block, 1) %>
HELLO <%= @id %> <%= render_block(@inner_block, 2) %>
"""
end
def render_with_live_component(assigns) do
~L"""
COMPONENT
<%= live_component BlockComponent, id: "WORLD" do %>
WITH VALUE <%= @value %>
<% end %>
"""
end
end
defmodule TreeComponent do
use Phoenix.LiveComponent
def preload(list_of_assigns) do
send(self(), {:preload, list_of_assigns})
Enum.map(list_of_assigns, &Map.put(&1, :preloaded?, true))
end
def update(assigns, socket) do
send(self(), {:update, assigns})
{:ok, assign(socket, assigns)}
end
def render(assigns) do
~L"""
<%= @id %> - <%= @preloaded? %>
<%= for {component, index} <- Enum.with_index(@children, 0) do %>
<%= index %>: <%= component %>
<% end %>
"""
end
end
defmodule NestedDynamicComponent do
use Phoenix.LiveComponent
def render(assigns) do
~L"""
<%= render_itself(assigns) %>
"""
end
def render_itself(assigns) do
case assigns.key do
:a ->
~L"""
<%= for key <- [:nothing] do %>
<%= key %><%= key %>
<% end %>
"""
:b ->
~L"""
<%= %>
"""
:c ->
~L"""
<%= live_component __MODULE__, id: make_ref(), key: :a %>
"""
end
end
end
def component_template(assigns) do
~L"""
<div>
<%= @component %>
</div>
"""
end
def another_component_template(assigns) do
~L"""
<span>
<%= @component %>
</span>
"""
end
describe "stateless components" do
test "on mount" do
component = %Component{assigns: %{from: :component}, component: MyComponent}
rendered = component_template(%{component: component})
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => %{0 => "component", 1 => "world", :s => ["FROM ", " ", "\n"]},
:s => ["<div>\n ", "\n</div>\n"]
}
assert socket.fingerprints != {rendered.fingerprint, %{}}
assert components == Diff.new_components()
assert_received {:mount, %Socket{endpoint: __MODULE__, assigns: assigns}}
when assigns == %{flash: %{}}
assert_received {:update, %{from: :component}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
refute_received _
end
test "on update" do
component = %Component{assigns: %{from: :component}, component: MyComponent}
rendered = component_template(%{component: component})
{previous_socket, _, previous_components} = render(rendered)
{socket, full_render, components} =
render(rendered, previous_socket.fingerprints, previous_components)
assert full_render == %{
0 => %{0 => "component", 1 => "world"}
}
assert socket.fingerprints == previous_socket.fingerprints
assert components == previous_components
assert components == Diff.new_components()
assert_received {:mount, %Socket{endpoint: __MODULE__, assigns: assigns}}
when assigns == %{flash: %{}}
assert_received {:update, %{from: :component}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
assert_received {:mount, %Socket{endpoint: __MODULE__, assigns: assigns}}
when assigns == %{flash: %{}}
assert_received {:update, %{from: :component}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
refute_received _
end
test "render only" do
component = %Component{assigns: %{from: :component}, component: RenderOnlyComponent}
rendered = component_template(%{component: component})
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => %{
0 => "component",
:s => ["RENDER ONLY ", "\n"]
},
:s => ["<div>\n ", "\n</div>\n"]
}
assert socket.fingerprints != {rendered.fingerprint, %{}}
assert components == Diff.new_components()
end
test "block tracking" do
assigns = %{socket: %Socket{}}
rendered = ~L"""
<%= live_component BlockNoArgsComponent do %>
INSIDE BLOCK
<% end %>
"""
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => %{
0 => "",
1 => %{s: ["\n INSIDE BLOCK\n"]},
2 => "",
3 => %{s: ["\n INSIDE BLOCK\n"]},
:s => ["HELLO ", " ", "\nHELLO ", " ", "\n"]
},
:s => ["", "\n"]
}
{_socket, full_render, _components} = render(rendered, socket.fingerprints, components)
assert full_render == %{0 => %{0 => "", 2 => ""}}
end
end
describe "function components" do
test "render only" do
assigns = %{socket: %Socket{}}
rendered = ~L"""
<%= component &FunctionComponent.render_only/1, from: :component %>
"""
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => %{
0 => "component",
:s => ["RENDER ONLY ", "\n"]
},
:s => ["", "\n"]
}
assert socket.fingerprints != {rendered.fingerprint, %{}}
assert components == Diff.new_components()
end
test "block tracking without args" do
assigns = %{socket: %Socket{}}
rendered = ~L"""
<%= component &FunctionComponent.render_with_block_no_args/1, id: "DEFAULT" do %>
INSIDE BLOCK
<% end %>
"""
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => %{
0 => "DEFAULT",
1 => %{s: ["\n INSIDE BLOCK\n"]},
2 => "DEFAULT",
3 => %{s: ["\n INSIDE BLOCK\n"]},
:s => ["HELLO ", " ", "\nHELLO ", " ", "\n"]
},
:s => ["", "\n"]
}
{_socket, full_render, _components} = render(rendered, socket.fingerprints, components)
assert full_render == %{0 => %{0 => "DEFAULT", 2 => "DEFAULT"}}
end
defp function_tracking(assigns) do
~L"""
<%= component &FunctionComponent.render_with_block/1, id: @id do %>
<% value -> %>
WITH VALUE <%= value %> - <%= @value %>
<% end %>
"""
end
test "block tracking with args and parent assign" do
assigns = %{socket: %Socket{}, value: 123, id: "DEFAULT"}
{socket, full_render, components} = render(function_tracking(assigns))
assert full_render == %{
0 => %{
0 => "DEFAULT",
1 => %{0 => "1", :s => ["\n WITH VALUE ", " - ", "\n"], 1 => "123"},
2 => "DEFAULT",
3 => %{0 => "2", :s => ["\n WITH VALUE ", " - ", "\n"], 1 => "123"},
:s => ["HELLO ", " ", "\nHELLO ", " ", "\n"]
},
:s => ["", "\n"]
}
{_socket, full_render, _components} =
render(function_tracking(assigns), socket.fingerprints, components)
assert full_render == %{
0 => %{
0 => "DEFAULT",
1 => %{0 => "1", 1 => "123"},
2 => "DEFAULT",
3 => %{0 => "2", 1 => "123"}
}
}
assigns = Map.put(assigns, :__changed__, %{})
{_socket, full_render, _components} =
render(function_tracking(assigns), socket.fingerprints, components)
assert full_render == %{}
assigns = Map.put(assigns, :__changed__, %{id: true})
{_socket, full_render, _components} =
render(function_tracking(assigns), socket.fingerprints, components)
assert full_render == %{
0 => %{
0 => "DEFAULT",
1 => %{0 => "1"},
2 => "DEFAULT",
3 => %{0 => "2"}
}
}
assigns = Map.put(assigns, :__changed__, %{value: true})
{_socket, full_render, _components} =
render(function_tracking(assigns), socket.fingerprints, components)
assert full_render == %{
0 => %{
0 => "DEFAULT",
1 => %{0 => "1", 1 => "123"},
2 => "DEFAULT",
3 => %{0 => "2", 1 => "123"}
}
}
end
test "with live_component" do
assigns = %{socket: %Socket{}}
rendered = ~L"""
<%= component &FunctionComponent.render_with_live_component/1 %>
"""
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => %{0 => 1, :s => ["COMPONENT\n", "\n"]},
:c => %{
1 => %{
0 => "WORLD",
1 => %{0 => "1", :s => ["\n WITH VALUE ", "\n"]},
2 => "WORLD",
3 => %{0 => "2", :s => ["\n WITH VALUE ", "\n"]},
:s => ["HELLO ", " ", "\nHELLO ", " ", "\n"]
}
},
:s => ["", "\n"]
}
{_socket, full_render, _components} = render(rendered, socket.fingerprints, components)
assert full_render == %{0 => %{0 => 1}}
end
end
describe "stateful components" do
test "on mount" do
component = %Component{id: "hello", assigns: %{from: :component}, component: MyComponent}
rendered = component_template(%{component: component})
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => 1,
:c => %{1 => %{0 => "component", 1 => "world", :s => ["FROM ", " ", "\n"]}},
:s => ["<div>\n ", "\n</div>\n"]
}
assert socket.fingerprints == {rendered.fingerprint, %{}}
{cid_to_component, _, 2} = components
assert {MyComponent, "hello", _, _, _} = cid_to_component[1]
assert_received {:mount, %Socket{endpoint: __MODULE__, assigns: assigns}}
when assigns == %{flash: %{}, myself: %CID{cid: 1}}
assert_received {:update, %{from: :component}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
refute_received _
end
test "on root fingerprint change" do
component = %Component{id: "hello", assigns: %{from: :component}, component: MyComponent}
rendered = component_template(%{component: component})
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => 1,
:c => %{1 => %{0 => "component", 1 => "world", :s => ["FROM ", " ", "\n"]}},
:s => ["<div>\n ", "\n</div>\n"]
}
assert socket.fingerprints == {rendered.fingerprint, %{}}
assert_received {:mount, %Socket{endpoint: __MODULE__, assigns: assigns}}
when assigns == %{flash: %{}, myself: %CID{cid: 1}}
assert_received :render
another_rendered = another_component_template(%{component: component})
{another_socket, another_full_render, _} =
render(another_rendered, socket.fingerprints, components)
assert another_full_render == %{
0 => 2,
:c => %{2 => %{0 => "component", 1 => "world", :s => ["FROM ", " ", "\n"]}},
:s => ["<span>\n ", "\n</span>\n"]
}
assert another_socket.fingerprints == {another_rendered.fingerprint, %{}}
assert socket.fingerprints != another_socket.fingerprints
assert_received {:mount, %Socket{endpoint: __MODULE__, assigns: assigns}}
when assigns == %{flash: %{}, myself: %CID{cid: 2}}
assert_received :render
end
test "raises on duplicate component IDs" do
assigns = %{socket: %Socket{}}
rendered = ~L"""
<%= live_component RenderOnlyComponent, id: "SAME", from: "SAME" %>
<%= live_component RenderOnlyComponent, id: "SAME", from: "SAME" %>
"""
assert_raise RuntimeError,
"found duplicate ID \"SAME\" for component Phoenix.LiveView.DiffTest.RenderOnlyComponent when rendering template",
fn -> render(rendered) end
end
test "on update without render" do
component = %Component{id: "hello", assigns: %{from: :component}, component: MyComponent}
rendered = component_template(%{component: component})
{previous_socket, _, previous_components} = render(rendered)
{socket, full_render, components} =
render(rendered, previous_socket.fingerprints, previous_components)
assert full_render == %{0 => 1}
assert socket.fingerprints == previous_socket.fingerprints
assert components == previous_components
assert_received {:mount, %Socket{endpoint: __MODULE__, assigns: assigns}}
when assigns == %{flash: %{}, myself: %CID{cid: 1}}
assert_received {:update, %{from: :component}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
assert_received {:update, %{from: :component}, %Socket{assigns: %{hello: "world"}}}
refute_received _
end
test "on update with render" do
component = %Component{id: "hello", assigns: %{from: :component}, component: MyComponent}
rendered = component_template(%{component: component})
{previous_socket, _, previous_components} = render(rendered)
component = %Component{id: "hello", assigns: %{from: :rerender}, component: MyComponent}
rendered = component_template(%{component: component})
{socket, full_render, components} =
render(rendered, previous_socket.fingerprints, previous_components)
assert full_render == %{0 => 1, :c => %{1 => %{0 => "rerender"}}}
assert socket.fingerprints == previous_socket.fingerprints
assert components != previous_components
assert_received {:mount, %Socket{endpoint: __MODULE__, assigns: assigns}}
when assigns == %{flash: %{}, myself: %CID{cid: 1}}
assert_received {:update, %{from: :component},
%Socket{assigns: %{hello: "world", myself: %CID{cid: 1}}}}
assert_received :render
assert_received {:update, %{from: :rerender},
%Socket{assigns: %{hello: "world", myself: %CID{cid: 1}}}}
assert_received :render
refute_received _
end
test "on update with temporary" do
component = %Component{id: "hello", assigns: %{from: :component}, component: TempComponent}
rendered = component_template(%{component: component})
{previous_socket, full_render, previous_components} = render(rendered)
assert full_render == %{
0 => 1,
:c => %{1 => %{0 => "WELCOME!", :s => ["FROM ", "\n"]}},
:s => ["<div>\n ", "\n</div>\n"]
}
component = %Component{id: "hello", assigns: %{from: :rerender}, component: TempComponent}
rendered = component_template(%{component: component})
{socket, full_render, components} =
render(rendered, previous_socket.fingerprints, previous_components)
assert full_render == %{0 => 1, :c => %{1 => %{0 => "rerender"}}}
assert socket.fingerprints == previous_socket.fingerprints
assert components != previous_components
assert_received {:temporary_mount, %Socket{endpoint: __MODULE__}}
assert_received {:temporary_render, %{first_time: true}}
assert_received {:temporary_render, %{first_time: false}}
refute_received _
end
test "on update with stateless/stateful swap" do
component = %Component{assigns: %{from: :component}, component: MyComponent}
rendered = component_template(%{component: component})
{socket, diff, components} = render(rendered)
assert diff == %{
0 => %{0 => "component", 1 => "world", :s => ["FROM ", " ", "\n"]},
:s => ["<div>\n ", "\n</div>\n"]
}
assert {root_prints, %{0 => {_, %{}}}} = socket.fingerprints
assert {_, _, 1} = components
component = %Component{id: "hello", assigns: %{from: :rerender}, component: MyComponent}
rendered = component_template(%{component: component})
{socket, diff, components} = render(rendered, socket.fingerprints, components)
assert diff == %{
0 => 1,
:c => %{1 => %{0 => "rerender", 1 => "world", :s => ["FROM ", " ", "\n"]}}
}
assert socket.fingerprints == {root_prints, %{}}
assert {_, _, 2} = components
end
test "on preload" do
alias Component, as: C
tree = %C{
component: TreeComponent,
id: "R",
assigns: %{
id: "R",
children: [
%C{
component: TreeComponent,
id: "A",
assigns: %{
id: "A",
children: [
%C{component: TreeComponent, id: "B", assigns: %{id: "B", children: []}},
%C{component: TreeComponent, id: "C", assigns: %{id: "C", children: []}},
%C{component: TreeComponent, id: "D", assigns: %{id: "D", children: []}}
]
}
},
%C{
component: TreeComponent,
id: "X",
assigns: %{
id: "X",
children: [
%C{component: TreeComponent, id: "Y", assigns: %{id: "Y", children: []}},
%C{component: TreeComponent, id: "Z", assigns: %{id: "Z", children: []}}
]
}
}
]
}
}
rendered = component_template(%{component: tree})
{socket, full_render, components} = render(rendered)
assert %{
c: %{
1 => %{0 => "R"},
2 => %{0 => "A"},
3 => %{0 => "X"},
4 => %{0 => "B"},
5 => %{0 => "C"},
6 => %{0 => "D"},
7 => %{0 => "Y"},
8 => %{0 => "Z"}
}
} = full_render
assert socket.fingerprints == {rendered.fingerprint, %{}}
assert {_, _, 9} = components
assert_received {:preload, [%{id: "R"}]}
assert_received {:preload, [%{id: "A"}, %{id: "X"}]}
assert_received {:preload, [%{id: "B"}, %{id: "C"}, %{id: "D"}, %{id: "Y"}, %{id: "Z"}]}
for id <- ~w(R A X B C D Y Z) do
assert_received {:update, %{id: ^id, preloaded?: true}}
end
end
test "on addition" do
component = %Component{id: "hello", assigns: %{from: :component}, component: MyComponent}
rendered = component_template(%{component: component})
{previous_socket, _, previous_components} = render(rendered)
component = %Component{id: "another", assigns: %{from: :another}, component: MyComponent}
rendered = component_template(%{component: component})
{socket, full_render, components} =
render(rendered, previous_socket.fingerprints, previous_components)
assert full_render == %{0 => 2, :c => %{2 => %{0 => "another", 1 => "world", :s => -1}}}
assert socket.fingerprints == previous_socket.fingerprints
assert components != previous_components
assert_received {:mount, %Socket{endpoint: __MODULE__}}
assert_received {:update, %{from: :component}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
assert_received {:mount, %Socket{endpoint: __MODULE__}}
assert_received {:update, %{from: :another}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
refute_received _
end
test "duplicate IDs" do
component = %Component{id: "hello", assigns: %{from: :component}, component: TempComponent}
rendered = component_template(%{component: component})
{previous_socket, _, previous_components} = render(rendered)
component = %Component{id: "hello", assigns: %{from: :replaced}, component: MyComponent}
rendered = component_template(%{component: component})
{socket, full_render, components} =
render(rendered, previous_socket.fingerprints, previous_components)
assert full_render == %{
0 => 2,
:c => %{2 => %{0 => "replaced", 1 => "world", :s => ["FROM ", " ", "\n"]}}
}
assert socket.fingerprints == previous_socket.fingerprints
assert components != previous_components
assert_received {:temporary_mount, %Socket{endpoint: __MODULE__}}
assert_received {:temporary_render, %{first_time: true, from: :component}}
assert_received {:mount, %Socket{endpoint: __MODULE__}}
assert_received {:update, %{from: :replaced}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
refute_received _
end
test "inside comprehension" do
components = [
%Component{id: "index_1", assigns: %{from: :index_1}, component: MyComponent},
%Component{id: "index_2", assigns: %{from: :index_2}, component: MyComponent}
]
assigns = %{components: components}
%{fingerprint: fingerprint} =
rendered = ~L"""
<div>
<%= for {component, index} <- Enum.with_index(@components, 0) do %>
<%= index %>: <%= component %>
<% end %>
</div>
"""
{socket, full_render, components} = render(rendered)
assert full_render == %{
0 => %{d: [["0", 1], ["1", 2]], s: ["\n ", ": ", "\n "]},
:c => %{
1 => %{0 => "index_1", 1 => "world", :s => ["FROM ", " ", "\n"]},
2 => %{0 => "index_2", 1 => "world", :s => 1}
},
:s => ["<div>\n ", "\n</div>\n"]
}
assert {^fingerprint, %{0 => _}} = socket.fingerprints
{cid_to_component, _, 3} = components
assert {MyComponent, "index_1", _, _, _} = cid_to_component[1]
assert {MyComponent, "index_2", _, _, _} = cid_to_component[2]
assert_received {:mount, %Socket{endpoint: __MODULE__}}
assert_received {:update, %{from: :index_1}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
assert_received {:mount, %Socket{endpoint: __MODULE__}}
assert_received {:update, %{from: :index_2}, %Socket{assigns: %{hello: "world"}}}
assert_received :render
end
    # Exercises static-template sharing ("subtree" reuse) across renders:
    # new components can reference the statics of an already-known CID.
    # NOTE(review): `:s => -1` appears to mean "reuse the statics sent for a
    # matching component in a previous render" — confirm against the diff engine.
    test "inside comprehension with subtree" do
      template = fn components ->
        assigns = %{components: components}

        ~L"""
        <div>
          <%= for {component, index} <- Enum.with_index(@components, 0) do %>
            <%= index %>: <%= component %>
          <% end %>
        </div>
        """
      end

      # We start by rendering two components
      components = [
        %Component{id: "index_1", assigns: %{from: :index_1}, component: IfComponent},
        %Component{id: "index_2", assigns: %{from: :index_2}, component: IfComponent}
      ]

      {socket, full_render, diff_components} = render(template.(components))

      assert full_render == %{
               0 => %{d: [["0", 1], ["1", 2]], s: ["\n ", ": ", "\n "]},
               :c => %{
                 1 => %{0 => %{0 => "index_1", :s => ["\n IF ", "\n"]}, :s => ["", "\n"]},
                 2 => %{0 => %{0 => "index_2"}, :s => 1}
               },
               :s => ["<div>\n ", "\n</div>\n"]
             }

      {cid_to_component, _, 3} = diff_components
      assert {IfComponent, "index_1", _, _, _} = cid_to_component[1]
      assert {IfComponent, "index_2", _, _, _} = cid_to_component[2]

      # Now let's add a third component, it shall reuse index_1
      components = [
        %Component{id: "index_3", assigns: %{from: :index_3}, component: IfComponent}
      ]

      {socket, diff, diff_components} =
        render(template.(components), socket.fingerprints, diff_components)

      assert diff == %{
               0 => %{d: [["0", 3]]},
               :c => %{3 => %{0 => %{0 => "index_3"}, :s => -1}}
             }

      {cid_to_component, _, 4} = diff_components
      assert {IfComponent, "index_3", _, _, _} = cid_to_component[3]

      # Now let's add a fourth component, with a different subtree than index_0
      components = [
        %Component{id: "index_4", assigns: %{from: :index_4, if: false}, component: IfComponent}
      ]

      {socket, diff, diff_components} =
        render(template.(components), socket.fingerprints, diff_components)

      # The `if: false` branch has different statics, so they are sent in full.
      assert diff == %{
               0 => %{d: [["0", 4]]},
               :c => %{4 => %{0 => %{0 => "index_4", :s => ["\n ELSE ", "\n"]}, :s => -1}}
             }

      {cid_to_component, _, 5} = diff_components
      assert {IfComponent, "index_4", _, _, _} = cid_to_component[4]

      # Finally, let's add a fifth component while changing the first component at the same time.
      # We should point to the index tree of index_0 before render.
      components = [
        %Component{id: "index_1", assigns: %{from: :index_1, if: false}, component: IfComponent},
        %Component{id: "index_5", assigns: %{from: :index_5}, component: IfComponent}
      ]

      {_socket, diff, diff_components} =
        render(template.(components), socket.fingerprints, diff_components)

      assert diff == %{
               0 => %{d: [["0", 1], ["1", 5]]},
               :c => %{
                 1 => %{0 => %{0 => "index_1", :s => ["\n ELSE ", "\n"]}},
                 5 => %{0 => %{0 => "index_5"}, :s => -1}
               }
             }

      {cid_to_component, _, 6} = diff_components
      assert {IfComponent, "index_5", _, _, _} = cid_to_component[5]
    end
    # Components rendered inside nested comprehensions: each (prefix, component)
    # pair gets its own CID because the component id is made unique per prefix.
    # Later CIDs backreference earlier statics (`:s => 1`, `:s => 3`).
    test "inside nested comprehension" do
      components = [
        %Component{id: "index_1", assigns: %{from: :index_1}, component: MyComponent},
        %Component{id: "index_2", assigns: %{from: :index_2}, component: MyComponent}
      ]

      assigns = %{components: components, ids: ["foo", "bar"]}

      %{fingerprint: fingerprint} =
        rendered = ~L"""
        <div>
          <%= for prefix_id <- @ids do %>
            <%= prefix_id %>
            <%= for {component, index} <- Enum.with_index(@components, 0) do %>
              <%= index %>: <%= %{component | id: "#{prefix_id}-#{component.id}"} %>
            <% end %>
          <% end %>
        </div>
        """

      {socket, full_render, components} = render(rendered)

      assert full_render == %{
               0 => %{
                 d: [
                   ["foo", %{d: [["0", 1], ["1", 2]], s: ["\n ", ": ", "\n "]}],
                   ["bar", %{d: [["0", 3], ["1", 4]], s: ["\n ", ": ", "\n "]}]
                 ],
                 s: ["\n ", "\n ", "\n "]
               },
               :c => %{
                 1 => %{0 => "index_1", 1 => "world", :s => ["FROM ", " ", "\n"]},
                 2 => %{0 => "index_2", 1 => "world", :s => 1},
                 3 => %{0 => "index_1", 1 => "world", :s => 1},
                 4 => %{0 => "index_2", 1 => "world", :s => 3}
               },
               :s => ["<div>\n ", "\n</div>\n"]
             }

      assert {^fingerprint, %{0 => _}} = socket.fingerprints

      # Four distinct CIDs: prefixing produces "foo-index_1", "bar-index_1", etc.
      {cid_to_component, _, 5} = components
      assert {MyComponent, "foo-index_1", _, _, _} = cid_to_component[1]
      assert {MyComponent, "foo-index_2", _, _, _} = cid_to_component[2]
      assert {MyComponent, "bar-index_1", _, _, _} = cid_to_component[3]
      assert {MyComponent, "bar-index_2", _, _, _} = cid_to_component[4]

      # All four instances go through the mount/update/render lifecycle.
      for from <- [:index_1, :index_2, :index_1, :index_2] do
        assert_received {:mount, %Socket{endpoint: __MODULE__}}
        assert_received {:update, %{from: ^from}, %Socket{assigns: %{hello: "world"}}}
        assert_received :render
      end
    end
    # A nested rendered template (component_template/1) inside a comprehension:
    # the wrapper template's statics repeat per row, while the component body
    # is still extracted under :c.
    test "inside rendered inside comprehension" do
      components = [
        %Component{id: "index_1", assigns: %{from: :index_1}, component: MyComponent},
        %Component{id: "index_2", assigns: %{from: :index_2}, component: MyComponent}
      ]

      assigns = %{components: components}

      %{fingerprint: fingerprint} =
        rendered = ~L"""
        <div>
          <%= for {component, index} <- Enum.with_index(@components, 1) do %>
            <%= index %>: <%= component_template(%{component: component}) %>
          <% end %>
        </div>
        """

      {socket, full_render, components} = render(rendered)

      assert full_render == %{
               0 => %{
                 d: [
                   ["1", %{0 => 1, :s => ["<div>\n ", "\n</div>\n"]}],
                   ["2", %{0 => 2, :s => ["<div>\n ", "\n</div>\n"]}]
                 ],
                 s: ["\n ", ": ", "\n "]
               },
               :c => %{
                 1 => %{0 => "index_1", 1 => "world", :s => ["FROM ", " ", "\n"]},
                 2 => %{0 => "index_2", 1 => "world", :s => 1}
               },
               :s => ["<div>\n ", "\n</div>\n"]
             }

      assert {^fingerprint, %{0 => _}} = socket.fingerprints
      {cid_to_component, _, 3} = components
      assert {MyComponent, "index_1", _, _, _} = cid_to_component[1]
      assert {MyComponent, "index_2", _, _, _} = cid_to_component[2]

      assert_received {:mount, %Socket{endpoint: __MODULE__}}
      assert_received {:update, %{from: :index_1}, %Socket{assigns: %{hello: "world"}}}
      assert_received :render
      assert_received {:mount, %Socket{endpoint: __MODULE__}}
      assert_received {:update, %{from: :index_2}, %Socket{assigns: %{hello: "world"}}}
      assert_received :render
    end
    # A component behind an `if` inside a comprehension: only the row whose
    # condition is true mounts a component (the first row renders as ""),
    # so exactly one CID is allocated.
    test "inside condition inside comprehension" do
      components = [
        %Component{id: "index_1", assigns: %{from: :index_1}, component: MyComponent},
        %Component{id: "index_2", assigns: %{from: :index_2}, component: MyComponent}
      ]

      assigns = %{components: components}

      %{fingerprint: fingerprint} =
        rendered = ~L"""
        <div>
          <%= for {component, index} <- Enum.with_index(@components, 1) do %>
            <%= if index > 1 do %><%= index %>: <%= component %><% end %>
          <% end %>
        </div>
        """

      {socket, full_render, components} = render(rendered)

      assert full_render == %{
               0 => %{
                 d: [[""], [%{0 => "2", 1 => 1, :s => ["", ": ", ""]}]],
                 s: ["\n ", "\n "]
               },
               :c => %{1 => %{0 => "index_2", 1 => "world", :s => ["FROM ", " ", "\n"]}},
               :s => ["<div>\n ", "\n</div>\n"]
             }

      assert {^fingerprint, %{0 => _}} = socket.fingerprints
      # Only one component mounted, so the next free CID is 2.
      {cid_to_component, _, 2} = components
      assert {MyComponent, "index_2", _, _, _} = cid_to_component[1]

      assert_received {:mount, %Socket{endpoint: __MODULE__}}
      assert_received {:update, %{from: :index_2}, %Socket{assigns: %{hello: "world"}}}
      assert_received :render
      # index_1 never rendered, so it must not have been updated.
      refute_received {:update, %{from: :index_1}, %Socket{assigns: %{hello: "world"}}}
    end
    # Deeply nested dynamic components: verifies statics backreferences across
    # CIDs (`:s => 1`, `:s => 3`) and that the flattened render still contains
    # the expected text.
    test "inside comprehension inside live_component without static" do
      assigns = %{socket: %Socket{}}

      %{fingerprint: _fingerprint} =
        rendered = ~L"""
        <%= for key <- [:b, :c, :a] do %>
          <%= live_component(NestedDynamicComponent, id: key, key: key) %>
        <% end %>
        """

      {_socket, full_render, _components} = render(rendered)

      assert full_render == %{
               0 => %{d: [[1], [2], [3]], s: ["\n ", "\n"]},
               :c => %{
                 1 => %{0 => %{0 => "", :s => ["", "\n"]}, :s => ["", "\n"]},
                 2 => %{0 => %{0 => 4, :s => ["", "\n"]}, :s => 1},
                 3 => %{
                   0 => %{
                     0 => %{d: [["nothing", "nothing"]], s: ["\n ", "", "\n"]},
                     :s => ["", "\n"]
                   },
                   :s => 1
                 },
                 4 => %{0 => %{0 => %{d: [["nothing", "nothing"]]}}, :s => 3}
               },
               :s => ["", "\n"]
             }

      # Sanity check: resolving the diff back to HTML yields the row content.
      assert rendered_to_binary(full_render) =~ "nothingnothing"
    end
    # Change tracking for a live_component with an inner block: the first
    # render ships the full component tree; an unchanged re-render collapses
    # to just the CID reference (%{0 => 1}).
    test "block tracking" do
      assigns = %{socket: %Socket{}}

      rendered = ~L"""
      <%= live_component BlockComponent, id: "WORLD" do %>
        WITH VALUE <%= @value %>
      <% end %>
      """

      {socket, full_render, components} = render(rendered)

      assert full_render == %{
               0 => 1,
               :c => %{
                 1 => %{
                   0 => "WORLD",
                   1 => %{0 => "1", :s => ["\n WITH VALUE ", "\n"]},
                   2 => "WORLD",
                   3 => %{0 => "2", :s => ["\n WITH VALUE ", "\n"]},
                   :s => ["HELLO ", " ", "\nHELLO ", " ", "\n"]
                 }
               },
               :s => ["", "\n"]
             }

      # Second render with the same fingerprints/components: nothing changed,
      # so only the component reference is sent.
      {_socket, full_render, _components} = render(rendered, socket.fingerprints, components)
      assert full_render == %{0 => 1}
    end
    # Shared fixture for the block-tracking tests below: a live_component whose
    # inner block reads both a parent assign (@parent_value) and the block
    # value (@value). Template whitespace is significant to the expected diffs.
    defp tracking(assigns) do
      ~L"""
      <%= live_component BlockComponent, %{id: "TRACKING"} do %>
        WITH PARENT VALUE <%= @parent_value %>
        WITH VALUE <%= @value %>
      <% end %>
      """
    end
    # TODO: Change this to "with args and parent assign" once we deprecate implicit assigns
    # Verifies that a parent assign referenced inside a component's block is
    # change-tracked: re-rendering with @parent_value marked changed re-sends
    # only the affected dynamics, not the statics.
    test "block tracking with child and parent assigns" do
      assigns = %{socket: %Socket{}, parent_value: 123}
      {socket, full_render, components} = render(tracking(assigns))

      assert full_render == %{
               0 => 1,
               :c => %{
                 1 => %{
                   0 => "TRACKING",
                   1 => %{
                     0 => "123",
                     1 => "1",
                     :s => ["\n WITH PARENT VALUE ", "\n WITH VALUE ", "\n"]
                   },
                   2 => "TRACKING",
                   3 => %{
                     0 => "123",
                     1 => "2",
                     :s => ["\n WITH PARENT VALUE ", "\n WITH VALUE ", "\n"]
                   },
                   :s => ["HELLO ", " ", "\nHELLO ", " ", "\n"]
                 }
               },
               :s => ["", "\n"]
             }

      # No changes: only the CID reference is re-sent.
      {_socket, full_render, _components} =
        render(tracking(assigns), socket.fingerprints, components)

      assert full_render == %{0 => 1}

      # Changing the root assign
      assigns = %{socket: %Socket{}, parent_value: 123, __changed__: %{parent_value: true}}

      {_socket, full_render, _components} =
        render(tracking(assigns), socket.fingerprints, components)

      # Only the dynamics that depend on @parent_value are included; statics
      # are omitted because the client already has them.
      assert full_render == %{
               0 => 1,
               :c => %{
                 1 => %{
                   1 => %{0 => "123", 1 => "1"},
                   3 => %{0 => "123", 1 => "2"}
                 }
               }
             }
    end
end
end
| 32.590496
| 133
| 0.485173
|
93e1d4ccaee95401fa2b661828d49845c2afc524
| 124
|
ex
|
Elixir
|
lib/google_fit/activity_type/cricket.ex
|
tsubery/google_fit
|
7578b832c560b3b4a78059ac86af6e111812712e
|
[
"Apache-2.0"
] | 2
|
2017-02-01T13:51:26.000Z
|
2019-04-12T11:37:25.000Z
|
lib/google_fit/activity_type/cricket.ex
|
tsubery/google_fit
|
7578b832c560b3b4a78059ac86af6e111812712e
|
[
"Apache-2.0"
] | null | null | null |
lib/google_fit/activity_type/cricket.ex
|
tsubery/google_fit
|
7578b832c560b3b4a78059ac86af6e111812712e
|
[
"Apache-2.0"
] | null | null | null |
defmodule GoogleFit.ActivityType.Cricket do
  @moduledoc false

  # Resolves this activity's numeric code by delegating to the shared
  # ActivityType lookup, which is keyed on the module itself.
  def code do
    GoogleFit.ActivityType.code(__MODULE__)
  end
end
| 20.666667
| 55
| 0.806452
|
93e2019e363aec3dc74e8e489cdb65497fba3e89
| 761
|
ex
|
Elixir
|
examples/originate.ex
|
evangilo/switchx
|
bbd045dd3c067c23663d17a9f9db632ab4affd22
|
[
"MIT"
] | 8
|
2020-03-30T11:14:19.000Z
|
2022-03-24T11:53:30.000Z
|
examples/originate.ex
|
evangilo/switchx
|
bbd045dd3c067c23663d17a9f9db632ab4affd22
|
[
"MIT"
] | 5
|
2020-03-27T00:00:54.000Z
|
2020-04-23T00:30:08.000Z
|
examples/originate.ex
|
evangilo/switchx
|
bbd045dd3c067c23663d17a9f9db632ab4affd22
|
[
"MIT"
] | 1
|
2020-04-22T23:55:52.000Z
|
2020-04-22T23:55:52.000Z
|
defmodule Examples.InboundSocket do
  @moduledoc false

  # Example inbound-socket session: originates a call to verto extension 800,
  # parks it, plays a greeting, reports the playback duration, then hangs up
  # and closes the connection.
  #
  # Returns :ok on success, or an "Error ..." string if origination fails.
  def originate() do
    {:ok, conn} = SwitchX.Connection.Inbound.start_link([host: "192.168.56.10", port: 8021])
    SwitchX.auth(conn, "ClueCon")

    # Fixed dial string: the original "${verto_contact(800}" was missing the
    # closing parenthesis required by FreeSWITCH variable-expansion syntax.
    case SwitchX.originate(conn, "${verto_contact(800)}", "&park()", :expand) do
      {:ok, uuid} ->
        IO.puts("Success #{uuid}")
        # Give the call a moment to be answered/parked before playback.
        Process.sleep(1000)

        IO.puts("Playing some file")
        event = SwitchX.execute(conn, uuid, "playback", "ivr/ivr-welcome_to_freeswitch.wav")
        IO.puts("Playback duration was #{event.headers["variable_playback_ms"]} ms")

        IO.puts("Bye")
        SwitchX.hangup(conn, uuid, "NORMAL_CLEARING")
        SwitchX.exit(conn)
        :ok

      {:error, term} ->
        "Error #{term}"
    end
  end
end
| 29.269231
| 92
| 0.616294
|
93e20bb72bac5059438199be0e634a21092b031e
| 1,103
|
exs
|
Elixir
|
apps/note_it_web/config/dev.exs
|
sushilman/note-it
|
c47edafb4272b9f01d53f8566f5ed7858f2d355c
|
[
"Apache-2.0"
] | null | null | null |
apps/note_it_web/config/dev.exs
|
sushilman/note-it
|
c47edafb4272b9f01d53f8566f5ed7858f2d355c
|
[
"Apache-2.0"
] | null | null | null |
apps/note_it_web/config/dev.exs
|
sushilman/note-it
|
c47edafb4272b9f01d53f8566f5ed7858f2d355c
|
[
"Apache-2.0"
] | null | null | null |
# NOTE(review): `use Mix.Config` is deprecated in modern Elixir in favor of
# `import Config` — safe to migrate when the project's Elixir version allows.
use Mix.Config

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :note_it_web, NoteItWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
                    cd: Path.expand("../", __DIR__)]]

# Watch static and templates for browser reloading.
config :note_it_web, NoteItWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{web/views/.*(ex)$},
      ~r{web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 31.514286
| 73
| 0.69175
|
93e20e27125bf3f84dd0e33d6703d1e22d80c4e4
| 1,607
|
ex
|
Elixir
|
clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/batch_create_rows_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/batch_create_rows_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/batch_create_rows_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Area120Tables.V1alpha1.Model.BatchCreateRowsResponse do
  @moduledoc """
  Response message for TablesService.BatchCreateRows.

  ## Attributes

  * `rows` (*type:* `list(GoogleApi.Area120Tables.V1alpha1.Model.Row.t)`, *default:* `nil`) - The created rows.
  """

  # ModelBase supplies the struct definition plus encode/decode helpers
  # driven by the `field` declarations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :rows => list(GoogleApi.Area120Tables.V1alpha1.Model.Row.t())
        }

  # `:rows` is decoded as a list of Row models (not plain maps).
  field(:rows, as: GoogleApi.Area120Tables.V1alpha1.Model.Row, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Area120Tables.V1alpha1.Model.BatchCreateRowsResponse do
  # Decoding is delegated to the model's decode/2, generated by
  # GoogleApi.Gax.ModelBase from the field declarations.
  def decode(value, options),
    do: GoogleApi.Area120Tables.V1alpha1.Model.BatchCreateRowsResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Area120Tables.V1alpha1.Model.BatchCreateRowsResponse do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.191489
| 113
| 0.756067
|
93e2169eb0fb06f96c022d8a62c523f28e43bd03
| 3,514
|
ex
|
Elixir
|
lib/error.ex
|
infinityoneframework/kadabra
|
0cedd740010514977d8b0fca6583f770ca78cac1
|
[
"MIT"
] | 35
|
2016-10-17T09:52:46.000Z
|
2021-01-04T16:27:46.000Z
|
lib/error.ex
|
infinityoneframework/kadabra
|
0cedd740010514977d8b0fca6583f770ca78cac1
|
[
"MIT"
] | 26
|
2016-12-06T17:21:41.000Z
|
2021-11-26T18:49:05.000Z
|
lib/error.ex
|
infinityoneframework/kadabra
|
0cedd740010514977d8b0fca6583f770ca78cac1
|
[
"MIT"
] | 13
|
2016-12-06T16:03:30.000Z
|
2021-12-16T10:28:03.000Z
|
defmodule Kadabra.Error do
  @moduledoc false

  # HTTP/2 error-code table (RFC 7540, section 7). The `parse/1` and `code/1`
  # clauses are generated from this single source of truth at compile time.
  @error_codes [
    NO_ERROR: 0x0,
    PROTOCOL_ERROR: 0x1,
    INTERNAL_ERROR: 0x2,
    FLOW_CONTROL_ERROR: 0x3,
    SETTINGS_TIMEOUT: 0x4,
    STREAM_CLOSED: 0x5,
    FRAME_SIZE_ERROR: 0x6,
    REFUSED_STREAM: 0x7,
    CANCEL: 0x8,
    COMPRESSION_ERROR: 0x9,
    CONNECT_ERROR: 0xA,
    ENHANCE_YOUR_CALM: 0xB,
    INADEQUATE_SECURITY: 0xC,
    HTTP_1_1_REQUIRED: 0xD
  ]

  @type error ::
          :NO_ERROR
          | :PROTOCOL_ERROR
          | :INTERNAL_ERROR
          | :FLOW_CONTROL_ERROR
          | :SETTINGS_TIMEOUT
          | :STREAM_CLOSED
          | :FRAME_SIZE_ERROR
          | :REFUSED_STREAM
          | :CANCEL
          | :COMPRESSION_ERROR
          | :CONNECT_ERROR
          | :ENHANCE_YOUR_CALM
          | :INADEQUATE_SECURITY
          | :HTTP_1_1_REQUIRED

  @doc ~S"""
  32-bit error code of type `NO_ERROR`.

  The associated condition is not a result of an error; for example a GOAWAY
  might carry it to indicate graceful connection shutdown.

  ## Examples

      iex> Kadabra.Error.no_error
      <<0, 0, 0, 0>>
  """
  @spec no_error :: <<_::32>>
  def no_error, do: <<0x0::32>>

  @doc ~S"""
  32-bit error code of type `PROTOCOL_ERROR`, used when no more specific
  error code is available.

  ## Examples

      iex> Kadabra.Error.protocol_error
      <<0, 0, 0, 1>>
  """
  @spec protocol_error :: <<_::32>>
  def protocol_error, do: <<0x1::32>>

  @doc ~S"""
  32-bit error code of type `FLOW_CONTROL_ERROR`: the peer violated the
  flow-control protocol.

  ## Examples

      iex> Kadabra.Error.flow_control_error
      <<0, 0, 0, 3>>
  """
  @spec flow_control_error :: <<_::32>>
  def flow_control_error, do: <<0x3::32>>

  @doc ~S"""
  32-bit error code of type `FRAME_SIZE_ERROR`.

  ## Examples

      iex> Kadabra.Error.frame_size_error
      <<0, 0, 0, 6>>
  """
  @spec frame_size_error :: <<_::32>>
  def frame_size_error, do: <<0x6::32>>

  @doc ~S"""
  32-bit error code of type `COMPRESSION_ERROR`.

  ## Examples

      iex> Kadabra.Error.compression_error
      <<0, 0, 0, 9>>
  """
  @spec compression_error :: <<_::32>>
  def compression_error, do: <<0x9::32>>

  @doc ~S"""
  Converts an integer error code in `0x0..0xd` to its atom name.
  Unknown codes are returned unchanged.

  ## Examples

      iex> Kadabra.Error.parse(0x1)
      :PROTOCOL_ERROR

      iex> Kadabra.Error.parse(0xfff)
      0xfff
  """
  @spec parse(integer) :: error | integer
  # One clause per known code, generated from the table above.
  for {name, int} <- @error_codes do
    def parse(unquote(int)), do: unquote(name)
  end

  def parse(unknown), do: unknown

  @doc ~S"""
  Converts an error atom to its integer code.
  Unknown atoms are returned unchanged.

  ## Examples

      iex> Kadabra.Error.code(:PROTOCOL_ERROR)
      0x1

      iex> Kadabra.Error.code(:NOT_AN_ERROR)
      :NOT_AN_ERROR
  """
  @spec code(error) :: integer
  # One clause per known name, generated from the table above.
  for {name, int} <- @error_codes do
    def code(unquote(name)), do: unquote(int)
  end

  def code(unknown), do: unknown
end
| 25.463768
| 75
| 0.640865
|
93e239a7d7ac9815b549b676df1fab0d785da7b3
| 2,052
|
exs
|
Elixir
|
test/phoenix_html_simplified_helpers/url_test.exs
|
ryochin/phoenix_html_simplified_helpers
|
873c7b3eac9374f4805a3b1288d75496ee7b056b
|
[
"MIT"
] | 31
|
2016-02-27T18:15:12.000Z
|
2022-02-23T11:34:09.000Z
|
test/phoenix_html_simplified_helpers/url_test.exs
|
ryochin/phoenix_html_simplified_helpers
|
873c7b3eac9374f4805a3b1288d75496ee7b056b
|
[
"MIT"
] | 13
|
2016-05-26T14:08:59.000Z
|
2020-10-13T11:03:08.000Z
|
test/phoenix_html_simplified_helpers/url_test.exs
|
ryochin/phoenix_html_simplified_helpers
|
873c7b3eac9374f4805a3b1288d75496ee7b056b
|
[
"MIT"
] | 15
|
2016-05-21T09:54:32.000Z
|
2021-09-23T01:43:03.000Z
|
Code.require_file("../../test_helper.exs", __ENV__.file)

defmodule Phoenix.HTML.SimplifiedHelpers.URLTest do
  @moduledoc false
  alias Phoenix.HTML.SimplifiedHelpers

  use ExUnit.Case
  use Plug.Test
  use SimplifiedHelpers
  import SimplifiedHelpers

  doctest SimplifiedHelpers

  # @opts SimplifiedHelpers.Router.init([])

  # Builds a test connection for `path` routed through the helpers' router,
  # which url_for/current_page? read from conn.private.phoenix_router.
  defp routed_conn(path \\ "/") do
    conn(:get, path) |> Map.put(:private, %{phoenix_router: SimplifiedHelpers.Router})
  end

  test "url_for home" do
    assert "/" == url_for(routed_conn(), "home.index")
  end

  test "url_for entry" do
    assert "/release/" == url_for(routed_conn(), "entry.release:")
  end

  test "url_for with params" do
    assert "/release/percent" == url_for(routed_conn(), "entry.release:percent")
  end

  test "url_for with options" do
    assert "/release/?some=query" == url_for(routed_conn(), "entry.release:", some: "query")
  end

  test "url_for with options two" do
    assert "/release/?some=query&unko=query2" ==
             url_for(routed_conn(), "entry.release:", some: "query", unko: "query2")
  end

  # NOTE: test names below fixed from the original typo "current_pate?".
  test "current_page? one" do
    # Query-string values are URL-decoded before comparison.
    conn = routed_conn("/release/?percent=oh+yes%21")
    assert true == current_page?(conn, "entry.release:", percent: "oh yes!")
  end

  test "current_page? two" do
    conn = routed_conn("/release/?unko1=1&unko2=3")
    assert false == current_page?(conn, "entry.release:", unko1: "1", unko2: "2")
  end

  test "current_page? three" do
    assert true == current_page?(routed_conn("/release/"), "entry.release:")
  end
end
| 31.090909
| 95
| 0.661306
|
93e2693d6d8bf58f9ef0bb5a11646b2cb2586aea
| 1,826
|
ex
|
Elixir
|
lib/skeleton/types/union/union_type.ex
|
haskric/map_schema
|
615d6428e168a0d3991d334cba76c2d8e5c417b6
|
[
"MIT"
] | 3
|
2020-12-15T09:04:57.000Z
|
2021-06-11T02:01:09.000Z
|
lib/skeleton/types/union/union_type.ex
|
haskric/map_schema
|
615d6428e168a0d3991d334cba76c2d8e5c417b6
|
[
"MIT"
] | null | null | null |
lib/skeleton/types/union/union_type.ex
|
haskric/map_schema
|
615d6428e168a0d3991d334cba76c2d8e5c417b6
|
[
"MIT"
] | null | null | null |
defmodule MapSchema.Types.TypeUnion do
  @moduledoc """
  Macro for creation Union types.
  """
  alias MapSchema.Exceptions
  alias MapSchema.Types.Default
  alias MapSchema.Types.TypeUnion.Methods

  # Injects a CustomType implementation into the using module. Options:
  #   :name  - the type's name (atom or string)
  #   :types - a list of member types (custom-type modules or atoms naming
  #            default types), resolved by transform_list_types/1 below.
  defmacro __using__(opts) do
    name = Keyword.get(opts, :name)

    # :types arrives as a quoted AST at macro-expansion time; resolve it to a
    # concrete list of modules before injecting it into the generated code.
    list_types = Keyword.get(opts, :types)
    |> transform_list_types()

    quote bind_quoted: [name: name, list_types: list_types] do
      @behaviour MapSchema.CustomType

      @spec name :: atom | String.t()
      def name, do: unquote(name)

      # I dont know
      def nested?, do: true

      # Tries each member type in order; :map_schema_type_error if none match.
      @spec cast(value :: any) :: any | :map_schema_type_error
      def cast(value) do
        Methods.cast(value, unquote(list_types))
      end

      # True if the value is valid for at least one member type.
      @spec is_valid?(any) :: boolean
      def is_valid?(value) do
        Methods.is_valid?(value, unquote(list_types))
      end

      @spec doctest_values :: [{any, any}]
      def doctest_values do
        Methods.doctest_values(unquote(list_types))
      end
    end
  end

  # Evaluates the quoted :types list and maps each entry to a type module:
  # already-valid modules pass through; bare atoms are looked up among the
  # default types. Anything else aborts via the config-error exception.
  defp transform_list_types(list_types) do
    {list_types, []} = Code.eval_quoted(list_types)

    list_types
    |> Enum.map(fn(type_module) ->
      cond do
        is_valid_module?(type_module) -> type_module
        is_atom(type_module) ->
          Default.get_default_type_module(type_module)
        true ->
          throw :error
      end
    end)
  catch
    _e ->
      Exceptions.throw_config_union_type_definition_error()
  end

  # A valid member type is a loaded module exposing the CustomType API.
  defp is_valid_module?(module) do
    is_module?(module) and is_map_schema_module?(module)
  end

  # Loaded modules always export __info__/1.
  defp is_module?(module) do
    function_exported?(module, :__info__, 1)
  end

  # Duck-types the CustomType contract: cast/1, is_valid?/1, doctest_values/0.
  defp is_map_schema_module?(module) do
    function_exported?(module, :cast, 1)
    and
    function_exported?(module, :is_valid?, 1)
    and
    function_exported?(module, :doctest_values, 0)
  end
end
| 23.714286
| 62
| 0.657174
|
93e28dc2fe04d4b497c43759f3c95d5841eb6788
| 1,083
|
exs
|
Elixir
|
test/radio_test.exs
|
mliszcz/radio-playlistgen
|
05fc6e5c44ded4ee2c79373661189caa6da8ca14
|
[
"MIT"
] | null | null | null |
test/radio_test.exs
|
mliszcz/radio-playlistgen
|
05fc6e5c44ded4ee2c79373661189caa6da8ca14
|
[
"MIT"
] | null | null | null |
test/radio_test.exs
|
mliszcz/radio-playlistgen
|
05fc6e5c44ded4ee2c79373661189caa6da8ca14
|
[
"MIT"
] | null | null | null |
defmodule RadioTest do
  use ExUnit.Case

  # End-to-end check of Radio.generate/2: serves one PLS and one M3U playlist
  # over local one-shot HTTP servers, where only some of the listed streams
  # respond, and asserts the merged PLS output contains only the live streams.
  test "generate playlist file" do
    # PLS source: entry 1 (port 8101) is dead, entry 2 (port 8102) is alive.
    playlist1 = """
    NumberOfEntries=1
    Version=2

    Title1=non-existent stream
    File1=http://localhost:8101
    Length1=-1

    Title2=existing stream
    File2=http://localhost:8102
    Length2=l2
    """

    # Stream probe for the live PLS entry, then the playlist itself.
    HTTPServer.serve_once("HTTP/1.0 200 OK\r\nContent-Length: 0\r\n\r\n", 8102)
    HTTPServer.serve_once("HTTP/1.0 200 OK\r\nContent-Length: #{byte_size playlist1}\r\n\r\n#{playlist1}", 8100)

    # M3U source: entry on port 8201 is dead, port 8202 is alive.
    playlist2 = """
    http://localhost:8201
    http://localhost:8202
    """

    HTTPServer.serve_once("HTTP/1.0 200 OK\r\nContent-Length: 0\r\n\r\n", 8202)
    HTTPServer.serve_once("HTTP/1.0 200 OK\r\nContent-Length: #{byte_size playlist2}\r\n\r\n#{playlist2}", 8200)

    result = Radio.generate( %{
      "Radio@8100" => {:pls, "http://localhost:8100"},
      "Radio@8200" => {:m3u, "http://localhost:8200"}
    }, 2000)

    # Only the reachable stream from each source survives, titled by key.
    assert result == """
    [playlist]
    NumberOfEntries=2
    Version=2

    File1=http://localhost:8102
    Title1=Radio@8100
    Length1=-1

    File2=http://localhost:8202
    Title2=Radio@8200
    Length2=-1
    """
  end
end
| 22.102041
| 110
| 0.6759
|
93e295450bde61af13388269ea7be8f5cdb94766
| 3,649
|
exs
|
Elixir
|
config/dev.exs
|
RatioPBC/epi-viaduct-nys
|
99fb637785ea207aee5449fa01fa59dd18ec8bf2
|
[
"MIT"
] | 2
|
2021-06-22T21:01:49.000Z
|
2021-11-04T18:36:48.000Z
|
config/dev.exs
|
RatioPBC/epi-viaduct-nys
|
99fb637785ea207aee5449fa01fa59dd18ec8bf2
|
[
"MIT"
] | null | null | null |
config/dev.exs
|
RatioPBC/epi-viaduct-nys
|
99fb637785ea207aee5449fa01fa59dd18ec8bf2
|
[
"MIT"
] | null | null | null |
import Config

# AWS credential resolution order for local development: explicit session
# token env var, then the AWS CLI profile (30s refresh), then instance role.
config :ex_aws,
  session_token: [{:system, "AWS_SESSION_TOKEN"}, {:awscli, System.get_env("AWS_PROFILE"), 30}, :instance_role]

# Configure your database
# Prefer a Unix-socket connection when PGDATA is set; otherwise fall back to
# a TCP URL (overridable via DATABASE_URL).
repo_opts =
  if socket_dir = System.get_env("PGDATA") do
    [socket_dir: socket_dir]
  else
    [url: System.get_env("DATABASE_URL", "postgres://postgres@localhost/nys_etl_dev")]
  end

config :nys_etl, NYSETL.Repo, [database: "nys_etl_dev", show_sensitive_data_on_connection_error: true, pool_size: 20] ++ repo_opts

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :nys_etl, NYSETLWeb.Endpoint,
  check_origin: false,
  code_reloader: true,
  debug_errors: true,
  http: [port: 4000],
  server: true,
  url: [host: {:system, "CANONICAL_HOST"}, port: {:system, "PORT"}],
  live_reload: [
    patterns: [
      ~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
      ~r"priv/gettext/.*(po)$",
      ~r"lib/nys_etl_web/(live|views)/.*(ex)$",
      ~r"lib/nys_etl_web/templates/.*(eex)$"
    ]
  ],
  watchers: [
    node: [
      "node_modules/webpack/bin/webpack.js",
      "--mode",
      "development",
      "--watch-options-stdin",
      cd: Path.expand("../assets", __DIR__)
    ]
  ]

# Local stand-ins for CommCare county domains; metrics and background
# workers are disabled in development.
config :nys_etl,
  cloudwatch_metrics_enabled: false,
  county_list: [
    %{
      "analysis_date" => "2020-05-24 00:00:00",
      "county_display" => "Staging",
      "county_name" => "staging",
      "county_sort" => "zz_staging",
      "domain" => "ny-staging-cdcms",
      "fips" => "600",
      "gaz" => "",
      "is_state_domain" => "",
      "location_id" => "d87c0134be3c401bb14e6c4420b7a178",
      "participating" => "yes"
    },
    %{
      "analysis_date" => "2020-05-24 00:00:00",
      "county_display" => "Integrations",
      "county_name" => "integrations",
      "county_sort" => "zz_integrations",
      "domain" => "ny-integrations-cdcms",
      "fips" => "800",
      "gaz" => "",
      "is_state_domain" => "",
      "location_id" => "8a4a05a87003445ab63e9407c6f00bf4",
      "participating" => "yes"
    }
  ],
  start_viaduct_workers: false

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
#     mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
#     https: [
#       port: 4001,
#       cipher_suite: :strong,
#       keyfile: "priv/cert/selfsigned_key.pem",
#       certfile: "priv/cert/selfsigned.pem"
#     ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static and templates for browser reloading.
config :nys_etl, :basic_auth,
  dashboard_username: System.get_env("DASHBOARD_USERNAME"),
  dashboard_password: System.get_env("DASHBOARD_PASSWORD")

# Per-level log files plus a plain console format.
config :logger, :debug_log_file,
  path: "log/debug.log",
  level: :debug

config :logger, :info_log_file,
  path: "log/info.log",
  level: :info

config :logger, :error_log_file,
  path: "log/error.log",
  level: :warn

config :logger, :console, format: "[$level] $message\n"

config :logger, level: :info

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 28.732283
| 130
| 0.660455
|
93e2c4c124b369409e07fe926c928a5fc931f220
| 2,296
|
exs
|
Elixir
|
lib/eex/test/eex/tokenizer_test.exs
|
ekosz/elixir
|
62e375bc711b4072e1b68de776e96cc31f571d45
|
[
"Apache-2.0"
] | 1
|
2017-10-29T16:37:08.000Z
|
2017-10-29T16:37:08.000Z
|
lib/eex/test/eex/tokenizer_test.exs
|
ekosz/elixir
|
62e375bc711b4072e1b68de776e96cc31f571d45
|
[
"Apache-2.0"
] | null | null | null |
lib/eex/test/eex/tokenizer_test.exs
|
ekosz/elixir
|
62e375bc711b4072e1b68de776e96cc31f571d45
|
[
"Apache-2.0"
] | null | null | null |
Code.require_file "../../test_helper", __FILE__

# Tests for the EEx tokenizer from an early Elixir release: templates may be
# charlists or binaries, and tokens are {type, line, marker, content} tuples
# where markers/content are charlists.
defmodule EEx.TokenizerTest do
  use ExUnit.Case, async: true
  require EEx.Tokenizer, as: T

  test "simple chars lists" do
    assert T.tokenize('foo', 1) == [ { :text, 1, "foo" } ]
  end

  test "simple strings" do
    assert T.tokenize("foo", 1) == [ { :text, 1, "foo" } ]
  end

  test "strings with embedded code" do
    # Plain <% %> tags carry an empty marker ([]).
    assert T.tokenize('foo <% bar %>', 1) == [ { :text, 1, "foo " }, { :expr, 1, [], ' bar ' } ]
  end

  test "strings with embedded equals code" do
    # <%= %> tags carry the '=' marker.
    assert T.tokenize('foo <%= bar %>', 1) == [ { :text, 1, "foo " }, { :expr, 1, '=', ' bar ' } ]
  end

  test "strings with more than one line" do
    # Token line numbers advance with newlines in the template.
    assert T.tokenize('foo\n<%= bar %>', 1) == [ { :text, 1, "foo\n" },{ :expr, 2, '=', ' bar ' } ]
  end

  test "strings with more than one line and expression with more than one line" do
    string = '''
foo <%= bar

baz %>
<% foo %>
'''

    assert T.tokenize(string, 1) == [
      {:text, 1, "foo "},
      {:expr, 1, '=', ' bar\n\nbaz '},
      {:text, 3, "\n"},
      {:expr, 4, [], ' foo '},
      {:text, 4, "\n"}
    ]
  end

  test "strings with embedded do end" do
    # `do` blocks open a start_expr/end_expr pair.
    assert T.tokenize('foo <% if true do %>bar<% end %>', 1) == [
      { :text, 1, "foo " },
      { :start_expr, 1, '', ' if true do ' },
      { :text, 1, "bar" },
      { :end_expr, 1, '', ' end ' }
    ]
  end

  test "strings with embedded -> end" do
    # `->` clauses inside a block tokenize as middle_expr.
    assert T.tokenize('foo <% cond do %><% false -> %>bar<% true -> %>baz<% end %>', 1) == [
      { :text, 1, "foo " },
      { :start_expr, 1, '', ' cond do ' },
      { :middle_expr, 1, '', ' false -> ' },
      { :text, 1, "bar" },
      { :middle_expr, 1, '', ' true -> ' },
      { :text, 1, "baz" },
      { :end_expr, 1, '', ' end ' }
    ]
  end

  test "strings with embedded keywords blocks" do
    # `else` also tokenizes as middle_expr.
    assert T.tokenize('foo <% if true do %>bar<% else %>baz<% end %>', 1) == [
      { :text, 1, "foo " },
      { :start_expr, 1, '', ' if true do ' },
      { :text, 1, "bar" },
      { :middle_expr, 1, '', ' else ' },
      { :text, 1, "baz" },
      { :end_expr, 1, '', ' end ' }
    ]
  end

  test "raise syntax error when there is start mark and no end mark" do
    assert_raise EEx.SyntaxError, "missing token: %>", fn ->
      T.tokenize('foo <% :bar', 1)
    end
  end
end
| 28
| 99
| 0.48824
|
93e2f703bbeaa62e5efc230b5aa88db9512899c2
| 3,062
|
exs
|
Elixir
|
test/xdr/transactions/operations/create_passive_sell_offer_test.exs
|
einerzg/stellar_base
|
2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f
|
[
"MIT"
] | 3
|
2021-08-17T20:32:45.000Z
|
2022-03-13T20:26:02.000Z
|
test/xdr/transactions/operations/create_passive_sell_offer_test.exs
|
einerzg/stellar_base
|
2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f
|
[
"MIT"
] | 45
|
2021-08-12T20:19:41.000Z
|
2022-03-27T21:00:10.000Z
|
test/xdr/transactions/operations/create_passive_sell_offer_test.exs
|
einerzg/stellar_base
|
2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f
|
[
"MIT"
] | 2
|
2021-09-22T23:11:13.000Z
|
2022-01-23T03:19:11.000Z
|
defmodule StellarBase.XDR.Operations.CreatePassiveSellOfferTest do
  # Tests XDR encoding/decoding round-trips for the CreatePassiveSellOffer
  # operation against a known-good byte fixture.
  use ExUnit.Case

  alias StellarBase.XDR.{
    AccountID,
    AlphaNum4,
    AlphaNum12,
    Asset,
    AssetCode4,
    AssetCode12,
    AssetType,
    Int32,
    Int64,
    Price,
    PublicKey,
    PublicKeyType,
    UInt256
  }

  alias StellarBase.StrKey
  alias StellarBase.XDR.Operations.CreatePassiveSellOffer

  describe "CreatePassiveSellOffer Operation" do
    setup do
      # Issuer account: decode the strkey-encoded ed25519 public key and
      # wrap it into an AccountID.
      pk_issuer =
        "GBZNLMUQMIN3VGUJISKZU7GNY3O3XLMYEHJCKCSMDHKLGSMKALRXOEZD"
        |> StrKey.decode!(:ed25519_public_key)
        |> UInt256.new()

      issuer =
        PublicKeyType.new(:PUBLIC_KEY_TYPE_ED25519)
        |> (&PublicKey.new(pk_issuer, &1)).()
        |> AccountID.new()

      # Selling asset: 4-character alphanumeric code.
      asset1 =
        "BTCN"
        |> AssetCode4.new()
        |> AlphaNum4.new(issuer)
        |> Asset.new(AssetType.new(:ASSET_TYPE_CREDIT_ALPHANUM4))

      # Buying asset: 12-character alphanumeric code.
      asset2 =
        "BTCNEW2000"
        |> AssetCode12.new()
        |> AlphaNum12.new(issuer)
        |> Asset.new(AssetType.new(:ASSET_TYPE_CREDIT_ALPHANUM12))

      amount = Int64.new(10_000_000)
      # Price expressed as the fraction 1/10.
      price = Price.new(Int32.new(1), Int32.new(10))

      passive_sell_offer = CreatePassiveSellOffer.new(asset1, asset2, amount, price)

      %{
        selling: asset1,
        buying: asset2,
        amount: amount,
        price: price,
        passive_sell_offer: passive_sell_offer,
        # Expected XDR byte encoding of the operation built above.
        binary:
          <<0, 0, 0, 1, 66, 84, 67, 78, 0, 0, 0, 0, 114, 213, 178, 144, 98, 27, 186, 154, 137, 68,
            149, 154, 124, 205, 198, 221, 187, 173, 152, 33, 210, 37, 10, 76, 25, 212, 179, 73,
            138, 2, 227, 119, 0, 0, 0, 2, 66, 84, 67, 78, 69, 87, 50, 48, 48, 48, 0, 0, 0, 0, 0,
            0, 114, 213, 178, 144, 98, 27, 186, 154, 137, 68, 149, 154, 124, 205, 198, 221, 187,
            173, 152, 33, 210, 37, 10, 76, 25, 212, 179, 73, 138, 2, 227, 119, 0, 0, 0, 0, 0, 152,
            150, 128, 0, 0, 0, 1, 0, 0, 0, 10>>
      }
    end

    test "new/1", %{selling: selling, buying: buying, amount: amount, price: price} do
      %CreatePassiveSellOffer{selling: ^selling, buying: ^buying, amount: ^amount} =
        CreatePassiveSellOffer.new(selling, buying, amount, price)
    end

    test "encode_xdr/1", %{passive_sell_offer: passive_sell_offer, binary: binary} do
      {:ok, ^binary} = CreatePassiveSellOffer.encode_xdr(passive_sell_offer)
    end

    test "encode_xdr!/1", %{passive_sell_offer: passive_sell_offer, binary: binary} do
      ^binary = CreatePassiveSellOffer.encode_xdr!(passive_sell_offer)
    end

    test "decode_xdr/2", %{passive_sell_offer: passive_sell_offer, binary: binary} do
      {:ok, {^passive_sell_offer, ""}} = CreatePassiveSellOffer.decode_xdr(binary)
    end

    test "decode_xdr/2 with an invalid binary" do
      {:error, :not_binary} = CreatePassiveSellOffer.decode_xdr(123)
    end

    # Decoding a doubled binary must consume exactly one operation and return
    # the remaining bytes untouched.
    test "decode_xdr!/2", %{passive_sell_offer: passive_sell_offer, binary: binary} do
      {^passive_sell_offer, ^binary} = CreatePassiveSellOffer.decode_xdr!(binary <> binary)
    end
  end
end
| 32.231579
| 98
| 0.627041
|
93e3055754566f1a766f9e212020ff2b4771f1c6
| 346
|
exs
|
Elixir
|
ex_cubic_ingestion/priv/repo/migrations/20220215223740_add_oban_jobs_table.exs
|
mbta/data_platform
|
3fa66cb74134b2baa5234e908e147bf393c13926
|
[
"MIT"
] | 1
|
2022-01-30T21:02:48.000Z
|
2022-01-30T21:02:48.000Z
|
ex_cubic_ingestion/priv/repo/migrations/20220215223740_add_oban_jobs_table.exs
|
mbta/data_platform
|
3fa66cb74134b2baa5234e908e147bf393c13926
|
[
"MIT"
] | 21
|
2022-01-25T16:35:50.000Z
|
2022-03-31T19:42:52.000Z
|
ex_cubic_ingestion/priv/repo/migrations/20220215223740_add_oban_jobs_table.exs
|
mbta/data_platform
|
3fa66cb74134b2baa5234e908e147bf393c13926
|
[
"MIT"
] | 1
|
2022-02-02T14:34:17.000Z
|
2022-02-02T14:34:17.000Z
|
defmodule ExCubicIngestion.Repo.Migrations.AddObanJobsTable do
  @moduledoc """
  Creates the database tables Oban needs, delegating to Oban's own
  bundled migrations.
  """
  use Ecto.Migration

  # Migrate to the newest Oban schema version shipped with the installed
  # Oban release.
  def up, do: Oban.Migrations.up()

  # Rolling back always targets version 1, so a single `down` undoes every
  # Oban migration regardless of which version `up` reached.
  def down, do: Oban.Migrations.down(version: 1)
end
| 24.714286
| 88
| 0.725434
|
93e334b641dbc196b43fce008c88a8bace9192c0
| 2,008
|
ex
|
Elixir
|
lib/zeitvergleich.ex
|
STUDITEMPS/jehovakel_ex_times
|
093d4f96ed5bda345d6731a8ce30e9ab3eeb0e7b
|
[
"MIT"
] | null | null | null |
lib/zeitvergleich.ex
|
STUDITEMPS/jehovakel_ex_times
|
093d4f96ed5bda345d6731a8ce30e9ab3eeb0e7b
|
[
"MIT"
] | 6
|
2021-03-22T03:04:58.000Z
|
2021-12-22T09:26:24.000Z
|
lib/zeitvergleich.ex
|
STUDITEMPS/jehovakel_ex_times
|
093d4f96ed5bda345d6731a8ce30e9ab3eeb0e7b
|
[
"MIT"
] | 2
|
2021-06-28T06:45:36.000Z
|
2021-07-05T09:27:42.000Z
|
defprotocol Shared.Zeitvergleich do
  @moduledoc """
  Time-comparison protocol ("Zeitvergleich") for time-like structs:
  strictly-earlier, same-instant, and earlier-or-equal checks.
  """

  # NOTE(review): @fallback_to_any is enabled, but no `Any` implementation is
  # visible in this file — confirm one exists elsewhere, otherwise calls on
  # unsupported structs raise Protocol.UndefinedError.
  @fallback_to_any true

  @doc "Returns true if `self` is strictly earlier than `other`."
  @spec frueher_als?(struct(), struct()) :: boolean()
  def frueher_als?(self, other)

  @doc "Returns true if `self` and `other` denote the same instant."
  @spec zeitgleich?(struct(), struct()) :: boolean()
  def zeitgleich?(self, other)

  @doc "Returns true if `self` is earlier than or equal to `other`."
  @spec frueher_als_oder_zeitgleich?(struct(), struct()) :: boolean()
  def frueher_als_oder_zeitgleich?(self, other)
end
defimpl Shared.Zeitvergleich, for: NaiveDateTime do
  # Consistency fix: use `@for` (which expands to NaiveDateTime inside this
  # defimpl), matching the Date implementation in this file instead of
  # repeating the module name. Behavior is unchanged:
  # NaiveDateTime.compare/2 returns :lt | :eq | :gt.

  def frueher_als?(%@for{} = self, %@for{} = other) do
    @for.compare(self, other) == :lt
  end

  def zeitgleich?(%@for{} = self, %@for{} = other) do
    @for.compare(self, other) == :eq
  end

  # Earlier-or-equal is the disjunction of the two checks above.
  def frueher_als_oder_zeitgleich?(%@for{} = self, %@for{} = other) do
    self |> frueher_als?(other) || self |> zeitgleich?(other)
  end
end
defimpl Shared.Zeitvergleich, for: DateTime do
  # Consistency fix: use `@for` (which expands to DateTime inside this
  # defimpl), matching the Date implementation in this file instead of
  # repeating the module name. Behavior is unchanged: DateTime.compare/2
  # returns :lt | :eq | :gt.

  def frueher_als?(%@for{} = self, %@for{} = other) do
    @for.compare(self, other) == :lt
  end

  def zeitgleich?(%@for{} = self, %@for{} = other) do
    @for.compare(self, other) == :eq
  end

  # Earlier-or-equal is the disjunction of the two checks above.
  def frueher_als_oder_zeitgleich?(%@for{} = self, %@for{} = other) do
    self |> frueher_als?(other) || self |> zeitgleich?(other)
  end
end
defimpl Shared.Zeitvergleich, for: Time do
  # Consistency fix: use `@for` (which expands to Time inside this defimpl),
  # matching the Date implementation in this file instead of repeating the
  # module name. Behavior is unchanged: Time.compare/2 returns :lt | :eq | :gt.

  def frueher_als?(%@for{} = self, %@for{} = other) do
    @for.compare(self, other) == :lt
  end

  def zeitgleich?(%@for{} = self, %@for{} = other) do
    @for.compare(self, other) == :eq
  end

  # Earlier-or-equal is the disjunction of the two checks above.
  def frueher_als_oder_zeitgleich?(%@for{} = self, %@for{} = other) do
    self |> frueher_als?(other) || self |> zeitgleich?(other)
  end
end
defimpl Shared.Zeitvergleich, for: Date do
  # `@for` expands to Date inside this defimpl; Date.compare/2 returns
  # :lt | :eq | :gt.
  def frueher_als?(%@for{} = self, %@for{} = other) do
    @for.compare(self, other) == :lt
  end

  def zeitgleich?(%@for{} = self, %@for{} = other) do
    @for.compare(self, other) == :eq
  end

  # Earlier-or-equal is the disjunction of the two checks above.
  def frueher_als_oder_zeitgleich?(%@for{} = self, %@for{} = other) do
    self |> frueher_als?(other) || self |> zeitgleich?(other)
  end
end
| 29.101449
| 88
| 0.667331
|
93e34e3af09b316cf6ca345d7b452d4c5e9af1c4
| 4,613
|
ex
|
Elixir
|
apps/faqcheck/lib/faqcheck/accounts/user.ex
|
csboling/faqcheck
|
bc182c365d466c8dcacc6b1a5fe9186a2c912cd4
|
[
"CC0-1.0"
] | null | null | null |
apps/faqcheck/lib/faqcheck/accounts/user.ex
|
csboling/faqcheck
|
bc182c365d466c8dcacc6b1a5fe9186a2c912cd4
|
[
"CC0-1.0"
] | 20
|
2021-09-08T04:07:31.000Z
|
2022-03-10T21:52:24.000Z
|
apps/faqcheck/lib/faqcheck/accounts/user.ex
|
csboling/faqcheck
|
bc182c365d466c8dcacc6b1a5fe9186a2c912cd4
|
[
"CC0-1.0"
] | null | null | null |
defmodule Faqcheck.Accounts.User do
  @moduledoc """
  Ecto schema and changeset functions for user accounts: registration,
  email/password changes, confirmation and password verification.
  """

  use Ecto.Schema
  @timestamps_opts [type: :utc_datetime]
  import Ecto.Changeset

  # Keep the plaintext password out of `inspect/1` output (e.g. in logs).
  @derive {Inspect, except: [:password]}
  schema "users" do
    field :email, :string
    # Virtual: held only in memory during changesets, never persisted;
    # redacted from logs and inspection.
    field :password, :string, virtual: true, redact: true
    field :hashed_password, :string, redact: true
    field :confirmed_at, :utc_datetime
    # NOTE(review): presumably PaperTrail audit-trail bookkeeping for this
    # record's first and latest versions — confirm against PaperTrail usage.
    belongs_to :first_version, PaperTrail.Version
    belongs_to :current_version, PaperTrail.Version, on_replace: :update

    timestamps()
  end

  @doc """
  A user changeset for registration.

  It is important to validate the length of both email and password.
  Otherwise databases may truncate the email without warnings, which
  could lead to unpredictable or insecure behaviour. Long passwords may
  also be very expensive to hash for certain algorithms.

  ## Options

    * `:hash_password` - Hashes the password so it can be stored securely
      in the database and ensures the password field is cleared to prevent
      leaks in the logs. If password hashing is not needed and clearing the
      password field is not desired (like when using this changeset for
      validations on a LiveView form), this option can be set to `false`.
      Defaults to `true`.
  """
  def registration_changeset(user, attrs, opts \\ []) do
    user
    |> cast(attrs, [:email, :password])
    |> validate_email()
    |> validate_password(opts)
  end

  # Requires an email shaped like "local@domain" with no whitespace, caps the
  # length at 160 chars, and checks uniqueness both optimistically
  # (unsafe_validate_unique against the repo) and via the DB constraint.
  defp validate_email(changeset) do
    changeset
    |> validate_required([:email])
    |> validate_format(:email, ~r/^[^\s]+@[^\s]+$/, message: "must have the @ sign and no spaces")
    |> validate_length(:email, max: 160)
    |> unsafe_validate_unique(:email, Faqcheck.Repo)
    |> unique_constraint(:email)
  end

  # Requires a 12-80 character password; the character-class rules below are
  # intentionally disabled. Hashing is applied last (see maybe_hash_password/2).
  defp validate_password(changeset, opts) do
    changeset
    |> validate_required([:password])
    |> validate_length(:password, min: 12, max: 80)
    # |> validate_format(:password, ~r/[a-z]/, message: "at least one lower case character")
    # |> validate_format(:password, ~r/[A-Z]/, message: "at least one upper case character")
    # |> validate_format(:password, ~r/[!?@#$%^&*_0-9]/, message: "at least one digit or punctuation character")
    |> maybe_hash_password(opts)
  end

  # Hashes only when the :hash_password option allows it, a password change
  # is present, and the changeset is valid; then drops the plaintext so it
  # can never leak into logs or the database.
  defp maybe_hash_password(changeset, opts) do
    hash_password? = Keyword.get(opts, :hash_password, true)
    password = get_change(changeset, :password)

    if hash_password? && password && changeset.valid? do
      changeset
      |> put_change(:hashed_password, Pbkdf2.hash_pwd_salt(password))
      |> delete_change(:password)
    else
      changeset
    end
  end

  @doc """
  A user changeset for changing the email.

  It requires the email to change otherwise an error is added.
  """
  def email_changeset(user, attrs) do
    user
    |> cast(attrs, [:email])
    |> validate_email()
    |> case do
      %{changes: %{email: _}} = changeset -> changeset
      %{} = changeset -> add_error(changeset, :email, "did not change")
    end
  end

  @doc """
  A user changeset for changing the password.

  ## Options

    * `:hash_password` - Hashes the password so it can be stored securely
      in the database and ensures the password field is cleared to prevent
      leaks in the logs. If password hashing is not needed and clearing the
      password field is not desired (like when using this changeset for
      validations on a LiveView form), this option can be set to `false`.
      Defaults to `true`.
  """
  def password_changeset(user, attrs, opts \\ []) do
    user
    |> cast(attrs, [:password])
    |> validate_confirmation(:password, message: "does not match password")
    |> validate_password(opts)
  end

  @doc """
  Confirms the account by setting `confirmed_at`.
  """
  def confirm_changeset(user) do
    # Truncate to seconds to satisfy the :utc_datetime field type.
    now = DateTime.utc_now() |> DateTime.truncate(:second)
    change(user, confirmed_at: now)
  end

  @doc """
  Verifies the password.

  If there is no user or the user doesn't have a password, we call
  `Pbkdf2.no_user_verify/0` to avoid timing attacks.
  """
  def valid_password?(%Faqcheck.Accounts.User{hashed_password: hashed_password}, password)
      when is_binary(hashed_password) and byte_size(password) > 0 do
    Pbkdf2.verify_pass(password, hashed_password)
  end

  # Fallback: burn comparable CPU time so missing users are not
  # distinguishable from wrong passwords by response timing.
  def valid_password?(_, _) do
    Pbkdf2.no_user_verify()
    false
  end

  @doc """
  Validates the current password otherwise adds an error to the changeset.
  """
  def validate_current_password(changeset, password) do
    if valid_password?(changeset.data, password) do
      changeset
    else
      add_error(changeset, :current_password, "is not valid")
    end
  end
end
| 31.813793
| 112
| 0.688706
|
93e3556577651e9739dd0bca0d945e9e48c02e46
| 1,072
|
ex
|
Elixir
|
apps/gitgud_web/lib/gitgud_web/views/layout_view.ex
|
EdmondFrank/gitgud
|
1952c16130564357aa6f23e35f48f19e3a50d4dd
|
[
"MIT"
] | 449
|
2018-03-06T01:05:55.000Z
|
2022-03-23T21:03:56.000Z
|
apps/gitgud_web/lib/gitgud_web/views/layout_view.ex
|
EdmondFrank/gitgud
|
1952c16130564357aa6f23e35f48f19e3a50d4dd
|
[
"MIT"
] | 69
|
2018-03-06T09:26:41.000Z
|
2022-03-21T22:43:09.000Z
|
apps/gitgud_web/lib/gitgud_web/views/layout_view.ex
|
EdmondFrank/gitgud
|
1952c16130564357aa6f23e35f48f19e3a50d4dd
|
[
"MIT"
] | 41
|
2018-03-06T01:06:07.000Z
|
2021-11-21T17:55:04.000Z
|
defmodule GitGud.Web.LayoutView do
  @moduledoc false
  use GitGud.Web, :view

  # Renders `layout` with the given block's content injected as
  # `:inner_content`, mimicking Phoenix's nested-layout mechanism.
  @spec render_layout({atom(), binary() | atom()}, map, keyword) :: binary
  def render_layout(layout, assigns, do: content) do
    render(layout, Map.put(assigns, :inner_content, content))
  end

  # Builds session-related URL params: no redirect target on the landing,
  # session (login) or user-registration pages, otherwise remember the
  # current path so the user returns here after authenticating.
  @spec session_params(Plug.Conn.t) :: keyword
  def session_params(conn) do
    cond do
      current_route?(conn, :landing_page) -> []
      current_route?(conn, :session) -> []
      current_route?(conn, :user, :new) -> []
      true -> [redirect_to: conn.request_path]
    end
  end

  # Page title resolution order: explicit :page_title assign, then the view
  # module's title/2 callback, then `default`.
  @spec title(Plug.Conn.t, binary) :: binary
  def title(conn, default \\ ""), do: conn.assigns[:page_title] || view_title(conn) || default

  #
  # Helpers
  #

  defp view_title(conn) do
    try do
      case view_module(conn) do
        # `GitGud.Web.ErrorView = view` matches when the view module *is*
        # ErrorView and binds it; error pages are titled by status reason.
        GitGud.Web.ErrorView = view ->
          apply(view, :title, [Plug.Conn.Status.reason_atom(conn.status), conn.assigns])
        view ->
          apply(view, :title, [action_name(conn), conn.assigns])
      end
    rescue
      # Best-effort: views without a title/2 callback (or any other failure)
      # simply yield no title, letting title/2 fall back to its default.
      _error ->
        nil
    end
  end
end
| 26.146341
| 94
| 0.625933
|
93e3598be24ea4ea60190323ba26eddb92f250d1
| 2,656
|
exs
|
Elixir
|
mix.exs
|
mathiaHT/ex_step_flow
|
6496e9511239de64f00119428476338dfcde9dea
|
[
"MIT"
] | null | null | null |
mix.exs
|
mathiaHT/ex_step_flow
|
6496e9511239de64f00119428476338dfcde9dea
|
[
"MIT"
] | null | null | null |
mix.exs
|
mathiaHT/ex_step_flow
|
6496e9511239de64f00119428476338dfcde9dea
|
[
"MIT"
] | null | null | null |
defmodule StepFlow.MixProject do
  # Mix project definition for the step_flow Hex package.
  use Mix.Project

  @source_url "https://github.com/media-io/ex_step_flow"

  def project do
    [
      app: :step_flow,
      version: "0.2.9",
      elixir: "~> 1.9",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      # Run all coveralls tasks in the :test environment.
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      package: package(),
      description: description(),
      elixirc_paths: elixirc_paths(Mix.env()),
      aliases: aliases(),

      # Docs
      name: "StepFlow",
      homepage_url: @source_url,
      docs: [
        main: "readme",
        extras: ["README.md"],
        source_url: @source_url
      ]
    ]
  end

  def application do
    [
      mod: {StepFlow.Application, []},
      extra_applications: [
        :amqp,
        :blue_bird,
        :httpoison,
        :jason,
        :logger,
        :phoenix,
        :plug,
        :postgrex,
        :slack,
        :timex
      ]
    ]
  end

  # Compile test support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp deps do
    [
      {:amqp, "~> 1.6"},
      {:blue_bird, "~> 0.4.1"},
      {:cowboy, "~> 2.8.0"},
      {:credo, "~> 1.3", only: [:dev, :test], runtime: false},
      {:ecto, "~> 3.5.5"},
      {:ecto_sql, "~> 3.5.3"},
      {:ecto_enum, "~> 1.4"},
      {:excoveralls, "~> 0.13", only: :test},
      {:ex_doc, "~> 0.23", only: :dev, runtime: false},
      {:json_xema, "~> 0.6"},
      {:fake_server, "~> 2.1", only: :test},
      {:gettext, "~> 0.18"},
      {:httpoison, "~> 1.6"},
      {:jason, "~> 1.1"},
      {:phoenix, "~> 1.5.7"},
      {:phoenix_html, "~> 2.10"},
      {:plug, "~> 1.11"},
      {:postgrex, "~> 0.15.0"},
      {:prometheus_ex, "~> 3.0"},
      {:prometheus_plugs, "~> 1.1"},
      {:slack, "~> 0.23.5"},
      {:timex, "~> 3.2"},
      {:xema, "0.13.6"}
    ]
  end

  defp description() do
    "Step flow manager for Elixir applications"
  end

  # Hex package metadata.
  defp package() do
    [
      name: "step_flow",
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: [
        "Valentin Noël",
        "Marc-Antoine Arnaud"
      ],
      licenses: ["MIT"],
      links: %{"GitHub" => @source_url}
    ]
  end

  # `mix checks` bundles tests, format verification and strict credo.
  defp aliases do
    [
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "test"],
      checks: [
        "ecto.create --quiet",
        "test",
        "format --check-formatted",
        "credo --strict"
      ]
    ]
  end
end
| 22.896552
| 62
| 0.480422
|
93e36949f0ba38f28ad05f774e3cdd68d617635b
| 908
|
ex
|
Elixir
|
kv_umbrella/apps/kv/lib/kv/supervisor.ex
|
seansu4you87/hello-elixir
|
1fc017b96ebadb3eb5f21874f2ba32bd4b8602ab
|
[
"MIT"
] | null | null | null |
kv_umbrella/apps/kv/lib/kv/supervisor.ex
|
seansu4you87/hello-elixir
|
1fc017b96ebadb3eb5f21874f2ba32bd4b8602ab
|
[
"MIT"
] | null | null | null |
kv_umbrella/apps/kv/lib/kv/supervisor.ex
|
seansu4you87/hello-elixir
|
1fc017b96ebadb3eb5f21874f2ba32bd4b8602ab
|
[
"MIT"
] | null | null | null |
defmodule KV.Supervisor do
  # Top-level supervisor: owns the event manager, the bucket supervisor and
  # the registry, plus the shared ETS cache table.
  use Supervisor

  def start_link do
    Supervisor.start_link(__MODULE__, :ok)
  end

  @manager_name KV.EventManager
  # NOTE(review): both attributes are KV.Registry — presumably intentional so
  # the ETS table carries the same name as the registry process; confirm.
  @registry_name KV.Registry
  @ets_registry_name KV.Registry
  @bucket_sup_name KV.Bucket.Supervisor

  def init(:ok) do
    # Public named ETS table created by the supervisor so it survives
    # registry restarts; read_concurrency speeds up concurrent lookups.
    ets = :ets.new(@ets_registry_name,
      [:set, :public, :named_table, {:read_concurrency, true}])

    # `worker` takes a process module, and an array of arguments to pass into
    # `start_link`. In this case we start up the event manager, and then take
    # the event manager and pass it into the Registry
    children = [
      worker(GenEvent, [[name: @manager_name]]),
      worker(KV.Bucket.Supervisor, [[name: @bucket_sup_name]]),
      worker(KV.Registry, [ets, @manager_name,
        @bucket_sup_name, [name: @registry_name]])
    ]

    supervise(children, strategy: :one_for_one)
  end
end
| 31.310345
| 78
| 0.669604
|
93e374527ba2104890fd6a816cdc477edafb1111
| 3,837
|
ex
|
Elixir
|
clients/you_tube/lib/google_api/you_tube/v3/model/channel_settings.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/you_tube/lib/google_api/you_tube/v3/model/channel_settings.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/you_tube/lib/google_api/you_tube/v3/model/channel_settings.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.ChannelSettings do
  @moduledoc """
  Branding properties for the channel view.

  ## Attributes

  *   `country` (*type:* `String.t`, *default:* `nil`) - The country of the channel.
  *   `defaultLanguage` (*type:* `String.t`, *default:* `nil`) -
  *   `defaultTab` (*type:* `String.t`, *default:* `nil`) - Which content tab users should see when viewing the channel.
  *   `description` (*type:* `String.t`, *default:* `nil`) - Specifies the channel description.
  *   `featuredChannelsTitle` (*type:* `String.t`, *default:* `nil`) - Title for the featured channels tab.
  *   `featuredChannelsUrls` (*type:* `list(String.t)`, *default:* `nil`) - The list of featured channels.
  *   `keywords` (*type:* `String.t`, *default:* `nil`) - Lists keywords associated with the channel, comma-separated.
  *   `moderateComments` (*type:* `boolean()`, *default:* `nil`) - Whether user-submitted comments left on the channel page need to be approved by the channel owner to be publicly visible.
  *   `profileColor` (*type:* `String.t`, *default:* `nil`) - A prominent color that can be rendered on this channel page.
  *   `showBrowseView` (*type:* `boolean()`, *default:* `nil`) - Whether the tab to browse the videos should be displayed.
  *   `showRelatedChannels` (*type:* `boolean()`, *default:* `nil`) - Whether related channels should be proposed.
  *   `title` (*type:* `String.t`, *default:* `nil`) - Specifies the channel title.
  *   `trackingAnalyticsAccountId` (*type:* `String.t`, *default:* `nil`) - The ID for a Google Analytics account to track and measure traffic to the channels.
  *   `unsubscribedTrailer` (*type:* `String.t`, *default:* `nil`) - The trailer of the channel, for users that are not subscribers.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :country => String.t(),
          :defaultLanguage => String.t(),
          :defaultTab => String.t(),
          :description => String.t(),
          :featuredChannelsTitle => String.t(),
          :featuredChannelsUrls => list(String.t()),
          :keywords => String.t(),
          :moderateComments => boolean(),
          :profileColor => String.t(),
          :showBrowseView => boolean(),
          :showRelatedChannels => boolean(),
          :title => String.t(),
          :trackingAnalyticsAccountId => String.t(),
          :unsubscribedTrailer => String.t()
        }

  # `field/1,2` is a GoogleApi.Gax.ModelBase macro registering each JSON
  # attribute for Poison encoding/decoding.
  field(:country)
  field(:defaultLanguage)
  field(:defaultTab)
  field(:description)
  field(:featuredChannelsTitle)
  field(:featuredChannelsUrls, type: :list)
  field(:keywords)
  field(:moderateComments)
  field(:profileColor)
  field(:showBrowseView)
  field(:showRelatedChannels)
  field(:title)
  field(:trackingAnalyticsAccountId)
  field(:unsubscribedTrailer)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ChannelSettings do
  # Delegate JSON decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.YouTube.V3.Model.ChannelSettings.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.ChannelSettings do
  # Delegate JSON encoding to the shared ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 44.616279
| 188
| 0.682043
|
93e38bb71dcfddd97272fba7bbc70c0e0e5a37aa
| 1,797
|
ex
|
Elixir
|
clients/health_care/lib/google_api/health_care/v1beta1/model/parser_config.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/health_care/lib/google_api/health_care/v1beta1/model/parser_config.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/health_care/lib/google_api/health_care/v1beta1/model/parser_config.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.ParserConfig do
  @moduledoc """
  The configuration for the parser. It determines how the server parses the
  messages.

  ## Attributes

  *   `allowNullHeader` (*type:* `boolean()`, *default:* `nil`) - Determines whether messages with no header are allowed.
  *   `segmentTerminator` (*type:* `String.t`, *default:* `nil`) - Byte(s) to use as the segment terminator. If this is unset, '\\r' is
      used as segment terminator, matching the HL7 version 2
      specification.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :allowNullHeader => boolean(),
          :segmentTerminator => String.t()
        }

  # `field/1` is a GoogleApi.Gax.ModelBase macro registering each JSON
  # attribute for Poison encoding/decoding.
  field(:allowNullHeader)
  field(:segmentTerminator)
end
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.ParserConfig do
  # Delegate JSON decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.HealthCare.V1beta1.Model.ParserConfig.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.ParserConfig do
  # Delegate JSON encoding to the shared ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 33.90566
| 135
| 0.731219
|
93e3a13c195811c7060b1ff44028067d311001c7
| 1,748
|
ex
|
Elixir
|
lib/glimesh_web/live/admin/category_live/form_component.ex
|
itsUnsmart/glimesh.tv
|
22c532184bb5046f6c6d8232e8bd66ba534c01c1
|
[
"MIT"
] | 1
|
2020-08-02T00:12:28.000Z
|
2020-08-02T00:12:28.000Z
|
lib/glimesh_web/live/admin/category_live/form_component.ex
|
itsUnsmart/glimesh.tv
|
22c532184bb5046f6c6d8232e8bd66ba534c01c1
|
[
"MIT"
] | null | null | null |
lib/glimesh_web/live/admin/category_live/form_component.ex
|
itsUnsmart/glimesh.tv
|
22c532184bb5046f6c6d8232e8bd66ba534c01c1
|
[
"MIT"
] | null | null | null |
defmodule GlimeshWeb.Admin.CategoryLive.FormComponent do
  # LiveComponent backing the admin category create/edit form.
  use GlimeshWeb, :live_component

  alias Glimesh.Streams

  # Seeds the component with a changeset for `category` and the list of
  # parent categories as {name, id} options for a select input.
  @impl true
  def update(%{category: category} = assigns, socket) do
    changeset = Streams.change_category(category)

    {:ok,
     socket
     |> assign(assigns)
     |> assign(
       :existing_categories,
       Enum.map(Streams.list_parent_categories(), &{&1.name, &1.id})
     )
     |> assign(:changeset, changeset)}
  end

  # Re-validates on every form change; :validate action surfaces errors
  # without persisting anything.
  @impl true
  def handle_event("validate", %{"category" => category_params}, socket) do
    changeset =
      socket.assigns.category
      |> Streams.change_category(category_params)
      |> Map.put(:action, :validate)

    {:noreply, assign(socket, :changeset, changeset)}
  end

  # Dispatches to the :edit or :new save path depending on the live action.
  def handle_event("save", %{"category" => category_params}, socket) do
    save_category(socket, socket.assigns.action, category_params)
  end

  defp save_category(socket, :edit, category_params) do
    case Streams.update_category(socket.assigns.category, category_params) do
      {:ok, _category} ->
        {:noreply,
         socket
         |> put_flash(:info, gettext("Category updated successfully"))
         |> push_redirect(to: socket.assigns.return_to)}

      {:error, %Ecto.Changeset{} = changeset} ->
        # Keep the form open and show validation errors.
        {:noreply, assign(socket, :changeset, changeset)}
    end
  end

  defp save_category(socket, :new, category_params) do
    case Streams.create_category(category_params) do
      {:ok, _category} ->
        {:noreply,
         socket
         |> put_flash(:info, gettext("Category created successfully"))
         |> push_redirect(to: socket.assigns.return_to)}

      {:error, %Ecto.Changeset{} = changeset} ->
        {:noreply, assign(socket, changeset: changeset)}
    end
  end
end
| 29.133333
| 77
| 0.657323
|
93e3a81db391fcd908d64860a61bd9fad4dceb6a
| 1,244
|
ex
|
Elixir
|
lib/teslamate/addresses/addresses.ex
|
mvaal/teslamate
|
057e15d69ca1ddc19366428ad6b47b8101277472
|
[
"MIT"
] | null | null | null |
lib/teslamate/addresses/addresses.ex
|
mvaal/teslamate
|
057e15d69ca1ddc19366428ad6b47b8101277472
|
[
"MIT"
] | null | null | null |
lib/teslamate/addresses/addresses.ex
|
mvaal/teslamate
|
057e15d69ca1ddc19366428ad6b47b8101277472
|
[
"MIT"
] | null | null | null |
defmodule TeslaMate.Addresses do
  @moduledoc """
  The Addresses context.
  """

  import Ecto.Query, warn: false

  alias TeslaMate.Addresses.{Address, Geocoder}
  alias TeslaMate.Repo

  def list_addresses do
    Repo.all(Address)
  end

  # Raises Ecto.NoResultsError if no address has the given id.
  def get_address!(id) do
    Repo.get!(Address, id)
  end

  def create_address(attrs \\ %{}) do
    %Address{}
    |> Address.changeset(attrs)
    |> Repo.insert()
  end

  # Idempotent create keyed on the geocoder's :place_id — returns the
  # existing row when one matches, otherwise inserts a new address.
  def create_address_if_not_exists(%{place_id: place_id} = attrs) do
    case Repo.get_by(Address, place_id: place_id) do
      %Address{} = address -> {:ok, address}
      nil -> create_address(attrs)
    end
  end

  def update_address(%Address{} = address, attrs) do
    address
    |> Address.changeset(attrs)
    |> Repo.update()
  end

  def delete_address(%Address{} = address) do
    Repo.delete(address)
  end

  # Empty changeset for building forms.
  def change_address(%Address{} = address) do
    Address.changeset(address, %{})
  end

  # Test seam: substitute GeocoderMock under the :test env. NOTE(review):
  # Mix.env() is resolved at compile time here (Mix is unavailable in
  # releases) — presumably intentional; confirm the release build compiles
  # with the real Geocoder.
  @geocoder (case Mix.env() do
               :test -> GeocoderMock
               _____ -> Geocoder
             end)

  # Reverse-geocodes the coordinates and upserts the resulting address;
  # geocoder errors fall through unchanged from the `with`.
  def find_address(%{latitude: latitude, longitude: longitude}) do
    with {:ok, attrs} <- @geocoder.reverse_lookup(latitude, longitude) do
      create_address_if_not_exists(attrs)
    end
  end
end
| 21.824561
| 73
| 0.648714
|
93e3a9b7a90b2954a60fb64346a7be40d3ff391e
| 2,366
|
ex
|
Elixir
|
clients/network_management/lib/google_api/network_management/v1beta1/model/location.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/network_management/lib/google_api/network_management/v1beta1/model/location.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/network_management/lib/google_api/network_management/v1beta1/model/location.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.NetworkManagement.V1beta1.Model.Location do
  @moduledoc """
  A resource that represents Google Cloud Platform location.

  ## Attributes

  *   `displayName` (*type:* `String.t`, *default:* `nil`) - The friendly name for this location, typically a nearby city name.
      For example, "Tokyo".
  *   `labels` (*type:* `map()`, *default:* `nil`) - Cross-service attributes for the location. For example
      {"cloud.googleapis.com/region": "us-east1"}
  *   `locationId` (*type:* `String.t`, *default:* `nil`) - The canonical id for this location. For example: `"us-east1"`.
  *   `metadata` (*type:* `map()`, *default:* `nil`) - Service-specific metadata. For example the available capacity at the given
      location.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Resource name for the location, which may vary between implementations.
      For example: `"projects/example-project/locations/us-east1"`
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :displayName => String.t(),
          :labels => map(),
          :locationId => String.t(),
          :metadata => map(),
          :name => String.t()
        }

  # `field/1,2` is a GoogleApi.Gax.ModelBase macro registering each JSON
  # attribute for Poison encoding/decoding.
  field(:displayName)
  field(:labels, type: :map)
  field(:locationId)
  field(:metadata, type: :map)
  field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.NetworkManagement.V1beta1.Model.Location do
  # Delegate JSON decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.NetworkManagement.V1beta1.Model.Location.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.NetworkManagement.V1beta1.Model.Location do
  # Delegate JSON encoding to the shared ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 36.96875
| 129
| 0.694421
|
93e3af42c465a5a29de6f61154e743bac8eef548
| 8,993
|
exs
|
Elixir
|
test/phoenix/router/routing_test.exs
|
matthewp/phoenix
|
af86b95db365faada58d326e98de65c811690bc7
|
[
"MIT"
] | 7
|
2021-01-31T04:51:08.000Z
|
2022-01-09T06:59:28.000Z
|
test/phoenix/router/routing_test.exs
|
matthewp/phoenix
|
af86b95db365faada58d326e98de65c811690bc7
|
[
"MIT"
] | 2
|
2022-02-19T07:43:27.000Z
|
2022-02-27T16:25:02.000Z
|
test/phoenix/router/routing_test.exs
|
matthewp/phoenix
|
af86b95db365faada58d326e98de65c811690bc7
|
[
"MIT"
] | 2
|
2021-02-06T08:40:23.000Z
|
2021-03-20T16:35:47.000Z
|
defmodule Phoenix.Router.RoutingTest do
use ExUnit.Case, async: true
use RouterHelper
import ExUnit.CaptureLog
defmodule SomePlug do
  # Minimal pass-through plug used by the router tests: returns its options
  # and the connection unchanged.
  def init(options), do: options

  def call(conn, _options), do: conn
end
defmodule UserController do
  # Stub controller whose actions echo fixed strings so routing tests can
  # assert which route matched.
  use Phoenix.Controller

  def index(conn, _params), do: text(conn, "users index")
  def show(conn, _params), do: text(conn, "users show")
  def top(conn, _params), do: text(conn, "users top")
  def options(conn, _params), do: text(conn, "users options")
  def connect(conn, _params), do: text(conn, "users connect")
  def trace(conn, _params), do: text(conn, "users trace")
  # Catch-all route target: responds 404.
  def not_found(conn, _params), do: text(put_status(conn, :not_found), "not found")
  # Echoes the glob "path" param when present, otherwise a fixed string.
  def image(conn, _params), do: text(conn, conn.params["path"] || "show files")
  def move(conn, _params), do: text(conn, "users move")
  def any(conn, _params), do: text(conn, "users any")
end
defmodule Router do
  # Exercises the full routing surface: literals, named params, spaces,
  # prefixed params, globs, custom verbs and catch-alls.
  use Phoenix.Router

  get "/", UserController, :index, as: :users
  get "/users/top", UserController, :top, as: :top
  get "/users/:id", UserController, :show, as: :users, metadata: %{access: :user}
  get "/spaced users/:id", UserController, :show
  get "/profiles/profile-:id", UserController, :show
  # Intentionally routes to an action UserController does not define, so
  # tests can observe the failure behavior.
  get "/route_that_crashes", UserController, :crash
  get "/files/:user_name/*path", UserController, :image
  get "/backups/*path", UserController, :image
  get "/static/images/icons/*image", UserController, :image

  trace("/trace", UserController, :trace)
  options "/options", UserController, :options
  connect "/connect", UserController, :connect
  # Custom HTTP verb and any-verb matching.
  match :move, "/move", UserController, :move
  match :*, "/any", UserController, :any

  scope log: :info do
    pipe_through :noop
    get "/plug", SomePlug, []
  end

  get "/no_log", SomePlug, [], log: false

  get "/users/:user_id/files/:id", UserController, :image

  # Catch-all must come last; everything unmatched returns 404.
  get "/*path", UserController, :not_found

  # No-op pipeline plug for the scope above.
  defp noop(conn, _), do: conn
end
setup do
Logger.disable(self())
:ok
end
test "get root path" do
conn = call(Router, :get, "/")
assert conn.status == 200
assert conn.resp_body == "users index"
end
test "get to named param with dashes" do
conn = call(Router, :get, "users/75f6306d-a090-46f9-8b80-80fd57ec9a41")
assert conn.status == 200
assert conn.resp_body == "users show"
assert conn.params["id"] == "75f6306d-a090-46f9-8b80-80fd57ec9a41"
assert conn.path_params["id"] == "75f6306d-a090-46f9-8b80-80fd57ec9a41"
conn = call(Router, :get, "users/75f6306d-a0/files/34-95")
assert conn.status == 200
assert conn.resp_body == "show files"
assert conn.params["user_id"] == "75f6306d-a0"
assert conn.path_params["user_id"] == "75f6306d-a0"
assert conn.params["id"] == "34-95"
assert conn.path_params["id"] == "34-95"
end
test "get with named param" do
conn = call(Router, :get, "users/1")
assert conn.status == 200
assert conn.resp_body == "users show"
assert conn.params["id"] == "1"
assert conn.path_params["id"] == "1"
end
test "parameters are url decoded" do
conn = call(Router, :get, "/users/hello%20matey")
assert conn.params == %{"id" => "hello matey"}
conn = call(Router, :get, "/spaced%20users/hello%20matey")
assert conn.params == %{"id" => "hello matey"}
conn = call(Router, :get, "/spaced users/hello matey")
assert conn.params == %{"id" => "hello matey"}
conn = call(Router, :get, "/users/a%20b")
assert conn.params == %{"id" => "a b"}
conn = call(Router, :get, "/backups/a%20b/c%20d")
assert conn.params == %{"path" => ["a b", "c d"]}
end
test "get to custom action" do
conn = call(Router, :get, "users/top")
assert conn.status == 200
assert conn.resp_body == "users top"
end
test "options to custom action" do
conn = call(Router, :options, "/options")
assert conn.status == 200
assert conn.resp_body == "users options"
end
test "connect to custom action" do
conn = call(Router, :connect, "/connect")
assert conn.status == 200
assert conn.resp_body == "users connect"
end
test "trace to custom action" do
conn = call(Router, :trace, "/trace")
assert conn.status == 200
assert conn.resp_body == "users trace"
end
test "splat arg with preceding named parameter to files/:user_name/*path" do
conn = call(Router, :get, "files/elixir/Users/home/file.txt")
assert conn.status == 200
assert conn.params["user_name"] == "elixir"
assert conn.params["path"] == ["Users", "home", "file.txt"]
end
test "splat arg with preceding string to backups/*path" do
conn = call(Router, :get, "backups/name")
assert conn.status == 200
assert conn.params["path"] == ["name"]
end
test "splat arg with multiple preceding strings to static/images/icons/*path" do
conn = call(Router, :get, "static/images/icons/elixir/logos/main.png")
assert conn.status == 200
assert conn.params["image"] == ["elixir", "logos", "main.png"]
end
test "splat args are %encodings in path" do
conn = call(Router, :get, "backups/silly%20name")
assert conn.status == 200
assert conn.params["path"] == ["silly name"]
end
test "catch-all splat route matches" do
conn = call(Router, :get, "foo/bar/baz")
assert conn.status == 404
assert conn.params == %{"path" => ~w"foo bar baz"}
assert conn.resp_body == "not found"
end
test "match on arbitrary http methods" do
conn = call(Router, :move, "/move")
assert conn.method == "MOVE"
assert conn.status == 200
assert conn.resp_body == "users move"
end
test "any verb matches" do
conn = call(Router, :get, "/any")
assert conn.method == "GET"
assert conn.status == 200
assert conn.resp_body == "users any"
conn = call(Router, :put, "/any")
assert conn.method == "PUT"
assert conn.status == 200
assert conn.resp_body == "users any"
end
test "raises on malformed URIs" do
assert_raise Phoenix.Router.MalformedURIError, fn ->
call(Router, :get, "/<% foo %>")
end
end
describe "logging" do
setup do
Logger.enable(self())
:ok
end
test "logs controller and action with (path) parameters" do
assert capture_log(fn -> call(Router, :get, "/users/1", foo: "bar") end) =~ """
[debug] Processing with Phoenix.Router.RoutingTest.UserController.show/2
Parameters: %{"foo" => "bar", "id" => "1"}
Pipelines: []
"""
end
test "logs controller and action with filtered parameters" do
assert capture_log(fn -> call(Router, :get, "/users/1", password: "bar") end) =~ """
[debug] Processing with Phoenix.Router.RoutingTest.UserController.show/2
Parameters: %{"id" => "1", "password" => "[FILTERED]"}
Pipelines: []
"""
end
test "logs plug with pipeline and custom level" do
assert capture_log(fn -> call(Router, :get, "/plug") end) =~ """
[info] Processing with Phoenix.Router.RoutingTest.SomePlug
Parameters: %{}
Pipelines: [:noop]
"""
end
test "does not log when log is set to false" do
refute capture_log(fn -> call(Router, :get, "/no_log", foo: "bar") end) =~
"Processing with Phoenix.Router.RoutingTest.SomePlug"
end
end
test "route_info returns route string, path params, and more" do
assert Phoenix.Router.route_info(Router, "GET", "foo/bar/baz", nil) == %{
log: :debug,
path_params: %{"path" => ["foo", "bar", "baz"]},
pipe_through: [],
plug: Phoenix.Router.RoutingTest.UserController,
plug_opts: :not_found,
route: "/*path"
}
assert Phoenix.Router.route_info(Router, "GET", "users/1", nil) == %{
log: :debug,
path_params: %{"id" => "1"},
pipe_through: [],
plug: Phoenix.Router.RoutingTest.UserController,
plug_opts: :show,
route: "/users/:id",
access: :user
}
assert Phoenix.Router.route_info(Router, "GET", "/", "host") == %{
log: :debug,
path_params: %{},
pipe_through: [],
plug: Phoenix.Router.RoutingTest.UserController,
plug_opts: :index,
route: "/",
}
assert Phoenix.Router.route_info(Router, "POST", "/not-exists", "host") == :error
end
test "route_info returns route string, path params and more for split path" do
assert Phoenix.Router.route_info(Router, "GET", ~w(foo bar baz), nil) == %{
log: :debug,
path_params: %{"path" => ["foo", "bar", "baz"]},
pipe_through: [],
plug: Phoenix.Router.RoutingTest.UserController,
plug_opts: :not_found,
route: "/*path"
}
end
end
| 33.431227
| 90
| 0.606138
|
93e3e083c70b561a569e89f24166cd3c3254f050
| 1,686
|
exs
|
Elixir
|
apps/day03/test/day03_test.exs
|
jwarwick/aoc_2019
|
04229b86829b72323498b57a6649fcc6f7c96406
|
[
"MIT"
] | 2
|
2019-12-21T21:21:04.000Z
|
2019-12-27T07:00:19.000Z
|
apps/day03/test/day03_test.exs
|
jwarwick/aoc_2019
|
04229b86829b72323498b57a6649fcc6f7c96406
|
[
"MIT"
] | null | null | null |
apps/day03/test/day03_test.exs
|
jwarwick/aoc_2019
|
04229b86829b72323498b57a6649fcc6f7c96406
|
[
"MIT"
] | null | null | null |
defmodule Day03Test do
  # Tests for Advent of Code 2019 Day 3 ("Crossed Wires"): parsing the two
  # wire descriptions, expanding moves into grid points, and the two
  # intersection metrics (Manhattan distance, combined path length).
  use ExUnit.Case
  doctest Day03

  test "parse input" do
    input = "R8,U5,L5,D3\nU7,R6,D4,L4\n"
    # One {direction, distance} tuple list per wire.
    expected = {[{"R",8}, {"U",5}, {"L",5}, {"D", 3}],
      [{"U",7}, {"R",6}, {"D",4}, {"L",4}]}
    assert Day03.parse_input(input) == expected
  end

  test "wire points" do
    # Each move expands to every {x, y} cell it passes through, origin excluded.
    assert Day03.wire_points([{"R",2}]) == [{1, 0}, {2, 0}]
    assert Day03.wire_points([{"L",2}]) == [{-1, 0}, {-2, 0}]
    assert Day03.wire_points([{"U",2}]) == [{0, 1}, {0, 2}]
    assert Day03.wire_points([{"D",2}]) == [{0, -1}, {0, -2}]
  end

  test "closest intersection" do
    # Part 1 metric: Manhattan distance from origin to nearest crossing.
    # Example cases and expected values come from the puzzle statement.
    input = "R8,U5,L5,D3\nU7,R6,D4,L4\n"
    result = Day03.parse_input(input)
    assert Day03.intersection_distance(result) == 6
    input = "R75,D30,R83,U83,L12,D49,R71,U7,L72\nU62,R66,U55,R34,D71,R55,D58,R83"
    result = Day03.parse_input(input)
    assert Day03.intersection_distance(result) == 159
    input = "R98,U47,R26,D63,R33,U87,L62,D20,R33,U53,R51\nU98,R91,D20,R16,D67,R40,U7,R15,U6,R7"
    result = Day03.parse_input(input)
    assert Day03.intersection_distance(result) == 135
  end

  test "part1" do
    # Pinned answer for this repo's puzzle input (read inside Day03.part1/0).
    assert Day03.part1() == 1519
  end

  test "shortest intersection" do
    # Part 2 metric: fewest combined steps along both wires to a crossing.
    input = "R8,U5,L5,D3\nU7,R6,D4,L4\n"
    result = Day03.parse_input(input)
    assert Day03.intersection_shortest(result) == 30
    input = "R75,D30,R83,U83,L12,D49,R71,U7,L72\nU62,R66,U55,R34,D71,R55,D58,R83"
    result = Day03.parse_input(input)
    assert Day03.intersection_shortest(result) == 610
    input = "R98,U47,R26,D63,R33,U87,L62,D20,R33,U53,R51\nU98,R91,D20,R16,D67,R40,U7,R15,U6,R7"
    result = Day03.parse_input(input)
    assert Day03.intersection_shortest(result) == 410
  end
end
| 32.423077
| 95
| 0.632266
|
93e403ff6adf8b2e2731950cbac807865c57dba3
| 1,288
|
exs
|
Elixir
|
priv/repo/seeds/eyecatch/0013_files.exs
|
ramkrishna70/opencov
|
7a3415f8eebb797ad1f7b6c832daa4f04d70af8d
|
[
"MIT"
] | 189
|
2018-09-25T09:02:41.000Z
|
2022-03-09T13:52:06.000Z
|
priv/repo/seeds/eyecatch/0013_files.exs
|
ramkrishna70/opencov
|
7a3415f8eebb797ad1f7b6c832daa4f04d70af8d
|
[
"MIT"
] | 29
|
2018-09-26T05:51:18.000Z
|
2021-11-05T08:55:03.000Z
|
priv/repo/seeds/eyecatch/0013_files.exs
|
ramkrishna70/opencov
|
7a3415f8eebb797ad1f7b6c832daa4f04d70af8d
|
[
"MIT"
] | 32
|
2018-10-21T12:28:11.000Z
|
2022-03-28T02:20:19.000Z
|
# Seed script: inserts an Opencov.File record (id 1, attached to job 1) with
# embedded source code and per-line coverage, used for demo/eyecatch data.
import Seedex

# The seeded file's source is embedded verbatim as a heredoc. It must stay
# byte-identical: coverage_lines below maps positionally onto its lines
# (nil = not executable, integer = hit count).
source = """
defmodule Opencov.Plug.ForcePasswordInitialize do
  import Opencov.Helpers.Authentication
  import Plug.Conn, only: [halt: 1]
  import Phoenix.Controller, only: [redirect: 2]

  def init(opts) do
    opts
  end

  def call(conn, _opts) do
    if user_signed_in?(conn) do
      check_password_state(conn)
    else
      conn
    end
  end

  defp check_password_state(conn) do
    user = current_user(conn)
    if user.password_initialized or allowed_path?(conn) do
      conn
    else
      redirect(conn, to: Opencov.Router.Helpers.profile_path(conn, :edit_password)) |> halt
    end
  end

  defp allowed_path?(conn) do
    conn.request_path in [
      Opencov.Router.Helpers.profile_path(conn, :edit_password),
      Opencov.Router.Helpers.profile_path(conn, :update_password),
      Opencov.Router.Helpers.auth_path(conn, :logout)
    ]
  end
end
"""

# Build the record attribute-by-attribute; Seedex upserts by :id.
seed Opencov.File, fn file ->
  file
  |> Map.put(:id, 1)
  |> Map.put(:name, "lib/opencov/plug/force_password_initialize.ex")
  |> Map.put(:job_id, 1)
  |> Map.put(:coverage_lines, [nil,nil,nil,nil,nil,nil,0,nil,nil,nil,38,19,nil,19,nil,nil,nil,nil,19,19,19,nil,0,
      nil,nil,nil,nil,0,nil,nil,nil,nil,nil,nil])
  |> Map.put(:source, source)
  # Pre-computed percentage for the lines above (hit / executable).
  |> Map.put(:coverage, 66.7)
end
| 25.76
| 113
| 0.669255
|
93e422e880b7e8766412cd847c7dc3178a55b5f6
| 1,852
|
ex
|
Elixir
|
client_query/lib/client_query_web/telemetry.ex
|
gumberss/Ticketing-deno
|
959b1b0763d246e05fd261aa5ceb94894dc5fe7b
|
[
"MIT"
] | 1
|
2020-09-08T00:06:33.000Z
|
2020-09-08T00:06:33.000Z
|
client_query/lib/client_query_web/telemetry.ex
|
gumberss/Ticketing-deno
|
959b1b0763d246e05fd261aa5ceb94894dc5fe7b
|
[
"MIT"
] | null | null | null |
client_query/lib/client_query_web/telemetry.ex
|
gumberss/Ticketing-deno
|
959b1b0763d246e05fd261aa5ceb94894dc5fe7b
|
[
"MIT"
] | null | null | null |
defmodule ClientQueryWeb.Telemetry do
  # Telemetry supervisor for the ClientQuery web app: runs a periodic
  # :telemetry_poller and exposes the metric definitions consumed by
  # reporters (none attached here; the ConsoleReporter line is commented out).
  use Supervisor
  import Telemetry.Metrics

  # Starts the supervisor under a fixed name; `arg` is forwarded to init/1
  # (currently unused there).
  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end

  @impl true
  def init(_arg) do
    children = [
      # Telemetry poller will execute the given period measurements
      # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
      # Add reporters as children of your supervision tree.
      # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  # Metric definitions (summaries only) for Phoenix, Ecto and the BEAM.
  # Reporters call this to know which telemetry events to aggregate.
  def metrics do
    [
      # Phoenix Metrics
      summary("phoenix.endpoint.stop.duration",
        unit: {:native, :millisecond}
      ),
      summary("phoenix.router_dispatch.stop.duration",
        tags: [:route],
        unit: {:native, :millisecond}
      ),

      # Database Metrics
      summary("client_query.repo.query.total_time", unit: {:native, :millisecond}),
      summary("client_query.repo.query.decode_time", unit: {:native, :millisecond}),
      summary("client_query.repo.query.query_time", unit: {:native, :millisecond}),
      summary("client_query.repo.query.queue_time", unit: {:native, :millisecond}),
      summary("client_query.repo.query.idle_time", unit: {:native, :millisecond}),

      # VM Metrics
      summary("vm.memory.total", unit: {:byte, :kilobyte}),
      summary("vm.total_run_queue_lengths.total"),
      summary("vm.total_run_queue_lengths.cpu"),
      summary("vm.total_run_queue_lengths.io")
    ]
  end

  # Currently empty: no custom periodic measurements are registered.
  defp periodic_measurements do
    [
      # A module, function and arguments to be invoked periodically.
      # This function must call :telemetry.execute/3 and a metric must be added above.
      # {ClientQueryWeb, :count_users, []}
    ]
  end
end
| 33.071429
| 86
| 0.677646
|
93e42c99d47aa8dd0dc0556886096acfa2912a85
| 1,308
|
exs
|
Elixir
|
test/pushstate_test.exs
|
benfb/elixush
|
ab0797d4b39f510283d2230d46aa061bc3198bf7
|
[
"MIT"
] | 13
|
2016-02-04T01:34:26.000Z
|
2021-05-24T08:34:09.000Z
|
test/pushstate_test.exs
|
benfb/elixush
|
ab0797d4b39f510283d2230d46aa061bc3198bf7
|
[
"MIT"
] | null | null | null |
test/pushstate_test.exs
|
benfb/elixush
|
ab0797d4b39f510283d2230d46aa061bc3198bf7
|
[
"MIT"
] | null | null | null |
defmodule Elixush.PushStateTest do
  # Tests for Elixush.PushState: empty-state construction, stack pushes, and
  # the instruction registry.
  #
  # Fix applied in review: imported/local zero-arity calls are now written
  # with parentheses (`make_push_state()`, `List.first()`) — calling them
  # bare emits a compiler deprecation warning in modern Elixir.
  use ExUnit.Case, async: true
  import Elixush.PushState
  # alias Elixush.Server

  test "the result of make_push_state is empty" do
    state = make_push_state()
    # A fresh state has empty stacks, so List.first/1 yields nil for each type.
    assert state |> Map.get(:boolean) |> List.first() == nil
    assert state |> Map.get(:integer) |> List.first() == nil
    assert state |> Map.get(:string) |> List.first() == nil
  end

  test "push_item pushes an item successfully" do
    assert push_item(true, :boolean, make_push_state())
           |> Map.get(:boolean)
           |> List.first()
  end

  test "register_instruction registers an instruction successfully" do
    assert register_instruction(:integer_test) == :ok
    # Registering the same instruction twice is rejected.
    assert_raise ArgumentError, fn ->
      register_instruction(:integer_test)
    end
  end

  test "registered_for_type returns correct instructions" do
    assert registered_for_type(:boolean) |> Enum.member?(:boolean_and)
    refute registered_for_type(:boolean) |> Enum.member?(:integer_gte)
    assert registered_for_type(:genome, include_randoms: false)
           |> Enum.member?(:genome_gene_dup)
  end

  test "registered_nonrandom returns correct instructions" do
    assert registered_nonrandom() |> Enum.member?(:boolean_and)
    refute registered_nonrandom() |> Enum.member?(:autoconstructive_integer_rand)
  end
end
| 32.7
| 81
| 0.717125
|
93e4303ca5b15bdcc55ea1ebb1dcc30645e34598
| 1,373
|
ex
|
Elixir
|
test/support/data_case.ex
|
MatthieuSegret/yummy-phoenix-graphql
|
f0b258293697b0b120ef8e8a3b3905043c998617
|
[
"MIT"
] | 122
|
2017-11-24T11:28:17.000Z
|
2022-02-25T17:05:20.000Z
|
test/support/data_case.ex
|
MatthieuSegret/yummy-phoenix
|
85b490075e3a0395b4e7cfa9f06936659e9d12b5
|
[
"MIT"
] | 6
|
2018-01-11T22:07:44.000Z
|
2021-11-21T15:41:42.000Z
|
test/support/data_case.ex
|
MatthieuSegret/yummy-phoenix
|
85b490075e3a0395b4e7cfa9f06936659e9d12b5
|
[
"MIT"
] | 25
|
2018-04-01T02:43:21.000Z
|
2022-02-15T03:22:54.000Z
|
defmodule Yummy.DataCase do
  @moduledoc """
  This module defines the setup for tests requiring
  access to the application's data layer.

  You may define functions here to be used as helpers in
  your tests.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  # Injected into every test module that does `use Yummy.DataCase`:
  # aliases the Repo and imports the usual Ecto helpers plus this module.
  using do
    quote do
      alias Yummy.Repo

      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Yummy.DataCase
    end
  end

  setup tags do
    # Check out a sandboxed DB connection per test; non-async tests share
    # the connection with spawned processes via :shared mode.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Yummy.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Yummy.Repo, {:shared, self()})
    end

    :ok
  end

  @doc """
  A helper that transform changeset errors to a map of messages.

      assert {:error, changeset} = Accounts.create_user(%{password: "short"})
      assert "password is too short" in errors_on(changeset).password
      assert %{password: ["password is too short"]} = errors_on(changeset)

  """
  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      # Interpolate %{key} placeholders (e.g. %{count}) into the message.
      Enum.reduce(opts, message, fn {key, value}, acc ->
        String.replace(acc, "%{#{key}}", to_string(value))
      end)
    end)
  end
end
| 25.425926
| 77
| 0.677349
|
93e432545bb045413d4a9b4262dac12d668e0b78
| 1,168
|
exs
|
Elixir
|
mix.exs
|
bulutfon/elixir_mod_event
|
d574863e66c779577b4108e4ab44366bad7c8313
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
bulutfon/elixir_mod_event
|
d574863e66c779577b4108e4ab44366bad7c8313
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
bulutfon/elixir_mod_event
|
d574863e66c779577b4108e4ab44366bad7c8313
|
[
"Apache-2.0"
] | null | null | null |
defmodule FSModEvent.Mixfile do
  # Mix project definition for elixir_mod_event, an Elixir client for
  # FreeSWITCH mod_event_socket.
  #
  # Fixes applied in review:
  #   * `Mix.env()` is now called with parentheses (formatter-normalized form).
  #   * The Hex `licenses` entry uses the SPDX identifier "Apache-2.0"
  #     instead of the non-standard "Apache 2.0".
  use Mix.Project

  def project do
    [
      app: :elixir_mod_event,
      name: "elixir_mod_event",
      version: "0.0.10",
      description: description(),
      package: package(),
      source_url: "https://github.com/marcelog/elixir_mod_event",
      elixir: "~> 1.0",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  def application do
    [
      applications: [:logger],
      extra_applications: [:uuid]
    ]
  end

  defp description do
    """
    Elixir client for FreeSWITCH mod_event_socket.

    Find the user guide in the Github repo at: https://github.com/marcelog/elixir_mod_event.
    """
  end

  # Hex package metadata.
  defp package do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Marcelo Gornstein"],
      licenses: ["Apache-2.0"],
      links: %{
        "GitHub" => "https://github.com/marcelog/elixir_mod_event"
      }
    ]
  end

  defp deps do
    [
      {:earmark, "~> 1.0.3", only: :dev},
      {:ex_doc, "~> 0.14.5", only: :dev},
      {:coverex, "~> 1.4.12", only: :test},
      {:uuid, "~> 1.1.6"}
    ]
  end
end
| 21.62963
| 92
| 0.5625
|
93e43ae3925d55f265916cf7a6d59bc610f56fce
| 12,517
|
ex
|
Elixir
|
lib/pdf_generator.ex
|
azzahidK/elixir-pdf-generator
|
31584797e32ea36d8f15200b26625e9e2fe18388
|
[
"MIT"
] | null | null | null |
lib/pdf_generator.ex
|
azzahidK/elixir-pdf-generator
|
31584797e32ea36d8f15200b26625e9e2fe18388
|
[
"MIT"
] | null | null | null |
lib/pdf_generator.ex
|
azzahidK/elixir-pdf-generator
|
31584797e32ea36d8f15200b26625e9e2fe18388
|
[
"MIT"
] | null | null | null |
defmodule PdfGenerator do
  require Logger
  @vsn "0.6.0"

  @moduledoc """
  # PdfGenerator

  Provides a simple wrapper around [wkhtmltopdf](http://wkhtmltopdf.org) and
  [pdftk](https://www.pdflabs.com/tools/pdftk-the-pdf-toolkit/) to generate
  possibly encrypted PDFs from an HTML source.

  # Configuration (optional)

  if no or partial configuration is given, PdfGenerator will search for
  executables on path. This will raise an error when wkhtmltopdf cannot be
  found.

      config :pdf_generator,
        wkhtml_path: "/path/to/wkhtmltopdf",
        pdftk_path: "/path/to/pdftk",

  In your config/config.exs. Add :pdf_generator to your mix.exs:
  Note that this is optional but advised to as it will perform a check on
  startup whether it can find a suitable wkhtmltopdf executable. It's
  generally better to have an app fail at startup than at later runtime.

      def application do
        [applications: [ .., :pdf_generator, ..], .. ]
      end

  If you don't want to autostart, issue

      PdfGenerator.start wkhtml_path: "/path/to/wkhtml_path"

  # System requirements

  - wkhtmltopdf or chrome-headless
  - pdftk (optional, for encrypted PDFs)

  Precompiled **wkhtmltopdf** binaries can be obtained here:
  http://wkhtmltopdf.org/downloads.html

  **pdftk** should be available as package on your system via

  - `apt-get install pdftk` on Debian/Ubuntu
  - `brew install pdftk` on OSX (you'll need homebrew, of course)
  - Install the Exe-Installer on Windows found the project's homepage (link
    above)
  """

  use Application

  # See http://elixir-lang.org/docs/stable/elixir/Application.html
  # for more information on OTP Applications
  def start(_type, _args) do
    import Supervisor.Spec, warn: false

    children = [
      # Define workers and child supervisors to be supervised
      # worker(TestApp.Worker, [arg1, arg2, arg3])
      worker(
        PdfGenerator.PathAgent, [[
          wkhtml_path: Application.get_env(:pdf_generator, :wkhtml_path),
          pdftk_path: Application.get_env(:pdf_generator, :pdftk_path),
          raise_on_missing_wkhtmltopdf_binary: Application.get_env(:pdf_generator, :raise_on_missing_wkhtmltopdf_binary, true),
        ]]
      )
    ]

    opts = [strategy: :one_for_one, name: PdfGenerator.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Default options merged under caller-supplied opts in generate/2.
  def defaults(), do: [generator: :wkhtmltopdf, page_size: "A4"]

  # return file name of generated pdf
  @doc """
  Generates a pdf file from given html string. Returns a string containing a
  temporary file path for that PDF.

  ## Options

   * `:generator` – either `chrome` or `wkhtmltopdf` (default)
   * `:prefer_system_executable` - set to `true` if you installed
     chrome-headless-render-pdf globally
   * `:no_sandbox` – disable sandbox for chrome, required to run as root (read: _docker_)
   * `:page_size` - output page size, defaults to "A4", other options are "letter" (US letter) and "A5"
   * `:open_password` - password required to open PDF. Will apply encryption to PDF
   * `:edit_password` - password required to edit PDF
   * `:shell_params` - list of command-line arguments to wkhtmltopdf or chrome
     see http://wkhtmltopdf.org/usage/wkhtmltopdf.txt for all options
   * `:delete_temporary` - true to remove the temporary html generated in
     the system tmp dir
   * `:filename` - filename you want for the output PDF (provide without .pdf extension),
     defaults to a random string

  # Examples

      pdf_path_1 = PdfGenerator.generate "<html><body><h1>Boom</h1></body></html>"

      pdf_path_2 = PdfGenerator.generate(
        "<html><body><h1>Boom</h1></body></html>",
        page_size: "letter",
        open_password: "secret",
        edit_password: "g3h31m",
        shell_params: [ "--outline", "--outline-depth3", "3" ],
        delete_temporary: true,
        filename: "my_awesome_pdf"
      )
  """
  @type url :: binary()
  @type html :: binary()
  @type pdf_file_path :: binary()
  @type content :: html | {:url, url} | {:html, html}
  @type reason :: atom() | {atom(), any}
  @type opts :: keyword()
  @type path :: binary()
  @type html_path :: path
  @type pdf_path :: path
  @type generator :: :wkhtmltopdf | :chrome

  @spec generate(content, opts) :: {:ok, pdf_file_path} | {:error, reason}
  def generate(content, opts \\ []) do
    options = Keyword.merge(defaults(), opts)
    generator = options[:generator]
    open_password = options[:open_password]
    edit_password = options[:edit_password]
    delete_temp = options[:delete_temporary]

    with {html_file, pdf_file} <- make_file_paths(options),
         :ok <- maybe_write_html(content, html_file),
         {executable, arguments} <- make_command(generator, options, content, {html_file, pdf_file}),
         {:cmd, {stderr, exit_code}} <- {:cmd, System.cmd(executable, arguments, stderr_to_stdout: true)}, # unfortunately wkhtmltopdf returns 0 on errors as well :-/
         {:result_ok, true, _err} <- {:result_ok, result_ok(generator, stderr, exit_code), stderr}, # so we inspect stderr instead
         {:rm, :ok} <- {:rm, maybe_delete_temp(delete_temp, html_file)},
         {:ok, encrypted_pdf} <- maybe_encrypt_pdf(pdf_file, open_password, edit_password) do
      {:ok, encrypted_pdf}
    else
      {:error, reason} -> {:error, reason}
      {:result_ok, _, err} -> {:error, {:generator_failed, err}}
      reason -> {:error, reason}
    end
  end

  # Writes HTML content to disk; URL inputs need no temp file.
  @spec maybe_write_html(content, path()) :: :ok | {:error, reason}
  def maybe_write_html({:url, _url}, _html_file_path), do: :ok
  def maybe_write_html({:html, html}, html_file_path), do: File.write(html_file_path, html)
  def maybe_write_html(html, html_file_path) when is_binary(html), do: maybe_write_html({:html, html}, html_file_path)

  # Derives the temporary .html/.pdf file pair from the :filename option
  # (or a random name in the system tmp dir).
  @spec make_file_paths(keyword()) :: {html_path, pdf_path}
  def make_file_paths(options) do
    filebase = options[:filename] |> generate_filebase()
    {filebase <> ".html", filebase <> ".pdf"}
  end

  def make_dimensions(options) when is_list(options) do
    options |> Enum.into(%{}) |> dimensions_for()
  end

  @doc ~s"""
  Returns `{width, height}` tuple for page sizes either as given or for A4 and
  A5. Defaults to A4 sizes. In inches. Because chrome wants imperial.
  """
  def dimensions_for(%{page_width: width, page_height: height}), do: {width, height}
  def dimensions_for(%{page_size: "A4"}), do: {"8.26772", "11.695"}
  def dimensions_for(%{page_size: "A5"}), do: {"5.8475", "8.26772"}
  def dimensions_for(%{page_size: "letter"}), do: {"8.5", "11"}
  def dimensions_for(_map), do: dimensions_for(%{page_size: "A4"})

  # Builds {executable, argv} for the chosen generator. The chrome variant
  # prefers a local/system chrome-headless-render-pdf, falling back to
  # running the bundled JS via node.
  @spec make_command(generator, opts, content, {html_path, pdf_path}) :: {path, list()}
  def make_command(:chrome, options, content, {html_path, pdf_path}) do
    chrome_executable = PdfGenerator.PathAgent.get.chrome_path
    node_executable = PdfGenerator.PathAgent.get.node_path
    disable_sandbox = Application.get_env(:pdf_generator, :disable_chrome_sandbox) || options[:no_sandbox]

    dir =
      if options[:prefer_local_executable] do
        Path.expand("assets")
      else
        # needs `make priv/node_modules` to be run when building
        :code.priv_dir(:pdf_generator) |> to_string()
      end

    js_file = "#{dir}/node_modules/chrome-headless-render-pdf/dist/cli/chrome-headless-render-pdf.js"

    {executable, executable_args} =
      if options[:prefer_system_executable] && is_binary(chrome_executable) do
        {chrome_executable, []}
      else
        {node_executable, [js_file]}
      end

    {width, height} = make_dimensions(options)
    more_params = options[:shell_params] || []

    source =
      case content do
        {:url, url} -> url
        _html -> "file://" <> html_path
      end

    arguments = List.flatten([
      executable_args,
      [
        "--url", source,
        "--pdf", pdf_path,
        "--paper-width", width,
        "--paper-height", height,
        "--chrome-option=--disable-dev-shm-usage"
      ],
      more_params,
      if(disable_sandbox, do: ["--chrome-option=--no-sandbox"], else: [])
    ])

    Logger.info("Running executable for chrome pdf generator")
    {executable, arguments} |> inspect() |> Logger.info()
    {executable, arguments}
  end

  def make_command(:wkhtmltopdf, options, content, {html_path, pdf_path}) do
    executable = PdfGenerator.PathAgent.get.wkhtml_path

    source =
      case content do
        {:url, url} -> url
        _html -> html_path
      end

    shell_params = options[:shell_params] || []

    arguments = List.flatten([
      shell_params,
      "--page-size", options[:page_size] || "A4",
      source, pdf_path
    ])

    # for wkhtmltopdf we support prefixes like ["xvfb-run", "-a"] to precede the actual command
    {executable, arguments} =
      case get_command_prefix(options) do
        nil -> {executable, arguments}
        [prefix | prefix_args] -> {prefix, prefix_args ++ [executable] ++ arguments}
        prefix -> {prefix, [executable | arguments]}
      end

    {executable, arguments} |> inspect() |> Logger.debug()
    {executable, arguments}
  end

  defp maybe_delete_temp(true, file), do: File.rm(file)
  defp maybe_delete_temp(_falsy, _file), do: :ok

  # Encrypts only when at least one password was supplied.
  def maybe_encrypt_pdf(pdf_file, open_password, edit_password)
      when is_binary(open_password) or is_binary(edit_password) do
    encrypt_pdf(pdf_file, open_password, edit_password)
  end

  def maybe_encrypt_pdf(pdf_file, _open_password, _edit_password) do
    {:ok, pdf_file}
  end

  defp result_ok(:chrome, string, 0) do
    Logger.info("PDF generated with message #{inspect string}")
    true
  end

  defp result_ok(:chrome, string, _exit_code) do
    Logger.error("PDF generated with message #{inspect string}")
    false
  end

  # wkhtmltopdf may exit 0 on failure, so success is detected by "Done" in output.
  defp result_ok(:wkhtmltopdf, string, _exit_code), do: String.match?(string, ~r/Done/ms)

  defp get_command_prefix(options) do
    options[:command_prefix] || Application.get_env(:pdf_generator, :command_prefix)
  end

  defp generate_filebase(nil), do: generate_filebase(PdfGenerator.Random.string())
  defp generate_filebase(filename), do: Path.join(System.tmp_dir, filename)

  # Runs pdftk to produce an encrypted copy; unsupplied passwords are
  # replaced with random ones so the corresponding permission stays locked.
  def encrypt_pdf(pdf_input_path, user_pw, owner_pw ) do
    pdftk_path = PdfGenerator.PathAgent.get.pdftk_path
    pdf_output_file = Path.join System.tmp_dir, PdfGenerator.Random.string() <> ".pdf"

    pdftk_args = [
      pdf_input_path,
      "output", pdf_output_file,
      "owner_pw", random_if_undef(owner_pw),
      "user_pw", random_if_undef(user_pw),
      "encrypt_128bit", "allow", "Printing", "CopyContents"
    ]

    {stderr, exit_code} = System.cmd(pdftk_path, pdftk_args, stderr_to_stdout: true)

    case exit_code do
      0 -> {:ok, pdf_output_file}
      _ -> {:error, {:pdftk, stderr}}
    end
  end

  defp random_if_undef(nil), do: PdfGenerator.Random.string(16)
  defp random_if_undef(any), do: any

  @doc """
  Takes same options as `generate` but will return an
  `{:ok, binary_pdf_content}` tuple.

  In case option _delete_temporary_ is true, will as well delete the temporary
  pdf file.
  """
  def generate_binary(html, options \\ []) do
    result = generate html, options
    case result do
      {:ok, filename} -> {:ok, filename |> read_and_maybe_delete(options) }
      {:error, reason} -> {:error, reason}
    end
  end

  defp read_and_maybe_delete(filename, options) do
    content = filename |> File.read!
    if Keyword.get(options, :delete_temporary), do: filename |> File.rm
    content
  end

  @doc """
  Same as generate_binary but returns PDF content directly or raises on
  error.
  """
  def generate_binary!(html, options \\ []) do
    result = generate_binary html, options
    case result do
      {:ok, content} -> content
      # FIX(review): reason is often a tuple (e.g. {:generator_failed, err}),
      # so the original `"..." <> reason` itself crashed with ArgumentError.
      # inspect/1 safely renders any term; callers still get a RuntimeError.
      {:error, reason} -> raise "in-place generation failed: #{inspect(reason)}"
    end
  end

  @doc """
  Same as generate but returns PDF file name only (raises on error).
  """
  def generate!(html, options \\ []) do
    result = generate html, options
    case result do
      {:ok, filename} -> filename
      # FIX(review): same as generate_binary!/2 — render non-binary reasons.
      {:error, reason} -> raise "HTML generation failed: #{inspect(reason)}"
    end
  end
end
| 35.968391
| 172
| 0.648878
|
93e49bffafe84d1e6225f20daf0fd5ff270ed3ca
| 359
|
exs
|
Elixir
|
config/test.exs
|
maxneuvians/elenchos_ex
|
03b31e848dafe12614a01104f89d9477c7b21025
|
[
"MIT"
] | null | null | null |
config/test.exs
|
maxneuvians/elenchos_ex
|
03b31e848dafe12614a01104f89d9477c7b21025
|
[
"MIT"
] | null | null | null |
config/test.exs
|
maxneuvians/elenchos_ex
|
03b31e848dafe12614a01104f89d9477c7b21025
|
[
"MIT"
] | null | null | null |
# Test-environment configuration for elenchos_ex.
# NOTE(review): `use Mix.Config` is deprecated in recent Elixir in favour of
# `import Config`; left unchanged to preserve compatibility with the
# project's declared Elixir version — TODO confirm before migrating.
use Mix.Config

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :elenchos_ex, ElenchosExWeb.Endpoint,
  http: [port: 4002],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Test-specific working directory and database table file.
config :elenchos_ex, :code_dir, "/tmp"
config :elenchos_ex, :db, "releases_test.tab"
| 25.642857
| 56
| 0.740947
|
93e4ad2ac61b4518036c698445b9fd6b650947fe
| 2,649
|
ex
|
Elixir
|
lib/aws/forecastquery.ex
|
ahsandar/aws-elixir
|
25de8b6c3a1401bde737cfc26b0679b14b058f23
|
[
"Apache-2.0"
] | null | null | null |
lib/aws/forecastquery.ex
|
ahsandar/aws-elixir
|
25de8b6c3a1401bde737cfc26b0679b14b058f23
|
[
"Apache-2.0"
] | null | null | null |
lib/aws/forecastquery.ex
|
ahsandar/aws-elixir
|
25de8b6c3a1401bde737cfc26b0679b14b058f23
|
[
"Apache-2.0"
] | null | null | null |
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.

# NOTE(review): auto-generated client — comments below are review notes only;
# regenerate via aws-codegen rather than editing behaviour by hand.
defmodule AWS.Forecastquery do
  @moduledoc """
  Provides APIs for creating and managing Amazon Forecast resources.
  """

  @doc """
  Retrieves a forecast for a single item, filtered by the supplied criteria.

  The criteria is a key-value pair. The key is either `item_id` (or the
  equivalent non-timestamp, non-target field) from the `TARGET_TIME_SERIES`
  dataset, or one of the forecast dimensions specified as part of the
  `FeaturizationConfig` object.

  By default, `QueryForecast` returns the complete date range for the
  filtered forecast. You can request a specific date range.

  To get the full forecast, use the
  [CreateForecastExportJob](https://docs.aws.amazon.com/en_us/forecast/latest/dg/API_CreateForecastExportJob.html)
  operation.

  <note> The forecasts generated by Amazon Forecast are in the same timezone
  as the dataset that was used to create the predictor.

  </note>
  """
  def query_forecast(client, input, options \\ []) do
    request(client, "QueryForecast", input, options)
  end

  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  defp request(client, action, input, options) do
    # AWS JSON 1.1 protocol: POST the Poison-encoded input with a SigV4
    # signature and an X-Amz-Target header naming the action.
    client = %{client | service: "forecast"}
    host = build_host("forecastquery", client)
    url = build_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "AmazonForecastRuntime.#{action}"}
    ]

    payload = Poison.Encoder.encode(input, %{})
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      # Empty 200 body: success with no decodable payload.
      {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
        {:ok, nil, response}

      {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body, %{}), response}

      # Non-200 responses carry a JSON error document.
      {:ok, %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body, %{})
        {:error, error}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # "local" region targets a bare localhost endpoint (for testing).
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
| 33.961538
| 114
| 0.662137
|
93e4b8c309e84f4998f618ac5bf199a2477fa714
| 1,228
|
ex
|
Elixir
|
lib/bitcoin_simulator/simulation/trade_center.ex
|
sidharth-shridhar/Bitcoin-Miner-Simulation
|
2789dc8fe5f65269789540f675fac682e431e518
|
[
"MIT"
] | null | null | null |
lib/bitcoin_simulator/simulation/trade_center.ex
|
sidharth-shridhar/Bitcoin-Miner-Simulation
|
2789dc8fe5f65269789540f675fac682e431e518
|
[
"MIT"
] | null | null | null |
lib/bitcoin_simulator/simulation/trade_center.ex
|
sidharth-shridhar/Bitcoin-Miner-Simulation
|
2789dc8fe5f65269789540f675fac682e431e518
|
[
"MIT"
] | null | null | null |
defmodule BitcoinSimulator.Simulation.TradeCenter do
use GenServer
require Logger
# Client
def start_link(_) do
GenServer.start_link(__MODULE__, nil, name: BitcoinSimulator.Simulation.TradeCenter)
end
# Server (callbacks)
def init(_) do
state = %{
total_peers: 0,
peer_ids: MapSet.new()
}
{:ok, state}
end
def handle_call({:get_random_trade_partner, self_id, count}, _from, state) do
peer_ids_exclude_requester = MapSet.delete(state.peer_ids, self_id)
if MapSet.size(peer_ids_exclude_requester) < count do
{:reply, :not_enough_partner, state}
else
{:reply, get_partner(peer_ids_exclude_requester, MapSet.new(), count), state}
end
end
def handle_cast({:peer_join, id}, state) do
new_state = %{state | total_peers: state.total_peers + 1, peer_ids: MapSet.put(state.peer_ids, id)}
{:noreply, new_state}
end
def terminate(reason, _state), do: if reason != :normal, do: Logger.error(reason)
# Aux
defp get_partner(set, result, target_count) do
result = MapSet.put(result, set |> MapSet.to_list() |> Enum.random())
if MapSet.size(result) < target_count, do: get_partner(set, result, target_count), else: result
end
end
| 26.695652
| 103
| 0.697068
|
93e4c54c57bcec27300c0f9a0b98eab863bbbd44
| 913
|
exs
|
Elixir
|
paths_test.exs
|
abetkin/paths_and_deps
|
30171d15d38a9c6cac7c97ff6bcdb8d9e969b35b
|
[
"MIT"
] | null | null | null |
paths_test.exs
|
abetkin/paths_and_deps
|
30171d15d38a9c6cac7c97ff6bcdb8d9e969b35b
|
[
"MIT"
] | null | null | null |
paths_test.exs
|
abetkin/paths_and_deps
|
30171d15d38a9c6cac7c97ff6bcdb8d9e969b35b
|
[
"MIT"
] | null | null | null |
defmodule Hi do
dep %{scope: scope}, do:
def hi [:message, format] do
case format do
:short -> "Hi #{who}"
:long -> "#{scope}: Hi #{who}"
end
end
dep do:
def author [:author], do:
"Vitalii"
end
defmodule Full do
dep %{[:message, :short] => msg, :author => author}, do:
def full [:message, :full], do
msg <> "\n" <> "Best regards, #{author}"
end
defmodule PathsTest do
use ExUnit.Case
@ctx %{scope: :party}
test "simplest" do
Paths.include([Hi])
|> Paths.eval([:message, :short])
end
test "el" do
scope = dep do: fn :scope -> :home end
Paths.include([scope, Hi])
|> Path.eval([:message, :short])
end
test "1" do
@ctx
|> Paths.include_modules([Hi, Full])
|> Paths.eval([:message, :full])
end
dep_modules [Hi, Full]
test "2" do
@ctx
|> Paths.eval([:message, :full])
end
end
| 16.6
| 58
| 0.544359
|
93e592abdf4154878790b9c8a36a5a82f10dae41
| 892
|
ex
|
Elixir
|
clients/cloud_identity/lib/google_api/cloud_identity/v1/metadata.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_identity/lib/google_api/cloud_identity/v1/metadata.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_identity/lib/google_api/cloud_identity/v1/metadata.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudIdentity.V1 do
@moduledoc """
API client metadata for GoogleApi.CloudIdentity.V1.
"""
@discovery_revision "20200509"
def discovery_revision(), do: @discovery_revision
end
| 33.037037
| 74
| 0.761211
|
93e59625d41281808154095f2643044ffe08aa99
| 665
|
ex
|
Elixir
|
lib/utility_belt/application.ex
|
ArcBlock/utility_belt
|
382c6f8c9b8cf1db1d394476a2c47972430f798e
|
[
"MIT"
] | 1
|
2020-06-06T19:35:43.000Z
|
2020-06-06T19:35:43.000Z
|
lib/utility_belt/application.ex
|
ArcBlock/utility_belt
|
382c6f8c9b8cf1db1d394476a2c47972430f798e
|
[
"MIT"
] | 11
|
2018-11-27T17:06:16.000Z
|
2019-02-14T07:11:36.000Z
|
lib/utility_belt/application.ex
|
ArcBlock/utility_belt
|
382c6f8c9b8cf1db1d394476a2c47972430f798e
|
[
"MIT"
] | 1
|
2018-08-13T21:07:03.000Z
|
2018-08-13T21:07:03.000Z
|
defmodule UtilityBelt.Application do
@moduledoc false
use Application
alias UtilityBelt.Config
def start(_type, _args) do
con_cache_opts = [
name: :query_metrics,
ttl_check_interval: :timer.seconds(5),
global_ttl: :timer.seconds(10)
]
children = [
{ConCache, con_cache_opts}
]
opts = [strategy: :one_for_one, name: UtilityBelt.Supervisor]
update_runtime_config()
Supervisor.start_link(children, opts)
end
def update_runtime_config do
Config.update(:cipher, :keyphrase, System.get_env("CIPHER_KEY_PHRASE"))
Config.update(:cipher, :ivphrase, System.get_env("CIPHER_IV_PHRASE"))
end
end
| 22.166667
| 75
| 0.702256
|
93e59e8450ad383c3ae3a3c216f80aa7a8ba0fcc
| 1,301
|
exs
|
Elixir
|
test/context_through_slot_test.exs
|
edgarmiadzieles/surface
|
24d1162cc25004df9d40a11dd65fca98d7b65c64
|
[
"MIT"
] | null | null | null |
test/context_through_slot_test.exs
|
edgarmiadzieles/surface
|
24d1162cc25004df9d40a11dd65fca98d7b65c64
|
[
"MIT"
] | null | null | null |
test/context_through_slot_test.exs
|
edgarmiadzieles/surface
|
24d1162cc25004df9d40a11dd65fca98d7b65c64
|
[
"MIT"
] | null | null | null |
defmodule Surface.ContextThroughSlotTest do
use ExUnit.Case, async: true
import Surface
import ComponentTestHelper
defmodule Parent.ContextProvider do
use Surface.Component
property foo, :string
context set foo, :string, scope: :only_children
slot default
# The foo prop is passed here and so we can use it
def init_context(assigns) do
{:ok, foo: assigns.foo}
end
def render(assigns) do
~H"""
<slot />
"""
end
end
defmodule Parent do
use Surface.Component
slot default
def render(assigns) do
~H"""
<div>
<Parent.ContextProvider foo="bar">
<slot />
</Parent.ContextProvider>
</div>
"""
end
end
defmodule Child do
use Surface.Component
context get foo, from: Parent.ContextProvider
def render(assigns) do
# @foo is nil here
~H"""
<div>{{ @foo }}</div>
"""
end
end
defmodule ExampleWeb.ContextLive do
use Surface.LiveView
def render(assigns) do
~H"""
<Parent>
<Child/>
</Parent>
"""
end
end
test "child should take context from parent when rendered in slot" do
assert render_live(ExampleWeb.ContextLive) =~ "<div><div>bar</div></div>"
end
end
| 18.323944
| 77
| 0.599539
|
93e5a58d4e0adfe33d76d5bf91bbab6ab58b8b57
| 253
|
ex
|
Elixir
|
lib/tarot_cup.ex
|
hpopp/tarot-cup
|
cc358145d35e7a0cee2ea68ce00e2e374b83ab11
|
[
"MIT"
] | 1
|
2020-12-31T22:23:00.000Z
|
2020-12-31T22:23:00.000Z
|
lib/tarot_cup.ex
|
hpopp/tarot-cup
|
cc358145d35e7a0cee2ea68ce00e2e374b83ab11
|
[
"MIT"
] | null | null | null |
lib/tarot_cup.ex
|
hpopp/tarot-cup
|
cc358145d35e7a0cee2ea68ce00e2e374b83ab11
|
[
"MIT"
] | null | null | null |
defmodule TarotCup do
@moduledoc """
TarotCup keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 25.3
| 66
| 0.754941
|
93e5aa1585ccf4c6d5a9d39b2448901f39c526b7
| 1,081
|
exs
|
Elixir
|
elixir/.iex.exs
|
MOPineyro/dotfiles
|
fc7a5440ddb8375fe70e68abee6e0152bd997c6a
|
[
"MIT"
] | null | null | null |
elixir/.iex.exs
|
MOPineyro/dotfiles
|
fc7a5440ddb8375fe70e68abee6e0152bd997c6a
|
[
"MIT"
] | null | null | null |
elixir/.iex.exs
|
MOPineyro/dotfiles
|
fc7a5440ddb8375fe70e68abee6e0152bd997c6a
|
[
"MIT"
] | null | null | null |
timestamp = fn ->
{_date, {hour, minute, _second}} = :calendar.local_time
[hour, minute]
|> Enum.map(&(String.pad_leading(Integer.to_string(&1), 2, "0")))
|> Enum.join(":")
end
IEx.configure(
colors: [
syntax_colors: [
number: :light_yellow,
atom: :light_cyan,
string: :yellow,
boolean: :red,
nil: [:magenta, :bright],
],
ls_directory: :cyan,
ls_device: :yellow,
doc_code: :green,
doc_inline_code: :magenta,
doc_headings: [:cyan, :underline],
doc_title: [:cyan, :bright, :underline],
],
default_prompt:
"#{IO.ANSI.green}%prefix#{IO.ANSI.reset} " <>
"[#{IO.ANSI.magenta}#{timestamp.()}#{IO.ANSI.reset} " <>
":: #{IO.ANSI.cyan}%counter#{IO.ANSI.reset}] >",
alive_prompt:
"#{IO.ANSI.green}%prefix#{IO.ANSI.reset} " <>
"(#{IO.ANSI.yellow}%node#{IO.ANSI.reset}) " <>
"[#{IO.ANSI.magenta}#{timestamp.()}#{IO.ANSI.reset} " <>
":: #{IO.ANSI.cyan}%counter#{IO.ANSI.reset}] >",
history_size: 50,
inspect: [
pretty: true,
limit: :infinity,
width: 80
],
width: 80
)
| 26.365854
| 67
| 0.577243
|
93e5e1c447d5f7c05675da820c50bf3d4abdbd27
| 728
|
ex
|
Elixir
|
lib/groupher_server_web/router.ex
|
mydearxym/mastani_server
|
f24034a4a5449200165cf4a547964a0961793eab
|
[
"Apache-2.0"
] | 2
|
2018-03-26T08:56:21.000Z
|
2018-07-02T22:34:51.000Z
|
lib/groupher_server_web/router.ex
|
mydearxym/mastani_server
|
f24034a4a5449200165cf4a547964a0961793eab
|
[
"Apache-2.0"
] | 22
|
2018-03-21T03:40:50.000Z
|
2018-07-10T06:33:10.000Z
|
lib/groupher_server_web/router.ex
|
mydearxym/mastani_server
|
f24034a4a5449200165cf4a547964a0961793eab
|
[
"Apache-2.0"
] | null | null | null |
defmodule GroupherServerWeb.Router do
@moduledoc false
use GroupherServerWeb, :router
pipeline :api do
plug(:accepts, ["json"])
plug(GroupherServerWeb.Context)
end
alias GroupherServerWeb.Controller
scope "/api" do
pipe_through(:api)
# get "/og-info", TodoController, only: [:index]
# resources("/og-info", OG, only: [:index])
get("/og-info", Controller.OG, :index)
end
scope "/graphiql" do
pipe_through(:api)
forward(
"/",
Absinthe.Plug.GraphiQL,
schema: GroupherServerWeb.Schema,
json_codec: Jason,
pipeline: {ApolloTracing.Pipeline, :plug},
interface: :playground,
context: %{pubsub: GroupherServerWeb.Endpoint}
)
end
end
| 20.8
| 52
| 0.651099
|
93e5e8310800ac16f3e71b8d837a861c5c0d55ea
| 514
|
exs
|
Elixir
|
mix.exs
|
felipe-jm/ignite-elixir-challenge-02
|
98f250bf9931460c89d4d7af71514eb471f1507a
|
[
"MIT"
] | null | null | null |
mix.exs
|
felipe-jm/ignite-elixir-challenge-02
|
98f250bf9931460c89d4d7af71514eb471f1507a
|
[
"MIT"
] | null | null | null |
mix.exs
|
felipe-jm/ignite-elixir-challenge-02
|
98f250bf9931460c89d4d7af71514eb471f1507a
|
[
"MIT"
] | null | null | null |
defmodule ListFilter.MixProject do
use Mix.Project
def project do
[
app: :list_filter,
version: "0.1.0",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:credo, "~> 1.5", only: [:dev, :test], runtime: false}
]
end
end
| 18.357143
| 61
| 0.575875
|
93e6378918dcce45b5f8470d87390099a84d3dec
| 2,018
|
ex
|
Elixir
|
clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1p2alpha1_import_data_operation_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1p2alpha1_import_data_operation_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1p2alpha1_import_data_operation_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p2alpha1ImportDataOperationResponse do
@moduledoc """
Response used for ImportData longrunning operation.
## Attributes
* `dataset` (*type:* `String.t`, *default:* `nil`) - Ouptut only. The name of imported dataset.
* `importCount` (*type:* `integer()`, *default:* `nil`) - Output only. Number of examples imported successfully.
* `totalCount` (*type:* `integer()`, *default:* `nil`) - Output only. Total number of examples requested to import
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dataset => String.t(),
:importCount => integer(),
:totalCount => integer()
}
field(:dataset)
field(:importCount)
field(:totalCount)
end
defimpl Poison.Decoder,
for:
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p2alpha1ImportDataOperationResponse do
def decode(value, options) do
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p2alpha1ImportDataOperationResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p2alpha1ImportDataOperationResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.633333
| 118
| 0.736373
|
93e63f733a0369818681e2642d1d18fd7d5d7d4a
| 2,812
|
exs
|
Elixir
|
test/phoenix_live_view/integrations/event_test.exs
|
ohr486/phoenix_live_view
|
14a3e5a993de7767e38117852707c6c1feb1d485
|
[
"MIT"
] | null | null | null |
test/phoenix_live_view/integrations/event_test.exs
|
ohr486/phoenix_live_view
|
14a3e5a993de7767e38117852707c6c1feb1d485
|
[
"MIT"
] | null | null | null |
test/phoenix_live_view/integrations/event_test.exs
|
ohr486/phoenix_live_view
|
14a3e5a993de7767e38117852707c6c1feb1d485
|
[
"MIT"
] | null | null | null |
defmodule Phoenix.LiveView.EventTest do
use ExUnit.Case
import Phoenix.ConnTest
import Phoenix.LiveViewTest
alias Phoenix.LiveView
alias Phoenix.LiveViewTest.{Endpoint}
@endpoint Endpoint
setup_all do
ExUnit.CaptureLog.capture_log(fn -> Endpoint.start_link() end)
:ok
end
setup config do
{:ok,
conn: Plug.Test.init_test_session(Phoenix.ConnTest.build_conn(), config[:session] || %{})}
end
describe "push_event" do
test "sends updates with general assigns diff", %{conn: conn} do
{:ok, view, _html} = live(conn, "/events")
GenServer.call(
view.pid,
{:run,
fn socket ->
new_socket =
socket
|> LiveView.assign(count: 123)
|> LiveView.push_event("my-event", %{one: 1})
{:reply, :ok, new_socket}
end}
)
assert_push_event(view, "my-event", %{one: 1})
assert render(view) =~ "count: 123"
end
test "sends updates with no assigns diff", %{conn: conn} do
{:ok, view, _html} = live(conn, "/events")
GenServer.call(
view.pid,
{:run,
fn socket ->
{:reply, :ok, LiveView.push_event(socket, "my-event", %{two: 2})}
end}
)
assert_push_event(view, "my-event", %{two: 2})
assert render(view) =~ "count: 0"
end
test "sends updates in root and child mounts", %{conn: conn} do
{:ok, view, _html} = live(conn, "/events-in-mount")
assert_push_event(view, "root-mount", %{root: "foo"})
assert_push_event(view, "child-mount", %{child: "bar"})
end
end
describe "replies" do
test "sends reply from handle_event with general assigns diff", %{conn: conn} do
{:ok, view, _html} = live(conn, "/events")
assert render_hook(view, :reply, %{count: 456, reply: %{"val" => "my-reply"}}) =~
"count: 456"
assert_reply(view, %{"val" => "my-reply"})
end
test "sends reply from handle_event with no assigns diff", %{conn: conn} do
{:ok, view, _html} = live(conn, "/events")
assert render_hook(view, :reply, %{reply: %{"val" => "nodiff"}}) =~ "count: 0"
assert_reply(view, %{"val" => "nodiff"})
end
test "raises when trying to reply outside of handle_event", %{conn: conn} do
Process.flag(:trap_exit, true)
{:ok, view, _html} = live(conn, "/events")
pid = view.pid
Process.monitor(pid)
assert ExUnit.CaptureLog.capture_log(fn ->
send(
view.pid,
{:run,
fn socket ->
{:reply, :boom, socket}
end}
)
assert_receive {:DOWN, _ref, :process, ^pid, _reason}
end) =~ "Got: {:reply, :boom"
end
end
end
| 27.300971
| 95
| 0.555477
|
93e64baa9c1d921aaced31d36a451f2e1c5264bd
| 2,189
|
ex
|
Elixir
|
lib/porcelain/drivers/stream_server.ex
|
samgaw/porcelain
|
3a19526b61945736e682704c5d873b5dcf7c50ad
|
[
"MIT"
] | null | null | null |
lib/porcelain/drivers/stream_server.ex
|
samgaw/porcelain
|
3a19526b61945736e682704c5d873b5dcf7c50ad
|
[
"MIT"
] | null | null | null |
lib/porcelain/drivers/stream_server.ex
|
samgaw/porcelain
|
3a19526b61945736e682704c5d873b5dcf7c50ad
|
[
"MIT"
] | null | null | null |
defmodule Porcelain.Driver.Common.StreamServer do
@moduledoc false
# Internal module used to make output streams work
require Record
Record.defrecordp(:state, [:done, :chunks, :client])
use GenServer
def start() do
GenServer.start(__MODULE__, state(chunks: :queue.new()))
end
def get_data(pid) do
log("Stream server get data #{inspect(self())}")
GenServer.call(pid, :get_data, :infinity)
end
def put_data(pid, data) do
log("Stream server put data #{inspect(data)}")
:ok = GenServer.call(pid, {:data, data})
end
def finish(pid) do
log("Stream server finish")
GenServer.cast(pid, :done)
end
###
def handle_call(:get_data, _from, state(done: true, chunks: q) = state) do
if :queue.is_empty(q) do
log("Stream server did stop")
{:stop, :shutdown, nil, nil}
else
log("Stream server reply")
{:reply, :queue.head(q), state(state, chunks: :queue.tail(q))}
end
end
def handle_call(:get_data, from, state(chunks: q) = state) do
if :queue.is_empty(q) do
log("get_data: []")
{:noreply, state(state, client: from)}
else
log("get_data: <data>")
{:reply, :queue.head(q), state(state, chunks: :queue.tail(q))}
end
end
def handle_call({:data, data}, _from, state(chunks: q, client: nil) = state) do
log("Stream server got data")
{:reply, :ok, state(state, chunks: :queue.in(data, q))}
end
def handle_call({:data, data}, _from, state(chunks: q, client: client) = state) do
true = :queue.is_empty(q)
log("Stream server got data. Sending to client")
GenServer.reply(client, data)
{:reply, :ok, state(state, client: nil)}
end
def handle_cast(:done, state(client: nil) = state) do
{:noreply, state(state, done: true)}
end
def handle_cast(:done, state(chunks: q, client: client) = state) do
if :queue.is_empty(q) do
log("Stream server did stop")
GenServer.reply(client, nil)
{:stop, :shutdown, nil}
else
GenServer.reply(client, :queue.head(q))
{:noreply, state(state, done: true, chunks: :queue.tail(q))}
end
end
defp log(_), do: nil
# defp log(msg), do: IO.puts msg
end
| 26.373494
| 84
| 0.632252
|
93e653e51f6de7a3845615220e1e666cea82f678
| 2,325
|
ex
|
Elixir
|
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_continuous_test_result.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_continuous_test_result.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3beta1_continuous_test_result.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1ContinuousTestResult do
@moduledoc """
Represents a result from running a test case in an agent environment.
## Attributes
* `name` (*type:* `String.t`, *default:* `nil`) - The resource name for the continuous test result. Format: `projects//locations//agents//environments//continuousTestResults/`.
* `result` (*type:* `String.t`, *default:* `nil`) - The result of this continuous test run, i.e. whether all the tests in this continuous test run pass or not.
* `runTime` (*type:* `DateTime.t`, *default:* `nil`) - Time when the continuous testing run starts.
* `testCaseResults` (*type:* `list(String.t)`, *default:* `nil`) - A list of individual test case results names in this continuous test run.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:name => String.t() | nil,
:result => String.t() | nil,
:runTime => DateTime.t() | nil,
:testCaseResults => list(String.t()) | nil
}
field(:name)
field(:result)
field(:runTime, as: DateTime)
field(:testCaseResults, type: :list)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1ContinuousTestResult do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1ContinuousTestResult.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3beta1ContinuousTestResult do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.114754
| 180
| 0.71957
|
93e66d6b002390bb0a42b9b85a1e82248db1b49c
| 239
|
exs
|
Elixir
|
hexdocs__pm__phoenix__up_and_running.html/hello/priv/repo/migrations/20200928045138_create_pages.exs
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
hexdocs__pm__phoenix__up_and_running.html/hello/priv/repo/migrations/20200928045138_create_pages.exs
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
hexdocs__pm__phoenix__up_and_running.html/hello/priv/repo/migrations/20200928045138_create_pages.exs
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
defmodule Hello.Repo.Migrations.CreatePages do
use Ecto.Migration
def change do
create table(:pages) do
add :title, :string
add :body, :text
add :views, :integer, default: 0
timestamps()
end
end
end
| 17.071429
| 46
| 0.640167
|
93e690ddbcb37b7d23791b2115e03116c29d97ae
| 1,092
|
ex
|
Elixir
|
lib/conduit/router.ex
|
rudyyazdi/conduit
|
8defa60962482fb81f5093ea5d58b71a160db3c4
|
[
"MIT"
] | null | null | null |
lib/conduit/router.ex
|
rudyyazdi/conduit
|
8defa60962482fb81f5093ea5d58b71a160db3c4
|
[
"MIT"
] | 2
|
2022-01-15T02:09:30.000Z
|
2022-01-22T10:18:43.000Z
|
lib/conduit/router.ex
|
rudyyazdi/conduit
|
8defa60962482fb81f5093ea5d58b71a160db3c4
|
[
"MIT"
] | null | null | null |
defmodule Conduit.Router do
use Commanded.Commands.Router
alias Conduit.Accounts.Aggregates.User
alias Conduit.Accounts.Commands.{
RegisterUser,
UpdateUser,
}
alias Conduit.Blog.Aggregates.{Article,Author,Comment}
alias Conduit.Blog.Commands.{
CreateAuthor,
CommentOnArticle,
DeleteComment,
FavoriteArticle,
FollowAuthor,
PublishArticle,
UnfavoriteArticle,
UnfollowAuthor,
}
alias Conduit.Support.Middleware.{Uniqueness,Validate}
middleware Validate
middleware Uniqueness
identify Article, by: :article_uuid, prefix: "article-"
identify Author, by: :author_uuid, prefix: "author-"
identify Comment, by: :comment_uuid, prefix: "comment-"
identify User, by: :user_uuid, prefix: "user-"
dispatch [
PublishArticle,
FavoriteArticle,
UnfavoriteArticle
], to: Article
dispatch [
CreateAuthor,
FollowAuthor,
UnfollowAuthor,
], to: Author
dispatch [
CommentOnArticle,
DeleteComment,
], to: Comment, lifespan: Comment
dispatch [
RegisterUser,
UpdateUser,
], to: User
end
| 21
| 57
| 0.709707
|
93e70443713f715b61120a718d6de63fd1c47b73
| 1,660
|
exs
|
Elixir
|
test/ez_calendar/calendars/day_calendar_test.exs
|
elixir-twister/ez_calendar
|
4d08f049f4e8cd162e333f1ad296b29297a57621
|
[
"MIT"
] | 7
|
2016-11-12T07:53:34.000Z
|
2021-05-19T01:34:27.000Z
|
test/ez_calendar/calendars/day_calendar_test.exs
|
elixir-twister/ez_calendar
|
4d08f049f4e8cd162e333f1ad296b29297a57621
|
[
"MIT"
] | 1
|
2017-04-06T15:16:22.000Z
|
2017-04-06T15:16:22.000Z
|
test/ez_calendar/calendars/day_calendar_test.exs
|
elixir-twister/ez_calendar
|
4d08f049f4e8cd162e333f1ad296b29297a57621
|
[
"MIT"
] | 2
|
2017-04-19T15:03:58.000Z
|
2018-07-04T19:37:10.000Z
|
defmodule EZCalendar.DayCalendarTest do
use ExUnit.Case, async: true
alias EZCalendar.Repo
alias EZCalendar.Event
alias EZCalendar.DayCalendar
setup do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(EZCalendar.Repo)
end
test "returns the correct date range" do
{start_date, end_date} = DayCalendar.date_range({2016, 11, 1}, [])
assert start_date == ~D[2016-11-01]
assert end_date == ~D[2016-11-01]
end
test "returns a struct with the correct params" do
params = Event |> Repo.day_calendar!({2016, 11, 1}) |> Map.get(:params)
assert params.day == 1
assert params.month == 11
assert params.year == 2016
end
test "returns a struct with the correct next" do
next = Event |> Repo.day_calendar!({2016, 12, 31}) |> Map.get(:next)
assert next.day == 1
assert next.month == 1
assert next.year == 2017
end
test "returns a struct with the correct prev" do
prev = Event |> Repo.day_calendar!({2016, 1, 1}) |> Map.get(:prev)
assert prev.day == 31
assert prev.month == 12
assert prev.year == 2015
end
test "returns a struct with the correct title" do
title = Event |> Repo.day_calendar!({2016, 11, 1}) |> Map.get(:title)
assert title == "November 1, 2016"
end
test "returns a struct with the correct dates" do
dates = Event |> Repo.day_calendar!({2016, 11, 1}) |> Map.get(:dates)
start_date = dates |> List.first
end_date = dates |> List.last
assert start_date.day == 1
assert start_date.month == 11
assert start_date.year == 2016
assert end_date.day == 1
assert end_date.month == 11
assert end_date.year == 2016
end
end
| 28.62069
| 75
| 0.655422
|
93e70ea96f7466a3214f442601396080aa77aec9
| 1,091
|
ex
|
Elixir
|
lib/railway_ipc/storage.ex
|
learn-co/railway_ipc
|
aeec16fb5b315fb3d8472b38c6eeea20d20e731a
|
[
"MIT"
] | 2
|
2021-03-22T19:37:33.000Z
|
2022-01-04T08:48:20.000Z
|
lib/railway_ipc/storage.ex
|
learn-co/railway_ipc
|
aeec16fb5b315fb3d8472b38c6eeea20d20e731a
|
[
"MIT"
] | 10
|
2019-11-29T20:24:24.000Z
|
2021-02-26T22:06:13.000Z
|
lib/railway_ipc/storage.ex
|
learn-co/railway_ipc
|
aeec16fb5b315fb3d8472b38c6eeea20d20e731a
|
[
"MIT"
] | 1
|
2020-01-09T17:13:29.000Z
|
2020-01-09T17:13:29.000Z
|
defmodule RailwayIpc.Storage do
@moduledoc """
Behaviour specification for message persistence.
Any message persistence adapters must conform to this behaviour. Railway
ships with a Ecto adapter, but you may define your own. For example, you
may define your own no-op implementation if you do not wish to persist
messages. You can then override the Railway configuration to use your
custom adapter.
```
config :railway_ipc,
storage: MyCustomAdapter
```
Note that this behaviour is incomplete, it only handles outgoing (published)
messages. This is part of an ongoing refactoring to clean up the internal
API of Railway. Support for incoming (consumed) messages will be added at a
later date.
"""
defmodule OutgoingMessage do
@moduledoc """
Represents an outgoing message and its metadata.
"""
defstruct [:protobuf, :encoded, :exchange, :type]
end
@doc """
Inserts an outgoing message into the message store.
"""
@callback insert(message :: %__MODULE__.OutgoingMessage{}) ::
{:ok, term} | {:error, term}
end
| 28.710526
| 78
| 0.716774
|
93e7237b8fbe861e7bbc1b1be4d5f391f72ad24e
| 2,405
|
ex
|
Elixir
|
test/support/worker.ex
|
chrismo/oban
|
f912ccf75a1d89e02229041d578f9263d4de0232
|
[
"Apache-2.0"
] | null | null | null |
test/support/worker.ex
|
chrismo/oban
|
f912ccf75a1d89e02229041d578f9263d4de0232
|
[
"Apache-2.0"
] | 26
|
2021-07-24T21:32:21.000Z
|
2022-03-23T11:55:24.000Z
|
test/support/worker.ex
|
chrismo/oban
|
f912ccf75a1d89e02229041d578f9263d4de0232
|
[
"Apache-2.0"
] | null | null | null |
defmodule Oban.Integration.Worker do
  @moduledoc false

  # Test-support worker used by the integration suite. Jobs carry the test
  # process pid (base64-encoded, since job args must be JSON-serializable)
  # in "bin_pid"; each perform/1 clause reports back to that process and
  # then exercises one of the outcomes a worker can produce (:ok, :discard,
  # {:error, _}, raise, exit, kill, {:snooze, _}, task failures, ...).

  use Oban.Worker, queue: :alpha

  @impl Worker
  # Builds a Job changeset from the given args, merging per-call opts over
  # the opts declared in `use Oban.Worker` and smuggling in the caller's pid.
  def new(args, opts) do
    opts = Keyword.merge(__opts__(), opts)

    args
    |> Map.new()
    |> Map.put_new(:bin_pid, pid_to_bin())
    |> Job.new(opts)
  end

  @impl Worker
  # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity
  # Dispatches on the "action" arg: notifies the test process, then returns
  # (or raises/exits with) the corresponding worker outcome.
  def perform(%_{args: %{"ref" => ref, "action" => action, "bin_pid" => bin_pid}}) do
    pid = bin_to_pid(bin_pid)

    case action do
      "OK" ->
        send(pid, {:ok, ref})

        :ok

      "DISCARD" ->
        send(pid, {:discard, ref})

        :discard

      "ERROR" ->
        send(pid, {:error, ref})

        {:error, "ERROR"}

      "EXIT" ->
        send(pid, {:exit, ref})

        # FakeServer is not running, so this call exits the job process.
        GenServer.call(FakeServer, :exit)

      "FAIL" ->
        send(pid, {:fail, ref})

        raise RuntimeError, "FAILED"

      "KILL" ->
        send(pid, {:kill, ref})

        # Brutal kill: cannot be trapped, simulates an untrappable crash.
        Process.exit(self(), :kill)

      "SNOOZE" ->
        send(pid, {:snooze, ref})

        {:snooze, 60}

      "TASK_ERROR" ->
        send(pid, {:async, ref})

        # The awaited task calls a non-running server, so the failure
        # surfaces inside a linked Task rather than the job process itself.
        fn -> apply(FakeServer, :call, []) end
        |> Task.async()
        |> Task.await()

        :ok

      "TASK_EXIT" ->
        send(pid, {:async, ref})

        # Exits with a non-list timeout reason from within a linked task.
        fn -> apply(Kernel, :exit, [{:timeout, :not_a_list}]) end
        |> Task.async()
        |> Task.await()
    end
  end

  # Recursive variant: re-enqueues itself with ref + 1 until ref == recur,
  # notifying the test process on every hop.
  def perform(%_{args: %{"ref" => ref, "recur" => recur, "bin_pid" => bin_pid}} = job) do
    bin_pid
    |> bin_to_pid()
    |> send({:ok, ref, recur})

    if ref < recur do
      new_job = new(%{"ref" => ref + 1, "recur" => recur, "bin_pid" => bin_pid})

      Oban.insert!(job.conf.name, new_job)
    end

    :ok
  end

  # Sleeping variant: signals :started, blocks for `sleep` ms, then signals
  # :ok — used to test timeouts and shutdown behavior.
  def perform(%_{args: %{"ref" => ref, "sleep" => sleep, "bin_pid" => bin_pid}}) do
    pid = bin_to_pid(bin_pid)

    send(pid, {:started, ref})

    :ok = Process.sleep(sleep)

    send(pid, {:ok, ref})

    :ok
  end

  @impl Worker
  # Jobs may override the retry backoff via a "backoff" arg; otherwise the
  # default Worker backoff applies.
  def backoff(%_{args: %{"backoff" => backoff}}), do: backoff
  def backoff(%_{} = job), do: Worker.backoff(job)

  @impl Worker
  # Jobs may set a positive "timeout" arg; otherwise run unbounded.
  def timeout(%_{args: %{"timeout" => timeout}}) when timeout > 0, do: timeout
  def timeout(_job), do: :infinity

  # Encodes a pid as a base64 string so it can travel through job args.
  def pid_to_bin(pid \\ self()) do
    pid
    |> :erlang.term_to_binary()
    |> Base.encode64()
  end

  # Inverse of pid_to_bin/1. NOTE(review): binary_to_term on the decoded
  # payload is safe here only because the args originate from the test suite.
  def bin_to_pid(bin) do
    bin
    |> Base.decode64!()
    |> :erlang.binary_to_term()
  end
end
| 19.876033
| 89
| 0.533056
|
93e74ba6149b95681b0db5dd4d2c1f4355cb9eb1
| 987
|
ex
|
Elixir
|
lib/sap/context.ex
|
slogsdon/sap
|
766f06cfac8a04772affd977a88d61210064e598
|
[
"MIT"
] | 7
|
2015-10-25T16:38:45.000Z
|
2020-01-12T19:06:57.000Z
|
lib/sap/context.ex
|
slogsdon/sap
|
766f06cfac8a04772affd977a88d61210064e598
|
[
"MIT"
] | null | null | null |
lib/sap/context.ex
|
slogsdon/sap
|
766f06cfac8a04772affd977a88d61210064e598
|
[
"MIT"
] | null | null | null |
defmodule Sap.Context do
  @moduledoc """
  A data type to hold both the connection data and the status of a decision
  path.
  """

  @type t :: %__MODULE__{
          status: :ok | :error,
          conn: Plug.Conn.t
        }

  defstruct status: :ok,
            conn: nil

  @doc """
  Wraps a `Plug.Conn` in a fresh `Sap.Context`. The new context always
  starts out with an `:ok` status.
  """
  @spec new(Plug.Conn.t) :: t
  def new(conn) do
    %__MODULE__{conn: conn}
  end

  @doc """
  Flips a `Sap.Context`'s status to `:error`, signalling that the remainder
  of a decision path should be skipped.
  """
  @spec error(t) :: t
  def error(context) do
    %{context | status: :error}
  end
end
defimpl Control.Monad, for: Sap.Context do
  # Monad instance for Sap.Context: `return` lifts a conn into a context,
  # while `bind` threads the stored conn through `fun` unless the context
  # has already failed, in which case the failed context short-circuits.

  def return(context, nil), do: context

  def return(%Sap.Context{conn: nil}, conn) do
    Sap.Context.new(conn)
  end

  def bind(%Sap.Context{status: :error} = context, _fun), do: context

  def bind(context, fun) do
    fun.(context.conn)
  end
end
| 21.456522
| 76
| 0.620061
|
93e7a6f369e65039ef59e0c0598fde8e23890fca
| 1,980
|
ex
|
Elixir
|
lib/indulgences/report.ex
|
TrsNium/Indulgences
|
9492e49508551eb30c030016f5422475184e90f7
|
[
"MIT"
] | 5
|
2019-09-09T08:51:25.000Z
|
2020-01-01T06:28:21.000Z
|
lib/indulgences/report.ex
|
TrsNium/Indulgences
|
9492e49508551eb30c030016f5422475184e90f7
|
[
"MIT"
] | 10
|
2020-05-19T20:28:34.000Z
|
2022-03-23T21:39:11.000Z
|
lib/indulgences/report.ex
|
TrsNium/Indulgences
|
9492e49508551eb30c030016f5422475184e90f7
|
[
"MIT"
] | null | null | null |
defmodule Indulgences.Report do
  # Mnesia-backed (via Memento) log of instruction executions, plus simple
  # aggregate queries (count / min / max / mean response time) over the
  # recorded rows. Execution times are stored in microseconds and the
  # aggregates truncate to milliseconds (the `/ 1000` below).

  use Memento.Table,
    attributes: [
      :id,
      :instruction_name,
      :status,
      :reason,
      :start_time,
      :end_time,
      :execution_time
    ],
    index: [:instruction_name, :status, :reason, :start_time, :end_time],
    type: :ordered_set,
    autoincrement: true

  # Persists one execution record inside a Memento transaction. The :id is
  # filled in by the table's autoincrement.
  def write(instruction_name, status, reason, start_time, end_time, execution_time) do
    Memento.transaction(fn ->
      Memento.Query.write(%__MODULE__{
        instruction_name: instruction_name,
        status: status,
        reason: reason,
        start_time: start_time,
        end_time: end_time,
        execution_time: execution_time
      })
    end)
  end

  # Number of recorded rows for a given instruction name.
  def get_instruction_row_count(instruction_name) do
    rows =
      Memento.transaction!(fn ->
        Memento.Query.select(__MODULE__, {:==, :instruction_name, instruction_name})
      end)

    Enum.count(rows)
  end

  # Number of recorded rows with the given status.
  # NOTE(review): when no rows match, get_rows/1 returns a single placeholder
  # row, so this reports 1 rather than 0 — confirm that is intended.
  def count_rows(status) do
    Enum.count(get_rows(status))
  end

  # Smallest execution time (ms, truncated) among rows with `status`.
  def min_response_time(status) do
    min =
      get_rows(status)
      |> Enum.map(fn %__MODULE__{} = report -> report.execution_time end)
      |> Enum.min()

    trunc(min / 1000)
  end

  # Largest execution time (ms, truncated) among rows with `status`.
  def max_response_time(status) do
    max =
      get_rows(status)
      |> Enum.map(fn %__MODULE__{} = report -> report.execution_time end)
      |> Enum.max()

    trunc(max / 1000)
  end

  # Arithmetic mean of execution times (ms, truncated) among rows with
  # `status`.
  def mean_response_time(status) do
    rows = get_rows(status)

    total_execution_time =
      rows
      |> Enum.map(fn %__MODULE__{} = report -> report.execution_time end)
      |> Enum.sum()

    trunc(total_execution_time / Enum.count(rows) / 1000)
  end

  # Fetches all rows with the given status. Falls back to one zero-valued
  # placeholder row so the min/max/mean aggregates above never crash on an
  # empty result (Enum.min/1 and friends raise on []), at the cost of
  # reporting 0 instead of "no data".
  defp get_rows(status) do
    rows =
      Memento.transaction!(fn ->
        Memento.Query.select(__MODULE__, {:==, :status, status})
      end)

    if rows == [] do
      [%__MODULE__{execution_time: 0}]
    else
      rows
    end
  end

  # Drops and recreates the table, discarding every recorded row.
  def clear() do
    Memento.Table.delete!(__MODULE__)
    Memento.Table.create!(__MODULE__)
  end
end
| 22.247191
| 86
| 0.628283
|
93e7a9dbc6c2e0ca5c2d16feb3de2f6e291c1ade
| 675
|
exs
|
Elixir
|
test/people_classify_test.exs
|
sqeezy/people_classify
|
0d2b261c2df616ce9bc38e85e20f8e7a9276c924
|
[
"MIT"
] | null | null | null |
test/people_classify_test.exs
|
sqeezy/people_classify
|
0d2b261c2df616ce9bc38e85e20f8e7a9276c924
|
[
"MIT"
] | null | null | null |
test/people_classify_test.exs
|
sqeezy/people_classify
|
0d2b261c2df616ce9bc38e85e20f8e7a9276c924
|
[
"MIT"
] | null | null | null |
defmodule PeopleClassifyTest do
  use ExUnit.Case
  doctest PeopleClassify

  # Sanity check that the test suite runs at all.
  test "the truth" do
    assert 1 + 1 == 2
  end

  test "it knows josi is sweet" do
    assert SimpleBayes.classify_one(PeopleClassify.trained, "sweet") == :josi
  end

  test "it knows anton is funny" do
    assert SimpleBayes.classify_one(PeopleClassify.trained, "funny") == :anton
  end

  test "it knows heike is funny too" do
    scores = SimpleBayes.classify(PeopleClassify.trained, "funny")

    refute scores[:heike] == nil
  end
end
| 25
| 62
| 0.64
|
93e7c3d0d5708c4dbcdcecb1a1801155b951ad04
| 506
|
exs
|
Elixir
|
config/config.exs
|
z0w0/imagism
|
d337d6c76e3e5bd9d5cadd69dfc1d80e66688b91
|
[
"MIT"
] | null | null | null |
config/config.exs
|
z0w0/imagism
|
d337d6c76e3e5bd9d5cadd69dfc1d80e66688b91
|
[
"MIT"
] | null | null | null |
config/config.exs
|
z0w0/imagism
|
d337d6c76e3e5bd9d5cadd69dfc1d80e66688b91
|
[
"MIT"
] | null | null | null |
import Config

# Runtime-tunable settings for the Imagism image server.
#
# NOTE: System.get_env/1 returns a string (or nil). The port was previously
# `System.get_env("PORT") || 8000`, which produced the *string* "8000-style"
# value whenever PORT was set but the *integer* 8000 otherwise — two
# different types for the same key. Parse explicitly so :port is always an
# integer.
config :imagism,
  port: String.to_integer(System.get_env("PORT") || "8000"),
  adapter: System.get_env("IMAGISM_ADAPTER") || "file",
  file_path: System.get_env("IMAGISM_FILE_PATH") || "test/images",
  s3_bucket: System.get_env("IMAGISM_S3_BUCKET"),
  s3_region: System.get_env("IMAGISM_S3_REGION")

# Cache tuning: GC sweep interval (seconds) and memory ceiling (bytes);
# :shards backend for partitioned ETS storage.
config :imagism, Imagism.Cache,
  gc_interval: 1800,
  allocated_memory: 1_000_000_000,
  backend: :shards

# Surface the image-operation parameters as console log metadata.
config :logger, :console,
  metadata: [:resize, :fit, :w, :h, :crop, :brighten, :blur, :rotate, :flip]
| 29.764706
| 76
| 0.715415
|
93e7d59829ba96f112df723d15c4a15a0ed78061
| 942
|
ex
|
Elixir
|
apps/omg_watcher/lib/omg_watcher/exit_processor/standard_exit_challenge.ex
|
kendricktan/elixir-omg
|
834c103fd5c4b9e063c1d32b9b4e5728abb64009
|
[
"Apache-2.0"
] | null | null | null |
apps/omg_watcher/lib/omg_watcher/exit_processor/standard_exit_challenge.ex
|
kendricktan/elixir-omg
|
834c103fd5c4b9e063c1d32b9b4e5728abb64009
|
[
"Apache-2.0"
] | null | null | null |
apps/omg_watcher/lib/omg_watcher/exit_processor/standard_exit_challenge.ex
|
kendricktan/elixir-omg
|
834c103fd5c4b9e063c1d32b9b4e5728abb64009
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.ExitProcessor.StandardExitChallenge do
  @moduledoc """
  Represents a challenge to a standard exit
  """

  # Data-only struct: the fields presumably mirror the arguments of the
  # on-chain challenge call (exit identifier, challenging transaction bytes,
  # index of the input being challenged, and its signature) — confirm
  # against the contract interface.
  defstruct [:exit_id, :txbytes, :input_index, :sig]

  @type t() :: %__MODULE__{
          exit_id: non_neg_integer(),
          txbytes: String.t(),
          input_index: non_neg_integer(),
          sig: String.t()
        }
end
| 32.482759
| 74
| 0.70913
|
93e7f0920fa1dd8b22dedc2350cc63baf1d21159
| 3,021
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v2/model/orderinvoices_create_refund_invoice_request.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/orderinvoices_create_refund_invoice_request.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/orderinvoices_create_refund_invoice_request.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | 1
|
2020-11-10T16:58:27.000Z
|
2020-11-10T16:58:27.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# NOTE(review): auto-generated by swagger-codegen (see header above) — code
# intentionally left byte-identical; regenerate rather than hand-edit.
defmodule GoogleApi.Content.V2.Model.OrderinvoicesCreateRefundInvoiceRequest do
  @moduledoc """

  ## Attributes

  - invoiceId (String.t): The ID of the invoice. Defaults to: `null`.
  - operationId (String.t): The ID of the operation, unique across all operations for a given order. Defaults to: `null`.
  - refundOnlyOption (OrderinvoicesCustomBatchRequestEntryCreateRefundInvoiceRefundOption): Option to create a refund-only invoice. Exactly one of refundOnlyOption or returnOption must be provided. Defaults to: `null`.
  - returnOption (OrderinvoicesCustomBatchRequestEntryCreateRefundInvoiceReturnOption): Option to create an invoice for a refund and mark all items within the invoice as returned. Exactly one of refundOnlyOption or returnOption must be provided. Defaults to: `null`.
  - shipmentInvoices ([ShipmentInvoice]): Invoice details for different shipment groups. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :invoiceId => any(),
          :operationId => any(),
          :refundOnlyOption =>
            GoogleApi.Content.V2.Model.OrderinvoicesCustomBatchRequestEntryCreateRefundInvoiceRefundOption.t(),
          :returnOption =>
            GoogleApi.Content.V2.Model.OrderinvoicesCustomBatchRequestEntryCreateRefundInvoiceReturnOption.t(),
          :shipmentInvoices => list(GoogleApi.Content.V2.Model.ShipmentInvoice.t())
        }

  # field/2-3 macros come from GoogleApi.Gax.ModelBase and register each
  # attribute for JSON (de)serialization.
  field(:invoiceId)
  field(:operationId)

  field(
    :refundOnlyOption,
    as:
      GoogleApi.Content.V2.Model.OrderinvoicesCustomBatchRequestEntryCreateRefundInvoiceRefundOption
  )

  field(
    :returnOption,
    as:
      GoogleApi.Content.V2.Model.OrderinvoicesCustomBatchRequestEntryCreateRefundInvoiceReturnOption
  )

  field(:shipmentInvoices, as: GoogleApi.Content.V2.Model.ShipmentInvoice, type: :list)
end
# Auto-generated: delegates Poison decoding to the model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrderinvoicesCreateRefundInvoiceRequest do
  def decode(value, options) do
    GoogleApi.Content.V2.Model.OrderinvoicesCreateRefundInvoiceRequest.decode(value, options)
  end
end
# Auto-generated: delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrderinvoicesCreateRefundInvoiceRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.383562
| 266
| 0.764978
|
93e7fa23624e69f112ccca3fbe02b198cb06ded4
| 3,330
|
exs
|
Elixir
|
apps/neoscan_web/test/neoscan_web/schema/query/block_test.exs
|
vincentgeneste/neo-scan
|
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
|
[
"MIT"
] | 75
|
2017-07-23T02:45:32.000Z
|
2021-12-13T11:04:17.000Z
|
apps/neoscan_web/test/neoscan_web/schema/query/block_test.exs
|
vincentgeneste/neo-scan
|
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
|
[
"MIT"
] | 252
|
2017-07-13T19:36:00.000Z
|
2021-07-28T18:40:00.000Z
|
apps/neoscan_web/test/neoscan_web/schema/query/block_test.exs
|
vincentgeneste/neo-scan
|
4a654575331eeb3eb12d4fd61696a7bd6dbca3ce
|
[
"MIT"
] | 87
|
2017-07-23T02:45:34.000Z
|
2022-03-02T14:54:27.000Z
|
defmodule NeoscanWeb.Schema.Query.BlockTest do
  use NeoscanWeb.ConnCase

  import NeoscanWeb.Factory

  # GraphQL query for the paginated block listing. The heredoc is sent to
  # the API verbatim, so it must stay byte-identical.
  @query """
  query Blocks($paginator: Paginator){
    blocks(paginator: $paginator){
      blockRows{
        cumulative_sys_fee
        gasGenerated
        hash
        index
        inserted_at
        lag
        merkle_root
        next_consensus
        nonce
        script {
          invocation
          verification
        }
      },
      pagination{
        totalCount
        pageSize
        page
      }
    }
  }
  """

  # With 3 blocks inserted and a page size of 2, page 1 must return 2 rows
  # and a pagination total of 3.
  test "all", %{conn: conn} do
    insert(:block)
    insert(:block)
    insert(:block)

    conn =
      post(
        conn,
        "/graphql",
        %{
          query: @query,
          variables: %{
            paginator: %{
              page_size: 2,
              page: 1
            }
          }
        }
      )

    body = json_response(conn, 200)

    %{
      "data" => %{
        "blocks" => %{
          "blockRows" => rows,
          "pagination" => %{
            "page" => 1,
            "pageSize" => 2,
            "totalCount" => 3
          }
        }
      }
    } = body

    assert length(rows) == 2
  end

  # Single-block query, parameterized by either index or hash via $params.
  # Note: this rebinds @query, so the tests below use this document.
  @query """
  query Block($params: Params!){
    block(params: $params){
      cumulative_sys_fee
      gasGenerated
      hash
      index
      inserted_at
      lag
      merkle_root
      next_consensus
      nonce
      script {
        invocation
        verification
      }
    }
  }
  """

  # Fetch by index: the pin (^index) asserts the API echoes back the index
  # of the inserted block.
  test "get one block by index", %{conn: conn} do
    block = insert(:block)
    index = block.index

    conn =
      post(
        conn,
        "/graphql",
        %{
          query: @query,
          variables: %{
            params: %{
              index: index
            }
          }
        }
      )

    body = json_response(conn, 200)

    assert %{
             "data" => %{
               "block" => %{
                 "cumulative_sys_fee" => _,
                 "gasGenerated" => _,
                 "hash" => _,
                 "index" => ^index,
                 "inserted_at" => _,
                 "lag" => _,
                 "merkle_root" => _,
                 "next_consensus" => _,
                 "nonce" => _,
                 "script" => %{
                   "invocation" => _,
                   "verification" => _
                 }
               }
             }
           } = body
  end

  # Fetch by hash: the stored hash is binary, so it is hex-encoded
  # (lowercase) before being sent and pinned in the response.
  test "get one block by hash", %{conn: conn} do
    block = insert(:block)

    hash =
      block.hash
      |> Base.encode16(case: :lower)

    conn =
      post(
        conn,
        "/graphql",
        %{
          query: @query,
          variables: %{
            params: %{
              hash: hash
            }
          }
        }
      )

    body = json_response(conn, 200)

    assert %{
             "data" => %{
               "block" => %{
                 "cumulative_sys_fee" => _,
                 "gasGenerated" => _,
                 "hash" => ^hash,
                 "index" => _,
                 "inserted_at" => _,
                 "lag" => _,
                 "merkle_root" => _,
                 "next_consensus" => _,
                 "nonce" => _,
                 "script" => %{
                   "invocation" => _,
                   "verification" => _
                 }
               }
             }
           } = body
  end
end
| 19.028571
| 49
| 0.37958
|
93e81687926bc09371a0111be6f279b8421904ad
| 22,017
|
ex
|
Elixir
|
lib/horde/dynamic_supervisor_impl.ex
|
TSSands/horde
|
b74b3a2513c19fd86b2537cc052aa340fc683cdb
|
[
"MIT"
] | null | null | null |
lib/horde/dynamic_supervisor_impl.ex
|
TSSands/horde
|
b74b3a2513c19fd86b2537cc052aa340fc683cdb
|
[
"MIT"
] | null | null | null |
lib/horde/dynamic_supervisor_impl.ex
|
TSSands/horde
|
b74b3a2513c19fd86b2537cc052aa340fc683cdb
|
[
"MIT"
] | null | null | null |
defmodule Horde.DynamicSupervisor.Member do
  @moduledoc false

  # Bookkeeping record for one member (node-local supervisor) of a
  # Horde.DynamicSupervisor cluster. The :status lifecycle below is written
  # into the shared CRDT by Horde.DynamicSupervisorImpl.
  @type t :: %Horde.DynamicSupervisor.Member{}
  @type status :: :uninitialized | :alive | :shutting_down | :dead

  defstruct [:status, :name]
end
defmodule Horde.DynamicSupervisorImpl do
  @moduledoc false

  # GenServer behind a Horde.DynamicSupervisor: tracks cluster members and
  # supervised processes in a shared DeltaCrdt, decides (via the configured
  # distribution strategy) which member should run each child, and hands
  # processes off when membership changes. Code left as-is; this module's
  # behavior depends heavily on exact statement order around CRDT mutations.
  require Logger
  use GenServer

  # State layout:
  #   members            - %{member_name => 1} (the declared member set)
  #   members_info       - %{member_name => %Member{}} (status per member)
  #   processes_by_id    - %{child_id => {member_name, child_spec, pid}}
  #   process_pid_to_id  - reverse index of the above by pid
  #   waiting_for_quorum - callers blocked in :wait_for_quorum
  #   supervisor_ref_to_name / name_to_supervisor_ref - monitor bookkeeping
  defstruct name: nil,
            members: %{},
            members_info: %{},
            processes_by_id: %{},
            process_pid_to_id: %{},
            local_process_count: 0,
            waiting_for_quorum: [],
            supervisor_ref_to_name: %{},
            name_to_supervisor_ref: %{},
            shutting_down: false,
            supervisor_options: [],
            distribution_strategy: Horde.UniformDistribution

  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, Keyword.take(opts, [:name]))
  end

  ## GenServer callbacks

  # Naming conventions for the sibling processes started per Horde instance.
  defp crdt_name(name), do: :"#{name}.Crdt"
  defp supervisor_name(name), do: :"#{name}.ProcessesSupervisor"

  # Normalizes a member name to the {name, node} form used as CRDT keys.
  defp fully_qualified_name({name, node}) when is_atom(name) and is_atom(node), do: {name, node}
  defp fully_qualified_name(name) when is_atom(name), do: {name, node()}

  @doc false
  def init(options) do
    name = Keyword.get(options, :name)

    Logger.info("Starting #{inspect(__MODULE__)} with name #{inspect(name)}")

    # Trap exits so terminate/shutdown bookkeeping can run.
    Process.flag(:trap_exit, true)

    state =
      %__MODULE__{
        supervisor_options: options,
        name: name
      }
      |> Map.merge(Map.new(Keyword.take(options, [:distribution_strategy])))

    state = set_own_node_status(state)

    # Member setup is deferred to handle_continue so init returns quickly.
    {:ok, state, {:continue, {:set_members, Keyword.get(options, :members)}}}
  end

  # :members option: nil = leave membership alone, :auto = discover via
  # NodeListener, otherwise an explicit member list.
  def handle_continue({:set_members, nil}, state), do: {:noreply, state}

  def handle_continue({:set_members, :auto}, state) do
    state =
      state.name
      |> Horde.NodeListener.make_members()
      |> set_members(state)

    {:noreply, state}
  end

  def handle_continue({:set_members, members}, state) do
    {:noreply, set_members(members, state)}
  end

  # Callback handed to DeltaCrdt; forwards diffs to this server by name.
  def on_diffs(name, diffs) do
    try do
      send(name, {:crdt_update, diffs})
    rescue
      ArgumentError ->
        # the process might already been stopped
        :ok
    end
  end

  # Our own Member record, derived from current shutdown state.
  defp node_info(state) do
    %Horde.DynamicSupervisor.Member{
      status: node_status(state),
      name: fully_qualified_name(state.name)
    }
  end

  defp node_status(%{shutting_down: false}), do: :alive
  defp node_status(%{shutting_down: true}), do: :shutting_down

  @doc false
  # Marks this node as shutting down and publishes the status to the CRDT.
  def handle_call(:horde_shutting_down, _f, state) do
    state =
      %{state | shutting_down: true}
      |> set_own_node_status()

    {:reply, :ok, state}
  end

  def handle_call(:get_telemetry, _from, state) do
    telemetry = %{
      global_supervised_process_count: map_size(state.processes_by_id),
      local_supervised_process_count: state.local_process_count
    }

    {:reply, telemetry, state}
  end

  # Replies immediately when quorum holds; otherwise parks the caller until
  # handle_quorum_change/1 releases it.
  def handle_call(:wait_for_quorum, from, state) do
    if state.distribution_strategy.has_quorum?(Map.values(members(state))) do
      {:reply, :ok, state}
    else
      {:noreply, %{state | waiting_for_quorum: [from | state.waiting_for_quorum]}}
    end
  end

  def handle_call({:set_members, members}, _from, state) do
    {:reply, :ok, set_members(members, state)}
  end

  def handle_call(:members, _from, state) do
    {:reply, Map.keys(state.members), state}
  end

  # Terminates a child by pid: handled locally when we own it, proxied to
  # the owning member otherwise.
  def handle_call({:terminate_child, child_pid} = msg, from, state) do
    this_name = fully_qualified_name(state.name)

    with child_id when not is_nil(child_id) <- Map.get(state.process_pid_to_id, child_pid),
         {^this_name, child, _child_pid} <- Map.get(state.processes_by_id, child_id),
         {reply, new_state} <- terminate_child(child, state) do
      {:reply, reply, new_state}
    else
      {other_node, _child_spec, _child_pid} ->
        proxy_to_node(other_node, msg, from, state)

      nil ->
        {:reply, {:error, :not_found}, state}
    end
  end

  def handle_call({:start_child, _child_spec}, _from, %{shutting_down: true} = state),
    do: {:reply, {:error, {:shutting_down, "this node is shutting down."}}, state}

  # Upper bound for randomized child ids (2^128 keyspace).
  @big_number round(:math.pow(2, 128))

  # Starts a child on whichever member the distribution strategy picks:
  # locally when it is us, otherwise proxied to the chosen member.
  def handle_call({:start_child, child_spec} = msg, from, state) do
    this_name = fully_qualified_name(state.name)

    child_spec = randomize_child_id(child_spec)

    case choose_node(child_spec, state) do
      {:ok, %{name: ^this_name}} ->
        {reply, new_state} = add_child(child_spec, state)
        {:reply, reply, new_state}

      {:ok, %{name: other_node_name}} ->
        proxy_to_node(other_node_name, msg, from, state)

      {:error, reason} ->
        {:reply, {:error, reason}, state}
    end
  end

  # Aggregates which_children across every member's ProcessesSupervisor;
  # unreachable members contribute nothing (the :exit is swallowed).
  def handle_call(:which_children, _from, state) do
    which_children =
      Enum.flat_map(members(state), fn
        {_, %{name: {name, node}}} ->
          [{supervisor_name(name), node}]
      end)
      |> Enum.flat_map(fn supervisor_name ->
        try do
          Horde.ProcessesSupervisor.which_children(supervisor_name)
        catch
          :exit, _ -> []
        end
      end)
      |> Enum.map(fn {_id, pid, type, module} -> {:undefined, pid, type, module} end)

    {:reply, which_children, state}
  end

  # Aggregates count_children across members, summing per process type.
  def handle_call(:count_children, _from, state) do
    count =
      Enum.flat_map(members(state), fn
        {_, %{name: {name, node}}} ->
          [{supervisor_name(name), node}]
      end)
      |> Enum.flat_map(fn supervisor_name ->
        try do
          Horde.ProcessesSupervisor.count_children(supervisor_name)
        catch
          :exit, _ -> [nil]
        end
      end)
      |> Enum.reject(fn
        nil -> true
        _ -> false
      end)
      |> Enum.reduce(%{}, fn {process_type, count}, acc ->
        Map.update(acc, process_type, count, &(&1 + count))
      end)

    {:reply, count, state}
  end

  def handle_call({:update_child_pid, child_id, new_pid}, _from, state) do
    {:reply, :ok, set_child_pid(state, child_id, new_pid)}
  end

  # Rewrites a child's pid in both the CRDT and the local indexes (e.g.
  # after a local restart); no-op when the child id is unknown.
  defp set_child_pid(state, child_id, new_child_pid) do
    case Map.get(state.processes_by_id, child_id) do
      {name, child_spec, old_pid} ->
        :ok =
          DeltaCrdt.mutate(
            crdt_name(state.name),
            :add,
            [
              {:process, child_spec.id},
              {fully_qualified_name(state.name), child_spec, new_child_pid}
            ],
            :infinity
          )

        new_processes_by_id =
          Map.put(state.processes_by_id, child_id, {name, child_spec, new_child_pid})

        new_process_pid_to_id =
          Map.put(state.process_pid_to_id, new_child_pid, child_id) |> Map.delete(old_pid)

        %{
          state
          | processes_by_id: new_processes_by_id,
            process_pid_to_id: new_process_pid_to_id
        }

      nil ->
        state
    end
  end

  def handle_cast({:relinquish_child_process, child_id}, state) do
    # signal to the rest of the nodes that this process has been relinquished
    # (to the Horde!) by its parent
    {_, child, _} = Map.get(state.processes_by_id, child_id)

    :ok =
      DeltaCrdt.mutate(
        crdt_name(state.name),
        :add,
        [{:process, child.id}, {nil, child}]
      )

    {:noreply, state}
  end

  # TODO think of a better name than "disown_child_process"
  # Drops all local + CRDT records of a child without terminating it.
  def handle_cast({:disown_child_process, child_id}, state) do
    {{_, _, child_pid}, new_processes_by_id} = Map.pop(state.processes_by_id, child_id)

    new_state = %{
      state
      | processes_by_id: new_processes_by_id,
        process_pid_to_id: Map.delete(state.process_pid_to_id, child_pid),
        local_process_count: state.local_process_count - 1
    }

    :ok = DeltaCrdt.mutate(crdt_name(state.name), :remove, [{:process, child_id}], :infinity)

    {:noreply, new_state}
  end

  # Assigns a fresh random id so each start gets a unique CRDT key.
  defp randomize_child_id(child) do
    Map.put(child, :id, :rand.uniform(@big_number))
  end

  # Forwards an operation to the member that owns it; the remote server
  # replies directly to `reply_to`. Fails fast when the member is not alive.
  defp proxy_to_node(node_name, message, reply_to, state) do
    case Map.get(members(state), node_name) do
      %{status: :alive} ->
        send(node_name, {:proxy_operation, message, reply_to})
        {:noreply, state}

      _ ->
        {:reply,
         {:error,
          {:node_dead_or_shutting_down,
           "the node responsible for this process is shutting down or dead, try again soon"}},
         state}
    end
  end

  # Publishes our own Member record to the CRDT; the force=false variant
  # skips the write when nothing changed.
  defp set_own_node_status(state, force \\ false)

  defp set_own_node_status(state, false) do
    if Map.get(state.members_info, fully_qualified_name(state.name)) == node_info(state) do
      state
    else
      set_own_node_status(state, true)
    end
  end

  defp set_own_node_status(state, true) do
    DeltaCrdt.mutate(
      crdt_name(state.name),
      :add,
      [{:member_node_info, fully_qualified_name(state.name)}, node_info(state)],
      :infinity
    )

    new_members_info =
      Map.put(state.members_info, fully_qualified_name(state.name), node_info(state))

    Map.put(state, :members_info, new_members_info)
  end

  # Records a member as :dead in the CRDT (local members_info is updated
  # later when the CRDT diff comes back).
  defp mark_dead(state, name) do
    DeltaCrdt.mutate(
      crdt_name(state.name),
      :add,
      [{:member_node_info, name}, %Horde.DynamicSupervisor.Member{name: name, status: :dead}],
      :infinity
    )

    state
  end

  def handle_info({:set_members, members}, state) do
    {:noreply, set_members(members, state)}
  end

  # Executes a proxied call locally, replying to the original caller.
  def handle_info({:proxy_operation, msg, reply_to}, state) do
    case handle_call(msg, reply_to, state) do
      {:reply, reply, new_state} ->
        GenServer.reply(reply_to, reply)
        {:noreply, new_state}

      {:noreply, new_state} ->
        {:noreply, new_state}
    end
  end

  # A monitored member supervisor went down: mark it dead and drop the
  # monitor bookkeeping for its ref.
  def handle_info({:DOWN, ref, _type, _pid, _reason}, state) do
    case Map.get(state.supervisor_ref_to_name, ref) do
      nil ->
        {:noreply, state}

      name ->
        new_state =
          mark_dead(state, name)
          |> set_own_node_status()
          |> Map.put(:supervisor_ref_to_name, Map.delete(state.supervisor_ref_to_name, ref))
          |> Map.put(:name_to_supervisor_ref, Map.delete(state.name_to_supervisor_ref, name))

        {:noreply, new_state}
    end
  end

  @doc false
  def handle_info({:processes_updated, reply_to}, %{shutting_down: true} = state) do
    GenServer.reply(reply_to, :ok)
    {:noreply, state}
  end

  # A CRDT diff batch arrived: fold it into local state; when membership
  # changed, also re-monitor, re-evaluate quorum, and rebalance processes.
  def handle_info({:crdt_update, diffs}, state) do
    new_state =
      update_members(state, diffs)
      |> update_processes(diffs)

    new_state =
      if has_membership_changed?(diffs) do
        monitor_supervisors(new_state)
        |> set_own_node_status()
        |> handle_quorum_change()
        |> set_crdt_neighbours()
        |> handoff_processes()
      else
        new_state
      end

    {:noreply, new_state}
  end

  # True when any diff in the batch touches the member set or member info.
  def has_membership_changed?([{:add, {:member_node_info, _}, _} = _diff | _diffs]), do: true
  def has_membership_changed?([{:remove, {:member_node_info, _}} = _diff | _diffs]), do: true
  def has_membership_changed?([{:add, {:member, _}, _} = _diff | _diffs]), do: true
  def has_membership_changed?([{:remove, {:member, _}} = _diff | _diffs]), do: true

  def has_membership_changed?([_diff | diffs]) do
    has_membership_changed?(diffs)
  end

  def has_membership_changed?([]), do: false

  # Re-evaluates every known process against the distribution strategy and
  # moves or adopts processes as needed (four cases, commented inline).
  defp handoff_processes(state) do
    this_node = fully_qualified_name(state.name)

    Map.values(state.processes_by_id)
    |> Enum.reduce(state, fn {current_node, child_spec, _child_pid}, state ->
      case choose_node(child_spec, state) do
        {:ok, %{name: chosen_node}} ->
          current_member = Map.get(state.members_info, current_node)

          case {current_node, chosen_node} do
            {same_node, same_node} ->
              # process is running on the node on which it belongs

              state

            {^this_node, _other_node} ->
              # process is running here but belongs somewhere else

              case state.supervisor_options[:process_redistribution] do
                :active ->
                  handoff_child(child_spec, state)

                :passive ->
                  state
              end

            {_current_node, ^this_node} ->
              # process is running on another node but belongs here

              case current_member do
                %{status: :dead} ->
                  {_response, state} = add_child(randomize_child_id(child_spec), state)
                  state

                _ ->
                  state
              end

            {_other_node1, _other_node2} ->
              # process is neither running here nor belongs here

              state
          end

        {:error, _reason} ->
          state
      end
    end)
  end

  # Folds each diff about a :process key into local state.
  defp update_processes(state, [diff | diffs]) do
    update_process(state, diff)
    |> update_processes(diffs)
  end

  defp update_processes(state, []), do: state

  # A relinquished process ({nil, child_spec}) is adopted locally only when
  # the distribution strategy assigns it to this node.
  defp update_process(state, {:add, {:process, _child_id}, {nil, child_spec}}) do
    this_name = fully_qualified_name(state.name)

    case choose_node(child_spec, state) do
      {:ok, %{name: ^this_name}} ->
        {_resp, new_state} = add_child(child_spec, state)
        new_state

      {:ok, _} ->
        # matches another node, do nothing
        state

      {:error, _reason} ->
        # error (could be quorum), do nothing
        state
    end
  end

  # A running process record: update both forward and reverse indexes,
  # evicting any stale pid mapping.
  defp update_process(state, {:add, {:process, child_id}, {node, child_spec, child_pid}}) do
    new_process_pid_to_id =
      case Map.get(state.processes_by_id, child_id) do
        {_, _, old_pid} -> Map.delete(state.process_pid_to_id, old_pid)
        nil -> state.process_pid_to_id
      end
      |> Map.put(child_pid, child_id)

    new_processes_by_id = Map.put(state.processes_by_id, child_id, {node, child_spec, child_pid})

    Map.put(state, :processes_by_id, new_processes_by_id)
    |> Map.put(:process_pid_to_id, new_process_pid_to_id)
  end

  defp update_process(state, {:remove, {:process, child_id}}) do
    {value, new_processes_by_id} = Map.pop(state.processes_by_id, child_id)

    new_process_pid_to_id =
      case value do
        {_node_name, _child_spec, child_pid} ->
          Map.delete(state.process_pid_to_id, child_pid)

        nil ->
          state.process_pid_to_id
      end

    Map.put(state, :processes_by_id, new_processes_by_id)
    |> Map.put(:process_pid_to_id, new_process_pid_to_id)
  end

  defp update_process(state, _), do: state

  # Folds each diff about a :member / :member_node_info key into state.
  defp update_members(state, [diff | diffs]) do
    update_member(state, diff)
    |> update_members(diffs)
  end

  defp update_members(state, []), do: state

  defp update_member(state, {:add, {:member, member}, 1}) do
    new_members = Map.put_new(state.members, member, 1)

    new_members_info = Map.put_new(state.members_info, member, uninitialized_member(member))

    Map.put(state, :members, new_members)
    |> Map.put(:members_info, new_members_info)
  end

  defp update_member(state, {:remove, {:member, member}}) do
    new_members = Map.delete(state.members, member)

    Map.put(state, :members, new_members)
  end

  defp update_member(state, {:add, {:member_node_info, member}, node_info}) do
    new_members = Map.put(state.members_info, member, node_info)

    Map.put(state, :members_info, new_members)
  end

  defp update_member(state, {:remove, {:member_node_info, member}}) do
    new_members = Map.delete(state.members_info, member)

    Map.put(state, :members_info, new_members)
  end

  defp update_member(state, _), do: state

  defp uninitialized_member(member) do
    %Horde.DynamicSupervisor.Member{status: :uninitialized, name: member}
  end

  defp member_names(names) do
    Enum.map(names, fn
      {name, node} -> {name, node}
      name when is_atom(name) -> {name, node()}
    end)
  end

  # Replaces the member set: keeps known info for surviving members,
  # removes departed members from the CRDT, adds new ones, then refreshes
  # monitors / quorum / CRDT neighbours.
  defp set_members(members, state) do
    members = Enum.map(members, &fully_qualified_name/1)

    uninitialized_new_members_info =
      member_names(members)
      |> Map.new(fn name ->
        {name, %Horde.DynamicSupervisor.Member{name: name, status: :uninitialized}}
      end)

    new_members_info =
      Map.merge(
        uninitialized_new_members_info,
        Map.take(state.members_info, Map.keys(uninitialized_new_members_info))
      )

    new_members = Map.new(new_members_info, fn {member, _} -> {member, 1} end)

    new_member_names = Map.keys(new_members_info) |> MapSet.new()
    existing_member_names = Map.keys(state.members) |> MapSet.new()

    Enum.each(MapSet.difference(existing_member_names, new_member_names), fn removed_member ->
      DeltaCrdt.mutate(crdt_name(state.name), :remove, [{:member, removed_member}], :infinity)

      DeltaCrdt.mutate(
        crdt_name(state.name),
        :remove,
        [{:member_node_info, removed_member}],
        :infinity
      )
    end)

    Enum.each(MapSet.difference(new_member_names, existing_member_names), fn added_member ->
      DeltaCrdt.mutate(crdt_name(state.name), :add, [{:member, added_member}, 1], :infinity)
    end)

    %{state | members: new_members, members_info: new_members_info}
    |> monitor_supervisors()
    |> handle_quorum_change()
    |> set_crdt_neighbours()
  end

  # Quorum regained: release parked callers. Quorum lost: stop all locally
  # supervised processes.
  defp handle_quorum_change(state) do
    if state.distribution_strategy.has_quorum?(Map.values(members(state))) do
      Enum.each(state.waiting_for_quorum, fn from -> GenServer.reply(from, :ok) end)
      %{state | waiting_for_quorum: []}
    else
      shut_down_all_processes(state)
    end
  end

  defp shut_down_all_processes(state) do
    case Enum.any?(state.processes_by_id, processes_for_node(fully_qualified_name(state.name))) do
      false ->
        state

      true ->
        :ok = Horde.ProcessesSupervisor.stop(supervisor_name(state.name))
        state
    end
  end

  # Tells our CRDT replica which peer CRDTs to gossip with (every member
  # except ourselves).
  defp set_crdt_neighbours(state) do
    names = Map.keys(state.members) -- [fully_qualified_name(state.name)]

    crdt_names = Enum.map(names, fn {name, node} -> {crdt_name(name), node} end)

    send(crdt_name(state.name), {:set_neighbours, crdt_names})

    state
  end

  # Predicate over processes_by_id entries owned by `node_name`.
  defp processes_for_node(node_name) do
    fn
      {_id, {^node_name, _child_spec, _child_pid}} -> true
      _ -> false
    end
  end

  # Starts monitors for alive members we are not yet monitoring, merging
  # the new refs into both direction maps.
  defp monitor_supervisors(state) do
    new_supervisor_refs =
      Enum.flat_map(members(state), fn
        {name, %{status: :alive}} ->
          [name]

        _ ->
          []
      end)
      |> Enum.reject(fn name ->
        Map.has_key?(state.name_to_supervisor_ref, name)
      end)
      |> Map.new(fn name ->
        {name, Process.monitor(name)}
      end)

    new_supervisor_ref_to_name =
      Map.merge(
        state.supervisor_ref_to_name,
        Map.new(new_supervisor_refs, fn {k, v} -> {v, k} end)
      )

    new_name_to_supervisor_ref = Map.merge(state.name_to_supervisor_ref, new_supervisor_refs)

    Map.put(state, :supervisor_ref_to_name, new_supervisor_ref_to_name)
    |> Map.put(:name_to_supervisor_ref, new_name_to_supervisor_ref)
  end

  # Records a newly started local child in the CRDT and local indexes.
  defp update_state_with_child(child, child_pid, state) do
    :ok =
      DeltaCrdt.mutate(
        crdt_name(state.name),
        :add,
        [{:process, child.id}, {fully_qualified_name(state.name), child, child_pid}],
        :infinity
      )

    new_processes_by_id =
      Map.put(
        state.processes_by_id,
        child.id,
        {fully_qualified_name(state.name), child, child_pid}
      )

    new_process_pid_to_id = Map.put(state.process_pid_to_id, child_pid, child.id)

    new_local_process_count = state.local_process_count + 1

    Map.put(state, :processes_by_id, new_processes_by_id)
    |> Map.put(:process_pid_to_id, new_process_pid_to_id)
    |> Map.put(:local_process_count, new_local_process_count)
  end

  defp handoff_child(child, state) do
    {_, _, child_pid} = Map.get(state.processes_by_id, child.id)

    # we send a special exit signal to the process here.
    # when the process has exited, Horde.ProcessSupervisor
    # will cast `{:relinquish_child_process, child_id}`
    # to this process for cleanup.
    Horde.ProcessesSupervisor.send_exit_signal(
      supervisor_name(state.name),
      child_pid,
      {:shutdown, :process_redistribution}
    )

    new_state = Map.put(state, :local_process_count, state.local_process_count - 1)

    new_state
  end

  # Terminates a local child and removes all record of it (local + CRDT).
  defp terminate_child(child, state) do
    child_id = child.id

    reply =
      Horde.ProcessesSupervisor.terminate_child_by_id(
        supervisor_name(state.name),
        child_id
      )

    new_state =
      Map.put(state, :processes_by_id, Map.delete(state.processes_by_id, child_id))
      |> Map.put(:local_process_count, state.local_process_count - 1)

    :ok = DeltaCrdt.mutate(crdt_name(state.name), :remove, [{:process, child_id}], :infinity)

    {reply, new_state}
  end

  defp add_child(child, state) do
    {[response], new_state} = add_children([child], state)
    {response, new_state}
  end

  # Starts each child under the local ProcessesSupervisor, recording the
  # successful ones in state and collecting per-child responses.
  defp add_children(children, state) do
    Enum.map(children, fn child_spec ->
      case Horde.ProcessesSupervisor.start_child(supervisor_name(state.name), child_spec) do
        {:ok, child_pid} ->
          {{:ok, child_pid}, child_spec}

        {:ok, child_pid, term} ->
          {{:ok, child_pid, term}, child_spec}

        {:error, error} ->
          {:error, error}

        :ignore ->
          :ignore
      end
    end)
    |> Enum.reduce({[], state}, fn
      {{:ok, child_pid} = resp, child_spec}, {responses, state} ->
        {[resp | responses], update_state_with_child(child_spec, child_pid, state)}

      {{:ok, child_pid, _term} = resp, child_spec}, {responses, state} ->
        {[resp | responses], update_state_with_child(child_spec, child_pid, state)}

      {:error, error}, {responses, state} ->
        {[{:error, error} | responses], state}

      :ignore, {responses, state} ->
        {[:ignore | responses], state}
    end)
  end

  # Hashes the child spec (excluding its random :id) so a given spec always
  # maps to the same member, then asks the strategy to pick one.
  defp choose_node(child_spec, state) do
    distribution_id = :erlang.phash2(Map.drop(child_spec, [:id]))

    state.distribution_strategy.choose_node(
      distribution_id,
      Map.values(members(state))
    )
  end

  # members_info restricted to the declared member set.
  defp members(state) do
    Map.take(state.members_info, Map.keys(state.members))
  end
end
| 28.780392
| 98
| 0.641913
|
93e8272d8cefe6eb765bfadcb5fa9c1dcf96b87c
| 678
|
exs
|
Elixir
|
test/wallaby/phantom/log_store_test.exs
|
schnittchen/wallaby
|
30be89cc78087e53e5b47a86043c2bbe8566bbf4
|
[
"MIT"
] | null | null | null |
test/wallaby/phantom/log_store_test.exs
|
schnittchen/wallaby
|
30be89cc78087e53e5b47a86043c2bbe8566bbf4
|
[
"MIT"
] | null | null | null |
test/wallaby/phantom/log_store_test.exs
|
schnittchen/wallaby
|
30be89cc78087e53e5b47a86043c2bbe8566bbf4
|
[
"MIT"
] | null | null | null |
defmodule Wallaby.Phantom.LogStoreTest do
  use ExUnit.Case, async: true

  alias Wallaby.Phantom.LogStore

  # Three sample PhantomJS log entries, ordered by timestamp; @l2 overlaps
  # between the two append calls below to exercise de-duplication.
  @l1 %{"level" => "INFO", "message" => "l1 (:)", "timestamp" => 1470795015152}
  @l2 %{"level" => "INFO", "message" => "l2 (:)", "timestamp" => 1470795015290}
  @l3 %{"level" => "WARNING", "message" => "l3 (:)", "timestamp" => 1470795015345}
  @session "123abc"

  describe "append_logs/2" do
    test "only appends new logs" do
      LogStore.start_link

      # Second append re-sends @l2; only the genuinely new @l3 is returned,
      # and the stored log keeps each entry exactly once, in order.
      assert LogStore.append_logs(@session, [@l1, @l2]) == [@l1, @l2]
      assert LogStore.append_logs(@session, [@l2, @l3]) == [@l3]
      assert LogStore.get_logs(@session) == [@l1, @l2, @l3]
    end
  end
end
| 32.285714
| 82
| 0.610619
|
93e843747c377745c8e5e6e9ea651fc6fb7a1fb8
| 245
|
ex
|
Elixir
|
web/controllers/movie_api_controller.ex
|
WillsonSmith/similarfilms_phoenix
|
2466a3796ace5e0a09d345da3c76d90cf7747c13
|
[
"MIT"
] | null | null | null |
web/controllers/movie_api_controller.ex
|
WillsonSmith/similarfilms_phoenix
|
2466a3796ace5e0a09d345da3c76d90cf7747c13
|
[
"MIT"
] | null | null | null |
web/controllers/movie_api_controller.ex
|
WillsonSmith/similarfilms_phoenix
|
2466a3796ace5e0a09d345da3c76d90cf7747c13
|
[
"MIT"
] | null | null | null |
defmodule SimilarfilmsPhoenix.MovieApiController do
  use SimilarfilmsPhoenix.Web, :controller

  alias SimilarfilmsPhoenix.Movie

  @doc """
  Renders every movie in the database as JSON via the "index.json" view.
  """
  def index(conn, _params) do
    all_movies = Repo.all(Movie)
    render(conn, "index.json", movies: all_movies)
  end
end
| 24.5
| 51
| 0.767347
|
93e8492481c2f1a8e09c5f563e451127888a514c
| 663
|
ex
|
Elixir
|
lib/rclex/job_queue.ex
|
rclex/rclex
|
978095d43c7bd18f20ab9e2ad2646a8201085db5
|
[
"Apache-2.0"
] | 42
|
2021-05-05T09:11:33.000Z
|
2022-02-12T11:57:12.000Z
|
lib/rclex/job_queue.ex
|
rclex/rclex
|
978095d43c7bd18f20ab9e2ad2646a8201085db5
|
[
"Apache-2.0"
] | 49
|
2021-05-05T08:31:38.000Z
|
2022-03-22T10:39:57.000Z
|
lib/rclex/job_queue.ex
|
rclex/rclex
|
978095d43c7bd18f20ab9e2ad2646a8201085db5
|
[
"Apache-2.0"
] | 1
|
2021-05-31T09:28:37.000Z
|
2021-05-31T09:28:37.000Z
|
defmodule Rclex.JobQueue do
  # `require Rclex.Macros` was removed: the whole module is visible here
  # and no macro from it is used.
  require Logger

  use GenServer, restart: :transient

  @moduledoc """
  A FIFO queue of jobs backed by Erlang's `:queue`, exposed as a
  GenServer registered under the name `JobQueue`.

  Enqueue with `GenServer.cast(JobQueue, {:push, job})`; dequeue with
  `GenServer.call(JobQueue, :pop)`, which returns `{:exist_job, job}`
  or `{:no_job, {}}` when the queue is empty.
  """

  def start_link(_) do
    # NOTE(review): registers under the bare atom `JobQueue`, not
    # `__MODULE__` (`Rclex.JobQueue`). Kept as-is — existing callers
    # depend on this registered name.
    GenServer.start_link(__MODULE__, {}, name: JobQueue)
  end

  @impl true
  def init(_) do
    {:ok, :queue.new()}
  end

  @impl true
  def handle_cast({:push, job}, job_queue) do
    {:noreply, :queue.in(job, job_queue)}
  end

  @impl true
  def handle_call(:pop, _from, job_queue) do
    # `:queue.out/1` distinguishes empty and non-empty in a single O(1)
    # amortized call, avoiding the original's O(n) `:queue.len/1` check
    # followed by a second traversal in `:queue.out/1`.
    case :queue.out(job_queue) do
      {{:value, job}, new_job_queue} -> {:reply, {:exist_job, job}, new_job_queue}
      {:empty, _} -> {:reply, {:no_job, {}}, job_queue}
    end
  end
end
| 20.71875
| 60
| 0.634992
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.