hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
79fcb1cb25f7b0aa1fcb8dd5997f566ecd6ab528
| 754
|
ex
|
Elixir
|
apps/service_persist/lib/persist/transformations.ex
|
jdenen/hindsight
|
ef69b4c1a74c94729dd838a9a0849a48c9b6e04c
|
[
"Apache-2.0"
] | 12
|
2020-01-27T19:43:02.000Z
|
2021-07-28T19:46:29.000Z
|
apps/service_persist/lib/persist/transformations.ex
|
jdenen/hindsight
|
ef69b4c1a74c94729dd838a9a0849a48c9b6e04c
|
[
"Apache-2.0"
] | 81
|
2020-01-28T18:07:23.000Z
|
2021-11-22T02:12:13.000Z
|
apps/service_persist/lib/persist/transformations.ex
|
jdenen/hindsight
|
ef69b4c1a74c94729dd838a9a0849a48c9b6e04c
|
[
"Apache-2.0"
] | 10
|
2020-02-13T21:24:09.000Z
|
2020-05-21T18:39:35.000Z
|
defmodule Persist.Transformations do
  @moduledoc """
  State management functions for transformation events.
  """

  import Definition, only: [identifier: 1, identifier: 2]

  @instance Persist.Application.instance()
  @collection "transformations"

  # Merge the transform into the view state under its identifier key.
  @spec persist(Transform.t()) :: :ok
  def persist(transform) do
    key = identifier(transform)
    Brook.ViewState.merge(@collection, key, %{"transform" => transform})
  end

  # Fetch a previously persisted transform for the dataset/subset pair.
  # Any non-matching result from Brook.get/3 (miss or error tuple) is
  # passed back to the caller untouched.
  @spec get(dataset_id :: String.t(), subset_id :: String.t()) ::
          {:ok, Transform.t()} | {:ok, nil} | {:error, term}
  def get(dataset_id, subset_id) do
    key = identifier(dataset_id, subset_id)

    case Brook.get(@instance, @collection, key) do
      {:ok, %{"transform" => transform}} -> Ok.ok(transform)
      other -> other
    end
  end
end
| 29
| 90
| 0.669761
|
79fcb9240ee62c684091e7c1fe623a901ebc7b3c
| 399
|
ex
|
Elixir
|
lib/hologram/template/token_html_encoder.ex
|
gregjohnsonsaltaire/hologram
|
aa8e9ea0d599def864c263cc37cc8ee31f02ac4a
|
[
"MIT"
] | 40
|
2022-01-19T20:27:36.000Z
|
2022-03-31T18:17:41.000Z
|
lib/hologram/template/token_html_encoder.ex
|
gregjohnsonsaltaire/hologram
|
aa8e9ea0d599def864c263cc37cc8ee31f02ac4a
|
[
"MIT"
] | 42
|
2022-02-03T22:52:43.000Z
|
2022-03-26T20:57:32.000Z
|
lib/hologram/template/token_html_encoder.ex
|
gregjohnsonsaltaire/hologram
|
aa8e9ea0d599def864c263cc37cc8ee31f02ac4a
|
[
"MIT"
] | 3
|
2022-02-10T04:00:37.000Z
|
2022-03-08T22:07:45.000Z
|
# Covered in Hologram.Template.Parser integration tests
defmodule Hologram.Template.TokenHTMLEncoder do
  @doc """
  Renders a parser token — or a list of tokens — back into an HTML string.

  Lists are encoded element-by-element and concatenated; `nil` encodes to
  the empty string.
  """
  def encode(arg)

  def encode(tokens) when is_list(tokens) do
    Enum.map_join(tokens, "", &encode/1)
  end

  def encode({:symbol, symbol}), do: to_string(symbol)

  def encode({:string, str}), do: str

  def encode({:whitespace, char}), do: char

  def encode(nil), do: ""
end
| 21
| 55
| 0.686717
|
79fcdbb67a983a8526fa9b8cde6231e23476c67c
| 393
|
ex
|
Elixir
|
lib/ambrosia_web/pow/mailer.ex
|
emeric-martineau/ambrosia
|
74c55d35cf66537d7c8a33ef6057e89d44abd347
|
[
"MIT"
] | 2
|
2020-05-25T05:28:31.000Z
|
2020-05-25T08:10:43.000Z
|
lib/ambrosia_web/pow/mailer.ex
|
emeric-martineau/ambrosia
|
74c55d35cf66537d7c8a33ef6057e89d44abd347
|
[
"MIT"
] | 9
|
2020-05-25T16:39:15.000Z
|
2020-11-11T16:51:37.000Z
|
lib/ambrosia_web/pow/mailer.ex
|
emeric-martineau/ambrosia
|
74c55d35cf66537d7c8a33ef6057e89d44abd347
|
[
"MIT"
] | null | null | null |
defmodule AmbrosiaWeb.Pow.Mailer do
  use Pow.Phoenix.Mailer
  require Logger

  @doc """
  Builds the email payload that `process/1` consumes.
  """
  def cast(%{user: user, subject: subject, text: text, html: html, assigns: _assigns}) do
    # Build email struct to be used in `process/1`
    %{
      to: user.email,
      subject: subject,
      text: text,
      html: html
    }
  end

  @doc """
  "Delivers" the email by writing it to the debug log.
  """
  def process(email) do
    # Send email
    Logger.debug("E-mail sent: #{inspect(email)}")
  end
end
| 24.5625
| 89
| 0.679389
|
79fd0ac19fc43fed6775d4b79dbecfd20db715a2
| 2,152
|
ex
|
Elixir
|
clients/compute/lib/google_api/compute/v1/model/backend_service_list_warning.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/compute/lib/google_api/compute/v1/model/backend_service_list_warning.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/compute/lib/google_api/compute/v1/model/backend_service_list_warning.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated model module (see the header note above) — do not hand-edit
# beyond documentation.
defmodule GoogleApi.Compute.V1.Model.BackendServiceListWarning do
  @moduledoc """
  [Output Only] Informational warning message.

  ## Attributes

  *   `code` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.
  *   `data` (*type:* `list(GoogleApi.Compute.V1.Model.BackendServiceListWarningData.t)`, *default:* `nil`) - [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key": "scope", "value": "zones/us-east1-d" }
  *   `message` (*type:* `String.t`, *default:* `nil`) - [Output Only] A human-readable description of the warning code.
  """

  # Provides the struct, decode/2 and the field/1,2 macros used below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :code => String.t() | nil,
          :data => list(GoogleApi.Compute.V1.Model.BackendServiceListWarningData.t()) | nil,
          :message => String.t() | nil
        }

  # field/1,2 registers each attribute for JSON (de)serialization;
  # :data decodes as a list of BackendServiceListWarningData models.
  field(:code)
  field(:data, as: GoogleApi.Compute.V1.Model.BackendServiceListWarningData, type: :list)
  field(:message)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.BackendServiceListWarning do
  # Delegate to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.Compute.V1.Model.BackendServiceListWarning.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.BackendServiceListWarning do
  # Delegate to the shared ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 40.603774
| 245
| 0.723513
|
79fd77adac94c80c3c076bea9f4f076619a9dc8f
| 2,882
|
exs
|
Elixir
|
test/graphql/resolvers/document_test.exs
|
leonardocouy/accent
|
29fb324395ff998cc5cdc6947c60070ffabe647c
|
[
"BSD-3-Clause"
] | null | null | null |
test/graphql/resolvers/document_test.exs
|
leonardocouy/accent
|
29fb324395ff998cc5cdc6947c60070ffabe647c
|
[
"BSD-3-Clause"
] | null | null | null |
test/graphql/resolvers/document_test.exs
|
leonardocouy/accent
|
29fb324395ff998cc5cdc6947c60070ffabe647c
|
[
"BSD-3-Clause"
] | null | null | null |
defmodule AccentTest.GraphQL.Resolvers.Document do
  # Tests for the Document GraphQL resolver: delete, show_project and
  # list_project, driven through real Repo inserts.
  use Accent.RepoCase

  alias Accent.GraphQL.Resolvers.Document, as: Resolver

  alias Accent.{
    Repo,
    Project,
    Document,
    Translation,
    Revision,
    User,
    Language
  }

  # Minimal stand-in for %Plug.Conn{} — only :assigns is read by the
  # resolver context.
  defmodule PlugConn do
    defstruct [:assigns]
  end

  @user %User{email: "test@test.com"}

  # Shared fixtures: one user, a french master revision, and a "test"
  # JSON document, all inside the same project.
  setup do
    user = Repo.insert!(@user)
    french_language = %Language{name: "french"} |> Repo.insert!()
    project = %Project{name: "My project"} |> Repo.insert!()
    revision = %Revision{language_id: french_language.id, project_id: project.id, master: true} |> Repo.insert!()
    document = %Document{project_id: project.id, path: "test", format: "json"} |> Repo.insert!()

    {:ok, [user: user, project: project, document: document, revision: revision]}
  end

  test "delete", %{document: document, revision: revision, user: user} do
    %Translation{revision_id: revision.id, document_id: document.id, key: "ok", corrected_text: "bar", proposed_text: "bar"} |> Repo.insert!()
    # The resolver reads the current user off conn.assigns in the context.
    context = %{context: %{conn: %PlugConn{assigns: %{current_user: user}}}}

    {:ok, result} = Resolver.delete(document, %{}, context)

    assert get_in(result, [:errors]) == nil
  end

  test "show project", %{document: document, project: project, revision: revision} do
    # One non-conflicted translation: counted as translated and reviewed,
    # not as a conflict (reviewed appears to mean conflicted: false —
    # NOTE(review): confirm against the resolver's count queries).
    %Translation{revision_id: revision.id, document_id: document.id, key: "ok", corrected_text: "bar", proposed_text: "bar", conflicted: false} |> Repo.insert!()

    {:ok, result} = Resolver.show_project(project, %{id: document.id}, %{})

    assert get_in(result, [Access.key(:id)]) == document.id
    assert get_in(result, [Access.key(:translations_count)]) == 1
    assert get_in(result, [Access.key(:conflicts_count)]) == 0
    assert get_in(result, [Access.key(:reviewed_count)]) == 1
  end

  test "list project", %{document: document, project: project, revision: revision} do
    # Three documents: two with a translation each (one conflicted), one
    # empty; the empty one is expected to be excluded from the listing.
    other_document = %Document{project_id: project.id, path: "test2", format: "json"} |> Repo.insert!()
    _empty_document = %Document{project_id: project.id, path: "test3", format: "json"} |> Repo.insert!()
    %Translation{revision_id: revision.id, document_id: document.id, key: "ok", corrected_text: "bar", proposed_text: "bar", conflicted: false} |> Repo.insert!()
    %Translation{revision_id: revision.id, document_id: other_document.id, key: "ok", corrected_text: "bar", proposed_text: "bar", conflicted: true} |> Repo.insert!()

    {:ok, result} = Resolver.list_project(project, %{}, %{})

    # Entries are asserted in [other_document, document] order — the
    # listing's ordering is relied upon here.
    assert get_in(result, [:entries, Access.all(), Access.key(:id)]) == [other_document.id, document.id]
    assert get_in(result, [:entries, Access.all(), Access.key(:translations_count)]) == [1, 1]
    assert get_in(result, [:entries, Access.all(), Access.key(:conflicts_count)]) == [1, 0]
    assert get_in(result, [:entries, Access.all(), Access.key(:reviewed_count)]) == [0, 1]
  end
end
| 42.382353
| 166
| 0.674185
|
79fd9646020f3995fb014846fb3a673f221b9ad1
| 3,116
|
ex
|
Elixir
|
lib/elixir_boilerplate_web/endpoint.ex
|
k0d3rR/Elixir-Boilerplate
|
e69693497f68cb92f9e2d4cd79b7c774564be437
|
[
"Unlicense"
] | 1
|
2020-09-20T14:06:40.000Z
|
2020-09-20T14:06:40.000Z
|
lib/elixir_boilerplate_web/endpoint.ex
|
yvc74/v9
|
c0c3913f1efb58113224bfed07dcbabfdd35e51a
|
[
"BSD-3-Clause"
] | null | null | null |
lib/elixir_boilerplate_web/endpoint.ex
|
yvc74/v9
|
c0c3913f1efb58113224bfed07dcbabfdd35e51a
|
[
"BSD-3-Clause"
] | null | null | null |
defmodule ElixirBoilerplateWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :elixir_boilerplate
use Sentry.Phoenix.Endpoint
socket(
"/socket",
ElixirBoilerplateWeb.Socket,
websocket: true
)
plug(ElixirBoilerplateWeb.Health.Plug)
plug(:canonical_host)
plug(:force_ssl)
plug(:cors)
plug(:basic_auth)
plug(:session)
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug(Plug.Static, at: "/", from: :elixir_boilerplate, gzip: false, only: ~w(css fonts images js favicon.ico))
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket(
"/phoenix/live_reload/socket",
Phoenix.LiveReloader.Socket,
websocket: true
)
plug(Phoenix.LiveReloader)
plug(Phoenix.CodeReloader)
end
plug(Plug.RequestId)
plug(Plug.Logger)
plug(
Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Jason
)
plug(Plug.MethodOverride)
plug(Plug.Head)
plug(ElixirBoilerplateWeb.Router)
@doc """
Callback invoked for dynamically configuring the endpoint.
It receives the endpoint configuration and checks if
configuration should be loaded from the system environment.
"""
def init(_key, config) do
if config[:load_from_system_env] do
port = Application.get_env(:elixir_boilerplate, ElixirBoilerplateWeb.Endpoint)[:http][:port] || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
else
{:ok, config}
end
end
defp canonical_host(conn, _opts) do
opts = PlugCanonicalHost.init(canonical_host: Application.get_env(:elixir_boilerplate, :canonical_host))
PlugCanonicalHost.call(conn, opts)
end
defp force_ssl(conn, _opts) do
if Application.get_env(:elixir_boilerplate, :force_ssl) do
opts = Plug.SSL.init(rewrite_on: [:x_forwarded_proto])
Plug.SSL.call(conn, opts)
else
conn
end
end
defp cors(conn, _opts) do
opts = Corsica.init(Application.get_env(:elixir_boilerplate, Corsica))
Corsica.call(conn, opts)
end
defp basic_auth(conn, _opts) do
basic_auth_config = Application.get_env(:elixir_boilerplate, :basic_auth)
if basic_auth_config do
opts = BasicAuth.init(use_config: {:elixir_boilerplate, :basic_auth})
BasicAuth.call(conn, opts)
else
conn
end
end
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
defp session(conn, _opts) do
opts =
Plug.Session.init(
store: :cookie,
key: Application.get_env(:elixir_boilerplate, ElixirBoilerplateWeb.Endpoint)[:session_key],
signing_salt: Application.get_env(:elixir_boilerplate, ElixirBoilerplateWeb.Endpoint)[:signing_salt]
)
Plug.Session.call(conn, opts)
end
end
| 27.095652
| 158
| 0.705712
|
79fe033ff6f22dd90cb4bf2ed059c1ff91684ada
| 69
|
exs
|
Elixir
|
farmbot_celery_script/.formatter.exs
|
elbow-jason/farmbot_os
|
f5dfc8f58a309285ca3d441b1b7272f15315b2a9
|
[
"MIT"
] | 1
|
2019-08-06T11:51:48.000Z
|
2019-08-06T11:51:48.000Z
|
farmbot_celery_script/.formatter.exs
|
SeppPenner/farmbot_os
|
39ba5c5880f8aef71792e2c009514bed1177089c
|
[
"MIT"
] | null | null | null |
farmbot_celery_script/.formatter.exs
|
SeppPenner/farmbot_os
|
39ba5c5880f8aef71792e2c009514bed1177089c
|
[
"MIT"
] | null | null | null |
# Formatter configuration: the file patterns `mix format` operates on.
[
  inputs: ["*.{ex,exs}", "{config,priv,lib,test}/**/*.{ex,exs}"]
]
| 17.25
| 64
| 0.478261
|
79fe3434d93ab172e946fa5a60f1fa5dfb7971bf
| 4,579
|
ex
|
Elixir
|
lib/sap.ex
|
slogsdon/sap
|
766f06cfac8a04772affd977a88d61210064e598
|
[
"MIT"
] | 7
|
2015-10-25T16:38:45.000Z
|
2020-01-12T19:06:57.000Z
|
lib/sap.ex
|
slogsdon/sap
|
766f06cfac8a04772affd977a88d61210064e598
|
[
"MIT"
] | null | null | null |
lib/sap.ex
|
slogsdon/sap
|
766f06cfac8a04772affd977a88d61210064e598
|
[
"MIT"
] | null | null | null |
defmodule Sap do
  @moduledoc """
  Sap is a toolkit for Plug applications to accept and respond to HTTP
  requests by using a decision tree built with combinators.

  ## Making a decision with combinators

  In most Plug applications, there is some sort of match and/or dispatch
  process that takes place for an individual request. A request will come
  in, a set of pattern matching statements created will be compared against
  the requested path, and if there's a match, a function will be called to
  send the client a response. Here, there is a clear separation between
  matching/filtering a request and creating a response.

  With Sap, the basic idea remains the same, but the way a request is
  accepted and a response is created is slightly different. Combinators, a
  fancy name for simple functions meant to be composed with one another, are
  tasked with one of two goals:

  - filter requests based on some criteria
  - build up a response

  The power of these functions comes from the ability to compose them in any
  order. There is no clear separation of each type of combinators, so a
  filtering combinator can be used both before and after a building
  combinator and vice versa.

  ## Example

  To help demonstrate this, let's walk through a small example.

      def app do
        choose [
          get() ~>> resp_json() ~>> choose([
            path("/body2") ~>> ok("{\\"data\\": \\"body 2\\"}"),
            path("/body1") ~>> ok("{\\"data\\": \\"body 1\\"}")
          ])
        ]
      end

      Sap.serve(app: app)
      # or Plug.Adapters.Cowboy.http Sap, [app: app], []

  A few combinators are used, both filtering and building, to define our
  application, with all of them working together to create a decision tree
  for dealing with requests.

  `choose/1` allows for multiple options at a decision tree node. `get/0`
  only permits HTTP GET requests in the rest of a node's definition.
  `resp_json/0` sets the response header for the rest of a node to
  `application/json`. `path/1` filters out requests that do not match a
  given path. `ok/1` sets the response status code of `200 Ok` and the given
  response body.

  With the above example, all POST, PUT, PATCH, etc. requests would not be
  handled. There was an explicit declaration that only GET requests are
  allowed since there is only one option in the first `choose/1` list. At
  the second `choose/1` list, the application has two options, `/body1` and
  `/body2`.

  These combinators work together through a `Sap.Context` struct that is
  passed between them that holds a `Plug.Conn` and a status for the current
  decision path. As long as the status is `:ok`, further combinators can
  affect the decision, but once the status is `:error`, the decision path
  effectively ends, giving way to the next option path.
  """
  use Application

  alias Sap.Context, as: C
  import Plug.Conn

  @behaviour Plug

  @doc """
  Convenience function to start a Plug adapter with Sap as the plug.

  ## Options

  - `:app` - (required) - the Sap app decision tree. An `ArgumentError`
    will be raised if this option is not passed.
  - `:scheme` - desired scheme on which the Plug adapter should attach
    (defaults to `:http`)

  ## Adapter Options

  - `:adapter` - desired Plug adapter (defaults to `Plug.Adapters.Cowboy`)

  See [the Plug docs](http://hexdocs.pm/plug/) for a detailed list of
  possible adapter options.
  """
  @spec serve(Keyword.t, Keyword.t) :: {:ok, pid} | {:error, term}
  def serve(opts \\ [], adapter_opts \\ []) do
    if !opts[:app] do
      raise ArgumentError, message: "missing `:app` option key"
    end

    adapter = adapter_opts[:adapter] || Plug.Adapters.Cowboy
    # Strip the :adapter key so only genuine adapter options are forwarded.
    adapter_opts = Keyword.delete(adapter_opts, :adapter)

    case opts[:scheme] || :http do
      :http -> adapter.http(__MODULE__, opts, adapter_opts)
      :https -> adapter.https(__MODULE__, opts, adapter_opts)
    end
  end

  # Plug callbacks

  @spec init(Keyword.t) :: Keyword.t
  def init(opts), do: opts

  # Runs the configured decision tree against the conn, converts an error
  # outcome into a 400, then sends whatever response was built.
  @spec call(Plug.Conn.t, Keyword.t) :: Plug.Conn.t
  def call(conn, opts) do
    app = opts[:app]

    conn
    |> app.()
    |> handle_error(conn)
    |> send_resp()
  end

  # An :error status means no decision path accepted the request.
  defp handle_error(%C{status: :error}, conn) do
    resp(conn, 400, "Bad Request")
  end

  # Success: unwrap the conn from the context and mark its state as :set
  # so send_resp/1 will deliver the response built by the combinators.
  defp handle_error(context, _conn) do
    Map.put(context.conn, :state, :set)
  end

  # Application callback. Starts an empty supervisor; the previous
  # `import Supervisor.Spec, warn: false` was removed because nothing
  # from that (deprecated) module is used here.
  def start(_type, _args) do
    opts = [strategy: :one_for_one, name: Sap.Supervisor]
    Supervisor.start_link([], opts)
  end
end
| 33.423358
| 76
| 0.689452
|
79fe3c0fd839e63a2fbcbe72b347e20eed970f86
| 9,079
|
exs
|
Elixir
|
test/phoenix/endpoint/endpoint_test.exs
|
esambo/phoenix
|
f7784f86ed803a39eb45486f0e031e974e708818
|
[
"MIT"
] | null | null | null |
test/phoenix/endpoint/endpoint_test.exs
|
esambo/phoenix
|
f7784f86ed803a39eb45486f0e031e974e708818
|
[
"MIT"
] | null | null | null |
test/phoenix/endpoint/endpoint_test.exs
|
esambo/phoenix
|
f7784f86ed803a39eb45486f0e031e974e708818
|
[
"MIT"
] | null | null | null |
# Set before the module is compiled: the endpoint config below reads this
# host via the {:system, "ENDPOINT_TEST_HOST"} tuple.
System.put_env("ENDPOINT_TEST_HOST", "example.com")

defmodule Phoenix.Endpoint.EndpointTest do
  use ExUnit.Case, async: true
  use RouterHelper

  # Baseline endpoint configuration shared by most tests; individual tests
  # derive modified copies with put_in/2 and feed them to config_change/2.
  @config [url: [host: {:system, "ENDPOINT_TEST_HOST"}, path: "/api"],
           static_url: [host: "static.example.com"],
           server: false, http: [port: 80], https: [port: 443],
           force_ssl: [subdomains: true],
           cache_static_manifest: "../../../../test/fixtures/digest/compile/cache_manifest.json",
           pubsub_server: :endpoint_pub]
  Application.put_env(:phoenix, __MODULE__.Endpoint, @config)

  defmodule Endpoint do
    use Phoenix.Endpoint, otp_app: :phoenix

    # Assert endpoint variables
    # (compile-time assertions — these run while the module is defined)
    assert is_list(config)
    assert @otp_app == :phoenix
    assert code_reloading? == false
    assert @compile_config == [force_ssl: [subdomains: true]]
  end

  # Endpoint with no Application config — used by the warning test below.
  defmodule NoConfigEndpoint do
    use Phoenix.Endpoint, otp_app: :phoenix
  end

  setup_all do
    # Endpoint startup logs a warning we don't care about here, so capture it.
    ExUnit.CaptureLog.capture_log(fn -> start_supervised! Endpoint end)
    start_supervised! {Phoenix.PubSub, name: :endpoint_pub}
    on_exit fn -> Application.delete_env(:phoenix, :serve_endpoints) end
    :ok
  end

  test "defines child_spec/1" do
    assert Endpoint.child_spec([]) == %{
      id: Endpoint,
      start: {Endpoint, :start_link, [[]]},
      type: :supervisor
    }
  end

  test "warns if there is no configuration for an endpoint" do
    assert ExUnit.CaptureLog.capture_log(fn ->
      NoConfigEndpoint.start_link()
    end) =~ "no configuration"
  end

  test "has reloadable configuration" do
    endpoint_id = Endpoint.config(:endpoint_id)
    assert Endpoint.config(:url) == [host: {:system, "ENDPOINT_TEST_HOST"}, path: "/api"]
    assert Endpoint.config(:static_url) == [host: "static.example.com"]
    assert Endpoint.url() == "https://example.com"
    assert Endpoint.path("/") == "/api/"
    assert Endpoint.static_url() == "https://static.example.com"
    assert Endpoint.struct_url() == %URI{scheme: "https", host: "example.com", port: 443}

    # Push new ports through config_change/2 and verify the derived URLs
    # update while the endpoint_id is preserved.
    config =
      @config
      |> put_in([:url, :port], 1234)
      |> put_in([:static_url, :port], 456)

    assert Endpoint.config_change([{Endpoint, config}], []) == :ok
    assert Endpoint.config(:endpoint_id) == endpoint_id
    assert Enum.sort(Endpoint.config(:url)) ==
           [host: {:system, "ENDPOINT_TEST_HOST"}, path: "/api", port: 1234]
    assert Enum.sort(Endpoint.config(:static_url)) ==
           [host: "static.example.com", port: 456]
    assert Endpoint.url() == "https://example.com:1234"
    assert Endpoint.path("/") == "/api/"
    assert Endpoint.static_url() == "https://static.example.com:456"
    assert Endpoint.struct_url() == %URI{scheme: "https", host: "example.com", port: 1234}
  end

  test "sets script name when using path" do
    conn = conn(:get, "https://example.com/")
    assert Endpoint.call(conn, []).script_name == ~w"api"

    # A pre-existing script_name is replaced, not appended to.
    conn = put_in conn.script_name, ~w(foo)
    assert Endpoint.call(conn, []).script_name == ~w"api"
  end

  @tag :capture_log
  test "redirects http requests to https on force_ssl" do
    conn = Endpoint.call(conn(:get, "/"), [])
    assert get_resp_header(conn, "location") == ["https://example.com/"]
    assert conn.halted
  end

  test "sends hsts on https requests on force_ssl" do
    conn = Endpoint.call(conn(:get, "https://example.com/"), [])
    assert get_resp_header(conn, "strict-transport-security") ==
           ["max-age=31536000; includeSubDomains"]
  end

  test "warms up caches on load and config change" do
    assert Endpoint.config(:cache_static_manifest_hash) == "cjkuB6uDZecddGSFz7D7kg"
    assert Endpoint.static_path("/foo.css") == "/foo-d978852bea6530fcd197b5445ed008fd.css?vsn=d"

    # Trigger a config change and the cache should be warmed up again
    config = put_in(@config[:cache_static_manifest], "../../../../test/fixtures/digest/compile/cache_manifest_upgrade.json")

    assert Endpoint.config_change([{Endpoint, config}], []) == :ok
    assert Endpoint.config(:cache_static_manifest_hash) == "3xAzpSnmcgte3bMevWqJMA"
    assert Endpoint.static_path("/foo.css") == "/foo-ghijkl.css?vsn=d"
  end

  @tag :capture_log
  test "invokes init/2 callback" do
    defmodule InitEndpoint do
      use Phoenix.Endpoint, otp_app: :phoenix

      # init/2 messages the test process so receipt can be asserted.
      def init(:supervisor, opts) do
        send opts[:parent], {self(), :sample}
        {:ok, opts}
      end
    end

    {:ok, pid} = InitEndpoint.start_link(parent: self())
    assert_receive {^pid, :sample}
  end

  @tag :capture_log
  test "uses url configuration for static path" do
    Application.put_env(:phoenix, __MODULE__.UrlEndpoint, url: [path: "/api"])
    defmodule UrlEndpoint do
      use Phoenix.Endpoint, otp_app: :phoenix
    end
    UrlEndpoint.start_link()
    assert UrlEndpoint.path("/phoenix.png") =~ "/api/phoenix.png"
    assert UrlEndpoint.static_path("/phoenix.png") =~ "/api/phoenix.png"
  end

  @tag :capture_log
  test "uses static_url configuration for static path" do
    Application.put_env(:phoenix, __MODULE__.StaticEndpoint, static_url: [path: "/static"])
    defmodule StaticEndpoint do
      use Phoenix.Endpoint, otp_app: :phoenix
    end
    StaticEndpoint.start_link()
    # :static_url path applies only to static assets, not regular paths.
    assert StaticEndpoint.path("/phoenix.png") =~ "/phoenix.png"
    assert StaticEndpoint.static_path("/phoenix.png") =~ "/static/phoenix.png"
  end

  test "injects pubsub broadcast with configured server" do
    # Subscribing here lets every broadcast variant be asserted via
    # assert_receive on this test process.
    Phoenix.PubSub.subscribe(:endpoint_pub, "sometopic")
    some = spawn fn -> :ok end

    Endpoint.broadcast_from(some, "sometopic", "event1", %{key: :val})
    assert_receive %Phoenix.Socket.Broadcast{
      event: "event1", payload: %{key: :val}, topic: "sometopic"}

    Endpoint.broadcast_from!(some, "sometopic", "event2", %{key: :val})
    assert_receive %Phoenix.Socket.Broadcast{
      event: "event2", payload: %{key: :val}, topic: "sometopic"}

    Endpoint.broadcast("sometopic", "event3", %{key: :val})
    assert_receive %Phoenix.Socket.Broadcast{
      event: "event3", payload: %{key: :val}, topic: "sometopic"}

    Endpoint.broadcast!("sometopic", "event4", %{key: :val})
    assert_receive %Phoenix.Socket.Broadcast{
      event: "event4", payload: %{key: :val}, topic: "sometopic"}

    Endpoint.local_broadcast_from(some, "sometopic", "event1", %{key: :val})
    assert_receive %Phoenix.Socket.Broadcast{
      event: "event1", payload: %{key: :val}, topic: "sometopic"}

    Endpoint.local_broadcast("sometopic", "event3", %{key: :val})
    assert_receive %Phoenix.Socket.Broadcast{
      event: "event3", payload: %{key: :val}, topic: "sometopic"}
  end

  test "loads cache manifest from specified application" do
    # {app, path} form resolves the manifest relative to the given app.
    config = put_in(@config[:cache_static_manifest], {:phoenix, "../../../../test/fixtures/digest/compile/cache_manifest.json"})
    assert Endpoint.config_change([{Endpoint, config}], []) == :ok
    assert Endpoint.static_path("/foo.css") == "/foo-d978852bea6530fcd197b5445ed008fd.css?vsn=d"
  end

  # The server?/2 tests each use a per-test endpoint module name derived
  # from the test name to avoid clashing Application env entries.
  test "server?/2 returns true for explicitly true server", config do
    endpoint = Module.concat(__MODULE__, config.test)
    Application.put_env(:phoenix, endpoint, server: true)
    assert Phoenix.Endpoint.server?(:phoenix, endpoint)
  end

  test "server?/2 returns false for explicitly false server", config do
    # Explicit server: false wins over the global serve_endpoints flag.
    Application.put_env(:phoenix, :serve_endpoints, true)
    endpoint = Module.concat(__MODULE__, config.test)
    Application.put_env(:phoenix, endpoint, server: false)
    refute Phoenix.Endpoint.server?(:phoenix, endpoint)
  end

  test "server?/2 returns true for global serve_endpoints as true", config do
    Application.put_env(:phoenix, :serve_endpoints, true)
    endpoint = Module.concat(__MODULE__, config.test)
    Application.put_env(:phoenix, endpoint, [])
    assert Phoenix.Endpoint.server?(:phoenix, endpoint)
  end

  test "server?/2 returns false for no global serve_endpoints config", config do
    Application.delete_env(:phoenix, :serve_endpoints)
    endpoint = Module.concat(__MODULE__, config.test)
    Application.put_env(:phoenix, endpoint, [])
    refute Phoenix.Endpoint.server?(:phoenix, endpoint)
  end

  test "static_path/1 validates paths are local/safe" do
    safe_path = "/some_safe_path"
    assert Endpoint.static_path(safe_path) == safe_path

    assert_raise ArgumentError, ~r/unsafe characters/, fn ->
      Endpoint.static_path("/\\unsafe_path")
    end

    assert_raise ArgumentError, ~r/expected a path starting with a single/, fn ->
      Endpoint.static_path("//invalid_path")
    end
  end

  test "static_integrity/1 validates paths are local/safe" do
    safe_path = "/some_safe_path"
    assert is_nil(Endpoint.static_integrity(safe_path))

    assert_raise ArgumentError, ~r/unsafe characters/, fn ->
      Endpoint.static_integrity("/\\unsafe_path")
    end

    assert_raise ArgumentError, ~r/expected a path starting with a single/, fn ->
      Endpoint.static_integrity("//invalid_path")
    end
  end

  test "__compile_config__/0 returns selected configs from compile time" do
    assert Endpoint.__compile_config__() == [force_ssl: [subdomains: true]]
  end
end
| 37.987448
| 128
| 0.685648
|
79fe5395ffea4e5d55c81a729b926ed1478972c8
| 1,368
|
ex
|
Elixir
|
apps/chex/test/support/data_case.ex
|
chrisfishwood/chex
|
04d3446f3d25c8c3c0badd282b50abccc59cc41d
|
[
"MIT"
] | null | null | null |
apps/chex/test/support/data_case.ex
|
chrisfishwood/chex
|
04d3446f3d25c8c3c0badd282b50abccc59cc41d
|
[
"MIT"
] | null | null | null |
apps/chex/test/support/data_case.ex
|
chrisfishwood/chex
|
04d3446f3d25c8c3c0badd282b50abccc59cc41d
|
[
"MIT"
] | null | null | null |
defmodule Chex.DataCase do
  @moduledoc """
  This module defines the setup for tests requiring
  access to the application's data layer.

  You may define functions here to be used as helpers in
  your tests.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  # Injected into every module that does `use Chex.DataCase`.
  using do
    quote do
      alias Chex.Repo

      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Chex.DataCase
    end
  end

  # Check out a sandboxed DB connection for each test; for non-async tests
  # switch to shared mode so processes spawned by the test can reuse it.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Chex.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Chex.Repo, {:shared, self()})
    end

    :ok
  end

  @doc """
  A helper that transform changeset errors to a map of messages.

      assert {:error, changeset} = Accounts.create_user(%{password: "short"})
      assert "password is too short" in errors_on(changeset).password
      assert %{password: ["password is too short"]} = errors_on(changeset)

  """
  def errors_on(changeset) do
    # Interpolate each error's opts (e.g. %{count}) into its message template.
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      Enum.reduce(opts, message, fn {key, value}, acc ->
        String.replace(acc, "%{#{key}}", to_string(value))
      end)
    end)
  end
end
| 25.333333
| 77
| 0.67617
|
79fe7942900290dde1772c9229b5c5579eab5782
| 2,886
|
ex
|
Elixir
|
clients/admin/lib/google_api/admin/directory_v1/model/user_posix_account.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/admin/lib/google_api/admin/directory_v1/model/user_posix_account.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/admin/lib/google_api/admin/directory_v1/model/user_posix_account.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated model module (see the header note above) — do not hand-edit
# beyond documentation.
defmodule GoogleApi.Admin.Directory_v1.Model.UserPosixAccount do
  @moduledoc """
  JSON template for a POSIX account entry.

  ## Attributes

  *   `accountId` (*type:* `String.t`, *default:* `nil`) - A POSIX account field identifier.
  *   `gecos` (*type:* `String.t`, *default:* `nil`) - The GECOS (user information) for this account.
  *   `gid` (*type:* `String.t`, *default:* `nil`) - The default group ID.
  *   `homeDirectory` (*type:* `String.t`, *default:* `nil`) - The path to the home directory for this account.
  *   `operatingSystemType` (*type:* `String.t`, *default:* `nil`) - The operating system type for this account.
  *   `primary` (*type:* `boolean()`, *default:* `nil`) - If this is user's primary account within the SystemId.
  *   `shell` (*type:* `String.t`, *default:* `nil`) - The path to the login shell for this account.
  *   `systemId` (*type:* `String.t`, *default:* `nil`) - System identifier for which account Username or Uid apply to.
  *   `uid` (*type:* `String.t`, *default:* `nil`) - The POSIX compliant user ID.
  *   `username` (*type:* `String.t`, *default:* `nil`) - The username of the account.
  """

  # Provides the struct, decode/2 and the field/1 macro used below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :accountId => String.t() | nil,
          :gecos => String.t() | nil,
          :gid => String.t() | nil,
          :homeDirectory => String.t() | nil,
          :operatingSystemType => String.t() | nil,
          :primary => boolean() | nil,
          :shell => String.t() | nil,
          :systemId => String.t() | nil,
          :uid => String.t() | nil,
          :username => String.t() | nil
        }

  # field/1 registers each attribute for JSON (de)serialization.
  field(:accountId)
  field(:gecos)
  field(:gid)
  field(:homeDirectory)
  field(:operatingSystemType)
  field(:primary)
  field(:shell)
  field(:systemId)
  field(:uid)
  field(:username)
end
defimpl Poison.Decoder, for: GoogleApi.Admin.Directory_v1.Model.UserPosixAccount do
  # Delegate to the model's own `decode/2` (generated by ModelBase).
  def decode(struct, opts),
    do: GoogleApi.Admin.Directory_v1.Model.UserPosixAccount.decode(struct, opts)
end
defimpl Poison.Encoder, for: GoogleApi.Admin.Directory_v1.Model.UserPosixAccount do
  # Encoding is shared across all generated models via ModelBase.
  def encode(struct, opts), do: GoogleApi.Gax.ModelBase.encode(struct, opts)
end
| 39
| 119
| 0.662855
|
79fe9b12f0f226bdd8fdbc41020b5f680d2bf9c5
| 1,125
|
exs
|
Elixir
|
config/config.exs
|
szTheory/elixir-leaseweb
|
98be9c7cc496b52d7b12d4986078d1b68fca1e71
|
[
"MIT"
] | 1
|
2018-09-07T15:56:14.000Z
|
2018-09-07T15:56:14.000Z
|
config/config.exs
|
szTheory/elixir-leaseweb
|
98be9c7cc496b52d7b12d4986078d1b68fca1e71
|
[
"MIT"
] | null | null | null |
config/config.exs
|
szTheory/elixir-leaseweb
|
98be9c7cc496b52d7b12d4986078d1b68fca1e71
|
[
"MIT"
] | 2
|
2017-02-11T03:00:58.000Z
|
2020-03-03T21:21:42.000Z
|
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `Mix.Config` is deprecated in recent Elixir versions in
# favor of `import Config` — confirm the project's minimum Elixir version
# before switching.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure your application as:
#
#     config :leaseweb, leaseweb_api: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:leaseweb, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
| 36.290323
| 73
| 0.755556
|
79fed457661c0d812123436289e12a1625a5a5b6
| 86
|
ex
|
Elixir
|
testData/org/elixir_lang/parser_definition/matched_two_operation_parsing_test_case/MatchedDotOperation.ex
|
keyno63/intellij-elixir
|
4033e319992c53ddd42a683ee7123a97b5e34f02
|
[
"Apache-2.0"
] | 1,668
|
2015-01-03T05:54:27.000Z
|
2022-03-25T08:01:20.000Z
|
testData/org/elixir_lang/parser_definition/matched_two_operation_parsing_test_case/MatchedDotOperation.ex
|
keyno63/intellij-elixir
|
4033e319992c53ddd42a683ee7123a97b5e34f02
|
[
"Apache-2.0"
] | 2,018
|
2015-01-01T22:43:39.000Z
|
2022-03-31T20:13:08.000Z
|
testData/org/elixir_lang/parser_definition/matched_two_operation_parsing_test_case/MatchedDotOperation.ex
|
keyno63/intellij-elixir
|
4033e319992c53ddd42a683ee7123a97b5e34f02
|
[
"Apache-2.0"
] | 145
|
2015-01-15T11:37:16.000Z
|
2021-12-22T05:51:02.000Z
|
One.two ++ Three.four
One.two -- Three.four
One.two..Three.four
One.two <> Three.four
| 17.2
| 21
| 0.697674
|
79fedca4636dd2f84de2d452c6143fb464f89bf1
| 8,412
|
exs
|
Elixir
|
test/parent/client_test.exs
|
jlgeering/parent
|
90fb6d08e6ce123852783a3f274d21c674bb4753
|
[
"MIT"
] | 200
|
2018-04-08T20:36:15.000Z
|
2022-03-12T14:43:46.000Z
|
test/parent/client_test.exs
|
jlgeering/parent
|
90fb6d08e6ce123852783a3f274d21c674bb4753
|
[
"MIT"
] | 15
|
2018-05-31T11:03:21.000Z
|
2022-02-19T21:26:21.000Z
|
test/parent/client_test.exs
|
jlgeering/parent
|
90fb6d08e6ce123852783a3f274d21c674bb4753
|
[
"MIT"
] | 18
|
2018-05-16T02:09:35.000Z
|
2022-02-03T18:56:30.000Z
|
# Exercises Parent.Client. Most describe blocks are expanded twice at compile
# time — once with the child registry enabled and once without — via the
# `for registry? <- [true, false]` / `unquote(registry?)` pattern.
defmodule Parent.ClientTest do
  use ExUnit.Case, async: true

  import Parent.CaptureLog

  alias Parent.Client

  setup do
    # Deterministic, strictly increasing clock stub for the restart counter.
    Mox.stub(Parent.RestartCounter.TimeProvider.Test, :now_ms, fn ->
      :erlang.unique_integer([:monotonic, :positive]) * :timer.seconds(5)
    end)

    :ok
  end

  describe "child_pid/1" do
    for registry? <- [true, false] do
      test "returns the pid of the given child when registry is #{registry?}" do
        parent =
          start_parent!(
            [child_spec(id: :child1), child_spec(id: :child2)],
            registry?: unquote(registry?)
          )

        assert {:ok, pid1} = Client.child_pid(parent, :child1)
        assert {:ok, pid2} = Client.child_pid(parent, :child2)

        # Cross-check against the supervisor's own view of its children.
        assert [{:child1, ^pid1, _, _}, {:child2, ^pid2, _, _}] =
                 :supervisor.which_children(parent)
      end

      test "can dereference aliases when registry is #{registry?}" do
        registered_name = :"alias_#{System.unique_integer([:positive, :monotonic])}"
        parent = start_parent!([child_spec(id: :child)], name: registered_name)
        :global.register_name(registered_name, parent)

        # All standard GenServer-style name forms must resolve.
        assert {:ok, _} = Client.child_pid(registered_name, :child)
        assert {:ok, _} = Client.child_pid({:global, registered_name}, :child)
        assert {:ok, _} = Client.child_pid({:via, :global, registered_name}, :child)
      end

      test "returns error when child is unknown when registry is #{registry?}" do
        parent = start_parent!([], registry?: unquote(registry?))
        assert Client.child_pid(parent, :child) == :error
      end

      test "returns error if child is stopped when registry is #{registry?}" do
        parent =
          start_parent!(
            [child_spec(id: :child1), child_spec(id: :child2)],
            registry?: unquote(registry?)
          )

        Client.shutdown_child(parent, :child1)
        assert Client.child_pid(parent, :child1) == :error
        refute Client.child_pid(parent, :child2) == :error
      end
    end
  end

  describe "children/1" do
    for registry? <- [true, false] do
      test "returns children when registry is #{registry?}" do
        parent =
          start_parent!(
            [child_spec(id: :child1, meta: :meta1), child_spec(id: :child2, meta: :meta2)],
            registry?: unquote(registry?)
          )

        {:ok, child1} = Client.child_pid(parent, :child1)
        {:ok, child2} = Client.child_pid(parent, :child2)

        # Sort to make the comparison order-independent.
        assert Enum.sort_by(Client.children(parent), &"#{&1.id}") == [
                 %{id: :child1, meta: :meta1, pid: child1},
                 %{id: :child2, meta: :meta2, pid: child2}
               ]
      end
    end
  end

  describe "via tuple" do
    for registry? <- [true, false] do
      test "resolves the pid of the given child when registry is #{registry?}" do
        parent =
          start_parent!(
            [child_spec(id: :child1), child_spec(id: :child2)],
            registry?: unquote(registry?)
          )

        assert pid1 = GenServer.whereis({:via, Client, {parent, :child1}})
        assert pid2 = GenServer.whereis({:via, Client, {parent, :child2}})

        assert [{:child1, ^pid1, _, _}, {:child2, ^pid2, _, _}] =
                 :supervisor.which_children(parent)
      end

      test "returns nil when child is unknown when registry is #{registry?}" do
        parent = start_parent!([], registry?: unquote(registry?))
        assert GenServer.whereis({:via, Client, {parent, :child}}) == nil
      end
    end
  end

  describe "child_meta/1" do
    for registry? <- [true, false] do
      test "returns the meta of the given child when registry is #{registry?}" do
        parent =
          start_parent!(
            [
              child_spec(id: :child1, meta: :meta1),
              child_spec(meta: :meta2)
            ],
            registry?: unquote(registry?)
          )

        child1 = child_pid!(parent, :child1)
        # The second child has no explicit id, so locate it by elimination.
        child2 = (parent |> Client.children() |> Enum.reject(&(&1.pid == child1)) |> hd).pid

        # Lookup works both by id and by pid.
        assert Client.child_meta(parent, :child1) == {:ok, :meta1}
        assert Client.child_meta(parent, child1) == {:ok, :meta1}
        assert Client.child_meta(parent, child2) == {:ok, :meta2}
      end

      test "returns error when child is unknown when registry is #{registry?}" do
        parent = start_parent!()
        assert Client.child_meta(parent, :child) == :error
      end
    end
  end

  describe "update_child_meta/1" do
    test "succeeds if child exists" do
      parent = start_parent!([child_spec(id: :child, meta: 1)])
      assert Client.update_child_meta(parent, :child, &(&1 + 1))
      assert Client.child_meta(parent, :child) == {:ok, 2}
    end

    test "returns error when child is unknown" do
      parent = start_parent!()
      assert Client.update_child_meta(parent, :child, & &1) == :error
    end
  end

  describe "start_child/1" do
    test "adds the additional child" do
      parent = start_parent!([child_spec(id: :child1)])
      assert {:ok, child2} = Client.start_child(parent, child_spec(id: :child2))
      assert child_pid!(parent, :child2) == child2
    end

    test "returns error" do
      parent = start_parent!([child_spec(id: :child1)])
      {:ok, child2} = Client.start_child(parent, child_spec(id: :child2))

      # A duplicate id reports the already-running pid and changes nothing.
      assert Client.start_child(parent, child_spec(id: :child2)) ==
               {:error, {:already_started, child2}}

      assert child_ids(parent) == [:child1, :child2]
      assert child_pid!(parent, :child2) == child2
    end

    test "handles child start crash" do
      parent = start_parent!([child_spec(id: :child1)])

      capture_log(fn ->
        spec =
          child_spec(id: :child2, start: {Agent, :start_link, [fn -> raise "some error" end]})

        {:error, {_error, _stacktrace}} = Client.start_child(parent, spec)
        Process.sleep(100)
      end)

      # The failed child must not be registered.
      assert child_ids(parent) == [:child1]
    end
  end

  describe "shutdown_child/1" do
    test "stops the given child" do
      parent = start_parent!([child_spec(id: :child)])
      assert {:ok, _info} = Client.shutdown_child(parent, :child)
      assert Client.child_pid(parent, :child) == :error
      assert child_ids(parent) == []
    end

    test "returns error when child is unknown" do
      parent = start_parent!()
      assert Client.shutdown_child(parent, :child) == :error
    end
  end

  describe "restart_child/1" do
    test "stops the given child" do
      parent = start_parent!([child_spec(id: :child)])
      pid1 = child_pid!(parent, :child)
      assert Client.restart_child(parent, :child) == :ok
      assert child_ids(parent) == [:child]
      # A restart produces a fresh process under the same id.
      refute child_pid!(parent, :child) == pid1
    end

    test "returns error when child is unknown" do
      pid = start_parent!()
      assert Client.restart_child(pid, :child) == :error
    end
  end

  describe "shutdown_all/1" do
    test "stops all children" do
      parent = start_parent!([child_spec(id: :child1), child_spec(id: :child2)])
      assert Map.keys(Client.shutdown_all(parent)) == ~w/child1 child2/a
      assert child_ids(parent) == []
    end
  end

  describe "return_children/1" do
    test "returns all given children" do
      parent =
        start_parent!([
          child_spec(id: :child1, shutdown_group: :group1),
          child_spec(id: :child2, binds_to: [:child1], shutdown_group: :group2),
          child_spec(id: :child3, binds_to: [:child2]),
          child_spec(id: :child4, shutdown_group: :group1),
          child_spec(id: :child5, shutdown_group: :group2),
          child_spec(id: :child6)
        ])

      # Stopping child4 takes down its whole binds_to/shutdown_group closure,
      # leaving only the independent child6.
      {:ok, stopped_children} = Client.shutdown_child(parent, :child4)
      assert child_ids(parent) == [:child6]

      assert Client.return_children(parent, stopped_children) == :ok
      assert child_ids(parent) == ~w/child1 child2 child3 child4 child5 child6/a
    end
  end

  # Starts a Parent.Supervisor under the test supervisor and allows it to use
  # the stubbed time provider from setup/0.
  defp start_parent!(children \\ [], opts \\ []) do
    parent = start_supervised!({Parent.Supervisor, {children, opts}})
    Mox.allow(Parent.RestartCounter.TimeProvider.Test, self(), parent)
    parent
  end

  # Minimal Agent-backed child spec; `overrides` customizes id/meta/bindings.
  defp child_spec(overrides),
    do: Parent.child_spec(%{start: {Agent, :start_link, [fn -> :ok end]}}, overrides)

  defp child_pid!(parent, child_id) do
    {:ok, pid} = Client.child_pid(parent, child_id)
    pid
  end

  defp child_ids(parent), do: Enum.map(Client.children(parent), & &1.id)
end
| 33.783133
| 94
| 0.611032
|
79fee3f4fdef4beb652db7fd38911914844cd505
| 1,111
|
ex
|
Elixir
|
graphical/lib/graphical/accounts/user.ex
|
joshmcarthur/learning-elixir
|
5ccfdd61dcfaba82c05559fb9c0e6f99cf4319e7
|
[
"MIT"
] | null | null | null |
graphical/lib/graphical/accounts/user.ex
|
joshmcarthur/learning-elixir
|
5ccfdd61dcfaba82c05559fb9c0e6f99cf4319e7
|
[
"MIT"
] | null | null | null |
graphical/lib/graphical/accounts/user.ex
|
joshmcarthur/learning-elixir
|
5ccfdd61dcfaba82c05559fb9c0e6f99cf4319e7
|
[
"MIT"
] | null | null | null |
defmodule Graphical.Accounts.User do
  @moduledoc """
  Ecto schema for application users.

  Three changesets are provided:

    * `changeset/2` — casts and requires name/email (basic edits).
    * `update_changeset/2` — name/email plus an optional password change.
    * `registration_changeset/2` — name/email/password, all required.

  `password` is virtual and never persisted; valid changesets hash it with
  Bcrypt into `encrypted_password`.
  """

  use Ecto.Schema
  import Ecto.Changeset
  alias Graphical.Accounts.User

  schema "users" do
    field :email, :string
    field :name, :string
    # Plaintext password, only held in memory during a changeset.
    field :password, :string, virtual: true
    field :encrypted_password

    has_many :posts, Graphical.Posts.Post

    timestamps()
  end

  @doc false
  def changeset(user, attrs) do
    user
    |> cast(attrs, [:name, :email])
    |> validate_required([:name, :email])
  end

  @doc """
  Changeset for profile updates; `password` is permitted but optional.
  """
  def update_changeset(%User{} = user, params \\ %{}) do
    user
    # Fix: the old Ecto 1.x `cast(data, params, required, optional)` form was
    # used here; in current Ecto the third argument is the *permitted* list
    # (and the fourth is opts), so :password belongs in the permitted list
    # while remaining absent from validate_required/2.
    |> cast(params, [:name, :email, :password])
    |> validate_required([:name, :email])
    |> put_pass_hash()
  end

  @doc """
  Changeset for new registrations; name, email, and password are required.
  """
  def registration_changeset(%User{} = user, params \\ %{}) do
    user
    |> cast(params, [:name, :email, :password])
    |> validate_required([:name, :email, :password])
    |> put_pass_hash()
  end

  # Hashes the :password change into :encrypted_password on valid changesets;
  # invalid or password-less changesets pass through unchanged.
  defp put_pass_hash(changeset) do
    case changeset do
      %Ecto.Changeset{valid?: true, changes: %{password: password}} ->
        put_change(changeset, :encrypted_password, Comeonin.Bcrypt.hashpwsalt(password))

      _ ->
        changeset
    end
  end
end
| 24.152174
| 88
| 0.647165
|
79fef09518bb1976a70eda11134844b917a514e3
| 274
|
exs
|
Elixir
|
farmbot_core/priv/config/migrations/20180813200950_add_ssh_opts.exs
|
adamswsk/farmbot_os
|
d177d3b74888c1e7bcbf8f8595818708ee97f73b
|
[
"MIT"
] | 1
|
2021-08-23T13:36:14.000Z
|
2021-08-23T13:36:14.000Z
|
farmbot_core/priv/config/migrations/20180813200950_add_ssh_opts.exs
|
adamswsk/farmbot_os
|
d177d3b74888c1e7bcbf8f8595818708ee97f73b
|
[
"MIT"
] | null | null | null |
farmbot_core/priv/config/migrations/20180813200950_add_ssh_opts.exs
|
adamswsk/farmbot_os
|
d177d3b74888c1e7bcbf8f8595818708ee97f73b
|
[
"MIT"
] | null | null | null |
# Migration: adds SSH-related entries to the settings config store.
# NOTE(review): the module sits under the legacy Farmbot.System.ConfigStorage
# namespace while importing FarmbotCore.Config.MigrationHelpers — presumably
# kept for migration-name stability; confirm before renaming.
defmodule Farmbot.System.ConfigStorage.Migrations.AddSshOpts do
  use Ecto.Migration
  import FarmbotCore.Config.MigrationHelpers

  def change do
    # ssh_port is stored as :float (22.0) — looks like numeric settings share
    # a single float type here; TODO confirm integer is not supported.
    create_settings_config("ssh_port", :float, 22.0)
    create_settings_config("authorized_ssh_key", :string, nil)
  end
end
| 24.909091
| 63
| 0.788321
|
79ff1af6791b95169552481c96f7d28437e9e9fb
| 2,609
|
exs
|
Elixir
|
phoenix/test/mehr_schulferien_web/controllers/api/country_controller_test.exs
|
komlanvi/www.mehr-schulferien.de
|
fe74772f2cc8ce430e04adf6e66023971456ce57
|
[
"MIT"
] | null | null | null |
phoenix/test/mehr_schulferien_web/controllers/api/country_controller_test.exs
|
komlanvi/www.mehr-schulferien.de
|
fe74772f2cc8ce430e04adf6e66023971456ce57
|
[
"MIT"
] | null | null | null |
phoenix/test/mehr_schulferien_web/controllers/api/country_controller_test.exs
|
komlanvi/www.mehr-schulferien.de
|
fe74772f2cc8ce430e04adf6e66023971456ce57
|
[
"MIT"
] | null | null | null |
# CRUD controller tests for the countries JSON API.
# Note: uses the unparenthesized `get conn, path` call style throughout.
defmodule MehrSchulferienWeb.Api.CountryControllerTest do
  use MehrSchulferienWeb.ConnCase

  alias MehrSchulferien.Locations
  alias MehrSchulferien.Locations.Country

  @create_attrs %{name: "some name", slug: "some slug"}
  @update_attrs %{name: "some updated name", slug: "some updated slug"}
  @invalid_attrs %{name: nil, slug: nil}

  # Inserts a country with the default attributes.
  def fixture(:country) do
    {:ok, country} = Locations.create_country(@create_attrs)
    country
  end

  setup %{conn: conn} do
    {:ok, conn: put_req_header(conn, "accept", "application/json")}
  end

  describe "index" do
    test "lists all countries", %{conn: conn} do
      conn = get conn, api_country_path(conn, :index)
      assert json_response(conn, 200)["data"] == []
    end
  end

  describe "create country" do
    test "renders country when data is valid", %{conn: conn} do
      conn = post conn, api_country_path(conn, :create), country: @create_attrs
      assert %{"id" => id} = json_response(conn, 201)["data"]

      # Round-trip: fetch the created record and compare the full payload.
      conn = get conn, api_country_path(conn, :show, id)
      assert json_response(conn, 200)["data"] == %{
        "id" => id,
        "name" => "some name",
        "slug" => "some slug"}
    end

    test "renders errors when data is invalid", %{conn: conn} do
      conn = post conn, api_country_path(conn, :create), country: @invalid_attrs
      assert json_response(conn, 422)["errors"] != %{}
    end
  end

  describe "update country" do
    setup [:create_country]

    test "renders country when data is valid", %{conn: conn, country: %Country{id: id} = country} do
      conn = put conn, api_country_path(conn, :update, country), country: @update_attrs
      # Pin: the update must preserve the record's id.
      assert %{"id" => ^id} = json_response(conn, 200)["data"]

      conn = get conn, api_country_path(conn, :show, id)
      assert json_response(conn, 200)["data"] == %{
        "id" => id,
        "name" => "some updated name",
        "slug" => "some updated slug"}
    end

    test "renders errors when data is invalid", %{conn: conn, country: country} do
      conn = put conn, api_country_path(conn, :update, country), country: @invalid_attrs
      assert json_response(conn, 422)["errors"] != %{}
    end
  end

  describe "delete country" do
    setup [:create_country]

    test "deletes chosen country", %{conn: conn, country: country} do
      conn = delete conn, api_country_path(conn, :delete, country)
      assert response(conn, 204)
      # A subsequent show must 404.
      assert_error_sent 404, fn ->
        get conn, api_country_path(conn, :show, country)
      end
    end
  end

  defp create_country(_) do
    country = fixture(:country)
    {:ok, country: country}
  end
end
| 31.817073
| 100
| 0.643542
|
79ff2135bd4f9db2abcd8cd3d25784323f706bda
| 2,791
|
ex
|
Elixir
|
instrumentation/opentelemetry_oban/lib/opentelemetry_oban/job_handler.ex
|
qdentity/opentelemetry-erlang-contrib
|
eecb238cff95edb111a70d3c617dd1995f6b0eba
|
[
"Apache-2.0"
] | null | null | null |
instrumentation/opentelemetry_oban/lib/opentelemetry_oban/job_handler.ex
|
qdentity/opentelemetry-erlang-contrib
|
eecb238cff95edb111a70d3c617dd1995f6b0eba
|
[
"Apache-2.0"
] | null | null | null |
instrumentation/opentelemetry_oban/lib/opentelemetry_oban/job_handler.ex
|
qdentity/opentelemetry-erlang-contrib
|
eecb238cff95edb111a70d3c617dd1995f6b0eba
|
[
"Apache-2.0"
] | null | null | null |
# Attaches :telemetry handlers for Oban job events and maps them onto
# OpenTelemetry consumer spans.
defmodule OpentelemetryOban.JobHandler do
  alias OpenTelemetry.Span

  @tracer_id :opentelemetry_oban

  # Registers start/stop/exception handlers; call once at application start.
  def attach() do
    attach_job_start_handler()
    attach_job_stop_handler()
    attach_job_exception_handler()
  end

  defp attach_job_start_handler() do
    :telemetry.attach(
      "#{__MODULE__}.job_start",
      [:oban, :job, :start],
      &__MODULE__.handle_job_start/4,
      []
    )
  end

  defp attach_job_stop_handler() do
    :telemetry.attach(
      "#{__MODULE__}.job_stop",
      [:oban, :job, :stop],
      &__MODULE__.handle_job_stop/4,
      []
    )
  end

  defp attach_job_exception_handler() do
    :telemetry.attach(
      "#{__MODULE__}.job_exception",
      [:oban, :job, :exception],
      &__MODULE__.handle_job_exception/4,
      []
    )
  end

  # Starts a consumer span for the job. The producer's trace context is
  # extracted from the job's meta; the resulting span is started as a root
  # with a *link* to the producer span rather than as its child.
  def handle_job_start(_event, _measurements, metadata, _config) do
    %{
      job: %{
        id: id,
        queue: queue,
        worker: worker,
        priority: priority,
        inserted_at: inserted_at,
        scheduled_at: scheduled_at,
        attempt: attempt,
        max_attempts: max_attempts,
        meta: job_meta
      }
    } = metadata

    # Restore the trace context that was injected into job meta at enqueue time.
    :otel_propagator_text_map.extract(Map.to_list(job_meta))

    parent = OpenTelemetry.Tracer.current_span_ctx()
    links = if parent == :undefined, do: [], else: [OpenTelemetry.link(parent)]

    # Detach the extracted context so the new span becomes a root (linked,
    # not parented, to the producer).
    OpenTelemetry.Tracer.set_current_span(:undefined)

    attributes = [
      "messaging.system": "oban",
      "messaging.destination": queue,
      "messaging.destination_kind": "queue",
      "messaging.operation": "process",
      "messaging.oban.job_id": id,
      "messaging.oban.worker": worker,
      "messaging.oban.priority": priority,
      "messaging.oban.attempt": attempt,
      "messaging.oban.max_attempts": max_attempts,
      # inserted_at can be nil (e.g. for jobs not yet persisted).
      "messaging.oban.inserted_at":
        if(inserted_at, do: DateTime.to_iso8601(inserted_at), else: nil),
      "messaging.oban.scheduled_at": DateTime.to_iso8601(scheduled_at)
    ]

    span_name = "#{worker} process"

    OpentelemetryTelemetry.start_telemetry_span(@tracer_id, span_name, metadata, %{
      kind: :consumer,
      links: links,
      attributes: attributes
    })
  end

  # Ends the span opened by handle_job_start/4 on normal completion.
  def handle_job_stop(_event, _measurements, metadata, _config) do
    OpentelemetryTelemetry.end_telemetry_span(@tracer_id, metadata)
  end

  # On failure: record the exception, mark the span errored, then end it.
  def handle_job_exception(
        _event,
        _measurements,
        %{stacktrace: stacktrace, error: error} = metadata,
        _config
      ) do
    ctx = OpentelemetryTelemetry.set_current_telemetry_span(@tracer_id, metadata)

    # Record exception and mark the span as errored
    Span.record_exception(ctx, error, stacktrace)
    Span.set_status(ctx, OpenTelemetry.status(:error, ""))

    OpentelemetryTelemetry.end_telemetry_span(@tracer_id, metadata)
  end
end
| 27.362745
| 83
| 0.66607
|
79ff4067421ecbbda82fdfd2362b5d907658752b
| 787
|
ex
|
Elixir
|
out/manticoresearch-elixir/lib/manticoresearch/model/insert_document_request.ex
|
manticoresoftware/openapi
|
79bccede913291724c83ebcad5590f1f13ff7726
|
[
"MIT"
] | null | null | null |
out/manticoresearch-elixir/lib/manticoresearch/model/insert_document_request.ex
|
manticoresoftware/openapi
|
79bccede913291724c83ebcad5590f1f13ff7726
|
[
"MIT"
] | 3
|
2021-12-21T08:18:48.000Z
|
2022-03-24T10:50:37.000Z
|
out/manticoresearch-elixir/lib/manticoresearch/model/insert_document_request.ex
|
manticoresoftware/openapi
|
79bccede913291724c83ebcad5590f1f13ff7726
|
[
"MIT"
] | 5
|
2021-12-11T06:10:14.000Z
|
2022-03-18T11:05:24.000Z
|
# Manticore Search Client
# Copyright (c) 2020-2021, Manticore Software LTD (https://manticoresearch.com)
#
# All rights reserved
# Do not edit the class manually.
# NOTE: generated model (OpenAPI code generator) — mirrors the API schema.
defmodule Manticoresearch.Model.InsertDocumentRequest do
  @moduledoc """
  Object with document data.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"index",
    :"cluster",
    :"id",
    :"doc"
  ]

  # Only :index is required by the schema; :doc maps field names to values.
  @type t :: %__MODULE__{
    :"index" => String.t,
    :"cluster" => String.t | nil,
    :"id" => integer() | nil,
    :"doc" => %{optional(String.t) => AnyType}
  }
end
defimpl Poison.Decoder, for: Manticoresearch.Model.InsertDocumentRequest do
  import Manticoresearch.Deserializer

  # Post-process the decoded struct: the `doc` map's values are deserialized
  # into AnyType models; all other fields are used as-is.
  def decode(struct, opts) do
    deserialize(struct, :"doc", :map, Manticoresearch.Model.AnyType, opts)
  end
end
| 21.27027
| 79
| 0.660737
|
79ff45f945f26e9fb38da31b8dfe5fc4dea4ee06
| 1,272
|
exs
|
Elixir
|
test/controllers/oembed_controller_test.exs
|
remerle/asciinema-server
|
895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57
|
[
"Apache-2.0"
] | 893
|
2017-09-14T14:18:29.000Z
|
2022-03-31T21:45:08.000Z
|
test/controllers/oembed_controller_test.exs
|
remerle/asciinema-server
|
895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57
|
[
"Apache-2.0"
] | 103
|
2017-09-29T22:15:33.000Z
|
2022-03-27T21:47:43.000Z
|
test/controllers/oembed_controller_test.exs
|
remerle/asciinema-server
|
895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57
|
[
"Apache-2.0"
] | 152
|
2017-09-07T12:43:15.000Z
|
2022-03-10T18:47:47.000Z
|
# oEmbed endpoint tests: JSON/XML formats and maxwidth/maxheight handling.
defmodule Asciinema.OembedControllerTest do
  use AsciinemaWeb.ConnCase
  import Asciinema.Factory
  alias AsciinemaWeb.Endpoint

  describe "show" do
    test "for JSON format", %{conn: conn} do
      asciicast = insert(:asciicast)
      url = Routes.asciicast_url(Endpoint, :show, asciicast)

      conn =
        get(
          conn,
          Routes.oembed_path(conn, :show,
            url: url,
            format: "json",
            maxwidth: "500",
            maxheight: "300"
          )
        )

      assert response(conn, 200)
      assert response_content_type(conn, :json)
    end

    test "for XML format", %{conn: conn} do
      asciicast = insert(:asciicast)
      url = Routes.asciicast_url(Endpoint, :show, asciicast)
      conn = get(conn, Routes.oembed_path(conn, :show, url: url, format: "xml"))

      assert response(conn, 200)
      assert response_content_type(conn, :xml)
    end

    # maxheight is optional; only maxwidth is supplied here.
    test "for maxwidth without maxheight", %{conn: conn} do
      asciicast = insert(:asciicast)
      url = Routes.asciicast_url(Endpoint, :show, asciicast)
      conn = get(conn, Routes.oembed_path(conn, :show, url: url, format: "json", maxwidth: "500"))

      assert response(conn, 200)
      assert response_content_type(conn, :json)
    end
  end
end
| 27.06383
| 98
| 0.621069
|
79ff5d86d7d9ea37cb62a76e0c7b51592603f32e
| 1,142
|
ex
|
Elixir
|
lib/exqlite/stream.ex
|
laszlohegedus/exqlite
|
ed0668228fc668cf6d49e1989614eaf02c5a2dd9
|
[
"MIT"
] | null | null | null |
lib/exqlite/stream.ex
|
laszlohegedus/exqlite
|
ed0668228fc668cf6d49e1989614eaf02c5a2dd9
|
[
"MIT"
] | null | null | null |
lib/exqlite/stream.ex
|
laszlohegedus/exqlite
|
ed0668228fc668cf6d49e1989614eaf02c5a2dd9
|
[
"MIT"
] | null | null | null |
defmodule Exqlite.Stream do
  @moduledoc false

  # Lazily streams query results through DBConnection.
  # :query may be a prepared %Exqlite.Query{} or a raw SQL string.
  defstruct [:conn, :query, :params, :options]

  @type t :: %Exqlite.Stream{}

  defimpl Enumerable do
    # Clause 1: already-prepared query — hand off to DBConnection.Stream.
    def reduce(%Exqlite.Stream{query: %Exqlite.Query{} = query} = stream, acc, fun) do
      # Possibly need to pass a chunk size option along so that we can let
      # the NIF chunk it.
      %Exqlite.Stream{conn: conn, params: params, options: opts} = stream

      stream = %DBConnection.Stream{
        conn: conn,
        query: query,
        params: params,
        opts: opts
      }

      DBConnection.reduce(stream, acc, fun)
    end

    # Clause 2: raw SQL statement — wrap it in a query and let
    # DBConnection.PrepareStream prepare it on first use.
    def reduce(%Exqlite.Stream{query: statement} = stream, acc, fun) do
      %Exqlite.Stream{conn: conn, params: params, options: opts} = stream
      query = %Exqlite.Query{name: "", statement: statement}

      stream = %DBConnection.PrepareStream{
        conn: conn,
        query: query,
        params: params,
        opts: opts
      }

      DBConnection.reduce(stream, acc, fun)
    end

    # Only reduce/3 is supported; Enumerable falls back to it for these.
    def member?(_, _), do: {:error, __MODULE__}

    def count(_), do: {:error, __MODULE__}

    def slice(_), do: {:error, __MODULE__}
  end
end
| 26.55814
| 86
| 0.614711
|
79ff6339477439a4ffac6d8bf775445e314844b6
| 3,055
|
exs
|
Elixir
|
test/wolfgang_api_web/controllers/album_controller_test.exs
|
davidenglishmusic/wolfgang_api
|
828df91ddf46dcce4dee86c0f416cf292fc261be
|
[
"MIT"
] | null | null | null |
test/wolfgang_api_web/controllers/album_controller_test.exs
|
davidenglishmusic/wolfgang_api
|
828df91ddf46dcce4dee86c0f416cf292fc261be
|
[
"MIT"
] | null | null | null |
test/wolfgang_api_web/controllers/album_controller_test.exs
|
davidenglishmusic/wolfgang_api
|
828df91ddf46dcce4dee86c0f416cf292fc261be
|
[
"MIT"
] | null | null | null |
# CRUD controller tests for the albums JSON API.
defmodule WolfgangApiWeb.AlbumControllerTest do
  use WolfgangApiWeb.ConnCase

  alias WolfgangApi.Directory
  alias WolfgangApi.Directory.Album

  @create_attrs %{
    album_artist: "some album_artist",
    composer: "some composer",
    genre: "some genre",
    title: "some title"
  }
  @update_attrs %{
    album_artist: "some updated album_artist",
    composer: "some updated composer",
    genre: "some updated genre",
    title: "some updated title"
  }
  @invalid_attrs %{album_artist: nil, composer: nil, genre: nil, title: nil}

  # Inserts an album with the default attributes.
  def fixture(:album) do
    {:ok, album} = Directory.create_album(@create_attrs)
    album
  end

  setup %{conn: conn} do
    {:ok, conn: put_req_header(conn, "accept", "application/json")}
  end

  describe "index" do
    test "lists all albums", %{conn: conn} do
      conn = get(conn, Routes.album_path(conn, :index))
      assert json_response(conn, 200)["data"] == []
    end
  end

  describe "create album" do
    test "renders album when data is valid", %{conn: conn} do
      conn = post(conn, Routes.album_path(conn, :create), album: @create_attrs)
      assert %{"id" => id} = json_response(conn, 201)["data"]

      conn = get(conn, Routes.album_path(conn, :show, id))

      # Fix: pin ^id — the original rebound `id`, so the assertion would
      # pass even if show returned a different record's id.
      assert %{
               "id" => ^id,
               "album_artist" => "some album_artist",
               "composer" => "some composer",
               "genre" => "some genre",
               "title" => "some title"
             } = json_response(conn, 200)["data"]
    end

    test "renders errors when data is invalid", %{conn: conn} do
      conn = post(conn, Routes.album_path(conn, :create), album: @invalid_attrs)
      assert json_response(conn, 422)["errors"] != %{}
    end
  end

  describe "update album" do
    setup [:create_album]

    test "renders album when data is valid", %{conn: conn, album: %Album{id: id} = album} do
      conn = put(conn, Routes.album_path(conn, :update, album), album: @update_attrs)
      assert %{"id" => ^id} = json_response(conn, 200)["data"]

      conn = get(conn, Routes.album_path(conn, :show, id))

      # Fix: pin ^id here as well (same non-assertive rebinding as above).
      assert %{
               "id" => ^id,
               "album_artist" => "some updated album_artist",
               "composer" => "some updated composer",
               "genre" => "some updated genre",
               "title" => "some updated title"
             } = json_response(conn, 200)["data"]
    end

    test "renders errors when data is invalid", %{conn: conn, album: album} do
      conn = put(conn, Routes.album_path(conn, :update, album), album: @invalid_attrs)
      assert json_response(conn, 422)["errors"] != %{}
    end
  end

  describe "delete album" do
    setup [:create_album]

    test "deletes chosen album", %{conn: conn, album: album} do
      conn = delete(conn, Routes.album_path(conn, :delete, album))
      assert response(conn, 204)

      # A subsequent show must 404.
      assert_error_sent 404, fn ->
        get(conn, Routes.album_path(conn, :show, album))
      end
    end
  end

  defp create_album(_) do
    album = fixture(:album)
    {:ok, album: album}
  end
end
| 30.247525
| 92
| 0.606547
|
79ff6a8338931c195646796e111f93dcdb5c5caf
| 2,334
|
ex
|
Elixir
|
apps/astarte_housekeeping_api/lib/astarte_housekeeping_api/config.ex
|
Annopaolo/astarte
|
f8190e8bf044759a9b84bdeb5786a55b6f793a4f
|
[
"Apache-2.0"
] | 191
|
2018-03-30T13:23:08.000Z
|
2022-03-02T12:05:32.000Z
|
apps/astarte_housekeeping_api/lib/astarte_housekeeping_api/config.ex
|
Annopaolo/astarte
|
f8190e8bf044759a9b84bdeb5786a55b6f793a4f
|
[
"Apache-2.0"
] | 402
|
2018-03-30T13:37:00.000Z
|
2022-03-31T16:47:10.000Z
|
apps/astarte_housekeeping_api/lib/astarte_housekeeping_api/config.ex
|
Annopaolo/astarte
|
f8190e8bf044759a9b84bdeb5786a55b6f793a4f
|
[
"Apache-2.0"
] | 24
|
2018-03-30T13:29:48.000Z
|
2022-02-28T11:10:26.000Z
|
#
# This file is part of Astarte.
#
# Copyright 2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Runtime configuration for the Housekeeping API, declared via Skogsra.
defmodule Astarte.Housekeeping.API.Config do
  alias Astarte.Housekeeping.API.Config.JWTPublicKeyPEMType

  use Skogsra

  @envdoc "The bind address for the Phoenix server."
  app_env :bind_address, :astarte_housekeeping_api, :bind_address,
    os_env: "HOUSEKEEPING_API_BIND_ADDRESS",
    type: :binary,
    default: "0.0.0.0"

  @envdoc """
  Disables the authentication. CHANGING IT TO TRUE IS GENERALLY A REALLY BAD IDEA IN A PRODUCTION ENVIRONMENT, IF YOU DON'T KNOW WHAT YOU ARE DOING.
  """
  # NOTE(review): declared with `type: :binary` yet defaults to the boolean
  # `false` — looks like it should be `type: :boolean`; confirm against the
  # Skogsra type-coercion rules before changing.
  app_env :disable_authentication, :astarte_housekeeping_api, :disable_authentication,
    os_env: "HOUSEKEEPING_API_DISABLE_AUTHENTICATION",
    type: :binary,
    default: false

  @envdoc "The JWT public key."
  app_env :jwt_public_key_pem, :astarte_housekeeping_api, :jwt_public_key_pem,
    os_env: "HOUSEKEEPING_API_JWT_PUBLIC_KEY_PATH",
    type: JWTPublicKeyPEMType

  # NOTE(review): this one uses @doc where its siblings use @envdoc —
  # presumably unintentional; confirm which attribute Skogsra expects.
  @doc "The RPC client module."
  app_env :rpc_client, :astarte_housekeeping_api, :rpc_client,
    os_env: "HOUSEKEEPING_API_RPC_CLIENT",
    binding_skip: [:system],
    type: :unsafe_module,
    default: Astarte.RPC.AMQP.Client

  @doc """
  Returns true if the authentication is disabled.
  """
  @spec authentication_disabled?() :: boolean()
  def authentication_disabled? do
    disable_authentication!()
  end

  @doc """
  Returns :ok if the JWT key is valid, otherwise raise an exception.
  """
  def validate_jwt_public_key_pem!() do
    if authentication_disabled?() do
      # No key needed when auth is off.
      :ok
    else
      case jwt_public_key_pem() do
        {:ok, nil} ->
          raise "JWT public key not found, HOUSEKEEPING_API_JWT_PUBLIC_KEY_PATH must be set when authentication is enabled."

        {:ok, _key} ->
          :ok
      end
    end
  end
end
| 31.12
| 148
| 0.72665
|
79ff98fd4bc3287f6a8fbbe8a88a08c1e7b97996
| 1,341
|
ex
|
Elixir
|
apps/faqcheck_web/lib/faqcheck_web/views/error_helpers.ex
|
csboling/faqcheck
|
bc182c365d466c8dcacc6b1a5fe9186a2c912cd4
|
[
"CC0-1.0"
] | null | null | null |
apps/faqcheck_web/lib/faqcheck_web/views/error_helpers.ex
|
csboling/faqcheck
|
bc182c365d466c8dcacc6b1a5fe9186a2c912cd4
|
[
"CC0-1.0"
] | 20
|
2021-09-08T04:07:31.000Z
|
2022-03-10T21:52:24.000Z
|
apps/faqcheck_web/lib/faqcheck_web/views/error_helpers.ex
|
csboling/faqcheck
|
bc182c365d466c8dcacc6b1a5fe9186a2c912cd4
|
[
"CC0-1.0"
] | null | null | null |
defmodule FaqcheckWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  use Phoenix.HTML

  require FaqcheckWeb.Gettext

  @doc """
  Generates tag for inlined form input errors.

  Each error for `field` is rendered as a collapsible `<details>` element:
  the summary shows a translated "input was not understood" message with the
  offending input, and the body shows the formatted exception + stacktrace
  taken from the error's metadata.
  """
  def error_tag(form, field) do
    name = input_name(form, field)
    form.errors
    |> Keyword.get_values(field)
    |> Enum.map(fn {msg, opts} ->
      # Error metadata attached by the validation layer:
      # original input, exception, and its stacktrace.
      data = Keyword.get(opts, :data)
      error = Keyword.get(opts, :error)
      stacktrace = Keyword.get(opts, :stacktrace)
      ~E"""
      <details class="invalid-feedback" phx-error-for="<%= field %>">
        <summary><%= name <> ": " <> FaqcheckWeb.Gettext.dgettext "errors", "input was not understood: %{input}", input: inspect(data) %></summary>
        <pre><%= Exception.format(:error, error, stacktrace) %></pre>
      </details>
      """
    end)
  end

  @doc """
  Renders a warning badge when `field` has a pending change on an
  already-persisted record; renders nothing for new records, unchanged
  fields, or non-changeset form sources.
  """
  def change_warning(form, field) do
    case form.source do
      %Ecto.Changeset{changes: changes} ->
        # Only warn for records loaded from the DB (not new inserts).
        if Ecto.get_meta(form.source.data, :state) == :loaded do
          change = changes[field]
          if !is_nil(change) do
            ~E"""
            <span class="alert-warning" phx-error-for="<%= field %>">
              <%= FaqcheckWeb.Gettext.gettext "'%{field}' has changed", field: Atom.to_string(field) %>
            </span>
            """
          else
            []
          end
        else
          []
        end
      _ ->
        []
    end
  end
end
| 26.294118
| 147
| 0.604773
|
79ffa578542a453c51c9c90fd1781425765efe5e
| 1,305
|
exs
|
Elixir
|
bench/encoder.exs
|
rdf-elixir/rdf-xml-ex
|
abee756be39d64381a5126fb81f5cc1b4190f55d
|
[
"MIT"
] | 1
|
2020-11-16T15:07:18.000Z
|
2020-11-16T15:07:18.000Z
|
bench/encoder.exs
|
rdf-elixir/rdf-xml-ex
|
abee756be39d64381a5126fb81f5cc1b4190f55d
|
[
"MIT"
] | null | null | null |
bench/encoder.exs
|
rdf-elixir/rdf-xml-ex
|
abee756be39d64381a5126fb81f5cc1b4190f55d
|
[
"MIT"
] | null | null | null |
alias RDF.XML.Encoder
data_dir = "bench/data/"
example_file = Path.join(data_dir, "org.rdf")
example_xml = File.read!(example_file)
example_graph = RDF.XML.Decoder.decode!(example_xml)
Benchee.run(
%{
"encode to string directly" => fn ->
{:ok, xml} = Encoder.encode(example_graph)
end,
"encode to string via string stream" => fn ->
example_graph
|> Encoder.stream(mode: :string)
|> Enum.to_list()
|> IO.iodata_to_binary()
end,
"encode to string via iodata stream" => fn ->
example_graph
|> Encoder.stream(mode: :iodata)
|> Enum.to_list()
|> IO.iodata_to_binary()
end
},
memory_time: 2
)
tmp_dir = System.tmp_dir!()
tmp_file = Path.join(tmp_dir, "rdf_xml_encode_bench.rdf")
Benchee.run(
%{
"encode to file directly" => fn ->
{:ok, xml} = Encoder.encode(example_graph)
File.write!(tmp_file, xml)
end,
"encode to file via string stream" => fn ->
example_graph
|> Encoder.stream(mode: :string)
|> Enum.into(File.stream!(tmp_file))
end,
"encode to file via iodata stream" => fn ->
example_graph
|> Encoder.stream(mode: :iodata)
|> Enum.into(File.stream!(tmp_file))
end
},
memory_time: 2
)
if File.exists?(tmp_file) do
File.rm!(tmp_file)
end
| 23.727273
| 57
| 0.622222
|
79ffabbacce41054098999e35db8c12e86007dd1
| 67,241
|
exs
|
Elixir
|
integration_test/cases/repo.exs
|
zachahn/ecto
|
8119ad877f7caa837912647a014f4a63a951dba0
|
[
"Apache-2.0"
] | null | null | null |
integration_test/cases/repo.exs
|
zachahn/ecto
|
8119ad877f7caa837912647a014f4a63a951dba0
|
[
"Apache-2.0"
] | null | null | null |
integration_test/cases/repo.exs
|
zachahn/ecto
|
8119ad877f7caa837912647a014f4a63a951dba0
|
[
"Apache-2.0"
] | null | null | null |
Code.require_file "../support/types.exs", __DIR__
defmodule Ecto.Integration.RepoTest do
use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true)
alias Ecto.Integration.TestRepo
import Ecto.Query
alias Ecto.Integration.Post
alias Ecto.Integration.User
alias Ecto.Integration.Comment
alias Ecto.Integration.Permalink
alias Ecto.Integration.Custom
alias Ecto.Integration.Barebone
alias Ecto.Integration.CompositePk
alias Ecto.Integration.PostUserCompositePk
test "returns already started for started repos" do
assert {:error, {:already_started, _}} = TestRepo.start_link
end
test "fetch empty" do
assert TestRepo.all(Post) == []
assert TestRepo.all(from p in Post) == []
end
test "fetch with in" do
TestRepo.insert!(%Post{title: "hello"})
# Works without the query cache.
assert_raise Ecto.Query.CastError, fn ->
TestRepo.all(from p in Post, where: p.title in ^nil)
end
assert [] = TestRepo.all from p in Post, where: p.title in []
assert [] = TestRepo.all from p in Post, where: p.title in ["1", "2", "3"]
assert [] = TestRepo.all from p in Post, where: p.title in ^[]
assert [_] = TestRepo.all from p in Post, where: p.title not in []
assert [_] = TestRepo.all from p in Post, where: p.title in ["1", "hello", "3"]
assert [_] = TestRepo.all from p in Post, where: p.title in ["1", ^"hello", "3"]
assert [_] = TestRepo.all from p in Post, where: p.title in ^["1", "hello", "3"]
# Still doesn't work after the query cache.
assert_raise Ecto.Query.CastError, fn ->
TestRepo.all(from p in Post, where: p.title in ^nil)
end
end
test "fetch using named from" do
TestRepo.insert!(%Post{title: "hello"})
query =
from(p in Post, as: :post)
|> where([post: p], p.title == "hello")
assert [_] = TestRepo.all query
end
test "fetch without schema" do
%Post{} = TestRepo.insert!(%Post{title: "title1"})
%Post{} = TestRepo.insert!(%Post{title: "title2"})
assert ["title1", "title2"] =
TestRepo.all(from(p in "posts", order_by: p.title, select: p.title))
assert [_] =
TestRepo.all(from(p in "posts", where: p.title == "title1", select: p.id))
end
@tag :invalid_prefix
test "fetch with invalid prefix" do
assert catch_error(TestRepo.all("posts", prefix: "oops"))
end
test "insert, update and delete" do
post = %Post{title: "insert, update, delete", text: "fetch empty"}
meta = post.__meta__
assert %Post{} = inserted = TestRepo.insert!(post)
assert %Post{} = updated = TestRepo.update!(Ecto.Changeset.change(inserted, text: "new"))
deleted_meta = put_in meta.state, :deleted
assert %Post{__meta__: ^deleted_meta} = TestRepo.delete!(updated)
loaded_meta = put_in meta.state, :loaded
assert %Post{__meta__: ^loaded_meta} = TestRepo.insert!(post)
post = TestRepo.one(Post)
assert post.__meta__.state == :loaded
assert post.inserted_at
end
test "insert, update and delete with field source" do
permalink = %Permalink{url: "url"}
assert %Permalink{url: "url"} = inserted =
TestRepo.insert!(permalink)
assert %Permalink{url: "new"} = updated =
TestRepo.update!(Ecto.Changeset.change(inserted, url: "new"))
assert %Permalink{url: "new"} =
TestRepo.delete!(updated)
end
@tag :composite_pk
test "insert, update and delete with composite pk" do
c1 = TestRepo.insert!(%CompositePk{a: 1, b: 2, name: "first"})
c2 = TestRepo.insert!(%CompositePk{a: 1, b: 3, name: "second"})
assert CompositePk |> first |> TestRepo.one == c1
assert CompositePk |> last |> TestRepo.one == c2
changeset = Ecto.Changeset.cast(c1, %{name: "first change"}, ~w(name)a)
c1 = TestRepo.update!(changeset)
assert TestRepo.get_by!(CompositePk, %{a: 1, b: 2}) == c1
TestRepo.delete!(c2)
assert TestRepo.all(CompositePk) == [c1]
assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
TestRepo.get(CompositePk, [])
end
assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
TestRepo.get!(CompositePk, [1, 2])
end
end
@tag :composite_pk
test "insert, update and delete with associated composite pk" do
user = TestRepo.insert!(%User{})
post = TestRepo.insert!(%Post{title: "post title", text: "post text"})
user_post = TestRepo.insert!(%PostUserCompositePk{user_id: user.id, post_id: post.id})
assert TestRepo.get_by!(PostUserCompositePk, [user_id: user.id, post_id: post.id]) == user_post
TestRepo.delete!(user_post)
assert TestRepo.all(PostUserCompositePk) == []
end
@tag :invalid_prefix
test "insert, update and delete with invalid prefix" do
post = TestRepo.insert!(%Post{})
changeset = Ecto.Changeset.change(post, title: "foo")
assert catch_error(TestRepo.insert(%Post{}, prefix: "oops"))
assert catch_error(TestRepo.update(changeset, prefix: "oops"))
assert catch_error(TestRepo.delete(changeset, prefix: "oops"))
end
test "insert and update with changeset" do
# On insert we merge the fields and changes
changeset = Ecto.Changeset.cast(%Post{text: "x", title: "wrong"},
%{"title" => "hello", "temp" => "unknown"}, ~w(title temp)a)
post = TestRepo.insert!(changeset)
assert %Post{text: "x", title: "hello", temp: "unknown"} = post
assert %Post{text: "x", title: "hello", temp: "temp"} = TestRepo.get!(Post, post.id)
# On update we merge only fields, direct schema changes are discarded
changeset = Ecto.Changeset.cast(%{post | text: "y"},
%{"title" => "world", "temp" => "unknown"}, ~w(title temp)a)
assert %Post{text: "y", title: "world", temp: "unknown"} = TestRepo.update!(changeset)
assert %Post{text: "x", title: "world", temp: "temp"} = TestRepo.get!(Post, post.id)
end
test "insert and update with empty changeset" do
# On insert we merge the fields and changes
changeset = Ecto.Changeset.cast(%Permalink{}, %{}, ~w())
assert %Permalink{} = permalink = TestRepo.insert!(changeset)
# Assert we can update the same value twice,
# without changes, without triggering stale errors.
changeset = Ecto.Changeset.cast(permalink, %{}, ~w())
assert TestRepo.update!(changeset) == permalink
assert TestRepo.update!(changeset) == permalink
end
@tag :no_primary_key
test "insert with no primary key" do
assert %Barebone{num: nil} = TestRepo.insert!(%Barebone{})
assert %Barebone{num: 13} = TestRepo.insert!(%Barebone{num: 13})
end
@tag :read_after_writes
test "insert and update with changeset read after writes" do
defmodule RAW do
use Ecto.Schema
schema "comments" do
field :text, :string
field :lock_version, :integer, read_after_writes: true
end
end
changeset = Ecto.Changeset.cast(struct(RAW, %{}), %{}, ~w())
# If the field is nil, we will not send it
# and read the value back from the database.
assert %{id: cid, lock_version: 1} = raw = TestRepo.insert!(changeset)
# Set the counter to 11, so we can read it soon
TestRepo.update_all from(u in RAW, where: u.id == ^cid), set: [lock_version: 11]
# We will read back on update too
changeset = Ecto.Changeset.cast(raw, %{"text" => "0"}, ~w(text)a)
assert %{id: ^cid, lock_version: 11, text: "0"} = TestRepo.update!(changeset)
end
test "insert autogenerates for custom type" do
post = TestRepo.insert!(%Post{uuid: nil})
assert byte_size(post.uuid) == 36
assert TestRepo.get_by(Post, uuid: post.uuid) == post
end
@tag :id_type
test "insert autogenerates for custom id type" do
defmodule ID do
use Ecto.Schema
@primary_key {:id, Elixir.Custom.Permalink, autogenerate: true}
schema "posts" do
end
end
id = TestRepo.insert!(struct(ID, id: nil))
assert id.id
assert TestRepo.get_by(ID, id: "#{id.id}-hello") == id
end
@tag :id_type
@tag :assigns_id_type
test "insert with user-assigned primary key" do
assert %Post{id: 1} = TestRepo.insert!(%Post{id: 1})
end
@tag :id_type
@tag :assigns_id_type
test "insert and update with user-assigned primary key in changeset" do
changeset = Ecto.Changeset.cast(%Post{id: 11}, %{"id" => "13"}, ~w(id)a)
assert %Post{id: 13} = post = TestRepo.insert!(changeset)
changeset = Ecto.Changeset.cast(post, %{"id" => "15"}, ~w(id)a)
assert %Post{id: 15} = TestRepo.update!(changeset)
end
test "insert and fetch a schema with utc timestamps" do
datetime = DateTime.from_unix!(System.system_time(:seconds), :seconds)
TestRepo.insert!(%User{inserted_at: datetime})
assert [%{inserted_at: ^datetime}] = TestRepo.all(User)
end
test "optimistic locking in update/delete operations" do
import Ecto.Changeset, only: [cast: 3, optimistic_lock: 2]
base_post = TestRepo.insert!(%Comment{})
changeset_ok =
base_post
|> cast(%{"text" => "foo.bar"}, ~w(text)a)
|> optimistic_lock(:lock_version)
TestRepo.update!(changeset_ok)
changeset_stale = optimistic_lock(base_post, :lock_version)
assert_raise Ecto.StaleEntryError, fn -> TestRepo.update!(changeset_stale) end
assert_raise Ecto.StaleEntryError, fn -> TestRepo.delete!(changeset_stale) end
end
test "optimistic locking in update operation with nil field" do
import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3]
base_post =
%Comment{}
|> cast(%{lock_version: nil}, [:lock_version])
|> TestRepo.insert!()
incrementer =
fn
nil -> 1
old_value -> old_value + 1
end
changeset_ok =
base_post
|> cast(%{"text" => "foo.bar"}, ~w(text)a)
|> optimistic_lock(:lock_version, incrementer)
TestRepo.update!(changeset_ok)
end
test "optimistic locking in delete operation with nil field" do
import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3]
base_post =
%Comment{}
|> cast(%{lock_version: nil}, [:lock_version])
|> TestRepo.insert!()
incrementer =
fn
nil -> 1
old_value -> old_value + 1
end
changeset_ok = optimistic_lock(base_post, :lock_version, incrementer)
TestRepo.delete!(changeset_ok)
end
@tag :unique_constraint
test "unique constraint" do
changeset = Ecto.Changeset.change(%Post{}, uuid: Ecto.UUID.generate())
{:ok, _} = TestRepo.insert(changeset)
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "posts_uuid_index (unique_constraint)"
assert exception.message =~ "The changeset has not defined any constraint."
assert exception.message =~ "call `unique_constraint/3`"
message = ~r/constraint error when attempting to insert struct/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.unique_constraint(:uuid, name: :posts_email_changeset)
|> TestRepo.insert()
end
assert exception.message =~ "posts_email_changeset (unique_constraint)"
{:error, changeset} =
changeset
|> Ecto.Changeset.unique_constraint(:uuid)
|> TestRepo.insert()
assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}]
assert changeset.data.__meta__.state == :built
end
@tag :unique_constraint
test "unique constraint from association" do
uuid = Ecto.UUID.generate()
post = & %Post{} |> Ecto.Changeset.change(uuid: &1) |> Ecto.Changeset.unique_constraint(:uuid)
{:error, changeset} =
TestRepo.insert %User{
comments: [%Comment{}],
permalink: %Permalink{},
posts: [post.(uuid), post.(uuid), post.(Ecto.UUID.generate)]
}
[_, p2, _] = changeset.changes.posts
assert p2.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}]
end
@tag :id_type
@tag :unique_constraint
test "unique constraint with binary_id" do
changeset = Ecto.Changeset.change(%Custom{}, uuid: Ecto.UUID.generate())
{:ok, _} = TestRepo.insert(changeset)
{:error, changeset} =
changeset
|> Ecto.Changeset.unique_constraint(:uuid)
|> TestRepo.insert()
assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "customs_uuid_index"]}]
assert changeset.data.__meta__.state == :built
end
test "unique pseudo-constraint violation error message with join table at the repository" do
post =
TestRepo.insert!(%Post{title: "some post"})
|> TestRepo.preload(:unique_users)
user =
TestRepo.insert!(%User{name: "some user"})
# Violate the unique composite index
{:error, changeset} =
post
|> Ecto.Changeset.change
|> Ecto.Changeset.put_assoc(:unique_users, [user, user])
|> TestRepo.update
errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
assert errors == %{unique_users: [%{}, %{id: ["has already been taken"]}]}
refute changeset.valid?
end
@tag :join
@tag :unique_constraint
test "unique constraint violation error message with join table in single changeset" do
post =
TestRepo.insert!(%Post{title: "some post"})
|> TestRepo.preload(:constraint_users)
user =
TestRepo.insert!(%User{name: "some user"})
# Violate the unique composite index
{:error, changeset} =
post
|> Ecto.Changeset.change
|> Ecto.Changeset.put_assoc(:constraint_users, [user, user])
|> Ecto.Changeset.unique_constraint(:user,
name: :posts_users_composite_pk_post_id_user_id_index,
message: "has already been assigned")
|> TestRepo.update
errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
assert errors == %{constraint_users: [%{}, %{user: ["has already been assigned"]}]}
refute changeset.valid?
end
@tag :join
@tag :unique_constraint
test "unique constraint violation error message with join table and separate changesets" do
post =
TestRepo.insert!(%Post{title: "some post"})
|> TestRepo.preload(:constraint_users)
user = TestRepo.insert!(%User{name: "some user"})
post
|> Ecto.Changeset.change
|> Ecto.Changeset.put_assoc(:constraint_users, [user])
|> TestRepo.update
# Violate the unique composite index
{:error, changeset} =
post
|> Ecto.Changeset.change
|> Ecto.Changeset.put_assoc(:constraint_users, [user])
|> Ecto.Changeset.unique_constraint(:user,
name: :posts_users_composite_pk_post_id_user_id_index,
message: "has already been assigned")
|> TestRepo.update
errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
assert errors == %{constraint_users: [%{user: ["has already been assigned"]}]}
refute changeset.valid?
end
@tag :foreign_key_constraint
test "foreign key constraint" do
changeset = Ecto.Changeset.change(%Comment{post_id: 0})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)"
assert exception.message =~ "The changeset has not defined any constraint."
assert exception.message =~ "call `foreign_key_constraint/3`"
message = ~r/constraint error when attempting to insert struct/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.foreign_key_constraint(:post_id, name: :comments_post_id_other)
|> TestRepo.insert()
end
assert exception.message =~ "comments_post_id_other (foreign_key_constraint)"
{:error, changeset} =
changeset
|> Ecto.Changeset.foreign_key_constraint(:post_id)
|> TestRepo.insert()
assert changeset.errors == [post_id: {"does not exist", [constraint: :foreign, constraint_name: "comments_post_id_fkey"]}]
end
@tag :foreign_key_constraint
test "assoc constraint" do
changeset = Ecto.Changeset.change(%Comment{post_id: 0})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)"
assert exception.message =~ "The changeset has not defined any constraint."
message = ~r/constraint error when attempting to insert struct/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.assoc_constraint(:post, name: :comments_post_id_other)
|> TestRepo.insert()
end
assert exception.message =~ "comments_post_id_other (foreign_key_constraint)"
{:error, changeset} =
changeset
|> Ecto.Changeset.assoc_constraint(:post)
|> TestRepo.insert()
assert changeset.errors == [post: {"does not exist", [constraint: :assoc, constraint_name: "comments_post_id_fkey"]}]
end
@tag :foreign_key_constraint
test "no assoc constraint error" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Permalink{user_id: user.id})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to delete struct/, fn ->
TestRepo.delete!(user)
end
assert exception.message =~ "permalinks_user_id_fkey (foreign_key_constraint)"
assert exception.message =~ "The changeset has not defined any constraint."
end
@tag :foreign_key_constraint
test "no assoc constraint with changeset mismatch" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Permalink{user_id: user.id})
message = ~r/constraint error when attempting to delete struct/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
user
|> Ecto.Changeset.change
|> Ecto.Changeset.no_assoc_constraint(:permalink, name: :permalinks_user_id_pther)
|> TestRepo.delete()
end
assert exception.message =~ "permalinks_user_id_pther (foreign_key_constraint)"
end
@tag :foreign_key_constraint
test "no assoc constraint with changeset match" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Permalink{user_id: user.id})
{:error, changeset} =
user
|> Ecto.Changeset.change
|> Ecto.Changeset.no_assoc_constraint(:permalink)
|> TestRepo.delete()
assert changeset.errors == [permalink: {"is still associated with this entry", [constraint: :no_assoc, constraint_name: "permalinks_user_id_fkey"]}]
end
@tag :foreign_key_constraint
test "insert and update with failing child foreign key" do
defmodule Order do
use Ecto.Integration.Schema
import Ecto.Changeset
schema "orders" do
embeds_one :item, Ecto.Integration.Item
belongs_to :comment, Ecto.Integration.Comment
end
def changeset(order, params) do
order
|> cast(params, [:comment_id])
|> cast_embed(:item, with: &item_changeset/2)
|> cast_assoc(:comment, with: &comment_changeset/2)
end
def item_changeset(item, params) do
item
|> cast(params, [:price])
end
def comment_changeset(comment, params) do
comment
|> cast(params, [:post_id, :text])
|> cast_assoc(:post)
|> assoc_constraint(:post)
end
end
changeset = Order.changeset(struct(Order, %{}), %{item: %{price: 10}, comment: %{text: "1", post_id: 0}})
assert %Ecto.Changeset{} = changeset.changes.item
{:error, changeset} = TestRepo.insert(changeset)
assert %Ecto.Changeset{} = changeset.changes.item
order = TestRepo.insert!(Order.changeset(struct(Order, %{}), %{}))
|> TestRepo.preload([:comment])
changeset = Order.changeset(order, %{item: %{price: 10}, comment: %{text: "1", post_id: 0}})
assert %Ecto.Changeset{} = changeset.changes.item
{:error, changeset} = TestRepo.update(changeset)
assert %Ecto.Changeset{} = changeset.changes.item
end
test "unsafe_validate_unique/3" do
{:ok, inserted_post} = TestRepo.insert(%Post{title: "Greetings", text: "hi"})
new_post_changeset = Post.changeset(%Post{}, %{title: "Greetings", text: "ho"})
changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title], TestRepo)
assert changeset.errors[:title] ==
{"has already been taken", validation: :unsafe_unique, fields: [:title]}
changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title, :text], TestRepo)
assert changeset.errors[:title] == nil
update_changeset = Post.changeset(inserted_post, %{text: "ho"})
changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:title], TestRepo)
assert changeset.errors[:title] == nil # cannot conflict with itself
end
test "unsafe_validate_unique/3 with composite keys" do
{:ok, inserted_post} = TestRepo.insert(%CompositePk{a: 123, b: 456, name: "UniqueName"})
different_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 321})
changeset = Ecto.Changeset.unsafe_validate_unique(different_pk, [:name], TestRepo)
assert changeset.errors[:name] ==
{"has already been taken", validation: :unsafe_unique, fields: [:name]}
partial_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 456})
changeset = Ecto.Changeset.unsafe_validate_unique(partial_pk, [:name], TestRepo)
assert changeset.errors[:name] ==
{"has already been taken", validation: :unsafe_unique, fields: [:name]}
update_changeset = CompositePk.changeset(inserted_post, %{name: "NewName"})
changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:name], TestRepo)
assert changeset.valid?
assert changeset.errors[:name] == nil # cannot conflict with itself
end
test "get(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert post1 == TestRepo.get(Post, post1.id)
assert post2 == TestRepo.get(Post, to_string post2.id) # With casting
assert post1 == TestRepo.get!(Post, post1.id)
assert post2 == TestRepo.get!(Post, to_string post2.id) # With casting
TestRepo.delete!(post1)
assert nil == TestRepo.get(Post, post1.id)
assert_raise Ecto.NoResultsError, fn ->
TestRepo.get!(Post, post1.id)
end
end
test "get(!) with custom source" do
custom = Ecto.put_meta(%Custom{}, source: "posts")
custom = TestRepo.insert!(custom)
bid = custom.bid
assert %Custom{bid: ^bid, __meta__: %{source: "posts"}} =
TestRepo.get(from(c in {"posts", Custom}), bid)
end
test "get(!) with binary_id" do
custom = TestRepo.insert!(%Custom{})
bid = custom.bid
assert %Custom{bid: ^bid} = TestRepo.get(Custom, bid)
end
test "get_by(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hello"})
assert post1 == TestRepo.get_by(Post, id: post1.id)
assert post1 == TestRepo.get_by(Post, text: post1.text)
assert post1 == TestRepo.get_by(Post, id: post1.id, text: post1.text)
assert post2 == TestRepo.get_by(Post, id: to_string(post2.id)) # With casting
assert nil == TestRepo.get_by(Post, text: "hey")
assert nil == TestRepo.get_by(Post, id: post2.id, text: "hey")
assert post1 == TestRepo.get_by!(Post, id: post1.id)
assert post1 == TestRepo.get_by!(Post, text: post1.text)
assert post1 == TestRepo.get_by!(Post, id: post1.id, text: post1.text)
assert post2 == TestRepo.get_by!(Post, id: to_string(post2.id)) # With casting
assert post1 == TestRepo.get_by!(Post, %{id: post1.id})
assert_raise Ecto.NoResultsError, fn ->
TestRepo.get_by!(Post, id: post2.id, text: "hey")
end
end
test "first, last and one(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert post1 == Post |> first |> TestRepo.one
assert post2 == Post |> last |> TestRepo.one
query = from p in Post, order_by: p.title
assert post1 == query |> first |> TestRepo.one
assert post2 == query |> last |> TestRepo.one
query = from p in Post, order_by: [desc: p.title], limit: 10
assert post2 == query |> first |> TestRepo.one
assert post1 == query |> last |> TestRepo.one
query = from p in Post, where: is_nil(p.id)
refute query |> first |> TestRepo.one
refute query |> first |> TestRepo.one
assert_raise Ecto.NoResultsError, fn -> query |> first |> TestRepo.one! end
assert_raise Ecto.NoResultsError, fn -> query |> last |> TestRepo.one! end
end
test "aggregate" do
assert TestRepo.aggregate(Post, :max, :visits) == nil
TestRepo.insert!(%Post{visits: 10})
TestRepo.insert!(%Post{visits: 12})
TestRepo.insert!(%Post{visits: 14})
TestRepo.insert!(%Post{visits: 14})
# Barebones
assert TestRepo.aggregate(Post, :max, :visits) == 14
assert TestRepo.aggregate(Post, :min, :visits) == 10
assert TestRepo.aggregate(Post, :count, :visits) == 4
assert "50" = to_string(TestRepo.aggregate(Post, :sum, :visits))
assert "12.5" <> _ = to_string(TestRepo.aggregate(Post, :avg, :visits))
# With order_by
query = from Post, order_by: [asc: :visits]
assert TestRepo.aggregate(query, :max, :visits) == 14
# With order_by and limit
query = from Post, order_by: [asc: :visits], limit: 2
assert TestRepo.aggregate(query, :max, :visits) == 12
# With distinct
query = from Post, order_by: [asc: :visits], distinct: true
assert TestRepo.aggregate(query, :count, :visits) == 3
end
@tag :insert_cell_wise_defaults
test "insert all" do
assert {2, nil} = TestRepo.insert_all("comments", [[text: "1"], %{text: "2", lock_version: 2}])
assert {2, nil} = TestRepo.insert_all({"comments", Comment}, [[text: "3"], %{text: "4", lock_version: 2}])
assert [%Comment{text: "1", lock_version: 1},
%Comment{text: "2", lock_version: 2},
%Comment{text: "3", lock_version: 1},
%Comment{text: "4", lock_version: 2}] = TestRepo.all(Comment)
assert {2, nil} = TestRepo.insert_all(Post, [[], []])
assert [%Post{}, %Post{}] = TestRepo.all(Post)
assert {0, nil} = TestRepo.insert_all("posts", [])
assert {0, nil} = TestRepo.insert_all({"posts", Post}, [])
end
@tag :invalid_prefix
@tag :insert_cell_wise_defaults
test "insert all with invalid prefix" do
assert catch_error(TestRepo.insert_all(Post, [[], []], prefix: "oops"))
end
@tag :returning
@tag :insert_cell_wise_defaults
test "insert all with returning with schema" do
assert {0, []} = TestRepo.insert_all(Comment, [], returning: true)
assert {0, nil} = TestRepo.insert_all(Comment, [], returning: false)
{2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "1"], [text: "2"]], returning: [:id, :text])
assert %Comment{text: "1", __meta__: %{state: :loaded}} = c1
assert %Comment{text: "2", __meta__: %{state: :loaded}} = c2
{2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "3"], [text: "4"]], returning: true)
assert %Comment{text: "3", __meta__: %{state: :loaded}} = c1
assert %Comment{text: "4", __meta__: %{state: :loaded}} = c2
end
@tag :returning
@tag :insert_cell_wise_defaults
test "insert all with returning with schema with field source" do
assert {0, []} = TestRepo.insert_all(Permalink, [], returning: true)
assert {0, nil} = TestRepo.insert_all(Permalink, [], returning: false)
{2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "1"], [url: "2"]], returning: [:id, :url])
assert %Permalink{url: "1", __meta__: %{state: :loaded}} = c1
assert %Permalink{url: "2", __meta__: %{state: :loaded}} = c2
{2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "3"], [url: "4"]], returning: true)
assert %Permalink{url: "3", __meta__: %{state: :loaded}} = c1
assert %Permalink{url: "4", __meta__: %{state: :loaded}} = c2
end
@tag :returning
@tag :insert_cell_wise_defaults
test "insert all with returning without schema" do
{2, [c1, c2]} = TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: [:id, :text])
assert %{id: _, text: "1"} = c1
assert %{id: _, text: "2"} = c2
assert_raise ArgumentError, fn ->
TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: true)
end
end
@tag :insert_cell_wise_defaults
test "insert all with dumping" do
uuid = Ecto.UUID.generate
assert {1, nil} = TestRepo.insert_all(Post, [%{uuid: uuid}])
assert [%Post{uuid: ^uuid, title: nil}] = TestRepo.all(Post)
end
@tag :insert_cell_wise_defaults
test "insert all autogenerates for binary_id type" do
custom = TestRepo.insert!(%Custom{bid: nil})
assert custom.bid
assert TestRepo.get(Custom, custom.bid)
assert TestRepo.delete!(custom)
refute TestRepo.get(Custom, custom.bid)
uuid = Ecto.UUID.generate
assert {2, nil} = TestRepo.insert_all(Custom, [%{uuid: uuid}, %{bid: custom.bid}])
assert [%Custom{bid: bid2, uuid: nil},
%Custom{bid: bid1, uuid: ^uuid}] = Enum.sort_by(TestRepo.all(Custom), & &1.uuid)
assert bid1 && bid2
assert custom.bid != bid1
assert custom.bid == bid2
end
test "update all" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, nil} = TestRepo.update_all(Post, set: [title: "x"])
assert %Post{title: "x"} = TestRepo.get(Post, id1)
assert %Post{title: "x"} = TestRepo.get(Post, id2)
assert %Post{title: "x"} = TestRepo.get(Post, id3)
assert {3, nil} = TestRepo.update_all("posts", [set: [title: nil]])
assert %Post{title: nil} = TestRepo.get(Post, id1)
assert %Post{title: nil} = TestRepo.get(Post, id2)
assert %Post{title: nil} = TestRepo.get(Post, id3)
end
@tag :invalid_prefix
test "update all with invalid prefix" do
assert catch_error(TestRepo.update_all(Post, [set: [title: "x"]], prefix: "oops"))
end
@tag :returning
test "update all with returning with schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, posts} = TestRepo.update_all(select(Post, [p], p), [set: [title: "x"]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: "x"} = p1
assert %Post{id: ^id2, title: "x"} = p2
assert %Post{id: ^id3, title: "x"} = p3
assert {3, posts} = TestRepo.update_all(select(Post, [:id, :visits]), [set: [visits: 11]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert %Post{id: ^id1, title: nil, visits: 11} = p1
assert %Post{id: ^id2, title: nil, visits: 11} = p2
assert %Post{id: ^id3, title: nil, visits: 11} = p3
end
@tag :returning
test "update all with returning without schema" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, posts} = TestRepo.update_all(select("posts", [:id, :title]), [set: [title: "x"]])
[p1, p2, p3] = Enum.sort_by(posts, & &1.id)
assert p1 == %{id: id1, title: "x"}
assert p2 == %{id: id2, title: "x"}
assert p3 == %{id: id3, title: "x"}
end
test "update all with filter" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
query = from(p in Post, where: p.title == "1" or p.title == "2",
update: [set: [text: ^"y"]])
assert {2, nil} = TestRepo.update_all(query, set: [title: "x"])
assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id1)
assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id2)
assert %Post{title: "3", text: nil} = TestRepo.get(Post, id3)
end
test "update all no entries" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
query = from(p in Post, where: p.title == "4")
assert {0, nil} = TestRepo.update_all(query, set: [title: "x"])
assert %Post{title: "1"} = TestRepo.get(Post, id1)
assert %Post{title: "2"} = TestRepo.get(Post, id2)
assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
test "update all increment syntax" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", visits: 0})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", visits: 1})
# Positive
query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: 2]]
assert {2, nil} = TestRepo.update_all(query, [])
assert %Post{visits: 2} = TestRepo.get(Post, id1)
assert %Post{visits: 3} = TestRepo.get(Post, id2)
# Negative
query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: -1]]
assert {2, nil} = TestRepo.update_all(query, [])
assert %Post{visits: 1} = TestRepo.get(Post, id1)
assert %Post{visits: 2} = TestRepo.get(Post, id2)
end
@tag :id_type
# A string value assigned to an id-typed field via update_all must be cast
# back to the id type before being dumped to the database.
test "update all with casting and dumping on id type field" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{})
  assert {1, nil} = TestRepo.update_all(Post, set: [counter: to_string(id1)])
  assert %Post{counter: ^id1} = TestRepo.get(Post, id1)
end
# Values given to update_all's `set:` are cast/dumped per their schema types
# (here a plain string and a NaiveDateTime).
test "update all with casting and dumping" do
  text = "hai"
  datetime = ~N[2014-01-16 20:26:51]
  assert %Post{id: id} = TestRepo.insert!(%Post{})

  assert {1, nil} = TestRepo.update_all(Post, set: [text: text, inserted_at: datetime])
  assert %Post{text: "hai", inserted_at: ^datetime} = TestRepo.get(Post, id)
end
# delete_all with a bare schema deletes every row and reports the count.
test "delete all" do
  assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
  assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"})
  assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"})

  assert {3, nil} = TestRepo.delete_all(Post)
  assert [] = TestRepo.all(Post)
end
@tag :invalid_prefix
# A prefix (schema/namespace) that does not exist must surface as an error
# from the adapter rather than silently succeeding.
test "delete all with invalid prefix" do
  assert catch_error(TestRepo.delete_all(Post, prefix: "oops"))
end
@tag :returning
# delete_all with `select` over a schema returns the deleted rows as structs.
test "delete all with returning with schema" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
  assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
  assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})

  assert {3, posts} = TestRepo.delete_all(select(Post, [p], p))

  # Returned order is adapter-dependent; sort before asserting.
  [p1, p2, p3] = Enum.sort_by(posts, & &1.id)
  assert %Post{id: ^id1, title: "1"} = p1
  assert %Post{id: ^id2, title: "2"} = p2
  assert %Post{id: ^id3, title: "3"} = p3
end
@tag :returning
# delete_all against a raw table name ("posts") returns plain maps, not
# schema structs.
test "delete all with returning without schema" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
  assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
  assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})

  assert {3, posts} = TestRepo.delete_all(select("posts", [:id, :title]))

  # Returned order is adapter-dependent; sort before asserting.
  [p1, p2, p3] = Enum.sort_by(posts, & &1.id)
  assert p1 == %{id: id1, title: "1"}
  assert p2 == %{id: id2, title: "2"}
  assert p3 == %{id: id3, title: "3"}
end
# delete_all honours the query's where clause: only matching rows are removed.
test "delete all with filter" do
  assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
  assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"})
  assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"})

  query = from(p in Post, where: p.title == "1" or p.title == "2")
  assert {2, nil} = TestRepo.delete_all(query)
  assert [%Post{}] = TestRepo.all(Post)
end
# A non-matching filter makes delete_all a no-op returning {0, nil}.
test "delete all no entries" do
  assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
  assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
  assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})

  query = from(p in Post, where: p.title == "4")
  assert {0, nil} = TestRepo.delete_all(query)

  assert %Post{title: "1"} = TestRepo.get(Post, id1)
  assert %Post{title: "2"} = TestRepo.get(Post, id2)
  assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
# Virtual fields are never persisted; on load they carry the struct default
# (here, the `temp` field's default "temp" — assumed from the Post schema,
# which is defined elsewhere in the suite).
test "virtual field" do
  assert %Post{id: id} = TestRepo.insert!(%Post{title: "1", text: "hai"})
  assert TestRepo.get(Post, id).temp == "temp"
end
## Query syntax
# Minimal non-schema struct used by the select tests to verify that query
# results can be loaded into arbitrary struct modules.
defmodule Foo do
  defstruct title: nil
end
# Exercises the select expression shapes supported by Ecto queries: tuples,
# lists, maps, map-update syntax, the struct/2 and map/2 "take" helpers,
# takes combined with preloads/joins, field sources and select_merge.
describe "query select" do
  test "expressions" do
    %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})

    assert [{"1", "hai"}] ==
           TestRepo.all(from p in Post, select: {p.title, p.text})

    assert [["1", "hai"]] ==
           TestRepo.all(from p in Post, select: [p.title, p.text])

    # Map select supports mixed key types (atom, string, integer).
    assert [%{:title => "1", 3 => "hai", "text" => "hai"}] ==
           TestRepo.all(from p in Post, select: %{
             :title => p.title,
             "text" => p.text,
             3 => p.text
           })

    # A queried field may itself be used as a map key.
    assert [%{:title => "1", "1" => "hai", "text" => "hai"}] ==
           TestRepo.all(from p in Post, select: %{
             :title => p.title,
             p.title => p.text,
             "text" => p.text
           })

    # Selecting into an arbitrary (non-schema) struct.
    assert [%Foo{title: "1"}] ==
           TestRepo.all(from p in Post, select: %Foo{title: p.title})
  end

  test "map update" do
    %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})

    assert [%Post{:title => "new title", text: "hai"}] =
           TestRepo.all(from p in Post, select: %{p | title: "new title"})

    assert [%Post{title: "new title", text: "hai"}] =
           TestRepo.all(from p in Post, select: %Post{p | title: "new title"})

    # Map-update errors mirror plain Elixir semantics.
    assert_raise KeyError, fn ->
      TestRepo.all(from p in Post, select: %{p | unknown: "new title"})
    end

    assert_raise BadMapError, fn ->
      TestRepo.all(from p in Post, select: %{p.title | title: "new title"})
    end

    assert_raise BadStructError, fn ->
      TestRepo.all(from p in Post, select: %Foo{p | title: p.title})
    end
  end

  test "take with structs" do
    %{id: pid1} = TestRepo.insert!(%Post{title: "1"})
    %{id: pid2} = TestRepo.insert!(%Post{title: "2"})
    %{id: pid3} = TestRepo.insert!(%Post{title: "3"})

    # struct/2 loads only the listed fields; the rest stay nil.
    [p1, p2, p3] = Post |> select([p], struct(p, [:title])) |> order_by([:title]) |> TestRepo.all
    refute p1.id
    assert p1.title == "1"
    assert match?(%Post{}, p1)
    refute p2.id
    assert p2.title == "2"
    assert match?(%Post{}, p2)
    refute p3.id
    assert p3.title == "3"
    assert match?(%Post{}, p3)

    # A bare field list on a schema behaves like struct/2.
    [p1, p2, p3] = Post |> select([:id]) |> order_by([:id]) |> TestRepo.all
    assert %Post{id: ^pid1} = p1
    assert %Post{id: ^pid2} = p2
    assert %Post{id: ^pid3} = p3
  end

  test "take with maps" do
    %{id: pid1} = TestRepo.insert!(%Post{title: "1"})
    %{id: pid2} = TestRepo.insert!(%Post{title: "2"})
    %{id: pid3} = TestRepo.insert!(%Post{title: "3"})

    # map/2 against a raw table name returns plain maps.
    [p1, p2, p3] = "posts" |> select([p], map(p, [:title])) |> order_by([:title]) |> TestRepo.all
    assert p1 == %{title: "1"}
    assert p2 == %{title: "2"}
    assert p3 == %{title: "3"}

    [p1, p2, p3] = "posts" |> select([:id]) |> order_by([:id]) |> TestRepo.all
    assert p1 == %{id: pid1}
    assert p2 == %{id: pid2}
    assert p3 == %{id: pid3}
  end

  test "take with preload assocs" do
    %{id: pid} = TestRepo.insert!(%Post{title: "post"})
    TestRepo.insert!(%Comment{post_id: pid, text: "comment"})
    fields = [:id, :title, comments: [:text, :post_id]]

    # Nested field lists also narrow the preloaded association.
    [p] = Post |> preload(:comments) |> select([p], ^fields) |> TestRepo.all
    assert %Post{title: "post"} = p
    assert [%Comment{text: "comment"}] = p.comments

    [p] = Post |> preload(:comments) |> select([p], struct(p, ^fields)) |> TestRepo.all
    assert %Post{title: "post"} = p
    assert [%Comment{text: "comment"}] = p.comments

    [p] = Post |> preload(:comments) |> select([p], map(p, ^fields)) |> TestRepo.all
    assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid}]}
  end

  test "take with nil preload assoc" do
    %{id: cid} = TestRepo.insert!(%Comment{text: "comment"})
    fields = [:id, :text, post: [:title]]

    # A missing belongs_to simply comes back as nil, for all take forms.
    [c] = Comment |> preload(:post) |> select([c], ^fields) |> TestRepo.all
    assert %Comment{id: ^cid, text: "comment", post: nil} = c

    [c] = Comment |> preload(:post) |> select([c], struct(c, ^fields)) |> TestRepo.all
    assert %Comment{id: ^cid, text: "comment", post: nil} = c

    [c] = Comment |> preload(:post) |> select([c], map(c, ^fields)) |> TestRepo.all
    assert c == %{id: cid, text: "comment", post: nil}
  end

  test "take with join assocs" do
    %{id: pid} = TestRepo.insert!(%Post{title: "post"})
    %{id: cid} = TestRepo.insert!(%Comment{post_id: pid, text: "comment"})
    fields = [:id, :title, comments: [:text, :post_id, :id]]
    query = from p in Post, where: p.id == ^pid, join: c in assoc(p, :comments), preload: [comments: c]

    p = TestRepo.one(from q in query, select: ^fields)
    assert %Post{title: "post"} = p
    assert [%Comment{text: "comment"}] = p.comments

    p = TestRepo.one(from q in query, select: struct(q, ^fields))
    assert %Post{title: "post"} = p
    assert [%Comment{text: "comment"}] = p.comments

    p = TestRepo.one(from q in query, select: map(q, ^fields))
    assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid, id: cid}]}
  end

  test "take with single nil column" do
    %Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})
    assert %{counter: nil} =
           TestRepo.one(from p in Post, where: p.title == "1", select: [:counter])
  end

  test "take with join assocs and single nil column" do
    # FIX: `counter: nil` was previously passed as an option keyword to
    # insert!/2 instead of as a struct field; it belongs inside %Post{}.
    %{id: post_id} = TestRepo.insert!(%Post{title: "1", counter: nil})
    TestRepo.insert!(%Comment{post_id: post_id, text: "comment"})

    assert %{counter: nil} ==
           TestRepo.one(from p in Post, join: c in assoc(p, :comments), where: p.title == "1", select: map(p, [:counter]))
  end

  test "field source" do
    TestRepo.insert!(%Permalink{url: "url"})
    # `url` maps to a differently-named database column; selecting and
    # aggregating it must use the source mapping transparently.
    assert ["url"] = Permalink |> select([p], p.url) |> TestRepo.all()
    assert [1] = Permalink |> select([p], count(p.url)) |> TestRepo.all()
  end

  test "merge" do
    %Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})

    # Merge on source
    assert [%Post{title: "2"}] =
           Post |> select([p], merge(p, %{title: "2"})) |> TestRepo.all()
    assert [%Post{title: "2"}] =
           Post |> select([p], p) |> select_merge([p], %{title: "2"}) |> TestRepo.all()

    # Merge on struct
    assert [%Post{title: "2"}] =
           Post |> select([p], merge(%Post{title: p.title}, %{title: "2"})) |> TestRepo.all()
    assert [%Post{title: "2"}] =
           Post |> select([p], %Post{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()

    # Merge on map
    assert [%{title: "2"}] =
           Post |> select([p], merge(%{title: p.title}, %{title: "2"})) |> TestRepo.all()
    assert [%{title: "2"}] =
           Post |> select([p], %{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
  end

  test "merge with update on self" do
    %Post{} = TestRepo.insert!(%Post{title: "1", counter: 1})

    assert [%Post{title: "1", counter: 2}] =
           Post |> select([p], merge(p, %{p | counter: 2})) |> TestRepo.all()
    assert [%Post{title: "1", counter: 2}] =
           Post |> select([p], p) |> select_merge([p], %{p | counter: 2}) |> TestRepo.all()
  end
end
# count/1 counts non-null values; count/2 with :distinct de-duplicates.
test "query count distinct" do
  TestRepo.insert!(%Post{title: "1"})
  TestRepo.insert!(%Post{title: "1"})
  TestRepo.insert!(%Post{title: "2"})

  assert [3] == Post |> select([p], count(p.title)) |> TestRepo.all
  assert [2] == Post |> select([p], count(p.title, :distinct)) |> TestRepo.all
end
# Keyword lists in `where` — both literal and interpolated with ^ — are
# expanded into AND-ed equality comparisons; nil values compare with IS NULL.
test "query where interpolation" do
  post1 = TestRepo.insert!(%Post{text: "x", title: "hello"})
  post2 = TestRepo.insert!(%Post{text: "y", title: "goodbye"})

  # An empty keyword list matches everything.
  assert [post1, post2] == Post |> where([], []) |> TestRepo.all |> Enum.sort_by(& &1.id)
  assert [post1] == Post |> where([], [title: "hello"]) |> TestRepo.all
  assert [post1] == Post |> where([], [title: "hello", id: ^post1.id]) |> TestRepo.all

  params0 = []
  params1 = [title: "hello"]
  params2 = [title: "hello", id: post1.id]
  assert [post1, post2] == (from Post, where: ^params0) |> TestRepo.all |> Enum.sort_by(& &1.id)
  assert [post1] == (from Post, where: ^params1) |> TestRepo.all
  assert [post1] == (from Post, where: ^params2) |> TestRepo.all

  # Interpolated nil must translate to an IS NULL comparison.
  post3 = TestRepo.insert!(%Post{text: "y", title: "goodbye", uuid: nil})
  params3 = [title: "goodbye", uuid: post3.uuid]
  assert [post3] == (from Post, where: ^params3) |> TestRepo.all
end
## Logging
# A query run through the repo must invoke the registered log hook with a
# populated Ecto.LogEntry carrying non-negative timing information.
test "log entry logged on query" do
  log = fn entry ->
    assert %Ecto.LogEntry{result: {:ok, _}} = entry
    assert is_integer(entry.query_time) and entry.query_time >= 0
    # FIX: previously re-checked query_time here (copy-paste); the intent is
    # to validate decode_time's own value.
    assert is_integer(entry.decode_time) and entry.decode_time >= 0
    assert is_integer(entry.queue_time) and entry.queue_time >= 0
    send(self(), :logged)
  end
  Process.put(:on_log, log)

  _ = TestRepo.all(Post)
  # The hook runs synchronously, so the message must already be in the inbox.
  assert_received :logged
end
# With `log: false` the per-operation hook must never fire; the hook itself
# fails the test if invoked.
test "log entry not logged when log is false" do
  Process.put(:on_log, fn _ -> flunk("logged") end)
  TestRepo.insert!(%Post{title: "1"}, [log: false])
end
# Upsert behaviour of Repo.insert/2 via the :on_conflict and
# :conflict_target options (:raise, :nothing, keyword lists, queries,
# :replace_all), including interaction with :returning and field sources.
describe "upsert via insert" do
  @describetag :upsert

  test "on conflict raise" do
    {:ok, inserted} = TestRepo.insert(%Post{title: "first"}, on_conflict: :raise)
    assert catch_error(TestRepo.insert(%Post{id: inserted.id, title: "second"}, on_conflict: :raise))
  end

  test "on conflict ignore" do
    post = %Post{title: "first", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing)
    assert inserted.id
    assert inserted.__meta__.state == :loaded

    # The conflicting insert is ignored: no id comes back, but the struct
    # is still marked loaded.
    {:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing)
    assert not_inserted.id == nil
    assert not_inserted.__meta__.state == :loaded
  end

  @tag :with_conflict_target
  test "on conflict and associations" do
    on_conflict = [set: [title: "second"]]
    post = %Post{uuid: Ecto.UUID.generate(),
                 title: "first", comments: [%Comment{}]}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
    assert inserted.id
  end

  @tag :with_conflict_target
  test "on conflict with inc" do
    uuid = "6fa459ea-ee8a-3ca4-894e-db77e160355e"
    post = %Post{title: "first", uuid: uuid}
    {:ok, _} = TestRepo.insert(post)
    post = %{title: "upsert", uuid: uuid}
    TestRepo.insert_all(Post, [post], on_conflict: [inc: [visits: 1]], conflict_target: :uuid)
  end

  @tag :with_conflict_target
  test "on conflict ignore and conflict target" do
    post = %Post{title: "first", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
    assert inserted.id

    # Error on non-conflict target
    assert catch_error(TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:id]))

    # Error on conflict target
    {:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
    assert not_inserted.id == nil
  end

  @tag :without_conflict_target
  test "on conflict keyword list" do
    on_conflict = [set: [title: "second"]]
    post = %Post{title: "first", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict)
    assert inserted.id

    # The returned struct is NOT reloaded after the conflict resolution,
    # so it keeps the locally-given title even though the row was updated.
    {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict)
    assert updated.id == inserted.id
    assert updated.title != "second"
    assert TestRepo.get!(Post, inserted.id).title == "second"
  end

  @tag :with_conflict_target
  test "on conflict keyword list and conflict target" do
    on_conflict = [set: [title: "second"]]
    post = %Post{title: "first", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
    assert inserted.id

    # Error on non-conflict target
    assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id]))

    # As above: local struct keeps its title, the database row is updated.
    {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
    assert updated.id == inserted.id
    assert updated.title != "second"
    assert TestRepo.get!(Post, inserted.id).title == "second"
  end

  @tag :returning
  @tag :with_conflict_target
  test "on conflict keyword list and conflict target and returning" do
    {:ok, c1} = TestRepo.insert(%Post{})
    {:ok, c2} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: [:id, :uuid])
    {:ok, c3} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: true)
    {:ok, c4} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: false)

    # Only :returning pulls the stored uuid back; without it the struct
    # keeps its locally generated value.
    assert c2.uuid == c1.uuid
    assert c3.uuid == c1.uuid
    assert c4.uuid != c1.uuid
  end

  @tag :with_conflict_target
  @tag :with_conflict_target_on_constraint
  test "on conflict keyword list and conflict target on constraint" do
    on_conflict = [set: [title: "new"]]
    post = %Post{title: "old"}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: {:constraint, :posts_pkey})
    assert inserted.id

    {:ok, updated} = TestRepo.insert(%{post | id: inserted.id}, on_conflict: on_conflict, conflict_target: {:constraint, :posts_pkey})
    assert updated.id == inserted.id
    assert updated.title != "new"
    assert TestRepo.get!(Post, inserted.id).title == "new"
  end

  @tag :returning
  @tag :with_conflict_target
  test "on conflict keyword list and conflict target and returning and field source" do
    TestRepo.insert!(%Permalink{url: "old"})
    {:ok, c1} = TestRepo.insert(%Permalink{url: "old"},
                                on_conflict: [set: [url: "new1"]],
                                conflict_target: [:url],
                                returning: [:url])

    TestRepo.insert!(%Permalink{url: "old"})
    {:ok, c2} = TestRepo.insert(%Permalink{url: "old"},
                                on_conflict: [set: [url: "new2"]],
                                conflict_target: [:url],
                                returning: true)

    assert c1.url == "new1"
    assert c2.url == "new2"
  end

  @tag :returning
  @tag :with_conflict_target
  test "on conflict ignore and returning" do
    post = %Post{title: "first", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid])
    assert inserted.id

    # Even with returning: true, an ignored conflict yields no id.
    {:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid], returning: true)
    assert not_inserted.id == nil
  end

  @tag :without_conflict_target
  test "on conflict query" do
    on_conflict = from Post, update: [set: [title: "second"]]
    post = %Post{title: "first", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict)
    assert inserted.id

    {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict)
    assert updated.id == inserted.id
    assert updated.title != "second"
    assert TestRepo.get!(Post, inserted.id).title == "second"
  end

  @tag :with_conflict_target
  test "on conflict query and conflict target" do
    on_conflict = from Post, update: [set: [title: "second"]]
    post = %Post{title: "first", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
    assert inserted.id

    # Error on non-conflict target
    assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id]))

    {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid])
    assert updated.id == inserted.id
    assert updated.title != "second"
    assert TestRepo.get!(Post, inserted.id).title == "second"
  end

  @tag :without_conflict_target
  test "on conflict replace_all" do
    post = %Post{title: "first", text: "text", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all)
    assert inserted.id

    post = %Post{title: "updated", text: "updated", uuid: post.uuid}
    post = TestRepo.insert!(post, on_conflict: :replace_all)
    assert post.id == inserted.id
    assert post.title == "updated"
    assert post.text == "updated"

    assert TestRepo.all(from p in Post, select: {p.id, p.title, p.text}) ==
           [{inserted.id, "updated", "updated"}]
    assert TestRepo.all(from p in Post, select: count(p.id)) == [1]
  end

  @tag :with_conflict_target
  test "on conflict replace_all and conflict target" do
    post = %Post{title: "first", text: "text", uuid: Ecto.UUID.generate()}
    {:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all, conflict_target: :uuid)
    assert inserted.id

    post = %Post{title: "updated", text: "updated", uuid: post.uuid}
    post = TestRepo.insert!(post, on_conflict: :replace_all, conflict_target: :uuid)
    assert post.id == inserted.id
    assert post.title == "updated"
    assert post.text == "updated"

    assert TestRepo.all(from p in Post, select: {p.id, p.title, p.text}) ==
           [{inserted.id, "updated", "updated"}]
    assert TestRepo.all(from p in Post, select: count(p.id)) == [1]
  end
end
# Upsert behaviour of Repo.insert_all/3 via :on_conflict and
# :conflict_target, including replace_all / replace_all_except_primary_key /
# {:replace, fields} variants and schemaless ("posts") sources.
describe "upsert via insert_all" do
  @describetag :upsert_all

  test "on conflict raise" do
    post = [title: "first", uuid: Ecto.UUID.generate()]
    {1, nil} = TestRepo.insert_all(Post, [post], on_conflict: :raise)
    assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :raise))
  end

  test "on conflict ignore" do
    post = [title: "first", uuid: Ecto.UUID.generate()]
    assert TestRepo.insert_all(Post, [post], on_conflict: :nothing) == {1, nil}

    # PG returns 0, MySQL returns 1
    {entries, nil} = TestRepo.insert_all(Post, [post], on_conflict: :nothing)
    assert entries == 0 or entries == 1

    assert length(TestRepo.all(Post)) == 1
  end

  @tag :with_conflict_target
  test "on conflict ignore and conflict target" do
    post = [title: "first", uuid: Ecto.UUID.generate()]
    assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) ==
           {1, nil}

    # Error on non-conflict target
    assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:id]))

    # Error on conflict target
    assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) ==
           {0, nil}
  end

  @tag :with_conflict_target
  test "on conflict keyword list and conflict target" do
    on_conflict = [set: [title: "second"]]
    post = [title: "first", uuid: Ecto.UUID.generate()]
    {1, nil} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid])

    # Error on non-conflict target
    assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id]))

    # Error on conflict target
    assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
           {1, nil}
    assert TestRepo.all(from p in Post, select: p.title) == ["second"]
  end

  @tag :with_conflict_target
  @tag :returning
  test "on conflict keyword list and conflict target and returning and source field" do
    on_conflict = [set: [url: "new"]]
    permalink = [url: "old"]

    # First call inserts and returns the stored value; second resolves the
    # conflict and returns the updated value, via the `url` field source.
    assert {1, [%Permalink{url: "old"}]} =
           TestRepo.insert_all(Permalink, [permalink],
                               on_conflict: on_conflict, conflict_target: [:url], returning: [:url])

    assert {1, [%Permalink{url: "new"}]} =
           TestRepo.insert_all(Permalink, [permalink],
                               on_conflict: on_conflict, conflict_target: [:url], returning: [:url])
  end

  @tag :with_conflict_target
  test "on conflict query and conflict target" do
    on_conflict = from Post, update: [set: [title: "second"]]
    post = [title: "first", uuid: Ecto.UUID.generate()]
    assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
           {1, nil}

    # Error on non-conflict target
    assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id]))

    # Error on conflict target
    assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
           {1, nil}
    assert TestRepo.all(from p in Post, select: p.title) == ["second"]
  end

  @tag :returning
  @tag :with_conflict_target
  test "on conflict query and conflict target and returning" do
    on_conflict = from Post, update: [set: [title: "second"]]
    post = [title: "first", uuid: Ecto.UUID.generate()]
    {1, [%{id: id}]} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
                                           conflict_target: [:uuid], returning: [:id])

    # Error on non-conflict target
    assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
                                           conflict_target: [:id], returning: [:id]))

    # Error on conflict target
    {1, [%Post{id: ^id, title: "second"}]} =
      TestRepo.insert_all(Post, [post], on_conflict: on_conflict,
                          conflict_target: [:uuid], returning: [:id, :title])
  end

  @tag :with_conflict_target
  test "source (without an ecto schema) on conflict query and conflict target" do
    on_conflict = [set: [title: "second"]]
    # Schemaless sources take already-dumped values, hence Ecto.UUID.dump/1.
    {:ok, uuid} = Ecto.UUID.dump(Ecto.UUID.generate())
    post = [title: "first", uuid: uuid]
    assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
           {1, nil}

    # Error on non-conflict target
    assert catch_error(TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:id]))

    # Error on conflict target
    assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) ==
           {1, nil}
    assert TestRepo.all(from p in Post, select: p.title) == ["second"]
  end

  @tag :without_conflict_target
  test "on conflict replace_all" do
    post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate}
    post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate}

    {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all)
    {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all)

    assert post_first.id
    assert post_second.id
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    # Multiple record change value: note IDS are also replaced
    changes = [%{id: post_first.id + 2, title: "first_updated",
                 text: "first_updated", uuid: post_first.uuid},
               %{id: post_second.id + 2, title: "second_updated",
                 text: "second_updated", uuid: post_second.uuid}]

    TestRepo.insert_all(Post, changes, on_conflict: :replace_all)
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    updated_first = TestRepo.get(Post, post_first.id + 2)
    assert updated_first.title == "first_updated"
    assert updated_first.text == "first_updated"

    updated_second = TestRepo.get(Post, post_second.id + 2)
    assert updated_second.title == "second_updated"
    assert updated_second.text == "second_updated"
  end

  @tag :with_conflict_target
  test "on conflict replace_all and conflict_target" do
    post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
    post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}

    {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all, conflict_target: :uuid)
    {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all, conflict_target: :uuid)

    assert post_first.id
    assert post_second.id
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    # Multiple record change value: note IDS are also replaced
    changes = [%{id: post_second.id + 1, title: "first_updated",
                 text: "first_updated", uuid: post_first.uuid},
               %{id: post_second.id + 2, title: "second_updated",
                 text: "second_updated", uuid: post_second.uuid}]

    TestRepo.insert_all(Post, changes, on_conflict: :replace_all, conflict_target: :uuid)
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    updated_first = TestRepo.get(Post, post_second.id + 1)
    assert updated_first.title == "first_updated"
    assert updated_first.text == "first_updated"

    updated_second = TestRepo.get(Post, post_second.id + 2)
    assert updated_second.title == "second_updated"
    assert updated_second.text == "second_updated"
  end

  @tag :without_conflict_target
  test "on conflict replace_all_except_primary_key" do
    post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate}
    post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate}

    {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all_except_primary_key)
    {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all_except_primary_key)

    assert post_first.id
    assert post_second.id
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    # Multiple record change value: note IDS are not replaced
    changes = [%{id: post_first.id + 2, title: "first_updated",
                 text: "first_updated", uuid: post_first.uuid},
               %{id: post_second.id + 2, title: "second_updated",
                 text: "second_updated", uuid: post_second.uuid}]

    TestRepo.insert_all(Post, changes, on_conflict: :replace_all_except_primary_key)
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    # Rows are found under their ORIGINAL ids, proving the pk was preserved.
    updated_first = TestRepo.get(Post, post_first.id)
    assert updated_first.title == "first_updated"
    assert updated_first.text == "first_updated"

    updated_second = TestRepo.get(Post, post_second.id)
    assert updated_second.title == "second_updated"
    assert updated_second.text == "second_updated"
  end

  @tag :with_conflict_target
  test "on conflict replace_all_except_primary_key and conflict_target" do
    post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()}
    post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()}

    {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)
    {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)

    assert post_first.id
    assert post_second.id
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    # Multiple record change value: note IDS are not replaced
    changes = [%{id: post_first.id + 2, title: "first_updated",
                 text: "first_updated", uuid: post_first.uuid},
               %{id: post_second.id + 2, title: "second_updated",
                 text: "second_updated", uuid: post_second.uuid}]

    TestRepo.insert_all(Post, changes, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid)
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    updated_first = TestRepo.get(Post, post_first.id)
    assert updated_first.title == "first_updated"
    assert updated_first.text == "first_updated"

    updated_second = TestRepo.get(Post, post_second.id)
    assert updated_second.title == "second_updated"
    assert updated_second.text == "second_updated"
  end

  @tag :with_conflict_target
  test "on conflict replace and conflict_target" do
    post_first = %Post{title: "first", visits: 10, public: true, uuid: Ecto.UUID.generate}
    post_second = %Post{title: "second", visits: 20, public: false, uuid: Ecto.UUID.generate}

    {:ok, post_first} = TestRepo.insert(post_first, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)
    {:ok, post_second} = TestRepo.insert(post_second, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid)

    assert post_first.id
    assert post_second.id
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    # Multiple record change value: note `public` field is not changed
    changes = [%{id: post_first.id, title: "first_updated", visits: 11, public: false, text: "first_updated", uuid: post_first.uuid},
               %{id: post_second.id, title: "second_updated", visits: 21, public: true, text: "second_updated", uuid: post_second.uuid}]

    TestRepo.insert_all(Post, changes, on_conflict: {:replace, [:title, :visits, :text]}, conflict_target: :uuid)
    assert TestRepo.all(from p in Post, select: count(p.id)) == [2]

    updated_first = TestRepo.get(Post, post_first.id)
    assert updated_first.title == "first_updated"
    assert updated_first.visits == 11
    assert updated_first.public == true
    assert updated_first.text == "first_updated"

    updated_second = TestRepo.get(Post, post_second.id)
    assert updated_second.title == "second_updated"
    assert updated_second.visits == 21
    assert updated_second.public == false
    assert updated_second.text == "second_updated"
  end
end
end
| 38.912616
| 152
| 0.633587
|
79ffc85502db343c15929bcf58c59b235ac6d73f
| 284
|
exs
|
Elixir
|
verify/mix.exs
|
elixytics/ua_inspector
|
11fd98f69b7853b70529ee73355ef57851248572
|
[
"Apache-2.0"
] | 57
|
2015-04-07T03:10:45.000Z
|
2019-03-11T01:01:40.000Z
|
verify/mix.exs
|
elixytics/ua_inspector
|
11fd98f69b7853b70529ee73355ef57851248572
|
[
"Apache-2.0"
] | 16
|
2015-03-09T19:56:17.000Z
|
2019-03-16T14:24:02.000Z
|
verify/mix.exs
|
elixytics/ua_inspector
|
11fd98f69b7853b70529ee73355ef57851248572
|
[
"Apache-2.0"
] | 15
|
2015-02-02T23:14:00.000Z
|
2019-03-16T13:15:05.000Z
|
defmodule UAInspector.Verify.MixProject do
  @moduledoc false

  use Mix.Project

  # Helper project used to run verification against the local ua_inspector
  # checkout; it reuses the parent project's deps directory and lockfile.
  def project do
    [
      app: :ua_inspector_verify,
      version: "0.0.1",
      elixir: "~> 1.9",
      deps: deps(),
      deps_path: "../deps",
      lockfile: "../mix.lock"
    ]
  end

  # Single path dependency on the parent ua_inspector project.
  defp deps do
    [{:ua_inspector, path: "../"}]
  end
end
| 18.933333
| 43
| 0.552817
|
79ffd7801361e1832470b1abb2719c1955cda6aa
| 1,652
|
ex
|
Elixir
|
apps/frontend/lib/frontend_web/live/live_helpers.ex
|
ricardoebbers/exquisitle
|
69367f56aae110e4efbd1960bc49b8d049a7de89
|
[
"MIT"
] | null | null | null |
apps/frontend/lib/frontend_web/live/live_helpers.ex
|
ricardoebbers/exquisitle
|
69367f56aae110e4efbd1960bc49b8d049a7de89
|
[
"MIT"
] | null | null | null |
apps/frontend/lib/frontend_web/live/live_helpers.ex
|
ricardoebbers/exquisitle
|
69367f56aae110e4efbd1960bc49b8d049a7de89
|
[
"MIT"
] | null | null | null |
defmodule FrontendWeb.LiveHelpers do
  import Phoenix.LiveView
  import Phoenix.LiveView.Helpers

  alias Phoenix.LiveView.JS

  @doc """
  Renders a live component inside a modal.

  The rendered modal receives a `:return_to` option to properly update
  the URL when the modal is closed. When `:return_to` is absent, a plain
  close anchor is rendered instead of a `live_patch` link.

  ## Examples

      <.modal return_to={Routes.game_index_path(@socket, :index)}>
        <.live_component
          module={FrontendWeb.GameLive.FormComponent}
          id={@game.id || :new}
          title={@page_title}
          action={@live_action}
          return_to={Routes.game_index_path(@socket, :index)}
          game={@game}
        />
      </.modal>
  """
  def modal(assigns) do
    # Default :return_to to nil so the template below can branch on it.
    assigns = assign_new(assigns, :return_to, fn -> nil end)

    ~H"""
    <div id="modal" class="phx-modal fade-in" phx-remove={hide_modal()}>
      <div
        id="modal-content"
        class="phx-modal-content fade-in-scale"
        phx-click-away={JS.dispatch("click", to: "#close")}
        phx-window-keydown={JS.dispatch("click", to: "#close")}
        phx-key="escape"
      >
        <%= if @return_to do %>
          <%= live_patch "✖",
            to: @return_to,
            id: "close",
            class: "phx-modal-close",
            phx_click: hide_modal()
          %>
        <% else %>
          <a id="close" href="#" class="phx-modal-close" phx-click={hide_modal()}>✖</a>
        <% end %>

        <%= render_slot(@inner_block) %>
      </div>
    </div>
    """
  end

  # Builds the JS commands that fade out the modal wrapper and its content.
  # Used by both close controls and by phx-remove when the modal is patched
  # away.
  defp hide_modal(js \\ %JS{}) do
    js
    |> JS.hide(to: "#modal", transition: "fade-out")
    |> JS.hide(to: "#modal-content", transition: "fade-out-scale")
  end
end
| 27.081967
| 86
| 0.562349
|
79ffd941f51bf803e6f719471bd405a3498759b0
| 259
|
exs
|
Elixir
|
config/test.exs
|
zampino/livex
|
74bcc6557ca8e5185b7e4dac85323b1c5174e52d
|
[
"MIT"
] | 6
|
2016-03-31T21:23:21.000Z
|
2020-09-15T01:36:17.000Z
|
config/test.exs
|
zampino/livex
|
74bcc6557ca8e5185b7e4dac85323b1c5174e52d
|
[
"MIT"
] | null | null | null |
config/test.exs
|
zampino/livex
|
74bcc6557ca8e5185b7e4dac85323b1c5174e52d
|
[
"MIT"
] | null | null | null |
use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :livex, Livex.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 23.545455
| 56
| 0.72973
|
79ffea9d69d858548968c3e3ebf26ffaf86a819c
| 3,011
|
ex
|
Elixir
|
lib/odd_job/scheduler.ex
|
msimonborg/odd_job
|
99c662b21e7a6ed34ebae6bdd79cb6ad997e9724
|
[
"MIT"
] | 3
|
2022-02-01T13:49:51.000Z
|
2022-02-04T05:56:34.000Z
|
lib/odd_job/scheduler.ex
|
msimonborg/odd_job
|
99c662b21e7a6ed34ebae6bdd79cb6ad997e9724
|
[
"MIT"
] | null | null | null |
lib/odd_job/scheduler.ex
|
msimonborg/odd_job
|
99c662b21e7a6ed34ebae6bdd79cb6ad997e9724
|
[
"MIT"
] | null | null | null |
defmodule OddJob.Scheduler do
@moduledoc """
The `OddJob.Scheduler` is responsible for execution of scheduled jobs.
Each scheduler is a dynamically supervised process that is created to manage a single timer
and a job or collection of jobs to send to a pool when the timer expires. After the jobs are
delivered to the pool the scheduler shuts itself down. The scheduler process will also
automatically shutdown if a timer is cancelled with `OddJob.cancel_timer/1`. If a timer is
cancelled with `Process.cancel_timer/1` then the scheduler will eventually timeout and shutdown
one second after the timer would have expired.
"""
@moduledoc since: "0.2.0"
@doc false
use GenServer, restart: :temporary
import OddJob.Guards
alias OddJob.Scheduler.Supervisor, as: SchedulerSup
@name __MODULE__
@registry OddJob.Registry
@typedoc false
@type timer :: non_neg_integer
@type pool :: atom
# <---- Client API ---->
@doc false
@spec perform(timer, pool, function) :: reference
def perform(timer, pool, function) when is_timer(timer) do
pool
|> SchedulerSup.start_child()
|> GenServer.call({:schedule_perform, timer, {pool, function}})
end
@spec perform_many(timer, pool, list | map, function) :: reference
def perform_many(timer, pool, collection, function) do
pool
|> SchedulerSup.start_child()
|> GenServer.call({:schedule_perform, timer, {pool, collection, function}})
end
@doc false
@spec cancel_timer(reference) :: non_neg_integer | false
def cancel_timer(timer_ref) when is_reference(timer_ref) do
result = Process.cancel_timer(timer_ref)
case lookup(timer_ref) do
[{pid, :timer}] -> GenServer.cast(pid, :abort)
[] -> :noop
end
result
end
defp lookup(timer_ref), do: Registry.lookup(@registry, timer_ref)
@doc false
@spec start_link([]) :: :ignore | {:error, any} | {:ok, pid}
def start_link([]) do
GenServer.start_link(@name, [])
end
# <---- Callbacks ---->
@impl GenServer
@spec init([]) :: {:ok, []}
def init([]) do
{:ok, []}
end
@impl GenServer
def handle_call({:schedule_perform, timer, dispatch}, _, state) do
timer_ref =
timer
|> set_timer(dispatch)
|> register()
timeout = Process.read_timer(timer_ref) + 1
{:reply, timer_ref, state, timeout}
end
defp set_timer(timer, dispatch) do
Process.send_after(self(), {:perform, dispatch}, timer)
end
defp register(timer_ref) do
Registry.register(@registry, timer_ref, :timer)
timer_ref
end
@impl GenServer
def handle_cast(:abort, state) do
{:stop, :normal, state}
end
@impl GenServer
def handle_info({:perform, {pool, fun}}, state) do
OddJob.perform(pool, fun)
{:stop, :normal, state}
end
def handle_info({:perform, {pool, collection, function}}, state) do
OddJob.perform_many(pool, collection, function)
{:stop, :normal, state}
end
def handle_info(:timeout, state) do
{:stop, :normal, state}
end
end
| 26.412281
| 97
| 0.682498
|
79fff9bdf43ba67fad73b5bb73ef8682d2eb7ad6
| 1,241
|
ex
|
Elixir
|
clients/gke_hub/lib/google_api/gke_hub/v1/model/migrate_state.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/gke_hub/lib/google_api/gke_hub/v1/model/migrate_state.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/gke_hub/lib/google_api/gke_hub/v1/model/migrate_state.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GKEHub.V1.Model.MigrateState do
@moduledoc """
MigrateState contains the state of Migrate subfeature
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.GKEHub.V1.Model.MigrateState do
def decode(value, options) do
GoogleApi.GKEHub.V1.Model.MigrateState.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.GKEHub.V1.Model.MigrateState do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 29.547619
| 74
| 0.758259
|
0300272e182f7121c4fd3eababc25c0ba6e3bde2
| 1,750
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v2/model/pos_store.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/content/lib/google_api/content/v2/model/pos_store.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/pos_store.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.PosStore do
@moduledoc """
Store resource.
## Attributes
* `kind` (*type:* `String.t`, *default:* `content#posStore`) - Identifies what kind of resource this is. Value: the fixed string "content#posStore".
* `storeAddress` (*type:* `String.t`, *default:* `nil`) - The street address of the store.
* `storeCode` (*type:* `String.t`, *default:* `nil`) - A store identifier that is unique for the given merchant.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:kind => String.t(),
:storeAddress => String.t(),
:storeCode => String.t()
}
field(:kind)
field(:storeAddress)
field(:storeCode)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.PosStore do
def decode(value, options) do
GoogleApi.Content.V2.Model.PosStore.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.PosStore do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.018868
| 152
| 0.708
|
03002c804d2e9039f50104fce662ca1936f8a559
| 1,236
|
exs
|
Elixir
|
nevernote/config/config.exs
|
dreamingechoes/create-an-mvp-with-phoenix-framework
|
2a75582fdece9c8f0931f6322e27db5467ae96a4
|
[
"MIT"
] | 4
|
2019-01-07T09:15:23.000Z
|
2021-06-18T02:56:14.000Z
|
nevernote/config/config.exs
|
dreamingechoes/create-an-mvp-with-phoenix-framework
|
2a75582fdece9c8f0931f6322e27db5467ae96a4
|
[
"MIT"
] | null | null | null |
nevernote/config/config.exs
|
dreamingechoes/create-an-mvp-with-phoenix-framework
|
2a75582fdece9c8f0931f6322e27db5467ae96a4
|
[
"MIT"
] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :nevernote,
ecto_repos: [Nevernote.Repo]
# Configures the endpoint
config :nevernote, NevernoteWeb.Endpoint,
url: [host: "localhost"],
secret_key_base:
"qpP0MvDXiEBLeTVByuXKrGkVU8jnUe2WCvN/+ze0aP2Y8JHXKalmBqeUf3s4Y9IG",
render_errors: [view: NevernoteWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Nevernote.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:user_id]
# Guardian configuration
config :nevernote, Nevernote.Guardian,
issuer: "nevernote",
secret_key: "eP/Fjhc5Ns4WsmYqBqwvC51oA0i/aXYeobBLn8V7Rrtyddfct48rimYbVQj28MAX"
config :nevernote, NevernoteWeb.AuthPipeline,
module: NevernoteWeb.Guardian,
error_handler: NevernoteWeb.AuthErrorHandler
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 33.405405
| 80
| 0.783981
|
030031b82e7d8c204f893d08e37c16f4aa1e1cb6
| 611
|
ex
|
Elixir
|
lib/notoriety/note_file.ex
|
aoswalt/notoriety
|
e8599a7831be0b647511f59881c536b6c5d97147
|
[
"MIT"
] | null | null | null |
lib/notoriety/note_file.ex
|
aoswalt/notoriety
|
e8599a7831be0b647511f59881c536b6c5d97147
|
[
"MIT"
] | 5
|
2020-02-01T16:37:57.000Z
|
2021-05-08T16:44:10.000Z
|
lib/notoriety/note_file.ex
|
aoswalt/notoriety
|
e8599a7831be0b647511f59881c536b6c5d97147
|
[
"MIT"
] | null | null | null |
defmodule Notoriety.NoteFile do
@moduledoc """
A file name and its associated `Notoriety.Note`
"""
alias Notoriety.Note
@enforce_keys [:file_name, :note]
defstruct [:file_name, :note]
@doc """
Construct a `NoteFile` from the given file name and `Notoriety.Note`.
"""
def new(file_name, %Note{} = note) do
%__MODULE__{file_name: file_name, note: note}
end
@doc """
Return the `NoteFile`'s file name.
"""
def file_name(%__MODULE__{} = file), do: file.file_name
@doc """
Return the `NoteFile`'s `Notoriety.Note`.
"""
def note(%__MODULE__{} = file), do: file.note
end
| 21.821429
| 71
| 0.653028
|
0300367cd28236fef9bf0224c0bb5c82377e7fad
| 210
|
ex
|
Elixir
|
test/support/docs.ex
|
hasclass/tesla
|
c1ee0e3417ea269124869ca1ff33fd2364391154
|
[
"MIT"
] | 1
|
2020-12-21T03:45:23.000Z
|
2020-12-21T03:45:23.000Z
|
test/support/docs.ex
|
hasclass/tesla
|
c1ee0e3417ea269124869ca1ff33fd2364391154
|
[
"MIT"
] | 1
|
2019-10-25T19:27:33.000Z
|
2019-10-25T19:27:33.000Z
|
test/support/docs.ex
|
hasclass/tesla
|
c1ee0e3417ea269124869ca1ff33fd2364391154
|
[
"MIT"
] | 1
|
2020-04-04T03:19:50.000Z
|
2020-04-04T03:19:50.000Z
|
defmodule TeslaDocsTest do
defmodule Default do
use Tesla
end
defmodule NoDocs do
use Tesla, docs: false
@doc """
Something something
"""
def custom(url), do: get(url)
end
end
| 14
| 33
| 0.638095
|
030061b84eb9e442c4c860a6e800b06417a77a0f
| 3,075
|
ex
|
Elixir
|
apps/firestorm_data/lib/firestorm_data/categories/categories.ex
|
firestormforum/firestorm_elixir
|
80caba13daa21ef6087aa85f6cf0dd1f016e9aef
|
[
"MIT"
] | 14
|
2020-01-03T04:13:48.000Z
|
2021-12-01T16:16:24.000Z
|
apps/firestorm_data/lib/firestorm_data/categories/categories.ex
|
firestormforum/firestorm_elixir
|
80caba13daa21ef6087aa85f6cf0dd1f016e9aef
|
[
"MIT"
] | 1
|
2019-10-07T23:31:52.000Z
|
2019-10-07T23:31:52.000Z
|
apps/firestorm_data/lib/firestorm_data/categories/categories.ex
|
firestormforum/firestorm_elixir
|
80caba13daa21ef6087aa85f6cf0dd1f016e9aef
|
[
"MIT"
] | 4
|
2020-01-20T16:43:18.000Z
|
2022-03-03T23:05:37.000Z
|
defmodule FirestormData.Categories do
@moduledoc """
Threads exist within categories.
"""
alias FirestormData.{
Repo,
Categories.Category
}
@type index_params :: %{
optional(:pagination) => %{
optional(:per_page) => non_neg_integer(),
optional(:page) => non_neg_integer()
}
}
@type index :: %{
entries: [Category.t()],
page: non_neg_integer(),
per_page: non_neg_integer(),
total_pages: non_neg_integer(),
total_entries: non_neg_integer()
}
@doc """
List categories
"""
@spec list_categories(index_params()) :: index()
def list_categories(options) do
pagination = Map.get(options, :pagination, %{page: 1, per_page: 20})
page = Map.get(pagination, :page, 1)
per_page = Map.get(pagination, :per_page, 20)
Repo.paginate(Category, page: page, page_size: per_page)
end
@doc """
Finds a category by attributes
## Examples
iex> find_category(%{title: "Elixir"})
{:ok, %Category{}}
iex> find_category(%{field: bad_value})
{:error, :no_such_category}
"""
@spec find_category(map()) :: {:ok, Category.t()} | {:error, :no_such_category}
def find_category(attrs \\ %{}) do
Category
|> Repo.get_by(attrs)
|> case do
nil -> {:error, :no_such_category}
category -> {:ok, category}
end
end
@doc """
Creates a category.
## Examples
iex> create_category(%{title: "Elixir"})
{:ok, %Category{}}
iex> create_category(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec create_category(map()) :: {:ok, Category.t()} | {:error, Ecto.Changeset.t()}
def create_category(attrs \\ %{}) do
%Category{}
|> Category.changeset(attrs)
|> Repo.insert()
end
@doc """
Gets a single category by id.
## Examples
iex> get_category("123")
{:ok, %Category{}}
iex> get_category!("456")
{:error, :no_such_category}
"""
@spec get_category(String.t()) :: {:ok, Category.t()} | {:error, :no_such_category}
def get_category(id) do
case Repo.get(Category, id) do
nil -> {:error, :no_such_category}
category -> {:ok, category}
end
end
@doc """
Updates a category.
## Examples
iex> update_category(category, %{field: new_value})
{:ok, %Category{}}
iex> update_category(category, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec update_category(Category.t(), map()) :: {:ok, Category.t()} | {:error, Ecto.Changeset.t()}
def update_category(%Category{} = category, attrs) do
category
|> Category.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Category.
## Examples
iex> delete_category(category)
{:ok, %Category{}}
iex> delete_category(category)
{:error, %Ecto.Changeset{}}
"""
@spec delete_category(Category.t()) :: {:ok, Category.t()} | {:error, Ecto.Changeset.t()}
def delete_category(%Category{} = category) do
Repo.delete(category)
end
end
| 22.947761
| 98
| 0.592846
|
0300822715c1f27d59a3768d0d95d8d24945f918
| 217
|
ex
|
Elixir
|
test/support/mocks.ex
|
valiot/nerves_hub_web
|
01aef0b9b6b07d9f5c5440f7df0d2a3ec026daa8
|
[
"Apache-2.0"
] | null | null | null |
test/support/mocks.ex
|
valiot/nerves_hub_web
|
01aef0b9b6b07d9f5c5440f7df0d2a3ec026daa8
|
[
"Apache-2.0"
] | null | null | null |
test/support/mocks.ex
|
valiot/nerves_hub_web
|
01aef0b9b6b07d9f5c5440f7df0d2a3ec026daa8
|
[
"Apache-2.0"
] | null | null | null |
Code.compiler_options(ignore_module_conflict: true)
Mox.defmock(NervesHubWebCore.PatcherMock, for: NervesHubWebCore.Firmwares.Patcher)
Mox.defmock(NervesHubWebCore.UploadMock, for: NervesHubWebCore.Firmwares.Upload)
| 43.4
| 82
| 0.866359
|
0300886d72c64bac0409107817e005ef294aa02d
| 1,167
|
exs
|
Elixir
|
spec/assertions/enum/have_count_spec.exs
|
MeneDev/espec
|
ec4b3d579c5192999e930224a8a2650bb1fdf0bc
|
[
"Apache-2.0"
] | 807
|
2015-03-25T14:00:19.000Z
|
2022-03-24T08:08:15.000Z
|
spec/assertions/enum/have_count_spec.exs
|
MeneDev/espec
|
ec4b3d579c5192999e930224a8a2650bb1fdf0bc
|
[
"Apache-2.0"
] | 254
|
2015-03-27T10:12:25.000Z
|
2021-07-12T01:40:15.000Z
|
spec/assertions/enum/have_count_spec.exs
|
MeneDev/espec
|
ec4b3d579c5192999e930224a8a2650bb1fdf0bc
|
[
"Apache-2.0"
] | 85
|
2015-04-02T10:25:19.000Z
|
2021-01-30T21:30:43.000Z
|
defmodule ESpec.Assertions.Enum.HaveCountSpec do
use ESpec, async: true
let :range, do: 1..3
context "Success" do
it "checks success with `to`" do
message = expect(range()) |> to(have_count(3))
expect(message) |> to(eq "`1..3` has `3` elements.")
end
it "checks success with `not_to`" do
message = expect(range()) |> to_not(have_count(2))
expect(message) |> to(eq "`1..3` doesn't have `2` elements.")
end
end
context "aliases" do
it do: expect(range()) |> to(have_size(3))
it do: expect(range()) |> to(have_length(3))
end
context "Error" do
context "with `to`" do
before do
{:shared,
expectation: fn -> expect(range()) |> to(have_count(2)) end,
message: "Expected `1..3` to have `2` elements but it has `3`."}
end
it_behaves_like(CheckErrorSharedSpec)
end
context "with `not_to`" do
before do
{:shared,
expectation: fn -> expect(range()) |> to_not(have_count(3)) end,
message: "Expected `1..3` not to have `3` elements but it has `3`."}
end
it_behaves_like(CheckErrorSharedSpec)
end
end
end
| 25.933333
| 77
| 0.59126
|
0300adb53e51df744428395bde3e0ca49d507191
| 3,572
|
ex
|
Elixir
|
lib/cforum_web/views/voting_area_view.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
lib/cforum_web/views/voting_area_view.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
lib/cforum_web/views/voting_area_view.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
defmodule CforumWeb.VotingAreaView do
use CforumWeb, :view
alias Cforum.Messages.MessageHelpers
alias Cforum.Messages.Votes
def acceptance_status?(conn, message, top),
do: MessageHelpers.accepted?(message) && (top || !Abilities.accept?(conn, message))
def voting_svg(conn, path) do
[
{:safe,
"<svg width=\"150\" height=\"150\" viewBox=\"0 0 150 150\" xmlns=\"http://www.w3.org/2000/svg\"><use xlink:href=\""},
Routes.static_path(conn, "/images/cforum-voting-area-icons.svg"),
"#",
path,
{:safe, "\"></use></svg>"}
]
end
def acceptance_button(conn, thread, message, top) do
cond do
top ->
nil
Abilities.may?(conn, "messages/accept", :accept, {thread, message}) ->
VHelpers.Button.cf_button(
# voting_svg(conn, "accept"),
[{:safe, "<span>"}, gettext("accept answer"), {:safe, "</span>"}],
to: Path.message_path(conn, :accept, thread, message),
class: "accept unaccepted-answer",
params: VHelpers.std_args(conn),
title: gettext("accept answer")
)
Abilities.may?(conn, "messages/accept", :unaccept, {thread, message}) ->
VHelpers.Button.cf_button(
# voting_svg(conn, "accept"),
[{:safe, "<span>"}, gettext("unaccept answer"), {:safe, "</span>"}],
to: Path.message_path(conn, :unaccept, thread, message),
class: "accept accepted-answer",
params: VHelpers.std_args(conn),
title: gettext("unaccept answer")
)
acceptance_status?(conn, message, top) ->
# [{:safe, "<span class=\"accepted-answer accept disabled\">"}, voting_svg(conn, "accept"), {:safe, "</span>"}]
[{:safe, "<span class=\"accepted-answer accept disabled\"> </span>"}]
true ->
""
end
end
def active_upvoting_button(message, user) do
if Votes.upvoted?(message, user), do: " active"
end
def active_downvoting_button(message, user) do
if Votes.downvoted?(message, user), do: " active"
end
def vote_up_button(conn, thread, message, top) do
if top do
nil
else
# Abilities.may?(conn, "messages/vote", :upvote, {thread, message}) ->
VHelpers.Button.cf_button(
[{:safe, "<span>"}, gettext("vote up"), {:safe, "</span>"}],
to: Path.message_path(conn, :upvote, thread, message),
class: "vote-button vote-up #{active_upvoting_button(message, conn.assigns.current_user)}",
params: VHelpers.std_args(conn),
disabled: !Abilities.may?(conn, "messages/vote", :upvote, {thread, message}),
title: gettext("vote up")
)
# true ->
# [{:safe, "<span class=\"vote-button vote-up disabled\">"}, voting_svg(conn, "vote-up"), {:safe, "</span>"}]
end
end
def vote_down_button(conn, thread, message, top) do
if top do
nil
else
# Abilities.may?(conn, "messages/vote", :downvote, {thread, message}) ->
VHelpers.Button.cf_button(
[{:safe, "<span>"}, gettext("vote down"), {:safe, "</span>"}],
to: Path.message_path(conn, :downvote, thread, message),
class: "vote-button vote-down #{active_downvoting_button(message, conn.assigns.current_user)}",
params: VHelpers.std_args(conn),
disabled: !Abilities.may?(conn, "messages/vote", :downvote, {thread, message}),
title: gettext("vote down")
)
# true ->
# [{:safe, "<span class=\"vote-button vote-down disabled\">"}, voting_svg(conn, "vote-down"), {:safe, "</span>"}]
end
end
end
| 35.366337
| 124
| 0.595465
|
0300cddf296ce67cfd1818206de70e77fb23ce05
| 470
|
ex
|
Elixir
|
lib/api_banking/financial/account.ex
|
juniornelson123/api_banking_teste
|
db263511d78fffe9e4081551817d006df2094316
|
[
"MIT"
] | null | null | null |
lib/api_banking/financial/account.ex
|
juniornelson123/api_banking_teste
|
db263511d78fffe9e4081551817d006df2094316
|
[
"MIT"
] | null | null | null |
lib/api_banking/financial/account.ex
|
juniornelson123/api_banking_teste
|
db263511d78fffe9e4081551817d006df2094316
|
[
"MIT"
] | null | null | null |
defmodule ApiBanking.Financial.Account do
use Ecto.Schema
import Ecto.Changeset
schema "accounts" do
field :amount, :float
field :number, :string
belongs_to :user, ApiBanking.Financial.User
has_many :transactions, ApiBanking.Financial.Transaction
timestamps()
end
@doc false
def changeset(account, attrs) do
account
|> cast(attrs, [:amount, :number, :user_id])
|> validate_required([:amount, :number, :user_id])
end
end
| 21.363636
| 60
| 0.7
|
0300dec73a25510261873272b02ec3a02bf1d5b8
| 1,792
|
exs
|
Elixir
|
config/prod.exs
|
Baradoy/topshelf
|
bd3d5f96b3d3840990231cf244cc5f14bd807997
|
[
"MIT"
] | null | null | null |
config/prod.exs
|
Baradoy/topshelf
|
bd3d5f96b3d3840990231cf244cc5f14bd807997
|
[
"MIT"
] | null | null | null |
config/prod.exs
|
Baradoy/topshelf
|
bd3d5f96b3d3840990231cf244cc5f14bd807997
|
[
"MIT"
] | null | null | null |
import Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :topshelf, TopshelfWeb.Endpoint, cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :topshelf, TopshelfWeb.Endpoint,
# ...,
# url: [host: "example.com", port: 443],
# https: [
# ...,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :topshelf, TopshelfWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
| 35.84
| 96
| 0.708705
|
0300e828138615328f0eafb20c4af48810a817b1
| 1,693
|
exs
|
Elixir
|
elixir/config/dev.exs
|
Arthurb101/web-homework
|
d3f768a71c7d9e13e456028c491ced8e71e6cda4
|
[
"MIT"
] | null | null | null |
elixir/config/dev.exs
|
Arthurb101/web-homework
|
d3f768a71c7d9e13e456028c491ced8e71e6cda4
|
[
"MIT"
] | null | null | null |
elixir/config/dev.exs
|
Arthurb101/web-homework
|
d3f768a71c7d9e13e456028c491ced8e71e6cda4
|
[
"MIT"
] | null | null | null |
use Mix.Config
# Configure your database
config :homework, Homework.Repo,
username: "postgres",
password: "mypassword",
database: "homework_dev",
hostname: "35.188.173.102",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :homework, HomeworkWeb.Endpoint,
http: [port: System.get_env("PORT") || 8000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, level: :info, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 29.189655
| 68
| 0.724749
|
030123d1b28ce1a18d4bc44e9b262f459e728019
| 2,382
|
ex
|
Elixir
|
apps/omg_watcher/lib/omg_watcher/typed_data_hash/types.ex
|
omisego/elixir-omg
|
2c68973d8f29033d137f63a6e060f12e2a7dcd59
|
[
"Apache-2.0"
] | 177
|
2018-08-24T03:51:02.000Z
|
2020-05-30T13:29:25.000Z
|
apps/omg_watcher/lib/omg_watcher/typed_data_hash/types.ex
|
omisego/elixir-omg
|
2c68973d8f29033d137f63a6e060f12e2a7dcd59
|
[
"Apache-2.0"
] | 1,042
|
2018-08-25T00:52:39.000Z
|
2020-06-01T05:15:17.000Z
|
apps/omg_watcher/lib/omg_watcher/typed_data_hash/types.ex
|
omisego/elixir-omg
|
2c68973d8f29033d137f63a6e060f12e2a7dcd59
|
[
"Apache-2.0"
] | 47
|
2018-08-24T12:06:33.000Z
|
2020-04-28T11:49:25.000Z
|
# Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.TypedDataHash.Types do
@moduledoc """
Specifies all types needed to produce `eth_signTypedData` request.
See: https://github.com/ethereum/EIPs/blob/master/EIPS/eip-712.md#specification-of-the-eth_signtypeddata-json-rpc
"""
@type typedDataSignRequest_t() :: %{
types: map(),
primaryType: binary(),
domain: map(),
message: map()
}
@make_spec &%{name: &1, type: &2}
@eip_712_domain_spec [
@make_spec.("name", "string"),
@make_spec.("version", "string"),
@make_spec.("verifyingContract", "address"),
@make_spec.("salt", "bytes32")
]
@tx_spec Enum.concat([
[@make_spec.("txType", "uint256")],
Enum.map(0..3, fn i -> @make_spec.("input" <> Integer.to_string(i), "Input") end),
Enum.map(0..3, fn i -> @make_spec.("output" <> Integer.to_string(i), "Output") end),
[@make_spec.("txData", "uint256")],
[@make_spec.("metadata", "bytes32")]
])
@input_spec [
@make_spec.("blknum", "uint256"),
@make_spec.("txindex", "uint256"),
@make_spec.("oindex", "uint256")
]
@output_spec [
@make_spec.("outputType", "uint256"),
@make_spec.("outputGuard", "bytes20"),
@make_spec.("currency", "address"),
@make_spec.("amount", "uint256")
]
@types %{
EIP712Domain: @eip_712_domain_spec,
Transaction: @tx_spec,
Input: @input_spec,
Output: @output_spec
}
def eip712_types_specification(),
do: %{
types: @types,
primaryType: "Transaction"
}
def encode_type(type_name) when is_atom(type_name) do
"#{type_name}(#{
@types[type_name]
|> Enum.map(fn %{name: name, type: type} -> "#{type} #{name}" end)
|> Enum.join(",")
})"
end
end
| 30.151899
| 115
| 0.625945
|
03012a62181995def08f9d9bfbd220baabaa663a
| 709
|
ex
|
Elixir
|
apps/artemis/lib/artemis/contexts/permission/delete_permission.ex
|
artemis-platform/artemis_dashboard
|
5ab3f5ac4c5255478bbebf76f0e43b44992e3cab
|
[
"MIT"
] | 9
|
2019-08-19T19:56:34.000Z
|
2022-03-22T17:56:38.000Z
|
apps/artemis/lib/artemis/contexts/permission/delete_permission.ex
|
chrislaskey/artemis_teams
|
9930c3d9528e37b76f0525390e32b66eed7eadde
|
[
"MIT"
] | 7
|
2019-07-12T21:41:01.000Z
|
2020-08-17T21:29:22.000Z
|
apps/artemis/lib/artemis/contexts/permission/delete_permission.ex
|
chrislaskey/artemis_teams
|
9930c3d9528e37b76f0525390e32b66eed7eadde
|
[
"MIT"
] | 2
|
2019-04-10T13:34:15.000Z
|
2019-05-17T02:42:24.000Z
|
defmodule Artemis.DeletePermission do
use Artemis.Context
alias Artemis.GetPermission
alias Artemis.Repo
def call!(id, params \\ %{}, user) do
case call(id, params, user) do
{:error, _} -> raise(Artemis.Context.Error, "Error deleting permission")
{:ok, result} -> result
end
end
def call(id, params \\ %{}, user) do
id
|> get_record(user)
|> delete_record
|> Event.broadcast("permission:deleted", params, user)
end
def get_record(%{id: id}, user), do: get_record(id, user)
def get_record(id, user), do: GetPermission.call(id, user)
defp delete_record(nil), do: {:error, "Record not found"}
defp delete_record(record), do: Repo.delete(record)
end
| 26.259259
| 78
| 0.662906
|
03012d5f3261993745944c09d829225d14a68c7c
| 1,089
|
exs
|
Elixir
|
test/util/bank_test.exs
|
NickMcG/SNEEX
|
901215dea41fa21314a4f4db46b51648158f1544
|
[
"MIT"
] | 1
|
2019-11-16T00:33:02.000Z
|
2019-11-16T00:33:02.000Z
|
test/util/bank_test.exs
|
NickMcG/SNEEX
|
901215dea41fa21314a4f4db46b51648158f1544
|
[
"MIT"
] | 1
|
2019-08-11T23:02:15.000Z
|
2019-08-11T23:02:15.000Z
|
test/util/bank_test.exs
|
NickMcG/SNEEX
|
901215dea41fa21314a4f4db46b51648158f1544
|
[
"MIT"
] | null | null | null |
defmodule Util.BankTest do
use ExUnit.Case
import Util.Test.DataBuilder
test "new with exactly a bank worth of data returns a bank" do
data = build_block_of_ffs(Util.Bank.bank_size())
assert %Util.Bank{} = Util.Bank.new(data)
end
test "new with more than a bank worth of data returns a bank and the excess data" do
data = build_block_of_ffs(Util.Bank.bank_size() + 5)
assert {%Util.Bank{}, remainder} = Util.Bank.new(data)
assert byte_size(remainder) == 5
end
test "extract_header with the header on page 7 returns the header" do
bank = Util.Bank.new(build_data_for_bank_with_header_on_page_7())
assert %Util.Header{} = Util.Bank.extract_header(bank)
end
test "extract_header with the header on page F returns the header" do
bank = Util.Bank.new(build_data_for_bank_with_header_on_page_f())
assert %Util.Header{} = Util.Bank.extract_header(bank)
end
test "extract_header without a header returns nil" do
bank = Util.Bank.new(build_data_for_bank_without_header())
assert nil == Util.Bank.extract_header(bank)
end
end
| 35.129032
| 86
| 0.733701
|
030161a79053fdc12d86a7d7782c25fac0433321
| 1,193
|
ex
|
Elixir
|
lib/table_rex/renderer/text/meta.ex
|
elixir-poa/table_rex
|
af8f5b421215e53921d212185b60be7b03b524b1
|
[
"MIT"
] | 1
|
2018-04-26T13:41:33.000Z
|
2018-04-26T13:41:33.000Z
|
lib/table_rex/renderer/text/meta.ex
|
geolessel/table_rex
|
af8f5b421215e53921d212185b60be7b03b524b1
|
[
"MIT"
] | null | null | null |
lib/table_rex/renderer/text/meta.ex
|
geolessel/table_rex
|
af8f5b421215e53921d212185b60be7b03b524b1
|
[
"MIT"
] | null | null | null |
defmodule TableRex.Renderer.Text.Meta do
  @moduledoc """
  The data structure for the `TableRex.Renderer.Text` rendering module, it holds results
  of style & dimension calculations to be passed down the render pipeline.
  """
  alias TableRex.Renderer.Text.Meta

  defstruct col_widths: %{},
            row_heights: %{},
            table_width: 0,
            intersections: [],
            render_horizontal_frame?: false,
            render_vertical_frame?: false,
            render_column_separators?: false,
            render_row_separators?: false

  @doc """
  Retrieves the "inner width" of the table, which is the full width minus any frame.
  """
  def inner_width(%Meta{table_width: width, render_vertical_frame?: framed?}) do
    # A vertical frame costs one character on each side of the table.
    if framed?, do: width - 2, else: width
  end

  @doc """
  Retrieves the column width at the given column index.
  """
  def col_width(meta, col_index), do: Map.get(meta.col_widths, col_index)

  @doc """
  Retrieves the row height at the given row index.
  """
  def row_height(meta, row_index), do: Map.get(meta.row_heights, row_index)
end
| 27.744186
| 88
| 0.678122
|
0301727c4e6e7fc6c31ea31db55d485c3aa15404
| 3,214
|
ex
|
Elixir
|
lib/gnat/handshake.ex
|
santif/nats.ex
|
84277ae3ae5a7ea2a7e6e70e624c59683c72d0de
|
[
"MIT"
] | 102
|
2019-04-17T06:19:48.000Z
|
2022-03-31T02:36:11.000Z
|
lib/gnat/handshake.ex
|
santif/nats.ex
|
84277ae3ae5a7ea2a7e6e70e624c59683c72d0de
|
[
"MIT"
] | 29
|
2019-04-16T03:45:04.000Z
|
2022-03-11T12:25:54.000Z
|
lib/gnat/handshake.ex
|
santif/nats.ex
|
84277ae3ae5a7ea2a7e6e70e624c59683c72d0de
|
[
"MIT"
] | 17
|
2019-05-07T19:33:17.000Z
|
2022-03-24T18:27:25.000Z
|
defmodule Gnat.Handshake do
  @moduledoc false
  alias Gnat.Parsec

  @doc """
  This function handles all of the variations of establishing a connection to
  a nats server and just returns {:ok, socket} or {:error, reason}
  """
  def connect(settings) do
    host = settings.host |> to_charlist
    case :gen_tcp.connect(host, settings.port, settings.tcp_opts, settings.connection_timeout) do
      {:ok, tcp} -> perform_handshake(tcp, settings)
      result -> result
    end
  end

  # Builds the CONNECT payload from the server's INFO settings and the
  # user's connection settings. Verbose mode is always disabled.
  def negotiate_settings(server_settings, user_settings) do
    %{verbose: false}
    |> negotiate_auth(server_settings, user_settings)
    |> negotiate_headers(server_settings, user_settings)
  end

  # Waits for the server's INFO frame (the first thing a NATS server sends),
  # optionally upgrades the socket to TLS, then replies with a CONNECT frame.
  defp perform_handshake(tcp, user_settings) do
    receive do
      {:tcp, ^tcp, operation} ->
        {_, [{:info, server_settings}]} = Parsec.parse(Parsec.new(), operation)
        {:ok, socket} = upgrade_connection(tcp, user_settings)
        settings = negotiate_settings(server_settings, user_settings)
        :ok = send_connect(user_settings, settings, socket)
        {:ok, socket}
    after 1000 ->
      {:error, "timed out waiting for info"}
    end
  end

  # After a TLS upgrade the socket is an :ssl socket, so the CONNECT frame
  # must be sent via :ssl rather than :gen_tcp.
  defp send_connect(%{tls: true}, settings, socket) do
    :ssl.send(socket, "CONNECT " <> Jason.encode!(settings, maps: :strict) <> "\r\n")
  end
  defp send_connect(_, settings, socket) do
    :gen_tcp.send(socket, "CONNECT " <> Jason.encode!(settings, maps: :strict) <> "\r\n")
  end

  # Auth clause order matters: username/password, then token, then JWT+nkey,
  # then bare nkey. The first clause matching both server and user settings wins.
  defp negotiate_auth(settings, %{auth_required: true}=_server, %{username: username, password: password}=_user) do
    Map.merge(settings, %{user: username, pass: password})
  end
  defp negotiate_auth(settings, %{auth_required: true}=_server, %{token: token}=_user) do
    Map.merge(settings, %{auth_token: token})
  end
  defp negotiate_auth(settings, %{auth_required: true, nonce: nonce}=_server, %{nkey_seed: seed, jwt: jwt}=_user) do
    {:ok, nkey} = NKEYS.from_seed(seed)
    # Signature is sent base64url-encoded with '=' padding stripped.
    signature = NKEYS.sign(nkey, nonce) |> Base.url_encode64() |> String.replace("=", "")
    Map.merge(settings, %{sig: signature, protocol: 1, jwt: jwt})
  end
  defp negotiate_auth(settings, %{auth_required: true, nonce: nonce}=_server, %{nkey_seed: seed}=_user) do
    {:ok, nkey} = NKEYS.from_seed(seed)
    signature = NKEYS.sign(nkey, nonce) |> Base.url_encode64() |> String.replace("=", "")
    public = NKEYS.public_nkey(nkey)
    Map.merge(settings, %{sig: signature, protocol: 1, nkey: public})
  end
  # No auth required (or no matching credentials supplied): send settings as-is.
  defp negotiate_auth(settings, _server, _user) do
    settings
  end

  # Headers are enabled only when the server advertises support; the user may
  # still opt out with `headers: false` (defaults to true when unset).
  defp negotiate_headers(settings, %{headers: true} = _server, user_settings) do
    if Map.get(user_settings, :headers, true) do
      Map.put(settings, :headers, true)
    else
      Map.put(settings, :headers, false)
    end
  end
  # Reached only when the server did NOT advertise header support but the
  # user explicitly requested it.
  defp negotiate_headers(_settings, _server, %{headers: true} = _user) do
    raise "NATS Server does not support headers, but your connection settings specify header support"
  end
  defp negotiate_headers(settings, _server, _user) do
    settings
  end

  # TLS upgrade: switch the raw socket to active mode, then wrap it with
  # :ssl.connect/3 using a 1 second handshake timeout.
  defp upgrade_connection(tcp, %{tls: true, ssl_opts: opts}) do
    :ok = :inet.setopts(tcp, [active: true])
    :ssl.connect(tcp, opts, 1_000)
  end
  defp upgrade_connection(tcp, _settings), do: {:ok, tcp}
end
| 37.372093
| 116
| 0.684816
|
0301906be6482283230b5942d643f06f610c1b94
| 1,505
|
ex
|
Elixir
|
clients/you_tube/lib/google_api/you_tube/v3/model/video_project_details.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/you_tube/lib/google_api/you_tube/v3/model/video_project_details.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/you_tube/lib/google_api/you_tube/v3/model/video_project_details.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.VideoProjectDetails do
  @moduledoc """
  Project specific details about the content of a YouTube Video.

  ## Attributes

  *   `tags` (*type:* `list(String.t)`, *default:* `nil`) - A list of project tags associated with the video during the upload.
  """

  # Auto-generated model: ModelBase derives the struct and (de)serialization
  # from the `field/2` declarations below. Do not hand-edit generated logic.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :tags => list(String.t())
        }

  field(:tags, type: :list)
end
# Delegates Poison decoding to the generated model's own decode/2
# (provided by GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.VideoProjectDetails do
  def decode(value, options) do
    GoogleApi.YouTube.V3.Model.VideoProjectDetails.decode(value, options)
  end
end
# Encodes the model through the shared Gax ModelBase encoder, which walks
# the generated field definitions.
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.VideoProjectDetails do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.021277
| 127
| 0.738206
|
0301913614a7d76f0ad4ab7823d6b412899dd500
| 428
|
exs
|
Elixir
|
test/_support/basic_gen_server.exs
|
ityonemo/multiverses
|
24b65367b369cc8122d24836816340ff2072c501
|
[
"MIT"
] | 4
|
2020-06-27T02:15:42.000Z
|
2020-12-21T14:24:42.000Z
|
test/_support/basic_gen_server.exs
|
ityonemo/multiverses
|
24b65367b369cc8122d24836816340ff2072c501
|
[
"MIT"
] | 4
|
2020-06-03T16:51:55.000Z
|
2020-09-08T19:28:55.000Z
|
test/_support/basic_gen_server.exs
|
ityonemo/multiverses
|
24b65367b369cc8122d24836816340ff2072c501
|
[
"MIT"
] | 1
|
2020-12-21T14:24:47.000Z
|
2020-12-21T14:24:47.000Z
|
defmodule MultiversesTest.BasicGenServer do
  @moduledoc false

  # Minimal GenServer used to test Multiverses integration: it holds the
  # fixed state :state and can report which universe it lives in.
  use Multiverses, with: GenServer
  use GenServer

  ## Client API

  def start_link(options) do
    GenServer.start_link(__MODULE__, :state, options)
  end

  def get_universe(srv), do: GenServer.call(srv, :universe)

  ## Server callbacks

  @impl true
  def init(:state), do: {:ok, :state}

  @impl true
  def handle_call(:universe, _from, state) do
    {:reply, Multiverses.self(), state}
  end
end
| 20.380952
| 59
| 0.712617
|
0301bad1686cfe30fa437035ce8b93734607decc
| 2,611
|
ex
|
Elixir
|
lib/yuri_template/simple_expander.ex
|
sirikid/yuri_template
|
66846ed0f5a8407749408a4ebf526f9c27cb9366
|
[
"Apache-2.0"
] | 3
|
2020-05-03T15:30:21.000Z
|
2021-05-09T01:35:41.000Z
|
lib/yuri_template/simple_expander.ex
|
sirikid/yuri_template
|
66846ed0f5a8407749408a4ebf526f9c27cb9366
|
[
"Apache-2.0"
] | 1
|
2021-04-15T20:08:14.000Z
|
2021-04-18T20:48:17.000Z
|
lib/yuri_template/simple_expander.ex
|
sirikid/yuri_template
|
66846ed0f5a8407749408a4ebf526f9c27cb9366
|
[
"Apache-2.0"
] | null | null | null |
defmodule YuriTemplate.SimpleExpander do
  @moduledoc false

  # Expands "simple string" URI-template expressions: bare `{var}`,
  # exploded `{var*}` and prefixed `{var:n}` variables. `acc` is iodata
  # that grows as values are appended; `substitutes` is any Access-able
  # container of variable values. Missing variables are silently skipped.
  @behaviour YuriTemplate.Expander

  @impl true
  def expand(acc, _substitutes, []), do: acc

  # Explode modifier (`{var*}`): keyword values render as "k=v" pairs,
  # plain lists as comma-separated percent-encoded items.
  def expand(acc, substitutes, [{:explode, var} | vars]) do
    case Access.fetch(substitutes, var) do
      :error ->
        expand(acc, substitutes, vars)

      {:ok, value} ->
        case value do
          [{k1, v1} | kvs] ->
            Enum.reduce(
              kvs,
              [acc, k1, "=", encode(v1)],
              fn {k, v}, acc -> [acc, ",", k, "=", encode(v)] end
            )

          [v | vs] ->
            Enum.reduce(
              vs,
              [acc, encode(v)],
              fn v, acc -> [acc, ",", encode(v)] end
            )

          [] ->
            acc
        end
        |> continue_expand(substitutes, vars)
    end
  end

  # Prefix modifier (`{var:n}`): emits at most `length` characters of a
  # string value. Non-binary values fall through and raise CaseClauseError.
  def expand(acc, substitutes, [{:prefix, var, length} | vars]) do
    case Access.fetch(substitutes, var) do
      :error ->
        expand(acc, substitutes, vars)

      {:ok, v} when is_binary(v) ->
        [acc, encode(String.slice(v, 0, length))]
        |> continue_expand(substitutes, vars)
    end
  end

  # Bare variable: keyword values render as "k,v" pairs, lists join with
  # commas, scalars are percent-encoded.
  def expand(acc, substitutes, [var | vars]) do
    case Access.fetch(substitutes, var) do
      :error ->
        expand(acc, substitutes, vars)

      {:ok, value} ->
        case value do
          [{k, v} | kvs] ->
            Enum.reduce(
              kvs,
              [acc, k, ",", encode(v)],
              fn {k, v}, acc -> [acc, ",", k, ",", encode(v)] end
            )

          # NOTE(review): plain-list items here are appended WITHOUT
          # percent-encoding, unlike the explode branch above — confirm
          # this asymmetry is intentional per RFC 6570.
          [v | vs] ->
            Enum.reduce(vs, [acc, v], &[&2, ",", &1])

          [] ->
            acc

          v ->
            [acc, encode(v)]
        end
        |> continue_expand(substitutes, vars)
    end
  end

  # Same as expand/3 for the remaining variables, except that every found
  # value is preceded by a "," separator (the first value was already
  # written by expand/3 with no separator).
  @spec continue_expand(iodata, Access.t(), YuriTemplate.Expander.spec()) :: iodata
  defp continue_expand(acc, _substitues, []), do: acc

  defp continue_expand(acc, substitutes, [var | vars]) do
    case Access.fetch(substitutes, var) do
      :error ->
        acc

      {:ok, value} ->
        acc = [acc, ","]

        case value do
          [{k, v} | kvs] ->
            Enum.reduce(
              kvs,
              [acc, k, ",", encode(v)],
              fn {k, v}, acc -> [acc, ",", k, ",", encode(v)] end
            )

          [v | vs] ->
            Enum.reduce(vs, [acc, v], &[&2, ",", &1])

          [] ->
            acc

          v ->
            [acc, encode(v)]
        end
    end
    |> continue_expand(substitutes, vars)
  end

  # Percent-encodes everything except RFC 3986 unreserved characters.
  @spec encode(String.t()) :: String.t()
  defp encode(s), do: URI.encode(s, &URI.char_unreserved?/1)
end
| 23.736364
| 83
| 0.456147
|
0301bc780b48eb52d187ba90bb69b052ed261017
| 162
|
exs
|
Elixir
|
priv/repo/migrations/20161204021700_rename_owner_to_owner_id_in_rooms.exs
|
Poniverse/LunaTube-API
|
433473f32ec04a636f6de60642e445fd2e478f98
|
[
"Apache-2.0"
] | null | null | null |
priv/repo/migrations/20161204021700_rename_owner_to_owner_id_in_rooms.exs
|
Poniverse/LunaTube-API
|
433473f32ec04a636f6de60642e445fd2e478f98
|
[
"Apache-2.0"
] | null | null | null |
priv/repo/migrations/20161204021700_rename_owner_to_owner_id_in_rooms.exs
|
Poniverse/LunaTube-API
|
433473f32ec04a636f6de60642e445fd2e478f98
|
[
"Apache-2.0"
] | null | null | null |
defmodule Lunatube.Repo.Migrations.RenameOwnerToOwnerIdInRooms do
  use Ecto.Migration

  # Renames rooms.owner to rooms.owner_id so the column follows the Ecto
  # foreign-key naming convention. `rename/3` is reversible, so a single
  # `change/0` handles both migrating up and rolling back.
  def change do
    rename table(:rooms), :owner, to: :owner_id
  end
end
| 20.25
| 65
| 0.765432
|
0301d572f7e0941544fffab2d0e8485a88322e34
| 601
|
ex
|
Elixir
|
lib/exfile/token.ex
|
sreecodeslayer/exfile
|
c88288563d688fb47a6fcae190dbe1b8eb64bf9b
|
[
"MIT"
] | 100
|
2015-12-25T12:38:41.000Z
|
2021-12-31T11:41:20.000Z
|
lib/exfile/token.ex
|
sreecodeslayer/exfile
|
c88288563d688fb47a6fcae190dbe1b8eb64bf9b
|
[
"MIT"
] | 62
|
2015-12-26T01:43:54.000Z
|
2019-09-15T16:16:35.000Z
|
lib/exfile/token.ex
|
sreecodeslayer/exfile
|
c88288563d688fb47a6fcae190dbe1b8eb64bf9b
|
[
"MIT"
] | 22
|
2016-04-19T11:54:38.000Z
|
2021-09-29T14:48:46.000Z
|
defmodule Exfile.Token do
  @moduledoc false

  # Generates and verifies HMAC-SHA256 tokens that authenticate file paths.
  # The signing key is read from Exfile.Config.secret/0 at call time.

  # Returns true only when `token` is valid base64url AND its decoded bytes
  # match the HMAC of `path`. Tokens arrive from untrusted clients, so the
  # comparison is constant-time to avoid leaking the expected HMAC through
  # response timing (the original used `==`, which short-circuits).
  @spec verify_token(Path.t, String.t) :: boolean
  def verify_token(path, token) do
    case Base.url_decode64(token) do
      {:ok, hmac} ->
        secure_compare(hmac, do_generate_token(path))
      :error ->
        false
    end
  end

  # Prefixes `path` with its own token: "TOKEN/path".
  @spec build_path(Path.t) :: Path.t
  def build_path(path),
    do: Path.join(generate_token(path), path)

  # Base64url-encoded HMAC of `path`.
  @spec generate_token(Path.t) :: String.t
  def generate_token(path),
    do: do_generate_token(path) |> Base.url_encode64

  # NOTE(review): :crypto.hmac/3 was removed in OTP 24+; newer releases need
  # :crypto.mac(:hmac, :sha256, key, data). Kept as-is for compatibility with
  # the OTP version this project targets — confirm before upgrading OTP.
  defp do_generate_token(path),
    do: :crypto.hmac(:sha256, Exfile.Config.secret, path)

  # Constant-time binary comparison: XOR every byte pair and OR the results,
  # so the work done is independent of where the first mismatch occurs.
  # Returns false immediately on length mismatch (length is not secret).
  defp secure_compare(a, b)
       when is_binary(a) and is_binary(b) and byte_size(a) == byte_size(b) do
    :binary.bin_to_list(a)
    |> Enum.zip(:binary.bin_to_list(b))
    |> Enum.reduce(0, fn {x, y}, acc -> :erlang.bor(acc, :erlang.bxor(x, y)) end)
    |> Kernel.==(0)
  end
  defp secure_compare(_a, _b), do: false
end
| 24.04
| 57
| 0.668885
|
0301f264798579ce0e5b7f1953c1fd61a1444bcc
| 3,521
|
exs
|
Elixir
|
test/mix/tasks/phoenix/new_test.exs
|
knewter/phoenix
|
191909d97511ab99f9bbcc776cba9988c8b07596
|
[
"MIT"
] | null | null | null |
test/mix/tasks/phoenix/new_test.exs
|
knewter/phoenix
|
191909d97511ab99f9bbcc776cba9988c8b07596
|
[
"MIT"
] | null | null | null |
test/mix/tasks/phoenix/new_test.exs
|
knewter/phoenix
|
191909d97511ab99f9bbcc776cba9988c8b07596
|
[
"MIT"
] | null | null | null |
Code.require_file "../../mix_helper.exs", __DIR__

defmodule Mix.Tasks.Phoenix.NewTest do
  # This test case needs to be sync because we rely on
  # changing the current working directory which is global.
  use ExUnit.Case
  use Plug.Test

  import MixHelper
  import ExUnit.CaptureIO

  @app_name "photo_blog"
  @tmp_path tmp_path()
  @project_path Path.join(@tmp_path, @app_name)
  # Oldest possible mtime; used to detect that a file was touched/recompiled.
  @epoch {{1970, 1, 1}, {0, 0, 0}}

  # Generates a fresh project once for the whole module; every test reuses it.
  setup_all do
    # Clean up and create a new project
    File.rm_rf(@project_path)
    Mix.Tasks.Phoenix.New.run([@app_name, @project_path])

    # Copy artifacts from Phoenix so we can compile and run tests
    File.cp_r "_build", Path.join(@project_path, "_build")
    File.cp_r "deps", Path.join(@project_path, "deps")
    File.cp_r "mix.lock", Path.join(@project_path, "mix.lock")

    :ok
  end

  test "creates files and directories" do
    File.cd! @project_path, fn ->
      assert_file ".gitignore"
      assert_file "README.md"
      assert_file "lib/photo_blog.ex", ~r/defmodule PhotoBlog do/
      assert_file "lib/photo_blog/endpoint.ex", ~r/defmodule PhotoBlog.Endpoint do/
      assert_file "priv/static/css/phoenix.css"
      assert_file "priv/static/images/phoenix.png"
      assert_file "priv/static/js/phoenix.js"
      assert_file "test/photo_blog_test.exs"
      assert_file "test/test_helper.exs"
      assert_file "web/controllers/page_controller.ex", ~r/defmodule PhotoBlog.PageController/
      assert_file "web/router.ex", ~r/defmodule PhotoBlog.Router/
    end
  end

  test "compiles and recompiles project" do
    Logger.disable(self())
    # Code reloading is normally off in tests; enabled here on purpose and
    # restored in the `after` clause below.
    Application.put_env(:phoenix, :code_reloader, true)
    Application.put_env(:photo_blog, PhotoBlog.Endpoint,
                        secret_key_base: String.duplicate("abcdefgh", 8))

    in_project :photo_blog, @project_path, fn _ ->
      Mix.Task.run "compile", ["--no-deps-check"]
      assert_received {:mix_shell, :info, ["Compiled lib/photo_blog.ex"]}
      assert_received {:mix_shell, :info, ["Compiled web/router.ex"]}
      refute_received {:mix_shell, :info, ["Compiled lib/phoenix.ex"]}
      Mix.shell.flush
      Mix.Task.clear

      # Adding a new template touches file (through mix)
      File.touch! "web/views/layout_view.ex", @epoch
      File.write! "web/templates/layout/another.html.eex", "oops"

      Mix.Task.run "compile", ["--no-deps-check"]
      assert File.stat!("web/views/layout_view.ex").mtime > @epoch

      # Adding a new template triggers recompilation (through request)
      File.touch! "web/views/page_view.ex", @epoch
      File.write! "web/templates/page/another.html.eex", "oops"

      {:ok, _} = Application.ensure_all_started(:photo_blog)
      PhotoBlog.Endpoint.call(conn(:get, "/"), [])
      assert File.stat!("web/views/page_view.ex").mtime > @epoch

      # TODO: We need to uncomment this after we move to Elixir v1.0.3
      # as running tests would automatically shutdown the Logger.
      # assert capture_io(fn ->
      #   Mix.Task.run("test", ["--no-start", "--no-compile"])
      # end) =~ "1 tests, 0 failures"
    end
  after
    Application.put_env(:phoenix, :code_reloader, false)
  end

  test "missing name and/or path arguments" do
    assert_raise Mix.Error, fn ->
      Mix.Tasks.Phoenix.New.run([])
    end
  end

  # Runs `fun` inside the generated project's Mix context, restoring the
  # current project afterwards. stderr is captured to keep test output clean.
  defp in_project(app, path, fun) do
    %{name: name, file: file} = Mix.Project.pop

    try do
      capture_io :stderr, fn ->
        Mix.Project.in_project app, path, [], fun
      end
    after
      Mix.Project.push name, file
    end
  end
end
| 32.906542
| 94
| 0.66998
|
03020be2a58ed7e6aa83afbbf60febf65ea9f0ed
| 1,564
|
ex
|
Elixir
|
todo_interface/lib/todo_interface_web/views/error_helpers.ex
|
moritzploss/todoServer
|
404db774493b42bb328c552c8c428fa4be2b10d1
|
[
"MIT"
] | null | null | null |
todo_interface/lib/todo_interface_web/views/error_helpers.ex
|
moritzploss/todoServer
|
404db774493b42bb328c552c8c428fa4be2b10d1
|
[
"MIT"
] | null | null | null |
todo_interface/lib/todo_interface_web/views/error_helpers.ex
|
moritzploss/todoServer
|
404db774493b42bb328c552c8c428fa4be2b10d1
|
[
"MIT"
] | null | null | null |
defmodule TodoInterfaceWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  use Phoenix.HTML

  @doc """
  Generates tag for inlined form input errors.
  """
  def error_tag(form, field) do
    form.errors
    |> Keyword.get_values(field)
    |> Enum.map(fn error ->
      content_tag(:span, translate_error(error),
        class: "help-block",
        data: [phx_error_for: input_id(form, field)]
      )
    end)
  end

  @doc """
  Translates an error message using gettext.
  """
  def translate_error({msg, opts}) do
    # Ecto produces errors as {template, opts}; the templates live in the
    # "errors" gettext domain (errors.po). A :count option means plural
    # rules apply, so we must route through dngettext; otherwise plain
    # dgettext is enough. Translation happens at runtime because the
    # templates are defined inside Ecto, not in our source.
    count = opts[:count]

    if count do
      Gettext.dngettext(TodoInterfaceWeb.Gettext, "errors", msg, msg, count, opts)
    else
      Gettext.dgettext(TodoInterfaceWeb.Gettext, "errors", msg, opts)
    end
  end
end
| 32.583333
| 82
| 0.66624
|
03022ffeb89b11e511a5ece8adfe99b8c4c41457
| 389
|
ex
|
Elixir
|
lib/codes/codes_i00.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
lib/codes/codes_i00.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
lib/codes/codes_i00.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
defmodule IcdCode.ICDCode.Codes_I00 do
  alias IcdCode.ICDCode

  # All three name fields of this category-level code share the same text.
  @name "Rheumatic fever without heart involvement"

  # ICD-10 code I00: a category code, so the short-code suffix is empty.
  def _I00 do
    %ICDCode{
      full_code: "I00",
      category_code: "I00",
      short_code: "",
      full_name: @name,
      short_name: @name,
      category_name: @name
    }
  end
end
| 24.3125
| 68
| 0.658098
|
03023f135fb8c6386b050a4217204ba001d6b142
| 435
|
ex
|
Elixir
|
lib/eds/dynamic_supervisor.ex
|
the-mediocre-dev/elixir-dev-server
|
1ea20f92e1344f7f9fe3f9ac4fee91fb7371d136
|
[
"MIT"
] | null | null | null |
lib/eds/dynamic_supervisor.ex
|
the-mediocre-dev/elixir-dev-server
|
1ea20f92e1344f7f9fe3f9ac4fee91fb7371d136
|
[
"MIT"
] | null | null | null |
lib/eds/dynamic_supervisor.ex
|
the-mediocre-dev/elixir-dev-server
|
1ea20f92e1344f7f9fe3f9ac4fee91fb7371d136
|
[
"MIT"
] | null | null | null |
defmodule EDS.DynamicSupervisor do
  @moduledoc false

  # Singleton DynamicSupervisor (registered under the module name) that
  # starts and stops children on demand with a :one_for_one strategy.
  use DynamicSupervisor

  def start_link(args),
    do: DynamicSupervisor.start_link(__MODULE__, args, name: __MODULE__)

  @impl true
  def init(_args),
    do: DynamicSupervisor.init(strategy: :one_for_one)

  # Starts `args` (a child spec or {module, arg}) under this supervisor.
  def start_child(args),
    do: DynamicSupervisor.start_child(__MODULE__, args)

  # Stops the child identified by `pid`.
  def terminate_child(pid),
    do: DynamicSupervisor.terminate_child(__MODULE__, pid)
end
| 20.714286
| 68
| 0.763218
|
030246e6bb2291657a51f5082df3e1971749cfcc
| 1,814
|
exs
|
Elixir
|
test/parser/js/declaration_test.exs
|
desweemerl/origami_parser
|
52af0913f26be51162588d94c784aa5971beaf1a
|
[
"MIT"
] | null | null | null |
test/parser/js/declaration_test.exs
|
desweemerl/origami_parser
|
52af0913f26be51162588d94c784aa5971beaf1a
|
[
"MIT"
] | null | null | null |
test/parser/js/declaration_test.exs
|
desweemerl/origami_parser
|
52af0913f26be51162588d94c784aa5971beaf1a
|
[
"MIT"
] | null | null | null |
defmodule Origami.Parser.Js.DeclarationTest do
  use ExUnit.Case

  alias Origami.Parser
  alias Origami.Parser.Js

  # Verifies the JS parser's AST for variable declarations. Expected nodes
  # are {tag, [interval: {start_line, start_col, end_line, end_col}], children}
  # tuples; intervals are zero-based and inclusive.

  test "check if single declarations are parsed" do
    declaration = """
    var a = 1;
    let b = 1;
    const c = 1;
    """

    {:ok, {:root, _, children}} = Parser.parse(declaration, Js)

    # One node per declaration, tagged by keyword (:var / :let / :const).
    expectation = [
      {:var, [interval: {0, 0, 0, 8}],
       [
         {:identifier, [interval: {0, 4, 0, 4}], ["a"]},
         {:integer, [interval: {0, 8, 0, 8}], ["1"]}
       ]},
      {:let, [interval: {1, 0, 1, 8}],
       [
         {:identifier, [interval: {1, 4, 1, 4}], ["b"]},
         {:integer, [interval: {1, 8, 1, 8}], ["1"]}
       ]},
      {:const, [interval: {2, 0, 2, 10}],
       [
         {:identifier, [interval: {2, 6, 2, 6}], ["c"]},
         {:integer, [interval: {2, 10, 2, 10}], ["1"]}
       ]}
    ]

    assert expectation == children
  end

  test "check if expression is parsed in declaration" do
    declaration = "let a = (b + 1) / 3"

    {:ok, {:root, _, children}} = Parser.parse(declaration, Js)

    # The initializer becomes nested :expr_operation nodes; the
    # parenthesized (b + 1) is the left operand of the "/" operation.
    expectation = [
      {
        :let,
        [interval: {0, 0, 0, 18}],
        [
          {
            :identifier,
            [interval: {0, 4, 0, 4}],
            ["a"]
          },
          {
            :expr_operation,
            [interval: {0, 8, 0, 18}],
            [
              {
                :expr_operation,
                [interval: {0, 8, 0, 14}],
                [
                  {:identifier, [interval: {0, 9, 0, 9}], ["b"]},
                  "+",
                  {:integer, [interval: {0, 13, 0, 13}], ["1"]}
                ]
              },
              "/",
              {:integer, [interval: {0, 18, 0, 18}], ["3"]}
            ]
          }
        ]
      }
    ]

    assert expectation == children
  end
end
| 23.868421
| 65
| 0.396913
|
03024ebe7f13f371ad3d00e0136821ab4e495ae9
| 2,158
|
exs
|
Elixir
|
test/views/job_view_test.exs
|
mathiaHT/ex_step_flow
|
6496e9511239de64f00119428476338dfcde9dea
|
[
"MIT"
] | 4
|
2019-12-07T05:18:26.000Z
|
2020-11-06T23:28:43.000Z
|
test/views/job_view_test.exs
|
mathiaHT/ex_step_flow
|
6496e9511239de64f00119428476338dfcde9dea
|
[
"MIT"
] | 53
|
2020-01-06T11:23:09.000Z
|
2021-06-25T15:30:07.000Z
|
test/views/job_view_test.exs
|
mathiaHT/ex_step_flow
|
6496e9511239de64f00119428476338dfcde9dea
|
[
"MIT"
] | 3
|
2020-01-30T15:37:40.000Z
|
2020-10-27T14:10:02.000Z
|
defmodule StepFlow.JobViewTest do
  use ExUnit.Case
  use Plug.Test

  alias Ecto.Adapters.SQL.Sandbox

  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View

  setup do
    # Explicitly get a connection before each test
    :ok = Sandbox.checkout(StepFlow.Repo)
    # Setting the shared mode
    Sandbox.mode(StepFlow.Repo, {:shared, self()})
  end

  # Minimal valid workflow used as the parent for the jobs under test.
  @workflow %{
    schema_version: "1.8",
    identifier: "id",
    version_major: 6,
    version_minor: 5,
    version_micro: 4,
    reference: "some id",
    steps: [],
    rights: [
      %{
        action: "create",
        groups: ["administrator"]
      }
    ]
  }

  test "render a Job" do
    {:ok, workflow} = StepFlow.Workflows.create_workflow(@workflow)
    workflow = StepFlow.Repo.preload(workflow, [:artifacts, :jobs])

    {:ok, job} =
      StepFlow.Jobs.create_job(%{name: "job_test", step_id: 0, workflow_id: workflow.id})

    # show.json wraps a single job map under the :data key.
    assert render(StepFlow.JobView, "show.json", %{job: job}) == %{
             data: %{
               id: job.id,
               inserted_at: job.inserted_at,
               name: "job_test",
               params: [],
               progressions: [],
               status: [],
               step_id: 0,
               updated_at: job.updated_at,
               workflow_id: workflow.id
             }
           }
  end

  test "render many Jobs" do
    {:ok, workflow} = StepFlow.Workflows.create_workflow(@workflow)
    workflow = StepFlow.Repo.preload(workflow, [:artifacts, :jobs])

    {:ok, job} =
      StepFlow.Jobs.create_job(%{name: "job_test", step_id: 0, workflow_id: workflow.id})

    # index.json takes a paginated %{data: jobs, total: n} input and renders
    # a list under :data plus the total count.
    assert render(StepFlow.JobView, "index.json", %{jobs: %{data: [job], total: 1}}) == %{
             data: [
               %{
                 id: job.id,
                 inserted_at: job.inserted_at,
                 name: "job_test",
                 params: [],
                 progressions: [],
                 status: [],
                 step_id: 0,
                 updated_at: job.updated_at,
                 workflow_id: workflow.id
               }
             ],
             total: 1
           }
  end
end
| 26.975
| 90
| 0.52873
|
0302501c35421b1e16900bf38e470e24f378217c
| 3,575
|
exs
|
Elixir
|
test/pushex/helpers_test.exs
|
mauricionr/pushex-1
|
a096335bd4b92da152cf9fe5f99c824433a6af41
|
[
"MIT"
] | 69
|
2016-03-29T15:35:29.000Z
|
2018-06-27T14:57:23.000Z
|
test/pushex/helpers_test.exs
|
mauricionr/pushex-1
|
a096335bd4b92da152cf9fe5f99c824433a6af41
|
[
"MIT"
] | 9
|
2016-04-29T09:14:49.000Z
|
2017-10-31T02:08:14.000Z
|
test/pushex/helpers_test.exs
|
mauricionr/pushex-1
|
a096335bd4b92da152cf9fe5f99c824433a6af41
|
[
"MIT"
] | 6
|
2016-08-26T12:56:05.000Z
|
2018-06-21T23:51:00.000Z
|
defmodule Pushex.HelpersTest do
  use Pushex.Case

  alias Pushex.Helpers
  alias Pushex.GCM

  # These tests run against the Pushex sandbox: send_notification/2 returns
  # a ref, the result is delivered as a message {response, request, ref},
  # and Pushex.Sandbox.wait_notifications/0,1 collects recorded sends.

  test "send_notification delegates to GCM when using: :gcm" do
    ref = Helpers.send_notification(%{body: "foo"}, to: "whoever", using: :gcm, with_app: "default_app")
    assert_receive {{:ok, res}, req, ^ref}
    assert match?(%GCM.Request{}, req)
    assert match?(%GCM.Response{}, res)
    assert [{_, req, {_, ^ref}}] = Pushex.Sandbox.wait_notifications
    assert req.notification.body == "foo"
  end

  test "allows to pass platform as string" do
    ref = Helpers.send_notification(%{}, to: "whoever", using: "gcm", with_app: "default_app")
    assert_receive {{:ok, res}, _req, ^ref}
    assert match?(%GCM.Response{}, res)
    assert [{_, _, {_, ^ref}}] = Pushex.Sandbox.wait_notifications
  end

  test "allows to pass list to :to" do
    ref = Helpers.send_notification(%{}, to: ["whoever"], using: "gcm", with_app: "default_app")
    assert_receive {{:ok, res}, _req, ^ref}
    assert match?(%GCM.Response{}, res)
    assert [{_, _, {_, ^ref}}] = Pushex.Sandbox.wait_notifications
  end

  # Passing a ready-made request struct selects the platform implicitly.
  test "send_notification delegates to GCM when passing a GCM request" do
    ref = Helpers.send_notification(%GCM.Request{})
    assert_receive {{:ok, res}, _, ^ref}
    assert match?(%GCM.Response{}, res)
    assert [{_, _, {_, ^ref}}] = Pushex.Sandbox.wait_notifications
  end

  test "send_notification delegates to APNS when passing an APNS request" do
    ref = Helpers.send_notification(%Pushex.APNS.Request{})
    assert_receive {{:ok, res}, _, ^ref}
    assert match?(%Pushex.APNS.Response{}, res)
    assert [{_, _, {_, ^ref}}] = Pushex.Sandbox.wait_notifications
  end

  # An app struct in :with_app also selects the platform implicitly.
  test "send_notification delegates to GCM when :with_app is a GCM app" do
    ref = Helpers.send_notification(%{}, to: "whoever", with_app: %GCM.App{})
    assert [{{:ok, res} , _, {_, ^ref}}] = Pushex.Sandbox.wait_notifications
    assert match?(%GCM.Response{}, res)
  end

  test "send_notification delegates to APNS when :with_app is a APNS app" do
    ref = Helpers.send_notification(%{}, to: "whoever", with_app: %Pushex.APNS.App{})
    assert [{{:ok, res} , _, {_, ^ref}}] = Pushex.Sandbox.wait_notifications
    assert match?(%Pushex.APNS.Response{}, res)
  end

  test "send_notification sends multiple notifications" do
    refs = Enum.map (1..10), fn _ ->
      Helpers.send_notification(%{}, to: "whoever", with_app: %GCM.App{})
    end
    # `list` is bound inside the assert-match and reused below.
    assert (list when length(list) == 10) = Pushex.Sandbox.wait_notifications(count: 10)
    Enum.each list, fn {_, _, {_, ref}} ->
      assert Enum.find(refs, &(&1 == ref))
    end
  end

  # Argument-validation failures raise rather than return errors.
  test "send_notification raises when neither with_app nor :using is passed" do
    assert_raise ArgumentError, fn -> Helpers.send_notification(%{}) end
  end

  test "send_notification raises when :using is passed without a default app" do
    assert_raise ArgumentError, fn -> Helpers.send_notification(%{}, using: :apns) end
  end

  test "send_notification raises when app does not exist" do
    assert_raise Pushex.AppNotFoundError, fn ->
      Helpers.send_notification(%{}, with_app: "foo", using: :apns)
    end
  end

  test "send_notification raises when :with_app is binary and :using is not passed" do
    assert_raise ArgumentError, fn -> Helpers.send_notification(%{}, with_app: "foo") end
  end

  test "send_notification gives a proper error message when platform does not exist" do
    assert_raise ArgumentError, ~s("foo" is not a valid platform), fn ->
      Helpers.send_notification(%{}, with_app: "foo", using: "foo")
    end
  end
end
| 39.722222
| 104
| 0.677762
|
03027fef620f7dee84a09bf79aa5b159738190ab
| 3,686
|
exs
|
Elixir
|
test/printer/one_of_printer_test.exs
|
HenkPoley/json-schema-to-elm
|
92230ac907d1eab27a0c8e4d969c5104f8e66383
|
[
"MIT"
] | null | null | null |
test/printer/one_of_printer_test.exs
|
HenkPoley/json-schema-to-elm
|
92230ac907d1eab27a0c8e4d969c5104f8e66383
|
[
"MIT"
] | null | null | null |
test/printer/one_of_printer_test.exs
|
HenkPoley/json-schema-to-elm
|
92230ac907d1eab27a0c8e4d969c5104f8e66383
|
[
"MIT"
] | null | null | null |
defmodule JS2ETest.Printer.OneOfPrinter do
  use ExUnit.Case

  require Logger

  alias JS2E.Printer
  alias JsonSchema.Types
  alias Printer.OneOfPrinter
  alias Types.{ObjectType, OneOfType, SchemaDefinition, TypeReference}

  # Golden-output tests: each print_* function is run against the fixture
  # schema below (a "shape" oneOf of square | circle) and the generated Elm
  # source is compared byte-for-byte against a heredoc.

  test "print 'one of' type value" do
    result =
      one_of_type()
      |> OneOfPrinter.print_type(schema_def(), %{}, module_name())

    expected_one_of_type_program = """
    type Shape
        = ShapeSq Square
        | ShapeCi Circle
    """

    one_of_type_program = result.printed_schema

    assert one_of_type_program == expected_one_of_type_program
  end

  test "print 'one of' decoder" do
    result =
      one_of_type()
      |> OneOfPrinter.print_decoder(schema_def(), %{}, module_name())

    expected_one_of_decoder_program = """
    shapeDecoder : Decoder Shape
    shapeDecoder =
        oneOf [ squareDecoder |> andThen (succeed << ShapeSq)
              , circleDecoder |> andThen (succeed << ShapeCi)
              ]
    """

    one_of_decoder_program = result.printed_schema

    assert one_of_decoder_program == expected_one_of_decoder_program
  end

  test "print 'one of' encoder" do
    result =
      one_of_type()
      |> OneOfPrinter.print_encoder(schema_def(), %{}, module_name())

    expected_one_of_encoder_program = """
    encodeShape : Shape -> Value
    encodeShape shape =
        case shape of
            ShapeSq square ->
                encodeSquare square

            ShapeCi circle ->
                encodeCircle circle
    """

    one_of_encoder_program = result.printed_schema

    assert one_of_encoder_program == expected_one_of_encoder_program
  end

  test "print 'one of' fuzzer" do
    result =
      one_of_type()
      |> OneOfPrinter.print_fuzzer(schema_def(), %{}, module_name())

    expected_one_of_fuzzer_program = """
    shapeFuzzer : Fuzzer Shape
    shapeFuzzer =
        Fuzz.oneOf
            [ squareFuzzer
            , circleFuzzer
            ]


    encodeDecodeShapeTest : Test
    encodeDecodeShapeTest =
        fuzz shapeFuzzer "can encode and decode Shape object" <|
            \\shape ->
                shape
                    |> encodeShape
                    |> Decode.decodeValue shapeDecoder
                    |> Expect.equal (Ok shape)
    """

    one_of_fuzzer_program = result.printed_schema

    assert one_of_fuzzer_program == expected_one_of_fuzzer_program
  end

  defp module_name, do: "Domain"

  # The oneOf under test: "shape", whose members are references into the
  # type dictionary below.
  defp one_of_type,
    do: %OneOfType{
      name: "shape",
      path: URI.parse("#/definitions/shape"),
      types: [
        URI.parse("#/shape/oneOf/0"),
        URI.parse("#/shape/oneOf/1")
      ]
    }

  defp schema_def,
    do: %SchemaDefinition{
      description: "Test schema",
      id: URI.parse("http://example.com/test.json"),
      title: "Test",
      types: type_dict()
    }

  # Resolves the oneOf member references to concrete object types.
  defp type_dict,
    do: %{
      "#/shape/oneOf/0" => %TypeReference{
        name: "square",
        path: URI.parse("#/definitions/square")
      },
      "#/shape/oneOf/1" => %TypeReference{
        name: "circle",
        path: URI.parse("#/definitions/circle")
      },
      "#/definitions/square" => %ObjectType{
        name: "square",
        path: URI.parse("#"),
        required: ["color", "size"],
        properties: %{
          "color" => URI.parse("#/properties/color"),
          "title" => URI.parse("#/properties/size")
        }
      },
      "#/definitions/circle" => %ObjectType{
        name: "circle",
        path: URI.parse("#"),
        required: ["color", "radius"],
        properties: %{
          "color" => URI.parse("#/properties/color"),
          "radius" => URI.parse("#/properties/radius")
        }
      }
    }
end
| 25.597222
| 70
| 0.589528
|
0302b03cd4172835d5617d2edfeb3a61f2206890
| 164
|
exs
|
Elixir
|
config/test.exs
|
elixirsc/git-pair
|
b8fde0e560fb00ccdc5f41b6bf8ae2eb48dc9eff
|
[
"MIT"
] | 11
|
2020-03-04T13:40:18.000Z
|
2020-11-07T02:04:50.000Z
|
config/test.exs
|
elixirsc/git-pair
|
b8fde0e560fb00ccdc5f41b6bf8ae2eb48dc9eff
|
[
"MIT"
] | 36
|
2020-03-07T11:35:39.000Z
|
2020-12-15T13:58:25.000Z
|
config/test.exs
|
elixirsc/git-pair
|
b8fde0e560fb00ccdc5f41b6bf8ae2eb48dc9eff
|
[
"MIT"
] | null | null | null |
import Config

# Test-environment wiring: swap the real collaborators for mocks so tests
# can stub shell command execution, storage, and git hooks. Keys for the
# same app are merged, so a single config/2 call is equivalent to three.
config :git_pair,
  command_runner: GitPair.SystemMock,
  storage: GitPair.StorageMock,
  hook: GitPair.HookMock
| 14.909091
| 36
| 0.786585
|
0302bfc627bf5ecd75c0f0bec54a5771ca0ca399
| 5,827
|
ex
|
Elixir
|
lib/sanbase/kafka/kafka_exporter.ex
|
rmoorman/sanbase2
|
226784ab43a24219e7332c49156b198d09a6dd85
|
[
"MIT"
] | 1
|
2022-01-30T19:51:39.000Z
|
2022-01-30T19:51:39.000Z
|
lib/sanbase/kafka/kafka_exporter.ex
|
rmoorman/sanbase2
|
226784ab43a24219e7332c49156b198d09a6dd85
|
[
"MIT"
] | null | null | null |
lib/sanbase/kafka/kafka_exporter.ex
|
rmoorman/sanbase2
|
226784ab43a24219e7332c49156b198d09a6dd85
|
[
"MIT"
] | null | null | null |
defmodule Sanbase.KafkaExporter do
  @moduledoc ~s"""
  Module for persisting any data to Kafka.
  The module exposes one function that should be used - `persist/1`.
  This functions adds the data to an internal buffer that is flushed
  every `kafka_flush_timeout` milliseconds or when the buffer is big enough.
  The exporter cannot send data more than once every 1 second so the
  GenServer cannot die too often and crash its supervisor
  """
  use GenServer
  require Logger
  require Sanbase.Utils.Config, as: Config

  # Producer module resolved at compile time so tests can configure a mock.
  @producer Application.compile_env(:sanbase, [Sanbase.KafkaExporter, :producer])

  # A single exportable record: a {key, value} pair of binaries.
  @type data :: {String.t(), String.t()}
  @type result :: :ok | {:error, String.t()}
  @typedoc ~s"""
  Options that describe to which kafka topic and how often to send the batches.
  These options do not describe the connection
  """
  @type options :: [
          {:name, atom()}
          | {:topic, String.t()}
          | {:kafka_flush_timeout, non_neg_integer()}
          | {:buffering_max_messages, non_neg_integer()}
          | {:can_send_after_interval, non_neg_integer()}
        ]

  @spec start_link(options) :: GenServer.on_start()
  def start_link(opts) do
    # Defaults to a singleton registered under the module name.
    name = Keyword.get(opts, :name, __MODULE__)
    GenServer.start_link(__MODULE__, opts, name: name)
  end

  # :id is mandatory so several exporters can run under the same supervisor.
  def child_spec(opts) do
    %{
      id: Keyword.fetch!(opts, :id),
      start: {__MODULE__, :start_link, [opts]}
    }
  end

  @spec init(options) :: {:ok, state} when state: map()
  def init(opts) do
    kafka_flush_timeout = Keyword.get(opts, :kafka_flush_timeout, 30_000)
    buffering_max_messages = Keyword.get(opts, :buffering_max_messages, 1000)
    can_send_after_interval = Keyword.get(opts, :can_send_after_interval, 1000)
    # Arm the first periodic flush; handle_info(:flush, _) re-arms it forever.
    Process.send_after(self(), :flush, kafka_flush_timeout)

    {:ok,
     %{
       topic: Keyword.fetch!(opts, :topic),
       # Buffered messages, newest prepended first; flushed in bulk.
       data: [],
       size: 0,
       kafka_flush_timeout: kafka_flush_timeout,
       buffering_max_messages: buffering_max_messages,
       can_send_after_interval: can_send_after_interval,
       # Earliest moment the next rate-limited send may happen.
       can_send_after: DateTime.utc_now() |> DateTime.add(can_send_after_interval, :millisecond)
     }}
  end

  @doc ~s"""
  Asynchronously add data to be exported to the buffer.
  It will be sent no longer than `kafka_flush_timeout` milliseconds later. The data
  is pushed to an internal buffer that is then send at once to Kafka.
  """
  @spec persist_async(data | [data], pid() | atom()) :: :ok
  def persist_async(data, exporter) do
    GenServer.cast(exporter, {:persist, data})
  end

  # Synchronous variant: blocks until the batch has been handed to the
  # producer and returns its result.
  @spec persist_sync(data | [data], pid() | atom()) :: result
  def persist_sync(data, exporter, timeout \\ 60_000) do
    GenServer.call(exporter, {:persist, data}, timeout)
  end

  # Bypasses the GenServer and sends directly from the caller's process.
  def send_data_to_topic_from_current_process(data, topic) do
    send_data_immediately(data, %{topic: topic, size: length(data)})
  end

  # Forces an immediate synchronous flush of the buffer.
  def flush(exporter \\ __MODULE__) do
    GenServer.call(exporter, :flush)
  end

  @doc ~s"""
  Send all available data in the buffers before shutting down.
  The data recorder should be started before the Endpoint in the supervison tree.
  This means that when shutting down it will be stopped after the Endpoint so
  all data will be stored in Kafka and no more data is expected.
  """
  def terminate(_reason, state) do
    Logger.info(
      "Terminating the KafkaExporter. Sending #{length(state.data)} events to kafka topic: #{state.topic}"
    )
    send_data(state.data, state)
    :ok
  end

  @spec handle_call({:persist, data | [data]}, any(), state) :: {:reply, result, state}
        when state: map()
  def handle_call({:persist, data}, _from, state) do
    data = List.wrap(data)
    # Sync persist skips buffering limits: new + buffered messages are sent
    # right away and the producer's result is relayed to the caller.
    send_data_result =
      (data ++ state.data)
      |> send_data_immediately(%{state | size: state.size + length(data)})
    {:reply, send_data_result, %{state | data: [], size: 0}}
  end

  def handle_call(:flush, _from, state) do
    send_data_immediately(state.data, state)
    {:reply, :ok, %{state | data: [], size: 0}}
  end

  @spec handle_cast({:persist, data | [data]}, state) :: {:noreply, state}
        when state: map()
  def handle_cast({:persist, data}, state) do
    data = List.wrap(data)
    new_messages_length = length(data)
    # Flush eagerly once the buffer would reach the configured maximum;
    # otherwise just prepend and wait for the periodic :flush tick.
    case state.size + new_messages_length >= state.buffering_max_messages do
      true ->
        :ok = send_data(data ++ state.data, %{state | size: state.size + new_messages_length})
        {:noreply,
         %{
           state
           | data: [],
             size: 0,
             # Push the rate-limit window forward after a full send.
             can_send_after:
               DateTime.utc_now() |> DateTime.add(state.can_send_after_interval, :millisecond)
         }}
      false ->
        {:noreply, %{state | data: data ++ state.data, size: state.size + new_messages_length}}
    end
  end

  # Periodic flush tick; re-arms itself.
  def handle_info(:flush, state) do
    send_data(state.data, state)
    Process.send_after(self(), :flush, state.kafka_flush_timeout)
    {:noreply, %{state | data: [], size: 0}}
  end

  defp send_data([], _), do: :ok
  defp send_data(nil, _), do: :ok
  # In case there is no wait period between sends, do not execute the sleep_until
  # at all
  defp send_data(data, %{topic: topic, can_send_after_interval: 0, size: size}) do
    Logger.info("Sending #{size} events to Kafka topic: #{topic}")
    @producer.send_data(topic, data)
  end

  # Rate-limited path: sleeps until the send window opens, then sends.
  defp send_data(data, %{topic: topic, can_send_after: can_send_after, size: size}) do
    Sanbase.DateTimeUtils.sleep_until(can_send_after)
    Logger.info("Sending #{size} events to Kafka topic: #{topic}")
    @producer.send_data(topic, data)
  end

  defp send_data_immediately([], _), do: :ok
  defp send_data_immediately(nil, _), do: :ok
  # Sends without any rate limiting or sleeping.
  defp send_data_immediately(data, %{topic: topic, size: size}) do
    Logger.info("Sending #{size} events to Kafka topic: #{topic}")
    @producer.send_data(topic, data)
  end
end
| 32.372222
| 106
| 0.669126
|
0302c00f0a768799f57f98f917b8533272023615
| 176
|
exs
|
Elixir
|
daniel/prog_elix/ch22/eg1.exs
|
jdashton/glowing-succotash
|
44580c2d4cb300e33156d42e358e8a055948a079
|
[
"MIT"
] | null | null | null |
daniel/prog_elix/ch22/eg1.exs
|
jdashton/glowing-succotash
|
44580c2d4cb300e33156d42e358e8a055948a079
|
[
"MIT"
] | 1
|
2020-02-26T14:55:23.000Z
|
2020-02-26T14:55:23.000Z
|
daniel/prog_elix/ch22/eg1.exs
|
jdashton/glowing-succotash
|
44580c2d4cb300e33156d42e358e8a055948a079
|
[
"MIT"
] | null | null | null |
defmodule My do
  # Demo macro: receives its argument as an AST (quoted expression),
  # prints that AST at compile time via IO.inspect, and replaces the
  # call site with code that prints "Different code" instead of ever
  # running the original argument.
  defmacro macro(code) do
    IO.inspect code
    quote do: IO.puts "Different code"
  end
end
defmodule Test do
  # Macros must be required (or imported) before use.
  require My
  # `IO.puts("hello")` is passed as an AST and never executed; the macro
  # expands here, at compile time of this module, into
  # `IO.puts "Different code"`.
  My.macro(IO.puts("hello"))
end
| 14.666667
| 38
| 0.693182
|
0302c10df99a157ef76aacc9bae6853385db261c
| 1,026
|
exs
|
Elixir
|
test/registery_test.exs
|
alisinabh/yasd
|
b42f164512ae3060de5aa50a43ff18ebbf5e9354
|
[
"MIT"
] | null | null | null |
test/registery_test.exs
|
alisinabh/yasd
|
b42f164512ae3060de5aa50a43ff18ebbf5e9354
|
[
"MIT"
] | null | null | null |
test/registery_test.exs
|
alisinabh/yasd
|
b42f164512ae3060de5aa50a43ff18ebbf5e9354
|
[
"MIT"
] | null | null | null |
defmodule YASD.RegistryTest do
  use ExUnit.Case

  alias YASD.Registry

  test "register a service and fetching nodes work" do
    # Random suffix keeps the run independent of previously registered names.
    service_name = "svc-#{Enum.random(1..99999)}"
    :ok = Registry.register(service_name, "192.168.1.1")
    :ok = Registry.register(service_name, "192.168.1.2", [])
    # Re-registering the same node must be idempotent (no duplicate below).
    :ok = Registry.register(service_name, "192.168.1.1", [])
    {:ok, nodes} = Registry.list_nodes(service_name)
    assert ["192.168.1.1", "192.168.1.2"] == Enum.sort(nodes)
    assert {:ok, [service_name]} == Registry.list_services()
  end

  # Fixed typo in the test name: "tegister" -> "register".
  test "register a tagged service and fetch by tag works" do
    service_name = "tsvc-#{Enum.random(1..99999)}"
    :ok = Registry.register(service_name, "192.168.1.1")
    :ok = Registry.register(service_name, "192.168.1.2", ["client"])
    :ok = Registry.register(service_name, "192.168.1.1", ["server"])
    assert {:ok, ["192.168.1.2"]} == Registry.list_nodes(service_name, ["client"])
    assert {:ok, ["192.168.1.1"]} == Registry.list_nodes(service_name, ["server"])
  end
end
| 36.642857
| 82
| 0.65692
|
0302c59357f0588e9f33cf58c1e11266f6ac2d7c
| 589
|
ex
|
Elixir
|
lib/Voice/udp.ex
|
ElinksFr/alchemy
|
4c64e5c619977a62a5674dbd2b1ef29f76f6f44e
|
[
"MIT"
] | null | null | null |
lib/Voice/udp.ex
|
ElinksFr/alchemy
|
4c64e5c619977a62a5674dbd2b1ef29f76f6f44e
|
[
"MIT"
] | null | null | null |
lib/Voice/udp.ex
|
ElinksFr/alchemy
|
4c64e5c619977a62a5674dbd2b1ef29f76f6f44e
|
[
"MIT"
] | null | null | null |
defmodule Alchemy.Voice.UDP do
  @moduledoc false
  # Opens the UDP socket used for voice data and performs IP discovery:
  # sends a payload containing the SSRC, then parses our externally visible
  # IP and port out of the reply.
  def open_udp(endpoint, port, ssrc) do
    {:ok, discord_ip} = :inet.parse_address(to_charlist(endpoint))
    # 560-bit (70-byte) discovery payload carrying only the SSRC.
    data = <<ssrc :: size(560)>>
    udp_opts = [:binary, active: false, reuseaddr: true]
    # Port 0 lets the OS pick an ephemeral local port.
    {:ok, udp} = :gen_udp.open(0, udp_opts)
    :gen_udp.send(udp, discord_ip, port, data)
    # Blocks until the 70-byte discovery response arrives (passive socket).
    {:ok, discovery} = :gen_udp.recv(udp, 70)
    # Reply layout per the bit sizes below: 4 bytes skipped, 14 bytes of IP,
    # 50 bytes of padding, then a 16-bit port.
    # NOTE(review): layout inferred from this match only — confirm against
    # the voice IP-discovery packet spec in use.
    <<_padding :: size(32), my_ip :: bitstring-size(112),
      _null :: size(400), my_port :: size(16)>> =
      discovery |> Tuple.to_list |> List.last
    {my_ip, my_port, discord_ip, udp}
  end
end
| 34.647059
| 66
| 0.634975
|
03030560f74e5afb5be531b2e040c2f779547d13
| 1,267
|
ex
|
Elixir
|
lib/dialyxir/warnings/negative_guard_fail.ex
|
groksrc/dialyxir
|
2968bea2c22e67571bfa939422fc370ea5ce5b4c
|
[
"Apache-2.0"
] | null | null | null |
lib/dialyxir/warnings/negative_guard_fail.ex
|
groksrc/dialyxir
|
2968bea2c22e67571bfa939422fc370ea5ce5b4c
|
[
"Apache-2.0"
] | null | null | null |
lib/dialyxir/warnings/negative_guard_fail.ex
|
groksrc/dialyxir
|
2968bea2c22e67571bfa939422fc370ea5ce5b4c
|
[
"Apache-2.0"
] | null | null | null |
defmodule Dialyxir.Warnings.NegativeGuardFail do
  # Formats the dialyzer warning for a negated guard (`not ...`) that can
  # never succeed.
  @behaviour Dialyxir.Warning

  @impl Dialyxir.Warning
  @spec warning() :: :neg_guard_fail
  def warning, do: :neg_guard_fail

  @impl Dialyxir.Warning
  @spec format_short([String.t()]) :: String.t()
  def format_short(_args), do: "Guard test can never succeed."

  @impl Dialyxir.Warning
  @spec format_long([String.t()]) :: String.t()
  def format_long([guard_name, guard_args]) do
    # Two-element form: a named guard applied to an argument list.
    formatted_args = Erlex.pretty_print_args(guard_args)

    """
    Guard test:
    not #{guard_name}#{formatted_args}
    can never succeed.
    """
  end

  def format_long([left_operand, operator, right_operand]) do
    # Three-element form: an infix comparison such as `a =:= b`.
    formatted_operator = Erlex.pretty_print_infix(operator)

    """
    Guard test:
    not #{left_operand} #{formatted_operator} #{right_operand}
    can never succeed.
    """
  end

  @impl Dialyxir.Warning
  @spec explain() :: String.t()
  def explain do
    """
    The function guard either presents an impossible guard or the only
    calls will never succeed against the guards.
    Example:
    defmodule Example do
    def ok(ok = "ok") when not is_bitstring(ok) do
    :ok
    end
    end
    or
    defmodule Example do
    def ok() do
    ok(:ok)
    end
    defp ok(ok) when not is_atom(ok) do
    :ok
    end
    end
    """
  end
end
| 18.910448
| 70
| 0.621942
|
0303066612d79d9052dc8e0e86566898ea34d900
| 65
|
exs
|
Elixir
|
config/test.exs
|
grahac/eth
|
950eeb89e4059f0e8711ec5c7944732c161a4bef
|
[
"MIT"
] | 4
|
2021-12-01T17:20:34.000Z
|
2021-12-09T23:09:03.000Z
|
config/test.exs
|
grahac/eth
|
950eeb89e4059f0e8711ec5c7944732c161a4bef
|
[
"MIT"
] | null | null | null |
config/test.exs
|
grahac/eth
|
950eeb89e4059f0e8711ec5c7944732c161a4bef
|
[
"MIT"
] | 4
|
2021-04-29T23:42:55.000Z
|
2022-01-30T23:55:58.000Z
|
# Test config: point ethereumex at a local JSON-RPC node.
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` — left unchanged here to avoid a behavior-affecting edit.
use Mix.Config
config :ethereumex, url: "http://localhost:8545"
| 16.25
| 48
| 0.738462
|
03031de0afd54e0db34d5ac3aaa3ab2cd8fd1d19
| 1,377
|
exs
|
Elixir
|
mix.exs
|
arduino-man/fona_modern
|
61845bbbbc46a61a50e59a97c68709f2722078a6
|
[
"MIT"
] | null | null | null |
mix.exs
|
arduino-man/fona_modern
|
61845bbbbc46a61a50e59a97c68709f2722078a6
|
[
"MIT"
] | null | null | null |
mix.exs
|
arduino-man/fona_modern
|
61845bbbbc46a61a50e59a97c68709f2722078a6
|
[
"MIT"
] | null | null | null |
defmodule Fona.Mixfile do
  use Mix.Project

  # Mix project definition for the :fona_modern hex package.
  def project do
    [app: :fona_modern,
     version: "0.2.0",
     elixir: "~> 1.11",
     # Embed/compile-for-release behavior only for prod builds.
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     description: description(),
     package: package(),
     deps: deps(),
     name: "Fona_Modern",
     # NOTE(review): source_url advertises st23am/fona while package links
     # below point at arduino-man/fona_modern — confirm which repository
     # should be shown on hex.pm / in docs.
     source_url: "https://github.com/st23am/fona"
    ]
  end
  # Configuration for the OTP application
  #
  # Type "mix help compile.app" for more information
  def application do
    # Specify extra applications you'll use from Erlang/Elixir
    [extra_applications: [:logger],
     mod: {Fona.Application, []}]
  end
  # One-line summary shown on hex.pm.
  defp description() do
    "A hex package to control the Fona 808 shield by Adafruit Industries now updated for Circuits UART"
  end
  # Hex package metadata (published files, maintainers, license, links).
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["James Smith", "Mario Solorzano"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/arduino-man/fona_modern"}
    ]
  end
  # Dependencies can be Hex packages:
  #
  # {:my_dep, "~> 0.3.0"}
  #
  # Or git/path repositories:
  #
  # {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
  #
  # Type "mix help deps" for more examples and options
  defp deps do
    [
      {:circuits_uart, "~> 1.4"},
      {:ex_doc, ">= 0.0.0", only: :dev}
    ]
  end
end
| 24.589286
| 103
| 0.606391
|
03032ed9b2305dac0192a722f3176c8df0fcf2f6
| 2,379
|
ex
|
Elixir
|
lib/minecraft_controller_web.ex
|
TenTakano/Minecraft_Controller
|
a118a6e9694da3c0fdfa9ec93872790d38f093e3
|
[
"MIT"
] | null | null | null |
lib/minecraft_controller_web.ex
|
TenTakano/Minecraft_Controller
|
a118a6e9694da3c0fdfa9ec93872790d38f093e3
|
[
"MIT"
] | 29
|
2021-02-13T06:54:56.000Z
|
2021-06-06T09:55:36.000Z
|
lib/minecraft_controller_web.ex
|
TenTakano/Minecraft_Controller
|
a118a6e9694da3c0fdfa9ec93872790d38f093e3
|
[
"MIT"
] | null | null | null |
defmodule MinecraftControllerWeb do
  @moduledoc """
  The entrypoint for defining your web interface, such
  as controllers, views, channels and so on.
  This can be used in your application as:
  use MinecraftControllerWeb, :controller
  use MinecraftControllerWeb, :view
  The definitions below will be executed for every view,
  controller, etc, so keep them short and clean, focused
  on imports, uses and aliases.
  Do NOT define functions inside the quoted expressions
  below. Instead, define any helper function in modules
  and import those modules here.
  """

  # Quoted block injected into every controller via
  # `use MinecraftControllerWeb, :controller`.
  def controller do
    quote do
      use Phoenix.Controller, namespace: MinecraftControllerWeb
      import Plug.Conn
      import MinecraftControllerWeb.Gettext
      alias MinecraftControllerWeb.Router.Helpers, as: Routes
      alias MinecraftControllerWeb.Error
      alias MinecraftControllerWeb.Plug.VerifyApiToken

      # Renders an error module (whose new/0 returns a struct carrying
      # :status, :type and :message) as a JSON response. Defined inside the
      # quote, so every controller receives its own private copy.
      @spec error_json(Conn.t(), module) :: Conn.t()
      defp error_json(conn, error_module) do
        error = error_module.new()
        conn
        |> put_status(error.status)
        |> json(Map.take(error, [:type, :message]))
      end
    end
  end

  # Quoted block for Phoenix views.
  def view do
    quote do
      use Phoenix.View,
        root: "lib/minecraft_controller_web/templates",
        namespace: MinecraftControllerWeb
      # Import convenience functions from controllers
      import Phoenix.Controller,
        only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
      # Include shared imports and aliases for views
      unquote(view_helpers())
    end
  end

  # Quoted block for the router module.
  def router do
    quote do
      use Phoenix.Router
      import Plug.Conn
      import Phoenix.Controller
    end
  end

  # Quoted block for Phoenix channels.
  def channel do
    quote do
      use Phoenix.Channel
      import MinecraftControllerWeb.Gettext
    end
  end

  # Imports/aliases shared by all views (spliced into view/0 above).
  defp view_helpers do
    quote do
      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML
      # Import basic rendering functionality (render, render_layout, etc)
      import Phoenix.View
      import MinecraftControllerWeb.ErrorHelpers
      import MinecraftControllerWeb.Gettext
      alias MinecraftControllerWeb.Router.Helpers, as: Routes
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
| 25.580645
| 76
| 0.696091
|
030368ffccc5f29ecb7f71377bdbb31c4a1981d4
| 659
|
exs
|
Elixir
|
test/codec_gsm_test.exs
|
xirsys/xmedialib
|
8e59b7691ffae93ea62fd9f037bf380c7f676ba0
|
[
"Apache-2.0"
] | 7
|
2018-12-10T13:50:05.000Z
|
2020-10-20T14:16:42.000Z
|
test/codec_gsm_test.exs
|
xirsys/xmedialib
|
8e59b7691ffae93ea62fd9f037bf380c7f676ba0
|
[
"Apache-2.0"
] | null | null | null |
test/codec_gsm_test.exs
|
xirsys/xmedialib
|
8e59b7691ffae93ea62fd9f037bf380c7f676ba0
|
[
"Apache-2.0"
] | 4
|
2019-01-09T17:46:44.000Z
|
2020-02-02T12:00:04.000Z
|
defmodule XMediaLib.CodecGSMTest do
  use ExUnit.Case
  alias XMediaLib.TestUtils

  # Round-trips reference sample files through the GSM codec. The numeric
  # arguments are frame sizes in bytes: 33 for an encoded GSM frame, 320 for
  # a raw PCM frame (presumably 160 16-bit samples = 20 ms @ 8 kHz — confirm
  # against TestUtils). The `{'GSM', 8000, 1}` charlist tuple selects the
  # codec, sample rate and channel count.
  test "decoding from GSM to PCM" do
    assert TestUtils.codec_decode(
             "test/samples/gsm/sample-gsm-16-mono-8khz.raw",
             "test/samples/gsm/sample-pcm-16-mono-8khz.raw",
             33,
             "GSM",
             {'GSM', 8000, 1}
           )
  end

  test "Test encoding from PCM to GSM" do
    assert TestUtils.codec_encode(
             "test/samples/gsm/sample-pcm-16-mono-8khz.raw",
             "test/samples/gsm/sample-gsm-16-mono-8khz.from_pcm",
             320,
             "GSM",
             {'GSM', 8000, 1}
           )
  end
end
| 26.36
| 65
| 0.550835
|
030390aebcae188341777c7d263baa87e6d16f92
| 339
|
ex
|
Elixir
|
lib/offer_hunters/comments/get.ex
|
Ryandls/offer_hunters-backend
|
abedff162d8623e2fbaa4c5e4a518f1726bef436
|
[
"MIT"
] | 3
|
2021-10-01T21:13:02.000Z
|
2021-11-05T22:25:55.000Z
|
lib/offer_hunters/comments/get.ex
|
Ryandls/offer_hunters-backend
|
abedff162d8623e2fbaa4c5e4a518f1726bef436
|
[
"MIT"
] | null | null | null |
lib/offer_hunters/comments/get.ex
|
Ryandls/offer_hunters-backend
|
abedff162d8623e2fbaa4c5e4a518f1726bef436
|
[
"MIT"
] | null | null | null |
defmodule OfferHunters.Comments.Get do
  @moduledoc """
  Module for getting comments.
  """
  alias OfferHunters.{Comment, Error, Repo}

  @doc """
  Fetches a comment by primary key.

  Returns `{:ok, %Comment{}}` when found, otherwise an
  `{:error, ...}` tuple built with a `:not_found` status.
  """
  def by_id(id) do
    case Repo.get(Comment, id) do
      %Comment{} = comment ->
        {:ok, comment}

      nil ->
        # Fixed typo in the user-facing message: "does't" -> "doesn't".
        {:error, Error.build(:not_found, "This id doesn't exist")}
    end
  end
end
| 19.941176
| 65
| 0.60472
|
0303c2a44ea706ed09d7f9f120de1ffa8838fe7a
| 510
|
ex
|
Elixir
|
lib/map.ex
|
ElixirCourse/lists-recursion-live-coding
|
57c3d2dca0495451b9155de0c11db0d46fcf5031
|
[
"MIT"
] | null | null | null |
lib/map.ex
|
ElixirCourse/lists-recursion-live-coding
|
57c3d2dca0495451b9155de0c11db0d46fcf5031
|
[
"MIT"
] | null | null | null |
lib/map.ex
|
ElixirCourse/lists-recursion-live-coding
|
57c3d2dca0495451b9155de0c11db0d46fcf5031
|
[
"MIT"
] | null | null | null |
defmodule List.Map do
  # Several equivalent implementations of `map`, contrasting explicit
  # recursion, right/left folds and Enum.reduce.

  # Body recursion: the mapped list is assembled in order as the calls return.
  def map([], _fun), do: []
  def map([head | tail], fun), do: [fun.(head) | map(tail, fun)]

  # Right fold: preserves order without an explicit reverse at the end.
  def rmap(list, fun) do
    List.Reduce.foldr(list, [], fn item, mapped -> [fun.(item) | mapped] end)
  end

  # Left fold prepends, so the accumulator must be reversed once afterwards.
  def lmap(list, fun) do
    list
    |> List.Reduce.foldl([], fn item, mapped -> [fun.(item) | mapped] end)
    |> List.Reverse.reverse()
  end

  # Same prepend-then-reverse shape, built on the standard library.
  def emap(list, fun) do
    list
    |> Enum.reduce([], fn item, mapped -> [fun.(item) | mapped] end)
    |> Enum.reverse()
  end
end
| 18.214286
| 52
| 0.505882
|
0303ccf3ab5b6f1340ff8c0d8cb094acec631b5d
| 235
|
exs
|
Elixir
|
priv/repo/migrations/20181217050227_add_live_to_queue_sources.exs
|
upmaru/cineplex
|
7d1d516d3e3d3683b2ad4425b61517a8f556f721
|
[
"MIT"
] | null | null | null |
priv/repo/migrations/20181217050227_add_live_to_queue_sources.exs
|
upmaru/cineplex
|
7d1d516d3e3d3683b2ad4425b61517a8f556f721
|
[
"MIT"
] | null | null | null |
priv/repo/migrations/20181217050227_add_live_to_queue_sources.exs
|
upmaru/cineplex
|
7d1d516d3e3d3683b2ad4425b61517a8f556f721
|
[
"MIT"
] | null | null | null |
defmodule Cineplex.Repo.Migrations.AddLiveToQueueSources do
  use Ecto.Migration

  # Adds a `live` boolean flag (default true) to queue_sources and indexes
  # it so live sources can be filtered efficiently.
  def change do
    alter table(:queue_sources) do
      add :live, :boolean, default: true
    end
    create index(:queue_sources, [:live])
  end
end
| 19.583333
| 59
| 0.714894
|
0303f1201e716c22dc67a0081bc3f61194813644
| 3,700
|
ex
|
Elixir
|
apps/language_server/lib/language_server/providers/formatting.ex
|
KTSCode/elixir-ls
|
23d8b2a1cdbf8df1ae7e97c786779bf2c29201bc
|
[
"Apache-2.0"
] | null | null | null |
apps/language_server/lib/language_server/providers/formatting.ex
|
KTSCode/elixir-ls
|
23d8b2a1cdbf8df1ae7e97c786779bf2c29201bc
|
[
"Apache-2.0"
] | null | null | null |
apps/language_server/lib/language_server/providers/formatting.ex
|
KTSCode/elixir-ls
|
23d8b2a1cdbf8df1ae7e97c786779bf2c29201bc
|
[
"Apache-2.0"
] | null | null | null |
defmodule ElixirLS.LanguageServer.Providers.Formatting do
  import ElixirLS.LanguageServer.Protocol, only: [range: 4]
  alias ElixirLS.LanguageServer.SourceFile

  # Formatting relies on Code.format_string!/2, so probe for it first.
  def supported? do
    function_exported?(Code, :format_string!, 2)
  end

  # Formats `source_file` and returns {:ok, lsp_text_edits}. Returns
  # {:ok, []} when the file is not covered by the formatter's :inputs globs,
  # and {:error, :internal_error, msg} for config/cwd/syntax problems.
  def format(source_file, uri, project_dir) do
    if can_format?(uri, project_dir) do
      case SourceFile.formatter_opts(uri) do
        {:ok, opts} ->
          if should_format?(uri, project_dir, opts[:inputs]) do
            # format_string!/2 omits the trailing newline; add it back.
            formatted = IO.iodata_to_binary([Code.format_string!(source_file.text, opts), ?\n])
            # Diff old vs new text so the client applies minimal edits.
            response =
              source_file.text
              |> String.myers_difference(formatted)
              |> myers_diff_to_text_edits()
            {:ok, response}
          else
            {:ok, []}
          end
        :error ->
          {:error, :internal_error, "Unable to fetch formatter options"}
      end
    else
      msg =
        "Cannot format file from current directory " <>
          "(Currently in #{Path.relative_to(File.cwd!(), project_dir)})"
      {:error, :internal_error, msg}
    end
  rescue
    # Formatting malformed code raises; report it as an LSP error instead of
    # crashing the server.
    _e in [TokenMissingError, SyntaxError] ->
      {:error, :internal_error, "Unable to format due to syntax error"}
  end

  # If in an umbrella project, the cwd might be set to a sub-app if it's being compiled. This is
  # fine if the file we're trying to format is in that app. Otherwise, we return an error.
  # Paths are lowercased before comparison (case-insensitive file systems).
  defp can_format?(file_uri, project_dir) do
    project_dir = project_dir |> String.downcase()
    file_path = file_uri |> SourceFile.path_from_uri() |> String.downcase()
    cwd = File.cwd!() |> String.downcase()
    not String.starts_with?(file_path, project_dir) or
      String.starts_with?(Path.absname(file_path), cwd)
  end

  # True when the file matches at least one of the formatter's :inputs globs.
  def should_format?(file_uri, project_dir, inputs) when is_list(inputs) do
    file = String.trim_leading(file_uri, "file://")
    Enum.any?(inputs, fn glob ->
      project_dir
      |> Path.join(glob)
      |> Path.wildcard(match_dot: true)
      |> Enum.any?(&(file == &1))
    end)
  end

  # No :inputs configured -> format everything.
  def should_format?(_file_uri, _project_dir, _inputs), do: true

  defp myers_diff_to_text_edits(myers_diff, starting_pos \\ {0, 0}) do
    myers_diff_to_text_edits(myers_diff, starting_pos, [])
  end

  defp myers_diff_to_text_edits([], _pos, edits) do
    edits
  end

  # Walks the myers diff while tracking a {line, col} cursor in the ORIGINAL
  # text: :eq segments advance the cursor, :ins/:del become LSP edits, and a
  # :del immediately followed by :ins is merged into one replacement edit.
  defp myers_diff_to_text_edits([diff | rest], {line, col}, edits) do
    case {diff, rest} do
      {{:eq, str}, _} ->
        myers_diff_to_text_edits(rest, advance_pos({line, col}, str), edits)
      {{:ins, str}, _} ->
        # Pure insertion: a zero-width range at the current cursor.
        edit = %{"range" => range(line, col, line, col), "newText" => str}
        myers_diff_to_text_edits(rest, {line, col}, [edit | edits])
      {{:del, del_str}, [{:ins, ins_str} | rest]} ->
        {end_line, end_col} = advance_pos({line, col}, del_str)
        edit = %{"range" => range(line, col, end_line, end_col), "newText" => ins_str}
        myers_diff_to_text_edits(rest, {end_line, end_col}, [edit | edits])
      {{:del, str}, _} ->
        {end_line, end_col} = advance_pos({line, col}, str)
        edit = %{"range" => range(line, col, end_line, end_col), "newText" => ""}
        myers_diff_to_text_edits(rest, {end_line, end_col}, [edit | edits])
    end
  end

  # Advances the {line, col} cursor over `str`, grapheme by grapheme.
  defp advance_pos({line, col}, str) do
    Enum.reduce(String.split(str, "", trim: true), {line, col}, fn char, {line, col} ->
      if char in ["\n", "\r"] do
        {line + 1, 0}
      else
        # LSP contentChanges positions are based on UTF-16 string representation
        # https://microsoft.github.io/language-server-protocol/specification#textDocuments
        {line, col + div(byte_size(:unicode.characters_to_binary(char, :utf8, :utf16)), 2)}
      end
    end)
  end
end
| 34.90566
| 96
| 0.628649
|
0303f5f85e461455d5ad95efb2fd43d8a2e41438
| 1,993
|
ex
|
Elixir
|
clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_audit_log_config.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | null | null | null |
clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_audit_log_config.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_audit_log_config.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated API model (see file header): comments only, no code changes.
defmodule GoogleApi.Apigee.V1.Model.GoogleIamV1AuditLogConfig do
  @moduledoc """
  Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging.
  ## Attributes
  * `exemptedMembers` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members.
  * `logType` (*type:* `String.t`, *default:* `nil`) - The log type that this config enables.
  """
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :exemptedMembers => list(String.t()),
          :logType => String.t()
        }

  # Field declarations consumed by GoogleApi.Gax.ModelBase (see moduledoc
  # for the attributes' semantics).
  field(:exemptedMembers, type: :list)
  field(:logType)
end
# Delegates Poison decoding to the generated model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleIamV1AuditLogConfig do
  def decode(value, options) do
    GoogleApi.Apigee.V1.Model.GoogleIamV1AuditLogConfig.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleIamV1AuditLogConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.86
| 316
| 0.732564
|
0304022c4d0e070999c2366b6af8aa051ec2e5f9
| 819
|
ex
|
Elixir
|
lib/app/storage.ex
|
drumser/markovich
|
387d95b3d5cbc31f3c43dfa00906048ee8c29086
|
[
"MIT"
] | null | null | null |
lib/app/storage.ex
|
drumser/markovich
|
387d95b3d5cbc31f3c43dfa00906048ee8c29086
|
[
"MIT"
] | null | null | null |
lib/app/storage.ex
|
drumser/markovich
|
387d95b3d5cbc31f3c43dfa00906048ee8c29086
|
[
"MIT"
] | null | null | null |
defmodule App.Storage do
use GenServer
require Logger
def start_link do
Logger.log(:info, "Started storage")
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def init(:ok) do
CubDB.start_link(data_dir: Path.join([App.Utils.get_data_dir(), "cubdb"]))
end
def handle_cast({:put, key, value}, db) do
CubDB.put(db, key, value)
{:noreply, db}
end
def handle_call({:get, key}, _from, db) do
{:reply, CubDB.get(db, key), db}
end
def put(key, value) do
GenServer.cast(__MODULE__, {:put, key, value})
end
def get(key, default \\ nil) do
GenServer.call(__MODULE__, {:get, key}) || default
end
def inc(key) do
current_value = (get(key) || 0) + 1
put(key, current_value)
current_value
end
def reset(key) do
put(key, 0)
end
end
| 19.5
| 78
| 0.638584
|
030434e5dda2f52555d5d1b367c7341e15f65c1c
| 1,973
|
ex
|
Elixir
|
clients/health_care/lib/google_api/health_care/v1beta1/model/deidentify_summary.ex
|
ukrbublik/elixir-google-api
|
364cec36bc76f60bec94cbcad34844367a29d174
|
[
"Apache-2.0"
] | null | null | null |
clients/health_care/lib/google_api/health_care/v1beta1/model/deidentify_summary.ex
|
ukrbublik/elixir-google-api
|
364cec36bc76f60bec94cbcad34844367a29d174
|
[
"Apache-2.0"
] | null | null | null |
clients/health_care/lib/google_api/health_care/v1beta1/model/deidentify_summary.ex
|
ukrbublik/elixir-google-api
|
364cec36bc76f60bec94cbcad34844367a29d174
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated API model (see file header): comments only, no code changes.
defmodule GoogleApi.HealthCare.V1beta1.Model.DeidentifySummary do
  @moduledoc """
  Contains a detailed summary of the Deidentify operation.
  ## Attributes
  * `failureResourceCount` (*type:* `String.t`, *default:* `nil`) - Number of resources that failed to process. The failures might be caused by: * Invalid user input data * Transient errors that could be skipped
  * `successResourceCount` (*type:* `String.t`, *default:* `nil`) - Number of resources successfully processed.
  * `successStoreCount` (*type:* `String.t`, *default:* `nil`) - Number of stores successfully processed.
  """
  use GoogleApi.Gax.ModelBase

  # All counts are typed String.t per the spec above (numbers arrive as
  # strings in the JSON payload).
  @type t :: %__MODULE__{
          :failureResourceCount => String.t(),
          :successResourceCount => String.t(),
          :successStoreCount => String.t()
        }

  field(:failureResourceCount)
  field(:successResourceCount)
  field(:successStoreCount)
end
# Delegates Poison decoding to the generated model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.DeidentifySummary do
  def decode(value, options) do
    GoogleApi.HealthCare.V1beta1.Model.DeidentifySummary.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.DeidentifySummary do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.226415
| 213
| 0.737962
|
0304571d370cf2d91b725c0bec2b57fe186eb2e8
| 453
|
ex
|
Elixir
|
lib/ellie_web/graphql/types/project_id.ex
|
HenkPoley/ellie
|
045212b56142341fc95b79659c3ca218b0d5d282
|
[
"BSD-3-Clause"
] | 377
|
2018-04-05T03:36:00.000Z
|
2022-03-30T19:12:44.000Z
|
lib/ellie_web/graphql/types/project_id.ex
|
HenkPoley/ellie
|
045212b56142341fc95b79659c3ca218b0d5d282
|
[
"BSD-3-Clause"
] | 91
|
2018-05-24T21:56:06.000Z
|
2022-02-26T03:54:04.000Z
|
lib/ellie_web/graphql/types/project_id.ex
|
HenkPoley/ellie
|
045212b56142341fc95b79659c3ca218b0d5d282
|
[
"BSD-3-Clause"
] | 34
|
2018-05-29T03:54:35.000Z
|
2022-01-13T07:12:46.000Z
|
defmodule EllieWeb.Graphql.Types.PrettyId do
  use Absinthe.Schema.Notation
  alias Ellie.Types.PrettyId

  # GraphQL scalar for project ids: serialized with to_string/1, parsed back
  # through PrettyId.cast/1.
  # NOTE(review): the scalar identifier is :project_id while its GraphQL
  # name is "PrettyId" — confirm that naming split is intentional.
  scalar :project_id, name: "PrettyId" do
    serialize &to_string/1
    parse &cast_from_string/1
  end

  # String input is delegated to PrettyId.cast/1 (expected to return
  # {:ok, _} or :error — confirm in Ellie.Types.PrettyId).
  defp cast_from_string(%Absinthe.Blueprint.Input.String{value: value}) do
    PrettyId.cast(value)
  end
  # An explicit GraphQL null is accepted as nil.
  defp cast_from_string(%Absinthe.Blueprint.Input.Null{}) do
    {:ok, nil}
  end
  # Any other input node type is rejected. Clause order matters here.
  defp cast_from_string(_) do
    :error
  end
end
| 20.590909
| 74
| 0.730684
|
030471463c5feafbffa724ffcbb01d7301f59cff
| 1,892
|
ex
|
Elixir
|
lib/evercam_models/analytics/geese_counting.ex
|
mekongit/evercam_models
|
7c36d52f2b47ffc89a3eaca65c2e353716fafacc
|
[
"MIT"
] | null | null | null |
lib/evercam_models/analytics/geese_counting.ex
|
mekongit/evercam_models
|
7c36d52f2b47ffc89a3eaca65c2e353716fafacc
|
[
"MIT"
] | null | null | null |
lib/evercam_models/analytics/geese_counting.ex
|
mekongit/evercam_models
|
7c36d52f2b47ffc89a3eaca65c2e353716fafacc
|
[
"MIT"
] | null | null | null |
defmodule Analytics.GeeseCounting do
  @moduledoc """
  The Reports context.
  """
  use Evercam.Schema
  import Ecto.Query, warn: false
  alias Analytics.GeeseCounting

  # One row per analyzed snapshot: camera exid, snapshot date (stored as a
  # string) and the detected count.
  schema "geese_counting" do
    field :cameraex, :string
    field :snapshot_date, :string
    field :number, :integer
    timestamps()
  end

  # NOTE(review): queries RegionOfInterest, which is neither aliased nor
  # defined in this module — confirm the bare module name resolves.
  def get_all_roi(exid) do
    RegionOfInterest
    |> where(cameraex: ^exid)
    |> order_by(asc: :from_date)
    |> AnalyticsRepo.all
  end

  # Distinct months for one camera, as prefixes of snapshot_date.
  # NOTE(review): substring(?, 0, 8) appears intended to extract "YYYY-MM";
  # in Postgres positions start at 1, so `from 0 for 8` yields the first 7
  # characters — confirm against the stored date format.
  def geese_available_months(exid) do
    query = from g in GeeseCounting,
      select: fragment("substring(?, ?, ?)", g.snapshot_date, 0, 8),
      where: g.cameraex == ^exid,
      group_by: [fragment("substring(?, ?, ?)", g.snapshot_date, 0, 8)],
      order_by: [fragment("substring(?, ?, ?)", g.snapshot_date, 0, 8)]
    query
    |> AnalyticsRepo.all
  end

  # Same month extraction as geese_available_months/1, across all cameras.
  def months_all_cameras() do
    query = from g in GeeseCounting,
      select: fragment("substring(?, ?, ?)", g.snapshot_date, 0, 8),
      group_by: [fragment("substring(?, ?, ?)", g.snapshot_date, 0, 8)],
      order_by: [fragment("substring(?, ?, ?)", g.snapshot_date, 0, 8)]
    query
    |> AnalyticsRepo.all
  end

  # All rows for one camera in the given year/month, oldest first. The
  # month is interpolated into an ILIKE pattern, so it must match the
  # stored zero-padded format.
  def geese_data_camera(exid, year, month) do
    date_value = "#{year}-#{month}%"
    query = from g in GeeseCounting,
      where: (g.cameraex == ^exid) and (ilike(g.snapshot_date, ^date_value))
    query
    |> order_by(asc: :snapshot_date)
    |> AnalyticsRepo.all
  end

  # Same as geese_data_camera/3 but without the camera filter.
  def detections_all_cameras(year, month) do
    date_value = "#{year}-#{month}%"
    query = from g in GeeseCounting,
      where: ilike(g.snapshot_date, ^date_value)
    query
    |> order_by(asc: :snapshot_date)
    |> AnalyticsRepo.all
  end

  # Raises Ecto.NoResultsError when the id is absent.
  def get_geese_counting!(id), do: AnalyticsRepo.get!(GeeseCounting, id)

  @doc false
  def changeset(%GeeseCounting{} = geese_counting, attrs) do
    geese_counting
    |> cast(attrs, [:cameraex, :snapshot_date, :number])
  end
end
| 26.277778
| 75
| 0.650634
|
0304b05e87fc998eaef95fa7b3991aaf5c74eb5f
| 3,242
|
ex
|
Elixir
|
lib/graphvix/subgraph.ex
|
ssajnani/graphvix
|
e5fb0fbceb5b9ab72941633b702d414302868acc
|
[
"MIT"
] | 47
|
2016-10-02T21:44:30.000Z
|
2022-02-21T18:01:40.000Z
|
lib/graphvix/subgraph.ex
|
ssajnani/graphvix
|
e5fb0fbceb5b9ab72941633b702d414302868acc
|
[
"MIT"
] | 17
|
2016-10-27T17:11:27.000Z
|
2021-12-23T13:19:39.000Z
|
lib/graphvix/subgraph.ex
|
ssajnani/graphvix
|
e5fb0fbceb5b9ab72941633b702d414302868acc
|
[
"MIT"
] | 10
|
2017-03-20T18:21:21.000Z
|
2021-07-25T14:34:28.000Z
|
defmodule Graphvix.Subgraph do
  @moduledoc """
  [Internal] Models a subgraph or cluster for inclusion in a graph.

  The functions included in this module are for internal use only. See

  * `Graphvix.Graph.add_subgraph/3`
  * `Graphvix.Graph.add_cluster/3`

  for the public interface for creating and including subgraphs and clusters.
  """
  import Graphvix.DotHelpers

  # Struct fields:
  #   * `id`                  - string id prefixed with "cluster" or "subgraph"
  #   * `vertex_ids`          - ids of the vertices this subgraph contains
  #   * `global_properties`   - default attributes applied to contained nodes/edges
  #   * `subgraph_properties` - attributes of the subgraph itself
  #   * `is_cluster`          - true when the subgraph is rendered as a DOT cluster
  defstruct [
    id: nil,
    vertex_ids: [],
    global_properties: [node: [], edge: []],
    subgraph_properties: [],
    is_cluster: false
  ]

  @doc false
  # Builds a subgraph struct. `properties` may carry `:node` and `:edge`
  # keyword lists (extracted into global_properties); every remaining key is
  # treated as a property of the subgraph itself.
  def new(id, vertex_ids, is_cluster \\ false, properties \\ []) do
    node_properties = Keyword.get(properties, :node, [])
    edge_properties = Keyword.get(properties, :edge, [])
    subgraph_properties = properties |> Keyword.delete(:node) |> Keyword.delete(:edge)
    %Graphvix.Subgraph{
      # DOT treats ids starting with "cluster" specially, hence the prefix.
      id: id_prefix(is_cluster) <> "#{id}",
      is_cluster: is_cluster,
      vertex_ids: vertex_ids,
      global_properties: [
        node: node_properties,
        edge: edge_properties
      ],
      subgraph_properties: subgraph_properties
    }
  end

  @doc false
  # Renders the subgraph to DOT notation, pulling vertex data out of the
  # graph's ETS vertex table. Empty sections are compacted away and the whole
  # body is indented one level for nesting inside the parent graph.
  def to_dot(subgraph, graph) do
    # digraph_tables/1 returns [vertex_table, edge_table, neighbour_table].
    [vtab, _, _] = Graphvix.Graph.digraph_tables(graph)
    vertices_from_graph = :ets.tab2list(vtab)
    [
      "subgraph #{subgraph.id} {",
      global_properties_to_dot(subgraph),
      subgraph_properties_to_dot(subgraph),
      subgraph_vertices_to_dot(subgraph.vertex_ids, vertices_from_graph),
      subgraph_edges_to_dot(subgraph, graph),
      "}"
    ] |> List.flatten
    |> compact()
    |> Enum.map(&indent/1)
    |> Enum.join("\n\n")
  end

  @doc false
  # Renders only those edges whose endpoints BOTH belong to this subgraph.
  # Vertex ids in the edge table have the shape [:"$v" | n] (digraph's
  # internal id format), which is destructured to the numeric id here.
  def subgraph_edges_to_dot(subgraph, graph) do
    subgraph
    |> edges_with_both_vertices_in_subgraph(graph)
    |> sort_elements_by_id()
    |> elements_to_dot(fn {_, [:"$v" | v1], [:"$v" | v2], attributes} ->
      "v#{v1} -> v#{v2} #{attributes_to_dot(attributes)}" |> String.trim |> indent
    end)
  end

  @doc false
  # True when both endpoint ids are members of this subgraph's vertex list.
  def both_vertices_in_subgraph?(vertex_ids, vid1, vid2) do
    vid1 in vertex_ids && vid2 in vertex_ids
  end

  ## Private

  # Renders the vertices belonging to this subgraph, each as "vN [attrs]".
  defp subgraph_vertices_to_dot(subgraph_vertex_ids, vertices_from_graph) do
    subgraph_vertex_ids
    |> vertices_in_this_subgraph(vertices_from_graph)
    |> sort_elements_by_id()
    |> elements_to_dot(fn {[_ | id] , attributes} ->
      [
        "v#{id}",
        attributes_to_dot(attributes)
      ] |> compact |> Enum.join(" ") |> indent
    end)
  end

  # Filters the full vertex table down to the rows this subgraph owns.
  defp vertices_in_this_subgraph(subgraph_vertex_ids, vertices_from_graph) do
    vertices_from_graph
    |> Enum.filter(fn {vid, _attributes} -> vid in subgraph_vertex_ids end)
  end

  # Renders the subgraph's own key/value properties, one per line.
  defp subgraph_properties_to_dot(%{subgraph_properties: properties}) do
    properties
    |> Enum.map(fn {key, value} ->
      indent(attribute_to_dot(key, value))
    end)
    |> compact()
    |> return_joined_list_or_nil()
  end

  # Reads the edge table and keeps edges fully contained in this subgraph.
  defp edges_with_both_vertices_in_subgraph(%{vertex_ids: vertex_ids}, graph) do
    [_, etab, _] = Graphvix.Graph.digraph_tables(graph)
    edges = :ets.tab2list(etab)
    Enum.filter(edges, fn {_, vid1, vid2, _} ->
      both_vertices_in_subgraph?(vertex_ids, vid1, vid2)
    end)
  end

  # Graphviz recognizes the "cluster" prefix and draws a bounding box.
  defp id_prefix(_is_cluster = true), do: "cluster"
  defp id_prefix(_is_cluster = false), do: "subgraph"
end
| 29.207207
| 86
| 0.673041
|
0304bc75fab6f9c1da60f74c3f7e3306d0a3ec42
| 1,702
|
ex
|
Elixir
|
lib/clockr/masterclock_crypt.ex
|
powerhome/clockr
|
751ceb03885a1a0a0f20e7443e961d6f1ca0a091
|
[
"Apache-2.0"
] | null | null | null |
lib/clockr/masterclock_crypt.ex
|
powerhome/clockr
|
751ceb03885a1a0a0f20e7443e961d6f1ca0a091
|
[
"Apache-2.0"
] | null | null | null |
lib/clockr/masterclock_crypt.ex
|
powerhome/clockr
|
751ceb03885a1a0a0f20e7443e961d6f1ca0a091
|
[
"Apache-2.0"
] | null | null | null |
defmodule Clockr.MasterclockCrypt do
  @moduledoc """
  Utilities for managing encrypted Masterclock packets
  """
  use Bitwise

  # Fixed 17-byte key for the Masterclock XOR stream cipher.
  @prog_key {0x74, 0x12, 0x02, 0xfb, 0xcc, 0x24, 0x5b, 0x82, 0x61, 0xe7, 0x3f, 0x9a, 0x26, 0x7c, 0xd3, 0xa0, 0x42}

  @doc """
  Transforms a list of bytes using the Masterclock algorithm.

  `data` should be a list of bytes to be encrypted. Because the transform is a
  pure XOR stream, applying it twice restores the original bytes.

  Returns a list of transformed bytes, the same length as the input.

  ## Examples

      iex> input = [0x23, 0x81, 0xD7, 0x65, 0x10, 0xB3, 0x2F, 0xE1, 0x00, 0x00, 0x14, 0xC7, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x0C, 0x22, 0x38]
      iex> Clockr.MasterclockCrypt.mccrypt(input)
      [0x56, 0x91, 0xD6, 0x9A, 0xD9, 0x91, 0x73, 0x6B, 0x68, 0xED, 0x20, 0x51, 0x2B, 0x72, 0xDC, 0x30, 0x53, 0x66, 0x01, 0x16, 0xEE, 0xDA, 0x33, 0x43, 0x9B, 0x7B, 0xFC, 0x23, 0x87, 0x38, 0x63, 0xF3, 0x81, 0x60, 0x57, 0x36, 0x27, 0xDD, 0xEB, 0x0C, 0x72, 0xA8, 0x4A, 0xCB, 0x10, 0xB8, 0x2B, 0x74]
  """
  # Fixed: the previous @spec declared a binary() return and the doc said
  # "binary string", but the function returns a list of bytes (see doctest).
  @spec mccrypt([byte()]) :: [byte()]
  def mccrypt(data) do
    # map_reduce keeps the output in order while threading the pad/key
    # counters, avoiding the O(n^2) `acc ++ [byte]` of the old reduce.
    {bytes, _counters} =
      Enum.map_reduce(data, {1, 0}, fn inbyte, {padcnt, keycnt} ->
        {outbyte, newpadcnt, newkeycnt} = mccrypt_byte({inbyte, padcnt, keycnt})
        {outbyte, {newpadcnt, newkeycnt}}
      end)

    bytes
  end

  # XORs one byte with the running pad counter and the current key byte, then
  # advances both counters: the pad counter wraps over 1..254 and the key
  # counter wraps over the 17 entries of @prog_key.
  defp mccrypt_byte({byte, padcnt, keycnt}) do
    # bxor/2 replaces the deprecated ^^^ operator; same semantics.
    crypted_byte = byte |> bxor(padcnt) |> bxor(elem(@prog_key, keycnt))
    newpadcnt = if(padcnt < 254, do: padcnt + 1, else: 1)
    newkeycnt = if(keycnt + 1 >= tuple_size(@prog_key), do: 0, else: keycnt + 1)
    {crypted_byte, newpadcnt, newkeycnt}
  end
end
| 40.52381
| 309
| 0.645711
|
0304d690067a8d7a8f938c8fc8e7294e31214187
| 803
|
ex
|
Elixir
|
sample_app/lib/my_app_web/requests/shared/address.ex
|
smanolloff/phoenix_params
|
90ce2f08b04db11a267ddadbf72bb645ceac275f
|
[
"MIT"
] | 15
|
2018-09-05T13:36:40.000Z
|
2021-03-18T15:00:12.000Z
|
sample_app/lib/my_app_web/requests/shared/address.ex
|
smanolloff/phoenix_params
|
90ce2f08b04db11a267ddadbf72bb645ceac275f
|
[
"MIT"
] | null | null | null |
sample_app/lib/my_app_web/requests/shared/address.ex
|
smanolloff/phoenix_params
|
90ce2f08b04db11a267ddadbf72bb645ceac275f
|
[
"MIT"
] | 1
|
2018-11-15T09:33:53.000Z
|
2018-11-15T09:33:53.000Z
|
defmodule MyAppWeb.Requests.Shared.Address do
  use PhoenixParams, error_view: MyAppWeb.ErrorView

  typedef Locale, &__MODULE__.coerce_locale/1

  param :country,
        type: String,
        required: true

  param :city,
        type: String

  param :street_name,
        type: String

  param :street_number,
        type: Integer

  param :locale,
        type: Locale

  #
  # Coercers for our custom type
  #

  # Absent values pass through untouched.
  def coerce_locale(nil), do: nil

  # Anything that is not a string cannot be a locale.
  def coerce_locale(value) when not is_bitstring(value), do: {:error, "invalid locale"}

  # Accepts exactly "xx-YY" (lowercase language, uppercase country) and splits
  # it into its two components; anything else is rejected.
  def coerce_locale(value) do
    case Regex.run(~r/\A([a-z]{2})-([A-Z]{2})\z/, value) do
      [_full, language, country] ->
        %{
          language: language,
          country: country
        }

      nil ->
        {:error, "invalid locale"}
    end
  end
end
| 20.075
| 79
| 0.615193
|
0304dc3d29568f124cd79374407ed1ea214c8b55
| 68
|
exs
|
Elixir
|
test/test_helper.exs
|
prio101/bosque
|
3b9d0a789a4c33dce829d5cab9d198145f28b8fd
|
[
"MIT"
] | null | null | null |
test/test_helper.exs
|
prio101/bosque
|
3b9d0a789a4c33dce829d5cab9d198145f28b8fd
|
[
"MIT"
] | null | null | null |
test/test_helper.exs
|
prio101/bosque
|
3b9d0a789a4c33dce829d5cab9d198145f28b8fd
|
[
"MIT"
] | null | null | null |
# Boot the ExUnit test runner before test files are loaded.
ExUnit.start()

# Run the SQL sandbox in :manual mode: each test must explicitly check out a
# database connection (no shared/auto checkout between tests).
Ecto.Adapters.SQL.Sandbox.mode(Bosque.Repo, :manual)
| 22.666667
| 52
| 0.779412
|
0304dea768562001f5026ad79ee07ee3c19cf089
| 599
|
exs
|
Elixir
|
apps/tai/test/tai/events/lock_account_ok_test.exs
|
chrism2671/tai-1
|
847827bd23908adfad4a82c83d5295bdbc022796
|
[
"MIT"
] | null | null | null |
apps/tai/test/tai/events/lock_account_ok_test.exs
|
chrism2671/tai-1
|
847827bd23908adfad4a82c83d5295bdbc022796
|
[
"MIT"
] | null | null | null |
apps/tai/test/tai/events/lock_account_ok_test.exs
|
chrism2671/tai-1
|
847827bd23908adfad4a82c83d5295bdbc022796
|
[
"MIT"
] | 1
|
2020-05-03T23:32:11.000Z
|
2020-05-03T23:32:11.000Z
|
defmodule Tai.Events.LockAccountOkTest do
  use ExUnit.Case, async: true

  # Verifies that Tai.LogEvent.to_data/1 serializes a LockAccountOk event:
  # the Decimal fields (min/max/qty) become plain strings while the atom
  # fields are passed through unchanged.
  test ".to_data/1 transforms decimal data to strings" do
    event = %Tai.Events.LockAccountOk{
      venue_id: :my_venue,
      credential_id: :my_credential,
      asset: :btc,
      min: Decimal.new("0.1"),
      max: Decimal.new("0.3"),
      qty: Decimal.new("0.2")
    }

    assert Tai.LogEvent.to_data(event) == %{
             venue_id: :my_venue,
             credential_id: :my_credential,
             asset: :btc,
             min: "0.1",
             max: "0.3",
             qty: "0.2"
           }
  end
end
| 24.958333
| 57
| 0.542571
|
030513018a3537fe475b60510a1f39171cece577
| 2,403
|
exs
|
Elixir
|
apps/omg_watcher/test/db/eth_event_db_test.exs
|
Pongch/elixir-omg
|
8a33c246898b49cba62b847e0989d9b6c89f5106
|
[
"Apache-2.0"
] | null | null | null |
apps/omg_watcher/test/db/eth_event_db_test.exs
|
Pongch/elixir-omg
|
8a33c246898b49cba62b847e0989d9b6c89f5106
|
[
"Apache-2.0"
] | null | null | null |
apps/omg_watcher/test/db/eth_event_db_test.exs
|
Pongch/elixir-omg
|
8a33c246898b49cba62b847e0989d9b6c89f5106
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.DB.EthEventDBTest do
  use ExUnitFixtures
  # async: false — these tests share the Phoenix/Ecto sandbox fixture.
  use ExUnit.Case, async: false
  use OMG.API.Fixtures

  alias OMG.API.Crypto
  alias OMG.API.Utxo
  alias OMG.Watcher.DB.EthEventDB
  alias OMG.Watcher.DB.TxOutputDB

  require Utxo

  # Ether is represented by the zero address as its currency id.
  @eth Crypto.zero_address()

  describe "EthEvent database" do
    @tag fixtures: [:phoenix_ecto_sandbox]
    # A single deposit insert must create both a deposit EthEvent row and the
    # corresponding TxOutput (utxo) row linked via the deposit hash.
    test "insert deposits: creates deposit event and utxo" do
      owner = <<1::160>>
      EthEventDB.insert_deposits([%{blknum: 1, owner: owner, currency: @eth, amount: 1, hash: "hash1"}])

      [event] = EthEventDB.get_all()
      assert %EthEventDB{deposit_blknum: 1, deposit_txindex: 0, event_type: :deposit, hash: "hash1"} = event

      [utxo] = TxOutputDB.get_all()
      assert %TxOutputDB{owner: ^owner, currency: @eth, amount: 1, creating_deposit: "hash1"} = utxo
    end

    @tag fixtures: [:phoenix_ecto_sandbox, :alice]
    # Multiple deposits for one owner are retrievable individually by hash,
    # and the owner's utxo set references all creating deposit hashes.
    test "insert deposits: creates deposits and retrieves them by hash", %{alice: alice} do
      [{:ok, _evnt1}, {:ok, _evnt2}, {:ok, _evnt3}] =
        EthEventDB.insert_deposits([
          %{blknum: 1, owner: alice.addr, currency: @eth, amount: 1, hash: "hash1"},
          %{blknum: 1000, owner: alice.addr, currency: @eth, amount: 2, hash: "hash2"},
          %{blknum: 2013, owner: alice.addr, currency: @eth, amount: 3, hash: "hash3"}
        ])

      assert %EthEventDB{deposit_blknum: 1, deposit_txindex: 0, event_type: :deposit} = EthEventDB.get("hash1")
      assert %EthEventDB{deposit_blknum: 1000, deposit_txindex: 0, event_type: :deposit} = EthEventDB.get("hash2")
      assert %EthEventDB{deposit_blknum: 2013, deposit_txindex: 0, event_type: :deposit} = EthEventDB.get("hash3")

      assert ["hash1", "hash2", "hash3"] == TxOutputDB.get_utxos(alice.addr) |> Enum.map(& &1.creating_deposit)
    end
  end
end
| 39.393443
| 114
| 0.692468
|
03051cd21e4822b75c6ab937890e8d01826d3d7c
| 3,558
|
exs
|
Elixir
|
test/oban/migrations_test.exs
|
nathanl/oban
|
e7b22c8fb5a9f7fc6fdeecdfdff3c2eb9b4cb39b
|
[
"Apache-2.0"
] | null | null | null |
test/oban/migrations_test.exs
|
nathanl/oban
|
e7b22c8fb5a9f7fc6fdeecdfdff3c2eb9b4cb39b
|
[
"Apache-2.0"
] | null | null | null |
test/oban/migrations_test.exs
|
nathanl/oban
|
e7b22c8fb5a9f7fc6fdeecdfdff3c2eb9b4cb39b
|
[
"Apache-2.0"
] | null | null | null |
defmodule Oban.MigrationsTest do
  use Oban.Case, async: true

  import Oban.Migrations, only: [initial_version: 0, current_version: 0, migrated_version: 2]

  # How many random {up, down} version pairs the arbitrary-versions test runs.
  @arbitrary_checks 20

  # Migration whose target versions are read from the application env at run
  # time, letting each test iteration pick an arbitrary up/down version.
  defmodule StepMigration do
    use Ecto.Migration

    def up do
      Oban.Migrations.up(version: up_version(), prefix: "migrating")
    end

    def down do
      Oban.Migrations.down(version: down_version(), prefix: "migrating")
    end

    defp up_version do
      Application.get_env(:oban, :up_version)
    end

    def down_version do
      Application.get_env(:oban, :down_version)
    end
  end

  # Migration that uses Oban's defaults (migrate all the way up/down).
  defmodule DefaultMigration do
    use Ecto.Migration

    def up do
      Oban.Migrations.up(prefix: "migrating")
    end

    def down do
      Oban.Migrations.down(prefix: "migrating")
    end
  end

  # Far-future Ecto migration version so these test rows never collide with
  # real project migrations in schema_migrations.
  @base_version 20_300_000_000_000

  test "migrating up and down between specific versions" do
    # Walk up one Oban version at a time and confirm each step is recorded.
    for up <- initial_version()..current_version() do
      Application.put_env(:oban, :up_version, up)

      assert :ok = Ecto.Migrator.up(Repo, @base_version + up, StepMigration)
      assert migrated_version() == up
    end

    assert table_exists?("oban_jobs")
    assert table_exists?("oban_beats")
    assert migrated_version() == current_version()

    # Step back down to version 2: oban_beats is dropped, oban_jobs remains.
    Application.put_env(:oban, :down_version, 2)
    assert :ok = Ecto.Migrator.down(Repo, @base_version + 2, StepMigration)

    assert table_exists?("oban_jobs")
    refute table_exists?("oban_beats")
    assert migrated_version() == 1

    # And down to version 1: everything is gone.
    Application.put_env(:oban, :down_version, 1)
    assert :ok = Ecto.Migrator.down(Repo, @base_version + 1, StepMigration)

    refute table_exists?("oban_jobs")
    refute table_exists?("oban_beats")
  after
    clear_migrated()
  end

  test "migrating up and down between default versions" do
    assert :ok = Ecto.Migrator.up(Repo, @base_version, DefaultMigration)

    assert table_exists?("oban_jobs")
    assert table_exists?("oban_beats")
    assert migrated_version() == current_version()

    # Migrating once more to replicate multiple migrations that don't specify a version.
    assert :ok = Ecto.Migrator.up(Repo, @base_version + 1, DefaultMigration)

    assert :ok = Ecto.Migrator.down(Repo, @base_version + 1, DefaultMigration)

    refute table_exists?("oban_jobs")
    refute table_exists?("oban_beats")

    # Migrating once more to replicate multiple migrations that don't specify a version.
    assert :ok = Ecto.Migrator.down(Repo, @base_version, DefaultMigration)
  after
    clear_migrated()
  end

  test "migrating up and down between arbitrary versions" do
    # Pair each possible up-target (2..current) with a lower down-target
    # (1..current-1), shuffle, and exercise a bounded random sample.
    ups = 2..current_version()
    dns = 1..(current_version() - 1)

    ups
    |> Enum.zip(dns)
    |> Enum.shuffle()
    |> Enum.take(@arbitrary_checks)
    |> Enum.each(fn {up, down} ->
      Application.put_env(:oban, :up_version, up)
      Application.put_env(:oban, :down_version, down)

      assert :ok = Ecto.Migrator.up(Repo, @base_version, StepMigration)
      assert :ok = Ecto.Migrator.down(Repo, @base_version, StepMigration)

      clear_migrated()
    end)
  end

  # Oban's migrated version within the "migrating" schema prefix.
  defp migrated_version do
    migrated_version(Repo, "migrating")
  end

  # Checks pg_tables for the given table inside the "migrating" schema.
  defp table_exists?(table) do
    query = """
    SELECT EXISTS (
      SELECT 1
      FROM pg_tables
      WHERE schemaname = 'migrating'
      AND tablename = '#{table}'
    )
    """

    {:ok, %{rows: [[bool]]}} = Repo.query(query)

    bool
  end

  # Removes the fake migration rows and drops the scratch schema so each
  # test starts from a clean slate.
  defp clear_migrated do
    Repo.query("DELETE FROM schema_migrations WHERE version >= #{@base_version}")
    Repo.query("DROP SCHEMA IF EXISTS migrating CASCADE")
  end
end
| 26.552239
| 93
| 0.68353
|
030522bd1512f8436ec7106e69e354bd6d16b5e3
| 5,402
|
ex
|
Elixir
|
lib/logger/lib/logger/config.ex
|
xtian/elixir
|
c680eb1a3992309c272e8f808e15990ea5318d6e
|
[
"Apache-2.0"
] | 1
|
2017-07-25T21:46:25.000Z
|
2017-07-25T21:46:25.000Z
|
lib/logger/lib/logger/config.ex
|
xtian/elixir
|
c680eb1a3992309c272e8f808e15990ea5318d6e
|
[
"Apache-2.0"
] | null | null | null |
lib/logger/lib/logger/config.ex
|
xtian/elixir
|
c680eb1a3992309c272e8f808e15990ea5318d6e
|
[
"Apache-2.0"
] | 1
|
2017-07-25T21:46:48.000Z
|
2017-07-25T21:46:48.000Z
|
defmodule Logger.Config do
  @moduledoc false

  @behaviour :gen_event

  # Registered handler name and ETS table name (both this module).
  @name __MODULE__
  @table __MODULE__
  # ETS keys: cached runtime state, and the list of deleted handlers.
  @data :__data__
  @deleted_handlers :__deleted_handlers__

  def start_link do
    GenServer.start_link(__MODULE__, :ok, name: @name)
  end

  # Synchronously applies runtime configuration and recomputes cached state.
  def configure(options) do
    :gen_event.call(Logger, @name, {:configure, options})
  end

  def add_translator(translator) do
    :gen_event.call(Logger, @name, {:add_translator, translator})
  end

  def remove_translator(translator) do
    :gen_event.call(Logger, @name, {:remove_translator, translator})
  end

  # Returns handler specs in {Logger, module, backend} form for each
  # configured backend.
  def handlers() do
    for backend <- backends() do
      {Logger, translate_backend(backend), backend}
    end
  end

  def backends() do
    :gen_event.call(Logger, @name, :backends)
  end

  def add_backend(backend) do
    :gen_event.call(Logger, @name, {:add_backend, backend})
  end

  def remove_backend(backend) do
    :gen_event.call(Logger, @name, {:remove_backend, backend})
  end

  # :console is a shorthand for the bundled console backend module.
  def translate_backend(:console), do: Logger.Backends.Console
  def translate_backend(other), do: other

  # Fast-path read of the cached config from ETS. Both a missing table
  # (ArgumentError) and a nil entry mean the :logger app is not running.
  def __data__() do
    try do
      :ets.lookup_element(@table, @data, 2)
    rescue
      ArgumentError ->
        raise "cannot use Logger, the :logger application is not running"
    else
      nil ->
        raise "cannot use Logger, the :logger application is not running"
      data ->
        data
    end
  end

  # Returns the deleted-handlers list, or [] when the table does not exist.
  def deleted_handlers() do
    try do
      :ets.lookup_element(@table, @deleted_handlers, 2)
    rescue
      ArgumentError ->
        []
    end
  end

  # Swaps the deleted-handlers list; the reply is the previous list.
  def deleted_handlers(handlers) do
    :gen_event.call(Logger, @name, {:deleted_handlers, handlers})
  end

  # Creates the public ETS table holding the cached state. read_concurrency
  # is set because __data__/0 is called on every log statement.
  def new() do
    tab = :ets.new(@table, [:named_table, :public, {:read_concurrency, true}])
    true = :ets.insert_new(@table, [{@data, nil}, {@deleted_handlers, []}])
    tab
  end

  def delete(@table) do
    :ets.delete(@table)
  end

  ## Callbacks

  def init(_) do
    # Use previous data if available in case this handler crashed.
    state = :ets.lookup_element(@table, @data, 2) || compute_state(:async)
    {:ok, state}
  end

  def handle_event({_type, gl, _msg} = event, state) when node(gl) != node() do
    # Cross node messages are always async which also
    # means this handler won't crash in case Logger
    # is not installed in the other node.
    :gen_event.notify({Logger, node(gl)}, event)
    {:ok, state}
  end

  # On every local event, re-evaluate sync/async mode based on the message
  # queue length and persist the state only when the mode flips.
  def handle_event(_event, %{mode: mode} = state) do
    case compute_mode(state) do
      ^mode ->
        {:ok, state}
      new_mode ->
        {:ok, persist(%{state | mode: new_mode})}
    end
  end

  def handle_call(:backends, state) do
    {:ok, Application.get_env(:logger, :backends), state}
  end

  # Writes each option into the app env, then rebuilds the cached state
  # (keeping the current sync/async mode as the starting point).
  def handle_call({:configure, options}, state) do
    Enum.each options, fn {key, value} ->
      Application.put_env(:logger, key, value)
    end
    {:ok, :ok, compute_state(state.mode)}
  end

  # Prepends the translator, removing any previous occurrence first.
  def handle_call({:add_translator, translator}, state) do
    state = update_translators(state, fn t -> [translator | List.delete(t, translator)] end)
    {:ok, :ok, state}
  end

  def handle_call({:remove_translator, translator}, state) do
    state = update_translators(state, &List.delete(&1, translator))
    {:ok, :ok, state}
  end

  # Backends live only in the app env, so the handler state is unchanged.
  def handle_call({:add_backend, backend}, state) do
    update_backends(&[backend | List.delete(&1, backend)])
    {:ok, :ok, state}
  end

  def handle_call({:remove_backend, backend}, state) do
    update_backends(&List.delete(&1, backend))
    {:ok, :ok, state}
  end

  # Atomically swaps the deleted-handlers entry, replying with the old list.
  def handle_call({:deleted_handlers, new}, state) do
    old = deleted_handlers()
    true = :ets.update_element(@table, @deleted_handlers, {2, new})
    {:ok, old, state}
  end

  def handle_info(_msg, state) do
    {:ok, state}
  end

  def terminate(_reason, _state) do
    :ok
  end

  def code_change(_old, state, _extra) do
    {:ok, state}
  end

  ## Helpers

  # Hysteresis between sync and async logging: switch to :sync above
  # sync_threshold, back to :async below async_threshold (75% of sync).
  defp compute_mode(state) do
    {:message_queue_len, len} = Process.info(self(), :message_queue_len)

    cond do
      len > state.sync_threshold and state.mode == :async ->
        :sync
      len < state.async_threshold and state.mode == :sync ->
        :async
      true ->
        state.mode
    end
  end

  defp update_backends(fun) do
    backends = fun.(Application.get_env(:logger, :backends, []))
    Application.put_env(:logger, :backends, backends)
  end

  # Translators are kept both in the app env and in the cached ETS state.
  defp update_translators(%{translators: translators} = state, fun) do
    translators = fun.(translators)
    Application.put_env(:logger, :translators, translators)
    persist %{state | translators: translators}
  end

  # Rebuilds the full cached state from the app env, then re-checks the
  # sync/async mode before persisting to ETS.
  defp compute_state(mode) do
    level = Application.get_env(:logger, :level)
    utc_log = Application.get_env(:logger, :utc_log)
    truncate = Application.get_env(:logger, :truncate)
    translators = Application.get_env(:logger, :translators)

    sync_threshold = Application.get_env(:logger, :sync_threshold)
    async_threshold = trunc(sync_threshold * 0.75)

    state = %{level: level, mode: mode, truncate: truncate,
              utc_log: utc_log, sync_threshold: sync_threshold,
              async_threshold: async_threshold, translators: translators}

    case compute_mode(state) do
      ^mode ->
        persist(state)
      new_mode ->
        persist(%{state | mode: new_mode})
    end
  end

  # Writes the state into ETS so __data__/0 readers see it, and returns it.
  defp persist(state) do
    :ets.update_element(@table, @data, {2, state})
    state
  end
end
| 25.601896
| 92
| 0.655128
|
03052b44dc74991d4385d69146fea0f5cb70d8ee
| 1,190
|
ex
|
Elixir
|
web/channels/user_socket.ex
|
ngnclht1102/trello-clone
|
669bf0ab765ca1d9735124f3f99cb245582b45fa
|
[
"MIT"
] | null | null | null |
web/channels/user_socket.ex
|
ngnclht1102/trello-clone
|
669bf0ab765ca1d9735124f3f99cb245582b45fa
|
[
"MIT"
] | null | null | null |
web/channels/user_socket.ex
|
ngnclht1102/trello-clone
|
669bf0ab765ca1d9735124f3f99cb245582b45fa
|
[
"MIT"
] | null | null | null |
defmodule TrelloRewriteMe.UserSocket do
  @moduledoc """
  Phoenix socket entry point. Accepts every websocket connection without
  authentication and declares no channels yet.
  """
  use Phoenix.Socket

  ## Channels
  # channel "room:*", TrelloRewriteMe.RoomChannel

  ## Transports
  transport :websocket, Phoenix.Transports.WebSocket
  # transport :longpoll, Phoenix.Transports.LongPoll

  # Socket params are passed from the client and can
  # be used to verify and authenticate a user. After
  # verification, you can put default assigns into
  # the socket that will be set for all channels, ie
  #
  #     {:ok, assign(socket, :user_id, verified_user_id)}
  #
  # To deny connection, return `:error`.
  #
  # See `Phoenix.Token` documentation for examples in
  # performing token verification on connect.
  #
  # Currently accepts every connection unconditionally; params are ignored.
  def connect(_params, socket) do
    {:ok, socket}
  end

  # Socket id's are topics that allow you to identify all sockets for a given user:
  #
  #     def id(socket), do: "users_socket:#{socket.assigns.user_id}"
  #
  # Would allow you to broadcast a "disconnect" event and terminate
  # all active sockets and channels for a given user:
  #
  #     TrelloRewriteMe.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
  #
  # Returning `nil` makes this socket anonymous.
  def id(_socket), do: nil
end
| 31.315789
| 88
| 0.707563
|
0305412ca982a16f1b9d62b4f2f9ffabbc24b89b
| 1,293
|
exs
|
Elixir
|
apps/brain/rel/config.exs
|
elicopter/core
|
7731dc7558dea39bd1c473ab9e512c9db9e1b2c9
|
[
"MIT"
] | 39
|
2016-11-01T07:21:51.000Z
|
2021-02-05T20:19:02.000Z
|
apps/brain/rel/config.exs
|
elicopter/core
|
7731dc7558dea39bd1c473ab9e512c9db9e1b2c9
|
[
"MIT"
] | null | null | null |
apps/brain/rel/config.exs
|
elicopter/core
|
7731dc7558dea39bd1c473ab9e512c9db9e1b2c9
|
[
"MIT"
] | null | null | null |
use Mix.Releases.Config,
    # This sets the default release built by `mix release`
    default_release: :default,
    # This sets the default environment used by `mix release`
    default_environment: :dev

# For a full list of config options for both releases
# and environments, visit https://hexdocs.pm/distillery/configuration.html

# You may define one or more environments in this file,
# an environment's settings will override those of a release
# when building in that environment, this combination of release
# and environment configuration is called a profile

# NOTE(review): the same distribution cookie is hard-coded for both dev and
# prod and committed to source control — anyone with it can connect to the
# node. Consider generating it per environment / reading it from the env.
environment :dev do
  set cookie: :"rMHkIB&gCxn3s79u/EJDPQcacIL?J`oxz)BY(P}Z$J*>P1T%mBvZDqkyqyjYJ2N5"
end

environment :prod do
  set cookie: :"rMHkIB&gCxn3s79u/EJDPQcacIL?J`oxz)BY(P}Z$J*>P1T%mBvZDqkyqyjYJ2N5"
end

# You may define one or more releases in this file.
# If you have not set a default release, or selected one
# when running `mix release`, the first release in the file
# will be used by default

release :brain do
  set version: current_version(:brain)
  # When cross-compiling for a Nerves target, pull ERTS and system libs from
  # the Nerves system toolchain instead of the host's Erlang installation.
  if System.get_env("NERVES_SYSTEM") do
    set dev_mode: false
    set include_src: false
    set include_erts: System.get_env("ERL_LIB_DIR")
    set include_system_libs: System.get_env("ERL_SYSTEM_LIB_DIR")
    set vm_args: "rel/vm.args"
  end
end
| 32.325
| 81
| 0.755607
|
03056375edaa966ef464d281b0db0fa9fab6bc08
| 875
|
ex
|
Elixir
|
lib/balance/balance.ex
|
mfeckie/pin_elixir
|
8b360c0d189109b2a71a34f7f80c78ce9bc9092c
|
[
"MIT"
] | 6
|
2015-11-16T08:13:08.000Z
|
2016-05-26T04:00:58.000Z
|
lib/balance/balance.ex
|
mfeckie/pin_elixir
|
8b360c0d189109b2a71a34f7f80c78ce9bc9092c
|
[
"MIT"
] | null | null | null |
lib/balance/balance.ex
|
mfeckie/pin_elixir
|
8b360c0d189109b2a71a34f7f80c78ce9bc9092c
|
[
"MIT"
] | null | null | null |
defmodule PinElixir.Balance do
  import PinElixir.Utils.RequestOptions
  import PinElixir.Utils.Response

  @moduledoc """
  Allows querying of pin account balance
  """

  @doc """
  Provides a representation of the current pin account balance and pending transactions

  returns a tuple

  ```
  {:ok,
    %{balance:
      %{
        available: [%{amount: 50000, currency: "AUD"}],
        pending: [%{amount: 50000, currency: "AUD"}]
      }
    }
  }
  ```

  OR

  {:error, error_map}
  """
  def get do
    HTTPotion.get("https://#{pin_url()}/balance", with_auth)
    |> handle_get
  end

  # Read the Pin host at call time rather than via a module attribute:
  # `@pin_url Application.get_env(...)` freezes the value at compile time,
  # which breaks runtime/release configuration.
  defp pin_url, do: Application.get_env(:pin_elixir, :pin_url)

  # 200 OK: decode the JSON body and wrap the balance in an :ok tuple.
  defp handle_get(%{status_code: 200, body: body}) do
    decoded = body |> decode
    {:ok, %{balance: decoded.response}}
  end

  # Any non-200 status: convert the error body into an {:error, ...} tuple.
  # (The previous clause bound the status to a variable literally named
  # `___`, which read like a placeholder; renamed for clarity.)
  defp handle_get(%{status_code: _status_code, body: body}) do
    body |> to_error_tuple
  end
end
| 19.021739
| 87
| 0.632
|
030597b439d61a69e9dece27427f799cf18c8832
| 2,138
|
exs
|
Elixir
|
config/prod.exs
|
cjen07/lssn
|
a3b9f4998dc497935896657473b1c30a570c92e8
|
[
"MIT"
] | null | null | null |
config/prod.exs
|
cjen07/lssn
|
a3b9f4998dc497935896657473b1c30a570c92e8
|
[
"MIT"
] | null | null | null |
config/prod.exs
|
cjen07/lssn
|
a3b9f4998dc497935896657473b1c30a570c92e8
|
[
"MIT"
] | null | null | null |
use Mix.Config

# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.

# {:system, "PORT"} defers the port lookup to boot time, so the release can
# be configured via the environment without recompiling.
config :lssn, Lssn.Endpoint,
  http: [port: {:system, "PORT"}],
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :lssn, Lssn.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [port: 443,
#               keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#               certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
#     config :lssn, Lssn.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
#     config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
#     config :lssn, Lssn.Endpoint, server: true
#
# You will also need to set the application root to `.` in order
# for the new static assets to be served after a hot upgrade:
#
#     config :lssn, Lssn.Endpoint, root: "."

# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 32.393939
| 67
| 0.708138
|
030599eb2ce0857750a548def3d5e812fc2b1c9a
| 15,356
|
ex
|
Elixir
|
clients/street_view_publish/lib/google_api/street_view_publish/v1/api/photos.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/street_view_publish/lib/google_api/street_view_publish/v1/api/photos.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/street_view_publish/lib/google_api/street_view_publish/v1/api/photos.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.StreetViewPublish.V1.Api.Photos do
@moduledoc """
API calls for all endpoints tagged `Photos`.
"""
alias GoogleApi.StreetViewPublish.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Deletes a list of Photos and their
metadata.
Note that if
BatchDeletePhotos
fails, either critical fields are missing or there is an authentication
error. Even if
BatchDeletePhotos
succeeds, individual photos in the batch may have failures.
These failures are specified in each
PhotoResponse.status
in
BatchDeletePhotosResponse.results.
See
DeletePhoto
for specific failures that can occur per photo.
## Parameters
* `connection` (*type:* `GoogleApi.StreetViewPublish.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.StreetViewPublish.V1.Model.BatchDeletePhotosRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.StreetViewPublish.V1.Model.BatchDeletePhotosResponse{}}` on success
* `{:error, info}` on failure
"""
@spec streetviewpublish_photos_batch_delete(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.StreetViewPublish.V1.Model.BatchDeletePhotosResponse.t()}
| {:error, Tesla.Env.t()}
def streetviewpublish_photos_batch_delete(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/photos:batchDelete", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.StreetViewPublish.V1.Model.BatchDeletePhotosResponse{}]
)
end
@doc """
Gets the metadata of the specified
Photo batch.
Note that if
BatchGetPhotos
fails, either critical fields are missing or there is an authentication
error. Even if
BatchGetPhotos
succeeds, individual photos in the batch may have failures.
These failures are specified in each
PhotoResponse.status
in
BatchGetPhotosResponse.results.
See
GetPhoto
for specific failures that can occur per photo.
## Parameters
* `connection` (*type:* `GoogleApi.StreetViewPublish.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:languageCode` (*type:* `String.t`) - The BCP-47 language code, such as "en-US" or "sr-Latn". For more
information, see
http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
If language_code is unspecified, the user's language preference for Google
services is used.
* `:photoIds` (*type:* `list(String.t)`) - Required. IDs of the Photos. For HTTP
GET requests, the URL query parameter should be
`photoIds=<id1>&photoIds=<id2>&...`.
* `:view` (*type:* `String.t`) - Specifies if a download URL for the photo bytes should be returned in the
Photo response.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.StreetViewPublish.V1.Model.BatchGetPhotosResponse{}}` on success
* `{:error, info}` on failure
"""
@spec streetviewpublish_photos_batch_get(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.StreetViewPublish.V1.Model.BatchGetPhotosResponse.t()}
| {:error, Tesla.Env.t()}
def streetviewpublish_photos_batch_get(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:languageCode => :query,
:photoIds => :query,
:view => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/photos:batchGet", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.StreetViewPublish.V1.Model.BatchGetPhotosResponse{}]
)
end
@doc """
Updates the metadata of Photos, such
as pose, place association, connections, etc. Changing the pixels of photos
is not supported.
Note that if
BatchUpdatePhotos
fails, either critical fields are missing or there is an authentication
error. Even if
BatchUpdatePhotos
succeeds, individual photos in the batch may have failures.
These failures are specified in each
PhotoResponse.status
in
BatchUpdatePhotosResponse.results.
See
UpdatePhoto
for specific failures that can occur per photo.
Only the fields specified in
updateMask
field are used. If `updateMask` is not present, the update applies to all
fields.
The number of
UpdatePhotoRequest
messages in a
BatchUpdatePhotosRequest
must not exceed 20.
<aside class="note"><b>Note:</b> To update
Pose.altitude,
Pose.latLngPair has to be
filled as well. Otherwise, the request will fail.</aside>
## Parameters
* `connection` (*type:* `GoogleApi.StreetViewPublish.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.StreetViewPublish.V1.Model.BatchUpdatePhotosRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.StreetViewPublish.V1.Model.BatchUpdatePhotosResponse{}}` on success
* `{:error, info}` on failure
"""
@spec streetviewpublish_photos_batch_update(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.StreetViewPublish.V1.Model.BatchUpdatePhotosResponse.t()}
| {:error, Tesla.Env.t()}
def streetviewpublish_photos_batch_update(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/photos:batchUpdate", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.StreetViewPublish.V1.Model.BatchUpdatePhotosResponse{}]
)
end
@doc """
Lists all the Photos that belong to
the user.
<aside class="note"><b>Note:</b> Recently created photos that are still
being indexed are not returned in the response.</aside>
## Parameters
* `connection` (*type:* `GoogleApi.StreetViewPublish.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - The filter expression. For example: `placeId=ChIJj61dQgK6j4AR4GeTYWZsKWw`.
The only filter supported at the moment is `placeId`.
* `:languageCode` (*type:* `String.t`) - The BCP-47 language code, such as "en-US" or "sr-Latn". For more
information, see
http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
If language_code is unspecified, the user's language preference for Google
services is used.
* `:pageSize` (*type:* `integer()`) - The maximum number of photos to return.
`pageSize` must be non-negative. If `pageSize` is zero or is not provided,
the default page size of 100 is used.
The number of photos returned in the response may be less than `pageSize`
if the number of photos that belong to the user is less than `pageSize`.
* `:pageToken` (*type:* `String.t`) - The
nextPageToken
value returned from a previous
ListPhotos
request, if any.
* `:view` (*type:* `String.t`) - Specifies if a download URL for the photos bytes should be returned in the
Photos response.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.StreetViewPublish.V1.Model.ListPhotosResponse{}}` on success
* `{:error, info}` on failure
"""
@spec streetviewpublish_photos_list(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.StreetViewPublish.V1.Model.ListPhotosResponse.t()}
| {:error, Tesla.Env.t()}
def streetviewpublish_photos_list(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:languageCode => :query,
:pageSize => :query,
:pageToken => :query,
:view => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/photos", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.StreetViewPublish.V1.Model.ListPhotosResponse{}]
)
end
end
| 42.893855
| 196
| 0.646783
|
0305b466fc53b9aa23f5f8e0b861feac587f53cc
| 2,308
|
ex
|
Elixir
|
lib/sudoku_web.ex
|
ream88/sudoku
|
7b319ce075a23bf8ca58ffcefa2c2965795cc9a2
|
[
"MIT"
] | null | null | null |
lib/sudoku_web.ex
|
ream88/sudoku
|
7b319ce075a23bf8ca58ffcefa2c2965795cc9a2
|
[
"MIT"
] | null | null | null |
lib/sudoku_web.ex
|
ream88/sudoku
|
7b319ce075a23bf8ca58ffcefa2c2965795cc9a2
|
[
"MIT"
] | null | null | null |
defmodule SudokuWeb do
  @moduledoc """
  The entrypoint for defining your web interface, such
  as controllers, views, channels and so on.

  This can be used in your application as:

      use SudokuWeb, :controller
      use SudokuWeb, :view

  The definitions below will be executed for every view,
  controller, etc, so keep them short and clean, focused
  on imports, uses and aliases.

  Do NOT define functions inside the quoted expressions
  below. Instead, define any helper function in modules
  and import those modules here.
  """

  # Quoted AST injected into every Phoenix controller via `use SudokuWeb, :controller`.
  def controller do
    quote do
      use Phoenix.Controller, namespace: SudokuWeb

      import Plug.Conn
      import SudokuWeb.Gettext
      alias SudokuWeb.Router.Helpers, as: Routes
    end
  end

  # Quoted AST for classic (non-live) views; templates live under
  # lib/sudoku_web/templates.
  def view do
    quote do
      use Phoenix.View,
        root: "lib/sudoku_web/templates",
        namespace: SudokuWeb

      # Import convenience functions from controllers
      import Phoenix.Controller,
        only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

      # Include shared imports and aliases for views
      unquote(view_helpers())
    end
  end

  # Quoted AST for LiveViews; all LiveViews render inside the "live.html" layout.
  def live_view do
    quote do
      use Phoenix.LiveView,
        layout: {SudokuWeb.LayoutView, "live.html"}

      unquote(view_helpers())
    end
  end

  # Quoted AST for LiveComponents.
  def live_component do
    quote do
      use Phoenix.LiveComponent

      unquote(view_helpers())
    end
  end

  # Quoted AST for the router module.
  def router do
    quote do
      use Phoenix.Router

      import Plug.Conn
      import Phoenix.Controller
      import Phoenix.LiveView.Router
    end
  end

  # Quoted AST for Phoenix channels.
  def channel do
    quote do
      use Phoenix.Channel
      import SudokuWeb.Gettext
    end
  end

  # Imports/aliases shared by views, LiveViews and LiveComponents.
  defp view_helpers do
    quote do
      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML

      # Import LiveView helpers (live_render, live_component, live_patch, etc)
      import Phoenix.LiveView.Helpers

      # Import basic rendering functionality (render, render_layout, etc)
      import Phoenix.View

      import SudokuWeb.ErrorHelpers
      import SudokuWeb.Gettext
      alias SudokuWeb.Router.Helpers, as: Routes
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    # `use SudokuWeb, :controller` calls controller/0 above and splices in
    # the quoted expression it returns.
    apply(__MODULE__, which, [])
  end
end
| 22.407767
| 78
| 0.680243
|
0305c58cfe054a78090e7df09398972429612fb4
| 1,115
|
exs
|
Elixir
|
config/config.exs
|
aforward/my_app
|
b6e13308657996fc4b0124c887836d9cc29d371a
|
[
"MIT"
] | null | null | null |
config/config.exs
|
aforward/my_app
|
b6e13308657996fc4b0124c887836d9cc29d371a
|
[
"MIT"
] | null | null | null |
config/config.exs
|
aforward/my_app
|
b6e13308657996fc4b0124c887836d9cc29d371a
|
[
"MIT"
] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` — confirm the project's minimum Elixir version before
# switching.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure for your application as:
#
#     config :my_app, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:my_app, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
| 35.967742
| 73
| 0.750673
|
0305ee0cf8781f66d8bba8297bc29995778f61b9
| 2,036
|
ex
|
Elixir
|
clients/big_query/lib/google_api/big_query/v2/model/view_definition.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/big_query/lib/google_api/big_query/v2/model/view_definition.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/big_query/lib/google_api/big_query/v2/model/view_definition.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | 1
|
2018-07-28T20:50:50.000Z
|
2018-07-28T20:50:50.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.BigQuery.V2.Model.ViewDefinition do
  @moduledoc """
  Describes a BigQuery logical view.

  ## Attributes

  - query (String.t): [Required] A query that BigQuery executes when the view is referenced. Defaults to: `null`.
  - useLegacySql (boolean()): Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-reference/ Queries and views that reference this view must use the same flag value. Defaults to: `null`.
  - userDefinedFunctionResources ([UserDefinedFunctionResource]): Describes user-defined function resources used in the query. Defaults to: `null`.
  """

  # Plain atom literals; the original quoted form (:"query") denotes the
  # exact same atoms, so the struct is unchanged.
  defstruct [:query, :useLegacySql, :userDefinedFunctionResources]
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.ViewDefinition do
  import GoogleApi.BigQuery.V2.Deserializer

  # Only the UDF resource list needs recursive decoding into model structs;
  # the scalar fields decode as-is.
  def decode(value, options) do
    deserialize(
      value,
      :userDefinedFunctionResources,
      :list,
      GoogleApi.BigQuery.V2.Model.UserDefinedFunctionResource,
      options
    )
  end
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.ViewDefinition do
  alias GoogleApi.BigQuery.V2.Deserializer

  # Encode the struct, skipping nil fields.
  def encode(value, options) do
    Deserializer.serialize_non_nil(value, options)
  end
end
| 39.153846
| 329
| 0.759823
|
0305fa98ea44f9e95255210ffd9b8af6da52cc28
| 269
|
ex
|
Elixir
|
lib/liberator_example.ex
|
Cantido/liberator_example
|
d14014277cdc803a8ef895f80aac0651cea7108e
|
[
"MIT"
] | null | null | null |
lib/liberator_example.ex
|
Cantido/liberator_example
|
d14014277cdc803a8ef895f80aac0651cea7108e
|
[
"MIT"
] | null | null | null |
lib/liberator_example.ex
|
Cantido/liberator_example
|
d14014277cdc803a8ef895f80aac0651cea7108e
|
[
"MIT"
] | null | null | null |
defmodule LiberatorExample do
  @moduledoc """
  LiberatorExample keeps the contexts that define your domain
  and business logic.

  Contexts are also responsible for managing your data, regardless
  if it comes from the database, an external API or others.
  """
end
| 26.9
| 66
| 0.769517
|
0305fb58d2a890a8477cad17a6d1a964f06deb41
| 6,182
|
ex
|
Elixir
|
lib/article/article.ex
|
breunigs/veloroute
|
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
|
[
"0BSD"
] | 12
|
2018-06-15T10:18:43.000Z
|
2022-01-24T12:50:54.000Z
|
lib/article/article.ex
|
breunigs/veloroute
|
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
|
[
"0BSD"
] | 15
|
2018-06-21T18:04:12.000Z
|
2021-10-16T12:54:39.000Z
|
lib/article/article.ex
|
breunigs/veloroute
|
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
|
[
"0BSD"
] | 2
|
2020-03-09T19:21:36.000Z
|
2022-01-16T03:29:51.000Z
|
defmodule Article do
  # Represents one article (blog post / construction-site page) of the site,
  # plus helpers for filtering, ordering and relating articles to map data,
  # routes and video tracks.
  require Logger

  @type t :: %__MODULE__{
          date: Date.t() | nil,
          end: Data.RoughDate.t() | nil,
          full_title: binary() | nil,
          hide_footer: boolean() | nil,
          name: binary(),
          no_auto_title: boolean() | nil,
          start: Data.RoughDate.t() | nil,
          text: binary(),
          title: binary() | nil,
          summary: binary() | nil,
          type: binary() | nil,
          tracks: [Video.Track.t()]
        }

  # A collection maps article name => article struct.
  @type collection() :: %{binary() => t()}

  # All struct keys; also doubles as the set of legal filter keys in filter/2.
  @known_params [
    :bbox,
    :construction_site_id_hh,
    :date,
    :dynamic,
    :end,
    :full_title,
    :hide_footer,
    :icon,
    :name,
    :no_auto_title,
    :range,
    :search_text,
    :search_title,
    :start,
    :summary,
    :tags,
    :text,
    :title,
    :type,
    :tracks
  ]

  defstruct @known_params

  @enforce_keys [:type, :title, :full_title, :text, :date, :name]
  # Exposes the enforced keys so callers can validate input before building.
  def required_params, do: @enforce_keys

  # Whole days elapsed since the article's date (negative for future dates).
  def age_in_days(%__MODULE__{date: date}) do
    Date.diff(Date.utc_today(), date)
  end

  # All map ways tagged as articles (type=article).
  @spec article_ways(Map.Parsed.t()) :: [Map.Way.t()]
  def article_ways(%Map.Parsed{} = map) do
    Map.Element.filter_by_tag(map.ways, :type, "article")
  end

  # Fills :bbox from map ways named after the article, falling back to the
  # first tag that has a precomputed bbox in tag_bboxes.
  @spec enrich_with_map(t(), [Map.Way.t()], %{binary() => Geo.BoundingBox.t()}) :: t()
  def enrich_with_map(%__MODULE__{} = art, article_ways, tag_bboxes)
      when is_list(article_ways) and is_map(tag_bboxes) do
    ways = Map.Element.filter_by_tag(article_ways, :name, art.name)
    bbox =
      Map.Element.bbox(ways) ||
        Enum.find_value(art.tags, fn tag ->
          if is_map_key(tag_bboxes, tag), do: tag_bboxes[tag], else: nil
        end)
    %{art | bbox: bbox}
  end

  @doc ~S"""
  Filters down a list or map of articles. The filter is a keyword list with the
  keys being fields to filter on. The values are regular lists to denote
  acceptable values.

  ## Examples

      iex> %{
      ...>   "a" => ArticleTest.example_article(),
      ...>   "b" => ArticleTest.example_article() |> Map.delete(:tags),
      ...> }
      ...> |> Article.filter([tags: ["7"]])
      %{"a" => ArticleTest.example_article()}

      iex> %{"a" => ArticleTest.example_article()}
      ...> |> Article.filter([tags: ["7"], unknown_key: ["7"]])
      {:error, "Unknown filter key(s) unknown_key"}

      iex> %{"a" => ArticleTest.example_article(date: nil)}
      ...> |> Article.filter([date: [nil]])
      %{"a" => ArticleTest.example_article(date: nil)}
  """
  def filter(all, filter) when is_list(filter) do
    find_invalid_keys(filter)
    |> case do
      [] ->
        all
        |> Enum.filter(fn {_name, art} ->
          # An article matches when, for every filter key, at least one of its
          # values (missing/nil treated as [nil]) intersects the allowed set.
          Enum.all?(filter, fn {key, allowed_values} ->
            allowed = MapSet.new(allowed_values)
            have = Map.get(art, key) |> Kernel.||([nil]) |> List.wrap() |> MapSet.new()
            matches = MapSet.intersection(allowed, have)
            MapSet.size(matches) > 0
          end)
        end)
        |> Enum.into(%{})

      invalid ->
        {:error, "Unknown filter key(s) #{invalid |> Enum.join(", ")}"}
    end
  end

  # Filter keys that are not known struct fields.
  defp find_invalid_keys(filter) do
    Enum.reject(filter, fn {key, _vals} ->
      Enum.member?(@known_params, key)
    end)
    |> Keyword.keys()
  end

  # String keys are converted to existing atoms before dispatching; using
  # to_existing_atom avoids creating atoms from arbitrary input.
  def ordered(various, key) when is_binary(key),
    do: ordered(various, String.to_existing_atom(key))

  # Dispatch by sort key; nil defaults to date ordering.
  def ordered(various, key) do
    case key do
      :start -> ordered_by_start(various)
      :date -> ordered_by_date(various)
      nil -> ordered_by_date(various)
    end
  end

  # Sort dated articles by their rough start date.
  def ordered_by_start(various) do
    various
    |> orderable_only()
    |> Enum.sort_by(
      fn art -> art.start end,
      &Data.RoughDate.compare(&1, &2)
    )
  end

  # Sort dated articles chronologically by their (exact) date.
  def ordered_by_date(various) do
    various
    |> orderable_only()
    |> Enum.sort_by(fn
      %__MODULE__{date: %Date{} = d} -> {d.year, d.month, d.day}
    end)
  end

  # Articles sharing at least one tag with the given one, excluding itself.
  # No tags -> no related articles.
  def related(_all, %__MODULE__{tags: nil}), do: %{}
  def related(_all, %__MODULE__{tags: []}), do: %{}

  def related(all, %__MODULE__{name: name, tags: tags}) when is_list(tags) do
    filter(all, tags: tags)
    |> Map.delete(name)
  end

  # Human-readable date range from the rough start/end dates.
  def range(%__MODULE__{start: from, end: to}) do
    Data.RoughDate.range(from, to)
  end

  # Normalizes a collection (map, list of {name, article} tuples, or list of
  # articles) into a list of articles that have a date.
  # NOTE: clause order matters — the tuple-list clause must run before the
  # generic list clause.
  def orderable_only(map) when is_map(map),
    do: map |> Map.values() |> orderable_only()

  def orderable_only([{name, %__MODULE__{name: name}} | _rest] = list),
    do: list |> Enum.map(&elem(&1, 1)) |> orderable_only()

  def orderable_only(list) when is_list(list) do
    list
    |> Enum.reject(fn
      %__MODULE__{date: nil} -> true
      _ -> false
    end)
  end

  # All routes whose group matches one of the article's tags.
  @spec related_routes(t()) :: [Route.t()]
  def related_routes(art) do
    Enum.filter(Route.all(), &Route.has_group?(&1, art.tags))
  end

  # First route whose group matches one of the article's tags, if any.
  @spec related_route(t()) :: Route.t() | nil
  def related_route(art) do
    Enum.find(Route.all(), &Route.has_group?(&1, art.tags))
  end

  @doc """
  Find a track that is related to this article. If the article has own tracks,
  it will prefer those. Otherwise it uses the tags to find related routes and
  pick the first track for the first route matched.
  """
  @spec related_track(t()) :: Video.Track.t() | nil
  def related_track(%{tracks: [track | _rest]}), do: track

  def related_track(art) do
    route = related_route(art)
    if route, do: hd(route.tracks())
  end

  @doc """
  Tries to find a picture of a related video track around the center of the
  article's bbox.
  """
  @spec start_image_path(t()) :: binary() | nil
  def start_image_path(%{bbox: bbox} = art) when is_map(bbox) do
    rendered = art |> related_track() |> Video.Rendered.get()

    if rendered do
      # Use the video frame closest to the bbox center as the preview image.
      center = Geo.CheapRuler.center(bbox)
      %{point: %{time_offset_ms: ms}} =
        Geo.CheapRuler.closest_point_on_line(rendered.coords(), center)
      VelorouteWeb.Router.Helpers.image_extract_path(
        VelorouteWeb.Endpoint,
        :image,
        rendered.hash(),
        ms
      )
    end
  end

  # No bbox (or not enriched yet) -> no preview image.
  def start_image_path(_art), do: nil

  # URL path: undated "0000-00-00-…" articles are static pages at the root;
  # everything else lives under /article/.
  @spec path(t()) :: binary()
  def path(%__MODULE__{name: "0000-00-00-" <> page_name}), do: "/#{page_name}"
  def path(%__MODULE__{name: page_name}), do: "/article/#{page_name}"
end
| 27.23348
| 87
| 0.598835
|
030627041baf92cc54b9a25183dfd0ebbf246ec5
| 4,098
|
exs
|
Elixir
|
test/parser_test.exs
|
alg/auto_linker
|
084f75e5efb85ba37568581ab098c6431978c7ef
|
[
"MIT"
] | null | null | null |
test/parser_test.exs
|
alg/auto_linker
|
084f75e5efb85ba37568581ab098c6431978c7ef
|
[
"MIT"
] | null | null | null |
test/parser_test.exs
|
alg/auto_linker
|
084f75e5efb85ba37568581ab098c6431978c7ef
|
[
"MIT"
] | null | null | null |
defmodule AutoLinker.ParserTest do
  use ExUnit.Case
  doctest AutoLinker.Parser

  import AutoLinker.Parser

  describe "is_url" do
    test "valid scheme true" do
      valid_scheme_urls()
      |> Enum.each(fn url ->
        assert is_url?(url, true)
      end)
    end

    test "invalid scheme true" do
      invalid_scheme_urls()
      |> Enum.each(fn url ->
        refute is_url?(url, true)
      end)
    end

    test "valid scheme false" do
      valid_non_scheme_urls()
      |> Enum.each(fn url ->
        assert is_url?(url, false)
      end)
    end

    test "invalid scheme false" do
      invalid_non_scheme_urls()
      |> Enum.each(fn url ->
        refute is_url?(url, false)
      end)
    end
  end

  describe "match_phone" do
    test "valid" do
      # Fixed typo: helper was previously named valid_phone_nunbers/0.
      valid_phone_numbers()
      |> Enum.each(fn number ->
        assert number |> match_phone() |> valid_number?(number)
      end)
    end

    test "invalid" do
      invalid_phone_numbers()
      |> Enum.each(fn number ->
        assert number |> match_phone() |> is_nil
      end)
    end
  end

  describe "parse" do
    test "does not link attributes" do
      text = "Check out <a href='google.com'>google</a>"
      assert parse(text) == text
      text = "Check out <img src='google.com' alt='google.com'/>"
      assert parse(text) == text
      text = "Check out <span><img src='google.com' alt='google.com'/></span>"
      assert parse(text) == text
    end

    test "links url inside html" do
      text = "Check out <div class='section'>google.com</div>"
      expected = "Check out <div class='section'><a href='http://google.com'>google.com</a></div>"
      assert parse(text, class: false, rel: false, new_window: false) == expected
    end

    test "excludes html with specified class" do
      text = "```Check out <div class='section'>google.com</div>```"
      assert parse(text, exclude_pattern: "```") == text
    end

    test "links urls with schema" do
      opts = [scheme: true, class: false, new_window: false, rel: false]
      assert parse("foo http://google.com/foo bar", opts) == "foo <a href='http://google.com/foo'>google.com/foo</a> bar"
      assert parse("foo https://google.com bar", opts) == "foo <a href='https://google.com'>google.com</a> bar"
    end

    test "with converter" do
      assert parse("foo google.com bar", converter: fn(url) -> String.upcase(url) end) == "foo <a href='HTTP://GOOGLE.COM' class='auto-linker' target='_blank' rel='noopener noreferrer'>google.com</a> bar"
    end
  end

  # True when match_phone/1 returned a single capture list whose last element
  # is exactly the input number.
  def valid_number?([list], number) do
    assert List.last(list) == number
  end

  def valid_number?(_, _), do: false

  def valid_scheme_urls, do: [
    "https://www.example.com",
    "http://www2.example.com",
    "http://home.example-site.com",
    "http://blog.example.com",
    "http://www.example.com/product",
    "http://www.example.com/products?id=1&page=2",
    "http://www.example.com#up",
    "http://255.255.255.255",
    "http://www.site.com:8008"
  ]

  def invalid_scheme_urls, do: [
    "http://invalid.com/perl.cgi?key= | http://web-site.com/cgi-bin/perl.cgi?key1=value1&key2",
  ]

  def valid_non_scheme_urls, do: [
    "www.example.com",
    "www2.example.com",
    "www.example.com:2000",
    "www.example.com?abc=1",
    "example.example-site.com",
    "example.com",
    "example.ca",
    "example.tv",
    "example.com:999?one=one",
    "255.255.255.255",
    "255.255.255.255:3000?one=1&two=2",
  ]

  def invalid_non_scheme_urls, do: [
    "invalid.com/perl.cgi?key= | web-site.com/cgi-bin/perl.cgi?key1=value1&key2",
    "invalid.",
    "hi..there",
    "555.555.5555"
  ]

  def valid_phone_numbers, do: [
    "x55",
    "x555",
    "x5555",
    "x12345",
    "+1 555 555-5555",
    "555 555-5555",
    "555.555.5555",
    "613-555-5555",
    "1 (555) 555-5555",
    "(555) 555-5555",
    "1.555.555.5555",
    "800 555-5555",
    "1.800.555.5555",
    "1 (800) 555-5555",
    "888 555-5555",
    "887 555-5555",
    "1-877-555-5555",
    "1 800 710-5515"
  ]

  # Deprecated misspelled alias kept for backward compatibility.
  def valid_phone_nunbers, do: valid_phone_numbers()

  def invalid_phone_numbers, do: [
    "5555",
    "x5",
    "(555) 555-55",
  ]
end
| 26.43871
| 204
| 0.596633
|
0306321063fa7b01c0176fa90d392679cec93331
| 2,860
|
ex
|
Elixir
|
deps/plug_crypto/lib/plug/crypto/key_generator.ex
|
rpillar/Top5_Elixir
|
9c450d2e9b291108ff1465dc066dfe442dbca822
|
[
"MIT"
] | null | null | null |
deps/plug_crypto/lib/plug/crypto/key_generator.ex
|
rpillar/Top5_Elixir
|
9c450d2e9b291108ff1465dc066dfe442dbca822
|
[
"MIT"
] | null | null | null |
deps/plug_crypto/lib/plug/crypto/key_generator.ex
|
rpillar/Top5_Elixir
|
9c450d2e9b291108ff1465dc066dfe442dbca822
|
[
"MIT"
] | null | null | null |
defmodule Plug.Crypto.KeyGenerator do
  @moduledoc """
  `KeyGenerator` implements PBKDF2 (Password-Based Key Derivation Function 2),
  part of PKCS #5 v2.0 (Password-Based Cryptography Specification).

  It can be used to derive a number of keys for various purposes from a given
  secret. This lets applications have a single secure secret, but avoid reusing
  that key in multiple incompatible contexts.

  see http://tools.ietf.org/html/rfc2898#section-5.2
  """
  use Bitwise

  # RFC 2898 caps the derived key length at (2^32 - 1) blocks worth of octets.
  @max_length bsl(1, 32) - 1

  @doc """
  Returns a derived key suitable for use.

  ## Options

    * `:iterations` - defaults to 1000 (increase to at least 2^16 if used for passwords);
    * `:length`     - a length in octets for the derived key. Defaults to 32;
    * `:digest`     - an hmac function to use as the pseudo-random function. Defaults to `:sha256`;
    * `:cache`      - an ETS table name to be used as cache.

  Only use an ETS table as cache if the secret and salt is a bound set of values.
  For example: `:ets.new(:your_name, [:named_table, :public, read_concurrency: true])`
  """
  def generate(secret, salt, opts \\ []) do
    iterations = Keyword.get(opts, :iterations, 1000)
    length = Keyword.get(opts, :length, 32)
    digest = Keyword.get(opts, :digest, :sha256)
    cache = Keyword.get(opts, :cache)

    if length > @max_length do
      raise ArgumentError, "length must be less than or equal to #{@max_length}"
    else
      with_cache(cache, {secret, salt, iterations, length, digest}, fn ->
        generate(mac_fun(digest, secret), salt, iterations, length, 1, [], 0)
      end)
    end
  rescue
    # Scrub secret/salt from the stacktrace before re-raising.
    # Fixed: `System.stacktrace/0` is deprecated; `__STACKTRACE__` (Elixir >= 1.7)
    # is the supported way to access the stacktrace inside `rescue`.
    e -> reraise e, Plug.Crypto.prune_args_from_stacktrace(__STACKTRACE__)
  end

  # Runs `fun` directly when no cache is configured; otherwise memoizes the
  # derived key in the given ETS table keyed by all derivation inputs.
  defp with_cache(nil, _key, fun), do: fun.()

  defp with_cache(ets, key, fun) do
    case :ets.lookup(ets, key) do
      [{_key, value}] ->
        value

      [] ->
        value = fun.()
        :ets.insert(ets, [{key, value}])
        value
    end
  end

  # All blocks computed: concatenate and truncate to the requested length.
  defp generate(_fun, _salt, _iterations, max_length, _block_index, acc, length)
       when length >= max_length do
    acc
    |> IO.iodata_to_binary()
    |> binary_part(0, max_length)
  end

  # Computes block T_i = F(salt, iterations, i) per RFC 2898 and appends it.
  defp generate(fun, salt, iterations, max_length, block_index, acc, length) do
    initial = fun.(<<salt::binary, block_index::integer-size(32)>>)
    block = iterate(fun, iterations - 1, initial, initial)
    length = byte_size(block) + length
    generate(fun, salt, iterations, max_length, block_index + 1, [acc | block], length)
  end

  # XOR-folds `iterations` successive HMAC applications (U_1 xor ... xor U_c).
  defp iterate(_fun, 0, _prev, acc), do: acc

  defp iterate(fun, iteration, prev, acc) do
    next = fun.(prev)
    iterate(fun, iteration - 1, next, :crypto.exor(next, acc))
  end

  # NOTE(review): :crypto.hmac/3 was removed in OTP 24 (replaced by
  # :crypto.mac/4) — confirm the supported OTP range before changing.
  defp mac_fun(digest, secret) do
    &:crypto.hmac(digest, secret, &1)
  end
end
| 33.255814
| 100
| 0.639161
|
0306345e8ec73c4f552cfcf44226cf060069198e
| 11,971
|
ex
|
Elixir
|
lib/mix/lib/mix/project.ex
|
pap/elixir
|
c803afe90c766663823c74397fb23ed40ec52c5b
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/lib/mix/project.ex
|
pap/elixir
|
c803afe90c766663823c74397fb23ed40ec52c5b
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/lib/mix/project.ex
|
pap/elixir
|
c803afe90c766663823c74397fb23ed40ec52c5b
|
[
"Apache-2.0"
] | null | null | null |
defmodule Mix.Project do
@moduledoc """
Defines and manipulate Mix projects.
In order to configure Mix, a developer needs to use
`Mix.Project` in a module and define a function named
`project` that returns a keyword list with configuration.
defmodule MyApp do
use Mix.Project
def project do
[app: :my_app,
version: "0.6.0"]
end
end
After being defined, the configuration for this project can be read
as `Mix.Project.config/0`. Notice that `config/0` won't fail if a
project is not defined; this allows many Mix tasks to work
without a project.
In case the developer needs a project or wants to access a special
function in the project, the developer can call `Mix.Project.get!/0`
which fails with `Mix.NoProjectError` in case a project is not
defined.
## Erlang projects
Mix can be used to manage Erlang projects that don't have any Elixir code. To
ensure Mix tasks work correctly for an Erlang project, `language: :erlang`
has to be added to `project`.
The setting also makes sure Elixir is not added as a dependency to the
generated .app file or to the escript generated with `mix escript.build`,
etc.
"""
  @doc false
  defmacro __using__(_) do
    # Register __after_compile__/2 below so the project is pushed onto the
    # stack as soon as the module using Mix.Project is compiled.
    quote do
      @after_compile Mix.Project
    end
  end
  # Config keys that must never leak from one project to another; they are
  # dropped in push/3 below.
  @private_config [:build_path, :app_path]

  # Invoked after each Mix.Project is compiled.
  @doc false
  def __after_compile__(env, _binary) do
    push env.module, env.file
  end
# Push a project onto the project stack.
# Only the top of the stack can be accessed.
@doc false
def push(atom, file \\ nil, app \\ nil) when is_atom(atom) do
# Fall back to the module's compile-time source path when no file is given.
file = file ||
(atom && List.to_string(atom.__info__(:compile)[:source]))
# Defaults first, then the project's own config on top; build-internal keys
# are dropped so each project recomputes them (see @private_config).
config = ([app: app] ++ default_config)
|> Keyword.merge(get_project_config(atom))
|> Keyword.drop(@private_config)
case Mix.ProjectStack.push(atom, config, file) do
:ok ->
:ok
# The stack rejects duplicate project names; surface that as a Mix error.
{:error, other} when is_binary(other) ->
Mix.raise "Trying to load #{inspect atom} from #{inspect file}" <>
" but another project with the same name was already defined at #{inspect other}"
end
end
# Pops a project from the stack.
# Counterpart of push/3; used by in_project/4 to restore the previous project.
@doc false
def pop do
Mix.ProjectStack.pop
end
# The configuration that is pushed down to dependencies.
@doc false
def deps_config(config \\ config()) do
# Dependencies inherit the parent's build layout but never consolidate
# protocols themselves (presumably done once at the top level — confirm).
[build_embedded: config[:build_embedded],
build_path: build_path(config),
build_per_environment: config[:build_per_environment],
consolidate_protocols: false,
deps_path: deps_path(config)]
end
@doc """
Retrieves the current project if there is one.
Otherwise `nil` is returned. This may happen when
there is no mixfile in the current directory.
If you expect a project to be defined, i.e. it is a
requirement of the current task, you should call
`get!/0` instead.
"""
def get do
# The stack top stores the mixfile module under :name; no entry means
# no project is currently defined.
case Mix.ProjectStack.peek do
%{name: name} -> name
_ -> nil
end
end
@doc """
Same as `get/0`, but raises an exception if there is no current project.

This is usually called by tasks that need additional
functions on the project to be defined. Since such
tasks usually depend on a project being defined, this
function raises `Mix.NoProjectError` in case no project
is available.
"""
def get! do
  case get() do
    nil -> Mix.raise Mix.NoProjectError, []
    project -> project
  end
end
@doc """
Returns the project configuration.
If there is no project defined, it still returns a keyword
list with default values. This allows many Mix tasks to work
without the need for an underlying project.
Note this configuration is cached once the project is
pushed into the stack. Calling it multiple times won't
cause it to be recomputed.
Do not use `Mix.Project.config/0` to rely on runtime configuration.
Use it only to configure aspects of your project (like
compilation directories) and not your application runtime.
"""
def config do
# The config was merged and cached by push/3; fall back to the defaults
# when no project is on the stack.
case Mix.ProjectStack.peek do
%{config: config} -> config
_ -> default_config
end
end
@doc """
Returns a list of project configuration files for this project.

This function is usually used in compilation tasks to trigger
a full recompilation whenever such configuration files change.

By default it includes the mix.exs file, the lock manifest and
all config files in the `config` directory.
"""
def config_files do
  project_files =
    case Mix.ProjectStack.peek do
      %{config: config, file: file} ->
        # Every non-hidden file under the configured config directory counts.
        config_dir = Path.dirname(config[:config_path] || "config/config.exs")

        configs =
          config_dir
          |> Path.join("**/*.*")
          |> Path.wildcard
          |> Enum.reject(fn path -> String.starts_with?(Path.basename(path), ".") end)

        [file | configs]

      _ ->
        # No project on the stack: only the lock manifest is relevant.
        []
    end

  [Mix.Dep.Lock.manifest | project_files]
end
@doc """
Returns `true` if project is an umbrella project.
"""
def umbrella?(config \\ config()) do
  # Umbrella projects are identified by the presence of :apps_path.
  not is_nil(config[:apps_path])
end
@doc """
Runs the given `fun` inside the given project.
This function changes the current working directory and
loads the project at the given directory onto the project
stack.
A `post_config` can be passed that will be merged into
the project configuration.
"""
def in_project(app, path, post_config \\ [], fun)
def in_project(app, ".", post_config, fun) do
# Load (or fetch from cache) the project in the current directory; report
# the app/directory context on failure, then re-raise with the original
# stacktrace so the real error is not swallowed.
cached = try do
load_project(app, post_config)
rescue
any ->
Mix.shell.error "Error while loading project #{inspect app} at #{File.cwd!}"
reraise any, System.stacktrace
end
# Always pop the project pushed by load_project/2, even if `fun` raises.
try do
fun.(cached)
after
Mix.Project.pop
end
end
def in_project(app, path, post_config, fun) do
# For any other path, change directory first and recurse into the "." clause.
File.cd! path, fn ->
in_project(app, ".", post_config, fun)
end
end
@doc """
Returns the path to store dependencies for this project.

The returned path will be expanded.

## Examples

    Mix.Project.deps_path
    #=> "/path/to/project/deps"

"""
def deps_path(config \\ config()) do
  # :deps_path always has a value thanks to default_config/0.
  config[:deps_path] |> Path.expand
end
@doc """
Returns the build path for this project.

The returned path will be expanded.

## Examples

    Mix.Project.build_path
    #=> "/path/to/project/_build/shared"

If :build_per_environment is set to `true` (the default), it
will create a new build per environment:

    Mix.env
    #=> :dev
    Mix.Project.build_path
    #=> "/path/to/project/_build/dev"

"""
def build_path(config \\ config()) do
  cond do
    # An explicit :build_path wins and is returned as-is (not expanded).
    path = config[:build_path] ->
      path

    # One build directory per Mix environment (the default).
    config[:build_per_environment] ->
      Path.expand("_build/#{Mix.env}")

    true ->
      Path.expand("_build/shared")
  end
end
@doc """
The path to store manifests.

By default they are stored in the app path inside
the build directory. Umbrella applications have
the manifest path set to the root of the build directory.
Directories may be changed in future releases.

The returned path will be expanded.

## Examples

    Mix.Project.manifest_path
    #=> "/path/to/project/_build/shared/lib/app"

"""
def manifest_path(config \\ config()) do
  cond do
    # An explicit :app_path in the config takes precedence.
    path = config[:app_path] ->
      path

    app = config[:app] ->
      Path.join([build_path(config), "lib", Atom.to_string(app)])

    true ->
      # No app (e.g. an umbrella): manifests live at the build root.
      build_path(config)
  end
end
@doc """
Returns the application path inside the build.
The returned path will be expanded.
## Examples
    Mix.Project.app_path
    #=> "/path/to/project/_build/shared/lib/app"
"""
def app_path(config \\ config()) do
# An explicit :app_path in the config takes precedence.
config[:app_path] || cond do
app = config[:app] ->
Path.join([build_path(config), "lib", Atom.to_string(app)])
config[:apps_path] ->
# NOTE(review): plain `raise` (RuntimeError) here while the branch below
# uses `Mix.raise` — looks inconsistent; confirm before changing, since
# callers may match on the exception type.
raise "Trying to access Mix.Project.app_path for an umbrella project but umbrellas have no app"
true ->
Mix.raise "Cannot access build without an application name, " <>
"please ensure you are in a directory with a mix.exs file and it defines " <>
"an :app name under the project configuration"
end
end
@doc """
Returns the paths this project compiles to.

The returned path will be expanded.

## Examples

    Mix.Project.compile_path
    #=> "/path/to/project/_build/shared/lib/app/ebin"

"""
def compile_path(config \\ config()) do
  config
  |> app_path
  |> Path.join("ebin")
end
@doc """
Compiles the given project.

It will run the compile task unless the project
is in build embedded mode, in which case an
explicit command to `mix compile` is required.
"""
def compile(args, config \\ config()) do
  if config[:build_embedded] do
    # Embedded builds are never compiled implicitly: the build artifacts
    # must already exist on disk from an explicit `mix compile`.
    path = if umbrella?(config), do: build_path(config), else: compile_path(config)

    if !File.exists?(path) do
      Mix.raise "Cannot execute task because the project was not yet compiled. " <>
                "When build_embedded is set to true, \"MIX_ENV=#{Mix.env} mix compile\" " <>
                "must be explicitly executed"
    end

    Mix.Task.run "loadpaths", args
  else
    Mix.Task.run "compile", args
  end
end
@doc """
Builds the project structure for the current application.
## Options
  * `:symlink_ebin` - symlink ebin instead of copying it
"""
def build_structure(config \\ config(), opts \\ []) do
app = app_path(config)
File.mkdir_p!(app)
source = Path.expand("ebin")
target = Path.join(app, "ebin")
# ebin handling: symlink/copy when requested, replace a stale symlink with
# a real directory otherwise, or simply ensure the directory exists.
_ = cond do
opts[:symlink_ebin] ->
_ = symlink_or_copy(config, source, target)
match?({:ok, _}, :file.read_link(target)) ->
# Target is currently a symlink but symlinking was not requested:
# remove it and recreate a real directory.
_ = File.rm_rf!(target)
File.mkdir_p!(target)
true ->
File.mkdir_p!(target)
end
# include/ and priv/ are always mirrored into the build.
_ = symlink_or_copy(config, Path.expand("include"), Path.join(app, "include"))
_ = symlink_or_copy(config, Path.expand("priv"), Path.join(app, "priv"))
:ok
end
# Mirrors `source` into `target`: a real copy for embedded builds, a
# symlink-or-copy (per Mix.Utils.symlink_or_copy/2) otherwise. When the
# build is embedded and `source` does not exist, nothing is done.
defp symlink_or_copy(config, source, target) do
  cond do
    not config[:build_embedded] ->
      Mix.Utils.symlink_or_copy(source, target)

    File.exists?(source) ->
      File.rm_rf!(target)
      File.cp_r!(source, target)

    true ->
      nil
  end
end
@doc """
Ensures the project structure exists.

In case it does exist, it is a no-op. Otherwise, it is built.
"""
def ensure_structure(config \\ config(), opts \\ []) do
  case File.exists?(app_path(config)) do
    true -> :ok
    false -> build_structure(config, opts)
  end
end
@doc """
Returns all load paths for this project.
"""
def load_paths(config \\ config()) do
  # Umbrella projects expose no load paths of their own.
  case umbrella?(config) do
    true -> []
    false -> [compile_path(config)]
  end
end
# Loads mix.exs in the current directory or loads the project from the
# mixfile cache and pushes the project to the project stack.
defp load_project(app, post_config) do
# Stash post_config so it is merged when the project is pushed.
Mix.ProjectStack.post_config(post_config)
if cached = Mix.ProjectStack.read_cache(app) do
# Cache hit: re-push the already-loaded project module.
{project, file} = cached
push(project, file, app)
project
else
file = Path.expand("mix.exs")
old_proj = get()
new_proj =
if File.regular?(file) do
_ = Code.load_file(file)
# Loading mix.exs fires __after_compile__/2, which pushes the project;
# an unchanged stack top means the file defined no project.
case get() do
^old_proj -> Mix.raise "Could not find a Mix project at #{file}"
new_proj -> new_proj
end
else
# No mixfile: push an anonymous entry so the stack stays balanced
# (in_project/4 unconditionally pops afterwards).
file = "nofile"
push(nil, file, app)
nil
end
Mix.ProjectStack.write_cache(app, {new_proj, file})
new_proj
end
end
# Baseline configuration every project starts from; keys returned by the
# project's `project/0` override these when merged in push/3.
defp default_config do
[aliases: [],
build_per_environment: true,
build_embedded: false,
consolidate_protocols: true,
default_task: "run",
deps: [],
deps_path: "deps",
elixirc_paths: ["lib"],
erlc_paths: ["src"],
erlc_include_path: "include",
erlc_options: [:debug_info],
lockfile: "mix.lock",
preferred_cli_env: [],
start_permanent: false]
end
# A `nil` project (pushed by load_project/2 when no mixfile exists)
# contributes no configuration; otherwise ask the module for `project/0`.
defp get_project_config(nil), do: []

defp get_project_config(module) do
  module.project
end
end
| 26.426049
| 103
| 0.645811
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.