hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e65ae992ac4344dc4c38ea1cf6bbc822d7b95b1
| 17,760
|
ex
|
Elixir
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/sites.ex
|
ukrbublik/elixir-google-api
|
364cec36bc76f60bec94cbcad34844367a29d174
|
[
"Apache-2.0"
] | null | null | null |
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/sites.ex
|
ukrbublik/elixir-google-api
|
364cec36bc76f60bec94cbcad34844367a29d174
|
[
"Apache-2.0"
] | null | null | null |
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/sites.ex
|
ukrbublik/elixir-google-api
|
364cec36bc76f60bec94cbcad34844367a29d174
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Api.Sites do
  @moduledoc """
  API calls for all endpoints tagged `Sites`.
  """

  alias GoogleApi.DFAReporting.V33.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version, attached to every outgoing request for telemetry.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Gets one site by ID.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `id` (*type:* `String.t`) - Site ID.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.Site{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_sites_get(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.Site.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_sites_get(connection, profile_id, id, optional_params \\ [], opts \\ []) do
    # Maps each supported optional parameter to where it is placed in the request.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/sites/{id}", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
        # `id` is the final path segment; the extra `?/` predicate lets slashes pass through.
        "id" => URI.encode(id, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Site{}])
  end

  @doc """
  Inserts a new site.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Site.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.Site{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_sites_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.Site.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_sites_insert(connection, profile_id, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      # The Site payload travels in the request body, unlike the query params above.
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/sites", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Site{}])
  end

  @doc """
  Retrieves a list of sites, possibly filtered. This method supports paging.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:acceptsInStreamVideoPlacements` (*type:* `boolean()`) - This search filter is no longer supported and will have no effect on the results returned.
      *   `:acceptsInterstitialPlacements` (*type:* `boolean()`) - This search filter is no longer supported and will have no effect on the results returned.
      *   `:acceptsPublisherPaidPlacements` (*type:* `boolean()`) - Select only sites that accept publisher paid placements.
      *   `:adWordsSite` (*type:* `boolean()`) - Select only AdWords sites.
      *   `:approved` (*type:* `boolean()`) - Select only approved sites.
      *   `:campaignIds` (*type:* `list(String.t)`) - Select only sites with these campaign IDs.
      *   `:directorySiteIds` (*type:* `list(String.t)`) - Select only sites with these directory site IDs.
      *   `:ids` (*type:* `list(String.t)`) - Select only sites with these IDs.
      *   `:maxResults` (*type:* `integer()`) - Maximum number of results to return.
      *   `:pageToken` (*type:* `String.t`) - Value of the nextPageToken from the previous result page.
      *   `:searchString` (*type:* `String.t`) - Allows searching for objects by name, ID or keyName. Wildcards (*) are allowed. For example, "site*2015" will return objects with names like "site June 2015", "site April 2015", or simply "site 2015". Most of the searches also add wildcards implicitly at the start and the end of the search string. For example, a search string of "site" will match objects with name "my site", "site 2015", or simply "site".
      *   `:sortField` (*type:* `String.t`) - Field by which to sort the list.
      *   `:sortOrder` (*type:* `String.t`) - Order of sorted results.
      *   `:subaccountId` (*type:* `String.t`) - Select only sites with this subaccount ID.
      *   `:unmappedSite` (*type:* `boolean()`) - Select only sites that have not been mapped to a directory site.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.SitesListResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_sites_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.SitesListResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_sites_list(connection, profile_id, optional_params \\ [], opts \\ []) do
    # All filters are simple query-string parameters on the collection URL.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :acceptsInStreamVideoPlacements => :query,
      :acceptsInterstitialPlacements => :query,
      :acceptsPublisherPaidPlacements => :query,
      :adWordsSite => :query,
      :approved => :query,
      :campaignIds => :query,
      :directorySiteIds => :query,
      :ids => :query,
      :maxResults => :query,
      :pageToken => :query,
      :searchString => :query,
      :sortField => :query,
      :sortOrder => :query,
      :subaccountId => :query,
      :unmappedSite => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/sites", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.SitesListResponse{}])
  end

  @doc """
  Updates an existing site. This method supports patch semantics.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:id` (*type:* `String.t`) - Site ID.
      *   `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Site.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.Site{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_sites_patch(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.Site.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_sites_patch(connection, profile_id, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      # For PATCH the site id is sent as a query parameter, not a path segment.
      :id => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:patch)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/sites", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Site{}])
  end

  @doc """
  Updates an existing site.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Site.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.Site{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_sites_update(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.Site.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_sites_update(connection, profile_id, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:put)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/sites", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Site{}])
  end
end
| 48.92562
| 457
| 0.624606
|
9e66832457266469df58dbfb8db58b1c9d7328b3
| 90
|
ex
|
Elixir
|
web/views/live_view.ex
|
jschoch/unpolydrab
|
419fa9ac2e23edcc347dc96102caffaedc0beb36
|
[
"MIT"
] | null | null | null |
web/views/live_view.ex
|
jschoch/unpolydrab
|
419fa9ac2e23edcc347dc96102caffaedc0beb36
|
[
"MIT"
] | null | null | null |
web/views/live_view.ex
|
jschoch/unpolydrab
|
419fa9ac2e23edcc347dc96102caffaedc0beb36
|
[
"MIT"
] | null | null | null |
defmodule DrabTestApp.LiveView do
  # View module for the live-view templates; all rendering helpers come from
  # the shared `:view` definition in DrabTestApp.Web.
  @moduledoc false
  use DrabTestApp.Web, :view
end
| 12.857143
| 33
| 0.755556
|
9e669c09c3d301b9d31129bcbeddf81b8eb4d511
| 978
|
ex
|
Elixir
|
lib/tix.ex
|
elitau/tix
|
2aa5fe4d91e7962ebcdc9b668aacf65e09ff9bb8
|
[
"MIT"
] | 1
|
2021-08-16T18:52:45.000Z
|
2021-08-16T18:52:45.000Z
|
lib/tix.ex
|
elitau/tix
|
2aa5fe4d91e7962ebcdc9b668aacf65e09ff9bb8
|
[
"MIT"
] | 16
|
2021-03-09T19:39:31.000Z
|
2022-03-15T15:20:24.000Z
|
lib/tix.ex
|
elitau/tix
|
2aa5fe4d91e7962ebcdc9b668aacf65e09ff9bb8
|
[
"MIT"
] | null | null | null |
defmodule Tix do
  require Logger

  @moduledoc """
  Documentation for Tix.
  """

  @doc """
  Starts the Tix supervisor that watches the file system and runs the appropriate test(s).
  """
  def start do
    # Watched files are recompiled on save; without this option every recompile
    # would emit "redefining module" warnings.
    Code.compiler_options(ignore_module_conflict: true)
    {:ok, _started_apps} = Application.ensure_all_started(:file_system, :permanent)
    Tix.Supervisor.start_link()
  end

  @doc """
  Returns `true` when the Tix supervisor is running, `false` otherwise.
  """
  def running? do
    # `Process.whereis/1` yields a pid or nil; convert to a real boolean so this
    # `?`-named predicate honours the convention (it previously leaked the pid).
    is_pid(Process.whereis(Tix.Supervisor))
  end

  def run do
    # changed_file |> select_tests() |> execute_tests()
    # manually_chosen_test |> execute_tests()
  end

  @doc """
  Pin a test so that only this test will be executed on save.
  """
  def pin(path) do
    Tix.PinnedTest.pin(path)
  end

  @doc """
  Unpin any previously pinned test.
  """
  def unpin do
    Tix.PinnedTest.unpin()
  end

  @doc """
  Logs `content` at debug level and returns it unchanged, so the call can sit
  in the middle of a pipeline. Non-binary terms are `inspect/1`-ed first.
  """
  def debug(content) when is_binary(content) do
    :ok = content |> Logger.debug()
    content
  end

  def debug(content) do
    content |> inspect() |> debug()
  end
end
| 19.959184
| 90
| 0.668712
|
9e669f06636582b687565cebd194017e6f7d6b5e
| 399
|
exs
|
Elixir
|
mix.exs
|
qwertystop/dominions5.analyst
|
7c39ea14640b3a2d2c70df249688bc501f2f2184
|
[
"MIT"
] | 1
|
2019-07-04T00:39:31.000Z
|
2019-07-04T00:39:31.000Z
|
mix.exs
|
qwertystop/dominions5.analyst
|
7c39ea14640b3a2d2c70df249688bc501f2f2184
|
[
"MIT"
] | null | null | null |
mix.exs
|
qwertystop/dominions5.analyst
|
7c39ea14640b3a2d2c70df249688bc501f2f2184
|
[
"MIT"
] | null | null | null |
defmodule TurnStructure.Mixfile do
  use Mix.Project

  # Umbrella project definition: the actual applications live under `apps/`.
  def project do
    [
      apps_path: "apps",
      # Parenthesised zero-arity call, as required by the formatter/credo.
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Dependencies listed here are available only for this
  # project and cannot be accessed from applications inside
  # the apps folder.
  #
  # Run "mix help deps" for examples and options.
  defp deps do
    []
  end
end
| 19
| 59
| 0.656642
|
9e66c2b4d01039bf232261e16636c5755edf482d
| 9,388
|
ex
|
Elixir
|
lib/protobuf/encoder.ex
|
jechol/protobuf
|
8a361e684da2b14d0e68a02372888833e4929c91
|
[
"MIT"
] | null | null | null |
lib/protobuf/encoder.ex
|
jechol/protobuf
|
8a361e684da2b14d0e68a02372888833e4929c91
|
[
"MIT"
] | null | null | null |
lib/protobuf/encoder.ex
|
jechol/protobuf
|
8a361e684da2b14d0e68a02372888833e4929c91
|
[
"MIT"
] | null | null | null |
defmodule Protobuf.Encoder do
  @moduledoc false

  import Protobuf.WireTypes
  import Bitwise, only: [bsr: 2, band: 2, bsl: 2, bor: 2]

  alias Protobuf.{MessageProps, FieldProps}

  # Encodes `msg` for module `mod`; plain maps are first converted to the
  # message struct via `mod.new/1`.
  @spec encode(atom, map | struct, keyword) :: iodata
  def encode(mod, msg, opts) do
    case msg do
      %{__struct__: ^mod} ->
        encode(msg, opts)

      _ ->
        encode(mod.new(msg), opts)
    end
  end

  # Encodes `struct` to protobuf wire format. Pass `iolist: true` to receive
  # raw iodata instead of a flattened binary.
  @spec encode(struct, keyword) :: iodata
  def encode(%mod{} = struct, opts \\ []) do
    res = encode!(struct, mod.__message_props__())

    case Keyword.fetch(opts, :iolist) do
      {:ok, true} -> res
      _ -> IO.iodata_to_binary(res)
    end
  end

  @spec encode!(struct, MessageProps.t()) :: iodata
  def encode!(struct, %{field_props: field_props} = props) do
    syntax = props.syntax
    oneofs = oneof_actual_vals(props, struct)

    encoded = encode_fields(Map.values(field_props), syntax, struct, oneofs, [])

    # proto2 messages may carry extension fields in addition to regular ones.
    encoded =
      if syntax == :proto2 do
        encode_extensions(struct, encoded)
      else
        encoded
      end

    # Fields are accumulated by prepending, so reverse once at the end.
    encoded
    |> Enum.reverse()
  catch
    {e, msg, st} ->
      reraise e, msg, st
  end

  defp encode_fields([], _, _, _, acc) do
    acc
  end

  defp encode_fields([prop | tail], syntax, struct, oneofs, acc) do
    %{name_atom: name, oneof: oneof} = prop

    # Oneof members take their value from the pre-extracted oneofs map;
    # everything else reads the struct field directly.
    val =
      if oneof do
        oneofs[name]
      else
        case struct do
          %{^name => v} ->
            v

          _ ->
            nil
        end
      end

    if skip_field?(syntax, val, prop) || skip_enum?(prop, val) do
      encode_fields(tail, syntax, struct, oneofs, acc)
    else
      acc = [encode_field(class_field(prop), val, prop) | acc]
      encode_fields(tail, syntax, struct, oneofs, acc)
    end
  rescue
    error ->
      msg =
        "Got error when encoding #{inspect(struct.__struct__)}##{prop.name_atom}: #{
          Exception.format(:error, error)
        }"

      # Thrown (not raised) so encode!/2 can reraise with the original stacktrace.
      throw({Protobuf.EncodeError, [message: msg], __STACKTRACE__})
  end

  @doc false
  def skip_field?(syntax, val, prop)
  def skip_field?(_, [], _), do: true
  # `map_size/1` in the guard restricts this clause to empty maps.
  def skip_field?(_, v, _) when map_size(v) == 0, do: true
  def skip_field?(:proto2, nil, %{optional?: true}), do: true
  def skip_field?(:proto3, nil, _), do: true
  # proto3 omits default scalar values unless the field belongs to a oneof.
  def skip_field?(:proto3, 0, %{oneof: nil}), do: true
  def skip_field?(:proto3, 0.0, %{oneof: nil}), do: true
  def skip_field?(:proto3, "", %{oneof: nil}), do: true
  def skip_field?(:proto3, false, %{oneof: nil}), do: true
  def skip_field?(_, _, _), do: false

  # Scalar (non-embedded, non-packed) field: tag + encoded value per element.
  @spec encode_field(atom, any, FieldProps.t()) :: iodata
  defp encode_field(:normal, val, %{encoded_fnum: fnum, type: type, repeated?: is_repeated}) do
    repeated_or_not(val, is_repeated, fn v ->
      [fnum | encode_type(type, v)]
    end)
  end

  # Embedded message (or map entry): length-delimited sub-message per element.
  defp encode_field(
         :embedded,
         val,
         %{encoded_fnum: fnum, repeated?: is_repeated, map?: is_map, type: type} = prop
       ) do
    repeated = is_repeated || is_map

    repeated_or_not(val, repeated, fn v ->
      # Map entries are encoded as synthetic {key, value} messages.
      v = if is_map, do: struct(prop.type, %{key: elem(v, 0), value: elem(v, 1)}), else: v
      # so that oneof {:atom, v} can be encoded
      encoded = encode(type, v, iolist: true)
      byte_size = IO.iodata_length(encoded)
      [fnum | encode_varint(byte_size)] ++ encoded
    end)
  end

  # Packed repeated scalar: one tag, then the length-prefixed concatenation.
  defp encode_field(:packed, val, %{type: type, encoded_fnum: fnum}) do
    encoded = Enum.map(val, fn v -> encode_type(type, v) end)
    byte_size = IO.iodata_length(encoded)
    [fnum | encode_varint(byte_size)] ++ encoded
  end

  # Classifies a field into one of the three encode_field/3 strategies.
  @spec class_field(map) :: atom
  defp class_field(%{wire_type: wire_delimited(), embedded?: true}) do
    :embedded
  end

  defp class_field(%{repeated?: true, packed?: true}) do
    :packed
  end

  defp class_field(_) do
    :normal
  end

  @doc false
  # Encodes a field number + wire type into the tag byte(s): (fnum << 3) | wire_type.
  @spec encode_fnum(integer, integer) :: binary
  def encode_fnum(fnum, wire_type) do
    fnum
    |> bsl(3)
    |> bor(wire_type)
    |> encode_varint()
    |> IO.iodata_to_binary()
  end

  @doc false
  @spec encode_type(atom, any) :: iodata
  def encode_type(:int32, n) when n >= -0x80000000 and n <= 0x7FFFFFFF, do: encode_varint(n)

  def encode_type(:int64, n) when n >= -0x8000000000000000 and n <= 0x7FFFFFFFFFFFFFFF,
    do: encode_varint(n)

  def encode_type(:string, n), do: encode_type(:bytes, n)
  def encode_type(:uint32, n) when n >= 0 and n <= 0xFFFFFFFF, do: encode_varint(n)
  def encode_type(:uint64, n) when n >= 0 and n <= 0xFFFFFFFFFFFFFFFF, do: encode_varint(n)
  def encode_type(:bool, true), do: encode_varint(1)
  def encode_type(:bool, false), do: encode_varint(0)
  def encode_type({:enum, type}, n) when is_atom(n), do: n |> type.value() |> encode_varint()
  def encode_type({:enum, _}, n), do: encode_varint(n)
  # IEEE-754 special values have fixed little-endian byte patterns.
  def encode_type(:float, :infinity), do: [0, 0, 128, 127]
  def encode_type(:float, :negative_infinity), do: [0, 0, 128, 255]
  def encode_type(:float, :nan), do: [0, 0, 192, 127]
  def encode_type(:float, n), do: <<n::32-float-little>>
  def encode_type(:double, :infinity), do: [0, 0, 0, 0, 0, 0, 240, 127]
  def encode_type(:double, :negative_infinity), do: [0, 0, 0, 0, 0, 0, 240, 255]
  def encode_type(:double, :nan), do: [1, 0, 0, 0, 0, 0, 248, 127]
  def encode_type(:double, n), do: <<n::64-float-little>>

  def encode_type(:bytes, n) do
    len = n |> IO.iodata_length() |> encode_varint()
    len ++ n
  end

  def encode_type(:sint32, n) when n >= -0x80000000 and n <= 0x7FFFFFFF,
    do: n |> encode_zigzag |> encode_varint

  def encode_type(:sint64, n) when n >= -0x8000000000000000 and n <= 0x7FFFFFFFFFFFFFFF,
    do: n |> encode_zigzag |> encode_varint

  def encode_type(:fixed64, n) when n >= 0 and n <= 0xFFFFFFFFFFFFFFFF, do: <<n::64-little>>

  def encode_type(:sfixed64, n) when n >= -0x8000000000000000 and n <= 0x7FFFFFFFFFFFFFFF,
    do: <<n::64-signed-little>>

  def encode_type(:fixed32, n) when n >= 0 and n <= 0xFFFFFFFF, do: <<n::32-little>>

  def encode_type(:sfixed32, n) when n >= -0x80000000 and n <= 0x7FFFFFFF,
    do: <<n::32-signed-little>>

  # Out-of-range or unknown type/value combinations are a caller error.
  def encode_type(type, n) do
    raise Protobuf.TypeEncodeError, message: "#{inspect(n)} is invalid for type #{type}"
  end

  # ZigZag-maps signed integers onto unsigned ones so small magnitudes stay small.
  @spec encode_zigzag(integer) :: integer
  defp encode_zigzag(val) when val >= 0, do: val * 2
  defp encode_zigzag(val) when val < 0, do: val * -2 - 1

  @doc false
  @spec encode_varint(integer) :: iolist
  def encode_varint(n) when n < 0 do
    # Reinterpret the negative integer as its unsigned 64-bit two's complement.
    <<n::64-unsigned-native>> = <<n::64-signed-native>>
    encode_varint(n)
  end

  def encode_varint(n) when n <= 127 do
    [n]
  end

  def encode_varint(n) do
    # Low 7 bits with the continuation bit set, then recurse on the rest.
    [<<1::1, band(n, 127)::7>> | encode_varint(bsr(n, 7))]
  end

  @doc false
  @spec wire_type(atom) :: integer
  def wire_type(:int32), do: wire_varint()
  def wire_type(:int64), do: wire_varint()
  def wire_type(:uint32), do: wire_varint()
  def wire_type(:uint64), do: wire_varint()
  def wire_type(:sint32), do: wire_varint()
  def wire_type(:sint64), do: wire_varint()
  def wire_type(:bool), do: wire_varint()
  def wire_type({:enum, _}), do: wire_varint()
  def wire_type(:enum), do: wire_varint()
  def wire_type(:fixed64), do: wire_64bits()
  def wire_type(:sfixed64), do: wire_64bits()
  def wire_type(:double), do: wire_64bits()
  def wire_type(:string), do: wire_delimited()
  def wire_type(:bytes), do: wire_delimited()
  def wire_type(:fixed32), do: wire_32bits()
  def wire_type(:sfixed32), do: wire_32bits()
  def wire_type(:float), do: wire_32bits()
  # Any other atom is an embedded message module: length-delimited.
  def wire_type(mod) when is_atom(mod), do: wire_delimited()

  # Applies `func` to every element when `repeated`, or to the single value.
  defp repeated_or_not(val, repeated, func) do
    if repeated do
      Enum.map(val, func)
    else
      func.(val)
    end
  end

  defp skip_enum?(prop, value)
  defp skip_enum?(%{enum?: false}, _), do: false
  defp skip_enum?(%{enum?: true, oneof: oneof}, _) when not is_nil(oneof), do: false
  defp skip_enum?(%{required?: true}, _), do: false
  defp skip_enum?(%{type: type}, value), do: is_enum_default?(type, value)

  defp is_enum_default?({_, type}, v) when is_atom(v), do: type.value(v) == 0
  defp is_enum_default?({_, _}, v) when is_integer(v), do: v == 0
  defp is_enum_default?({_, _}, _), do: false

  # Collects the {tag, value} currently set for each oneof group, validating
  # that the set tag actually belongs to that group.
  defp oneof_actual_vals(
         %{field_tags: field_tags, field_props: field_props, oneof: oneof},
         struct
       ) do
    Enum.reduce(oneof, %{}, fn {field, index}, acc ->
      case Map.get(struct, field, nil) do
        {f, val} ->
          %{oneof: oneof} = field_props[field_tags[f]]

          if oneof != index do
            raise Protobuf.EncodeError,
              message: ":#{f} doesn't belongs to #{inspect(struct.__struct__)}##{field}"
          else
            Map.put(acc, f, val)
          end

        nil ->
          acc

        _ ->
          raise Protobuf.EncodeError,
            message: "#{inspect(struct.__struct__)}##{field} should be {key, val} or nil"
      end
    end)
  end

  defp encode_extensions(%mod{__pb_extensions__: pb_exts}, encoded) when is_map(pb_exts) do
    Enum.reduce(pb_exts, encoded, fn {{ext_mod, key}, val}, acc ->
      case Protobuf.Extension.get_extension_props(mod, ext_mod, key) do
        %{field_props: prop} ->
          if skip_field?(:proto2, val, prop) || skip_enum?(prop, val) do
            # BUG FIX: previously returned `encoded` (the reduce's *initial*
            # accumulator) here, which silently discarded every extension
            # already encoded earlier in this pass whenever a later one was
            # skipped. The running accumulator must be threaded through.
            acc
          else
            [encode_field(class_field(prop), val, prop) | acc]
          end

        _ ->
          acc
      end
    end)
  end

  defp encode_extensions(_, encoded) do
    encoded
  end
end
| 31.086093
| 95
| 0.623775
|
9e66ca5fd95e5ebe4e1f164bf7538261b1a6b28f
| 1,348
|
ex
|
Elixir
|
apps/omg_watcher/lib/challenger/challenge.ex
|
Pongch/elixir-omg
|
8a33c246898b49cba62b847e0989d9b6c89f5106
|
[
"Apache-2.0"
] | null | null | null |
apps/omg_watcher/lib/challenger/challenge.ex
|
Pongch/elixir-omg
|
8a33c246898b49cba62b847e0989d9b6c89f5106
|
[
"Apache-2.0"
] | null | null | null |
apps/omg_watcher/lib/challenger/challenge.ex
|
Pongch/elixir-omg
|
8a33c246898b49cba62b847e0989d9b6c89f5106
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.Challenger.Challenge do
  @moduledoc """
  Represents a challenge
  """

  # NOTE: eutxoindex and cutxopos names were chosen for consistency with Solidity contract source code
  # eutoxoindex is index of exiting utxo in challenging transaction
  # cutxopos is position of challenging utxo
  defstruct cutxopos: 0, eutxoindex: 0, txbytes: nil, proof: nil, sigs: nil

  @type t() :: %__MODULE__{
          cutxopos: non_neg_integer(),
          eutxoindex: non_neg_integer(),
          txbytes: String.t(),
          proof: String.t(),
          sigs: String.t()
        }

  @doc """
  Builds a challenge struct from its five components.
  """
  @spec create(non_neg_integer(), non_neg_integer(), String.t(), String.t(), String.t()) :: t()
  def create(cutxopos, eutxoindex, txbytes, proof, sigs) do
    struct!(__MODULE__,
      cutxopos: cutxopos,
      eutxoindex: eutxoindex,
      txbytes: txbytes,
      proof: proof,
      sigs: sigs
    )
  end
end
| 36.432432
| 103
| 0.718101
|
9e66ed36dfda02e85629c387c99e37c797e1aa51
| 1,320
|
exs
|
Elixir
|
phoenix_commerce/config/dev.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
phoenix_commerce/config/dev.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
phoenix_commerce/config/dev.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
use Mix.Config

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :phoenix_commerce, PhoenixCommerce.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  # Accept websocket connections from any origin (safe only in dev).
  check_origin: false,
  watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin"]]

# Watch static and templates for browser reloading.
# NOTE: this second `config` call for the same Endpoint key is deep-merged
# with the one above by Mix.Config.
config :phoenix_commerce, PhoenixCommerce.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{web/views/.*(ex)$},
      ~r{web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development.
# Do not configure such in production as keeping
# and calculating stacktraces is usually expensive.
config :phoenix, :stacktrace_depth, 20

# Configure your database (local Postgres with default credentials).
config :phoenix_commerce, PhoenixCommerce.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "phoenix_commerce_dev",
  hostname: "localhost",
  pool_size: 10
| 30.697674
| 74
| 0.72197
|
9e672a250e1f539db5fa7947b97e8f996a26ea17
| 1,343
|
ex
|
Elixir
|
lib/bitcoin/protocol/messages/reject.ex
|
anthdm/bitcoin-elixir
|
5ca9f8bf4e9b2b38527670e80568a85e0aa612c0
|
[
"Apache-2.0"
] | 81
|
2017-04-20T17:42:59.000Z
|
2022-02-08T03:49:22.000Z
|
lib/bitcoin/protocol/messages/reject.ex
|
anthdm/bitcoin-elixir
|
5ca9f8bf4e9b2b38527670e80568a85e0aa612c0
|
[
"Apache-2.0"
] | 2
|
2017-09-07T13:58:57.000Z
|
2018-01-13T10:38:07.000Z
|
lib/bitcoin/protocol/messages/reject.ex
|
anthdm/bitcoin-elixir
|
5ca9f8bf4e9b2b38527670e80568a85e0aa612c0
|
[
"Apache-2.0"
] | 22
|
2017-08-16T14:19:44.000Z
|
2021-12-22T04:36:57.000Z
|
defmodule Bitcoin.Protocol.Messages.Reject do
  @moduledoc """
  The reject message is sent when messages are rejected.
  https://en.bitcoin.it/wiki/Protocol_specification#reject
  """

  alias Bitcoin.Protocol.Types.VarString

  # Protocol-defined one-byte rejection codes (ccode) mapped to atoms.
  @reject_reasons %{
    0x01 => :malformed,
    0x10 => :invalid,
    0x11 => :obsolete,
    0x12 => :duplicate,
    0x40 => :nonstandard,
    0x41 => :dust,
    0x42 => :insufficientfee,
    0x43 => :checkpoint
  }

  defstruct message: "", # type of message rejected
            code: 0,     # code relating to the rejected message (atom after parse/1)
            reason: "",  # text version of the reason for rejection
            data: <<>>   # Optional extra data provided by some errors. Currently, all errors which provide this field
                         # fill it with the TXID or block header hash of the object being rejected, so the field is 32 bytes.

  # `code` is the raw integer default (0) before parsing and an atom from
  # @reject_reasons (or nil for an unknown code) after parse/1.
  @type t :: %__MODULE__{
    message: binary,
    code: non_neg_integer | atom | nil,
    reason: binary,
    data: binary
  }

  @doc """
  Parses the payload of a reject message into a struct.
  """
  @spec parse(binary) :: t
  def parse(data) do
    {message, payload} = VarString.parse_stream(data)
    # BUGFIX: decode the code byte as an integer (`::8`), not as a 1-byte
    # binary (`::bytes-size(1)`) — @reject_reasons is keyed by integers, so a
    # binary key would make Map.get/2 always return nil.
    <<code::8, payload::binary>> = payload
    {reason, data} = VarString.parse_stream(payload)

    %__MODULE__{
      message: message,
      code: Map.get(@reject_reasons, code),
      reason: reason,
      data: data
    }
  end
end
| 25.826923
| 123
| 0.63589
|
9e674f5f62dac27d965baafde7b872478d1b8ce4
| 425
|
exs
|
Elixir
|
test/roleboard_web/views/error_view_test.exs
|
JuneShores/roleboard
|
76a3b1f68f65aa5b18866ed62cd018235f7b9b13
|
[
"MIT"
] | null | null | null |
test/roleboard_web/views/error_view_test.exs
|
JuneShores/roleboard
|
76a3b1f68f65aa5b18866ed62cd018235f7b9b13
|
[
"MIT"
] | 2
|
2021-03-10T04:37:08.000Z
|
2021-05-11T00:20:22.000Z
|
test/roleboard_web/views/error_view_test.exs
|
JuneShores/roleboard
|
76a3b1f68f65aa5b18866ed62cd018235f7b9b13
|
[
"MIT"
] | null | null | null |
defmodule RoleboardWeb.ErrorViewTest do
  use RoleboardWeb.ConnCase, async: true

  # render_to_string/3 renders a view template to plain text without a conn.
  import Phoenix.View

  describe "error templates" do
    test "renders 404.html" do
      rendered = render_to_string(RoleboardWeb.ErrorView, "404.html", [])
      assert rendered == "Not Found"
    end

    test "renders 500.html" do
      rendered = render_to_string(RoleboardWeb.ErrorView, "500.html", [])
      assert rendered == "Internal Server Error"
    end
  end
end
| 28.333333
| 94
| 0.738824
|
9e6750bae9aa71f5dbe0969bedabe3988642ae93
| 131
|
ex
|
Elixir
|
debian/rtchmod.cron.d.ex
|
tarhovalex/rtchmod
|
35b207cfb0974fab7444ef9bf0b93c38822e2bd7
|
[
"MIT"
] | null | null | null |
debian/rtchmod.cron.d.ex
|
tarhovalex/rtchmod
|
35b207cfb0974fab7444ef9bf0b93c38822e2bd7
|
[
"MIT"
] | 5
|
2018-07-31T15:16:02.000Z
|
2021-12-05T23:08:52.000Z
|
debian/rtchmod.cron.d.ex
|
tarhovalex/rtchmod
|
35b207cfb0974fab7444ef9bf0b93c38822e2bd7
|
[
"MIT"
] | 1
|
2015-11-15T21:44:51.000Z
|
2015-11-15T21:44:51.000Z
|
#
# Regular cron jobs for the rtchmod package
#
0 4 * * * root [ -x /usr/bin/rtchmod_maintenance ] && /usr/bin/rtchmod_maintenance
| 26.2
| 82
| 0.70229
|
9e6788167556295ab0d3c142f4feeb88d4575314
| 1,432
|
exs
|
Elixir
|
test/petal/avatar_test.exs
|
MortadaAK/petal_components
|
69980b96afec232715071d75d3a394ac6daf75b3
|
[
"MIT"
] | null | null | null |
test/petal/avatar_test.exs
|
MortadaAK/petal_components
|
69980b96afec232715071d75d3a394ac6daf75b3
|
[
"MIT"
] | null | null | null |
test/petal/avatar_test.exs
|
MortadaAK/petal_components
|
69980b96afec232715071d75d3a394ac6daf75b3
|
[
"MIT"
] | null | null | null |
defmodule PetalComponents.AvatarTest do
  use ComponentCase

  import PetalComponents.Avatar

  # Passing `src` renders an <img> tag.
  test "it renders the avatar correctly" do
    assigns = %{}

    html =
      rendered_to_string(~H"""
      <.avatar src="image.png" />
      """)

    assert html =~ "<img"
  end

  # Without a src or name, the component falls back to an SVG placeholder.
  test "it renders the avatar with placeholder" do
    assigns = %{}

    html =
      rendered_to_string(~H"""
      <.avatar />
      """)

    assert html =~ "<svg"
  end

  # Avatar groups wrap avatars in a container with negative horizontal
  # spacing so they overlap.
  test "it renders a group of avatars with images" do
    assigns = %{}

    html =
      rendered_to_string(~H"""
      <.avatar_group avatars={[
        "image.png",
        "image.png",
        "image.png",
        "image.png",
      ]} size="xs" class="inline-block"/>
      """)

    assert html =~ "<div"
    assert html =~ "-space-x-"
  end

  # A `name` renders an initials avatar as a styled <div>.
  test "it renders the avatar with initials" do
    assigns = %{}

    html =
      rendered_to_string(~H"""
      <.avatar name="John Smith" />
      """)

    assert html =~ "<div style"
  end

  # `random_color` sets an inline background-color derived from the name.
  test "it renders the avatar with initials and randomly generates the color correctly" do
    assigns = %{}

    html =
      rendered_to_string(~H"""
      <.avatar name="John Smith" random_color />
      """)

    assert html =~ "background-color:"
  end

  # The placeholder markup must carry dark-mode (dark:) utility classes.
  test "dark mode" do
    assigns = %{}

    html =
      rendered_to_string(~H"""
      <.avatar />
      """)

    assert html =~ "<svg"
    assert html =~ "dark:"
  end
end
| 18.358974
| 90
| 0.553073
|
9e67945b48e6d8bcbf1ce02e6c50008ccf715d07
| 3,339
|
ex
|
Elixir
|
apps/omg_watcher/lib/omg_watcher/block_getter/supervisor.ex
|
omisego/elixir-omg
|
2c68973d8f29033d137f63a6e060f12e2a7dcd59
|
[
"Apache-2.0"
] | 177
|
2018-08-24T03:51:02.000Z
|
2020-05-30T13:29:25.000Z
|
apps/omg_watcher/lib/omg_watcher/block_getter/supervisor.ex
|
omisego/elixir-omg
|
2c68973d8f29033d137f63a6e060f12e2a7dcd59
|
[
"Apache-2.0"
] | 1,042
|
2018-08-25T00:52:39.000Z
|
2020-06-01T05:15:17.000Z
|
apps/omg_watcher/lib/omg_watcher/block_getter/supervisor.ex
|
omisego/elixir-omg
|
2c68973d8f29033d137f63a6e060f12e2a7dcd59
|
[
"Apache-2.0"
] | 47
|
2018-08-24T12:06:33.000Z
|
2020-04-28T11:49:25.000Z
|
# Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.BlockGetter.Supervisor do
  @moduledoc """
  This supervisor takes care of BlockGetter and State processes.
  In case one process fails, this supervisor's role is to restore consistent state
  """
  use Supervisor

  require Logger

  alias OMG.Watcher.BlockGetter
  alias OMG.Watcher.Configuration

  # Starts the supervisor; `args` must contain :contract_deployment_height.
  def start_link(args) do
    Supervisor.start_link(__MODULE__, args, name: __MODULE__)
  end

  def init(args) do
    # Ethereum height the root-chain contract was deployed at — BlockGetter
    # starts syncing from here. Required; crash early if absent.
    contract_deployment_height = Keyword.fetch!(args, :contract_deployment_height)

    # Gather runtime configuration for both children.
    block_getter_reorg_margin = Configuration.block_getter_reorg_margin()
    maximum_block_withholding_time_ms = Configuration.maximum_block_withholding_time_ms()
    maximum_number_of_unapplied_blocks = Configuration.maximum_number_of_unapplied_blocks()
    metrics_collection_interval = Configuration.metrics_collection_interval()
    child_chain_url = Configuration.child_chain_url()
    child_block_interval = OMG.Eth.Configuration.child_block_interval()
    contracts = OMG.Eth.Configuration.contracts()
    block_getter_loops_interval_ms = Configuration.ethereum_events_check_interval_ms()
    # Dummy fee-claimer address — the Watcher never claims fees (see NOTE below).
    fee_claimer_address = Base.decode16!("DEAD000000000000000000000000000000000000")

    # State and Block Getter are linked, because they must restore their state to the last stored state
    # If Block Getter fails, it starts from the last checkpoint while State might have had executed some transactions
    # such a situation will cause error when trying to execute already executed transaction
    children = [
      # NOTE: Watcher doesn't need the actual fee claimer address
      {OMG.Watcher.State,
       [
         fee_claimer_address: fee_claimer_address,
         child_block_interval: child_block_interval,
         metrics_collection_interval: metrics_collection_interval
       ]},
      %{
        id: BlockGetter,
        start:
          {BlockGetter, :start_link,
           [
             [
               child_block_interval: child_block_interval,
               block_getter_reorg_margin: block_getter_reorg_margin,
               maximum_block_withholding_time_ms: maximum_block_withholding_time_ms,
               maximum_number_of_unapplied_blocks: maximum_number_of_unapplied_blocks,
               metrics_collection_interval: metrics_collection_interval,
               block_getter_loops_interval_ms: block_getter_loops_interval_ms,
               child_chain_url: child_chain_url,
               contract_deployment_height: contract_deployment_height,
               contracts: contracts
             ]
           ]},
        # :transient — restart BlockGetter only when it exits abnormally.
        restart: :transient
      }
    ]

    # :one_for_all — if either child dies, both are restarted together so the
    # linked State/BlockGetter pair stays consistent (see comment above).
    opts = [strategy: :one_for_all]

    _ = Logger.info("Starting #{inspect(__MODULE__)}")
    Supervisor.init(children, opts)
  end
end
| 40.719512
| 117
| 0.736149
|
9e67b51c92bc2076b9a7399b11252b098ec3ebfd
| 716
|
ex
|
Elixir
|
lib/secret_grinch_web/controllers/session_controller.ex
|
clorofila-league/secret_grinch
|
b06ac85ff5f06d5405d190ccc9966b01f0406b87
|
[
"Apache-2.0"
] | 3
|
2017-08-03T16:49:18.000Z
|
2018-10-03T03:30:26.000Z
|
lib/secret_grinch_web/controllers/session_controller.ex
|
clorofila-league/secret_grinch
|
b06ac85ff5f06d5405d190ccc9966b01f0406b87
|
[
"Apache-2.0"
] | 18
|
2017-08-04T12:43:08.000Z
|
2017-08-05T14:15:41.000Z
|
lib/secret_grinch_web/controllers/session_controller.ex
|
clorofila-league/secret_grinch
|
b06ac85ff5f06d5405d190ccc9966b01f0406b87
|
[
"Apache-2.0"
] | 1
|
2018-10-03T03:30:29.000Z
|
2018-10-03T03:30:29.000Z
|
defmodule SecretGrinchWeb.SessionController do
  use SecretGrinchWeb, :controller

  alias SecretGrinchWeb.Auth

  # Renders the login form.
  def new(conn, _params) do
    render(conn, "new.html")
  end

  # Authenticates by email/password; on success redirects to the match index,
  # otherwise re-renders the form with an error flash.
  def create(conn, %{"session" => %{"email" => email, "password" => pass}}) do
    case Auth.login_by_email_and_pass(conn, email, pass, repo: SecretGrinch.Repo) do
      {:ok, conn} ->
        conn
        |> put_flash(:info, "Welcome back!")
        |> redirect(to: match_path(conn, :index))

      {:error, _reason, conn} ->
        conn
        |> put_flash(:error, "Invalid name/password combination")
        |> render("new.html")
    end
  end

  # Drops the session and sends the visitor back to the landing page.
  def delete(conn, _params) do
    conn
    |> Auth.logout()
    |> redirect(to: page_path(conn, :index))
  end
end
| 26.518519
| 100
| 0.634078
|
9e67bb7143a85974970e8c1d05f4e92b4250d29a
| 938
|
ex
|
Elixir
|
apps/exsemantica_phx/lib/exsemantica_phx/protect.ex
|
Chlorophytus/exsemantica
|
f1c64cb8ae0543e5a2f015a65071d81d57fa3224
|
[
"Apache-2.0"
] | 1
|
2021-09-11T15:46:04.000Z
|
2021-09-11T15:46:04.000Z
|
apps/exsemantica_phx/lib/exsemantica_phx/protect.ex
|
Chlorophytus/exsemantica
|
f1c64cb8ae0543e5a2f015a65071d81d57fa3224
|
[
"Apache-2.0"
] | 4
|
2021-01-18T00:49:02.000Z
|
2022-02-23T05:18:37.000Z
|
apps/exsemantica_phx/lib/exsemantica_phx/protect.ex
|
Chlorophytus/exsemantica
|
f1c64cb8ae0543e5a2f015a65071d81d57fa3224
|
[
"Apache-2.0"
] | null | null | null |
defmodule ExsemanticaPhx.Protect do
  @moduledoc """
  RSA "contract" key management for site users: generates, stores (DER) and
  looks up per-user RSA keys via JOSE.
  """
  import Ecto.Query

  # RSA modulus size in bits for newly generated keys.
  @paranoia 4096

  @doc """
  Generates an RSA keypair for `username`, stores the DER-encoded key in the
  user's `contract` column, and returns the public JWK.

  Returns nil when the user does not exist or already has a contract.
  """
  def create_contract(username) do
    user_struct =
      ExsemanticaPhx.Repo.one(
        from(u in ExsemanticaPhx.Site.User, where: u.username == ^username and is_nil(u.contract))
      )

    # Idiom fix: a `cond` over is_nil/true is just a nil-check — use `case`.
    case user_struct do
      nil ->
        nil

      user_struct ->
        jwk = JOSE.JWK.generate_key({:rsa, @paranoia})
        {_kty, bin} = JOSE.JWK.to_der(jwk)

        # NOTE(review): the update result is discarded (as in the original),
        # so a failed write still returns the public key — consider matching
        # on {:ok, _} if persistence must be guaranteed.
        ExsemanticaPhx.Repo.update(
          Ecto.Changeset.change(user_struct, %{contract: bin})
        )

        JOSE.JWK.to_public(jwk)
    end
  end

  @doc """
  Fetches the user row for `username`, or nil if absent.
  """
  def find_user(username) do
    ExsemanticaPhx.Repo.one(from(u in ExsemanticaPhx.Site.User, where: u.username == ^username))
  end

  @doc """
  Returns the public JWK decoded from a user's stored DER contract,
  or nil when the user has no contract.
  """
  def find_contract(user) when not is_nil(user.contract) do
    {%{kty: :jose_jwk_kty_rsa}, user.contract}
    |> JOSE.JWK.from_der()
    |> JOSE.JWK.to_public()
  end

  def find_contract(_user), do: nil
end
| 22.878049
| 98
| 0.641791
|
9e67e2c69790ac83ff6c710206b16ba3937822bf
| 2,615
|
ex
|
Elixir
|
lib/html_sanitize_ex/scrubber/markdown_html.ex
|
v1z4/html_sanitize_ex
|
03b0894a492a71083d00a524dc9e1156a529bc65
|
[
"MIT"
] | null | null | null |
lib/html_sanitize_ex/scrubber/markdown_html.ex
|
v1z4/html_sanitize_ex
|
03b0894a492a71083d00a524dc9e1156a529bc65
|
[
"MIT"
] | null | null | null |
lib/html_sanitize_ex/scrubber/markdown_html.ex
|
v1z4/html_sanitize_ex
|
03b0894a492a71083d00a524dc9e1156a529bc65
|
[
"MIT"
] | null | null | null |
defmodule HtmlSanitizeEx.Scrubber.MarkdownHTML do
  @moduledoc """
  Allows basic HTML tags to support user input for writing relatively
  plain text with Markdown (GitHub flavoured Markdown supported).
  Technically this is a more relaxed version of the BasicHTML scrubber.
  Does not allow any mailto-links, styling, HTML5 tags, video embeds etc.
  """

  require HtmlSanitizeEx.Scrubber.Meta
  alias HtmlSanitizeEx.Scrubber.Meta

  # URI schemes permitted in URI-carrying attributes (a[href], img[src]).
  @valid_schemes ["http", "https", "mailto"]

  # Removes any CDATA tags before the traverser/scrubber runs.
  Meta.remove_cdata_sections_before_scrub()
  Meta.strip_comments()

  # Links: href restricted to @valid_schemes, plus a small attribute allowlist
  # with restricted values for target and rel.
  Meta.allow_tag_with_uri_attributes("a", ["href"], @valid_schemes)

  Meta.allow_tag_with_these_attributes("a", [
    "name",
    "title",
    "class",
    "data-thread",
    "data-num"
  ])

  Meta.allow_tag_with_this_attribute_values("a", "target", ["_blank"])

  Meta.allow_tag_with_this_attribute_values("a", "rel", [
    "noopener",
    "noreferrer"
  ])

  # Inline formatting and block-level Markdown output tags, no attributes
  # except where noted (e.g. code[class] for syntax highlighting).
  Meta.allow_tag_with_these_attributes("b", [])
  Meta.allow_tag_with_these_attributes("blockquote", [])
  Meta.allow_tag_with_these_attributes("br", [])
  Meta.allow_tag_with_these_attributes("code", ["class"])
  Meta.allow_tag_with_these_attributes("del", [])
  Meta.allow_tag_with_these_attributes("em", [])

  # NOTE(review): heading tags are deliberately left commented out — confirm
  # whether h1-h6 should remain stripped for this scrubber.
  # Meta.allow_tag_with_these_attributes("h1", [])
  # Meta.allow_tag_with_these_attributes("h2", [])
  # Meta.allow_tag_with_these_attributes("h3", [])
  # Meta.allow_tag_with_these_attributes("h4", [])
  # Meta.allow_tag_with_these_attributes("h5", [])
  # Meta.allow_tag_with_these_attributes("h6", [])

  Meta.allow_tag_with_these_attributes("hr", [])
  Meta.allow_tag_with_these_attributes("i", [])

  # Images: src restricted to @valid_schemes plus sizing/labelling attributes.
  Meta.allow_tag_with_uri_attributes("img", ["src"], @valid_schemes)

  Meta.allow_tag_with_these_attributes("img", [
    "width",
    "height",
    "title",
    "alt"
  ])

  # Lists, paragraphs, preformatted blocks and tables.
  Meta.allow_tag_with_these_attributes("li", [])
  Meta.allow_tag_with_these_attributes("ol", [])
  Meta.allow_tag_with_these_attributes("p", [])
  Meta.allow_tag_with_these_attributes("pre", [])
  Meta.allow_tag_with_these_attributes("span", ["class"])
  Meta.allow_tag_with_these_attributes("strong", [])
  Meta.allow_tag_with_these_attributes("table", [])
  Meta.allow_tag_with_these_attributes("tbody", [])
  Meta.allow_tag_with_these_attributes("td", [])
  Meta.allow_tag_with_these_attributes("th", [])
  Meta.allow_tag_with_these_attributes("thead", [])
  Meta.allow_tag_with_these_attributes("tr", [])
  Meta.allow_tag_with_these_attributes("u", [])
  Meta.allow_tag_with_these_attributes("ul", [])

  # Everything not explicitly allowed above is stripped.
  Meta.strip_everything_not_covered()
end
| 33.101266
| 73
| 0.734608
|
9e67e727932c9085d779bffd622b278aa3815770
| 833
|
exs
|
Elixir
|
test/support/test_factory.exs
|
dark-elixir/dark_testing
|
1969ea2a08f6dbeaa0f3556da0338bae9bbb465b
|
[
"MIT"
] | null | null | null |
test/support/test_factory.exs
|
dark-elixir/dark_testing
|
1969ea2a08f6dbeaa0f3556da0338bae9bbb465b
|
[
"MIT"
] | null | null | null |
test/support/test_factory.exs
|
dark-elixir/dark_testing
|
1969ea2a08f6dbeaa0f3556da0338bae9bbb465b
|
[
"MIT"
] | null | null | null |
defmodule DarkTesting.TestFactory do
  # credo:disable-for-this-file

  @moduledoc """
  Example factory module: each `*_factory/0` function returns default data,
  and `build/2` merges caller-supplied params over those defaults.
  """

  # Minimal fixture struct used by the factory examples below.
  defmodule TestStruct do
    defstruct [:a, :b, :c, :d, :no_matcher, :matcher]
  end

  def example_test_mutation_factory, do: %{no_matcher: []}

  def example_test_with_matcher_mutation_matchers, do: %{matcher: &is_nil/1}

  def example_test_with_matcher_mutation_factory, do: %{matcher: "true"}

  def example_test_with_struct_mutation_factory do
    %TestStruct{no_matcher: true, a: 2, c: 4}
  end

  def example_test_struct_factory, do: %TestStruct{a: 100, b: 100}

  @doc """
  Builds the `name` factory's defaults and merges `params` over them.
  """
  def build(name, params \\ %{}) when is_atom(name) do
    defaults = apply(__MODULE__, :"#{name}_factory", [])
    Map.merge(defaults, params)
  end

  @doc """
  Alias for `build/2`; no persistence layer is involved.
  """
  def insert(name, params \\ %{}) do
    build(name, params)
  end
end
| 20.825
| 54
| 0.683073
|
9e67fa48286a1fdf6a813800c5293c2253841a1f
| 629
|
exs
|
Elixir
|
apps/tai/test/tai/orders/transitions/accept_cancel_test.exs
|
ccamateur/tai
|
41c4b3e09dafc77987fa3f6b300c15461d981e16
|
[
"MIT"
] | 276
|
2018-01-16T06:36:06.000Z
|
2021-03-20T21:48:01.000Z
|
apps/tai/test/tai/orders/transitions/accept_cancel_test.exs
|
ccamateur/tai
|
41c4b3e09dafc77987fa3f6b300c15461d981e16
|
[
"MIT"
] | 73
|
2018-10-05T18:45:06.000Z
|
2021-02-08T05:46:33.000Z
|
apps/tai/test/tai/orders/transitions/accept_cancel_test.exs
|
ccamateur/tai
|
41c4b3e09dafc77987fa3f6b300c15461d981e16
|
[
"MIT"
] | 43
|
2018-06-09T09:54:51.000Z
|
2021-03-07T07:35:17.000Z
|
defmodule Tai.Orders.Transitions.AcceptCancelTest do
  use ExUnit.Case, async: false

  alias Tai.Orders.Transitions

  # attrs/1 should expose exactly the two timestamp fields carried by the
  # AcceptCancel transition as updatable order attributes — nothing more.
  test ".attrs/1 returns a list of updatable order attributes" do
    last_received_at = DateTime.utc_now()
    last_venue_timestamp = DateTime.utc_now()

    transition = %Transitions.AcceptCancel{
      last_received_at: last_received_at,
      last_venue_timestamp: last_venue_timestamp
    }

    attrs = Transitions.AcceptCancel.attrs(transition)

    # Exactly two attributes, matching the transition's fields verbatim.
    assert length(attrs) == 2
    assert attrs[:last_received_at] == last_received_at
    assert attrs[:last_venue_timestamp] == last_venue_timestamp
  end
end
| 31.45
| 65
| 0.758347
|
9e67fb38806c73ae88282a03b2028b17f9e368de
| 28,019
|
exs
|
Elixir
|
test/trento/domain/sap_system/sap_system_test.exs
|
trento-project/web
|
3260b30c781bffbbb0e5205cd650966c4026b9ac
|
[
"Apache-2.0"
] | 1
|
2022-03-22T16:59:34.000Z
|
2022-03-22T16:59:34.000Z
|
test/trento/domain/sap_system/sap_system_test.exs
|
trento-project/web
|
3260b30c781bffbbb0e5205cd650966c4026b9ac
|
[
"Apache-2.0"
] | 24
|
2022-03-22T16:45:25.000Z
|
2022-03-31T13:00:02.000Z
|
test/trento/domain/sap_system/sap_system_test.exs
|
trento-project/web
|
3260b30c781bffbbb0e5205cd650966c4026b9ac
|
[
"Apache-2.0"
] | 1
|
2022-03-30T14:16:16.000Z
|
2022-03-30T14:16:16.000Z
|
defmodule Trento.SapSystemTest do
use Trento.AggregateCase, aggregate: Trento.Domain.SapSystem, async: true
import Trento.Factory
alias Trento.Domain.Commands.{
RegisterApplicationInstance,
RegisterDatabaseInstance
}
alias Trento.Domain.Events.{
ApplicationInstanceHealthChanged,
ApplicationInstanceRegistered,
DatabaseHealthChanged,
DatabaseInstanceHealthChanged,
DatabaseInstanceRegistered,
DatabaseInstanceSystemReplicationChanged,
DatabaseRegistered,
SapSystemHealthChanged,
SapSystemRegistered
}
alias Trento.Domain.SapSystem
describe "SAP System registration" do
test "should create an incomplete SAP system aggregate and register a database instance" do
sap_system_id = Faker.UUID.v4()
sid = Faker.StarWars.planet()
tenant = Faker.Beer.style()
instance_number = "00"
instance_hostname = Faker.Airports.iata()
features = Faker.Pokemon.name()
http_port = 80
https_port = 443
start_priority = "0.9"
host_id = Faker.UUID.v4()
assert_events_and_state(
[],
RegisterDatabaseInstance.new!(%{
sap_system_id: sap_system_id,
sid: sid,
tenant: tenant,
instance_number: instance_number,
instance_hostname: instance_hostname,
features: features,
http_port: http_port,
https_port: https_port,
start_priority: start_priority,
host_id: host_id,
system_replication: "Primary",
system_replication_status: "ACTIVE",
health: :passing
}),
[
%DatabaseRegistered{
sap_system_id: sap_system_id,
sid: sid,
health: :passing
},
%DatabaseInstanceRegistered{
sap_system_id: sap_system_id,
sid: sid,
tenant: tenant,
instance_number: instance_number,
instance_hostname: instance_hostname,
features: features,
http_port: http_port,
https_port: https_port,
start_priority: start_priority,
host_id: host_id,
system_replication: "Primary",
system_replication_status: "ACTIVE",
health: :passing
}
],
%SapSystem{
sap_system_id: sap_system_id,
# The SAP System aggregate is not complete yet.
# The sid will be set when the first application instance is registered.
sid: nil,
database: %SapSystem.Database{
sid: sid,
health: :passing,
instances: [
%SapSystem.Instance{
sid: sid,
system_replication: "Primary",
system_replication_status: "ACTIVE",
instance_number: instance_number,
features: features,
host_id: host_id,
health: :passing
}
]
}
}
)
end
test "should add a database instance to an existing Database" do
sap_system_id = Faker.UUID.v4()
sid = Faker.StarWars.planet()
tenant = Faker.Beer.style()
instance_number = "00"
features = Faker.Pokemon.name()
host_id = Faker.UUID.v4()
initial_events = [
build(
:database_registered_event,
sap_system_id: sap_system_id,
sid: sid
),
build(
:database_instance_registered_event,
sap_system_id: sap_system_id,
sid: sid,
tenant: tenant,
instance_number: "10"
)
]
assert_events_and_state(
initial_events,
build(
:register_database_instance_command,
sap_system_id: sap_system_id,
sid: sid,
tenant: tenant,
instance_number: instance_number,
features: features,
host_id: host_id,
health: :passing
),
build(
:database_instance_registered_event,
sap_system_id: sap_system_id,
sid: sid,
tenant: tenant,
instance_number: instance_number,
features: features,
host_id: host_id,
health: :passing
),
fn state ->
assert %SapSystem{
database: %SapSystem.Database{
instances: [
%SapSystem.Instance{
sid: ^sid,
instance_number: ^instance_number,
features: ^features,
host_id: ^host_id,
health: :passing
}
| _
]
}
} = state
end
)
end
test "should not add a database instance if the database instance was already registered" do
database_registered_event = build(:database_registered_event)
database_instance_registered_event =
build(
:database_instance_registered_event,
sap_system_id: database_registered_event.sap_system_id
)
initial_events = [
database_registered_event,
database_instance_registered_event
]
assert_events(
initial_events,
build(
:register_database_instance_command,
sap_system_id: database_registered_event.sap_system_id,
sid: database_instance_registered_event.sid,
tenant: database_instance_registered_event.tenant,
instance_number: database_instance_registered_event.instance_number,
features: database_instance_registered_event.features,
host_id: database_instance_registered_event.host_id,
system_replication: database_instance_registered_event.system_replication,
system_replication_status: database_instance_registered_event.system_replication_status,
health: :passing
),
[]
)
end
test "should change the system replication of a database instance" do
database_registered_event = build(:database_registered_event)
database_instance_registered_event =
build(
:database_instance_registered_event,
sap_system_id: database_registered_event.sap_system_id,
system_replication: "Secondary",
system_replication_status: ""
)
initial_events = [
database_registered_event,
database_instance_registered_event
]
assert_events(
initial_events,
build(
:register_database_instance_command,
sap_system_id: database_registered_event.sap_system_id,
sid: database_instance_registered_event.sid,
tenant: database_instance_registered_event.tenant,
instance_number: database_instance_registered_event.instance_number,
features: database_instance_registered_event.features,
host_id: database_instance_registered_event.host_id,
system_replication: "Primary",
system_replication_status: "ACTIVE",
health: :passing
),
%DatabaseInstanceSystemReplicationChanged{
sap_system_id: database_registered_event.sap_system_id,
host_id: database_instance_registered_event.host_id,
instance_number: database_instance_registered_event.instance_number,
system_replication: "Primary",
system_replication_status: "ACTIVE"
}
)
end
test "should register a SAP System and add an application instance" do
sap_system_id = Faker.UUID.v4()
sid = Faker.StarWars.planet()
db_host = Faker.Internet.ip_v4_address()
tenant = Faker.Beer.style()
instance_hostname = Faker.Airports.iata()
features = Faker.Pokemon.name()
http_port = 80
https_port = 443
start_priority = "0.9"
host_id = Faker.UUID.v4()
initial_events = [
build(
:database_registered_event,
sap_system_id: sap_system_id,
sid: sid
),
build(
:database_instance_registered_event,
sap_system_id: sap_system_id,
sid: sid,
tenant: tenant
)
]
assert_events_and_state(
initial_events,
RegisterApplicationInstance.new!(%{
sap_system_id: sap_system_id,
sid: sid,
db_host: db_host,
tenant: tenant,
instance_number: "00",
instance_hostname: instance_hostname,
features: features,
http_port: http_port,
https_port: https_port,
start_priority: start_priority,
host_id: host_id,
health: :passing
}),
[
%SapSystemRegistered{
sap_system_id: sap_system_id,
sid: sid,
db_host: db_host,
tenant: tenant,
health: :passing
},
%ApplicationInstanceRegistered{
sap_system_id: sap_system_id,
sid: sid,
instance_number: "00",
instance_hostname: instance_hostname,
features: features,
http_port: http_port,
https_port: https_port,
start_priority: start_priority,
host_id: host_id,
health: :passing
}
],
fn state ->
assert %SapSystem{
sid: ^sid,
application: %SapSystem.Application{
sid: ^sid,
instances: [
%SapSystem.Instance{
sid: ^sid,
instance_number: "00",
features: ^features,
host_id: ^host_id,
health: :passing
}
]
}
} = state
end
)
end
test "should add an application instance to a registered SAP System" do
sap_system_id = Faker.UUID.v4()
sid = Faker.StarWars.planet()
initial_events = [
build(:database_registered_event, sap_system_id: sap_system_id, sid: sid),
build(:database_instance_registered_event, sap_system_id: sap_system_id, sid: sid),
build(:sap_system_registered_event, sap_system_id: sap_system_id, sid: sid),
build(:application_instance_registered_event, sap_system_id: sap_system_id, sid: sid)
]
new_instance_db_host = Faker.Internet.ip_v4_address()
new_instance_tenant = Faker.Beer.style()
new_instance_number = "10"
new_instance_features = Faker.Pokemon.name()
new_instance_host_id = Faker.UUID.v4()
assert_events_and_state(
initial_events,
build(
:register_application_instance_command,
sap_system_id: sap_system_id,
sid: sid,
db_host: new_instance_db_host,
tenant: new_instance_tenant,
instance_number: new_instance_number,
features: new_instance_features,
host_id: new_instance_host_id,
health: :passing
),
build(
:application_instance_registered_event,
sap_system_id: sap_system_id,
sid: sid,
instance_number: new_instance_number,
features: new_instance_features,
host_id: new_instance_host_id,
health: :passing
),
fn state ->
assert %SapSystem{
application: %SapSystem.Application{
sid: ^sid,
instances: [
%SapSystem.Instance{
sid: ^sid,
instance_number: ^new_instance_number,
features: ^new_instance_features,
host_id: ^new_instance_host_id,
health: :passing
}
| _
]
}
} = state
end
)
end
test "should not add an application instance if the application instance was already registered" do
sap_system_id = Faker.UUID.v4()
application_instance_registered_event =
build(:application_instance_registered_event, sap_system_id: sap_system_id)
initial_events = [
build(:database_registered_event, sap_system_id: sap_system_id),
build(:database_instance_registered_event, sap_system_id: sap_system_id),
build(:sap_system_registered_event, sap_system_id: sap_system_id),
application_instance_registered_event
]
assert_events(
initial_events,
build(
:register_application_instance_command,
sap_system_id: application_instance_registered_event.sap_system_id,
sid: application_instance_registered_event.sid,
db_host: Faker.Internet.ip_v4_address(),
tenant: Faker.Beer.hop(),
instance_number: application_instance_registered_event.instance_number,
features: application_instance_registered_event.features,
host_id: application_instance_registered_event.host_id,
health: :passing
),
[]
)
end
end
describe "SAP System health" do
test "should change the health of a Database when a new Database instance is registered" do
sap_system_id = Faker.UUID.v4()
sid = Faker.StarWars.planet()
tenant = Faker.Beer.style()
instance_number = "00"
features = Faker.Pokemon.name()
host_id = Faker.UUID.v4()
initial_events = [
build(:database_registered_event, sap_system_id: sap_system_id),
build(:database_instance_registered_event, sap_system_id: sap_system_id)
]
assert_events_and_state(
initial_events,
build(
:register_database_instance_command,
sap_system_id: sap_system_id,
sid: sid,
tenant: tenant,
instance_number: instance_number,
features: features,
host_id: host_id,
health: :critical
),
[
build(
:database_instance_registered_event,
sap_system_id: sap_system_id,
sid: sid,
tenant: tenant,
instance_number: instance_number,
features: features,
host_id: host_id,
health: :critical
),
%DatabaseHealthChanged{
sap_system_id: sap_system_id,
health: :critical
}
],
fn state ->
%SapSystem{
database: %SapSystem.Database{
health: :critical,
instances: [
%SapSystem.Instance{
health: :critical
},
%SapSystem.Instance{
health: :passing
}
]
}
} = state
end
)
end
test "should change the health of a Database when a Database instance has changed the health status" do
sap_system_id = Faker.UUID.v4()
host_id = Faker.UUID.v4()
instance_number = "00"
database_instance_registered_event =
build(
:database_instance_registered_event,
sap_system_id: sap_system_id,
host_id: host_id,
instance_number: instance_number
)
initial_events = [
build(:database_registered_event, sap_system_id: sap_system_id),
database_instance_registered_event
]
assert_events_and_state(
initial_events,
build(
:register_database_instance_command,
sap_system_id: sap_system_id,
sid: database_instance_registered_event.sid,
tenant: database_instance_registered_event.tenant,
instance_number: instance_number,
features: database_instance_registered_event.features,
host_id: host_id,
health: :critical
),
[
%DatabaseInstanceHealthChanged{
sap_system_id: sap_system_id,
instance_number: instance_number,
host_id: host_id,
health: :critical
},
%DatabaseHealthChanged{
sap_system_id: sap_system_id,
health: :critical
}
],
fn state ->
assert %SapSystem{
database: %SapSystem.Database{
health: :critical,
instances: [
%SapSystem.Instance{
instance_number: ^instance_number,
host_id: ^host_id,
health: :critical
}
]
}
} = state
end
)
end
  # Re-registering instances whose health matches the database's current
  # aggregated health must NOT emit a DatabaseHealthChanged event: only the
  # registration event for the genuinely new instance is expected.
  test "should not change the health of a Database if no instance has changed the health status" do
    sap_system_id = Faker.UUID.v4()
    # Attributes of a second, brand-new DB instance (registered below with the
    # same :warning health as the existing database).
    new_instance_number = "20"
    new_instance_features = Faker.Pokemon.name()
    new_instance_host_id = Faker.UUID.v4()
    database_instance_registered_event =
      build(
        :database_instance_registered_event,
        sap_system_id: sap_system_id,
        health: :warning
      )
    # Aggregate history: database plus one instance, both already :warning.
    initial_events = [
      build(:database_registered_event, sap_system_id: sap_system_id, health: :warning),
      database_instance_registered_event
    ]
    assert_events_and_state(
      initial_events,
      [
        # Command 1: re-register the existing instance with unchanged health —
        # should produce no event at all.
        build(
          :register_database_instance_command,
          sap_system_id: sap_system_id,
          sid: database_instance_registered_event.sid,
          tenant: database_instance_registered_event.tenant,
          instance_number: database_instance_registered_event.instance_number,
          features: database_instance_registered_event.features,
          host_id: database_instance_registered_event.host_id,
          health: :warning
        ),
        # Command 2: register the new instance, also :warning (same as DB).
        build(
          :register_database_instance_command,
          sap_system_id: sap_system_id,
          sid: database_instance_registered_event.sid,
          tenant: database_instance_registered_event.tenant,
          instance_number: new_instance_number,
          features: new_instance_features,
          host_id: new_instance_host_id,
          health: :warning
        )
      ],
      # Expected events: only the new instance's registration — no
      # DatabaseHealthChanged, since the aggregated health did not move.
      [
        build(
          :database_instance_registered_event,
          sap_system_id: sap_system_id,
          sid: database_instance_registered_event.sid,
          tenant: database_instance_registered_event.tenant,
          instance_number: new_instance_number,
          features: new_instance_features,
          host_id: new_instance_host_id,
          health: :warning
        )
      ],
      fn state ->
        # Final state: database still :warning with both instances :warning.
        assert %SapSystem{
                 database: %SapSystem.Database{
                   health: :warning,
                   instances: [
                     %SapSystem.Instance{
                       health: :warning
                     },
                     %SapSystem.Instance{
                       health: :warning
                     }
                   ]
                 }
               } = state
      end
    )
  end
  # Registering the first Application instance both registers the SAP System
  # (the database already exists in history) and propagates the instance's
  # :critical health up to the SAP System itself.
  test "should change the health of a SAP System when a new Application instance is registered" do
    sap_system_id = Faker.UUID.v4()
    sid = Faker.StarWars.planet()
    tenant = Faker.Beer.style()
    db_host = Faker.Internet.ip_v4_address()
    instance_number = "00"
    features = Faker.Pokemon.name()
    host_id = Faker.UUID.v4()
    # History: only the database side exists; no SAP system registered yet.
    initial_events = [
      build(:database_registered_event, sap_system_id: sap_system_id),
      build(:database_instance_registered_event, sap_system_id: sap_system_id)
    ]
    assert_events_and_state(
      initial_events,
      build(
        :register_application_instance_command,
        sap_system_id: sap_system_id,
        sid: sid,
        tenant: tenant,
        db_host: db_host,
        instance_number: instance_number,
        features: features,
        host_id: host_id,
        health: :critical
      ),
      # Expected: SAP system registration (born :critical, inheriting the
      # instance health) followed by the application instance registration.
      [
        %SapSystemRegistered{
          sap_system_id: sap_system_id,
          sid: sid,
          db_host: db_host,
          tenant: tenant,
          health: :critical
        },
        build(
          :application_instance_registered_event,
          sap_system_id: sap_system_id,
          sid: sid,
          instance_number: instance_number,
          features: features,
          host_id: host_id,
          health: :critical
        )
      ],
      fn state ->
        assert %SapSystem{
                 health: :critical,
                 application: %SapSystem.Application{
                   instances: [
                     %SapSystem.Instance{
                       health: :critical
                     }
                   ]
                 }
               } = state
      end
    )
  end
  # Re-registering an existing Application instance with a different health
  # must emit ApplicationInstanceHealthChanged and cascade into
  # SapSystemHealthChanged for the already-registered SAP system.
  test "should change the health of a SAP System when an Application has changed the health status" do
    sap_system_id = Faker.UUID.v4()
    application_instance_registered =
      build(:application_instance_registered_event, sap_system_id: sap_system_id)
    sap_system_registered_event =
      build(:sap_system_registered_event, sap_system_id: sap_system_id)
    # History: fully registered SAP system (DB + DB instance + app instance).
    initial_events = [
      build(:database_registered_event, sap_system_id: sap_system_id),
      build(:database_instance_registered_event, sap_system_id: sap_system_id),
      application_instance_registered,
      sap_system_registered_event
    ]
    assert_events_and_state(
      initial_events,
      # Same instance identity as in history, but health flips to :critical.
      build(
        :register_application_instance_command,
        sap_system_id: sap_system_id,
        sid: application_instance_registered.sid,
        tenant: sap_system_registered_event.tenant,
        db_host: sap_system_registered_event.db_host,
        instance_number: application_instance_registered.instance_number,
        features: application_instance_registered.features,
        host_id: application_instance_registered.host_id,
        health: :critical
      ),
      # Expected: instance-level change first, then the SAP-system-level one.
      [
        %ApplicationInstanceHealthChanged{
          sap_system_id: sap_system_id,
          instance_number: application_instance_registered.instance_number,
          host_id: application_instance_registered.host_id,
          health: :critical
        },
        %SapSystemHealthChanged{
          sap_system_id: sap_system_id,
          health: :critical
        }
      ],
      fn state ->
        assert %SapSystem{
                 health: :critical,
                 application: %SapSystem.Application{
                   instances: [
                     %SapSystem.Instance{
                       health: :critical
                     }
                   ]
                 }
               } = state
      end
    )
  end
  # Mirror of the database no-op test for the application side: commands that
  # leave every instance's health as-is must not emit SapSystemHealthChanged;
  # only the genuinely new instance's registration event is expected.
  test "should not change the health of a SAP System if no instance has changed the health status" do
    sap_system_id = Faker.UUID.v4()
    # Attributes for a second application instance, registered with the same
    # :warning health the SAP system already has.
    new_instance_number = "20"
    new_instance_features = Faker.Pokemon.name()
    new_instance_host_id = Faker.UUID.v4()
    application_instance_registered_event =
      build(
        :application_instance_registered_event,
        sap_system_id: sap_system_id,
        health: :warning
      )
    sap_system_registered_event =
      build(:sap_system_registered_event, sap_system_id: sap_system_id, health: :warning)
    # History: everything already in :warning state.
    initial_events = [
      build(:database_registered_event, sap_system_id: sap_system_id),
      build(
        :database_instance_registered_event,
        sap_system_id: sap_system_id,
        health: :warning
      ),
      sap_system_registered_event,
      application_instance_registered_event
    ]
    assert_events_and_state(
      initial_events,
      [
        # Command 1: re-register existing instance, health unchanged — no event.
        build(
          :register_application_instance_command,
          sap_system_id: sap_system_id,
          sid: application_instance_registered_event.sid,
          tenant: sap_system_registered_event.tenant,
          db_host: sap_system_registered_event.db_host,
          instance_number: application_instance_registered_event.instance_number,
          features: application_instance_registered_event.features,
          host_id: application_instance_registered_event.host_id,
          health: :warning
        ),
        # Command 2: register the new instance, also :warning.
        build(
          :register_application_instance_command,
          sap_system_id: sap_system_id,
          sid: application_instance_registered_event.sid,
          tenant: sap_system_registered_event.tenant,
          db_host: sap_system_registered_event.db_host,
          instance_number: new_instance_number,
          features: new_instance_features,
          host_id: new_instance_host_id,
          health: :warning
        )
      ],
      # Only the new instance registration is expected — no health-change events.
      [
        build(
          :application_instance_registered_event,
          sap_system_id: sap_system_id,
          sid: application_instance_registered_event.sid,
          instance_number: new_instance_number,
          features: new_instance_features,
          host_id: new_instance_host_id,
          health: :warning
        )
      ],
      fn state ->
        assert %SapSystem{
                 health: :warning,
                 application: %SapSystem.Application{
                   instances: [
                     %SapSystem.Instance{
                       health: :warning
                     },
                     %SapSystem.Instance{
                       health: :warning
                     }
                   ]
                 }
               } = state
      end
    )
  end
test "should change the health of a SAP System when the Database has changed the health status" do
sap_system_id = Faker.UUID.v4()
new_instance_number = "20"
new_instance_features = Faker.Pokemon.name()
new_instance_host_id = Faker.UUID.v4()
initial_events = [
build(:database_registered_event, sap_system_id: sap_system_id),
database_instance_registered_event =
build(:database_instance_registered_event, sap_system_id: sap_system_id),
build(:sap_system_registered_event, sap_system_id: sap_system_id),
build(:application_instance_registered_event, sap_system_id: sap_system_id)
]
assert_events_and_state(
initial_events,
build(
:register_database_instance_command,
sap_system_id: sap_system_id,
sid: database_instance_registered_event.sid,
tenant: database_instance_registered_event.tenant,
instance_number: new_instance_number,
features: new_instance_features,
host_id: new_instance_host_id,
health: :warning
),
[
build(
:database_instance_registered_event,
sap_system_id: sap_system_id,
sid: database_instance_registered_event.sid,
tenant: database_instance_registered_event.tenant,
instance_number: new_instance_number,
features: new_instance_features,
host_id: new_instance_host_id,
health: :warning
),
%DatabaseHealthChanged{
sap_system_id: sap_system_id,
health: :warning
},
%SapSystemHealthChanged{
sap_system_id: sap_system_id,
health: :warning
}
],
fn state ->
assert %SapSystem{health: :warning} = state
end
)
end
end
end
| 32.77076
| 107
| 0.580463
|
9e68051f52192e791650d542d492fa14f4ac94f0
| 1,756
|
ex
|
Elixir
|
clients/big_query/lib/google_api/big_query/v2/model/table_data_insert_all_response_insert_errors.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/big_query/lib/google_api/big_query/v2/model/table_data_insert_all_response_insert_errors.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/big_query/lib/google_api/big_query/v2/model/table_data_insert_all_response_insert_errors.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQuery.V2.Model.TableDataInsertAllResponseInsertErrors do
  @moduledoc """
  Per-row error information returned by a `tabledata.insertAll` response.

  ## Attributes

  *   `errors` (*type:* `list(GoogleApi.BigQuery.V2.Model.ErrorProto.t)`, *default:* `nil`) - Error information for the row indicated by the index property.
  *   `index` (*type:* `integer()`, *default:* `nil`) - The index of the row that error applies to.
  """
  # ModelBase provides the `field/2,3` macro plus decode/2 and JSON mapping.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :errors => list(GoogleApi.BigQuery.V2.Model.ErrorProto.t()),
          :index => integer()
        }

  field(:errors, as: GoogleApi.BigQuery.V2.Model.ErrorProto, type: :list)
  field(:index)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.TableDataInsertAllResponseInsertErrors do
  # Decoding is delegated to the model's generated decode/2 (from ModelBase).
  def decode(value, options),
    do: GoogleApi.BigQuery.V2.Model.TableDataInsertAllResponseInsertErrors.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.TableDataInsertAllResponseInsertErrors do
  # All generated models share the generic ModelBase JSON encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.12
| 156
| 0.739749
|
9e6811fa19f0f34d505e261196b1a5af6eddb87d
| 617
|
ex
|
Elixir
|
lib/phone/nanp.ex
|
turnhub/phone
|
020d4ac43d1b191a7b2b7ff3c15cfe8078646c7a
|
[
"Apache-2.0"
] | null | null | null |
lib/phone/nanp.ex
|
turnhub/phone
|
020d4ac43d1b191a7b2b7ff3c15cfe8078646c7a
|
[
"Apache-2.0"
] | null | null | null |
lib/phone/nanp.ex
|
turnhub/phone
|
020d4ac43d1b191a7b2b7ff3c15cfe8078646c7a
|
[
"Apache-2.0"
] | null | null | null |
defmodule Phone.NANP do
  # Dispatcher for the North American Numbering Plan (country code +1).
  # `matcher/2` is provided by Helper.Country; with the :modules strategy it
  # tries each territory module below in order until one matches the number.
  @moduledoc false
  use Helper.Country

  matcher(:modules, [
    Phone.NANP.AS,
    Phone.NANP.AI,
    Phone.NANP.AG,
    Phone.NANP.BS,
    Phone.NANP.BB,
    Phone.NANP.BM,
    Phone.NANP.CA,
    Phone.NANP.DM,
    Phone.NANP.DO,
    Phone.NANP.GD,
    Phone.NANP.GU,
    Phone.NANP.JM1,
    Phone.NANP.JM2,
    Phone.NANP.KN,
    Phone.NANP.KY,
    Phone.NANP.LC,
    Phone.NANP.MP,
    Phone.NANP.MS,
    Phone.NANP.PR,
    Phone.NANP.SX,
    Phone.NANP.TC,
    Phone.NANP.TT,
    Phone.NANP.US,
    Phone.NANP.VC,
    Phone.NANP.VG,
    Phone.NANP.VI,
    # Toll-free prefixes (kept last so specific territories match first).
    Phone.NANP.TollFree
  ])
end
| 17.138889
| 23
| 0.602917
|
9e6834e8a90532979cffaf0779ec7d9e50ce5537
| 3,499
|
exs
|
Elixir
|
test/mailto_test.exs
|
kianmeng/url
|
e65a0f3cff4bea203c7965d4d7c4cf612169d726
|
[
"Apache-2.0"
] | null | null | null |
test/mailto_test.exs
|
kianmeng/url
|
e65a0f3cff4bea203c7965d4d7c4cf612169d726
|
[
"Apache-2.0"
] | null | null | null |
test/mailto_test.exs
|
kianmeng/url
|
e65a0f3cff4bea203c7965d4d7c4cf612169d726
|
[
"Apache-2.0"
] | null | null | null |
defmodule MailtoTest do
  use ExUnit.Case

  # Each test feeds an RFC 6068 `mailto:` URI to URL.parse/1 and pins the full
  # %URL{} result, including the scheme-specific parsed_path: a %URL.Mailto{}
  # with the recipient list (`to`) and the decoded header fields (`params`).
  test "simple mailto" do
    assert URL.parse("mailto:infobot@example.com?subject=current-issue") ==
             %URL{
               authority: nil,
               fragment: nil,
               host: nil,
               parsed_path: %URL.Mailto{
                 params: %{"subject" => "current-issue"},
                 to: ["infobot@example.com"]
               },
               path: "infobot@example.com",
               port: nil,
               query: "subject=current-issue",
               scheme: "mailto",
               userinfo: nil
             }
  end

  # Percent-encoded CR/LF in the body header must come back decoded in params
  # while the raw query string stays untouched.
  test "with params" do
    assert URL.parse("mailto:infobot@example.com?body=send%20current-issue%0D%0Asend%20index") ==
             %URL{
               authority: nil,
               fragment: nil,
               host: nil,
               parsed_path: %URL.Mailto{
                 params: %{"body" => "send current-issue\r\nsend index"},
                 to: ["infobot@example.com"]
               },
               path: "infobot@example.com",
               port: nil,
               query: "body=send%20current-issue%0D%0Asend%20index",
               scheme: "mailto",
               userinfo: nil
             }
  end

  # A literal "%" in the local part must be percent-encoded in the URI and
  # decoded in the parsed recipient.
  test "percent decode address" do
    assert URL.parse("mailto:gorby%25kremvax@example.com") ==
             %URL{
               authority: nil,
               fragment: nil,
               host: nil,
               parsed_path: %URL.Mailto{params: %{}, to: ["gorby%kremvax@example.com"]},
               path: "gorby%25kremvax@example.com",
               port: nil,
               query: nil,
               scheme: "mailto",
               userinfo: nil
             }
  end

  # Quoted local parts containing "@" and escaped backslashes (RFC 6068 §6.2).
  test "more complications" do
    assert URL.parse("mailto:%22not%40me%22@example.org") ==
             %URL{
               authority: nil,
               fragment: nil,
               host: nil,
               parsed_path: %URL.Mailto{params: %{}, to: ["\"not@me\"@example.org"]},
               path: "%22not%40me%22@example.org",
               port: nil,
               query: nil,
               scheme: "mailto",
               userinfo: nil
             }
    assert URL.parse("mailto:%22oh%5C%5Cno%22@example.org") ==
             %URL{
               authority: nil,
               fragment: nil,
               host: nil,
               parsed_path: %URL.Mailto{params: %{}, to: ["\"oh\\\\no\"@example.org"]},
               path: "%22oh%5C%5Cno%22@example.org",
               port: nil,
               query: nil,
               scheme: "mailto",
               userinfo: nil
             }
  end

  # UTF-8 percent-encoded header values decode to the Unicode string.
  test "simple utf8 percent encoding" do
    assert URL.parse("mailto:user@example.org?subject=caf%C3%A9") ==
             %URL{
               authority: nil,
               fragment: nil,
               host: nil,
               parsed_path: %URL.Mailto{
                 params: %{"subject" => "café"},
                 to: ["user@example.org"]
               },
               path: "user@example.org",
               port: nil,
               query: "subject=caf%C3%A9",
               scheme: "mailto",
               userinfo: nil
             }
  end

  # NOTE(review): RFC 2047 encoded-word and ISO-8859-1 cases are intentionally
  # left disabled below — presumably unsupported by the parser; confirm before
  # enabling.
  # test "utf-8 word encoding" do
  #   assert URL.parse("mailto:user@example.org?subject=%3D%3Futf-8%3FQ%3Fcaf%3DC3%3DA9%3F%3D") ==
  #
  # end
  #
  # test "iso8859 encoding" do
  #   assert URL.parse("mailto:user@example.org?subject=%3D%3Fiso-8859-1%3FQ%3Fcaf%3DE9%3F%3D") ==
  #
  # end

  # Internationalized (UTF-8) domain names in the recipient address.
  test "utf8 encoding domain name" do
    assert URL.parse("mailto:user@%E7%B4%8D%E8%B1%86.example.org?subject=Test&body=NATTO") ==
             %URL{
               authority: nil,
               fragment: nil,
               host: nil,
               parsed_path: %URL.Mailto{
                 params: %{"body" => "NATTO", "subject" => "Test"},
                 to: ["user@納豆.example.org"]
               },
               path: "user@%E7%B4%8D%E8%B1%86.example.org",
               port: nil,
               query: "subject=Test&body=NATTO",
               scheme: "mailto",
               userinfo: nil
             }
  end
end
| 27.335938
| 98
| 0.54044
|
9e68400b328d44222f90ff8de64b15de685928a1
| 158
|
ex
|
Elixir
|
lib/bitpay/errors.ex
|
bitpay/elixir-client
|
cccf8abbad5da7ad3d01d186bd14bf69eca68770
|
[
"MIT"
] | 35
|
2015-01-18T02:16:11.000Z
|
2021-11-14T01:55:34.000Z
|
lib/bitpay/errors.ex
|
philosodad/bitpay-elixir
|
cccf8abbad5da7ad3d01d186bd14bf69eca68770
|
[
"MIT"
] | 3
|
2015-06-24T15:18:57.000Z
|
2017-05-17T17:52:05.000Z
|
lib/bitpay/errors.ex
|
bitpay/elixir-client
|
cccf8abbad5da7ad3d01d186bd14bf69eca68770
|
[
"MIT"
] | 13
|
2015-01-16T21:11:22.000Z
|
2021-10-29T23:23:59.000Z
|
defmodule BitPay.ArgumentError do
  @moduledoc """
  Raised when a BitPay client function receives an invalid argument.
  """
  defexception message: "incorrect argument"
end
defmodule BitPay.BitPayError do
  @moduledoc """
  Generic error raised for failures reported by the BitPay API.
  """
  defexception message: "BitPay Error"
end
| 22.571429
| 44
| 0.822785
|
9e6842ad0e99c0aec33de52836c9bd455f7b5c9e
| 1,835
|
ex
|
Elixir
|
clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_update_user_link_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_update_user_link_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/analytics_admin/lib/google_api/analytics_admin/v1alpha/model/google_analytics_admin_v1alpha_update_user_link_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaUpdateUserLinkRequest do
  @moduledoc """
  Request message for UpdateUserLink RPC.

  ## Attributes

  *   `userLink` (*type:* `GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaUserLink.t`, *default:* `nil`) - Required. The user link to update.
  """
  # ModelBase supplies the `field/2` macro plus decode/2 and JSON mapping.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :userLink =>
            GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaUserLink.t() | nil
        }

  field(:userLink, as: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaUserLink)
end
defimpl Poison.Decoder,
  for: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaUpdateUserLinkRequest do
  alias GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaUpdateUserLinkRequest

  # Decoding is delegated to the model's generated decode/2 (from ModelBase).
  def decode(value, options),
    do: GoogleAnalyticsAdminV1alphaUpdateUserLinkRequest.decode(value, options)
end
defimpl Poison.Encoder,
  for: GoogleApi.AnalyticsAdmin.V1alpha.Model.GoogleAnalyticsAdminV1alphaUpdateUserLinkRequest do
  # All generated models share the generic ModelBase JSON encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.622642
| 160
| 0.768937
|
9e68804aba4b32ce1c1fe19d43f420a6590f9c84
| 684
|
ex
|
Elixir
|
lib/mix/tasks/gen.referrals.ex
|
cesium/safira
|
07a02f54f9454db1cfb5a510da68f40c47dcd916
|
[
"MIT"
] | 40
|
2018-07-04T19:13:45.000Z
|
2021-12-16T23:53:43.000Z
|
lib/mix/tasks/gen.referrals.ex
|
cesium/safira
|
10dd45357c20e8afc22563f114f49ccb74008114
|
[
"MIT"
] | 94
|
2018-07-25T13:13:39.000Z
|
2022-02-15T04:09:42.000Z
|
lib/mix/tasks/gen.referrals.ex
|
cesium/safira
|
07a02f54f9454db1cfb5a510da68f40c47dcd916
|
[
"MIT"
] | 5
|
2018-11-26T17:19:03.000Z
|
2021-02-23T08:09:37.000Z
|
defmodule Mix.Tasks.Gen.Referrals do
  @moduledoc """
  Generates referral codes bound to a badge.

  Usage: `mix gen.referrals BADGE_ID COUNT` — creates COUNT referrals for the
  badge with id BADGE_ID, printing the badge name followed by each created
  referral's id.
  """
  use Mix.Task

  alias Safira.Contest

  @impl Mix.Task
  def run(args) do
    cond do
      # Exactly two positional arguments are required: badge id and count.
      length(args) != 2 ->
        Mix.shell().info("Needs to receive badge id and number of referrals")

      # The count (last argument) must be a positive integer.
      args |> List.last() |> String.to_integer() <= 0 ->
        Mix.shell().info("Number of referrals needs to be above 0.")

      true ->
        args |> Enum.map(&String.to_integer/1) |> create()
    end
  end

  # Boots the application (so Ecto repos are started), prints the badge name,
  # then inserts `count` referrals and prints each new referral's id.
  defp create([id, count]) do
    Mix.Task.run("app.start")
    IO.puts(Contest.get_badge!(id).name)

    for _n <- 1..count do
      Contest.create_referral(%{badge_id: id})
      |> elem(1)
      |> Map.get(:id)
    end
    |> Enum.map(&IO.puts/1)
  end
end
| 25.333333
| 74
| 0.605263
|
9e6880a96d8f6c66d2c49022f711554f88a8c943
| 15,315
|
ex
|
Elixir
|
lib/elixir/lib/code.ex
|
enokd/elixir
|
e39b32f235082b8a29fcb22d250c822cca98609f
|
[
"Apache-2.0"
] | 1
|
2015-11-12T19:23:45.000Z
|
2015-11-12T19:23:45.000Z
|
lib/elixir/lib/code.ex
|
enokd/elixir
|
e39b32f235082b8a29fcb22d250c822cca98609f
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/code.ex
|
enokd/elixir
|
e39b32f235082b8a29fcb22d250c822cca98609f
|
[
"Apache-2.0"
] | null | null | null |
defmodule Code do
defexception LoadError, [:file, :message] do
def exception(opts) do
file = opts[:file]
LoadError[message: "could not load #{file}", file: file]
end
end
@moduledoc """
Utilities for managing code compilation, code evaluation and code loading.
This module complements [Erlang's code module](http://www.erlang.org/doc/man/code.html)
to add behaviour which is specific to Elixir.
"""
@doc """
List all loaded files.
"""
def loaded_files do
:elixir_code_server.call :loaded
end
@doc """
Remove files from the loaded files list.
The modules defined in the file are not removed;
calling this function only removes them from the list,
allowing them to be required again.
"""
def unload_files(files) do
:elixir_code_server.cast { :unload_files, files }
end
@doc """
Append a path to the Erlang VM code path.
The path is expanded with `Path.expand/1` before being appended.
"""
def append_path(path) do
:code.add_pathz(Path.expand to_char_list(path))
end
@doc """
Prepend a path to the Erlang VM code path.
The path is expanded with `Path.expand/1` before being prepended.
"""
def prepend_path(path) do
:code.add_patha(Path.expand to_char_list(path))
end
@doc """
Delete a path from the Erlang VM code path.
The path is expanded with `Path.expand/1` before being deleted.
"""
def delete_path(path) do
:code.del_path(Path.expand to_char_list(path))
end
@doc """
Evaluate the contents given by `string`.
The `binding` argument is a keyword list of variable bindings.
The `opts` argument is a keyword list of environment options.
Those options can be:
* `:file` - the file to be considered in the evaluation
* `:line` - the line on which the script starts
* `:delegate_locals_to` - delegate local calls to the given module,
the default is to not delegate
Additionally, the following scope values can be configured:
* `:aliases` - a list of tuples with the alias and its target
* `:requires` - a list of modules required
* `:functions` - a list of tuples where the first element is a module
and the second a list of imported function names and arity. The list
of function names and arity must be sorted
* `:macros` - a list of tuples where the first element is a module
and the second a list of imported macro names and arity. The list
of function names and arity must be sorted
Notice that setting any of the values above overrides Elixir's default
values. For example, setting `:requires` to `[]`, will no longer
automatically require the `Kernel` module; in the same way setting
`:macros` will no longer auto-import `Kernel` macros like `if`, `case`,
etc.
Returns a tuple of the form `{ value, binding }`,
where `value` is the the value returned from evaluating `string`.
If an error occurs while evaluating `string` an exception will be raised.
`binding` is a keyword list with the value of all variable bindings
after evaluating `string`. The binding key is usually an atom, but it
may be a tuple for variables defined in a different context.
## Examples
iex> Code.eval_string("a + b", [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line)
{3, [a: 1, b: 2]}
iex> Code.eval_string("c = a + b", [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2, c: 3]}
iex> Code.eval_string("a = a + b", [a: 1, b: 2])
{3, [a: 3, b: 2]}
For convenience, you can pass `__ENV__` as the `opts` argument and
all imports, requires and aliases defined in the current environment
will be automatically carried over:
iex> Code.eval_string("a + b", [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2]}
"""
def eval_string(string, binding \\ [], opts \\ [])
def eval_string(string, binding, Macro.Env[] = env) do
{ value, binding, _env, _scope } = :elixir.eval to_char_list(string), binding, env.to_keywords
{ value, binding }
end
def eval_string(string, binding, opts) when is_list(opts) do
validate_eval_opts(opts)
{ value, binding, _env, _scope } = :elixir.eval to_char_list(string), binding, opts
{ value, binding }
end
@doc """
Evaluate the quoted contents.
See `eval_string/3` for a description of arguments and return values.
## Examples
iex> contents = quote(hygiene: [vars: false], do: a + b)
iex> Code.eval_quoted(contents, [a: 1, b: 2], file: __ENV__.file, line: __ENV__.line)
{3, [a: 1, b: 2]}
For convenience, you can pass `__ENV__` as the `opts` argument and
all options will be automatically extracted from the current environment:
iex> contents = quote(hygiene: [vars: false], do: a + b)
iex> Code.eval_quoted(contents, [a: 1, b: 2], __ENV__)
{3, [a: 1, b: 2]}
"""
def eval_quoted(quoted, binding \\ [], opts \\ [])
def eval_quoted(quoted, binding, Macro.Env[] = env) do
{ value, binding, _env, _scope } = :elixir.eval_quoted quoted, binding, env.to_keywords
{ value, binding }
end
def eval_quoted(quoted, binding, opts) when is_list(opts) do
validate_eval_opts(opts)
{ value, binding, _env, _scope } = :elixir.eval_quoted quoted, binding, opts
{ value, binding }
end
defp validate_eval_opts(opts) do
if f = opts[:functions], do: validate_imports(:functions, f)
if m = opts[:macros], do: validate_imports(:macros, m)
if a = opts[:aliases], do: validate_aliases(:aliases, a)
if r = opts[:requires], do: validate_requires(:requires, r)
end
defp validate_requires(kind, requires) do
valid = is_list(requires) and Enum.all?(requires, &is_atom(&1))
unless valid do
raise ArgumentError, message: "expected :#{kind} option given to eval in the format: [module]"
end
end
defp validate_aliases(kind, aliases) do
valid = is_list(aliases) and Enum.all?(aliases, fn { k, v } ->
is_atom(k) and is_atom(v)
end)
unless valid do
raise ArgumentError, message: "expected :#{kind} option given to eval in the format: [{ module, module }]"
end
end
defp validate_imports(kind, imports) do
valid = is_list(imports) and Enum.all?(imports, fn { k, v } ->
is_atom(k) and is_list(v) and Enum.all?(v, fn { name, arity } ->
is_atom(name) and is_integer(arity)
end)
end)
unless valid do
raise ArgumentError, message: "expected :#{kind} option given to eval in the format: [{ module, [{ name, arity }] }]"
end
end
@doc """
Convert the given string to its quoted form.
Returns `{ :ok, quoted_form }`
if it succeeds, `{ :error, { line, error, token } }` otherwise.
## Options
* `:file` - The filename to be used in stacktraces
and the file reported in the `__ENV__` variable.
* `:line` - The line reported in the `__ENV__` variable.
* `:existing_atoms_only` - When `true`, raises an error
when non-existing atoms are found by the tokenizer.
## Macro.to_string/2
The opposite of converting a string to its quoted form is
`Macro.to_string/2`, which converts a quoted form to a string/binary
representation.
"""
def string_to_quoted(string, opts \\ []) when is_list(opts) do
file = Keyword.get opts, :file, "nofile"
line = Keyword.get opts, :line, 1
:elixir.string_to_quoted(to_char_list(string), line, file, opts)
end
@doc """
Convert the given string to its quoted form.
It returns the ast if it succeeds,
raises an exception otherwise. The exception is a `TokenMissingError`
in case a token is missing (usually because the expression is incomplete),
`SyntaxError` otherwise.
Check `string_to_quoted/2` for options information.
"""
def string_to_quoted!(string, opts \\ []) when is_list(opts) do
file = Keyword.get opts, :file, "nofile"
line = Keyword.get opts, :line, 1
:elixir.string_to_quoted!(to_char_list(string), line, file, opts)
end
@doc """
Evals the given file.
Accepts `relative_to` as an argument to tell where the file is located.
While `load_file` loads a file and returns the loaded modules and their
byte code, `eval_file` simply evalutes the file contents and returns the
evaluation result and its bindings.
"""
def eval_file(file, relative_to \\ nil) do
file = find_file(file, relative_to)
eval_string File.read!(file), [], []
end
@doc """
Load the given file.
Accepts `relative_to` as an argument to tell where the file is located.
If the file was already required/loaded, loads it again.
It returns a list of tuples `{ ModuleName, <<byte_code>> }`, one tuple for
each module defined in the file.
Notice that if `load_file` is invoked by different processes concurrently,
the target file will be loaded concurrently many times. Check `require_file/2`
if you don't want a file to be loaded concurrently.
"""
def load_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
:elixir_code_server.call { :acquire, file }
loaded = :elixir_compiler.file file
:elixir_code_server.cast { :loaded, file }
loaded
end
@doc """
Requires the given `file`.
Accepts `relative_to` as an argument to tell where the file is located.
The return value is the same as that of `load_file/2`. If the file was already
required/loaded, doesn't do anything and returns `nil`.
Notice that if `require_file` is invoked by different processes concurrently,
the first process to invoke `require_file` acquires a lock and the remaining
ones will block until the file is available. I.e. if `require_file` is called
N times with a given file, it will be loaded only once. The first process to
call `require_file` will get the list of loaded modules, others will get `nil`.
Check `load_file/2` if you want a file to be loaded multiple times.
"""
def require_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
case :elixir_code_server.call({ :acquire, file }) do
:loaded ->
nil
{ :queued, ref } ->
receive do { :elixir_code_server, ^ref, :loaded } -> nil end
:proceed ->
loaded = :elixir_compiler.file file
:elixir_code_server.cast { :loaded, file }
loaded
end
end
@doc """
Gets the compilation options from the code server.
Check `compiler_options/1` for more information.
"""
def compiler_options do
:elixir_code_server.call :compiler_options
end
@doc """
Sets compilation options.
These options are global since they are stored by Elixir's Code Server.
Available options are:
* `:docs` - when `true`, retain documentation in the compiled module,
`true` by default;
* `:debug_info` - when `true`, retain debug information in the compiled module.
This allows a developer to reconstruct the original source
code, `false` by default;
* `:ignore_module_conflict` - when `true`, override modules that were already defined
without raising errors, `false` by default;
* `:warnings_as_errors` - cause compilation to fail when warnings are generated;
"""
def compiler_options(opts) do
:elixir_code_server.cast { :compiler_options, opts }
end
@doc """
Compiles the given string.
Returns a list of tuples where the first element is the module name
and the second one is its byte code (as a binary).
For compiling many files at once, check `Kernel.ParallelCompiler.files/2`.
"""
def compile_string(string, file \\ "nofile") when is_binary(file) do
:elixir_compiler.string to_char_list(string), file
end
@doc """
Compiles the quoted expression.
Returns a list of tuples where the first element is the module name and
the second one is its byte code (as a binary).
"""
def compile_quoted(quoted, file \\ "nofile") when is_binary(file) do
:elixir_compiler.quoted quoted, file
end
@doc """
Ensures the given module is loaded.
If the module is already loaded, this works as no-op. If the module
was not yet loaded, it tries to load it.
If it succeeds loading the module, it returns `{ :module, module }`.
If not, returns `{ :error, reason }` with the error reason.
## Code loading on the Erlang VM
Erlang has two modes to load code: interactive and embedded.
By default, the Erlang VM runs in interactive mode, where modules
are loaded as needed. In embedded mode the opposite happens, as all
modules need to be loaded upfront or explicitly.
Therefore, this function is used to check if a module is loaded
before using it and allows one to react accordingly. For example, the `URI`
module uses this function to check if a specific parser exists for a given
URI scheme.
## `Code.ensure_compiled/1`
Elixir also contains an `ensure_compiled/1` function that is a
superset of `ensure_loaded/1`.
Since Elixir's compilation happens in parallel, in some situations
you may need to use a module that was not yet compiled, therefore
it can't even be loaded.
`ensure_compiled/1` halts the current process until the
module we are depending on is available.
In most cases, `ensure_loaded/1` is enough. `ensure_compiled/1`
must be used in rare cases, usually involving macros that need to
invoke a module for callback information.
"""
def ensure_loaded(module) when is_atom(module) do
:code.ensure_loaded(module)
end
@doc """
Ensures the given module is loaded.
Similar to `ensure_loaded/1`, but returns `true` if the module
is already loaded or was successfully loaded. Returns `false`
otherwise.
"""
def ensure_loaded?(module) do
match?({ :module, ^module }, ensure_loaded(module))
end
@doc """
Ensures the given module is compiled and loaded.
If the module is already loaded, it works as no-op. If the module was
not loaded yet, it checks if it needs to be compiled first and then
tries to load it.
If it succeeds loading the module, it returns `{ :module, module }`.
If not, returns `{ :error, reason }` with the error reason.
Check `ensure_loaded/1` for more information on module loading
and when to use `ensure_loaded/1` or `ensure_compiled/1`.
"""
def ensure_compiled(module) when is_atom(module) do
case :code.ensure_loaded(module) do
{ :error, :nofile } = error ->
case :erlang.get(:elixir_ensure_compiled) do
:undefined -> error
_ ->
try do
module.__info__(:module)
{ :module, module }
rescue
UndefinedFunctionError -> error
end
end
other -> other
end
end
@doc """
Ensures the given module is compiled and loaded.
Similar to `ensure_compiled/1`, but returns `true` if the module
is already loaded or was successfully loaded and compiled.
Returns `false` otherwise.
"""
# Boolean variant of `ensure_compiled/1`.
def ensure_compiled?(module) do
  case ensure_compiled(module) do
    {:module, ^module} -> true
    _other -> false
  end
end
## Helpers
# Finds the file given the relative_to path.
#
# If the file is found, returns its path in binary, fails otherwise.
# Resolves `file` (optionally against `relative_to`) to an absolute path
# and verifies it points at a regular file; raises `LoadError` otherwise.
defp find_file(file, relative_to) do
  expanded =
    if relative_to,
      do: Path.expand(file, relative_to),
      else: Path.expand(file)

  if File.regular?(expanded) do
    expanded
  else
    raise LoadError, file: expanded
  end
end
end
| 32.106918
| 123
| 0.685929
|
9e6880b238363492ca359aeaa0728c610bdee7ff
| 2,035
|
exs
|
Elixir
|
test/fighter/accounts/accounts_test.exs
|
gautambaghel/fighter
|
970a098f0d234892af351070b6b2b596b9a2d83c
|
[
"Apache-2.0"
] | null | null | null |
test/fighter/accounts/accounts_test.exs
|
gautambaghel/fighter
|
970a098f0d234892af351070b6b2b596b9a2d83c
|
[
"Apache-2.0"
] | null | null | null |
test/fighter/accounts/accounts_test.exs
|
gautambaghel/fighter
|
970a098f0d234892af351070b6b2b596b9a2d83c
|
[
"Apache-2.0"
] | null | null | null |
defmodule Fighter.AccountsTest do
  # Exercises the CRUD functions of the Accounts context against the
  # sandboxed test repo provided by Fighter.DataCase.
  use Fighter.DataCase

  alias Fighter.Accounts

  describe "users" do
    alias Fighter.Accounts.User

    # Shared attribute maps for the create/update/invalid-changeset cases.
    @valid_attrs %{email: "some email", name: "some name"}
    @update_attrs %{email: "some updated email", name: "some updated name"}
    @invalid_attrs %{email: nil, name: nil}

    # Inserts and returns a user built from @valid_attrs merged with `attrs`;
    # crashes the test immediately if creation fails.
    def user_fixture(attrs \\ %{}) do
      {:ok, user} =
        attrs
        |> Enum.into(@valid_attrs)
        |> Accounts.create_user()

      user
    end

    test "list_users/0 returns all users" do
      user = user_fixture()
      assert Accounts.list_users() == [user]
    end

    test "get_user!/1 returns the user with given id" do
      user = user_fixture()
      assert Accounts.get_user!(user.id) == user
    end

    test "create_user/1 with valid data creates a user" do
      assert {:ok, %User{} = user} = Accounts.create_user(@valid_attrs)
      assert user.email == "some email"
      assert user.name == "some name"
    end

    test "create_user/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = Accounts.create_user(@invalid_attrs)
    end

    test "update_user/2 with valid data updates the user" do
      user = user_fixture()
      assert {:ok, user} = Accounts.update_user(user, @update_attrs)
      assert %User{} = user
      assert user.email == "some updated email"
      assert user.name == "some updated name"
    end

    test "update_user/2 with invalid data returns error changeset" do
      user = user_fixture()
      assert {:error, %Ecto.Changeset{}} = Accounts.update_user(user, @invalid_attrs)
      # The failed update must not have persisted any change.
      assert user == Accounts.get_user!(user.id)
    end

    test "delete_user/1 deletes the user" do
      user = user_fixture()
      assert {:ok, %User{}} = Accounts.delete_user(user)
      assert_raise Ecto.NoResultsError, fn -> Accounts.get_user!(user.id) end
    end

    test "change_user/1 returns a user changeset" do
      user = user_fixture()
      assert %Ecto.Changeset{} = Accounts.change_user(user)
    end
  end
end
| 29.926471
| 85
| 0.649631
|
9e688e3f7424afa50ea51305072d83fc6fce7acf
| 1,022
|
exs
|
Elixir
|
test/support/endpoint.exs
|
devonestes/ex_admin
|
e135ae7c28de78fc87baf519ff8a32da12e8bf66
|
[
"MIT"
] | 1,347
|
2015-10-05T18:23:49.000Z
|
2022-01-09T18:38:36.000Z
|
test/support/endpoint.ex
|
fanduel/ex_admin
|
05806a718859a0e155d3447c3ffde8a536fd676a
|
[
"MIT"
] | 402
|
2015-10-03T13:53:32.000Z
|
2021-07-08T09:52:22.000Z
|
test/support/endpoint.ex
|
fanduel/ex_admin
|
05806a718859a0e155d3447c3ffde8a536fd676a
|
[
"MIT"
] | 333
|
2015-10-12T22:56:57.000Z
|
2021-05-26T18:40:24.000Z
|
defmodule TestExAdmin.Endpoint do
  # Phoenix endpoint used by the ExAdmin test suite; plug order below is
  # significant (static/reload first, parsers before the router).
  use Phoenix.Endpoint, otp_app: :ex_admin

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phoenix.digest
  # when deploying your static files in production.
  plug(
    Plug.Static,
    at: "/",
    from: :ex_admin,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)
  )

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket)
    plug(Phoenix.LiveReloader)
    plug(Phoenix.CodeReloader)
  end

  plug(Plug.RequestId)
  plug(Plug.Logger)

  # Accept urlencoded, multipart and JSON bodies; Poison decodes JSON.
  plug(
    Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Poison
  )

  plug(Plug.MethodOverride)
  plug(Plug.Head)

  # Cookie-backed session store; the salt signs (does not encrypt) cookies.
  plug(
    Plug.Session,
    store: :cookie,
    key: "_binaryid_key",
    signing_salt: "JFbk5iZ6"
  )

  plug(TestExAdmin.Router)
end
| 22.217391
| 70
| 0.681996
|
9e689fcb88390c6574c4343b144868c161cb0de1
| 6,375
|
ex
|
Elixir
|
clients/iam/lib/google_api/iam/v1/model/policy.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/iam/lib/google_api/iam/v1/model/policy.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/iam/lib/google_api/iam/v1/model/policy.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.IAM.V1.Model.Policy do
  @moduledoc """
  An Identity and Access Management (IAM) policy, which specifies access
  controls for Google Cloud resources.

  A `Policy` is a collection of `bindings`. A `binding` binds one or more
  `members` (principals — users, service accounts, groups, domains) to a
  single `role`, and may carry a `condition` expression that gates the
  binding. See the [IAM documentation](https://cloud.google.com/iam/docs/)
  for the full JSON/YAML policy format.

  ## Attributes

  *   `auditConfigs` (*type:* `list(GoogleApi.IAM.V1.Model.AuditConfig.t)`, *default:* `nil`) - Specifies cloud audit logging configuration for this policy.
  *   `bindings` (*type:* `list(GoogleApi.IAM.V1.Model.Binding.t)`, *default:* `nil`) - Associates a list of `members` (principals) with a `role`, optionally under a `condition`. Each binding must contain at least one principal; a policy may reference up to 1,500 principals (250 of them Google groups), counting repeats.
  *   `etag` (*type:* `String.t`, *default:* `nil`) - Used for optimistic concurrency control to prevent concurrent policy updates from overwriting each other: pass the `etag` returned by `getIamPolicy` back to `setIamPolicy`. Required when using IAM Conditions, otherwise conditions can be silently lost.
  *   `version` (*type:* `integer()`, *default:* `nil`) - Policy format version (`0`, `1`, or `3`). Any operation touching conditional role bindings must specify version `3`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :auditConfigs => list(GoogleApi.IAM.V1.Model.AuditConfig.t()) | nil,
          :bindings => list(GoogleApi.IAM.V1.Model.Binding.t()) | nil,
          :etag => String.t() | nil,
          :version => integer() | nil
        }

  field(:auditConfigs, as: GoogleApi.IAM.V1.Model.AuditConfig, type: :list)
  field(:bindings, as: GoogleApi.IAM.V1.Model.Binding, type: :list)
  field(:etag)
  field(:version)
end
defimpl Poison.Decoder, for: GoogleApi.IAM.V1.Model.Policy do
  # Delegate decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.IAM.V1.Model.Policy.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.IAM.V1.Model.Policy do
  # All generated models share the ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 113.839286
| 2,012
| 0.748549
|
9e68cfe3f457a6b01d9b8477cc539a4623e86714
| 1,631
|
ex
|
Elixir
|
lib/web/router.ex
|
rozerosie/heycake
|
d080531705c0402fa53696d02307d6c08d25a60a
|
[
"MIT"
] | 1
|
2020-06-03T07:45:17.000Z
|
2020-06-03T07:45:17.000Z
|
lib/web/router.ex
|
rozerosie/heycake
|
d080531705c0402fa53696d02307d6c08d25a60a
|
[
"MIT"
] | 6
|
2021-06-17T23:13:19.000Z
|
2021-08-31T21:15:21.000Z
|
lib/web/router.ex
|
rozerosie/heycake
|
d080531705c0402fa53696d02307d6c08d25a60a
|
[
"MIT"
] | 2
|
2020-06-03T22:08:06.000Z
|
2022-03-11T22:13:36.000Z
|
defmodule Web.Router do
  use Web, :router

  # Standard browser stack: session + flash, CSRF protection, secure
  # headers, and a plug that loads the current user (if any) into the conn.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
    plug Web.Plugs.FetchUser
  end

  # Guards routes that require an authenticated user.
  pipeline :logged_in do
    plug Web.Plugs.EnsureUser
  end

  pipeline :api do
    plug :accepts, ["json"]
  end

  # Public (unauthenticated) browser routes.
  scope "/", Web do
    pipe_through :browser

    get "/", PageController, :index
    get("/sign-in", SessionController, :new)
    post("/sign-in", SessionController, :create)
    delete("/sign-out", SessionController, :delete)
    get("/register", RegistrationController, :new)
    post("/register", RegistrationController, :create)
    get("/register/reset", RegistrationResetController, :new)
    post("/register/reset", RegistrationResetController, :create)
    get("/register/reset/verify", RegistrationResetController, :edit)
    post("/register/reset/verify", RegistrationResetController, :update)
    get("/users/confirm", ConfirmationController, :confirm)
    # Health-check endpoint for load balancers / uptime monitors.
    get("/_health", PageController, :health)
  end

  # Browser routes that require a logged-in user.
  scope "/", Web do
    pipe_through([:browser, :logged_in])

    resources("/profile", ProfileController, singleton: true, only: [:show, :edit, :update])
  end

  # OAuth flows: provider request and callback.
  scope "/auth", Web do
    pipe_through([:browser, :logged_in])

    get("/:provider", AuthController, :request)
    get("/:provider/callback", AuthController, :callback)
  end

  # JSON-only webhook endpoints.
  scope "/", Web do
    pipe_through([:api])

    post("/webhooks/slack", WebhookController, :slack)
  end

  # NOTE(review): Mix.env() is evaluated at compile time, so this forward is
  # compiled in only for :dev builds (Mix is not available in releases).
  if Mix.env() == :dev do
    forward("/emails/sent", Bamboo.SentEmailViewerPlug)
  end
end
| 22.652778
| 92
| 0.678725
|
9e68d97e833f58880a1f2c659137e7d6c1e01cc2
| 12,534
|
ex
|
Elixir
|
lib/engines/pki/pki.ex
|
azohra/ptolemy
|
a1a7007f5408271cc88b30560ed636c6bd26e77c
|
[
"MIT"
] | 15
|
2018-12-20T15:02:21.000Z
|
2020-10-09T00:38:47.000Z
|
lib/engines/pki/pki.ex
|
azohra/ptolemy
|
a1a7007f5408271cc88b30560ed636c6bd26e77c
|
[
"MIT"
] | 7
|
2019-03-01T19:05:09.000Z
|
2019-04-12T19:38:45.000Z
|
lib/engines/pki/pki.ex
|
azohra/ptolemy
|
a1a7007f5408271cc88b30560ed636c6bd26e77c
|
[
"MIT"
] | 2
|
2019-02-26T16:02:12.000Z
|
2020-04-24T15:44:34.000Z
|
defmodule Ptolemy.Engines.PKI do
  @moduledoc """
  `Ptolemy.Engines.PKI` provides a public facing API for CRUD operations for the Vault PKI engine.

  Some function in this modules have additional options that can be provided to vault, you can get the option
  values from: https://www.vaultproject.io/api/secret/pki/index.html
  """
  alias Ptolemy.Engines.PKI.Engine
  alias Ptolemy.Server

  @doc """
  Create a role with a role from the specification provided.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#create-update-role for options.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.create(:production, :pki_engine1, :test_role1, %{allow_any_name: true})
  {:ok, "PKI role created"}
  ```
  """
  @spec create(atom(), atom(), atom(), map()) :: {:ok, String.t()} | {:error, String.t()}
  def create(server_name, engine_name, role, params \\ %{}) do
    path = get_pki_path!(server_name, engine_name, role, "roles")
    path_create(server_name, path, params)
  end

  @doc """
  Create a role from the specification provided, errors out if an errors occurs.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#create-update-role for options.
  """
  @spec create!(atom(), atom(), atom(), map()) :: :ok | no_return()
  def create!(server_name, engine_name, role, params \\ %{}) do
    case create(server_name, engine_name, role, params) do
      {:error, msg} -> raise RuntimeError, message: msg
      _resp -> :ok
    end
  end

  @doc """
  Create a role from the specification provided via a specific path.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.path_create(:production, "/pki/data/", %{allow_any_name: true})
  {:ok, "PKI role created"}
  ```
  """
  @spec path_create(atom(), String.t(), map()) :: {:ok, String.t()} | {:error, String.t()}
  def path_create(server_name, path, params \\ %{}) do
    client = create_client(server_name)
    Engine.create_role(client, path, params)
  end

  @doc """
  Reads a brand new generated certificate from a role.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#generate-certificate for options.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.read(:production, :pki_engine1, :test_role1, "www.example.com")
  {:ok,
    %{
      "auth" => nil,
      "data" => %{
        "certificate" => "-----BEGIN CERTIFICATE-----generated-cert-----END CERTIFICATE-----",
        "expiration" => 1555610944,
        "issuing_ca" => "-----BEGIN CERTIFICATE-----ca-cert-goes-here-----END CERTIFICATE-----",
        "private_key" => "-----BEGIN RSA PRIVATE KEY-----some-rsa-key-here-----END RSA PRIVATE KEY-----",
        "private_key_type" => "rsa",
        "serial_number" => "1c:42:ac:e6:80:4c:7c:fc:70:af:c9:64:55:11:95:84:44:22:6f:e5"
      },
      "lease_duration" => 0,
      "lease_id" => "",
      "renewable" => false,
      "request_id" => "f53c85d0-46ef-df35-349f-dfe4e43ac6d8",
      "warnings" => nil,
      "wrap_info" => nil
    }
  }
  ```
  """
  @spec read(atom(), atom(), atom(), String.t(), map()) :: {:ok, map()} | {:error, String.t()}
  def read(server_name, engine_name, role, common_name, payload \\ %{}) do
    path = get_pki_path!(server_name, engine_name, role, "issue")
    path_read(server_name, path, common_name, payload)
  end

  @doc """
  Reads a brand new generated certificate from a role, errors out if an error occurs.
  """
  @spec read!(atom(), atom(), atom(), String.t(), map()) :: map() | no_return()
  def read!(server_name, engine_name, role, common_name, payload \\ %{}) do
    case read(server_name, engine_name, role, common_name, payload) do
      {:error, msg} -> raise RuntimeError, message: msg
      {:ok, resp} -> resp
    end
  end

  @doc """
  Reads a brand new generated certificate from a role via given a specific path.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#generate-certificate for options.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.path_read(:production, "/pki/test", "www.example.com")
  {:ok,
    %{
      "auth" => nil,
      "data" => %{
        "certificate" => "-----BEGIN CERTIFICATE-----generated-cert-----END CERTIFICATE-----",
        "expiration" => 1555610944,
        "issuing_ca" => "-----BEGIN CERTIFICATE-----ca-cert-goes-here-----END CERTIFICATE-----",
        "private_key" => "-----BEGIN RSA PRIVATE KEY-----some-rsa-key-here-----END RSA PRIVATE KEY-----",
        "private_key_type" => "rsa",
        "serial_number" => "1c:42:ac:e6:80:4c:7c:fc:70:af:c9:64:55:11:95:84:44:22:6f:e5"
      },
      "lease_duration" => 0,
      "lease_id" => "",
      "renewable" => false,
      "request_id" => "f53c85d0-46ef-df35-349f-dfe4e43ac6d8",
      "warnings" => nil,
      "wrap_info" => nil
    }
  }
  """
  @spec path_read(atom(), String.t(), String.t(), map()) :: {:ok, map()} | {:error, String.t()}
  def path_read(server_name, path, common_name, payload \\ %{}) do
    client = create_client(server_name)
    Engine.generate_secret(client, path, common_name, payload)
  end

  @doc """
  Update a pki role in vault.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#create-update-role for options.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.update(:production, :pki_engine1, :test_role1, %{allow_any_name: false})
  {:ok, "PKI role updated"}
  ```
  """
  @spec update(atom(), atom(), atom(), map()) :: {:ok, String.t()} | {:error, String.t()}
  def update(server_name, engine_name, role, payload \\ %{}) do
    path = get_pki_path!(server_name, engine_name, role, "roles")
    path_update(server_name, path, payload)
  end

  @doc """
  Update a pki role in vault, errors out if an errors occurs.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#create-update-role for options.
  """
  @spec update!(atom(), atom(), atom(), map()) :: :ok | no_return()
  def update!(server_name, engine_name, secret, payload \\ %{}) do
    case update(server_name, engine_name, secret, payload) do
      {:error, msg} -> raise RuntimeError, message: msg
      _resp -> :ok
    end
  end

  @doc """
  Update a pki role in vault via a specified path.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#create-update-role for options.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.path_update(:production, "pki/test", %{allow_any_name: false})
  {:ok, "PKI role updated"}
  ```
  """
  @spec path_update(atom(), String.t(), map()) :: {:ok, String.t()} | {:error, String.t()}
  def path_update(server_name, path, payload \\ %{}) do
    client = create_client(server_name)

    # Vault has no dedicated "update role" endpoint; create-or-update is the
    # same call, so only the success message is rewritten here.
    case Engine.create_role(client, path, payload) do
      {:ok, _} -> {:ok, "PKI role updated"}
      err -> err
    end
  end

  @doc """
  Revoke either a certificate or a role from the pki engine in vault.

  Optional payload is provided if there is a need to overide other options.
  See:
  - For role deletion options: https://www.vaultproject.io/api/secret/pki/index.html#delete-role
  - For cert deletion options:

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.delete(:production, :pki_engine1, :certificate, "17:84:7f:5b:bd:90:da:21:16")
  {:ok, "PKI certificate revoked"}
  iex(3)> Ptolemy.Engines.PKI.delete(:production, :pki_engine1, :role, :test_role1)
  {:ok, "PKI role revoked"}
  ```
  """
  @spec delete(atom(), atom(), atom(), any()) :: {:ok, String.t()} | {:error, String.t()}
  def delete(server_name, engine_name, delete_type, arg1) do
    case delete_type do
      :certificate -> delete_cert(server_name, engine_name, arg1)
      :role -> delete_role(server_name, engine_name, arg1)
    end
  end

  @doc """
  Revoke either a certificate or a role from the pki engine in vault, errors out if an errors occurs.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#delete-role for options.
  """
  @spec delete!(atom(), atom(), atom(), any()) :: :ok | no_return()
  def delete!(server_name, engine_name, delete_type, arg1) do
    case delete(server_name, engine_name, delete_type, arg1) do
      {:ok, _} -> :ok
      _ -> raise "Failed to delete from PKI engine"
    end
  end

  @doc """
  Revoke a certificate in vault.

  Optional payload is provided if there is a need to overide other options.
  See https://www.vaultproject.io/api/secret/pki/index.html#delete-role for options.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.delete_cert(:production, :pki_engine1, serial_number)
  {:ok, "PKI certificate revoked"}
  ```
  """
  @spec delete_cert(atom(), atom(), String.t()) :: {:ok, String.t()} | {:error, String.t()}
  def delete_cert(server_name, engine_name, serial_number) do
    path = get_pki_path!(server_name, engine_name, "revoke")
    path_delete_cert(server_name, path, serial_number)
  end

  @doc """
  Revoke a certificate in vault.
  """
  @spec path_delete_cert(atom(), String.t(), String.t()) ::
          {:ok, String.t()} | {:error, String.t()}
  def path_delete_cert(server_name, path, serial_number) do
    client = create_client(server_name)
    Engine.revoke_cert(client, path, serial_number)
  end

  @doc """
  Revoke a role in vault.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.PKI.delete_role(:production, :pki_engine1, :test_role1)
  {:ok, "PKI role revoked"}
  ```
  """
  @spec delete_role(atom(), atom(), atom()) :: {:ok, String.t()} | {:error, String.t()}
  def delete_role(server_name, engine_name, role) do
    path = get_pki_path!(server_name, engine_name, role, "roles")
    path_delete_role(server_name, path)
  end

  @doc """
  Revoke a role in vault.
  """
  @spec path_delete_role(atom(), String.t()) :: {:ok, String.t()} | {:error, String.t()}
  def path_delete_role(server_name, path) do
    client = create_client(server_name)
    Engine.revoke_role(client, path)
  end

  # Builds a Tesla client carrying the server's Vault credentials as headers,
  # pointed at the server's v1 API, with TLS 1.2 enforced via Hackney.
  defp create_client(server_name) do
    creds = Server.fetch_credentials(server_name)
    {:ok, http_opts} = Server.get_data(server_name, :http_opts)
    {:ok, url} = Server.get_data(server_name, :vault_url)

    Tesla.client(
      [
        {Tesla.Middleware.BaseUrl, "#{url}/v1"},
        {Tesla.Middleware.Headers, creds},
        {Tesla.Middleware.Opts, http_opts},
        {Tesla.Middleware.JSON, []}
      ],
      {Tesla.Adapter.Hackney, [ssl_options: [{:versions, [:"tlsv1.2"]}], recv_timeout: 10_000]}
    )
  end

  # Resolves the engine path for `role`/`operation` from the server's
  # :engines config. The previous implementation had two byte-identical
  # clauses that differed only in `throw` vs `raise` on the error path;
  # they are merged here and raise consistently.
  defp get_pki_path!(server_name, engine_name, role, operation)
       when is_atom(role) or is_bitstring(role) do
    with {:ok, conf} <- Server.get_data(server_name, :engines),
         {:ok, pki_conf} <- Keyword.fetch(conf, engine_name),
         %{engine_path: path, roles: roles} <- pki_conf do
      {:ok, role} = Map.fetch(roles, role)
      make_pki_path!(path, role, operation)
    else
      {:error, _msg} -> raise "#{server_name} does not have a pki_engine config"
      :error -> raise "Could not find engine_name in specified config"
    end
  end

  # Path variant without a role segment (e.g. the "revoke" endpoint).
  defp get_pki_path!(server_name, engine_name, operation) do
    with {:ok, conf} <- Server.get_data(server_name, :engines),
         {:ok, pki_conf} <- Keyword.fetch(conf, engine_name),
         %{engine_path: path} <- pki_conf do
      "/#{path}#{operation}"
    else
      {:error, _msg} -> raise "#{server_name} does not have a pki_engine config"
      :error -> raise "Could not find engine_name in specified config"
    end
  end

  # Joins engine path + operation + role path into a single request path.
  defp make_pki_path!(engine_path, role_path, operation) do
    "/#{engine_path}#{operation}#{role_path}"
  end
end
| 36.649123
| 109
| 0.651189
|
9e68e6e446b84820cd11d880b206cd5f2beaeb6c
| 787
|
ex
|
Elixir
|
lib/mix/tasks/google_apis.discover.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/tasks/google_apis.discover.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/tasks/google_apis.discover.ex
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | 1
|
2020-11-10T16:58:27.000Z
|
2020-11-10T16:58:27.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Mix.Tasks.GoogleApis.Discover do
  @moduledoc """
  Downloads the Google API discovery list, optionally writing it to the
  output path given as the first task argument.
  """
  use Mix.Task

  @shortdoc "Download GoogleApi list"

  # Exactly one argument: treat it as the output destination.
  def run([output]), do: GoogleApis.discover(output)

  # Any other argument shape falls back to the default destination.
  def run(_args), do: GoogleApis.discover()
end
| 29.148148
| 74
| 0.747141
|
9e68f1430a3d471a7c72051cfe1bf1faf384628b
| 962
|
ex
|
Elixir
|
api/lib/remote_day/emails/late_remote_day.ex
|
arkanoryn/remote_day
|
10e0a4b8995c44fae774c21189725b54f69186b4
|
[
"Apache-2.0"
] | null | null | null |
api/lib/remote_day/emails/late_remote_day.ex
|
arkanoryn/remote_day
|
10e0a4b8995c44fae774c21189725b54f69186b4
|
[
"Apache-2.0"
] | 24
|
2018-10-19T07:54:11.000Z
|
2022-02-26T13:28:55.000Z
|
api/lib/remote_day/emails/late_remote_day.ex
|
arkanoryn/remote_day
|
10e0a4b8995c44fae774c21189725b54f69186b4
|
[
"Apache-2.0"
] | null | null | null |
defmodule RemoteDay.Emails.LateRemoteWorkers do
  @moduledoc """
  Builds the "late announcement" email sent on behalf of a user who
  announces a remote day on the day itself.
  """
  import Bamboo.Email

  alias RemoteDay.{
    Account.User,
    Emails.Receivers
  }

  require Logger

  # Fixed sender address for all late-announcement emails.
  @from "remote@your_office.com"

  # Builds (does not deliver) the announcement email, addressed to every
  # recipient returned by Receivers.all/0 and dated with today's date.
  def announcement(%User{} = user) do
    date = Timex.format!(Timex.today(), "{0D}-{0M}-{YYYY}")

    new_email()
    |> to(Receivers.all())
    |> from(@from)
    |> subject("[#{date}] Remote workers - late announcement")
    |> text_body(generate_text_body(user))
    |> html_body(generate_html_body(user))
  end

  # Plain-text body, signed with the username when set, else the email.
  defp generate_text_body(user) do
    """
    Sorry, I am late :(
    <p>
    I will be working remotely today.
    <p>
    <br />
    --
    #{user.username || user.email}
    """
  end

  # HTML body with the same signature fallback.
  defp generate_html_body(user) do
    """
    <h1>Sorry, I am late :(</h1>
    I will be working remotely today.
    <br /><br />--<br />
    <i>
    #{user.username || user.email}
    </i>
    """
  end
end
| 17.814815
| 62
| 0.586279
|
9e6954a0fe41f98d1492479e36316e9fafee22da
| 3,782
|
ex
|
Elixir
|
clients/big_query/lib/google_api/big_query/v2/model/dataset_access.ex
|
linjunpop/elixir-google-api
|
444cb2b2fb02726894535461a474beddd8b86db4
|
[
"Apache-2.0"
] | null | null | null |
clients/big_query/lib/google_api/big_query/v2/model/dataset_access.ex
|
linjunpop/elixir-google-api
|
444cb2b2fb02726894535461a474beddd8b86db4
|
[
"Apache-2.0"
] | null | null | null |
clients/big_query/lib/google_api/big_query/v2/model/dataset_access.ex
|
linjunpop/elixir-google-api
|
444cb2b2fb02726894535461a474beddd8b86db4
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.BigQuery.V2.Model.DatasetAccess do
  @moduledoc """
  One access-control entry on a BigQuery dataset. Exactly one of the
  "[Pick one]" grantee fields should be set per entry.

  ## Attributes

  - domain (String.t): [Pick one] A domain to grant access to; users signed in with that domain get the specified access. Example: \"example.com\". Maps to IAM policy member \"domain:DOMAIN\". Defaults to: `null`.
  - groupByEmail (String.t): [Pick one] Email address of a Google Group to grant access to. Maps to IAM policy member \"group:GROUP\". Defaults to: `null`.
  - iamMember (String.t): [Pick one] Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. Defaults to: `null`.
  - role (String.t): [Required] IAM role ID granted to the grantee. Legacy mappings apply (OWNER -> roles/bigquery.dataOwner, WRITER -> roles/bigquery.dataEditor, READER -> roles/bigquery.dataViewer); either form is accepted but only the legacy form is returned. Defaults to: `null`.
  - specialGroup (String.t): [Pick one] A special group: projectOwners, projectReaders, projectWriters, or allAuthenticatedUsers. Maps to similarly-named IAM members. Defaults to: `null`.
  - userByEmail (String.t): [Pick one] Email address of a user to grant access to, e.g. fred@example.com. Maps to IAM policy member \"user:EMAIL\" or \"serviceAccount:EMAIL\". Defaults to: `null`.
  - view (TableReference): [Pick one] A view from a different dataset to grant access to; queries against it get read access to this dataset's tables. `role` is not required when set, and access must be re-granted if the view is updated. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :domain => any(),
          :groupByEmail => any(),
          :iamMember => any(),
          :role => any(),
          :specialGroup => any(),
          :userByEmail => any(),
          :view => GoogleApi.BigQuery.V2.Model.TableReference.t()
        }

  field(:domain)
  field(:groupByEmail)
  field(:iamMember)
  field(:role)
  field(:specialGroup)
  field(:userByEmail)
  field(:view, as: GoogleApi.BigQuery.V2.Model.TableReference)
end
defimpl Poison.Decoder, for: GoogleApi.BigQuery.V2.Model.DatasetAccess do
  # Delegate decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.BigQuery.V2.Model.DatasetAccess.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.BigQuery.V2.Model.DatasetAccess do
  # All generated models share the ModelBase encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 57.30303
| 515
| 0.735854
|
9e699edda0c977ae77c74479defcf40be97df9d4
| 190
|
ex
|
Elixir
|
testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/DualOperator.ex
|
keyno63/intellij-elixir
|
4033e319992c53ddd42a683ee7123a97b5e34f02
|
[
"Apache-2.0"
] | 1,668
|
2015-01-03T05:54:27.000Z
|
2022-03-25T08:01:20.000Z
|
testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/DualOperator.ex
|
keyno63/intellij-elixir
|
4033e319992c53ddd42a683ee7123a97b5e34f02
|
[
"Apache-2.0"
] | 2,018
|
2015-01-01T22:43:39.000Z
|
2022-03-31T20:13:08.000Z
|
testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/DualOperator.ex
|
keyno63/intellij-elixir
|
4033e319992c53ddd42a683ee7123a97b5e34f02
|
[
"Apache-2.0"
] | 145
|
2015-01-15T11:37:16.000Z
|
2021-12-22T05:51:02.000Z
|
Module.+[key: value]
Module.+ [key: value]
Module.+[()]
Module.+ [()]
Module.+[matched_expression]
Module.+ [matched_expression]
Module.+[matched_expression,]
Module.+ [matched_expression,]
| 21.111111
| 30
| 0.715789
|
9e69d6cb50a8c3021ce6daee1dbc9aa0c799b309
| 498
|
exs
|
Elixir
|
test/test_helper.exs
|
xavier/ex_png
|
58a23a705ace85e2351cbf0787e88df79b8f6494
|
[
"Apache-2.0"
] | 10
|
2015-03-15T16:12:00.000Z
|
2021-02-09T09:30:17.000Z
|
test/test_helper.exs
|
xavier/ex_png
|
58a23a705ace85e2351cbf0787e88df79b8f6494
|
[
"Apache-2.0"
] | 1
|
2019-04-20T12:09:24.000Z
|
2019-04-20T12:09:24.000Z
|
test/test_helper.exs
|
xavier/ex_png
|
58a23a705ace85e2351cbf0787e88df79b8f6494
|
[
"Apache-2.0"
] | 5
|
2019-01-19T19:47:09.000Z
|
2021-11-11T23:54:25.000Z
|
# Boot ExUnit so the test files loaded after this helper can register and run.
ExUnit.start()
defmodule Fixtures do
  @moduledoc false

  # Absolute path of `filename` under <cwd>/test/fixtures.
  def path(filename) do
    Path.join([File.cwd!(), "test", "fixtures", filename])
  end

  # Raw contents of the named fixture file; raises if it cannot be read.
  def read(filename) do
    File.read!(path(filename))
  end
end
defmodule StagingArea do
  @moduledoc false

  # When true, generated PNGs are kept after the run for manual inspection.
  @keep_files false

  # Absolute path of `filename` under <cwd>/test/tmp.
  def path(filename) do
    Path.join([File.cwd!(), "test", "tmp", filename])
  end

  # All PNG files currently present in the staging directory.
  def list_files do
    Path.wildcard(path("*.png"))
  end

  # Remove every staged PNG unless the module is configured to keep them.
  # (`if not` replaces the soft-deprecated `unless`.)
  def delete_files do
    if not @keep_files do
      Enum.each(list_files(), &File.rm/1)
    end
  end
end
| 15.090909
| 55
| 0.644578
|
9e69ee57ee0727ef8ecd351d5023838767713fee
| 24,282
|
ex
|
Elixir
|
data/auto_generated/video/85ccd74b8a2167063c1f4cf12e45b62d.ex
|
breunigs/veloroute
|
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
|
[
"0BSD"
] | 12
|
2018-06-15T10:18:43.000Z
|
2022-01-24T12:50:54.000Z
|
data/auto_generated/video/85ccd74b8a2167063c1f4cf12e45b62d.ex
|
breunigs/veloroute
|
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
|
[
"0BSD"
] | 15
|
2018-06-21T18:04:12.000Z
|
2021-10-16T12:54:39.000Z
|
data/auto_generated/video/85ccd74b8a2167063c1f4cf12e45b62d.ex
|
breunigs/veloroute
|
ac3b1eeb2ef2369c27186a138f6ffd8284652dab
|
[
"0BSD"
] | 2
|
2020-03-09T19:21:36.000Z
|
2022-01-16T03:29:51.000Z
|
defmodule(Data.AutoGenerated.Video.Rendered_85ccd74b8a2167063c1f4cf12e45b62d) do
@moduledoc "#{"Billeradweg (FR4): Walter-Hammer-Weg Richtung Westen (Nebenstrecke FR4)"}
AUTOGENERATED. To update this file, run mix velo.videos.generate.
See Video.Rendered for functionality.
"
@behaviour Video.Rendered
@impl Video.Rendered
def(name()) do
"Billeradweg (FR4): Walter-Hammer-Weg Richtung Westen (Nebenstrecke FR4)"
end
@impl Video.Rendered
def(hash()) do
"85ccd74b8a2167063c1f4cf12e45b62d"
end
@impl Video.Rendered
def(length_ms()) do
97564
end
@impl Video.Rendered
def(sources()) do
[{"2021-04-25-freizeitroute4/GX021401", "00:01:18.111", "00:02:55.675"}]
end
@impl Video.Rendered
def(coords()) do
[
%Video.TimedPoint{lat: 53.50875938323353, lon: 10.163970751497006, time_offset_ms: 0},
%Video.TimedPoint{lat: 53.508772, lon: 10.163932, time_offset_ms: 301},
%Video.TimedPoint{lat: 53.50879, lon: 10.163854, time_offset_ms: 634},
%Video.TimedPoint{lat: 53.508812, lon: 10.163764, time_offset_ms: 968},
%Video.TimedPoint{lat: 53.508839, lon: 10.16367, time_offset_ms: 1302},
%Video.TimedPoint{lat: 53.508874, lon: 10.163574, time_offset_ms: 1635},
%Video.TimedPoint{lat: 53.508912, lon: 10.163478, time_offset_ms: 1969},
%Video.TimedPoint{lat: 53.508951, lon: 10.16338, time_offset_ms: 2303},
%Video.TimedPoint{lat: 53.508982, lon: 10.163275, time_offset_ms: 2636},
%Video.TimedPoint{lat: 53.509017, lon: 10.163172, time_offset_ms: 2970},
%Video.TimedPoint{lat: 53.509053, lon: 10.163073, time_offset_ms: 3304},
%Video.TimedPoint{lat: 53.509085, lon: 10.162969, time_offset_ms: 3637},
%Video.TimedPoint{lat: 53.509115, lon: 10.162864, time_offset_ms: 3971},
%Video.TimedPoint{lat: 53.509144, lon: 10.162756, time_offset_ms: 4305},
%Video.TimedPoint{lat: 53.509174, lon: 10.162646, time_offset_ms: 4638},
%Video.TimedPoint{lat: 53.509199, lon: 10.162538, time_offset_ms: 4972},
%Video.TimedPoint{lat: 53.509229, lon: 10.162438, time_offset_ms: 5306},
%Video.TimedPoint{lat: 53.509261, lon: 10.162336, time_offset_ms: 5639},
%Video.TimedPoint{lat: 53.509289, lon: 10.162228, time_offset_ms: 5973},
%Video.TimedPoint{lat: 53.509315, lon: 10.162141, time_offset_ms: 6307},
%Video.TimedPoint{lat: 53.509348, lon: 10.162069, time_offset_ms: 6640},
%Video.TimedPoint{lat: 53.509378, lon: 10.161985, time_offset_ms: 6974},
%Video.TimedPoint{lat: 53.509404, lon: 10.161892, time_offset_ms: 7308},
%Video.TimedPoint{lat: 53.509432, lon: 10.161794, time_offset_ms: 7641},
%Video.TimedPoint{lat: 53.509454, lon: 10.161694, time_offset_ms: 7975},
%Video.TimedPoint{lat: 53.509471, lon: 10.161596, time_offset_ms: 8309},
%Video.TimedPoint{lat: 53.509488, lon: 10.161504, time_offset_ms: 8642},
%Video.TimedPoint{lat: 53.509504, lon: 10.161438, time_offset_ms: 8976},
%Video.TimedPoint{lat: 53.50952, lon: 10.161391, time_offset_ms: 9310},
%Video.TimedPoint{lat: 53.509541, lon: 10.161358, time_offset_ms: 9643},
%Video.TimedPoint{lat: 53.509574, lon: 10.1613, time_offset_ms: 9977},
%Video.TimedPoint{lat: 53.509609, lon: 10.161218, time_offset_ms: 10311},
%Video.TimedPoint{lat: 53.509637, lon: 10.161113, time_offset_ms: 10644},
%Video.TimedPoint{lat: 53.509668, lon: 10.161001, time_offset_ms: 10978},
%Video.TimedPoint{lat: 53.509701, lon: 10.160885, time_offset_ms: 11312},
%Video.TimedPoint{lat: 53.509739, lon: 10.160767, time_offset_ms: 11645},
%Video.TimedPoint{lat: 53.509779, lon: 10.160653, time_offset_ms: 11979},
%Video.TimedPoint{lat: 53.509818, lon: 10.16054, time_offset_ms: 12313},
%Video.TimedPoint{lat: 53.509857, lon: 10.160416, time_offset_ms: 12646},
%Video.TimedPoint{lat: 53.509895, lon: 10.160295, time_offset_ms: 12980},
%Video.TimedPoint{lat: 53.50993, lon: 10.160176, time_offset_ms: 13314},
%Video.TimedPoint{lat: 53.509966, lon: 10.160051, time_offset_ms: 13647},
%Video.TimedPoint{lat: 53.510004, lon: 10.159926, time_offset_ms: 13981},
%Video.TimedPoint{lat: 53.510044, lon: 10.159803, time_offset_ms: 14315},
%Video.TimedPoint{lat: 53.510081, lon: 10.159683, time_offset_ms: 14648},
%Video.TimedPoint{lat: 53.510119, lon: 10.159568, time_offset_ms: 14982},
%Video.TimedPoint{lat: 53.51016, lon: 10.159452, time_offset_ms: 15316},
%Video.TimedPoint{lat: 53.5102, lon: 10.159335, time_offset_ms: 15649},
%Video.TimedPoint{lat: 53.510236, lon: 10.159221, time_offset_ms: 15983},
%Video.TimedPoint{lat: 53.510266, lon: 10.15911, time_offset_ms: 16317},
%Video.TimedPoint{lat: 53.510296, lon: 10.159005, time_offset_ms: 16650},
%Video.TimedPoint{lat: 53.51032, lon: 10.158911, time_offset_ms: 16984},
%Video.TimedPoint{lat: 53.510338, lon: 10.158814, time_offset_ms: 17318},
%Video.TimedPoint{lat: 53.510369, lon: 10.158722, time_offset_ms: 17651},
%Video.TimedPoint{lat: 53.510404, lon: 10.158643, time_offset_ms: 17985},
%Video.TimedPoint{lat: 53.510425, lon: 10.158568, time_offset_ms: 18319},
%Video.TimedPoint{lat: 53.510446, lon: 10.1585, time_offset_ms: 18652},
%Video.TimedPoint{lat: 53.510463, lon: 10.158442, time_offset_ms: 18986},
%Video.TimedPoint{lat: 53.510482, lon: 10.158385, time_offset_ms: 19320},
%Video.TimedPoint{lat: 53.510511, lon: 10.158305, time_offset_ms: 19653},
%Video.TimedPoint{lat: 53.510534, lon: 10.158197, time_offset_ms: 19987},
%Video.TimedPoint{lat: 53.51056, lon: 10.15808, time_offset_ms: 20321},
%Video.TimedPoint{lat: 53.510591, lon: 10.157958, time_offset_ms: 20654},
%Video.TimedPoint{lat: 53.510628, lon: 10.157835, time_offset_ms: 20988},
%Video.TimedPoint{lat: 53.510668, lon: 10.157715, time_offset_ms: 21322},
%Video.TimedPoint{lat: 53.510702, lon: 10.157585, time_offset_ms: 21655},
%Video.TimedPoint{lat: 53.510732, lon: 10.157451, time_offset_ms: 21989},
%Video.TimedPoint{lat: 53.510759, lon: 10.157316, time_offset_ms: 22323},
%Video.TimedPoint{lat: 53.510781, lon: 10.157182, time_offset_ms: 22656},
%Video.TimedPoint{lat: 53.510802, lon: 10.157042, time_offset_ms: 22990},
%Video.TimedPoint{lat: 53.510816, lon: 10.156909, time_offset_ms: 23324},
%Video.TimedPoint{lat: 53.510828, lon: 10.156794, time_offset_ms: 23657},
%Video.TimedPoint{lat: 53.51084, lon: 10.156697, time_offset_ms: 23991},
%Video.TimedPoint{lat: 53.51085, lon: 10.15664, time_offset_ms: 24325},
%Video.TimedPoint{lat: 53.510862, lon: 10.156607, time_offset_ms: 24658},
%Video.TimedPoint{lat: 53.510867, lon: 10.156569, time_offset_ms: 24992},
%Video.TimedPoint{lat: 53.510865, lon: 10.15651, time_offset_ms: 25326},
%Video.TimedPoint{lat: 53.510868, lon: 10.15643, time_offset_ms: 25659},
%Video.TimedPoint{lat: 53.510873, lon: 10.156339, time_offset_ms: 25993},
%Video.TimedPoint{lat: 53.510885, lon: 10.156237, time_offset_ms: 26327},
%Video.TimedPoint{lat: 53.510901, lon: 10.156132, time_offset_ms: 26660},
%Video.TimedPoint{lat: 53.510911, lon: 10.156014, time_offset_ms: 26994},
%Video.TimedPoint{lat: 53.510916, lon: 10.155899, time_offset_ms: 27328},
%Video.TimedPoint{lat: 53.510922, lon: 10.155788, time_offset_ms: 27661},
%Video.TimedPoint{lat: 53.510927, lon: 10.155678, time_offset_ms: 27995},
%Video.TimedPoint{lat: 53.51093, lon: 10.15557, time_offset_ms: 28329},
%Video.TimedPoint{lat: 53.510931, lon: 10.155461, time_offset_ms: 28662},
%Video.TimedPoint{lat: 53.510929, lon: 10.155355, time_offset_ms: 28996},
%Video.TimedPoint{lat: 53.510928, lon: 10.155257, time_offset_ms: 29330},
%Video.TimedPoint{lat: 53.510925, lon: 10.155156, time_offset_ms: 29663},
%Video.TimedPoint{lat: 53.510926, lon: 10.155051, time_offset_ms: 29997},
%Video.TimedPoint{lat: 53.510937, lon: 10.15495, time_offset_ms: 30331},
%Video.TimedPoint{lat: 53.510945, lon: 10.154842, time_offset_ms: 30664},
%Video.TimedPoint{lat: 53.51094, lon: 10.15473, time_offset_ms: 30998},
%Video.TimedPoint{lat: 53.510942, lon: 10.154619, time_offset_ms: 31332},
%Video.TimedPoint{lat: 53.510962, lon: 10.15452, time_offset_ms: 31665},
%Video.TimedPoint{lat: 53.510972, lon: 10.154412, time_offset_ms: 31999},
%Video.TimedPoint{lat: 53.510968, lon: 10.154303, time_offset_ms: 32333},
%Video.TimedPoint{lat: 53.510967, lon: 10.15419, time_offset_ms: 32666},
%Video.TimedPoint{lat: 53.510967, lon: 10.154069, time_offset_ms: 33000},
%Video.TimedPoint{lat: 53.510962, lon: 10.153941, time_offset_ms: 33334},
%Video.TimedPoint{lat: 53.510966, lon: 10.153812, time_offset_ms: 33667},
%Video.TimedPoint{lat: 53.510979, lon: 10.153691, time_offset_ms: 34001},
%Video.TimedPoint{lat: 53.510987, lon: 10.153571, time_offset_ms: 34335},
%Video.TimedPoint{lat: 53.510989, lon: 10.153447, time_offset_ms: 34668},
%Video.TimedPoint{lat: 53.51099, lon: 10.153323, time_offset_ms: 35002},
%Video.TimedPoint{lat: 53.510994, lon: 10.153196, time_offset_ms: 35336},
%Video.TimedPoint{lat: 53.511, lon: 10.153067, time_offset_ms: 35669},
%Video.TimedPoint{lat: 53.511007, lon: 10.152955, time_offset_ms: 36003},
%Video.TimedPoint{lat: 53.511018, lon: 10.152832, time_offset_ms: 36337},
%Video.TimedPoint{lat: 53.511024, lon: 10.152699, time_offset_ms: 36670},
%Video.TimedPoint{lat: 53.511034, lon: 10.152567, time_offset_ms: 37004},
%Video.TimedPoint{lat: 53.511039, lon: 10.152433, time_offset_ms: 37338},
%Video.TimedPoint{lat: 53.511043, lon: 10.1523, time_offset_ms: 37671},
%Video.TimedPoint{lat: 53.511048, lon: 10.152171, time_offset_ms: 38005},
%Video.TimedPoint{lat: 53.511048, lon: 10.152048, time_offset_ms: 38339},
%Video.TimedPoint{lat: 53.511044, lon: 10.151924, time_offset_ms: 38672},
%Video.TimedPoint{lat: 53.511043, lon: 10.151802, time_offset_ms: 39006},
%Video.TimedPoint{lat: 53.511045, lon: 10.151695, time_offset_ms: 39340},
%Video.TimedPoint{lat: 53.511055, lon: 10.151617, time_offset_ms: 39673},
%Video.TimedPoint{lat: 53.511062, lon: 10.151538, time_offset_ms: 40007},
%Video.TimedPoint{lat: 53.511056, lon: 10.15145, time_offset_ms: 40341},
%Video.TimedPoint{lat: 53.511058, lon: 10.151348, time_offset_ms: 40674},
%Video.TimedPoint{lat: 53.511065, lon: 10.151236, time_offset_ms: 41008},
%Video.TimedPoint{lat: 53.511063, lon: 10.151125, time_offset_ms: 41342},
%Video.TimedPoint{lat: 53.511062, lon: 10.151018, time_offset_ms: 41675},
%Video.TimedPoint{lat: 53.511068, lon: 10.150899, time_offset_ms: 42009},
%Video.TimedPoint{lat: 53.511077, lon: 10.150776, time_offset_ms: 42343},
%Video.TimedPoint{lat: 53.511079, lon: 10.150654, time_offset_ms: 42676},
%Video.TimedPoint{lat: 53.511081, lon: 10.150528, time_offset_ms: 43010},
%Video.TimedPoint{lat: 53.511085, lon: 10.1504, time_offset_ms: 43344},
%Video.TimedPoint{lat: 53.511092, lon: 10.150269, time_offset_ms: 43677},
%Video.TimedPoint{lat: 53.511099, lon: 10.150132, time_offset_ms: 44011},
%Video.TimedPoint{lat: 53.511106, lon: 10.149985, time_offset_ms: 44345},
%Video.TimedPoint{lat: 53.51111, lon: 10.149843, time_offset_ms: 44678},
%Video.TimedPoint{lat: 53.511113, lon: 10.149694, time_offset_ms: 45012},
%Video.TimedPoint{lat: 53.511118, lon: 10.149544, time_offset_ms: 45346},
%Video.TimedPoint{lat: 53.511123, lon: 10.149394, time_offset_ms: 45679},
%Video.TimedPoint{lat: 53.511128, lon: 10.149242, time_offset_ms: 46013},
%Video.TimedPoint{lat: 53.511133, lon: 10.149092, time_offset_ms: 46347},
%Video.TimedPoint{lat: 53.511137, lon: 10.148943, time_offset_ms: 46680},
%Video.TimedPoint{lat: 53.511141, lon: 10.148793, time_offset_ms: 47014},
%Video.TimedPoint{lat: 53.511143, lon: 10.148642, time_offset_ms: 47348},
%Video.TimedPoint{lat: 53.511145, lon: 10.148493, time_offset_ms: 47681},
%Video.TimedPoint{lat: 53.511147, lon: 10.148343, time_offset_ms: 48015},
%Video.TimedPoint{lat: 53.511149, lon: 10.14819, time_offset_ms: 48349},
%Video.TimedPoint{lat: 53.511153, lon: 10.148038, time_offset_ms: 48682},
%Video.TimedPoint{lat: 53.511157, lon: 10.147884, time_offset_ms: 49016},
%Video.TimedPoint{lat: 53.51116, lon: 10.147743, time_offset_ms: 49350},
%Video.TimedPoint{lat: 53.511163, lon: 10.147672, time_offset_ms: 49683},
%Video.TimedPoint{lat: 53.511164, lon: 10.147606, time_offset_ms: 50017},
%Video.TimedPoint{lat: 53.511164, lon: 10.147512, time_offset_ms: 50351},
%Video.TimedPoint{lat: 53.511164, lon: 10.147401, time_offset_ms: 50684},
%Video.TimedPoint{lat: 53.51116, lon: 10.14728, time_offset_ms: 51018},
%Video.TimedPoint{lat: 53.511161, lon: 10.147152, time_offset_ms: 51352},
%Video.TimedPoint{lat: 53.511172, lon: 10.147024, time_offset_ms: 51685},
%Video.TimedPoint{lat: 53.511175, lon: 10.146898, time_offset_ms: 52019},
%Video.TimedPoint{lat: 53.511176, lon: 10.14677, time_offset_ms: 52353},
%Video.TimedPoint{lat: 53.51119, lon: 10.146636, time_offset_ms: 52686},
%Video.TimedPoint{lat: 53.511199, lon: 10.146493, time_offset_ms: 53020},
%Video.TimedPoint{lat: 53.511205, lon: 10.146349, time_offset_ms: 53354},
%Video.TimedPoint{lat: 53.511212, lon: 10.146202, time_offset_ms: 53687},
%Video.TimedPoint{lat: 53.511217, lon: 10.146054, time_offset_ms: 54021},
%Video.TimedPoint{lat: 53.51122, lon: 10.145902, time_offset_ms: 54355},
%Video.TimedPoint{lat: 53.511222, lon: 10.145752, time_offset_ms: 54688},
%Video.TimedPoint{lat: 53.511221, lon: 10.145598, time_offset_ms: 55022},
%Video.TimedPoint{lat: 53.511216, lon: 10.145439, time_offset_ms: 55356},
%Video.TimedPoint{lat: 53.511209, lon: 10.145287, time_offset_ms: 55689},
%Video.TimedPoint{lat: 53.511203, lon: 10.145129, time_offset_ms: 56023},
%Video.TimedPoint{lat: 53.511204, lon: 10.144977, time_offset_ms: 56357},
%Video.TimedPoint{lat: 53.511219, lon: 10.144824, time_offset_ms: 56690},
%Video.TimedPoint{lat: 53.511239, lon: 10.144672, time_offset_ms: 57024},
%Video.TimedPoint{lat: 53.511267, lon: 10.144523, time_offset_ms: 57358},
%Video.TimedPoint{lat: 53.511297, lon: 10.144379, time_offset_ms: 57691},
%Video.TimedPoint{lat: 53.511326, lon: 10.144233, time_offset_ms: 58025},
%Video.TimedPoint{lat: 53.511355, lon: 10.144087, time_offset_ms: 58359},
%Video.TimedPoint{lat: 53.511383, lon: 10.143942, time_offset_ms: 58692},
%Video.TimedPoint{lat: 53.51141, lon: 10.143796, time_offset_ms: 59026},
%Video.TimedPoint{lat: 53.511436, lon: 10.143648, time_offset_ms: 59360},
%Video.TimedPoint{lat: 53.511461, lon: 10.143501, time_offset_ms: 59693},
%Video.TimedPoint{lat: 53.511483, lon: 10.143355, time_offset_ms: 60027},
%Video.TimedPoint{lat: 53.511505, lon: 10.143215, time_offset_ms: 60361},
%Video.TimedPoint{lat: 53.511528, lon: 10.143077, time_offset_ms: 60694},
%Video.TimedPoint{lat: 53.51155, lon: 10.142937, time_offset_ms: 61028},
%Video.TimedPoint{lat: 53.511572, lon: 10.142795, time_offset_ms: 61362},
%Video.TimedPoint{lat: 53.511598, lon: 10.14265, time_offset_ms: 61695},
%Video.TimedPoint{lat: 53.511626, lon: 10.142504, time_offset_ms: 62029},
%Video.TimedPoint{lat: 53.511658, lon: 10.142363, time_offset_ms: 62363},
%Video.TimedPoint{lat: 53.511695, lon: 10.142224, time_offset_ms: 62696},
%Video.TimedPoint{lat: 53.511729, lon: 10.142087, time_offset_ms: 63030},
%Video.TimedPoint{lat: 53.511765, lon: 10.141953, time_offset_ms: 63364},
%Video.TimedPoint{lat: 53.511799, lon: 10.141823, time_offset_ms: 63697},
%Video.TimedPoint{lat: 53.511836, lon: 10.141697, time_offset_ms: 64031},
%Video.TimedPoint{lat: 53.511875, lon: 10.141596, time_offset_ms: 64365},
%Video.TimedPoint{lat: 53.511917, lon: 10.141518, time_offset_ms: 64698},
%Video.TimedPoint{lat: 53.511963, lon: 10.141445, time_offset_ms: 65032},
%Video.TimedPoint{lat: 53.512008, lon: 10.141369, time_offset_ms: 65366},
%Video.TimedPoint{lat: 53.512054, lon: 10.141281, time_offset_ms: 65699},
%Video.TimedPoint{lat: 53.512099, lon: 10.141184, time_offset_ms: 66033},
%Video.TimedPoint{lat: 53.512135, lon: 10.141093, time_offset_ms: 66367},
%Video.TimedPoint{lat: 53.51217, lon: 10.141018, time_offset_ms: 66700},
%Video.TimedPoint{lat: 53.512204, lon: 10.140933, time_offset_ms: 67034},
%Video.TimedPoint{lat: 53.512228, lon: 10.140838, time_offset_ms: 67368},
%Video.TimedPoint{lat: 53.512257, lon: 10.140752, time_offset_ms: 67701},
%Video.TimedPoint{lat: 53.512287, lon: 10.140665, time_offset_ms: 68035},
%Video.TimedPoint{lat: 53.512327, lon: 10.14057, time_offset_ms: 68369},
%Video.TimedPoint{lat: 53.512377, lon: 10.140479, time_offset_ms: 68702},
%Video.TimedPoint{lat: 53.512429, lon: 10.140374, time_offset_ms: 69036},
%Video.TimedPoint{lat: 53.512475, lon: 10.140268, time_offset_ms: 69370},
%Video.TimedPoint{lat: 53.512517, lon: 10.14016, time_offset_ms: 69703},
%Video.TimedPoint{lat: 53.512556, lon: 10.140048, time_offset_ms: 70037},
%Video.TimedPoint{lat: 53.512594, lon: 10.139937, time_offset_ms: 70371},
%Video.TimedPoint{lat: 53.512633, lon: 10.139831, time_offset_ms: 70704},
%Video.TimedPoint{lat: 53.512667, lon: 10.139726, time_offset_ms: 71038},
%Video.TimedPoint{lat: 53.512699, lon: 10.139626, time_offset_ms: 71372},
%Video.TimedPoint{lat: 53.512731, lon: 10.139529, time_offset_ms: 71705},
%Video.TimedPoint{lat: 53.512765, lon: 10.139431, time_offset_ms: 72039},
%Video.TimedPoint{lat: 53.512802, lon: 10.139336, time_offset_ms: 72373},
%Video.TimedPoint{lat: 53.512845, lon: 10.139245, time_offset_ms: 72706},
%Video.TimedPoint{lat: 53.512893, lon: 10.139156, time_offset_ms: 73040},
%Video.TimedPoint{lat: 53.512941, lon: 10.139067, time_offset_ms: 73374},
%Video.TimedPoint{lat: 53.512991, lon: 10.138971, time_offset_ms: 73707},
%Video.TimedPoint{lat: 53.513033, lon: 10.13887, time_offset_ms: 74041},
%Video.TimedPoint{lat: 53.513078, lon: 10.138782, time_offset_ms: 74375},
%Video.TimedPoint{lat: 53.513124, lon: 10.138713, time_offset_ms: 74708},
%Video.TimedPoint{lat: 53.513165, lon: 10.138664, time_offset_ms: 75042},
%Video.TimedPoint{lat: 53.51321, lon: 10.138634, time_offset_ms: 75376},
%Video.TimedPoint{lat: 53.513253, lon: 10.138618, time_offset_ms: 75709},
%Video.TimedPoint{lat: 53.513296, lon: 10.138609, time_offset_ms: 76043},
%Video.TimedPoint{lat: 53.51333, lon: 10.138619, time_offset_ms: 76377},
%Video.TimedPoint{lat: 53.513367, lon: 10.138633, time_offset_ms: 76710},
%Video.TimedPoint{lat: 53.513409, lon: 10.13867, time_offset_ms: 77044},
%Video.TimedPoint{lat: 53.513466, lon: 10.138719, time_offset_ms: 77378},
%Video.TimedPoint{lat: 53.513528, lon: 10.138782, time_offset_ms: 77711},
%Video.TimedPoint{lat: 53.513586, lon: 10.138861, time_offset_ms: 78045},
%Video.TimedPoint{lat: 53.513647, lon: 10.138959, time_offset_ms: 78379},
%Video.TimedPoint{lat: 53.513717, lon: 10.139038, time_offset_ms: 78712},
%Video.TimedPoint{lat: 53.5138, lon: 10.139084, time_offset_ms: 79046},
%Video.TimedPoint{lat: 53.513883, lon: 10.139101, time_offset_ms: 79380},
%Video.TimedPoint{lat: 53.513968, lon: 10.139082, time_offset_ms: 79713},
%Video.TimedPoint{lat: 53.514056, lon: 10.139037, time_offset_ms: 80047},
%Video.TimedPoint{lat: 53.514141, lon: 10.138978, time_offset_ms: 80381},
%Video.TimedPoint{lat: 53.514225, lon: 10.138918, time_offset_ms: 80714},
%Video.TimedPoint{lat: 53.514306, lon: 10.138861, time_offset_ms: 81048},
%Video.TimedPoint{lat: 53.514384, lon: 10.138805, time_offset_ms: 81382},
%Video.TimedPoint{lat: 53.514463, lon: 10.138743, time_offset_ms: 81715},
%Video.TimedPoint{lat: 53.514546, lon: 10.138672, time_offset_ms: 82049},
%Video.TimedPoint{lat: 53.514621, lon: 10.138584, time_offset_ms: 82383},
%Video.TimedPoint{lat: 53.514692, lon: 10.138493, time_offset_ms: 82716},
%Video.TimedPoint{lat: 53.514765, lon: 10.138411, time_offset_ms: 83050},
%Video.TimedPoint{lat: 53.514848, lon: 10.138351, time_offset_ms: 83384},
%Video.TimedPoint{lat: 53.51493, lon: 10.138296, time_offset_ms: 83717},
%Video.TimedPoint{lat: 53.515003, lon: 10.138224, time_offset_ms: 84051},
%Video.TimedPoint{lat: 53.515067, lon: 10.138118, time_offset_ms: 84385},
%Video.TimedPoint{lat: 53.51512, lon: 10.137985, time_offset_ms: 84718},
%Video.TimedPoint{lat: 53.515161, lon: 10.137841, time_offset_ms: 85052},
%Video.TimedPoint{lat: 53.515193, lon: 10.137692, time_offset_ms: 85386},
%Video.TimedPoint{lat: 53.51522, lon: 10.137547, time_offset_ms: 85719},
%Video.TimedPoint{lat: 53.515247, lon: 10.137405, time_offset_ms: 86053},
%Video.TimedPoint{lat: 53.515271, lon: 10.13727, time_offset_ms: 86387},
%Video.TimedPoint{lat: 53.515295, lon: 10.137145, time_offset_ms: 86720},
%Video.TimedPoint{lat: 53.515315, lon: 10.13702, time_offset_ms: 87054},
%Video.TimedPoint{lat: 53.515337, lon: 10.13689, time_offset_ms: 87388},
%Video.TimedPoint{lat: 53.515361, lon: 10.136755, time_offset_ms: 87721},
%Video.TimedPoint{lat: 53.515385, lon: 10.136616, time_offset_ms: 88055},
%Video.TimedPoint{lat: 53.515409, lon: 10.136478, time_offset_ms: 88389},
%Video.TimedPoint{lat: 53.515431, lon: 10.136338, time_offset_ms: 88722},
%Video.TimedPoint{lat: 53.515452, lon: 10.1362, time_offset_ms: 89056},
%Video.TimedPoint{lat: 53.515479, lon: 10.136061, time_offset_ms: 89390},
%Video.TimedPoint{lat: 53.515511, lon: 10.135924, time_offset_ms: 89723},
%Video.TimedPoint{lat: 53.51555, lon: 10.135791, time_offset_ms: 90057},
%Video.TimedPoint{lat: 53.515596, lon: 10.135671, time_offset_ms: 90391},
%Video.TimedPoint{lat: 53.515646, lon: 10.135558, time_offset_ms: 90724},
%Video.TimedPoint{lat: 53.515698, lon: 10.13545, time_offset_ms: 91058},
%Video.TimedPoint{lat: 53.515753, lon: 10.135345, time_offset_ms: 91392},
%Video.TimedPoint{lat: 53.515807, lon: 10.135243, time_offset_ms: 91725},
%Video.TimedPoint{lat: 53.515861, lon: 10.135138, time_offset_ms: 92059},
%Video.TimedPoint{lat: 53.515917, lon: 10.13503, time_offset_ms: 92393},
%Video.TimedPoint{lat: 53.515974, lon: 10.134923, time_offset_ms: 92726},
%Video.TimedPoint{lat: 53.516033, lon: 10.134816, time_offset_ms: 93060},
%Video.TimedPoint{lat: 53.51609, lon: 10.134703, time_offset_ms: 93394},
%Video.TimedPoint{lat: 53.516149, lon: 10.134587, time_offset_ms: 93727},
%Video.TimedPoint{lat: 53.516211, lon: 10.13447, time_offset_ms: 94061},
%Video.TimedPoint{lat: 53.516273, lon: 10.134354, time_offset_ms: 94395},
%Video.TimedPoint{lat: 53.516334, lon: 10.134235, time_offset_ms: 94728},
%Video.TimedPoint{lat: 53.516393, lon: 10.134121, time_offset_ms: 95062},
%Video.TimedPoint{lat: 53.516451, lon: 10.134008, time_offset_ms: 95396},
%Video.TimedPoint{lat: 53.516512, lon: 10.133895, time_offset_ms: 95729},
%Video.TimedPoint{lat: 53.516575, lon: 10.133783, time_offset_ms: 96063},
%Video.TimedPoint{lat: 53.516634, lon: 10.133667, time_offset_ms: 96397},
%Video.TimedPoint{lat: 53.516692, lon: 10.13355, time_offset_ms: 96730},
%Video.TimedPoint{lat: 53.516746, lon: 10.133437, time_offset_ms: 97064},
%Video.TimedPoint{lat: 53.516795, lon: 10.133338, time_offset_ms: 97398},
%Video.TimedPoint{lat: 53.51682191891892, lon: 10.13329861861862, time_offset_ms: 97564}
]
end
@impl Video.Rendered
def(rendered?()) do
true
end
end
| 73.138554
| 94
| 0.699036
|
9e69f64482c0524bf2c04ef3169fd8d2bc68d603
| 2,880
|
ex
|
Elixir
|
clients/policy_simulator/lib/google_api/policy_simulator/v1/model/google_cloud_policysimulator_v1_access_state_diff.ex
|
renovate-bot/elixir-google-api
|
1da34cd39b670c99f067011e05ab90af93fef1f6
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/policy_simulator/lib/google_api/policy_simulator/v1/model/google_cloud_policysimulator_v1_access_state_diff.ex
|
swansoffiee/elixir-google-api
|
9ea6d39f273fb430634788c258b3189d3613dde0
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/policy_simulator/lib/google_api/policy_simulator/v1/model/google_cloud_policysimulator_v1_access_state_diff.ex
|
dazuma/elixir-google-api
|
6a9897168008efe07a6081d2326735fe332e522c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1AccessStateDiff do
  @moduledoc """
  A summary and comparison of the principal's access under the current (baseline) policies and the proposed (simulated) policies for a single access tuple.
  ## Attributes
  * `accessChange` (*type:* `String.t`, *default:* `nil`) - How the principal's access, specified in the AccessState field, changed between the current (baseline) policies and proposed (simulated) policies.
  * `baseline` (*type:* `GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1ExplainedAccess.t`, *default:* `nil`) - The results of evaluating the access tuple under the current (baseline) policies. If the AccessState couldn't be fully evaluated, this field explains why.
  * `simulated` (*type:* `GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1ExplainedAccess.t`, *default:* `nil`) - The results of evaluating the access tuple under the proposed (simulated) policies. If the AccessState couldn't be fully evaluated, this field explains why.
  """

  # Pulls in the struct definition plus the field/2 macro and the JSON
  # (de)serialization plumbing shared by all generated model modules.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :accessChange => String.t() | nil,
          :baseline =>
            GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1ExplainedAccess.t()
            | nil,
          :simulated =>
            GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1ExplainedAccess.t()
            | nil
        }

  # field/1-2 registers each attribute for Poison encoding/decoding; the
  # `as:` option tells the decoder which struct to build for nested values.
  field(:accessChange)

  field(:baseline,
    as: GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1ExplainedAccess
  )

  field(:simulated,
    as: GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1ExplainedAccess
  )
end
defimpl Poison.Decoder,
  for: GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1AccessStateDiff do
  alias GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1AccessStateDiff

  # Delegate to the generated model decoder so the nested ExplainedAccess
  # values are materialized with their declared struct types.
  def decode(value, options) do
    GoogleCloudPolicysimulatorV1AccessStateDiff.decode(value, options)
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.PolicySimulator.V1.Model.GoogleCloudPolicysimulatorV1AccessStateDiff do
  # All generated models share the Gax ModelBase encoder, which emits only
  # the declared fields.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 42.352941
| 287
| 0.754167
|
9e69f9f36a63dddff4be684f690522984569306e
| 138
|
exs
|
Elixir
|
lectures/webserv/test/webserv_test.exs
|
Murre3/ID1019
|
8240d07be35843610c6c14a40bcb3ed21b3ea36f
|
[
"MIT"
] | null | null | null |
lectures/webserv/test/webserv_test.exs
|
Murre3/ID1019
|
8240d07be35843610c6c14a40bcb3ed21b3ea36f
|
[
"MIT"
] | null | null | null |
lectures/webserv/test/webserv_test.exs
|
Murre3/ID1019
|
8240d07be35843610c6c14a40bcb3ed21b3ea36f
|
[
"MIT"
] | null | null | null |
defmodule WServerTest do
  use ExUnit.Case

  # Run the examples embedded in WServer's @doc strings as tests.
  doctest WServer

  # Default `mix new` smoke test: the stub hello/0 still returns :world.
  test "greets the world" do
    assert WServer.hello() == :world
  end
end
| 15.333333
| 36
| 0.710145
|
9e6a1a49f4f0a7f09b36c319097247e3837fae22
| 118
|
exs
|
Elixir
|
test/Microgun_test.exs
|
ridrum/Microgun
|
44f0c1d17c97a7d9d43a8f1ee95b045e744f16e1
|
[
"MIT"
] | null | null | null |
test/Microgun_test.exs
|
ridrum/Microgun
|
44f0c1d17c97a7d9d43a8f1ee95b045e744f16e1
|
[
"MIT"
] | null | null | null |
test/Microgun_test.exs
|
ridrum/Microgun
|
44f0c1d17c97a7d9d43a8f1ee95b045e744f16e1
|
[
"MIT"
] | null | null | null |
defmodule MicrogunTest do
  use ExUnit.Case

  # Run the examples embedded in Microgun's @doc strings as tests.
  doctest Microgun

  # Placeholder test generated by `mix new`; replace with real coverage.
  test "the truth" do
    assert 1 + 1 == 2
  end
end
| 13.111111
| 25
| 0.677966
|
9e6a1cad31a5144133c6a875d08e7b5ac0a0add3
| 3,544
|
exs
|
Elixir
|
test/bus_car/request_test.exs
|
elbow-jason/bus_car
|
cd70b9f6b8dd404eb518f642491e0a4430e2d9f9
|
[
"MIT"
] | 18
|
2016-09-25T21:36:39.000Z
|
2021-02-17T15:09:52.000Z
|
test/bus_car/request_test.exs
|
elbow-jason/bus_car
|
cd70b9f6b8dd404eb518f642491e0a4430e2d9f9
|
[
"MIT"
] | 7
|
2016-12-08T05:01:23.000Z
|
2018-04-05T08:55:11.000Z
|
test/bus_car/request_test.exs
|
elbow-jason/bus_car
|
cd70b9f6b8dd404eb518f642491e0a4430e2d9f9
|
[
"MIT"
] | 1
|
2020-04-24T02:10:15.000Z
|
2020-04-24T02:10:15.000Z
|
defmodule BusCarRequestTest do
use ExUnit.Case, async: true
doctest BusCar.Request
alias BusCar.Request
@valid %{
host: "beef",
port: 4444,
}
test "Request.new raises without :host" do
assert_raise RuntimeError, fn -> Request.new end
end
test "Request.new raises without :port" do
assert_raise RuntimeError, fn -> Request.new(%{host: "beef"}) end
end
test "Request.new has sane defaults" do
assert Request.new(@valid) == %BusCar.Request{
body: "",
headers: [],
host: "beef",
method: :get,
path: "/",
port: 4444,
protocol: "http",
query: nil,
}
end
test "Request.new jsonifies map bodies" do
map = Map.merge(@valid, %{body: %{some: :thing}})
req = Request.new(map)
assert req.body == ~s({"some":"thing"})
end
test "Request.new jsonifies map bodies and adds json headers" do
map = Map.merge(@valid, %{body: %{some: :thing}})
req = Request.new(map)
assert req.body == ~s({"some":"thing"})
assert req.headers == [{"Content-Type", "application/json"}]
end
test "protocol is configurable" do
map = Map.merge(@valid, %{protocol: "https"})
req = Request.new(map)
assert req.protocol == "https"
end
test "headers are configurable" do
map = Map.merge(@valid, %{headers: [{"fleep-x", "flop-x"}]})
req = Request.new(map)
assert req.headers == [{"fleep-x", "flop-x"}]
end
test "headers are configurable with json headers from map body" do
map = Map.merge(@valid, %{headers: [{"fleep-x", "flop-x"}], body: %{"some" => "thing"}})
req = Request.new(map)
assert req.headers == [{"Content-Type", "application/json"}, {"fleep-x", "flop-x"}]
end
test "path can take a string" do
map = Map.merge(@valid, %{path: "/fleep"})
req = Request.new(map)
assert req.path == "/fleep"
end
test "path can take a list of strings" do
map = Map.merge(@valid, %{path: ["fleep", "floop"]})
req = Request.new(map)
assert req.path == "/fleep/floop"
end
test "path can take a list of atoms" do
map = Map.merge(@valid, %{path: [:"fleep", :"floop"]})
req = Request.new(map)
assert req.path == "/fleep/floop"
end
test "path can take a list of mixed strings and atoms" do
map = Map.merge(@valid, %{path: ["fleep", :"floop"]})
req = Request.new(map)
assert req.path == "/fleep/floop"
end
test "path removes nil parts of a list" do
map = Map.merge(@valid, %{path: [nil, "fleep", :"floop"]})
req = Request.new(map)
assert req.path == "/fleep/floop"
end
test "query can take a string" do
map = Map.merge(@valid, %{query: "ryan=liar_about_boxing_gif"})
req = Request.new(map)
assert req.query == "ryan=liar_about_boxing_gif"
end
test "query can take a map" do
map = Map.merge(@valid, %{query: %{"ryan" => "liar_about_boxing_gif", page: 2}})
req = Request.new(map)
assert req.query == "page=2&ryan=liar_about_boxing_gif"
end
test "implements String.Chars" do
map = Map.merge(@valid, %{
query: %{"ryan" => "liar_about_boxing_gif", page: 2},
path: ["fleep", :floop],
})
expected = "http://beef:4444/fleep/floop?page=2&ryan=liar_about_boxing_gif"
assert Request.new(map) |> to_string == expected
end
test "send works on localhost elasticsearch" do
resp = %{host: "127.0.0.1", port: 9200, method: :get}
|> Request.new
|> Request.send
assert resp |> elem(0) == :ok
assert resp |> elem(1) |> Map.get(:status_code) == 200
end
end
| 28.352
| 92
| 0.610045
|
9e6a2d9e91f3b3b69d2c2633c93ce9918797f288
| 55,532
|
exs
|
Elixir
|
test/wifi_compatibility_test.exs
|
axelson/vintage_net_wifi
|
b989ec5a8122fca212cf62e7e6b489af4314cb5e
|
[
"Apache-2.0"
] | 20
|
2019-12-03T18:26:25.000Z
|
2022-02-08T09:27:23.000Z
|
test/wifi_compatibility_test.exs
|
axelson/vintage_net_wifi
|
b989ec5a8122fca212cf62e7e6b489af4314cb5e
|
[
"Apache-2.0"
] | 29
|
2020-01-21T02:21:47.000Z
|
2022-02-16T13:02:24.000Z
|
test/wifi_compatibility_test.exs
|
axelson/vintage_net_wifi
|
b989ec5a8122fca212cf62e7e6b489af4314cb5e
|
[
"Apache-2.0"
] | 12
|
2020-02-16T16:48:03.000Z
|
2022-03-24T12:10:52.000Z
|
defmodule WiFiCompatibilityTest do
use ExUnit.Case
alias VintageNet.Interface.RawConfig
import VintageNetWiFiTest.Utils
import ExUnit.CaptureLog
#
# These tests ensure that VintageNet.Technology.WiFi users get updated properly.
# This is super-important to keep for a while so that pre-0.7.0 users are not broken.
#
# There is no need to add to them unless a bug is found where a configuration does
# not update properly or the implementation changes.
#
# 10K foot overview:
# 1. Input type `VintageNet.Technology.WiFi` is changed to `VintageNetWiFi`
# 2. The `:wifi` tag is changed to `:vintage_net_wifi`
# 3. The old way of specifying access points still works
#
test "normalizes the really old way of specifying ssid" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{ssid: "guest", key_mgmt: :none}
}
normalized_input = %{
type: VintageNetWiFi,
ipv4: %{method: :dhcp},
vintage_net_wifi: %{
networks: [
%{
ssid: "guest",
key_mgmt: :none,
mode: :infrastructure
}
]
}
}
assert capture_log(fn ->
assert normalized_input == VintageNet.Technology.WiFi.normalize(input)
end) =~ "deprecated"
end
test "normalizes the really old way of specifying ap mode" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{mode: :host, ssid: "my_ap", key_mgmt: :none}
}
normalized_input = %{
type: VintageNetWiFi,
ipv4: %{method: :dhcp},
vintage_net_wifi: %{
networks: [
%{
ssid: "my_ap",
key_mgmt: :none,
mode: :ap
}
]
}
}
assert capture_log(fn ->
assert normalized_input == VintageNet.Technology.WiFi.normalize(input)
end) =~ "deprecated"
end
test "normalizes really old way of specifying infrastructure mode" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "guest",
key_mgmt: :none,
mode: :client
}
]
}
}
normalized_input = %{
type: VintageNetWiFi,
ipv4: %{method: :dhcp},
vintage_net_wifi: %{
networks: [
%{
ssid: "guest",
key_mgmt: :none,
mode: :infrastructure
}
]
}
}
assert normalized_input == VintageNet.Technology.WiFi.normalize(input)
end
test "normalizing an empty config works" do
# An empty config should be normalized to a configuration that
# allows the user to scan for networks.
input = %{
type: VintageNet.Technology.WiFi
}
normalized = %{
type: VintageNetWiFi,
vintage_net_wifi: %{networks: []},
ipv4: %{method: :disabled}
}
assert normalized == VintageNet.Technology.WiFi.normalize(input)
end
test "an empty config enables wifi scanning" do
input = %{
type: VintageNet.Technology.WiFi
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "verbose flag turns on wpa_supplicant debug" do
input = %{
type: VintageNet.Technology.WiFi,
verbose: true
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: true
]}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "normalization converts passphrases to PSKs" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [%{ssid: "IEEE", psk: "password", key_mgmt: :wpa_psk}]
}
}
normalized_input = %{
type: VintageNetWiFi,
ipv4: %{method: :dhcp},
vintage_net_wifi: %{
networks: [
%{
ssid: "IEEE",
psk: "F42C6FC52DF0EBEF9EBB4B90B38A5F902E83FE1B135A70E23AED762E9710A12E",
key_mgmt: :wpa_psk,
mode: :infrastructure
}
]
}
}
assert normalized_input == VintageNet.Technology.WiFi.normalize(input)
end
test "normalization converts passphrases to psks for multiple networks" do
input = %{
type: VintageNet.Technology.WiFi,
ipv4: %{method: :dhcp},
wifi: %{
networks: [
%{
ssid: "IEEE",
psk: "password",
key_mgmt: :wpa_psk
},
%{
ssid: "IEEE2",
psk: "password",
key_mgmt: :wpa_psk
}
]
}
}
normalized_input = %{
type: VintageNetWiFi,
ipv4: %{method: :dhcp},
vintage_net_wifi: %{
networks: [
%{
ssid: "IEEE",
psk: "F42C6FC52DF0EBEF9EBB4B90B38A5F902E83FE1B135A70E23AED762E9710A12E",
key_mgmt: :wpa_psk,
mode: :infrastructure
},
%{
ssid: "IEEE2",
psk: "B06433395BD30B1455F538904B239D10A51964932A81D1407BAF2BA0767E22E9",
key_mgmt: :wpa_psk,
mode: :infrastructure
}
]
}
}
assert normalized_input == VintageNet.Technology.WiFi.normalize(input)
end
test "create a WPA2 WiFi configuration" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "testing",
psk: "1234567890123456789012345678901234567890123456789012345678901234",
key_mgmt: :wpa_psk
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="testing"
key_mgmt=WPA-PSK
mode=0
psk=1234567890123456789012345678901234567890123456789012345678901234
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create an open WiFi configuration" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "guest"
}
]
},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="guest"
key_mgmt=NONE
mode=0
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "Set regulatory_domain at runtime" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
regulatory_domain: "AU"
},
ipv4: %{method: :disabled},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=AU
wps_cred_processing=1
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create a WPA2 WiFi configuration with passphrase" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [%{ssid: "testing", psk: "a_passphrase_and_not_a_psk", key_mgmt: :wpa_psk}]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="testing"
key_mgmt=WPA-PSK
mode=0
psk=1EE0A473A954F61007E526365D4FDC056FE2A102ED2CE77D64492A9495B83030
}
"""}
],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create a password-less WiFi configuration" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{networks: [%{ssid: "testing", key_mgmt: :none}]},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="testing"
key_mgmt=NONE
mode=0
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create a WEP WiFi configuration" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "testing",
bssid: "00:11:22:33:44:55",
wep_key0: "42FEEDDEAFBABEDEAFBEEFAA55",
wep_key1: "42FEEDDEAFBABEDEAFBEEFAA55",
wep_key2: "ABEDEA42FFBEEFAA55EEDDEAFB",
wep_key3: "EDEADEAFBABFBEEFAA5542FEED",
key_mgmt: :none,
wep_tx_keyidx: 0
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="testing"
bssid=00:11:22:33:44:55
key_mgmt=NONE
mode=0
wep_key0=42FEEDDEAFBABEDEAFBEEFAA55
wep_key1=42FEEDDEAFBABEDEAFBEEFAA55
wep_key2=ABEDEA42FFBEEFAA55EEDDEAFB
wep_key3=EDEADEAFBABFBEEFAA5542FEED
wep_tx_keyidx=0
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create a hidden WiFi configuration" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "testing",
psk: "1234567890123456789012345678901234567890123456789012345678901234",
key_mgmt: :wpa_psk,
scan_ssid: 1
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="testing"
key_mgmt=WPA-PSK
scan_ssid=1
mode=0
psk=1234567890123456789012345678901234567890123456789012345678901234
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create a basic EAP network" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "testing",
key_mgmt: :wpa_eap,
scan_ssid: 1,
pairwise: "CCMP TKIP",
group: "CCMP TKIP",
eap: "PEAP",
identity: "user1",
password: "supersecret",
phase1: "peapver=auto",
phase2: "MSCHAPV2"
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="testing"
key_mgmt=WPA-EAP
scan_ssid=1
mode=0
identity="user1"
password="supersecret"
pairwise=CCMP TKIP
group=CCMP TKIP
eap=PEAP
phase1="peapver=auto"
phase2="MSCHAPV2"
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "WPA-Personal(PSK) with TKIP and enforcement for frequent PTK rekeying" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "example",
proto: "WPA",
key_mgmt: :wpa_psk,
scan_ssid: 1,
pairwise: "TKIP",
psk: "not so secure passphrase",
wpa_ptk_rekey: 600
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example"
key_mgmt=WPA-PSK
scan_ssid=1
mode=0
psk=F7C00EB4F1A1BF28F0C6D18C689DB6634FC85C894286A11DE979F2BA1C022988
wpa_ptk_rekey=600
pairwise=TKIP
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "Only WPA-EAP is used. Both CCMP and TKIP is accepted" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "example",
proto: "RSN",
key_mgmt: :wpa_eap,
pairwise: "CCMP TKIP",
eap: "TLS",
identity: "user@example.com",
ca_cert: "/etc/cert/ca.pem",
client_cert: "/etc/cert/user.pem",
private_key: "/etc/cert/user.prv",
private_key_passwd: "password",
priority: 1
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example"
key_mgmt=WPA-EAP
priority=1
mode=0
identity="user@example.com"
pairwise=CCMP TKIP
eap=TLS
ca_cert="/etc/cert/ca.pem"
client_cert="/etc/cert/user.pem"
private_key="/etc/cert/user.prv"
private_key_passwd="password"
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "EAP-PEAP/MSCHAPv2 configuration for RADIUS servers that use the new peaplabel" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "example",
key_mgmt: :wpa_eap,
eap: "PEAP",
identity: "user@example.com",
password: "foobar",
ca_cert: "/etc/cert/ca.pem",
phase1: "peaplabel=1",
phase2: "auth=MSCHAPV2",
priority: 10
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example"
key_mgmt=WPA-EAP
priority=10
mode=0
identity="user@example.com"
password="foobar"
eap=PEAP
phase1="peaplabel=1"
phase2="auth=MSCHAPV2"
ca_cert="/etc/cert/ca.pem"
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "EAP-TTLS/EAP-MD5-Challenge configuration with anonymous identity" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "example",
key_mgmt: :wpa_eap,
eap: "TTLS",
identity: "user@example.com",
anonymous_identity: "anonymous@example.com",
password: "foobar",
ca_cert: "/etc/cert/ca.pem",
priority: 2
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example"
key_mgmt=WPA-EAP
priority=2
mode=0
identity="user@example.com"
anonymous_identity="anonymous@example.com"
password="foobar"
eap=TTLS
ca_cert="/etc/cert/ca.pem"
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "WPA-EAP, EAP-TTLS with different CA certificate used for outer and inner authentication" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "example",
key_mgmt: :wpa_eap,
eap: "TTLS",
anonymous_identity: "anonymous@example.com",
ca_cert: "/etc/cert/ca.pem",
phase2: "autheap=TLS",
ca_cert2: "/etc/cert/ca2.pem",
client_cert2: "/etc/cer/user.pem",
private_key2: "/etc/cer/user.prv",
private_key2_passwd: "password",
priority: 2
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example"
key_mgmt=WPA-EAP
priority=2
mode=0
anonymous_identity="anonymous@example.com"
eap=TTLS
phase2="autheap=TLS"
ca_cert="/etc/cert/ca.pem"
ca_cert2="/etc/cert/ca2.pem"
client_cert2="/etc/cer/user.pem"
private_key2="/etc/cer/user.prv"
private_key2_passwd="password"
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "EAP-SIM with a GSM SIM or USIM" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [%{ssid: "eap-sim-test", key_mgmt: :wpa_eap, eap: "SIM", pin: "1234", pcsc: ""}]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="eap-sim-test"
key_mgmt=WPA-EAP
mode=0
eap=SIM
pin="1234"
pcsc=""
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "EAP PSK" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "eap-psk-test",
key_mgmt: :wpa_eap,
eap: "PSK",
anonymous_identity: "eap_psk_user",
password: "06b4be19da289f475aa46a33cb793029",
identity: "eap_psk_user@example.com"
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="eap-psk-test"
key_mgmt=WPA-EAP
mode=0
identity="eap_psk_user@example.com"
anonymous_identity="eap_psk_user"
password="06b4be19da289f475aa46a33cb793029"
eap=PSK
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "IEEE 802.1X/EAPOL with dynamically generated WEP keys" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "1x-test",
key_mgmt: :IEEE8021X,
eap: "TLS",
identity: "user@example.com",
ca_cert: "/etc/cert/ca.pem",
client_cert: "/etc/cert/user.pem",
private_key: "/etc/cert/user.prv",
private_key_passwd: "password",
eapol_flags: 3
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="1x-test"
key_mgmt=IEEE8021X
mode=0
identity="user@example.com"
eap=TLS
eapol_flags=3
ca_cert="/etc/cert/ca.pem"
client_cert="/etc/cert/user.pem"
private_key="/etc/cert/user.prv"
private_key_passwd="password"
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "configuration denying two APs" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "example",
key_mgmt: :wpa_psk,
psk: "very secret passphrase",
bssid_denylist: "02:11:22:33:44:55 02:22:aa:44:55:66"
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example"
key_mgmt=WPA-PSK
bssid_blacklist=02:11:22:33:44:55 02:22:aa:44:55:66
mode=0
psk=3033345C1478F89E4BE9C4937401DEAFD58808CD3E63568DCBFBBD4A8D281175
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "configuration limiting AP selection to a specific set of APs" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "example",
key_mgmt: :wpa_psk,
psk: "very secret passphrase",
bssid_allowlist:
"02:55:ae:bc:00:00/ff:ff:ff:ff:00:00 00:00:77:66:55:44/00:00:ff:ff:ff:ff"
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example"
key_mgmt=WPA-PSK
bssid_whitelist=02:55:ae:bc:00:00/ff:ff:ff:ff:00:00 00:00:77:66:55:44/00:00:ff:ff:ff:ff
mode=0
psk=3033345C1478F89E4BE9C4937401DEAFD58808CD3E63568DCBFBBD4A8D281175
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
# Verifies that a WPA-PSK network with `mode: :ap` renders as a host access
# point: `mode=2` in the generated wpa_supplicant.conf, `ap_mode: true` for
# the WPASupplicant child, no DHCP/connectivity children (ipv4 is :disabled),
# and the extra p2p-dev control socket included in cleanup_files.
test "host AP mode" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{mode: :ap, ssid: "example ap", psk: "very secret passphrase", key_mgmt: :wpa_psk}
]
},
ipv4: %{method: :disabled},
hostname: "unit_test"
}
# Expected raw config; the psk below is the precomputed WPA-PSK derivation of
# "very secret passphrase" for ssid "example ap".
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: true,
verbose: false
]}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example ap"
key_mgmt=WPA-PSK
mode=2
psk=94A7360596213CEB96007A25A63FCBCF4D540314CEB636353C62A86632A6BD6E
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: [
"/tmp/vintage_net/wpa_supplicant/p2p-dev-wlan0",
"/tmp/vintage_net/wpa_supplicant/wlan0"
]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create a multi-network WiFi configuration" do
# All of the IPv4 settings need to be the same for this configuration. This is
# probably "good enough". `nerves_network` does better, though.
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
ssid: "first_priority",
psk: "1234567890123456789012345678901234567890123456789012345678901234",
key_mgmt: :wpa_psk,
priority: 100
},
%{
ssid: "second_priority",
psk: "1234567890123456789012345678901234567890123456789012345678901234",
key_mgmt: :wpa_psk,
priority: 1
},
%{
ssid: "third_priority",
key_mgmt: :none,
priority: 0
}
]
},
ipv4: %{method: :dhcp},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
udhcpc_child_spec("wlan0", "unit_test"),
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="first_priority"
key_mgmt=WPA-PSK
priority=100
mode=0
psk=1234567890123456789012345678901234567890123456789012345678901234
}
network={
ssid="second_priority"
key_mgmt=WPA-PSK
priority=1
mode=0
psk=1234567890123456789012345678901234567890123456789012345678901234
}
network={
ssid="third_priority"
key_mgmt=NONE
priority=0
mode=0
}
"""}
],
up_cmds: [{:run, "ip", ["link", "set", "wlan0", "up"]}],
down_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "creates a static ip config" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [%{ssid: "example ap", psk: "very secret passphrase", key_mgmt: :wpa_psk}]
},
ipv4: %{
method: :static,
address: "192.168.1.2",
netmask: "255.255.0.0",
gateway: "192.168.1.1"
},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: false,
verbose: false
]},
{VintageNet.Connectivity.InternetChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="example ap"
key_mgmt=WPA-PSK
mode=0
psk=94A7360596213CEB96007A25A63FCBCF4D540314CEB636353C62A86632A6BD6E
}
"""}
],
up_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["addr", "add", "192.168.1.2/16", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "up"]},
{:fun, VintageNet.RouteManager, :set_route,
["wlan0", [{{192, 168, 1, 2}, 16}], {192, 168, 1, 1}]},
{:fun, VintageNet.NameResolver, :clear, ["wlan0"]}
],
down_cmds: [
{:fun, VintageNet.RouteManager, :clear_route, ["wlan0"]},
{:fun, VintageNet.NameResolver, :clear, ["wlan0"]},
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
cleanup_files: ["/tmp/vintage_net/wpa_supplicant/wlan0"]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create an AP running dhcpd config" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
mode: :ap,
ssid: "example ap",
key_mgmt: :none,
scan_ssid: 1
}
],
ap_scan: 1,
bgscan: :simple
},
ipv4: %{
method: :static,
address: "192.168.24.1",
netmask: "255.255.255.0"
},
dhcpd: %{
start: "192.168.24.2",
end: "192.168.24.100",
options: %{
dns: ["192.168.24.1"],
subnet: {255, 255, 255, 0},
router: ["192.168.24.1"],
domain: "example.com",
search: ["example.com"]
}
},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: true,
verbose: false
]},
{VintageNet.Connectivity.LANChecker, "wlan0"},
udhcpd_child_spec("wlan0")
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
bgscan="simple"
ap_scan=1
network={
ssid="example ap"
key_mgmt=NONE
scan_ssid=1
mode=2
}
"""},
{"/tmp/vintage_net/udhcpd.conf.wlan0",
"""
interface wlan0
pidfile /tmp/vintage_net/udhcpd.wlan0.pid
lease_file /tmp/vintage_net/udhcpd.wlan0.leases
notify_file #{Application.app_dir(:beam_notify, ["priv", "beam_notify"])}
end 192.168.24.100
opt dns 192.168.24.1
opt domain example.com
opt router 192.168.24.1
opt search example.com
opt subnet 255.255.255.0
start 192.168.24.2
"""}
],
down_cmds: [
{:fun, VintageNet.RouteManager, :clear_route, ["wlan0"]},
{:fun, VintageNet.NameResolver, :clear, ["wlan0"]},
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
up_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["addr", "add", "192.168.24.1/24", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "up"]},
{:fun, VintageNet.RouteManager, :clear_route, ["wlan0"]},
{:fun, VintageNet.NameResolver, :clear, ["wlan0"]}
],
cleanup_files: [
"/tmp/vintage_net/wpa_supplicant/p2p-dev-wlan0",
"/tmp/vintage_net/wpa_supplicant/wlan0"
]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
test "create an ad hoc network" do
input = %{
type: VintageNet.Technology.WiFi,
wifi: %{
networks: [
%{
mode: :ibss,
ssid: "my_mesh",
key_mgmt: :none,
frequency: 2412
}
]
},
ipv4: %{
method: :static,
address: "192.168.24.1",
netmask: "255.255.255.0"
},
hostname: "unit_test"
}
output = %RawConfig{
ifname: "wlan0",
type: VintageNetWiFi,
source_config: VintageNet.Technology.WiFi.normalize(input),
required_ifnames: ["wlan0"],
child_specs: [
{VintageNetWiFi.WPASupplicant,
[
wpa_supplicant: "wpa_supplicant",
ifname: "wlan0",
wpa_supplicant_conf_path: "/tmp/vintage_net/wpa_supplicant.conf.wlan0",
control_path: "/tmp/vintage_net/wpa_supplicant",
ap_mode: true,
verbose: false
]},
{VintageNet.Connectivity.LANChecker, "wlan0"}
],
restart_strategy: :rest_for_one,
files: [
{"/tmp/vintage_net/wpa_supplicant.conf.wlan0",
"""
ctrl_interface=/tmp/vintage_net/wpa_supplicant
country=00
wps_cred_processing=1
network={
ssid="my_mesh"
key_mgmt=NONE
mode=1
frequency=2412
}
"""}
],
down_cmds: [
{:fun, VintageNet.RouteManager, :clear_route, ["wlan0"]},
{:fun, VintageNet.NameResolver, :clear, ["wlan0"]},
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "down"]}
],
up_cmds: [
{:run_ignore_errors, "ip", ["addr", "flush", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["addr", "add", "192.168.24.1/24", "dev", "wlan0", "label", "wlan0"]},
{:run, "ip", ["link", "set", "wlan0", "up"]},
{:fun, VintageNet.RouteManager, :clear_route, ["wlan0"]},
{:fun, VintageNet.NameResolver, :clear, ["wlan0"]}
],
cleanup_files: [
"/tmp/vintage_net/wpa_supplicant/p2p-dev-wlan0",
"/tmp/vintage_net/wpa_supplicant/wlan0"
]
}
assert output == VintageNet.Technology.WiFi.to_raw_config("wlan0", input, default_opts())
end
end
| 30.579295
| 99
| 0.553014
|
9e6a31804b5cbb0d8bc0c57e71adb39895610952
| 69
|
ex
|
Elixir
|
lib/todays_pizza_web/views/page_view.ex
|
JohnB/todays_pizza
|
816c8f4f566558bbf8910ef6dd20e363bcddf8fd
|
[
"MIT"
] | null | null | null |
lib/todays_pizza_web/views/page_view.ex
|
JohnB/todays_pizza
|
816c8f4f566558bbf8910ef6dd20e363bcddf8fd
|
[
"MIT"
] | null | null | null |
lib/todays_pizza_web/views/page_view.ex
|
JohnB/todays_pizza
|
816c8f4f566558bbf8910ef6dd20e363bcddf8fd
|
[
"MIT"
] | null | null | null |
defmodule TodaysPizzaWeb.PageView do
  @moduledoc """
  View module for page templates.

  Pulls in the shared view helpers via the `:view` definition in
  `TodaysPizzaWeb`; no page-specific helpers are defined here.
  """
  use TodaysPizzaWeb, :view
end
| 17.25
| 36
| 0.826087
|
9e6a3a2425df9a19de7aab88daa553ae0c6a9a11
| 1,011
|
ex
|
Elixir
|
lib/login/session.ex
|
pablobm/doorman
|
3e28226b91dcd50a0405a99fcc1437becb515bd1
|
[
"MIT"
] | null | null | null |
lib/login/session.ex
|
pablobm/doorman
|
3e28226b91dcd50a0405a99fcc1437becb515bd1
|
[
"MIT"
] | null | null | null |
lib/login/session.ex
|
pablobm/doorman
|
3e28226b91dcd50a0405a99fcc1437becb515bd1
|
[
"MIT"
] | null | null | null |
defmodule Doorman.Login.Session do
  @moduledoc """
  Session-backed login strategy for Doorman.

  Stores the user's id and per-user session secret in the Plug session and
  resolves the current user from those two values.
  """
  use Doorman.Login

  @session_key :user_id
  @session_secret :session_secret

  @doc """
  Logs in given user by setting `:user_id` on the session of passed in `conn`.
  """
  def login(conn, user) do
    conn_with_id = Plug.Conn.put_session(conn, @session_key, user.id)
    Plug.Conn.put_session(conn_with_id, @session_secret, user.session_secret)
  end

  @doc """
  Logs out current user.
  """
  def logout(conn) do
    conn_without_id = Plug.Conn.delete_session(conn, @session_key)
    Plug.Conn.delete_session(conn_without_id, @session_secret)
  end

  @doc """
  Returns the current user or nil based on `:user_id` in the session.
  """
  def get_current_user(conn) do
    user_id = Plug.Conn.get_session(conn, @session_key)
    secret = Plug.Conn.get_session(conn, @session_secret)
    lookup_user(user_id, secret)
  end

  # Looks the user up only when both session values are present; any other
  # combination yields nil (matches the original `if` without an `else`).
  defp lookup_user(nil, _secret), do: nil
  defp lookup_user(_id, nil), do: nil

  defp lookup_user(id, secret) do
    repo = Application.get_env(:doorman, :repo)
    user_module = Application.get_env(:doorman, :user_module)
    repo.get_by(user_module, id: id, session_secret: secret)
  end
end
| 25.923077
| 78
| 0.684471
|
9e6a5b20cf88145eff8880a90ef90ed1243d8819
| 7,466
|
exs
|
Elixir
|
lib/elixir/test/elixir/code_formatter/literals_test.exs
|
davidsulc/elixir
|
dd4fd6ab742acd75862e34e26dbdb86e0cf6453f
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/test/elixir/code_formatter/literals_test.exs
|
davidsulc/elixir
|
dd4fd6ab742acd75862e34e26dbdb86e0cf6453f
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/test/elixir/code_formatter/literals_test.exs
|
davidsulc/elixir
|
dd4fd6ab742acd75862e34e26dbdb86e0cf6453f
|
[
"Apache-2.0"
] | null | null | null |
Code.require_file "../test_helper.exs", __DIR__
defmodule Code.Formatter.LiteralsTest do
use ExUnit.Case, async: true
import CodeFormatterHelpers
@short_length [line_length: 10]
describe "integers" do
test "in decimal base" do
assert_same "0"
assert_same "100"
assert_same "007"
assert_same "10000"
assert_format "100000", "100_000"
assert_format "1000000", "1_000_000"
end
test "in binary base" do
assert_same "0b0"
assert_same "0b1"
assert_same "0b101"
assert_same "0b01"
assert_format "0b111_111", "0b111111"
end
test "in octal base" do
assert_same "0o77"
assert_same "0o0"
assert_same "0o01"
assert_format "0o777_777", "0o777777"
end
test "in hex base" do
assert_same "0x1"
assert_format "0xabcdef", "0xABCDEF"
assert_same "0x01"
assert_format "0xFFF_FFF", "0xFFFFFF"
end
test "as chars" do
assert_same "?a"
assert_same "?1"
assert_same "?è"
assert_same "??"
assert_same "?\\\\"
assert_same "?\\s"
assert_same "?🎾"
end
end
describe "floats" do
test "with normal notation" do
assert_same "0.0"
assert_same "1.0"
assert_same "123.456"
assert_same "0.0000001"
assert_same "001.100"
assert_format "0_10000_0.000_000", "0_100_000.000000"
end
test "with scientific notation" do
assert_same "1.0e1"
assert_same "1.0e-1"
assert_same "1.0e01"
assert_same "1.0e-01"
assert_same "001.100e-010"
assert_format "0_1_00_0_000.100e-010", "01_000_000.100e-010"
assert_format "1.0E01", "1.0e01"
assert_format "1.0E-01", "1.0e-01"
end
end
describe "atoms" do
test "true, false, nil" do
assert_same "nil"
assert_same "true"
assert_same "false"
end
test "without escapes" do
assert_same ~S[:foo]
end
test "with escapes" do
assert_same ~S[:"f\a\b\ro"]
assert_format ~S[:'f\a\b\ro'], ~S[:"f\a\b\ro"]
assert_format ~S[:'single \' quote'], ~S[:"single ' quote"]
assert_format ~S[:"double \" quote"], ~S[:"double \" quote"]
end
# TODO: Remove this check once we depend only on 20
if :erlang.system_info(:otp_release) >= '20' do
test "with unicode" do
assert_same ~S[:ólá]
end
end
test "does not reformat aliases" do
assert_same ~S[:"Elixir.String"]
end
test "removes quotes when they are not necessary" do
assert_format ~S[:"foo"], ~S[:foo]
assert_format ~S[:"++"], ~S[:++]
end
test "uses double quotes even when single quotes are used" do
assert_format ~S[:'foo bar'], ~S[:"foo bar"]
end
test "with interpolation" do
assert_same ~S[:"one #{2} three"]
end
test "with escapes and interpolation" do
assert_same ~S[:"one\n\"#{2}\"\nthree"]
end
test "with interpolation on line limit" do
bad = ~S"""
:"one #{"two"} three"
"""
good = ~S"""
:"one #{
"two"
} three"
"""
assert_format bad, good, @short_length
end
end
describe "strings" do
test "without escapes" do
assert_same ~S["foo"]
end
test "with escapes" do
assert_same ~S["f\a\b\ro"]
assert_same ~S["double \" quote"]
end
test "keeps literal new lines" do
assert_same """
"fo
o"
"""
end
test "with interpolation" do
assert_same ~S["one #{} three"]
assert_same ~S["one #{2} three"]
end
test "with interpolation uses block content" do
assert_format ~S["one #{@two(three)}"], ~S["one #{@two three}"]
end
test "with interpolation on line limit" do
bad = ~S"""
"one #{"two"} three"
"""
good = ~S"""
"one #{
"two"
} three"
"""
assert_format bad, good, @short_length
end
test "with escaped interpolation" do
assert_same ~S["one\#{two}three"]
end
test "with escapes and interpolation" do
assert_same ~S["one\n\"#{2}\"\nthree"]
end
test "is measured in graphemes" do
assert_same ~S"""
"áá#{0}áá"
""", @short_length
end
test "literal new lines don't count towards line limit" do
assert_same ~S"""
"one
#{"two"}
three"
""", @short_length
end
end
describe "charlists" do
test "without escapes" do
assert_same ~S['']
assert_same ~S[' ']
assert_same ~S['foo']
end
test "with escapes" do
assert_same ~S['f\a\b\ro']
assert_same ~S['single \' quote']
end
test "keeps literal new lines" do
assert_same """
'fo
o'
"""
end
test "with interpolation" do
assert_same ~S['one #{2} three']
end
test "with escape and interpolation" do
assert_same ~S['one\n\'#{2}\'\nthree']
end
test "with interpolation on line limit" do
bad = ~S"""
'one #{"two"} three'
"""
good = ~S"""
'one #{
"two"
} three'
"""
assert_format bad, good, @short_length
end
test "literal new lines don't count towards line limit" do
assert_same ~S"""
'one
#{"two"}
three'
""", @short_length
end
end
describe "string heredocs" do
test "without escapes" do
assert_same to_string(~S'''
"""
hello
"""
''')
end
test "with escapes" do
assert_same to_string(~S'''
"""
f\a\b\ro
"""
''')
assert_same to_string(~S'''
"""
multiple "\"" quotes
"""
''')
end
test "with interpolation" do
assert_same to_string(~S'''
"""
one
#{2}
three
"""
''')
assert_same to_string(~S'''
"""
one
"
#{2}
"
three
"""
''')
end
test "with interpolation on line limit" do
bad = to_string(~S'''
"""
one #{"two two"} three
"""
''')
good = to_string(~S'''
"""
one #{
"two two"
} three
"""
''')
assert_format bad, good, @short_length
end
test "literal new lines don't count towards line limit" do
assert_same to_string(~S'''
"""
one
#{"two"}
three
"""
'''), @short_length
end
end
describe "charlist heredocs" do
test "without escapes" do
assert_same ~S"""
'''
hello
'''
"""
end
test "with escapes" do
assert_same ~S"""
'''
f\a\b\ro
'''
"""
assert_same ~S"""
'''
multiple "\"" quotes
'''
"""
end
test "with interpolation" do
assert_same ~S"""
'''
one
#{2}
three
'''
"""
assert_same ~S"""
'''
one
"
#{2}
"
three
'''
"""
end
test "with interpolation on line limit" do
bad = ~S"""
'''
one #{"two two"} three
'''
"""
good = ~S"""
'''
one #{
"two two"
} three
'''
"""
assert_format bad, good, @short_length
end
test "literal new lines don't count towards line limit" do
assert_same ~S"""
'''
one
#{"two"}
three
'''
""", @short_length
end
end
end
| 18.949239
| 69
| 0.518216
|
9e6a65c85810ffab32c0b2652617f778cfa8fdb7
| 1,253
|
ex
|
Elixir
|
lib/ibu/cup.ex
|
ericgoodwin/ibu
|
949ce9baf4f794ed74c0903f43e25c1f9caec840
|
[
"Apache-2.0"
] | 1
|
2021-01-18T17:39:05.000Z
|
2021-01-18T17:39:05.000Z
|
lib/ibu/cup.ex
|
ericgoodwin/ibu
|
949ce9baf4f794ed74c0903f43e25c1f9caec840
|
[
"Apache-2.0"
] | null | null | null |
lib/ibu/cup.ex
|
ericgoodwin/ibu
|
949ce9baf4f794ed74c0903f43e25c1f9caec840
|
[
"Apache-2.0"
] | null | null | null |
defmodule IBU.Cup do
  @moduledoc """
  Struct and helpers for an IBU cup (a season-long competition with
  per-discipline standings).
  """

  defstruct([
    :ibu_id,
    :as_of,
    :description,
    :name,
    :short_name,
    :completed_races,
    :total_races,
    :standings
  ])

  @type t :: %__MODULE__{
          ibu_id: binary,
          as_of: DateTime.t(),
          description: binary,
          name: binary,
          short_name: binary,
          completed_races: integer,
          total_races: integer,
          standings: list
        }

  @doc """
  Builds a cup struct from the raw API payload.

  Maps the API's PascalCase keys onto the struct fields; `"Rows"` is turned
  into a list of standings via `IBU.Standing.build_from_api/3`.
  """
  @spec build_from_api(map) :: t
  def build_from_api(data) when is_map(data) do
    %__MODULE__{
      ibu_id: data["CupId"],
      # Qualified call instead of importing IBU.DateHelper for one function.
      as_of: IBU.DateHelper.to_date_time(data["AsOf"]),
      description: String.capitalize(data["CupInfo"]),
      name: data["CupName"],
      short_name: data["CupShortName"],
      completed_races: data["RaceCount"],
      total_races: data["TotalRaces"],
      standings:
        Enum.map(data["Rows"], &IBU.Standing.build_from_api(&1, data["CupId"], data["AsOf"]))
    }
  end

  @doc """
  Returns the cup ids for `season_id`: the mixed relay id plus one id per
  discipline for both the women's (`SW`) and men's (`SM`) cups.
  """
  @spec ibu_ids(integer) :: [binary]
  def ibu_ids(season_id) do
    # Common id prefix, e.g. "BT2021SWRLCP__". (Previously this was wrapped
    # in a one-element list and only produced the right string because
    # interpolation coerces iodata; a plain binary is what was intended.)
    prefix = "BT#{season_id}SWRLCP__"

    Enum.reduce(~w(TS IN PU MS RL SP NC), ["#{prefix}MXRL"], fn type, acc ->
      ["#{prefix}SW#{type}", "#{prefix}SM#{type}"] ++ acc
    end)
  end
end
| 25.06
| 93
| 0.5834
|
9e6a77dbee66fa9ee881e8a0e3bcef8e019d9ca8
| 4,636
|
ex
|
Elixir
|
clients/identity_toolkit/lib/google_api/identity_toolkit/v3/request_builder.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/identity_toolkit/lib/google_api/identity_toolkit/v3/request_builder.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/identity_toolkit/lib/google_api/identity_toolkit/v3/request_builder.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | 1
|
2018-07-28T20:50:50.000Z
|
2018-07-28T20:50:50.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.IdentityToolkit.V3.RequestBuilder do
  @moduledoc """
  Helper functions for building Tesla requests
  """

  # Matches `{var}` and reserved-expansion `{+var}` URI template placeholders.
  @path_template_regex ~r/{(\+?[^}]+)}/i

  @doc """
  Specify the request method when building a request

  ## Parameters

  - request (Map) - Collected request options
  - m (String) - Request method

  ## Returns

  Map
  """
  @spec method(map(), String.t()) :: map()
  def method(request, m) do
    Map.put_new(request, :method, m)
  end

  @doc """
  Specify the request URL when building a request, expanding any
  `{var}`/`{+var}` path-template placeholders from `replacements`.

  ## Parameters

  - request (Map) - Collected request options
  - u (String) - Request URL, possibly containing template placeholders
  - replacements (Map) - Placeholder name => replacement value

  ## Returns

  Map
  """
  # Fixed spec: the original used `Map.t` (nonexistent type) for replacements.
  @spec url(map(), String.t(), map()) :: map()
  def url(request, u, replacements) do
    url(request, replace_path_template_vars(u, replacements))
  end

  @spec url(map(), String.t()) :: map()
  def url(request, u) do
    Map.put_new(request, :url, u)
  end

  @doc """
  Replaces `{var}` placeholders in `u` with values from `replacements`;
  missing keys are replaced with the empty string.
  """
  @spec replace_path_template_vars(String.t(), map()) :: String.t()
  def replace_path_template_vars(u, replacements) do
    Regex.replace(@path_template_regex, u, fn _, var -> replacement_value(var, replacements) end)
  end

  # `{+var}` (reserved expansion): the substituted value is URI-decoded.
  defp replacement_value("+" <> name, replacements) do
    URI.decode(replacement_value(name, replacements))
  end

  defp replacement_value(name, replacements) do
    Map.get(replacements, name, "")
  end

  @doc """
  Add optional parameters to the request

  ## Parameters

  - request (Map) - Collected request options
  - definitions (Map) - Map of parameter name to parameter location.
  - options (KeywordList) - The provided optional parameters

  ## Returns

  Map
  """
  # Fixed spec: the original used the literal `:atom` where the type `atom()`
  # was intended.
  @spec add_optional_params(map(), %{optional(atom()) => atom()}, keyword()) :: map()
  def add_optional_params(request, _, []), do: request

  def add_optional_params(request, definitions, [{key, value} | tail]) do
    case definitions do
      %{^key => location} ->
        request
        |> add_param(location, key, value)
        |> add_optional_params(definitions, tail)

      _ ->
        # Unknown option: skip it and keep processing the rest.
        add_optional_params(request, definitions, tail)
    end
  end

  @doc """
  Add optional parameters to the request

  ## Parameters

  - request (Map) - Collected request options
  - location (atom) - Where to put the parameter
  - key (atom) - The name of the parameter
  - value (any) - The value of the parameter

  ## Returns

  Map
  """
  @spec add_param(map(), atom(), atom(), any()) :: map()
  def add_param(request, :body, :body, value), do: Map.put(request, :body, value)

  def add_param(request, :body, key, value) do
    request
    |> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
    |> Map.update!(
      :body,
      &Tesla.Multipart.add_field(&1, key, Poison.encode!(value),
        headers: [{:"Content-Type", "application/json"}]
      )
    )
  end

  def add_param(request, :file, name, path) do
    request
    |> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
    |> Map.update!(:body, &Tesla.Multipart.add_file(&1, path, name: name))
  end

  def add_param(request, :form, name, value) do
    request
    |> Map.update(:body, %{name => value}, &Map.put(&1, name, value))
  end

  # Any other location (:query, :headers, ...): append `{key, value}` to the
  # list stored under `location`, creating it on first use.
  def add_param(request, location, key, value) do
    Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
  end

  @doc """
  Handle the response for a Tesla request

  ## Parameters

  - env (Tesla.Env) - The response object
  - struct - The shape of the struct to deserialize into

  ## Returns

  {:ok, struct} on success
  {:error, info} on failure
  """
  @spec decode(Tesla.Env.t()) :: {:ok, struct()} | {:error, Tesla.Env.t()}
  def decode(%Tesla.Env{status: 200, body: body}), do: Poison.decode(body)

  def decode(response) do
    {:error, response}
  end

  # Fixed spec: the second argument may be `false` (skip decoding and return
  # the raw env), which the original spec did not allow.
  @spec decode(Tesla.Env.t(), false | struct()) ::
          {:ok, struct()} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
  def decode(%Tesla.Env{status: 200} = env, false), do: {:ok, env}

  def decode(%Tesla.Env{status: 200, body: body}, struct) do
    Poison.decode(body, as: struct)
  end

  def decode(response, _struct) do
    {:error, response}
  end
end
| 28.795031
| 137
| 0.669974
|
9e6ae782d138cc8414acc01007f55963a919e5b6
| 39,698
|
ex
|
Elixir
|
lib/phoenix/endpoint.ex
|
gjaldon/phoenix
|
a85a13d8bfbd7d768bb86d38da427875df3fb703
|
[
"MIT"
] | 1
|
2015-05-19T11:04:16.000Z
|
2015-05-19T11:04:16.000Z
|
lib/phoenix/endpoint.ex
|
gjaldon/phoenix
|
a85a13d8bfbd7d768bb86d38da427875df3fb703
|
[
"MIT"
] | null | null | null |
lib/phoenix/endpoint.ex
|
gjaldon/phoenix
|
a85a13d8bfbd7d768bb86d38da427875df3fb703
|
[
"MIT"
] | null | null | null |
defmodule Phoenix.Endpoint do
@moduledoc ~S"""
Defines a Phoenix endpoint.
The endpoint is the boundary where all requests to your
web application start. It is also the interface your
application provides to the underlying web servers.
Overall, an endpoint has three responsibilities:
* to provide a wrapper for starting and stopping the
endpoint as part of a supervision tree;
* to define an initial plug pipeline for requests
to pass through;
* to host web specific configuration for your
application.
## Endpoints
An endpoint is simply a module defined with the help
of `Phoenix.Endpoint`. If you have used the `mix phx.new`
generator, an endpoint was automatically generated as
part of your application:
defmodule YourApp.Endpoint do
use Phoenix.Endpoint, otp_app: :your_app
# plug ...
# plug ...
plug YourApp.Router
end
Endpoints must be explicitly started as part of your application
supervision tree. Endpoints are added by default
to the supervision tree in generated applications. Endpoints can be
added to the supervision tree as follows:
supervisor(YourApp.Endpoint, [])
### Endpoint configuration
All endpoints are configured in your application environment.
For example:
config :your_app, YourApp.Endpoint,
secret_key_base: "kjoy3o1zeidquwy1398juxzldjlksahdk3"
Endpoint configuration is split into two categories. Compile-time
configuration means the configuration is read during compilation
and changing it at runtime has no effect. The compile-time
configuration is mostly related to error handling and instrumentation.
Runtime configuration, instead, is accessed during or
after your application is started and can be read through the
`c:config/2` function:
YourApp.Endpoint.config(:port)
YourApp.Endpoint.config(:some_config, :default_value)
### Dynamic configuration
For dynamically configuring the endpoint, such as loading data
from environment variables or configuration files, Phoenix invokes
the `init/2` callback on the endpoint, passing a `:supervisor`
atom as first argument and the endpoint configuration as second.
All of Phoenix configuration, except the Compile-time configuration
below can be set dynamically from the `c:init/2` callback.
### Compile-time configuration
* `:code_reloader` - when `true`, enables code reloading functionality
* `:debug_errors` - when `true`, uses `Plug.Debugger` functionality for
debugging failures in the application. Recommended to be set to `true`
only in development as it allows listing of the application source
code during debugging. Defaults to `false`.
* `:render_errors` - responsible for rendering templates whenever there
is a failure in the application. For example, if the application crashes
with a 500 error during a HTML request, `render("500.html", assigns)`
will be called in the view given to `:render_errors`. Defaults to:
[view: MyApp.ErrorView, accepts: ~w(html), layout: false]
The default format is used when none is set in the connection.
* `:instrumenters` - a list of instrumenter modules whose callbacks will
be fired on instrumentation events. Read more on instrumentation in the
"Instrumentation" section below.
### Runtime configuration
* `:cache_static_manifest` - a path to a json manifest file that contains
static files and their digested version. This is typically set to
"priv/static/cache_manifest.json" which is the file automatically generated
by `mix phx.digest`.
* `:check_origin` - configure transports to check origins or not. May
be false, true or a list of hosts that are allowed. Hosts also support
wildcards. For example:
check_origin: ["//phoenixframework.org", "//*.example.com"]
* `:http` - the configuration for the HTTP server. Currently uses
Cowboy and accepts all options as defined by
[`Plug.Adapters.Cowboy`](https://hexdocs.pm/plug/Plug.Adapters.Cowboy.html).
Defaults to `false`.
* `:https` - the configuration for the HTTPS server. Currently uses
Cowboy and accepts all options as defined by
[`Plug.Adapters.Cowboy`](https://hexdocs.pm/plug/Plug.Adapters.Cowboy.html).
Defaults to `false`.
* `:force_ssl` - ensures no data is ever sent via HTTP, always redirecting
to HTTPS. It expects a list of options which are forwarded to `Plug.SSL`.
By default it sets the "strict-transport-security" header in HTTPS requests,
forcing browsers to always use HTTPS. If an unsafe request (HTTP) is sent,
it redirects to the HTTPS version using the `:host` specified in the `:url`
configuration. To dynamically redirect to the `host` of the current request,
set `:host` in the `:force_ssl` configuration to `nil`.
* `:secret_key_base` - a secret key used as a base to generate secrets
for encrypting and signing data. For example, cookies and tokens
are signed by default, but they may also be encrypted if desired.
Defaults to `nil` as it must be set per application.
* `:server` - when `true`, starts the web server when the endpoint
supervision tree starts. Defaults to `false`. The `mix phx.server`
task automatically sets this to `true`.
* `:url` - configuration for generating URLs throughout the app.
Accepts the `:host`, `:scheme`, `:path` and `:port` options. All
keys except `:path` can be changed at runtime. Defaults to:
[host: "localhost", path: "/"]
The `:port` option requires either an integer, string, or
`{:system, "ENV_VAR"}`. When given a tuple like `{:system, "PORT"}`,
the port will be referenced from `System.get_env("PORT")` at runtime
as a workaround for releases where environment specific information
is loaded only at compile-time.
The `:host` option requires a string or `{:system, "ENV_VAR"}`. Similar
to `:port`, when given a tuple like `{:system, "HOST"}`, the host
will be referenced from `System.get_env("HOST")` at runtime.
The `:scheme` option accepts `"http"` and `"https"` values. Default value
is inferred from the top-level `:http` or `:https` option. It is useful
when hosting Phoenix behind a load balancer or reverse proxy and
terminating SSL there.
The `:path` option can be used to override root path. Useful when hosting
Phoenix behind a reverse proxy with URL rewrite rules.
* `:static_url` - configuration for generating URLs for static files.
It will fallback to `url` if no option is provided. Accepts the same
options as `url`.
* `:watchers` - a set of watchers to run alongside your server. It
expects a list of tuples containing the executable and its arguments.
Watchers are guaranteed to run in the application directory, but only
when the server is enabled. For example, the watcher below will run
the "watch" mode of the brunch build tool when the server starts.
You can configure it to whatever build tool or command you want:
[node: ["node_modules/brunch/bin/brunch", "watch"]]
* `:live_reload` - configuration for the live reload option.
Configuration requires a `:patterns` option which should be a list of
file patterns to watch. When these files change, it will trigger a reload.
If you are using a tool like [pow](http://pow.cx) in development,
you may need to set the `:url` option appropriately.
live_reload: [
url: "ws://localhost:4000",
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
]
* `:pubsub` - configuration for this endpoint's pubsub adapter.
Configuration either requires a `:name` of the registered pubsub
server or a `:name` and `:adapter` pair. The pubsub name and adapter
are compile time configuration, while the remaining options are runtime.
The given adapter and name pair will be started as part of the supervision
tree. If no adapter is specified, the pubsub system will work by sending
events and subscribing to the given name. Defaults to:
[adapter: Phoenix.PubSub.PG2, name: MyApp.PubSub]
It also supports custom adapter configuration:
[name: :my_pubsub, adapter: Phoenix.PubSub.Redis,
host: "192.168.100.1"]
## Endpoint API
In the previous section, we have used the `c:config/2` function that is
automatically generated in your endpoint. Here's a list of all the functions
that are automatically defined in your endpoint:
* for handling paths and URLs: `c:struct_url/0`, `c:url/0`, `c:path/1`,
`c:static_url/0`, and `c:static_path/1`;
* for handling channel subscriptions: `c:subscribe/2` and `c:unsubscribe/1`;
* for broadcasting to channels: `c:broadcast/3`, `c:broadcast!/3`,
`c:broadcast_from/4`, and `c:broadcast_from!/4`
* for configuration: `c:start_link/0`, `c:config/2`, and `c:config_change/2`;
* for instrumentation: `c:instrument/3`;
* as required by the `Plug` behaviour: `c:Plug.init/1` and `c:Plug.call/2`.
## Instrumentation
Phoenix supports instrumentation through an extensible API. Each endpoint
defines an `c:instrument/3` macro that both users and Phoenix internals can call
to instrument generic events. This macro is responsible for measuring the time
it takes for the event to be processed and for notifying a list of interested
instrumenter modules of this measurement.
You can configure this list of instrumenter modules in the compile-time
configuration of your endpoint. (see the `:instrumenters` option above). The
way these modules express their interest in events is by exporting public
functions where the name of each function is the name of an event. For
example, if someone instruments the `:render_view` event, then each
instrumenter module interested in that event will have to export
`render_view/3`.
### Callbacks cycle
The event callback sequence is:
1. The event callback is called *before* the event happens (in this case,
before the view is rendered) with the atom `:start` as the first
argument; see the "Before clause" section below.
2. The event occurs (in this case, the view is rendered).
3. The same event callback is called again, this time with the atom `:stop`
as the first argument; see the "After clause" section below.
The second and third argument that each event callback takes depends on the
callback being an "after" or a "before" callback i.e. it depends on the
value of the first argument, `:start` or `:stop`. For this reason, most of
the time you will want to define (at least) two separate clauses for each
event callback, one for the "before" and one for the "after" callbacks.
All event callbacks are run in the same process that calls the `c:instrument/3`
macro; hence, instrumenters should be careful to avoid performing blocking actions.
If an event callback fails in any way (exits, throws, or raises), it won't
affect anything as the error is caught, but the failure will be logged. Note
that "after" callbacks are not guaranteed to be called as, for example, a link
may break before they've been called.
#### "Before" clause
When the first argument to an event callback is `:start`, the signature of
that callback is:
event_callback(:start, compile_metadata, runtime_metadata)
where:
* `compile_metadata` is a map of compile-time metadata about the environment
where `instrument/3` has been called. It contains the module where the
instrumentation is happening (under the `:module` key), the file and line
(`:file` and `:line`), and the function inside which the instrumentation
is happening (under `:function`). This information can be used arbitrarily
by the callback.
* `runtime_metadata` is a map of runtime data that the instrumentation
passes to the callbacks. This can be used for any purposes: for example,
when instrumenting the rendering of a view, the name of the view could be
passed in these runtime data so that instrumenters know which view is
being rendered (`instrument(:view_render, %{view: "index.html"}, fn
...)`).
#### "After" clause
When the first argument to an event callback is `:stop`, the signature of that
callback is:
event_callback(:stop, time_diff, result_of_before_callback)
where:
* `time_diff` is an integer representing the time it took to execute the
instrumented function **in native units**.
* `result_of_before_callback` is the return value of the "before" clause of
the same `event_callback`. This is a means of passing data from the
"before" clause to the "after" clause when instrumenting.
The return value of each "before" event callback will be stored and passed to
the corresponding "after" callback.
### Using instrumentation
Each Phoenix endpoint defines its own `instrument/3` macro. This macro is
called like this:
require MyApp.Endpoint
MyApp.Endpoint.instrument(:render_view, %{view: "index.html"}, fn ->
# actual view rendering
end)
All the instrumenter modules that export a `render_view/3` function will be
notified of the event so that they can perform their respective actions.
### Phoenix default events
By default, Phoenix instruments the following events:
* `:phoenix_controller_call` - it's the whole controller pipeline.
The `%Plug.Conn{}` is passed as runtime metadata.
* `:phoenix_controller_render` - the rendering of a view from a
controller. The map of runtime metadata passed to instrumentation
callbacks has the `:view` key - for the name of the view, e.g. `HexWeb.ErrorView`,
the `:template` key - for the name of the template, e.g.,
`"index.html"`, the `:format` key - for the format of the template, and
the `:conn` key - containing the `%Plug.Conn{}`.
* `:phoenix_channel_join` - the joining of a channel. The `%Phoenix.Socket{}`
and join params are passed as runtime metadata via `:socket` and `:params`.
* `:phoenix_channel_receive` - the receipt of an incoming message over a
channel. The `%Phoenix.Socket{}`, payload, event, and ref are passed as
runtime metadata via `:socket`, `:params`, `:event`, and `:ref`.
### Dynamic instrumentation
If you want to instrument a piece of code, but the endpoint that should
instrument it (the one that contains the `c:instrument/3` macro you want to use)
is not known at compile time, only at runtime, you can use the
`Phoenix.Endpoint.instrument/4` macro. Refer to its documentation for more
information.
"""
@type topic :: String.t
@type event :: String.t
@type msg :: map

# Configuration

@doc """
Starts the Endpoint supervision tree.

Starts endpoint's configuration cache and possibly the servers for
handling requests.
"""
@callback start_link() :: Supervisor.on_start

@doc """
Access the endpoint configuration given by key.
"""
@callback config(key :: atom, default :: term) :: term

@doc """
Reload the endpoint configuration on application upgrades.
"""
@callback config_change(changed :: term, removed :: term) :: term

@doc """
Initialize the endpoint configuration.

Invoked when the endpoint supervisor starts, allows dynamically
configuring the endpoint from system environment or other runtime sources.
"""
@callback init(:supervisor, config :: Keyword.t) :: {:ok, Keyword.t}

# Paths and URLs

@doc """
Generates the endpoint base URL, but as a `URI` struct.
"""
@callback struct_url() :: URI.t

@doc """
Generates the endpoint base URL without any path information.
"""
@callback url() :: String.t

@doc """
Generates the path information when routing to this endpoint.
"""
@callback path(path :: String.t) :: String.t

@doc """
Generates the static URL without any path information.
"""
@callback static_url() :: String.t

@doc """
Generates a route to a static file in `priv/static`
"""
@callback static_path(path :: String.t) :: String.t

# Channels

@doc """
Subscribes the caller to the given topic.

See `Phoenix.PubSub.subscribe/3` for options.
"""
@callback subscribe(topic, opts :: Keyword.t) :: :ok | {:error, term}

@doc """
Unsubscribes the caller from the given topic.
"""
@callback unsubscribe(topic) :: :ok | {:error, term}

@doc """
Broadcasts a `msg` as `event` in the given `topic`.
"""
@callback broadcast(topic, event, msg) :: :ok | {:error, term}

@doc """
Broadcasts a `msg` as `event` in the given `topic`.

Raises in case of failures.
"""
@callback broadcast!(topic, event, msg) :: :ok | no_return

@doc """
Broadcasts a `msg` from the given `from` as `event` in the given `topic`.
"""
@callback broadcast_from(from :: pid, topic, event, msg) :: :ok | {:error, term}

@doc """
Broadcasts a `msg` from the given `from` as `event` in the given `topic`.

Raises in case of failures.
"""
@callback broadcast_from!(from :: pid, topic, event, msg) :: :ok | no_return

# Instrumentation

@doc """
Allows instrumenting operation defined by `function`.

`runtime_metadata` may be omitted and defaults to `nil`.

Read more about instrumentation in the "Instrumentation" section.
"""
# NOTE: spec argument renamed from the misspelled `funcion` to `function`,
# matching the wording of the @doc above. Typespec argument names are purely
# documentary, so this is not an interface change.
@macrocallback instrument(instrument_event :: Macro.t, runtime_metadata :: Macro.t, function :: Macro.t) :: Macro.t
# Injects the endpoint machinery into the using module: the
# `Phoenix.Endpoint` behaviour plus the code generated by the
# private `config/1`, `pubsub/0`, `plug/0` and `server/0` helpers.
@doc false
defmacro __using__(opts) do
  quote do
    @behaviour Phoenix.Endpoint

    # Order matters: config/1 binds var!(config), which the quoted
    # pubsub/plug sections read at compile time.
    unquote(config(opts))
    unquote(pubsub())
    unquote(plug())
    unquote(server())
  end
end
# Generates the compile-time configuration bootstrap injected by
# `__using__/1`; runs in the context of the endpoint module.
defp config(opts) do
  quote do
    # :otp_app is mandatory — it tells the supervisor where to read config.
    @otp_app unquote(opts)[:otp_app] || raise "endpoint expects :otp_app to be given"

    # var!/1 deliberately escapes macro hygiene so the other quoted
    # sections (pubsub/plug) can read the resolved configuration.
    var!(config) = Phoenix.Endpoint.Supervisor.config(@otp_app, __MODULE__)
    var!(code_reloading?) = var!(config)[:code_reloader]

    # Avoid unused variable warnings
    _ = var!(code_reloading?)

    @doc """
    Callback invoked on endpoint initialization.
    """
    # Default no-op implementation of the c:init/2 callback; endpoints
    # may override it for runtime (e.g. env-var based) configuration.
    def init(_key, config) do
      {:ok, config}
    end

    defoverridable init: 2
  end
end
# Fetches the pubsub server configured on the given endpoint module,
# raising a descriptive ArgumentError when none was configured.
@doc false
def __pubsub_server__!(module) do
  module.__pubsub_server__() ||
    raise ArgumentError, """
    No pubsub server configured, please setup pubsub in your config.
    By default this looks like:
    config :my_app, MyApp.PubSub,
    ...,
    pubsub: [name: MyApp.PubSub,
    adapter: Phoenix.PubSub.PG2]
    """
end
# Generates the pubsub convenience API (subscribe/unsubscribe/broadcast)
# injected into the endpoint module by `__using__/1`.
defp pubsub() do
  quote do
    # Resolve the pubsub server name at compile time. An :adapter
    # without a :name is a configuration error caught here.
    @pubsub_server var!(config)[:pubsub][:name] ||
      (if var!(config)[:pubsub][:adapter] do
        raise ArgumentError, "an adapter was given to :pubsub but no :name was defined, " <>
          "please pass the :name option accordingly"
      end)

    def __pubsub_server__, do: @pubsub_server

    # TODO v2: Remove pid version
    @doc false
    def subscribe(pid, topic) when is_pid(pid) and is_binary(topic) do
      IO.warn "#{__MODULE__}.subscribe/2 is deprecated, please use subscribe/1"
      Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), pid, topic, [])
    end

    def subscribe(pid, topic, opts) when is_pid(pid) and is_binary(topic) and is_list(opts) do
      Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), pid, topic, opts)
    end

    def subscribe(topic) when is_binary(topic) do
      Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, [])
    end

    def subscribe(topic, opts) when is_binary(topic) and is_list(opts) do
      Phoenix.PubSub.subscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, opts)
    end

    # TODO v2: Remove pid version
    @doc false
    def unsubscribe(pid, topic) do
      IO.warn "#{__MODULE__}.unsubscribe/2 is deprecated, please use unsubscribe/1"
      Phoenix.PubSub.unsubscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic)
    end

    def unsubscribe(topic) do
      Phoenix.PubSub.unsubscribe(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic)
    end

    # Broadcast helpers delegate to the channel server, always passing
    # the endpoint's configured pubsub server.
    def broadcast_from(from, topic, event, msg) do
      Phoenix.Channel.Server.broadcast_from(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), from, topic, event, msg)
    end

    def broadcast_from!(from, topic, event, msg) do
      Phoenix.Channel.Server.broadcast_from!(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), from, topic, event, msg)
    end

    def broadcast(topic, event, msg) do
      Phoenix.Channel.Server.broadcast(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, event, msg)
    end

    def broadcast!(topic, event, msg) do
      Phoenix.Channel.Server.broadcast!(Phoenix.Endpoint.__pubsub_server__!(__MODULE__), topic, event, msg)
    end
  end
end
# Generates the Plug pipeline injected into the endpoint module:
# Plug.Builder, optional Plug.SSL enforcement, and optional
# Plug.Debugger (with Phoenix-branded error-page styling).
defp plug() do
  quote location: :keep do
    use Plug.Builder
    import Phoenix.Endpoint

    # Accumulates {path, module} pairs registered via the socket/2 macro;
    # read back in __before_compile__/1 to define __sockets__/0.
    Module.register_attribute(__MODULE__, :phoenix_sockets, accumulate: true)

    # Redirect HTTP to HTTPS when :force_ssl is configured.
    if force_ssl = Phoenix.Endpoint.__force_ssl__(__MODULE__, var!(config)) do
      plug Plug.SSL, force_ssl
    end

    # In :debug_errors mode, render rich error pages (dev only).
    # The :logo value is an inline base64-encoded PNG data URI.
    if var!(config)[:debug_errors] do
      use Plug.Debugger, otp_app: @otp_app, style: [
        primary: "#EB532D",
        logo: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAJEAAABjCAYAAACbguIxAAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACxMBAJqcGAAAHThJREFUeAHtPWlgVOW197vbLNkTFoFQlixAwpIVQZ8ooE+tRaBWdoK4VF5tfe2r1tb2ta611r6n9b1Xd4GETRGxIuJSoKACAlkIkD0hsiRoIHtmues7J3LpOJ2Z3Jm5yUxi5s+991vOOd+5Z777fWf7CGXA79Ct46ZGmyPnshw9WaX5qTSlJBCKjqU51aoohKVUivaIRqUUmlactEK3iCp1gablTztsnZ9kbK16w2P7wcKw5AAJhKqiBWlzIyIjVrKsnKtQ7HiiqiaGZQOC5Qm/JAkiUekqSha2X7/x2JP1FOXw1G6wLDw4oPvFl94+ZVmkib9HJnQuy7MRfUW+qoqSLMtHWi60PzB9Z+2BvsI7iEc/B3wK0d8Wjk8dHRX7B5hjbqBZU6R+sMa3VBWFUiSxqLmhdc303XVHjMcwCDFQDngUosO3JF0VPzz2eSKRLJrjPLbxhVARYYXDUCKlKAJFMV00yw731d6fOlWVKadT/mjSxsIb/ek32Lb3OPANAdl/c3La8CExmziGnUYYz2thd1JwhpBk5RDDyBccTuWgKNpqWxzCsdk76iuwbdXiyd/nIqO2ufcL9lmVBZvgcP5k4pYTrwcLa7B/cBy4LESVeVlvsxS9wN+ZR1Jkioi2B5M3nPiTJ1LqVuXaCcuaPdUZUSbJjg9T1hXfZASsQRiBcYDULJ/2OM1zDxOa0zf1eMFDROmcQ5Jeam7peE+iKOfQ+IjFHM//gqF7T4A0UhD3dflHkusHd3EaS/r0SupWZO+lCHWFwislio2Kpi30cKKQZEKYGEL7L1e4ZqFkRSWs/2upYEauSpKjpblldvaOmkPBwBns6z8HLn/O3Lsenjs+N2pU7G94hr6JpjnevT4cn0GQ1HZb29JBZWXfvh2vQuRCBg2z1W5i4q9zKQvfW1mmOrrsy6duPb4pfIkcWJTp+V4p4zcUzrY72h9SJCX8R88wVGSEdWPZkskrw5/YgUGhnpno8khLbk9dHBMZu4Wimctl4XqjKCrV4ehcmbH5xAZXGsuWTLpFdSpylyC1t3RIjQfLv2h6pInqdG0zeO8fB/wSIgR9clnGw1aL5Un/0ISmtSorVJe97cYpb1R8pFFQtSzzBc5iXoPPMqyhCKOqlEycKqW2gHL0vCqRvR1S146srRX7tD6DV98c8FuIEFxlXnYxz/EZvkGHR60kSUrjVy1TZu2qKdMoqr4j8wOWMXvVeOMsJqlyB0vkfRdPtz42aGbROOf5GpAQIai61Tlgiw1Ot+SZJONLFUUU5q49GlPvokequStzM0OZl/SEDWczmLIq2mwdv8rcVvVOT+2/jfV6FtYe+SJQ9CseK8KwEFUUu1flNLqSlvxa8VKH0/msa5mnezT/EJ6fGBubsL1qdfahVxOj4z21+zaXBTwTIdNq7siVGIYN/1X2pTcsCY6alILiFNcXfmxR+qrICMsrIGica7m3e0WWRFWyP+zNzOOt30AuD3gmQqbAwnRPf2IOy5uTa1dlfuxK87Q3T64/V9o0RhLFBtdyb/c0w3KMKeqZyhVZu721+baVByVELS3tv+pvDANT3vUVt019xpXuWYVfNKbkHx0liM7tuKjW8+NNpjk1q6af/9vkcYa5uejBG45tgvqc4YCq83I6WY7rM09Ho5jY1n5xiSfzCOqRLBbrWormh+rBBYt20emw/yht88lX9bQfiG2CmomQIYqifN4fGRMZGb1p46QRY9xpT9tSvnPc2sJhotjxgiLLTvd692dcS1ms0a9U5uW85173bXkOWohssrSjPzKLAfXEjNzEclfa86cOH4aRK1iWmn/iR0nrDpslQdiqqKLo2s7TPc9xt1Tm5bafXDL1fk/1A7ks6M/Z7mmJo8ZmjDpLs0H
LY0j4jAtqXA8hckzfjM+M/7ugCqUTNxxf7EIQe3LFlGdZYlrC89wQl3KPt7IoXJAVeqfU1b4lfXvlB66Ntt88OmnikJhFxEbH7zt+4el7qxouuNb3x/ebND+PC/FzSyKrqm5I1dQnWWJa8jPcEId6j7eyKFyQFXqn1NW+F39vtLDb7PNB59+qigUdhGx8f7bvl94+t6qpgu
# Generates the runtime API injected into the endpoint module:
# supervision entry points, config access, and URL/path helpers.
# `unquote: false` keeps the inner `unquote`s (none here) literal.
defp server() do
  quote location: :keep, unquote: false do
    @doc false
    def child_spec(opts) do
      %{
        id: __MODULE__,
        start: {__MODULE__, :start_link, [opts]},
        type: :supervisor
      }
    end

    defoverridable child_spec: 1

    @doc """
    Starts the endpoint supervision tree.
    """
    def start_link(_opts \\ []) do
      Phoenix.Endpoint.Supervisor.start_link(@otp_app, __MODULE__)
    end

    @doc """
    Returns the endpoint configuration for `key`
    Returns `default` if the key does not exist.
    """
    # Configuration is cached in an ETS table named after the endpoint
    # module, so lookups avoid hitting the application environment.
    def config(key, default \\ nil) do
      case :ets.lookup(__MODULE__, key) do
        [{^key, val}] -> val
        [] -> default
      end
    end

    @doc """
    Reloads the configuration given the application environment changes.
    """
    def config_change(changed, removed) do
      Phoenix.Endpoint.Supervisor.config_change(__MODULE__, changed, removed)
    end

    @doc """
    Generates the endpoint base URL without any path information.
    It uses the configuration under `:url` to generate such.
    """
    # Each helper below memoizes its result via Phoenix.Config.cache
    # under a private key, computing it once per configuration.
    def url do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_url__,
        &Phoenix.Endpoint.Supervisor.url/1)
    end

    @doc """
    Generates the static URL without any path information.
    It uses the configuration under `:static_url` to generate
    such. It falls back to `:url` if `:static_url` is not set.
    """
    def static_url do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_static_url__,
        &Phoenix.Endpoint.Supervisor.static_url/1)
    end

    @doc """
    Generates the endpoint base URL but as a `URI` struct.
    It uses the configuration under `:url` to generate such.
    Useful for manipulating the URL data and passing it to
    URL helpers.
    """
    def struct_url do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_struct_url__,
        &Phoenix.Endpoint.Supervisor.struct_url/1)
    end

    @doc """
    Returns the host for the given endpoint.
    """
    def host do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_host__,
        &Phoenix.Endpoint.Supervisor.host/1)
    end

    @doc """
    Generates the path information when routing to this endpoint.
    """
    def path(path) do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_path__,
        &Phoenix.Endpoint.Supervisor.path/1) <> path
    end

    @doc """
    Generates the script name.
    """
    def script_name do
      Phoenix.Config.cache(__MODULE__,
        :__phoenix_script_name__,
        &Phoenix.Endpoint.Supervisor.script_name/1)
    end

    @doc """
    Generates a route to a static file in `priv/static`.
    """
    # Two cache lookups: the static path prefix, then the (possibly
    # digested) path for this particular file.
    def static_path(path) do
      Phoenix.Config.cache(__MODULE__, :__phoenix_static__,
        &Phoenix.Endpoint.Supervisor.static_path/1) <>
      Phoenix.Config.cache(__MODULE__, {:__phoenix_static__, path},
        &Phoenix.Endpoint.Supervisor.static_path(&1, path))
    end
  end
end
# Resolves the options handed to Plug.SSL, or nil when :force_ssl
# is not enabled in the endpoint configuration.
@doc false
def __force_ssl__(module, config) do
  case config[:force_ssl] do
    disabled when disabled in [nil, false] ->
      nil

    ssl_opts ->
      resolved_host = ssl_opts[:host] || config[:url][:host] || "localhost"

      # Forcing SSL against "localhost" is almost certainly a
      # misconfiguration, so warn loudly on stderr.
      if resolved_host == "localhost" do
        IO.puts :stderr, """
        warning: you have enabled :force_ssl but your host is currently set to localhost.
        Please configure your endpoint url host properly:
        config #{inspect module}, url: [host: "YOURHOST.com"]
        """
      end

      # Default :host to an MFA so redirects use the endpoint host
      # resolved at runtime, unless the user supplied one explicitly.
      Keyword.put_new(ssl_opts, :host, {module, :host, []})
  end
end
# Runs after all plugs are declared: wraps call/2 with error rendering
# and defines __sockets__/0 from the accumulated @phoenix_sockets.
@doc false
defmacro __before_compile__(env) do
  sockets = Module.get_attribute(env.module, :phoenix_sockets)
  otp_app = Module.get_attribute(env.module, :otp_app)
  instrumentation = Phoenix.Endpoint.Instrument.definstrument(otp_app, env.module)

  quote do
    defoverridable [call: 2]

    # Inline render errors so we set the endpoint before calling it.
    def call(conn, opts) do
      # Seed the conn with endpoint-specific data before running
      # the plug pipeline defined by Plug.Builder (via super/2).
      conn = put_in conn.secret_key_base, config(:secret_key_base)
      conn = put_in conn.script_name, script_name()
      conn = Plug.Conn.put_private(conn, :phoenix_endpoint, __MODULE__)

      # Catch any throw/exit/raise from the pipeline and render the
      # configured error view instead of leaking the failure.
      try do
        super(conn, opts)
      catch
        kind, reason ->
          Phoenix.Endpoint.RenderErrors.__catch__(conn, kind, reason, @phoenix_render_errors)
      end
    end

    @doc """
    Returns all sockets configured in this endpoint.
    """
    def __sockets__, do: unquote(sockets)

    unquote(instrumentation)
  end
end
## API

@doc """
Defines a mount-point for a Socket module to handle channel definitions.

## Examples

    socket "/ws", MyApp.UserSocket
    socket "/ws/admin", MyApp.AdminUserSocket

By default, the given path is a websocket upgrade endpoint,
with long-polling fallback. The transports can be configured
within the Socket handler. See `Phoenix.Socket` for more information
on defining socket handlers.
"""
defmacro socket(path, module) do
  # Tear the alias to simply store the root in the AST.
  # This will make Elixir unable to track the dependency
  # between endpoint <-> socket and avoid recompiling the
  # endpoint (alongside the whole project) whenever the
  # socket changes.
  module = tear_alias(module)

  quote do
    # Registered into the accumulating attribute; collected by
    # __before_compile__/1 into __sockets__/0.
    @phoenix_sockets {unquote(path), unquote(module)}
  end
end
@doc """
Instruments the given function using the instrumentation provided by
the given endpoint.
To specify the endpoint that will provide instrumentation, the first argument
can be:
* a module name - the endpoint itself
* a `Plug.Conn` struct - this macro will look for the endpoint module in the
`:private` field of the connection; if it's not there, `fun` will be
executed with no instrumentation
* a `Phoenix.Socket` struct - this macro will look for the endpoint module in the
`:endpoint` field of the socket; if it's not there, `fun` will be
executed with no instrumentation
Usually, users should prefer to instrument events using the `c:instrument/3`
macro defined in every Phoenix endpoint. This macro should only be used for
cases when the endpoint is dynamic and not known at compile time.
## Examples
endpoint = MyApp.Endpoint
Phoenix.Endpoint.instrument endpoint, :render_view, fn -> ... end
"""
defmacro instrument(endpoint_or_conn_or_socket, event, runtime \\ Macro.escape(%{}), fun) do
compile = Phoenix.Endpoint.Instrument.strip_caller(__CALLER__) |> Macro.escape()
quote do
case Phoenix.Endpoint.Instrument.extract_endpoint(unquote(endpoint_or_conn_or_socket)) do
nil -> unquote(fun).()
endpoint -> endpoint.instrument(unquote(event), unquote(compile), unquote(runtime), unquote(fun))
end
end
end
@doc """
Checks if Endpoint's web server has been configured to start.
* `otp_app` - The otp app running the endpoint, for example `:my_app`
* `endpoint` - The endpoint module, for example `MyApp.Endpoint`
## Examples
iex> Phoenix.Endpoint.server?(:my_app, MyApp.Endpoint)
true
"""
def server?(otp_app, endpoint) when is_atom(otp_app) and is_atom(endpoint) do
Phoenix.Endpoint.Supervisor.server?(otp_app, endpoint)
end
defp tear_alias({:__aliases__, meta, [h|t]}) do
alias = {:__aliases__, meta, [h]}
quote do
Module.concat([unquote(alias)|unquote(t)])
end
end
defp tear_alias(other), do: other
end
| 48.060533
| 10,136
| 0.752884
|
9e6af080c6e8640a9ac0b7c4e8e051480df77e86
| 496
|
exs
|
Elixir
|
apps/core/priv/prm_repo/migrations/20180611121332_create_contract_divisions.exs
|
ehealth-ua/ehealth.api
|
4ffe26a464fe40c95fb841a4aa2e147068f65ca2
|
[
"Apache-2.0"
] | 8
|
2019-06-14T11:34:49.000Z
|
2021-08-05T19:14:24.000Z
|
apps/core/priv/prm_repo/migrations/20180611121332_create_contract_divisions.exs
|
edenlabllc/ehealth.api.public
|
4ffe26a464fe40c95fb841a4aa2e147068f65ca2
|
[
"Apache-2.0"
] | 1
|
2019-07-08T15:20:22.000Z
|
2019-07-08T15:20:22.000Z
|
apps/core/priv/prm_repo/migrations/20180611121332_create_contract_divisions.exs
|
ehealth-ua/ehealth.api
|
4ffe26a464fe40c95fb841a4aa2e147068f65ca2
|
[
"Apache-2.0"
] | 6
|
2018-05-11T13:59:32.000Z
|
2022-01-19T20:15:22.000Z
|
defmodule Core.PRMRepo.Migrations.CreateContractDivisions do
  @moduledoc false
  use Ecto.Migration
  # Creates the contract_divisions table linking contracts to divisions.
  def change do
    create table(:contract_divisions, primary_key: false) do
      # Application-supplied UUID primary key (no DB-generated serial id).
      add(:id, :uuid, primary_key: true)
      # NOTE: division_id carries no DB-level foreign key constraint here.
      add(:division_id, :uuid, null: false)
      # FK to contracts; :nothing leaves rows in place if the parent contract row is deleted.
      add(:contract_id, references(:contracts, type: :uuid, on_delete: :nothing))
      # Audit columns: UUIDs of the users who created / last updated the row.
      add(:inserted_by, :uuid, null: false)
      add(:updated_by, :uuid, null: false)
      # inserted_at / updated_at with microsecond-precision UTC timestamps.
      timestamps(type: :utc_datetime_usec)
    end
  end
end
| 27.555556
| 81
| 0.693548
|
9e6af111c8050d37e1ad5f5256efb1c473ab6b89
| 14,164
|
ex
|
Elixir
|
lib/ash_json_api/controllers/helpers.ex
|
ash-project/ash_json_api
|
93cb568a3f1ee91f8cfc0f493088d15b42a3f6ea
|
[
"MIT"
] | 11
|
2020-09-21T22:03:42.000Z
|
2022-02-02T23:48:11.000Z
|
lib/ash_json_api/controllers/helpers.ex
|
ash-project/ash_json_api
|
93cb568a3f1ee91f8cfc0f493088d15b42a3f6ea
|
[
"MIT"
] | 44
|
2020-05-02T04:37:42.000Z
|
2021-06-25T14:38:44.000Z
|
lib/ash_json_api/controllers/helpers.ex
|
ash-project/ash_json_api
|
93cb568a3f1ee91f8cfc0f493088d15b42a3f6ea
|
[
"MIT"
] | 9
|
2020-08-25T20:23:34.000Z
|
2022-02-14T04:40:10.000Z
|
defmodule AshJsonApi.Controllers.Helpers do
  @moduledoc false
  # @moduledoc """
  # When we open up ash json api tooling to allow people to build custom
  # behavior around it, we can use this documentation
  # Tools for control flow around a request, and common controller utilities.
  # While we haven't focused on supporting it yet, this will eventually be a set of tools
  # that can be used to build custom controller actions, without having to write everything
  # yourself.
  # `chain/2` lets us pipe cleanly, only doing stateful things if no errors
  # have been generated yet.
  # """
  alias AshJsonApi.Controllers.Response
  alias AshJsonApi.{Error, Request}
  alias AshJsonApi.Includes.Includer
  require Ash.Query
  # Runs `function` via chain/3; if the request has accumulated errors, the
  # fallback renders them onto `conn` instead.
  def render_or_render_errors(request, conn, function) do
    chain(request, function,
      fallback: fn request ->
        Response.render_errors(conn, request)
      end
    )
  end
  # Extracts JSON:API includes from the current result and stores the stripped
  # result and the includes back on the request assigns.
  def fetch_includes(request) do
    chain(request, fn request ->
      {new_result, includes} = Includer.get_includes(request.assigns.result, request)
      request
      |> Request.assign(:result, new_result)
      |> Request.assign(:includes, includes)
    end)
  end
  # Runs a read action for an index-style route: builds an Ash.Query from the
  # request's filter/sort/fields/includes and assigns the result (or a :read
  # error) on the request.
  def fetch_records(request) do
    chain(request, fn request ->
      # Only pass the actor when the API is configured to authorize.
      params =
        if AshJsonApi.authorize?(request.api) do
          [actor: request.actor]
        else
          []
        end
      page_params = Map.get(request.assigns, :page)
      params =
        if page_params do
          Keyword.put(params, :page, page_params)
        else
          params
        end
      request.resource
      |> Ash.Query.new(request.api)
      |> Ash.Query.load(request.includes_keyword)
      |> Ash.Query.filter(^request.filter)
      |> Ash.Query.sort(request.sort)
      |> Ash.Query.load(fields(request, request.resource))
      |> Ash.Query.set_tenant(request.tenant)
      |> Ash.Query.set_arguments(request.arguments)
      |> request.api.read(params)
      |> case do
        {:ok, result} ->
          Request.assign(request, :result, result)
        {:error, error} ->
          Request.add_error(request, error, :read)
      end
    end)
  end
  # Creates a record from the request's attributes/arguments, loads requested
  # fields and includes, then assigns the created record as the result.
  def create_record(request) do
    chain(request, fn %{api: api, resource: resource} ->
      params =
        if AshJsonApi.authorize?(request.api) do
          [
            action: request.action,
            actor: request.actor
          ]
        else
          [
            action: request.action
          ]
        end
      resource
      |> Ash.Changeset.new(request.attributes || %{})
      |> Ash.Changeset.set_tenant(request.tenant)
      |> Ash.Changeset.set_arguments(request.arguments)
      |> api.create(params)
      |> api.load(fields(request, request.resource) ++ (request.includes_keyword || []))
      |> case do
        {:ok, record} ->
          Request.assign(request, :result, record)
        {:error, error} ->
          Request.add_error(request, error, :create)
      end
    end)
  end
  # Updates the record previously assigned as :result (usually fetched from the
  # path) using the request's attributes/arguments, then reloads fields/includes.
  def update_record(request) do
    chain(request, fn %{api: api, assigns: %{result: result}} ->
      params =
        if AshJsonApi.authorize?(request.api) do
          [
            action: request.action,
            actor: request.actor
          ]
        else
          [
            action: request.action
          ]
        end
      result
      |> Ash.Changeset.new(request.attributes || %{})
      |> Ash.Changeset.set_tenant(request.tenant)
      |> Ash.Changeset.set_arguments(request.arguments)
      |> api.update(params)
      |> api.load(fields(request, request.resource) ++ (request.includes_keyword || []))
      |> case do
        {:ok, record} ->
          Request.assign(request, :result, record)
        {:error, error} ->
          Request.add_error(request, error, :update)
      end
    end)
  end
  # Appends the request's resource identifiers to the named relationship of the
  # current result; assigns the updated parent and the relationship's new value.
  def add_to_relationship(request, relationship_name) do
    chain(request, fn %{api: api, assigns: %{result: result}} ->
      params =
        if AshJsonApi.authorize?(request.api) do
          [actor: request.actor]
        else
          []
        end
      result
      |> Ash.Changeset.new()
      |> Ash.Changeset.append_to_relationship(relationship_name, request.resource_identifiers)
      |> Ash.Changeset.set_tenant(request.tenant)
      |> Ash.Changeset.set_arguments(request.arguments)
      |> api.update(params)
      |> api.load(fields(request, request.resource))
      |> case do
        {:ok, updated} ->
          request
          |> Request.assign(:record_from_path, updated)
          |> Request.assign(:result, Map.get(updated, relationship_name))
        {:error, error} ->
          Request.add_error(request, error, :add_to_relationship)
      end
    end)
  end
  # Replaces the entire contents of the named relationship with the request's
  # resource identifiers (JSON:API relationship PATCH semantics).
  def replace_relationship(request, relationship_name) do
    chain(request, fn %{api: api, assigns: %{result: result}} ->
      params =
        if AshJsonApi.authorize?(request.api) do
          [actor: request.actor]
        else
          []
        end
      result
      |> Ash.Changeset.new()
      |> Ash.Changeset.replace_relationship(relationship_name, request.resource_identifiers)
      |> Ash.Changeset.set_tenant(request.tenant)
      |> Ash.Changeset.set_arguments(request.arguments)
      |> api.update(params)
      |> api.load(fields(request, request.resource))
      |> case do
        {:ok, updated} ->
          request
          |> Request.assign(:record_from_path, updated)
          |> Request.assign(:result, Map.get(updated, relationship_name))
        {:error, error} ->
          Request.add_error(request, error, :replace_relationship)
      end
    end)
  end
  # Removes the request's resource identifiers from the named relationship.
  def delete_from_relationship(request, relationship_name) do
    chain(request, fn %{api: api, assigns: %{result: result}} ->
      # NOTE(review): these :relationships params look like a leftover of an
      # older Ash API — the removal is already expressed via the changeset
      # below. Confirm whether api.update/2 still consumes this option.
      params = [
        relationships: %{
          relationship_name => %{
            remove: request.resource_identifiers
          }
        }
      ]
      params =
        if AshJsonApi.authorize?(request.api) do
          Keyword.put(params, :actor, request.actor)
        else
          params
        end
      result
      |> Ash.Changeset.new()
      |> Ash.Changeset.remove_from_relationship(relationship_name, request.resource_identifiers)
      |> Ash.Changeset.set_tenant(request.tenant)
      |> Ash.Changeset.set_arguments(request.arguments)
      |> api.update(params)
      |> api.load(fields(request, request.resource))
      |> case do
        {:ok, updated} ->
          request
          |> Request.assign(:record_from_path, updated)
          |> Request.assign(:result, Map.get(updated, relationship_name))
        {:error, error} ->
          Request.add_error(request, error, :delete_from_relationship)
      end
    end)
  end
  # Destroys the current result record; on success clears :result to nil.
  def destroy_record(request) do
    chain(request, fn %{api: api, assigns: %{result: result}} ->
      params =
        if AshJsonApi.authorize?(request.api) do
          [
            action: request.action,
            actor: request.actor
          ]
        else
          [action: request.action]
        end
      result
      |> Ash.Changeset.new()
      |> Ash.Changeset.set_tenant(request.tenant)
      |> api.destroy(params)
      |> case do
        :ok ->
          Request.assign(request, :result, nil)
        {:error, error} ->
          Request.add_error(request, error, :destroy)
      end
    end)
  end
  # Builds a filter map from path params, keeping only keys that correspond to
  # public attributes of the resource (unknown path params are ignored).
  defp path_filter(path_params, resource) do
    Enum.reduce(path_params, %{}, fn {key, value}, acc ->
      case Ash.Resource.Info.public_attribute(resource, key) do
        nil ->
          acc
        attribute ->
          Map.put(acc, attribute.name, value)
      end
    end)
  end
  # Fetches the single record addressed by the URL path (optionally through an
  # intermediate resource for relationship routes). Expects exactly one match;
  # anything else becomes a NotFound error tagged :fetch_from_path.
  def fetch_record_from_path(request, through_resource \\ nil) do
    chain(request, fn %{api: api, resource: request_resource} = request ->
      resource = through_resource || request_resource
      filter = path_filter(request.path_params, resource)
      query =
        resource
        |> Ash.Query.filter(^filter)
        |> Ash.Query.set_tenant(request.tenant)
        |> Ash.Query.set_arguments(request.arguments)
      # Only pass the route's action for plain reads; pagination is always off
      # because the path addresses a single record.
      params =
        if through_resource || request.action.type != :read do
          [page: false]
        else
          [
            action: request.action,
            page: false
          ]
        end
      params =
        if AshJsonApi.authorize?(api) do
          Keyword.put(params, :actor, request.actor)
        else
          params
        end
      fields_to_load =
        if through_resource do
          []
        else
          fields(request, request.resource)
        end
      with {:ok, [record]} when not is_nil(record) <- api.read(query, params),
           {:ok, record} <- api.load(record, fields_to_load ++ (request.includes_keyword || [])) do
        request
        |> Request.assign(:result, record)
        |> Request.assign(:record_from_path, record)
      else
        # Zero or multiple matches fall through here and become a 404.
        {:ok, _} ->
          error = Error.NotFound.new(filter: filter, resource: resource)
          Request.add_error(request, error, :fetch_from_path)
        {:error, error} ->
          Request.add_error(request, error, :fetch_from_path)
      end
    end)
  end
  # Loads the related records for a relationship route: builds a destination
  # query (filter/sort/fields/includes, optionally paginated via the
  # :override_api_params context) and loads it through the source record.
  def fetch_related(request) do
    request
    |> chain(fn %{
                  api: api,
                  assigns: %{result: %source_resource{} = record},
                  relationship: relationship
                } = request ->
      relationship = Ash.Resource.Info.public_relationship(source_resource, relationship)
      sort = request.sort || default_sort(request.resource)
      load_params =
        if Map.get(request.assigns, :page) do
          [page: request.assigns.page]
        else
          []
        end
      destination_query =
        relationship.destination
        |> Ash.Query.new(request.api)
        |> Ash.Query.filter(^request.filter)
        |> Ash.Query.sort(sort)
        |> Ash.Query.load(request.includes_keyword)
        |> Ash.Query.load(fields(request, request.resource))
        |> Ash.Query.put_context(:override_api_params, load_params)
      origin_query =
        source_resource
        |> Ash.Query.new(request.api)
        |> Ash.Query.load([{relationship.name, destination_query}])
        |> Ash.Query.set_tenant(request.tenant)
      params =
        if AshJsonApi.authorize?(api) do
          [actor: request.actor]
        else
          []
        end
      case api.load(
             record,
             origin_query,
             params
           ) do
        {:ok, record} ->
          paginated_result =
            record
            |> Map.get(relationship.name)
            |> paginator_or_list()
          request
          |> Request.assign(:record_from_path, record)
          |> Request.assign(:result, paginated_result)
        {:error, error} ->
          Request.add_error(request, error, :fetch_related)
      end
    end)
  end
  # Normalizes a loaded relationship value: keeps Ash paginators as-is and
  # wraps everything else (single record or nil) in a list.
  defp paginator_or_list(result) do
    case result do
      %{results: _} = paginator ->
        paginator
      other ->
        List.wrap(other)
    end
  end
  # Requested sparse fieldset for `resource`, or [] when none was requested.
  defp fields(request, resource) do
    Map.get(request.fields, resource) || []
  end
  # Default sort: ascending creation time when the resource has a
  # created_at/inserted_at public attribute; otherwise the primary key.
  defp default_sort(resource) do
    created_at =
      Ash.Resource.Info.public_attribute(resource, :created_at) ||
        Ash.Resource.Info.public_attribute(resource, :inserted_at)
    if created_at do
      [{created_at.name, :asc}]
    else
      Ash.Resource.Info.primary_key(resource)
    end
  end
  # Requires an "id" path parameter and assigns it; errors otherwise.
  def fetch_id_path_param(request) do
    chain(request, fn request ->
      case request.path_params do
        %{"id" => id} ->
          Request.assign(request, :id, id)
        _ ->
          Request.add_error(
            request,
            "id path parameter not present in get route: #{request.url}",
            :id_path_param
          )
      end
    end)
  end
  # This doesn't need to use chain, because its stateless and safe to
  # do anytime. Returning multiple errors is a nice feature of JSON API
  def fetch_pagination_parameters(request) do
    request
    |> add_pagination_parameter(:limit, :integer)
    |> add_pagination_parameter(:offset, :integer)
    |> add_pagination_parameter(:after, :string)
    |> add_pagination_parameter(:before, :string)
    |> add_pagination_parameter(:count, :boolean)
  end
  # Reads query_params["page"][parameter], casts it to `type`, and merges it
  # into the :page assign; a failed cast adds an InvalidPagination error.
  # Missing "page" or missing parameter leaves the request untouched.
  defp add_pagination_parameter(request, parameter, type) do
    with %{"page" => page} <- request.query_params,
         {:ok, value} <- Map.fetch(page, to_string(parameter)) do
      case cast_pagination_parameter(value, type) do
        {:ok, value} ->
          Request.update_assign(
            request,
            :page,
            [{parameter, value}],
            &Keyword.put(&1, parameter, value)
          )
        :error ->
          Request.add_error(
            request,
            Error.InvalidPagination.new(source_parameter: "page[#{parameter}]"),
            :read
          )
      end
    else
      _ ->
        request
    end
  end
  # Strict integer cast: trailing garbage (e.g. "10x") is rejected.
  defp cast_pagination_parameter(value, :integer) do
    case Integer.parse(value) do
      {integer, ""} ->
        {:ok, integer}
      _ ->
        :error
    end
  end
  defp cast_pagination_parameter("true", :boolean), do: {:ok, true}
  defp cast_pagination_parameter("false", :boolean), do: {:ok, false}
  defp cast_pagination_parameter(_, :boolean), do: :error
  defp cast_pagination_parameter(value, :string) when is_binary(value) do
    {:ok, value}
  end
  defp cast_pagination_parameter(_, _), do: :error
  # Core control-flow helper: runs `func` only while the request is error-free;
  # once errors exist, either invokes the :fallback option or passes the
  # request through unchanged.
  def chain(request, func, opts \\ []) do
    case request.errors do
      [] ->
        func.(request)
      _ ->
        case Keyword.fetch(opts, :fallback) do
          {:ok, fallback} ->
            fallback.(request)
          _ ->
            request
        end
    end
  end
  # @spec with_request(
  #         Plug.Conn.t(),
  #         Ash.Resource.t(),
  #         Ash.action(),
  #         (AshJsonApi.Request.t() -> Plug.Conn.t())
  #       ) :: Plug.Conn.t()
  # def with_request(conn, resource, action, function) do
  #   case AshJsonApi.Request.from(conn, resource, action) do
  #     %{errors: []} = request ->
  #       function.(request)
  #     %{errors: errors} = request ->
  #       Response.render_errors(conn, request, errors)
  #   end
  # end
end
| 27.827112
| 99
| 0.588323
|
9e6afa26a155f8fee0908d20c9a80c1d2947087c
| 2,148
|
ex
|
Elixir
|
lib/challenge_gov/saved_challenges.ex
|
jennstein2017/Challenge_gov
|
e0820df8b124a32ff8b78cb827ae43551492988b
|
[
"CC0-1.0"
] | 9
|
2020-02-26T20:24:38.000Z
|
2022-03-22T21:14:52.000Z
|
lib/challenge_gov/saved_challenges.ex
|
jennstein2017/Challenge_gov
|
e0820df8b124a32ff8b78cb827ae43551492988b
|
[
"CC0-1.0"
] | 15
|
2020-04-22T19:33:24.000Z
|
2022-03-26T15:11:17.000Z
|
lib/challenge_gov/saved_challenges.ex
|
jennstein2017/Challenge_gov
|
e0820df8b124a32ff8b78cb827ae43551492988b
|
[
"CC0-1.0"
] | 4
|
2020-04-27T22:58:57.000Z
|
2022-01-14T13:42:09.000Z
|
defmodule ChallengeGov.SavedChallenges do
  @moduledoc """
  Context for saved challenges
  """
  @behaviour Stein.Filter
  import Ecto.Query
  alias ChallengeGov.Challenges
  alias ChallengeGov.GovDelivery
  alias ChallengeGov.Repo
  alias ChallengeGov.SavedChallenges.SavedChallenge
  alias Stein.Filter
  @doc false
  # Paginated, filterable listing of a single user's saved challenges.
  def all(user, opts \\ []) do
    query =
      SavedChallenge
      |> base_preload()
      |> where([sc], sc.user_id == ^user.id)
      |> Filter.filter(opts[:filter], __MODULE__)

    Repo.paginate(query, opts[:page], opts[:per])
  end
  # Fetches a saved challenge by id with associations preloaded.
  def get(id) do
    query =
      SavedChallenge
      |> base_preload()
      |> where([sc], sc.id == ^id)

    case Repo.one(query) do
      nil -> {:error, :not_found}
      saved_challenge -> {:ok, saved_challenge}
    end
  end
  # Fetches a saved challenge by id without preloads.
  def get_saved_challenge(id) do
    case Repo.one(from(sc in SavedChallenge, where: sc.id == ^id)) do
      nil -> {:error, :not_found}
      saved_challenge -> {:ok, saved_challenge}
    end
  end
  # Saves a challenge for a user; only live, public challenges may be saved.
  # Also subscribes the user to the relevant GovDelivery topics.
  def create(user, challenge) do
    if challenge.deleted_at == nil and Challenges.is_public?(challenge) do
      result =
        %SavedChallenge{}
        |> SavedChallenge.changeset(user, challenge)
        |> Repo.insert()

      GovDelivery.subscribe_user_general(user)
      GovDelivery.subscribe_user_challenge(user, challenge)

      result
    else
      {:error, :not_saved}
    end
  end
  # Deletes a saved challenge, but only for its owner.
  def delete(user, saved_challenge) do
    if user.id === saved_challenge.user_id,
      do: Repo.delete(saved_challenge),
      else: {:error, :not_allowed}
  end
  # Confirms the given user owns the saved challenge.
  def check_manager(user, saved_challenge) do
    if user.id === saved_challenge.user_id,
      do: {:ok, saved_challenge},
      else: {:error, :wrong_manager}
  end
  # Shared preload of the owning user and the challenge (with its agency).
  defp base_preload(query) do
    preload(query, [:user, challenge: [:agency]])
  end
  # Number of users who have saved the given challenge.
  def count_for_challenge(challenge) do
    Repo.one(
      from(sc in SavedChallenge,
        where: sc.challenge_id == ^challenge.id,
        select: count(sc)
      )
    )
  end
  @impl Stein.Filter
  def filter_on_attribute({"user_id", value}, query) do
    where(query, [sc], sc.user_id == ^value)
  end
end
| 21.918367
| 75
| 0.637803
|
9e6b0371063781488f44e6f1c15338891d9c6d20
| 41,297
|
ex
|
Elixir
|
clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/api/projects.ex
|
linjunpop/elixir-google-api
|
444cb2b2fb02726894535461a474beddd8b86db4
|
[
"Apache-2.0"
] | null | null | null |
clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/api/projects.ex
|
linjunpop/elixir-google-api
|
444cb2b2fb02726894535461a474beddd8b86db4
|
[
"Apache-2.0"
] | null | null | null |
clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/api/projects.ex
|
linjunpop/elixir-google-api
|
444cb2b2fb02726894535461a474beddd8b86db4
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.BigQueryDataTransfer.V1.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.BigQueryDataTransfer.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Returns true if valid credentials exist for the given data source and requesting user. Some data sources doesn't support service account, so we need to talk to them on behalf of the end user. This API just checks whether we have OAuth token for the particular user, which is a pre-requisite before user can create a transfer config.
## Parameters
- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- name (String.t): The data source in the form: `projects/{project_id}/dataSources/{data_source_id}`
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :body (CheckValidCredsRequest):
## Returns
{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.CheckValidCredsResponse{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_data_sources_check_valid_creds(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok, GoogleApi.BigQueryDataTransfer.V1.Model.CheckValidCredsResponse.t()}
| {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_data_sources_check_valid_creds(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+name}:checkValidCreds", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.CheckValidCredsResponse{}]
)
end
@doc """
Lists supported data sources and returns their settings, which can be used for UI rendering.
## Parameters
- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- parent (String.t): The BigQuery project id for which data sources should be returned. Must be in the form: `projects/{project_id}`
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :pageSize (integer()): Page size. The default page size is the maximum value of 1000 results.
- :pageToken (String.t): Pagination token, which can be used to request a specific page of `ListDataSourcesRequest` list results. For multiple-page results, `ListDataSourcesResponse` outputs a `next_page` token, which can be used as the `page_token` value to request the next page of list results.
## Returns
{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.ListDataSourcesResponse{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_data_sources_list(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok, GoogleApi.BigQueryDataTransfer.V1.Model.ListDataSourcesResponse.t()}
| {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_data_sources_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+parent}/dataSources", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.ListDataSourcesResponse{}]
)
end
@doc """
Lists information about the supported locations for this service.
## Parameters
- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- name (String.t): The resource that owns the locations collection, if applicable.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :filter (String.t): The standard list filter.
- :pageSize (integer()): The standard list page size.
- :pageToken (String.t): The standard list page token.
## Returns
{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.ListLocationsResponse{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_list(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.BigQueryDataTransfer.V1.Model.ListLocationsResponse.t()}
| {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_list(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}/locations", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.ListLocationsResponse{}]
)
end
@doc """
Creates a new data transfer configuration.
## Parameters
- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- parent (String.t): The BigQuery project id where the transfer configuration should be created. Must be in the format projects/{project_id}/locations/{location_id} If specified location and location of the destination bigquery dataset do not match - the request will fail.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :authorizationCode (String.t): Optional OAuth2 authorization code to use with this transfer configuration. This is required if new credentials are needed, as indicated by `CheckValidCreds`. In order to obtain authorization_code, please make a request to https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri> * client_id should be OAuth client_id of BigQuery DTS API for the given data source returned by ListDataSources method. * data_source_scopes are the scopes returned by ListDataSources method. * redirect_uri is an optional parameter. If not specified, then authorization code is posted to the opener of authorization flow window. Otherwise it will be sent to the redirect uri. A special value of urn:ietf:wg:oauth:2.0:oob means that authorization code should be returned in the title bar of the browser, with the page text prompting the user to copy the code and paste it in the application.
- :versionInfo (String.t): Optional version info. If users want to find a very recent access token, that is, immediately after approving access, users have to set the version_info claim in the token request. To obtain the version_info, users must use the “none+gsession” response type. which be return a version_info back in the authorization response which be be put in a JWT claim in the token request.
- :body (TransferConfig):
## Returns
{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.TransferConfig{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_create(
Tesla.Env.client(),
String.t(),
keyword()
) ::
{:ok, GoogleApi.BigQueryDataTransfer.V1.Model.TransferConfig.t()}
| {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_create(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:upload_protocol => :query,
:uploadType => :query,
:authorizationCode => :query,
:versionInfo => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+parent}/transferConfigs", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.TransferConfig{}]
)
end
@doc """
Returns information about all data transfers in the project.
## Parameters
- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- parent (String.t): The BigQuery project id for which data sources should be returned: `projects/{project_id}`.
- optional_params (KeywordList): [optional] Optional parameters
- :$.xgafv (String.t): V1 error format.
- :access_token (String.t): OAuth access token.
- :alt (String.t): Data format for response.
- :callback (String.t): JSONP
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :dataSourceIds ([String.t]): When specified, only configurations of requested data sources are returned.
- :pageSize (integer()): Page size. The default page size is the maximum value of 1000 results.
- :pageToken (String.t): Pagination token, which can be used to request a specific page of `ListTransfersRequest` list results. For multiple-page results, `ListTransfersResponse` outputs a `next_page` token, which can be used as the `page_token` value to request the next page of list results.
## Returns
{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferConfigsResponse{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_list(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferConfigsResponse.t()}
        | {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_list(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Every optional parameter supported by this endpoint travels in the
  # query string.
  param_locations =
    [
      :"$.xgafv",
      :access_token,
      :alt,
      :callback,
      :fields,
      :key,
      :oauth_token,
      :prettyPrint,
      :quotaUser,
      :upload_protocol,
      :uploadType,
      :dataSourceIds,
      :pageSize,
      :pageToken
    ]
    |> Map.new(&{&1, :query})

  req =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+parent}/transferConfigs", %{
      "parent" => URI.encode(parent, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(param_locations, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferConfigsResponse{}]
  )
end
@doc """
Updates a data transfer configuration. All fields must be set, even if they
are not updated.

## Parameters

- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- name (String.t): The resource name of the transfer config, in the form
  `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`. The
  name is generated automatically from the config_id given in
  CreateTransferConfigRequest together with project_id and region.
- optional_params (KeywordList): [optional] The standard Google API query
  parameters (`:fields`, `:key`, `:quotaUser`, `:access_token`, ...) plus:
  - :authorizationCode (String.t): Optional OAuth2 authorization code to
    associate the transfer configuration with the authorizing user.
  - :updateMask (String.t): Required list of fields to be updated in this request.
  - :versionInfo (String.t): Optional version info obtained via the
    "none+gsession" response type; it is forwarded as a JWT claim in the
    token request.
  - :body (TransferConfig): The updated transfer configuration.

## Returns

{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.TransferConfig{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_patch(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryDataTransfer.V1.Model.TransferConfig.t()}
        | {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_patch(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # All optional parameters are query parameters except :body, which is
  # sent as the request payload.
  param_locations =
    [
      :"$.xgafv",
      :access_token,
      :alt,
      :callback,
      :fields,
      :key,
      :oauth_token,
      :prettyPrint,
      :quotaUser,
      :upload_protocol,
      :uploadType,
      :authorizationCode,
      :updateMask,
      :versionInfo
    ]
    |> Map.new(&{&1, :query})
    |> Map.put(:body, :body)

  req =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1/{+name}", %{"name" => URI.encode(name, &URI.char_unreserved?/1)})
    |> Request.add_optional_params(param_locations, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.TransferConfig{}])
end
@doc """
Deletes the specified transfer run.

## Parameters

- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- name (String.t): Name of the transfer run to delete, for example:
  `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`
- optional_params (KeywordList): [optional] The standard Google API query
  parameters (`:fields`, `:key`, `:quotaUser`, `:access_token`, ...).

## Returns

{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.Empty{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_runs_delete(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) :: {:ok, GoogleApi.BigQueryDataTransfer.V1.Model.Empty.t()} | {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_runs_delete(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Only the standard Google API parameters apply here; all go in the
  # query string.
  param_locations =
    [
      :"$.xgafv",
      :access_token,
      :alt,
      :callback,
      :fields,
      :key,
      :oauth_token,
      :prettyPrint,
      :quotaUser,
      :upload_protocol,
      :uploadType
    ]
    |> Map.new(&{&1, :query})

  req =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/v1/{+name}", %{"name" => URI.encode(name, &URI.char_unreserved?/1)})
    |> Request.add_optional_params(param_locations, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.Empty{}])
end
@doc """
Returns information about the particular transfer run.

## Parameters

- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- name (String.t): Name of the requested transfer run, for example:
  `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`
- optional_params (KeywordList): [optional] The standard Google API query
  parameters (`:fields`, `:key`, `:quotaUser`, `:access_token`, ...).

## Returns

{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.TransferRun{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_runs_get(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryDataTransfer.V1.Model.TransferRun.t()} | {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_runs_get(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Only the standard Google API parameters apply here; all go in the
  # query string.
  param_locations =
    [
      :"$.xgafv",
      :access_token,
      :alt,
      :callback,
      :fields,
      :key,
      :oauth_token,
      :prettyPrint,
      :quotaUser,
      :upload_protocol,
      :uploadType
    ]
    |> Map.new(&{&1, :query})

  req =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+name}", %{"name" => URI.encode(name, &URI.char_unreserved?/1)})
    |> Request.add_optional_params(param_locations, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.TransferRun{}])
end
@doc """
Returns information about running and completed jobs.

## Parameters

- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- parent (String.t): Name of the transfer configuration whose runs should be
  retrieved, in the form `projects/{project_id}/transferConfigs/{config_id}`.
- optional_params (KeywordList): [optional] The standard Google API query
  parameters (`:fields`, `:key`, `:quotaUser`, `:access_token`, ...) plus:
  - :pageSize (integer()): Page size; defaults to the maximum of 1000 results.
  - :pageToken (String.t): Pagination token from a previous
    `ListTransferRunsResponse` used to request the next page of results.
  - :runAttempt (String.t): Indicates how run attempts are to be pulled.
  - :states ([String.t]): When specified, only transfer runs with the
    requested states are returned.

## Returns

{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferRunsResponse{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_runs_list(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferRunsResponse.t()}
        | {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_runs_list(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Every optional parameter supported by this endpoint travels in the
  # query string.
  param_locations =
    [
      :"$.xgafv",
      :access_token,
      :alt,
      :callback,
      :fields,
      :key,
      :oauth_token,
      :prettyPrint,
      :quotaUser,
      :upload_protocol,
      :uploadType,
      :pageSize,
      :pageToken,
      :runAttempt,
      :states
    ]
    |> Map.new(&{&1, :query})

  req =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+parent}/runs", %{
      "parent" => URI.encode(parent, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(param_locations, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferRunsResponse{}]
  )
end
@doc """
Returns user facing log messages for the data transfer run.

## Parameters

- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- parent (String.t): Transfer run name in the form:
  `projects/{project_id}/transferConfigs/{config_Id}/runs/{run_id}`.
- optional_params (KeywordList): [optional] The standard Google API query
  parameters (`:fields`, `:key`, `:quotaUser`, `:access_token`, ...) plus:
  - :messageTypes ([String.t]): Message types to return. If not populated,
    INFO, WARNING and ERROR messages are returned.
  - :pageSize (integer()): Page size; defaults to the maximum of 1000 results.
  - :pageToken (String.t): Pagination token from a previous
    `ListTransferLogsResponse` used to request the next page of results.

## Returns

{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferLogsResponse{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_runs_transfer_logs_list(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferLogsResponse.t()}
        | {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_runs_transfer_logs_list(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Every optional parameter supported by this endpoint travels in the
  # query string.
  param_locations =
    [
      :"$.xgafv",
      :access_token,
      :alt,
      :callback,
      :fields,
      :key,
      :oauth_token,
      :prettyPrint,
      :quotaUser,
      :upload_protocol,
      :uploadType,
      :messageTypes,
      :pageSize,
      :pageToken
    ]
    |> Map.new(&{&1, :query})

  req =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+parent}/transferLogs", %{
      "parent" => URI.encode(parent, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(param_locations, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.ListTransferLogsResponse{}]
  )
end
@doc """
Creates transfer runs for a time range [start_time, end_time]. For each date
— or whatever granularity the data source supports — in the range, one
transfer run is created. Note that runs are created per UTC time in the time
range. DEPRECATED: use StartManualTransferRuns instead.

## Parameters

- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- parent (String.t): Transfer configuration name in the form:
  `projects/{project_id}/transferConfigs/{config_id}`.
- optional_params (KeywordList): [optional] The standard Google API query
  parameters (`:fields`, `:key`, `:quotaUser`, `:access_token`, ...) plus:
  - :body (ScheduleTransferRunsRequest): The request payload.

## Returns

{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.ScheduleTransferRunsResponse{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_schedule_runs(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryDataTransfer.V1.Model.ScheduleTransferRunsResponse.t()}
        | {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_schedule_runs(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # All optional parameters are query parameters except :body, which is
  # sent as the request payload.
  param_locations =
    [
      :"$.xgafv",
      :access_token,
      :alt,
      :callback,
      :fields,
      :key,
      :oauth_token,
      :prettyPrint,
      :quotaUser,
      :upload_protocol,
      :uploadType
    ]
    |> Map.new(&{&1, :query})
    |> Map.put(:body, :body)

  req =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+parent}:scheduleRuns", %{
      "parent" => URI.encode(parent, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(param_locations, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.ScheduleTransferRunsResponse{}]
  )
end
@doc """
Start manual transfer runs to be executed now with schedule_time equal to
the current time. The transfer runs can be created for a time range where
the run_time is between start_time (inclusive) and end_time (exclusive), or
for a specific run_time.

## Parameters

- connection (GoogleApi.BigQueryDataTransfer.V1.Connection): Connection to server
- parent (String.t): Transfer configuration name in the form:
  `projects/{project_id}/transferConfigs/{config_id}`.
- optional_params (KeywordList): [optional] The standard Google API query
  parameters (`:fields`, `:key`, `:quotaUser`, `:access_token`, ...) plus:
  - :body (StartManualTransferRunsRequest): The request payload.

## Returns

{:ok, %GoogleApi.BigQueryDataTransfer.V1.Model.StartManualTransferRunsResponse{}} on success
{:error, info} on failure
"""
@spec bigquerydatatransfer_projects_locations_transfer_configs_start_manual_runs(
        Tesla.Env.client(),
        String.t(),
        keyword()
      ) ::
        {:ok, GoogleApi.BigQueryDataTransfer.V1.Model.StartManualTransferRunsResponse.t()}
        | {:error, Tesla.Env.t()}
def bigquerydatatransfer_projects_locations_transfer_configs_start_manual_runs(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # All optional parameters are query parameters except :body, which is
  # sent as the request payload.
  param_locations =
    [
      :"$.xgafv",
      :access_token,
      :alt,
      :callback,
      :fields,
      :key,
      :oauth_token,
      :prettyPrint,
      :quotaUser,
      :upload_protocol,
      :uploadType
    ]
    |> Map.new(&{&1, :query})
    |> Map.put(:body, :body)

  req =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+parent}:startManualRuns", %{
      "parent" => URI.encode(parent, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(param_locations, optional_params)

  connection
  |> Connection.execute(req)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.BigQueryDataTransfer.V1.Model.StartManualTransferRunsResponse{}]
  )
end
end
| 47.467816
| 1,062
| 0.678427
|
9e6b03cf60af4f1b30c51202bd33490a338dcc5a
| 1,660
|
ex
|
Elixir
|
clients/compute/lib/google_api/compute/v1/model/network_endpoint_groups_attach_endpoints_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/compute/lib/google_api/compute/v1/model/network_endpoint_groups_attach_endpoints_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/compute/lib/google_api/compute/v1/model/network_endpoint_groups_attach_endpoints_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.NetworkEndpointGroupsAttachEndpointsRequest do
  @moduledoc """
  Request model carrying the network endpoints to be attached to a network
  endpoint group.

  ## Attributes

  * `networkEndpoints` (*type:* `list(GoogleApi.Compute.V1.Model.NetworkEndpoint.t)`, *default:* `nil`) - The list of network endpoints to be attached.
  """

  # ModelBase provides the `field/2,3` macro plus JSON encode/decode helpers.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :networkEndpoints => list(GoogleApi.Compute.V1.Model.NetworkEndpoint.t()) | nil
        }

  # Decoded as a list of NetworkEndpoint sub-models.
  field(:networkEndpoints, as: GoogleApi.Compute.V1.Model.NetworkEndpoint, type: :list)
end
defimpl Poison.Decoder,
  for: GoogleApi.Compute.V1.Model.NetworkEndpointGroupsAttachEndpointsRequest do
  # Delegate to the decode/2 generated for the model by ModelBase.
  def decode(value, options),
    do:
      GoogleApi.Compute.V1.Model.NetworkEndpointGroupsAttachEndpointsRequest.decode(
        value,
        options
      )
end
defimpl Poison.Encoder,
  for: GoogleApi.Compute.V1.Model.NetworkEndpointGroupsAttachEndpointsRequest do
  # Encoding is shared across all models, so delegate to ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 33.877551
| 153
| 0.760843
|
9e6b06448f3678b9646e7988b9ccac3f44287385
| 2,573
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v2/model/accounts_custom_batch_response_entry.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/accounts_custom_batch_response_entry.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/accounts_custom_batch_response_entry.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.AccountsCustomBatchResponseEntry do
  @moduledoc """
  A batch entry encoding a single non-batch accounts response.

  ## Attributes

  * `account` (*type:* `GoogleApi.Content.V2.Model.Account.t`, *default:* `nil`) - The retrieved, created, or updated account. Not defined if the method was `delete`, `claimwebsite` or `link`.
  * `batchId` (*type:* `integer()`, *default:* `nil`) - The ID of the request entry this entry responds to.
  * `errors` (*type:* `GoogleApi.Content.V2.Model.Errors.t`, *default:* `nil`) - A list of errors defined if and only if the request failed.
  * `kind` (*type:* `String.t`, *default:* `content#accountsCustomBatchResponseEntry`) - Identifies what kind of resource this is. Value: the fixed string "`content#accountsCustomBatchResponseEntry`"
  * `linkStatus` (*type:* `String.t`, *default:* `nil`) - Deprecated. This field is never set.

      Acceptable values are:
      - "`active`"
      - "`inactive`"
      - "`pending`"
  """

  # ModelBase provides the `field/2,3` macro plus JSON encode/decode helpers.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :account => GoogleApi.Content.V2.Model.Account.t(),
          :batchId => integer(),
          :errors => GoogleApi.Content.V2.Model.Errors.t(),
          :kind => String.t(),
          :linkStatus => String.t()
        }

  # `account` and `errors` are decoded into their own sub-models; the
  # remaining fields are plain scalars.
  field(:account, as: GoogleApi.Content.V2.Model.Account)
  field(:batchId)
  field(:errors, as: GoogleApi.Content.V2.Model.Errors)
  field(:kind)
  field(:linkStatus)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.AccountsCustomBatchResponseEntry do
  # Delegate to the decode/2 generated for the model by ModelBase.
  def decode(value, options),
    do: GoogleApi.Content.V2.Model.AccountsCustomBatchResponseEntry.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.AccountsCustomBatchResponseEntry do
  # Encoding is shared across all models, so delegate to ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 40.203125
| 201
| 0.707346
|
9e6b09ff4fbea17f1a7fe0f8722fca41d4b8772c
| 1,411
|
ex
|
Elixir
|
lib/uro_web/controllers/session_controller.ex
|
V-Sekai/uro
|
0b23da65d5c7e459efcd6b2c3d9bdf91c533b737
|
[
"MIT"
] | 1
|
2022-01-11T04:05:39.000Z
|
2022-01-11T04:05:39.000Z
|
lib/uro_web/controllers/session_controller.ex
|
V-Sekai/uro
|
0b23da65d5c7e459efcd6b2c3d9bdf91c533b737
|
[
"MIT"
] | 35
|
2021-02-10T08:18:57.000Z
|
2021-05-06T17:19:50.000Z
|
lib/uro_web/controllers/session_controller.ex
|
V-Sekai/uro
|
0b23da65d5c7e459efcd6b2c3d9bdf91c533b737
|
[
"MIT"
] | null | null | null |
defmodule UroWeb.SessionController do
  @moduledoc """
  Browser sign-in and sign-out actions built on Pow.
  """

  use UroWeb, :controller
  use UroWeb.Helpers.Auth

  # NOTE: `@doc` on a `defp` is discarded and emits a compiler warning, so the
  # private helpers below are documented with plain comments instead.

  # Successful login: flash a welcome message and go to the index page.
  defp login_valid(conn) do
    conn
    |> put_flash(:info, gettext("Welcome back!"))
    |> redirect(to: Routes.page_path(conn, :index))
  end

  # Failed login: re-render the sign-in form with the submitted changeset.
  defp login_invalid(conn) do
    conn
    |> put_flash(:info, gettext("Invalid email or password"))
    |> render("new.html", changeset: Pow.Plug.change_user(conn, conn.params["user"]))
  end

  # Authenticated but unconfirmed e-mail: drop the session and send the user
  # back to the sign-in page.
  defp email_unconfirmed(conn) do
    conn
    |> Pow.Plug.delete()
    |> put_flash(:info, gettext("Your e-mail address has not been confirmed."))
    |> redirect(to: Routes.signin_path(conn, :new))
  end

  @doc """
  Renders the sign-in form with an empty changeset.
  """
  def new(conn, _params) do
    changeset = Pow.Plug.change_user(conn)
    render(conn, "new.html", changeset: changeset)
  end

  @doc """
  Validates the submitted credentials. On success, checks that the account is
  not locked and that its e-mail address is confirmed before logging in;
  otherwise re-renders the form.
  """
  def create(conn, %{"user" => user_params}) do
    conn
    |> validate_login(user_params)
    |> case do
      {:ok, conn} ->
        conn
        |> Uro.EnsureUserNotLockedPlug.call(UroWeb.AuthErrorHandler)
        |> UroWeb.Helpers.Auth.verify_confirmed_or_send_confirmation_email()
        |> case do
          {:ok, conn} -> login_valid(conn)
          {:failed, conn} -> email_unconfirmed(conn)
        end

      {:error, conn} ->
        login_invalid(conn)
    end
  end

  @doc """
  Logs the current user out and redirects to the index page.
  """
  def delete(conn, _params) do
    conn
    |> Pow.Plug.delete()
    |> redirect(to: Routes.page_path(conn, :index))
  end
end
| 25.196429
| 85
| 0.639972
|
9e6b17e9640457758c53a415df54231bc240fe68
| 12,732
|
ex
|
Elixir
|
lib/rummage_ecto/hooks/sort.ex
|
ramansah/rummage_ecto
|
0f24fdccfe504e3c5b8337698446c17fefc60766
|
[
"MIT"
] | 1
|
2019-02-11T19:54:24.000Z
|
2019-02-11T19:54:24.000Z
|
lib/rummage_ecto/hooks/sort.ex
|
ramansah/rummage_ecto
|
0f24fdccfe504e3c5b8337698446c17fefc60766
|
[
"MIT"
] | null | null | null |
lib/rummage_ecto/hooks/sort.ex
|
ramansah/rummage_ecto
|
0f24fdccfe504e3c5b8337698446c17fefc60766
|
[
"MIT"
] | 2
|
2019-11-02T21:36:27.000Z
|
2021-03-02T15:58:31.000Z
|
defmodule Rummage.Ecto.Hook.Sort do
@moduledoc """
`Rummage.Ecto.Hook.Sort` is the default sort hook that comes with
`Rummage.Ecto`.
This module provides a operations that can add sorting functionality to
a pipeline of `Ecto` queries. This module works by taking the `field` that should
be used to `order_by`, `order` which can be `asc` or `desc` and `assoc`,
which is a keyword list of assocations associated with those `fields`.
NOTE: This module doesn't return a list of entries, but a `Ecto.Query.t`.
This module `uses` `Rummage.Ecto.Hook`.
_____________________________________________________________________________
# ABOUT:
## Arguments:
This Hook expects a `queryable` (an `Ecto.Queryable`) and
`sort_params` (a `Map`). The map should be in the format:
`%{field: :field_name, assoc: [], order: :asc}`
Details:
* `field`: The field name (atom) to sorted by.
* `assoc`: List of associations in the sort.
* `order`: Specifies the type of order `asc` or `desc`.
* `ci` : Case Insensitivity. Defaults to `false`
For example, if we want to sort products with descending `price`, we would
do the following:
```elixir
Rummage.Ecto.Hook.Sort.run(Product, %{field: :price,
assoc: [], order: :desc})
```
## Assoications:
Assocaitions can be given to this module's run function as a key corresponding
to params associated with a field. For example, if we want to sort products
that belong to a category by ascending category_name, we would do the
following:
```elixir
params = %{field: :category_name, assoc: [inner: :category],
order: :asc}
Rummage.Ecto.Hook.Sort.run(Product, params)
```
The above operation will return an `Ecto.Query.t` struct which represents
a query equivalent to:
```elixir
from p in Product
|> join(:inner, :category)
|> order_by([p, c], {asc, c.category_name})
```
____________________________________________________________________________
# ASSUMPTIONS/NOTES:
* This Hook has the default `order` of `:asc`.
* This Hook has the default `assoc` of `[]`.
* This Hook assumes that the field passed is a field on the `Ecto.Schema`
that corresponds to the last association in the `assoc` list or the `Ecto.Schema`
that corresponds to the `from` in `queryable`, if `assoc` is an empty list.
NOTE: It is adviced to not use multiple associated sorts in one operation
as `assoc` still has some minor bugs when used with multiple sorts. If you
need to use two sorts with associations, I would pipe the call to another
sort operation:
```elixir
Sort.run(queryable, params1}
|> Sort.run(%{field2: params2}
```
____________________________________________________________________________
# USAGE:
For a regular sort:
This returns a `queryable` which upon running will give a list of `Parent`(s)
sorted by ascending `field_1`
```elixir
alias Rummage.Ecto.Hook.Sort
sorted_queryable = Sort.run(Parent, %{assoc: [], field: :name, order: :asc}})
```
For a case-insensitive sort:
This returns a `queryable` which upon running will give a list of `Parent`(s)
sorted by ascending case insensitive `field_1`.
Keep in mind that `case_insensitive` can only be called for `text` fields
```elixir
alias Rummage.Ecto.Hook.Sort
sorted_queryable = Sort.run(Parent, %{assoc: [], field: :name, order: :asc, ci: true}})
```
This module can be overridden with a custom module while using `Rummage.Ecto`
in `Ecto` struct module.
In the `Ecto` module:
```elixir
Rummage.Ecto.rummage(queryable, rummage, sort: CustomHook)
```
OR
Globally for all models in `config.exs`:
```elixir
config :rummage_ecto,
Rummage.Ecto,
.sort: CustomHook
```
The `CustomHook` must use `Rummage.Ecto.Hook`. For examples of `CustomHook`,
check out some `custom_hooks` that are shipped with `Rummage.Ecto`:
`Rummage.Ecto.CustomHook.SimpleSearch`, `Rummage.Ecto.CustomHook.SimpleSort`,
Rummage.Ecto.CustomHook.SimplePaginate
"""
use Rummage.Ecto.Hook
import Ecto.Query
@expected_keys ~w{field order assoc}a
@err_msg ~s{Error in params, No values given for keys: }
# Only for Postgres (only one interpolation is supported)
# TODO: Fix this once Ecto 3.0 comes out with `unsafe_fragment`
@supported_fragments_one ["date_part('day', ?)",
"date_part('month', ?)",
"date_part('year', ?)",
"date_part('hour', ?)",
"lower(?)",
"upper(?)"]
@supported_fragments_two ["concat(?, ?)",
"coalesce(?, ?)"]
@doc """
This is the callback implementation of `Rummage.Ecto.Hook.run/2`.
Builds a sort `Ecto.Query.t` on top of the given `Ecto.Queryable` variable
using given `params`.
Besides an `Ecto.Query.t` an `Ecto.Schema` module can also be passed as it
implements `Ecto.Queryable`
Params is a `Map` which is expected to have the keys `#{Enum.join(@expected_keys, ", ")}`.
This funciton expects a `field` atom, `order` which can be `asc` or `desc`,
`ci` which is a boolean indicating the case-insensitivity and `assoc` which
is a list of associations with their join types.
## Examples
When an empty map is passed as `params`:
iex> alias Rummage.Ecto.Hook.Sort
iex> Sort.run(Parent, %{})
** (RuntimeError) Error in params, No values given for keys: field, order, assoc
When a non-empty map is passed as `params`, but with a missing key:
iex> alias Rummage.Ecto.Hook.Sort
iex> Sort.run(Parent, %{field: :name})
** (RuntimeError) Error in params, No values given for keys: order, assoc
When a valid map of params is passed with an `Ecto.Schema` module:
iex> alias Rummage.Ecto.Hook.Sort
iex> Sort.run(Rummage.Ecto.Product, %{field: :name, assoc: [], order: :asc})
#Ecto.Query<from p in subquery(from p in Rummage.Ecto.Product), order_by: [asc: p.name]>
When the `queryable` passed is an `Ecto.Query` variable:
iex> alias Rummage.Ecto.Hook.Sort
iex> import Ecto.Query
iex> queryable = from u in "products"
#Ecto.Query<from p in "products">
iex> Sort.run(queryable, %{field: :name, assoc: [], order: :asc})
#Ecto.Query<from p in subquery(from p in "products"), order_by: [asc: p.name]>
When the `queryable` passed is an `Ecto.Query` variable, with `desc` order:
iex> alias Rummage.Ecto.Hook.Sort
iex> import Ecto.Query
iex> queryable = from u in "products"
#Ecto.Query<from p in "products">
iex> Sort.run(queryable, %{field: :name, assoc: [], order: :desc})
#Ecto.Query<from p in subquery(from p in "products"), order_by: [desc: p.name]>
When the `queryable` passed is an `Ecto.Query` variable, with `ci` true:
iex> alias Rummage.Ecto.Hook.Sort
iex> import Ecto.Query
iex> queryable = from u in "products"
#Ecto.Query<from p in "products">
iex> Sort.run(queryable, %{field: :name, assoc: [], order: :asc, ci: true})
#Ecto.Query<from p in subquery(from p in "products"), order_by: [asc: fragment("lower(?)", p.name)]>
When the `queryable` passed is an `Ecto.Query` variable, with associations:
iex> alias Rummage.Ecto.Hook.Sort
iex> import Ecto.Query
iex> queryable = from u in "products"
#Ecto.Query<from p in "products">
iex> Sort.run(queryable, %{field: :name, assoc: [inner: :category, left: :category], order: :asc})
#Ecto.Query<from p in subquery(from p in "products"), join: c0 in assoc(p, :category), left_join: c1 in assoc(c0, :category), order_by: [asc: c1.name]>
When the `queryable` passed is an `Ecto.Schema` module with associations,
`desc` order and `ci` true:
iex> alias Rummage.Ecto.Hook.Sort
iex> queryable = Rummage.Ecto.Product
Rummage.Ecto.Product
iex> Sort.run(queryable, %{field: :name, assoc: [inner: :category], order: :desc, ci: true})
#Ecto.Query<from p in subquery(from p in Rummage.Ecto.Product), join: c in assoc(p, :category), order_by: [desc: fragment("lower(?)", c.name)]>
"""
@spec run(Ecto.Query.t(), map()) :: Ecto.Query.t()
def run(queryable, sort_params) do
:ok = validate_params(sort_params)
handle_sort(queryable, sort_params)
end
# Helper function which handles addition of paginated query on top of
# the sent queryable variable
defp handle_sort(queryable, sort_params) do
order = Map.get(sort_params, :order)
field = sort_params
|> Map.get(:field)
|> resolve_field(queryable)
assocs = Map.get(sort_params, :assoc)
ci = Map.get(sort_params, :ci, false)
assocs
|> Enum.reduce(from(e in subquery(queryable)), &join_by_assoc(&1, &2))
|> handle_ordering(field, order, ci)
end
# Helper function which handles associations in a query with a join
# type.
defp join_by_assoc({join, assoc}, query) do
join(query, join, [..., p1], p2 in assoc(p1, ^assoc))
end
# This is a helper macro to get case_insensitive query using fragments
defmacrop case_insensitive(field) do
quote do
fragment("lower(?)", unquote(field))
end
end
# NOTE: These functions can be used in future for multiple sort fields that
# are associated.
# defp applied_associations(queryable) when is_atom(queryable), do: []
# defp applied_associations(queryable), do: Enum.map(queryable.joins, & Atom.to_string(elem(&1.assoc, 1)))
# Helper function that handles adding order_by to a query based on order type
# case insensitivity and field
defp handle_ordering(queryable, field, order, ci) do
order_by_assoc(queryable, order, field, ci)
end
for fragment <- @supported_fragments_one do
defp order_by_assoc(queryable, order_type, {:fragment, unquote(fragment), field}, false) do
order_by(queryable, [p0, ..., p2], [{^order_type, fragment(unquote(fragment), field(p2, ^field))}])
end
defp order_by_assoc(queryable, order_type, {:fragment, unquote(fragment), field}, true) do
order_by(queryable, [p0, ..., p2],
[{^order_type, case_insensitive(fragment(unquote(fragment), field(p2, ^field)))}])
end
end
for fragment <- @supported_fragments_two do
defp order_by_assoc(queryable, order_type, {:fragment, unquote(fragment), field1, field2}, false) do
order_by(queryable, [p0, ..., p2], [{^order_type, fragment(unquote(fragment), field(p2, ^field1), field(p2, ^field2))}])
end
defp order_by_assoc(queryable, order_type, {:fragment, unquote(fragment), field1, field2}, true) do
order_by(queryable, [p0, ..., p2],
[{^order_type, case_insensitive(fragment(unquote(fragment), field(p2, ^field1), field(p2, ^field2)))}])
end
end
defp order_by_assoc(queryable, order_type, field, false) do
order_by(queryable, [p0, ..., p2], [{^order_type, field(p2, ^field)}])
end
defp order_by_assoc(queryable, order_type, field, true) do
order_by(queryable, [p0, ..., p2],
[{^order_type, case_insensitive(field(p2, ^field))}])
end
# Helper function that validates the list of params based on
# @expected_keys list
defp validate_params(params) do
key_validations = Enum.map(@expected_keys, &Map.fetch(params, &1))
case Enum.filter(key_validations, & &1 == :error) do
[] -> :ok
_ -> raise @err_msg <> missing_keys(key_validations)
end
end
# Helper function used to build error message using missing keys
defp missing_keys(key_validations) do
key_validations
|> Enum.with_index()
|> Enum.filter(fn {v, _i} -> v == :error end)
|> Enum.map(fn {_v, i} -> Enum.at(@expected_keys, i) end)
|> Enum.map(&to_string/1)
|> Enum.join(", ")
end
@doc """
Callback implementation for `Rummage.Ecto.Hook.format_params/2`.
This function ensures that params for each field have keys `assoc`, `order1
which are essential for running this hook module.
## Examples
iex> alias Rummage.Ecto.Hook.Sort
iex> Sort.format_params(Parent, %{}, [])
%{assoc: [], order: :asc}
"""
@spec format_params(Ecto.Query.t(), map() | tuple(), keyword()) :: map()
def format_params(queryable, {sort_scope, order}, opts) do
module = get_module(queryable)
name = :"__rummage_sort_#{sort_scope}"
sort_params = case function_exported?(module, name, 1) do
true -> apply(module, name, [order])
_ -> raise "No scope `#{sort_scope}` of type sort defined in the #{module}"
end
format_params(queryable, sort_params, opts)
end
def format_params(_queryable, sort_params, _opts) do
sort_params
|> Map.put_new(:assoc, [])
|> Map.put_new(:order, :asc)
end
end
| 35.07438
| 157
| 0.670908
|
9e6b4070dd18306fb553986d1c4e4d5a23f8b8da
| 1,888
|
ex
|
Elixir
|
clients/android_enterprise/lib/google_api/android_enterprise/v1/model/service_account.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | null | null | null |
clients/android_enterprise/lib/google_api/android_enterprise/v1/model/service_account.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/android_enterprise/lib/google_api/android_enterprise/v1/model/service_account.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.ServiceAccount do
@moduledoc """
A service account identity, including the name and credentials that can be used to authenticate as the service account.
## Attributes
* `key` (*type:* `GoogleApi.AndroidEnterprise.V1.Model.ServiceAccountKey.t`, *default:* `nil`) - Credentials that can be used to authenticate as this ServiceAccount.
* `name` (*type:* `String.t`, *default:* `nil`) - The account name of the service account, in the form of an email address. Assigned by the server.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:key => GoogleApi.AndroidEnterprise.V1.Model.ServiceAccountKey.t(),
:name => String.t()
}
field(:key, as: GoogleApi.AndroidEnterprise.V1.Model.ServiceAccountKey)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.ServiceAccount do
def decode(value, options) do
GoogleApi.AndroidEnterprise.V1.Model.ServiceAccount.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.ServiceAccount do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.76
| 169
| 0.745233
|
9e6b8d75e4709e3c058e3d727c325ca34a018674
| 312
|
ex
|
Elixir
|
lib/api_web/views/attendance_view.ex
|
nunopolonia/psc-api
|
2e358503851cc04cdaa89201a3f56586f8746736
|
[
"MIT"
] | 1
|
2017-09-10T23:51:40.000Z
|
2017-09-10T23:51:40.000Z
|
lib/api_web/views/attendance_view.ex
|
nunopolonia/psc-api
|
2e358503851cc04cdaa89201a3f56586f8746736
|
[
"MIT"
] | 24
|
2018-03-14T18:17:00.000Z
|
2021-03-01T07:47:53.000Z
|
lib/api_web/views/attendance_view.ex
|
portosummerofcode/psc-api
|
2e358503851cc04cdaa89201a3f56586f8746736
|
[
"MIT"
] | null | null | null |
defmodule ApiWeb.AttendanceView do
use Api.Web, :view
alias ApiWeb.{Admin.UserView}
def render("attendance_user.json", %{attendance: attendance}) do
Map.merge(
render_one(attendance.user, UserView, "user_short.json"),
%{
checked_in: attendance.checked_in,
}
)
end
end
| 20.8
| 66
| 0.666667
|
9e6bb13bf047d2ef2a8105d67e5bdf4b4f425b9d
| 461
|
exs
|
Elixir
|
rel/config.exs
|
Nebo15/ael.api
|
22624a2673d0af1ef5f89b2429979456d2589eda
|
[
"MIT"
] | 19
|
2017-04-27T14:00:16.000Z
|
2021-06-08T20:47:39.000Z
|
rel/config.exs
|
Nebo15/ael.api
|
22624a2673d0af1ef5f89b2429979456d2589eda
|
[
"MIT"
] | 3
|
2017-05-17T13:13:33.000Z
|
2017-05-29T15:08:30.000Z
|
rel/config.exs
|
Nebo15/ael.api
|
22624a2673d0af1ef5f89b2429979456d2589eda
|
[
"MIT"
] | 4
|
2017-07-27T16:15:22.000Z
|
2021-06-08T20:47:45.000Z
|
use Mix.Releases.Config,
default_release: :default,
default_environment: :default
environment :default do
set pre_start_hook: "bin/hooks/pre-start.sh"
set dev_mode: false
set include_erts: false
set include_src: false
set overlays: [
{:template, "rel/templates/vm.args.eex", "releases/<%= release_version %>/vm.args"}
]
end
release :ael_api do
set version: current_version(:ael_api)
set applications: [
ael_api: :permanent
]
end
| 21.952381
| 87
| 0.720174
|
9e6bc38e35eda9032796c31615763d5a67c59f91
| 2,387
|
exs
|
Elixir
|
mix.exs
|
FunkyStudioHQ/phoenix-pagination
|
558de222d268529031de8782e8027c01831076af
|
[
"MIT"
] | null | null | null |
mix.exs
|
FunkyStudioHQ/phoenix-pagination
|
558de222d268529031de8782e8027c01831076af
|
[
"MIT"
] | null | null | null |
mix.exs
|
FunkyStudioHQ/phoenix-pagination
|
558de222d268529031de8782e8027c01831076af
|
[
"MIT"
] | null | null | null |
defmodule Phoenix.Pagination.Mixfile do
use Mix.Project
@version "0.6.0"
def project do
[app: :phoenix_pagination,
version: @version,
elixir: "~> 1.7",
elixirc_paths: path(Mix.env),
package: package(),
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
aliases: aliases(),
name: "phoenix_pagination",
docs: [
main: "readme",
extras: ["README.md"],
source_ref: "v#{@version}"],
source_url: "https://github.com/FunkyStudioHQ/phoenix_pagination.git",
homepage_url: "https://github.com/FunkyStudioHQ/phoenix_pagination",
description: """
Simple pagination for Ecto and Phoenix using plaing EEx templates.
"""]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: application(Mix.env)]
end
defp application(:test), do: [:postgrex, :ecto_sql, :logger]
defp application(_), do: [:plug, :phoenix_html, :ecto, :ecto_sql, :logger]
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:phoenix_html, "~> 3.2.0"},
{:plug, "~> 1.13.6"},
{:ecto_sql, "~> 3.7"},
# Test dependencies
{:postgrex, ">= 0.0.0", only: [:test]},
# {:credo, "~> 1.0.0", only: [:dev, :test], runtime: false},
{:credo, "~> 1.6", only: [:dev, :test], runtime: false},
# Docs dependencies
{:earmark, "~> 1.4", only: :dev},
{:ex_doc, "~> 0.26", only: :dev, runtime: false},
# {:inch_ex, "~> 0.5", only: :dev}
{:inch_ex, github: "rrrene/inch_ex", only: [:dev, :test]}
]
end
defp path(:test) do
["lib", "test/support", "test/fixtures"]
end
defp path(_), do: ["lib"]
defp package do
[
maintainers: ["FunkyStudio"],
licenses: ["MIT"],
links: %{
"Github" => "https://github.com/FunkyStudioHQ/phoenix_pagination.git",
"FunkyStudio" => "http://funky.studio"
},
files: ~w(lib test config) ++ ~w(CHANGELOG.md LICENSE.md mix.exs README.md)
]
end
def aliases do
[test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]]
end
end
| 28.759036
| 81
| 0.581902
|
9e6be0da1797951cc3234d2fc84fd62e19df3ca1
| 2,828
|
ex
|
Elixir
|
lib/rogger.ex
|
szTheory/rogger
|
bf8acdd90fedce5451e60170ce7abd8a8645e61d
|
[
"MIT"
] | 15
|
2015-04-28T07:16:38.000Z
|
2020-08-25T01:22:59.000Z
|
lib/rogger.ex
|
szTheory/rogger
|
bf8acdd90fedce5451e60170ce7abd8a8645e61d
|
[
"MIT"
] | null | null | null |
lib/rogger.ex
|
szTheory/rogger
|
bf8acdd90fedce5451e60170ce7abd8a8645e61d
|
[
"MIT"
] | 2
|
2015-11-05T16:08:34.000Z
|
2020-03-03T21:16:36.000Z
|
defmodule Rogger do
use Application
use GenServer
use Timex
require Logger
@info_exchange "info"
@warn_exchange "warn"
@error_exchange "error"
@app Application.get_env :rogger, :app
### Public API
def init(_) do
{:ok, configure([])}
end
def handle_call({:configure, options}, _state) do
{:ok, :ok, configure(options)}
end
def handle_event({_level, gl, _event}, state) when node(gl) != node() do
{:ok, state}
end
def handle_event({level, _gl, {Logger, msg, ts, md}}, %{level: min_level} = state) do
if is_nil(min_level) or Logger.compare_levels(level, min_level) != :lt do
log_event(level, msg, ts, md, state)
end
{:ok, state}
end
## Helpers
defp configure(options) do
env = Application.get_env(:logger, :rogger, [])
rogger = configure_merge(env, options)
Application.put_env(:logger, :rogger, rogger)
{:ok, conn} = open_connection
{:ok, chan} = open_channel conn
declare_exchange chan, @info_exchange
declare_exchange chan, @warn_exchange
declare_exchange chan, @error_exchange
format = console
|> Keyword.get(:format)
|> Logger.Formatter.compile
level = Keyword.get(console, :level)
metadata = Keyword.get(console, :metadata, [])
colors = configure_colors(console)
%{format: format, metadata: metadata, level: level, colors: colors}
end
defp configure_merge(env, options) do
Keyword.merge(env, options, fn
:colors, v1, v2 -> Keyword.merge(v1, v2)
_, _v1, v2 -> v2
end)
end
defp configure_colors(console) do
colors = Keyword.get(console, :colors, [])
debug = Keyword.get(colors, :debug, :cyan)
info = Keyword.get(colors, :info, :normal)
warn = Keyword.get(colors, :warn, :yellow)
error = Keyword.get(colors, :error, :red)
enabled = Keyword.get(colors, :enabled, IO.ANSI.enabled?)
%{debug: debug, info: info, warn: warn, error: error, enabled: enabled}
end
defp log_event(level, msg, ts, md, %{colors: colors} = state) do
ansidata = format_event(level, msg, ts, md, state)
chardata = color_event(level, ansidata, colors)
timestamp = Date.local |> Date.convert(:secs)
publish channel, @info_exchange, level, chardata, [app_id: @app, timestamp: timestamp]
end
defp format_event(level, msg, ts, md, %{format: format, metadata: metadata}) do
Logger.Formatter.format(format, level, msg, ts, Dict.take(md, metadata))
end
defp color_event(level, data, %{enabled: true} = colors), do:
[IO.ANSI.format_fragment(Map.fetch!(colors, level), true), data|IO.ANSI.reset]
defp color_event(_level, data, %{enabled: false}), do:
data
def publish(chan, exchange, routing_key, message, opts) do
AMQP.Basic.publish chan, exchange, routing_key, message, opts
{:ok}
end
end
| 29.768421
| 90
| 0.662659
|
9e6c0b4b4831276fa073410f484d932932fc3ebc
| 2,355
|
ex
|
Elixir
|
lib/joq/job_event.ex
|
FelixKiunke/joq
|
aa71b20b6a385eb80a06045b28d19aad2d697ade
|
[
"Unlicense",
"MIT"
] | null | null | null |
lib/joq/job_event.ex
|
FelixKiunke/joq
|
aa71b20b6a385eb80a06045b28d19aad2d697ade
|
[
"Unlicense",
"MIT"
] | 2
|
2017-01-20T17:39:37.000Z
|
2017-01-20T17:40:49.000Z
|
lib/joq/job_event.ex
|
FelixKiunke/joq
|
aa71b20b6a385eb80a06045b28d19aad2d697ade
|
[
"Unlicense",
"MIT"
] | null | null | null |
defmodule Joq.JobEvent do
@moduledoc ~S"""
Reports events from the job running lifecycle.
Events are `{result, job}` messages where `job` is a `Joq.Job` struct and
`result` can be
* `:finished` - the job was completed successfully,
* `:failed` - an error has occurred (note this event after the last attempt
if a job may be retried)
* `:dropped` - the job was removed from the queue as a duplicate (only if
the job's worker has `duplicates: :drop` set, see `Joq.Worker`)
These events are sent to all subscribed processes as regular messages.
## Example
Joq.JobEvent.subscribe
receive do
{:finished, job} ->
IO.puts("Hooray, job #{job.id} was completed successfully")
{:failed, job} ->
IO.puts("Booo, job #{job.id} failed")
{:dropped, job} ->
IO.puts("Job #{job.id} was a dupe and has been dropped")
end
# Note that events will be sent until we unsubscribe
Joq.JobEvent.unsubscribe
"""
use GenServer
@doc false
def start_link do
{:ok, _pid} =
GenServer.start_link(__MODULE__, %{listeners: []}, name: __MODULE__)
end
@doc """
Subscribes the current process to events.
"""
def subscribe do
GenServer.call(__MODULE__, :subscribe)
end
@doc """
Unsubscribes the current process.
"""
def unsubscribe do
GenServer.call(__MODULE__, :unsubscribe)
end
def handle_call(:subscribe, {caller, _ref}, state) do
state = Map.put(state, :listeners, state.listeners ++ [caller])
{:reply, :ok, state}
end
def handle_call(:unsubscribe, {caller, _ref}, state) do
state = Map.put(state, :listeners, state.listeners -- [caller])
{:reply, :ok, state}
end
def handle_cast({:notify, event}, state) do
Enum.each state.listeners, fn (pid) ->
send pid, event
end
{:noreply, state}
end
@doc false
# Send a :finished event
@spec finished(Joq.Job.t) :: term
def finished(job) do
notify {:finished, job}
end
@doc false
# Send a :dropped event
@spec dropped(Joq.Job.t) :: term
def dropped(job) do
notify {:dropped, job}
end
@doc false
# Send a :failed event
@spec failed(Joq.Job.t) :: term
def failed(job) do
notify {:failed, job}
end
defp notify(event) do
GenServer.cast(__MODULE__, {:notify, event})
end
end
| 24.53125
| 79
| 0.637792
|
9e6c52a2a806ad718a150226e51e1e940ceb98f1
| 18,465
|
ex
|
Elixir
|
lib/elixir/lib/module/types/pattern.ex
|
tverlaan/elixir
|
d25b61db13d94ee83374748581c7bcb8e2dfc0e6
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/module/types/pattern.ex
|
tverlaan/elixir
|
d25b61db13d94ee83374748581c7bcb8e2dfc0e6
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/module/types/pattern.ex
|
tverlaan/elixir
|
d25b61db13d94ee83374748581c7bcb8e2dfc0e6
|
[
"Apache-2.0"
] | null | null | null |
defmodule Module.Types.Pattern do
@moduledoc false
alias Module.Types.Of
import Module.Types.{Helpers, Unify}
@doc """
Handles patterns and guards at once.
"""
def of_head(patterns, guards, stack, context) do
with {:ok, types, context} <-
map_reduce_ok(patterns, context, &of_pattern(&1, stack, &2)),
# TODO: Check that of_guard/3 returns boolean() | :fail
{:ok, _, context} <- of_guard(guards_to_or(guards), stack, context),
do: {:ok, types, context}
end
@doc """
Return the type and typing context of a pattern expression or an error
in case of a typing conflict.
"""
def of_pattern(pattern, %{context: stack_context} = stack, context)
when stack_context != :pattern do
of_pattern(pattern, %{stack | context: :pattern}, context)
end
# :atom
def of_pattern(atom, _stack, context) when is_atom(atom) do
{:ok, {:atom, atom}, context}
end
# 12
def of_pattern(literal, _stack, context) when is_integer(literal) do
{:ok, :integer, context}
end
# 1.2
def of_pattern(literal, _stack, context) when is_float(literal) do
{:ok, :float, context}
end
# "..."
def of_pattern(literal, _stack, context) when is_binary(literal) do
{:ok, :binary, context}
end
# <<...>>>
def of_pattern({:<<>>, _meta, args}, stack, context) do
result = Of.binary(args, stack, context, &of_pattern/3)
case result do
{:ok, context} -> {:ok, :binary, context}
{:error, reason} -> {:error, reason}
end
end
# left | []
def of_pattern({:|, _meta, [left_expr, []]} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
of_pattern(left_expr, stack, context)
end
# left | right
def of_pattern({:|, _meta, [left_expr, right_expr]} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
case of_pattern(left_expr, stack, context) do
{:ok, left, context} ->
case of_pattern(right_expr, stack, context) do
{:ok, {:list, right}, context} ->
{:ok, to_union([left, right], context), context}
{:ok, right, context} ->
{:ok, to_union([left, right], context), context}
{:error, reason} ->
{:error, reason}
end
{:error, reason} ->
{:error, reason}
end
end
# []
def of_pattern([], _stack, context) do
{:ok, {:list, :dynamic}, context}
end
# [expr, ...]
def of_pattern(exprs, stack, context) when is_list(exprs) do
stack = push_expr_stack(exprs, stack)
case map_reduce_ok(exprs, context, &of_pattern(&1, stack, &2)) do
{:ok, types, context} -> {:ok, {:list, to_union(types, context)}, context}
{:error, reason} -> {:error, reason}
end
end
# left ++ right
def of_pattern(
{{:., _meta1, [:erlang, :++]}, _meta2, [left_expr, right_expr]} = expr,
stack,
context
) do
stack = push_expr_stack(expr, stack)
case of_pattern(left_expr, stack, context) do
{:ok, {:list, left}, context} ->
case of_pattern(right_expr, stack, context) do
{:ok, {:list, right}, context} ->
{:ok, {:list, to_union([left, right], context)}, context}
{:ok, right, context} ->
{:ok, {:list, to_union([left, right], context)}, context}
{:error, reason} ->
{:error, reason}
end
{:error, reason} ->
{:error, reason}
end
end
# _
def of_pattern({:_, _meta, atom}, _stack, context) when is_atom(atom) do
{:ok, :dynamic, context}
end
# ^var
def of_pattern({:^, _meta, [var]}, _stack, context) do
{:ok, get_var!(var, context), context}
end
# var
def of_pattern(var, _stack, context) when is_var(var) do
{type, context} = new_var(var, context)
{:ok, type, context}
end
# {left, right}
def of_pattern({left, right}, stack, context) do
of_pattern({:{}, [], [left, right]}, stack, context)
end
# {...}
def of_pattern({:{}, _meta, exprs} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
case map_reduce_ok(exprs, context, &of_pattern(&1, stack, &2)) do
{:ok, types, context} -> {:ok, {:tuple, length(types), types}, context}
{:error, reason} -> {:error, reason}
end
end
# left = right
def of_pattern({:=, _meta, [left_expr, right_expr]} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
with {:ok, left_type, context} <- of_pattern(left_expr, stack, context),
{:ok, right_type, context} <- of_pattern(right_expr, stack, context),
do: unify(left_type, right_type, stack, context)
end
# %{...}
def of_pattern({:%{}, _meta, args} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
Of.open_map(args, stack, context, &of_pattern/3)
end
# %Struct{...}
def of_pattern({:%, meta1, [module, {:%{}, _meta2, args}]} = expr, stack, context)
when is_atom(module) do
stack = push_expr_stack(expr, stack)
with {:ok, struct, context} <- Of.struct(module, meta1, context),
{:ok, map, context} <- Of.open_map(args, stack, context, &of_pattern/3) do
unify(map, struct, stack, context)
end
end
# %_{...}
def of_pattern(
{:%, _meta1, [{:_, _meta2, var_context}, {:%{}, _meta3, args}]} = expr,
stack,
context
)
when is_atom(var_context) do
stack = push_expr_stack(expr, stack)
with {:ok, {:map, pairs}, context} <- Of.open_map(args, stack, context, &of_pattern/3) do
{:ok, {:map, [{:required, {:atom, :__struct__}, :atom} | pairs]}, context}
end
end
# %var{...} and %^var{...}
def of_pattern({:%, _meta1, [var, {:%{}, _meta2, args}]} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
with {:ok, var_type, context} = of_pattern(var, stack, context),
{:ok, _, context} <- unify(var_type, :atom, stack, context),
{:ok, {:map, pairs}, context} <- Of.open_map(args, stack, context, &of_pattern/3) do
{:ok, {:map, [{:required, {:atom, :__struct__}, var_type} | pairs]}, context}
end
end
def unify_kinds(:required, _), do: :required
def unify_kinds(_, :required), do: :required
def unify_kinds(:optional, :optional), do: :optional
## GUARDS
# TODO: Some guards can be changed to intersection types or higher order types
@boolean {:union, [{:atom, true}, {:atom, false}]}
@number {:union, [:integer, :float]}
@guard_functions %{
{:is_atom, 1} => {[:atom], @boolean},
{:is_binary, 1} => {[:binary], @boolean},
{:is_bitstring, 1} => {[:binary], @boolean},
{:is_boolean, 1} => {[@boolean], @boolean},
{:is_float, 1} => {[:float], @boolean},
{:is_function, 1} => {[:fun], @boolean},
{:is_function, 2} => {[:fun, :integer], @boolean},
{:is_integer, 1} => {[:integer], @boolean},
{:is_list, 1} => {[{:list, :dynamic}], @boolean},
{:is_map, 1} => {[{:map, [{:optional, :dynamic, :dynamic}]}], @boolean},
{:is_map_key, 2} => {[:dynamic, {:map, [{:optional, :dynamic, :dynamic}]}], :dynamic},
{:is_number, 1} => {[@number], @boolean},
{:is_pid, 1} => {[:pid], @boolean},
{:is_port, 1} => {[:port], @boolean},
{:is_reference, 1} => {[:reference], @boolean},
{:is_tuple, 1} => {[:tuple], @boolean},
{:<, 2} => {[:dynamic, :dynamic], @boolean},
{:"=<", 2} => {[:dynamic, :dynamic], @boolean},
{:>, 2} => {[:dynamic, :dynamic], @boolean},
{:>=, 2} => {[:dynamic, :dynamic], @boolean},
{:"/=", 2} => {[:dynamic, :dynamic], @boolean},
{:"=/=", 2} => {[:dynamic, :dynamic], @boolean},
{:==, 2} => {[:dynamic, :dynamic], @boolean},
{:"=:=", 2} => {[:dynamic, :dynamic], @boolean},
{:*, 2} => {[@number, @number], @number},
{:+, 1} => {[@number], @number},
{:+, 2} => {[@number, @number], @number},
{:-, 1} => {[@number], @number},
{:-, 2} => {[@number, @number], @number},
{:/, 2} => {[@number, @number], @number},
{:abs, 1} => {[@number], @number},
{:ceil, 1} => {[@number], :integer},
{:floor, 1} => {[@number], :integer},
{:round, 1} => {[@number], :integer},
{:trunc, 1} => {[@number], :integer},
{:element, 2} => {[:integer, :tuple], :dynamic},
{:hd, 1} => {[{:list, :dynamic}], :dynamic},
{:length, 1} => {[{:list, :dynamic}], :integer},
{:map_get, 2} => {[:dynamic, {:map, [{:optional, :dynamic, :dynamic}]}], :dynamic},
{:map_size, 1} => {[{:map, [{:optional, :dynamic, :dynamic}]}], :integer},
{:tl, 1} => {[{:list, :dynamic}], :dynamic},
{:tuple_size, 1} => {[:tuple], :integer},
{:node, 1} => {[{:union, [:pid, :reference, :port]}], :atom},
{:binary_part, 3} => {[:binary, :integer, :integer], :binary},
{:bit_size, 1} => {[:binary], :integer},
{:byte_size, 1} => {[:binary], :integer},
{:size, 1} => {[{:union, [:binary, :tuple]}], @boolean},
{:div, 2} => {[:integer, :integer], :integer},
{:rem, 2} => {[:integer, :integer], :integer},
{:node, 0} => {[], :atom},
{:self, 0} => {[], :pid},
{:bnot, 1} => {[:integer], :integer},
{:band, 2} => {[:integer, :integer], :integer},
{:bor, 2} => {[:integer, :integer], :integer},
{:bxor, 2} => {[:integer, :integer], :integer},
{:bsl, 2} => {[:integer, :integer], :integer},
{:bsr, 2} => {[:integer, :integer], :integer},
{:or, 2} => {[@boolean, @boolean], @boolean},
{:and, 2} => {[@boolean, @boolean], @boolean},
{:xor, 2} => {[@boolean, @boolean], @boolean},
{:not, 1} => {[@boolean], @boolean}
# Following guards are matched explicitly to handle
# type guard functions such as is_atom/1
# {:andalso, 2} => {[@boolean, @boolean], @boolean}
# {:orelse, 2} => {[@boolean, @boolean], @boolean}
}
@type_guards [
:is_atom,
:is_binary,
:is_bitstring,
:is_boolean,
:is_float,
:is_function,
:is_function,
:is_integer,
:is_list,
:is_map,
:is_number,
:is_pid,
:is_port,
:is_reference,
:is_tuple
]
@doc """
Refines the type variables in the typing context using type check guards
such as `is_integer/1`.
"""
def of_guard(expr, %{context: stack_context} = stack, context) when stack_context != :pattern do
of_guard(expr, %{stack | context: :pattern}, context)
end
def of_guard({{:., _, [:erlang, :andalso]}, _, [left, right]} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
fresh_context = fresh_context(context)
with {:ok, left_type, left_context} <- of_guard(left, stack, fresh_context),
{:ok, right_type, right_context} <- of_guard(right, stack, fresh_context),
{:ok, context} <- merge_context_and(context, stack, left_context, right_context),
{:ok, _, context} <- unify(left_type, @boolean, stack, context),
{:ok, _, context} <- unify(right_type, @boolean, stack, context),
do: {:ok, @boolean, context}
end
def of_guard({{:., _, [:erlang, :orelse]}, _, [left, right]} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
fresh_context = fresh_context(context)
with {:ok, left_type, left_context} <- of_guard(left, stack, fresh_context),
{:ok, _right_type, right_context} <- of_guard(right, stack, fresh_context),
{:ok, context} <- merge_context_or(context, stack, left_context, right_context),
{:ok, _, context} <- unify(left_type, @boolean, stack, context),
do: {:ok, @boolean, context}
end
# The unary operators + and - are special cased to avoid common warnings until
# we add support for intersection types for the guard functions
# -integer / +integer
def of_guard({{:., _, [:erlang, guard]}, _, [integer]}, _stack, context)
when guard in [:+, :-] and is_integer(integer) do
{:ok, :integer, context}
end
# -float / +float
def of_guard({{:., _, [:erlang, guard]}, _, [float]}, _stack, context)
when guard in [:+, :-] and is_float(float) do
{:ok, :float, context}
end
# fun(args)
def of_guard({{:., _, [:erlang, guard]}, _, args} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
{param_types, return_type} = guard_signature(guard, length(args))
type_guard? = type_guard?(guard)
# Only check type guards in the context of and/or/not,
# a type guard in the context of is_tuple(x) > :foo
# should not affect the inference of x
if not type_guard? or stack.type_guards_enabled? do
arg_stack = %{stack | type_guards_enabled?: type_guard?}
with {:ok, arg_types, context} <-
map_reduce_ok(args, context, &of_guard(&1, arg_stack, &2)),
{:ok, context} <- unify_call(arg_types, param_types, stack, context) do
{arg_types, guard_sources} =
case arg_types do
[{:var, index} | rest_arg_types] when type_guard? ->
guard_sources =
Map.update(context.guard_sources, index, [:guarded], &[:guarded | &1])
{rest_arg_types, guard_sources}
_ ->
{arg_types, context.guard_sources}
end
guard_sources =
Enum.reduce(arg_types, guard_sources, fn
{:var, index}, guard_sources ->
Map.update(guard_sources, index, [:fail], &[:fail | &1])
_, guard_sources ->
guard_sources
end)
{:ok, return_type, %{context | guard_sources: guard_sources}}
end
else
{:ok, return_type, context}
end
end
# map.field
def of_guard({{:., meta1, [map, field]}, meta2, []}, stack, context) do
of_guard({{:., meta1, [:erlang, :map_get]}, meta2, [field, map]}, stack, context)
end
# var
def of_guard(var, _stack, context) when is_var(var) do
type = Map.fetch!(context.vars, var_name(var))
{:ok, type, context}
end
# other literals
def of_guard(expr, stack, context) do
# Fall back to of_pattern/3 for literals
of_pattern(expr, stack, context)
end
# Returns a copy of `context` in which every known type variable is reset to
# :unbound and every trace list is emptied, while keeping the same keys.
defp fresh_context(context) do
  unbound_types = for {var, _type} <- context.types, into: %{}, do: {var, :unbound}
  empty_traces = for {var, _trace} <- context.traces, into: %{}, do: {var, []}
  %{context | types: unbound_types, traces: empty_traces}
end
# Unifies each argument type with the corresponding parameter type, threading
# the context through; the first unification error short-circuits the fold.
defp unify_call(args, params, stack, context) do
  args
  |> Enum.zip(params)
  |> reduce_ok(context, fn {arg_type, param_type}, acc_context ->
    with {:ok, _type, acc_context} <- unify(arg_type, param_type, stack, acc_context),
         do: {:ok, acc_context}
  end)
end
# Combines the refinements collected from both operands of `andalso`: every
# type learned on either side must unify back into the shared context, and
# the guard sources are joined with AND semantics before being merged in.
defp merge_context_and(context, stack, left, right) do
  with {:ok, context} <- unify_new_types(context, stack, left),
       {:ok, context} <- unify_new_types(context, stack, right) do
    guard_sources = and_guard_sources(left.guard_sources, right.guard_sources)
    guard_sources = merge_guard_sources([context.guard_sources, guard_sources])
    {:ok, %{context | guard_sources: guard_sources}}
  end
end
# Folds every type bound in the fresh `new_context` back into `context` by
# unifying it with the matching type variable. Tracing is disabled for these
# unifications because the traces are merged wholesale first.
defp unify_new_types(context, stack, new_context) do
  context = merge_traces(context, new_context)

  reduce_ok(Map.to_list(new_context.types), context, fn
    # Variables that stayed :unbound in the fresh context carry no new info.
    {_index, :unbound}, context ->
      {:ok, context}

    {index, new_type}, context ->
      case unify({:var, index}, new_type, %{stack | trace: false}, context) do
        {:ok, _, context} ->
          {:ok, context}

        {:error, reason} ->
          {:error, reason}
      end
  end)
end
# Collapses a list of guard-source maps into one, joining the source lists
# of any variable index that appears in more than one map.
defp merge_guard_sources(sources) do
  Enum.reduce(sources, fn current, merged ->
    Map.merge(current, merged, fn _index, current_sources, merged_sources ->
      join_guard_source(current_sources, merged_sources)
    end)
  end)
end
# Canonicalizes two guard-source lists into a single-tag list:
# :fail dominates, then :guarded, otherwise the result is empty.
defp join_guard_source(left, right) do
  combined = left ++ right

  cond do
    Enum.member?(combined, :fail) -> [:fail]
    Enum.member?(combined, :guarded) -> [:guarded]
    true -> []
  end
end
# Joins guard sources with AND semantics.
defp and_guard_sources(left, right) do
  Map.merge(left, right, fn _index, left_sources, right_sources ->
    # A potentially failing guard on the right cannot fail when the left side
    # already type-guarded the variable, e.g. `is_list(x) and length(x)`.
    guarded_then_fail? = :guarded in left_sources and :fail in right_sources

    if guarded_then_fail?,
      do: [:guarded],
      else: join_guard_source(left_sources, right_sources)
  end)
end
# Prepends the traces collected in `new_context` onto the matching trace
# lists in `context`, inserting new indexes as-is.
defp merge_traces(context, new_context) do
  merged =
    Enum.reduce(new_context.traces, context.traces, fn {index, new_traces}, acc ->
      Map.update(acc, index, new_traces, &(new_traces ++ &1))
    end)

  %{context | traces: merged}
end
# Combines the refinements from both operands of `orelse`. When each side
# refined exactly the same single variable, the types are joined as a union
# (unless the left guard can fail, e.g. `length(x)`); otherwise only
# left-side refinements coming from failing guards are kept, since the right
# operand may never have been evaluated.
defp merge_context_or(context, stack, left, right) do
  context =
    case {Map.to_list(left.types), Map.to_list(right.types)} do
      # Only one side bound the single shared variable: take that binding.
      {[{index, :unbound}], [{index, type}]} ->
        refine_var(index, type, stack, context)

      {[{index, type}], [{index, :unbound}]} ->
        refine_var(index, type, stack, context)

      {[{index, left_type}], [{index, right_type}]} ->
        # Only include right side if left side is from type guard such as is_list(x),
        # do not refine in case of length(x)
        left_guard_sources = Map.get(left.guard_sources, index, [])

        if :fail in left_guard_sources do
          guard_sources = Map.put(context.guard_sources, index, [:fail])
          context = %{context | guard_sources: guard_sources}
          refine_var(index, left_type, stack, context)
        else
          guard_sources =
            merge_guard_sources([
              context.guard_sources,
              left.guard_sources,
              right.guard_sources
            ])

          context = %{context | guard_sources: guard_sources}
          refine_var(index, to_union([left_type, right_type], context), stack, context)
        end

      # Several variables were refined: keep only left refinements whose
      # guard is a potential failure source.
      {left_types, _right_types} ->
        Enum.reduce(left_types, context, fn {index, left_type}, context ->
          left_guard_sources = Map.get(left.guard_sources, index, [])

          if :fail in left_guard_sources do
            guard_sources =
              merge_guard_sources([
                context.guard_sources,
                left.guard_sources,
                right.guard_sources
              ])

            context = %{context | guard_sources: guard_sources}
            refine_var(index, left_type, stack, context)
          else
            context
          end
        end)
    end

  {:ok, context}
end
# Returns `{param_types, return_type}` for the guard `name/arity`; raises
# `KeyError` for unknown guards.
defp guard_signature(name, arity) do
  @guard_functions |> Map.fetch!({name, arity})
end
# True when `name` is a type-check guard such as `is_atom`.
defp type_guard?(name), do: Enum.member?(@type_guards, name)
end
| 33.330325
| 98
| 0.579529
|
9e6ca5a6bc53aaaaa882c44e6213a2e9de9b741d
| 684
|
exs
|
Elixir
|
priv/repo/migrations/20161202114834_create_snapmails.exs
|
mekongit/evercam_models
|
7c36d52f2b47ffc89a3eaca65c2e353716fafacc
|
[
"MIT"
] | null | null | null |
priv/repo/migrations/20161202114834_create_snapmails.exs
|
mekongit/evercam_models
|
7c36d52f2b47ffc89a3eaca65c2e353716fafacc
|
[
"MIT"
] | null | null | null |
priv/repo/migrations/20161202114834_create_snapmails.exs
|
mekongit/evercam_models
|
7c36d52f2b47ffc89a3eaca65c2e353716fafacc
|
[
"MIT"
] | null | null | null |
defmodule Evercam.Repo.Migrations.CreateSnapmail do
  @moduledoc """
  Creates the `snapmails` table with a unique external identifier (`exid`).
  """
  use Ecto.Migration

  def up do
    create table(:snapmails) do
      add :exid, :string, null: false
      add :subject, :string, null: false
      add :recipients, :string
      add :message, :string
      add :notify_days, :string
      add :notify_time, :string, null: false
      add :is_public, :boolean, null: false, default: false
      add :user_id, references(:users, on_delete: :nothing)
      # Every snapmail must belong to a camera.
      add :camera_id, references(:cameras, on_delete: :nothing), null: false

      # Parenthesized: a bare `timestamps` call is deprecated and warns on
      # modern Elixir.
      timestamps()
    end

    create unique_index(:snapmails, [:exid], name: :exid_unique_index)
  end

  def down do
    drop table(:snapmails)
  end
end
| 27.36
| 76
| 0.669591
|
9e6cee06115a4b3df6397d2c9c238fbe71be04cb
| 1,394
|
exs
|
Elixir
|
test/bson/types_test.exs
|
kamciokodzi/mongodb-old
|
99e96a4ae6b963914ab733a8ba551ad1fcf75b2c
|
[
"Apache-2.0"
] | null | null | null |
test/bson/types_test.exs
|
kamciokodzi/mongodb-old
|
99e96a4ae6b963914ab733a8ba551ad1fcf75b2c
|
[
"Apache-2.0"
] | null | null | null |
test/bson/types_test.exs
|
kamciokodzi/mongodb-old
|
99e96a4ae6b963914ab733a8ba551ad1fcf75b2c
|
[
"Apache-2.0"
] | null | null | null |
defmodule BSON.TypesTest do
  # Pins the exact `#BSON.X<...>` rendering produced by the Inspect protocol
  # implementations for each custom BSON struct.
  use ExUnit.Case, async: true

  test "inspect BSON.Binary" do
    value = %BSON.Binary{binary: <<1, 2, 3>>}
    assert inspect(value) == "#BSON.Binary<010203>"

    # The subtype is appended after the hex payload when present.
    value = %BSON.Binary{binary: <<1, 2, 3>>, subtype: :uuid}
    assert inspect(value) == "#BSON.Binary<010203, uuid>"
  end

  test "inspect BSON.ObjectId" do
    # The 12-byte object id renders as 24 lowercase hex characters.
    value = %BSON.ObjectId{value: <<29, 32, 69, 244, 101, 119, 228, 28, 61, 24, 21, 215>>}
    assert inspect(value) == "#BSON.ObjectId<1d2045f46577e41c3d1815d7>"
  end

  test "inspect BSON.DateTime" do
    # `utc` holds milliseconds; rendering is ISO-8601 with a Z suffix.
    value = %BSON.DateTime{utc: 1437940203000}
    assert inspect(value) == "#BSON.DateTime<2015-07-26T19:50:03Z>"
  end

  test "inspect BSON.Regex" do
    value = %BSON.Regex{pattern: "abc"}
    assert inspect(value) == "#BSON.Regex<\"abc\">"

    # Options are shown as a second quoted element when set.
    value = %BSON.Regex{pattern: "abc", options: "i"}
    assert inspect(value) == "#BSON.Regex<\"abc\", \"i\">"
  end

  test "inspect BSON.JavaScript" do
    value = %BSON.JavaScript{code: "this === null"}
    assert inspect(value) == "#BSON.JavaScript<\"this === null\">"

    # A scope map is rendered after the code when present.
    value = %BSON.JavaScript{code: "this === value", scope: %{value: nil}}
    assert inspect(value) == "#BSON.JavaScript<\"this === value\", %{value: nil}>"
  end

  test "inspect BSON.Timestamp" do
    value = %BSON.Timestamp{value: 1412180887}
    assert inspect(value) == "#BSON.Timestamp<1412180887>"
  end
end
| 32.418605
| 90
| 0.635581
|
9e6d216da1d80d665a55b016a01a99cec2e322bd
| 1,723
|
ex
|
Elixir
|
apps/tai/lib/tai/venue_adapters/bitmex/stream/process_auth/messages/update_orders/new_partial_fill.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | 1
|
2019-12-19T05:16:26.000Z
|
2019-12-19T05:16:26.000Z
|
apps/tai/lib/tai/venue_adapters/bitmex/stream/process_auth/messages/update_orders/new_partial_fill.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | null | null | null |
apps/tai/lib/tai/venue_adapters/bitmex/stream/process_auth/messages/update_orders/new_partial_fill.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | 1
|
2020-05-03T23:32:11.000Z
|
2020-05-03T23:32:11.000Z
|
defmodule Tai.VenueAdapters.Bitmex.Stream.ProcessAuth.Messages.UpdateOrders.NewPartialFill do
  @moduledoc false

  # Fields carried over from the BitMEX order-update payload for a new
  # partial fill.
  defstruct [
    :account,
    :cl_ord_id,
    :cum_qty,
    :leaves_qty,
    :order_id,
    :symbol,
    :timestamp
  ]
end
defimpl Tai.VenueAdapters.Bitmex.Stream.ProcessAuth.Message,
  for: Tai.VenueAdapters.Bitmex.Stream.ProcessAuth.Messages.UpdateOrders.NewPartialFill do
  alias Tai.VenueAdapters.Bitmex

  # Timestamp format used when parsing the venue's order-update payload.
  @date_format "{ISO:Extended}"

  # Applies a partial-fill message to the order store when the client order
  # id carries the "gtc-" prefix used for tracked orders; anything else is
  # ignored. Always returns :ok regardless of the store outcome.
  def process(message, received_at, _state) do
    message.cl_ord_id
    |> case do
      "gtc-" <> id ->
        client_id = Bitmex.ClientId.from_base64(id)
        venue_timestamp = message.timestamp |> Timex.parse!(@date_format)
        # NOTE(review): relies on Decimal.cast/1 returning the value directly
        # (Decimal < 2.0 behavior; 2.x returns {:ok, value}) — confirm the
        # pinned dependency version.
        leaves_qty = message.leaves_qty |> Decimal.cast()
        cumulative_qty = message.cum_qty |> Decimal.cast()

        %Tai.Trading.OrderStore.Actions.PassivePartialFill{
          client_id: client_id,
          cumulative_qty: cumulative_qty,
          leaves_qty: leaves_qty,
          last_received_at: received_at,
          last_venue_timestamp: venue_timestamp
        }
        |> Tai.Trading.OrderStore.update()
        |> notify()

      _ ->
        :ignore
    end

    :ok
  end

  # Broadcasts the order transition on a successful store update.
  defp notify({:ok, {old, updated}}) do
    Tai.Trading.NotifyOrderUpdate.notify!(old, updated)
  end

  # The order exists but is not in a state that accepts this action.
  defp notify({:error, {:invalid_status, was, required, %action_name{} = action}}) do
    Tai.Events.warn(%Tai.Events.OrderUpdateInvalidStatus{
      was: was,
      required: required,
      client_id: action.client_id,
      action: action_name
    })
  end

  # No order with this client id is tracked in the store.
  defp notify({:error, {:not_found, %action_name{} = action}}) do
    Tai.Events.warn(%Tai.Events.OrderUpdateNotFound{
      client_id: action.client_id,
      action: action_name
    })
  end
end
| 26.507692
| 93
| 0.669762
|
9e6d555ce683488592e23382cefec5e38f78f088
| 4,660
|
ex
|
Elixir
|
src/turtlebot2_src/src/orocos-bayesian-filtering/orocos_bfl/debian/manpage.sgml.ex
|
alexoterno/turtlebot2_with_head
|
ac714f77379dd0f47ddb76d83896fdabee269a03
|
[
"MIT"
] | null | null | null |
src/turtlebot2_src/src/orocos-bayesian-filtering/orocos_bfl/debian/manpage.sgml.ex
|
alexoterno/turtlebot2_with_head
|
ac714f77379dd0f47ddb76d83896fdabee269a03
|
[
"MIT"
] | null | null | null |
src/turtlebot2_src/src/orocos-bayesian-filtering/orocos_bfl/debian/manpage.sgml.ex
|
alexoterno/turtlebot2_with_head
|
ac714f77379dd0f47ddb76d83896fdabee269a03
|
[
"MIT"
] | null | null | null |
<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V4.1//EN" [
<!-- Process this file with docbook-to-man to generate an nroff manual
page: `docbook-to-man manpage.sgml > manpage.1'. You may view
the manual page with: `docbook-to-man manpage.sgml | nroff -man |
less'. A typical entry in a Makefile or Makefile.am is:
manpage.1: manpage.sgml
docbook-to-man $< > $@
The docbook-to-man binary is found in the docbook-to-man package.
Please remember that if you create the nroff version in one of the
debian/rules file targets (such as build), you will need to include
docbook-to-man in your Build-Depends control field.
-->
<!-- Fill in your name for FIRSTNAME and SURNAME. -->
<!ENTITY dhfirstname "<firstname>FIRSTNAME</firstname>">
<!ENTITY dhsurname "<surname>SURNAME</surname>">
<!-- Please adjust the date whenever revising the manpage. -->
<!ENTITY dhdate "<date>May 8, 2007</date>">
<!-- SECTION should be 1-8, maybe w/ subsection other parameters are
allowed: see man(7), man(1). -->
<!ENTITY dhsection "<manvolnum>SECTION</manvolnum>">
<!ENTITY dhemail "<email>wim.meeussen@mech.kuleuven.be</email>">
<!ENTITY dhusername "Wim Meeussen">
<!ENTITY dhucpackage "<refentrytitle>OROCOS-BFL</refentrytitle>">
<!ENTITY dhpackage "orocos-bfl">
<!ENTITY debian "<productname>Debian</productname>">
<!ENTITY gnu "<acronym>GNU</acronym>">
<!ENTITY gpl "&gnu; <acronym>GPL</acronym>">
]>
<refentry>
<refentryinfo>
<address>
&dhemail;
</address>
<author>
&dhfirstname;
&dhsurname;
</author>
<copyright>
<year>2003</year>
<holder>&dhusername;</holder>
</copyright>
&dhdate;
</refentryinfo>
<refmeta>
&dhucpackage;
&dhsection;
</refmeta>
<refnamediv>
<refname>&dhpackage;</refname>
<refpurpose>program to do something</refpurpose>
</refnamediv>
<refsynopsisdiv>
<cmdsynopsis>
<command>&dhpackage;</command>
<arg><option>-e <replaceable>this</replaceable></option></arg>
<arg><option>--example <replaceable>that</replaceable></option></arg>
</cmdsynopsis>
</refsynopsisdiv>
<refsect1>
<title>DESCRIPTION</title>
<para>This manual page documents briefly the
<command>&dhpackage;</command> and <command>bar</command>
commands.</para>
<para>This manual page was written for the &debian; distribution
because the original program does not have a manual page.
Instead, it has documentation in the &gnu;
<application>Info</application> format; see below.</para>
<para><command>&dhpackage;</command> is a program that...</para>
</refsect1>
<refsect1>
<title>OPTIONS</title>
<para>These programs follow the usual &gnu; command line syntax,
with long options starting with two dashes (`-'). A summary of
options is included below. For a complete description, see the
<application>Info</application> files.</para>
<variablelist>
<varlistentry>
<term><option>-h</option>
<option>--help</option>
</term>
<listitem>
<para>Show summary of options.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-v</option>
<option>--version</option>
</term>
<listitem>
<para>Show version of program.</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1>
<title>SEE ALSO</title>
<para>bar (1), baz (1).</para>
<para>The programs are documented fully by <citetitle>The Rise and
Fall of a Fooish Bar</citetitle> available via the
<application>Info</application> system.</para>
</refsect1>
<refsect1>
<title>AUTHOR</title>
<para>This manual page was written by &dhusername; &dhemail; for
the &debian; system (but may be used by others). Permission is
granted to copy, distribute and/or modify this document under
the terms of the &gnu; General Public License, Version 2 or any
later version published by the Free Software Foundation.
</para>
<para>
On Debian systems, the complete text of the GNU General Public
License can be found in /usr/share/common-licenses/GPL.
</para>
</refsect1>
</refentry>
<!-- Keep this comment at the end of the file
Local variables:
mode: sgml
sgml-omittag:t
sgml-shorttag:t
sgml-minimize-attributes:nil
sgml-always-quote-attributes:t
sgml-indent-step:2
sgml-indent-data:t
sgml-parent-document:nil
sgml-default-dtd-file:nil
sgml-exposed-tags:nil
sgml-local-catalogs:nil
sgml-local-ecat-files:nil
End:
-->
| 29.681529
| 75
| 0.659013
|
9e6d8eaa17553f6a07263044328de3298357488f
| 1,580
|
exs
|
Elixir
|
test/corker/slack/users_test.exs
|
justmendes/corker
|
b7a6d7cbae47d30816bb99b8fe8576e9a0d5722d
|
[
"0BSD"
] | 5
|
2019-05-24T17:00:42.000Z
|
2019-05-27T13:50:56.000Z
|
test/corker/slack/users_test.exs
|
justmendes/corker
|
b7a6d7cbae47d30816bb99b8fe8576e9a0d5722d
|
[
"0BSD"
] | 17
|
2019-05-27T08:59:36.000Z
|
2020-02-17T06:45:27.000Z
|
test/corker/slack/users_test.exs
|
justmendes/corker
|
b7a6d7cbae47d30816bb99b8fe8576e9a0d5722d
|
[
"0BSD"
] | 1
|
2019-08-27T14:27:06.000Z
|
2019-08-27T14:27:06.000Z
|
defmodule Corker.Slack.UsersTest do
  use Corker.DataCase, async: true
  doctest Corker.Slack.Users

  alias Corker.{
    Accounts.User,
    Repo,
    Slack.Users
  }

  describe "extract/1" do
    test "creates a user for each slack user in the list" do
      # Factory slack users keyed by their slack id, mirroring the API shape.
      slack_users = build_list(5, :slack_user) |> Enum.into(%{}, &{&1.id, &1})

      Users.extract(slack_users)

      assert Repo.aggregate(User, :count, :id) == 5
    end

    test "ignores repeated users" do
      # One of the five slack users shares its id with an existing record, so
      # only four new users should be created (4 new + 1 existing = 5).
      user = insert(:user)
      repeated_user = build(:slack_user, id: user.slack_id)

      slack_users =
        build_list(4, :slack_user)
        |> Enum.into(%{}, &{&1.id, &1})
        |> Map.merge(%{repeated_user.id => repeated_user})

      Users.extract(slack_users)

      assert Repo.aggregate(User, :count, :id) == 5
    end

    test "ignores bot and app users" do
      %{id: slack_id, name: username} = real_user = build(:slack_user)

      slack_users =
        Enum.into(
          [
            real_user,
            build(:slack_app_user),
            build(:slack_bot_user),
            build(:slackbot)
          ],
          %{},
          &{&1.id, &1}
        )

      Users.extract(slack_users)

      # Only the non-bot, non-app user is persisted.
      assert [%User{slack_id: ^slack_id, username: ^username}] = Repo.all(User)
    end

    test "ignores users with similar usernames" do
      # NOTE(review): despite the name, this builds an *identical* username —
      # confirm whether fuzzy matching was intended.
      user = insert(:user)
      repeated_user = build(:slack_user, name: user.username)
      slack_users = %{repeated_user.id => repeated_user}

      Users.extract(slack_users)

      assert Repo.aggregate(User, :count, :id) == 1
    end
  end
end
| 23.939394
| 79
| 0.589241
|
9e6d90f7ff3958ba37565f6f7f5592ca53bd3082
| 683
|
ex
|
Elixir
|
lib/adyen_checkout_ex/model/ideal_details.ex
|
rsystem-se/adyen_checkout_ex
|
4210f1c7150152e81f350dc6ca55aeb19665b85b
|
[
"MIT"
] | 1
|
2021-05-30T20:45:10.000Z
|
2021-05-30T20:45:10.000Z
|
lib/adyen_checkout_ex/model/ideal_details.ex
|
rsystem-se/adyen_checkout_ex
|
4210f1c7150152e81f350dc6ca55aeb19665b85b
|
[
"MIT"
] | null | null | null |
lib/adyen_checkout_ex/model/ideal_details.ex
|
rsystem-se/adyen_checkout_ex
|
4210f1c7150152e81f350dc6ca55aeb19665b85b
|
[
"MIT"
] | null | null | null |
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule AdyenCheckoutEx.Model.IdealDetails do
  @moduledoc """
  Payment-method details for iDEAL: the selected `issuer` plus optional
  stored-payment references and the payment-method `type`.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"issuer",
    :"recurringDetailReference",
    :"storedPaymentMethodId",
    :"type"
  ]

  # Per the typespec, only `issuer` is non-nilable.
  @type t :: %__MODULE__{
    :"issuer" => String.t,
    :"recurringDetailReference" => String.t | nil,
    :"storedPaymentMethodId" => String.t | nil,
    :"type" => String.t | nil
  }
end
defimpl Poison.Decoder, for: AdyenCheckoutEx.Model.IdealDetails do
  # Pass-through decode: all fields are plain strings, so no nested
  # post-processing is required.
  def decode(value, _options) do
    value
  end
end
| 21.34375
| 91
| 0.670571
|
9e6d94fb1ea4d04bc87aa88d1738575b983695cf
| 1,546
|
ex
|
Elixir
|
test/support/conn_case.ex
|
longnd/elixir-gscraper
|
894570afd89e54b80ca591a56a182da55ac6ee61
|
[
"MIT"
] | null | null | null |
test/support/conn_case.ex
|
longnd/elixir-gscraper
|
894570afd89e54b80ca591a56a182da55ac6ee61
|
[
"MIT"
] | 25
|
2021-03-23T07:27:21.000Z
|
2021-10-31T15:09:52.000Z
|
test/support/conn_case.ex
|
longnd/elixir-gscraper
|
894570afd89e54b80ca591a56a182da55ac6ee61
|
[
"MIT"
] | null | null | null |
defmodule GscraperWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use GscraperWeb.ConnCase, async: true`, although
  this option is not recommended for other databases.
  """

  use ExUnit.CaseTemplate

  alias Ecto.Adapters.SQL.Sandbox
  alias Gscraper.Guardian.Authentication

  using do
    quote do
      # Import conveniences for testing with connections
      import Plug.Conn
      import Phoenix.ConnTest
      import Gscraper.Factory
      import GscraperWeb.ConnCase
      import GscraperWeb.Gettext

      alias GscraperWeb.Router.Helpers, as: Routes

      # The default endpoint for testing
      @endpoint GscraperWeb.Endpoint

      # Initializes a test session on `conn` and logs in `user` (a fresh
      # factory user by default) so authenticated routes can be exercised.
      def login_user(conn, user \\ insert(:user)) do
        conn =
          conn
          |> Plug.Test.init_test_session(%{})
          |> Authentication.log_in(user)

        conn
      end
    end
  end

  setup tags do
    # Check out a sandboxed DB connection for each test; share it with
    # processes spawned by the test when it is not running async.
    :ok = Sandbox.checkout(Gscraper.Repo)

    unless tags[:async] do
      Sandbox.mode(Gscraper.Repo, {:shared, self()})
    end

    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
| 26.655172
| 62
| 0.698577
|
9e6da132757356c13c53c87f1fa89c315f3b23f9
| 2,311
|
ex
|
Elixir
|
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/non_billable_winning_bid_status_row.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/non_billable_winning_bid_status_row.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/non_billable_winning_bid_status_row.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.NonBillableWinningBidStatusRow do
  @moduledoc """
  The number of winning bids with the specified dimension values for which the
  buyer was not billed, as described by the specified status.

  ## Attributes

  *   `bidCount` (*type:* `GoogleApi.AdExchangeBuyer.V2beta1.Model.MetricValue.t`, *default:* `nil`) - The number of bids with the specified status.
  *   `rowDimensions` (*type:* `GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions.t`, *default:* `nil`) - The values of all dimensions associated with metric values in this row.
  *   `status` (*type:* `String.t`, *default:* `nil`) - The status specifying why the winning bids were not billed.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :bidCount => GoogleApi.AdExchangeBuyer.V2beta1.Model.MetricValue.t(),
          :rowDimensions => GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions.t(),
          :status => String.t()
        }

  # Nested model fields decode into their own structs via the `as:` option.
  field(:bidCount, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.MetricValue)
  field(:rowDimensions, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.RowDimensions)
  field(:status)
end
defimpl Poison.Decoder,
  for: GoogleApi.AdExchangeBuyer.V2beta1.Model.NonBillableWinningBidStatusRow do
  # Delegates to the generated model's decode (provided by ModelBase).
  def decode(value, options) do
    GoogleApi.AdExchangeBuyer.V2beta1.Model.NonBillableWinningBidStatusRow.decode(value, options)
  end
end

defimpl Poison.Encoder,
  for: GoogleApi.AdExchangeBuyer.V2beta1.Model.NonBillableWinningBidStatusRow do
  # Generic struct encoding shared by all generated models.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.267857
| 181
| 0.754652
|
9e6dc7a51897f252177b483c40f96c7d4643ffd0
| 1,256
|
exs
|
Elixir
|
mix.exs
|
maartenvanvliet/receivex
|
4ec326e99047a8a2d21c36fdde6cb5335270a889
|
[
"MIT"
] | 18
|
2018-12-14T16:53:27.000Z
|
2022-03-16T09:30:36.000Z
|
mix.exs
|
maartenvanvliet/receivex
|
4ec326e99047a8a2d21c36fdde6cb5335270a889
|
[
"MIT"
] | 44
|
2019-04-16T04:32:17.000Z
|
2022-03-24T04:03:53.000Z
|
mix.exs
|
maartenvanvliet/receivex
|
4ec326e99047a8a2d21c36fdde6cb5335270a889
|
[
"MIT"
] | 3
|
2020-01-17T15:29:33.000Z
|
2021-12-12T19:50:33.000Z
|
defmodule Receivex.MixProject do
  use Mix.Project

  @version "0.8.2"
  # Single source of truth for the repository URL (was repeated inline).
  @source_url "https://github.com/maartenvanvliet/receivex"

  def project do
    [
      app: :receivex,
      version: @version,
      elixir: "~> 1.11",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      elixirc_paths: elixirc_paths(Mix.env()),
      name: "Receivex",
      description: "Handle incoming mail webhooks for common mail providers",
      package: package(),
      source_url: @source_url,
      docs: docs()
    ]
  end

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Maarten van Vliet"],
      licenses: ["MIT"],
      links: %{"GitHub" => @source_url},
      files: ~w(LICENSE README.md lib mix.exs)
    ]
  end

  # ExDoc configuration: the README doubles as the docs landing page.
  defp docs do
    [
      main: "readme",
      extras: ["README.md"]
    ]
  end

  # Test support helpers are only compiled in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:plug, "~> 1.11"},
      {:jason, ">= 0.0.0", only: [:dev, :test]},
      {:ex_doc, ">= 0.0.0", only: :dev},
      {:credo, "~> 1.5.0", only: [:dev, :test], runtime: false}
    ]
  end
end
| 25.632653
| 77
| 0.563694
|
9e6de028625522423d6b0d3def42a0310fabda9f
| 1,182
|
exs
|
Elixir
|
test/etherscan/api/stats_test.exs
|
L1h3r/etherscan
|
5e12bae407e1e8175cb58f72de042387383c5c50
|
[
"MIT"
] | 14
|
2017-10-20T03:10:44.000Z
|
2021-10-04T03:00:07.000Z
|
test/etherscan/api/stats_test.exs
|
L1h3r/etherscan
|
5e12bae407e1e8175cb58f72de042387383c5c50
|
[
"MIT"
] | 5
|
2017-10-20T03:36:37.000Z
|
2018-03-25T00:58:01.000Z
|
test/etherscan/api/stats_test.exs
|
L1h3r/etherscan
|
5e12bae407e1e8175cb58f72de042387383c5c50
|
[
"MIT"
] | 4
|
2018-03-25T00:49:18.000Z
|
2019-11-28T00:33:06.000Z
|
defmodule Etherscan.StatsTest do
  # Cassette-backed (ExVCR) tests for the supply/price stats endpoints;
  # expected values come from the Etherscan.Constants attributes.
  use ExUnit.Case
  use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
  use Etherscan.Constants

  setup_all do
    # Start the HTTP client once; recorded responses replay through Hackney.
    HTTPoison.start()
    :ok
  end

  describe "get_token_supply/1" do
    test "with valid token address" do
      use_cassette "get_token_supply" do
        response = Etherscan.get_token_supply(@test_token_address)
        assert {:ok, @test_token_supply} = response
      end
    end

    test "with invalid token address" do
      # No cassette here — the tuple argument is presumably rejected by
      # client-side validation before any HTTP call is made.
      response = Etherscan.get_token_supply({:token})
      assert {:error, :invalid_token_address} = response
    end
  end

  describe "get_eth_supply/0" do
    test "returns the current supply of eth" do
      use_cassette "get_eth_supply" do
        response = Etherscan.get_eth_supply()
        assert {:ok, @test_eth_supply} = response
      end
    end
  end

  describe "get_eth_price/0" do
    test "returns the current eth price" do
      use_cassette "get_eth_price" do
        response = Etherscan.get_eth_price()
        assert {:ok, price} = response
        # The price map carries both BTC- and USD-denominated values.
        assert %{"ethbtc" => @test_eth_btc_price} = price
        assert %{"ethusd" => @test_eth_usd_price} = price
      end
    end
  end
end
| 26.266667
| 66
| 0.669205
|
9e6e04f74c6dcc84ad10a11316f5e8b6ec554e27
| 377
|
ex
|
Elixir
|
test/support/authentication_test_helpers.ex
|
fikape/code-corps-api
|
c21674b0b2a19fa26945c94268db8894420ca181
|
[
"MIT"
] | 275
|
2015-06-23T00:20:51.000Z
|
2021-08-19T16:17:37.000Z
|
test/support/authentication_test_helpers.ex
|
fikape/code-corps-api
|
c21674b0b2a19fa26945c94268db8894420ca181
|
[
"MIT"
] | 1,304
|
2015-06-26T02:11:54.000Z
|
2019-12-12T21:08:00.000Z
|
test/support/authentication_test_helpers.ex
|
fikape/code-corps-api
|
c21674b0b2a19fa26945c94268db8894420ca181
|
[
"MIT"
] | 140
|
2016-01-01T18:19:47.000Z
|
2020-11-22T06:24:47.000Z
|
defmodule CodeCorps.AuthenticationTestHelpers do
  @moduledoc """
  Test helpers for authenticating a `Plug.Conn` with a Guardian bearer token.
  """
  use Phoenix.ConnTest
  import CodeCorps.Factories

  @doc """
  Authenticates `conn` as `user`, inserting a fresh factory user when none is
  given. Both `authenticate/1` and `authenticate/2` remain available; the
  default argument is evaluated per call, so each call gets its own user.
  """
  def authenticate(conn, user \\ insert(:user)) do
    {:ok, token, _claims} = user |> CodeCorps.Guardian.encode_and_sign()

    conn
    |> put_req_header("authorization", "Bearer #{token}")
  end
end
| 19.842105
| 66
| 0.69496
|
9e6e0827e06a276b245fb9318a97a24402fff332
| 1,693
|
exs
|
Elixir
|
config/dev.exs
|
juniornelson123/api_banking_teste
|
db263511d78fffe9e4081551817d006df2094316
|
[
"MIT"
] | null | null | null |
config/dev.exs
|
juniornelson123/api_banking_teste
|
db263511d78fffe9e4081551817d006df2094316
|
[
"MIT"
] | null | null | null |
config/dev.exs
|
juniornelson123/api_banking_teste
|
db263511d78fffe9e4081551817d006df2094316
|
[
"MIT"
] | null | null | null |
use Mix.Config

# Configure your database
config :api_banking, ApiBanking.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "api_banking",
  # "db" is presumably the docker-compose service name — confirm against the
  # project's compose file.
  hostname: "db",
  show_sensitive_data_on_connection_error: true,
  pool_size: 10

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :api_banking, ApiBankingWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: []

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
#     mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
#     https: [
#       port: 4001,
#       cipher_suite: :strong,
#       keyfile: "priv/cert/selfsigned_key.pem",
#       certfile: "priv/cert/selfsigned.pem"
#     ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 28.694915
| 68
| 0.728293
|
9e6e1a019b5c0ca7f062402d0f55685967da2fcb
| 2,851
|
ex
|
Elixir
|
lib/cforum_web/channels/users_channel.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
lib/cforum_web/channels/users_channel.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
lib/cforum_web/channels/users_channel.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
defmodule CforumWeb.UsersChannel do
  # Phoenix channel for user-related topics: a public "users:lobby" topic and
  # a private "users:<id>" topic per signed-in user. Incoming requests reply
  # with user-scoped data (settings, visible forums, unread counters).
  use CforumWeb, :channel
  use Appsignal.Instrumentation.Decorators

  alias Cforum.Accounts.User
  alias Cforum.Forums
  alias Cforum.ConfigManager
  alias Cforum.Messages.ReadMessages
  alias Cforum.Accounts.Notifications
  alias Cforum.Accounts.PrivMessages

  # The lobby is public: any socket may join.
  @decorate channel_action()
  def join("users:lobby", _payload, socket), do: {:ok, socket}

  # Private per-user topic: only the user whose id matches the topic suffix
  # may join; everybody else is rejected.
  @decorate channel_action()
  def join("users:" <> user_id, _payload, socket) do
    if authorized?(socket.assigns[:current_user], String.to_integer(user_id)),
      do: {:ok, socket},
      else: {:error, %{reason: "unauthorized"}}
  end

  # Replies with the currently signed-in user (as stored in socket assigns).
  @decorate channel_action()
  def handle_in("current_user", _payload, socket),
    do: {:reply, {:ok, socket.assigns[:current_user]}, socket}

  # Replies with the subset of the user's settings that are marked visible
  # to clients (see ConfigManager.visible_config_keys/0).
  @decorate channel_action()
  def handle_in("settings", _payload, socket) do
    settings = Cforum.ConfigManager.settings_map(nil, socket.assigns[:current_user])

    config =
      Enum.reduce(Cforum.ConfigManager.visible_config_keys(), %{}, fn key, opts ->
        Map.put(opts, key, ConfigManager.uconf(settings, key))
      end)

    {:reply, {:ok, config}, socket}
  end

  # Replies with the list of forums the current user is allowed to see.
  @decorate channel_action()
  def handle_in("visible_forums", _payload, socket) do
    forums = Forums.list_visible_forums(socket.assigns[:current_user])
    {:reply, {:ok, %{forums: forums}}, socket}
  end

  # Replies with the data needed to render unread-counters in the page
  # title: unread notifications, private mails and messages, plus the
  # pre-rendered info string from the layout view.
  @decorate channel_action()
  def handle_in("title_infos", _payload, socket) do
    forums = Forums.list_visible_forums(socket.assigns[:current_user])
    {_, num_messages} = ReadMessages.count_unread_messages(socket.assigns[:current_user], forums)

    assigns = %{
      unread_notifications: Notifications.count_notifications(socket.assigns[:current_user], true),
      unread_mails: PrivMessages.count_priv_messages(socket.assigns[:current_user], true),
      unread_messages: num_messages,
      current_user: socket.assigns[:current_user]
    }

    str = CforumWeb.LayoutView.numeric_infos(socket.assigns[:current_user], assigns)

    {:reply,
     {:ok,
      %{
        infos: str,
        unread_notifications: assigns[:unread_notifications],
        unread_mails: assigns[:unread_mails],
        unread_messages: assigns[:unread_messages]
      }}, socket}
  end

  # # Channels can be used in a request/response fashion
  # # by sending replies to requests from the client
  # def handle_in("ping", payload, socket) do
  #   {:reply, {:ok, payload}, socket}
  # end

  # # It is also common to receive messages from the client and
  # # broadcast to everyone in the current topic (users:lobby).
  # def handle_in("shout", payload, socket) do
  #   broadcast(socket, "shout", payload)
  #   {:noreply, socket}
  # end

  # Add authorization logic here as required.
  # A user is authorized for "users:<id>" only when it is their own id.
  defp authorized?(%User{user_id: uid}, id) when uid == id, do: true
  defp authorized?(_, _), do: false
end
| 33.151163
| 99
| 0.70221
|
9e6e32c14da05d60e9d529c90fe1c519874c22d5
| 1,259
|
ex
|
Elixir
|
apps/exred_ui/lib/exred_ui_web/channels/user_socket.ex
|
exredorg/exred
|
0ece8e6680747ba8f30b4413ede598a45495aa7c
|
[
"MIT"
] | null | null | null |
apps/exred_ui/lib/exred_ui_web/channels/user_socket.ex
|
exredorg/exred
|
0ece8e6680747ba8f30b4413ede598a45495aa7c
|
[
"MIT"
] | null | null | null |
apps/exred_ui/lib/exred_ui_web/channels/user_socket.ex
|
exredorg/exred
|
0ece8e6680747ba8f30b4413ede598a45495aa7c
|
[
"MIT"
] | null | null | null |
defmodule ExredUIWeb.UserSocket do
  # Socket entry point for the Exred UI: routes "cmd:*" and "event:*"
  # topics to their channels over the websocket transport.
  use Phoenix.Socket

  ## Channels
  channel "cmd:*", ExredUIWeb.CmdChannel
  channel "event:*", ExredUIWeb.EventChannel
  # channel "room:*", ExredUIWeb.RoomChannel

  ## Transports
  transport :websocket, Phoenix.Transports.WebSocket
  # transport :longpoll, Phoenix.Transports.LongPoll

  # Socket params are passed from the client and can
  # be used to verify and authenticate a user. After
  # verification, you can put default assigns into
  # the socket that will be set for all channels, ie
  #
  #     {:ok, assign(socket, :user_id, verified_user_id)}
  #
  # To deny connection, return `:error`.
  #
  # See `Phoenix.Token` documentation for examples in
  # performing token verification on connect.
  #
  # NOTE: currently every connection is accepted without authentication.
  def connect(_params, socket) do
    {:ok, socket}
  end

  # Socket id's are topics that allow you to identify all sockets for a given user:
  #
  #     def id(socket), do: "user_socket:#{socket.assigns.user_id}"
  #
  # Would allow you to broadcast a "disconnect" event and terminate
  # all active sockets and channels for a given user:
  #
  #     ExredUIWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
  #
  # Returning `nil` makes this socket anonymous.
  def id(_socket), do: nil
end
| 31.475
| 83
| 0.706116
|
9e6e34b01d592b89e3bf0740d552fdd7d31e3ac5
| 347
|
exs
|
Elixir
|
test/tortoise/package/disconnect_test.exs
|
lucaong/tortoise
|
fd2f83527937ba39b47f58eb8d392a1aa927e28f
|
[
"Apache-2.0"
] | 272
|
2018-04-22T22:47:35.000Z
|
2022-03-01T05:22:02.000Z
|
test/tortoise/package/disconnect_test.exs
|
lucaong/tortoise
|
fd2f83527937ba39b47f58eb8d392a1aa927e28f
|
[
"Apache-2.0"
] | 120
|
2018-04-22T20:42:04.000Z
|
2022-01-20T23:12:13.000Z
|
test/tortoise/package/disconnect_test.exs
|
lucaong/tortoise
|
fd2f83527937ba39b47f58eb8d392a1aa927e28f
|
[
"Apache-2.0"
] | 58
|
2018-04-24T06:28:36.000Z
|
2022-02-09T06:55:42.000Z
|
defmodule Tortoise.Package.DisconnectTest do
  use ExUnit.Case
  doctest Tortoise.Package.Disconnect

  alias Tortoise.Package

  test "encoding and decoding disconnect messages" do
    # A disconnect package must survive an encode/decode round trip unchanged.
    original = %Package.Disconnect{}
    round_tripped = Package.decode(Package.encode(original))
    assert ^original = round_tripped
  end
end
| 21.6875
| 53
| 0.67147
|
9e6e6b17dd765ce9654ffa3353b7f50e74bbe39b
| 1,824
|
exs
|
Elixir
|
mix.exs
|
dkataskin/mongodb
|
6ae204966e0842c852466431c9fcb06056e1ceb9
|
[
"Apache-2.0"
] | 286
|
2017-06-06T04:21:31.000Z
|
2021-09-11T16:37:59.000Z
|
mix.exs
|
dkataskin/mongodb
|
6ae204966e0842c852466431c9fcb06056e1ceb9
|
[
"Apache-2.0"
] | 202
|
2017-05-28T13:22:01.000Z
|
2020-05-15T20:15:51.000Z
|
mix.exs
|
dkataskin/mongodb
|
6ae204966e0842c852466431c9fcb06056e1ceb9
|
[
"Apache-2.0"
] | 120
|
2016-12-16T17:05:12.000Z
|
2020-05-15T16:20:17.000Z
|
defmodule Mongodb.Mixfile do
  # Mix project definition for the MongoDB driver.
  use Mix.Project

  # Package version, reused in the docs source_ref below.
  @version "1.0.0-beta.1"

  def project do
    [
      app: :mongodb,
      version: @version,
      elixirc_paths: elixirc_paths(Mix.env()),
      elixir: "~> 1.5",
      name: "Mongodb",
      deps: deps(),
      docs: docs(),
      description: description(),
      package: package(),
      dialyzer: dialyzer(),
      # Consolidation is skipped in :test — presumably so test-only
      # protocol implementations are picked up; confirm before changing.
      consolidate_protocols: Mix.env() != :test
    ]
  end

  # Compile test support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  def application do
    [
      mod: {Mongo.App, []},
      env: [],
      extra_applications: [:crypto, :logger, :ssl],
      # Registered process names owned by this application.
      registered: [
        Mongo.PBKDF2Cache,
        Mongo.Session.Supervisor,
        Mongo.Events,
        Mongo.IdServer,
        Mongo.SessionPool
      ]
    ]
  end

  defp deps do
    [
      {:db_connection, "~> 2.4.0"},
      {:decimal, "~> 2.0.0"},
      {:jason, "~> 1.2.2", only: :test},
      {:ex_doc, ">= 0.0.0", only: :dev},
      {:earmark, ">= 0.0.0", only: :dev},
      {:dialyxir, "~> 1.1.0", only: :dev, runtime: false}
    ]
  end

  # ExDoc configuration; the README doubles as the docs landing page.
  defp docs do
    [
      main: "readme",
      extras: ["README.md"],
      source_ref: "v#{@version}",
      source_url: "https://github.com/elixir-mongo/mongodb"
    ]
  end

  defp description do
    "MongoDB driver for Elixir"
  end

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Eric Meadows-Jönsson", "Justin Wood"],
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => "https://github.com/elixir-mongo/mongodb"}
    ]
  end

  # Configures dialyzer.
  #
  # The `dialyzer.plt` file takes a long time to generate first time round, so we store it in a
  # custom location where it can then be easily cached during CI.
  defp dialyzer do
    [
      plt_file: {:no_warn, "priv/plts/dialyzer.plt"}
    ]
  end
end
| 22.243902
| 95
| 0.558662
|
9e6e76a11c762596421711d68d16706c6560bbc1
| 1,492
|
ex
|
Elixir
|
clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/empty.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/empty.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/area120_tables/lib/google_api/area120_tables/v1alpha1/model/empty.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Area120Tables.V1alpha1.Model.Empty do
  @moduledoc """
  A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }

  ## Attributes

  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{}
end

# JSON decoding delegates to the decode/2 generated by ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Area120Tables.V1alpha1.Model.Empty do
  def decode(value, options) do
    GoogleApi.Area120Tables.V1alpha1.Model.Empty.decode(value, options)
  end
end

# JSON encoding is shared across all generated models via ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Area120Tables.V1alpha1.Model.Empty do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.52381
| 282
| 0.763405
|
9e6e8784708569563b83b7392689e54f2fcf563b
| 1,139
|
exs
|
Elixir
|
clients/o_auth2/config/config.exs
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/o_auth2/config/config.exs
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | null | null | null |
clients/o_auth2/config/config.exs
|
hauptbenutzer/elixir-google-api
|
7b9e3a114a49cfc774a7afd03e299a0d43e4e6b2
|
[
"Apache-2.0"
] | 1
|
2020-11-10T16:58:27.000Z
|
2020-11-10T16:58:27.000Z
|
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :google_o_auth2_api, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:google_o_auth2_api, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.741935
| 73
| 0.755926
|
9e6eb56d5d580e22859a3796353b6dd476869d36
| 453
|
ex
|
Elixir
|
lib/ex_platform_web/plugs/i18n.ex
|
joseph-lozano/ex_platform
|
55208dfc50e1bd1ed232141798579fdcb6fc54af
|
[
"MIT"
] | 129
|
2021-05-30T10:10:59.000Z
|
2022-03-27T14:42:48.000Z
|
lib/ex_platform_web/plugs/i18n.ex
|
joseph-lozano/ex_platform
|
55208dfc50e1bd1ed232141798579fdcb6fc54af
|
[
"MIT"
] | 48
|
2021-05-30T21:34:03.000Z
|
2022-02-21T14:20:04.000Z
|
lib/ex_platform_web/plugs/i18n.ex
|
joseph-lozano/ex_platform
|
55208dfc50e1bd1ed232141798579fdcb6fc54af
|
[
"MIT"
] | 12
|
2021-05-30T22:03:49.000Z
|
2022-02-08T19:59:16.000Z
|
defmodule ExPlatformWeb.Plugs.I18n do
  @moduledoc """
  I18n plug to set the language on an HTTP request.
  """
  import Plug.Conn

  # Plug contract: options are passed through untouched.
  def init(opts), do: opts

  # When Cldr has already negotiated a locale for this request (stored in
  # conn.private), propagate it to Gettext and Cldr and remember it in the
  # session.
  def call(%Plug.Conn{private: %{cldr_locale: %Cldr.LanguageTag{language: lang}}} = conn, _opts) do
    Gettext.put_locale(lang)
    ExPlatform.Cldr.put_locale(lang)
    put_session(conn, :locale, lang)
  end

  # No negotiated locale: fall back to English.
  def call(conn, _opts), do: put_session(conn, :locale, "en")
end
| 26.647059
| 101
| 0.693157
|
9e6eb5af7541a2e852faa1ee2a67d5a07205842a
| 14,534
|
ex
|
Elixir
|
lib/phoenix_live_view/test/live_view_test.ex
|
shamanime/phoenix_live_view
|
1e12305ca065f99543fc66db6504cb8c8f30c9fc
|
[
"MIT"
] | null | null | null |
lib/phoenix_live_view/test/live_view_test.ex
|
shamanime/phoenix_live_view
|
1e12305ca065f99543fc66db6504cb8c8f30c9fc
|
[
"MIT"
] | null | null | null |
lib/phoenix_live_view/test/live_view_test.ex
|
shamanime/phoenix_live_view
|
1e12305ca065f99543fc66db6504cb8c8f30c9fc
|
[
"MIT"
] | null | null | null |
defmodule Phoenix.LiveViewTest do
  @moduledoc """
  Conveniences for testing Phoenix live views.

  In LiveView tests, we interact with views via process
  communication in substitution of a browser. Like a browser,
  our test process receives messages about the rendered updates
  from the view which can be asserted against to test the
  life-cycle and behavior of live views and their children.

  ## LiveView Testing

  The life-cycle of a live view as outlined in the `Phoenix.LiveView`
  docs details how a view starts as a stateless HTML render in a disconnected
  socket state. Once the browser receives the HTML, it connects to the
  server and a new LiveView process is started, remounted in a connected
  socket state, and the view continues statefully. The LiveView test functions
  support testing both disconnected and connected mounts separately, for example:

      use Phoenix.ConnTest
      @endpoint MyEndpoint

      test "disconnected and connected mount", %{conn: conn} do
        conn = get(conn, "/my-path")
        assert html_response(conn, 200) =~ "<h1>My Disconnected View</h1>"

        {:ok, view, html} = live(conn)
      end

      test "redirected mount", %{conn: conn} do
        assert {:error, %{redirect: %{to: "/somewhere"}}} = live(conn, "my-path")
      end

  Here, we start by using the familiar `Phoenix.ConnTest` function, `get/2` to
  test the regular HTTP get request which invokes mount with a disconnect socket.
  Next, `live/1` is called with our sent connection to mount the view in a connected
  state, which starts our stateful LiveView process.

  In general, it's often more convenient to test the mounting of a view
  in a single step, provided you don't need the result of the stateless HTTP
  render. This is done with a single call to `live/2`, which performs the
  `get` step for us:

      test "connected mount", %{conn: conn} do
        {:ok, view, html} = live(conn, "/my-path")
        assert html =~ "<h1>My Connected View</h1>"
      end

  ## Testing Events

  The browser can send a variety of events to a live view via `phx-` bindings,
  which are sent to the `handle_event/3` callback. To test events sent by the
  browser and assert on the rendered side-effect of the event, use the
  `render_*` functions:

    * `render_click/3` - sends a phx-click event and value and
      returns the rendered result of the `handle_event/3` callback.

    * `render_submit/3` - sends a form phx-submit event and value and
      returns the rendered result of the `handle_event/3` callback.

    * `render_change/3` - sends a form phx-change event and value and
      returns the rendered result of the `handle_event/3` callback.

    * `render_keydown/3` - sends a form phx-keydown event and value and
      returns the rendered result of the `handle_event/3` callback.

    * `render_keyup/3` - sends a form phx-keyup event and value and
      returns the rendered result of the `handle_event/3` callback.

  For example:

      {:ok, view, _html} = live(conn, "/thermo")

      assert render_click(view, :inc) =~ "The temperature is: 31℉"
      assert render_click(view, :set_temp, 35) =~ "The temperature is: 35℉"
      assert render_submit(view, :save, %{deg: 30}) =~ "The temperature is: 30℉"
      assert render_change(view, :validate, %{deg: -30}) =~ "invalid temperature"
      assert render_keydown(view, :key, :ArrowUp) =~ "The temperature is: 31℉"
      assert render_keydown(view, :key, :ArrowDown) =~ "The temperature is: 30℉"

  ## Testing regular messages

  Live views are `GenServer`'s under the hood, and can send and receive messages
  just like any other server. To test the side effects of sending or receiving
  messages, simply message the view and use the `render` function to test the
  result:

      send(view.pid, {:set_temp, 50})
      assert render(view) =~ "The temperature is: 50℉"

  ## Testing shutdowns and stopping views

  Like all processes, views can shutdown normally or abnormally, and this
  can be tested with `assert_remove/3`. For example:

      send(view.pid, :boom)
      assert_remove view, {:shutdown, %RuntimeError{}}

      stop(view)
      assert_remove view, {:shutdown, :stop}

  Nested views can be removed by a parent at any time based on conditional
  rendering. In these cases, the removal of the view is detected by the
  browser, or our test client, and the child is shutdown gracefully. This
  can be tested in the same way as above:

      assert render(parent) =~ "some content in child"

      [child] = children(parent)
      send(parent.pid, :msg_that_removes_child)

      assert_remove child, _
      refute render(parent) =~ "some content in child"
  """

  alias Phoenix.LiveViewTest.{View, ClientProxy, DOM}

  @doc """
  Spawns a connected LiveView process.

  Accepts either a previously rendered `%Plug.Conn{}` or
  an unsent `%Plug.Conn{}`. The latter case is a convenience
  to perform the `get/2` and connected mount in a single
  step.

  ## Options

    * `:connect_params` - the map of params available in connected mount

  ## Examples

      {:ok, view, html} = live(conn, "/path")

      assert view.module = MyLive

      assert html =~ "the count is 3"

      assert {:error, %{redirect: %{to: "/somewhere"}}} = live(conn, "/path")

      {:ok, view, html} =
        conn
        |> get("/path")
        |> live()
  """
  defmacro live(conn, path_or_opts \\ []) do
    # A keyword list means "mount the already-fetched conn at its own
    # request_path"; a binary means "GET that path first, then mount".
    quote bind_quoted: binding(), unquote: true, generated: true do
      case path_or_opts do
        opts when is_list(opts) ->
          unquote(__MODULE__).__live__(conn, conn.request_path, opts, :noop)

        path when is_binary(path) ->
          unquote(__MODULE__).__live__(conn, path, [], fn conn, path -> get(conn, path) end)
      end
    end
  end

  @doc """
  Spawns a connected LiveView process for `path` with `opts`. See `live/2`.
  """
  defmacro live(conn, path, opts) do
    quote bind_quoted: binding(), unquote: true do
      unquote(__MODULE__).__live__(conn, path, opts, fn conn, path -> get(conn, path) end)
    end
  end

  @doc false
  def __live__(%Plug.Conn{state: state, status: status} = conn, path, opts, get_func) do
    case {state, status, get_func} do
      # Response already sent successfully: connect from the rendered HTML.
      {:sent, 200, _} ->
        connect_from_static_token(conn, path, opts)

      # Response already sent as a redirect: surface the redirect target.
      {:sent, 302, _} ->
        {:error, %{redirect: %{to: hd(Plug.Conn.get_resp_header(conn, "location"))}}}

      # No response yet but a GET function was provided: fetch first.
      {_, _, get} when is_function(get) ->
        connect_from_static_token(get.(conn, path), path, opts)

      # live/1 was called on a conn that was never sent: instruct the caller.
      {_, _, :noop} ->
        raise ArgumentError, """
        a request has not yet been sent.

        live/1 must use a connection with a sent response. Either call get/2
        prior to live/1, or use live/2 while providing a path to have a get
        request issues for you. For example issuing a get yourself:

            {:ok, view, _html} =
              conn
              |> get("#{path}")
              |> live()

        or performing the GET and live connect in a single step:

            {:ok, view, _html} = live(conn, "#{path}")
        """
    end
  end

  # A redirect response from the GET: return the redirect target.
  defp connect_from_static_token(%Plug.Conn{status: redir} = conn, _path, _opts)
       when redir in [301, 302] do
    {:error, %{redirect: %{to: hd(Plug.Conn.get_resp_header(conn, "location"))}}}
  end

  # A successful static render: extract the LiveView session token from the
  # HTML and establish the stateful connection.
  defp connect_from_static_token(%Plug.Conn{status: 200} = conn, path, opts) do
    html =
      conn
      |> Phoenix.ConnTest.html_response(200)
      |> IO.iodata_to_binary()

    case DOM.find_sessions(html) do
      [{session_token, nil, id} | _] -> do_connect(conn, path, html, session_token, id, opts)
      [] -> {:error, :nosession}
    end
  end

  # Builds the test View struct and starts the client proxy, then waits for
  # the proxy to report either a mounted view or a redirect/error.
  defp do_connect(%Plug.Conn{} = conn, path, html, session_token, id, opts) do
    live_path = live_path(conn, path)
    child_statics = DOM.find_static_views(html)
    timeout = opts[:timeout] || 5000

    %View{ref: ref, topic: topic} =
      view =
      View.build(
        dom_id: id,
        mount_path: live_path,
        connect_params: opts[:connect_params] || %{},
        session_token: session_token,
        module: conn.assigns.live_view_module,
        router: Phoenix.Controller.router_module(conn),
        endpoint: Phoenix.Controller.endpoint_module(conn),
        child_statics: child_statics
      )

    case ClientProxy.start_link(caller: {ref, self()}, view: view, timeout: timeout) do
      {:ok, proxy_pid} ->
        receive do
          {^ref, {:mounted, view_pid, html}} ->
            # An immediate redirect may already be queued; prefer it over
            # returning the mounted view.
            receive do
              {^ref, {:redirect, _topic, opts}} ->
                %{to: to} = opts
                ensure_down!(view_pid)
                # NOTE(review): returns %{redirect: to} here while other
                # clauses return %{redirect: %{to: to}} — confirm the
                # intended error shape.
                {:error, %{redirect: to}}
            after
              0 ->
                view = %View{view | pid: view_pid, proxy: proxy_pid, topic: topic}
                {:ok, view, html}
            end
        end

      :ignore ->
        receive do
          {^ref, {%_{} = exception, [_ | _] = stack}} -> reraise(exception, stack)
          {^ref, %{external_live_redirect: opts}} -> {:error, %{redirect: opts}}
          {^ref, reason} -> {:error, reason}
        end
    end
  end

  # Re-attaches the original query string / body params to the mount path.
  defp live_path(%Plug.Conn{} = conn, path) do
    if conn.body_params != %{} or conn.query_string != "" do
      query_params = Plug.Conn.Query.decode(conn.query_string, conn.body_params)
      path <> "?" <> Plug.Conn.Query.encode(query_params)
    else
      path
    end
  end

  @doc """
  Sends a click event to the view and returns the rendered result.

  ## Examples

      {:ok, view, html} = live(conn, "/thermo")
      assert html =~ "The temperature is: 30℉"
      assert render_click(view, :inc) =~ "The temperature is: 31℉"
  """
  def render_click(view, event, value \\ %{}) do
    render_event(view, :click, event, value)
  end

  @doc """
  Sends a form submit event to the view and returns the rendered result.

  ## Examples

      {:ok, view, html} = live(conn, "/thermo")
      assert html =~ "The temp is: 30℉"
      assert render_submit(view, :refresh, %{deg: 32}) =~ "The temp is: 32℉"
  """
  def render_submit(view, event, value \\ %{}) do
    encoded_form = Plug.Conn.Query.encode(value)
    render_event(view, :form, event, encoded_form)
  end

  @doc """
  Sends a form change event to the view and returns the rendered result.

  ## Examples

      {:ok, view, html} = live(conn, "/thermo")
      assert html =~ "The temp is: 30℉"
      assert render_change(view, :validate, %{deg: 123}) =~ "123 exceeds limits"
  """
  def render_change(view, event, value \\ %{}) do
    encoded_form = Plug.Conn.Query.encode(value)
    render_event(view, :form, event, encoded_form)
  end

  @doc """
  Sends a keyup event to the view and returns the rendered result.

  ## Examples

      {:ok, view, html} = live(conn, "/thermo")
      assert html =~ "The temp is: 30℉"
      assert render_keyup(view, :inc, :ArrowUp) =~ "The temp is: 32℉"
  """
  def render_keyup(view, event, key_code) do
    render_event(view, :keyup, event, key_code)
  end

  @doc """
  Sends a keydown event to the view and returns the rendered result.

  ## Examples

      {:ok, view, html} = live(conn, "/thermo")
      assert html =~ "The temp is: 30℉"
      assert render_keydown(view, :inc, :ArrowUp) =~ "The temp is: 32℉"
  """
  def render_keydown(view, event, key_code) do
    render_event(view, :keydown, event, key_code)
  end

  @doc """
  Sends a blur event to the view and returns the rendered result.

  ## Examples

      {:ok, view, html} = live(conn, "/thermo")
      assert html =~ "The temp is: 30℉"
      assert render_blur(view, :inactive) =~ "Tap to wake"
  """
  def render_blur(view, event, value \\ %{}) do
    render_event(view, :blur, event, value)
  end

  @doc """
  Sends a focus event to the view and returns the rendered result.

  ## Examples

      {:ok, view, html} = live(conn, "/thermo")
      assert html =~ "The temp is: 30℉"
      assert render_blur(view, :inactive) =~ "Tap to wake"
      assert render_focus(view, :active) =~ "Waking up..."
  """
  def render_focus(view, event, value \\ %{}) do
    render_event(view, :focus, event, value)
  end

  # Forwards the event to the client proxy, which relays it to the view and
  # returns the newly rendered HTML.
  defp render_event(view, type, event, value) do
    case GenServer.call(view.proxy, {:render_event, view, type, event, value}) do
      {:ok, html} -> html
      {:error, reason} -> {:error, reason}
    end
  end

  @doc """
  Simulates a live_link click to the view and returns the rendered result.
  """
  def render_live_link(view, path) do
    case GenServer.call(view.proxy, {:render_live_link, view, path}) do
      {:ok, html} -> html
      {:error, reason} -> {:error, reason}
    end
  end

  @doc """
  Returns the current list of children of the parent live view.

  Children are return in the order they appear in the rendered HTML.

  ## Examples

      {:ok, view, _html} = live(conn, "/thermo")
      assert [clock_view] = children(view)
      assert render_click(clock_view, :snooze) =~ "snoozing"
  """
  def children(%View{} = parent) do
    GenServer.call(parent.proxy, {:children, parent})
  end

  @doc """
  Returns the string of HTML of the rendered view.
  """
  def render(%View{} = view) do
    {:ok, html} = GenServer.call(view.proxy, {:render_tree, view})
    html
  end

  @doc """
  Asserts a redirect was performed after execution of the provided
  function.

  ## Examples

      assert_redirect view, "/path", fn ->
        assert render_click(view, :event_that_triggers_redirect)
      end
  """
  defmacro assert_redirect(view, to, func) do
    quote do
      %View{ref: ref, proxy: proxy_pid, topic: topic} = unquote(view)
      unquote(func).()
      assert_receive {^ref, {:redirect, ^topic, %{to: unquote(to)}}}
    end
  end

  @doc """
  Asserts a view was removed by a parent or shutdown itself.

  ## Examples

      [child1, child2] = children(parent_view)
      send(parent_view.pid, :msg_that_removes_child)

      assert_remove child1, _
      assert_remove child2, {:shutdown, :removed}
  """
  defmacro assert_remove(view, reason, timeout \\ 100) do
    quote do
      %Phoenix.LiveViewTest.View{ref: ref, topic: topic} = unquote(view)
      assert_receive {^ref, {:removed, ^topic, unquote(reason)}}, unquote(timeout)
    end
  end

  @doc """
  Stops a LiveView process.

  ## Examples

      stop(view)
      assert_remove view, {:shutdown, :stop}
  """
  def stop(%View{} = view) do
    GenServer.call(view.proxy, {:stop, view})
  end

  # Blocks until `pid` exits (or `timeout` elapses) so tests do not race
  # against a view that is still shutting down.
  defp ensure_down!(pid, timeout \\ 100) do
    ref = Process.monitor(pid)

    receive do
      {:DOWN, ^ref, :process, ^pid, reason} -> {:ok, reason}
    after
      timeout -> {:error, :timeout}
    end
  end

  @doc false
  def encode!(msg), do: msg
end
| 31.188841
| 108
| 0.635544
|
9e6ec21a04e3138e0b3dc61ccb44b9d6803facc3
| 1,807
|
exs
|
Elixir
|
test/game/command/drop_test.exs
|
stevegrossi/ex_venture
|
e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa
|
[
"MIT"
] | 1
|
2019-02-10T10:22:39.000Z
|
2019-02-10T10:22:39.000Z
|
test/game/command/drop_test.exs
|
stevegrossi/ex_venture
|
e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa
|
[
"MIT"
] | null | null | null |
test/game/command/drop_test.exs
|
stevegrossi/ex_venture
|
e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa
|
[
"MIT"
] | null | null | null |
defmodule Game.Command.DropTest do
  use Data.ModelCase

  @socket Test.Networking.Socket
  @room Test.Game.Room

  alias Game.Command.Drop

  setup do
    start_and_clear_items()
    insert_item(%{id: 1, name: "Sword", keywords: []})

    @socket.clear_messages()

    state = session_state(%{user: base_user()})
    %{state: state}
  end

  test "drop an item in a room", %{state: state} do
    @room.clear_drops()
    state = %{state | save: %{state.save | room_id: 1, items: [item_instance(1)]}}

    {:update, state} = Drop.run({"sword"}, state)

    # The item leaves the inventory…
    assert state.save.items == []

    # …the player is told about it…
    [{_socket, echo}] = @socket.get_echos()
    assert echo =~ ~r(You dropped)

    # …and the room receives the dropped item.
    assert [{1, {:player, _}, %{id: 1}}] = @room.get_drops()
  end

  test "drop currency in a room", %{state: state} do
    @room.clear_drop_currencies()
    state = %{state | save: %{state.save | room_id: 1, currency: 101}}

    {:update, state} = Drop.run({"100 gold"}, state)

    assert state.save.currency == 1

    [{_socket, echo}] = @socket.get_echos()
    assert echo =~ ~r(You dropped)

    assert [{1, {:player, _}, 100}] = @room.get_drop_currencies()
  end

  test "drop currency in a room - not enough to do so", %{state: state} do
    @room.clear_drop_currencies()
    state = %{state | save: %{state.save | room_id: 1, currency: 101}}

    :ok = Drop.run({"110 gold"}, state)

    [{_socket, echo}] = @socket.get_echos()
    assert echo =~ ~r(You do not have enough)
  end

  test "item not found in your inventory", %{state: state} do
    state = %{state | save: %{state.save | room_id: 1, items: [item_instance(2)]}}

    :ok = Drop.run({"sword"}, state)

    [{_socket, echo}] = @socket.get_echos()
    assert echo =~ ~r(Could not find)
  end
end
| 26.573529
| 82
| 0.620365
|
9e6f0a8245ff2fe1829a802404af894f369690f1
| 1,954
|
exs
|
Elixir
|
config/dev.exs
|
kenforthewin/mentat
|
417ce989e13f2f08990b872027ce8dcb3a1e6e99
|
[
"MIT"
] | 88
|
2018-06-17T17:36:56.000Z
|
2021-11-20T20:29:27.000Z
|
config/dev.exs
|
kenforthewin/scalar
|
417ce989e13f2f08990b872027ce8dcb3a1e6e99
|
[
"MIT"
] | 47
|
2018-05-12T00:12:37.000Z
|
2018-06-16T19:52:52.000Z
|
config/dev.exs
|
kenforthewin/scalar
|
417ce989e13f2f08990b872027ce8dcb3a1e6e99
|
[
"MIT"
] | 6
|
2018-06-17T17:37:11.000Z
|
2020-04-12T04:05:49.000Z
|
use Mix.Config

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :app, AppWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  # Webpack runs in watch mode from the assets directory so frontend
  # sources are recompiled on change.
  watchers: [node: ["node_modules/webpack/bin/webpack.js", "--watch", "--progress", "--config", "webpack.config.js",
                    cd: Path.expand("../assets", __DIR__)]]

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
#     openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
#     https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static and templates for browser reloading.
config :app, AppWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{lib/app_web/views/.*(ex)$},
      ~r{lib/app_web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Configure your database.
# The password is read from the environment at compile time; "db" as the
# hostname suggests a docker-compose service — confirm for your setup.
config :app, App.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: System.get_env("POSTGRES_PASSWORD"),
  database: "app_dev",
  hostname: "db",
  pool_size: 10
| 33.118644
| 170
| 0.697544
|
9e6f1413cc96f495dd2f054f928b8879c4e08b66
| 1,827
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v21/model/accounts_custom_batch_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v21/model/accounts_custom_batch_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/content/lib/google_api/content/v21/model/accounts_custom_batch_response.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.AccountsCustomBatchResponse do
  @moduledoc """



  ## Attributes

  *   `entries` (*type:* `list(GoogleApi.Content.V21.Model.AccountsCustomBatchResponseEntry.t)`, *default:* `nil`) - The result of the execution of the batch requests.
  *   `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "content#accountsCustomBatchResponse".
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :entries => list(GoogleApi.Content.V21.Model.AccountsCustomBatchResponseEntry.t()),
          :kind => String.t()
        }

  field(:entries, as: GoogleApi.Content.V21.Model.AccountsCustomBatchResponseEntry, type: :list)
  field(:kind)
end

# JSON decoding delegates to the decode/2 generated by ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.AccountsCustomBatchResponse do
  def decode(value, options) do
    GoogleApi.Content.V21.Model.AccountsCustomBatchResponse.decode(value, options)
  end
end

# JSON encoding is shared across all generated models via ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.AccountsCustomBatchResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.54
| 167
| 0.746032
|
9e6f19a2b53aa69e77fbbed04184c4a7db30c43e
| 21,905
|
ex
|
Elixir
|
lib/mail_slurp_api/api/inbox_controller.ex
|
sumup-bank/mailslurp-client-elixir
|
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
|
[
"MIT"
] | null | null | null |
lib/mail_slurp_api/api/inbox_controller.ex
|
sumup-bank/mailslurp-client-elixir
|
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
|
[
"MIT"
] | null | null | null |
lib/mail_slurp_api/api/inbox_controller.ex
|
sumup-bank/mailslurp-client-elixir
|
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
|
[
"MIT"
] | null | null | null |
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule MailSlurpAPI.Api.InboxController do
@moduledoc """
API calls for all endpoints tagged `InboxController`.
"""
alias MailSlurpAPI.Connection
import MailSlurpAPI.RequestBuilder
@doc """
Create an Inbox (email address)
Create a new inbox and with a randomized email address to send and receive from. Pass emailAddress parameter if you wish to use a specific email address. Creating an inbox is required before sending or receiving emails. If writing tests it is recommended that you create a new inbox during each test method so that it is unique and empty.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :description (String.t): Optional description of the inbox for labelling purposes. Is shown in the dashboard and can be used with
- :email_address (String.t): A custom email address to use with the inbox. Defaults to null. When null MailSlurp will assign a random email address to the inbox such as `123@mailslurp.com`. If you use the `useDomainPool` option when the email address is null it will generate an email address with a more varied domain ending such as `123@mailslurp.info` or `123@mailslurp.biz`. When a custom email address is provided the address is split into a domain and the domain is queried against your user. If you have created the domain in the MailSlurp dashboard and verified it you can use any email address that ends with the domain. Send an email to this address and the inbox will receive and store it for you. To retrieve the email use the Inbox and Email Controller endpoints with the inbox ID.
- :expires_at (DateTime.t): Optional inbox expiration date. If null then this inbox is permanent and the emails in it won't be deleted. If an expiration date is provided or is required by your plan the inbox will be closed when the expiration time is reached. Expired inboxes still contain their emails but can no longer send or receive emails. An ExpiredInboxRecord is created when an inbox and the email address and inbox ID are recorded. The expiresAt property is a timestamp string in ISO DateTime Format yyyy-MM-dd'T'HH:mm:ss.SSSXXX.
- :expires_in (integer()): Number of milliseconds that inbox should exist for
- :favourite (boolean()): Is the inbox favorited. Favouriting inboxes is typically done in the dashboard for quick access or filtering
- :name (String.t): Optional name of the inbox. Displayed in the dashboard for easier search
- :tags ([String.t]): Tags that inbox has been tagged with. Tags can be added to inboxes to group different inboxes within an account. You can also search for inboxes by tag in the dashboard UI.
- :use_domain_pool (boolean()): Use the MailSlurp domain name pool with this inbox when creating the email address. Defaults to null. If enabled the inbox will be an email address with a domain randomly chosen from a list of the MailSlurp domains. This is useful when the default `@mailslurp.com` email addresses used with inboxes are blocked or considered spam by a provider or receiving service. When domain pool is enabled an email address will be generated ending in `@mailslurp.{world,info,xyz,...}` . This means a TLD is randomly selecting from a list of `.biz`, `.info`, `.xyz` etc to add variance to the generated email addresses. When null or false MailSlurp uses the default behavior of `@mailslurp.com` or custom email address provided by the emailAddress field.
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec create_inbox(Tesla.Env.client, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def create_inbox(connection, opts \\ []) do
optional_params = %{
:"description" => :query,
:"emailAddress" => :query,
:"expiresAt" => :query,
:"expiresIn" => :query,
:"favourite" => :query,
:"name" => :query,
:"tags" => :query,
:"useDomainPool" => :query
}
%{}
|> method(:post)
|> url("/inboxes")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, %MailSlurpAPI.Model.Inbox{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Create an inbox with additional options
Additional endpoint that allows inbox creation with request body options. Can be more flexible that other methods for some clients.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- create_inbox_dto (CreateInboxDto): createInboxDto
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec create_inbox_with_options(Tesla.Env.client, MailSlurpAPI.Model.CreateInboxDto.t, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def create_inbox_with_options(connection, create_inbox_dto, _opts \\ []) do
%{}
|> method(:post)
|> url("/inboxes/withOptions")
|> add_param(:body, :body, create_inbox_dto)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, %MailSlurpAPI.Model.Inbox{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Delete all inboxes
Permanently delete all inboxes and associated email addresses. This will also delete all emails within the inboxes. Be careful as inboxes cannot be recovered once deleted. Note: deleting inboxes will not impact your usage limits. Monthly inbox creation limits are based on how many inboxes were created in the last 30 days, not how many inboxes you currently have.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec delete_all_inboxes(Tesla.Env.client, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def delete_all_inboxes(connection, _opts \\ []) do
%{}
|> method(:delete)
|> url("/inboxes")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 204, false},
{ 401, false},
{ 403, false}
])
end
@doc """
Delete inbox
Permanently delete an inbox and associated email address as well as all emails within the given inbox. This action cannot be undone. Note: deleting an inbox will not affect your account usage. Monthly inbox usage is based on how many inboxes you create within 30 days, not how many exist at time of request.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec delete_inbox(Tesla.Env.client, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def delete_inbox(connection, inbox_id, _opts \\ []) do
%{}
|> method(:delete)
|> url("/inboxes/#{inbox_id}")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 204, false},
{ 401, false},
{ 403, false}
])
end
@doc """
List All Inboxes Paginated
List inboxes in paginated form. The results are available on the `content` property of the returned object. This method allows for page index (zero based), page size (how many results to return(, and a sort direction (based on createdAt time). You Can also filter by whether an inbox is favorited or use email address pattern. This method is the recommended way to query inboxes. The alternative `getInboxes` method returns a full list of inboxes but is limited to 100 results.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :favourite (boolean()): Optionally filter results for favourites only
- :page (integer()): Optional page index in inbox list pagination
- :search (String.t): Optionally filter by search words partial matching ID, tags, name, and email address
- :size (integer()): Optional page size in inbox list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
- :tag (String.t): Optionally filter by tags
## Returns
{:ok, %MailSlurpAPI.Model.PageInboxProjection{}} on success
{:error, info} on failure
"""
@spec get_all_inboxes(Tesla.Env.client, keyword()) :: {:ok, MailSlurpAPI.Model.PageInboxProjection.t} | {:error, Tesla.Env.t}
def get_all_inboxes(connection, opts \\ []) do
optional_params = %{
:"favourite" => :query,
:"page" => :query,
:"search" => :query,
:"size" => :query,
:"sort" => :query,
:"tag" => :query
}
%{}
|> method(:get)
|> url("/inboxes/paginated")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageInboxProjection{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get emails in an Inbox. This method is not idempotent as it allows retries and waits if you want certain conditions to be met before returning. For simple listing and sorting of known emails use the email controller instead.
List emails that an inbox has received. Only emails that are sent to the inbox's email address will appear in the inbox. It may take several seconds for any email you send to an inbox's email address to appear in the inbox. To make this endpoint wait for a minimum number of emails use the `minCount` parameter. The server will retry the inbox database until the `minCount` is satisfied or the `retryTimeout` is reached
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): Id of inbox that emails belongs to
- opts (KeywordList): [optional] Optional parameters
- :limit (integer()): Limit the result set, ordered by received date time sort direction. Maximum 100. For more listing options see the email controller
- :min_count (integer()): Minimum acceptable email count. Will cause request to hang (and retry) until minCount is satisfied or retryTimeout is reached.
- :retry_timeout (integer()): Maximum milliseconds to spend retrying inbox database until minCount emails are returned
- :since (DateTime.t): Exclude emails received before this ISO 8601 date time
- :size (integer()): Alias for limit. Assessed first before assessing any passed limit.
- :sort (String.t): Sort the results by received date and direction ASC or DESC
## Returns
{:ok, [%EmailPreview{}, ...]} on success
{:error, info} on failure
"""
@spec get_emails(Tesla.Env.client, String.t, keyword()) :: {:ok, list(MailSlurpAPI.Model.EmailPreview.t)} | {:error, Tesla.Env.t}
def get_emails(connection, inbox_id, opts \\ []) do
optional_params = %{
:"limit" => :query,
:"minCount" => :query,
:"retryTimeout" => :query,
:"since" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}/emails")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, [%MailSlurpAPI.Model.EmailPreview{}]},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get Inbox
Returns an inbox's properties, including its email address and ID.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec get_inbox(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def get_inbox(connection, inbox_id, _opts \\ []) do
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.Inbox{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get inbox emails paginated
Get a paginated list of emails in an inbox. Does not hold connections open.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): Id of inbox that emails belongs to
- opts (KeywordList): [optional] Optional parameters
- :page (integer()): Optional page index in inbox emails list pagination
- :size (integer()): Optional page size in inbox emails list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, %MailSlurpAPI.Model.PageEmailPreview{}} on success
{:error, info} on failure
"""
@spec get_inbox_emails_paginated(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.PageEmailPreview.t} | {:error, Tesla.Env.t}
def get_inbox_emails_paginated(connection, inbox_id, opts \\ []) do
optional_params = %{
:"page" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}/emails/paginated")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageEmailPreview{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get Inbox Sent Emails
Returns an inbox's sent email receipts. Call individual sent email endpoints for more details. Note for privacy reasons the full body of sent emails is never stored. An MD5 hash hex is available for comparison instead.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- opts (KeywordList): [optional] Optional parameters
- :page (integer()): Optional page index in inbox sent email list pagination
- :size (integer()): Optional page size in inbox sent email list pagination
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, %MailSlurpAPI.Model.PageSentEmailProjection{}} on success
{:error, info} on failure
"""
@spec get_inbox_sent_emails(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.PageSentEmailProjection.t} | {:error, Tesla.Env.t}
def get_inbox_sent_emails(connection, inbox_id, opts \\ []) do
optional_params = %{
:"page" => :query,
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes/#{inbox_id}/sent")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.PageSentEmailProjection{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Get inbox tags
Get all inbox tags
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, [%String{}, ...]} on success
{:error, info} on failure
"""
@spec get_inbox_tags(Tesla.Env.client, keyword()) :: {:ok, list(String.t)} | {:error, Tesla.Env.t}
def get_inbox_tags(connection, _opts \\ []) do
%{}
|> method(:get)
|> url("/inboxes/tags")
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, []},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
List Inboxes / Email Addresses
List the inboxes you have created. Note use of the more advanced `getAllEmails` is recommended. You can provide a limit and sort parameter.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :size (integer()): Optional result size limit. Note an automatic limit of 100 results is applied. See the paginated `getAllEmails` for larger queries.
- :sort (String.t): Optional createdAt sort direction ASC or DESC
## Returns
{:ok, [%Inbox{}, ...]} on success
{:error, info} on failure
"""
@spec get_inboxes(Tesla.Env.client, keyword()) :: {:ok, list(MailSlurpAPI.Model.Inbox.t)} | {:error, Tesla.Env.t}
def get_inboxes(connection, opts \\ []) do
optional_params = %{
:"size" => :query,
:"sort" => :query
}
%{}
|> method(:get)
|> url("/inboxes")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, [%MailSlurpAPI.Model.Inbox{}]},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Send Email
Send an email from an inbox's email address. The request body should contain the `SendEmailOptions` that include recipients, attachments, body etc. See `SendEmailOptions` for all available properties. Note the `inboxId` refers to the inbox's id not the inbox's email address. See https://www.mailslurp.com/guides/ for more information on how to send emails. This method does not return a sent email entity due to legacy reasons. To send and get a sent email as returned response use the sister method `sendEmailAndConfirm`.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): ID of the inbox you want to send the email from
- opts (KeywordList): [optional] Optional parameters
- :send_email_options (SendEmailOptions): Options for the email
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec send_email(Tesla.Env.client, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def send_email(connection, inbox_id, opts \\ []) do
optional_params = %{
:"sendEmailOptions" => :body
}
%{}
|> method(:post)
|> url("/inboxes/#{inbox_id}")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, false},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Send email and return sent confirmation
Sister method for standard `sendEmail` method with the benefit of returning a `SentEmail` entity confirming the successful sending of the email with link the the sent object created for it.
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): ID of the inbox you want to send the email from
- opts (KeywordList): [optional] Optional parameters
- :send_email_options (SendEmailOptions): Options for the email
## Returns
{:ok, %MailSlurpAPI.Model.SentEmailDto{}} on success
{:error, info} on failure
"""
@spec send_email_and_confirm(Tesla.Env.client, String.t, keyword()) :: {:ok, MailSlurpAPI.Model.SentEmailDto.t} | {:error, Tesla.Env.t}
def send_email_and_confirm(connection, inbox_id, opts \\ []) do
optional_params = %{
:"sendEmailOptions" => :body
}
%{}
|> method(:post)
|> url("/inboxes/#{inbox_id}/confirm")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 201, %MailSlurpAPI.Model.SentEmailDto{}},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Set inbox favourited state
Set and return new favourite state for an inbox
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- set_inbox_favourited_options (SetInboxFavouritedOptions): setInboxFavouritedOptions
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec set_inbox_favourited(Tesla.Env.client, String.t, MailSlurpAPI.Model.SetInboxFavouritedOptions.t, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def set_inbox_favourited(connection, inbox_id, set_inbox_favourited_options, _opts \\ []) do
%{}
|> method(:put)
|> url("/inboxes/#{inbox_id}/favourite")
|> add_param(:body, :body, set_inbox_favourited_options)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.Inbox{}},
{ 201, false},
{ 401, false},
{ 403, false},
{ 404, false}
])
end
@doc """
Update Inbox
Update editable fields on an inbox
## Parameters
- connection (MailSlurpAPI.Connection): Connection to server
- inbox_id (String.t): inboxId
- update_inbox_options (UpdateInboxOptions): updateInboxOptions
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %MailSlurpAPI.Model.Inbox{}} on success
{:error, info} on failure
"""
@spec update_inbox(Tesla.Env.client, String.t, MailSlurpAPI.Model.UpdateInboxOptions.t, keyword()) :: {:ok, MailSlurpAPI.Model.Inbox.t} | {:error, Tesla.Env.t}
def update_inbox(connection, inbox_id, update_inbox_options, _opts \\ []) do
%{}
|> method(:patch)
|> url("/inboxes/#{inbox_id}")
|> add_param(:body, :body, update_inbox_options)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> evaluate_response([
{ 200, %MailSlurpAPI.Model.Inbox{}},
{ 204, false},
{ 401, false},
{ 403, false}
])
end
end
| 41.174812
| 798
| 0.675097
|
9e6f1dfeaa4a709dddbe6e4016a9ad97c248113c
| 2,666
|
ex
|
Elixir
|
clients/calendar/lib/google_api/calendar/v3/api/channels.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/calendar/lib/google_api/calendar/v3/api/channels.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/calendar/lib/google_api/calendar/v3/api/channels.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Calendar.V3.Api.Channels do
@moduledoc """
API calls for all endpoints tagged `Channels`.
"""
alias GoogleApi.Calendar.V3.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Stop watching resources through this channel
## Parameters
- connection (GoogleApi.Calendar.V3.Connection): Connection to server
- optional_params (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :resource (Channel):
## Returns
{:ok, %{}} on success
{:error, info} on failure
"""
@spec calendar_channels_stop(Tesla.Env.client(), keyword()) ::
{:ok, nil} | {:error, Tesla.Env.t()}
def calendar_channels_stop(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:resource => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/channels/stop")
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [decode: false])
end
end
| 36.520548
| 170
| 0.693923
|
9e6f23d459793ed9fda93d52f911e53d83b8b4db
| 259
|
ex
|
Elixir
|
lib/bike_brigade.ex
|
bikebrigade/dispatch
|
eb622fe4f6dab7c917d678d3d7a322a01f97da44
|
[
"Apache-2.0"
] | 28
|
2021-10-11T01:53:53.000Z
|
2022-03-24T17:45:55.000Z
|
lib/bike_brigade.ex
|
bikebrigade/dispatch
|
eb622fe4f6dab7c917d678d3d7a322a01f97da44
|
[
"Apache-2.0"
] | 20
|
2021-10-21T08:12:31.000Z
|
2022-03-31T13:35:53.000Z
|
lib/bike_brigade.ex
|
bikebrigade/dispatch
|
eb622fe4f6dab7c917d678d3d7a322a01f97da44
|
[
"Apache-2.0"
] | null | null | null |
defmodule BikeBrigade do
@moduledoc """
BikeBrigade keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 25.9
| 66
| 0.760618
|
9e6f29caeffd90f889fdcb0d37910271f7ec54b1
| 3,624
|
exs
|
Elixir
|
test/strip_js_test.exs
|
appcues/strip_js
|
120a75affaea83c60df9d25a7d808446103b133e
|
[
"MIT"
] | 12
|
2019-04-02T22:13:39.000Z
|
2021-05-04T09:03:34.000Z
|
test/strip_js_test.exs
|
appcues/strip_js
|
120a75affaea83c60df9d25a7d808446103b133e
|
[
"MIT"
] | 2
|
2020-04-15T20:13:45.000Z
|
2020-04-27T14:33:35.000Z
|
test/strip_js_test.exs
|
appcues/strip_js
|
120a75affaea83c60df9d25a7d808446103b133e
|
[
"MIT"
] | 1
|
2019-04-19T17:02:57.000Z
|
2019-04-19T17:02:57.000Z
|
defmodule StripJsTest do
use ExSpec, async: true
doctest StripJs
@html_with_js """
<html>
<head>
<title>garbage</title>
<script src="poop.js"></script>
<script>wowCool();</script>
</head>
<body>
<!--omg comment-->
<a href="http://example.com" onclick="alert('wow')">Click me</a>
<div>
<a href="javascript:alert('omg')">Click me too</a>
<script>
OMG_WOW();
</script>
</div>
<script>alert('wat');</script>
<p>Hi, mom!</p>
</body>
</html>
"""
@html_without_js """
<html>
<head>
<title>garbage</title>
</head>
<body>
<!--omg comment-->
<a href="http://example.com">Click me</a>
<div>
<a href="#">Click me too</a>
</div>
<p>Hi, mom!</p>
</body>
</html>
"""
context "test cases" do
it "passes test cases" do
Enum.each(TestCases.test_cases(), fn {input, out} ->
real_output_tree = input |> StripJs.clean_html()
expected_output_tree = out
assert(expected_output_tree == real_output_tree)
end)
end
end
context "strip_js" do
it "strips js from html" do
stripped_html = Floki.parse_fragment(StripJs.clean_html(@html_with_js))
assert(stripped_html == Floki.parse_fragment(@html_without_js))
end
it "leaves regular html alone" do
stripped_html = Floki.parse_fragment(StripJs.clean_html(@html_without_js))
assert(stripped_html == Floki.parse_fragment(@html_without_js))
end
it "handles plain text" do
assert("asdf" == StripJs.clean_html("asdf"))
assert(" asdf omg " == StripJs.clean_html(" asdf omg "))
assert(
" asdf omg " ==
StripJs.clean_html(" asdf <script>alert('LOL');</script> omg ")
)
end
it "handles mixed text and HTML" do
assert("<tt>1</tt>lol" == StripJs.clean_html("<tt>1</tt>lol"))
assert("asdf<tt>1</tt>lol" == StripJs.clean_html("asdf<tt>1</tt>lol"))
assert(
"asdf <tt> 1</tt> lol" == StripJs.clean_html("asdf <tt> 1</tt> lol")
)
assert(
"asdf <tt> 1</tt> lol" ==
StripJs.clean_html(
"asdf <tt> 1<script src='bad.js'></script></tt> lol"
)
)
assert(
"asdf <tt> 1</tt> lol" ==
StripJs.clean_html(
"asdf <tt onclick=\"alert('hah');\"> 1<script src='bad.js'></script></tt> lol"
)
)
assert(
" asdf omg " ==
StripJs.clean_html(" asdf <script>alert('LOL');</script> omg ")
)
end
it "HTML-encodes output" do
assert("<" == StripJs.clean_html("<"))
assert("<" == StripJs.clean_html("<"))
assert("<tt><</tt>" == StripJs.clean_html("<tt><</tt>"))
assert("<tt><</tt>" == StripJs.clean_html("<tt><</tt>"))
assert(
"<tt attr=\"<\"><</tt>" ==
StripJs.clean_html("<tt attr='<'><</tt>")
)
assert(
"<tt attr=\"<\"><</tt>" ==
StripJs.clean_html("<tt attr='<'><</tt>")
)
assert(
"<script> alert('pwnt'); </script>" ==
StripJs.clean_html("<script> alert('pwnt'); </script>")
)
end
@premangled_html """
<a data-attrs-event="{"event":"Primary use case set"}">test</a>
"""
it "doesn't mangle SGML entities in HTML attributes" do
stripped_html = @premangled_html |> StripJs.clean_html()
assert(
stripped_html
|> String.contains?(
"{"event":"Primary use case set"}"
)
)
end
end
end
| 25.885714
| 90
| 0.54277
|
9e6f3d0ce264cf667a837998240a70288338e82b
| 1,187
|
ex
|
Elixir
|
web/lib/squitter_web/channels/user_socket.ex
|
electricshaman/squitter
|
7a0dfbc125118b764d192f02b42b36596f6d4ac6
|
[
"MIT"
] | 34
|
2017-08-30T02:29:41.000Z
|
2021-05-29T20:21:43.000Z
|
web/lib/squitter_web/channels/user_socket.ex
|
electricshaman/squitter
|
7a0dfbc125118b764d192f02b42b36596f6d4ac6
|
[
"MIT"
] | 7
|
2017-09-12T05:27:23.000Z
|
2020-01-06T22:07:52.000Z
|
web/lib/squitter_web/channels/user_socket.ex
|
electricshaman/squitter
|
7a0dfbc125118b764d192f02b42b36596f6d4ac6
|
[
"MIT"
] | 9
|
2017-09-11T22:17:55.000Z
|
2022-01-31T03:07:58.000Z
|
defmodule Squitter.Web.UserSocket do
use Phoenix.Socket
## Channels
channel("aircraft:*", Squitter.Web.AircraftChannel)
## Transports
transport(:websocket, Phoenix.Transports.WebSocket)
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# Squitter.Web.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31.236842
| 84
| 0.704297
|
9e6f52a45473a728b85fc9ec083921c127b6e033
| 6,046
|
exs
|
Elixir
|
youtube/elixir_tutorial/main.exs
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
youtube/elixir_tutorial/main.exs
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
youtube/elixir_tutorial/main.exs
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
defmodule M do
  @moduledoc """
  Tutorial scratchpad covering core Elixir features: basic types, strings,
  arithmetic, comparisons, control flow, tuples, lists, maps, pattern
  matching, anonymous/recursive functions, comprehensions, exception
  handling and message-passing concurrency. Each `*_stuff/0` function
  demonstrates one topic by printing to stdout; `main/0` is the entry point.
  """

  # Basic type predicates (most examples left commented out by the author).
  def data_stuff do
    # my_float = 3.14159
    # IO.puts "Float: #{is_float(my_float)}"
    IO.puts "Atom: #{is_atom(:Pittsburgh)}"
    # one_to_10 = 1..10
  end

  # String API tour: length/concat/compare/contains/slice/split/case changes.
  def string_stuff do
    my_str = "My Sentence"
    IO.puts "Length: #{String.length(my_str)}"
    longer_str = my_str <> " is longer"
    IO.puts "Equal: #{"Egg" === "egg"}"
    IO.puts "My ?: #{String.contains?(my_str, "My")}"
    IO.puts "First: #{String.first(my_str)}"
    IO.puts "Index 4: #{String.at(my_str, 4)}"
    IO.puts "Substring: #{String.slice(my_str, 3, 8)}"
    IO.inspect String.split(longer_str, " ")
    IO.puts String.reverse(longer_str)
    IO.puts String.upcase(longer_str)
    IO.puts String.downcase(longer_str)
    IO.puts String.capitalize(longer_str)
    # `*` binds tighter than `|>`, so this pipes 40 into IO.puts.
    4 * 10 |> IO.puts
  end

  # Prints "<string> = <expression>"; also the runtime target of the
  # puts/1 macro below.
  def puts(string, expression) do
    IO.puts("#{string} = " <> "#{expression}")
  end

  # Macro variant: stringifies the AST of `clause` at compile time so the
  # printed label is the literal source expression (e.g. "5 * 4 = 20").
  defmacro puts(clause) do
    string = Macro.to_string(clause)
    quote do
      puts(unquote(string), unquote(clause))
    end
  end

  def math_stuff do
    # NOTE(review): the next two calls hit puts/2, so output doubles the
    # "=" (e.g. "5 + 4 =  = 9") — presumably leftovers from before the
    # puts/1 macro existed; confirm intent.
    puts "5 + 4 = ", 5+4
    puts "5 - 4 = ", 5 - 4
    puts 5 * 4
    puts 5 / 4
    puts div(5,4)
    puts rem(5,4)
  end

  # Comparison operators; === / !== also distinguish integer vs float.
  def compare_stuff do
    puts 5 == 5.0
    puts 5 === 5.0
    puts 5 != 5.0
    puts 5 !== 5.0
    puts 6 > 5
    puts 6 >= 5
    puts 6 < 5
    puts 6 <= 5
  end

  def logical_stuff do
    age = 16
    puts age >= 16 and age >= 18
    puts age >= 16 or age >= 18
    puts not true
  end

  # if/unless/cond/case plus the one-line keyword form of if.
  def decision_stuff do
    age = 16
    if age >= 18 do
      IO.puts "Can vote"
    else
      IO.puts "Can't vote"
    end
    unless age === 18 do
      IO.puts "you are not 18"
    else
      IO.puts "you are 18"
    end
    cond do
      age >= 18 -> IO.puts "You can vote"
      age >= 16 -> IO.puts "You can drive"
      age >= 14 -> IO.puts "You can wait"
      true -> "You can default"
    end
    case 2 do
      1 -> 1
      2 -> 2
      _ -> true
    end
    puts if age > 18, do: "Can Vote", else: "Can't Vote"
  end

  # Tuples are immutable: every Tuple.* call returns a new tuple.
  def tuple_stuff do
    my_stats = {174, 6.25, :Jim}
    puts is_tuple(my_stats)
    my_stats2 = Tuple.append(my_stats, 42)
    puts "Age #{elem(my_stats2, 3)}"
    puts "Size: #{tuple_size(my_stats2)}"
    my_stats3 = Tuple.delete_at(my_stats2, 0)
    my_stats4 = Tuple.insert_at(my_stats3, 0, 1974)
    IO.inspect my_stats4
    many_zeros = Tuple.duplicate(0, 5)
    IO.inspect many_zeros
    # Destructuring a tuple into three bindings.
    {weight, height, name} = {175, 6.25, "Jim"}
    IO.inspect {weight, height, name}
  end

  # Linked-list operations: ++/--/in, head|tail split, List helpers,
  # and a keyword list at the end.
  def list_stuff do
    list1 = [1,2,3]
    list2 = [4,5,6]
    list3 = list1 ++ list2
    list4 = list3 -- list1
    puts list4 == list2
    puts 6 in list4
    [head|tail] = list3
    IO.inspect head
    IO.inspect tail
    IO.write "Tail : "
    IO.inspect tail
    # Without this option [97,98] would render as the charlist 'ab'.
    IO.inspect [97,98], charlists: :as_lists
    Enum.each tail, fn item ->
      IO.puts item
    end
    words = ["Random", "words", "in a", "list"]
    Enum.each words, fn word ->
      IO.puts word
    end
    display_list(words)
    # NOTE(review): display_list/1 returns nil, so these IO.puts calls
    # print blank lines after the recursive output — confirm intent.
    IO.puts display_list(List.delete(words, "Random"))
    IO.puts display_list(List.delete_at(words, 1))
    IO.puts display_list(List.insert_at(words, 4, ", Yeah!"))
    puts List.first(words)
    puts List.last(words)
    my_stats = [name: "Derek", height: 6.25]
    IO.inspect my_stats
  end

  # Recursively prints each word; base case below returns nil.
  def display_list([word|words]) do
    IO.puts word
    display_list(words)
  end
  def display_list([]), do: nil

  # Maps with string keys (=> syntax) vs atom keys (shorthand access).
  def map_stuff do
    capitals = %{
      "Alabama" => "Montgomery",
      "Alaska" => "Juneau",
      "Arizona" => "Phoenix"
    }
    puts capitals["Alaska"]
    capitals2 = %{
      :alabama => "Montgomery",
      :alaska => "Juneau",
      :arizona => "Phoenix"
    }
    puts capitals2[:alaska]
    # put_new only adds the key if absent; returns a new map.
    capitals3 = Map.put_new(capitals, "Arkansas", "Little Rock")
    IO.inspect capitals3
  end

  def pattern_matching_stuff do
    [length, width] = [20, 30]
    puts length
    puts width
    # `_` ignores positions; binds only the innermost second element.
    [_,[_, a]] = [20, [30,40]]
    puts a
  end

  # fn syntax, &-capture shorthand, and multi-clause anonymous functions.
  def anonymous_function_stuff do
    get_sum = fn(x, y) -> x + y end
    puts get_sum.(5, 5)
    get_less = &(&1 - &2)
    puts get_less.(8,3)
    add_sum = fn
      {x} -> x
      {x, y} -> x + y
      {x, y, z} -> x + y + z
    end
    puts add_sum.({1,2})
    puts add_sum.({1,2,3})
    # Uses both defaults of do_it/2, so prints 2.
    IO.puts do_it()
  end

  # `\\` marks default argument values.
  def do_it(x \\ 1, y \\ 1) do
    x + y
  end

  def recursive_stuff do
    puts factorial(4)
  end

  # Guard clause handles the base case; second clause recurses.
  def factorial(num) when num == 1 do
    1
  end
  def factorial(num) do
    num * factorial(num - 1)
  end

  def looping_stuff do
    puts sum([1,2,3,4,5])
    loop(5,1)
  end

  def sum([]), do: 0
  def sum([h|t]), do: h + sum(t)

  # loop/3 abuses a trailing keyword list (`do: value`) as a third
  # argument — note the `do:` value is evaluated eagerly at each call
  # site, not lazily like a do-block.
  def loop(0, _, do: _), do: nil
  def loop(max, min, do: _) when max < min do
    loop(0, min, do: nil)
  end
  def loop(max, min, do: function) do
    IO.inspect [max, min]
    loop(max - 1, min, do: function)
  end
  def loop(max,min) do
    loop(max, min, do: nil)
  end

  # Common Enum combinators: all?/any?/each/map/reduce/uniq.
  def enumerable_stuff do
    puts Enum.all?([1,2,3], fn(n) -> rem(n, 2) == 0 end)
    puts Enum.any?([1,2,3], fn(n) -> rem(n, 2) == 0 end)
    Enum.each([1,2,3], fn(n) -> IO.puts n end)
    IO.inspect Enum.map([1,2,3], fn(n) -> n * 2 end)
    puts Enum.reduce([1,2,3], fn(n,c) -> n + c end)
    IO.inspect Enum.uniq([1,2,3,3])
  end

  # `for` comprehensions: plain mapping, and with a filter clause.
  def list_comprehensions_stuff do
    IO.inspect for n <- [1,2,3], do: n * 2
    even_list = for n <- [1,2,3,4], rem(n,2) == 0, do: n
    IO.inspect even_list
  end

  # try/rescue returns the rescue clause's value on error.
  def exception_handling_stuff do
    err = try do
      5 / 0
    rescue
      ArithmeticError -> "Can't do it"
    end
    IO.puts err
  end

  def concurrency_stuff do
    # NOTE(review): send(self(), ...) runs INSIDE the spawned process, so
    # the message goes to the child, not this process — the receive below
    # likely always hits the 500ms `after` timeout; confirm intent.
    spawn (fn() -> loop(5, 1, do: send(self(), {:french, "Bob"})) end)
    spawn (fn() -> loop(10, 5) end)
    receive do
      {:german, name} -> IO.puts "Guten tag #{name}"
      {:french, name} -> IO.puts "Bonjour #{name}"
      {:english, name} -> IO.puts "Hello #{name}"
    after
      500 -> IO.puts "Time up!"
    end
  end

  # Entry point; interactive-input demo left commented out.
  def main do
    # name = IO.gets("What is your name? ") |> String.trim
    # IO.puts "Hello, #{name}"
    M.concurrency_stuff
  end
end
| 19.822951
| 70
| 0.560536
|
9e6f557e0922a7cbd94ee2766f2ec7d4da6a7365
| 1,856
|
exs
|
Elixir
|
test/tembeza_web/controllers/url_controller_test.exs
|
AdolfodelSel/Tembeza
|
20c19d6cc090e7c128bf35f016b7a3843cfc0dad
|
[
"Apache-2.0"
] | null | null | null |
test/tembeza_web/controllers/url_controller_test.exs
|
AdolfodelSel/Tembeza
|
20c19d6cc090e7c128bf35f016b7a3843cfc0dad
|
[
"Apache-2.0"
] | 1
|
2021-05-11T18:22:04.000Z
|
2021-05-11T18:22:04.000Z
|
test/tembeza_web/controllers/url_controller_test.exs
|
AdolfodelSel/Tembeza
|
20c19d6cc090e7c128bf35f016b7a3843cfc0dad
|
[
"Apache-2.0"
] | null | null | null |
defmodule TembezaWeb.UrlControllerTest do
  use TembezaWeb.ConnCase

  alias TembezaWeb.UrlController

  setup do
    # Fresh Bypass server per test; its port stands in for the remote site.
    bypass = Bypass.open()
    {:ok, bypass: bypass}
  end

  # Make the next request to the Bypass server answer with `status`/`body`.
  defp stub_remote(bypass, status, body) do
    Bypass.expect(bypass, fn conn ->
      Plug.Conn.resp(conn, status, body)
    end)
  end

  # Run the controller against the Bypass URL and decode its JSON reply.
  defp fetch_json(conn, bypass) do
    conn
    |> UrlController.fetch(%{"url" => "http://localhost:#{bypass.port}/"})
    |> response(200)
    |> Poison.decode!()
  end

  describe "fetch urls" do
    test "success fetch", %{conn: conn, bypass: bypass} do
      stub_remote(
        bypass,
        200,
        "<!doctype html><html><body><a href='http://github.com/philss/floki'>Github page</a><img src='img_girl.jpg' alt='Girl in a jacket' width='500' height='600'></body></html>"
      )

      assert fetch_json(conn, bypass) ==
               %{
                 "result" => %{
                   "assets" => ["img_girl.jpg"],
                   "links" => ["http://github.com/philss/floki"]
                 },
                 "status" => true
               }
    end

    test "empty response", %{conn: conn, bypass: bypass} do
      stub_remote(bypass, 200, "<!doctype html><html><body></body></html>")

      assert fetch_json(conn, bypass) ==
               %{"result" => %{"assets" => [], "links" => []}, "status" => true}
    end

    test "not found", %{conn: conn, bypass: bypass} do
      stub_remote(bypass, 404, "")

      assert fetch_json(conn, bypass) ==
               %{"error" => "Error fetching the url", "status" => false}
    end
  end
end
| 26.898551
| 181
| 0.541487
|
9e6f9014c1963fa6e9847ab0dc6a8d102aad9602
| 1,595
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v2/model/orders_cancel_test_order_by_customer_response.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/orders_cancel_test_order_by_customer_response.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/model/orders_cancel_test_order_by_customer_response.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.OrdersCancelTestOrderByCustomerResponse do
  @moduledoc """
  ## Attributes

  - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"content#ordersCancelTestOrderByCustomerResponse\". Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kind => any()
        }

  # `field/1` is a ModelBase macro that registers the attribute for
  # (de)serialization.
  field(:kind)
end

# Delegates JSON decoding of this model to the ModelBase-generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersCancelTestOrderByCustomerResponse do
  def decode(value, options) do
    GoogleApi.Content.V2.Model.OrdersCancelTestOrderByCustomerResponse.decode(value, options)
  end
end

# Encoding is shared across all generated models via ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersCancelTestOrderByCustomerResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.229167
| 170
| 0.760502
|
9e6f91832377057de56aec4ed79a43786bd443ad
| 2,945
|
ex
|
Elixir
|
apps/tai/lib/tai/venue_adapters/okex/stream/process_auth.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | 1
|
2019-12-19T05:16:26.000Z
|
2019-12-19T05:16:26.000Z
|
apps/tai/lib/tai/venue_adapters/okex/stream/process_auth.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | null | null | null |
apps/tai/lib/tai/venue_adapters/okex/stream/process_auth.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | null | null | null |
defmodule Tai.VenueAdapters.OkEx.Stream.ProcessAuth do
  @moduledoc """
  Processes messages from the authenticated OkEx websocket stream.

  Order updates for swap/futures products are fanned out to async `Task`s
  (tracked by monitor ref in `state.tasks`); each task translates the venue
  payload into a local order update. Anything else is logged as an
  unhandled stream message.
  """

  use GenServer

  alias Tai.Events
  alias Tai.VenueAdapters.OkEx.{ClientId, Stream}

  defmodule State do
    @moduledoc false

    @type venue_id :: Tai.Venues.Adapter.venue_id()
    @type t :: %State{venue: atom, tasks: map}

    @enforce_keys ~w(venue tasks)a
    defstruct ~w(venue tasks)a
  end

  @type venue_id :: Tai.Venues.Adapter.venue_id()
  @type state :: State.t()

  def start_link(venue: venue) do
    state = %State{venue: venue, tasks: %{}}
    name = venue |> to_name()
    GenServer.start_link(__MODULE__, state, name: name)
  end

  @impl true
  @spec init(state) :: {:ok, state}
  def init(state) do
    # Trap exits so crashing worker tasks arrive as messages instead of
    # taking this process down with them.
    Process.flag(:trap_exit, true)
    {:ok, state}
  end

  @spec to_name(venue_id) :: atom
  def to_name(venue), do: :"#{__MODULE__}_#{venue}"

  @product_types ["swap/order", "futures/order"]

  @impl true
  def handle_cast(
        {%{"table" => table, "data" => orders}, received_at},
        state
      )
      when table in @product_types do
    # One async task per order; track each task by its monitor ref so the
    # :DOWN handlers below can clean up when it finishes.
    new_tasks =
      orders
      |> Enum.map(fn %{"client_oid" => venue_client_id} = venue_order ->
        Task.async(fn ->
          venue_client_id
          |> ClientId.from_base32()
          |> Stream.UpdateOrder.update(venue_order, received_at)
        end)
      end)
      |> Enum.reduce(%{}, fn t, acc -> Map.put(acc, t.ref, true) end)
      |> Map.merge(state.tasks)

    # Struct update syntax (not Map.put/3) preserves %State{} key
    # guarantees: a typo'd key raises instead of silently adding a field.
    {:noreply, %{state | tasks: new_tasks}}
  end

  def handle_cast({msg, received_at}, state) do
    %Events.StreamMessageUnhandled{
      venue_id: state.venue,
      msg: msg,
      received_at: received_at
    }
    |> Events.warn()

    {:noreply, state}
  end

  # Task result message: forward the outcome to the notifier.
  @impl true
  def handle_info({_reference, response}, state) do
    response |> notify
    {:noreply, state}
  end

  # Task finished cleanly: stop tracking it.
  def handle_info({:DOWN, reference, :process, _pid, :normal}, state) do
    {:noreply, %{state | tasks: Map.delete(state.tasks, reference)}}
  end

  # Task crashed: emit a stream error event, then stop tracking it.
  def handle_info({:DOWN, reference, :process, _pid, reason}, state) do
    %Events.StreamError{
      venue_id: state.venue,
      reason: reason
    }
    |> Events.error()

    {:noreply, %{state | tasks: Map.delete(state.tasks, reference)}}
  end

  # :EXIT arrives because init/1 traps exits; nothing to do here.
  def handle_info({:EXIT, _, _}, state), do: {:noreply, state}

  defp notify(:ok), do: nil

  defp notify({:ok, {old, updated}}) do
    Tai.Trading.NotifyOrderUpdate.notify!(old, updated)
  end

  defp notify({:error, {:invalid_status, was, required, %action_name{} = action}}) do
    Tai.Events.warn(%Tai.Events.OrderUpdateInvalidStatus{
      was: was,
      required: required,
      client_id: action.client_id,
      action: action_name
    })
  end

  defp notify({:error, {:not_found, %action_name{} = action}}) do
    Tai.Events.warn(%Tai.Events.OrderUpdateNotFound{
      client_id: action.client_id,
      action: action_name
    })
  end
end
| 26.061947
| 85
| 0.634635
|
9e6fd969ccfc80be43ce2060f080a2383faa014a
| 1,764
|
exs
|
Elixir
|
bench/run.exs
|
Overbryd/poison
|
b095851a0ea3e3106e5700b743187226c607de26
|
[
"CC0-1.0"
] | null | null | null |
bench/run.exs
|
Overbryd/poison
|
b095851a0ea3e3106e5700b743187226c607de26
|
[
"CC0-1.0"
] | null | null | null |
bench/run.exs
|
Overbryd/poison
|
b095851a0ea3e3106e5700b743187226c607de26
|
[
"CC0-1.0"
] | 1
|
2021-08-09T09:19:05.000Z
|
2021-08-09T09:19:05.000Z
|
# JSON library benchmark harness (Benchee). Compares encode and decode
# throughput of several libraries over a set of real-world JSON fixtures.

# Encoders under test, keyed by display name.
encode_jobs = %{
  "Poison" => &Poison.encode!/1,
  "JSX" => &JSX.encode!/1,
  "Tiny" => &Tiny.encode!/1,
  "jsone" => &:jsone.encode/1,
  "jiffy" => &:jiffy.encode/1,
  "JSON" => &JSON.encode!/1,
}

# Fixture names; resolved to files under bench/data by `read_data` below.
encode_inputs = [
  "GitHub",
  "Giphy",
  "GovTrack",
  "Blockchain",
  "Pokedex",
  "JSON Generator",
  "UTF-8 unescaped",
  "Issue 90",
]

# Decoders under test, keyed by display name.
decode_jobs = %{
  "Poison" => &Poison.decode!/1,
  "JSX" => &JSX.decode!(&1, [:strict]),
  "Tiny" => &Tiny.decode!/1,
  "jsone" => &:jsone.decode/1,
  "jiffy" => &:jiffy.decode(&1, [:return_maps]),
  "JSON" => &JSON.decode!/1,
}

decode_inputs = [
  "GitHub",
  "Giphy",
  "GovTrack",
  "Blockchain",
  "Pokedex",
  "JSON Generator",
  "JSON Generator (Pretty)",
  "UTF-8 escaped",
  "UTF-8 unescaped",
  "Issue 90",
]

# Maps a display name to its fixture file contents: downcase, collapse
# non-word runs to "-", e.g. "JSON Generator (Pretty)" reads
# data/json-generator-pretty.json relative to this script.
read_data = fn (name) ->
  name
  |> String.downcase
  |> String.replace(~r/([^\w]|-|_)+/, "-")
  |> String.trim("-")
  |> (&"data/#{&1}.json").()
  |> Path.expand(__DIR__)
  |> File.read!
end

# Encode benchmark: fixtures are pre-decoded so only encoding is measured.
Benchee.run(encode_jobs,
  parallel: 4,
  # warmup: 5,
  # time: 30,
  inputs: for name <- encode_inputs, into: %{} do
    name
    |> read_data.()
    |> Poison.decode!
    |> (&{name, &1}).()
  end,
  formatters: [
    &Benchee.Formatters.HTML.output/1,
    &Benchee.Formatters.Console.output/1,
  ],
  formatter_options: [
    html: [
      file: Path.expand("output/encode.html", __DIR__)
    ]
  ]
)

# Decode benchmark: fixtures stay as raw JSON strings.
Benchee.run(decode_jobs,
  parallel: 4,
  # warmup: 5,
  # time: 30,
  inputs: for name <- decode_inputs, into: %{} do
    name
    |> read_data.()
    |> (&{name, &1}).()
  end,
  formatters: [
    &Benchee.Formatters.HTML.output/1,
    &Benchee.Formatters.Console.output/1,
  ],
  formatter_options: [
    html: [
      file: Path.expand("output/decode.html", __DIR__)
    ]
  ]
)
| 18.967742
| 54
| 0.554989
|
9e6fe5e6402dff960b0e8d2929ac37e864f30a90
| 217
|
ex
|
Elixir
|
lib/job_board_web/controllers/plugs/add_site_config.ex
|
TDogVoid/job_board
|
23793917bd1cc4e68bccce737b971093030a31eb
|
[
"MIT"
] | null | null | null |
lib/job_board_web/controllers/plugs/add_site_config.ex
|
TDogVoid/job_board
|
23793917bd1cc4e68bccce737b971093030a31eb
|
[
"MIT"
] | null | null | null |
lib/job_board_web/controllers/plugs/add_site_config.ex
|
TDogVoid/job_board
|
23793917bd1cc4e68bccce737b971093030a31eb
|
[
"MIT"
] | null | null | null |
defmodule JobBoardWeb.Plugs.AddSiteConfig do
  @moduledoc """
  Plug that loads the site-wide configuration into `conn.assigns.config`
  so downstream controllers and templates can read it.
  """

  import Plug.Conn

  alias JobBoard.Siteconfigs

  # Plug contract: init/1's return value is handed to call/2. The previous
  # empty body implicitly returned nil; passing opts through is the
  # idiomatic (and backward-compatible, since call/2 ignores it) form.
  def init(opts), do: opts

  def call(conn, _opts) do
    # NOTE(review): Siteconfigs.get_main runs on every request —
    # presumably cheap or cached; confirm in JobBoard.Siteconfigs.
    assign(conn, :config, Siteconfigs.get_main)
  end
end
| 16.692308
| 44
| 0.728111
|
9e6ff065916499038d09a57b4caac6629268fdbd
| 3,017
|
ex
|
Elixir
|
lib/brando/traits/sequenced.ex
|
univers-agency/brando
|
69c3c52498a3f64518da3522cd9f27294a52cc68
|
[
"Apache-2.0"
] | 1
|
2020-04-26T09:53:02.000Z
|
2020-04-26T09:53:02.000Z
|
lib/brando/traits/sequenced.ex
|
univers-agency/brando
|
69c3c52498a3f64518da3522cd9f27294a52cc68
|
[
"Apache-2.0"
] | 198
|
2019-08-20T16:16:07.000Z
|
2020-07-03T15:42:07.000Z
|
lib/brando/traits/sequenced.ex
|
univers-agency/brando
|
69c3c52498a3f64518da3522cd9f27294a52cc68
|
[
"Apache-2.0"
] | null | null | null |
defmodule Brando.Trait.Sequenced do
  @moduledoc """
  A sequenced resource

  ## Options

  - `append: true`: Sequences the item to the last possible position.
  """

  use Brando.Trait

  alias Brando.Cache
  alias Brando.Datasource
  alias Ecto.Changeset

  import Ecto.Query

  @type changeset :: Changeset.t()

  # Adds a `sequence` integer column (default 0) to the host schema.
  attributes do
    attribute :sequence, :integer, default: 0
  end

  @doc """
  Sequences ids or composite keys

  With composite keys:

      sequence %{module, "composite_keys" => [%{"id" => 1, "additional_id" => 2}, %{...}]}

  With regular ids

      sequence %{module, "ids" => [3, 5, 1]}
  """
  def sequence(module, %{"composite_keys" => composite_keys}) do
    table = module.__schema__(:source)

    # One UPDATE per composite-key map, all inside a single transaction;
    # each row's new sequence is its position in the submitted list.
    Brando.repo().transaction(fn ->
      for {o, idx} <- Enum.with_index(composite_keys) do
        q = from t in table, update: [set: [sequence: ^idx]]

        # AND together a where clause for every key/value of the composite
        # key. to_existing_atom guards against atom creation from input.
        q =
          Enum.reduce(o, q, fn {k, v}, nq ->
            from t in nq, where: field(t, ^String.to_existing_atom(k)) == ^v
          end)

        Brando.repo().update_all(q, [])
      end
    end)

    # throw out cached listings
    Cache.Query.evict_schema(module)

    # update referenced Datasources in Villains
    Datasource.update_datasource(module)
  end

  def sequence(module, %{"ids" => keys} = params) do
    # Optional offset lets a paginated UI sequence page N without
    # clobbering earlier pages' positions.
    offset =
      params
      |> Map.get("sortable_offset", 0)
      |> maybe_convert_to_integer()

    # standard list of ids
    vals = Range.new(0 + offset, offset + length(keys) - 1) |> Enum.to_list()
    table = module.__schema__(:source)

    # Single UPDATE joined against an unnested (id, sequence) value table —
    # one round trip instead of one UPDATE per row. NOTE(review): `unnest`
    # is PostgreSQL-specific.
    q =
      from a in table,
        join:
          numbers in fragment(
            "SELECT * FROM unnest(?, ?) AS t(key, value)",
            type(^keys, {:array, :integer}),
            type(^vals, {:array, :integer})
          ),
        on: a.id == numbers.key,
        update: [set: [sequence: numbers.value]]

    Brando.repo().update_all(q, [])

    # throw out cached listings
    Cache.Query.evict_schema(module)

    # update referenced Datasources in Villains
    Datasource.update_datasource(module)
  end

  # With `append: true`, new records get sequence = highest + 1 at insert
  # time; updates pass through untouched.
  def changeset_mutator(module, %{append: true}, changeset, _user, _opts) do
    Changeset.prepare_changes(changeset, fn
      %{action: :insert} = cs ->
        # set as highest sequence on insert
        seq = get_highest_sequence(module)
        Changeset.put_change(cs, :sequence, seq)

      cs ->
        cs
    end)
  end

  def changeset_mutator(_module, _config, changeset, _user, _opts) do
    changeset
  end

  # Next free sequence value: highest existing sequence + 1, or 0 for an
  # empty table.
  def get_highest_sequence(module) do
    query =
      from t in module,
        select: t.sequence,
        order_by: [desc: t.sequence],
        limit: 1

    case Brando.repo().all(query) do
      [] -> 0
      [seq] -> seq + 1
    end
  end

  # Params may arrive as strings from the web layer; normalize to integer.
  defp maybe_convert_to_integer(sortable_offset) when is_binary(sortable_offset) do
    {integer, _} = Integer.parse(sortable_offset)
    integer
  end

  defp maybe_convert_to_integer(sortable_offset) when is_integer(sortable_offset) do
    sortable_offset
  end
end
| 23.944444
| 90
| 0.617501
|
9e6ff3de2ff31cc5cdbca62221f204ab9ac61cbe
| 1,189
|
exs
|
Elixir
|
test/couchdb_connector/as_map_test.exs
|
PinheiroRodrigo/couchdb_connector
|
464136ebe47049c5b3143bbd5f74977c82c9c621
|
[
"Apache-2.0"
] | 64
|
2016-01-20T17:48:05.000Z
|
2017-05-24T18:00:14.000Z
|
test/couchdb_connector/as_map_test.exs
|
PinheiroRodrigo/couchdb_connector
|
464136ebe47049c5b3143bbd5f74977c82c9c621
|
[
"Apache-2.0"
] | 40
|
2016-01-16T18:39:46.000Z
|
2017-06-03T10:02:54.000Z
|
test/couchdb_connector/as_map_test.exs
|
PinheiroRodrigo/couchdb_connector
|
464136ebe47049c5b3143bbd5f74977c82c9c621
|
[
"Apache-2.0"
] | 22
|
2015-12-28T00:23:20.000Z
|
2017-05-28T21:58:21.000Z
|
defmodule Couchdb.Connector.AsMapTest do
  use ExUnit.Case

  import Couchdb.Connector.AsMap

  # Malformed JSON (missing opening quote before test_key) must raise
  # rather than partially decode.
  test "as_map/1 with invalid json string should raise RuntimeError" do
    malformed = "{\"_id\":\"foo\",\"_rev\":\"1-0f97561a543ed2e9c98a24dea818ec10\",test_key\":\"test_value\"}\n"
    assert_raise RuntimeError, fn -> as_map(malformed) end
  end

  test "as_map/1 with empty string should raise RuntimeError" do
    assert_raise RuntimeError, fn -> as_map("") end
  end

  test "as_map/1 with valid json string should return decoded Map" do
    json = "{\"_id\":\"foo\",\"_rev\":\"1-0f97561a543ed2e9c98a24dea818ec10\",\"test_key\":\"test_value\"}\n"
    result = as_map(json)

    assert result["_id"] == "foo"
    assert result["_rev"] == "1-0f97561a543ed2e9c98a24dea818ec10"
    assert result["test_key"] == "test_value"
  end

  # HTTP-style header tuples should decode into a string-keyed map.
  test "as_map/1 with a List of String Tuples should return decoded Map" do
    headers = [
      {"Server", "CouchDB/1.6.1 (Erlang OTP/19)"},
      {"Location", "http://127.0.0.1:5984/couchdb_connector_test/42"}
    ]

    result = as_map(headers)
    assert result["Server"] == "CouchDB/1.6.1 (Erlang OTP/19)"
  end
end
| 32.135135
| 109
| 0.677881
|
9e6ff6f93e80b70441f4a2d65883f395514ac7d4
| 1,806
|
ex
|
Elixir
|
clients/script/lib/google_api/script/v1/model/list_deployments_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/script/lib/google_api/script/v1/model/list_deployments_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/script/lib/google_api/script/v1/model/list_deployments_response.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Script.V1.Model.ListDeploymentsResponse do
  @moduledoc """
  Response with the list of deployments for the specified Apps Script project.

  ## Attributes

  * `deployments` (*type:* `list(GoogleApi.Script.V1.Model.Deployment.t)`, *default:* `nil`) - The list of deployments.
  * `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The token that can be used in the next call to get the next page of results.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :deployments => list(GoogleApi.Script.V1.Model.Deployment.t()) | nil,
          :nextPageToken => String.t() | nil
        }

  # `field` macros register attributes for (de)serialization; `type: :list`
  # decodes each element as a Deployment model.
  field(:deployments, as: GoogleApi.Script.V1.Model.Deployment, type: :list)
  field(:nextPageToken)
end

# Delegates JSON decoding of this model to the ModelBase-generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.Script.V1.Model.ListDeploymentsResponse do
  def decode(value, options) do
    GoogleApi.Script.V1.Model.ListDeploymentsResponse.decode(value, options)
  end
end

# Encoding is shared across all generated models via ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Script.V1.Model.ListDeploymentsResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.12
| 139
| 0.734773
|
9e7001ad1e41933e287938fdc0182ae331ec4edd
| 2,319
|
ex
|
Elixir
|
lib/exmoji/util.ex
|
christhekeele/exmoji
|
02cb57a5aa06dba5fe0c64c2eae4c74151fa2ee5
|
[
"MIT"
] | null | null | null |
lib/exmoji/util.ex
|
christhekeele/exmoji
|
02cb57a5aa06dba5fe0c64c2eae4c74151fa2ee5
|
[
"MIT"
] | null | null | null |
lib/exmoji/util.ex
|
christhekeele/exmoji
|
02cb57a5aa06dba5fe0c64c2eae4c74151fa2ee5
|
[
"MIT"
] | 1
|
2017-09-19T01:12:18.000Z
|
2017-09-19T01:12:18.000Z
|
defmodule Exmoji.Util.Unified do
  @moduledoc false

  # Actual conversion function, used by `Exmoji.Util` to generate precompiled
  # methods, and also used as a fallback for unmatched values.
  #
  # Decodes a unified ID such as "1F47E" or "0023-FE0F-20E3" into its
  # rendered glyph: each dash-separated hex codepoint becomes UTF-8 bytes.
  def _unified_to_char(uid) do
    for hex <- String.split(uid, "-"), into: "" do
      <<String.to_integer(hex, 16)::utf8>>
    end
  end
end
defmodule Exmoji.Util.Char do
  @moduledoc false

  # actual conversion function, used by `Exmoji.Util` to generate precompiled
  # methods, and also used as a fallback for unmatched values.
  #
  # Converts a rendered glyph (UTF-8 binary) to its unified codepoint ID,
  # e.g. "👾" -> "1F47E"; multi-codepoint sequences join with "-".
  def _char_to_unified(char) do
    char
    |> String.codepoints
    |> Enum.map(&padded_hex_string/1)
    |> Enum.join("-")
    |> String.upcase
  end

  # produce a string representation of the integer value of a codepoint, in hex
  # this should be zero-padded to a minimum of 4 digits.
  # Fix: String.rjust/3 was deprecated in Elixir 1.3 and removed in later
  # releases; String.pad_leading/3 is the supported equivalent.
  defp padded_hex_string(<< cp_int_value :: utf8 >>) do
    cp_int_value |> Integer.to_string(16) |> String.pad_leading(4, "0")
  end
end
defmodule Exmoji.Util do
  @moduledoc """
  Provides utility functions to convert between Unicode unified ID values and
  rendered Emoji glyphs in bitstring format.

  Pattern matched with precompiled values for all known Emoji character values
  for maximum speed, with fallbacks to algorithmic conversion.
  """

  alias Exmoji.EmojiChar
  alias Exmoji.Util.Unified
  alias Exmoji.Util.Char

  @doc """
  Convert a unified ID directly to its bitstring glyph representation.

  Precompiled only for uppercase format of the hex ID.

  ## Example

      iex> Exmoji.Util.unified_to_char("1F47E")
      "👾"

  """
  # Compile-time unrolling: one literal-returning clause per known
  # codepoint ID, so lookups are a head match rather than a computation.
  for ec <- Exmoji.all, cp <- EmojiChar.codepoint_ids(ec) do
    def unified_to_char( unquote(cp) ) do
      unquote( Unified._unified_to_char(cp) )
    end
  end

  # if not found, fallback
  def unified_to_char(uid), do: Unified._unified_to_char(uid)

  @doc """
  Convert a native bitstring glyph to its unified codepoint ID.

  ## Examples

      iex> Exmoji.Util.char_to_unified("👾")
      "1F47E"

      iex> Exmoji.Util.char_to_unified("\x{23}\x{fe0f}\x{20e3}")
      "0023-FE0F-20E3"

  """
  # Same unrolling in the reverse direction: glyph literal -> ID literal.
  for ec <- Exmoji.all, cp <- EmojiChar.codepoint_ids(ec) do
    def char_to_unified( unquote(Unified._unified_to_char(cp)) ) do
      unquote(cp)
    end
  end

  # if not found, fallback
  def char_to_unified(uid), do: Char._char_to_unified(uid)
end
| 24.935484
| 79
| 0.697283
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.