hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
938b20e6828f465d85221bb999e3984a2fad8d28
| 2,452
|
ex
|
Elixir
|
clients/docs/lib/google_api/docs/v1/model/named_style_suggestion_state.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/docs/lib/google_api/docs/v1/model/named_style_suggestion_state.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/docs/lib/google_api/docs/v1/model/named_style_suggestion_state.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.NamedStyleSuggestionState do
  @moduledoc """
  A suggestion state of a NamedStyle message.

  ## Attributes

  *   `namedStyleType` (*type:* `String.t`, *default:* `nil`) - The named style type that this suggestion state corresponds to.
      This field is provided as a convenience for matching the
      NamedStyleSuggestionState with its corresponding NamedStyle.
  *   `paragraphStyleSuggestionState` (*type:* `GoogleApi.Docs.V1.Model.ParagraphStyleSuggestionState.t`, *default:* `nil`) - A mask that indicates which of the fields in paragraph style have been changed in this
      suggestion.
  *   `textStyleSuggestionState` (*type:* `GoogleApi.Docs.V1.Model.TextStyleSuggestionState.t`, *default:* `nil`) - A mask that indicates which of the fields in text style have been changed in this
      suggestion.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :namedStyleType => String.t(),
          :paragraphStyleSuggestionState =>
            GoogleApi.Docs.V1.Model.ParagraphStyleSuggestionState.t(),
          :textStyleSuggestionState => GoogleApi.Docs.V1.Model.TextStyleSuggestionState.t()
        }

  # `field/1,2` comes from GoogleApi.Gax.ModelBase (the `use` above); `as:`
  # presumably points a nested attribute at its model module for decoding.
  field(:namedStyleType)
  field(:paragraphStyleSuggestionState, as: GoogleApi.Docs.V1.Model.ParagraphStyleSuggestionState)
  field(:textStyleSuggestionState, as: GoogleApi.Docs.V1.Model.TextStyleSuggestionState)
end
# Poison decoding delegates to the model's own `decode/2`, which is
# provided via `GoogleApi.Gax.ModelBase`.
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.NamedStyleSuggestionState do
  def decode(value, options) do
    GoogleApi.Docs.V1.Model.NamedStyleSuggestionState.decode(value, options)
  end
end
# Poison encoding reuses the shared encoder from `GoogleApi.Gax.ModelBase`.
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.NamedStyleSuggestionState do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.559322
| 212
| 0.752447
|
938b26afb601eed290ae8e80fee8423ccda3c94e
| 945
|
ex
|
Elixir
|
lib/glimesh_web/live/live_helpers.ex
|
Megami-Studios/glimesh.tv
|
57dde3a328fabdcc3305be48ae1b82df27b83c9b
|
[
"MIT"
] | 328
|
2020-07-23T22:13:49.000Z
|
2022-03-31T21:22:28.000Z
|
lib/glimesh_web/live/live_helpers.ex
|
Megami-Studios/glimesh.tv
|
57dde3a328fabdcc3305be48ae1b82df27b83c9b
|
[
"MIT"
] | 362
|
2020-07-23T22:38:38.000Z
|
2022-03-24T02:11:16.000Z
|
lib/glimesh_web/live/live_helpers.ex
|
Megami-Studios/glimesh.tv
|
57dde3a328fabdcc3305be48ae1b82df27b83c9b
|
[
"MIT"
] | 72
|
2020-07-23T22:50:46.000Z
|
2022-02-02T11:59:32.000Z
|
defmodule GlimeshWeb.LiveHelpers do
  @moduledoc """
  Inject methods into live views to provide common functionality.
  """
  import Phoenix.LiveView.Helpers

  @doc """
  Renders `component` inside the `GlimeshWeb.ModalComponent` component.

  The `:return_to` option is required and is handed to the modal so it can
  properly update the URL when the modal is closed; `:title` is required as
  well. All options are also forwarded unchanged to the inner component.

  ## Examples

      <%= live_modal @socket, GlimeshWeb.PostLive.FormComponent,
        id: @post.id || :new,
        action: @live_action,
        post: @post,
        return_to: Routes.post_index_path(@socket, :index) %>
  """
  def live_modal(_socket, component, opts) do
    # Fail fast when either mandatory option is missing.
    return_to = Keyword.fetch!(opts, :return_to)
    title = Keyword.fetch!(opts, :title)

    modal_assigns = [
      id: :modal,
      title: title,
      return_to: return_to,
      component: component,
      opts: opts
    ]

    # The socket argument is deprecated by the library author, so it is
    # accepted for call-site compatibility but never used.
    live_component(GlimeshWeb.ModalComponent, modal_assigns)
  end
end
| 25.540541
| 71
| 0.671958
|
938b2ab763f09794ce2796513ea944ebb1a1438f
| 216
|
exs
|
Elixir
|
priv/repo/migrations/20200604234142_add_mailchimp_to_riders.exs
|
bikebrigade/dispatch
|
eb622fe4f6dab7c917d678d3d7a322a01f97da44
|
[
"Apache-2.0"
] | 28
|
2021-10-11T01:53:53.000Z
|
2022-03-24T17:45:55.000Z
|
priv/repo/migrations/20200604234142_add_mailchimp_to_riders.exs
|
bikebrigade/dispatch
|
eb622fe4f6dab7c917d678d3d7a322a01f97da44
|
[
"Apache-2.0"
] | 20
|
2021-10-21T08:12:31.000Z
|
2022-03-31T13:35:53.000Z
|
priv/repo/migrations/20200604234142_add_mailchimp_to_riders.exs
|
bikebrigade/dispatch
|
eb622fe4f6dab7c917d678d3d7a322a01f97da44
|
[
"Apache-2.0"
] | null | null | null |
defmodule BikeBrigade.Repo.Migrations.AddMailchimpToRiders do
  use Ecto.Migration

  # Adds two Mailchimp-related string columns to the riders table.
  # `change/0` is reversible: Ecto derives the rollback (dropping the
  # columns) automatically.
  def change do
    alter table(:riders) do
      add :mailchimp_id, :string
      add :mailchimp_status, :string
    end
  end
end
| 19.636364
| 61
| 0.722222
|
938b6bdbbc735a3132fd22baf3013dbe46c7c4ed
| 1,139
|
exs
|
Elixir
|
config/config.exs
|
wrren/baiji_generator.ex
|
7c36c0089d6b88576fa6e064e86c342f03729003
|
[
"MIT"
] | null | null | null |
config/config.exs
|
wrren/baiji_generator.ex
|
7c36c0089d6b88576fa6e064e86c342f03729003
|
[
"MIT"
] | null | null | null |
config/config.exs
|
wrren/baiji_generator.ex
|
7c36c0089d6b88576fa6e064e86c342f03729003
|
[
"MIT"
] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# NOTE(review): `Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` — consider migrating when bumping the Elixir requirement.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
#     config :baiji_generator, key: :value
#
# and access this configuration in your application as:
#
#     Application.get_env(:baiji_generator, :key)
#
# You can also configure a 3rd-party app:
#
#     config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
| 36.741935
| 73
| 0.755048
|
938b8dbe25e77c7c73d2753894c9769a75832fac
| 464
|
ex
|
Elixir
|
lib/event_loop/event_callbacks.ex
|
cruessler/lafamiglia
|
084915a2d44a5e69fb6ad9321eac08ced0e3016a
|
[
"MIT"
] | 5
|
2016-10-20T10:00:59.000Z
|
2017-11-19T08:14:18.000Z
|
lib/event_loop/event_callbacks.ex
|
cruessler/lafamiglia
|
084915a2d44a5e69fb6ad9321eac08ced0e3016a
|
[
"MIT"
] | 39
|
2020-04-22T05:27:32.000Z
|
2022-03-13T17:22:26.000Z
|
lib/event_loop/event_callbacks.ex
|
cruessler/lafamiglia
|
084915a2d44a5e69fb6ad9321eac08ced0e3016a
|
[
"MIT"
] | null | null | null |
defmodule LaFamiglia.EventCallbacks do
  @moduledoc """
  This module contains callbacks for models that need to modify the event queue
  after they have been inserted, updated or deleted. All calls are forwarded to
  the event queue.
  """

  @doc "Enqueues `event` as a new event and returns `{:ok, event}`."
  def send_to_queue(event), do: forward({:new_event, event}, event)

  @doc "Cancels `event` in the queue and returns `{:ok, event}`."
  def drop_from_queue(event), do: forward({:cancel_event, event}, event)

  # Casts `message` to the queue and echoes the event back in an :ok tuple,
  # which is the shape both public callbacks return.
  defp forward(message, event) do
    LaFamiglia.EventQueue.cast(message)
    {:ok, event}
  end
end
| 23.2
| 79
| 0.721983
|
938b93c808712dfae09c3bc71e68ca88d977c62c
| 2,563
|
ex
|
Elixir
|
clients/service_management/lib/google_api/service_management/v1/model/log_descriptor.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/service_management/lib/google_api/service_management/v1/model/log_descriptor.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/service_management/lib/google_api/service_management/v1/model/log_descriptor.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | 1
|
2018-07-28T20:50:50.000Z
|
2018-07-28T20:50:50.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ServiceManagement.V1.Model.LogDescriptor do
  @moduledoc """
  A description of a log type. Example in YAML format: - name: library.googleapis.com/activity_history description: The history of borrowing and returning library items. display_name: Activity labels: - key: /customer_id description: Identifier of a library customer

  ## Attributes

  - description (String): A human-readable description of this log. This information appears in the documentation and can contain details. Defaults to: `null`.
  - displayName (String): The human-readable name for this log. This information appears on the user interface and should be concise. Defaults to: `null`.
  - labels (List[LabelDescriptor]): The set of labels that are available to describe a specific log entry. Runtime requests that contain labels not specified here are considered invalid. Defaults to: `null`.
  - name (String): The name of the log. It must be less than 512 characters long and can include the following characters: upper- and lower-case alphanumeric characters [A-Za-z0-9], and punctuation characters including slash, underscore, hyphen, period [/_-.]. Defaults to: `null`.
  """

  # These atoms need no quoting; the plain literals name the same keys,
  # all defaulting to nil.
  defstruct [:description, :displayName, :labels, :name]
end
# Decoding: the only non-scalar field, `labels`, is deserialized as a list
# of LabelDescriptor models; the remaining scalar fields pass through.
defimpl Poison.Decoder, for: GoogleApi.ServiceManagement.V1.Model.LogDescriptor do
  import GoogleApi.ServiceManagement.V1.Deserializer

  def decode(value, options) do
    value
    |> deserialize(:"labels", :list, GoogleApi.ServiceManagement.V1.Model.LabelDescriptor, options)
  end
end
# Encoding delegates to the shared serializer; per its name,
# `serialize_non_nil` presumably omits nil fields — confirm in Deserializer.
defimpl Poison.Encoder, for: GoogleApi.ServiceManagement.V1.Model.LogDescriptor do
  def encode(value, options) do
    GoogleApi.ServiceManagement.V1.Deserializer.serialize_non_nil(value, options)
  end
end
| 47.462963
| 303
| 0.754194
|
938bc11f13d879b65cac83a5c2bb87f8f855802a
| 61
|
ex
|
Elixir
|
lib/erlixir_web/views/page_view.ex
|
itsemilano/erlixir
|
39fdcb86a9ccd55058682b3263d40efb9cbad11f
|
[
"MIT"
] | null | null | null |
lib/erlixir_web/views/page_view.ex
|
itsemilano/erlixir
|
39fdcb86a9ccd55058682b3263d40efb9cbad11f
|
[
"MIT"
] | null | null | null |
lib/erlixir_web/views/page_view.ex
|
itsemilano/erlixir
|
39fdcb86a9ccd55058682b3263d40efb9cbad11f
|
[
"MIT"
] | null | null | null |
defmodule ErlixirWeb.PageView do
  # All behaviour is injected by the `use` macro below; this module body is
  # intentionally empty. NOTE(review): by Phoenix convention this is the view
  # for PageController templates — confirm against ErlixirWeb.__using__/1.
  use ErlixirWeb, :view
end
| 15.25
| 32
| 0.803279
|
938bf3fe44e1f842232cf03c1d51ce8af712304f
| 328
|
ex
|
Elixir
|
lib/grizzly/command_class/basic.ex
|
pragdave/grizzly
|
bcd7b46ab2cff1797dac04bc3cd12a36209dd579
|
[
"Apache-2.0"
] | null | null | null |
lib/grizzly/command_class/basic.ex
|
pragdave/grizzly
|
bcd7b46ab2cff1797dac04bc3cd12a36209dd579
|
[
"Apache-2.0"
] | null | null | null |
lib/grizzly/command_class/basic.ex
|
pragdave/grizzly
|
bcd7b46ab2cff1797dac04bc3cd12a36209dd579
|
[
"Apache-2.0"
] | null | null | null |
defmodule Grizzly.CommandClass.Basic do
  @moduledoc """
  Value encoding for the Basic command class.
  """

  @type value :: :on | :off
  @type value_byte :: 0x00 | 0xFF

  @doc """
  Encodes an on/off value into its wire byte.

  Returns `{:ok, 0xFF}` for `:on`, `{:ok, 0x00}` for `:off`, and
  `{:error, :invalid_arg, arg}` for any other argument.
  """
  @spec encode_value(value) :: {:ok, value_byte} | {:error, :invalid_arg, any()}
  def encode_value(value) do
    case value do
      :on -> {:ok, 0xFF}
      :off -> {:ok, 0x00}
      other -> {:error, :invalid_arg, other}
    end
  end
end
| 32.8
| 80
| 0.658537
|
938c1b0c783d836331ee6f869d2e6b8afd62a32c
| 1,224
|
exs
|
Elixir
|
test/channels/my_room_channel_test.exs
|
KazuCocoa/myChatEx
|
27871c7d9715987a664caa10a7c79b2682f6e56c
|
[
"MIT"
] | null | null | null |
test/channels/my_room_channel_test.exs
|
KazuCocoa/myChatEx
|
27871c7d9715987a664caa10a7c79b2682f6e56c
|
[
"MIT"
] | null | null | null |
test/channels/my_room_channel_test.exs
|
KazuCocoa/myChatEx
|
27871c7d9715987a664caa10a7c79b2682f6e56c
|
[
"MIT"
] | null | null | null |
defmodule MyChatEx.MyRoomChannelTest do
  use MyChatEx.ChannelCase, async: true

  alias MyChatEx.UserSocket

  # Before each test: connect a socket authenticated with a token signed for
  # user "123", then join the "my_room:lobby" topic. Both the socket and the
  # user id are placed in the test context.
  setup do
    user_id = "123"
    token = Phoenix.Token.sign(@endpoint, "user", user_id)
    {:ok, socket} = connect(UserSocket, %{"token" => token})
    {:ok, _reply, socket} = subscribe_and_join(socket, "my_room:lobby")
    {:ok, socket: socket, user: user_id}
  end

  # After joining, the socket carries the topic plus room/user assigns.
  test "join to my_room:lobby", %{socket: socket, user: user_id} do
    assert socket.topic == "my_room:lobby"
    assert socket.assigns.room_id == "lobby"
    assert socket.assigns.user_id == user_id
  end

  # "ping" is a synchronous push that must be replied to with :ok.
  test "ping replies with status ok", %{socket: socket} do
    ref = push socket, "ping", %{}
    assert_reply ref, :ok, %{}
  end

  # A pushed "new_message" must be rebroadcast to all topic subscribers
  # with the same payload.
  test "new_message broadcasts to my_room:lobby", %{socket: socket} do
    push socket, "new_message", %{"name" => "user_id", "message" => "hello world"}
    assert_broadcast "new_message", %{"name" => "user_id", "message" => "hello world"}
  end

  # A broadcast originating elsewhere in the room must be pushed down to
  # this client socket.
  test "broadcasts are pushed to the client", %{socket: socket} do
    broadcast_from! socket, "new_message", %{"name" => "user_id", "message" => "hello world"}
    assert_push "new_message", %{"name" => "user_id", "message" => "hello world"}
  end
end
| 33.081081
| 93
| 0.656863
|
938c2c4e3cc4427356c1a8f82fb9c89ba4fc091b
| 984
|
exs
|
Elixir
|
exercises/02-higher-order-functions/reduce/increasing/tests.exs
|
DennisWinnepenninckx/distributed-applications
|
06743e4e2a09dc52ff52be831e486bb073916173
|
[
"BSD-3-Clause"
] | 1
|
2021-09-22T09:52:11.000Z
|
2021-09-22T09:52:11.000Z
|
exercises/02-higher-order-functions/reduce/increasing/tests.exs
|
DennisWinnepenninckx/distributed-applications
|
06743e4e2a09dc52ff52be831e486bb073916173
|
[
"BSD-3-Clause"
] | 22
|
2019-06-19T18:58:13.000Z
|
2020-03-16T14:43:06.000Z
|
exercises/02-higher-order-functions/reduce/increasing/tests.exs
|
DennisWinnepenninckx/distributed-applications
|
06743e4e2a09dc52ff52be831e486bb073916173
|
[
"BSD-3-Clause"
] | 32
|
2019-09-19T03:25:11.000Z
|
2020-10-06T15:01:47.000Z
|
defmodule Setup do
  @moduledoc """
  Locates and loads the shared test-helper script by walking up the
  directory tree, then hands control to `Shared.setup/1`.
  """

  # Name of the helper script searched for in each ancestor directory.
  @script "shared.exs"

  @doc """
  Searches `directory` (default `"."`) and its ancestors for `shared.exs`,
  loads the first one found, and calls `Shared.setup/1` with this file's
  directory.
  """
  def setup(directory \\ ".") do
    candidate = Path.join(directory, @script)

    case File.exists?(candidate) do
      true ->
        Code.require_file(candidate)
        Shared.setup(__DIR__)

      false ->
        # Not in this directory — retry one level up. NOTE(review): assumes
        # the script exists somewhere above; otherwise this never terminates.
        setup(Path.join(directory, ".."))
    end
  end
end
# Load the shared helpers before the test module below is compiled.
Setup.setup()
defmodule Tests do
  use ExUnit.Case, async: true

  import Shared

  # `check` is a test-generating macro imported from the shared.exs helper;
  # each line below becomes one test asserting the given equality.
  #
  # NOTE(review): `[1, 1]` is expected to be `true`, so `Util.increasing?/1`
  # is non-strict (equal neighbours allowed) despite its name.
  check that: Util.increasing?([]), is_equal_to: true
  check that: Util.increasing?([1]), is_equal_to: true
  check that: Util.increasing?([1, 2]), is_equal_to: true
  check that: Util.increasing?([1, 1]), is_equal_to: true
  check that: Util.increasing?([1, 2, 3]), is_equal_to: true
  check that: Util.increasing?([1, 4, 6, 10]), is_equal_to: true
  check that: Util.increasing?([1, 4, 6, 10, 100, 1000]), is_equal_to: true
  check that: Util.increasing?([1, 0]), is_equal_to: false
  check that: Util.increasing?([5, 4, 3, 2, 1]), is_equal_to: false
  check that: Util.increasing?([1, 2, 3, 2, 3, 4]), is_equal_to: false
end
| 28.114286
| 75
| 0.662602
|
938c32effbcc94e5399f86d57ad972e0277b6870
| 1,816
|
exs
|
Elixir
|
test/controllers/pet_rescue_controller_test.exs
|
dogIDs/sponsor-dot-dog
|
07b7fe90ee12157c144d0d164ca8035e509e59bb
|
[
"MIT"
] | null | null | null |
test/controllers/pet_rescue_controller_test.exs
|
dogIDs/sponsor-dot-dog
|
07b7fe90ee12157c144d0d164ca8035e509e59bb
|
[
"MIT"
] | 1
|
2016-03-28T03:13:34.000Z
|
2020-08-11T22:26:12.000Z
|
test/controllers/pet_rescue_controller_test.exs
|
dogIDs/sponsor-dot-dog
|
07b7fe90ee12157c144d0d164ca8035e509e59bb
|
[
"MIT"
] | null | null | null |
defmodule SponsorDotDog.PetRescueControllerTest do
  use SponsorDotDog.ConnCase

  alias SponsorDotDog.PetRescue

  # Every string field is populated so create/update assertions can locate
  # the inserted record again with Repo.get_by/2 using the same map.
  @valid_attrs %{address1: "some content", address2: "some content", city: "some content", country: "some content", email: "some content", fax: "some content", phone: "some content", rescue_id: "some content", rescue_name: "some content", zip: "some content"}
  # Empty map: relies on changeset validations to reject the request.
  @invalid_attrs %{}

  test "lists all entries on index", %{conn: conn} do
    conn = get conn, pet_rescue_path(conn, :index)
    assert html_response(conn, 200) =~ "Listing petrescues"
  end

  # Valid create: redirects to index and the record is persisted.
  test "creates resource and redirects when data is valid", %{conn: conn} do
    conn = post conn, pet_rescue_path(conn, :create), pet_rescue: @valid_attrs
    assert redirected_to(conn) == pet_rescue_path(conn, :index)
    assert Repo.get_by(PetRescue, @valid_attrs)
  end

  # Invalid create: re-renders the "new" form with a 200.
  test "does not create resource and renders errors when data is invalid", %{conn: conn} do
    conn = post conn, pet_rescue_path(conn, :create), pet_rescue: @invalid_attrs
    assert html_response(conn, 200) =~ "New pet rescue"
  end

  test "shows chosen resource", %{conn: conn} do
    pet_rescue = Repo.insert! %PetRescue{}
    conn = get conn, pet_rescue_path(conn, :show, pet_rescue)
    assert html_response(conn, 200) =~ "Show pet rescue"
  end

  # -1 can never be a real id, so show must 404.
  test "renders page not found when id is nonexistent", %{conn: conn} do
    assert_error_sent 404, fn ->
      get conn, pet_rescue_path(conn, :show, -1)
    end
  end

  # Valid update: redirects to show and the updated values are persisted.
  test "updates chosen resource and redirects when data is valid", %{conn: conn} do
    pet_rescue = Repo.insert! %PetRescue{}
    conn = put conn, pet_rescue_path(conn, :update, pet_rescue), pet_rescue: @valid_attrs
    assert redirected_to(conn) == pet_rescue_path(conn, :show, pet_rescue)
    assert Repo.get_by(PetRescue, @valid_attrs)
  end
end
| 42.232558
| 259
| 0.713656
|
938c34c5e80b1846b6f58f109e100fce496f9ce4
| 1,784
|
ex
|
Elixir
|
apps/tai/lib/tai/venue_adapters/bitmex/products.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | null | null | null |
apps/tai/lib/tai/venue_adapters/bitmex/products.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | null | null | null |
apps/tai/lib/tai/venue_adapters/bitmex/products.ex
|
ihorkatkov/tai
|
09f9f15d2c385efe762ae138a8570f1e3fd41f26
|
[
"MIT"
] | null | null | null |
defmodule Tai.VenueAdapters.Bitmex.Products do
  @moduledoc """
  Builds `Tai.Venues.Product` structs from BitMEX instruments.
  """

  @doc """
  Fetches up to 500 instruments from the BitMEX REST API and converts each
  to a venue product. Instruments without a lot size are skipped.

  Returns `{:ok, products}` or `{:error, reason}` on a failed request.
  """
  def products(venue_id) do
    with {:ok, instruments, _rate_limit} <-
           ExBitmex.Rest.Instruments.all(%{start: 0, count: 500}) do
      venue_products =
        Enum.flat_map(instruments, fn instrument ->
          case build(instrument, venue_id) do
            nil -> []
            product -> [product]
          end
        end)

      {:ok, venue_products}
    else
      {:error, reason, _} ->
        {:error, reason}
    end
  end

  # No lot size means the instrument cannot be traded as a product; skip it.
  defp build(%ExBitmex.Instrument{lot_size: nil}, _), do: nil

  defp build(instrument, venue_id) do
    tick = Decimal.cast(instrument.tick_size)
    lot = Decimal.cast(instrument.lot_size)

    # `&&` guards the optional fields: a nil value stays nil, anything else
    # is cast to Decimal.
    %Tai.Venues.Product{
      venue_id: venue_id,
      symbol: to_symbol(instrument.symbol),
      venue_symbol: instrument.symbol,
      base: instrument.underlying,
      quote: instrument.quote_currency,
      status: Tai.VenueAdapters.Bitmex.ProductStatus.normalize(instrument.state),
      type: :future,
      price_increment: tick,
      size_increment: lot,
      min_price: tick,
      min_size: Decimal.new(1),
      max_price: instrument.max_price && Decimal.cast(instrument.max_price),
      max_size: instrument.max_order_qty && Decimal.cast(instrument.max_order_qty),
      value: lot,
      is_quanto: instrument.is_quanto,
      is_inverse: instrument.is_inverse,
      maker_fee: instrument.maker_fee && Decimal.cast(instrument.maker_fee),
      taker_fee: instrument.taker_fee && Decimal.cast(instrument.taker_fee)
    }
  end

  @doc "Downcases a venue symbol into the internal atom representation."
  # NOTE(review): String.to_atom/1 on venue-supplied symbols creates atoms
  # dynamically; safe only while the instrument universe stays bounded.
  def to_symbol(venue_symbol) do
    venue_symbol
    |> String.downcase()
    |> String.to_atom()
  end

  @doc "Converts an internal symbol atom back to the venue's uppercase form."
  def from_symbol(symbol) do
    symbol
    |> Atom.to_string()
    |> String.upcase()
  end
end
| 34.307692
| 101
| 0.674327
|
938c661acca90b2eae57a96a73077213cd7e47c9
| 3,298
|
ex
|
Elixir
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/creative_group.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/creative_group.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/creative_group.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Model.CreativeGroup do
  @moduledoc """
  Contains properties of a creative group.

  ## Attributes

  *   `accountId` (*type:* `String.t`, *default:* `nil`) - Account ID of this creative group. This is a read-only field that can be left blank.
  *   `advertiserId` (*type:* `String.t`, *default:* `nil`) - Advertiser ID of this creative group. This is a required field on insertion.
  *   `advertiserIdDimensionValue` (*type:* `GoogleApi.DFAReporting.V34.Model.DimensionValue.t`, *default:* `nil`) - Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
  *   `groupNumber` (*type:* `integer()`, *default:* `nil`) - Subgroup of the creative group. Assign your creative groups to a subgroup in order to filter or manage them more easily. This field is required on insertion and is read-only after insertion. Acceptable values are 1 to 2, inclusive.
  *   `id` (*type:* `String.t`, *default:* `nil`) - ID of this creative group. This is a read-only, auto-generated field.
  *   `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#creativeGroup".
  *   `name` (*type:* `String.t`, *default:* `nil`) - Name of this creative group. This is a required field and must be less than 256 characters long and unique among creative groups of the same advertiser.
  *   `subaccountId` (*type:* `String.t`, *default:* `nil`) - Subaccount ID of this creative group. This is a read-only field that can be left blank.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :accountId => String.t() | nil,
          :advertiserId => String.t() | nil,
          :advertiserIdDimensionValue =>
            GoogleApi.DFAReporting.V34.Model.DimensionValue.t() | nil,
          :groupNumber => integer() | nil,
          :id => String.t() | nil,
          :kind => String.t() | nil,
          :name => String.t() | nil,
          :subaccountId => String.t() | nil
        }

  # `field/1,2` comes from GoogleApi.Gax.ModelBase (the `use` above); `as:`
  # presumably points the nested attribute at its model module for decoding.
  field(:accountId)
  field(:advertiserId)
  field(:advertiserIdDimensionValue, as: GoogleApi.DFAReporting.V34.Model.DimensionValue)
  field(:groupNumber)
  field(:id)
  field(:kind)
  field(:name)
  field(:subaccountId)
end
# Poison decoding delegates to the model's own `decode/2`, which is
# provided via `GoogleApi.Gax.ModelBase`.
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V34.Model.CreativeGroup do
  def decode(value, options) do
    GoogleApi.DFAReporting.V34.Model.CreativeGroup.decode(value, options)
  end
end
# Poison encoding reuses the shared encoder from `GoogleApi.Gax.ModelBase`.
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V34.Model.CreativeGroup do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 47.797101
| 293
| 0.702547
|
938c8096a60f5027f8af0d021a9e97ccde8235fe
| 1,340
|
exs
|
Elixir
|
mix.exs
|
rak/rak
|
cdfcd7227b38d9c6cac10bd778b03361b28f67df
|
[
"MIT"
] | 2
|
2018-09-30T02:59:33.000Z
|
2018-10-08T19:55:38.000Z
|
mix.exs
|
rak/rak
|
cdfcd7227b38d9c6cac10bd778b03361b28f67df
|
[
"MIT"
] | null | null | null |
mix.exs
|
rak/rak
|
cdfcd7227b38d9c6cac10bd778b03361b28f67df
|
[
"MIT"
] | null | null | null |
defmodule Rak.Mixfile do
  use Mix.Project

  # OTP application configuration; the extra applications depend on the
  # current Mix environment (see extra_applications/1 below).
  def application do
    [extra_applications: extra_applications(Mix.env())]
  end

  # Project definition consumed by Mix.
  def project do
    [
      app: :rak,
      name: "Rak",
      description: "Game Server Maker",
      version: "0.1.2",
      elixir: "~> 1.5",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      package: package(),
      docs: [
        main: "userguide",
        logo: "./images/logo_icon_light.png",
        extras: ["UserGuide.md", "Contributing.md", "License.md"]
      ],
      test_coverage: [tool: Coverex.Task, coveralls: true]
    ]
  end

  # Hex package metadata.
  defp package do
    [
      name: "rak",
      homepage_url: "https://rak.github.io",
      source_url: "https://github.com/rak/rak",
      maintainers: ["https://github.com/stelcheck"],
      files: ["lib", "config", "mix.exs", "*.md"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/rak/rak"}
    ]
  end

  # Dev additionally starts :reprise (code reloading) on top of the default.
  defp extra_applications(:dev), do: [:reprise] ++ extra_applications(nil)
  defp extra_applications(_default), do: [:logger]

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:reprise, "~> 0.5", only: :dev},
      {:credo, "~> 0.8", only: [:dev, :test], runtime: false},
      {:ex_doc, "~> 0.18", only: :dev, runtime: false},
      {:coverex, "~> 1.4.15", only: :test}
    ]
  end
end
| 23.508772
| 78
| 0.557463
|
938c8f161ab91a1c9caa5d3dcba4ad5eb074c1ca
| 257
|
ex
|
Elixir
|
lib/jerboa.ex
|
ACollectionOfAtoms/jerboa
|
16000d450bb60b544b5c91f0fd7540a31b6c90b8
|
[
"Apache-2.0"
] | 14
|
2016-12-22T10:38:34.000Z
|
2021-03-11T23:20:26.000Z
|
lib/jerboa.ex
|
ACollectionOfAtoms/jerboa
|
16000d450bb60b544b5c91f0fd7540a31b6c90b8
|
[
"Apache-2.0"
] | 94
|
2016-12-22T13:13:29.000Z
|
2021-02-10T14:22:32.000Z
|
lib/jerboa.ex
|
ACollectionOfAtoms/jerboa
|
16000d450bb60b544b5c91f0fd7540a31b6c90b8
|
[
"Apache-2.0"
] | 6
|
2017-04-03T20:23:00.000Z
|
2022-03-06T10:14:30.000Z
|
defmodule Jerboa do
  @moduledoc """
  STUN/TURN encoder, decoder and client library.

  Jerboa consists of two components:

    * `Jerboa.Format` - an encoding & decoding library for the STUN wire format
    * `Jerboa.Client` - an Elixir STUN/TURN client
  """
end
| 23.363636
| 75
| 0.727626
|
938cc4c9c98aadee73e9f8931971e418d1bb925f
| 144
|
exs
|
Elixir
|
test/tictactoe_test.exs
|
ripertn/tictactoe
|
ff4641d5931b5e7c1ac4761d5796663dcfed4e08
|
[
"MIT"
] | 1
|
2018-07-16T10:05:57.000Z
|
2018-07-16T10:05:57.000Z
|
test/tictactoe_test.exs
|
ripertn/tictactoe
|
ff4641d5931b5e7c1ac4761d5796663dcfed4e08
|
[
"MIT"
] | null | null | null |
test/tictactoe_test.exs
|
ripertn/tictactoe
|
ff4641d5931b5e7c1ac4761d5796663dcfed4e08
|
[
"MIT"
] | null | null | null |
defmodule TictactoeTest do
  use ExUnit.Case
  doctest Tictactoe

  # Default mix-generated smoke test for the scaffolded hello/0 function.
  test "greets the world" do
    assert Tictactoe.hello() == :world
  end
end
| 16
| 38
| 0.722222
|
938d10b9bda532e21c6a27591f336dcd0659d45d
| 442
|
ex
|
Elixir
|
lib/elias.ex
|
oestrich/hcl-elixir
|
aba949284785f81c4e690755cfec5e557c7bda4d
|
[
"MIT"
] | 1
|
2019-10-17T06:15:10.000Z
|
2019-10-17T06:15:10.000Z
|
lib/elias.ex
|
oestrich/hcl-elixir
|
aba949284785f81c4e690755cfec5e557c7bda4d
|
[
"MIT"
] | null | null | null |
lib/elias.ex
|
oestrich/hcl-elixir
|
aba949284785f81c4e690755cfec5e557c7bda4d
|
[
"MIT"
] | 1
|
2019-10-18T09:01:20.000Z
|
2019-10-18T09:01:20.000Z
|
defmodule Elias do
  @moduledoc """
  Elias is a UCL parser
  [Universal Configuration Language](https://github.com/vstakhov/libucl)
  """

  alias Elias.AST
  alias Elias.Merge
  alias Elias.Parser

  @doc """
  Parse an UCL string into a map
  """
  @spec parse(String.t()) :: map()
  def parse(string) do
    # NOTE(review): only the {:ok, ast} branch is matched — a parser failure
    # (presumably an {:error, _} tuple) raises CaseClauseError here. Confirm
    # whether crashing on invalid input is intended.
    case Parser.parse(string) do
      {:ok, ast} ->
        # Walk the raw parse tree into data, then collapse duplicate keys.
        ast
        |> AST.walk()
        |> Merge.collapse()
    end
  end
end
| 17.68
| 72
| 0.60181
|
938d1be719d39f5f8aaa1cf8d04a19f3929a7b57
| 120
|
ex
|
Elixir
|
debug_eample/lib/debug_eample.ex
|
gguimond/elixir
|
415a7ed10fb44d84089ff89fb651b765b5f5e53f
|
[
"MIT"
] | 1
|
2019-03-28T09:08:16.000Z
|
2019-03-28T09:08:16.000Z
|
debug_eample/lib/debug_eample.ex
|
gguimond/elixir
|
415a7ed10fb44d84089ff89fb651b765b5f5e53f
|
[
"MIT"
] | null | null | null |
debug_eample/lib/debug_eample.ex
|
gguimond/elixir
|
415a7ed10fb44d84089ff89fb651b765b5f5e53f
|
[
"MIT"
] | null | null | null |
defmodule DebugEample do
  @moduledoc """
  Small example module used in a debugging exercise.
  """

  @doc """
  Returns the weighted sum `a * 2 + b * 3 + c * 5`.
  """
  def cpu_burns(a, b, c) do
    [a, b, c]
    |> Enum.zip([2, 3, 5])
    |> Enum.map(fn {value, weight} -> value * weight end)
    |> Enum.sum()
  end
end
| 12
| 27
| 0.475
|
938d2bd79279f0fd0fdf5e18e5824c3423b187e0
| 1,425
|
ex
|
Elixir
|
lib/text_based_fps/player_commands/move.ex
|
guisehn/elixir-text-based-fps
|
59a815da337309297f8b42ef3481277dd4d9b371
|
[
"MIT"
] | 1
|
2022-03-02T12:18:07.000Z
|
2022-03-02T12:18:07.000Z
|
lib/text_based_fps/player_commands/move.ex
|
guisehn/elixir-text-based-fps
|
59a815da337309297f8b42ef3481277dd4d9b371
|
[
"MIT"
] | 12
|
2021-05-31T21:41:09.000Z
|
2021-07-30T03:18:09.000Z
|
lib/text_based_fps/player_commands/move.ex
|
guisehn/elixir-text-based-fps
|
59a815da337309297f8b42ef3481277dd4d9b371
|
[
"MIT"
] | null | null | null |
defmodule TextBasedFPS.PlayerCommand.Move do
  import TextBasedFPS.CommandHelper
  import TextBasedFPS.Text, only: [highlight: 1]

  alias TextBasedFPS.{Direction, PlayerCommand, Room, ServerState}

  @behaviour PlayerCommand

  # Moves the player one step. `direction` is either "" (keep the player's
  # current facing direction) or a direction name parsed by Direction.
  # Requires the player to be alive and in a room (require_alive_player/2).
  @impl true
  def execute(state, player, direction) do
    with {:ok, room} <- require_alive_player(state, player) do
      room_player = Room.get_player(room, player.key)
      parsed_direction = parse_direction(room_player, direction)
      move(state, room, room_player, parsed_direction)
    end
  end

  # An empty argument means "move the way I'm already facing".
  defp parse_direction(room_player, ""), do: room_player.direction
  defp parse_direction(_room_player, direction), do: Direction.from_string(direction)

  # nil direction: Direction.from_string/1 did not recognize the input.
  defp move(state, _room, _room_player, nil) do
    {:error, state, "Unknown direction. Use #{highlight("<north/south/west/east>")}"}
  end

  defp move(state, room, room_player, direction) do
    {x, y} = Direction.calculate_movement(direction, room_player.coordinates)

    case Room.place_player_at(room, room_player.player_key, {x, y}) do
      # Move succeeded; the player may also have picked up an object.
      {:ok, updated_room, object_grabbed} ->
        updated_state = ServerState.update_room(state, updated_room)
        {:ok, updated_state, grabbed_object_message(object_grabbed)}

      # Target tile rejected by Room.place_player_at/3 (reason discarded).
      {:error, _} ->
        {:error, state, "You can't go in that direction."}
    end
  end

  # No object grabbed -> no extra message appended to the success reply.
  defp grabbed_object_message(nil), do: nil
  defp grabbed_object_message(object), do: "You found: #{object}"
end
| 34.756098
| 85
| 0.723509
|
938d783cec4b2470f776c69c6e72f7451b73a313
| 2,302
|
ex
|
Elixir
|
lib/protobuf/protoc/context.ex
|
zolakeith/protobuf
|
2d412b260c48be8f90e05408f8569cef2f6d3ace
|
[
"MIT"
] | 419
|
2017-04-02T13:10:51.000Z
|
2020-11-15T15:53:17.000Z
|
lib/protobuf/protoc/context.ex
|
zolakeith/protobuf
|
2d412b260c48be8f90e05408f8569cef2f6d3ace
|
[
"MIT"
] | 101
|
2020-11-22T20:20:11.000Z
|
2022-03-06T16:09:26.000Z
|
lib/protobuf/protoc/context.ex
|
zolakeith/protobuf
|
2d412b260c48be8f90e05408f8569cef2f6d3ace
|
[
"MIT"
] | 83
|
2017-07-24T21:50:04.000Z
|
2020-11-15T08:52:34.000Z
|
defmodule Protobuf.Protoc.Context do
  @moduledoc false

  @type t() :: %__MODULE__{}

  # Plugins passed by options
  defstruct plugins: [],

            ### All files scope

            # Mapping from file name to (mapping from type name to metadata, like elixir type name)
            # %{"example.proto" => %{".example.FooMsg" => %{type_name: "Example.FooMsg"}}}
            global_type_mapping: %{},

            ### One file scope

            # Package name
            package: nil,
            package_prefix: nil,
            module_prefix: nil,
            syntax: nil,
            # Mapping from type_name to metadata. It's merged type mapping of dependencies files including itself
            # %{".example.FooMsg" => %{type_name: "Example.FooMsg"}}
            dep_type_mapping: %{},

            # For a message

            # Nested namespace when generating nested messages. It should be joined to get the full namespace
            namespace: [],

            # Include binary descriptors in the generated protobuf modules
            # And expose them via the `descriptor/0` function
            gen_descriptors?: false,

            # Module to transform values before and after encode and decode
            transform_module: nil,

            # Generate one file per module with "proper" directory structure
            # (according to Elixir conventions) if this is true
            one_file_per_module?: false,

            # Elixirpb.FileOptions
            custom_file_options: %{}

  # Copies the Elixirpb file-level extension (if any) out of a file
  # descriptor's options into the context.
  @spec custom_file_options_from_file_desc(t(), Google.Protobuf.FileDescriptorProto.t()) :: t()
  def custom_file_options_from_file_desc(ctx, desc)

  # No options on the descriptor: reset custom options to an empty map.
  # NOTE(review): this clause leaves :module_prefix untouched while the
  # clause below overwrites it — confirm the asymmetry is intended.
  def custom_file_options_from_file_desc(
        %__MODULE__{} = ctx,
        %Google.Protobuf.FileDescriptorProto{options: nil}
      ) do
    %__MODULE__{ctx | custom_file_options: %{}}
  end

  def custom_file_options_from_file_desc(
        %__MODULE__{} = ctx,
        %Google.Protobuf.FileDescriptorProto{options: options}
      ) do
    # Fall back to an empty extension struct when the file declares none.
    custom_file_opts =
      Google.Protobuf.FileOptions.get_extension(options, Elixirpb.PbExtension, :file) ||
        Elixirpb.PbExtension.new()

    %__MODULE__{
      ctx
      | custom_file_options: custom_file_opts,
        module_prefix: Map.get(custom_file_opts, :module_prefix)
    }
  end
end
| 33.362319
| 113
| 0.628584
|
938dcffd5e0d990004f9f419497b0d550cd410b5
| 5,289
|
ex
|
Elixir
|
clients/analytics/lib/google_api/analytics/v3/model/webproperty.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/analytics/lib/google_api/analytics/v3/model/webproperty.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/analytics/lib/google_api/analytics/v3/model/webproperty.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Analytics.V3.Model.Webproperty do
  @moduledoc """
  JSON template for an Analytics web property.

  ## Attributes

  *   `accountId` (*type:* `String.t`, *default:* `nil`) - Account ID to which this web property belongs.
  *   `childLink` (*type:* `GoogleApi.Analytics.V3.Model.WebpropertyChildLink.t`, *default:* `nil`) - Child link for this web property. Points to the list of views (profiles) for this web property.
  *   `created` (*type:* `DateTime.t`, *default:* `nil`) - Time this web property was created.
  *   `dataRetentionResetOnNewActivity` (*type:* `boolean()`, *default:* `nil`) - Set to true to reset the retention period of the user identifier with each new event from that user (thus setting the expiration date to current time plus retention period).
      Set to false to delete data associated with the user identifier automatically after the rentention period.
      This property cannot be set on insert.
  *   `dataRetentionTtl` (*type:* `String.t`, *default:* `nil`) - The length of time for which user and event data is retained.
      This property cannot be set on insert.
  *   `defaultProfileId` (*type:* `String.t`, *default:* `nil`) - Default view (profile) ID.
  *   `id` (*type:* `String.t`, *default:* `nil`) - Web property ID of the form UA-XXXXX-YY.
  *   `industryVertical` (*type:* `String.t`, *default:* `nil`) - The industry vertical/category selected for this web property.
  *   `internalWebPropertyId` (*type:* `String.t`, *default:* `nil`) - Internal ID for this web property.
  *   `kind` (*type:* `String.t`, *default:* `analytics#webproperty`) - Resource type for Analytics WebProperty.
  *   `level` (*type:* `String.t`, *default:* `nil`) - Level for this web property. Possible values are STANDARD or PREMIUM.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Name of this web property.
  *   `parentLink` (*type:* `GoogleApi.Analytics.V3.Model.WebpropertyParentLink.t`, *default:* `nil`) - Parent link for this web property. Points to the account to which this web property belongs.
  *   `permissions` (*type:* `GoogleApi.Analytics.V3.Model.WebpropertyPermissions.t`, *default:* `nil`) - Permissions the user has for this web property.
  *   `profileCount` (*type:* `integer()`, *default:* `nil`) - View (Profile) count for this web property.
  *   `selfLink` (*type:* `String.t`, *default:* `nil`) - Link for this web property.
  *   `starred` (*type:* `boolean()`, *default:* `nil`) - Indicates whether this web property is starred or not.
  *   `updated` (*type:* `DateTime.t`, *default:* `nil`) - Time this web property was last modified.
  *   `websiteUrl` (*type:* `String.t`, *default:* `nil`) - Website url for this web property.
  """

  # Gax supplies the struct definition, the `field/2` macro and the generic
  # encode/decode plumbing used by the Poison protocol implementations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :accountId => String.t(),
          :childLink => GoogleApi.Analytics.V3.Model.WebpropertyChildLink.t(),
          :created => DateTime.t(),
          :dataRetentionResetOnNewActivity => boolean(),
          :dataRetentionTtl => String.t(),
          :defaultProfileId => String.t(),
          :id => String.t(),
          :industryVertical => String.t(),
          :internalWebPropertyId => String.t(),
          :kind => String.t(),
          :level => String.t(),
          :name => String.t(),
          :parentLink => GoogleApi.Analytics.V3.Model.WebpropertyParentLink.t(),
          :permissions => GoogleApi.Analytics.V3.Model.WebpropertyPermissions.t(),
          :profileCount => integer(),
          :selfLink => String.t(),
          :starred => boolean(),
          :updated => DateTime.t(),
          :websiteUrl => String.t()
        }

  # Field registrations drive JSON (de)serialization; entries with `as:` name
  # the module used to decode nested models or timestamps.
  field(:accountId)
  field(:childLink, as: GoogleApi.Analytics.V3.Model.WebpropertyChildLink)
  field(:created, as: DateTime)
  field(:dataRetentionResetOnNewActivity)
  field(:dataRetentionTtl)
  field(:defaultProfileId)
  field(:id)
  field(:industryVertical)
  field(:internalWebPropertyId)
  field(:kind)
  field(:level)
  field(:name)
  field(:parentLink, as: GoogleApi.Analytics.V3.Model.WebpropertyParentLink)
  field(:permissions, as: GoogleApi.Analytics.V3.Model.WebpropertyPermissions)
  field(:profileCount)
  field(:selfLink)
  field(:starred)
  field(:updated, as: DateTime)
  field(:websiteUrl)
end
defimpl Poison.Decoder, for: GoogleApi.Analytics.V3.Model.Webproperty do
  # Delegate straight to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.Analytics.V3.Model.Webproperty.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.Analytics.V3.Model.Webproperty do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 50.855769
| 255
| 0.683683
|
938dde3bceac6aa1cbde40b674a4afbccc4964d9
| 1,400
|
ex
|
Elixir
|
server/lib/domsegserver_web/live/dataset_live/index.ex
|
arpieb/domseg
|
0c7165d69181e59902730c6e7ac41e8e849edd70
|
[
"Apache-2.0"
] | null | null | null |
server/lib/domsegserver_web/live/dataset_live/index.ex
|
arpieb/domseg
|
0c7165d69181e59902730c6e7ac41e8e849edd70
|
[
"Apache-2.0"
] | 9
|
2021-12-09T18:19:21.000Z
|
2022-01-09T03:45:33.000Z
|
server/lib/domsegserver_web/live/dataset_live/index.ex
|
arpieb/domseg
|
0c7165d69181e59902730c6e7ac41e8e849edd70
|
[
"Apache-2.0"
] | null | null | null |
defmodule DOMSegServerWeb.DatasetLive.Index do
  @moduledoc false

  use DOMSegServerWeb, :live_view

  alias DOMSegServer.Datasets
  alias DOMSegServer.Datasets.Dataset

  @impl true
  def mount(_params, session, socket) do
    socket =
      socket
      |> assign(:datasets, list_datasets())
      |> assign(:user_token, session["user_token"])

    {:ok, socket}
  end

  @impl true
  def handle_params(params, _url, socket) do
    {:noreply, apply_action(socket, socket.assigns.live_action, params)}
  end

  # One clause per live_action; each sets the page title and the dataset
  # the modal (if any) operates on.
  defp apply_action(socket, :edit, %{"id" => id}) do
    assign(socket, page_title: "Edit Dataset", dataset: Datasets.get_dataset!(id))
  end

  defp apply_action(socket, :new, _params) do
    assign(socket, page_title: "New Dataset", dataset: %Dataset{segment_types: []})
  end

  defp apply_action(socket, :index, _params) do
    assign(socket, page_title: "Listing Datasets", dataset: nil)
  end

  @impl true
  def handle_event("delete", %{"id" => id}, socket) do
    {:ok, _} = id |> Datasets.get_dataset!() |> Datasets.delete_dataset()
    {:noreply, assign(socket, :datasets, list_datasets())}
  end

  defp list_datasets, do: Datasets.list_datasets()

  # Template helper: resolves the session token to a user, or nil.
  def get_current_user(nil), do: nil

  def get_current_user(token) do
    DOMSegServer.Accounts.get_user_by_session_token(token)
  end
end
| 23.333333
| 72
| 0.667857
|
938ded74e990e0d9e71c3b388fc8d67aacb0e097
| 2,055
|
exs
|
Elixir
|
mix.exs
|
prashantpawar/kitteh-test
|
8b5b9e8a080b495ea6f506597a1bb66be56de064
|
[
"MIT"
] | null | null | null |
mix.exs
|
prashantpawar/kitteh-test
|
8b5b9e8a080b495ea6f506597a1bb66be56de064
|
[
"MIT"
] | null | null | null |
mix.exs
|
prashantpawar/kitteh-test
|
8b5b9e8a080b495ea6f506597a1bb66be56de064
|
[
"MIT"
] | null | null | null |
defmodule KittehTest.Mixfile do
  use Mix.Project

  def project do
    [
      app: :kitteh_test,
      version: "0.1.0",
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env),
      compilers: [:phoenix, :gettext] ++ Mix.compilers,
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {KittehTest, []},
      applications: [
        :phoenix,
        :phoenix_pubsub,
        :phoenix_html,
        :cowboy,
        :logger,
        :gettext,
        :phoenix_ecto,
        :postgrex,
        :ueberauth_twitter,
        :ex_machina
      ]
    ]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
  defp elixirc_paths(_), do: ["lib", "web"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:extwitter, "~> 0.8.0"},
      {:ueberauth, "~> 0.4"},
      {:ueberauth_twitter, "~> 0.2"},
      {:guardian, "~> 0.14"},
      {:oauth, github: "tim/erlang-oauth"},
      {:phoenix, "~> 1.2.1"},
      {:phoenix_pubsub, "~> 1.0"},
      {:phoenix_ecto, "~> 3.0"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_html, "~> 2.6"},
      {:phoenix_live_reload, "~> 1.0", only: :dev},
      {:gettext, "~> 0.11"},
      {:cowboy, "~> 1.0"},
      {:dialyxir, "~> 0.4", only: [:dev], runtime: false},
      {:credo, "~> 0.8.0-rc7", only: [:dev, :test], runtime: false},
      {:ex_machina, "~> 1.0", only: [:dev, :test]}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      "test": ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 32.109375
| 89
| 0.580049
|
938df69b47d2ca2ee006a7c9f8cf6d0ba6fef84d
| 474
|
exs
|
Elixir
|
test/osrs_api_proxy_web/views/error_view_test.exs
|
ScapeGuru/osrs_api_proxy
|
7d4cace8e5185ec60a324de274df225a58049b7a
|
[
"Apache-2.0"
] | null | null | null |
test/osrs_api_proxy_web/views/error_view_test.exs
|
ScapeGuru/osrs_api_proxy
|
7d4cace8e5185ec60a324de274df225a58049b7a
|
[
"Apache-2.0"
] | 37
|
2021-04-19T23:56:46.000Z
|
2022-02-28T15:14:04.000Z
|
test/osrs_api_proxy_web/views/error_view_test.exs
|
ScapeGuru/osrs_api_proxy
|
7d4cace8e5185ec60a324de274df225a58049b7a
|
[
"Apache-2.0"
] | null | null | null |
defmodule OsrsApiProxyWeb.ErrorViewTest do
  use OsrsApiProxyWeb.ConnCase, async: true

  # render/3 and render_to_string/3 come from Phoenix.View.
  import Phoenix.View

  test "renders 404.json" do
    rendered = render(OsrsApiProxyWeb.ErrorView, "404.json", [])
    assert rendered == %{errors: %{detail: "Not Found"}}
  end

  test "renders 500.json" do
    rendered = render(OsrsApiProxyWeb.ErrorView, "500.json", [])
    assert rendered == %{errors: %{detail: "Internal Server Error"}}
  end
end
| 29.625
| 97
| 0.696203
|
938e026d48d793d2a164dca81324708afa5fe767
| 1,021
|
ex
|
Elixir
|
lib/mix/tasks/shards.ex
|
craig-day/ecto_sharding
|
9a754bf2c0a6ba1ce480ee93685add5472308c2c
|
[
"Apache-2.0"
] | 9
|
2017-09-27T02:47:23.000Z
|
2021-03-17T00:01:29.000Z
|
lib/mix/tasks/shards.ex
|
craig-day/ecto_sharding
|
9a754bf2c0a6ba1ce480ee93685add5472308c2c
|
[
"Apache-2.0"
] | 3
|
2018-02-24T19:25:41.000Z
|
2018-09-17T18:39:20.000Z
|
lib/mix/tasks/shards.ex
|
craig-day/ecto_sharding
|
9a754bf2c0a6ba1ce480ee93685add5472308c2c
|
[
"Apache-2.0"
] | 2
|
2017-09-26T19:24:10.000Z
|
2018-03-07T00:50:10.000Z
|
defmodule Mix.Tasks.Shards do
  @moduledoc false

  alias EctoSharding.Configuration, as: Config

  defmacro __using__(_) do
    quote do
      use Mix.Task
      alias Mix.Tasks.Shards
    end
  end

  # Schema dumps and migration generation only need one shard; every other
  # Ecto task is fanned out to all configured shards.
  def execute(Mix.Tasks.Ecto.Dump = func, args), do: run_first_shard(func, args)
  def execute(Mix.Tasks.Ecto.Gen.Migration = func, args), do: run_first_shard(func, args)
  def execute(func, args), do: run_all_shards(func, args)

  # Re-builds the argv with `--repo` forced to the given shard repo,
  # preserving positional args and any unparseable options.
  defp set_repo(repo, args) do
    {parsed, positional, invalid} = OptionParser.parse(args, aliases: [r: :repo])

    repo_argv =
      parsed
      |> Keyword.merge(repo: repo)
      |> OptionParser.to_argv()

    List.flatten([positional, repo_argv, invalid])
  end

  defp run_all_shards(func, args) do
    Enum.each(Config.shard_repos(), fn {_name, repo} ->
      func.run(set_repo(repo, args))
    end)
  end

  defp run_first_shard(func, args) do
    repo =
      Config.shard_repos()
      |> Map.values()
      |> List.first()

    func.run(set_repo(repo, args))
  end
end
| 23.744186
| 100
| 0.647405
|
938e59a6764d65dd93ef84528be681a52b1f5992
| 637
|
ex
|
Elixir
|
lib/imagineer/image/png/chunk/decoders/time.ex
|
ndemonner/imagineer
|
a6872296756cde19f8f575a7d1854d0fe7cbcb02
|
[
"MIT"
] | 103
|
2015-01-28T19:09:42.000Z
|
2018-10-22T15:05:46.000Z
|
lib/imagineer/image/png/chunk/decoders/time.ex
|
tyre/imagineer
|
a6872296756cde19f8f575a7d1854d0fe7cbcb02
|
[
"MIT"
] | 12
|
2015-07-11T05:12:41.000Z
|
2018-07-21T04:42:47.000Z
|
lib/imagineer/image/png/chunk/decoders/time.ex
|
ndemonner/imagineer
|
a6872296756cde19f8f575a7d1854d0fe7cbcb02
|
[
"MIT"
] | 18
|
2015-06-16T15:48:34.000Z
|
2018-06-22T02:34:40.000Z
|
defmodule Imagineer.Image.PNG.Chunk.Decoders.Time do
  @moduledoc false

  alias Imagineer.Image.PNG

  # The tIME chunk packs the last-modification timestamp as a 16-bit year
  # followed by five 8-bit fields (month, day, hour, minute, second); the
  # PNG spec defines the value as UTC, hence the fixed zone fields below.
  def decode(
        <<year::integer-size(16), month::integer-size(8), day::integer-size(8),
          hour::integer-size(8), minute::integer-size(8), second::integer-size(8)>> = _content,
        %PNG{} = image
      ) do
    last_modified = %DateTime{
      year: year,
      month: month,
      day: day,
      hour: hour,
      minute: minute,
      second: second,
      std_offset: 0,
      utc_offset: 0,
      time_zone: "Etc/UTC",
      zone_abbr: "UTC"
    }

    %PNG{image | last_modified: last_modified}
  end
end
| 24.5
| 95
| 0.525903
|
938e71b0df46aa497e27b21514a972993cdfcdba
| 1,426
|
exs
|
Elixir
|
exercises/strings_and_binaries_4.exs
|
renanlage/programming-elixir-book
|
71e58398269cde2b76a377d28cc906fb528c4134
|
[
"MIT"
] | 8
|
2018-08-26T08:10:08.000Z
|
2021-04-05T16:05:05.000Z
|
exercises/strings_and_binaries_4.exs
|
renanlage/programming-elixir-book
|
71e58398269cde2b76a377d28cc906fb528c4134
|
[
"MIT"
] | null | null | null |
exercises/strings_and_binaries_4.exs
|
renanlage/programming-elixir-book
|
71e58398269cde2b76a377d28cc906fb528c4134
|
[
"MIT"
] | 1
|
2019-10-08T09:56:43.000Z
|
2019-10-08T09:56:43.000Z
|
defmodule StringCalculator do
  @moduledoc """
  Evaluates a charlist of the form `'number op number'`, where `op` is one
  of `+`, `-`, `*` or `/` and the three tokens are whitespace-separated.
  Division uses `/` and therefore returns a float.
  """

  def calculate(charlist) do
    [left, op, right] =
      charlist
      |> to_string()
      |> String.split()

    apply_op(String.to_integer(left), op, String.to_integer(right))
  end

  defp apply_op(a, op, b) do
    case op do
      "+" -> a + b
      "-" -> a - b
      "*" -> a * b
      "/" -> a / b
    end
  end
end
defmodule StringCalculatorCheater do
  @moduledoc """
  Evaluates a charlist arithmetic expression by accumulating digits into a
  left operand and, on the first operator, applying the matching `Kernel`
  function to that operand and the recursively evaluated remainder.
  """

  def calculate(str), do: eval(str, 0)

  # End of input: the accumulator is the result.
  defp eval([], acc), do: acc

  # Whitespace is skipped.
  defp eval([?\s | rest], acc), do: eval(rest, acc)

  # Digits extend the current number (base-10 accumulation).
  defp eval([char | rest], acc) when char in ?0..?9 do
    eval(rest, acc * 10 + (char - ?0))
  end

  # An operator character names the Kernel function to apply.
  defp eval([op | rest], acc) when op in ~c"+-*/" do
    apply(Kernel, List.to_atom([op]), [acc, calculate(rest)])
  end
end
# Smoke checks: each line prints `true` when the calculator agrees with the
# expected result (`10.0 == 10` holds under `==`, so division passes too).
cases = [{~c"100 + 10", 110}, {~c"100 - 10", 90}, {~c"100 * 10", 1000}, {~c"100 / 10", 10}]

Enum.each(cases, fn {input, expected} ->
  IO.puts(StringCalculator.calculate(input) == expected)
end)

Enum.each(cases, fn {input, expected} ->
  IO.puts(StringCalculatorCheater.calculate(input) == expected)
end)
| 35.65
| 73
| 0.698457
|
938e799facf2d8e7d012ecf02935c63a76399437
| 3,144
|
ex
|
Elixir
|
lib/bow/exec.ex
|
kianmeng/bow
|
d0b2ad564b0ccd06eb7d4582b94177877559d4af
|
[
"MIT"
] | 25
|
2017-10-06T14:22:13.000Z
|
2022-01-14T20:59:59.000Z
|
lib/bow/exec.ex
|
kianmeng/bow
|
d0b2ad564b0ccd06eb7d4582b94177877559d4af
|
[
"MIT"
] | 9
|
2017-10-30T06:02:22.000Z
|
2022-01-03T13:57:57.000Z
|
lib/bow/exec.ex
|
kianmeng/bow
|
d0b2ad564b0ccd06eb7d4582b94177877559d4af
|
[
"MIT"
] | 4
|
2018-03-29T12:59:10.000Z
|
2021-09-19T09:10:01.000Z
|
defmodule Bow.Exec do
  @moduledoc """
  Transform files with shell commands

  This module allows executing any external command taking care of temporary path generation and error handling.
  It is as reliable as [erlexec](https://github.com/saleyn/erlexec) module (very!).

  It is also possible to provide custom command timeout. See `exec/4` to see all available options.
  """

  # A command is a list of literal arguments plus placeholders:
  # `:input` / `{:input, page_index}` expand to the source path,
  # `:output` expands to the generated target path.
  @type command :: [String.t() | {:input, integer} | :input | :output]

  # Read at call time (not compile time) so the app env can change per run.
  defp default_timeout, do: Application.get_env(:bow, :exec_timeout, 15_000)

  @doc """
  Execute command

  Arguments:
    - `source` - source file to be transformed
    - `target_name` - target file
    - `command` - the command to be executed. Placeholders `${input}` and `${output}` will be replaced with source and target paths

  Options:
    - `:timeout` - time in which the command must return. If it's exceeded the command process will be killed.

  Examples

      # generate image thumbnail from first page of pdf
      def transform(file, :pdf_thumbnail) do
        Bow.Exec.exec file, filename(file, :pdf_thumbnail),
          "convert '${input}[0]' -strip -gravity North -background '#ffffff'" <>
          " -resize 250x175^ -extent 250x175 -format png png:${output}"
      end
  """
  # NOTE(review): the doc's `${input}`/`${output}` string syntax does not match
  # the `command` type or the expansion below, which uses the `:input`/`:output`
  # atoms inside a list — confirm which form the public API actually supports.
  @spec exec(Bow.t(), Bow.t(), command, keyword) :: {:ok, Bow.t()} | {:error, any}
  def exec(source, target, command, opts \\ []) do
    timeout = opts[:timeout] || default_timeout()
    source_path = source.path
    # Fresh temp path per invocation; the target extension is kept so tools
    # that infer the output format from the filename still work.
    target_path = Plug.Upload.random_file!("bow-exec") <> target.ext

    cmd =
      command
      |> Enum.map(fn
        {:input, idx} when is_integer(idx) -> "#{source_path}[#{idx}]"
        :input -> source_path
        :output -> target_path
        arg -> arg
      end)
      # erlexec expects charlist arguments
      |> Enum.map(&to_charlist/1)

    trapping(fn ->
      case :exec.run_link(cmd, stdout: self(), stderr: self()) do
        {:ok, pid, ospid} ->
          case wait_for_exit(pid, ospid, timeout) do
            {:ok, output} ->
              # Exit status 0 alone is not trusted — the tool must actually
              # have produced the output file.
              if File.exists?(target_path) do
                {:ok, Bow.set(target, :path, target_path)}
              else
                {:error, reason: :file_not_found, output: output, exit_code: 0, cmd: cmd}
              end

            {:error, exit_code, output} ->
              {:error, output: output, exit_code: exit_code, cmd: cmd}
          end

        error ->
          error
      end
    end)
  end

  # Runs `fun` with exit signals trapped, so a crashing linked exec process
  # arrives as an {:EXIT, ...} message instead of killing the caller; the
  # previous trap_exit flag is restored afterwards.
  defp trapping(fun) do
    trap = Process.flag(:trap_exit, true)
    result = fun.()
    Process.flag(:trap_exit, trap)
    result
  end

  # Blocks until the linked process exits (:normal → success, exit_status →
  # failure) or the timeout elapses, in which case the OS process is stopped
  # with a 2s grace period. Captured stdout/stderr is returned in all cases.
  # (Parameter name `timout` is a pre-existing typo, kept to avoid code changes.)
  defp wait_for_exit(pid, ospid, timout) do
    receive do
      {:EXIT, ^pid, :normal} -> {:ok, receive_output(ospid)}
      {:EXIT, ^pid, {:exit_status, code}} -> {:error, code, receive_output(ospid)}
    after
      timout ->
        :exec.stop_and_wait(pid, 2000)
        {:error, :timeout, receive_output(ospid)}
    end
  end

  # Drains any queued stdout/stderr messages for this OS pid (the `after 0`
  # makes it non-blocking) and flattens the iodata into a single string.
  defp receive_output(ospid, output \\ []) do
    receive do
      {:stdout, ^ospid, data} -> receive_output(ospid, [output, data])
      {:stderr, ^ospid, data} -> receive_output(ospid, [output, data])
    after
      0 -> output |> to_string
    end
  end
end
| 31.128713
| 129
| 0.604326
|
938e7ed1ebe94f599f07b92b23d482feddb0738b
| 1,152
|
exs
|
Elixir
|
implements/pascals-triangle/pascals_triangle.exs
|
MickeyOoh/Exercises
|
3b34e7fdab4a09e0269d20c68531b4fb75bb7f16
|
[
"MIT"
] | null | null | null |
implements/pascals-triangle/pascals_triangle.exs
|
MickeyOoh/Exercises
|
3b34e7fdab4a09e0269d20c68531b4fb75bb7f16
|
[
"MIT"
] | 1
|
2018-06-19T18:59:41.000Z
|
2018-06-19T18:59:41.000Z
|
implements/pascals-triangle/pascals_triangle.exs
|
MickeyOoh/Exercises
|
3b34e7fdab4a09e0269d20c68531b4fb75bb7f16
|
[
"MIT"
] | null | null | null |
defmodule PascalsTriangle do
  @doc """
  Returns the first `num` rows of Pascal's triangle, e.g.
  `rows(3) == [[1], [1, 1], [1, 2, 1]]`.

  Each row follows from the previous one by summing horizontally adjacent
  entries, with an implicit 0 padded on both ends: (n:k) = (n-1:k-1) + (n-1:k).

  Returns `[]` when `num` is not positive. (The previous implementation
  iterated `1..num`, so `num = 0` enumerated the descending range `[1, 0]`
  and produced two spurious rows.)
  """
  @spec rows(integer) :: [[integer]]
  def rows(num) when num <= 0, do: []

  def rows(num) do
    # Build newest-row-first (O(1) prepends), then reverse once at the end.
    1..num
    |> Enum.reduce([], fn _i, acc -> [set_k(List.first(acc)) | acc] end)
    |> Enum.reverse()
  end

  @doc """
  Builds the row that follows `n_list`; `nil` (no previous row) yields the
  first row `[1]`.
  """
  def set_k(nil), do: [1]

  def set_k(n_list) do
    # Pad with a zero on each side so every output entry is a pairwise sum.
    get_klist([0 | n_list] ++ [0], [])
  end

  @doc """
  Sums each adjacent pair of a padded row, accumulating in reverse.
  Kept public for backward compatibility with existing callers.
  """
  def get_klist([a, b], acc), do: Enum.reverse([a + b | acc])

  def get_klist([a, b | rest], acc), do: get_klist([b | rest], [a + b | acc])
end
| 26.181818
| 60
| 0.517361
|
938e9b42bfe1ebf6236aa0f09af38b13b225ebae
| 4,774
|
ex
|
Elixir
|
lib/rdf/serialization/format.ex
|
pukkamustard/rdf-ex
|
c459d8e7fa548fdfad82643338b68decf380a296
|
[
"MIT"
] | null | null | null |
lib/rdf/serialization/format.ex
|
pukkamustard/rdf-ex
|
c459d8e7fa548fdfad82643338b68decf380a296
|
[
"MIT"
] | null | null | null |
lib/rdf/serialization/format.ex
|
pukkamustard/rdf-ex
|
c459d8e7fa548fdfad82643338b68decf380a296
|
[
"MIT"
] | null | null | null |
defmodule RDF.Serialization.Format do
  @moduledoc """
  A behaviour for RDF serialization formats.

  A `RDF.Serialization` for a format can be implemented like this

      defmodule SomeFormat do
        use RDF.Serialization.Format
        import RDF.Sigils

        @id         ~I<http://example.com/some_format>
        @name       :some_format
        @extension  "ext"
        @media_type "application/some-format"
      end

  When `@id`, `@name`, `@extension` and `@media_type` module attributes are
  defined the resp. behaviour functions are generated automatically and return
  these values.

  Then you'll have to do the main work by implementing a
  `RDF.Serialization.Encoder` and a `RDF.Serialization.Decoder` for the format.

  By default it is assumed that these are defined in `Encoder` and `Decoder`
  moduler under the `RDF.Serialization.Format` module of the format, i.e. in the
  example above in `SomeFormat.Encoder` and `SomeFormat.Decoder`. If you want
  them in another module, you'll have to override the `encoder/0` and/or
  `decoder/0` functions in your `RDF.Serialization.Format` module.
  """

  alias RDF.{Dataset, Graph}

  @doc """
  An IRI of the serialization format.
  """
  @callback id :: RDF.IRI.t

  @doc """
  An name atom of the serialization format.
  """
  @callback name :: atom

  @doc """
  The usual file extension for the serialization format.
  """
  @callback extension :: String.t

  @doc """
  The MIME type of the serialization format.
  """
  @callback media_type :: String.t

  @doc """
  A map with the supported options of the `Encoder` and `Decoder` for the serialization format.
  """
  @callback options :: map

  @doc """
  The `RDF.Serialization.Decoder` module for the serialization format.
  """
  @callback decoder :: module

  @doc """
  The `RDF.Serialization.Encoder` module for the serialization format.
  """
  @callback encoder :: module

  # Injects into the using module: the behaviour, overridable defaults for
  # decoder/0, encoder/0 and options/0, and read_*/write_* convenience
  # functions that delegate to RDF.Serialization.Reader/Writer with the
  # format's own decoder/encoder.
  defmacro __using__(_) do
    quote bind_quoted: [], unquote: true do
      @behaviour unquote(__MODULE__)

      # Convention: <Format>.Decoder and <Format>.Encoder, unless overridden.
      @decoder __MODULE__.Decoder
      @encoder __MODULE__.Encoder

      @impl unquote(__MODULE__)
      def decoder, do: @decoder

      @impl unquote(__MODULE__)
      def encoder, do: @encoder

      @impl unquote(__MODULE__)
      def options, do: %{}

      defoverridable [decoder: 0, encoder: 0, options: 0]

      @spec read_string(String.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any}
      def read_string(content, opts \\ []),
        do: RDF.Serialization.Reader.read_string(decoder(), content, opts)

      @spec read_string!(String.t, keyword) :: Graph.t | Dataset.t
      def read_string!(content, opts \\ []),
        do: RDF.Serialization.Reader.read_string!(decoder(), content, opts)

      @spec read_file(Path.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any}
      def read_file(file, opts \\ []),
        do: RDF.Serialization.Reader.read_file(decoder(), file, opts)

      @spec read_file!(Path.t, keyword) :: Graph.t | Dataset.t
      def read_file!(file, opts \\ []),
        do: RDF.Serialization.Reader.read_file!(decoder(), file, opts)

      @spec write_string(Graph.t | Dataset.t, keyword) :: {:ok, String.t} | {:error, any}
      def write_string(data, opts \\ []),
        do: RDF.Serialization.Writer.write_string(encoder(), data, opts)

      @spec write_string!(Graph.t | Dataset.t, keyword) :: String.t
      def write_string!(data, opts \\ []),
        do: RDF.Serialization.Writer.write_string!(encoder(), data, opts)

      @spec write_file(Graph.t | Dataset.t, Path.t, keyword) :: :ok | {:error, any}
      def write_file(data, path, opts \\ []),
        do: RDF.Serialization.Writer.write_file(encoder(), data, path, opts)

      @spec write_file!(Graph.t | Dataset.t, Path.t, keyword) :: :ok
      def write_file!(data, path, opts \\ []),
        do: RDF.Serialization.Writer.write_file!(encoder(), data, path, opts)

      @before_compile unquote(__MODULE__)
    end
  end

  # For each of id/name/extension/media_type: if the using module set the
  # matching attribute but did not define the function itself, generate an
  # accessor returning the attribute value (as documented in the moduledoc).
  defmacro __before_compile__(_env) do
    quote do
      if !Module.defines?(__MODULE__, {:id, 0}) &&
          Module.get_attribute(__MODULE__, :id) do
        @impl unquote(__MODULE__)
        def id, do: @id
      end

      if !Module.defines?(__MODULE__, {:name, 0}) &&
          Module.get_attribute(__MODULE__, :name) do
        @impl unquote(__MODULE__)
        def name, do: @name
      end

      if !Module.defines?(__MODULE__, {:extension, 0}) &&
          Module.get_attribute(__MODULE__, :extension) do
        @impl unquote(__MODULE__)
        def extension, do: @extension
      end

      if !Module.defines?(__MODULE__, {:media_type, 0}) &&
          Module.get_attribute(__MODULE__, :media_type) do
        @impl unquote(__MODULE__)
        def media_type, do: @media_type
      end
    end
  end
end
| 33.384615
| 95
| 0.645999
|
938ea746147cfab2fcb7e32300ab674f903b9b91
| 3,219
|
ex
|
Elixir
|
clients/fitness/lib/google_api/fitness/v1/connection.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/fitness/lib/google_api/fitness/v1/connection.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/fitness/lib/google_api/fitness/v1/connection.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Fitness.V1.Connection do
  @moduledoc """
  Handle Tesla connections for GoogleApi.Fitness.V1.
  """

  @type t :: Tesla.Env.client()

  # Gax generates the Tesla client plumbing; the scopes listed below are every
  # OAuth scope the Fitness API can request (one read and one write scope per
  # data category).
  use GoogleApi.Gax.Connection,
    scopes: [
      # View your activity information in Google Fit
      "https://www.googleapis.com/auth/fitness.activity.read",

      # View and store your activity information in Google Fit
      "https://www.googleapis.com/auth/fitness.activity.write",

      # View blood glucose data in Google Fit
      "https://www.googleapis.com/auth/fitness.blood_glucose.read",

      # View and store blood glucose data in Google Fit
      "https://www.googleapis.com/auth/fitness.blood_glucose.write",

      # View blood pressure data in Google Fit
      "https://www.googleapis.com/auth/fitness.blood_pressure.read",

      # View and store blood pressure data in Google Fit
      "https://www.googleapis.com/auth/fitness.blood_pressure.write",

      # View body sensor information in Google Fit
      "https://www.googleapis.com/auth/fitness.body.read",

      # View and store body sensor data in Google Fit
      "https://www.googleapis.com/auth/fitness.body.write",

      # View body temperature data in Google Fit
      "https://www.googleapis.com/auth/fitness.body_temperature.read",

      # View and store body temperature data in Google Fit
      "https://www.googleapis.com/auth/fitness.body_temperature.write",

      # View your stored location data in Google Fit
      "https://www.googleapis.com/auth/fitness.location.read",

      # View and store your location data in Google Fit
      "https://www.googleapis.com/auth/fitness.location.write",

      # View nutrition information in Google Fit
      "https://www.googleapis.com/auth/fitness.nutrition.read",

      # View and store nutrition information in Google Fit
      "https://www.googleapis.com/auth/fitness.nutrition.write",

      # View oxygen saturation data in Google Fit
      "https://www.googleapis.com/auth/fitness.oxygen_saturation.read",

      # View and store oxygen saturation data in Google Fit
      "https://www.googleapis.com/auth/fitness.oxygen_saturation.write",

      # View reproductive health data in Google Fit
      "https://www.googleapis.com/auth/fitness.reproductive_health.read",

      # View and store reproductive health data in Google Fit
      "https://www.googleapis.com/auth/fitness.reproductive_health.write"
    ],
    otp_app: :google_api_fitness,
    base_url: "https://www.googleapis.com/fitness/v1/users/"
end
| 38.321429
| 77
| 0.718235
|
938eae19ea9d7d3b8345a326b5e36572bed1a0bf
| 61
|
ex
|
Elixir
|
api/lib/loose/repo.ex
|
manna422/loose
|
1d2dc1d1a88976fa3f883917f47023eae3167c37
|
[
"MIT"
] | null | null | null |
api/lib/loose/repo.ex
|
manna422/loose
|
1d2dc1d1a88976fa3f883917f47023eae3167c37
|
[
"MIT"
] | null | null | null |
api/lib/loose/repo.ex
|
manna422/loose
|
1d2dc1d1a88976fa3f883917f47023eae3167c37
|
[
"MIT"
] | null | null | null |
defmodule Loose.Repo do
  @moduledoc """
  Ecto repository for the Loose application; connection settings are read
  from the `:loose` OTP application environment.
  """
  use Ecto.Repo, otp_app: :loose
end
| 15.25
| 32
| 0.754098
|
938eb22fd60c77c26299c458b67854009894cfca
| 497
|
exs
|
Elixir
|
config/test.exs
|
krishnakumar4a4/speak_up
|
a7b99e812ea4aaa6473a866f89fb0f4bb8973f6e
|
[
"MIT"
] | null | null | null |
config/test.exs
|
krishnakumar4a4/speak_up
|
a7b99e812ea4aaa6473a866f89fb0f4bb8973f6e
|
[
"MIT"
] | null | null | null |
config/test.exs
|
krishnakumar4a4/speak_up
|
a7b99e812ea4aaa6473a866f89fb0f4bb8973f6e
|
[
"MIT"
] | null | null | null |
use Mix.Config

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :speak_up, SpeakUpWeb.Endpoint,
  http: [port: 4001],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Configure your database
# The SQL sandbox pool wraps each test in a transaction that is rolled
# back afterwards, keeping tests isolated from one another.
config :speak_up, SpeakUp.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "speak_up_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox
| 24.85
| 56
| 0.736419
|
938edac44b8554f125dcfe0e068110153fb5931d
| 22,078
|
ex
|
Elixir
|
lib/elixir/lib/calendar/date.ex
|
cdfuller/elixir
|
3bd3f88d57d7fff6cab7b171294b89fb08eedfe7
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/calendar/date.ex
|
cdfuller/elixir
|
3bd3f88d57d7fff6cab7b171294b89fb08eedfe7
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/calendar/date.ex
|
cdfuller/elixir
|
3bd3f88d57d7fff6cab7b171294b89fb08eedfe7
|
[
"Apache-2.0"
] | null | null | null |
defmodule Date do
@moduledoc """
A Date struct and functions.
The Date struct contains the fields year, month, day and calendar.
New dates can be built with the `new/3` function or using the
`~D` (see `Kernel.sigil_D/2`) sigil:
iex> ~D[2000-01-01]
~D[2000-01-01]
Both `new/3` and sigil return a struct where the date fields can
be accessed directly:
iex> date = ~D[2000-01-01]
iex> date.year
2000
iex> date.month
1
The functions on this module work with the `Date` struct as well
as any struct that contains the same fields as the `Date` struct,
such as `NaiveDateTime` and `DateTime`. Such functions expect
`t:Calendar.date/0` in their typespecs (instead of `t:t/0`).
Developers should avoid creating the Date structs directly
and instead rely on the functions provided by this module as well
as the ones in third-party calendar libraries.
## Comparing dates
Comparisons in Elixir using `==/2`, `>/2`, `</2` and similar are structural
and based on the `Date` struct fields. For proper comparison between
dates, use the `compare/2` function.
## Using epochs
The `add/2` and `diff/2` functions can be used for computing dates
or retrieving the number of days between instants. For example, if there
is an interest in computing the number of days from the Unix epoch
(1970-01-01):
iex> Date.diff(~D[2010-04-17], ~D[1970-01-01])
14716
iex> Date.add(~D[1970-01-01], 14716)
~D[2010-04-17]
Those functions are optimized to deal with common epochs, such
as the Unix Epoch above or the Gregorian Epoch (0000-01-01).
"""
@enforce_keys [:year, :month, :day]
defstruct [:year, :month, :day, calendar: Calendar.ISO]
@type t :: %__MODULE__{
year: Calendar.year(),
month: Calendar.month(),
day: Calendar.day(),
calendar: Calendar.calendar()
}
@doc """
Returns a range of dates.
A range of dates represents a discrete number of dates where
the first and last values are dates with matching calendars.
Ranges of dates can be either increasing (`first <= last`) or
decreasing (`first > last`). They are also always inclusive.
## Examples
iex> Date.range(~D[1999-01-01], ~D[2000-01-01])
#DateRange<~D[1999-01-01], ~D[2000-01-01]>
A range of dates implements the `Enumerable` protocol, which means
functions in the `Enum` module can be used to work with
ranges:
iex> range = Date.range(~D[2001-01-01], ~D[2002-01-01])
iex> Enum.count(range)
366
iex> Enum.member?(range, ~D[2001-02-01])
true
iex> Enum.reduce(range, 0, fn _date, acc -> acc - 1 end)
-366
"""
@doc since: "1.5.0"
@spec range(Date.t(), Date.t()) :: Date.Range.t()
def range(%Date{calendar: calendar} = first, %Date{calendar: calendar} = last) do
{first_days, _} = to_iso_days(first)
{last_days, _} = to_iso_days(last)
%Date.Range{
first: first,
last: last,
first_in_iso_days: first_days,
last_in_iso_days: last_days
}
end
def range(%Date{}, %Date{}) do
raise ArgumentError, "both dates must have matching calendars"
end
@doc """
Returns the current date in UTC.
## Examples
iex> date = Date.utc_today()
iex> date.year >= 2016
true
"""
@doc since: "1.4.0"
@spec utc_today(Calendar.calendar()) :: t
def utc_today(calendar \\ Calendar.ISO)
def utc_today(Calendar.ISO) do
{:ok, {year, month, day}, _, _} = Calendar.ISO.from_unix(System.os_time(), :native)
%Date{year: year, month: month, day: day}
end
def utc_today(calendar) do
calendar
|> DateTime.utc_now()
|> DateTime.to_date()
end
@doc """
Returns `true` if the year in the given `date` is a leap year.
## Examples
iex> Date.leap_year?(~D[2000-01-01])
true
iex> Date.leap_year?(~D[2001-01-01])
false
iex> Date.leap_year?(~D[2004-01-01])
true
iex> Date.leap_year?(~D[1900-01-01])
false
iex> Date.leap_year?(~N[2004-01-01 01:23:45])
true
"""
@doc since: "1.4.0"
@spec leap_year?(Calendar.date()) :: boolean()
def leap_year?(date)
def leap_year?(%{calendar: calendar, year: year}) do
calendar.leap_year?(year)
end
@doc """
Returns the number of days in the given `date` month.
## Examples
iex> Date.days_in_month(~D[1900-01-13])
31
iex> Date.days_in_month(~D[1900-02-09])
28
iex> Date.days_in_month(~N[2000-02-20 01:23:45])
29
"""
@doc since: "1.4.0"
@spec days_in_month(Calendar.date()) :: Calendar.day()
def days_in_month(date)
def days_in_month(%{calendar: calendar, year: year, month: month}) do
calendar.days_in_month(year, month)
end
@doc """
Returns the number of months in the given `date` year.
## Example
iex> Date.months_in_year(~D[1900-01-13])
12
"""
@doc since: "1.7.0"
@spec months_in_year(Calendar.date()) :: Calendar.month()
def months_in_year(date)
def months_in_year(%{calendar: calendar, year: year}) do
calendar.months_in_year(year)
end
@doc """
Builds a new ISO date.
Expects all values to be integers. Returns `{:ok, date}` if each
entry fits its appropriate range, returns `{:error, reason}` otherwise.
## Examples
iex> Date.new(2000, 1, 1)
{:ok, ~D[2000-01-01]}
iex> Date.new(2000, 13, 1)
{:error, :invalid_date}
iex> Date.new(2000, 2, 29)
{:ok, ~D[2000-02-29]}
iex> Date.new(2000, 2, 30)
{:error, :invalid_date}
iex> Date.new(2001, 2, 29)
{:error, :invalid_date}
"""
@spec new(Calendar.year(), Calendar.month(), Calendar.day(), Calendar.calendar()) ::
{:ok, t} | {:error, atom}
def new(year, month, day, calendar \\ Calendar.ISO) do
if calendar.valid_date?(year, month, day) do
{:ok, %Date{year: year, month: month, day: day, calendar: calendar}}
else
{:error, :invalid_date}
end
end
@doc """
Converts the given date to a string according to its calendar.
### Examples
iex> Date.to_string(~D[2000-02-28])
"2000-02-28"
iex> Date.to_string(~N[2000-02-28 01:23:45])
"2000-02-28"
iex> Date.to_string(~D[-0100-12-15])
"-0100-12-15"
"""
@spec to_string(Calendar.date()) :: String.t()
def to_string(date)
def to_string(%{calendar: calendar, year: year, month: month, day: day}) do
calendar.date_to_string(year, month, day)
end
@doc """
Parses the extended "Dates" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).
The year parsed by this function is limited to four digits.
## Examples
iex> Date.from_iso8601("2015-01-23")
{:ok, ~D[2015-01-23]}
iex> Date.from_iso8601("2015:01:23")
{:error, :invalid_format}
iex> Date.from_iso8601("2015-01-32")
{:error, :invalid_date}
"""
@spec from_iso8601(String.t(), Calendar.calendar()) :: {:ok, t} | {:error, atom}
def from_iso8601(string, calendar \\ Calendar.ISO)
def from_iso8601(<<?-, rest::binary>>, calendar) do
with {:ok, %{year: year} = date} <- raw_from_iso8601(rest, calendar) do
{:ok, %{date | year: -year}}
end
end
def from_iso8601(<<rest::binary>>, calendar) do
raw_from_iso8601(rest, calendar)
end
[match_date, guard_date, read_date] = Calendar.ISO.__match_date__()
defp raw_from_iso8601(string, calendar) do
with unquote(match_date) <- string,
true <- unquote(guard_date) do
{year, month, day} = unquote(read_date)
with {:ok, date} <- new(year, month, day, Calendar.ISO) do
convert(date, calendar)
end
else
_ -> {:error, :invalid_format}
end
end
@doc """
Parses the extended "Dates" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).
Raises if the format is invalid.
## Examples
iex> Date.from_iso8601!("2015-01-23")
~D[2015-01-23]
iex> Date.from_iso8601!("2015:01:23")
** (ArgumentError) cannot parse "2015:01:23" as date, reason: :invalid_format
"""
@spec from_iso8601!(String.t(), Calendar.calendar()) :: t
def from_iso8601!(string, calendar \\ Calendar.ISO) do
case from_iso8601(string, calendar) do
{:ok, value} ->
value
{:error, reason} ->
raise ArgumentError, "cannot parse #{inspect(string)} as date, reason: #{inspect(reason)}"
end
end
@doc """
Converts the given `date` to
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).
By default, `Date.to_iso8601/2` returns dates formatted in the "extended"
format, for human readability. It also supports the "basic" format through passing the `:basic` option.
Only supports converting dates which are in the ISO calendar,
or other calendars in which the days also start at midnight.
Attempting to convert dates from other calendars will raise an `ArgumentError`.
### Examples
iex> Date.to_iso8601(~D[2000-02-28])
"2000-02-28"
iex> Date.to_iso8601(~D[2000-02-28], :basic)
"20000228"
iex> Date.to_iso8601(~N[2000-02-28 00:00:00])
"2000-02-28"
"""
@spec to_iso8601(Calendar.date(), :extended | :basic) :: String.t()
def to_iso8601(date, format \\ :extended)
def to_iso8601(%{calendar: Calendar.ISO} = date, format) when format in [:basic, :extended] do
%{year: year, month: month, day: day} = date
Calendar.ISO.date_to_iso8601(year, month, day, format)
end
def to_iso8601(%{calendar: _} = date, format) when format in [:basic, :extended] do
date
|> convert!(Calendar.ISO)
|> to_iso8601()
end
@doc """
Converts the given `date` to an Erlang date tuple.
Only supports converting dates which are in the ISO calendar,
or other calendars in which the days also start at midnight.
Attempting to convert dates from other calendars will raise.
## Examples
iex> Date.to_erl(~D[2000-01-01])
{2000, 1, 1}
iex> Date.to_erl(~N[2000-01-01 00:00:00])
{2000, 1, 1}
"""
@spec to_erl(Calendar.date()) :: :calendar.date()
def to_erl(date) do
%{year: year, month: month, day: day} = convert!(date, Calendar.ISO)
{year, month, day}
end
@doc """
Converts an Erlang date tuple to a `Date` struct.
Only supports converting dates which are in the ISO calendar,
or other calendars in which the days also start at midnight.
Attempting to convert dates from other calendars will return an error tuple.
## Examples
iex> Date.from_erl({2000, 1, 1})
{:ok, ~D[2000-01-01]}
iex> Date.from_erl({2000, 13, 1})
{:error, :invalid_date}
"""
@spec from_erl(:calendar.date(), Calendar.calendar()) :: {:ok, t} | {:error, atom}
def from_erl(tuple, calendar \\ Calendar.ISO)
def from_erl({year, month, day}, calendar) do
with {:ok, date} <- new(year, month, day, Calendar.ISO), do: convert(date, calendar)
end
@doc """
Converts an Erlang date tuple but raises for invalid dates.
## Examples
iex> Date.from_erl!({2000, 1, 1})
~D[2000-01-01]
iex> Date.from_erl!({2000, 13, 1})
** (ArgumentError) cannot convert {2000, 13, 1} to date, reason: :invalid_date
"""
@spec from_erl!(:calendar.date(), Calendar.calendar()) :: t
def from_erl!(tuple, calendar \\ Calendar.ISO) do
case from_erl(tuple, calendar) do
{:ok, value} ->
value
{:error, reason} ->
raise ArgumentError,
"cannot convert #{inspect(tuple)} to date, reason: #{inspect(reason)}"
end
end
@doc """
Compares two date structs.
Returns `:gt` if first date is later than the second
and `:lt` for vice versa. If the two dates are equal
`:eq` is returned.
## Examples
iex> Date.compare(~D[2016-04-16], ~D[2016-04-28])
:lt
This function can also be used to compare across more
complex calendar types by considering only the date fields:
iex> Date.compare(~D[2016-04-16], ~N[2016-04-28 01:23:45])
:lt
iex> Date.compare(~D[2016-04-16], ~N[2016-04-16 01:23:45])
:eq
iex> Date.compare(~N[2016-04-16 12:34:56], ~N[2016-04-16 01:23:45])
:eq
"""
@doc since: "1.4.0"
@spec compare(Calendar.date(), Calendar.date()) :: :lt | :eq | :gt
def compare(%{calendar: calendar} = date1, %{calendar: calendar} = date2) do
%{year: year1, month: month1, day: day1} = date1
%{year: year2, month: month2, day: day2} = date2
case {{year1, month1, day1}, {year2, month2, day2}} do
{first, second} when first > second -> :gt
{first, second} when first < second -> :lt
_ -> :eq
end
end
def compare(date1, date2) do
if Calendar.compatible_calendars?(date1.calendar, date2.calendar) do
case {to_iso_days(date1), to_iso_days(date2)} do
{first, second} when first > second -> :gt
{first, second} when first < second -> :lt
_ -> :eq
end
else
raise ArgumentError, """
cannot compare #{inspect(date1)} with #{inspect(date2)}.
This comparison would be ambiguous as their calendars have incompatible day rollover moments.
Specify an exact time of day (using DateTime) to resolve this ambiguity
"""
end
end
@doc """
Converts the given `date` from its calendar to the given `calendar`.
Returns `{:ok, date}` if the calendars are compatible,
or `{:error, :incompatible_calendars}` if they are not.
See also `Calendar.compatible_calendars?/2`.
## Examples
Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:
iex> Date.convert(~D[2000-01-01], Calendar.Holocene)
{:ok, %Date{calendar: Calendar.Holocene, year: 12000, month: 1, day: 1}}
"""
@doc since: "1.5.0"
@spec convert(Calendar.date(), Calendar.calendar()) ::
{:ok, t} | {:error, :incompatible_calendars}
def convert(%{calendar: calendar, year: year, month: month, day: day}, calendar) do
{:ok, %Date{calendar: calendar, year: year, month: month, day: day}}
end
def convert(%{calendar: calendar} = date, target_calendar) do
if Calendar.compatible_calendars?(calendar, target_calendar) do
result_date =
date
|> to_iso_days()
|> from_iso_days(target_calendar)
{:ok, result_date}
else
{:error, :incompatible_calendars}
end
end
@doc """
Similar to `Date.convert/2`, but raises an `ArgumentError`
if the conversion between the two calendars is not possible.
## Examples
Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:
iex> Date.convert!(~D[2000-01-01], Calendar.Holocene)
%Date{calendar: Calendar.Holocene, year: 12000, month: 1, day: 1}
"""
@doc since: "1.5.0"
@spec convert!(Calendar.date(), Calendar.calendar()) :: t
def convert!(date, calendar) do
case convert(date, calendar) do
{:ok, value} ->
value
{:error, reason} ->
raise ArgumentError,
"cannot convert #{inspect(date)} to target calendar #{inspect(calendar)}, " <>
"reason: #{inspect(reason)}"
end
end
@doc """
Adds the number of days to the given `date`.
The days are counted as Gregorian days. The date is returned in the same
calendar as it was given in.
## Examples
iex> Date.add(~D[2000-01-03], -2)
~D[2000-01-01]
iex> Date.add(~D[2000-01-01], 2)
~D[2000-01-03]
iex> Date.add(~N[2000-01-01 09:00:00], 2)
~D[2000-01-03]
iex> Date.add(~D[-0010-01-01], -2)
~D[-0011-12-30]
"""
@doc since: "1.5.0"
@spec add(Calendar.date(), integer()) :: t
def add(%{calendar: Calendar.ISO} = date, days) do
%{year: year, month: month, day: day} = date
{year, month, day} =
Calendar.ISO.date_to_iso_days(year, month, day)
|> Kernel.+(days)
|> Calendar.ISO.date_from_iso_days()
%Date{calendar: Calendar.ISO, year: year, month: month, day: day}
end
def add(%{calendar: calendar} = date, days) do
{base_days, fraction} = to_iso_days(date)
from_iso_days({base_days + days, fraction}, calendar)
end
@doc """
Calculates the difference between two dates, in a full number of days.
It returns the number of Gregorian days between the dates. Only `Date`
structs that follow the same or compatible calendars can be compared
this way. If two calendars are not compatible, it will raise.
## Examples
iex> Date.diff(~D[2000-01-03], ~D[2000-01-01])
2
iex> Date.diff(~D[2000-01-01], ~D[2000-01-03])
-2
iex> Date.diff(~D[0000-01-02], ~D[-0001-12-30])
3
iex> Date.diff(~D[2000-01-01], ~N[2000-01-03 09:00:00])
-2
"""
@doc since: "1.5.0"
@spec diff(Calendar.date(), Calendar.date()) :: integer
def diff(%{calendar: Calendar.ISO} = date1, %{calendar: Calendar.ISO} = date2) do
%{year: year1, month: month1, day: day1} = date1
%{year: year2, month: month2, day: day2} = date2
Calendar.ISO.date_to_iso_days(year1, month1, day1) -
Calendar.ISO.date_to_iso_days(year2, month2, day2)
end
def diff(%{calendar: calendar1} = date1, %{calendar: calendar2} = date2) do
if Calendar.compatible_calendars?(calendar1, calendar2) do
{days1, _} = to_iso_days(date1)
{days2, _} = to_iso_days(date2)
days1 - days2
else
raise ArgumentError,
"cannot calculate the difference between #{inspect(date1)} and #{inspect(date2)} because their calendars are not compatible and thus the result would be ambiguous"
end
end
defp to_iso_days(%{calendar: Calendar.ISO, year: year, month: month, day: day}) do
{Calendar.ISO.date_to_iso_days(year, month, day), {0, 86_400_000_000}}
end
defp to_iso_days(%{calendar: calendar, year: year, month: month, day: day}) do
calendar.naive_datetime_to_iso_days(year, month, day, 0, 0, 0, {0, 0})
end
defp from_iso_days({days, _}, Calendar.ISO) do
{year, month, day} = Calendar.ISO.date_from_iso_days(days)
%Date{year: year, month: month, day: day, calendar: Calendar.ISO}
end
defp from_iso_days(iso_days, target_calendar) do
{year, month, day, _, _, _, _} = target_calendar.naive_datetime_from_iso_days(iso_days)
%Date{year: year, month: month, day: day, calendar: target_calendar}
end
@doc """
Calculates the day of the week of a given `date`.
Returns the day of the week as an integer. For the ISO 8601
calendar (the default), it is an integer from 1 to 7, where
1 is Monday and 7 is Sunday.
## Examples
iex> Date.day_of_week(~D[2016-10-31])
1
iex> Date.day_of_week(~D[2016-11-01])
2
iex> Date.day_of_week(~N[2016-11-01 01:23:45])
2
iex> Date.day_of_week(~D[-0015-10-30])
3
"""
@doc since: "1.4.0"
@spec day_of_week(Calendar.date()) :: Calendar.day()
def day_of_week(date)
def day_of_week(%{calendar: calendar, year: year, month: month, day: day}) do
calendar.day_of_week(year, month, day)
end
@doc """
Calculates the day of the year of a given `date`.
Returns the day of the year as an integer. For the ISO 8601
calendar (the default), it is an integer from 1 to 366.
## Examples
iex> Date.day_of_year(~D[2016-01-01])
1
iex> Date.day_of_year(~D[2016-11-01])
306
iex> Date.day_of_year(~D[-0015-10-30])
303
iex> Date.day_of_year(~D[2004-12-31])
366
"""
@doc since: "1.8.0"
@spec day_of_year(Calendar.date()) :: Calendar.day()
def day_of_year(date)
def day_of_year(%{calendar: calendar, year: year, month: month, day: day}) do
calendar.day_of_year(year, month, day)
end
@doc """
Calculates the quarter of the year of a given `date`.
Returns the day of the year as an integer. For the ISO 8601
calendar (the default), it is an integer from 1 to 4.
## Examples
iex> Date.quarter_of_year(~D[2016-10-31])
4
iex> Date.quarter_of_year(~D[2016-01-01])
1
iex> Date.quarter_of_year(~N[2016-04-01 01:23:45])
2
iex> Date.quarter_of_year(~D[-0015-09-30])
3
"""
@doc since: "1.8.0"
@spec quarter_of_year(Calendar.date()) :: non_neg_integer()
def quarter_of_year(date)
def quarter_of_year(%{calendar: calendar, year: year, month: month, day: day}) do
calendar.quarter_of_year(year, month, day)
end
@doc """
Calculates the year-of-era and era for a given
calendar year.
Returns a tuple `{year, era}` representing the
year within the era and the era number.
## Examples
iex> Date.year_of_era(~D[0001-01-01])
{1, 1}
iex> Date.year_of_era(~D[0000-12-31])
{1, 0}
iex> Date.year_of_era(~D[-0001-01-01])
{2, 0}
"""
@doc since: "1.8.0"
@spec year_of_era(Calendar.date()) :: {Calendar.year(), non_neg_integer()}
def year_of_era(date)
def year_of_era(%{calendar: calendar, year: year}) do
calendar.year_of_era(year)
end
@doc """
Calculates the day-of-era and era for a given
calendar `date`.
Returns a tuple `{day, era}` representing the
day within the era and the era number.
## Examples
iex> Date.day_of_era(~D[0001-01-01])
{1, 1}
iex> Date.day_of_era(~D[0000-12-31])
{1, 0}
"""
@doc since: "1.8.0"
@spec day_of_era(Calendar.date()) :: {Calendar.day(), non_neg_integer()}
def day_of_era(date)
def day_of_era(%{calendar: calendar, year: year, month: month, day: day}) do
calendar.day_of_era(year, month, day)
end
## Helpers
defimpl String.Chars do
def to_string(%{calendar: calendar, year: year, month: month, day: day}) do
calendar.date_to_string(year, month, day)
end
end
defimpl Inspect do
def inspect(%{calendar: Calendar.ISO, year: year, month: month, day: day}, _) do
"~D[" <> Calendar.ISO.date_to_string(year, month, day) <> "]"
end
def inspect(date, opts) do
Inspect.Any.inspect(date, opts)
end
end
end
| 28.268886
| 175
| 0.636199
|
938eef862c134e321868cb36638b05b2265519c3
| 3,020
|
ex
|
Elixir
|
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1_object_tracking_annotation.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1_object_tracking_annotation.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p2beta1_object_tracking_annotation.ex
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation do
@moduledoc """
Annotations corresponding to one tracked object.
## Attributes
- confidence (float()): Object category's labeling confidence of this track. Defaults to: `null`.
- entity (GoogleCloudVideointelligenceV1p2beta1Entity): Entity to specify the object category that this track is labeled as. Defaults to: `null`.
- frames ([GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame]): Information corresponding to all frames where this object track appears. Defaults to: `null`.
- segment (GoogleCloudVideointelligenceV1p2beta1VideoSegment): Each object track corresponds to one video segment where it appears. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:confidence => any(),
:entity =>
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1Entity.t(),
:frames =>
list(
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame.t()
),
:segment =>
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1VideoSegment.t()
}
field(:confidence)
field(
:entity,
as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1Entity
)
field(
:frames,
as:
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame,
type: :list
)
field(
:segment,
as: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1VideoSegment
)
end
defimpl Poison.Decoder,
for:
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation do
def decode(value, options) do
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.385542
| 166
| 0.758278
|
938f1557c77a741229832ab6ab526cf503b1f954
| 14,240
|
exs
|
Elixir
|
test/chat_api/reporting_test.exs
|
raditya3/papercups
|
4657b258ee381ac0b7517e57e4d6261ce94b5871
|
[
"MIT"
] | null | null | null |
test/chat_api/reporting_test.exs
|
raditya3/papercups
|
4657b258ee381ac0b7517e57e4d6261ce94b5871
|
[
"MIT"
] | null | null | null |
test/chat_api/reporting_test.exs
|
raditya3/papercups
|
4657b258ee381ac0b7517e57e4d6261ce94b5871
|
[
"MIT"
] | null | null | null |
defmodule ChatApi.ReportingTest do
use ChatApi.DataCase
alias ChatApi.Reporting
describe "reporting" do
setup do
account = account_fixture()
customer = customer_fixture(account)
{:ok, account: account, customer: customer}
end
test "count_messages_by_date/1 retrieves the number of messages created per day", %{
account: account,
customer: customer
} do
count = 10
inserted_at = ~N[2020-09-01 12:00:00]
conversation = conversation_fixture(account, customer)
for _i <- 1..count do
message_fixture(account, conversation, %{inserted_at: inserted_at})
end
assert [%{count: ^count, date: ~D[2020-09-01]}] =
Reporting.count_messages_by_date(account.id)
end
test "count_messages_by_date/1 groups by date correctly", %{
account: account,
customer: customer
} do
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{inserted_at: ~N[2020-09-01 12:00:00]})
message_fixture(account, conversation, %{inserted_at: ~N[2020-09-02 12:00:00]})
message_fixture(account, conversation, %{inserted_at: ~N[2020-09-03 12:00:00]})
assert [
%{date: ~D[2020-09-01], count: 1},
%{date: ~D[2020-09-02], count: 1},
%{date: ~D[2020-09-03], count: 1}
] = Reporting.count_messages_by_date(account.id)
end
test "count_messages_per_user/1 should return correct number of messages sent per user on team",
%{
account: account,
customer: customer
} do
user_2 = user_fixture(account)
user_3 = user_fixture(account)
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-01 12:00:00],
user_id: user_2.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-02 12:00:00],
user_id: user_2.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-03 12:00:00],
user_id: user_3.id
})
assert [
%{count: 2},
%{count: 1}
] = Reporting.count_messages_per_user(account.id)
end
test "count_messages_by_date/1 only fetches messages by the given account id", %{
account: account,
customer: customer
} do
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{inserted_at: ~N[2020-09-01 12:00:00]})
assert [%{date: ~D[2020-09-01], count: 1}] = Reporting.count_messages_by_date(account.id)
different_account = account_fixture()
assert [] = Reporting.count_messages_by_date(different_account.id)
end
test "count_messages_by_date/3 fetches conversations between two dates", %{
account: account,
customer: customer
} do
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-02 12:00:00]
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-02 12:00:00]
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-03 12:00:00]
})
assert [
%{date: ~D[2020-09-02], count: 2},
%{date: ~D[2020-09-03], count: 1}
] =
Reporting.count_messages_by_date(
account.id,
~N[2020-09-02 11:00:00],
~N[2020-09-03 13:00:00]
)
end
test "count_conversations_by_date/1 retrieves the number of conversations created per day", %{
account: account,
customer: customer
} do
count = 5
inserted_at = ~N[2020-09-01 12:00:00]
for _i <- 1..count do
conversation_fixture(account, customer, %{inserted_at: inserted_at})
end
assert [%{count: ^count, date: ~D[2020-09-01]}] =
Reporting.count_conversations_by_date(account.id)
end
test "count_conversations_by_date/1 groups by date correctly", %{
account: account,
customer: customer
} do
conversation_fixture(account, customer, %{inserted_at: ~N[2020-09-01 12:00:00]})
conversation_fixture(account, customer, %{inserted_at: ~N[2020-09-02 12:00:00]})
conversation_fixture(account, customer, %{inserted_at: ~N[2020-09-03 12:00:00]})
assert [
%{date: ~D[2020-09-01], count: 1},
%{date: ~D[2020-09-02], count: 1},
%{date: ~D[2020-09-03], count: 1}
] = Reporting.count_conversations_by_date(account.id)
end
test "count_conversations_by_date/3 fetches conversations between two dates", %{
account: account,
customer: customer
} do
conversation_fixture(account, customer, %{inserted_at: ~N[2020-09-01 12:00:00]})
conversation_fixture(account, customer, %{inserted_at: ~N[2020-09-02 12:00:00]})
conversation_fixture(account, customer, %{inserted_at: ~N[2020-09-03 12:00:00]})
conversation_fixture(account, customer, %{inserted_at: ~N[2020-09-04 12:00:00]})
assert [
%{date: ~D[2020-09-02], count: 1},
%{date: ~D[2020-09-03], count: 1}
] =
Reporting.count_conversations_by_date(
account.id,
~N[2020-09-02 11:00:00],
~N[2020-09-03 13:00:00]
)
end
test "count_sent_messages_by_date/1 groups by date correctly", %{
account: account,
customer: customer
} do
user_2 = user_fixture(account)
user_3 = user_fixture(account)
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-01 12:00:00],
user_id: user_2.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-02 12:00:00],
user_id: user_2.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-03 12:00:00],
user_id: user_3.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-03 12:00:00],
user_id: user_3.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-03 12:00:00],
customer_id: customer.id
})
assert [
%{date: ~D[2020-09-01], count: 1},
%{date: ~D[2020-09-02], count: 1},
%{date: ~D[2020-09-03], count: 2}
] = Reporting.count_sent_messages_by_date(account.id)
end
test "count_received_messages_by_date/1 groups by date correctly", %{
account: account,
customer: customer
} do
user_2 = user_fixture(account)
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-01 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-02 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-03 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-03 12:00:00],
user_id: user_2.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-03 12:00:00],
user_id: user_2.id
})
assert [
%{date: ~D[2020-09-01], count: 1},
%{date: ~D[2020-09-02], count: 1},
%{date: ~D[2020-09-03], count: 1}
] = Reporting.count_received_messages_by_date(account.id)
end
end
describe "count_messages_by_weekday/1" do
setup do
account = account_fixture()
customer = customer_fixture(account)
{:ok, account: account, customer: customer}
end
test "correctly calculates total and avg of customer messages per day",
%{
account: account,
customer: customer
} do
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-28 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-29 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-29 12:01:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-30 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-10-01 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-10-02 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-10-03 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-10-04 12:00:00],
customer_id: customer.id
})
message_fixture(account, conversation, %{
inserted_at: ~N[2020-10-05 12:00:00],
customer_id: customer.id
})
assert [
%{day: "Monday", average: 1.0, total: 2},
%{day: "Tuesday", average: 2.0, total: 2},
%{day: "Wednesday", average: 1.0, total: 1},
%{day: "Thursday", average: 1.0, total: 1},
%{day: "Friday", average: 1.0, total: 1},
%{day: "Saturday", average: 1.0, total: 1},
%{day: "Sunday", average: 1.0, total: 1}
] = Reporting.count_messages_by_weekday(account.id)
end
test "includes zero day counts for weekdays with no messages", %{
account: account,
customer: customer
} do
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{
inserted_at: ~N[2020-09-28 12:00:00],
customer_id: customer.id
})
assert [
%{day: "Monday", average: 1.0, total: 1},
%{day: "Tuesday", average: 0.0, total: 0},
%{day: "Wednesday", average: 0.0, total: 0},
%{day: "Thursday", average: 0.0, total: 0},
%{day: "Friday", average: 0.0, total: 0},
%{day: "Saturday", average: 0.0, total: 0},
%{day: "Sunday", average: 0.0, total: 0}
] = Reporting.count_messages_by_weekday(account.id)
end
test "doesn't count messages without a customer", %{
account: account,
customer: customer
} do
conversation = conversation_fixture(account, customer)
message_fixture(account, conversation, %{inserted_at: ~N[2020-09-28 12:00:00]})
assert [
%{day: "Monday", average: 0.0, total: 0},
%{day: "Tuesday", average: 0.0, total: 0},
%{day: "Wednesday", average: 0.0, total: 0},
%{day: "Thursday", average: 0.0, total: 0},
%{day: "Friday", average: 0.0, total: 0},
%{day: "Saturday", average: 0.0, total: 0},
%{day: "Sunday", average: 0.0, total: 0}
] = Reporting.count_messages_by_weekday(account.id)
end
test "doesn't count messages from other accounts", %{
account: account,
customer: customer
} do
different_account = account_fixture()
conversation = conversation_fixture(different_account, customer)
message_fixture(different_account, conversation, %{
inserted_at: ~N[2020-09-28 12:00:00],
customer_id: customer.id
})
assert [
%{day: "Monday", average: 0.0, total: 0},
%{day: "Tuesday", average: 0.0, total: 0},
%{day: "Wednesday", average: 0.0, total: 0},
%{day: "Thursday", average: 0.0, total: 0},
%{day: "Friday", average: 0.0, total: 0},
%{day: "Saturday", average: 0.0, total: 0},
%{day: "Sunday", average: 0.0, total: 0}
] = Reporting.count_messages_by_weekday(account.id)
end
end
describe "count_customers_by_date/1" do
setup do
account = account_fixture()
{:ok, account: account}
end
test "it groups by date correctly", %{
account: account
} do
customer_fixture(account, %{
inserted_at: ~N[2020-10-12 12:00:00]
})
customer_fixture(account, %{
inserted_at: ~N[2020-10-11 12:00:00]
})
customer_fixture(account, %{
inserted_at: ~N[2020-10-10 12:00:00]
})
customer_fixture(account, %{
inserted_at: ~N[2020-10-12 12:00:00]
})
assert [
%{date: ~D[2020-10-10], count: 1},
%{date: ~D[2020-10-11], count: 1},
%{date: ~D[2020-10-12], count: 2}
] = Reporting.count_customers_by_date(account.id)
end
end
describe "count_customers_by_date/3" do
setup do
account = account_fixture()
{:ok, account: account}
end
test "Fetches customers between two dates", %{
account: account
} do
customer_fixture(account, %{
inserted_at: ~N[2020-10-12 12:00:00]
})
customer_fixture(account, %{
inserted_at: ~N[2020-10-11 12:00:00]
})
customer_fixture(account, %{
inserted_at: ~N[2020-10-10 12:00:00]
})
customer_fixture(account, %{
inserted_at: ~N[2020-10-13 12:00:00]
})
assert [
%{date: ~D[2020-10-10], count: 1},
%{date: ~D[2020-10-11], count: 1},
%{date: ~D[2020-10-12], count: 1}
] =
Reporting.count_customers_by_date(
account.id,
~N[2020-10-10 11:00:00],
~N[2020-10-12 13:00:00]
)
end
end
end
| 31.091703
| 100
| 0.572472
|
938f1f2e7a9ac3c976225c6689f4a68a44fa9500
| 573
|
exs
|
Elixir
|
languages/elixir/exercises/concept/dna-encoding/mix.exs
|
AlexLeSang/v3
|
3d35961a961b5a2129b1d42f1d118972d9665357
|
[
"MIT"
] | 3
|
2020-07-25T06:24:00.000Z
|
2020-09-14T17:39:11.000Z
|
languages/elixir/exercises/concept/dna-encoding/mix.exs
|
AlexLeSang/v3
|
3d35961a961b5a2129b1d42f1d118972d9665357
|
[
"MIT"
] | 1
|
2020-01-26T20:08:06.000Z
|
2020-01-26T20:08:06.000Z
|
languages/elixir/exercises/concept/dna-encoding/mix.exs
|
AlexLeSang/v3
|
3d35961a961b5a2129b1d42f1d118972d9665357
|
[
"MIT"
] | null | null | null |
defmodule DNA.MixProject do
  use Mix.Project

  # Mix project configuration. Run "mix help compile.app" to learn
  # about applications.
  def project do
    [
      app: :bitstrings,
      version: "0.1.0",
      # elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # OTP application configuration.
  def application do
    [extra_applications: [:logger]]
  end

  # This exercise has no external dependencies.
  # Run "mix help deps" to learn about dependencies.
  defp deps, do: []
end
| 19.758621
| 87
| 0.574171
|
938fa0fc687e7e04544ca2fdea2a20801781c84a
| 861
|
exs
|
Elixir
|
test/cauzzipay_web/views/users_view_test.exs
|
Gustavo-Cauzzi/Cauzzipay
|
912b3fa13dbb920a5af242134c77d44e63c39c6f
|
[
"MIT"
] | 1
|
2021-05-19T01:25:54.000Z
|
2021-05-19T01:25:54.000Z
|
test/cauzzipay_web/views/users_view_test.exs
|
Gustavo-Cauzzi/Cauzzipay
|
912b3fa13dbb920a5af242134c77d44e63c39c6f
|
[
"MIT"
] | null | null | null |
test/cauzzipay_web/views/users_view_test.exs
|
Gustavo-Cauzzi/Cauzzipay
|
912b3fa13dbb920a5af242134c77d44e63c39c6f
|
[
"MIT"
] | null | null | null |
defmodule CauzzipayWeb.UsersViewTest do
  use CauzzipayWeb.ConnCase, async: true

  # para poder usar o render() / so we can call render() directly
  import Phoenix.View

  alias Cauzzipay.{Account, User}
  alias CauzzipayWeb.UsersView

  test "renders create.json" do
    params = %{
      name: "Gustavo",
      password: "123456",
      nickname: "Gustavo",
      email: "gustavo@email.com",
      age: 18
    }

    # Create a real user so the view receives a persisted struct; capture the
    # generated ids to compare against the rendered payload.
    {:ok, %User{id: user_id, account: %Account{id: account_id}} = user} = Cauzzipay.create_user(params)

    response = render(UsersView, "create.json", user: user)

    # New accounts start with a zero balance (as a Decimal), and the password
    # must not appear in the rendered payload.
    expected_response = %{
      message: "User created",
      user: %{
        account: %{
          account_id: account_id,
          balance: Decimal.new("0.00")
        },
        id: user_id,
        name: "Gustavo",
        nickname: "Gustavo"
      }
    }

    assert expected_response == response
  end
end
| 22.657895
| 103
| 0.603949
|
938fbd40bf2d3907c546c84c2f96ecb1b69e6fb6
| 172
|
ex
|
Elixir
|
fixtures/elixir_output/post_number.ex
|
martinsirbe/curlconverter
|
c5324e85d2ca24ef4743fb2bb36139d23367e293
|
[
"MIT"
] | 4,955
|
2015-01-02T09:04:20.000Z
|
2021-10-06T03:54:43.000Z
|
fixtures/elixir_output/post_number.ex
|
martinsirbe/curlconverter
|
c5324e85d2ca24ef4743fb2bb36139d23367e293
|
[
"MIT"
] | 242
|
2015-03-27T05:59:11.000Z
|
2021-10-03T08:36:05.000Z
|
fixtures/elixir_output/post_number.ex
|
martinsirbe/curlconverter
|
c5324e85d2ca24ef4743fb2bb36139d23367e293
|
[
"MIT"
] | 504
|
2015-01-02T16:04:36.000Z
|
2021-10-01T03:43:55.000Z
|
# Expected-output fixture for curlconverter: a POST of the bare numeric body
# "123" to http://a.com/ expressed with HTTPoison.
# NOTE(review): this file is compared against generated output — confirm the
# generator tolerates comments before keeping them here.
request = %HTTPoison.Request{
  method: :post,
  url: "http://a.com/",
  options: [],
  headers: [],
  params: [],
  body: ~s|123|
}

response = HTTPoison.request(request)
| 15.636364
| 37
| 0.598837
|
938fbe60f7ef7589ec926bfd417a380cc487ec14
| 4,556
|
ex
|
Elixir
|
apps/ex_wire/lib/ex_wire/dev_p2p.ex
|
atoulme/mana
|
cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
apps/ex_wire/lib/ex_wire/dev_p2p.ex
|
atoulme/mana
|
cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
apps/ex_wire/lib/ex_wire/dev_p2p.ex
|
atoulme/mana
|
cff3fd96c23feaaeb9fe32df3c0d35ee6dc548a5
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
defmodule ExWire.DEVp2p do
  @moduledoc """
  Functions that deal directly with the DEVp2p Wire Protocol.

  For more information, please see:
  https://github.com/ethereum/wiki/wiki/%C3%90%CE%9EVp2p-Wire-Protocol
  """

  alias ExWire.{Config, Packet}

  defmodule Session do
    @moduledoc """
    Module to hold struct for a DEVp2p Wire Protocol session.
    The session should be active when `Hello` messages have been exchanged.

    See https://github.com/ethereum/wiki/wiki/%C3%90%CE%9EVp2p-Wire-Protocol#session-management
    """

    alias ExWire.Packet.Hello

    # Each handshake slot is `false` until the corresponding Hello packet has
    # been sent/received, after which it holds that packet.
    @type handshake_status :: boolean | ExWire.Packet.Hello.t()
    @type t :: %Session{hello_sent: handshake_status, hello_received: handshake_status}

    defstruct hello_sent: false, hello_received: false

    @doc """
    Checks whether or not the session is active.

    A session is only active if the handshake is complete and if there are overlapping capabilities, meaning
    that some of the sub-protocols are the same (e.g. eth 62)

    ## Examples

        iex> hello_received = %ExWire.Packet.Hello{caps: [{"eth", 62}]}
        iex> hello_sent = %ExWire.Packet.Hello{caps: [{"eth", 62}]}
        iex> ExWire.DEVp2p.Session.active?(%ExWire.DEVp2p.Session{hello_received: hello_received, hello_sent: hello_sent})
        true
    """
    @spec active?(t) :: boolean()
    # Either side of the handshake still pending -> not active.
    def active?(%Session{hello_received: false}), do: false
    def active?(%Session{hello_sent: false}), do: false

    # Both Hello packets present: activity now hinges on capability overlap.
    def active?(session = %Session{hello_sent: %Hello{}, hello_received: %Hello{}}) do
      compatible_capabilities?(session)
    end

    # Resets both handshake slots, returning the session to its initial state.
    @spec disconnect(t) :: Session.t()
    def disconnect(session = %Session{}) do
      %{session | hello_sent: false, hello_received: false}
    end

    # True when the two Hello packets advertise at least one common capability.
    @spec compatible_capabilities?(t) :: boolean()
    def compatible_capabilities?(session = %Session{}) do
      %Session{hello_received: hello_received, hello_sent: hello_sent} = session

      intersection =
        MapSet.intersection(
          to_mapset(hello_received.caps),
          to_mapset(hello_sent.caps)
        )

      !Enum.empty?(intersection)
    end

    defp to_mapset(list) do
      Enum.into(list, MapSet.new())
    end
  end

  @doc """
  Convenience function to create an `ExWire.DEVp2p.Session` struct
  """
  @spec init_session :: Session.t()
  def init_session do
    %Session{}
  end

  @doc """
  Function to create a DEVp2p struct needed for a protocol handshake. This
  should be an `ExWire.Packet.Hello` struct with the appropriate values filled in.
  """
  @spec build_hello :: Packet.Hello.t()
  def build_hello do
    # All fields come from node-level configuration.
    %Packet.Hello{
      p2p_version: Config.p2p_version(),
      client_id: Config.client_id(),
      caps: Config.caps(),
      listen_port: Config.listen_port(),
      node_id: Config.node_id()
    }
  end

  @doc """
  Function to update `ExWire.DEVp2p.Session` when a handshake is sent. The
  handshake should be an `ExWire.Packet.Hello` that we have sent to a peer.
  """
  @spec hello_sent(Session.t(), Packet.Hello.t()) :: Session.t()
  def hello_sent(session, hello = %Packet.Hello{}) do
    %{session | hello_sent: hello}
  end

  @doc """
  Function to update `ExWire.DEVp2p.Session` when a handshake is received. The
  handshake should be an `ExWire.Packet.Hello` that we have received from a peer.
  """
  @spec hello_received(Session.t(), Packet.Hello.t()) :: Session.t()
  def hello_received(session, hello = %Packet.Hello{}) do
    %{session | hello_received: hello}
  end

  @doc """
  Function to check whether or not a `ExWire.DEVp2p.Session` is active. See
  `ExWire.DEVp2p.Session.active?/1` for more information.
  """
  @spec session_active?(Session.t()) :: boolean()
  def session_active?(session), do: Session.active?(session)

  @spec session_compatible?(Session.t()) :: boolean()
  def session_compatible?(session), do: Session.compatible_capabilities?(session)

  @doc """
  Function to handles other messages related to the DEVp2p protocol that a peer
  sends. The messages could be `ExWire.Packet.Disconnect`, `ExWire.Packet.Ping`,
  or `ExWire.Packet.Pong`.

  An `ExWire.DEVp2p.Session` is required as the first argument in order to
  properly update the session based on the message received.
  """
  @spec handle_message(Session.t(), struct()) ::
          {:error, :handshake_incomplete} | {:ok, Session.t()}
  def handle_message(session, packet = %Packet.Hello{}) do
    {:ok, hello_received(session, packet)}
  end

  # Any non-Hello message before the handshake completes is rejected.
  def handle_message(_session, _message) do
    {:error, :handshake_incomplete}
  end
end
| 32.776978
| 122
| 0.686348
|
938fcb1f2f41aa0f1de95dfbe01b1febeb93f4a6
| 142
|
ex
|
Elixir
|
lib/phantomchain/crypto/networks/mainnet.ex
|
PhantomChain/elixir-crypto
|
d842920d91a9bcbe997736b487f4e74ba0bf2fe3
|
[
"MIT"
] | null | null | null |
lib/phantomchain/crypto/networks/mainnet.ex
|
PhantomChain/elixir-crypto
|
d842920d91a9bcbe997736b487f4e74ba0bf2fe3
|
[
"MIT"
] | null | null | null |
lib/phantomchain/crypto/networks/mainnet.ex
|
PhantomChain/elixir-crypto
|
d842920d91a9bcbe997736b487f4e74ba0bf2fe3
|
[
"MIT"
] | null | null | null |
defmodule PhantomChain.Crypto.Networks.Mainnet do
  # Network parameters for the PhantomChain mainnet: chain epoch timestamp,
  # address version byte, and WIF prefix.
  defstruct epoch: "2017-03-21T13:00:00.000Z",
            version: "17",
            wif: 170
end
| 17.75
| 49
| 0.669014
|
938fe128209c844396646efa8ac3635292bbdf32
| 1,568
|
ex
|
Elixir
|
lib/dark_matter/structs.ex
|
dark-elixir/dark_matter
|
3f70edf4220ad1c066489110ef30880a143522fd
|
[
"Apache-2.0"
] | 2
|
2020-12-01T21:33:44.000Z
|
2021-05-29T14:51:18.000Z
|
lib/dark_matter/structs.ex
|
dark-elixir/dark_matter
|
3f70edf4220ad1c066489110ef30880a143522fd
|
[
"Apache-2.0"
] | null | null | null |
lib/dark_matter/structs.ex
|
dark-elixir/dark_matter
|
3f70edf4220ad1c066489110ef30880a143522fd
|
[
"Apache-2.0"
] | 2
|
2020-09-02T14:36:58.000Z
|
2021-04-22T11:20:43.000Z
|
defmodule DarkMatter.Structs do
  @moduledoc """
  Utils for working with structs.
  """
  @moduledoc since: "1.0.5"

  import DarkMatter.Guards, only: [is_module: 1]

  @doc """
  Determine keys for a given `module` or raises `ArgumentError`.

  ## Examples

      iex> keys(IO.Stream)
      [:device, :line_or_bytes, :raw]

      iex> keys(%IO.Stream{})
      [:device, :line_or_bytes, :raw]

      iex> keys(%{})
      ** (FunctionClauseError) no function clause matching in DarkMatter.Structs.keys/1
  """
  @spec keys(module() | struct()) :: [atom()]
  def keys(module_or_struct) when is_module(module_or_struct) or is_struct(module_or_struct) do
    visible_keys =
      for key <- Map.keys(Map.from_struct(module_or_struct)), not meta_key?(key), do: key

    Enum.sort(visible_keys)
  end

  @doc """
  Determine if a given `key` is a meta map property.

  ## Examples

      iex> meta_key?(:__meta__)
      true

      iex> meta_key?(:__struct__)
      true

      iex> meta_key?(:non_meta)
      false
  """
  @spec meta_key?(atom()) :: boolean()
  def meta_key?(key) when is_atom(key) do
    name = Atom.to_string(key)
    String.starts_with?(name, "__") and String.ends_with?(name, "__")
  end

  @doc """
  Reduce a map with only non nil default values for a given `module`.
  """
  @doc since: "1.1.3"
  @spec defaults(module()) :: %{required(atom()) => any()}
  def defaults(module) when is_atom(module) do
    module
    |> struct()
    |> Map.from_struct()
    |> Enum.reject(fn {key, value} -> meta_key?(key) or is_nil(value) end)
    |> Map.new()
  end
end
| 24.123077
| 95
| 0.623087
|
938fe2fb23ebd9209b19ada35492c59bda1fa64f
| 257
|
ex
|
Elixir
|
apps/banking_api/lib/banking_api.ex
|
ckoliveira/banking_api
|
3a4077a97e2cfcb2475bafd582a836bd03f6c6cc
|
[
"Apache-2.0"
] | null | null | null |
apps/banking_api/lib/banking_api.ex
|
ckoliveira/banking_api
|
3a4077a97e2cfcb2475bafd582a836bd03f6c6cc
|
[
"Apache-2.0"
] | null | null | null |
apps/banking_api/lib/banking_api.ex
|
ckoliveira/banking_api
|
3a4077a97e2cfcb2475bafd582a836bd03f6c6cc
|
[
"Apache-2.0"
] | null | null | null |
defmodule BankingApi do
  @moduledoc """
  BankingApi keeps the contexts that define your domain
  and business logic.

  Contexts are also responsible for managing your data, regardless
  if it comes from the database, an external API or others.
  """
end
| 25.7
| 66
| 0.758755
|
9390385f348190761c21b95678bad9db699b2116
| 2,562
|
ex
|
Elixir
|
lib/logger_json/plug/metadata_formatters/elk.ex
|
anthonyfalzetti/logger_json
|
3724714cc3be98924ac4fcbc20d5d9b31922fde9
|
[
"MIT"
] | null | null | null |
lib/logger_json/plug/metadata_formatters/elk.ex
|
anthonyfalzetti/logger_json
|
3724714cc3be98924ac4fcbc20d5d9b31922fde9
|
[
"MIT"
] | null | null | null |
lib/logger_json/plug/metadata_formatters/elk.ex
|
anthonyfalzetti/logger_json
|
3724714cc3be98924ac4fcbc20d5d9b31922fde9
|
[
"MIT"
] | null | null | null |
# Only compile this formatter when Plug is available as a dependency.
if Code.ensure_loaded?(Plug) do
  defmodule LoggerJSON.Plug.MetadataFormatters.ELK do
    @moduledoc """
    Formats connection into Logger metadata:

    * `connection.type` - type of connection (Sent or Chunked);
    * `connection.method` - HTTP request method;
    * `connection.request_path` - HTTP request path;
    * `connection.request_id` - value of `X-Request-ID` response header (see `Plug.RequestId`);
    * `connection.status` - HTTP status code sent to a client;
    * `client.user_agent` - value of `User-Agent` header;
    * `client.ip' - value of `X-Forwarded-For` header if present, otherwise - remote IP of a connected client;
    * `client.api_version' - version of API that was requested by a client;
    * `node.hostname` - system hostname;
    * `node.pid` - Erlang VM process identifier;
    * `phoenix.controller` - Phoenix controller that processed the request;
    * `phoenix.action` - Phoenix action that processed the request;
    * `latency_μs` - time in microseconds taken to process the request.

    NOTE(review): `connection.request_id` is documented above but is not set by
    `build_metadata/3` below — confirm whether it is injected elsewhere or the
    doc entry is stale.
    """
    import Jason.Helpers, only: [json_map: 1]

    # Builds the Logger metadata keyword list for a finished Plug connection.
    # `latency` is in native time units; `client_version_header` names the
    # request header carrying the client's requested API version.
    @doc false
    def build_metadata(conn, latency, client_version_header) do
      latency_μs = System.convert_time_unit(latency, :native, :microsecond)

      [
        connection:
          json_map(
            type: connection_type(conn),
            method: conn.method,
            request_path: conn.request_path,
            status: conn.status
          ),
        client:
          json_map(
            user_agent: LoggerJSON.Plug.get_header(conn, "user-agent"),
            ip: remote_ip(conn),
            api_version: LoggerJSON.Plug.get_header(conn, client_version_header)
          ),
        node: node_metadata(),
        latency_μs: latency_μs
      ] ++ phoenix_metadata(conn)
    end

    # A conn in the :set_chunked state was streamed; anything else was sent whole.
    defp connection_type(%{state: :set_chunked}), do: "chunked"
    defp connection_type(_), do: "sent"

    # Prefer the X-Forwarded-For header (proxied requests) over the socket peer IP.
    defp remote_ip(conn) do
      LoggerJSON.Plug.get_header(conn, "x-forwarded-for") || to_string(:inet_parse.ntoa(conn.remote_ip))
    end

    # Phoenix controller/action metadata is only present for Phoenix-routed conns.
    defp phoenix_metadata(%{private: %{phoenix_controller: controller, phoenix_action: action}}) do
      [phoenix: %{controller: controller, action: action}]
    end

    defp phoenix_metadata(_conn) do
      []
    end

    # Hostname plus OS process id of the VM, for correlating logs across nodes.
    defp node_metadata do
      {:ok, hostname} = :inet.gethostname()

      # NOTE(review): System.get_pid/0 is deprecated in recent Elixir versions
      # in favor of System.pid/0 — consider migrating if the supported Elixir
      # version allows it.
      vm_pid =
        case Integer.parse(System.get_pid()) do
          {pid, _units} -> pid
          _ -> nil
        end

      json_map(hostname: to_string(hostname), vm_pid: vm_pid)
    end
  end
end
| 35.09589
| 112
| 0.640125
|
9390453cd0e60a29c9ab49524c4e0e5d56defbf1
| 693
|
ex
|
Elixir
|
lib/ambrosia_web/controllers/page_controller.ex
|
emeric-martineau/ambrosia
|
74c55d35cf66537d7c8a33ef6057e89d44abd347
|
[
"MIT"
] | 2
|
2020-05-25T05:28:31.000Z
|
2020-05-25T08:10:43.000Z
|
lib/ambrosia_web/controllers/page_controller.ex
|
emeric-martineau/ambrosia
|
74c55d35cf66537d7c8a33ef6057e89d44abd347
|
[
"MIT"
] | 9
|
2020-05-25T16:39:15.000Z
|
2020-11-11T16:51:37.000Z
|
lib/ambrosia_web/controllers/page_controller.ex
|
emeric-martineau/ambrosia
|
74c55d35cf66537d7c8a33ef6057e89d44abd347
|
[
"MIT"
] | null | null | null |
defmodule AmbrosiaWeb.PageController do
  use AmbrosiaWeb, :controller

  # Landing page.
  def index(conn, _params), do: render(conn, "index.html")

  # Logout as a controller action, because
  # <a href="" data-method="delete"> is not always working.
  def logout(conn, _params) do
    conn
    |> Pow.Plug.delete()
    |> render("index.html")
  end

  # Post-registration confirmation page.
  def thank_you(conn, _params), do: render(conn, "register_thank_you.html")

  # Stores the chosen locale in a cookie and redirects back to the given URL.
  def set_locale(conn, params) do
    %{"url" => path, "locale" => locale} = params

    # Cookie name comes from the :i18n application configuration.
    cookie_key =
      :ambrosia
      |> Application.get_env(:i18n)
      |> Keyword.get(:cookie_key)

    conn
    |> Plug.Conn.put_resp_cookie(cookie_key, locale)
    |> redirect(to: path)
    |> Plug.Conn.halt()
  end
end
| 22.354839
| 63
| 0.63925
|
93904a956872b8b6e553e84b99326fe3065144a9
| 957
|
ex
|
Elixir
|
test/support/channel_case.ex
|
cosmos-sajal/secret_keeper
|
b5ae5e01ed4cd30afb91ae02fedf2f1de9366bf0
|
[
"MIT"
] | 2
|
2019-09-14T14:32:37.000Z
|
2020-06-05T15:17:34.000Z
|
test/support/channel_case.ex
|
cosmos-sajal/secret_keeper
|
b5ae5e01ed4cd30afb91ae02fedf2f1de9366bf0
|
[
"MIT"
] | null | null | null |
test/support/channel_case.ex
|
cosmos-sajal/secret_keeper
|
b5ae5e01ed4cd30afb91ae02fedf2f1de9366bf0
|
[
"MIT"
] | null | null | null |
defmodule SecretKeeperWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.

  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common datastructures and query the data layer.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest

      # The default endpoint for testing
      @endpoint SecretKeeperWeb.Endpoint
    end
  end

  setup tags do
    # Check out a sandboxed DB connection for this test.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(SecretKeeper.Repo)

    # Non-async tests share the connection with spawned processes
    # (channel processes need DB access from the test's sandbox).
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(SecretKeeper.Repo, {:shared, self()})
    end

    :ok
  end
end
| 25.184211
| 74
| 0.722048
|
93907f1f7c313a621b64b1ee88daf83ffe6e9aeb
| 2,626
|
exs
|
Elixir
|
mix.exs
|
bucha/nerves_system_bonzero
|
997c05c3668ed42b5a9033e210805d0ac0bb8778
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
bucha/nerves_system_bonzero
|
997c05c3668ed42b5a9033e210805d0ac0bb8778
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
bucha/nerves_system_bonzero
|
997c05c3668ed42b5a9033e210805d0ac0bb8778
|
[
"Apache-2.0"
] | null | null | null |
defmodule NervesSystemBonzero.MixProject do
  use Mix.Project

  @app :nerves_system_bonzero

  # Version lives in the VERSION file so tooling outside Mix can read it too.
  @version Path.join(__DIR__, "VERSION")
           |> File.read!()
           |> String.trim()

  def project do
    [
      app: @app,
      version: @version,
      elixir: "~> 1.6",
      compilers: Mix.compilers() ++ [:nerves_package],
      nerves_package: nerves_package(),
      description: description(),
      package: package(),
      deps: deps(),
      # BUGFIX: the docs alias previously read `©_images/1` — the
      # `&copy` capture prefix had been mangled into the HTML "©" entity,
      # which is invalid Elixir. Restored the capture of copy_images/1.
      aliases: [loadconfig: [&bootstrap/1], docs: ["docs", &copy_images/1]],
      docs: [extras: ["README.md"], main: "readme"]
    ]
  end

  def application do
    []
  end

  # Sets the Nerves target and boots nerves_bootstrap before config is loaded.
  defp bootstrap(args) do
    set_target()
    Application.start(:nerves_bootstrap)
    Mix.Task.run("loadconfig", args)
  end

  # Nerves system package definition (Buildroot platform, checksummed files).
  defp nerves_package do
    [
      type: :system,
      # artifact_sites: [
      #   {:github_releases, "nerves-project/#{@app}"}
      # ],
      build_runner_opts: build_runner_opts(),
      platform: Nerves.System.BR,
      platform_config: [
        defconfig: "nerves_defconfig"
      ],
      checksum: package_files()
    ]
  end

  defp deps do
    [
      {:nerves, "~> 1.3", runtime: false},
      {:nerves_system_br, "1.7.1", runtime: false},
      {:nerves_toolchain_armv6_rpi_linux_gnueabi, "1.1.0", runtime: false},
      {:nerves_system_linter, "~> 0.3.0", runtime: false},
      {:ex_doc, "~> 0.18", only: [:dev, :test], runtime: false}
    ]
  end

  defp description do
    """
    Nerves System - Raspberry Pi Zero and Zero W
    """
  end

  defp package do
    [
      maintainers: ["Frank Hunleth", "Justin Schneck"],
      files: package_files(),
      licenses: ["Apache 2.0"],
      links: %{"Github" => "https://github.com/bucha/#{@app}"}
    ]
  end

  # Files included in the Hex package and in the artifact checksum.
  defp package_files do
    [
      "fwup_include",
      "rootfs_overlay",
      "CHANGELOG.md",
      "cmdline.txt",
      "config.txt",
      "fwup-revert.conf",
      "fwup.conf",
      "LICENSE",
      "linux-4.19.defconfig",
      "mix.exs",
      "nerves_defconfig",
      "post-build.sh",
      "post-createfs.sh",
      "ramoops.dts",
      "README.md",
      "VERSION"
    ]
  end

  # Copy the images referenced by docs, since ex_doc doesn't do this.
  defp copy_images(_) do
    File.cp_r("assets", "doc/assets")
  end

  # Pass BR2_PRIMARY_SITE through to Buildroot when set in the environment.
  defp build_runner_opts() do
    if primary_site = System.get_env("BR2_PRIMARY_SITE") do
      [make_args: ["BR2_PRIMARY_SITE=#{primary_site}"]]
    else
      []
    end
  end

  # Mix.target/1 exists on newer Elixir; fall back to MIX_TARGET otherwise.
  defp set_target() do
    if function_exported?(Mix, :target, 1) do
      apply(Mix, :target, [:target])
    else
      System.put_env("MIX_TARGET", "target")
    end
  end
end
| 22.637931
| 76
| 0.581493
|
93909196af5a03f082330aaadfa8bd4f468f6756
| 1,457
|
exs
|
Elixir
|
mix.exs
|
hubertlepicki/scrivener_ecto
|
e782eb7e5581cb0c70f22165dfea230c618f7273
|
[
"MIT"
] | null | null | null |
mix.exs
|
hubertlepicki/scrivener_ecto
|
e782eb7e5581cb0c70f22165dfea230c618f7273
|
[
"MIT"
] | null | null | null |
mix.exs
|
hubertlepicki/scrivener_ecto
|
e782eb7e5581cb0c70f22165dfea230c618f7273
|
[
"MIT"
] | null | null | null |
defmodule Scrivener.Ecto.Mixfile do
  use Mix.Project

  # Mix project definition for scrivener_ecto.
  def project do
    [
      app: :scrivener_ecto,
      version: "1.3.0",
      elixir: "~> 1.3",
      elixirc_paths: elixirc_paths(Mix.env()),
      package: package(),
      description: "Paginate your Ecto queries with Scrivener",
      deps: deps(),
      aliases: aliases(),
      docs: [main: "readme", extras: ["README.md"]]
    ]
  end

  # `mix db.reset` drops, recreates and migrates the test database.
  defp aliases do
    ["db.reset": ["ecto.drop", "ecto.create", "ecto.migrate"]]
  end

  def application do
    [applications: applications(Mix.env())]
  end

  # The test env additionally boots the DB stack.
  defp applications(:test), do: [:scrivener, :postgrex, :ecto, :logger]
  defp applications(_), do: [:scrivener, :logger]

  defp deps do
    [
      {:scrivener, "~> 2.4"},
      {:ecto, "3.0.0-rc.1"},
      {:ecto_sql, "3.0.0-rc.0"},
      {:dialyxir, "~> 0.5.0", only: :dev},
      {:earmark, ">= 0.0.0", only: :dev},
      {:ex_doc, "~> 0.18.0", only: :dev},
      {:postgrex, "0.14.0-rc.1", only: :test, override: true}
    ]
  end

  # Test support modules are only compiled in the test env.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp package do
    [
      maintainers: ["Drew Olson"],
      licenses: ["MIT"],
      links: %{"github" => "https://github.com/drewolson/scrivener_ecto"},
      files: ["lib/scrivener", "mix.exs", "README.md"]
    ]
  end
end
| 20.814286
| 74
| 0.511325
|
9390a55981b97372f96bb8d240f27df67f4e070f
| 1,978
|
exs
|
Elixir
|
mix.exs
|
thedelchop/ecto_job
|
0157d857e4436a35ebcc0a9f5cd4b28b33292f62
|
[
"MIT"
] | null | null | null |
mix.exs
|
thedelchop/ecto_job
|
0157d857e4436a35ebcc0a9f5cd4b28b33292f62
|
[
"MIT"
] | null | null | null |
mix.exs
|
thedelchop/ecto_job
|
0157d857e4436a35ebcc0a9f5cd4b28b33292f62
|
[
"MIT"
] | null | null | null |
defmodule EctoJob.Mixfile do
  use Mix.Project

  @version "3.1.0"
  @url "https://github.com/mbuhot/ecto_job"

  def project do
    [
      app: :ecto_job,
      description: "A transactional job queue built with Ecto, PostgreSQL and GenStage.",
      version: @version,
      elixir: "~> 1.10",
      elixirc_paths: elixirc_paths(Mix.env()),
      elixirc_options: [warnings_as_errors: true],
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      dialyzer: dialyzer(),
      docs: docs(),
      package: package()
    ]
  end

  def application do
    [extra_applications: [:logger] ++ extra_applications(Mix.env())]
  end

  # Each DB-specific test env boots only its own driver application.
  defp extra_applications(:test), do: [:postgrex]
  defp extra_applications(:test_myxql), do: [:myxql]
  defp extra_applications(_), do: []

  # Test support modules are compiled for both test envs.
  defp elixirc_paths(test) when test in [:test, :test_myxql], do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp package do
    [
      licenses: ["MIT"],
      maintainers: ["Mike Buhot (m.buhot@gmail.com)"],
      links: %{
        "Github" => @url
      }
    ]
  end

  defp dialyzer do
    [
      flags: ["-Werror_handling", "-Wno_unused", "-Wunmatched_returns", "-Wunderspecs"],
      # postgrex dep is optional: we want to ignore warnings for calling unknown
      # functions from this dep
      ignore_warnings: ".dialyzer.ignore-warnings.exs"
    ]
  end

  defp docs do
    [
      extras: ["README.md"],
      main: "readme",
      source_ref: "v#{@version}",
      source_url: @url,
      homepage_url: @url
    ]
  end

  defp deps do
    [
      {:ecto_sql, "~> 3.2"},
      # Both DB drivers are optional; consumers pick one.
      {:postgrex, "~> 0.15", optional: true},
      {:myxql, "~> 0.2", optional: true},
      {:jason, "~> 1.0"},
      {:gen_stage, "~> 1.0"},
      {:credo, "~> 1.0", only: :dev, runtime: false},
      {:dialyxir, "~> 1.0.0-rc.7", only: :dev, runtime: false},
      {:ex_doc, "~> 0.21", only: :dev, runtime: false},
      {:inch_ex, "~> 2.0", only: :dev, runtime: false}
    ]
  end
end
| 25.688312
| 89
| 0.576845
|
9390b0b77fff8db69a5550158ad124a13d3cb1f1
| 811
|
ex
|
Elixir
|
lib/protected_hello_web/views/error_helpers.ex
|
KeenMate/phoenix-guardian-example
|
163d83780f8a8f3b7be20b1e4be63945e8434fb9
|
[
"MIT"
] | null | null | null |
lib/protected_hello_web/views/error_helpers.ex
|
KeenMate/phoenix-guardian-example
|
163d83780f8a8f3b7be20b1e4be63945e8434fb9
|
[
"MIT"
] | null | null | null |
lib/protected_hello_web/views/error_helpers.ex
|
KeenMate/phoenix-guardian-example
|
163d83780f8a8f3b7be20b1e4be63945e8434fb9
|
[
"MIT"
] | null | null | null |
defmodule ProtectedHelloWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  use Phoenix.HTML

  @doc """
  Generates tag for inlined form input errors.
  """
  def error_tag(form, field) do
    for error <- Keyword.get_values(form.errors, field) do
      content_tag(:span, translate_error(error),
        class: "invalid-feedback",
        phx_feedback_for: input_id(form, field)
      )
    end
  end

  @doc """
  Translates an error message.
  """
  def translate_error({msg, opts}) do
    # Because the error messages we show in our forms and APIs
    # are defined inside Ecto, we need to translate them dynamically:
    # each `%{key}` placeholder is substituted with its option value.
    Enum.reduce(opts, msg, fn {key, value}, message ->
      String.replace(message, "%{#{key}}", to_string(value))
    end)
  end
end
| 26.16129
| 69
| 0.665845
|
9390c349044914db11d5c5f137b61cff2da873ff
| 115
|
ex
|
Elixir
|
web/lib/rfid_latachz/repo.ex
|
latachz/latachz-rfid
|
dbb1462fa258568831de3600ae6108e91080cf0a
|
[
"PostgreSQL",
"Unlicense",
"MIT"
] | 1
|
2020-01-26T05:41:49.000Z
|
2020-01-26T05:41:49.000Z
|
web/lib/rfid_latachz/repo.ex
|
latachz/latachz-rfid
|
dbb1462fa258568831de3600ae6108e91080cf0a
|
[
"PostgreSQL",
"Unlicense",
"MIT"
] | 1
|
2020-05-26T09:28:01.000Z
|
2020-05-26T09:28:01.000Z
|
web/lib/rfid_latachz/repo.ex
|
latachz/latachz-rfid
|
dbb1462fa258568831de3600ae6108e91080cf0a
|
[
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null |
defmodule RfidLatachz.Repo do
  @moduledoc """
  Ecto repository for the `:rfid_latachz` OTP application, backed by
  PostgreSQL.
  """

  use Ecto.Repo,
    otp_app: :rfid_latachz,
    adapter: Ecto.Adapters.Postgres
end
| 19.166667
| 35
| 0.747826
|
9390c94c4e3266edf31025d9d029b348c8595504
| 1,962
|
ex
|
Elixir
|
test/support/conn_case.ex
|
AkioCode/elxpro-blog
|
236984915851b91058e091414deb70c5e8fed72a
|
[
"MIT"
] | null | null | null |
test/support/conn_case.ex
|
AkioCode/elxpro-blog
|
236984915851b91058e091414deb70c5e8fed72a
|
[
"MIT"
] | 4
|
2021-08-11T03:19:33.000Z
|
2021-09-26T01:29:58.000Z
|
test/support/conn_case.ex
|
AkioCode/elxpro-blog
|
236984915851b91058e091414deb70c5e8fed72a
|
[
"MIT"
] | null | null | null |
defmodule ElxproBlogWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use ElxproBlogWeb.ConnCase, async: true`, although
  this option is not recommended for other databases.
  """

  use ExUnit.CaseTemplate

  alias Ecto.Adapters.SQL.Sandbox

  using do
    quote do
      # Import conveniences for testing with connections
      import Plug.Conn
      import Phoenix.ConnTest
      import ElxproBlogWeb.ConnCase

      alias ElxproBlogWeb.Router.Helpers, as: Routes

      # The default endpoint for testing
      @endpoint ElxproBlogWeb.Endpoint
    end
  end

  setup tags do
    :ok = Sandbox.checkout(ElxproBlog.Repo)

    # Every test starts with one post fixture available in its context.
    post = ElxproBlog.Factory.insert(:post)

    unless tags[:async] do
      Sandbox.mode(ElxproBlog.Repo, {:shared, self()})
    end

    {:ok, conn: Phoenix.ConnTest.build_conn(), post: post}
  end

  @doc """
  Decodes the JSON response body of `conn` into a map.
  """
  def get_resp_body(conn), do: conn |> Map.get(:resp_body) |> Jason.decode!()

  @doc """
  Returns the `"message"` field of the JSON response, raising if absent.
  """
  def get_resp_message(conn), do: fetch_field!(conn, "message")

  @doc """
  Returns the `"data"` field of the JSON response, raising if absent.
  """
  def get_resp_data(conn), do: fetch_field!(conn, "data")

  @doc """
  Returns the `"details"` field of the JSON response, raising if absent.
  """
  def get_resp_details(conn), do: fetch_field!(conn, "details")

  # Shared lookup for the three public accessors above (they were previously
  # three copy-pasted case expressions). Raises with the full decoded body
  # when `field` is missing, preserving the original error message format.
  defp fetch_field!(conn, field) do
    case get_resp_body(conn) do
      %{^field => value} -> value
      body -> raise "Field \"#{field}\" not found in #{inspect(body)}"
    end
  end
end
| 28.434783
| 77
| 0.685015
|
9390f7a2fdca30ee8c10d110de09b472c2e17b5a
| 866
|
ex
|
Elixir
|
lib/koans/01_equalities.ex
|
ekans/elixir-koans
|
f7cdae7a39f3cf581fcd484c0025a5ff07173fce
|
[
"MIT"
] | null | null | null |
lib/koans/01_equalities.ex
|
ekans/elixir-koans
|
f7cdae7a39f3cf581fcd484c0025a5ff07173fce
|
[
"MIT"
] | null | null | null |
lib/koans/01_equalities.ex
|
ekans/elixir-koans
|
f7cdae7a39f3cf581fcd484c0025a5ff07173fce
|
[
"MIT"
] | null | null | null |
# First koan in the series: equality, inequality and comparison operators.
defmodule Equalities do
  use Koans

  @intro """
  Welcome to the Elixir koans.
  Let these be your first humble steps towards learning a new language.
  The path laid in front of you is one of many.
  """

  # Replace ___ with the answer to make the koan pass.
  koan "We shall contemplate truth by testing reality, via equality" do
    assert true == true
  end

  koan "Not something is the opposite of it" do
    assert !true == false
  end

  koan "To understand reality, we must compare our expectations against reality" do
    assert 2 == 1 + 1
  end

  # `==` compares values loosely, so an integer can equal a float.
  koan "Some things may appear different, but be the same" do
    assert 1 == 2 / 2
  end

  koan "Unless they actually are different" do
    assert 3.2 != 2
  end

  koan "Some may be looking for bigger things" do
    assert 8 > 3
  end

  koan "Others are happy with less" do
    assert 1 < 3
  end
end
| 21.65
| 83
| 0.680139
|
939142d78b82e5094ea1fa2fb4b311d5dfe49cee
| 1,626
|
exs
|
Elixir
|
apps/eigr_functions/mix.exs
|
sleipnir/permastate-elixir
|
43cda3a186819c9cfd3ce993907f24251c9ef42b
|
[
"Apache-2.0"
] | 2
|
2021-07-28T23:02:24.000Z
|
2021-07-29T00:34:42.000Z
|
apps/eigr_functions/mix.exs
|
sleipnir/permastate-elixir
|
43cda3a186819c9cfd3ce993907f24251c9ef42b
|
[
"Apache-2.0"
] | null | null | null |
apps/eigr_functions/mix.exs
|
sleipnir/permastate-elixir
|
43cda3a186819c9cfd3ce993907f24251c9ef42b
|
[
"Apache-2.0"
] | 1
|
2021-07-28T21:31:12.000Z
|
2021-07-28T21:31:12.000Z
|
defmodule Functions.MixProject do
  use Mix.Project

  def project do
    [
      app: :eigr_functions,
      version: "0.1.0",
      # Umbrella app: build, config, deps and lockfile live at the repo root.
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      elixirc_paths: elixirc_paths(Mix.env()),
      consolidate_protocols: Mix.env() != :test
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # Base deps
      {:credo, "~> 1.5", only: [:dev, :test], runtime: false},
      {:ex_doc, ">= 0.0.0", only: :dev},
      {:flow, "~> 1.0"},
      {:google_protos, "~> 0.1.0"},
      # Grpc deps
      {:grpc, "~> 0.5.0-beta.1"},
      # 2.9.0 fixes some important bugs, so it's better to use ~> 2.9.0
      {:cowlib, "~> 2.9.0", override: true}
    ]
  end

  defp description do
    """
    Eigr Functions Elixir SDK.
    """
  end

  defp package do
    # These are the default files included in the package
    [
      name: :eigr_functions,
      files: ["lib", "mix.exs", "README*"],
      maintainers: ["Adriano Santos", "Weslei Juan Moser Pereira"],
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => "https://github.com/eigr-labs/functions-elixir-sdk"}
    ]
  end

  # Test support modules are only compiled in the test env.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
end
| 25.015385
| 79
| 0.559656
|
93914fea2f26821bfebbcccbf9ae09f47a85a0c9
| 1,996
|
ex
|
Elixir
|
lib/minizinc_results.ex
|
mbta/solverl
|
3d972db4fa173e9453357814884bd37f52a9713d
|
[
"MIT"
] | 30
|
2020-07-30T19:22:34.000Z
|
2022-03-28T02:06:55.000Z
|
lib/minizinc_results.ex
|
mbta/solverl
|
3d972db4fa173e9453357814884bd37f52a9713d
|
[
"MIT"
] | 1
|
2022-02-14T22:56:02.000Z
|
2022-02-15T14:13:53.000Z
|
lib/minizinc_results.ex
|
mbta/solverl
|
3d972db4fa173e9453357814884bd37f52a9713d
|
[
"MIT"
] | 4
|
2020-08-18T04:31:38.000Z
|
2022-03-19T19:33:26.000Z
|
defmodule MinizincResults do
  @moduledoc """
  Functions and data structures for working with data produced by Minizinc during runtime.
  """
  require Logger

  ############### Solver results ##################
  #
  # Each accessor accepts either the full solver-results map (which carries
  # the summary under the :summary key) or the summary map itself.

  def get_status(%{summary: summary}), do: get_status(summary)

  def get_status(summary), do: status(get_method(summary), summary[:status])

  def get_method(%{summary: summary}), do: get_method(summary)

  def get_method(summary), do: MinizincModel.method(summary[:model_info])

  def get_solver(%{summary: summary}), do: summary.solver

  def get_last_solution(%{summary: summary}), do: get_last_solution(summary)

  def get_last_solution(summary), do: summary[:last_solution]

  def get_solution_count(%{summary: summary}), do: get_solution_count(summary)

  def get_solution_count(summary), do: summary[:solution_count]

  def has_solution(solver_results), do: get_solution_count(solver_results) > 0

  # The objective value lives in the reserved "_objective" solution variable.
  def get_solution_objective(solution), do: get_solution_value(solution, "_objective")

  ## Get output generated by a solver for the given solution
  ## (through 'output' model clauses).
  def get_solution_output(solution), do: get_solution_value(solution, "_output")

  ## Get solution checker output
  def get_checker_output(solution), do: get_solution_value(solution, "_checker")

  ## Get value of the variable from the solution
  def get_solution_value(solution, varname), do: solution[:data][varname]

  def get_solution_index(solution), do: solution[:index]

  def get_solutions(solver_results), do: solver_results[:solutions]

  # Status normalization: exhausting the search of an optimization problem
  # (:minimize/:maximize) means the last solution found is optimal.
  def status(:satisfy, :all_solutions), do: :all_solutions

  def status(method, :all_solutions) when method in [:minimize, :maximize], do: :optimal

  def status(_method, status), do: status
end
| 22.426966
| 92
| 0.718437
|
939187d840312830f7871f6de97da0470e1d88aa
| 23,229
|
ex
|
Elixir
|
lib/rustler_precompiled.ex
|
philss/baked_nifs
|
bd3086c84e59c5eb5c5d7cc5484bf6e069c63925
|
[
"Apache-2.0"
] | 1
|
2022-02-05T21:20:07.000Z
|
2022-02-05T21:20:07.000Z
|
lib/rustler_precompiled.ex
|
philss/baked_nifs
|
bd3086c84e59c5eb5c5d7cc5484bf6e069c63925
|
[
"Apache-2.0"
] | null | null | null |
lib/rustler_precompiled.ex
|
philss/baked_nifs
|
bd3086c84e59c5eb5c5d7cc5484bf6e069c63925
|
[
"Apache-2.0"
] | null | null | null |
defmodule RustlerPrecompiled do
@moduledoc """
Download and use precompiled NIFs safely with checksums.
Rustler Precompiled is a tool for library maintainers that rely on Rustler.
It helps by removing the need to have the Rust compiler installed in the
user's machine.
Check the [Precompilation Guide](PRECOMPILATION_GUIDE.md) for details.
## Example
defmodule MyNative do
use RustlerPrecompiled,
otp_app: :my_app,
crate: "my_app_nif",
base_url: "https://github.com/me/my_project/releases/download/v0.1.0",
version: "0.1.0"
end
## Options
* `:otp_app` - The OTP app name that the dynamic library will be loaded from.
* `:crate` - The name of Rust crate if different from the `:otp_app`. This is optional.
* `:base_url` - A valid URL that is used as base path for the NIF file.
* `:version` - The version of precompiled assets (it is part of the NIF filename).
* `:force_build` - Force the build with `Rustler`. This is `false` by default, but
if your `:version` is a pre-release (like "2.1.0-dev"), this option will always
be set `true`.
You can also configure this option by setting an application env like this:
config :rustler_precompiled, :force_build, your_otp_app: true
It is important to add the ":rustler" package to your dependencies in order to force
the build. To do that, just add it to your `mix.exs` file:
{:rustler, ">= 0.0.0", optional: true}
In case "force build" is used, all options except `:base_url`, `:version` and `:force_build`
are going to be passed down to `Rustler`.
So if you need to configure the build, check the `Rustler` options.
"""
defmacro __using__(opts) do
force =
if Code.ensure_loaded?(Rustler) do
quote do
use Rustler, only_rustler_opts
end
else
quote do
raise "Rustler dependency is needed to force the build. " <>
"Add it to your `mix.exs` file: `{:rustler, \">= 0.0.0\", optional: true}`"
end
end
quote do
require Logger
opts = unquote(opts)
otp_app = Keyword.fetch!(opts, :otp_app)
opts =
Keyword.put_new(
opts,
:force_build,
Application.compile_env(:rustler_precompiled, [:force_build, otp_app])
)
case RustlerPrecompiled.__using__(__MODULE__, opts) do
{:force_build, only_rustler_opts} ->
unquote(force)
{:ok, config} ->
@on_load :load_rustler_precompiled
@rustler_precompiled_load_from config.load_from
@rustler_precompiled_load_data config.load_data
@doc false
def load_rustler_precompiled do
# Remove any old modules that may be loaded so we don't get
# {:error, {:upgrade, 'Upgrade not supported by this NIF library.'}}
:code.purge(__MODULE__)
{otp_app, path} = @rustler_precompiled_load_from
load_path =
otp_app
|> Application.app_dir(path)
|> to_charlist()
:erlang.load_nif(load_path, @rustler_precompiled_load_data)
end
{:error, precomp_error} ->
raise precomp_error
end
end
end
# A helper function to extract the logic from __using__ macro.
@doc false
def __using__(module, opts) do
config =
opts
|> Keyword.put_new(:module, module)
|> RustlerPrecompiled.Config.new()
if config.force_build? do
rustler_opts = Keyword.drop(opts, [:base_url, :version, :force_build])
{:force_build, rustler_opts}
else
with {:error, precomp_error} <- RustlerPrecompiled.download_or_reuse_nif_file(config) do
message = """
Error while downloading precompiled NIF: #{precomp_error}.
You can force the project to build from scratch with:
config :rustler_precompiled, :force_build, #{config.otp_app}: true
In order to force the build, you also need to add Rustler as a dependency in your `mix.exs`:
{:rustler, ">= 0.0.0", optional: true}
"""
{:error, message}
end
end
end
## Implementation below
alias RustlerPrecompiled.Config
require Logger
@available_targets ~w(
aarch64-apple-darwin
x86_64-apple-darwin
x86_64-unknown-linux-gnu
x86_64-unknown-linux-musl
arm-unknown-linux-gnueabihf
aarch64-unknown-linux-gnu
x86_64-pc-windows-msvc
x86_64-pc-windows-gnu
)
@available_nif_versions ~w(2.14 2.15 2.16)
@checksum_algo :sha256
@checksum_algorithms [@checksum_algo]
@native_dir "priv/native"
@doc """
List all default available targets.
"""
def available_targets do
for target_triple <- @available_targets, nif_version <- @available_nif_versions do
"nif-#{nif_version}-#{target_triple}"
end
end
@doc """
Returns URLs for NIFs based on its module name.
The module name is the one that defined the NIF and this information
is stored in a metadata file.
"""
def available_nif_urls(nif_module) when is_atom(nif_module) do
metadata =
nif_module
|> metadata_file()
|> read_map_from_file()
case metadata do
%{base_url: base_url, basename: basename, version: version} ->
for target <- available_targets() do
# We need to build again the name because each arch is different.
lib_name = "#{lib_prefix(target)}#{basename}-v#{version}-#{target}"
tar_gz_file_url(base_url, lib_name_with_ext(target, lib_name))
end
_ ->
raise "metadata about current target for the module #{inspect(nif_module)} is not available. " <>
"Please compile the project again with: `mix compile --force`"
end
end
@doc """
Returns the file URL to be downloaded for current target.
It receives the NIF module.
"""
def current_target_nif_url(nif_module) when is_atom(nif_module) do
metadata =
nif_module
|> metadata_file()
|> read_map_from_file()
case metadata do
%{base_url: base_url, file_name: file_name} ->
tar_gz_file_url(base_url, file_name)
_ ->
raise "metadata about current target for the module #{inspect(nif_module)} is not available. " <>
"Please compile the project again with: `mix compile --force`"
end
end
@doc """
Returns the target triple for download or compile and load.
This function is translating and adding more info to the system
architecture returned by Elixir/Erlang to one used by Rust.
The returned string has the following format:
"nif-NIF_VERSION-ARCHITECTURE-VENDOR-OS-ABI"
## Examples
iex> RustlerPrecompiled.target()
{:ok, "nif-2.16-x86_64-unknown-linux-gnu"}
iex> RustlerPrecompiled.target()
{:ok, "nif-2.15-aarch64-apple-darwin"}
"""
def target(config \\ target_config()) do
arch_os =
case config.os_type do
{:unix, _} ->
config.target_system
|> normalize_arch_os()
|> system_arch_to_string()
{:win32, _} ->
existing_target =
config.target_system
|> system_arch_to_string()
# For when someone is setting "TARGET_*" vars on Windows
if existing_target in @available_targets do
existing_target
else
# 32 or 64 bits
arch =
case config.word_size do
4 -> "i686"
8 -> "x86_64"
_ -> "unknown"
end
config.target_system
|> Map.put_new(:arch, arch)
|> Map.put_new(:vendor, "pc")
|> Map.put_new(:os, "windows")
|> Map.put_new(:abi, "msvc")
|> system_arch_to_string()
end
end
cond do
arch_os not in @available_targets ->
{:error,
"precompiled NIF is not available for this target: #{inspect(arch_os)}.\n" <>
"The available targets are:\n - #{Enum.join(@available_targets, "\n - ")}"}
config.nif_version not in @available_nif_versions ->
{:error,
"precompiled NIF is not available for this NIF version: #{inspect(config.nif_version)}.\n" <>
"The available NIF versions are:\n - #{Enum.join(@available_nif_versions, "\n - ")}"}
true ->
{:ok, "nif-#{config.nif_version}-#{arch_os}"}
end
end
# Gathers everything `target/1` needs to decide which precompiled artifact
# fits this machine: OS family, (possibly env-overridden) arch triple,
# word size and the best-matching NIF ABI version.
defp target_config do
  current_nif_version = List.to_string(:erlang.system_info(:nif_version))

  nif_version =
    case find_compatible_nif_version(current_nif_version, @available_nif_versions) do
      {:ok, vsn} -> vsn
      # In case of error, keep the current version so we can tell the user.
      :error -> current_nif_version
    end

  %{
    os_type: :os.type(),
    target_system: maybe_override_with_env_vars(system_arch()),
    word_size: :erlang.system_info(:wordsize),
    nif_version: nif_version
  }
end
# In case one is using this lib in a newer OTP version, we try to
# find the latest compatible NIF version.
@doc false
def find_compatible_nif_version(vsn, available) do
  if vsn in available do
    {:ok, vsn}
  else
    [major, minor | _] = parse_version(vsn)

    # Keep only versions with the same major and a minor not newer than ours;
    # non-matching shapes are filtered out by the comprehension pattern.
    candidates =
      for [^major, available_minor | _] = parsed <- Enum.map(available, &parse_version/1),
          available_minor <= minor,
          do: parsed

    case candidates do
      [] -> :error
      _ -> {:ok, candidates |> Enum.max() |> Enum.join(".")}
    end
  end
end

# "2.16" -> [2, 16]
defp parse_version(vsn) do
  vsn |> String.split(".") |> Enum.map(&String.to_integer/1)
end
# Returns a map with `:arch`, `:vendor`, `:os` and maybe `:abi`.
defp system_arch do
  parts =
    :erlang.system_info(:system_architecture)
    |> List.to_string()
    |> String.split("-")

  keys =
    case parts do
      [_, _, _, _] ->
        [:arch, :vendor, :os, :abi]

      [_, _, _] ->
        [:arch, :vendor, :os]

      _ ->
        # It's too complicated to find out, and we won't support this for now.
        []
    end

  keys |> Enum.zip(parts) |> Map.new()
end
# The idea is to support systems like Nerves.
# See: https://hexdocs.pm/nerves/compiling-non-beam-code.html#target-cpu-arch-os-and-abi
@doc false
def maybe_override_with_env_vars(original_sys_arch, get_env \\ &System.get_env/1) do
  # Collect only the TARGET_* variables that are actually set.
  overrides =
    for {key, env_key} <- [
          arch: "TARGET_ARCH",
          vendor: "TARGET_VENDOR",
          os: "TARGET_OS",
          abi: "TARGET_ABI"
        ],
        env_value = get_env.(env_key),
        into: %{},
        do: {key, env_value}

  updated_system_arch = Map.merge(original_sys_arch, overrides)

  # Only replace vendor if it remains the same while some other env var
  # changed the config.
  if original_sys_arch != updated_system_arch and
       original_sys_arch.vendor == updated_system_arch.vendor do
    Map.put(updated_system_arch, :vendor, "unknown")
  else
    updated_system_arch
  end
end
# Translates Erlang-reported arch/vendor/os names into the vocabulary used
# by the Rust target triples in @available_targets.
defp normalize_arch_os(target_system) do
  cond do
    target_system.os =~ "darwin" ->
      # Apple reports "arm"; the Rust triple calls it "aarch64".
      arch = if target_system.arch == "arm", do: "aarch64", else: target_system.arch
      %{target_system | arch: arch, os: "darwin"}

    target_system.os =~ "linux" ->
      arch = if target_system.arch == "amd64", do: "x86_64", else: target_system.arch
      vendor = if target_system.vendor == "pc", do: "unknown", else: target_system.vendor
      %{target_system | arch: arch, vendor: vendor}

    true ->
      target_system
  end
end
defp system_arch_to_string(system_arch) do
values =
for key <- [:arch, :vendor, :os, :abi],
value = system_arch[key],
do: value
Enum.join(values, "-")
end
# Perform the download or load of the precompiled NIF
# It will look in the "priv/native/otp_app" first, and if
# that file doesn't exist, it will try to fetch from cache.
# In case there is no valid cached file, then it will try
# to download the NIF from the provided base URL.
@doc false
def download_or_reuse_nif_file(%Config{} = config) do
name = config.otp_app
version = config.version
native_dir = Application.app_dir(name, @native_dir)
# NOTE: this `cache_base_dir` is a "private" option used only in tests.
cache_dir = cache_dir(config.base_cache_dir, "precompiled_nifs")
with {:ok, target} <- target() do
basename = config.crate || name
lib_name = "#{lib_prefix(target)}#{basename}-v#{version}-#{target}"
file_name = lib_name_with_ext(target, lib_name)
cached_tar_gz = Path.join(cache_dir, "#{file_name}.tar.gz")
lib_file = Path.join(native_dir, file_name)
base_url = config.base_url
nif_module = config.module
metadata = %{
otp_app: name,
crate: config.crate,
cached_tar_gz: cached_tar_gz,
base_url: base_url,
basename: basename,
lib_name: lib_name,
file_name: file_name,
target: target,
version: version
}
write_metadata(nif_module, metadata)
result = %{
load?: true,
load_from: {name, Path.join("priv/native", lib_name)},
load_data: config.load_data
}
# TODO: add option to only write metadata
cond do
File.exists?(cached_tar_gz) ->
# Remove existing NIF file so we don't have processes using it.
# See: https://github.com/rusterlium/rustler/blob/46494d261cbedd3c798f584459e42ab7ee6ea1f4/rustler_mix/lib/rustler/compiler.ex#L134
File.rm(lib_file)
with :ok <- check_file_integrity(cached_tar_gz, nif_module),
:ok <- :erl_tar.extract(cached_tar_gz, [:compressed, cwd: Path.dirname(lib_file)]) do
Logger.debug("Copying NIF from cache and extracting to #{lib_file}")
{:ok, result}
end
true ->
dirname = Path.dirname(lib_file)
with :ok <- File.mkdir_p(cache_dir),
:ok <- File.mkdir_p(dirname),
{:ok, tar_gz} <- download_tar_gz(base_url, lib_name, cached_tar_gz),
:ok <- File.write(cached_tar_gz, tar_gz),
:ok <- check_file_integrity(cached_tar_gz, nif_module),
:ok <-
:erl_tar.extract({:binary, tar_gz}, [:compressed, cwd: Path.dirname(lib_file)]) do
Logger.debug("NIF cached at #{cached_tar_gz} and extracted to #{lib_file}")
{:ok, result}
end
end
end
end
# Loads the on-disk checksum map ("basename" => "algo:hash") for `nif_module`.
defp checksum_map(nif_module) when is_atom(nif_module) do
  read_map_from_file(checksum_file(nif_module))
end

# Verifies `file_path` against the checksums recorded for `nif_module`.
defp check_file_integrity(file_path, nif_module) when is_atom(nif_module) do
  check_integrity_from_map(checksum_map(nif_module), file_path, nif_module)
end
# It receives the map of %{ "filename" => "algo:checksum" } with the file path
@doc false
def check_integrity_from_map(checksum_map, file_path, nif_module) do
  with {:ok, {algo, hash}} <- find_checksum(checksum_map, file_path, nif_module),
       :ok <- validate_checksum_algo(algo) do
    compare_checksum(file_path, algo, hash)
  end
end
# Looks up the "algo:hash" entry for `file_path`'s basename and splits it
# into `{:ok, {algo_atom, hash}}`. Uses `to_existing_atom` so the checksum
# file cannot mint arbitrary atoms.
defp find_checksum(checksum_map, file_path, nif_module) do
  with {:ok, algo_with_hash} <- Map.fetch(checksum_map, Path.basename(file_path)) do
    [algo, hash] = String.split(algo_with_hash, ":")
    {:ok, {String.to_existing_atom(algo), hash}}
  else
    :error ->
      {:error,
       "the precompiled NIF file does not exist in the checksum file. " <>
         "Please consider run: `mix rustler_precompiled.download #{inspect(nif_module)} --only-local` to generate the checksum file."}
  end
end
# Accepts only checksum algorithms this module knows how to verify.
defp validate_checksum_algo(algo) when algo in @checksum_algorithms, do: :ok

defp validate_checksum_algo(algo) do
  {:error,
   "checksum algorithm is not supported: #{inspect(algo)}. " <>
     "The supported ones are:\n - #{Enum.join(@checksum_algorithms, "\n - ")}"}
end
# Hashes the file with `algo` and compares the lowercase hex digest against
# `expected_checksum`.
defp compare_checksum(file_path, algo, expected_checksum) do
  with {:ok, content} <- File.read(file_path) do
    file_hash =
      algo
      |> :crypto.hash(content)
      |> Base.encode16(case: :lower)

    if file_hash == expected_checksum do
      :ok
    else
      {:error, "the integrity check failed because the checksum of files does not match"}
    end
  else
    {:error, reason} ->
      {:error,
       "cannot read the file for checksum comparison: #{inspect(file_path)}. " <>
         "Reason: #{inspect(reason)}"}
  end
end
# Resolves the user cache directory for this library. Setting `MIX_XDG`
# forces XDG (Linux-style) base-directory resolution.
defp cache_dir(sub_dir) do
  os_opts = if System.get_env("MIX_XDG"), do: %{os: :linux}, else: %{}
  :filename.basedir(:user_cache, Path.join("rustler_precompiled", sub_dir), os_opts)
end

# This arity is only used in test context. It should be private because
# we can't provide this option in the `mix rustler_precompiled.download` task.
defp cache_dir(basedir, sub_dir) do
  if basedir, do: Path.join(basedir, sub_dir), else: cache_dir(sub_dir)
end
# Windows DLLs carry no "lib" prefix; every other target follows the Unix
# shared-library convention.
defp lib_prefix(target) do
  if windows_target?(target), do: "", else: "lib"
end

# Appends the platform-appropriate shared-library extension.
defp lib_name_with_ext(target, lib_name) do
  extension = if windows_target?(target), do: "dll", else: "so"
  "#{lib_name}.#{extension}"
end

defp windows_target?(target), do: String.contains?(target, "windows")
# Builds the full download URL for "<file_name>.tar.gz" under `base_url`,
# joining onto any path the base URL already carries.
defp tar_gz_file_url(base_url, file_name) do
  base_url
  |> URI.parse()
  |> Map.update!(:path, fn path -> Path.join(path || "", "#{file_name}.tar.gz") end)
  |> to_string()
end
# Resolves the artifact URL for `lib_name` on `target_name` and fetches it.
defp download_tar_gz(base_url, lib_name, target_name) do
  url = tar_gz_file_url(base_url, lib_name_with_ext(target_name, lib_name))
  download_nif_artifact(url)
end
defp download_nif_artifact(url) do
url = String.to_charlist(url)
Logger.debug("Downloading NIF from #{url}")
{:ok, _} = Application.ensure_all_started(:inets)
{:ok, _} = Application.ensure_all_started(:ssl)
if proxy = System.get_env("HTTP_PROXY") || System.get_env("http_proxy") do
Logger.debug("Using HTTP_PROXY: #{proxy}")
%{host: host, port: port} = URI.parse(proxy)
:httpc.set_options([{:proxy, {{String.to_charlist(host), port}, []}}])
end
if proxy = System.get_env("HTTPS_PROXY") || System.get_env("https_proxy") do
Logger.debug("Using HTTPS_PROXY: #{proxy}")
%{host: host, port: port} = URI.parse(proxy)
:httpc.set_options([{:https_proxy, {{String.to_charlist(host), port}, []}}])
end
# https://erlef.github.io/security-wg/secure_coding_and_deployment_hardening/inets
cacertfile = CAStore.file_path() |> String.to_charlist()
http_options = [
ssl: [
verify: :verify_peer,
cacertfile: cacertfile,
depth: 2,
customize_hostname_check: [
match_fun: :public_key.pkix_verify_hostname_match_fun(:https)
]
]
]
options = [body_format: :binary]
case :httpc.request(:get, {url, []}, http_options, options) do
{:ok, {{_, 200, _}, _headers, body}} ->
{:ok, body}
other ->
{:error, "couldn't fetch NIF from #{url}: #{inspect(other)}"}
end
end
# Download a list of files from URLs and calculate its checksum.
# Returns a list with details of the download and the checksum of each file.
@doc false
def download_nif_artifacts_with_checksums!(urls, options \\ []) do
ignore_unavailable? = Keyword.get(options, :ignore_unavailable, false)
tasks =
Task.async_stream(urls, fn url -> {url, download_nif_artifact(url)} end, timeout: :infinity)
cache_dir = cache_dir("precompiled_nifs")
:ok = File.mkdir_p(cache_dir)
Enum.flat_map(tasks, fn {:ok, result} ->
with {:download, {url, download_result}} <- {:download, result},
{:download_result, {:ok, body}} <- {:download_result, download_result},
hash <- :crypto.hash(@checksum_algo, body),
path <- Path.join(cache_dir, basename_from_url(url)),
{:file, :ok} <- {:file, File.write(path, body)} do
checksum = Base.encode16(hash, case: :lower)
Logger.debug(
"NIF cached at #{path} with checksum #{inspect(checksum)} (#{@checksum_algo})"
)
[
%{
url: url,
path: path,
checksum: checksum,
checksum_algo: @checksum_algo
}
]
else
{context, result} ->
if ignore_unavailable? && context in [:download, :download_result] do
Logger.debug(
"Skip an unavailable NIF artifact. " <>
"Context: #{inspect(context)}. Reason: #{inspect(result)}"
)
[]
else
raise "could not finish the download of NIF artifacts. " <>
"Context: #{inspect(context)}. Reason: #{inspect(result)}"
end
end
end)
end
# Last path segment of `url`, e.g. ".../libfoo.so.tar.gz" -> "libfoo.so.tar.gz".
defp basename_from_url(url) do
  parsed = URI.parse(url)

  parsed.path
  |> String.split("/")
  |> List.last()
end
# Reads an Elixir-term map from `file`, returning `%{}` when the file is
# missing or does not evaluate to a map.
# NOTE(review): `Code.eval_string/1` executes the file contents. These files
# are produced locally by this library, but never point this at untrusted input.
defp read_map_from_file(file) do
  case File.read(file) do
    {:ok, contents} ->
      case Code.eval_string(contents) do
        {%{} = map, _bindings} -> map
        _ -> %{}
      end

    _ ->
      %{}
  end
end
# Persists `metadata` for `nif_module`, skipping the write when the stored
# copy is already identical (avoids needless file churn).
defp write_metadata(nif_module, metadata) do
  file = metadata_file(nif_module)

  unless Map.equal?(metadata, read_map_from_file(file)) do
    :ok = file |> Path.dirname() |> File.mkdir_p()
    File.write!(file, inspect(metadata, limit: :infinity, pretty: true))
  end

  :ok
end

# Path of the per-module metadata file inside the shared cache directory.
defp metadata_file(nif_module) when is_atom(nif_module) do
  Path.join(cache_dir("metadata"), "metadata-#{nif_module}.exs")
end
# Write the checksum file with all NIFs available.
# It receives the module name and checksums.
@doc false
def write_checksum!(nif_module, checksums) when is_atom(nif_module) do
  metadata =
    nif_module
    |> metadata_file()
    |> read_map_from_file()

  case metadata do
    %{otp_app: _name} ->
      file = checksum_file(nif_module)

      # Map each downloaded artifact to a `basename => "algo:checksum"` pair.
      pairs =
        for %{path: path, checksum: checksum, checksum_algo: algo} <- checksums, into: %{} do
          basename = Path.basename(path)
          checksum = "#{algo}:#{checksum}"
          {basename, checksum}
        end

      lines =
        for {filename, checksum} <- Enum.sort(pairs) do
          # BUG FIX: interpolate the real file name; previously the literal
          # placeholder "(unknown)" was written as every map key, so the
          # generated checksum file mapped nothing to its hash and lookups
          # in find_checksum/3 could never succeed.
          ~s(  "#{filename}" => #{inspect(checksum, limit: :infinity)},\n)
        end

      File.write!(file, ["%{\n", lines, "}\n"])

    _ ->
      raise "could not find the OTP app for #{inspect(nif_module)} in the metadata file. " <>
              "Please compile the project again with: `mix compile --force`."
  end
end
# Path of the checksum manifest for `nif_module`,
# e.g. "checksum-Elixir.MyNative.exs".
defp checksum_file(nif_module) do
  # Saves the file in the project root.
  Path.join(File.cwd!(), "checksum-#{nif_module}.exs")
end
end
| 29.895753
| 141
| 0.617289
|
9391be1049cefe4d4b78d74b73689c54dcaeb4a3
| 3,160
|
ex
|
Elixir
|
lib/aws_codegen/post_service.ex
|
danfilip/aws-codegen
|
9d30a1079490f48afd7c817c3454562edb22315d
|
[
"Apache-2.0"
] | 38
|
2018-05-31T15:07:24.000Z
|
2022-03-08T23:43:15.000Z
|
lib/aws_codegen/post_service.ex
|
danfilip/aws-codegen
|
9d30a1079490f48afd7c817c3454562edb22315d
|
[
"Apache-2.0"
] | 61
|
2018-09-13T11:33:35.000Z
|
2022-02-22T15:25:37.000Z
|
lib/aws_codegen/post_service.ex
|
danfilip/aws-codegen
|
9d30a1079490f48afd7c817c3454562edb22315d
|
[
"Apache-2.0"
] | 31
|
2018-04-10T20:01:07.000Z
|
2022-02-14T18:43:59.000Z
|
defmodule AWS.CodeGen.PostService do
  alias AWS.CodeGen.Docstring
  alias AWS.CodeGen.Service

  defmodule Action do
    defstruct arity: nil,
              docstring: nil,
              function_name: nil,
              name: nil
  end

  # Per-protocol request/response configuration, keyed by the AWS "protocol"
  # metadata value. The :elixir / :erlang sub-maps hold language-specific
  # encode/decode snippets used by the code generator.
  @configuration %{
    "query" => %{
      content_type: "application/x-www-form-urlencoded",
      elixir: %{
        decode: "xml",
        encode: "query"
      },
      erlang: %{
        decode: "aws_util:decode_xml(Body)",
        encode: "aws_util:encode_query(Input)"
      }
    },
    "json" => %{
      content_type: "application/x-amz-json-",
      elixir: %{
        decode: "json",
        encode: "json"
      },
      erlang: %{
        decode: "jsx:decode(Body)",
        encode: "jsx:encode(Input)"
      }
    }
  }

  @doc """
  Load POST API service and documentation specifications from the
  `api_spec_path` and `doc_spec_path` files and convert them into a context
  that can be used to generate code for an AWS service. `language` must be
  `:elixir` or `:erlang`.
  """
  def load_context(language, %AWS.CodeGen.Spec{} = spec, endpoints_spec) do
    metadata = spec.api["metadata"]
    endpoint_prefix = metadata["endpointPrefix"]
    endpoint_info = endpoints_spec["services"][endpoint_prefix]

    # A service is "global" when its endpoint entry exists and is explicitly
    # not regionalized; only then does it carry a credential scope.
    is_global = not is_nil(endpoint_info) and not Map.get(endpoint_info, "isRegionalized", true)

    credential_scope =
      if is_global do
        endpoint_info["endpoints"]["aws-global"]["credentialScope"]["region"]
      end

    json_version = metadata["jsonVersion"]
    protocol = metadata["protocol"]

    # The JSON protocol embeds its version in the content type
    # ("application/x-amz-json-1.1"); other protocols use it verbatim.
    content_type =
      case protocol do
        "json" -> @configuration[protocol][:content_type] <> json_version
        _ -> @configuration[protocol][:content_type]
      end

    signing_name =
      case metadata["signingName"] do
        nil -> endpoint_prefix
        sn -> sn
      end

    %Service{
      abbreviation: metadata["serviceAbbreviation"],
      actions: collect_actions(language, spec.api, spec.doc),
      api_version: metadata["apiVersion"],
      credential_scope: credential_scope,
      content_type: content_type,
      docstring: Docstring.format(language, spec.doc["service"]),
      decode: Map.fetch!(@configuration[protocol][language], :decode),
      encode: Map.fetch!(@configuration[protocol][language], :encode),
      endpoint_prefix: endpoint_prefix,
      is_global: is_global,
      json_version: json_version,
      language: language,
      module_name: spec.module_name,
      protocol: protocol,
      signing_name: signing_name,
      signature_version: metadata["signatureVersion"],
      service_id: metadata["serviceId"],
      target_prefix: metadata["targetPrefix"]
    }
  end

  # Builds one %Action{} per operation in the API spec, sorted by the
  # snake_cased function name.
  defp collect_actions(language, api_spec, doc_spec) do
    actions =
      for {operation, _metadata} <- api_spec["operations"] do
        %Action{
          arity: 3,
          docstring: Docstring.format(language, doc_spec["operations"][operation]),
          function_name: AWS.CodeGen.Name.to_snake_case(operation),
          name: operation
        }
      end

    Enum.sort(actions, fn a, b -> a.function_name < b.function_name end)
  end
end
| 30.095238
| 96
| 0.633861
|
9391debf9379d9f514285fbc0a5c64225f9a7930
| 67
|
ex
|
Elixir
|
lib/docker_phx_web/views/layout_view.ex
|
beltranaceves/docker_phx
|
06a9cba602ccd4f054568b7a40ca36bedc9a3e27
|
[
"MIT"
] | null | null | null |
lib/docker_phx_web/views/layout_view.ex
|
beltranaceves/docker_phx
|
06a9cba602ccd4f054568b7a40ca36bedc9a3e27
|
[
"MIT"
] | null | null | null |
lib/docker_phx_web/views/layout_view.ex
|
beltranaceves/docker_phx
|
06a9cba602ccd4f054568b7a40ca36bedc9a3e27
|
[
"MIT"
] | null | null | null |
defmodule DockerPhxWeb.LayoutView do
  # Phoenix view module for the layout templates; all rendering behavior
  # comes from the `:view` definition in DockerPhxWeb (not visible here).
  use DockerPhxWeb, :view
end
| 16.75
| 36
| 0.820896
|
9391f712ff692b90ec68c07396982b1bf600b9eb
| 957
|
ex
|
Elixir
|
lib/status/node.ex
|
shinyscorpion/elixir-status
|
3af86483cbbca99295f88d152f2cefdd8259a89e
|
[
"MIT"
] | 2
|
2018-07-18T02:33:48.000Z
|
2020-01-12T03:45:32.000Z
|
lib/status/node.ex
|
shinyscorpion/elixir-status
|
3af86483cbbca99295f88d152f2cefdd8259a89e
|
[
"MIT"
] | null | null | null |
lib/status/node.ex
|
shinyscorpion/elixir-status
|
3af86483cbbca99295f88d152f2cefdd8259a89e
|
[
"MIT"
] | null | null | null |
defmodule Status.Node do
  @moduledoc false

  @doc """
  Reads `./assets/package.json` and returns `%{node: %{name => version}}`
  combining `dependencies` and `devDependencies` (a leading `^` is stripped
  from each version). Returns `%{}` when the file is missing or unparseable.
  """
  def dependencies do
    with true <- File.exists?("./assets/package.json"),
         {:ok, data} <- File.read("./assets/package.json"),
         {:ok, node_package} <- Jason.decode(data) do
      node_modules =
        node_package
        # Default to %{} so a package.json missing one of the sections no
        # longer crashes Map.merge/2 with a nil argument (the `else` branch
        # only catches non-matching `with` clauses, not raised errors).
        |> Map.get("devDependencies", %{})
        |> Map.merge(Map.get(node_package, "dependencies", %{}))
        # NOTE(review): String.to_atom/1 on package names creates atoms from
        # file input; package.json is developer-controlled here, but atoms
        # are never garbage-collected — confirm this input stays trusted.
        |> Map.new(fn {dep, v} -> {String.to_atom(dep), String.trim_leading(v, "^")} end)

      %{node: node_modules}
    else
      _ -> %{}
    end
  end

  @doc """
  Runs `yarn outdated` in `./assets` and returns `%{node: [package_name]}`.

  `yarn outdated` exits with status 1 exactly when outdated packages exist,
  which is the only case parsed; any other outcome (including a missing
  `yarn` binary, rescued as ErlangError) yields `%{}`.
  """
  def outdated do
    with true <- File.exists?("./assets/package.json"),
         {data, 1} <- System.cmd("yarn", ["outdated"], cd: "./assets") do
      names =
        data
        |> String.split(~r/\n/)
        # Drop the two header lines and the trailing summary/blank lines.
        |> Enum.slice(2..-3)
        # First whitespace-separated column is the package name.
        |> Enum.map(&List.first(String.split(&1, ~r/\ +/)))

      %{node: names}
    else
      _ -> %{}
    end
  rescue
    ErlangError -> %{}
  end
end
| 25.184211
| 90
| 0.516196
|
93925830fc691178d63290e757197a8706af2c29
| 5,299
|
exs
|
Elixir
|
implementations/elixir/ockam/ockam_node_web/.credo.exs
|
twittner/ockam
|
96eadf99da42f7c35539c6e29010a657c579ccba
|
[
"Apache-2.0"
] | 1,912
|
2019-01-10T14:17:00.000Z
|
2022-03-30T19:16:44.000Z
|
implementations/elixir/ockam/ockam_node_web/.credo.exs
|
twittner/ockam
|
96eadf99da42f7c35539c6e29010a657c579ccba
|
[
"Apache-2.0"
] | 1,473
|
2019-01-16T15:14:47.000Z
|
2022-03-31T23:44:50.000Z
|
implementations/elixir/ockam/ockam_node_web/.credo.exs
|
twittner/ockam
|
96eadf99da42f7c35539c6e29010a657c579ccba
|
[
"Apache-2.0"
] | 219
|
2019-01-11T03:35:13.000Z
|
2022-03-31T10:25:56.000Z
|
# This file contains the configuration for credo.
#
# It was first generated with `mix credo.gen.config` and then tweaked.
%{
configs: [
%{
name: "default",
# These are the files included in the analysis:
files: %{
included: [
"lib/",
"test/"
],
excluded: [
~r"/_build/",
~r"/deps/"
]
},
strict: false,
parse_timeout: 5000,
color: true,
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
#
## Consistency Checks
#
{Credo.Check.Consistency.ExceptionNames, []},
{Credo.Check.Consistency.LineEndings, []},
{Credo.Check.Consistency.ParameterPatternMatching, []},
{Credo.Check.Consistency.SpaceAroundOperators, []},
{Credo.Check.Consistency.SpaceInParentheses, []},
{Credo.Check.Consistency.TabsOrSpaces, []},
#
## Design Checks
#
{Credo.Check.Design.AliasUsage,
[priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
{Credo.Check.Design.TagTODO, false},
{Credo.Check.Design.TagFIXME, []},
#
## Readability Checks
#
{Credo.Check.Readability.AliasOrder, []},
{Credo.Check.Readability.FunctionNames, []},
{Credo.Check.Readability.LargeNumbers, []},
{Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
{Credo.Check.Readability.ModuleAttributeNames, []},
{Credo.Check.Readability.ModuleDoc, []},
{Credo.Check.Readability.ModuleNames, []},
{Credo.Check.Readability.ParenthesesInCondition, []},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
{Credo.Check.Readability.PredicateFunctionNames, []},
{Credo.Check.Readability.PreferImplicitTry, []},
{Credo.Check.Readability.RedundantBlankLines, []},
{Credo.Check.Readability.Semicolons, []},
{Credo.Check.Readability.SpaceAfterCommas, []},
{Credo.Check.Readability.StringSigils, []},
{Credo.Check.Readability.TrailingBlankLine, []},
{Credo.Check.Readability.TrailingWhiteSpace, []},
{Credo.Check.Readability.UnnecessaryAliasExpansion, []},
{Credo.Check.Readability.VariableNames, []},
#
## Refactoring Opportunities
#
{Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, []},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.LongQuoteBlocks, []},
{Credo.Check.Refactor.MapInto, false},
{Credo.Check.Refactor.MatchInCondition, []},
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
{Credo.Check.Refactor.Nesting, []},
{Credo.Check.Refactor.UnlessWithElse, []},
{Credo.Check.Refactor.WithClauses, []},
#
## Warnings
#
{Credo.Check.Warning.BoolOperationOnSameValues, []},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
{Credo.Check.Warning.IExPry, []},
{Credo.Check.Warning.IoInspect, []},
{Credo.Check.Warning.LazyLogging, false},
{Credo.Check.Warning.MixEnv, false},
{Credo.Check.Warning.OperationOnSameValues, []},
{Credo.Check.Warning.OperationWithConstantResult, []},
{Credo.Check.Warning.RaiseInsideRescue, []},
{Credo.Check.Warning.UnusedEnumOperation, []},
{Credo.Check.Warning.UnusedFileOperation, []},
{Credo.Check.Warning.UnusedKeywordOperation, []},
{Credo.Check.Warning.UnusedListOperation, []},
{Credo.Check.Warning.UnusedPathOperation, []},
{Credo.Check.Warning.UnusedRegexOperation, []},
{Credo.Check.Warning.UnusedStringOperation, []},
{Credo.Check.Warning.UnusedTupleOperation, []},
{Credo.Check.Warning.UnsafeExec, []},
#
## Controversial and experimental checks (opt-in, replace `false` with `[]`)
#
{Credo.Check.Readability.StrictModuleLayout, []},
{Credo.Check.Consistency.MultiAliasImportRequireUse, false},
{Credo.Check.Consistency.UnusedVariableNames, []},
{Credo.Check.Design.DuplicatedCode, false},
{Credo.Check.Readability.AliasAs, false},
{Credo.Check.Readability.MultiAlias, []},
{Credo.Check.Readability.Specs, false},
{Credo.Check.Readability.SinglePipe, []},
{Credo.Check.Readability.WithCustomTaggedTuple, []},
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, []},
{Credo.Check.Refactor.DoubleBooleanNegation, []},
{Credo.Check.Refactor.ModuleDependencies, [max_deps: 15]},
{Credo.Check.Refactor.NegatedIsNil, []},
{Credo.Check.Refactor.PipeChainStart, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.LeakyEnvironment, false},
{Credo.Check.Warning.MapGetUnsafePass, []},
{Credo.Check.Warning.UnsafeToAtom, []}
#
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
| 38.398551
| 84
| 0.615777
|
9392a80e3e7ff1be74e36f0ebcdb6195344e5a1b
| 2,595
|
ex
|
Elixir
|
lib/medic/checks/chromedriver.ex
|
eahanson/medic
|
01f35f2aa22e7120cf1bd492e6e5400a29224791
|
[
"MIT"
] | 3
|
2021-06-18T18:42:35.000Z
|
2022-02-09T01:54:58.000Z
|
lib/medic/checks/chromedriver.ex
|
eahanson/medic
|
01f35f2aa22e7120cf1bd492e6e5400a29224791
|
[
"MIT"
] | 3
|
2021-06-17T19:02:32.000Z
|
2021-06-17T19:44:35.000Z
|
lib/medic/checks/chromedriver.ex
|
eahanson/medic
|
01f35f2aa22e7120cf1bd492e6e5400a29224791
|
[
"MIT"
] | 1
|
2022-03-10T19:16:14.000Z
|
2022-03-10T19:16:14.000Z
|
defmodule Medic.Checks.Chromedriver do
  @moduledoc """
  Checks that Chrome and Chromedriver are installed, version-matched, and
  that Chromedriver is allowed to run in the MacOS quarantine sandbox.

  ## Examples

      {Check.Chromedriver, :unquarantined?}
      {Check.Chromedriver, :versions_match?}
  """
  alias Medic.Etc

  @doc """
  Checks to make sure that Google Chrome is installed.
  """
  @spec chrome_installed?() :: Medic.Check.check_return_t()
  def chrome_installed? do
    case File.dir?("/Applications/Google Chrome.app") do
      true -> :ok
      false -> {:error, "Chrome not installed", "brew install --cask google-chrome"}
    end
  end

  @doc """
  Checks that chromedriver is installed, and has not been quarantined by the
  MacOS security sandbox.
  """
  @spec unquarantined?() :: Medic.Check.check_return_t()
  def unquarantined? do
    # Any failing step falls through `with` and is returned unchanged.
    with {:ok, path} <- chromedriver_path(),
         {:ok, attrs} <- xattrs(path) do
      quarantine_state(attrs)
    end
  end

  @doc """
  Checks that chromedriver matches the installed version of Chrome.
  """
  @spec versions_match?() :: Medic.Check.check_return_t()
  def versions_match? do
    with {:ok, chromedriver_path} <- chromedriver_path() do
      chromedriver = Etc.application_version(chromedriver_path, "-v")

      chrome =
        Etc.application_version(
          "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome",
          "--version"
        )

      if chromedriver == chrome do
        :ok
      else
        {:error, "Chrome and Chromedriver version are mismatched",
         """
         # Please make sure the installed ChromeDriver version matches your Chrome browser's version.
         # (Wallaby often fails with 'invalid session id' if the versions differ.)
         # Chromedriver : #{chromedriver}
         # Chrome : #{chrome}
         """}
      end
    end
  end

  # Locates the chromedriver binary via `command -v`.
  defp chromedriver_path do
    case System.cmd("command", ["-v", "chromedriver"]) do
      {path, 0} ->
        {:ok, String.trim(path)}

      {output, _} ->
        {:error, "chromedriver was not found\n#{output}", "brew install --cask chromedriver-beta"}
    end
  end

  # Lists the extended attributes of the binary (MacOS-only tool).
  defp xattrs(path) do
    case System.cmd("xattr", [path]) do
      {output, 0} -> {:ok, output}
      {output, _} -> {:error, "unable to find chromedriver xattrs\n#{output}", "# are you on a mac?"}
    end
  end

  # The quarantine xattr marks binaries Gatekeeper will refuse to run.
  defp quarantine_state(attrs) do
    if String.contains?(attrs, "com.apple.quarantine") do
      {:error, "chromedriver is quarantined by the MacOS security sandbox",
       "xattr -d com.apple.quarantine $(command -v chromedriver)"}
    else
      :ok
    end
  end
end
| 30.892857
| 140
| 0.634297
|
9392d63d2fef9f9285dc6e52e64e538cee2f5791
| 2,863
|
ex
|
Elixir
|
clients/script/lib/google_api/script/v1/model/value.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/script/lib/google_api/script/v1/model/value.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/script/lib/google_api/script/v1/model/value.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Script.V1.Model.Value do
  @moduledoc """
  `Value` represents a dynamically typed value which is the outcome of an
  executed script
  Based on Value at:
  google3/apps/maestro/api/struct.proto?q=message%5c%20Value
  ## Attributes
  * `boolValue` (*type:* `boolean()`, *default:* `nil`) - Represents a boolean value.
  * `bytesValue` (*type:* `String.t`, *default:* `nil`) - Represents raw byte values.
  * `dateValue` (*type:* `String.t`, *default:* `nil`) - Represents a date in ms since the epoch.
  * `listValue` (*type:* `GoogleApi.Script.V1.Model.ListValue.t`, *default:* `nil`) - Represents a repeated `Value`.
  * `nullValue` (*type:* `String.t`, *default:* `nil`) - Represents a null value.
  * `numberValue` (*type:* `float()`, *default:* `nil`) - Represents a double value.
  * `protoValue` (*type:* `map()`, *default:* `nil`) - Represents a structured proto value.
  * `stringValue` (*type:* `String.t`, *default:* `nil`) - Represents a string value.
  * `structValue` (*type:* `GoogleApi.Script.V1.Model.Struct.t`, *default:* `nil`) - Represents a structured value.
  """
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :boolValue => boolean(),
          :bytesValue => String.t(),
          :dateValue => String.t(),
          :listValue => GoogleApi.Script.V1.Model.ListValue.t(),
          :nullValue => String.t(),
          :numberValue => float(),
          :protoValue => map(),
          :stringValue => String.t(),
          :structValue => GoogleApi.Script.V1.Model.Struct.t()
        }

  # One-of style: exactly one of these fields is expected to carry the value
  # (see @moduledoc); nested model fields declare their decoder via :as.
  field(:boolValue)
  field(:bytesValue)
  field(:dateValue)
  field(:listValue, as: GoogleApi.Script.V1.Model.ListValue)
  field(:nullValue)
  field(:numberValue)
  field(:protoValue, type: :map)
  field(:stringValue)
  field(:structValue, as: GoogleApi.Script.V1.Model.Struct)
end

# Auto-generated Poison hooks delegating to the Gax model machinery.
defimpl Poison.Decoder, for: GoogleApi.Script.V1.Model.Value do
  def decode(value, options) do
    GoogleApi.Script.V1.Model.Value.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Script.V1.Model.Value do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.689189
| 118
| 0.677611
|
9392dbb91ce12433f3afc0afdce3f106bc9f3b06
| 1,773
|
ex
|
Elixir
|
src/lists.ex
|
James-P-D/ElixirDump
|
7e03958e2fc16152eeb0d3f291541d6ae83b5c13
|
[
"MIT"
] | null | null | null |
src/lists.ex
|
James-P-D/ElixirDump
|
7e03958e2fc16152eeb0d3f291541d6ae83b5c13
|
[
"MIT"
] | null | null | null |
src/lists.ex
|
James-P-D/ElixirDump
|
7e03958e2fc16152eeb0d3f291541d6ae83b5c13
|
[
"MIT"
] | null | null | null |
# cd("C:\\Users\\jdorr\\Desktop\\Dev\\ElixirDump\\src")
# c("lists.ex")
# M.main
defmodule M do
  @moduledoc """
  Scratch/demo module exercising basic Elixir list operations:
  concatenation, subtraction, membership, head/tail destructuring,
  `IO.inspect` charlist options, `List` helpers and a recursive printer.
  """

  # Entry point: runs all of the list demonstrations.
  def main do
    lists_stuff()
  end

  # Walks through list operations, printing results as it goes.
  def lists_stuff do
    list1 = [1, 2, 3]
    list2 = [4, 5, 6]
    list3 = list1 ++ list2
    list4 = list3 -- list1
    IO.puts 5 in list4
    [head | tail] = list3
    IO.puts "Head: #{head}" # outputs "1"
    IO.puts "Tail: #{tail}" # small ints interpolate as control characters
    IO.write "Tail: "
    IO.inspect tail
    IO.puts "-------------------------------------------------------"
    IO.puts "First inspect on 65, 66, 67"
    IO.inspect [65, 66, 67] # outputs "ABC"
    IO.puts "Second inspect on 65, 66, 67"
    # FIX: the inspect option is :charlists (renamed from :char_lists in
    # Elixir 1.3); with the old key the list still printed as "ABC" instead
    # of the intended numeric form.
    IO.inspect [65, 66, 67], charlists: :as_lists # outputs "[65, 66, 67]"
    IO.puts "-------------------------------------------------------"
    Enum.each list3, fn item ->
      IO.puts item
    end
    IO.puts "-------------------------------------------------------"
    display_list(list3)
    IO.puts "-------------------------------------------------------"
    display_list(List.delete(list3, 4))
    IO.puts "-------------------------------------------------------"
    display_list(List.delete_at(list3, 1))
    IO.puts "-------------------------------------------------------"
    display_list(List.insert_at(list3, 1, 12.34))
    IO.puts "-------------------------------------------------------"
    IO.puts List.first(list3)
    IO.puts List.last(list3)
    IO.puts "-------------------------------------------------------"
    # FIX: underscore prefix silences the "unused variable" compiler warning;
    # the keyword list is still the function's return value, as before.
    _tuple_list = [name: "James", age: 38] # Remember the spaces after the colon (:)
  end

  # Prints each element of a list on its own line, recursively.
  def display_list([head | tail]) do
    IO.puts(head)
    display_list(tail)
  end

  # Base case: nothing left to print.
  def display_list([]), do: nil
end
| 24.971831
| 83
| 0.413424
|
9392dc3cd3109aaa1caf151bfbcef8ef1bba07e4
| 1,123
|
ex
|
Elixir
|
test/support/channel_case.ex
|
rzcastilho/mockatron
|
237b2bad3e1bf167a6ebac218c7a46b74a9063ee
|
[
"MIT"
] | 1
|
2018-12-13T16:52:22.000Z
|
2018-12-13T16:52:22.000Z
|
test/support/channel_case.ex
|
rzcastilho/mockatron
|
237b2bad3e1bf167a6ebac218c7a46b74a9063ee
|
[
"MIT"
] | 28
|
2019-07-04T08:42:33.000Z
|
2022-03-28T08:24:07.000Z
|
test/support/channel_case.ex
|
rzcastilho/mockatron
|
237b2bad3e1bf167a6ebac218c7a46b74a9063ee
|
[
"MIT"
] | null | null | null |
defmodule MockatronWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.
  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.
  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use MockatronWeb.ChannelCase, async: true`, although
  this option is not recommended for other databases.
  """
  use ExUnit.CaseTemplate

  # Code injected into every test module that `use`s this case template.
  using do
    quote do
      # Import conveniences for testing with channels
      import Phoenix.ChannelTest
      import MockatronWeb.ChannelCase
      # The default endpoint for testing
      @endpoint MockatronWeb.Endpoint
    end
  end

  # Per-test setup: check out a sandboxed DB connection; for non-async
  # tests, share that connection with any process spawned by the test.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Mockatron.Repo)
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Mockatron.Repo, {:shared, self()})
    end
    :ok
  end
end
| 27.390244
| 71
| 0.730187
|
9392ddadd82e110d598cedeaa35e6921cd435d48
| 1,496
|
exs
|
Elixir
|
mix.exs
|
igorgbr/faker
|
c10d09218a1a412775c42fe99fa7a6bea6d8afcc
|
[
"MIT"
] | 540
|
2015-01-05T16:31:49.000Z
|
2019-09-25T00:40:27.000Z
|
mix.exs
|
igorgbr/faker
|
c10d09218a1a412775c42fe99fa7a6bea6d8afcc
|
[
"MIT"
] | 172
|
2015-01-06T03:55:17.000Z
|
2019-10-03T12:58:02.000Z
|
mix.exs
|
delmendo/faker
|
180f08380080ec9d95b278b96bc37a0e97a2b89e
|
[
"MIT"
] | 163
|
2015-01-05T21:24:54.000Z
|
2019-10-03T07:59:42.000Z
|
defmodule Faker.Mixfile do
  use Mix.Project

  @source_url "https://github.com/elixirs/faker"
  @version "0.17.0"

  # Mix project definition for the Faker library.
  def project do
    [
      app: :faker,
      version: @version,
      elixir: "~> 1.6",
      description: "Faker is a pure Elixir library for generating fake data.",
      package: package(),
      name: "Faker",
      deps: deps(),
      docs: docs(),
      dialyzer: dialyzer()
    ]
  end

  # OTP application configuration; :crypto backs the random value generators.
  def application do
    [
      applications: [:crypto],
      env: env()
    ]
  end

  # Default application environment.
  defp env do
    [
      locale: :en,
      country: nil,
      random_module: Faker.Random.Elixir
    ]
  end

  # Dialyzer flags for the :dialyzer project entry.
  defp dialyzer do
    [
      flags: [
        :error_handling,
        :race_conditions,
        :underspecs
      ]
    ]
  end

  # Development/test-only tooling dependencies.
  defp deps do
    [
      {:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
      {:earmark, ">= 0.0.0", only: :dev, runtime: false},
      {:credo, ">= 0.0.0", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.0", only: [:dev], runtime: false}
    ]
  end

  # ExDoc configuration.
  defp docs do
    [
      main: "readme",
      extras: ["CHANGELOG.md", "README.md"],
      skip_undefined_reference_warnings_on: ["CHANGELOG.md"],
      source_url: @source_url,
      source_ref: "v#{@version}"
    ]
  end

  # Hex package metadata.
  defp package do
    %{
      maintainers: ["Anthony Smith", "Igor Kapkov", "Toby Hinloopen", "Vitor Oliveira"],
      files: ["lib", "mix.exs", "mix.lock", "README.md", "LICENSE", "CHANGELOG.md"],
      licenses: ["MIT"],
      links: %{"GitHub" => @source_url}
    }
  end
end
| 21.371429
| 88
| 0.534091
|
9392f967990ea3c4125bb7e7e4b1ecdc462e6f90
| 88
|
ex
|
Elixir
|
test/unit/fixtures/compiler/module_def_aggregators/component/module_3.ex
|
gregjohnsonsaltaire/hologram
|
aa8e9ea0d599def864c263cc37cc8ee31f02ac4a
|
[
"MIT"
] | 40
|
2022-01-19T20:27:36.000Z
|
2022-03-31T18:17:41.000Z
|
test/unit/fixtures/compiler/module_def_aggregators/component/module_3.ex
|
gregjohnsonsaltaire/hologram
|
aa8e9ea0d599def864c263cc37cc8ee31f02ac4a
|
[
"MIT"
] | 42
|
2022-02-03T22:52:43.000Z
|
2022-03-26T20:57:32.000Z
|
test/unit/fixtures/compiler/module_def_aggregators/component/module_3.ex
|
gregjohnsonsaltaire/hologram
|
aa8e9ea0d599def864c263cc37cc8ee31f02ac4a
|
[
"MIT"
] | 3
|
2022-02-10T04:00:37.000Z
|
2022-03-08T22:07:45.000Z
|
defmodule Hologram.Test.Fixtures.Compiler.ModuleDefAggregators.Component.Module3 do
  # Intentionally empty fixture module used by the compiler's
  # module-definition aggregator tests.
end
| 29.333333
| 83
| 0.886364
|
939310ce405a78738d225762ca4862a8528206fd
| 1,476
|
ex
|
Elixir
|
lib/plug/end_trace.ex
|
bforchhammer/spandex
|
caa094c28bb98ef4a16a96ae451d35576ba7fe91
|
[
"MIT"
] | 237
|
2018-08-29T17:48:17.000Z
|
2022-03-29T15:35:35.000Z
|
lib/plug/end_trace.ex
|
bforchhammer/spandex
|
caa094c28bb98ef4a16a96ae451d35576ba7fe91
|
[
"MIT"
] | 69
|
2018-08-23T14:28:25.000Z
|
2022-02-02T13:59:48.000Z
|
lib/plug/end_trace.ex
|
bforchhammer/spandex
|
caa094c28bb98ef4a16a96ae451d35576ba7fe91
|
[
"MIT"
] | 35
|
2018-08-27T13:33:34.000Z
|
2021-12-30T12:53:47.000Z
|
defmodule Spandex.Plug.EndTrace do
  @moduledoc """
  Finishes a trace, setting status and error based on the HTTP status.
  """
  @behaviour Plug

  alias Spandex.Plug.Utils

  # Option schema: :tracer is required; :tracer_opts defaults to [].
  @init_opts Optimal.schema(
               opts: [
                 tracer: :atom,
                 tracer_opts: :keyword
               ],
               defaults: [
                 tracer_opts: []
               ],
               required: [:tracer],
               describe: [
                 tracer: "The tracing module to be used to start the trace.",
                 tracer_opts: "Any opts to be passed to the tracer when starting or continuing the trace."
               ]
             )

  @doc """
  Accepts and validates opts for the plug, and underlying tracer.
  #{Optimal.Doc.document(@init_opts)}
  """
  @spec init(opts :: Keyword.t()) :: Keyword.t()
  def init(opts), do: Optimal.validate!(opts, @init_opts)

  @spec call(conn :: Plug.Conn.t(), _opts :: Keyword.t()) :: Plug.Conn.t()
  def call(conn, opts) do
    tracer = Keyword.get(opts, :tracer)
    tracer_opts = Keyword.get(opts, :tracer_opts)

    # Only touch the trace if this conn was actually marked as traced.
    if Utils.trace?(conn) do
      tracer.update_top_span(span_opts(conn.status, tracer_opts))
      tracer.finish_trace(tracer_opts)
    end

    conn
  end

  # 2xx/3xx responses record only the status; everything else is also
  # flagged as an error on the span.
  defp span_opts(status, tracer_opts) when status in 200..399 do
    Keyword.merge([http: [status_code: status]], tracer_opts)
  end

  defp span_opts(status, tracer_opts) do
    Keyword.merge(
      [http: [status_code: status], error: [error?: true]],
      tracer_opts
    )
  end
end
| 25.448276
| 106
| 0.557588
|
939317b4e43f2cb035baef607c367708c57d6155
| 558
|
ex
|
Elixir
|
lib/mix/tasks/ecto.setup.arango.ex
|
solitec/arangox_ecto
|
e58c2714d59cc220ce2f41b51025bc1ce8d7b33c
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/tasks/ecto.setup.arango.ex
|
solitec/arangox_ecto
|
e58c2714d59cc220ce2f41b51025bc1ce8d7b33c
|
[
"Apache-2.0"
] | null | null | null |
lib/mix/tasks/ecto.setup.arango.ex
|
solitec/arangox_ecto
|
e58c2714d59cc220ce2f41b51025bc1ce8d7b33c
|
[
"Apache-2.0"
] | 1
|
2021-01-27T10:28:14.000Z
|
2021-01-27T10:28:14.000Z
|
defmodule Mix.Tasks.Ecto.Setup.Arango do
  @moduledoc """
  Sets up all necessary collections in the _systems db for migrations and creates the database.
  """
  use Mix.Task

  # Brings create_migrations/0 and create_master_document/0 into scope.
  import Mix.ArangoXEcto

  @shortdoc "Sets up all necessary collections in _systems db for migrations"

  @impl true
  def run(_args) do
    # Start the application (and its repo/connections) before touching ArangoDB.
    Mix.Task.run("app.start")
    case create_migrations() do
      :ok ->
        create_master_document()
        Mix.shell().info("Setup Complete")
      # 409 (HTTP conflict) — per the message below, setup was already done.
      {:error, 409} ->
        Mix.shell().info("ArangoDB already setup for ecto")
    end
  end
end
| 22.32
| 85
| 0.668459
|
9393336230fdf95ea13a0b2971ca7f12f8b13ba9
| 1,861
|
ex
|
Elixir
|
clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_optimized_stats_node.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | null | null | null |
clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_optimized_stats_node.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_optimized_stats_node.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1OptimizedStatsNode do
  @moduledoc """
  This message type encapsulates a data node as represented below: { "identifier": { "names": [ "apiproxy" ], "values": [ "sirjee" ] }, "metric": [ { "env": "prod", "name": "sum(message_count)", "values": [ 36.0 ] } ] } OR { "env": "prod", "name": "sum(message_count)", "values": [ 36.0 ] } Depending on whether a dimension is present in the query or not the data node type can be a simple metric value or dimension identifier with list of metrics.
  ## Attributes
  * `data` (*type:* `list(any())`, *default:* `nil`) -
  """
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :data => list(any())
        }

  # Heterogeneous node payload; see @moduledoc for the two possible shapes.
  field(:data, type: :list)
end

# Auto-generated Poison hooks delegating to the Gax model machinery.
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1OptimizedStatsNode do
  def decode(value, options) do
    GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1OptimizedStatsNode.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1OptimizedStatsNode do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.595745
| 448
| 0.722192
|
939354cbaff767150054a8997097518925eba3c4
| 930
|
ex
|
Elixir
|
apps/hefty/lib/hefty/backtesting.ex
|
Cinderella-Man/crypto-streamer
|
b1e990d375f7143c5149930be991249f0d9c3ee3
|
[
"MIT"
] | 49
|
2019-10-28T22:27:28.000Z
|
2021-10-11T06:40:29.000Z
|
apps/hefty/lib/hefty/backtesting.ex
|
Cinderella-Man/crypto-streamer
|
b1e990d375f7143c5149930be991249f0d9c3ee3
|
[
"MIT"
] | 9
|
2019-08-30T13:15:36.000Z
|
2019-10-10T21:25:14.000Z
|
apps/hefty/lib/hefty/backtesting.ex
|
Cinderella-Man/crypto-streamer
|
b1e990d375f7143c5149930be991249f0d9c3ee3
|
[
"MIT"
] | 7
|
2019-10-31T06:19:26.000Z
|
2021-09-30T04:20:58.000Z
|
defmodule Hefty.Backtesting do
  @moduledoc """
  From Wikipedia:
  ```
  Backtesting is a term used in modeling to refer to testing a predictive model on historical data. Backtesting is a type of retrodiction, and a special type of cross-validation applied to previous time period(s).
  ```
  """

  @doc """
  Gathers summary statistics for the trade events of `symbol` between
  `from_date` and `to_date`, starts the backtest streamer for that range and
  returns the statistics as a map.
  """
  def kick_off_backtesting(symbol, from_date, to_date) do
    total_events = Hefty.TradeEvents.count(symbol, from_date, to_date)
    min = Hefty.TradeEvents.min(symbol, from_date, to_date)
    # BUG FIX: `max`, `first` and `last` previously all called
    # `Hefty.TradeEvents.min/3` (copy-paste), so every statistic held the
    # minimum. NOTE(review): assumes `Hefty.TradeEvents` exposes `max/3`,
    # `first/3` and `last/3` alongside `min/3` and `count/3` — confirm.
    max = Hefty.TradeEvents.max(symbol, from_date, to_date)
    first = Hefty.TradeEvents.first(symbol, from_date, to_date)
    last = Hefty.TradeEvents.last(symbol, from_date, to_date)

    Hefty.Streaming.Backtester.SimpleStreamer.start_streaming(
      symbol,
      from_date,
      to_date
    )

    %{
      :total_events => total_events,
      :min => min,
      :max => max,
      :first => first,
      :last => last
    }
  end
end
| 28.181818
| 213
| 0.684946
|
93935ab7f02a0c6a9c49b6aacb00a94de8043ab9
| 1,724
|
ex
|
Elixir
|
lib/shex/shape_expressions/shape_expression_reference.ex
|
rdf-elixir/shex-ex
|
84100ab3dfcf3988b2b90289a8e4fbeb9f4d1516
|
[
"MIT"
] | 4
|
2020-06-06T15:09:16.000Z
|
2021-03-22T19:46:30.000Z
|
lib/shex/shape_expressions/shape_expression_reference.ex
|
rdf-elixir/shex-ex
|
84100ab3dfcf3988b2b90289a8e4fbeb9f4d1516
|
[
"MIT"
] | null | null | null |
lib/shex/shape_expressions/shape_expression_reference.ex
|
rdf-elixir/shex-ex
|
84100ab3dfcf3988b2b90289a8e4fbeb9f4d1516
|
[
"MIT"
] | null | null | null |
defmodule ShEx.ShapeExpressionReference do
  @moduledoc false

  # Resolves a shape-expression reference (an IRI or blank node) against the
  # schema and validates the node association against the referenced shape.
  # The ref stack tracks (reference, node) pairs to detect cycles.
  def satisfies(expr_ref, graph, schema, association, state) do
    with {:ok, ref_stack} <-
           push_ref_stack(state.ref_stack, {expr_ref, association.node}),
         shape_expr when not is_nil(shape_expr) <-
           ShEx.Schema.shape_expr_with_id(schema, expr_ref) do
      ShEx.ShapeExpression.satisfies(shape_expr, graph, schema, association, %{
        state
        | ref_stack: ref_stack
      })
    else
      # Reference cycle: treat the association as conformant rather than
      # recursing forever.
      :circular_reference ->
        ShEx.ShapeMap.Association.conform(association)

      # shape_expr_with_id/2 returned nil: dangling reference (should have
      # been rejected when the schema was created).
      nil ->
        raise """
        Error: Unknown reference #{expr_ref}
        This should have been detected during schema creation.
        Please raise an issue at https://github.com/rdf-elixir/shex-ex/issues
        """
    end
  end

  # Pushes {ref, node} onto the stack; an entry already present means we are
  # re-validating the same node against the same reference — a cycle.
  defp push_ref_stack(stack, entry) do
    if entry in stack do
      :circular_reference
    else
      {:ok, [entry | stack]}
    end
  end
end
# An IRI used where a shape expression is expected is a reference to a
# labelled shape; delegate to the shared resolver.
defimpl ShEx.ShapeExpression, for: RDF.IRI do
  def satisfies(expr_ref, graph, schema, association, state) do
    ShEx.ShapeExpressionReference.satisfies(expr_ref, graph, schema, association, state)
  end
end

# Blank-node references behave identically to IRI references.
defimpl ShEx.ShapeExpression, for: RDF.BlankNode do
  def satisfies(expr_ref, graph, schema, association, state) do
    ShEx.ShapeExpressionReference.satisfies(expr_ref, graph, schema, association, state)
  end
end

# References are leaves in the operator tree: they expose no label/operands
# and children/1 is never expected to be invoked on them.
defimpl ShEx.Operator, for: RDF.IRI do
  def children(_), do: raise("This should never be called")
  def triple_expression_label_and_operands(_), do: {nil, []}
end

defimpl ShEx.Operator, for: RDF.BlankNode do
  def children(_), do: raise("This should never be called")
  def triple_expression_label_and_operands(_), do: {nil, []}
end
| 30.785714
| 88
| 0.698376
|
93935d8edf428c162f62aa38c38a4bbe76ccfcf5
| 695
|
ex
|
Elixir
|
lib/r_map.ex
|
tashirosota/ex-rubenium
|
0ddb30c31b678889a65dae7674ab7010e1dd7c5e
|
[
"Apache-2.0"
] | 24
|
2022-01-13T23:13:11.000Z
|
2022-03-27T18:02:39.000Z
|
lib/r_map.ex
|
tashirosota/ex-rubenium
|
0ddb30c31b678889a65dae7674ab7010e1dd7c5e
|
[
"Apache-2.0"
] | 16
|
2022-01-16T09:18:17.000Z
|
2022-02-08T01:10:09.000Z
|
lib/r_map.ex
|
tashirosota/ex-rubenium
|
0ddb30c31b678889a65dae7674ab7010e1dd7c5e
|
[
"Apache-2.0"
] | 6
|
2022-01-16T04:40:42.000Z
|
2022-02-07T14:56:26.000Z
|
defmodule RMap do
  @moduledoc """
  Entry point of Map extensions, and can use all of RMap.* and REnum functions.
  See also.
  - [RMap.Native](https://hexdocs.pm/r_enum/RMap.Native.html#content)
  - [RMap.Ruby](https://hexdocs.pm/r_enum/RMap.Ruby.html#content)
  - [RMap.ActiveSupport](https://hexdocs.pm/r_enum/RMap.ActiveSupport.html#content)
  - [RMap.Support](https://hexdocs.pm/r_enum/RMap.Support.html#content)
  - [REnum](https://hexdocs.pm/r_enum/REnum.html#content)
  """
  use RMap.Native
  use RMap.Ruby
  use RMap.ActiveSupport
  use RMap.Support

  # Pull in REnum's delegated functions, excluding every function Map itself
  # exports plus select/filter/reject — presumably because those are defined
  # by the RMap.* extensions above; TODO confirm against REnum.__using__/1.
  use REnum,
    undelegate_functions:
      (Map.module_info()[:exports] |> Keyword.keys()) ++ [:select, :filter, :reject]
end
| 34.75
| 84
| 0.700719
|
93936d57178f340365576e1de2e323954ae2455c
| 1,822
|
exs
|
Elixir
|
mix.exs
|
nulian/fusion_jwt_authentication
|
293965ff94fe84fef7a422c21c6ef7e5a6e475cf
|
[
"MIT"
] | null | null | null |
mix.exs
|
nulian/fusion_jwt_authentication
|
293965ff94fe84fef7a422c21c6ef7e5a6e475cf
|
[
"MIT"
] | null | null | null |
mix.exs
|
nulian/fusion_jwt_authentication
|
293965ff94fe84fef7a422c21c6ef7e5a6e475cf
|
[
"MIT"
] | 1
|
2021-03-25T14:46:52.000Z
|
2021-03-25T14:46:52.000Z
|
defmodule FusionJWTAuthentication.MixProject do
  use Mix.Project

  # Mix project definition for the fusion_jwt_authentication package.
  def project do
    [
      app: :fusion_jwt_authentication,
      version: "1.0.1",
      elixir: "~> 1.9",
      description: description(),
      elixirc_paths: elixirc_paths(Mix.env()),
      elixirc_options: [warnings_as_errors: true],
      package: package(),
      start_permanent: Mix.env() == :prod,
      build_embedded: Mix.env() == :prod,
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      deps: deps(),
      dialyzer: dialyzer()
    ]
  end

  # Every environment compiles the same paths, so the three identical
  # per-environment clauses (:prod, :test, catch-all) were collapsed into
  # one catch-all. Behavior is unchanged.
  defp elixirc_paths(_env), do: ["lib"]

  # Dialyzer PLT location for the :dialyzer project entry.
  defp dialyzer do
    [
      plt_file: {:no_warn, "priv/plts/dialyzer.plt"}
    ]
  end

  # Hex package description.
  defp description do
    "Plug for verifying fusionauth certificate signed jwt tokens"
  end

  # Hex package metadata.
  defp package do
    %{
      files: ["lib", "mix.exs", "LICENSE", "README.md"],
      maintainers: ["Peter Arentsen"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/bettyblocks/fusion_jwt_authentication"}
    }
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger, :jason],
      mod: {FusionJWTAuthentication, []}
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:joken, "~> 2.0"},
      {:plug, "~> 1.11"},
      {:httpoison, "~> 1.4"},
      {:jason, "~> 1.0"},
      {:credo, ">= 0.0.0", only: :dev},
      {:excoveralls, "~> 0.12", only: :test},
      {:dialyxir, "~> 1.0.0", only: [:dev], runtime: false},
      {:ex_doc, "~> 0.21", only: :dev, runtime: false}
    ]
  end
end
| 25.305556
| 86
| 0.573546
|
9393893436459cee35a291f9969f4e7631b60a5c
| 150
|
exs
|
Elixir
|
test/broadcaster_test.exs
|
groxio-learning/broadcaster
|
0c2e40773083518f9d4316cecadb1c69b8dd22a3
|
[
"MIT"
] | null | null | null |
test/broadcaster_test.exs
|
groxio-learning/broadcaster
|
0c2e40773083518f9d4316cecadb1c69b8dd22a3
|
[
"MIT"
] | null | null | null |
test/broadcaster_test.exs
|
groxio-learning/broadcaster
|
0c2e40773083518f9d4316cecadb1c69b8dd22a3
|
[
"MIT"
] | null | null | null |
defmodule BroadcasterTest do
  use ExUnit.Case
  # Runs any doctests embedded in Broadcaster's @doc examples.
  doctest Broadcaster

  # Default generated smoke test for the Broadcaster module.
  test "greets the world" do
    assert Broadcaster.hello() == :world
  end
end
| 16.666667
| 40
| 0.733333
|
9393950329575bcbd3a0f2c9bf43cda50c208f54
| 355
|
ex
|
Elixir
|
apps/tai/lib/tai/venue_adapters/bitmex/stream/process_auth/messages/insert_position.ex
|
yurikoval/tai
|
94254b45d22fa0307b01577ff7c629c7280c0295
|
[
"MIT"
] | null | null | null |
apps/tai/lib/tai/venue_adapters/bitmex/stream/process_auth/messages/insert_position.ex
|
yurikoval/tai
|
94254b45d22fa0307b01577ff7c629c7280c0295
|
[
"MIT"
] | 78
|
2020-10-12T06:21:43.000Z
|
2022-03-28T09:02:00.000Z
|
apps/tai/lib/tai/venue_adapters/bitmex/stream/process_auth/messages/insert_position.ex
|
yurikoval/tai
|
94254b45d22fa0307b01577ff7c629c7280c0295
|
[
"MIT"
] | null | null | null |
defmodule Tai.VenueAdapters.Bitmex.Stream.ProcessAuth.Messages.InsertPosition do
  alias Tai.VenueAdapters.Bitmex.Stream.ProcessAuth

  # Wrapper around a BitMEX "insert position" auth-stream payload.
  @type t :: %__MODULE__{data: map}

  @enforce_keys [:data]
  defstruct [:data]

  defimpl ProcessAuth.Message do
    # These messages are acknowledged but intentionally not acted upon.
    def process(_message, _received_at, _state), do: :ok
  end
end
| 22.1875
| 80
| 0.749296
|
9393ab11869ea9680d1ecb229a42d37da6321fc2
| 80
|
exs
|
Elixir
|
phoenix/app/test/phx_app_web/views/layout_view_test.exs
|
ohr486/docker-app-template
|
9365f95d1102c69dba46474d0498d8306d40ed79
|
[
"MIT"
] | null | null | null |
phoenix/app/test/phx_app_web/views/layout_view_test.exs
|
ohr486/docker-app-template
|
9365f95d1102c69dba46474d0498d8306d40ed79
|
[
"MIT"
] | null | null | null |
phoenix/app/test/phx_app_web/views/layout_view_test.exs
|
ohr486/docker-app-template
|
9365f95d1102c69dba46474d0498d8306d40ed79
|
[
"MIT"
] | null | null | null |
defmodule PhxAppWeb.LayoutViewTest do
  # Placeholder test module for the layout view; async because it touches
  # no shared state yet.
  use PhxAppWeb.ConnCase, async: true
end
| 20
| 37
| 0.825
|
9393db1a96de5217b1c4615de8778e4a42453bf5
| 2,285
|
ex
|
Elixir
|
clients/service_control/lib/google_api/service_control/v1/model/log_entry_source_location.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/service_control/lib/google_api/service_control/v1/model/log_entry_source_location.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/service_control/lib/google_api/service_control/v1/model/log_entry_source_location.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceControl.V1.Model.LogEntrySourceLocation do
  @moduledoc """
  Additional information about the source code location that produced the log
  entry.
  ## Attributes
  * `file` (*type:* `String.t`, *default:* `nil`) - Optional. Source file name. Depending on the runtime environment, this
  might be a simple name or a fully-qualified name.
  * `function` (*type:* `String.t`, *default:* `nil`) - Optional. Human-readable name of the function or method being invoked, with
  optional context such as the class or package name. This information may be
  used in contexts such as the logs viewer, where a file and line number are
  less meaningful. The format can vary by language. For example:
  `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function`
  (Python).
  * `line` (*type:* `String.t`, *default:* `nil`) - Optional. Line within the source file. 1-based; 0 indicates no line number
  available.
  """
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :file => String.t(),
          :function => String.t(),
          :line => String.t()
        }

  field(:file)
  field(:function)
  # Note: `line` is modeled as a string (see @moduledoc), not an integer.
  field(:line)
end

# Auto-generated Poison hooks delegating to the Gax model machinery.
defimpl Poison.Decoder, for: GoogleApi.ServiceControl.V1.Model.LogEntrySourceLocation do
  def decode(value, options) do
    GoogleApi.ServiceControl.V1.Model.LogEntrySourceLocation.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.ServiceControl.V1.Model.LogEntrySourceLocation do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.459016
| 133
| 0.715536
|
9393f1c41782b554895bdd04e033b97a17cbe13e
| 885
|
ex
|
Elixir
|
lib/test_web/german_text.ex
|
clayscode/exzeitable
|
312e7a0aebda51f5cd3ccee800c1d1affd9d4248
|
[
"MIT"
] | 1
|
2021-10-18T00:55:47.000Z
|
2021-10-18T00:55:47.000Z
|
lib/test_web/german_text.ex
|
EnzymeCorp/exzeitable
|
74c02ab82de56aaf150006f05836a65a7d47697f
|
[
"MIT"
] | null | null | null |
lib/test_web/german_text.ex
|
EnzymeCorp/exzeitable
|
74c02ab82de56aaf150006f05836a65a7d47697f
|
[
"MIT"
] | null | null | null |
defmodule TestWeb.GermanText do
  @moduledoc "Custom text for the Exzeitable HTML interface (German translations)."
  @behaviour Exzeitable.Text

  # Action buttons
  def actions(_assigns), do: "Aktionen"
  def new(_assigns), do: "Neu"
  # NOTE(review): returns the English word "Show" while every sibling string
  # is German — possibly a missing translation; confirm before changing this
  # user-facing string.
  def show(_assigns), do: "Show"
  def edit(_assigns), do: "Bearbeiten"
  def delete(_assigns), do: "Löschen"
  def confirm_action(_assigns), do: "Bist du sicher?"
  # Pagination
  def previous(_assigns), do: "Bisherige"
  def next(_assigns), do: "Nächster"
  # Search
  def search(_assigns), do: "Suche"
  def nothing_found(_assigns), do: "Nichts gefunden"
  # Show and hide fields
  def show_field_buttons(_assigns), do: "Feldschaltflächen anzeigen"
  def hide_field_buttons(_assigns), do: "Feldschaltflächen ausblenden"
  def show_field(_assigns, field), do: "Zeigen Sie #{field}"
  def hide(_assigns), do: "ausblenden"
  def sort(_assigns), do: "Sortieren"
end
| 27.65625
| 70
| 0.718644
|
9393fde77c6fd06ed4b07f0d85ceb4fcdb809494
| 918
|
exs
|
Elixir
|
exercises/01-elixir/01-basics/07-arity/tests.exs
|
DennisWinnepenninckx/distributed-applications
|
06743e4e2a09dc52ff52be831e486bb073916173
|
[
"BSD-3-Clause"
] | 1
|
2021-09-22T09:52:11.000Z
|
2021-09-22T09:52:11.000Z
|
exercises/01-elixir/01-basics/07-arity/tests.exs
|
DennisWinnepenninckx/distributed-applications
|
06743e4e2a09dc52ff52be831e486bb073916173
|
[
"BSD-3-Clause"
] | 22
|
2019-06-19T18:58:13.000Z
|
2020-03-16T14:43:06.000Z
|
exercises/01-elixir/01-basics/07-arity/tests.exs
|
DennisWinnepenninckx/distributed-applications
|
06743e4e2a09dc52ff52be831e486bb073916173
|
[
"BSD-3-Clause"
] | 32
|
2019-09-19T03:25:11.000Z
|
2020-10-06T15:01:47.000Z
|
defmodule Setup do
  # Name of the shared helper script searched for in ancestor directories.
  @script "shared.exs"

  # Walks up the directory tree until shared.exs is found, loads it, and
  # hands this exercise's directory to Shared.setup/1.
  # NOTE(review): recurses without bound if shared.exs is missing from every
  # ancestor directory (Path.join(dir, "..") never terminates on its own).
  def setup(directory \\ ".") do
    path = Path.join(directory, @script)
    if File.exists?(path) do
      Code.require_file(path)
      Shared.setup(__DIR__)
    else
      setup(Path.join(directory, ".."))
    end
  end
end
Setup.setup
defmodule Tests do
  use ExUnit.Case, async: true
  # `check/1` is provided by the shared.exs loaded in Setup; it generates a
  # test asserting `that:` evaluates to `is_equal_to:`.
  import Shared

  # Exercise: Numbers.maximum with arities 2, 3 and 4.
  check that: Numbers.maximum(1, 2), is_equal_to: 2
  check that: Numbers.maximum(3, 2), is_equal_to: 3
  check that: Numbers.maximum(1, 5, 2), is_equal_to: 5
  check that: Numbers.maximum(7, 5, 2), is_equal_to: 7
  check that: Numbers.maximum(7, 5, 9), is_equal_to: 9
  check that: Numbers.maximum(7, 10, 9), is_equal_to: 10
  check that: Numbers.maximum(3, 2, 1, 0), is_equal_to: 3
  check that: Numbers.maximum(3, 4, 1, 0), is_equal_to: 4
  check that: Numbers.maximum(3, 4, 7, 0), is_equal_to: 7
  check that: Numbers.maximum(3, 4, 7, 9), is_equal_to: 9
end
| 25.5
| 57
| 0.669935
|
939416b594df3ec1bf454beeafade16597e852bb
| 1,888
|
ex
|
Elixir
|
elixir/elixir/lib/homework/companies.ex
|
connernance1/web-homework
|
0ec80f58f366359bf09e88afb18ccc3d69974457
|
[
"MIT"
] | null | null | null |
elixir/elixir/lib/homework/companies.ex
|
connernance1/web-homework
|
0ec80f58f366359bf09e88afb18ccc3d69974457
|
[
"MIT"
] | null | null | null |
elixir/elixir/lib/homework/companies.ex
|
connernance1/web-homework
|
0ec80f58f366359bf09e88afb18ccc3d69974457
|
[
"MIT"
] | null | null | null |
defmodule Homework.Companies do
  @moduledoc """
  The Companies context.
  """

  import Ecto.Query, warn: false
  alias Homework.Repo

  alias Homework.Companies.Company

  @doc """
  Returns the list of companies.

  ## Examples

      iex> list_companies([])
      [%Company{}, ...]

  """
  def list_companies(_args) do
    Repo.all(Company)
  end

  @doc """
  Gets a single company.

  Raises `Ecto.NoResultsError` if the Company does not exist.

  ## Examples

      iex> get_company!(123)
      %Company{}

      iex> get_company!(456)
      ** (Ecto.NoResultsError)

  """
  def get_company!(id), do: Repo.get!(Company, id)

  @doc """
  Creates a company.

  ## Examples

      iex> create_company(%{field: value})
      {:ok, %Company{}}

      iex> create_company(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_company(attrs \\ %{}) do
    %Company{}
    |> Company.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a company.

  ## Examples

      iex> update_company(company, %{field: new_value})
      {:ok, %Company{}}

      iex> update_company(company, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_company(%Company{} = company, attrs) do
    company
    |> Company.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a company.

  ## Examples

      iex> delete_company(company)
      {:ok, %Company{}}

      iex> delete_company(company)
      {:error, %Ecto.Changeset{}}

  """
  def delete_company(%Company{} = company) do
    Repo.delete(company)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking company changes.

  ## Examples

      iex> change_company(company)
      %Ecto.Changeset{data: %Company{}}

  """
  def change_company(%Company{} = company, attrs \\ %{}) do
    Company.changeset(company, attrs)
  end
end
| 17.980952
| 63
| 0.612288
|
939417b7a666a81ba869dfd5bfc04db10ac99f1f
| 31,609
|
ex
|
Elixir
|
lib/aws/generated/pinpoint_email.ex
|
benmmari/aws-elixir
|
b97477498a9e8ba0d46a09255302d88c6a1c8573
|
[
"Apache-2.0"
] | null | null | null |
lib/aws/generated/pinpoint_email.ex
|
benmmari/aws-elixir
|
b97477498a9e8ba0d46a09255302d88c6a1c8573
|
[
"Apache-2.0"
] | null | null | null |
lib/aws/generated/pinpoint_email.ex
|
benmmari/aws-elixir
|
b97477498a9e8ba0d46a09255302d88c6a1c8573
|
[
"Apache-2.0"
] | null | null | null |
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.PinpointEmail do
@moduledoc """
Amazon Pinpoint Email Service
Welcome to the *Amazon Pinpoint Email API Reference*. This guide provides
information about the Amazon Pinpoint Email API (version 1.0), including
supported operations, data types, parameters, and schemas.
[Amazon Pinpoint](https://aws.amazon.com/pinpoint) is an AWS service that
you can use to engage with your customers across multiple messaging
channels. You can use Amazon Pinpoint to send email, SMS text messages,
voice messages, and push notifications. The Amazon Pinpoint Email API
provides programmatic access to options that are unique to the email
channel and supplement the options provided by the Amazon Pinpoint API.
If you're new to Amazon Pinpoint, you might find it helpful to also review
the [Amazon Pinpoint Developer
Guide](https://docs.aws.amazon.com/pinpoint/latest/developerguide/welcome.html).
The *Amazon Pinpoint Developer Guide* provides tutorials, code samples, and
procedures that demonstrate how to use Amazon Pinpoint features
programmatically and how to integrate Amazon Pinpoint functionality into
mobile apps and other types of applications. The guide also provides
information about key topics such as Amazon Pinpoint integration with other
AWS services and the limits that apply to using the service.
The Amazon Pinpoint Email API is available in several AWS Regions and it
provides an endpoint for each of these Regions. For a list of all the
Regions and endpoints where the API is currently available, see [AWS
Service
Endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html#pinpoint_region)
in the *Amazon Web Services General Reference*. To learn more about AWS
Regions, see [Managing AWS
Regions](https://docs.aws.amazon.com/general/latest/gr/rande-manage.html)
in the *Amazon Web Services General Reference*.
In each Region, AWS maintains multiple Availability Zones. These
Availability Zones are physically isolated from each other, but are united
by private, low-latency, high-throughput, and highly redundant network
connections. These Availability Zones enable us to provide very high levels
of availability and redundancy, while also minimizing latency. To learn
more about the number of Availability Zones that are available in each
Region, see [AWS Global
Infrastructure](http://aws.amazon.com/about-aws/global-infrastructure/).
"""
@doc """
Create a configuration set. *Configuration sets* are groups of rules that
you can apply to the emails you send using Amazon Pinpoint. You apply a
configuration set to an email by including a reference to the configuration
set in the headers of the email. When you apply a configuration set to an
email, all of the rules in that configuration set are applied to the email.
"""
def create_configuration_set(client, input, options \\ []) do
path_ = "/v1/email/configuration-sets"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Create an event destination. In Amazon Pinpoint, *events* include message
sends, deliveries, opens, clicks, bounces, and complaints. *Event
destinations* are places that you can send information about these events
to. For example, you can send event data to Amazon SNS to receive
notifications when you receive bounces or complaints, or you can use Amazon
Kinesis Data Firehose to stream data to Amazon S3 for long-term storage.
A single configuration set can include more than one event destination.
"""
def create_configuration_set_event_destination(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Create a new pool of dedicated IP addresses. A pool can include one or more
dedicated IP addresses that are associated with your Amazon Pinpoint
account. You can associate a pool with a configuration set. When you send
an email that uses that configuration set, Amazon Pinpoint sends it using
only the IP addresses in the associated pool.
"""
def create_dedicated_ip_pool(client, input, options \\ []) do
path_ = "/v1/email/dedicated-ip-pools"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Create a new predictive inbox placement test. Predictive inbox placement
tests can help you predict how your messages will be handled by various
email providers around the world. When you perform a predictive inbox
placement test, you provide a sample message that contains the content that
you plan to send to your customers. Amazon Pinpoint then sends that message
to special email addresses spread across several major email providers.
After about 24 hours, the test is complete, and you can use the
`GetDeliverabilityTestReport` operation to view the results of the test.
"""
def create_deliverability_test_report(client, input, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/test"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Verifies an email identity for use with Amazon Pinpoint. In Amazon
Pinpoint, an identity is an email address or domain that you use when you
send email. Before you can use an identity to send email with Amazon
Pinpoint, you first have to verify it. By verifying an address, you
demonstrate that you're the owner of the address, and that you've given
Amazon Pinpoint permission to send email from the address.
When you verify an email address, Amazon Pinpoint sends an email to the
address. Your email address is verified as soon as you follow the link in
the verification email.
When you verify a domain, this operation provides a set of DKIM tokens,
which you can convert into CNAME tokens. You add these CNAME tokens to the
DNS configuration for your domain. Your domain is verified when Amazon
Pinpoint detects these records in the DNS configuration for your domain. It
usually takes around 72 hours to complete the domain verification process.
"""
def create_email_identity(client, input, options \\ []) do
path_ = "/v1/email/identities"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Delete an existing configuration set.
In Amazon Pinpoint, *configuration sets* are groups of rules that you can
apply to the emails you send. You apply a configuration set to an email by
including a reference to the configuration set in the headers of the email.
When you apply a configuration set to an email, all of the rules in that
configuration set are applied to the email.
"""
def delete_configuration_set(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Delete an event destination.
In Amazon Pinpoint, *events* include message sends, deliveries, opens,
clicks, bounces, and complaints. *Event destinations* are places that you
can send information about these events to. For example, you can send event
data to Amazon SNS to receive notifications when you receive bounces or
complaints, or you can use Amazon Kinesis Data Firehose to stream data to
Amazon S3 for long-term storage.
"""
def delete_configuration_set_event_destination(client, configuration_set_name, event_destination_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations/#{URI.encode(event_destination_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Delete a dedicated IP pool.
"""
def delete_dedicated_ip_pool(client, pool_name, input, options \\ []) do
path_ = "/v1/email/dedicated-ip-pools/#{URI.encode(pool_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an email identity that you previously verified for use with Amazon
Pinpoint. An identity can be either an email address or a domain name.
"""
def delete_email_identity(client, email_identity, input, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Obtain information about the email-sending status and capabilities of your
Amazon Pinpoint account in the current AWS Region.
"""
def get_account(client, options \\ []) do
path_ = "/v1/email/account"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve a list of the blacklists that your dedicated IP addresses appear
on.
"""
def get_blacklist_reports(client, blacklist_item_names, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/blacklist-report"
headers = []
query_ = []
query_ = if !is_nil(blacklist_item_names) do
[{"BlacklistItemNames", blacklist_item_names} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Get information about an existing configuration set, including the
dedicated IP pool that it's associated with, whether or not it's enabled
for sending email, and more.
In Amazon Pinpoint, *configuration sets* are groups of rules that you can
apply to the emails you send. You apply a configuration set to an email by
including a reference to the configuration set in the headers of the email.
When you apply a configuration set to an email, all of the rules in that
configuration set are applied to the email.
"""
def get_configuration_set(client, configuration_set_name, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve a list of event destinations that are associated with a
configuration set.
In Amazon Pinpoint, *events* include message sends, deliveries, opens,
clicks, bounces, and complaints. *Event destinations* are places that you
can send information about these events to. For example, you can send event
data to Amazon SNS to receive notifications when you receive bounces or
complaints, or you can use Amazon Kinesis Data Firehose to stream data to
Amazon S3 for long-term storage.
"""
def get_configuration_set_event_destinations(client, configuration_set_name, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Get information about a dedicated IP address, including the name of the
dedicated IP pool that it's associated with, as well information about the
automatic warm-up process for the address.
"""
def get_dedicated_ip(client, ip, options \\ []) do
path_ = "/v1/email/dedicated-ips/#{URI.encode(ip)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
List the dedicated IP addresses that are associated with your Amazon
Pinpoint account.
"""
def get_dedicated_ips(client, next_token \\ nil, page_size \\ nil, pool_name \\ nil, options \\ []) do
path_ = "/v1/email/dedicated-ips"
headers = []
query_ = []
query_ = if !is_nil(pool_name) do
[{"PoolName", pool_name} | query_]
else
query_
end
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve information about the status of the Deliverability dashboard for
your Amazon Pinpoint account. When the Deliverability dashboard is enabled,
you gain access to reputation, deliverability, and other metrics for the
domains that you use to send email using Amazon Pinpoint. You also gain the
ability to perform predictive inbox placement tests.
When you use the Deliverability dashboard, you pay a monthly subscription
charge, in addition to any other fees that you accrue by using Amazon
Pinpoint. For more information about the features and cost of a
Deliverability dashboard subscription, see [Amazon Pinpoint
Pricing](http://aws.amazon.com/pinpoint/pricing/).
"""
def get_deliverability_dashboard_options(client, options \\ []) do
path_ = "/v1/email/deliverability-dashboard"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve the results of a predictive inbox placement test.
"""
def get_deliverability_test_report(client, report_id, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/test-reports/#{URI.encode(report_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve all the deliverability data for a specific campaign. This data is
available for a campaign only if the campaign sent email by using a domain
that the Deliverability dashboard is enabled for
(`PutDeliverabilityDashboardOption` operation).
"""
def get_domain_deliverability_campaign(client, campaign_id, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/campaigns/#{URI.encode(campaign_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve inbox placement and engagement rates for the domains that you use
to send email.
"""
def get_domain_statistics_report(client, domain, end_date, start_date, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/statistics-report/#{URI.encode(domain)}"
headers = []
query_ = []
query_ = if !is_nil(start_date) do
[{"StartDate", start_date} | query_]
else
query_
end
query_ = if !is_nil(end_date) do
[{"EndDate", end_date} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides information about a specific identity associated with your Amazon
Pinpoint account, including the identity's verification status, its DKIM
authentication status, and its custom Mail-From settings.
"""
def get_email_identity(client, email_identity, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
List all of the configuration sets associated with your Amazon Pinpoint
account in the current region.
In Amazon Pinpoint, *configuration sets* are groups of rules that you can
apply to the emails you send. You apply a configuration set to an email by
including a reference to the configuration set in the headers of the email.
When you apply a configuration set to an email, all of the rules in that
configuration set are applied to the email.
"""
def list_configuration_sets(client, next_token \\ nil, page_size \\ nil, options \\ []) do
path_ = "/v1/email/configuration-sets"
headers = []
query_ = []
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
List all of the dedicated IP pools that exist in your Amazon Pinpoint
account in the current AWS Region.
"""
def list_dedicated_ip_pools(client, next_token \\ nil, page_size \\ nil, options \\ []) do
path_ = "/v1/email/dedicated-ip-pools"
headers = []
query_ = []
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Show a list of the predictive inbox placement tests that you've performed,
regardless of their statuses. For predictive inbox placement tests that are
complete, you can use the `GetDeliverabilityTestReport` operation to view
the results.
"""
def list_deliverability_test_reports(client, next_token \\ nil, page_size \\ nil, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/test-reports"
headers = []
query_ = []
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve deliverability data for all the campaigns that used a specific
domain to send email during a specified time range. This data is available
for a domain only if you enabled the Deliverability dashboard
(`PutDeliverabilityDashboardOption` operation) for the domain.
"""
def list_domain_deliverability_campaigns(client, subscribed_domain, end_date, next_token \\ nil, page_size \\ nil, start_date, options \\ []) do
path_ = "/v1/email/deliverability-dashboard/domains/#{URI.encode(subscribed_domain)}/campaigns"
headers = []
query_ = []
query_ = if !is_nil(start_date) do
[{"StartDate", start_date} | query_]
else
query_
end
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(end_date) do
[{"EndDate", end_date} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of all of the email identities that are associated with your
Amazon Pinpoint account. An identity can be either an email address or a
domain. This operation returns identities that are verified as well as
those that aren't.
"""
def list_email_identities(client, next_token \\ nil, page_size \\ nil, options \\ []) do
path_ = "/v1/email/identities"
headers = []
query_ = []
query_ = if !is_nil(page_size) do
[{"PageSize", page_size} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"NextToken", next_token} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieve a list of the tags (keys and values) that are associated with a
specified resource. A *tag* is a label that you optionally define and
associate with a resource in Amazon Pinpoint. Each tag consists of a
required *tag key* and an optional associated *tag value*. A tag key is a
general label that acts as a category for more specific tag values. A tag
value acts as a descriptor within a tag key.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/v1/email/tags"
headers = []
query_ = []
query_ = if !is_nil(resource_arn) do
[{"ResourceArn", resource_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Enable or disable the automatic warm-up feature for dedicated IP addresses.
"""
def put_account_dedicated_ip_warmup_attributes(client, input, options \\ []) do
path_ = "/v1/email/account/dedicated-ips/warmup"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Enable or disable the ability of your account to send email.
"""
def put_account_sending_attributes(client, input, options \\ []) do
path_ = "/v1/email/account/sending"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Associate a configuration set with a dedicated IP pool. You can use
dedicated IP pools to create groups of dedicated IP addresses for sending
specific types of email.
"""
def put_configuration_set_delivery_options(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/delivery-options"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Enable or disable collection of reputation metrics for emails that you send
using a particular configuration set in a specific AWS Region.
"""
def put_configuration_set_reputation_options(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/reputation-options"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Enable or disable email sending for messages that use a particular
configuration set in a specific AWS Region.
"""
def put_configuration_set_sending_options(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/sending"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Specify a custom domain to use for open and click tracking elements in
email that you send using Amazon Pinpoint.
"""
def put_configuration_set_tracking_options(client, configuration_set_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/tracking-options"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Move a dedicated IP address to an existing dedicated IP pool.
<note> The dedicated IP address that you specify must already exist, and
must be associated with your Amazon Pinpoint account.
The dedicated IP pool you specify must already exist. You can create a new
pool by using the `CreateDedicatedIpPool` operation.
</note>
"""
def put_dedicated_ip_in_pool(client, ip, input, options \\ []) do
path_ = "/v1/email/dedicated-ips/#{URI.encode(ip)}/pool"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
<p/>
"""
def put_dedicated_ip_warmup_attributes(client, ip, input, options \\ []) do
path_ = "/v1/email/dedicated-ips/#{URI.encode(ip)}/warmup"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Enable or disable the Deliverability dashboard for your Amazon Pinpoint
account. When you enable the Deliverability dashboard, you gain access to
reputation, deliverability, and other metrics for the domains that you use
to send email using Amazon Pinpoint. You also gain the ability to perform
predictive inbox placement tests.
When you use the Deliverability dashboard, you pay a monthly subscription
charge, in addition to any other fees that you accrue by using Amazon
Pinpoint. For more information about the features and cost of a
Deliverability dashboard subscription, see [Amazon Pinpoint
Pricing](http://aws.amazon.com/pinpoint/pricing/).
"""
def put_deliverability_dashboard_option(client, input, options \\ []) do
path_ = "/v1/email/deliverability-dashboard"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Used to enable or disable DKIM authentication for an email identity.
"""
def put_email_identity_dkim_attributes(client, email_identity, input, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}/dkim"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Used to enable or disable feedback forwarding for an identity. This setting
determines what happens when an identity is used to send an email that
results in a bounce or complaint event.
When you enable feedback forwarding, Amazon Pinpoint sends you email
notifications when bounce or complaint events occur. Amazon Pinpoint sends
this notification to the address that you specified in the Return-Path
header of the original email.
When you disable feedback forwarding, Amazon Pinpoint sends notifications
through other mechanisms, such as by notifying an Amazon SNS topic. You're
required to have a method of tracking bounces and complaints. If you
haven't set up another mechanism for receiving bounce or complaint
notifications, Amazon Pinpoint sends an email notification when these
events occur (even if this setting is disabled).
"""
def put_email_identity_feedback_attributes(client, email_identity, input, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}/feedback"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Used to enable or disable the custom Mail-From domain configuration for an
email identity.
"""
def put_email_identity_mail_from_attributes(client, email_identity, input, options \\ []) do
path_ = "/v1/email/identities/#{URI.encode(email_identity)}/mail-from"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Sends an email message. You can use the Amazon Pinpoint Email API to send
two types of messages:
<ul> <li> **Simple** – A standard email message. When you create this type
of message, you specify the sender, the recipient, and the message body,
and Amazon Pinpoint assembles the message for you.
</li> <li> **Raw** – A raw, MIME-formatted email message. When you send
this type of email, you have to specify all of the message headers, as well
as the message body. You can use this message type to send messages that
contain attachments. The message that you specify has to be a valid MIME
message.
</li> </ul>
"""
def send_email(client, input, options \\ []) do
path_ = "/v1/email/outbound-emails"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Add one or more tags (keys and values) to a specified resource. A *tag* is
a label that you optionally define and associate with a resource in Amazon
Pinpoint. Tags can help you categorize and manage resources in different
ways, such as by purpose, owner, environment, or other criteria. A resource
can have as many as 50 tags.
Each tag consists of a required *tag key* and an associated *tag value*,
both of which you define. A tag key is a general label that acts as a
category for more specific tag values. A tag value acts as a descriptor
within a tag key.
"""
def tag_resource(client, input, options \\ []) do
path_ = "/v1/email/tags"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Remove one or more tags (keys and values) from a specified resource.
"""
def untag_resource(client, input, options \\ []) do
path_ = "/v1/email/tags"
headers = []
{query_, input} =
[
{"ResourceArn", "ResourceArn"},
{"TagKeys", "TagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Update the configuration of an event destination for a configuration set.
In Amazon Pinpoint, *events* include message sends, deliveries, opens,
clicks, bounces, and complaints. *Event destinations* are places that you
can send information about these events to. For example, you can send event
data to Amazon SNS to receive notifications when you receive bounces or
complaints, or you can use Amazon Kinesis Data Firehose to stream data to
Amazon S3 for long-term storage.
"""
def update_configuration_set_event_destination(client, configuration_set_name, event_destination_name, input, options \\ []) do
path_ = "/v1/email/configuration-sets/#{URI.encode(configuration_set_name)}/event-destinations/#{URI.encode(event_destination_name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
# Builds, signs, and dispatches one HTTP request against the Pinpoint
# Email API (which signs as service "ses"): resolves the host and URL,
# appends the query string, adds Host/Content-Type headers, JSON-encodes
# the input, signs with AWS Signature V4, then performs the request.
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
# The endpoint prefix is "email" but the SigV4 signing name is "ses".
client = %{client | service: "ses"}
host = build_host("email", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
# Executes the HTTP request and normalizes the response into
# {:ok, decoded_body | nil, raw_response} or {:error, reason}.
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
# The two stacked `when` guards act as an OR: accept 200/202/204 when no
# explicit success code was requested, otherwise require an exact match.
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
# An empty body is passed through as nil instead of being decoded.
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
# Resolves the request hostname. A "local" region targets the configured
# endpoint directly (or "localhost" when none is set); any other region
# yields "<endpoint_prefix>.<region>.<endpoint>".
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Assembles the full URL from the client's configured protocol and port
# plus the resolved host and request path.
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
# Appends the encoded query string to the URL; a request with no query
# parameters passes the URL through unchanged.
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
# Encodes a payload via the client's configured encoder — JSON by
# default, or query-string encoding when `format` is `:query`.
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
# Decodes a JSON response body via the client's configured decoder.
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
| 39.023457
| 146
| 0.708691
|
939422fbc0be594f5650810cbc34d9e13e141b6b
| 3,577
|
ex
|
Elixir
|
clients/books/lib/google_api/books/v1/model/volume_sale_info.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/books/lib/google_api/books/v1/model/volume_sale_info.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/books/lib/google_api/books/v1/model/volume_sale_info.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.V1.Model.VolumeSaleInfo do
@moduledoc """
Any information about a volume related to the eBookstore and/or purchaseability. This information can depend on the country where the request originates from (i.e. books may not be for sale in certain countries).
## Attributes
* `buyLink` (*type:* `String.t`, *default:* `nil`) - URL to purchase this volume on the Google Books site. (In LITE projection)
* `country` (*type:* `String.t`, *default:* `nil`) - The two-letter ISO_3166-1 country code for which this sale information is valid. (In LITE projection.)
* `isEbook` (*type:* `boolean()`, *default:* `nil`) - Whether or not this volume is an eBook (can be added to the My eBooks shelf).
* `listPrice` (*type:* `GoogleApi.Books.V1.Model.VolumeSaleInfoListPrice.t`, *default:* `nil`) - Suggested retail price. (In LITE projection.)
* `offers` (*type:* `list(GoogleApi.Books.V1.Model.VolumeSaleInfoOffers.t)`, *default:* `nil`) - Offers available for this volume (sales and rentals).
* `onSaleDate` (*type:* `DateTime.t`, *default:* `nil`) - The date on which this book is available for sale.
* `retailPrice` (*type:* `GoogleApi.Books.V1.Model.VolumeSaleInfoRetailPrice.t`, *default:* `nil`) - The actual selling price of the book. This is the same as the suggested retail or list price unless there are offers or discounts on this volume. (In LITE projection.)
* `saleability` (*type:* `String.t`, *default:* `nil`) - Whether or not this book is available for sale or offered for free in the Google eBookstore for the country listed above. Possible values are FOR_SALE, FOR_RENTAL_ONLY, FOR_SALE_AND_RENTAL, FREE, NOT_FOR_SALE, or FOR_PREORDER.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:buyLink => String.t(),
:country => String.t(),
:isEbook => boolean(),
:listPrice => GoogleApi.Books.V1.Model.VolumeSaleInfoListPrice.t(),
:offers => list(GoogleApi.Books.V1.Model.VolumeSaleInfoOffers.t()),
:onSaleDate => DateTime.t(),
:retailPrice => GoogleApi.Books.V1.Model.VolumeSaleInfoRetailPrice.t(),
:saleability => String.t()
}
field(:buyLink)
field(:country)
field(:isEbook)
field(:listPrice, as: GoogleApi.Books.V1.Model.VolumeSaleInfoListPrice)
field(:offers, as: GoogleApi.Books.V1.Model.VolumeSaleInfoOffers, type: :list)
field(:onSaleDate, as: DateTime)
field(:retailPrice, as: GoogleApi.Books.V1.Model.VolumeSaleInfoRetailPrice)
field(:saleability)
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.VolumeSaleInfo do
def decode(value, options) do
GoogleApi.Books.V1.Model.VolumeSaleInfo.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.VolumeSaleInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 52.602941
| 287
| 0.719038
|
93942cf18071c41a269b4eb91659a7bba4491266
| 1,319
|
ex
|
Elixir
|
apps/discovery_api/lib/discovery_api/schemas/visualizations.ex
|
SmartColumbusOS/smartcitiesdata
|
c8553d34631c822b034945eebf396994bf1001ff
|
[
"Apache-2.0"
] | 1
|
2021-04-05T19:17:18.000Z
|
2021-04-05T19:17:18.000Z
|
apps/discovery_api/lib/discovery_api/schemas/visualizations.ex
|
AWHServiceAccount/smartcitiesdata
|
6957afac12809288640b6ba6b576c3016e6033d7
|
[
"Apache-2.0"
] | 11
|
2020-01-07T15:43:42.000Z
|
2020-12-22T15:23:25.000Z
|
apps/discovery_api/lib/discovery_api/schemas/visualizations.ex
|
SmartColumbusOS/smartcitiesdata
|
c8553d34631c822b034945eebf396994bf1001ff
|
[
"Apache-2.0"
] | null | null | null |
defmodule DiscoveryApi.Schemas.Visualizations do
  @moduledoc """
  Interface for reading and writing the Visualization schema.
  """
  import Ecto.Query, only: [from: 2]

  alias DiscoveryApi.Repo
  alias DiscoveryApi.Schemas.Visualizations.Visualization

  @doc "Returns every stored visualization."
  def list_visualizations do
    Repo.all(Visualization)
  end

  @doc "Inserts a new visualization built from the given attributes."
  def create_visualization(visualization_attributes) do
    changeset = Visualization.changeset(%Visualization{}, visualization_attributes)
    Repo.insert(changeset)
  end

  @doc "Fetches a visualization by public id, with its owner preloaded."
  def get_visualization_by_id(public_id) do
    found =
      Visualization
      |> Repo.get_by(public_id: public_id)
      |> Repo.preload(:owner)

    if is_nil(found) do
      {:error, "#{public_id} not found"}
    else
      {:ok, found}
    end
  end

  @doc "Lists all visualizations belonging to the given owner."
  def get_visualizations_by_owner_id(owner_id) do
    Repo.all(from(v in Visualization, where: v.owner_id == ^owner_id))
  end

  @doc """
  Applies `visualization_changes` to the visualization with the given id,
  but only when `user` is its owner. Crashes if the id does not exist
  (assertive match on `{:ok, _}`).
  """
  def update_visualization_by_id(id, visualization_changes, user) do
    {:ok, existing_visualization} = get_visualization_by_id(id)

    if user.id != existing_visualization.owner_id do
      {:error, "User does not have permission to update this visualization."}
    else
      existing_visualization
      |> Visualization.changeset_update(visualization_changes)
      |> Repo.update()
    end
  end
end
| 26.918367
| 84
| 0.722517
|
9394819cd655e25dfd1eb7f7b055b93d8b0714f8
| 9,683
|
ex
|
Elixir
|
lib/mix/tasks/hex.outdated.ex
|
hrzndhrn/hex
|
f74e2ed979e74130bdc4a6974660aa986333f33f
|
[
"Apache-2.0"
] | 824
|
2015-01-05T09:12:36.000Z
|
2022-03-28T12:02:29.000Z
|
lib/mix/tasks/hex.outdated.ex
|
hrzndhrn/hex
|
f74e2ed979e74130bdc4a6974660aa986333f33f
|
[
"Apache-2.0"
] | 737
|
2015-01-01T05:48:46.000Z
|
2022-03-29T12:56:12.000Z
|
lib/mix/tasks/hex.outdated.ex
|
hrzndhrn/hex
|
f74e2ed979e74130bdc4a6974660aa986333f33f
|
[
"Apache-2.0"
] | 220
|
2015-03-14T17:55:11.000Z
|
2022-03-23T22:17:07.000Z
|
defmodule Mix.Tasks.Hex.Outdated do
  use Mix.Task

  alias Hex.Registry.Server, as: Registry

  @shortdoc "Shows outdated Hex deps for the current project"

  @moduledoc """
  Shows all Hex dependencies that have newer versions in the registry.

      $ mix hex.outdated [APP]

  By default, it only shows top-level packages explicitly listed in the
  `mix.exs` file. All outdated packages can be displayed by using the `--all`
  command line option.

  By default, `hex.outdated` will exit with a non-zero exit code (1) if there are any
  outdated dependencies. You can override this to respect the requirements
  as specified in your `mix.exs` file, with the `--within-requirements` command line option,
  so it only exits with non-zero exit code if the update is possible.

  For example, if your version requirement is "~> 2.0" but the latest version is `3.0`,
  with `--within-requirements` it will exit successfully, but if the latest version
  is `2.8`, then `--within-requirements` will exit with non-zero exit code (1).

  One scenario this could be useful is to ensure you always have the latest
  version of your dependencies, except for major version bumps.

  If a dependency name is given all requirements on that dependency, from
  the entire dependency tree, are listed. This is useful if you are trying
  to figure why a package isn't updating when you run `mix deps.update`.

  Note that when this task determines if a package is updatable it only looks
  at the project's current set of dependency requirements and what version
  they are locked to. When `mix deps.update` is called multiple packages may
  be updated that in turn update their own dependencies, which may cause the
  package you want to update to not be able to update.

  ## Command line options

    * `--all` - shows all outdated packages, including children of packages defined in `mix.exs`
    * `--pre` - include pre-releases when checking for newer versions
    * `--within-requirements` - exit with non-zero code only if requirements specified in `mix.exs` is met.
  """
  @behaviour Hex.Mix.TaskDescription

  @switches [all: :boolean, pre: :boolean, within_requirements: :boolean]

  # Entry point: parses CLI flags, opens the registry, prefetches every locked
  # package, then dispatches to single/3 (one app) or all/2 (whole project).
  @impl true
  def run(args) do
    Hex.Mix.check_deps()
    Hex.start()
    {opts, args} = Hex.OptionParser.parse!(args, strict: @switches)
    Registry.open()

    lock = Mix.Dep.Lock.read()

    lock
    |> Hex.Mix.packages_from_lock()
    |> Hex.Registry.Server.prefetch()

    case args do
      [app] ->
        single(lock, app, opts)

      [] ->
        all(lock, opts)

      _ ->
        Mix.raise("""
        Invalid arguments, expected:

        mix hex.outdated [APP]
        """)
    end
  end

  @impl true
  def tasks() do
    [
      {"", "Shows outdated Hex deps for the current project"},
      {"[APP]", "Shows outdated Hex deps for the given dependency"}
    ]
  end

  # Reports the status of one dependency: prints whether a newer version
  # exists, then a table of every requirement (from mix.exs and the lock)
  # and whether each matches the latest version. Exits 1 when outdated.
  defp single(lock, app, opts) do
    app = String.to_atom(app)
    deps = Hex.Mix.top_level_deps()

    {repo, package, current} =
      case Hex.Utils.lock(lock[app]) do
        %{repo: repo, name: package, version: version} ->
          {repo, package, version}

        nil ->
          Mix.raise("Dependency #{app} not locked as a Hex package")
      end

    latest = latest_version(repo, package, current, opts[:pre])
    outdated? = Hex.Version.compare(current, latest) == :lt

    lock_requirements = get_requirements_from_lock(app, lock)
    deps_requirements = get_requirements_from_deps(app, deps)
    requirements = deps_requirements ++ lock_requirements

    if outdated? do
      [
        "There is newer version of the dependency available ",
        [:bright, latest, " > ", current, :reset, "!"]
      ]
      |> IO.ANSI.format_fragment()
      |> Hex.Shell.info()
    else
      ["Current version ", :bright, current, :reset, " of dependency is up to date!"]
      |> IO.ANSI.format_fragment()
      |> Hex.Shell.info()
    end

    header = ["Source", "Requirement", "Up-to-date"]
    values = Enum.map(requirements, &format_single_row(&1, latest))

    Hex.Shell.info("")
    Mix.Tasks.Hex.print_table(header, values)

    message = "Up-to-date indicates if the requirement matches the latest version."
    Hex.Shell.info(["\n", message])

    if outdated?, do: Mix.Tasks.Hex.set_exit_code(1)
  end

  # Collects [source, requirement] pairs for `app` from every entry in the
  # lock file (i.e. requirements imposed by other locked packages).
  defp get_requirements_from_lock(app, lock) do
    Enum.flat_map(lock, fn {source, lock} ->
      case Hex.Utils.lock(lock) do
        %{deps: nil} ->
          []

        %{deps: deps} ->
          Enum.flat_map(deps, fn {dep_app, req, _opts} ->
            if app == dep_app, do: [[Atom.to_string(source), req]], else: []
          end)

        nil ->
          []
      end
    end)
  end

  # Collects [source, requirement] pairs for `app` from the project's own
  # top-level deps (the source column points at the declaring mix.exs).
  defp get_requirements_from_deps(app, deps) do
    # TODO: Path to umbrella child's mix.exs
    case Map.fetch(deps, app) do
      {:ok, deps} ->
        Enum.map(deps, fn {src, req, _opts} -> [Path.join([src, "mix.exs"]), req] end)

      :error ->
        []
    end
  end

  # Renders one table row for single/3; red when the requirement would
  # reject the latest version, green otherwise.
  defp format_single_row([source, req], latest) do
    req_matches? = version_match?(latest, req)
    req_color = if req_matches?, do: :green, else: :red
    up_to_date? = if req_matches?, do: "Yes", else: "No"
    [[:bright, source], [req_color, req || ""], [req_color, up_to_date?]]
  end

  # Reports the status of every dependency (top-level, or all with --all)
  # as a table, plus a diff.hex.pm link for updatable packages. The cond
  # clause order implements --within-requirements: with the flag, exit 1
  # only when an update is actually possible under current requirements.
  defp all(lock, opts) do
    deps = Hex.Mix.top_level_deps()
    dep_names = if opts[:all], do: Map.keys(lock), else: Map.keys(deps)

    versions =
      dep_names
      |> Enum.sort()
      |> get_versions(deps, lock, opts[:pre])

    values = Enum.map(versions, &format_all_row/1)
    diff_links = Enum.map(versions, &build_diff_link/1) |> Enum.reject(&is_nil/1)

    if Enum.empty?(values) do
      Hex.Shell.info("No hex dependencies")
    else
      header = ["Dependency", "Current", "Latest", "Status"]
      Mix.Tasks.Hex.print_table(header, values)

      base_message = "Run `mix hex.outdated APP` to see requirements for a specific dependency."
      diff_message = maybe_diff_message(diff_links)
      Hex.Shell.info(["\n", base_message, diff_message])

      any_outdated? = any_outdated?(versions)
      req_met? = any_req_matches?(versions)

      cond do
        any_outdated? && opts[:within_requirements] && req_met? ->
          Mix.Tasks.Hex.set_exit_code(1)

        any_outdated? && opts[:within_requirements] && not req_met? ->
          nil

        any_outdated? ->
          Mix.Tasks.Hex.set_exit_code(1)

        true ->
          nil
      end
    end
  end

  # Builds [name, locked_version, latest_version, requirements] rows for
  # each Hex-locked dependency; non-Hex entries are dropped.
  defp get_versions(dep_names, deps, lock, pre?) do
    Enum.flat_map(dep_names, fn name ->
      case Hex.Utils.lock(lock[name]) do
        %{repo: repo, name: package, version: lock_version} ->
          latest_version = latest_version(repo, package, lock_version, pre?)
          lock_requirements = get_requirements_from_lock(name, lock)
          deps_requirements = get_requirements_from_deps(name, deps)

          requirements =
            (deps_requirements ++ lock_requirements)
            |> Enum.map(fn [_, req_version] -> req_version end)

          [[Atom.to_string(name), lock_version, latest_version, requirements]]

        _ ->
          []
      end
    end)
  end

  # Highest registry version for the package; pre-releases are considered
  # when requested or when the current version is itself a pre-release.
  # Falls back to `default` if the registry has no versions.
  defp latest_version(repo, package, default, pre?) do
    {:ok, default} = Hex.Version.parse(default)
    pre? = pre? || default.pre != []

    latest =
      Registry.versions(repo, package)
      |> highest_version(pre?)

    latest || default
  end

  # Registry versions come sorted ascending, so the last one (after
  # optionally filtering out pre-releases) is the highest.
  defp highest_version(versions, pre?) do
    versions =
      if pre? do
        versions
      else
        Enum.filter(versions, fn version ->
          {:ok, version} = Hex.Version.parse(version)
          version.pre == []
        end)
      end

    List.last(versions)
  end

  # Renders one table row for all/2 with a colored status column.
  defp format_all_row([package, lock, latest, requirements]) do
    outdated? = Hex.Version.compare(lock, latest) == :lt
    latest_color = if outdated?, do: :red, else: :green

    req_matches? = req_matches?(requirements, latest)

    status =
      case {outdated?, req_matches?} do
        {true, true} -> [:yellow, "Update possible"]
        {true, false} -> [:red, "Update not possible"]
        {false, _} -> [:green, "Up-to-date"]
      end

    [
      [:bright, package],
      lock,
      [latest_color, latest],
      status
    ]
  end

  # Query fragment for diff.hex.pm — only for packages that are outdated
  # AND updatable under every requirement; nil otherwise.
  defp build_diff_link([package, lock, latest, requirements]) do
    outdated? = Hex.Version.compare(lock, latest) == :lt
    req_matches? = Enum.all?(requirements, &version_match?(latest, &1))

    case {outdated?, req_matches?} do
      {true, true} -> "diffs[]=#{package}:#{lock}:#{latest}"
      {_, _} -> nil
    end
  end

  # nil requirement (e.g. dep declared without a version) matches anything.
  defp version_match?(_version, nil), do: true
  defp version_match?(version, req), do: Hex.Version.match?(version, req)

  defp any_outdated?(versions) do
    Enum.any?(versions, fn [_package, lock, latest, _requirements] ->
      Hex.Version.compare(lock, latest) == :lt
    end)
  end

  defp maybe_diff_message([]), do: ""

  defp maybe_diff_message(diff_links) do
    "\n\nTo view the diffs in each available update, visit:\n" <>
      diff_link(diff_links)
  end

  # Builds the diff.hex.pm URL, shortening it unless short URLs are disabled.
  defp diff_link(diff_links) do
    long_url = "https://diff.hex.pm/diffs?" <> Enum.join(diff_links, "&")

    if Hex.State.fetch!(:no_short_urls) do
      long_url
    else
      maybe_get_short_link(long_url)
    end
  end

  # Best-effort URL shortening; falls back to the long URL on API failure.
  defp maybe_get_short_link(long_url) do
    case Hex.API.ShortURL.create(long_url) do
      :error -> long_url
      {:ok, short_url} -> short_url
    end
  end

  # True when at least one dependency's requirements all admit its latest
  # version (i.e. an update is possible somewhere).
  defp any_req_matches?(versions) do
    Enum.any?(versions, fn [_package, _lock, latest, requirements] ->
      req_matches?(requirements, latest)
    end)
  end

  defp req_matches?(requirements, latest) do
    Enum.all?(requirements, &version_match?(latest, &1))
  end
end
| 29.793846
| 107
| 0.638748
|
9394b87d450dd845c22b234febd74739639aa09a
| 903
|
exs
|
Elixir
|
test/defr/nested_call_test.exs
|
jechol/defr
|
1d4a319bb06f38845306691d9bc8b8711d726cd0
|
[
"MIT"
] | 3
|
2021-07-30T17:42:13.000Z
|
2021-09-03T09:38:54.000Z
|
test/defr/nested_call_test.exs
|
trevorite/witchcraft_helpers
|
146312441c21b85423265cea6fc9cef50fc6423b
|
[
"MIT"
] | 1
|
2021-12-16T02:40:35.000Z
|
2021-12-16T03:21:21.000Z
|
test/defr/nested_call_test.exs
|
trevorite/witchcraft_helpers
|
146312441c21b85423265cea6fc9cef50fc6423b
|
[
"MIT"
] | null | null | null |
defmodule Defr.NestedCallTest do
  use ExUnit.Case, async: false
  use Defr

  alias Algae.Reader

  # Exercises nested defr calls: each `defr` body is rewritten by the Defr
  # macro into a Reader computation, so `inject()` marks a call as mockable
  # and `run()` evaluates a nested Reader within the same environment.
  defmodule Target do
    use Defr
    import Enum, only: [at: 2]

    defr top(list) do
      list |> List.flatten() |> inject() |> middle() |> run()
    end

    defr middle(list) do
      list |> bottom() |> inject() |> run()
    end

    # NOTE(review): inside `defrp`, `<-` is a monadic bind provided by the
    # macro — it pulls `pos` out of the Reader environment via ask(); it is
    # not a plain match operator. Confirm against the Defr docs.
    defrp bottom(list) do
      %{pos: pos} <- ask()
      at(list, pos) |> inject()
    end
  end

  test "inject" do
    # Unmocked run: flattens [[0], 1] and takes the element at env pos 1.
    assert 1 == Target.top([[0], 1]) |> Reader.run(%{pos: 1})

    # Mocking List.flatten/1 replaces the flattened list before indexing.
    assert 20 ==
             Target.top([[0], 1]) |> Reader.run(mock(%{&List.flatten/1 => [10, 20, 30], pos: 1}))

    # Imported functions (Enum.at/2 via `import Enum`) are mockable too.
    assert :imported_func ==
             Target.top([[0], 1]) |> Reader.run(mock(%{&Enum.at/2 => :imported_func, pos: 1}))

    # Private defrp functions can be mocked by their fully-qualified capture.
    assert :private_func ==
             Target.top([[0], 1])
             |> Reader.run(mock(%{&Target.bottom/1 => :private_func}))
  end
end
| 23.153846
| 97
| 0.535991
|
9394d65484e92007bc72580a587181d27914d4e1
| 233
|
exs
|
Elixir
|
lib/nap/util_test.exs
|
mindreframer/nap
|
ea1951c830061b6aa880a6ed00a80b7e8870c5fa
|
[
"MIT"
] | null | null | null |
lib/nap/util_test.exs
|
mindreframer/nap
|
ea1951c830061b6aa880a6ed00a80b7e8870c5fa
|
[
"MIT"
] | null | null | null |
lib/nap/util_test.exs
|
mindreframer/nap
|
ea1951c830061b6aa880a6ed00a80b7e8870c5fa
|
[
"MIT"
] | null | null | null |
defmodule Nap.UtilTest do
  use ExUnit.Case

  alias Nap.Util

  describe "filepath_to_nappath" do
    # A test file's .nap twin lives in a sibling __naps__ directory.
    test "works" do
      expected = "/a/b/c/__naps__/some_test.nap"

      assert Util.filepath_to_nappath("/a/b/c/some_test.exs") == expected
    end
  end
end
| 21.181818
| 96
| 0.699571
|
9394dc524a13e8cab3f54314be296cb88a1c62df
| 459
|
exs
|
Elixir
|
config/config.exs
|
esmaeilpour/redex
|
c2c6e29e3dec0df265fdcd9f24cd2471c8615ee7
|
[
"Apache-2.0"
] | null | null | null |
config/config.exs
|
esmaeilpour/redex
|
c2c6e29e3dec0df265fdcd9f24cd2471c8615ee7
|
[
"Apache-2.0"
] | null | null | null |
config/config.exs
|
esmaeilpour/redex
|
c2c6e29e3dec0df265fdcd9f24cd2471c8615ee7
|
[
"Apache-2.0"
] | null | null | null |
import Config

# Core Redex settings; env vars override the defaults (port 6379, quorum 1).
# NOTE(review): these are read at compile/config time, not at runtime.
config :redex,
  port: System.get_env("REDEX_PORT", "6379") |> String.to_integer(),
  quorum: System.get_env("REDEX_QUORUM", "1") |> String.to_integer()

# libcluster node discovery via UDP multicast gossip on port 45892.
config :libcluster, :topologies,
  gossip: [
    strategy: Cluster.Strategy.Gossip,
    config: [
      port: 45892,
      if_addr: "0.0.0.0",
      multicast_addr: "230.1.1.251",
      multicast_ttl: 1,
      secret: "REDEX"
    ]
  ]

# Test-only overrides live in config/test.exs.
if Mix.env() == :test, do: import_config("test.exs")
| 22.95
| 68
| 0.623094
|
939510f0a3d2074059c33d51ef2a1daad294f211
| 2,094
|
exs
|
Elixir
|
apps/ewallet/test/ewallet/web/v1/serializers/paginator_serializer_test.exs
|
jimpeebles/ewallet
|
ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405
|
[
"Apache-2.0"
] | null | null | null |
apps/ewallet/test/ewallet/web/v1/serializers/paginator_serializer_test.exs
|
jimpeebles/ewallet
|
ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405
|
[
"Apache-2.0"
] | null | null | null |
apps/ewallet/test/ewallet/web/v1/serializers/paginator_serializer_test.exs
|
jimpeebles/ewallet
|
ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Web.V1.PaginatorSerializerTest do
  use EWallet.Web.SerializerCase, :v1

  alias EWallet.Web.Paginator
  alias EWallet.Web.V1.PaginatorSerializer

  # Shared pagination fixture used by both serialize/1 and serialize/2 tests.
  @pagination %{
    current_page: 2,
    per_page: 5,
    is_first_page: false,
    is_last_page: true
  }

  describe "PaginatorSerializer.serialize/1" do
    test "serializes the given paginator into a list object" do
      paginator = %Paginator{data: "dummy_data", pagination: @pagination}

      assert PaginatorSerializer.serialize(paginator) == %{
               object: "list",
               data: "dummy_data",
               pagination: @pagination
             }
    end
  end

  describe "PaginatorSerializer.serialize/2" do
    test "maps the data before serializing into a list object" do
      paginator = %Paginator{data: ["dummy", "another_dummy"], pagination: @pagination}

      result = PaginatorSerializer.serialize(paginator, fn _ -> "replaced_data" end)

      assert result == %{
               object: "list",
               data: ["replaced_data", "replaced_data"],
               pagination: @pagination
             }
    end
  end
end
| 27.92
| 84
| 0.628462
|
93951dacd38eb7566902b4a9fd57be3659683692
| 10,981
|
ex
|
Elixir
|
lib/credo/code/strings.ex
|
hrzndhrn/credo
|
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
|
[
"MIT"
] | 4,590
|
2015-09-28T06:01:43.000Z
|
2022-03-29T08:48:57.000Z
|
lib/credo/code/strings.ex
|
hrzndhrn/credo
|
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
|
[
"MIT"
] | 890
|
2015-11-16T21:07:07.000Z
|
2022-03-29T08:52:07.000Z
|
lib/credo/code/strings.ex
|
hrzndhrn/credo
|
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
|
[
"MIT"
] | 479
|
2015-11-17T19:42:40.000Z
|
2022-03-29T00:09:21.000Z
|
defmodule Credo.Code.Strings do
  @moduledoc """
  This module lets you strip strings from source code.
  """

  alias Credo.Code.InterpolationHelper
  alias Credo.SourceFile

  # Delimiter pairs that may enclose ~s/~S string sigils.
  string_sigil_delimiters = [
    {"(", ")"},
    {"[", "]"},
    {"{", "}"},
    {"<", ">"},
    {"|", "|"},
    {"\"", "\""},
    {"'", "'"},
    {"/", "/"}
  ]

  heredocs_sigil_delimiters = [
    {"'''", "'''"},
    {~s("""), ~s(""")}
  ]

  # {"~s(", ")"}, {"~S(", ")"}, ... — every string-sigil opener with its closer.
  all_string_sigils =
    Enum.flat_map(string_sigil_delimiters, fn {b, e} ->
      [{"~s#{b}", e}, {"~S#{b}", e}]
    end)

  all_string_sigil_ends = Enum.map(string_sigil_delimiters, &elem(&1, 1))

  all_heredocs_sigils =
    Enum.flat_map(heredocs_sigil_delimiters, fn {b, e} ->
      [{"~s#{b}", e}, {"~S#{b}", e}]
    end)

  # NOTE(review): "s" is intentionally absent here — ~s/~S are handled by the
  # string-sigil clauses above, not the generic removable-sigil clauses.
  alphabet = ~w(a b c d e f g h i j k l m n o p q r t u v w x y z)

  sigil_delimiters = [
    {"(", ")"},
    {"[", "]"},
    {"{", "}"},
    {"<", ">"},
    {"|", "|"},
    {"/", "/"},
    {"\"\"\"", "\"\"\""},
    {"\"", "\""},
    {"'", "'"}
  ]

  all_sigil_chars =
    Enum.flat_map(alphabet, fn a ->
      [a, String.upcase(a)]
    end)

  all_sigil_starts = Enum.map(all_sigil_chars, fn c -> "~#{c}" end)

  removable_sigil_ends = Enum.map(sigil_delimiters, &elem(&1, 1))

  # Every "~X<open>" / closer pair for non-string sigils (~r, ~w, ...).
  removable_sigils =
    sigil_delimiters
    |> Enum.flat_map(fn {b, e} ->
      Enum.flat_map(all_sigil_starts, fn start ->
        [{"#{start}#{b}", e}, {"#{start}#{b}", e}]
      end)
    end)
    |> Enum.uniq()

  @doc """
  Replaces all characters inside string literals and string sigils
  with the equivalent amount of white-space.
  """
  def replace_with_spaces(
        source_file,
        replacement \\ " ",
        interpolation_replacement \\ " ",
        filename \\ "nofilename"
      ) do
    {source, filename} = SourceFile.source_and_filename(source_file, filename)

    source
    |> InterpolationHelper.replace_interpolations(interpolation_replacement, filename)
    |> parse_code("", replacement)
  end

  # parse_code/3 is the main state: scans source, accumulating output in `acc`,
  # and hands off to a specialized sub-parser when a string-like opener appears.
  # The `for` blocks below generate one clause per known sigil opener at
  # compile time.
  defp parse_code("", acc, _replacement) do
    acc
  end

  for {sigil_start, sigil_end} <- removable_sigils do
    defp parse_code(<<unquote(sigil_start)::utf8, t::binary>>, acc, replacement) do
      parse_removable_sigil(
        t,
        acc <> unquote(sigil_start),
        unquote(sigil_end),
        replacement
      )
    end
  end

  for {sigil_start, sigil_end} <- all_heredocs_sigils do
    defp parse_code(<<unquote(sigil_start)::utf8, t::binary>>, acc, replacement) do
      parse_heredoc(
        t,
        acc <> unquote(sigil_start),
        replacement,
        unquote(sigil_end)
      )
    end
  end

  defp parse_code(<<"\"\"\""::utf8, t::binary>>, acc, replacement) do
    parse_heredoc(t, acc <> ~s("""), replacement, ~s("""))
  end

  defp parse_code(<<"\'\'\'"::utf8, t::binary>>, acc, replacement) do
    parse_heredoc(t, acc <> ~s('''), replacement, ~s('''))
  end

  for {sigil_start, sigil_end} <- all_string_sigils do
    defp parse_code(<<unquote(sigil_start)::utf8, t::binary>>, acc, replacement) do
      parse_string_sigil(
        t,
        acc <> unquote(sigil_start),
        unquote(sigil_end),
        replacement
      )
    end
  end

  # Escaped quotes outside strings are copied through so they cannot be
  # mistaken for string openers.
  defp parse_code(<<"\\\""::utf8, t::binary>>, acc, replacement) do
    parse_code(t, acc <> "\\\"", replacement)
  end

  defp parse_code(<<"\\\'"::utf8, t::binary>>, acc, replacement) do
    parse_code(t, acc <> "\\\'", replacement)
  end

  # ?' and ?" are character literals, not string delimiters.
  defp parse_code(<<"?'"::utf8, t::binary>>, acc, replacement) do
    parse_code(t, acc <> "?'", replacement)
  end

  defp parse_code(<<"'"::utf8, t::binary>>, acc, replacement) do
    parse_charlist(t, acc <> "'", replacement)
  end

  defp parse_code(<<"?\""::utf8, t::binary>>, acc, replacement) do
    parse_code(t, acc <> "?\"", replacement)
  end

  defp parse_code(<<"#"::utf8, t::binary>>, acc, replacement) do
    parse_comment(t, acc <> "#", replacement)
  end

  defp parse_code(<<"\""::utf8, t::binary>>, acc, replacement) do
    parse_string_literal(t, acc <> "\"", replacement)
  end

  defp parse_code(<<h::utf8, t::binary>>, acc, replacement) do
    parse_code(t, acc <> <<h::utf8>>, replacement)
  end

  # Fallback for bytes that are not valid UTF-8 codepoints.
  defp parse_code(str, acc, replacement) when is_binary(str) do
    {h, t} = String.next_codepoint(str)

    parse_code(t, acc <> h, replacement)
  end

  #
  # Charlists
  #
  # Charlist contents are kept verbatim (only double-quoted strings and
  # string sigils are replaced).

  defp parse_charlist("", acc, _replacement) do
    acc
  end

  defp parse_charlist(<<"\\\\"::utf8, t::binary>>, acc, replacement) do
    parse_charlist(t, acc <> "\\\\", replacement)
  end

  defp parse_charlist(<<"\\\'"::utf8, t::binary>>, acc, replacement) do
    parse_charlist(t, acc <> "\\\'", replacement)
  end

  defp parse_charlist(<<"\'"::utf8, t::binary>>, acc, replacement) do
    parse_code(t, acc <> "'", replacement)
  end

  defp parse_charlist(<<"\n"::utf8, t::binary>>, acc, replacement) do
    parse_charlist(t, acc <> "\n", replacement)
  end

  defp parse_charlist(str, acc, replacement) when is_binary(str) do
    {h, t} = String.next_codepoint(str)

    parse_charlist(t, acc <> h, replacement)
  end

  #
  # Comments
  #
  # Comment text is kept verbatim until end-of-line.

  defp parse_comment("", acc, _replacement) do
    acc
  end

  defp parse_comment(<<"\n"::utf8, t::binary>>, acc, replacement) do
    parse_code(t, acc <> "\n", replacement)
  end

  defp parse_comment(str, acc, replacement) when is_binary(str) do
    {h, t} = String.next_codepoint(str)

    parse_comment(t, acc <> h, replacement)
  end

  #
  # String Literals
  #
  # Inside "..." every character except newlines is replaced; escape pairs
  # are dropped entirely (each consumes two source characters).

  defp parse_string_literal("", acc, _replacement) do
    acc
  end

  defp parse_string_literal(<<"\\\\"::utf8, t::binary>>, acc, replacement) do
    parse_string_literal(t, acc, replacement)
  end

  defp parse_string_literal(<<"\\\""::utf8, t::binary>>, acc, replacement) do
    parse_string_literal(t, acc, replacement)
  end

  defp parse_string_literal(<<"\""::utf8, t::binary>>, acc, replacement) do
    parse_code(t, acc <> ~s("), replacement)
  end

  defp parse_string_literal(<<"\n"::utf8, t::binary>>, acc, replacement) do
    parse_string_literal(t, acc <> "\n", replacement)
  end

  defp parse_string_literal(<<_::utf8, t::binary>>, acc, replacement) do
    parse_string_literal(t, acc <> replacement, replacement)
  end

  #
  # Non-String Sigils
  #
  # Contents of non-string sigils (~r, ~w, ...) are kept verbatim until the
  # matching closer; one clause set is generated per possible closer.

  for sigil_end <- removable_sigil_ends do
    defp parse_removable_sigil("", acc, unquote(sigil_end), _replacement) do
      acc
    end

    defp parse_removable_sigil(
           <<"\\"::utf8, s::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      {h, t} = String.next_codepoint(s)

      parse_removable_sigil(t, acc <> "\\" <> h, unquote(sigil_end), replacement)
    end

    defp parse_removable_sigil(
           # \\
           <<"\\\\"::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_removable_sigil(t, acc <> "\\\\", unquote(sigil_end), replacement)
    end

    if sigil_end != "\"" do
      defp parse_removable_sigil(
             <<"\""::utf8, t::binary>>,
             acc,
             unquote(sigil_end),
             replacement
           ) do
        parse_removable_sigil(t, acc <> "\"", unquote(sigil_end), replacement)
      end
    end

    defp parse_removable_sigil(
           <<unquote("\\#{sigil_end}")::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_removable_sigil(
        t,
        acc <> unquote("\\#{sigil_end}"),
        unquote(sigil_end),
        replacement
      )
    end

    defp parse_removable_sigil(
           <<unquote(sigil_end)::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_code(t, acc <> unquote(sigil_end), replacement)
    end

    defp parse_removable_sigil(
           <<"\n"::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_removable_sigil(t, acc <> "\n", unquote(sigil_end), replacement)
    end

    defp parse_removable_sigil(
           str,
           acc,
           unquote(sigil_end),
           replacement
         )
         when is_binary(str) do
      {h, t} = String.next_codepoint(str)

      parse_removable_sigil(t, acc <> h, unquote(sigil_end), replacement)
    end
  end

  #
  # Sigils
  #
  # ~s/~S contents are replaced character-by-character (newlines preserved);
  # one clause set per possible closing delimiter.

  for sigil_end <- all_string_sigil_ends do
    defp parse_string_sigil("", acc, unquote(sigil_end), _replacement) do
      acc
    end

    defp parse_string_sigil(
           <<"\\\\"::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_string_sigil(t, acc <> replacement <> replacement, unquote(sigil_end), replacement)
    end

    defp parse_string_sigil(
           <<"\\\""::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_string_sigil(t, acc <> replacement <> replacement, unquote(sigil_end), replacement)
    end

    defp parse_string_sigil(
           <<unquote(sigil_end)::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_code(t, acc <> unquote(sigil_end), replacement)
    end

    defp parse_string_sigil(
           <<"\n"::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_string_sigil(t, acc <> "\n", unquote(sigil_end), replacement)
    end

    defp parse_string_sigil(
           <<_::utf8, t::binary>>,
           acc,
           unquote(sigil_end),
           replacement
         ) do
      parse_string_sigil(t, acc <> replacement, unquote(sigil_end), replacement)
    end
  end

  #
  # Heredocs
  #
  # Heredoc contents are replaced; on close, the trailing run of replacement
  # characters before the closing delimiter is trimmed via Regex.replace so
  # the closer keeps its original column.

  defp parse_heredoc(<<"\"\"\""::utf8, t::binary>>, acc, "" = replacement, "\"\"\"") do
    parse_code(t, acc, replacement)
  end

  defp parse_heredoc(<<"\"\"\""::utf8, t::binary>>, acc, replacement, "\"\"\"") do
    acc = Regex.replace(~r/([#{replacement}]+)(\"\"\")\z/m, acc <> "\"\"\"", "\"\"\"")

    parse_code(t, acc, replacement)
  end

  defp parse_heredoc(<<"\'\'\'"::utf8, t::binary>>, acc, "" = replacement, "\'\'\'") do
    parse_code(t, acc, replacement)
  end

  defp parse_heredoc(<<"\'\'\'"::utf8, t::binary>>, acc, replacement, "\'\'\'") do
    acc = Regex.replace(~r/([#{replacement}]+)(\'\'\')\z/m, acc <> "\'\'\'", "\'\'\'")

    parse_code(t, acc, replacement)
  end

  defp parse_heredoc("", acc, _replacement, _delimiter) do
    acc
  end

  defp parse_heredoc(<<"\\\\"::utf8, t::binary>>, acc, replacement, delimiter) do
    parse_heredoc(t, acc, replacement, delimiter)
  end

  defp parse_heredoc(<<"\\\""::utf8, t::binary>>, acc, replacement, delimiter) do
    parse_heredoc(t, acc, replacement, delimiter)
  end

  defp parse_heredoc(<<"\n"::utf8, t::binary>>, acc, replacement, delimiter) do
    parse_heredoc(t, acc <> "\n", replacement, delimiter)
  end

  defp parse_heredoc(<<_::utf8, t::binary>>, acc, replacement, delimiter) do
    parse_heredoc(t, acc <> replacement, replacement, delimiter)
  end
end
| 25.537209
| 95
| 0.570986
|
93951ee6c0433f3b2ac63e56f86e5a0beaf9e349
| 263
|
ex
|
Elixir
|
lib/dagger/instance.ex
|
DarkMarmot/avast
|
1c8294cba77b5b7ea9b98030c29d06ee723e3e77
|
[
"Apache-2.0"
] | null | null | null |
lib/dagger/instance.ex
|
DarkMarmot/avast
|
1c8294cba77b5b7ea9b98030c29d06ee723e3e77
|
[
"Apache-2.0"
] | null | null | null |
lib/dagger/instance.ex
|
DarkMarmot/avast
|
1c8294cba77b5b7ea9b98030c29d06ee723e3e77
|
[
"Apache-2.0"
] | null | null | null |
defmodule Dagger.Instance do
  @moduledoc """
  A lightweight identity record pairing an optional key with the node
  it was created on.
  """

  defstruct key: nil, node: nil

  @type t :: %__MODULE__{key: any(), node: atom()}

  @doc """
  Builds a new instance tagged with the current node.

  `key` defaults to `nil` when not supplied.
  """
  def new(key \\ nil) do
    %__MODULE__{key: key, node: Node.self()}
  end
end
| 14.611111
| 31
| 0.528517
|
93952bc665c31f4487c94f2d2eab6e880c3fe2de
| 3,663
|
ex
|
Elixir
|
implementations/elixir/ockam/ockam/lib/ockam/asymmetric_worker.ex
|
jared-s/ockam
|
a1d482550aeafbc2a6040a5efb3f5effc9974d51
|
[
"Apache-2.0"
] | null | null | null |
implementations/elixir/ockam/ockam/lib/ockam/asymmetric_worker.ex
|
jared-s/ockam
|
a1d482550aeafbc2a6040a5efb3f5effc9974d51
|
[
"Apache-2.0"
] | 12
|
2022-01-11T16:23:26.000Z
|
2022-03-23T20:16:37.000Z
|
implementations/elixir/ockam/ockam/lib/ockam/asymmetric_worker.ex
|
jared-s/ockam
|
a1d482550aeafbc2a6040a5efb3f5effc9974d51
|
[
"Apache-2.0"
] | null | null | null |
defmodule Ockam.AsymmetricWorker do
  @moduledoc """
  Ockam.Worker with two addresses.

  On start registers an additional `inner_address`

  Usage:
  `use Ockam.AsymmetricWorker`

  Callbacks:
  `inner_setup/2` - same as `Ockam.Worker.setup/2`, but `state` would have already registered `inner_address`
  `handle_inner_message/2` - handle message received on `inner_address`
  `handle_outer_message/2` - handle message received on `address`
  `handle_other_message/2` - handle message received on a different address, other than `inner_address` or `address`
  """

  @callback inner_setup(Keyword.t(), map()) :: {:ok, state :: map()} | {:error, reason :: any()}
  @callback handle_inner_message(message :: any(), state :: map()) ::
              {:ok, state :: map()}
              | {:error, reason :: any()}
              | {:stop, reason :: any(), state :: map()}
  @callback handle_outer_message(message :: any(), state :: map()) ::
              {:ok, state :: map()}
              | {:error, reason :: any()}
              | {:stop, reason :: any(), state :: map()}
  @callback handle_other_message(message :: any(), state :: map()) ::
              {:ok, state :: map()}
              | {:error, reason :: any()}
              | {:stop, reason :: any(), state :: map()}

  ## TODO: remove that after refactoring Ockam.Worker to not call handle_message for non-messages
  @callback handle_non_message(message :: any(), state :: map()) ::
              {:ok, state :: map()}
              | {:error, reason :: any()}
              | {:stop, reason :: any(), state :: map()}

  # Injects the two-address worker plumbing into the using module:
  # setup/1 registers the inner address, handle_message/2 dispatches on
  # which of the worker's addresses the message arrived at.
  defmacro __using__(_options) do
    quote do
      use Ockam.Worker

      alias Ockam.Message

      require Logger

      @behaviour Ockam.AsymmetricWorker

      # Registers `inner_address` before delegating to the user's inner_setup/2.
      @impl true
      def setup(options, state) do
        with {:ok, inner_address} <- register_inner_address(options) do
          inner_setup(options, Map.put(state, :inner_address, inner_address))
        end
      end

      # Routes each message to the matching callback based on the address
      # it was delivered to (inner, outer, other, or not a message at all).
      @impl true
      def handle_message(message, state) do
        case message_type(message, state) do
          :inner ->
            handle_inner_message(message, state)

          :outer ->
            handle_outer_message(message, state)

          :other ->
            handle_other_message(message, state)

          :non_message ->
            handle_non_message(message, state)
        end
      end

      # Uses the explicitly configured :inner_address or registers a random one.
      @doc false
      def register_inner_address(options) do
        case Keyword.get(options, :inner_address) do
          nil ->
            Ockam.Node.register_random_address()

          inner_address ->
            case Ockam.Node.register_address(inner_address) do
              :yes -> {:ok, inner_address}
              :no -> {:error, :inner_address_already_taken}
            end
        end
      end

      # Classifies a message by comparing the first hop of its onward route
      # against this worker's outer and inner addresses.
      @doc false
      def message_type(%{onward_route: _} = message, state) do
        [me | _] = Message.onward_route(message)
        outer_address = state.address
        inner_address = state.inner_address

        case me do
          ^outer_address ->
            :outer

          ^inner_address ->
            :inner

          _other ->
            :other
        end
      end

      def message_type(_message, _state) do
        :non_message
      end

      # Default implementations; all three may be overridden by the using module.
      def inner_setup(options, state), do: {:ok, state}

      def handle_other_message(message, state) do
        {:error, {:unknown_self_address, message, state}}
      end

      def handle_non_message(non_message, state) do
        {:error, {:not_ockam_message, non_message, state}}
      end

      defoverridable inner_setup: 2, handle_other_message: 2, handle_non_message: 2
    end
  end
end
| 29.304
| 116
| 0.583675
|
939532683af4abe3e7d1fef9e82d3d7edd859bbb
| 3,551
|
ex
|
Elixir
|
lib/hexpm/repository/requirement.ex
|
pragmaticivan/hexpm
|
7845d1baaf14e8811df00db550b59e51ac9675c6
|
[
"Apache-2.0"
] | null | null | null |
lib/hexpm/repository/requirement.ex
|
pragmaticivan/hexpm
|
7845d1baaf14e8811df00db550b59e51ac9675c6
|
[
"Apache-2.0"
] | null | null | null |
lib/hexpm/repository/requirement.ex
|
pragmaticivan/hexpm
|
7845d1baaf14e8811df00db550b59e51ac9675c6
|
[
"Apache-2.0"
] | null | null | null |
defmodule Hexpm.Repository.Requirement do
  use Hexpm.Web, :schema
  require Logger

  @derive {Hexpm.Web.Stale, last_modified: nil}

  schema "requirements" do
    field :app, :string
    field :requirement, :string
    field :optional, :boolean, default: false
    # The repository and name of the dependency used to find the package
    field :repository, :string, virtual: true
    field :name, :string, virtual: true
    belongs_to :release, Release
    belongs_to :dependency, Package
  end

  @doc """
  Builds a changeset for a single requirement.

  `dependencies` is a map from `{repository_name, package_name}` to the
  preloaded `Package` (see `preload_dependencies/1`); the matching package,
  or `nil`, is put under the `:dependency` association and then required,
  so a missing package surfaces as a validation error.
  """
  def changeset(requirement, params, dependencies, release_changeset, package) do
    # Use an atom field list (~w(...)a): string field lists to cast/3 are
    # deprecated in Ecto and were inconsistent with validate_required below.
    cast(requirement, params, ~w(repository name app requirement optional)a)
    |> put_assoc(:dependency, dependencies[{params["repository"] || "hexpm", params["name"]}])
    |> validate_required(~w(name app requirement optional)a)
    |> validate_required(
      :dependency,
      message: "package does not exist in repository #{inspect(package.repository.name)}"
    )
    |> validate_requirement(:requirement, pre: version_pre(release_changeset) != [])
    |> validate_repository(:repository, repository: package.repository)
  end

  @doc """
  Casts and validates all requirements on `release_changeset`, then runs the
  dependency resolver if (and only if) the changeset is otherwise valid.
  """
  def build_all(release_changeset, package) do
    dependencies = preload_dependencies(release_changeset.params["requirements"])

    release_changeset =
      cast_assoc(
        release_changeset,
        :requirements,
        with: &changeset(&1, &2, dependencies, release_changeset, package)
      )

    if release_changeset.valid? do
      requirements =
        get_change(release_changeset, :requirements, [])
        |> Enum.map(&apply_changes/1)

      validate_resolver(release_changeset, requirements)
    else
      release_changeset
    end
  end

  # Runs the version resolver; on failure the resolver error is attached to
  # every requirement changeset and the release changeset is invalidated.
  # Resolution time is logged in milliseconds.
  defp validate_resolver(release_changeset, requirements) do
    build_tools = get_field(release_changeset, :meta).build_tools

    {time, release_changeset} =
      :timer.tc(fn ->
        case Resolver.run(requirements, build_tools) do
          :ok ->
            release_changeset

          {:error, reason} ->
            release_changeset =
              update_in(release_changeset.changes.requirements, fn req_changesets ->
                Enum.map(req_changesets, &add_error(&1, :requirement, reason))
              end)

            %{release_changeset | valid?: false}
        end
      end)

    Logger.warn("DEPENDENCY_RESOLUTION_COMPLETED (#{div(time, 1000)}ms)")
    release_changeset
  end

  # Loads all packages referenced by the raw requirement params, keyed by
  # {repository_name, package_name}. Returns an empty map for empty input.
  defp preload_dependencies(requirements) do
    names = requirement_names(requirements)

    from(
      p in Package,
      join: r in assoc(p, :repository),
      select: {{r.name, p.name}, %{p | repository: r}}
    )
    |> filter_dependencies(names)
  end

  # No names means no query at all — avoids an unconstrained table scan.
  defp filter_dependencies(_query, []) do
    %{}
  end

  defp filter_dependencies(query, names) do
    import Ecto.Query, only: [or_where: 3]

    Enum.reduce(names, query, fn {repository, package}, query ->
      or_where(query, [p, r], r.name == ^repository and p.name == ^package)
    end)
    |> Hexpm.Repo.all()
    |> Map.new()
  end

  # Extracts {repository, name} pairs from the raw params, defaulting the
  # repository to "hexpm" and skipping malformed entries.
  defp requirement_names(requirements) when is_list(requirements) do
    Enum.flat_map(requirements, fn
      req when is_map(req) ->
        name = req["name"]
        repository = req["repository"] || "hexpm"

        if is_binary(name) and is_binary(repository) do
          [{repository, name}]
        else
          []
        end

      _ ->
        []
    end)
  end

  defp requirement_names(_requirements), do: []

  # Pre-release part of the release version (used to decide whether
  # pre-release requirements are allowed); nil when no version is set.
  defp version_pre(release_changeset) do
    version = get_field(release_changeset, :version)
    version && version.pre
  end
end
| 28.408
| 94
| 0.658688
|
9395629d73a0282a827bf5283c3fec14c49fcd1a
| 1,853
|
exs
|
Elixir
|
mix.exs
|
luizmiranda7/ex_aws
|
3bce7a95225c49fb89b5b04f5a7123e90fec0690
|
[
"MIT"
] | null | null | null |
mix.exs
|
luizmiranda7/ex_aws
|
3bce7a95225c49fb89b5b04f5a7123e90fec0690
|
[
"MIT"
] | 11
|
2021-08-02T18:13:25.000Z
|
2022-03-23T20:53:41.000Z
|
mix.exs
|
justgage/ex_aws
|
bd9c2383a75ae5f6d874fd92f14d6dc2eab4b5ae
|
[
"MIT"
] | null | null | null |
defmodule ExAws.Mixfile do
  use Mix.Project

  @source_url "https://github.com/ex-aws/ex_aws"
  @version "2.2.3"

  # Mix project definition for the ex_aws Hex package.
  # NOTE(review): the short description here differs from the longer
  # description() placed in package() — confirm which one Hex should publish.
  def project do
    [
      app: :ex_aws,
      version: @version,
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      description: "Generic AWS client",
      name: "ExAws",
      source_url: @source_url,
      package: package(),
      deps: deps(),
      docs: docs(),
      dialyzer: [plt_add_apps: [:mix, :hackney, :configparser_ex, :jsx]]
    ]
  end

  # OTP application configuration: starts the ExAws supervisor.
  def application do
    [extra_applications: [:logger, :crypto], mod: {ExAws, []}]
  end

  # Test builds also compile the support helpers under test/support.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Runtime and tooling dependencies; most integrations are optional so
  # users only pull in what they need.
  defp deps do
    [
      {:telemetry, "~> 0.4"},
      {:bypass, "~> 2.1", only: :test},
      {:configparser_ex, "~> 4.0", optional: true},
      {:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
      {:ex_doc, "~> 0.16", only: [:dev, :test]},
      {:hackney, "~> 1.16", optional: true},
      {:jason, "~> 1.1", optional: true},
      {:jsx, "~> 3.0", optional: true},
      {:mox, "~> 1.0", only: :test},
      {:sweet_xml, "~> 0.6", optional: true}
    ]
  end

  # Hex package metadata.
  defp package do
    [
      description: description(),
      files: ["priv", "lib", "config", "mix.exs", "README*"],
      maintainers: ["Bernard Duggan", "Ben Wilson"],
      licenses: ["MIT"],
      links: %{
        Changelog: "#{@source_url}/blob/master/CHANGELOG.md",
        GitHub: @source_url
      }
    ]
  end

  defp description do
    """
    AWS client for Elixir. Currently supports Dynamo, DynamoStreams, EC2,
    Firehose, Kinesis, KMS, Lambda, RRDS, Route53, S3, SES, SNS, SQS, STS
    """
  end

  # ExDoc configuration.
  defp docs do
    [
      main: "readme",
      source_ref: "v#{@version}",
      source_url: @source_url,
      extras: ["README.md"]
    ]
  end
end
| 24.381579
| 73
| 0.541284
|
93957247943c56ec335f5b0dd72e1948ecb0cb69
| 7,902
|
exs
|
Elixir
|
bench/basic_bench.exs
|
YongHaoWu/elixir-cbson
|
f60586ed51168922edf0dd6e5caeaad7573cd594
|
[
"MIT"
] | null | null | null |
bench/basic_bench.exs
|
YongHaoWu/elixir-cbson
|
f60586ed51168922edf0dd6e5caeaad7573cd594
|
[
"MIT"
] | null | null | null |
bench/basic_bench.exs
|
YongHaoWu/elixir-cbson
|
f60586ed51168922edf0dd6e5caeaad7573cd594
|
[
"MIT"
] | null | null | null |
defmodule Data do
def get_bson do
get_data |> Enum.map(&CBson.encode/1)
end
defp get_data do
[
%{
a: -4.230845,
b: "hello",
c: %{x: -1, y: 2.2001},
d: [23, 45, 200],
eeeeeeeee: %Bson.Bin{ subtype: Bson.Bin.subtyx(:binary),
bin: <<200, 12, 240, 129, 100, 90, 56, 198, 34, 0, 0>>},
f: %Bson.Bin{ subtype: Bson.Bin.subtyx(:function),
bin: <<200, 12, 240, 129, 100, 90, 56, 198, 34, 0, 0>>},
g: %Bson.Bin{ subtype: Bson.Bin.subtyx(:uuid),
bin: <<49, 0, 0, 0, 4, 66, 83, 79, 78, 0, 38, 0, 0, 0,
2, 48, 0, 8, 0, 0, 0, 97, 119, 101, 115, 111, 109,
101, 0, 1, 49, 0, 51, 51, 51, 51, 51, 51, 20, 64,
16, 50, 0, 194, 7, 0, 0, 0, 0>>},
h: %Bson.Bin{ subtype: Bson.Bin.subtyx(:md5),
bin: <<200, 12, 240, 129, 100, 90, 56, 198, 34, 0, 0>>},
i: %Bson.Bin{ subtype: Bson.Bin.subtyx(:user),
bin: <<49, 0, 0, 0, 4, 66, 83, 79, 78, 0, 38, 0, 0, 0, 2,
48, 0, 8, 0, 0, 0, 97, 119, 101, 115, 111, 109, 101,
0, 1, 49, 0, 51, 51, 51, 51, 51, 51, 20, 64, 16, 50,
0, 194, 7, 0, 0, 0, 0>>},
j: %Bson.ObjectId{oid: <<82, 224, 229, 161, 0, 0, 2, 0, 3, 0, 0, 4>>},
k1: false,
k2: true,
l: Bson.UTC.from_now({1390, 470561, 277000}),
m: nil,
q1: -2000444000,
q2: -8000111000222001,
r: %Bson.Timestamp{ts: 2},
t: Bson.ObjectId.from_string("52e0e5a10000020003000004")
},
%{
"ip": "112.113.57.181",
"data": %{
"graphicsDeviceVersion": "OpenGL ES 2.0 V@6.0 AU@ (CL@3657481)",
"npotSupport": "Full",
"supportedRenderTargetCount": 1,
"supportsVibration": true,
"dataPath": "\/data\/app\/com.tencent.tmgp.ttlz-1.apk",
"graphicsMemorySize": 251,
"processorType": "ARMv7 VFPv3 NEON",
"operator": "qqapp",
"processorCount": 4,
"supportsComputeShaders": false,
"absoluteURL": "",
"graphicsDeviceVendorID": 0,
"genuine": true,
"temporaryCachePath": "\/storage\/sdcard0\/Android\/data\/com.tencent.tmgp.ttlz\/cache",
"targetFrameRate": -1,
"systemMemorySize": 1797,
"supportsGyroscope": true,
"supportsStencil": 1,
"platform": "Android",
"supportsAccelerometer": true,
"version": "1.0.70706",
"deviceType": "Handheld",
"supportsRenderToCubemap": true,
"supportsShadows": true,
"utcDateTime": 1410404000,
"persistentDataPath": "\/storage\/sdcard0\/Android\/data\/com.tencent.tmgp.ttlz\/files",
"description": "应用宝 2014-07-02",
"graphicsShaderLevel": 30,
"supportsInstancing": false,
"streamingAssetsPath": "jar:file:\/\/\/data\/app\/com.tencent.tmgp.ttlz-1.apk!\/assets",
"supportsRenderTextures": true,
"systemLanguage": "Chinese",
"maxTextureSize": 4096,
"supportsVertexPrograms": true,
"graphicsDeviceName": "Adreno (TM) 320",
"deviceModel": "LGE LG-E988",
"internetReachability": "ReachableViaLocalAreaNetwork",
"graphicsDeviceVendor": "Qualcomm",
"graphicsPixelFillrate": -1,
"unityVersion": "4.5.1f3",
"dateTime": 1410433000,
"graphicsDeviceID": 0,
"genuineCheckAvailable": false,
"supportsImageEffects": true,
"supportsLocationService": true,
"supports3DTextures": false,
"deviceUniqueIdentifier": "27bd20d0f89d1f7611bbc996ee0e0817",
"operatingSystem": "Android OS 4.1.2 \/ API-16 (JZO54K\/E98810d.1377827792)"
}
},
%{
"ip": "112.113.57.181",
"data": %{
"graphicsDeviceVersion": "OpenGL ES 2.0 V@6.0 AU@ (CL@3657481)",
"npotSupport": "Full",
"supportedRenderTargetCount": 1,
"subdata": %{
"dateTime": 1410433000,
"graphicsDeviceID": 0,
"genuineCheckAvailable": false,
"supportsImageEffects": true,
"supportsLocationService": true,
"subdata": %{
"dateTime": 1410433000,
"graphicsDeviceID": 0,
"genuineCheckAvailable": false,
"supportsImageEffects": true,
"supportsLocationService": true,
}
}
}
},
%{
"_id"=> Bson.ObjectId.from_string("5625d0b75f6bcdac0556dcec"),
"avatar"=> %{
"create"=> nil,
"id"=> "E575D708-8A61-9B3D-896F-4895809F63E5",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
},
"bio"=> "找一个能相知相伴一直走下去的人真的好难。",
"city"=> "360700",
"contacts"=> [],
"day"=> 7,
"eula"=> false,
"ext"=> %{
"vip"=> 0
},
"fresh"=> true,
"gender"=> 1,
"loc"=> %{
"coordinates"=> [
114.936872,
25.855845
],
"type"=> "Point"
},
"lock"=> true,
"month"=> 3,
"name"=> " 阳光如旧",
"photos"=> [
%{
"create"=> nil,
"id"=> "E575D708-8A61-9B3D-896F-4895809F63E5",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
},
%{
"create"=> nil,
"id"=> "97BD960B-E4ED-FCDE-1F7D-629990C4A87C20151019",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
},
%{
"create"=> nil,
"id"=> "15962FFD-60C7-9F72-2F65-5A4E561C75C820151019",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
},
%{
"create"=> nil,
"id"=> "ED626DC0-7AAF-CD9C-B87C-AD1138C91D9620151019",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
},
%{
"create"=> nil,
"id"=> "612A2B4B-484B-14D6-0710-BAEF532A674C20151019",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
},
%{
"create"=> nil,
"id"=> "5C8DE3DA-72EC-1912-749C-D339C4BAAB5D20151019",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
},
%{
"create"=> nil,
"id"=> "98FF1781-82D0-6138-347D-25BC22D3F5A120151019",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
},
%{
"create"=> nil,
"id"=> "B35CDBF8-471D-0672-8F21-00CB2E40258820151019",
"name"=> nil,
"size"=> nil,
"source"=> "MOMO",
"type"=> nil
}
],
"update_mark"=> Bson.UTC.from_now(:erlang.timestamp),
"year"=> 1996
}
]
end
end
defmodule DecodBench do
  use Benchfella

  # Decoding benchmarks: the same pre-encoded BSON binaries are decoded
  # by each of the three libraries under comparison.
  bench "decoder(cbson)", [bson: Data.get_bson()] do
    Enum.map(bson, &CBson.decode/1)
  end

  bench "decoder(bson)", [bson: Data.get_bson()] do
    Enum.map(bson, &Bson.decode/1)
  end

  bench "decoder(BSON)", [bson: Data.get_bson()] do
    Enum.map(bson, &BSON.decode/1)
  end
end
defmodule EncodeBench do
  use Benchfella

  # Encoding benchmarks: documents are decoded once in the bench bindings,
  # so only the encoding work is measured.
  # Fixes: renamed `get_BSON` to snake_case (Elixir naming convention) and
  # parenthesized the bare `Data.get_bson` call (deprecated form).
  bench "encode(cbson)", [bson: cbson_decoded()] do
    bson |> Enum.map(&CBson.encode/1)
  end

  bench "encode(bson)", [bson: cbson_decoded()] do
    bson |> Enum.map(&Bson.encode/1)
  end

  bench "encode(BSON)", [bson: bson_decoded()] do
    bson |> Enum.map(&BSON.encode/1)
  end

  # Sample documents decoded with CBson, used as encoder input.
  defp cbson_decoded do
    Data.get_bson() |> Enum.map(&CBson.decode/1)
  end

  # Sample documents decoded with the BSON library (its own struct shapes).
  defp bson_decoded do
    Data.get_bson() |> Enum.map(&BSON.decode/1)
  end
end
| 30.627907
| 98
| 0.483548
|
9395cc4df84f55801367496c7da091920527614f
| 878
|
exs
|
Elixir
|
test/extras/enum_x_test.exs
|
marick/ecto_test_dsl
|
6d460af093367098b7c78db709753deb45904d77
|
[
"Unlicense"
] | 4
|
2021-02-09T17:26:34.000Z
|
2021-08-08T01:42:52.000Z
|
test/extras/enum_x_test.exs
|
marick/transformer_test_support
|
6d460af093367098b7c78db709753deb45904d77
|
[
"Unlicense"
] | null | null | null |
test/extras/enum_x_test.exs
|
marick/transformer_test_support
|
6d460af093367098b7c78db709753deb45904d77
|
[
"Unlicense"
] | null | null | null |
defmodule EnumXTest do
  use EctoTestDSL.Drink.Me
  use ExUnit.Case
  alias FlowAssertions.TabularA

  describe "take_until" do
    test "doesn't stop" do
      result = EnumX.take_until([1, 2, 3], fn _ -> false end)
      assert result == [1, 2, 3]
    end

    test "does stop" do
      result = EnumX.take_until([1, 2, 3], &(&1 == 2))
      assert result == [1, 2]
    end
  end

  test "difference" do
    expect = TabularA.run_and_assert(&EnumX.difference/2)

    # Base cases.
    expect.({[], []}, [])
    expect.({[], [:b]}, [])
    expect.({[:a], []}, [:a])
    expect.({[:a], [:a]}, [])
    expect.({[:a, :b], [:a]}, [:b])

    # Duplicates in either argument.
    expect.({[:a, :b, :a], [:a]}, [:b])
    expect.({[:a], [:a, :a]}, [])

    # Note that duplicates can be retained.
    expect.({[:b, :b], [:a, :a]}, [:b, :b])
  end
end
| 25.085714
| 72
| 0.447608
|
9395db0564d9397f32e104cbaddf99654c37ea7d
| 586
|
ex
|
Elixir
|
lib/edgedb/protocol/type_descriptors/scalar_type_name_annotation.ex
|
nsidnev/edgedb-elixir
|
bade2b9daba2e83bfaa5915b2addb74f41610968
|
[
"MIT"
] | 30
|
2021-05-19T08:54:44.000Z
|
2022-03-11T22:52:25.000Z
|
lib/edgedb/protocol/type_descriptors/scalar_type_name_annotation.ex
|
nsidnev/edgedb-elixir
|
bade2b9daba2e83bfaa5915b2addb74f41610968
|
[
"MIT"
] | 3
|
2021-11-17T21:26:01.000Z
|
2022-03-12T09:49:25.000Z
|
lib/edgedb/protocol/type_descriptors/scalar_type_name_annotation.ex
|
nsidnev/edgedb-elixir
|
bade2b9daba2e83bfaa5915b2addb74f41610968
|
[
"MIT"
] | 3
|
2021-08-29T14:55:41.000Z
|
2022-03-12T01:30:35.000Z
|
defmodule EdgeDB.Protocol.TypeDescriptors.ScalarTypeNameAnnotation do
  use EdgeDB.Protocol.TypeDescriptor

  alias EdgeDB.Protocol.Codecs
  alias EdgeDB.Protocol.Datatypes

  # The descriptor tag is fixed (0xFF), so there is nothing to parse.
  deftypedescriptor(type: 0xFF, parse: false)

  # An annotation descriptor does not define a new codec: it attaches the
  # scalar type name to the codec already registered under `id` and returns
  # the unconsumed remainder of the descriptor data.
  @impl EdgeDB.Protocol.TypeDescriptor
  def consume_description(codecs_storage, id, <<data::binary>>) do
    {type_name, remaining} = Datatypes.String.decode(data)
    Codecs.Storage.update(codecs_storage, id, %{type_name: type_name})
    remaining
  end
end
| 23.44
| 70
| 0.732082
|
9395e85179885f45549ef72b6085bb75b0091690
| 47,945
|
exs
|
Elixir
|
test/livebook/intellisense_test.exs
|
PeteJodo/livebook
|
4badf40afcbe489deda92812e72a99a0e54f11a7
|
[
"Apache-2.0"
] | null | null | null |
test/livebook/intellisense_test.exs
|
PeteJodo/livebook
|
4badf40afcbe489deda92812e72a99a0e54f11a7
|
[
"Apache-2.0"
] | null | null | null |
test/livebook/intellisense_test.exs
|
PeteJodo/livebook
|
4badf40afcbe489deda92812e72a99a0e54f11a7
|
[
"Apache-2.0"
] | null | null | null |
defmodule Livebook.IntellisenseTest.Utils do
  @moduledoc false
  @doc """
  Returns `{binding, env}` resulting from evaluating
  the given block of code in a fresh context.
  """
  defmacro eval(do: block) do
    quote do
      block = unquote(Macro.escape(block))
      binding = []
      # TODO: Use Code.eval_quoted_with_env/3 on Elixir v1.14
      env = :elixir.env_for_eval([])
      # NOTE(review): :elixir.eval_quoted/3 is a private API — it evaluates
      # the quoted block and returns the resulting bindings/environment.
      {_, binding, env} = :elixir.eval_quoted(block, binding, env)
      {binding, env}
    end
  end
end
defmodule Livebook.IntellisenseTest do
use ExUnit.Case, async: true
import Livebook.IntellisenseTest.Utils
alias Livebook.Intellisense
describe "get_completion_items/3" do
test "completion when no hint given" do
{binding, env} = eval(do: nil)
length_item = %{
label: "length/1",
kind: :function,
detail: "Kernel.length(list)",
documentation: """
Returns the length of `list`.
```
@spec length(list()) :: non_neg_integer()
```\
""",
insert_text: "length($0)"
}
assert length_item in Intellisense.get_completion_items("", binding, env)
assert length_item in Intellisense.get_completion_items("to_string(", binding, env)
assert length_item in Intellisense.get_completion_items("Enum.map(list, ", binding, env)
end
@tag :erl_docs
test "Erlang module completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: ":zlib",
kind: :module,
detail: "module",
documentation:
"This module provides an API for the zlib library ([www.zlib.net](http://www.zlib.net)). It is used to compress and decompress data. The data format is described by [RFC 1950](https://www.ietf.org/rfc/rfc1950.txt), [RFC 1951](https://www.ietf.org/rfc/rfc1951.txt), and [RFC 1952](https://www.ietf.org/rfc/rfc1952.txt).",
insert_text: "zlib"
}
] = Intellisense.get_completion_items(":zl", binding, env)
end
test "Erlang module no completion" do
{binding, env} = eval(do: nil)
assert [] = Intellisense.get_completion_items(":unknown", binding, env)
end
test "Erlang module multiple values completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: ":user",
kind: :module,
detail: "module",
documentation: _user_doc,
insert_text: "user"
},
%{
label: ":user_drv",
kind: :module,
detail: "module",
documentation: _user_drv_doc,
insert_text: "user_drv"
},
%{
label: ":user_sup",
kind: :module,
detail: "module",
documentation: _user_sup_doc,
insert_text: "user_sup"
}
] = Intellisense.get_completion_items(":user", binding, env)
end
@tag :erl_docs
test "Erlang root completion" do
{binding, env} = eval(do: nil)
lists_item = %{
label: ":lists",
kind: :module,
detail: "module",
documentation: "This module contains functions for list processing.",
insert_text: "lists"
}
assert lists_item in Intellisense.get_completion_items(":", binding, env)
assert lists_item in Intellisense.get_completion_items(" :", binding, env)
end
@tag :erl_docs
test "Erlang completion doesn't include quoted atoms" do
{binding, env} = eval(do: nil)
assert [] = Intellisense.get_completion_items(~s{:Elixir}, binding, env)
end
@tag :erl_docs
test "Erlang module completion with 'in' operator in spec" do
{binding, env} = eval(do: nil)
assert [
%{
label: "open_port/2",
kind: :function,
detail: ":erlang.open_port/2",
documentation: _open_port_doc,
insert_text: "open_port($0)"
}
] = Intellisense.get_completion_items(":erlang.open_por", binding, env)
end
test "Elixir proxy" do
{binding, env} = eval(do: nil)
assert %{
label: "Elixir",
kind: :module,
detail: "module",
documentation: "No documentation available",
insert_text: "Elixir"
} in Intellisense.get_completion_items("E", binding, env)
end
test "Elixir module completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "Enum",
kind: :module,
detail: "module",
documentation: "Provides a set of algorithms to work with enumerables.",
insert_text: "Enum"
},
%{
label: "Enumerable",
kind: :interface,
detail: "protocol",
documentation: "Enumerable protocol used by `Enum` and `Stream` modules.",
insert_text: "Enumerable"
}
] = Intellisense.get_completion_items("En", binding, env)
assert [
%{
label: "Enumerable",
kind: :interface,
detail: "protocol",
documentation: "Enumerable protocol used by `Enum` and `Stream` modules.",
insert_text: "Enumerable"
}
] = Intellisense.get_completion_items("Enumera", binding, env)
assert [
%{
label: "RuntimeError",
kind: :struct,
detail: "exception",
documentation: "No documentation available",
insert_text: "RuntimeError"
}
] = Intellisense.get_completion_items("RuntimeE", binding, env)
end
test "Elixir struct completion lists nested options" do
{binding, env} = eval(do: nil)
assert %{
label: "File.Stat",
kind: :struct,
detail: "struct",
documentation: "A struct that holds file information.",
insert_text: "File.Stat"
} in Intellisense.get_completion_items("%Fi", binding, env)
end
test "Elixir type completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "from/0",
kind: :type,
detail: "typespec",
documentation: "Tuple describing the client of a call request.",
insert_text: "from"
}
] = Intellisense.get_completion_items("GenServer.fr", binding, env)
assert [
%{
label: "name/0",
kind: :type,
detail: "typespec",
documentation: _name_doc,
insert_text: "name"
},
%{
label: "name_all/0",
kind: :type,
detail: "typespec",
documentation: _name_all_doc,
insert_text: "name_all"
}
] = Intellisense.get_completion_items(":file.nam", binding, env)
end
test "Elixir module completion with self" do
{binding, env} = eval(do: nil)
assert [
%{
label: "Enumerable",
kind: :interface,
detail: "protocol",
documentation: "Enumerable protocol used by `Enum` and `Stream` modules.",
insert_text: "Enumerable"
}
] = Intellisense.get_completion_items("Enumerable", binding, env)
end
test "Elixir completion on modules from load path" do
{binding, env} = eval(do: nil)
assert %{
label: "Jason",
kind: :module,
detail: "module",
documentation: "A blazing fast JSON parser and generator in pure Elixir.",
insert_text: "Jason"
} in Intellisense.get_completion_items("Jas", binding, env)
end
test "Elixir no completion" do
{binding, env} = eval(do: nil)
assert [] = Intellisense.get_completion_items(".", binding, env)
assert [] = Intellisense.get_completion_items("Xyz", binding, env)
assert [] = Intellisense.get_completion_items("x.Foo", binding, env)
assert [] = Intellisense.get_completion_items("x.Foo.get_by", binding, env)
end
test "Elixir private module no completion" do
{binding, env} = eval(do: nil)
assert [] =
Intellisense.get_completion_items(
"Livebook.TestModules.Hidd",
binding,
env
)
end
test "Elixir private module members completion" do
{binding, env} = eval(do: nil)
assert [
%{
detail: "Livebook.TestModules.Hidden.visible()",
documentation: "No documentation available",
insert_text: "visible()",
kind: :function,
label: "visible/0"
}
] = Intellisense.get_completion_items("Livebook.TestModules.Hidden.", binding, env)
end
test "Elixir root submodule completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "Access",
kind: :interface,
detail: "behaviour",
documentation: "Key-based access to data structures.",
insert_text: "Access"
}
] = Intellisense.get_completion_items("Elixir.Acce", binding, env)
end
test "Elixir submodule completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "ANSI",
kind: :module,
detail: "module",
documentation: "Functionality to render ANSI escape sequences.",
insert_text: "ANSI"
}
] = Intellisense.get_completion_items("IO.AN", binding, env)
end
test "Elixir submodule no completion" do
{binding, env} = eval(do: nil)
assert [] = Intellisense.get_completion_items("IEx.Xyz", binding, env)
end
test "Elixir function completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "version/0",
kind: :function,
detail: "System.version()",
documentation: """
Elixir version information.
```
@spec version() :: String.t()
```\
""",
insert_text: "version()"
}
] = Intellisense.get_completion_items("System.ve", binding, env)
end
test "Elixir sigil completion" do
{binding, env} = eval(do: nil)
regex_item = %{
label: "~r/2",
kind: :function,
detail: "Kernel.sigil_r(term, modifiers)",
documentation: "Handles the sigil `~r` for regular expressions.",
insert_text: "~r"
}
assert regex_item in Intellisense.get_completion_items("~", binding, env)
assert [^regex_item] = Intellisense.get_completion_items("~r", binding, env)
end
@tag :erl_docs
test "Erlang function completion" do
{binding, env} = eval(do: nil)
assert %{
label: "gzip/1",
kind: :function,
detail: ":zlib.gzip/1",
documentation: """
Compresses data with gz headers and checksum.
```
@spec gzip(data) :: compressed
when data: iodata(),
compressed: binary()
```\
""",
insert_text: "gzip($0)"
} in Intellisense.get_completion_items(":zlib.gz", binding, env)
end
test "function completion with arity" do
{binding, env} = eval(do: nil)
assert %{
label: "concat/1",
kind: :function,
detail: "Enum.concat(enumerables)",
documentation: """
Given an enumerable of enumerables, concatenates the `enumerables` into
a single list.
```
@spec concat(t()) :: t()
```\
""",
insert_text: "concat($0)"
} in Intellisense.get_completion_items("Enum.concat/", binding, env)
assert [
%{label: "count/1"},
%{label: "count/2"}
] = Intellisense.get_completion_items("Enum.count/", binding, env)
end
test "function completion same name with different arities" do
{binding, env} = eval(do: nil)
assert [
%{
label: "concat/1",
kind: :function,
detail: "Enum.concat(enumerables)",
documentation: """
Given an enumerable of enumerables, concatenates the `enumerables` into
a single list.
```
@spec concat(t()) :: t()
```\
""",
insert_text: "concat($0)"
},
%{
label: "concat/2",
kind: :function,
detail: "Enum.concat(left, right)",
documentation: """
Concatenates the enumerable on the `right` with the enumerable on the
`left`.
```
@spec concat(t(), t()) :: t()
```\
""",
insert_text: "concat($0)"
}
] = Intellisense.get_completion_items("Enum.concat", binding, env)
end
test "function completion when has default args then documentation all arities have docs" do
{binding, env} = eval(do: nil)
assert [
%{
label: "join/1",
kind: :function,
detail: ~S{Enum.join(enumerable, joiner \\ "")},
documentation: """
Joins the given `enumerable` into a string using `joiner` as a
separator.
```
@spec join(t(), String.t()) :: String.t()
```\
""",
insert_text: "join($0)"
},
%{
label: "join/2",
kind: :function,
detail: ~S{Enum.join(enumerable, joiner \\ "")},
documentation: """
Joins the given `enumerable` into a string using `joiner` as a
separator.
```
@spec join(t(), String.t()) :: String.t()
```\
""",
insert_text: "join($0)"
}
] = Intellisense.get_completion_items("Enum.jo", binding, env)
end
test "function completion using a variable bound to a module" do
{binding, env} =
eval do
mod = System
end
assert [
%{
label: "version/0",
kind: :function,
detail: "System.version()",
documentation: """
Elixir version information.
```
@spec version() :: String.t()
```\
""",
insert_text: "version()"
}
] = Intellisense.get_completion_items("mod.ve", binding, env)
end
test "operator completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "++/2",
kind: :function,
detail: "left ++ right",
documentation: """
List concatenation operator. Concatenates a proper list and a term, returning a list.
```
@spec list() ++ term() ::
maybe_improper_list()
```\
""",
insert_text: "++"
},
%{
label: "+/1",
kind: :function,
detail: "+value",
documentation: """
Arithmetic positive unary operator.
```
@spec +integer() :: integer()
@spec +float() :: float()
```\
""",
insert_text: "+"
},
%{
label: "+/2",
kind: :function,
detail: "left + right",
documentation: """
Arithmetic addition operator.
```
@spec integer() + integer() :: integer()
@spec float() + float() :: float()
@spec integer() + float() :: float()
@spec float() + integer() :: float()
```\
""",
insert_text: "+"
}
] = Intellisense.get_completion_items("+", binding, env)
assert [
%{label: "+/1"},
%{label: "+/2"}
] = Intellisense.get_completion_items("+/", binding, env)
assert [
%{label: "++/2"}
] = Intellisense.get_completion_items("++/", binding, env)
end
test "map atom key completion" do
{binding, env} =
eval do
map = %{
foo: 1,
bar_1: ~r/pattern/,
bar_2: true
}
end
assert [
%{
label: "bar_1",
kind: :field,
detail: "field",
documentation: nil,
insert_text: "bar_1"
},
%{
label: "bar_2",
kind: :field,
detail: "field",
documentation: nil,
insert_text: "bar_2"
},
%{
label: "foo",
kind: :field,
detail: "field",
documentation: nil,
insert_text: "foo"
}
] = Intellisense.get_completion_items("map.", binding, env)
assert [
%{
label: "foo",
kind: :field,
detail: "field",
documentation: nil,
insert_text: "foo"
}
] = Intellisense.get_completion_items("map.f", binding, env)
end
test "nested map atom key completion" do
{binding, env} =
eval do
map = %{
nested: %{
deeply: %{
foo: 1,
bar_1: 23,
bar_2: 14,
mod: System
}
}
}
end
assert [
%{
label: "nested",
kind: :field,
detail: "field",
documentation: nil,
insert_text: "nested"
}
] = Intellisense.get_completion_items("map.nest", binding, env)
assert [
%{
label: "foo",
kind: :field,
detail: "field",
documentation: nil,
insert_text: "foo"
}
] = Intellisense.get_completion_items("map.nested.deeply.f", binding, env)
assert [
%{
label: "version/0",
kind: :function,
detail: "System.version()",
documentation: """
Elixir version information.
```
@spec version() :: String.t()
```\
""",
insert_text: "version()"
}
] = Intellisense.get_completion_items("map.nested.deeply.mod.ve", binding, env)
assert [] = Intellisense.get_completion_items("map.non.existent", binding, env)
end
test "map string key completion is not supported" do
{binding, env} =
eval do
map = %{"foo" => 1}
end
assert [] = Intellisense.get_completion_items("map.f", binding, env)
end
test "autocompletion off a bound variable only works for modules and maps" do
{binding, env} =
eval do
num = 5
map = %{nested: %{num: 23}}
end
assert [] = Intellisense.get_completion_items("num.print", binding, env)
assert [] = Intellisense.get_completion_items("map.nested.num.f", binding, env)
end
test "autocompletion using access syntax does is not supported" do
{binding, env} =
eval do
map = %{nested: %{deeply: %{num: 23}}}
end
assert [] = Intellisense.get_completion_items("map[:nested][:deeply].n", binding, env)
assert [] = Intellisense.get_completion_items("map[:nested].deeply.n", binding, env)
assert [] = Intellisense.get_completion_items("map.nested.[:deeply].n", binding, env)
end
test "macro completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "is_nil/1",
kind: :function,
detail: "Kernel.is_nil(term)",
documentation: "Returns `true` if `term` is `nil`, `false` otherwise.",
insert_text: "is_nil($0)"
}
] = Intellisense.get_completion_items("Kernel.is_ni", binding, env)
end
test "special forms completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "quote/2",
kind: :function,
detail: "Kernel.SpecialForms.quote(opts, block)",
documentation: "Gets the representation of any expression.",
insert_text: "quote "
}
] = Intellisense.get_completion_items("quot", binding, env)
end
test "kernel import completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "put_in/2",
kind: :function,
detail: "Kernel.put_in(path, value)",
documentation: "Puts a value in a nested structure via the given `path`.",
insert_text: "put_in($0)"
},
%{
label: "put_in/3",
kind: :function,
detail: "Kernel.put_in(data, keys, value)",
documentation: """
Puts a value in a nested structure.
```
@spec put_in(
Access.t(),
[term(), ...],
term()
) :: Access.t()
```\
""",
insert_text: "put_in($0)"
}
] = Intellisense.get_completion_items("put_i", binding, env)
end
test "variable name completion" do
{binding, env} =
eval do
number = 3
numbats = ["numbat", "numbat"]
nothing = nil
end
assert [
%{
label: "numbats",
kind: :variable,
detail: "variable",
documentation: nil,
insert_text: "numbats"
}
] = Intellisense.get_completion_items("numba", binding, env)
assert [
%{
label: "numbats",
kind: :variable,
detail: "variable",
documentation: nil,
insert_text: "numbats"
},
%{
label: "number",
kind: :variable,
detail: "variable",
documentation: nil,
insert_text: "number"
}
] = Intellisense.get_completion_items("num", binding, env)
assert [
%{
label: "nothing",
kind: :variable,
detail: "variable",
documentation: nil,
insert_text: "nothing"
},
%{label: "node/0"},
%{label: "node/1"},
%{label: "not/1"}
] = Intellisense.get_completion_items("no", binding, env)
end
test "completion of manually imported functions and macros" do
{binding, env} =
eval do
import Enum
import System, only: [version: 0]
import Protocol
end
assert [
%{label: "take/2"},
%{label: "take_every/2"},
%{label: "take_random/2"},
%{label: "take_while/2"}
] = Intellisense.get_completion_items("take", binding, env)
assert %{
label: "version/0",
kind: :function,
detail: "System.version()",
documentation: """
Elixir version information.
```
@spec version() :: String.t()
```\
""",
insert_text: "version()"
} in Intellisense.get_completion_items("v", binding, env)
assert [
%{label: "derive/2"},
%{label: "derive/3"}
] = Intellisense.get_completion_items("der", binding, env)
assert [
%{label: "count/1"},
%{label: "count/2"}
] = Intellisense.get_completion_items("count/", binding, env)
end
test "ignores quoted variables when performing variable completion" do
{binding, env} =
eval do
quote do
var!(my_var_1, Elixir) = 1
end
my_var_2 = 2
end
assert [
%{label: "my_var_2"}
] = Intellisense.get_completion_items("my_var", binding, env)
end
test "completion inside expression" do
{binding, env} = eval(do: nil)
assert [
%{label: "Enum"},
%{label: "Enumerable"}
] = Intellisense.get_completion_items("1 En", binding, env)
assert [
%{label: "Enum"},
%{label: "Enumerable"}
] = Intellisense.get_completion_items("foo(En", binding, env)
assert [
%{label: "Enum"},
%{label: "Enumerable"}
] = Intellisense.get_completion_items("Test En", binding, env)
assert [
%{label: "Enum"},
%{label: "Enumerable"}
] = Intellisense.get_completion_items("foo(x,En", binding, env)
assert [
%{label: "Enum"},
%{label: "Enumerable"}
] = Intellisense.get_completion_items("[En", binding, env)
assert [
%{label: "Enum"},
%{label: "Enumerable"}
] = Intellisense.get_completion_items("{En", binding, env)
end
test "ampersand completion" do
{binding, env} = eval(do: nil)
assert [
%{label: "Enum"},
%{label: "Enumerable"}
] = Intellisense.get_completion_items("&En", binding, env)
assert [
%{label: "all?/1"},
%{label: "all?/2"}
] = Intellisense.get_completion_items("&Enum.al", binding, env)
assert [
%{label: "all?/1"},
%{label: "all?/2"}
] = Intellisense.get_completion_items("f = &Enum.al", binding, env)
end
test "negation operator completion" do
{binding, env} = eval(do: nil)
assert [
%{label: "is_binary/1"}
] = Intellisense.get_completion_items("!is_bin", binding, env)
end
test "pin operator completion" do
{binding, env} =
eval do
my_variable = 2
end
assert [
%{label: "my_variable"}
] = Intellisense.get_completion_items("^my_va", binding, env)
end
    # Nested module tree used to exercise completion of submodule segments.
    defmodule SublevelTest.LevelA.LevelB do
    end
test "Elixir completion sublevel" do
{binding, env} = eval(do: nil)
assert [
%{label: "LevelA"}
] =
Intellisense.get_completion_items(
"Livebook.IntellisenseTest.SublevelTest.",
binding,
env
)
end
test "complete aliases of Elixir modules" do
{binding, env} =
eval do
alias List, as: MyList
end
assert [
%{label: "MyList"}
] = Intellisense.get_completion_items("MyL", binding, env)
assert [
%{label: "to_integer/1"},
%{label: "to_integer/2"}
] = Intellisense.get_completion_items("MyList.to_integ", binding, env)
end
    # Requires an OTP build with embedded Erlang docs, hence the tag.
    @tag :erl_docs
    test "complete aliases of Erlang modules" do
      {binding, env} =
        eval do
          alias :lists, as: EList
        end

      assert [
        %{label: "EList"}
      ] = Intellisense.get_completion_items("EL", binding, env)

      assert [
        %{label: "map/2"},
        %{label: "mapfoldl/3"},
        %{label: "mapfoldr/3"}
      ] = Intellisense.get_completion_items("EList.map", binding, env)

      assert %{
        label: "max/1",
        kind: :function,
        detail: ":lists.max/1",
        documentation: """
        Returns the first element of `List` that compares greater than or equal to all other elements of `List`.
        ```
        @spec max(list) :: max
              when list: [t, ...], max: t, t: term()
        ```\
        """,
        insert_text: "max($0)"
      } in Intellisense.get_completion_items("EList.", binding, env)

      # Erlang modules have no nested modules, so this yields nothing.
      assert [] = Intellisense.get_completion_items("EList.Invalid", binding, env)
    end
test "completion for functions added when compiled module is reloaded" do
{binding, env} = eval(do: nil)
{:module, _, bytecode, _} =
defmodule Sample do
def foo(), do: 0
end
assert [
%{label: "foo/0"}
] =
Intellisense.get_completion_items(
"Livebook.IntellisenseTest.Sample.foo",
binding,
env
)
Code.compiler_options(ignore_module_conflict: true)
defmodule Sample do
def foo(), do: 0
def foobar(), do: 0
end
assert [
%{label: "foo/0"},
%{label: "foobar/0"}
] =
Intellisense.get_completion_items(
"Livebook.IntellisenseTest.Sample.foo",
binding,
env
)
after
Code.compiler_options(ignore_module_conflict: false)
:code.purge(Sample)
:code.delete(Sample)
end
    # Struct (single optional field, default nil) used by the
    # struct-completion tests below.
    defmodule MyStruct do
      defstruct [:my_val]
    end
test "completion for struct names" do
{binding, env} = eval(do: nil)
assert [
%{label: "MyStruct"}
] =
Intellisense.get_completion_items("Livebook.IntellisenseTest.MyStr", binding, env)
end
test "completion for struct keys" do
{binding, env} =
eval do
struct = %Livebook.IntellisenseTest.MyStruct{}
end
assert [
%{label: "my_val"}
] = Intellisense.get_completion_items("struct.my", binding, env)
end
test "completion for struct keys inside struct" do
{binding, env} = eval(do: nil)
assert [
%{
label: "my_val",
kind: :field,
detail: "Livebook.IntellisenseTest.MyStruct struct field",
documentation: "```\nmy_val\n```\n\n---\n\n**Default**\n\n```\nnil\n```\n",
insert_text: "my_val: "
}
] =
Intellisense.get_completion_items(
"%Livebook.IntellisenseTest.MyStruct{my",
binding,
env
)
end
test "completion for struct keys inside struct removes filled keys" do
{binding, env} =
eval do
struct = %Livebook.IntellisenseTest.MyStruct{}
end
assert [] =
Intellisense.get_completion_items(
"%Livebook.IntellisenseTest.MyStruct{my_val: 123, ",
binding,
env
)
end
test "completion for struct keys inside struct ignores `__exception__`" do
{binding, env} = eval(do: nil)
completions =
Intellisense.get_completion_items(
"%ArgumentError{",
binding,
env
)
refute Enum.find(completions, &match?(%{label: "__exception__"}, &1))
end
test "ignore invalid Elixir module literals" do
{binding, env} = eval(do: nil)
defmodule(:"Elixir.Livebook.IntellisenseTest.Unicodé", do: nil)
assert [] =
Intellisense.get_completion_items("Livebook.IntellisenseTest.Unicod", binding, env)
after
:code.purge(:"Elixir.Livebook.IntellisenseTest.Unicodé")
:code.delete(:"Elixir.Livebook.IntellisenseTest.Unicodé")
end
test "known Elixir module attributes completion" do
{binding, env} = eval(do: nil)
assert [
%{
label: "moduledoc",
kind: :variable,
detail: "module attribute",
documentation: "Provides documentation for the current module.",
insert_text: "moduledoc"
}
] = Intellisense.get_completion_items("@modu", binding, env)
end
test "handles calls on module attribute" do
{binding, env} = eval(do: nil)
assert [] = Intellisense.get_completion_items("@attr.value", binding, env)
end
test "includes language keywords" do
{binding, env} = eval(do: nil)
assert [
%{
label: "do",
kind: :keyword,
detail: "do-end block",
documentation: nil,
insert_text: "do\n $0\nend"
}
| _
] = Intellisense.get_completion_items("do", binding, env)
end
test "includes space instead of parentheses for def* macros" do
{binding, env} = eval(do: nil)
assert [
%{
label: "defmodule/2",
insert_text: "defmodule "
}
] = Intellisense.get_completion_items("defmodu", binding, env)
end
test "includes space instead of parentheses for keyword macros" do
{binding, env} = eval(do: nil)
assert [
%{
label: "import/2",
insert_text: "import "
}
] = Intellisense.get_completion_items("impor", binding, env)
end
test "includes doesn't include space nor parentheses for macros like __ENV__" do
{binding, env} = eval(do: nil)
assert [
%{
label: "__ENV__/0",
insert_text: "__ENV__"
}
] = Intellisense.get_completion_items("__EN", binding, env)
end
end
describe "get_details/3" do
test "returns nil if there are no matches" do
{binding, env} = eval(do: nil)
assert nil == Intellisense.get_details("Unknown.unknown()", 2, binding, env)
end
test "returns subject range" do
{binding, env} = eval(do: nil)
assert %{range: %{from: 1, to: 18}} =
Intellisense.get_details("Integer.to_string(10)", 15, binding, env)
assert %{range: %{from: 1, to: 8}} =
Intellisense.get_details("Integer.to_string(10)", 2, binding, env)
end
test "does not return duplicate details for functions with default arguments" do
{binding, env} = eval(do: nil)
assert %{contents: [_]} =
Intellisense.get_details("Integer.to_string(10)", 15, binding, env)
end
test "returns details only for exactly matching identifiers" do
{binding, env} = eval(do: nil)
assert nil == Intellisense.get_details("Enum.ma", 6, binding, env)
end
test "returns full docs" do
{binding, env} = eval(do: nil)
assert %{contents: [content]} = Intellisense.get_details("Enum.map", 6, binding, env)
assert content =~ "## Examples"
end
@tag :erl_docs
test "returns full Erlang docs" do
{binding, env} = eval(do: nil)
assert %{contents: [file]} = Intellisense.get_details(":file.read()", 2, binding, env)
assert file =~ "## Performance"
assert %{contents: [file_read]} = Intellisense.get_details(":file.read()", 8, binding, env)
assert file_read =~ "Typical error reasons:"
end
test "properly parses unicode" do
{binding, env} = eval(do: nil)
assert nil == Intellisense.get_details("msg = '🍵'", 8, binding, env)
end
test "handles operators" do
{binding, env} = eval(do: nil)
assert %{contents: [match_op]} = Intellisense.get_details("x = 1", 3, binding, env)
assert match_op =~ "Match operator."
end
test "handles local calls" do
{binding, env} = eval(do: nil)
assert %{contents: [to_string_fn]} =
Intellisense.get_details("to_string(1)", 3, binding, env)
assert to_string_fn =~ "Converts the argument to a string"
end
test "includes full module name in the docs" do
{binding, env} = eval(do: nil)
assert %{contents: [date_range]} = Intellisense.get_details("Date.Range", 8, binding, env)
assert date_range =~ "Date.Range"
end
end
describe "get_signature_items/3" do
test "returns nil when outside call" do
{binding, env} = eval(do: nil)
assert nil == Intellisense.get_signature_items("length()", binding, env)
end
test "returns nil if there are no matches" do
{binding, env} = eval(do: nil)
assert nil == Intellisense.get_signature_items("Unknown.unknown(", binding, env)
assert nil == Intellisense.get_signature_items("Enum.concat(x, y,", binding, env)
end
test "supports remote function calls" do
{binding, env} = eval(do: nil)
assert %{
active_argument: 0,
signature_items: [
%{
signature: "map(enumerable, fun)",
arguments: ["enumerable", "fun"],
documentation: """
Returns a list where each element is the result of invoking
`fun` on each corresponding element of `enumerable`.
---
```
@spec map(t(), (element() -> any())) ::
list()
```\
"""
}
]
} = Intellisense.get_signature_items("Enum.map(", binding, env)
end
test "supports local function calls" do
{binding, env} = eval(do: nil)
assert %{
active_argument: 0,
signature_items: [
%{
signature: "length(list)",
arguments: ["list"],
documentation: """
Returns the length of `list`.
---
```
@spec length(list()) :: non_neg_integer()
```\
"""
}
]
} = Intellisense.get_signature_items("length(", binding, env)
end
test "supports manually imported functions and macros" do
{binding, env} =
eval do
import Enum
import Protocol
end
assert %{
active_argument: 0,
signature_items: [
%{
signature: "map(enumerable, fun)",
arguments: ["enumerable", "fun"],
documentation: _map_doc
}
]
} = Intellisense.get_signature_items("map(", binding, env)
assert %{
active_argument: 0,
signature_items: [
%{
signature: ~S"derive(protocol, module, options \\ [])",
arguments: ["protocol", "module", ~S"options \\ []"],
documentation: _derive_doc
}
]
} = Intellisense.get_signature_items("derive(", binding, env)
end
test "supports remote function calls on aliases" do
{binding, env} =
eval do
alias Enum, as: MyEnum
end
assert %{
active_argument: 0,
signature_items: [
%{
signature: "map(enumerable, fun)",
arguments: ["enumerable", "fun"],
documentation: _map_doc
}
]
} = Intellisense.get_signature_items("MyEnum.map(", binding, env)
end
test "supports anonymous function calls" do
{binding, env} =
eval do
add = fn x, y -> x + y end
end
assert %{
active_argument: 0,
signature_items: [
%{
signature: "add.(arg1, arg2)",
arguments: ["arg1", "arg2"],
documentation: """
No documentation available\
"""
}
]
} = Intellisense.get_signature_items("add.(", binding, env)
end
test "supports captured remote function calls" do
{binding, env} =
eval do
map = &Enum.map/2
end
assert %{
active_argument: 0,
signature_items: [
%{
signature: "map(enumerable, fun)",
arguments: ["enumerable", "fun"],
documentation: _map_doc
}
]
} = Intellisense.get_signature_items("map.(", binding, env)
end
@tag :erl_docs
test "shows signature with arguments for erlang modules" do
{binding, env} = eval(do: nil)
assert %{
active_argument: 0,
signature_items: [
%{
signature: "map(fun, list1)",
arguments: ["fun", "list1"],
documentation: _map_doc
}
]
} = Intellisense.get_signature_items(":lists.map(", binding, env)
end
test "returns call active argument" do
{binding, env} = eval(do: nil)
assert %{active_argument: 0, signature_items: [_item]} =
Intellisense.get_signature_items("Enum.map([1, ", binding, env)
assert %{active_argument: 1, signature_items: [_item]} =
Intellisense.get_signature_items("Enum.map([1, 2], ", binding, env)
assert %{active_argument: 1, signature_items: [_item]} =
Intellisense.get_signature_items("Enum.map([1, 2], fn", binding, env)
assert %{active_argument: 1, signature_items: [_item]} =
Intellisense.get_signature_items(
"Enum.map([1, 2], fn x -> x * x end",
binding,
env
)
assert %{active_argument: 2, signature_items: [_item]} =
Intellisense.get_signature_items("IO.ANSI.color(1, 2, 3", binding, env)
end
test "returns correct active argument when using pipe operator" do
{binding, env} = eval(do: nil)
assert %{active_argument: 1, signature_items: [_item]} =
Intellisense.get_signature_items("[1, 2] |> Enum.map(", binding, env)
assert %{active_argument: 1, signature_items: [_item]} =
Intellisense.get_signature_items("[1, 2] |> Enum.map(fn", binding, env)
assert %{active_argument: 1, signature_items: [_item]} =
Intellisense.get_signature_items(
"[1, 2] |> Enum.map(fn x -> x * x end",
binding,
env
)
assert %{active_argument: 2, signature_items: [_item]} =
Intellisense.get_signature_items("1 |> IO.ANSI.color(2, 3", binding, env)
end
test "returns a single signature for fnuctions with default arguments" do
{binding, env} = eval(do: nil)
assert %{
active_argument: 0,
signature_items: [
%{
signature: ~S"to_string(integer, base \\ 10)",
arguments: ["integer", ~S"base \\ 10"],
documentation: """
Returns a binary which corresponds to the text representation
of `integer` in the given `base`.
---
```
@spec to_string(integer(), 2..36) ::
String.t()
```\
"""
}
]
} = Intellisense.get_signature_items("Integer.to_string(", binding, env)
end
test "returns multiple signatures for function with multiple arities" do
{binding, env} = eval(do: nil)
assert %{
active_argument: 0,
signature_items: [
%{
signature: "concat(enumerables)",
arguments: ["enumerables"],
documentation: _concat_1_docs
},
%{
signature: "concat(left, right)",
arguments: ["left", "right"],
documentation: _concat_2_docs
}
]
} = Intellisense.get_signature_items("Enum.concat(", binding, env)
end
test "returns only signatures where active argument is at valid position" do
{binding, env} = eval(do: nil)
assert %{
active_argument: 1,
signature_items: [
%{
signature: "concat(left, right)",
arguments: ["left", "right"],
documentation: _concat_1_docs
}
]
} = Intellisense.get_signature_items("Enum.concat([1, 2], ", binding, env)
end
test "does not return any signatures when in do-end block" do
{binding, env} = eval(do: nil)
assert nil == Intellisense.get_signature_items("if true do ", binding, env)
end
test "does not return any signatures for module attributes" do
{binding, env} = eval(do: nil)
assert nil == Intellisense.get_signature_items("@length(", binding, env)
end
test "does not returns signatures for calls in attribute value" do
{binding, env} = eval(do: nil)
assert %{
active_argument: 0,
signature_items: [
%{
arguments: ["list"],
documentation: _length_doc,
signature: "length(list)"
}
]
} = Intellisense.get_signature_items("@attr length(", binding, env)
end
end
end
| 30.812982
| 339
| 0.481301
|
9395efcfbd9192211ffdc6b518f730f92612e11d
| 3,123
|
exs
|
Elixir
|
test/rubber/search_test.exs
|
evuez/elastix-reloaded
|
6a14253d13c5163518dd9e4967ccc160eff2e622
|
[
"MIT"
] | 4
|
2018-02-12T16:35:02.000Z
|
2020-04-23T21:17:20.000Z
|
test/rubber/search_test.exs
|
evuez/rubber
|
6a14253d13c5163518dd9e4967ccc160eff2e622
|
[
"MIT"
] | null | null | null |
test/rubber/search_test.exs
|
evuez/rubber
|
6a14253d13c5163518dd9e4967ccc160eff2e622
|
[
"MIT"
] | null | null | null |
defmodule Rubber.SearchTest do
  use ExUnit.Case

  alias Rubber.Search
  alias Rubber.Index
  alias Rubber.Document
  alias HTTPoison.Response

  @test_url Rubber.config(:test_url)
  @test_index Rubber.config(:test_index)

  # Document indexed by most tests below.
  @document_data %{
    user: "werbitzky",
    post_date: "2009-11-15T14:12:12",
    message: "trying out Elasticsearch"
  }

  # Term query matching @document_data by user.
  @query_data %{
    query: %{
      term: %{user: "werbitzky"}
    }
  }

  # Match-all query with a page size of 5, used by the scroll test.
  @scroll_query %{
    size: 5,
    query: %{match_all: %{}},
    sort: ["_doc"]
  }

  setup do
    # Best-effort cleanup; the index may not exist yet, so the result is
    # deliberately ignored.
    Index.delete(@test_url, @test_index)
    :ok
  end

  test "make_path should make path from id and url" do
    path = Search.make_path(@test_index, ["tweet", "product"], ttl: "1d", timeout: 123)
    assert path == "/#{@test_index}/tweet,product/_search?ttl=1d&timeout=123"
  end

  test "make_path should make path that can interchange api type" do
    path =
      Search.make_path(
        @test_index,
        ["tweet", "product"],
        [ttl: "1d", timeout: 123],
        "_count"
      )

    assert path == "/#{@test_index}/tweet,product/_count?ttl=1d&timeout=123"
  end

  test "search should return with status 200" do
    Document.index(@test_url, @test_index, "message", 1, @document_data, refresh: true)

    {:ok, response} = Search.search(@test_url, @test_index, [], @query_data)

    assert response.status_code == 200
  end

  test "search accepts httpoison options" do
    Document.index(@test_url, @test_index, "message", 1, @document_data, refresh: true)

    # `recv_timeout: 0` forces a client-side timeout. Wrapped in `assert`
    # (previously a bare match) so a mismatch is reported as a proper test
    # failure instead of a MatchError.
    assert {:error, %HTTPoison.Error{reason: :timeout}} =
             Search.search(@test_url, @test_index, [], @query_data, [], recv_timeout: 0)
  end

  test "search accepts a list of requests" do
    Document.index(@test_url, @test_index, "message", 1, @document_data, refresh: true)
    Document.index(@test_url, @test_index, "message", 2, @document_data, refresh: true)

    {:ok, response} =
      Search.search(@test_url, @test_index, [], [%{}, @query_data, %{}, @query_data])

    assert [_first, _second] = response.body["responses"]
    assert response.status_code == 200
  end

  test "can scroll through all documents" do
    for i <- 1..10,
        do:
          Document.index(
            @test_url,
            @test_index,
            "message",
            i,
            @document_data,
            refresh: true
          )

    # First page comes from a regular search opened with a scroll context...
    {:ok, %Response{body: body}} =
      Search.search(@test_url, @test_index, [], @scroll_query, scroll: "1m")

    assert length(body["hits"]["hits"]) === 5

    # ...subsequent pages are fetched with the returned scroll id.
    {:ok, %Response{body: body}} =
      Search.scroll(@test_url, %{scroll: "1m", scroll_id: body["_scroll_id"]})

    assert length(body["hits"]["hits"]) === 5

    # The third page is past the 10 indexed documents, so it is empty.
    {:ok, %Response{body: body}} =
      Search.scroll(@test_url, %{scroll: "1m", scroll_id: body["_scroll_id"]})

    assert length(body["hits"]["hits"]) === 0
  end

  test "count should return with status 200" do
    Document.index(@test_url, @test_index, "message", 1, @document_data, refresh: true)

    {:ok, response} = Search.count(@test_url, @test_index, [], @query_data)

    assert response.status_code == 200
    assert response.body["count"] == 1
  end
end
| 27.883929
| 87
| 0.622799
|
93961105111b7c5b1f24ab02121f65859205e538
| 1,535
|
ex
|
Elixir
|
lib/bamboo/adapters/test_adapter.ex
|
clairton/bamboo
|
f2e4ddf177bbcf2c90d010c53c2fa9251fdf1b01
|
[
"MIT"
] | null | null | null |
lib/bamboo/adapters/test_adapter.ex
|
clairton/bamboo
|
f2e4ddf177bbcf2c90d010c53c2fa9251fdf1b01
|
[
"MIT"
] | null | null | null |
lib/bamboo/adapters/test_adapter.ex
|
clairton/bamboo
|
f2e4ddf177bbcf2c90d010c53c2fa9251fdf1b01
|
[
"MIT"
] | 1
|
2020-04-21T12:36:34.000Z
|
2020-04-21T12:36:34.000Z
|
defmodule Bamboo.TestAdapter do
  @moduledoc """
  An adapter for testing email delivery.

  No emails are actually sent. Each delivered email is instead sent as a
  `{:delivered_email, email}` message to the current process (or to the
  process configured under the `:shared_test_process` application env),
  so it can be asserted on with helpers from `Bamboo.Test`.

  ## Example config

      # Typically done in config/test.exs
      config :my_app, MyApp.Mailer,
        adapter: Bamboo.TestAdapter
  """

  @behaviour Bamboo.Adapter

  @doc false
  def deliver(email, _config) do
    stripped = clean_assigns(email)
    recipient = Application.get_env(:bamboo, :shared_test_process) || self()
    send(recipient, {:delivered_email, stripped})
  end

  # Validates the configured delivery strategy, defaulting it to
  # Bamboo.ImmediateDeliveryStrategy when unset.
  def handle_config(config) do
    normalize_strategy(config, config[:deliver_later_strategy])
  end

  defp normalize_strategy(config, nil) do
    Map.put(config, :deliver_later_strategy, Bamboo.ImmediateDeliveryStrategy)
  end

  defp normalize_strategy(config, Bamboo.ImmediateDeliveryStrategy), do: config

  defp normalize_strategy(config, _other) do
    raise ArgumentError, """
    Bamboo.TestAdapter requires that the deliver_later_strategy is
    Bamboo.ImmediateDeliveryStrategy
    Instead it got: #{inspect(config[:deliver_later_strategy])}
    Please remove the deliver_later_strategy from your config options, or
    set it to Bamboo.ImmediateDeliveryStrategy.
    """
  end

  @doc false
  def clean_assigns(email) do
    %{email | assigns: :assigns_removed_for_testing}
  end

  @doc false
  def supports_attachments?, do: true
end
| 25.163934
| 82
| 0.691205
|
93961ba106c8a36ab97596b830ba167c0f3d0cae
| 1,351
|
ex
|
Elixir
|
elixir-match/app/lib/match_web/channels/user_socket.ex
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
elixir-match/app/lib/match_web/channels/user_socket.ex
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
elixir-match/app/lib/match_web/channels/user_socket.ex
|
jim80net/elixir_tutorial_projects
|
db19901a9305b297faa90642bebcc08455621b52
|
[
"Unlicense"
] | null | null | null |
defmodule MatchWeb.UserSocket do
  use Phoenix.Socket

  ## Channels
  channel "available", MatchWeb.AvailableChannel

  # Authenticated connect: the client supplies a Phoenix.Token under the
  # "token" param. Tokens older than one day (86400 s) are rejected. On
  # success the verified player is stored in the socket assigns as
  # :current_user; on failure the connection is denied with :error.
  def connect(%{"token" => token}, socket, _connect_info) do
    with {:ok, player} <- Phoenix.Token.verify(socket, "player_auth", token, max_age: 86400) do
      {:ok, assign(socket, :current_user, player)}
    else
      {:error, _reason} -> :error
    end
  end

  # Anonymous connect: no token supplied, accept the socket unchanged.
  def connect(_params, socket, _connect_info), do: {:ok, socket}

  # Socket ids are topics that identify all sockets of a given user, e.g.
  #
  #     def id(socket), do: "user_socket:#{socket.assigns.user_id}"
  #
  # which would allow broadcasting a "disconnect" to every socket of that
  # user. Returning nil keeps this socket anonymous.
  def id(_socket), do: nil
end
| 30.704545
| 83
| 0.680977
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.