hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
79c02709a06e58df8d96e66751e87e312c9f912e
| 1,137
|
exs
|
Elixir
|
mix.exs
|
coletiv/puppeteer-pdf
|
cacd547bb0ba62402e2c414a9ca1f29da28e4fb5
|
[
"MIT"
] | 90
|
2018-04-26T15:51:36.000Z
|
2022-03-21T22:18:41.000Z
|
mix.exs
|
coletiv/puppeteer-pdf
|
cacd547bb0ba62402e2c414a9ca1f29da28e4fb5
|
[
"MIT"
] | 26
|
2018-05-26T14:53:58.000Z
|
2021-06-28T05:03:17.000Z
|
mix.exs
|
coletiv/puppeteer-pdf
|
cacd547bb0ba62402e2c414a9ca1f29da28e4fb5
|
[
"MIT"
] | 23
|
2018-05-25T18:40:55.000Z
|
2022-01-22T07:33:48.000Z
|
defmodule PuppeteerPdf.MixProject do
  # Mix project definition for the `puppeteer_pdf` Hex package.
  use Mix.Project

  def project do
    [
      app: :puppeteer_pdf,
      version: "1.0.4",
      elixir: "~> 1.6",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      aliases: aliases(),
      # Hex package metadata shown on hex.pm.
      package: [
        maintainers: ["David Magalhães"],
        licenses: ["MIT"],
        links: %{github: "https://github.com/coletiv/puppeteer-pdf"}
      ],
      description: """
      Wrapper for Puppeteer-pdf, a node module that use Puppeteer to convert
      HTML pages to PDF.
      """,
      # Docs
      name: "Puppeteer PDF",
      source_url: "https://github.com/coletiv/puppeteer-pdf",
      docs: [main: "readme", extras: ["README.md"]]
    ]
  end

  # Treats compiler warnings as errors on every `mix compile`.
  def aliases do
    [compile: ["compile --warnings-as-errors"]]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:ex_doc, "~> 0.18", only: :dev},
      {:briefly, "~> 0.3"},
      {:dialyxir, "~> 0.5", only: [:dev], runtime: false}
    ]
  end
end
| 23.204082
| 76
| 0.565523
|
79c0348006a05cf9a0748567c11497683d679eaf
| 7,314
|
ex
|
Elixir
|
lib/chart/pie_chart.ex
|
mrjoelkemp/contex
|
701a7f2a86c1f1dfdc4fd5199e6bb3b939b44d2e
|
[
"MIT"
] | null | null | null |
lib/chart/pie_chart.ex
|
mrjoelkemp/contex
|
701a7f2a86c1f1dfdc4fd5199e6bb3b939b44d2e
|
[
"MIT"
] | null | null | null |
lib/chart/pie_chart.ex
|
mrjoelkemp/contex
|
701a7f2a86c1f1dfdc4fd5199e6bb3b939b44d2e
|
[
"MIT"
] | null | null | null |
defmodule Contex.PieChart do
  @moduledoc """
  A Pie Chart that displays data in a circular graph.

  The pieces of the graph are proportional to the fraction of the whole in each category.
  Each slice of the pie is relative to the size of that category in the group as a whole.
  The entire “pie” represents 100 percent of a whole, while the pie “slices” represent portions of the whole.

  Fill colours for each slice can be specified with the `colour_palette` parameter in chart options, or can be
  applied from a `CategoryColourScale` supplied in the `colour_scale` parameter. If neither option is supplied
  a default colour palette is used.
  """

  alias __MODULE__
  alias Contex.{Dataset, Mapping, CategoryColourScale}

  defstruct [
    :dataset,
    :mapping,
    :options,
    :colour_scale
  ]

  @type t() :: %__MODULE__{}

  # Both column mappings are optional ("zero or one"); when absent they are
  # resolved from the dataset by `Contex.Mapping`.
  @required_mappings [
    category_col: :zero_or_one,
    value_col: :zero_or_one
  ]

  @default_options [
    width: 600,
    height: 400,
    colour_palette: :default,
    colour_scale: nil,
    data_labels: true
  ]

  @doc """
  Create a new PieChart struct from Dataset.

  Options may be passed to control the settings for the chart. Options available are:

  - `:data_labels` : `true` (default) or `false` - display labels for each slice value
  - `:colour_palette` : `:default` (default) or colour palette - see `colours/2`

  An example:

      data = [
        ["Cat", 10.0],
        ["Dog", 20.0],
        ["Hamster", 5.0]
      ]

      dataset = Dataset.new(data, ["Pet", "Preference"])

      opts = [
        mapping: %{category_col: "Pet", value_col: "Preference"},
        colour_palette: ["fbb4ae", "b3cde3", "ccebc5"],
        legend_setting: :legend_right,
        data_labels: false,
        title: "Why dogs are better than cats"
      ]

      Contex.Plot.new(dataset, Contex.PieChart, 600, 400, opts)
  """
  def new(%Dataset{} = dataset, options \\ []) when is_list(options) do
    options = check_options(options)
    options = Keyword.merge(@default_options, options)
    mapping = Mapping.new(@required_mappings, Keyword.get(options, :mapping), dataset)

    %PieChart{
      dataset: dataset,
      mapping: mapping,
      options: options,
      colour_scale: Keyword.get(options, :colour_scale)
    }
  end

  # Normalizes the :colour_scale option, discarding anything that is not a
  # CategoryColourScale struct.
  defp check_options(options) do
    colour_scale = check_colour_scale(Keyword.get(options, :colour_scale))
    Keyword.put(options, :colour_scale, colour_scale)
  end

  defp check_colour_scale(%CategoryColourScale{} = scale), do: scale
  defp check_colour_scale(_), do: nil

  @doc false
  def set_size(%PieChart{} = chart, width, height) do
    chart
    |> set_option(:width, width)
    |> set_option(:height, height)
  end

  @doc false
  def get_svg_legend(%PieChart{} = chart) do
    get_colour_palette(chart)
    |> Contex.Legend.to_svg()
  end

  @doc """
  Overrides the default colours.

  Colours can either be a named palette defined in `Contex.CategoryColourScale` or a list of strings representing hex code
  of the colour as per CSS colour hex codes, but without the #. For example:

  ```
  barchart = BarChart.colours(barchart, ["fbb4ae", "b3cde3", "ccebc5"])
  ```

  The colours will be applied to the data series in the same order as the columns are specified in `set_val_col_names/2`
  """
  @deprecated "Set in new/2 options"
  @spec colours(PieChart.t(), Contex.CategoryColourScale.colour_palette()) ::
          PieChart.t()
  def colours(%PieChart{} = chart, colour_palette) when is_list(colour_palette) do
    set_option(chart, :colour_palette, colour_palette)
  end

  def colours(%PieChart{} = chart, colour_palette) when is_atom(colour_palette) do
    set_option(chart, :colour_palette, colour_palette)
  end

  # Anything other than a list or atom falls back to the default palette.
  def colours(%PieChart{} = chart, _) do
    set_option(chart, :colour_palette, :default)
  end

  @doc """
  Renders the PieChart to svg, including the svg wrapper, as a string or improper string list that
  is marked safe.
  """
  def to_svg(%PieChart{} = chart) do
    [
      "<g>",
      generate_slices(chart),
      "</g>"
    ]
  end

  # Returns the category value of every row, in dataset order.
  def get_categories(%PieChart{dataset: dataset, mapping: mapping}) do
    cat_accessor = dataset |> Dataset.value_fn(mapping.column_map[:category_col])

    dataset.data
    |> Enum.map(&cat_accessor.(&1))
  end

  defp set_option(%PieChart{options: options} = plot, key, value) do
    options = Keyword.put(options, key, value)
    %{plot | options: options}
  end

  defp get_option(%PieChart{options: options}, key) do
    Keyword.get(options, key)
  end

  # An explicitly supplied CategoryColourScale wins ...
  defp get_colour_palette(%PieChart{colour_scale: colour_scale}) when not is_nil(colour_scale) do
    colour_scale
  end

  # ... otherwise build a scale from the categories and the palette option.
  defp get_colour_palette(%PieChart{} = chart) do
    get_categories(chart)
    |> CategoryColourScale.new()
    |> CategoryColourScale.set_palette(get_option(chart, :colour_palette))
  end

  # Draws each slice as a transparent circle of radius r/2 with a stroke as
  # wide as r; stroke-dasharray/stroke-dashoffset carve the stroke into arcs,
  # which is a standard SVG pie-chart technique.
  defp generate_slices(%PieChart{} = chart) do
    height = get_option(chart, :height)
    with_labels? = get_option(chart, :data_labels)
    colour_palette = get_colour_palette(chart)

    r = height / 2
    # Circumference of the stroke's centreline (a circle of radius r/2).
    stroke_circumference = 2 * :math.pi() * r / 2

    scale_values(chart)
    |> Enum.map_reduce({0, 0}, fn {value, category}, {idx, offset} ->
      text_rotation = rotate_for(value, offset)

      label =
        if with_labels? do
          ~s"""
          <text x="#{negate_if_flipped(r, text_rotation)}"
          y="#{negate_if_flipped(r, text_rotation)}"
          text-anchor="middle"
          fill="white"
          stroke-width="1"
          transform="rotate(#{text_rotation},#{r},#{r})
          translate(#{r / 2}, #{negate_if_flipped(5, text_rotation)})
          #{if need_flip?(text_rotation), do: "scale(-1,-1)"}"
          >
          #{Float.round(value, 2)}%
          </text>
          """
        else
          ""
        end

      {
        ~s"""
        <circle r="#{r / 2}" cx="#{r}" cy="#{r}" fill="transparent"
        stroke="##{CategoryColourScale.colour_for_value(colour_palette, category)}"
        stroke-width="#{r}"
        stroke-dasharray="#{slice_value(value, stroke_circumference)} #{stroke_circumference}"
        stroke-dashoffset="-#{slice_value(offset, stroke_circumference)}">
        </circle>
        #{label}
        """,
        {idx + 1, offset + value}
      }
    end)
    |> elem(0)
    |> Enum.join()
  end

  # Converts a percentage (0..100) into a dash length along the stroke.
  defp slice_value(value, stroke_circumference) do
    value * stroke_circumference / 100
  end

  # Rotation (degrees) of a slice's label: midpoint of the slice, where
  # 1% of the pie corresponds to 3.6 degrees.
  defp rotate_for(n, offset) do
    n / 2 * 3.6 + offset * 3.6
  end

  # Labels on the left half of the pie would render upside down.
  defp need_flip?(rotation) do
    90 < rotation and rotation < 270
  end

  defp negate_if_flipped(number, rotation) do
    if need_flip?(rotation),
      do: -number,
      else: number
  end

  @spec scale_values(PieChart.t()) :: [{value :: number(), label :: any()}]
  defp scale_values(%PieChart{dataset: dataset, mapping: mapping}) do
    val_accessor = dataset |> Dataset.value_fn(mapping.column_map[:value_col])
    cat_accessor = dataset |> Dataset.value_fn(mapping.column_map[:category_col])

    # NOTE(review): if every value is 0 this divides by zero below — confirm
    # callers guarantee a positive total.
    sum = dataset.data |> Enum.reduce(0, fn col, acc -> val_accessor.(col) + acc end)

    # Each row becomes {percentage_of_total, category}.
    dataset.data
    |> Enum.map_reduce(sum, &{{val_accessor.(&1) / &2 * 100, cat_accessor.(&1)}, &2})
    |> elem(0)
  end
end
| 29.97541
| 122
| 0.640963
|
79c042b4343a36ef6b7b1a229c0c088a498aaab6
| 1,594
|
ex
|
Elixir
|
lib/vintage_net/interface/udhcpd.ex
|
byronanderson/vintage_net
|
2b2cc3fe876cef351b0aafa83725a7ac71dd00ec
|
[
"Apache-2.0"
] | null | null | null |
lib/vintage_net/interface/udhcpd.ex
|
byronanderson/vintage_net
|
2b2cc3fe876cef351b0aafa83725a7ac71dd00ec
|
[
"Apache-2.0"
] | null | null | null |
lib/vintage_net/interface/udhcpd.ex
|
byronanderson/vintage_net
|
2b2cc3fe876cef351b0aafa83725a7ac71dd00ec
|
[
"Apache-2.0"
] | null | null | null |
defmodule VintageNet.Interface.Udhcpd do
  @moduledoc """
  Handles lease-file notifications from udhcpd and publishes the parsed
  leases to the VintageNet property table.
  """

  @behaviour VintageNet.ToElixir.UdhcpdHandler

  require Logger

  @impl true
  def lease_update(ifname, lease_file) do
    case parse_leases(lease_file) do
      {:ok, leases} ->
        VintageNet.PropertyTable.put(
          VintageNet,
          ["interface", ifname, "dhcpd", "leases"],
          leases
        )

      {:error, _} ->
        _ = Logger.error("#{ifname}: Failed to handle lease update from #{lease_file}")

        VintageNet.PropertyTable.clear_prefix(VintageNet, ["interface", ifname, "dhcpd", "leases"])
    end
  end

  @doc """
  Parse the leases file from udhcpd.

  Returns `{:ok, leases}` or `{:error, reason}` when the file cannot be read
  or does not contain valid lease records.
  """
  def parse_leases(path) do
    # The file starts with an 8-byte header, followed by fixed-size records.
    # The previous `with`-based version let a file shorter than 8 bytes fall
    # through as `{:ok, binary}`, which lease_update would then publish as if
    # it were a lease list; report it as an error instead.
    case File.read(path) do
      {:ok, <<_timestamp::binary-size(8), rest::binary>>} ->
        do_parse_leases(rest, [])

      {:ok, _too_short} ->
        {:error, :truncated}

      {:error, _} = error ->
        error
    end
  end

  # Each record: 32-bit lease time, 4 IP octets, 6 MAC octets,
  # a 20-byte NUL-padded hostname, and 2 pad bytes.
  def do_parse_leases(
        <<leasetime::unsigned-integer-size(32), ip1, ip2, ip3, ip4, mac1, mac2, mac3, mac4,
          mac5, mac6, hostname::binary-size(20), _pad::binary-size(2), rest::binary>>,
        acc
      ) do
    lease = %{
      leasetime: leasetime,
      lease_nip: Enum.join([ip1, ip2, ip3, ip4], "."),
      # NOTE(review): hex octets are not zero-padded (e.g. "A" not "0A") —
      # kept as-is for compatibility with existing consumers.
      lease_mac:
        Enum.join(
          [
            Integer.to_string(mac1, 16),
            Integer.to_string(mac2, 16),
            Integer.to_string(mac3, 16),
            Integer.to_string(mac4, 16),
            Integer.to_string(mac5, 16),
            Integer.to_string(mac6, 16)
          ],
          ":"
        ),
      hostname: String.trim(hostname, <<0>>)
    }

    do_parse_leases(rest, [lease | acc])
  end

  def do_parse_leases(<<>>, acc), do: {:ok, acc}

  # Trailing bytes that don't form a full record previously raised a
  # FunctionClauseError; return an error tuple so lease_update's error
  # branch handles it.
  def do_parse_leases(_malformed, _acc), do: {:error, :corrupt}
end
| 27.964912
| 99
| 0.580301
|
79c0532377cbe09bb16f64a32fb70562eed7ebb1
| 1,102
|
ex
|
Elixir
|
lib/greyhound/helpers/uuid.ex
|
sticksnleaves/greyhound
|
f230c736c297f176ef03dfa01e707fb4eb746f43
|
[
"MIT"
] | 5
|
2018-09-05T12:10:49.000Z
|
2021-12-13T12:48:39.000Z
|
lib/greyhound/helpers/uuid.ex
|
sticksnleaves/greyhound
|
f230c736c297f176ef03dfa01e707fb4eb746f43
|
[
"MIT"
] | null | null | null |
lib/greyhound/helpers/uuid.ex
|
sticksnleaves/greyhound
|
f230c736c297f176ef03dfa01e707fb4eb746f43
|
[
"MIT"
] | null | null | null |
defmodule Greyhound.Helpers.UUID do
  @moduledoc false

  # Version nibble for a random (version 4) UUID.
  @uuid_v4 4
  # RFC 4122 variant bits (binary `10`), stored in the top 2 bits of octet 8.
  @variant10 2

  @doc """
  Generates a random RFC 4122 version 4 UUID as a lowercase
  `8-4-4-4-12` hex string.
  """
  @spec v4() :: binary
  def v4() do
    # Take 16 random bytes, then force the version and variant bits.
    <<u0::48, _::4, u1::12, _::2, u2::62>> = :crypto.strong_rand_bytes(16)
    uuid_to_string(<<u0::48, @uuid_v4::4, u1::12, @variant10::2, u2::62>>)
  end

  #
  # private
  #

  # Formats the 16-byte UUID as dash-separated lowercase hex groups.
  # Uses the standard library's `Base.encode16/2` instead of the previous
  # hand-rolled nibble-to-hex conversion (same output, less code).
  defp uuid_to_string(
         <<g0::binary-size(4), g1::binary-size(2), g2::binary-size(2), g3::binary-size(2),
           g4::binary-size(6)>>
       ) do
    Enum.map_join([g0, g1, g2, g3, g4], "-", &Base.encode16(&1, case: :lower))
  end
end
| 18.677966
| 74
| 0.565336
|
79c0668e1dabab5af876928ada299cf96dd26601
| 2,064
|
ex
|
Elixir
|
lib/oli_web/live/publisher_live/form.ex
|
jrissler/oli-torus
|
747f9e4360163d76a6ca5daee3aab1feab0c99b1
|
[
"MIT"
] | 1
|
2022-03-17T20:35:47.000Z
|
2022-03-17T20:35:47.000Z
|
lib/oli_web/live/publisher_live/form.ex
|
jrissler/oli-torus
|
747f9e4360163d76a6ca5daee3aab1feab0c99b1
|
[
"MIT"
] | 9
|
2021-11-02T16:52:09.000Z
|
2022-03-25T15:14:01.000Z
|
lib/oli_web/live/publisher_live/form.ex
|
marc-hughes/oli-torus-1
|
aa3c9bb2d91b678a365be839761eaf86c60ee35c
|
[
"MIT"
] | null | null | null |
defmodule OliWeb.PublisherLive.Form do
  @moduledoc """
  Surface component rendering the publisher create/edit form
  (name, email, address, main contact, website URL).
  """

  use Surface.Component

  alias Surface.Components.Form
  alias Surface.Components.Form.{EmailInput, ErrorTag, Field, Label, TextInput}

  # Changeset backing all form fields.
  prop(changeset, :changeset, required: true)
  # Event fired when the form is submitted.
  prop(save, :event, required: true)
  # When false, field labels (and the "default" badge) are hidden.
  prop(display_labels, :boolean, default: true)

  def render(assigns) do
    ~F"""
    <Form for={@changeset} submit={@save}>
      <Field name={:name} class="form-group">
        {#if @display_labels}
          <Label class="control-label">Publisher Name</Label>
          {#if @changeset.data.default}
            <span class="badge badge-info">default</span>
          {/if}
        {/if}
        <TextInput class="form-control" opts={placeholder: "Name", maxlength: "255"}/>
        <ErrorTag/>
      </Field>
      <Field name={:email} class="form-group">
        {#if @display_labels}
          <Label class="control-label">Publisher Email</Label>
        {/if}
        <EmailInput class="form-control" opts={placeholder: "Email", maxlength: "255"}/>
        <ErrorTag/>
      </Field>
      <Field name={:address} class="form-group">
        {#if @display_labels}
          <Label class="control-label">Publisher Address</Label>
        {/if}
        <TextInput class="form-control" opts={placeholder: "Address", maxlength: "255"}/>
        <ErrorTag/>
      </Field>
      <Field name={:main_contact} class="form-group">
        {#if @display_labels}
          <Label class="control-label">Publisher Main Contact</Label>
        {/if}
        <TextInput class="form-control" opts={placeholder: "Main Contact", maxlength: "255"}/>
        <ErrorTag/>
      </Field>
      <Field name={:website_url} class="form-group">
        {#if @display_labels}
          <Label class="control-label">Publisher Website URL</Label>
        {/if}
        <TextInput class="form-control" opts={placeholder: "Website URL", maxlength: "255"}/>
        <ErrorTag/>
      </Field>
      <button class="form-button btn btn-md btn-primary btn-block mt-3" type="submit">Save</button>
    </Form>
    """
  end
end
| 33.290323
| 99
| 0.602713
|
79c0a73ac75a9b61f76387b5a6479b1141da62aa
| 1,522
|
ex
|
Elixir
|
backend/lib/caffe_web/schema.ex
|
eeng/caffe
|
d85d0dd56a8204c715052ddaf3d990e47c5df0e9
|
[
"MIT"
] | 7
|
2020-03-27T08:26:52.000Z
|
2021-08-29T09:50:31.000Z
|
backend/lib/caffe_web/schema.ex
|
eeng/caffe
|
d85d0dd56a8204c715052ddaf3d990e47c5df0e9
|
[
"MIT"
] | null | null | null |
backend/lib/caffe_web/schema.ex
|
eeng/caffe
|
d85d0dd56a8204c715052ddaf3d990e47c5df0e9
|
[
"MIT"
] | null | null | null |
defmodule CaffeWeb.Schema do
  @moduledoc """
  Absinthe GraphQL schema: composes the accounts, menu and ordering type
  modules and defines custom scalars plus Dataloader/middleware wiring.
  """

  use Absinthe.Schema

  alias Caffe.Menu
  alias CaffeWeb.Schema.Middleware

  import_types CaffeWeb.Schema.{AccountsTypes, MenuTypes, OrderingTypes}

  query do
    import_fields :accounts_queries
    import_fields :menu_queries
    import_fields :ordering_queries
  end

  mutation do
    import_fields :accounts_mutations
    import_fields :menu_mutations
    import_fields :ordering_mutations
  end

  subscription do
    import_fields :ordering_subscriptions
  end

  # Custom scalar: decimals travel over the wire as strings.
  scalar :decimal do
    parse fn
      # NOTE(review): `Decimal.parse/1` returns `{:ok, d}` on Decimal < 2.0
      # but `{d, rest}` on Decimal >= 2.0 — confirm the pinned dependency
      # version matches what Absinthe expects here.
      %{value: value}, _ when is_binary(value) -> Decimal.parse(value)
      _, _ -> :error
    end

    serialize &to_string/1
  end

  # Custom scalar: ISO-8601 datetimes.
  scalar :datetime do
    parse fn input ->
      case DateTime.from_iso8601(input.value) do
        {:ok, datetime, _} -> {:ok, datetime}
        _ -> :error
      end
    end

    serialize fn datetime ->
      DateTime.to_iso8601(datetime)
    end
  end

  # Puts a Dataloader with both Menu sources into the Absinthe context.
  def context(ctx) do
    loader =
      Dataloader.new()
      |> Dataloader.add_source(Menu.Item, Menu.data())
      |> Dataloader.add_source(Menu.Category, Menu.data())

    Map.put(ctx, :loader, loader)
  end

  def plugins do
    [Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults()
  end

  # Appends the error-handling middleware to every mutation field.
  def middleware(middleware, field, object) do
    middleware
    |> apply(:errors, field, object)
  end

  # Note: this private `apply/4` shadows `Kernel.apply/3` within this module.
  defp apply(middleware, :errors, _field, %{identifier: :mutation}) do
    middleware ++ [Middleware.HandleErrors]
  end

  defp apply(middleware, _, _, _) do
    middleware
  end
end
| 21.138889
| 72
| 0.68134
|
79c0a9055e51bf11aa96ca1ee2513007645b2633
| 274
|
exs
|
Elixir
|
test/train2cal_web/views/layout_view_test.exs
|
bfontaine/train2cal
|
d4a69e3dece4e68f1e280c9f5d796b738dee64bb
|
[
"MIT"
] | null | null | null |
test/train2cal_web/views/layout_view_test.exs
|
bfontaine/train2cal
|
d4a69e3dece4e68f1e280c9f5d796b738dee64bb
|
[
"MIT"
] | null | null | null |
test/train2cal_web/views/layout_view_test.exs
|
bfontaine/train2cal
|
d4a69e3dece4e68f1e280c9f5d796b738dee64bb
|
[
"MIT"
] | null | null | null |
defmodule Train2calWeb.LayoutViewTest do
  use Train2calWeb.ConnCase, async: true

  # When testing helpers, you may want to import Phoenix.HTML and
  # use functions such as safe_to_string() to convert the helper
  # result into an HTML string.
  # import Phoenix.HTML

  # NOTE(review): no assertions yet — this is a placeholder test module.
end
| 30.444444
| 65
| 0.770073
|
79c0b8d694c677b303dd2b2c60bde2de9de62769
| 2,438
|
ex
|
Elixir
|
clients/service_user/lib/google_api/service_user/v1/model/context_rule.ex
|
leandrocp/elixir-google-api
|
a86e46907f396d40aeff8668c3bd81662f44c71e
|
[
"Apache-2.0"
] | null | null | null |
clients/service_user/lib/google_api/service_user/v1/model/context_rule.ex
|
leandrocp/elixir-google-api
|
a86e46907f396d40aeff8668c3bd81662f44c71e
|
[
"Apache-2.0"
] | null | null | null |
clients/service_user/lib/google_api/service_user/v1/model/context_rule.ex
|
leandrocp/elixir-google-api
|
a86e46907f396d40aeff8668c3bd81662f44c71e
|
[
"Apache-2.0"
] | 1
|
2020-11-10T16:58:27.000Z
|
2020-11-10T16:58:27.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# Generated model: edit the generator templates, not this module.
defmodule GoogleApi.ServiceUser.V1.Model.ContextRule do
  @moduledoc """
  A context rule provides information about the context for an individual API element.

  ## Attributes

  - allowedRequestExtensions ([String.t]): A list of full type names or extension IDs of extensions allowed in grpc side channel from client to backend. Defaults to: `null`.
  - allowedResponseExtensions ([String.t]): A list of full type names or extension IDs of extensions allowed in grpc side channel from backend to client. Defaults to: `null`.
  - provided ([String.t]): A list of full type names of provided contexts. Defaults to: `null`.
  - requested ([String.t]): A list of full type names of requested contexts. Defaults to: `null`.
  - selector (String.t): Selects the methods to which this rule applies. Refer to selector for syntax details. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :allowedRequestExtensions => list(any()),
          :allowedResponseExtensions => list(any()),
          :provided => list(any()),
          :requested => list(any()),
          :selector => any()
        }

  # `field/2` macros come from GoogleApi.Gax.ModelBase and drive JSON
  # (de)serialization of the struct.
  field(:allowedRequestExtensions, type: :list)
  field(:allowedResponseExtensions, type: :list)
  field(:provided, type: :list)
  field(:requested, type: :list)
  field(:selector)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceUser.V1.Model.ContextRule do
  # Delegate JSON decoding to the model's own `decode/2` from Gax.ModelBase.
  def decode(json, opts), do: GoogleApi.ServiceUser.V1.Model.ContextRule.decode(json, opts)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceUser.V1.Model.ContextRule do
  # All Gax models share the generic ModelBase encoder.
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 40.633333
| 174
| 0.730107
|
79c0f2d26d724f6aa23701cdd60082c5fd17c3fa
| 3,921
|
ex
|
Elixir
|
components/notifications-service/server/lib/formatters/slack.compliance.ex
|
MsysTechnologiesllc/automate
|
633e3efe43c3e8da808a120381448198282b7c0b
|
[
"Apache-2.0"
] | 4
|
2019-10-24T05:59:51.000Z
|
2021-08-16T15:17:27.000Z
|
components/notifications-service/server/lib/formatters/slack.compliance.ex
|
MsysTechnologiesllc/automate
|
633e3efe43c3e8da808a120381448198282b7c0b
|
[
"Apache-2.0"
] | null | null | null |
components/notifications-service/server/lib/formatters/slack.compliance.ex
|
MsysTechnologiesllc/automate
|
633e3efe43c3e8da808a120381448198282b7c0b
|
[
"Apache-2.0"
] | null | null | null |
defmodule Notifications.Formatters.Slack.Compliance do
  @moduledoc """
  Generates a map from the compliance struct suitable for posting to Slack webhooks as json.
  """
  @behaviour Notifications.Formatters.Behavior

  alias Notifications.ComplianceFailure
  alias Notifications.Formatters.Utils

  # TODO - the top-level envelope belongs in the slack formatter - it can call
  # into this module for attachment_note elements.
  @spec format(ComplianceFailure.t):: map()
  def format(%ComplianceFailure{} = c) do
    msg = Utils.maybe_markdown_url(c.compliance_url,
      "InSpec found a critical control failure on node #{c.node_name}")

    %{
      username: "Chef Automate",
      icon_url: "https://docs.chef.io/_static/chef_logo_v2.png",
      text: msg,
      attachments: [attachment_note(c)],
    }
  end

  # Builds the Slack "attachment" block summarizing the failed profiles and
  # controls (control, profile and node fields plus a preformatted message).
  defp attachment_note(%ComplianceFailure{} = compliance) do
    failed_profiles = compliance.failed_profiles
    crit_failed_controls = failed_critical_controls(failed_profiles)

    {profile_name, control_value} = collect_profile_and_control_names(failed_profiles, crit_failed_controls)

    message = message_from_failed_controls(crit_failed_controls, compliance.test_totals.critical)
    truncated_message = Utils.truncate_slack_message(message)

    fallback = "InSpec critical control failure on node #{compliance.node_name}."

    %{
      fallback: fallback <> "\n" <> truncated_message,
      text: "#{maybe_markdown_preformat(truncated_message)}\n",
      color: "warning",
      fields: [
        %{
          title: "Control ID::Title",
          value: "#{control_value}",
          short: false
        },
        %{
          title: "Profile",
          value: "#{profile_name}",
          short: false
        },
        %{
          title: "Node",
          value: "#{compliance.node_name}",
          short: false
        }
      ],
      mrkdwn_in: ["text", "pretext"]
    }
  end

  # Avoids sending `````` when msg is blank -- which slack does not format well.
  defp maybe_markdown_preformat(""), do: ""
  defp maybe_markdown_preformat(msg), do: "```#{msg}```"

  # TODO - prefiltering must be in place to ensure we never see 0 critical failed
  # Returns {profile_title, "id::title"} when there is exactly one failed
  # profile/control; "Multiple" placeholders otherwise.
  defp collect_profile_and_control_names(profiles, failed_controls) do
    if Enum.count(profiles) == 1 do
      profile = hd(profiles)

      cv =
        if Enum.count(failed_controls) == 1 do
          failed_control = hd(failed_controls)
          "#{failed_control.id}::#{failed_control.title}"
        else
          "Multiple"
        end

      {profile.title, cv}
    else
      {"Multiple", "Multiple"}
    end
  end

  # Collects, across all failed profiles, the controls that are critical.
  def failed_critical_controls(failed_profiles) do
    Enum.flat_map(failed_profiles,
      fn(profile) ->
        Enum.filter(profile.failed_controls, &Utils.failed_critical_control?(&1))
      end)
  end

  # If only one test fails the message from that test is used.
  # If multiple, it's "failed_count of total_count tests failed"
  def all_failed_tests(failed_controls) do
    failures_filter = fn(control) ->
      Enum.filter(control.failed_results,
        &Utils.failed_test?(&1))
    end

    Enum.flat_map(failed_controls, failures_filter)
  end

  # This evaluates the individual failed tests within the provided controls
  # and extracts a message.
  def message_from_failed_controls(failed_controls, total_tests) do
    case all_failed_tests(failed_controls) do
      [] -> "" # No failed tests
      [test] -> test.message # Exactly one failed test
      failed_tests -> # multiple failed tests
        count = Enum.count(failed_tests)
        # TODO: #{count} of {total_tests} failed across {controls} controls.
        "#{count} of #{total_tests} tests failed. View in Chef Automate for full details."
    end
  end
end
| 35.645455
| 108
| 0.639888
|
79c10c692b035fa0d16f43126d4869ef6b6100fc
| 1,576
|
ex
|
Elixir
|
clients/admin/lib/google_api/admin/directory_v1/model/chrome_os_device_active_time_ranges.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/admin/lib/google_api/admin/directory_v1/model/chrome_os_device_active_time_ranges.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/admin/lib/google_api/admin/directory_v1/model/chrome_os_device_active_time_ranges.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Generated model: edit the generator templates, not this module.
defmodule GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceActiveTimeRanges do
  @moduledoc """
  ## Attributes

  * `activeTime` (*type:* `integer()`, *default:* `nil`) - Duration of usage in milliseconds.
  * `date` (*type:* `Date.t`, *default:* `nil`) - Date of usage
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :activeTime => integer(),
          :date => Date.t()
        }

  # `field/1,2` macros come from GoogleApi.Gax.ModelBase and drive JSON
  # (de)serialization of the struct.
  field(:activeTime)
  field(:date, as: Date)
end
defimpl Poison.Decoder, for: GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceActiveTimeRanges do
  # Delegate JSON decoding to the model's own `decode/2` from Gax.ModelBase.
  def decode(json, opts) do
    GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceActiveTimeRanges.decode(json, opts)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Admin.Directory_v1.Model.ChromeOsDeviceActiveTimeRanges do
  # All Gax models share the generic ModelBase encoder.
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 31.52
| 97
| 0.730964
|
79c1287eeeff0179cd028fb1108de0b6ec052c79
| 773
|
ex
|
Elixir
|
lib/exnn/utils/task.ex
|
zampino/exnn
|
2be888df107644daab1aca7614fecb4940fe3c84
|
[
"MIT"
] | 104
|
2015-06-18T18:54:25.000Z
|
2021-11-04T15:07:02.000Z
|
lib/exnn/utils/task.ex
|
nelyj/exnn
|
2be888df107644daab1aca7614fecb4940fe3c84
|
[
"MIT"
] | 3
|
2016-10-09T11:19:42.000Z
|
2018-09-17T16:36:32.000Z
|
lib/exnn/utils/task.ex
|
nelyj/exnn
|
2be888df107644daab1aca7614fecb4940fe3c84
|
[
"MIT"
] | 17
|
2016-03-08T19:02:51.000Z
|
2019-04-27T16:40:52.000Z
|
defmodule EXNN.Utils.Task do
  @moduledoc """
  Helper for awaiting a collection of `Task`s.
  """

  @doc """
  Waits for every task in `tasks` and returns a map of `task.ref => reply`.

  `timeout` (default 5000 ms) applies to each task individually, in order —
  not to the whole batch. Exits (mirroring `Task.await/2` semantics) if a
  task goes down, the node disconnects, or the timeout elapses.
  """
  def wait_all(tasks, timeout \\ 5000) do
    process_tasks(tasks, %{}, timeout)
  end

  def process_tasks([], done, _timeout), do: done

  def process_tasks([%Task{ref: ref} = task | rest], done, timeout) do
    receive do
      {^ref, reply} ->
        # Flush the :DOWN message the task monitor will also deliver.
        Process.demonitor(ref, [:flush])
        process_tasks(rest, Map.put(done, ref, reply), timeout)

      {:DOWN, ^ref, _, _, :noconnection} ->
        # Fixed: exit diagnostics used to report `{__MODULE__, :await, ...}`,
        # a function that does not exist in this module.
        mfa = {__MODULE__, :wait_all, [task, timeout]}
        exit({{:nodedown, node(task.pid)}, mfa})

      {:DOWN, ^ref, _, _, reason} ->
        exit({reason, {__MODULE__, :wait_all, [task, timeout]}})
    after
      timeout ->
        Process.demonitor(ref, [:flush])
        exit({:timeout, {__MODULE__, :wait_all, [task, timeout]}})
    end
  end
end
| 29.730769
| 68
| 0.587322
|
79c13ee7b28375a3e08bc0b65e08edf099c85f63
| 78
|
exs
|
Elixir
|
test/acceptance/readme_test.exs
|
RobertDober/earmark
|
6f20bd06f40e4333294d19eb38031ea480f3d3ba
|
[
"Apache-2.0"
] | null | null | null |
test/acceptance/readme_test.exs
|
RobertDober/earmark
|
6f20bd06f40e4333294d19eb38031ea480f3d3ba
|
[
"Apache-2.0"
] | null | null | null |
test/acceptance/readme_test.exs
|
RobertDober/earmark
|
6f20bd06f40e4333294d19eb38031ea480f3d3ba
|
[
"Apache-2.0"
] | 1
|
2020-09-15T17:47:35.000Z
|
2020-09-15T17:47:35.000Z
|
defmodule Acceptance.ReadmeTest do
  use ExUnit.Case

  # Runs the doctests embedded in the `Earmark` module's documentation.
  doctest Earmark
end
| 13
| 34
| 0.782051
|
79c15f94c9400965a93e7383cf9af5c096776592
| 73
|
ex
|
Elixir
|
lib/cforum_web/views/notification_mail_view.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
lib/cforum_web/views/notification_mail_view.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
lib/cforum_web/views/notification_mail_view.ex
|
campingrider/cforum_ex
|
cf27684c47d6dc26c9c37a946f1c729a79d27c70
|
[
"MIT"
] | null | null | null |
defmodule CforumWeb.NotificationMailerView do
  # View module wired up via the `CforumWeb` `:view` macro; defines no
  # functions of its own — rendering comes from its templates.
  use CforumWeb, :view
end
| 18.25
| 45
| 0.835616
|
79c1f19a9ac18594c58eb6b7559268b58b53bc8d
| 134,203
|
ex
|
Elixir
|
clients/logging/lib/google_api/logging/v2/api/projects.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
clients/logging/lib/google_api/logging/v2/api/projects.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
clients/logging/lib/google_api/logging/v2/api/projects.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Logging.V2.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.Logging.V2.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets the Logging CMEK settings for the given resource.Note: CMEK for the Log Router can be configured for Google Cloud projects, folders, organizations and billing accounts. Once configured for an organization, it applies to all projects and folders in the Google Cloud organization.See Enabling CMEK for Logs Router (https://cloud.google.com/logging/docs/routing/managed-encryption) for more information.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource for which to retrieve CMEK settings. "projects/[PROJECT_ID]/cmekSettings" "organizations/[ORGANIZATION_ID]/cmekSettings" "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" For example:"organizations/12345/cmekSettings"Note: CMEK for the Log Router can be configured for Google Cloud projects, folders, organizations and billing accounts. Once configured for an organization, it applies to all projects and folders in the Google Cloud organization.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.CmekSettings{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_get_cmek_settings(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.CmekSettings.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_get_cmek_settings(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/cmekSettings", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.CmekSettings{}])
end
@doc """
Creates a new exclusion in the _Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The parent resource in which to create the exclusion: "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" For examples:"projects/my-logging-project" "organizations/123456789"
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogExclusion.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogExclusion{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_exclusions_create(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.LogExclusion.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_exclusions_create(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v2/projects/{projectsId}/exclusions", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogExclusion{}])
end
@doc """
Deletes an exclusion in the _Default sink.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of an existing exclusion to delete: "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" For example:"projects/my-project/exclusions/my-exclusion"
* `exclusions_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_exclusions_delete(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_exclusions_delete(
connection,
projects_id,
exclusions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v2/projects/{projectsId}/exclusions/{exclusionsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"exclusionsId" => URI.encode(exclusions_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Gets the description of an exclusion in the _Default sink.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of an existing exclusion: "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" For example:"projects/my-project/exclusions/my-exclusion"
* `exclusions_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogExclusion{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_exclusions_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogExclusion.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_exclusions_get(
connection,
projects_id,
exclusions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/exclusions/{exclusionsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"exclusionsId" => URI.encode(exclusions_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogExclusion{}])
end
@doc """
Lists all the exclusions on the _Default sink in a parent resource.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The parent resource whose exclusions are to be listed. "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]"
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response indicates that more results might be available.
* `:pageToken` (*type:* `String.t`) - Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from the previous response. The values of other method parameters should be identical to those in the previous call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListExclusionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_exclusions_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.ListExclusionsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_exclusions_list(connection, projects_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/exclusions", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListExclusionsResponse{}])
end
@doc """
Changes one or more properties of an existing exclusion in the _Default sink.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the exclusion to update: "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" For example:"projects/my-project/exclusions/my-exclusion"
* `exclusions_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. A non-empty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the LogExclusion included in this request. Fields not mentioned in update_mask are not changed and are ignored in the request.For example, to change the filter and description of an exclusion, specify an update_mask of "filter,description".
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogExclusion.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogExclusion{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_exclusions_patch(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogExclusion.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_exclusions_patch(
connection,
projects_id,
exclusions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v2/projects/{projectsId}/exclusions/{exclusionsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"exclusionsId" => URI.encode(exclusions_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogExclusion{}])
end
@doc """
Gets information about a location.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Resource name for the location.
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Location{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Location.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_get(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/locations/{locationsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Location{}])
end
@doc """
Lists information about the supported locations for this service.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The resource that owns the locations collection, if applicable.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - A filter to narrow down results to a preferred subset. The filtering language accepts strings like "displayName=tokyo", and is documented in more detail in AIP-160 (https://google.aip.dev/160).
* `:pageSize` (*type:* `integer()`) - The maximum number of results to return. If not set, the service selects a default.
* `:pageToken` (*type:* `String.t`) - A page token received from the next_page_token field in the response. Send that page token to receive the subsequent page.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListLocationsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.ListLocationsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_list(connection, projects_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/locations", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListLocationsResponse{}])
end
@doc """
Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The resource in which to create the log bucket: "projects/[PROJECT_ID]/locations/[LOCATION_ID]" For example:"projects/my-project/locations/global"
* `locations_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:bucketId` (*type:* `String.t`) - Required. A client-assigned identifier such as "my-bucket". Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods.
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogBucket.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogBucket{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_create(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogBucket.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_create(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:bucketId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v2/projects/{projectsId}/locations/{locationsId}/buckets", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogBucket{}])
end
@doc """
Deletes a log bucket.Changes the bucket's lifecycle_state to the DELETE_REQUESTED state. After 7 days, the bucket will be purged and all log entries in the bucket will be permanently deleted.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The full resource name of the bucket to delete. "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_delete(
connection,
projects_id,
locations_id,
buckets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Gets a log bucket.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the bucket: "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogBucket{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogBucket.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_get(
connection,
projects_id,
locations_id,
buckets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogBucket{}])
end
@doc """
Lists log buckets.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The parent resource whose buckets are to be listed: "projects/[PROJECT_ID]/locations/[LOCATION_ID]" "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]" Note: The locations portion of the resource must be specified, but supplying the character - in place of LOCATION_ID will return all buckets.
* `locations_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response indicates that more results might be available.
* `:pageToken` (*type:* `String.t`) - Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from the previous response. The values of other method parameters should be identical to those in the previous call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListBucketsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.ListBucketsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_list(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/locations/{locationsId}/buckets", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListBucketsResponse{}])
end
@doc """
Updates a log bucket. This method replaces the following fields in the existing bucket with values from the new bucket: retention_periodIf the retention period is decreased and the bucket is locked, FAILED_PRECONDITION will be returned.If the bucket has a lifecycle_state of DELETE_REQUESTED, then FAILED_PRECONDITION will be returned.After a bucket has been created, the bucket's location cannot be changed.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The full resource name of the bucket to update. "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. Field mask that specifies the fields in bucket that need an update. A bucket field will be overwritten if, and only if, it is in the update mask. name and output only fields cannot be updated.For a detailed FieldMask definition, see: https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMaskFor example: updateMask=retention_days
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogBucket.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogBucket{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_patch(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogBucket.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_patch(
connection,
projects_id,
locations_id,
buckets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogBucket{}])
end
@doc """
Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The full resource name of the bucket to undelete. "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Logging.V2.Model.UndeleteBucketRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_undelete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_undelete(
connection,
projects_id,
locations_id,
buckets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}:undelete",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The bucket in which to create the view `"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"` For example:"projects/my-project/locations/global/buckets/my-bucket"
* `locations_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:viewId` (*type:* `String.t`) - Required. The id to use for this view.
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogView.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogView{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_views_create(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogView.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_views_create(
connection,
projects_id,
locations_id,
buckets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:viewId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}/views",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogView{}])
end
@doc """
Deletes a view on a log bucket. If an UNAVAILABLE error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few minutes.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The full resource name of the view to delete: "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket/views/my-view"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `views_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_views_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_views_delete(
connection,
projects_id,
locations_id,
buckets_id,
views_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url(
"/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}/views/{viewsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &URI.char_unreserved?/1),
"viewsId" => URI.encode(views_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Gets a view on a log bucket..
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the policy: "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket/views/my-view"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `views_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogView{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_views_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogView.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_views_get(
connection,
projects_id,
locations_id,
buckets_id,
views_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}/views/{viewsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &URI.char_unreserved?/1),
"viewsId" => URI.encode(views_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogView{}])
end
@doc """
Lists views on a log bucket.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The bucket whose views are to be listed: "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
* `locations_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request.Non-positive values are ignored. The presence of nextPageToken in the response indicates that more results might be available.
* `:pageToken` (*type:* `String.t`) - Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from the previous response. The values of other method parameters should be identical to those in the previous call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListViewsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_views_list(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.ListViewsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_views_list(
connection,
projects_id,
locations_id,
buckets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}/views",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListViewsResponse{}])
end
@doc """
Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: filter. If an UNAVAILABLE error is returned, this indicates that system is not in a state where it can update the view. If this occurs, please try again in a few minutes.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The full resource name of the view to update "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket/views/my-view"
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `buckets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `views_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Optional. Field mask that specifies the fields in view that need an update. A field will be overwritten if, and only if, it is in the update mask. name and output only fields cannot be updated.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMaskFor example: updateMask=filter
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogView.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogView{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_buckets_views_patch(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogView.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_buckets_views_patch(
connection,
projects_id,
locations_id,
buckets_id,
views_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url(
"/v2/projects/{projectsId}/locations/{locationsId}/buckets/{bucketsId}/views/{viewsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"bucketsId" => URI.encode(buckets_id, &URI.char_unreserved?/1),
"viewsId" => URI.encode(views_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogView{}])
end
@doc """
Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns google.rpc.Code.UNIMPLEMENTED. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to Code.CANCELLED.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The name of the operation resource to be cancelled.
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `operations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Logging.V2.Model.CancelOperationRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_operations_cancel(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_operations_cancel(
connection,
projects_id,
locations_id,
operations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/v2/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"operationsId" => URI.encode(operations_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The name of the operation resource.
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `operations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_operations_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_operations_get(
connection,
projects_id,
locations_id,
operations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/v2/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1),
"operationsId" => URI.encode(operations_id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Operation{}])
end
@doc """
Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns UNIMPLEMENTED.NOTE: the name binding allows API services to override the binding to use different resource name schemes, such as users/*/operations. To override the binding, API services can add a binding such as "/v1/{name=users/*}/operations" to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The name of the operation's parent resource.
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - The standard list filter.
* `:pageSize` (*type:* `integer()`) - The standard list page size.
* `:pageToken` (*type:* `String.t`) - The standard list page token.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListOperationsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_locations_operations_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.ListOperationsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_locations_operations_list(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/locations/{locationsId}/operations", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListOperationsResponse{}])
end
@doc """
Deletes all the log entries in a log for the _Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be deleted. Entries received after the delete operation with a timestamp before the operation will be deleted.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `logName`. Required. The resource name of the log to delete: projects/[PROJECT_ID]/logs/[LOG_ID] organizations/[ORGANIZATION_ID]/logs/[LOG_ID] billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID] folders/[FOLDER_ID]/logs/[LOG_ID][LOG_ID] must be URL-encoded. For example, "projects/my-project-id/logs/syslog", "organizations/123/logs/cloudaudit.googleapis.com%2Factivity".For more information about log names, see LogEntry.
* `logs_id` (*type:* `String.t`) - Part of `logName`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_logs_delete(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_logs_delete(
connection,
projects_id,
logs_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v2/projects/{projectsId}/logs/{logsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"logsId" => URI.encode(logs_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The resource name that owns the logs: projects/[PROJECT_ID] organizations/[ORGANIZATION_ID] billingAccounts/[BILLING_ACCOUNT_ID] folders/[FOLDER_ID]
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response indicates that more results might be available.
* `:pageToken` (*type:* `String.t`) - Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from the previous response. The values of other method parameters should be identical to those in the previous call.
* `:resourceNames` (*type:* `list(String.t)`) - Optional. The resource name that owns the logs: projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]To support legacy queries, it could also be: projects/[PROJECT_ID] organizations/[ORGANIZATION_ID] billingAccounts/[BILLING_ACCOUNT_ID] folders/[FOLDER_ID]
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListLogsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_logs_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.ListLogsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_logs_list(connection, projects_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query,
:resourceNames => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/logs", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListLogsResponse{}])
end
@doc """
Creates a logs-based metric.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The resource name of the project in which to create the metric: "projects/[PROJECT_ID]" The new metric must be provided in the request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogMetric.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogMetric{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_metrics_create(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.LogMetric.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_metrics_create(connection, projects_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v2/projects/{projectsId}/metrics", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogMetric{}])
end
@doc """
Deletes a logs-based metric.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `metricName`. Required. The resource name of the metric to delete: "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
* `metrics_id` (*type:* `String.t`) - Part of `metricName`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_metrics_delete(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_metrics_delete(
connection,
projects_id,
metrics_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v2/projects/{projectsId}/metrics/{metricsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"metricsId" => URI.encode(metrics_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Gets a logs-based metric.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `metricName`. Required. The resource name of the desired metric: "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
* `metrics_id` (*type:* `String.t`) - Part of `metricName`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogMetric{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_metrics_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogMetric.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_metrics_get(
connection,
projects_id,
metrics_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/metrics/{metricsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"metricsId" => URI.encode(metrics_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogMetric{}])
end
@doc """
Lists logs-based metrics.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The name of the project containing the metrics: "projects/[PROJECT_ID]"
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response indicates that more results might be available.
* `:pageToken` (*type:* `String.t`) - Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from the previous response. The values of other method parameters should be identical to those in the previous call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListLogMetricsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_metrics_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.ListLogMetricsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_metrics_list(connection, projects_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/metrics", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListLogMetricsResponse{}])
end
@doc """
Creates or updates a logs-based metric.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `metricName`. Required. The resource name of the metric to update: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" The updated metric must be provided in the request and it's name field must be the same as [METRIC_ID] If the metric does not exist in [PROJECT_ID], then a new metric is created.
* `metrics_id` (*type:* `String.t`) - Part of `metricName`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogMetric.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogMetric{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_metrics_update(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogMetric.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_metrics_update(
connection,
projects_id,
metrics_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url("/v2/projects/{projectsId}/metrics/{metricsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"metricsId" => URI.encode(metrics_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogMetric{}])
end
@doc """
Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's writer_identity is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The resource in which to create the sink: "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" For examples:"projects/my-project" "organizations/123456789"
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:uniqueWriterIdentity` (*type:* `boolean()`) - Optional. Determines the kind of IAM identity returned as writer_identity in the new sink. If this value is omitted or set to false, and if the sink's parent is a project, then the value returned as writer_identity is the same group or service account used by Cloud Logging before the addition of writer identities to this API. The sink's destination must be in the same project as the sink itself.If this field is set to true, or if the sink is owned by a non-project resource such as an organization, then the value of writer_identity will be a unique service account used only for exports from the new sink. For more information, see writer_identity in LogSink.
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogSink.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogSink{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_sinks_create(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.LogSink.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_sinks_create(connection, projects_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:uniqueWriterIdentity => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v2/projects/{projectsId}/sinks", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogSink{}])
end
@doc """
Deletes a sink. If the sink has a unique writer_identity, then that service account is also deleted.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `sinkName`. Required. The full resource name of the sink to delete, including the parent resource and the sink identifier: "projects/[PROJECT_ID]/sinks/[SINK_ID]" "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" For example:"projects/my-project/sinks/my-sink"
* `sinks_id` (*type:* `String.t`) - Part of `sinkName`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_sinks_delete(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_sinks_delete(
connection,
projects_id,
sinks_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v2/projects/{projectsId}/sinks/{sinksId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"sinksId" => URI.encode(sinks_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Gets a sink.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `sinkName`. Required. The resource name of the sink: "projects/[PROJECT_ID]/sinks/[SINK_ID]" "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" For example:"projects/my-project/sinks/my-sink"
* `sinks_id` (*type:* `String.t`) - Part of `sinkName`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogSink{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_sinks_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogSink.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_sinks_get(
connection,
projects_id,
sinks_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/sinks/{sinksId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"sinksId" => URI.encode(sinks_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogSink{}])
end
@doc """
Lists sinks.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The parent resource whose sinks are to be listed: "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]"
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response indicates that more results might be available.
* `:pageToken` (*type:* `String.t`) - Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from the previous response. The values of other method parameters should be identical to those in the previous call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListSinksResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_sinks_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.ListSinksResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_sinks_list(connection, projects_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/projects/{projectsId}/sinks", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListSinksResponse{}])
end
@doc """
Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter.The updated sink might also have a new writer_identity; see the unique_writer_identity field.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `sinkName`. Required. The full resource name of the sink to update, including the parent resource and the sink identifier: "projects/[PROJECT_ID]/sinks/[SINK_ID]" "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" For example:"projects/my-project/sinks/my-sink"
* `sinks_id` (*type:* `String.t`) - Part of `sinkName`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:uniqueWriterIdentity` (*type:* `boolean()`) - Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the updated sink depends on both the old and new values of this field: If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity. If the old value is false and the new value is true, then writer_identity is changed to a unique service account. It is an error if the old value is true and the new value is set to false or defaulted to false.
* `:updateMask` (*type:* `String.t`) - Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask. name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility purposes:destination,filter,includeChildrenAt some point in the future, behavior will be removed and specifying an empty updateMask will be an error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMaskFor example: updateMask=filter
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogSink.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogSink{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_sinks_patch(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogSink.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_sinks_patch(
connection,
projects_id,
sinks_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:uniqueWriterIdentity => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v2/projects/{projectsId}/sinks/{sinksId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"sinksId" => URI.encode(sinks_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogSink{}])
end
@doc """
Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: destination, and filter.The updated sink might also have a new writer_identity; see the unique_writer_identity field.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `sinkName`. Required. The full resource name of the sink to update, including the parent resource and the sink identifier: "projects/[PROJECT_ID]/sinks/[SINK_ID]" "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" For example:"projects/my-project/sinks/my-sink"
* `sinks_id` (*type:* `String.t`) - Part of `sinkName`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:uniqueWriterIdentity` (*type:* `boolean()`) - Optional. See sinks.create for a description of this field. When updating a sink, the effect of this field on the value of writer_identity in the updated sink depends on both the old and new values of this field: If the old and new values of this field are both false or both true, then there is no change to the sink's writer_identity. If the old value is false and the new value is true, then writer_identity is changed to a unique service account. It is an error if the old value is true and the new value is set to false or defaulted to false.
* `:updateMask` (*type:* `String.t`) - Optional. Field mask that specifies the fields in sink that need an update. A sink field will be overwritten if, and only if, it is in the update mask. name and output only fields cannot be updated.An empty updateMask is temporarily treated as using the following mask for backwards compatibility purposes:destination,filter,includeChildrenAt some point in the future, behavior will be removed and specifying an empty updateMask will be an error.For a detailed FieldMask definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMaskFor example: updateMask=filter
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogSink.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogSink{}}` on success
* `{:error, info}` on failure
"""
@spec logging_projects_sinks_update(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogSink.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def logging_projects_sinks_update(
connection,
projects_id,
sinks_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:uniqueWriterIdentity => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url("/v2/projects/{projectsId}/sinks/{sinksId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"sinksId" => URI.encode(sinks_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogSink{}])
end
end
| 50.33871
| 722
| 0.625903
|
79c1f7a80a306f15768d39b35c197df25d8b0803
| 300
|
ex
|
Elixir
|
apps/re_integrations/test/support/test_http.ex
|
caspg/backend
|
34df9dc14ab8ed75de4578fefa2e087580c7e867
|
[
"MIT"
] | 1
|
2021-01-19T05:01:15.000Z
|
2021-01-19T05:01:15.000Z
|
apps/re_integrations/test/support/test_http.ex
|
caspg/backend
|
34df9dc14ab8ed75de4578fefa2e087580c7e867
|
[
"MIT"
] | null | null | null |
apps/re_integrations/test/support/test_http.ex
|
caspg/backend
|
34df9dc14ab8ed75de4578fefa2e087580c7e867
|
[
"MIT"
] | null | null | null |
defmodule ReIntegrations.TestHTTP do
  @moduledoc false

  # Stub HTTP client used by the test suite instead of a real HTTP library.

  # Cloudinary image fetch: echoes the requested filename back as the body.
  def get("https://res.cloudinary.com/emcasa/image/upload/f_auto/v1513818385/" <> filename) do
    {:ok, %{body: filename}}
  end

  # Credit simulator endpoint: returns a canned JSON payload.
  def get(%URI{path: "/simulator"}, [], _opts) do
    {:ok, %{body: "{\"cem\":\"10,8%\",\"cet\":\"11,3%\"}"}}
  end
end
| 30
| 92
| 0.606667
|
79c21351ba4d75285e83041e54998de89b80422e
| 15,295
|
exs
|
Elixir
|
test/scenic/component/input/text_field_test.exs
|
PragTob/scenic
|
7e5193c2a0b86768937e5bbd7c22f00544d26048
|
[
"Apache-2.0"
] | null | null | null |
test/scenic/component/input/text_field_test.exs
|
PragTob/scenic
|
7e5193c2a0b86768937e5bbd7c22f00544d26048
|
[
"Apache-2.0"
] | null | null | null |
test/scenic/component/input/text_field_test.exs
|
PragTob/scenic
|
7e5193c2a0b86768937e5bbd7c22f00544d26048
|
[
"Apache-2.0"
] | null | null | null |
#
# Created by Boyd Multerer on September 18, 2018
# Copyright © 2018 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Component.Input.TextFieldTest do
use ExUnit.Case, async: true
doctest Scenic
# alias Scenic.Component
alias Scenic.Graph
alias Scenic.Primitive
alias Scenic.ViewPort
alias Scenic.Component.Input.TextField
  # Fixture text used as the field's starting contents throughout the tests.
  @initial_value "Initial value"
  # Masked rendering of @initial_value: 13 asterisks, one per character.
  @initial_password "*************"
  # Placeholder hint text shown by the field.
  @hint "Hint String"
  # Baseline component state handed to TextField.handle_input/3 in the tests
  # below. Presumably mirrors the state map TextField.init/2 builds (graph,
  # theme, geometry, cursor index, input filter) — TODO confirm against init/2.
  @state %{
    graph: Graph.build() |> Scenic.Primitives.text("text", id: :text),
    theme: Primitive.Style.Theme.preset(:primary),
    width: 200,
    height: 30,
    value: @initial_value,
    display: @initial_value,
    hint: @hint,
    index: 2,
    char_width: 12,
    focused: false,
    type: :text,
    filter: :all,
    id: :test_id
  }
# ============================================================================
# info
test "info works" do
assert is_bitstring(TextField.info(:bad_data))
assert TextField.info(:bad_data) =~ ":bad_data"
end
# ============================================================================
# verify
test "verify passes valid data" do
assert TextField.verify("Title") == {:ok, "Title"}
end
test "verify fails invalid data" do
assert TextField.verify(:banana) == :invalid_data
end
# ============================================================================
# init
test "init works with simple data" do
{:ok, state} = TextField.init(@initial_value, styles: %{}, id: :test_id)
%Graph{} = state.graph
assert is_map(state.theme)
assert state.value == @initial_value
assert state.display == @initial_value
assert state.focused == false
assert state.type == :text
assert state.id == :test_id
{:ok, state} = TextField.init(@initial_value, styles: %{type: :password})
assert state.value == @initial_value
assert state.display == @initial_password
assert state.type == :password
end
# ============================================================================
# handle_input
# ============================================================================
# control keys
  # "left" decrements the cursor index by one per press, clamping at 0.
  test "handle_input {:key \"left\" moves the cursor to the left" do
    self = self()
    scene_ref = make_ref()
    # Seed the process dictionary — presumably read by the component to find
    # its parent scene; TODO confirm against TextField internals.
    Process.put(:parent_pid, self)
    Process.put(:scene_ref, scene_ref)
    # The Tables registry must be running so the graph push can be delivered.
    {:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
    context = %ViewPort.Context{viewport: self}
    # Baseline fixture puts the cursor at index 2.
    assert @state.index == 2
    {:noreply, state} = TextField.handle_input({:key, {"left", :press, 0}}, context, @state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == 1
    {:noreply, state} = TextField.handle_input({:key, {"left", :press, 0}}, context, state)
    assert state.index == 0
    # does not keep going below 0
    {:noreply, state} = TextField.handle_input({:key, {"left", :press, 0}}, context, state)
    assert state.index == 0
    # cleanup
    Process.exit(tables_pid, :shutdown)
  end
  # "right" increments the cursor index by one per press, clamping at the
  # end of the value string.
  test "handle_input {:key \"right\" moves the cursor to the right" do
    self = self()
    scene_ref = make_ref()
    # Same process-dictionary / Tables setup as the "left" test above.
    Process.put(:parent_pid, self)
    Process.put(:scene_ref, scene_ref)
    {:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
    context = %ViewPort.Context{viewport: self}
    length = String.length(@initial_value)
    # Start two characters short of the end so two presses reach the limit.
    state = %{@state | index: length - 2}
    {:noreply, state} = TextField.handle_input({:key, {"right", :press, 0}}, context, state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == length - 1
    {:noreply, state} = TextField.handle_input({:key, {"right", :press, 0}}, context, state)
    assert state.index == length
    # does not keep going past the end
    {:noreply, state} = TextField.handle_input({:key, {"right", :press, 0}}, context, state)
    assert state.index == length
    # cleanup
    Process.exit(tables_pid, :shutdown)
  end
  # "home" and "page_up" both jump the cursor to index 0.
  test "handle_input {:key \"home\" and \"page_up\" move the cursor all the way to the left" do
    self = self()
    scene_ref = make_ref()
    # Seed the process dictionary — presumably read by the component to find
    # its parent scene; TODO confirm against TextField internals.
    Process.put(:parent_pid, self)
    Process.put(:scene_ref, scene_ref)
    # The Tables registry must be running so the graph push can be delivered.
    {:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
    context = %ViewPort.Context{viewport: self}
    state = %{@state | index: 4}
    {:noreply, state} = TextField.handle_input({:key, {"home", :press, 0}}, context, state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == 0
    # Reset the cursor and verify "page_up" behaves identically.
    state = %{@state | index: 4}
    {:noreply, state} = TextField.handle_input({:key, {"page_up", :press, 0}}, context, state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == 0
    # cleanup
    Process.exit(tables_pid, :shutdown)
  end
  # "end" and "page_down" both jump the cursor to the end of the value.
  test "handle_input {:key \"end\" and \"page_down\" move the cursor all the way to the right" do
    self = self()
    scene_ref = make_ref()
    # Same process-dictionary / Tables setup as above.
    Process.put(:parent_pid, self)
    Process.put(:scene_ref, scene_ref)
    {:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
    context = %ViewPort.Context{viewport: self}
    length = String.length(@initial_value)
    state = %{@state | index: 4}
    {:noreply, state} = TextField.handle_input({:key, {"end", :press, 0}}, context, state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == length
    # Reset the cursor and verify "page_down" behaves identically.
    state = %{@state | index: 4}
    {:noreply, state} = TextField.handle_input({:key, {"page_down", :press, 0}}, context, state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == length
    # cleanup
    Process.exit(tables_pid, :shutdown)
  end
  # "backspace" removes the character left of the cursor and moves the
  # cursor back one; it is a no-op once the cursor reaches index 0.
  test "handle_input {:key \"backspace\" deletes to the left" do
    self = self()
    scene_ref = make_ref()
    # Seed the process dictionary — presumably read by the component to find
    # its parent scene; TODO confirm against TextField internals.
    Process.put(:parent_pid, self)
    Process.put(:scene_ref, scene_ref)
    # The Tables registry must be running so the graph push can be delivered.
    {:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
    context = %ViewPort.Context{viewport: self}
    # Cursor starts at index 2 ("In|itial value"); backspace removes 'n'.
    {:noreply, state} = TextField.handle_input({:key, {"backspace", :press, 0}}, context, @state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == 1
    assert state.value == "Iitial value"
    {:noreply, state} = TextField.handle_input({:key, {"backspace", :press, 0}}, context, state)
    assert state.index == 0
    assert state.value == "itial value"
    # does nothing if already at position 0
    {:noreply, state} = TextField.handle_input({:key, {"backspace", :press, 0}}, context, state)
    assert state.index == 0
    assert state.value == "itial value"
    # cleanup
    Process.exit(tables_pid, :shutdown)
  end
  # "delete" removes the character right of the cursor without moving it;
  # the cursor index stays fixed as characters are consumed.
  test "handle_input {:key \"delete\" deletes to the right" do
    self = self()
    scene_ref = make_ref()
    # Same process-dictionary / Tables setup as above.
    Process.put(:parent_pid, self)
    Process.put(:scene_ref, scene_ref)
    {:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
    context = %ViewPort.Context{viewport: self}
    length = String.length(@initial_value)
    # Place the cursor two characters before the end ("Initial val|ue").
    pos = length - 2
    state = %{@state | index: pos}
    {:noreply, state} = TextField.handle_input({:key, {"delete", :press, 0}}, context, state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == pos
    assert state.value == "Initial vale"
    {:noreply, state} = TextField.handle_input({:key, {"delete", :press, 0}}, context, state)
    assert state.index == pos
    assert state.value == "Initial val"
    # does nothing if already at position 0
    {:noreply, state} = TextField.handle_input({:key, {"delete", :press, 0}}, context, state)
    assert state.index == pos
    assert state.value == "Initial val"
    # cleanup
    Process.exit(tables_pid, :shutdown)
  end
  # A :codepoint event inserts the text at the cursor and advances the
  # cursor by the inserted length (multi-character strings included).
  test "handle_input {:codepoint adds and moves cursor to right" do
    self = self()
    scene_ref = make_ref()
    # Seed the process dictionary — presumably read by the component to find
    # its parent scene; TODO confirm against TextField internals.
    Process.put(:parent_pid, self)
    Process.put(:scene_ref, scene_ref)
    # The Tables registry must be running so the graph push can be delivered.
    {:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
    context = %ViewPort.Context{viewport: self}
    state = %{@state | index: 2}
    {:noreply, state} = TextField.handle_input({:codepoint, {"a", 0}}, context, state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == 3
    assert state.value == "Inaitial value"
    assert state.display == "Inaitial value"
    # can also add strings
    {:noreply, state} = TextField.handle_input({:codepoint, {".com", 0}}, context, state)
    assert state.index == 7
    assert state.value == "Ina.comitial value"
    # cleanup
    Process.exit(tables_pid, :shutdown)
  end
  # For :password fields the real value is updated, but the display string
  # only ever gains '*' characters.
  test "handle_input {:codepoint displays password * chars" do
    self = self()
    scene_ref = make_ref()
    # Same process-dictionary / Tables setup as above.
    Process.put(:parent_pid, self)
    Process.put(:scene_ref, scene_ref)
    {:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
    context = %ViewPort.Context{viewport: self}
    state = %{@state | index: 2, type: :password, display: @initial_password}
    {:noreply, state} = TextField.handle_input({:codepoint, {"a", 0}}, context, state)
    # confirm the graph was pushed
    assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
    assert state.index == 3
    assert state.value == "Inaitial value"
    assert state.display == @initial_password <> "*"
    # cleanup
    Process.exit(tables_pid, :shutdown)
  end
test "handle_input {:codepoint filters input" do
self = self()
scene_ref = make_ref()
Process.put(:parent_pid, self)
Process.put(:scene_ref, scene_ref)
{:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
context = %ViewPort.Context{viewport: self}
# filters to integers
state = %{@state | index: 2, value: "Initial value"}
state = %{state | filter: :integer}
{:noreply, state} = TextField.handle_input({:codepoint, {".", 0}}, context, state)
assert state.index == 2
assert state.value == "Initial value"
{:noreply, state} = TextField.handle_input({:codepoint, {"2", 0}}, context, state)
assert state.index == 3
assert state.value == "In2itial value"
# filters to floats
state = %{@state | index: 2, value: "Initial value"}
state = %{state | filter: :number}
{:noreply, state} = TextField.handle_input({:codepoint, {"a", 0}}, context, state)
assert state.index == 2
assert state.value == "Initial value"
{:noreply, state} = TextField.handle_input({:codepoint, {".", 0}}, context, state)
assert state.index == 3
assert state.value == "In.itial value"
{:noreply, state} = TextField.handle_input({:codepoint, {"2", 0}}, context, state)
assert state.index == 4
assert state.value == "In.2itial value"
# filters to char string
state = %{@state | index: 2, value: "Initial value"}
state = %{state | filter: "abcd"}
{:noreply, state} = TextField.handle_input({:codepoint, {"f", 0}}, context, state)
assert state.index == 2
assert state.value == "Initial value"
{:noreply, state} = TextField.handle_input({:codepoint, {"d", 0}}, context, state)
assert state.index == 3
assert state.value == "Inditial value"
# filters to function
state = %{@state | index: 2, value: "Initial value"}
state = %{state | filter: fn ch -> ch == "k" end}
{:noreply, state} = TextField.handle_input({:codepoint, {"f", 0}}, context, state)
assert state.index == 2
assert state.value == "Initial value"
{:noreply, state} = TextField.handle_input({:codepoint, {"k", 0}}, context, state)
assert state.index == 3
assert state.value == "Inkitial value"
# cleanup
Process.exit(tables_pid, :shutdown)
end
test "handle_input {:key handles repeats" do
self = self()
scene_ref = make_ref()
Process.put(:parent_pid, self)
Process.put(:scene_ref, scene_ref)
{:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
context = %ViewPort.Context{viewport: self}
state = %{@state | index: 4}
{:noreply, state} = TextField.handle_input({:key, {"left", :repeat, 0}}, context, state)
{:noreply, state} = TextField.handle_input({:key, {"left", :repeat, 0}}, context, state)
{:noreply, state} = TextField.handle_input({:key, {"", :repeat, 0}}, context, state)
# confirm the graph was pushed
assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
assert state.index == 2
assert state.value == "Initial value"
assert state.display == "Initial value"
# {:noreply, state} = TextField.handle_input({:key, {"a", :repeat, 0}}, context, state)
# {:noreply, state} = TextField.handle_input({:key, {"a", :repeat, 0}}, context, state)
# # confirm the graph was pushed
# assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
# assert state.index == 4
# assert state.value == "Inaaaitial value"
# assert state.display == "Inaaaitial value"
# cleanup
Process.exit(tables_pid, :shutdown)
end
test "shows the hint when the value goes to an empty string" do
self = self()
scene_ref = make_ref()
Process.put(:parent_pid, self)
Process.put(:scene_ref, scene_ref)
{:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
context = %ViewPort.Context{viewport: self}
state = %{@state | index: 2, value: "ab", display: "ab"}
{:noreply, state} = TextField.handle_input({:key, {"backspace", :repeat, 0}}, context, state)
{:noreply, state} = TextField.handle_input({:key, {"backspace", :repeat, 0}}, context, state)
assert state.index == 0
assert state.value == ""
assert state.display == ""
%Primitive{data: @hint} = Graph.get!(state.graph, :text)
# cleanup
Process.exit(tables_pid, :shutdown)
end
test "moves the cursor when a click happens inside the text field" do
self = self()
scene_ref = make_ref()
Process.put(:parent_pid, self)
Process.put(:scene_ref, scene_ref)
{:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
context = %ViewPort.Context{viewport: self, id: :border}
state = %{@state | index: 2, focused: true}
{:noreply, state} =
TextField.handle_input(
{:cursor_button, {:left, :press, 0, {100, 0}}},
context,
state
)
assert state.index == 9
# cleanup
Process.exit(tables_pid, :shutdown)
end
test "don't move the cursor when a click happens in the current index location" do
self = self()
scene_ref = make_ref()
Process.put(:parent_pid, self)
Process.put(:scene_ref, scene_ref)
{:ok, tables_pid} = Scenic.ViewPort.Tables.start_link(nil)
context = %ViewPort.Context{viewport: self, id: :border}
state = %{@state | index: 2, focused: true}
{:noreply, state} =
TextField.handle_input(
{:cursor_button, {:left, :press, 0, {30, 0}}},
context,
state
)
assert state.index == 2
# cleanup
Process.exit(tables_pid, :shutdown)
end
test "handle_input does nothing on unknown input" do
context = %ViewPort.Context{viewport: self()}
{:noreply, state} = TextField.handle_input(:unknown, context, @state)
assert state == @state
end
end
| 33.988889
| 97
| 0.627591
|
79c248d3e5abf5e0f4c78551adb52d69802acae7
| 657
|
exs
|
Elixir
|
mix.exs
|
carlogilmar/le_toille_learning_manager
|
00c5679bed5fb9ecf43a71c5d355610db9792bbf
|
[
"MIT"
] | 1
|
2018-09-21T01:14:38.000Z
|
2018-09-21T01:14:38.000Z
|
mix.exs
|
carlogilmar/le_toille_learning_manager
|
00c5679bed5fb9ecf43a71c5d355610db9792bbf
|
[
"MIT"
] | 17
|
2018-09-18T04:12:03.000Z
|
2018-09-21T06:27:45.000Z
|
mix.exs
|
carlogilmar/le_toille_learning_manager
|
00c5679bed5fb9ecf43a71c5d355610db9792bbf
|
[
"MIT"
] | 1
|
2018-09-21T01:08:38.000Z
|
2018-09-21T01:08:38.000Z
|
defmodule EscriptDemo.Mixfile do
use Mix.Project
def project do
[
app: :learning_manager,
version: "1.1.0",
elixir: "~> 1.5",
escript: [main_module: Etoile.Cli],
start_permanent: Mix.env == :prod,
deps: deps()
]
end
def application do
[
mod: {Etoile.Application, []},
extra_applications: [:logger, :runtime_tools, :calendar]
]
end
defp deps do
[
{:tzdata, "~> 0.1.7"},
{:calendar, "~> 0.17.2"},
{:bunt, "~> 0.1.0"},
{:httpoison, "~> 1.5.0"},
{:poison, "~> 3.1"},
{:elixir_uuid, "~> 1.2"},
{:ex_gram, "~> 0.5.0-rc6"}
]
end
end
| 19.323529
| 62
| 0.499239
|
79c258a9c4f1dffabbc9922970cffb928c857d79
| 890
|
ex
|
Elixir
|
clients/service_usage/lib/google_api/service_usage/v1/metadata.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/service_usage/lib/google_api/service_usage/v1/metadata.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/service_usage/lib/google_api/service_usage/v1/metadata.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceUsage.V1 do
@moduledoc """
API client metadata for GoogleApi.ServiceUsage.V1.
"""
@discovery_revision "20200606"
def discovery_revision(), do: @discovery_revision
end
| 32.962963
| 74
| 0.760674
|
79c26209f6f5b8080cc6a004ac63c1ef6b0d3acf
| 3,291
|
ex
|
Elixir
|
clients/sheets/lib/google_api/sheets/v4/model/filter_view.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | null | null | null |
clients/sheets/lib/google_api/sheets/v4/model/filter_view.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/sheets/lib/google_api/sheets/v4/model/filter_view.ex
|
MasashiYokota/elixir-google-api
|
975dccbff395c16afcb62e7a8e411fbb58e9ab01
|
[
"Apache-2.0"
] | 1
|
2020-10-04T10:12:44.000Z
|
2020-10-04T10:12:44.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.FilterView do
@moduledoc """
A filter view.
## Attributes
* `criteria` (*type:* `%{optional(String.t) => GoogleApi.Sheets.V4.Model.FilterCriteria.t}`, *default:* `nil`) - The criteria for showing/hiding values per column. The map's key is the column index, and the value is the criteria for that column. This field is deprecated in favor of filter_specs.
* `filterSpecs` (*type:* `list(GoogleApi.Sheets.V4.Model.FilterSpec.t)`, *default:* `nil`) - The filter criteria for showing/hiding values per column. Both criteria and filter_specs are populated in responses. If both fields are specified in an update request, this field takes precedence.
* `filterViewId` (*type:* `integer()`, *default:* `nil`) - The ID of the filter view.
* `namedRangeId` (*type:* `String.t`, *default:* `nil`) - The named range this filter view is backed by, if any. When writing, only one of range or named_range_id may be set.
* `range` (*type:* `GoogleApi.Sheets.V4.Model.GridRange.t`, *default:* `nil`) - The range this filter view covers. When writing, only one of range or named_range_id may be set.
* `sortSpecs` (*type:* `list(GoogleApi.Sheets.V4.Model.SortSpec.t)`, *default:* `nil`) - The sort order per column. Later specifications are used when values are equal in the earlier specifications.
* `title` (*type:* `String.t`, *default:* `nil`) - The name of the filter view.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:criteria => %{optional(String.t()) => GoogleApi.Sheets.V4.Model.FilterCriteria.t()},
:filterSpecs => list(GoogleApi.Sheets.V4.Model.FilterSpec.t()),
:filterViewId => integer(),
:namedRangeId => String.t(),
:range => GoogleApi.Sheets.V4.Model.GridRange.t(),
:sortSpecs => list(GoogleApi.Sheets.V4.Model.SortSpec.t()),
:title => String.t()
}
field(:criteria, as: GoogleApi.Sheets.V4.Model.FilterCriteria, type: :map)
field(:filterSpecs, as: GoogleApi.Sheets.V4.Model.FilterSpec, type: :list)
field(:filterViewId)
field(:namedRangeId)
field(:range, as: GoogleApi.Sheets.V4.Model.GridRange)
field(:sortSpecs, as: GoogleApi.Sheets.V4.Model.SortSpec, type: :list)
field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.FilterView do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.FilterView.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.FilterView do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 50.630769
| 300
| 0.711334
|
79c294174ddabf424f6332fc1c6ab9ed77bcc8fb
| 12,083
|
exs
|
Elixir
|
test/phoenix/integration/websocket_test.exs
|
gjaldon/phoenix
|
a85a13d8bfbd7d768bb86d38da427875df3fb703
|
[
"MIT"
] | 1
|
2015-05-19T11:04:16.000Z
|
2015-05-19T11:04:16.000Z
|
test/phoenix/integration/websocket_test.exs
|
gjaldon/phoenix
|
a85a13d8bfbd7d768bb86d38da427875df3fb703
|
[
"MIT"
] | null | null | null |
test/phoenix/integration/websocket_test.exs
|
gjaldon/phoenix
|
a85a13d8bfbd7d768bb86d38da427875df3fb703
|
[
"MIT"
] | null | null | null |
Code.require_file "../../support/websocket_client.exs", __DIR__
defmodule Phoenix.Integration.WebSocketTest do
# TODO: Make this test async
use ExUnit.Case
import ExUnit.CaptureLog
alias Phoenix.Integration.WebsocketClient
alias Phoenix.Socket.Message
alias Phoenix.Transports.{V2, WebSocketSerializer}
alias __MODULE__.Endpoint
@moduletag :capture_log
@port 5807
Application.put_env(:phoenix, Endpoint, [
https: false,
http: [port: @port],
secret_key_base: String.duplicate("abcdefgh", 8),
debug_errors: false,
server: true,
pubsub: [adapter: Phoenix.PubSub.PG2, name: __MODULE__]
])
defmodule RoomChannel do
use Phoenix.Channel
intercept ["new_msg"]
def join(topic, message, socket) do
Process.register(self(), String.to_atom(topic))
send(self(), {:after_join, message})
{:ok, socket}
end
def handle_info({:after_join, message}, socket) do
broadcast socket, "user_entered", %{user: message["user"]}
push socket, "joined", Map.merge(%{status: "connected"}, socket.assigns)
{:noreply, socket}
end
def handle_in("new_msg", message, socket) do
broadcast! socket, "new_msg", message
{:reply, :ok, socket}
end
def handle_in("boom", _message, _socket) do
raise "boom"
end
def handle_out("new_msg", payload, socket) do
push socket, "new_msg", Map.put(payload, "transport", inspect(socket.transport))
{:noreply, socket}
end
def terminate(_reason, socket) do
push socket, "you_left", %{message: "bye!"}
:ok
end
end
defmodule UserSocket do
use Phoenix.Socket
channel "room:*", RoomChannel
transport :websocket, Phoenix.Transports.WebSocket,
check_origin: ["//example.com"], timeout: 200
def connect(%{"reject" => "true"}, _socket) do
:error
end
def connect(params, socket) do
Logger.disable(self())
{:ok, assign(socket, :user_id, params["user_id"])}
end
def id(socket) do
if id = socket.assigns.user_id, do: "user_sockets:#{id}"
end
end
defmodule LoggingSocket do
use Phoenix.Socket
channel "room:*", RoomChannel
transport :websocket, Phoenix.Transports.WebSocket,
check_origin: ["//example.com"], timeout: 200
def connect(%{"reject" => "true"}, _socket) do
:error
end
def connect(params, socket) do
{:ok, assign(socket, :user_id, params["user_id"])}
end
def id(socket) do
if id = socket.assigns.user_id, do: "user_sockets:#{id}"
end
end
defmodule Endpoint do
use Phoenix.Endpoint, otp_app: :phoenix
socket "/ws", UserSocket
socket "/ws/admin", UserSocket
socket "/ws/logging", LoggingSocket
end
setup_all do
capture_log fn -> Endpoint.start_link() end
:ok
end
for {serializer, vsn, join_ref} <- [{WebSocketSerializer, "1.0.0", nil}, {V2.WebSocketSerializer, "2.0.0", "1"}] do
@serializer serializer
@vsn vsn
@join_ref join_ref
describe "with #{vsn} serializer #{inspect serializer}" do
test "endpoint handles multiple mount segments" do
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/admin/websocket?vsn=#{@vsn}", @serializer)
WebsocketClient.join(sock, "room:admin-lobby1", %{})
assert_receive %Message{event: "phx_reply",
payload: %{"response" => %{}, "status" => "ok"},
join_ref: @join_ref,
ref: "1", topic: "room:admin-lobby1"}
end
test "join, leave, and event messages" do
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}", @serializer)
WebsocketClient.join(sock, "room:lobby1", %{})
assert_receive %Message{event: "phx_reply",
join_ref: @join_ref,
payload: %{"response" => %{}, "status" => "ok"},
ref: "1", topic: "room:lobby1"}
assert_receive %Message{event: "joined",
payload: %{"status" => "connected", "user_id" => nil}}
assert_receive %Message{event: "user_entered",
payload: %{"user" => nil},
ref: nil, topic: "room:lobby1"}
channel_pid = Process.whereis(:"room:lobby1")
assert channel_pid
assert Process.alive?(channel_pid)
WebsocketClient.send_event(sock, "room:lobby1", "new_msg", %{body: "hi!"})
assert_receive %Message{event: "new_msg", payload: %{"transport" => "Phoenix.Transports.WebSocket", "body" => "hi!"}}
WebsocketClient.leave(sock, "room:lobby1", %{})
assert_receive %Message{event: "you_left", payload: %{"message" => "bye!"}}
assert_receive %Message{event: "phx_reply", payload: %{"status" => "ok"}}
assert_receive %Message{event: "phx_close", payload: %{}}
refute Process.alive?(channel_pid)
WebsocketClient.send_event(sock, "room:lobby1", "new_msg", %{body: "Should ignore"})
refute_receive %Message{event: "new_msg"}
assert_receive %Message{event: "phx_reply", payload: %{"response" => %{"reason" => "unmatched topic"}}}
WebsocketClient.send_event(sock, "room:lobby1", "new_msg", %{body: "Should ignore"})
refute_receive %Message{event: "new_msg"}
end
test "logs and filter params on join and handle_in" do
topic = "room:admin-lobby2"
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/logging/websocket?vsn=#{@vsn}", @serializer)
log = capture_log fn ->
WebsocketClient.join(sock, topic, %{"join" => "yes", "password" => "no"})
assert_receive %Message{event: "phx_reply",
join_ref: @join_ref,
payload: %{"response" => %{}, "status" => "ok"},
ref: "1", topic: "room:admin-lobby2"}
end
assert log =~ "Parameters: %{\"join\" => \"yes\", \"password\" => \"[FILTERED]\"}"
log = capture_log fn ->
WebsocketClient.send_event(sock, topic, "new_msg", %{"in" => "yes", "password" => "no"})
assert_receive %Message{event: "phx_reply", ref: "2"}
end
assert log =~ "Parameters: %{\"in\" => \"yes\", \"password\" => \"[FILTERED]\"}"
end
test "sends phx_error if a channel server abnormally exits" do
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}", @serializer)
WebsocketClient.join(sock, "room:lobby", %{})
assert_receive %Message{event: "phx_reply", ref: "1", payload: %{"response" => %{}, "status" => "ok"}}
assert_receive %Message{event: "joined"}
assert_receive %Message{event: "user_entered"}
capture_log fn ->
WebsocketClient.send_event(sock, "room:lobby", "boom", %{})
assert_receive %Message{event: "phx_error", payload: %{}, topic: "room:lobby"}
end
end
test "channels are terminated if transport normally exits" do
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}", @serializer)
WebsocketClient.join(sock, "room:lobby2", %{})
assert_receive %Message{event: "phx_reply", ref: "1", payload: %{"response" => %{}, "status" => "ok"}}
assert_receive %Message{event: "joined"}
channel = Process.whereis(:"room:lobby2")
assert channel
Process.monitor(channel)
WebsocketClient.close(sock)
assert_receive {:DOWN, _, :process, ^channel, {:shutdown, :closed}}
end
test "refuses websocket events that haven't joined" do
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}", @serializer)
WebsocketClient.send_event(sock, "room:lobby", "new_msg", %{body: "hi!"})
refute_receive %Message{event: "new_msg"}
assert_receive %Message{event: "phx_reply", payload: %{"response" => %{"reason" => "unmatched topic"}}}
WebsocketClient.send_event(sock, "room:lobby1", "new_msg", %{body: "Should ignore"})
refute_receive %Message{event: "new_msg"}
end
test "refuses unallowed origins" do
capture_log fn ->
assert {:ok, _} =
WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}", @serializer,
[{"origin", "https://example.com"}])
assert {:error, {403, _}} =
WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}", @serializer,
[{"origin", "http://notallowed.com"}])
end
end
test "refuses connects that error with 403 response" do
assert WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}&reject=true", @serializer) ==
{:error, {403, "Forbidden"}}
end
test "shuts down when receiving disconnect broadcasts on socket's id" do
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}&user_id=1001", @serializer)
WebsocketClient.join(sock, "room:wsdisconnect1", %{})
assert_receive %Message{topic: "room:wsdisconnect1", event: "phx_reply",
ref: "1", payload: %{"response" => %{}, "status" => "ok"}}
WebsocketClient.join(sock, "room:wsdisconnect2", %{})
assert_receive %Message{topic: "room:wsdisconnect2", event: "phx_reply",
ref: "2", payload: %{"response" => %{}, "status" => "ok"}}
chan1 = Process.whereis(:"room:wsdisconnect1")
assert chan1
chan2 = Process.whereis(:"room:wsdisconnect2")
assert chan2
Process.monitor(sock)
Process.monitor(chan1)
Process.monitor(chan2)
Endpoint.broadcast("user_sockets:1001", "disconnect", %{})
assert_receive {:DOWN, _, :process, ^sock, :normal}
assert_receive {:DOWN, _, :process, ^chan1, :shutdown}
assert_receive {:DOWN, _, :process, ^chan2, :shutdown}
end
test "duplicate join event closes existing channel" do
{:ok, sock} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}&user_id=1001", @serializer)
WebsocketClient.join(sock, "room:joiner", %{})
assert_receive %Message{topic: "room:joiner", event: "phx_reply",
ref: "1", payload: %{"response" => %{}, "status" => "ok"}}
WebsocketClient.join(sock, "room:joiner", %{})
assert_receive %Message{topic: "room:joiner", event: "phx_reply",
ref: "2", payload: %{"response" => %{}, "status" => "ok"}}
assert_receive %Message{topic: "room:joiner", event: "phx_close",
ref: "1", payload: %{}}
end
test "returns 403 when versions to not match" do
log = capture_log fn ->
url = "ws://127.0.0.1:#{@port}/ws/websocket?vsn=123.1.1"
assert WebsocketClient.start_link(self(), url, @serializer) ==
{:error, {403, "Forbidden"}}
end
assert log =~ "The client's requested channel transport version \"123.1.1\" does not match server's version"
end
test "shuts down if client goes quiet" do
{:ok, socket} = WebsocketClient.start_link(self(), "ws://127.0.0.1:#{@port}/ws/websocket?vsn=#{@vsn}", @serializer)
Process.monitor(socket)
WebsocketClient.send_heartbeat(socket)
assert_receive %Message{event: "phx_reply",
payload: %{"response" => %{}, "status" => "ok"},
ref: "1", topic: "phoenix"}
assert_receive {:DOWN, _, :process, ^socket, :normal}, 400
end
end
end
end
| 39.746711
| 134
| 0.58272
|
79c2aa07c9a78867233c3661fa5caf7a96d02b09
| 215
|
ex
|
Elixir
|
lib/liveman_web/views/v1/user_view.ex
|
nimblehq/liveman-demo-api
|
e184349983f949c8434b8651f9223db597ef1025
|
[
"MIT"
] | null | null | null |
lib/liveman_web/views/v1/user_view.ex
|
nimblehq/liveman-demo-api
|
e184349983f949c8434b8651f9223db597ef1025
|
[
"MIT"
] | 19
|
2021-07-02T08:14:52.000Z
|
2021-07-30T09:33:12.000Z
|
lib/liveman_web/views/v1/user_view.ex
|
nimblehq/liveman
|
e184349983f949c8434b8651f9223db597ef1025
|
[
"MIT"
] | null | null | null |
defmodule LivemanWeb.V1.UserView do
use JSONAPI.View, type: "user"
def fields do
[:email, :avatar_url]
end
def avatar_url(_user, _conn) do
"https://source.boringavatars.com/bauhaus/120/"
end
end
| 17.916667
| 51
| 0.697674
|
79c2b51dc5146ca6bbd1d7fb54ba397b0dbdddab
| 4,691
|
exs
|
Elixir
|
test/ex338/fantasy_teams/deadlines_test.exs
|
axelclark/ex338
|
3fb3c260d93bda61f7636ee1a677770d2dc1b89a
|
[
"MIT"
] | 17
|
2016-12-22T06:39:26.000Z
|
2021-01-20T13:51:13.000Z
|
test/ex338/fantasy_teams/deadlines_test.exs
|
axelclark/ex338
|
3fb3c260d93bda61f7636ee1a677770d2dc1b89a
|
[
"MIT"
] | 608
|
2016-08-06T18:57:58.000Z
|
2022-03-01T02:48:17.000Z
|
test/ex338/fantasy_teams/deadlines_test.exs
|
axelclark/ex338
|
3fb3c260d93bda61f7636ee1a677770d2dc1b89a
|
[
"MIT"
] | 6
|
2017-11-21T22:35:45.000Z
|
2022-01-11T21:37:40.000Z
|
defmodule Ex338.FantasyTeams.DeadlinesTest do
use Ex338.DataCase, async: true
alias Ex338.{CalendarAssistant, Championships.Championship, FantasyTeams.Deadlines}
describe "add_for_league/1" do
test "add boolean whether season has ended" do
teams = [
%{
team: "A",
roster_positions: [
%{
pos: "A",
fantasy_player: %{
sports_league: %{
championships: [
%Championship{
championship_at: CalendarAssistant.days_from_now(9),
waiver_deadline_at: CalendarAssistant.days_from_now(9),
trade_deadline_at: CalendarAssistant.days_from_now(9)
}
]
}
}
},
%{
pos: "B",
fantasy_player: %{
sports_league: %{
championships: [
%Championship{
championship_at: CalendarAssistant.days_from_now(-9),
waiver_deadline_at: CalendarAssistant.days_from_now(-9),
trade_deadline_at: CalendarAssistant.days_from_now(-9)
}
]
}
}
}
]
},
%{
team: "B",
roster_positions: [
%{
pos: "A",
fantasy_player: %{
sports_league: %{
championships: [
%Championship{
championship_at: CalendarAssistant.days_from_now(9),
waiver_deadline_at: CalendarAssistant.days_from_now(9),
trade_deadline_at: CalendarAssistant.days_from_now(9)
}
]
}
}
},
%{
pos: "B",
fantasy_player: %{
sports_league: %{
championships: [
%Championship{
championship_at: CalendarAssistant.days_from_now(-9),
waiver_deadline_at: CalendarAssistant.days_from_now(-9),
trade_deadline_at: CalendarAssistant.days_from_now(-9)
}
]
}
}
}
]
}
]
[team_a, _team_b] = Deadlines.add_for_league(teams)
%{roster_positions: [ros_a, ros_b]} = team_a
a = get_champ(ros_a)
b = get_champ(ros_b)
assert a.season_ended? == false
assert a.waivers_closed? == false
assert a.trades_closed? == false
assert b.season_ended? == true
assert b.waivers_closed? == true
assert b.trades_closed? == true
end
end
describe "add_for_team/1" do
test "add boolean whether season has ended" do
team = %{
roster_positions: [
%{
pos: "A",
fantasy_player: %{
sports_league: %{
championships: [
%Championship{
championship_at: CalendarAssistant.days_from_now(9),
waiver_deadline_at: CalendarAssistant.days_from_now(9),
trade_deadline_at: CalendarAssistant.days_from_now(9)
}
]
}
}
},
%{
pos: "B",
fantasy_player: %{
sports_league: %{
championships: [
%Championship{
championship_at: CalendarAssistant.days_from_now(-9),
waiver_deadline_at: CalendarAssistant.days_from_now(-9),
trade_deadline_at: CalendarAssistant.days_from_now(-9)
}
]
}
}
},
%{
pos: "C",
fantasy_player: %{
sports_league: %{
championships: []
}
}
}
]
}
%{roster_positions: [a, b, c]} = Deadlines.add_for_team(team)
a = get_champ(a)
b = get_champ(b)
c = get_champ(c)
assert a.season_ended? == false
assert a.waivers_closed? == false
assert a.trades_closed? == false
assert b.season_ended? == true
assert b.waivers_closed? == true
assert b.trades_closed? == true
assert c == nil
end
end
defp get_champ(%{fantasy_player: %{sports_league: %{championships: [champ]}}}), do: champ
defp get_champ(%{fantasy_player: %{sports_league: %{championships: []}}}), do: nil
end
| 30.461039
| 91
| 0.466425
|
79c2ba533afe8419ad97de37317d3e7c3e00ed36
| 1,997
|
ex
|
Elixir
|
lib/public_suffix_list.ex
|
HGData/public_suffix_list
|
b184746953f6d63a4b4c270d3f6c3941785eef9d
|
[
"Apache-2.0"
] | 2
|
2020-01-13T23:24:45.000Z
|
2020-12-17T22:47:22.000Z
|
lib/public_suffix_list.ex
|
cogini/public_suffix_list
|
b184746953f6d63a4b4c270d3f6c3941785eef9d
|
[
"Apache-2.0"
] | null | null | null |
lib/public_suffix_list.ex
|
cogini/public_suffix_list
|
b184746953f6d63a4b4c270d3f6c3941785eef9d
|
[
"Apache-2.0"
] | null | null | null |
defmodule PublicSuffixList do
@moduledoc """
Parse DNS domain names using public suffix list from https://publicsuffix.org
"""
@app :public_suffix_list
@input_file Path.join(:code.priv_dir(@app), "public_suffix_list.dat")
@external_resource @input_file
@doc "Parse domain into subdomains, name and suffix"
@spec parse(binary) :: {:ok, {list(binary), binary, binary}} | {:error, :unknown_suffix}
def parse(domain) when is_binary(domain) do
domain
|> String.downcase()
|> String.split(".")
|> Enum.reverse()
|> match_suffix()
end
@doc "Strip subdomain, leaving just name and suffix"
@spec normalize(binary) :: {:ok, binary} | {:error, :unknown_suffix}
def normalize(domain) when is_binary(domain) do
case parse(domain) do
{:ok, {_subdomains, name, suffix}} ->
{:ok, name <> "." <> suffix}
{:error, reason} ->
{:error, reason}
end
end
@doc "Strip subdomain and suffix, leaving just the name"
@spec name(binary) :: {:ok, binary} | {:error, :unknown_suffix}
def name(domain) when is_binary(domain) do
case parse(domain) do
{:ok, {_subdomains, name, _suffix}} ->
{:ok, name}
{:error, reason} ->
{:error, reason}
end
end
# Internal functions
# Build function clauses to match names from public suffix list data
@spec match_suffix(list(binary)) :: {:ok, {list(binary), binary, binary}} | {:error, :unknown_suffix}
@input_file
|> File.read!
|> String.split("\n")
|> Enum.filter(&(not Regex.match?(~r/^\/\/|^\s*$/, &1)))
|> Enum.map(&String.trim/1)
|> Enum.reject(&(&1 == ""))
|> Enum.map(&({&1 |> String.split(".") |> Enum.reverse, &1}))
|> Enum.sort(&>=/2)
|> Enum.map(fn {comps, suffix} ->
args = comps ++ quote(do: [name | rest])
result = quote(do: {:ok, {Enum.reverse(rest), name, unquote(suffix)}})
defp match_suffix(unquote(args)), do: unquote(result)
end)
defp match_suffix(_) do
{:error, :unknown_suffix}
end
end
| 30.257576
| 103
| 0.620931
|
79c2bc4940f86363d6b82a4cba7038c447f74bdf
| 229
|
exs
|
Elixir
|
maps/nested.exs
|
nickveys/elixir-play
|
1cb3249e3dfc4df6eb7a84b7a57042931a2e8e86
|
[
"Unlicense"
] | null | null | null |
maps/nested.exs
|
nickveys/elixir-play
|
1cb3249e3dfc4df6eb7a84b7a57042931a2e8e86
|
[
"Unlicense"
] | null | null | null |
maps/nested.exs
|
nickveys/elixir-play
|
1cb3249e3dfc4df6eb7a84b7a57042931a2e8e86
|
[
"Unlicense"
] | null | null | null |
defmodule Customer do
defstruct name: "", company: ""
end
defmodule BugReport do
defstruct owner: %Customer{}, details: ""
def company_name(%BugReport{ owner: %Customer{ company: company } }) do
company
end
end
| 14.3125
| 73
| 0.68559
|
79c2f9231cfa117b22dcac88166757de21b25e7f
| 1,549
|
exs
|
Elixir
|
apps/translations/mix.exs
|
pzingg/log_watcher
|
9260b1ec79a3a211a18b8214b33be918ebf77141
|
[
"MIT"
] | 1
|
2021-11-03T03:26:44.000Z
|
2021-11-03T03:26:44.000Z
|
apps/translations/mix.exs
|
pzingg/log_watcher
|
9260b1ec79a3a211a18b8214b33be918ebf77141
|
[
"MIT"
] | null | null | null |
apps/translations/mix.exs
|
pzingg/log_watcher
|
9260b1ec79a3a211a18b8214b33be918ebf77141
|
[
"MIT"
] | null | null | null |
defmodule Translations.MixProject do
use Mix.Project
def project do
[
app: :translations,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.11",
elixirc_paths: elixirc_paths(Mix.env()),
dialyzer: [
plt_add_deps: :apps_direct,
plt_add_apps: [:oban],
ignore_warnings: "dialyzer.ignore-warnings"
],
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Translations.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:dotenvy, "~> 0.3"},
{:ecto, "~> 3.6"},
{:gettext, "~> 0.18"},
{:typed_struct, "~> 0.2"},
# {:typed_struct_ecto_changeset, "~> 0.1"}
{:typed_struct_ecto_changeset,
git: "git://github.com/pzingg/typed_struct_ecto_changeset.git"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[]
end
end
| 25.393443
| 70
| 0.599096
|
79c2fda7e4f92fec78a2446bc2dcf5521bc89950
| 16,593
|
ex
|
Elixir
|
lib/codes/codes_v37.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
lib/codes/codes_v37.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
lib/codes/codes_v37.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
# Lookup table of ICD-10-CM codes in category V37 (occupant of three-wheeled
# motor vehicle injured in collision with fixed or stationary object).
# One zero-arity function per full code, each returning a populated
# %ICDCode{} struct. The formulaic shape suggests this file is
# machine-generated — do not edit individual entries by hand; TODO confirm
# against the generator.
defmodule IcdCode.ICDCode.Codes_V37 do
  alias IcdCode.ICDCode

  def _V370XXA do
    %ICDCode{
      full_code: "V370XXA",
      category_code: "V37",
      short_code: "0XXA",
      full_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter",
      short_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter",
      category_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter"
    }
  end

  def _V370XXD do
    %ICDCode{
      full_code: "V370XXD",
      category_code: "V37",
      short_code: "0XXD",
      full_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter",
      short_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter",
      category_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter"
    }
  end

  def _V370XXS do
    %ICDCode{
      full_code: "V370XXS",
      category_code: "V37",
      short_code: "0XXS",
      full_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela",
      short_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela",
      category_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela"
    }
  end

  def _V371XXA do
    %ICDCode{
      full_code: "V371XXA",
      category_code: "V37",
      short_code: "1XXA",
      full_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter",
      short_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter",
      category_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter"
    }
  end

  def _V371XXD do
    %ICDCode{
      full_code: "V371XXD",
      category_code: "V37",
      short_code: "1XXD",
      full_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter",
      short_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter",
      category_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter"
    }
  end

  def _V371XXS do
    %ICDCode{
      full_code: "V371XXS",
      category_code: "V37",
      short_code: "1XXS",
      full_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela",
      short_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela",
      category_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela"
    }
  end

  def _V372XXA do
    %ICDCode{
      full_code: "V372XXA",
      category_code: "V37",
      short_code: "2XXA",
      full_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter",
      short_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter",
      category_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter"
    }
  end

  def _V372XXD do
    %ICDCode{
      full_code: "V372XXD",
      category_code: "V37",
      short_code: "2XXD",
      full_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter",
      short_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter",
      category_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter"
    }
  end

  def _V372XXS do
    %ICDCode{
      full_code: "V372XXS",
      category_code: "V37",
      short_code: "2XXS",
      full_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela",
      short_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela",
      category_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela"
    }
  end

  def _V373XXA do
    %ICDCode{
      full_code: "V373XXA",
      category_code: "V37",
      short_code: "3XXA",
      full_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter",
      short_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter",
      category_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, initial encounter"
    }
  end

  def _V373XXD do
    %ICDCode{
      full_code: "V373XXD",
      category_code: "V37",
      short_code: "3XXD",
      full_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter",
      short_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter",
      category_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, subsequent encounter"
    }
  end

  def _V373XXS do
    %ICDCode{
      full_code: "V373XXS",
      category_code: "V37",
      short_code: "3XXS",
      full_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela",
      short_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela",
      category_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in nontraffic accident, sequela"
    }
  end

  def _V374XXA do
    %ICDCode{
      full_code: "V374XXA",
      category_code: "V37",
      short_code: "4XXA",
      full_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, initial encounter",
      short_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, initial encounter",
      category_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, initial encounter"
    }
  end

  def _V374XXD do
    %ICDCode{
      full_code: "V374XXD",
      category_code: "V37",
      short_code: "4XXD",
      full_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, subsequent encounter",
      short_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, subsequent encounter",
      category_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, subsequent encounter"
    }
  end

  def _V374XXS do
    %ICDCode{
      full_code: "V374XXS",
      category_code: "V37",
      short_code: "4XXS",
      full_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, sequela",
      short_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, sequela",
      category_name: "Person boarding or alighting a three-wheeled motor vehicle injured in collision with fixed or stationary object, sequela"
    }
  end

  def _V375XXA do
    %ICDCode{
      full_code: "V375XXA",
      category_code: "V37",
      short_code: "5XXA",
      full_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter",
      short_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter",
      category_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter"
    }
  end

  def _V375XXD do
    %ICDCode{
      full_code: "V375XXD",
      category_code: "V37",
      short_code: "5XXD",
      full_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter",
      short_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter",
      category_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter"
    }
  end

  def _V375XXS do
    %ICDCode{
      full_code: "V375XXS",
      category_code: "V37",
      short_code: "5XXS",
      full_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela",
      short_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela",
      category_name: "Driver of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela"
    }
  end

  def _V376XXA do
    %ICDCode{
      full_code: "V376XXA",
      category_code: "V37",
      short_code: "6XXA",
      full_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter",
      short_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter",
      category_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter"
    }
  end

  def _V376XXD do
    %ICDCode{
      full_code: "V376XXD",
      category_code: "V37",
      short_code: "6XXD",
      full_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter",
      short_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter",
      category_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter"
    }
  end

  def _V376XXS do
    %ICDCode{
      full_code: "V376XXS",
      category_code: "V37",
      short_code: "6XXS",
      full_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela",
      short_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela",
      category_name: "Passenger in three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela"
    }
  end

  def _V377XXA do
    %ICDCode{
      full_code: "V377XXA",
      category_code: "V37",
      short_code: "7XXA",
      full_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter",
      short_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter",
      category_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter"
    }
  end

  def _V377XXD do
    %ICDCode{
      full_code: "V377XXD",
      category_code: "V37",
      short_code: "7XXD",
      full_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter",
      short_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter",
      category_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter"
    }
  end

  def _V377XXS do
    %ICDCode{
      full_code: "V377XXS",
      category_code: "V37",
      short_code: "7XXS",
      full_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela",
      short_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela",
      category_name: "Person on outside of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela"
    }
  end

  def _V379XXA do
    %ICDCode{
      full_code: "V379XXA",
      category_code: "V37",
      short_code: "9XXA",
      full_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter",
      short_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter",
      category_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, initial encounter"
    }
  end

  def _V379XXD do
    %ICDCode{
      full_code: "V379XXD",
      category_code: "V37",
      short_code: "9XXD",
      full_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter",
      short_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter",
      category_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, subsequent encounter"
    }
  end

  def _V379XXS do
    %ICDCode{
      full_code: "V379XXS",
      category_code: "V37",
      short_code: "9XXS",
      full_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela",
      short_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela",
      category_name: "Unspecified occupant of three-wheeled motor vehicle injured in collision with fixed or stationary object in traffic accident, sequela"
    }
  end
end
| 66.372
| 176
| 0.729103
|
79c30383e22859aef75850cb2d44855032e29e99
| 504
|
ex
|
Elixir
|
lib/credo/execution/task/parse_options.ex
|
codeclimate-community/credo
|
b960a25d604b4499a2577321f9d61b39dc4b0437
|
[
"MIT"
] | 1
|
2021-12-01T13:37:43.000Z
|
2021-12-01T13:37:43.000Z
|
lib/credo/execution/task/parse_options.ex
|
codeclimate-community/credo
|
b960a25d604b4499a2577321f9d61b39dc4b0437
|
[
"MIT"
] | 3
|
2021-06-20T14:51:14.000Z
|
2021-06-25T00:56:11.000Z
|
lib/credo/execution/task/parse_options.ex
|
codeclimate-community/credo
|
b960a25d604b4499a2577321f9d61b39dc4b0437
|
[
"MIT"
] | 1
|
2020-09-25T11:48:49.000Z
|
2020-09-25T11:48:49.000Z
|
defmodule Credo.Execution.Task.ParseOptions do
  @moduledoc false

  use Credo.Execution.Task

  alias Credo.CLI.Options
  alias Credo.CLI.Output.UI

  alias Credo.Execution

  # Parses the raw CLI argv stored on the execution struct and stores the
  # resulting %Options{} back on it.
  def call(exec, _opts) do
    parsed =
      Options.parse(
        exec.argv,
        File.cwd!(),
        Execution.get_valid_command_names(exec),
        [UI.edge()],
        exec.cli_switches,
        exec.cli_aliases
      )

    %Execution{exec | cli_options: parsed}
  end
end
| 19.384615
| 59
| 0.65873
|
79c30562cb49b3c0a5f99a94a0f1ee4a9f4cb076
| 355
|
exs
|
Elixir
|
priv/repo/seeds.exs
|
AltCampus/altstatus_backend
|
70bf7a9d337e570f54002c3a7df264e88372adfa
|
[
"MIT"
] | 1
|
2020-01-20T18:17:59.000Z
|
2020-01-20T18:17:59.000Z
|
priv/repo/seeds.exs
|
AltCampus/altstatus_backend
|
70bf7a9d337e570f54002c3a7df264e88372adfa
|
[
"MIT"
] | null | null | null |
priv/repo/seeds.exs
|
AltCampus/altstatus_backend
|
70bf7a9d337e570f54002c3a7df264e88372adfa
|
[
"MIT"
] | 2
|
2018-09-09T08:05:24.000Z
|
2018-09-09T08:35:18.000Z
|
# Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Altstatus.Repo.insert!(%Altstatus.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.583333
| 61
| 0.709859
|
79c31e17b6b7db89bfbbd0d91344015e20b7aec1
| 20,705
|
ex
|
Elixir
|
deps/ecto/lib/ecto/date_time.ex
|
rchervin/phoenixportfolio
|
a5a6a60168d7261647a10a8dbd395b440db8a4f9
|
[
"MIT"
] | null | null | null |
deps/ecto/lib/ecto/date_time.ex
|
rchervin/phoenixportfolio
|
a5a6a60168d7261647a10a8dbd395b440db8a4f9
|
[
"MIT"
] | null | null | null |
deps/ecto/lib/ecto/date_time.ex
|
rchervin/phoenixportfolio
|
a5a6a60168d7261647a10a8dbd395b440db8a4f9
|
[
"MIT"
] | null | null | null |
# TODO: Remove Ecto.Date|Time types on Ecto v2.2
import Kernel, except: [to_string: 1]
defmodule Ecto.DateTime.Utils do
  @moduledoc false

  @doc "Pads with zero"
  def zero_pad(val, count) do
    num = Integer.to_string(val)
    # Clamp at 0 so values already wider than `count` are left untouched.
    pad_length = max(count - byte_size(num), 0)
    :binary.copy("0", pad_length) <> num
  end

  @doc "Converts to integer if possible"
  def to_i(nil), do: nil
  # Accepts the {integer, remainder} tuple shape returned by Integer.parse/1.
  def to_i({int, _}) when is_integer(int), do: int
  def to_i(int) when is_integer(int), do: int
  def to_i(bin) when is_binary(bin) do
    case Integer.parse(bin) do
      # Only fully-numeric binaries count; trailing garbage yields nil.
      {int, ""} -> int
      _ -> nil
    end
  end

  @doc "A guard to check for dates"
  defmacro is_date(year, month, day) do
    quote do
      is_integer(unquote(year)) and unquote(month) in 1..12 and unquote(day) in 1..31
    end
  end

  @doc "A guard to check for times"
  defmacro is_time(hour, min, sec, usec \\ 0) do
    quote do
      unquote(hour) in 0..23 and
        unquote(min) in 0..59 and
        unquote(sec) in 0..59 and
        unquote(usec) in 0..999_999
    end
  end

  @doc """
  Checks if the trailing part of a date/time matches ISO specs.
  """
  defmacro is_iso_8601(x) do
    quote do: unquote(x) in ["", "Z"]
  end

  @doc """
  Gets microseconds from rest and validates it.

  Returns nil if an invalid format is given.
  """
  def usec("." <> rest) do
    case parse(rest, "") do
      # More than six fractional digits: truncate to microsecond precision.
      {int, rest} when byte_size(int) > 6 and is_iso_8601(rest) ->
        String.to_integer(binary_part(int, 0, 6))

      # One to six digits: right-pad with zeros up to six digits.
      {int, rest} when byte_size(int) in 1..6 and is_iso_8601(rest) ->
        pad = String.duplicate("0", 6 - byte_size(int))
        String.to_integer(int <> pad)

      _ ->
        nil
    end
  end
  def usec(rest) when is_iso_8601(rest), do: 0
  def usec(_), do: nil

  @doc """
  Compare two datetimes.

  Receives two datetimes and compares the `t1`
  against `t2` and returns `:lt`, `:eq` or `:gt`.
  """
  # Both arguments must be the same struct type; comparison happens on the
  # dumped tuple form, where same-size tuples compare element by element.
  def compare(%{__struct__: module} = t1, %{__struct__: module} = t2) do
    {:ok, t1} = module.dump(t1)
    {:ok, t2} = module.dump(t2)

    cond do
      t1 == t2 -> :eq
      t1 > t2 -> :gt
      true -> :lt
    end
  end

  # Consumes leading ASCII digits, returning {digits, rest}.
  defp parse(<<h, t::binary>>, acc) when h in ?0..?9, do: parse(t, <<acc::binary, h>>)
  defp parse(rest, acc), do: {acc, rest}
end
defmodule Ecto.Date do
  import Ecto.DateTime.Utils

  @doc """
  Compare two dates.

  Receives two dates and compares the `t1`
  against `t2` and returns `:lt`, `:eq` or `:gt`.
  """
  defdelegate compare(t1, t2), to: Ecto.DateTime.Utils

  @moduledoc """
  An Ecto type for dates.
  """

  @behaviour Ecto.Type
  defstruct [:year, :month, :day]

  @doc """
  The Ecto primitive type.
  """
  def type, do: :date

  @doc """
  Casts the given value to date.

  It supports:

    * a binary in the "YYYY-MM-DD" format
    * a binary in the "YYYY-MM-DD HH:MM:SS" format
      (may be separated by T and/or followed by "Z", as in `2014-04-17T14:00:00Z`)
    * a binary in the "YYYY-MM-DD HH:MM:SS.USEC" format
      (may be separated by T and/or followed by "Z", as in `2014-04-17T14:00:00.030Z`)
    * a map with `"year"`, `"month"` and `"day"` keys
      with integer or binaries as values
    * a map with `:year`, `:month` and `:day` keys
      with integer or binaries as values
    * a tuple with `{year, month, day}` as integers or binaries
    * an `Ecto.Date` struct itself
  """
  def cast(d), do: d |> do_cast() |> validate_cast()

  @doc """
  Same as `cast/1` but raises `Ecto.CastError` on invalid dates.
  """
  def cast!(value) do
    case cast(value) do
      {:ok, date} -> date
      :error -> raise Ecto.CastError, "cannot cast #{inspect value} to date"
    end
  end

  defp do_cast(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes>>),
    do: from_parts(to_i(year), to_i(month), to_i(day))
  # Datetime-shaped binaries are accepted; the time portion is discarded.
  defp do_cast(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes, sep,
                 _hour::2-bytes, ?:, _min::2-bytes, ?:, _sec::2-bytes, _rest::binary>>) when sep in [?\s, ?T],
    do: from_parts(to_i(year), to_i(month), to_i(day))
  defp do_cast(%Ecto.Date{} = d),
    do: {:ok, d}
  # All-blank form params (every field "" or nil) cast to nil, not :error.
  defp do_cast(%{"year" => empty, "month" => empty, "day" => empty}) when empty in ["", nil],
    do: {:ok, nil}
  defp do_cast(%{year: empty, month: empty, day: empty}) when empty in ["", nil],
    do: {:ok, nil}
  defp do_cast(%{"year" => year, "month" => month, "day" => day}),
    do: from_parts(to_i(year), to_i(month), to_i(day))
  defp do_cast(%{year: year, month: month, day: day}),
    do: from_parts(to_i(year), to_i(month), to_i(day))
  defp do_cast({year, month, day}),
    do: from_parts(to_i(year), to_i(month), to_i(day))
  defp do_cast(_),
    do: :error

  defp validate_cast(:error), do: :error
  defp validate_cast({:ok, nil}), do: {:ok, nil}
  defp validate_cast({:ok, %{year: y, month: m, day: d} = date}) do
    # Rejects impossible dates (e.g. Feb 30) that pass the range-only guard.
    if :calendar.valid_date(y, m, d), do: {:ok, date}, else: :error
  end

  defp from_parts(year, month, day) when is_date(year, month, day) do
    {:ok, %Ecto.Date{year: year, month: month, day: day}}
  end
  defp from_parts(_, _, _), do: :error

  @doc """
  Converts an `Ecto.Date` into a date triplet.
  """
  def dump(%Ecto.Date{year: year, month: month, day: day}) do
    {:ok, {year, month, day}}
  end
  def dump(_), do: :error

  @doc """
  Converts a date triplet into an `Ecto.Date`.
  """
  def load({year, month, day}) do
    {:ok, %Ecto.Date{year: year, month: month, day: day}}
  end
  def load(_), do: :error

  @doc """
  Converts `Ecto.Date` to a readable string representation.
  """
  def to_string(%Ecto.Date{year: year, month: month, day: day}) do
    zero_pad(year, 4) <> "-" <> zero_pad(month, 2) <> "-" <> zero_pad(day, 2)
  end

  @doc """
  Converts `Ecto.Date` to ISO8601 representation.
  """
  def to_iso8601(date) do
    to_string(date)
  end

  @doc """
  Returns an `Ecto.Date` in UTC.
  """
  def utc do
    {{year, month, day}, _time} = :erlang.universaltime
    %Ecto.Date{year: year, month: month, day: day}
  end

  @doc """
  Returns an Erlang date tuple from an `Ecto.Date`.
  """
  def to_erl(%Ecto.Date{year: year, month: month, day: day}) do
    {year, month, day}
  end

  @doc """
  Returns an `Ecto.Date` from an Erlang date tuple.
  """
  def from_erl({year, month, day}) do
    %Ecto.Date{year: year, month: month, day: day}
  end
end
defmodule Ecto.Time do
  import Ecto.DateTime.Utils

  @doc """
  Compare two times.

  Receives two times and compares the `t1`
  against `t2` and returns `:lt`, `:eq` or `:gt`.
  """
  defdelegate compare(t1, t2), to: Ecto.DateTime.Utils

  @moduledoc """
  An Ecto type for time.
  """

  @behaviour Ecto.Type
  defstruct [:hour, :min, :sec, usec: 0]

  @doc """
  The Ecto primitive type.
  """
  def type, do: :time

  @doc """
  Casts the given value to time.

  It supports:

    * a binary in the "HH:MM:SS" format
      (may be followed by "Z", as in `12:00:00Z`)
    * a binary in the "HH:MM:SS.USEC" format
      (may be followed by "Z", as in `12:00:00.005Z`)
    * a map with `"hour"`, `"minute"` keys with `"second"` and `"microsecond"`
      as optional keys and values are integers or binaries
    * a map with `:hour`, `:minute` keys with `:second` and `:microsecond`
      as optional keys and values are integers or binaries
    * a tuple with `{hour, min, sec}` as integers or binaries
    * a tuple with `{hour, min, sec, usec}` as integers or binaries
    * an `Ecto.Time` struct itself
  """
  def cast(<<hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes, rest::binary>>) do
    # usec/1 returns nil for a malformed fractional/zone suffix.
    if usec = usec(rest) do
      from_parts(to_i(hour), to_i(min), to_i(sec), usec)
    else
      :error
    end
  end
  def cast(%Ecto.Time{} = t),
    do: {:ok, t}
  # Legacy short-key maps ("min"/"sec"/"usec") are matched before the
  # long-key ("minute"/"second"/"microsecond") variants below — clause
  # order is significant here.
  def cast(%{"hour" => hour, "min" => min} = map),
    do: from_parts(to_i(hour), to_i(min), to_i(Map.get(map, "sec", 0)), to_i(Map.get(map, "usec", 0)))
  def cast(%{hour: hour, min: min} = map),
    do: from_parts(to_i(hour), to_i(min), to_i(Map.get(map, :sec, 0)), to_i(Map.get(map, :usec, 0)))
  # All-blank form params cast to nil rather than an error.
  def cast(%{"hour" => empty, "minute" => empty}) when empty in ["", nil],
    do: {:ok, nil}
  def cast(%{hour: empty, minute: empty}) when empty in ["", nil],
    do: {:ok, nil}
  def cast(%{"hour" => hour, "minute" => minute} = map),
    do: from_parts(to_i(hour), to_i(minute), to_i(Map.get(map, "second", 0)), to_i(Map.get(map, "microsecond", 0)))
  def cast(%{hour: hour, minute: minute} = map),
    do: from_parts(to_i(hour), to_i(minute), to_i(Map.get(map, :second, 0)), to_i(Map.get(map, :microsecond, 0)))
  def cast({hour, min, sec}),
    do: from_parts(to_i(hour), to_i(min), to_i(sec), 0)
  def cast({hour, min, sec, usec}),
    do: from_parts(to_i(hour), to_i(min), to_i(sec), to_i(usec))
  def cast(_),
    do: :error

  @doc """
  Same as `cast/1` but raises `Ecto.CastError` on invalid times.
  """
  def cast!(value) do
    case cast(value) do
      {:ok, time} -> time
      :error -> raise Ecto.CastError, "cannot cast #{inspect value} to time"
    end
  end

  defp from_parts(hour, min, sec, usec) when is_time(hour, min, sec, usec),
    do: {:ok, %Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}}
  defp from_parts(_, _, _, _),
    do: :error

  @doc """
  Converts an `Ecto.Time` into a time tuple (in the form `{hour, min, sec,
  usec}`).
  """
  def dump(%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}) do
    {:ok, {hour, min, sec, usec}}
  end
  def dump(_), do: :error

  @doc """
  Converts a time tuple like the one returned by `dump/1` into an `Ecto.Time`.
  """
  def load({hour, min, sec, usec}) do
    {:ok, %Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}}
  end
  # Three-element Erlang time tuples load with the default usec of 0.
  def load({_, _, _} = time) do
    {:ok, from_erl(time)}
  end
  def load(_), do: :error

  @doc """
  Converts `Ecto.Time` to a string representation.
  """
  def to_string(%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}) do
    str = zero_pad(hour, 2) <> ":" <> zero_pad(min, 2) <> ":" <> zero_pad(sec, 2)

    # The fractional part is only rendered when non-zero.
    if is_nil(usec) or usec == 0 do
      str
    else
      str <> "." <> zero_pad(usec, 6)
    end
  end

  @doc """
  Converts `Ecto.Time` to its ISO 8601 representation.
  """
  def to_iso8601(time) do
    to_string(time)
  end

  @doc """
  Returns an `Ecto.Time` in UTC.

  `precision` can be `:sec` or `:usec.`
  """
  def utc(precision \\ :sec)
  def utc(:sec) do
    {_, {hour, min, sec}} = :erlang.universaltime
    %Ecto.Time{hour: hour, min: min, sec: sec}
  end
  def utc(:usec) do
    # :os.timestamp carries microseconds; :erlang.universaltime does not.
    now = {_, _, usec} = :os.timestamp
    {_date, {hour, min, sec}} = :calendar.now_to_universal_time(now)
    %Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}
  end

  @doc """
  Returns an Erlang time tuple from an `Ecto.Time`.
  """
  def to_erl(%Ecto.Time{hour: hour, min: min, sec: sec}) do
    {hour, min, sec}
  end

  @doc """
  Returns an `Ecto.Time` from an Erlang time tuple.
  """
  def from_erl({hour, min, sec}) do
    %Ecto.Time{hour: hour, min: min, sec: sec}
  end
end
defmodule Ecto.DateTime do
  import Ecto.DateTime.Utils

  # Gregorian seconds from year 0 to the Unix epoch; used by from_unix!/2.
  @unix_epoch :calendar.datetime_to_gregorian_seconds {{1970, 1, 1}, {0, 0, 0}}

  @doc """
  Compare two datetimes.

  Receives two datetimes and compares the `t1`
  against `t2` and returns `:lt`, `:eq` or `:gt`.
  """
  defdelegate compare(t1, t2), to: Ecto.DateTime.Utils

  @moduledoc """
  An Ecto type that includes a date and a time.
  """

  @behaviour Ecto.Type
  defstruct [:year, :month, :day, :hour, :min, :sec, usec: 0]

  @doc """
  The Ecto primitive type.
  """
  def type, do: :naive_datetime
@doc """
Casts the given value to datetime.
It supports:
* a binary in the "YYYY-MM-DD HH:MM:SS" format
(may be separated by T and/or followed by "Z", as in `2014-04-17T14:00:00Z`)
* a binary in the "YYYY-MM-DD HH:MM:SS.USEC" format
(may be separated by T and/or followed by "Z", as in `2014-04-17T14:00:00.030Z`)
* a map with `"year"`, `"month"`,`"day"`, `"hour"`, `"minute"` keys
with `"second"` and `"microsecond"` as optional keys and values are integers or binaries
* a map with `:year`, `:month`,`:day`, `:hour`, `:minute` keys
with `:second` and `:microsecond` as optional keys and values are integers or binaries
* a tuple with `{{year, month, day}, {hour, min, sec}}` as integers or binaries
* a tuple with `{{year, month, day}, {hour, min, sec, usec}}` as integers or binaries
* an `Ecto.DateTime` struct itself
"""
def cast(dt), do: dt |> do_cast() |> validate_cast()
@doc """
Same as `cast/1` but raises `Ecto.CastError` on invalid datetimes.
"""
def cast!(value) do
case cast(value) do
{:ok, datetime} -> datetime
:error -> raise Ecto.CastError, "cannot cast #{inspect value} to datetime"
end
end
defp do_cast(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes, sep,
hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes, rest::binary>>) when sep in [?\s, ?T] do
if usec = usec(rest) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(sec), usec)
else
:error
end
end
defp do_cast(%Ecto.DateTime{} = dt) do
{:ok, dt}
end
defp do_cast(%{"year" => year, "month" => month, "day" => day, "hour" => hour, "min" => min} = map) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(Map.get(map, "sec", 0)),
to_i(Map.get(map, "usec", 0)))
end
defp do_cast(%{year: year, month: month, day: day, hour: hour, min: min} = map) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(Map.get(map, :sec, 0)),
to_i(Map.get(map, :usec, 0)))
end
defp do_cast(%{"year" => empty, "month" => empty, "day" => empty,
"hour" => empty, "minute" => empty}) when empty in ["", nil] do
{:ok, nil}
end
defp do_cast(%{year: empty, month: empty, day: empty,
hour: empty, minute: empty}) when empty in ["", nil] do
{:ok, nil}
end
defp do_cast(%{"year" => year, "month" => month, "day" => day, "hour" => hour, "minute" => min} = map) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(Map.get(map, "second", 0)),
to_i(Map.get(map, "microsecond", 0)))
end
defp do_cast(%{year: year, month: month, day: day, hour: hour, minute: min} = map) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(Map.get(map, :second, 0)),
to_i(Map.get(map, :microsecond, 0)))
end
defp do_cast({{year, month, day}, {hour, min, sec}}) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(sec), 0)
end
defp do_cast({{year, month, day}, {hour, min, sec, usec}}) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(sec), to_i(usec))
end
defp do_cast(_) do
:error
end
defp validate_cast(:error), do: :error
defp validate_cast({:ok, nil}), do: {:ok, nil}
defp validate_cast({:ok, %{year: y, month: m, day: d} = datetime}) do
if :calendar.valid_date(y, m, d), do: {:ok, datetime}, else: :error
end
defp from_parts(year, month, day, hour, min, sec, usec)
when is_date(year, month, day) and is_time(hour, min, sec, usec) do
{:ok, %Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec, usec: usec}}
end
defp from_parts(_, _, _, _, _, _, _), do: :error
@doc """
Converts an `Ecto.DateTime` into a `{date, time}` tuple.
"""
def dump(%Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec, usec: usec}) do
{:ok, {{year, month, day}, {hour, min, sec, usec}}}
end
def dump(_), do: :error
@doc """
Converts a `{date, time}` tuple into an `Ecto.DateTime`.
"""
def load({{_, _, _}, {_, _, _, _}} = datetime) do
{:ok, erl_load(datetime)}
end
def load({{_, _, _}, {_, _, _}} = datetime) do
{:ok, from_erl(datetime)}
end
def load(_), do: :error
@doc """
Converts `Ecto.DateTime` into an `Ecto.Date`.
"""
def to_date(%Ecto.DateTime{year: year, month: month, day: day}) do
%Ecto.Date{year: year, month: month, day: day}
end
@doc """
Converts `Ecto.DateTime` into an `Ecto.Time`.
"""
def to_time(%Ecto.DateTime{hour: hour, min: min, sec: sec, usec: usec}) do
%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}
end
@doc """
Converts the given `Ecto.Date` into `Ecto.DateTime` with the time being
00:00:00.
"""
def from_date(%Ecto.Date{year: year, month: month, day: day}) do
%Ecto.DateTime{year: year, month: month, day: day,
hour: 0, min: 0, sec: 0, usec: 0}
end
@doc """
Converts the given `Ecto.Date` and `Ecto.Time` into `Ecto.DateTime`.
"""
def from_date_and_time(%Ecto.Date{year: year, month: month, day: day},
%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}) do
%Ecto.DateTime{year: year, month: month, day: day,
hour: hour, min: min, sec: sec, usec: usec}
end
@doc """
Converts `Ecto.DateTime` to its string representation.

The date and time parts are separated by a single space.
"""
def to_string(%Ecto.DateTime{} = datetime) do
  format_datetime(datetime, " ")
end

@doc """
Converts `Ecto.DateTime` to its ISO 8601 representation
without timezone specification.

The date and time parts are separated by `"T"`.
"""
def to_iso8601(%Ecto.DateTime{} = datetime) do
  format_datetime(datetime, "T")
end

# Shared formatter for to_string/1 and to_iso8601/1, which differ only in
# the separator between the date and time parts. Microseconds are appended
# only when present and non-zero.
defp format_datetime(%Ecto.DateTime{year: year, month: month, day: day,
                                    hour: hour, min: min, sec: sec, usec: usec}, sep) do
  str = zero_pad(year, 4) <> "-" <> zero_pad(month, 2) <> "-" <> zero_pad(day, 2) <> sep <>
        zero_pad(hour, 2) <> ":" <> zero_pad(min, 2) <> ":" <> zero_pad(sec, 2)

  if is_nil(usec) or usec == 0 do
    str
  else
    str <> "." <> zero_pad(usec, 6)
  end
end
@doc """
Returns an `Ecto.DateTime` in UTC.
`precision` can be `:sec` or `:usec`.
"""
# Thin wrapper around `autogenerate/1`, which reads the current UTC clock.
def utc(precision \\ :sec) do
autogenerate(precision)
end
@doc """
Returns an Erlang datetime tuple from an `Ecto.DateTime`.

Note that microseconds are discarded.
"""
def to_erl(%Ecto.DateTime{} = dt) do
  {{dt.year, dt.month, dt.day}, {dt.hour, dt.min, dt.sec}}
end
@doc """
Returns an `Ecto.DateTime` from an Erlang datetime tuple.
"""
def from_erl({{y, mo, d}, {h, mi, s}}) do
  %Ecto.DateTime{year: y, month: mo, day: d, hour: h, min: mi, sec: s}
end
# Builds an `Ecto.DateTime` from a Unix timestamp expressed in `unit`.
# The timestamp is normalized to microseconds, split into whole seconds
# (added to the Unix epoch in gregorian seconds) and a usec remainder.
def from_unix!(integer, unit) do
  micros = System.convert_time_unit(integer, unit, :microseconds)
  seconds = div(micros, 1_000_000)
  microsecond = rem(micros, 1_000_000)

  {{year, month, day}, {hour, minute, second}} =
    :calendar.gregorian_seconds_to_datetime(@unix_epoch + seconds)

  %Ecto.DateTime{year: year, month: month, day: day,
                 hour: hour, min: minute, sec: second, usec: microsecond}
end
# Callback invoked by autogenerate fields.
@doc false
def autogenerate(precision \\ :sec)
# Second precision: read the Erlang UTC wall clock and zero out microseconds.
def autogenerate(:sec) do
{date, {h, m, s}} = :erlang.universaltime
erl_load({date, {h, m, s, 0}})
end
# Microsecond precision: :os.timestamp returns {mega, sec, micro}; the
# micro component is carried into the struct's :usec field.
def autogenerate(:usec) do
timestamp = {_, _, usec} = :os.timestamp
{date, {h, m, s}} = :calendar.now_to_datetime(timestamp)
erl_load({date, {h, m, s, usec}})
end
# Builds the struct from an Erlang-style tuple extended with microseconds.
defp erl_load({{y, mo, d}, {h, mi, s, u}}) do
  %Ecto.DateTime{year: y, month: mo, day: d, hour: h, min: mi, sec: s, usec: u}
end
end
defimpl String.Chars, for: [Ecto.DateTime, Ecto.Date, Ecto.Time] do
  # Delegate to each struct module's own `to_string/1`.
  def to_string(value), do: @for.to_string(value)
end
defimpl Inspect, for: [Ecto.DateTime, Ecto.Date, Ecto.Time] do
  # Module name captured once at compile time, e.g. "Ecto.DateTime".
  @inspected inspect(@for)

  # Renders e.g. #Ecto.DateTime<2016-01-01 00:00:00>.
  def inspect(value, _opts) do
    "#" <> @inspected <> "<" <> @for.to_string(value) <> ">"
  end
end
defimpl Ecto.DataType, for: Ecto.DateTime do
  # Dump to the adapter-level {{y, m, d}, {h, min, s, usec}} tuple.
  def dump(%Ecto.DateTime{} = dt) do
    {:ok, {{dt.year, dt.month, dt.day}, {dt.hour, dt.min, dt.sec, dt.usec}}}
  end
end
defimpl Ecto.DataType, for: Ecto.Date do
  # Dump to the adapter-level {year, month, day} tuple.
  def dump(%Ecto.Date{} = date) do
    {:ok, {date.year, date.month, date.day}}
  end
end
defimpl Ecto.DataType, for: Ecto.Time do
  # Dump to the adapter-level {hour, min, sec, usec} tuple.
  def dump(%Ecto.Time{} = time) do
    {:ok, {time.hour, time.min, time.sec, time.usec}}
  end
end
# Only compiled when the optional Poison dependency is available.
if Code.ensure_loaded?(Poison) do
defimpl Poison.Encoder, for: [Ecto.Date, Ecto.Time, Ecto.DateTime] do
# Encode as a double-quoted ISO 8601 string (?\" is the quote character).
def encode(dt, _opts), do: <<?", @for.to_iso8601(dt)::binary, ?">>
end
end
| 30.094477
| 115
| 0.594446
|
79c34eddd38c753ba0cd3c586ce6fd63e28702e3
| 7,644
|
ex
|
Elixir
|
core/handler/gear_action/web.ex
|
IvanPereyra-23/PaaS
|
0179c7b57645473308b0a295a70b6284ed220fbf
|
[
"Apache-2.0"
] | 1
|
2020-08-27T18:43:11.000Z
|
2020-08-27T18:43:11.000Z
|
core/handler/gear_action/web.ex
|
IvanPereyra-23/PaaS
|
0179c7b57645473308b0a295a70b6284ed220fbf
|
[
"Apache-2.0"
] | null | null | null |
core/handler/gear_action/web.ex
|
IvanPereyra-23/PaaS
|
0179c7b57645473308b0a295a70b6284ed220fbf
|
[
"Apache-2.0"
] | 1
|
2020-08-27T18:43:21.000Z
|
2020-08-27T18:43:21.000Z
|
# Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
use Croma
# Cowboy handler that dispatches an incoming HTTP request to a gear's
# controller action and, for websocket routes, upgrades the connection
# once the action has produced a "101" response.
defmodule AntikytheraCore.Handler.GearAction.Web do
alias Croma.Result, as: R
alias Antikythera.{GearName, PathInfo, Conn}
alias Antikythera.Http.{Method, QueryParams, Body}
alias Antikythera.Request.PathMatches
alias Antikythera.Context.GearEntryPoint
alias AntikytheraCore.Handler.{GearAction, GearError, HelperModules, CowboyReq, ExecutorPoolHelper, WebsocketState}
alias AntikytheraCore.Conn, as: CoreConn
alias AntikytheraCore.GearModule
alias AntikytheraCore.ExecutorPool.ActionRunner
alias AntikytheraCore.GearLog.ContextHelper
# @behaviour :cowboy_handler # This line results in "conflicting behaviours - callback init/2 required by both 'cowboy_websocket' and 'cowboy_handler'"
@behaviour :cowboy_websocket
@type http_reply :: {:ok, :cowboy_req.req, nil}
@type ws_upgrade :: {:cowboy_websocket, :cowboy_req.req, Conn.t, timeout}
max_frame_size = 5_000_000 # (Should we make this a mix config item?)
@ws_upgrade_options %{idle_timeout: 60_000, compress: true, max_frame_size: max_frame_size}
# Cowboy entry point for every request on this route: resolves the route,
# parses query params and request body, runs the gear action, and either
# replies directly (HTTP) or returns the websocket upgrade tuple. Each
# `<-` step short-circuits with an already-built error reply on failure.
@impl true
defun init(req1 :: :cowboy_req.req, gear_name :: v[GearName.t]) :: http_reply | ws_upgrade do
R.m do
method <- CowboyReq.method(req1)
path_info = CowboyReq.path_info(req1)
helper_modules = GearModule.request_helper_modules(gear_name)
{entry_point, path_matches, ws?} <- find_route(req1, gear_name, method, path_info, helper_modules)
routing_info = {gear_name, entry_point, method, path_info, path_matches}
qparams <- CowboyReq.query_params(req1, routing_info)
{req2, body_pair} <- CowboyReq.request_body_pair(req1, routing_info, qparams, helper_modules)
pure run_action_with_conn(req2, routing_info, qparams, body_pair, helper_modules, ws?)
end
|> case do
{:ok, {req3, state}} -> {:cowboy_websocket, req3, state, @ws_upgrade_options} # protocol upgrade to websocket
{:ok, req_reply} -> {:ok, req_reply, nil} # normal response
{:error, req_reply} -> {:ok, req_reply, nil} # error response
end
end
# Looks up the gear's router for this method/path. An unmatched route is
# answered immediately with the gear's "no route" error response.
defunp find_route(req :: :cowboy_req.req,
gear_name :: v[GearName.t],
method :: v[Method.t],
path_info :: v[PathInfo.t],
%HelperModules{router: router}) :: R.t({GearEntryPoint.t, PathMatches.t, boolean}) do
case router.__web_route__(method, path_info) do
{controller, action, path_matches, websocket?} -> {:ok, {{controller, action}, path_matches, websocket?}}
nil -> {:error, CowboyReq.with_conn(req, {gear_name, nil, method, path_info, %{}}, %{}, &GearError.no_route/1)}
end
end
# Branches on whether the matched route is a websocket route.
defunp run_action_with_conn(req :: :cowboy_req.req,
routing_info :: CowboyReq.routing_info,
qparams :: v[QueryParams.t],
body_pair :: {binary, Body.t},
helper_modules :: v[HelperModules.t],
websocket? :: v[boolean]) :: :cowboy_req.req | {:cowboy_req.req, WebsocketState.t} do
case websocket? do
true -> run_action_with_conn_ws(req, routing_info, qparams, body_pair, helper_modules)
false -> run_action_with_conn_http(req, routing_info, qparams, body_pair, helper_modules)
end
end
# Plain HTTP path: run the action inside logging/metrics instrumentation
# and let CowboyReq turn the resulting Conn into a cowboy reply.
defunp run_action_with_conn_http(req :: :cowboy_req.req,
{gear_name, entry_point, _, _, _} = routing_info :: CowboyReq.routing_info,
qparams :: v[QueryParams.t],
body_pair :: {binary, Body.t},
helper_modules :: v[HelperModules.t]) :: :cowboy_req.req do
CowboyReq.with_conn(req, routing_info, qparams, body_pair, fn conn ->
GearAction.with_logging_and_metrics_reporting(conn, helper_modules, fn ->
run_action_with_executor(conn, gear_name, entry_point, helper_modules)
end)
end)
end
# Websocket path: run the action; only a 101 response proceeds with the
# upgrade (returning the websocket state), otherwise reply as normal HTTP.
defunp run_action_with_conn_ws(req :: :cowboy_req.req,
{gear_name, entry_point, _, _, _} = routing_info :: CowboyReq.routing_info,
qparams :: v[QueryParams.t],
body_pair :: {binary, Body.t},
helper_modules :: v[HelperModules.t]) :: :cowboy_req.req | {:cowboy_req.req, WebsocketState.t} do
conn1 = CoreConn.make_from_cowboy_req(req, routing_info, qparams, body_pair)
ContextHelper.set(conn1)
GearAction.with_logging_and_metrics_reporting(conn1, helper_modules, fn ->
case run_action_with_executor(conn1, gear_name, entry_point, helper_modules) do
conn2 = %Conn{status: nil} -> %Conn{conn2 | status: 101} # Fill the status code with "101 Upgrade" in order to correctly report response metrics
conn2 -> conn2
end
end)
|> case do
%Conn{status: 101} = conn3 ->
ExecutorPoolHelper.increment_ws_count(conn3, req, helper_modules, fn ->
{req, WebsocketState.make(conn3, entry_point, helper_modules)}
end)
conn3 -> CoreConn.reply_as_cowboy_res(conn3, req)
end
end
# Runs the controller action on a worker checked out of the gear's
# executor pool.
defunp run_action_with_executor(conn1 :: v[Conn.t],
gear_name :: v[GearName.t],
entry_point :: v[GearEntryPoint.t],
helper_modules :: v[HelperModules.t]) :: Conn.t do
ExecutorPoolHelper.with_executor(conn1, gear_name, helper_modules, fn(pid, conn2) ->
ActionRunner.run(pid, conn2, entry_point)
end)
end
#
# callback implementations for cowboy_websocket
#
@impl true
defun websocket_init(ws_state :: v[WebsocketState.t]) :: WebsocketState.callback_result do
WebsocketState.init(ws_state)
end
# Ping/pong frames are answered by cowboy itself; everything else is a
# client message handed to the gear's websocket implementation.
@impl true
defun websocket_handle(frame :: :cow_ws.frame, ws_state :: v[WebsocketState.t]) :: WebsocketState.callback_result do
case frame do
:ping -> {:ok, ws_state}
:pong -> {:ok, ws_state}
_ -> WebsocketState.handle_client_message(ws_state, frame)
end
end
# Erlang messages sent to the connection process.
@impl true
defun websocket_info(message :: any, ws_state :: v[WebsocketState.t]) :: WebsocketState.callback_result do
case message do
{:EXIT, _, _} ->
# In rare conditions, websocket connection process receives an EXIT message
# about death of the original handler (i.e. the process that executed the `init/2` callback).
# Just neglect the message.
{:ok, ws_state}
{:antikythera_internal, :close} ->
close_frame = {:close, 1001, "server shutting down; please reconnect"}
{:reply, [close_frame], ws_state}
_ ->
WebsocketState.handle_server_message(ws_state, message)
end
end
# Websocket connections carry a WebsocketState and need cleanup;
# plain HTTP requests reach the second clause and do nothing.
@impl true
def terminate(reason, _maybe_req, %WebsocketState{} = ws_state) do
WebsocketState.terminate(ws_state, reason)
end
def terminate(_reason, _maybe_req, _state) do
:ok # normal HTTP request, do nothing
end
end
| 50.289474
| 164
| 0.599686
|
79c36e505af615abb497d8c4585b0cc49c7345d6
| 1,283
|
exs
|
Elixir
|
apps/site/test/site_web/plugs/date_test.exs
|
noisecapella/dotcom
|
d5ef869412102d2230fac3dcc216f01a29726227
|
[
"MIT"
] | 42
|
2019-05-29T16:05:30.000Z
|
2021-08-09T16:03:37.000Z
|
apps/site/test/site_web/plugs/date_test.exs
|
noisecapella/dotcom
|
d5ef869412102d2230fac3dcc216f01a29726227
|
[
"MIT"
] | 872
|
2019-05-29T17:55:50.000Z
|
2022-03-30T09:28:43.000Z
|
apps/site/test/site_web/plugs/date_test.exs
|
noisecapella/dotcom
|
d5ef869412102d2230fac3dcc216f01a29726227
|
[
"MIT"
] | 12
|
2019-07-01T18:33:21.000Z
|
2022-03-10T02:13:57.000Z
|
defmodule SiteWeb.Plugs.DateTest do
  use SiteWeb.ConnCase, async: true

  import SiteWeb.Plugs.Date

  # Fixed fallback date returned by the stub `date_fn/0` below.
  @date ~D[2016-01-01]

  def date_fn, do: @date

  describe "init/1" do
    test "defaults to Util.service_date/0" do
      assert init([]) == [date_fn: &Util.service_date/0]
    end
  end

  describe "call/2" do
    test "with no params, assigns date to the result of date_fn", %{conn: conn} do
      conn = call(%{conn | params: %{}}, date_fn: &date_fn/0)

      assert conn.assigns.date == @date
    end

    test "with a valid date_time param, parses that into date_time", %{conn: conn} do
      conn = call(%{conn | params: %{"date" => "2016-12-12"}}, date_fn: &date_fn/0)

      assert conn.assigns.date == ~D[2016-12-12]
    end

    test "with an invalid date_time param, returns the result of date_fn", %{conn: conn} do
      conn = call(%{conn | params: %{"date" => "not_a_time"}}, date_fn: &date_fn/0)

      assert conn.assigns.date == @date
    end

    test "with an imaginary date_time param, returns the result of date_fn", %{conn: conn} do
      conn = call(%{conn | params: %{"date" => "2018-09-31"}}, date_fn: &date_fn/0)

      assert conn.assigns.date == @date
    end
  end
end
| 24.673077
| 93
| 0.592362
|
79c38047d0f97800b2d0fec1a8ca3f4649ad32a6
| 6,808
|
ex
|
Elixir
|
lib/timetracker.ex
|
jlogar/toggler
|
e906f145d5a38bafe9048dfd256496a52436a5b1
|
[
"MIT"
] | 1
|
2020-11-27T07:13:45.000Z
|
2020-11-27T07:13:45.000Z
|
lib/timetracker.ex
|
jlogar/toggler
|
e906f145d5a38bafe9048dfd256496a52436a5b1
|
[
"MIT"
] | null | null | null |
lib/timetracker.ex
|
jlogar/toggler
|
e906f145d5a38bafe9048dfd256496a52436a5b1
|
[
"MIT"
] | null | null | null |
# Client for an anuko-style Timetracker web UI: logs in with session
# cookies, scrapes projects/tasks/entries out of the HTML, and posts new
# time entries. All HTTP goes through HTTPoison; HTML parsing uses Floki.
defmodule TogglerCli.Timetracker do
require Logger
# Parses a Set-Cookie header value into a {name, value} tuple, keeping
# only the first "name=value" pair and dropping attributes after ";".
defp parse_cookie(header_value) do
[key, val] =
String.split(header_value, ";")
|> Enum.map(fn t -> String.split(t, "=") end)
|> hd
{key, val}
end
# Finds the cookie named `name` among the response headers.
# NOTE(review): `hd/1` raises if the cookie is absent — callers rely on
# the server always setting it.
defp get_cookie(headers, name) do
cookie_headers =
Enum.filter(headers, fn {key, _} -> String.match?(key, ~r/\Aset-cookie\z/i) end)
|> Enum.map(fn {_, v} -> v end)
cookie_headers
|> Enum.map(fn c -> parse_cookie(c) end)
|> Enum.filter(fn {key, _val} -> key == name end)
|> hd
end
# Base URL of the Timetracker instance, taken from app config.
defp get_url do
Application.get_env(:toggler_cli, :timetracker_url)
end
# GETs the login page to obtain a fresh PHP session cookie.
# Returns {:ok, {cookie_name, value}} or {:err, reason}.
def get_session_id do
case HTTPoison.get(
"#{get_url()}/login.php",
[]
) do
{:ok, %HTTPoison.Response{status_code: 200, headers: headers}} ->
Logger.debug("TT get login: OK from timetracker.")
{:ok, get_cookie(headers, "tt_PHPSESSID")}
{:ok, %HTTPoison.Response{status_code: status_code}} ->
Logger.error("TT get login: #{status_code}")
{:err, "err"}
{:error, error} ->
{:err, error}
end
end
# POSTs the login form using the session cookie from get_session_id/0.
# A 302 redirect signals success; returns the session and login cookies.
def login_session({cookie_name, session_id}) do
username = Application.get_env(:toggler_cli, :timetracker_username)
password = Application.get_env(:toggler_cli, :timetracker_password)
form =
{:form,
[
{"login", username},
{"password", password},
{"btn_login", "Login"},
{"browser_today", Calendar.strftime(Date.utc_today(), "%Y-%m-%d")}
]}
res =
case HTTPoison.post(
"#{get_url()}/login.php",
form,
%{},
hackney: [cookie: ["#{cookie_name}=#{session_id}; tt_login=#{username}"]]
) do
{:ok, %HTTPoison.Response{status_code: 302, headers: headers}} ->
Logger.debug("TT login: OK from timetracker.")
{:ok, [{cookie_name, session_id}, get_cookie(headers, "tt_login")]}
{:ok, %HTTPoison.Response{status_code: status_code}} ->
Logger.error("TT login: #{status_code}")
{:err, "err"}
{:error, error} ->
{:err, error}
end
res
end
# Extracts {index, name} pairs for projects and tasks from the inline
# <script> block that defines the project_names/task_names JS arrays.
defp scrape_projects_tasks(body) do
{:ok, document} = Floki.parse_document(body)
{_, _, [script]} =
Floki.find(document, "script:not([src])")
|> Enum.filter(fn {_, _, children} ->
String.contains?(hd(children), "project_names")
end)
|> hd
projects =
Regex.scan(
~r/project_names\[(?<project_index>(\d)*)\](\s)?=(\s)?\"(?<project_name>(\d)+ - (.)+)\";/,
script,
capture: :all_names
)
|> Enum.map(fn [id, name] -> {id, name} end)
tasks =
Regex.scan(
~r/task_names\[(?<task_index>(\d)*)\](\s)?=(\s)?\"(?<task_name>(\d)+ - (.)+)\";/,
script,
capture: :all_names
)
|> Enum.map(fn [index, name] -> {index, name} end)
{projects, tasks}
end
# Builds the hackney cookie option from a list of {name, value} tuples.
defp get_cookie_options(cookies) do
[cookie: [Enum.join(Enum.map(cookies, fn {key, val} -> "#{key}=#{val}" end), ";")]]
end
# GETs the time entry page and scrapes the available projects and tasks.
def get_projects(cookies) do
# :hackney_trace.enable(:max, :io)
res =
case HTTPoison.get(
"#{get_url()}/time.php",
%{},
hackney: get_cookie_options(cookies)
) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
Logger.debug("TT get time: OK from timetracker.")
{projects, tasks} = scrape_projects_tasks(body)
Logger.debug(
"scraped the html, got #{length(projects)} projects, #{length(tasks)} tasks."
)
{:ok, [projects: projects, tasks: tasks]}
{:ok, %HTTPoison.Response{status_code: status_code}} ->
Logger.error("TT get time: #{status_code}")
{:err, "err"}
{:error, error} ->
{:err, error}
end
# :hackney_trace.disable()
res
end
# Scrapes the entries recorded for `date`, returning {toggl_id, tt_id}
# pairs. The toggl id is read from a "[[<id>]]" marker in the note field;
# entries without that marker (not created from toggl) are filtered out.
def get_daily_tasks(cookies, date) do
# :hackney_trace.enable(:max, :io)
date_string = Date.to_string(date)
res =
case HTTPoison.get(
"#{get_url()}/time.php",
%{},
params: %{
date: date_string
},
hackney: get_cookie_options(cookies)
) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
Logger.debug("TT get time for date #{date_string}: OK from timetracker.")
{:ok, document} = Floki.parse_document(body)
entry_rows =
Floki.find(document, "form[name=\"timeRecordForm\"]")
|> Floki.find("table:nth-child(3) tr")
|> Floki.find("table")
|> Floki.find("tr[bgcolor]")
entries =
entry_rows
|> Enum.map(fn entry ->
note =
entry
|> Floki.find("td:nth-child(6)")
|> Floki.text()
toggl_id =
case Regex.run(~r/\[\[(?<id>(\d)*)\]\]/, note, capture: :all_names) do
[id] -> id
nil -> nil
end
link =
entry
|> Floki.find("a[href^=\"time_edit.php\"]")
|> Floki.attribute("href")
|> hd
[_, tt_id] = String.split(link, "=")
{toggl_id, tt_id}
end)
# we're only touching those that originate from toggl
|> Enum.filter(fn {toggl_id, _} -> toggl_id end)
Logger.debug(
"scraped the html, got #{length(entry_rows)} entries, #{length(entries)} from toggl."
)
{:ok, entries}
{:ok, %HTTPoison.Response{status_code: status_code}} ->
Logger.error("TT get time: #{status_code}")
{:err, "err"}
{:error, error} ->
{:err, error}
end
res
end
# POSTs a new time entry form; `form` is a map of field name => value
# and must contain a "date" key used for the query string.
# A 302 redirect signals success.
def push_new_entry(cookies, form) do
Logger.debug(Poison.encode!(form))
# :hackney_trace.enable(:max, :io)
form_for_sending =
{:form,
form
|> Map.to_list()}
res =
case HTTPoison.post(
"#{get_url()}/time.php",
form_for_sending,
%{},
params: %{
date: form["date"]
},
hackney: get_cookie_options(cookies)
) do
{:ok, %HTTPoison.Response{status_code: 302, headers: _headers}} ->
Logger.debug("TT post time: OK from timetracker.")
{:ok, ""}
{:ok, %HTTPoison.Response{status_code: status_code, body: body}} ->
Logger.error("TT post time: #{status_code}")
Logger.debug(body)
{:error, "err"}
{:error, error} ->
{:error, error}
end
# :hackney_trace.disable()
res
end
end
| 27.015873
| 98
| 0.512926
|
79c3939307475089d133edcaa1e494b81dd32f4a
| 6,979
|
ex
|
Elixir
|
lib/credo/check/consistency/space_around_operators.ex
|
jlgeering/credo
|
b952190ed758c262aa0d9bbee01227f9b1f0c63b
|
[
"MIT"
] | null | null | null |
lib/credo/check/consistency/space_around_operators.ex
|
jlgeering/credo
|
b952190ed758c262aa0d9bbee01227f9b1f0c63b
|
[
"MIT"
] | null | null | null |
lib/credo/check/consistency/space_around_operators.ex
|
jlgeering/credo
|
b952190ed758c262aa0d9bbee01227f9b1f0c63b
|
[
"MIT"
] | null | null | null |
defmodule Credo.Check.Consistency.SpaceAroundOperators do
@moduledoc """
Use spaces around operators like `+`, `-`, `*` and `/`. This is the
**preferred** way, although other styles are possible, as long as it is
applied consistently.
# preferred
1 + 2 * 4
# also okay
1+2*4
While this is not necessarily a concern for the correctness of your code,
you should use a consistent style throughout your codebase.
"""
@explanation [
check: @moduledoc,
params: [
ignore: "List of operators to be ignored for this check."
]
]
@collector Credo.Check.Consistency.SpaceAroundOperators.Collector
@default_params [ignore: [:|]]
use Credo.Check, run_on_all: true, base_priority: :high
# TODO: add *ignored* operators, so you can add "|" and still write
# [head|tail] while enforcing 2 + 3 / 1 ...
# FIXME: this seems to be already implemented, but there don't seem to be
# any related test cases around.
@doc false
def run(source_files, exec, params \\ []) when is_list(source_files) do
@collector.find_and_append_issues(source_files, exec, params, &issues_for/3)
end
# Builds issues for all locations that deviate from the dominant style
# (`expected` is :with_space or :without_space), after dropping ignored
# operators and locations the heuristics below decide are false positives.
defp issues_for(expected, source_file, params) do
tokens = Credo.Code.to_tokens(source_file)
ast = SourceFile.ast(source_file)
issue_meta = IssueMeta.for(source_file, params)
issue_locations =
expected
|> @collector.find_locations_not_matching(source_file)
|> Enum.reject(&ignored?(&1, params))
|> Enum.filter(&create_issue?(&1, tokens, ast, issue_meta))
Enum.map(issue_locations, fn location ->
format_issue(
issue_meta,
message: message_for(expected),
line_no: location[:line_no],
column: location[:column],
trigger: location[:trigger]
)
end)
end
defp message_for(:with_space = _expected) do
"There are spaces around operators most of the time, but not here."
end
defp message_for(:without_space = _expected) do
"There are no spaces around operators most of the time, but here there are."
end
# True when the location's operator is listed in the :ignore param.
defp ignored?(location, params) do
ignored_triggers = Params.get(params, :ignore, @default_params)
Enum.member?(ignored_triggers, location[:trigger])
end
# Decides whether a non-conforming location is a genuine issue, using
# the source line text (and, for +/-, the token/AST context).
defp create_issue?(location, tokens, ast, issue_meta) do
line_no = location[:line_no]
trigger = location[:trigger]
column = location[:column]
line =
issue_meta
|> IssueMeta.source_file()
|> SourceFile.line_at(line_no)
create_issue?(trigger, line_no, column, line, tokens, ast)
end
# +/- additionally require that the operator is not a sign of a function
# call argument (e.g. `foo(-1)`), checked against the AST.
defp create_issue?(trigger, line_no, column, line, tokens, ast) when trigger in [:+, :-] do
create_issue?(line, column, trigger) &&
!parameter_in_function_call?({line_no, column, trigger}, tokens, ast)
end
defp create_issue?(trigger, _line_no, column, line, _tokens, _ast) do
create_issue?(line, column, trigger)
end
# Don't create issues for `c = -1`
# TODO: Consider moving these checks inside the Collector.
defp create_issue?(line, column, trigger) when trigger in [:+, :-] do
!number_with_sign?(line, column) && !number_in_range?(line, column) &&
!(trigger == :- && minus_in_binary_size?(line, column))
end
defp create_issue?(line, column, trigger) when trigger == :-> do
!arrow_in_typespec?(line, column)
end
defp create_issue?(line, column, trigger) when trigger == :/ do
!number_in_fun?(line, column)
end
defp create_issue?(_, _, _), do: true
# True for `->` inside a typespec's parenthesized argument list.
defp arrow_in_typespec?(line, column) do
# -2 because we need to subtract the operator
line
|> String.slice(0..(column - 2))
|> String.match?(~r/\(\s*$/)
end
# True when +/- is a sign prefix (preceded by an opening token, comma,
# another operator or a module attribute) rather than a binary operator.
defp number_with_sign?(line, column) do
# -2 because we need to subtract the operator
line
|> String.slice(0..(column - 2))
|> String.match?(~r/(\A\s+|\@[a-zA-Z0-9\_]+|[\|\\\{\[\(\,\:\>\<\=\+\-\*\/])\s*$/)
end
# True when the operator is followed by a range literal like `1..2`.
defp number_in_range?(line, column) do
line
|> String.slice(column..-1)
|> String.match?(~r/^\d+\.\./)
end
# True for `/` in a function capture such as `&fun/1` or `&Mod.fun/1`.
defp number_in_fun?(line, column) do
line
|> String.slice(0..(column - 2))
|> String.match?(~r/[\.\&][a-z0-9_]+$/)
end
# TODO: this implementation is a bit naive. improve it.
# Scores several textual hints that the minus appears inside a binary
# pattern's size expression (e.g. <<x::size(n-1)>>); two or more hits
# means "not an issue".
defp minus_in_binary_size?(line, column) do
# -2 because we need to subtract the operator
binary_pattern_start_before? =
line
|> String.slice(0..(column - 2))
|> String.match?(~r/\<\</)
# -2 because we need to subtract the operator
double_colon_before? =
line
|> String.slice(0..(column - 2))
|> String.match?(~r/\:\:/)
# -1 because we need to subtract the operator
binary_pattern_end_after? =
line
|> String.slice(column..-1)
|> String.match?(~r/\>\>/)
# -1 because we need to subtract the operator
typed_after? =
line
|> String.slice(column..-1)
|> String.match?(~r/^\s*(integer|native|signed|unsigned|binary|size|little|float)/)
# -2 because we need to subtract the operator
typed_before? =
line
|> String.slice(0..(column - 2))
|> String.match?(~r/(integer|native|signed|unsigned|binary|size|little|float)\s*$/)
heuristics_met_count =
[
binary_pattern_start_before?,
binary_pattern_end_after?,
double_colon_before?,
typed_after?,
typed_before?
]
|> Enum.filter(& &1)
|> Enum.count()
heuristics_met_count >= 2
end
# Correlates the token before the operator with the AST to determine
# whether the operator is part of a function call's argument.
defp parameter_in_function_call?(location_tuple, tokens, ast) do
case find_prev_current_next_token(tokens, location_tuple) do
{prev, _current, _next} ->
Credo.Code.TokenAstCorrelation.find_tokens_in_ast(prev, ast)
|> List.wrap()
|> List.first()
|> is_parameter_in_function_call()
_ ->
false
end
end
defp is_parameter_in_function_call({atom, _, arguments})
when is_atom(atom) and is_list(arguments) do
true
end
defp is_parameter_in_function_call(
{{:., _, [{:__aliases__, _, _mods}, fun_name]}, _, arguments}
)
when is_atom(fun_name) and is_list(arguments) do
true
end
defp is_parameter_in_function_call(_) do
false
end
# TOKENS
# Finds the {previous, current, next} token triple whose current token
# sits at the issue's line/column with the issue's operator.
defp find_prev_current_next_token(tokens, location_tuple) do
tokens
|> traverse_prev_current_next(&matching_location(location_tuple, &1, &2, &3, &4), [])
|> List.first()
end
# Sliding-window traversal over the token list, invoking `callback` for
# every consecutive (prev, current, next) triple.
defp traverse_prev_current_next(tokens, callback, acc) do
tokens
|> case do
[prev | [current | [next | rest]]] ->
acc = callback.(prev, current, next, acc)
traverse_prev_current_next([current | [next | rest]], callback, acc)
_ ->
acc
end
end
defp matching_location(
{line_no, column, trigger},
prev,
{_, {line_no, column, _}, trigger} = current,
next,
acc
) do
acc ++ [{prev, current, next}]
end
defp matching_location(_, _prev, _current, _next, acc) do
acc
end
end
| 27.58498
| 93
| 0.637914
|
79c3c37752b110cd2aeb3d311459729250eb1190
| 322
|
ex
|
Elixir
|
lib/mishka_auth/application.ex
|
farhangj2020/mishka-auth
|
33d461f248d5cdcc935c0d64a93faf95830fe500
|
[
"MIT"
] | 8
|
2020-09-07T14:47:42.000Z
|
2021-11-03T21:53:37.000Z
|
lib/mishka_auth/application.ex
|
farhangj2020/mishka-auth
|
33d461f248d5cdcc935c0d64a93faf95830fe500
|
[
"MIT"
] | 1
|
2022-03-11T09:51:34.000Z
|
2022-03-11T09:51:34.000Z
|
lib/mishka_auth/application.ex
|
farhangj2020/mishka-auth
|
33d461f248d5cdcc935c0d64a93faf95830fe500
|
[
"MIT"
] | 3
|
2020-08-02T14:05:21.000Z
|
2020-09-12T12:58:59.000Z
|
defmodule MishkaAuth.Application do
  @moduledoc false

  use Application

  # Starts the supervision tree with a single Redix connection,
  # authenticated with the password taken from the app config.
  def start(_type, _args) do
    redix_spec = {Redix, name: :redix, password: "#{MishkaAuth.get_config_info(:redix)}"}

    Supervisor.start_link([redix_spec], strategy: :one_for_one, name: MishkaAuth.Supervisor)
  end
end
| 24.769231
| 78
| 0.698758
|
79c3c4e5e692ca5502248289527d5e42a80853e7
| 924
|
ex
|
Elixir
|
lib/special.ex
|
elcritch/matrex_numerix
|
5835a9b477d8ea41bb9b862272a0997fe37c1236
|
[
"MIT"
] | 8
|
2020-01-08T06:55:36.000Z
|
2021-03-10T15:44:46.000Z
|
lib/special.ex
|
elcritch/matrex_numerix
|
5835a9b477d8ea41bb9b862272a0997fe37c1236
|
[
"MIT"
] | null | null | null |
lib/special.ex
|
elcritch/matrex_numerix
|
5835a9b477d8ea41bb9b862272a0997fe37c1236
|
[
"MIT"
] | null | null | null |
defmodule MatrexNumerix.Special do
  @moduledoc """
  Special mathematical functions that would make Euler proud.
  """

  alias MatrexNumerix.Common

  @doc """
  Calculates the logit function (the inverse of the sigmoidal
  logistic function) for probability p (a number between 0 and 1).

  Returns `:negative_infinity` for p = 0, `:infinity` for p = 1 and
  `nil` for values outside the [0, 1] interval.
  """
  @spec logit(number) :: Common.extended_number() | nil
  # Guards use `==` so integer and float forms (0/0.0, 1/1.0) are both
  # handled; previously only the float literals matched and an integer
  # 0 or 1 fell through to `:math.log/1` and raised an ArithmeticError.
  def logit(p) when p == 0, do: :negative_infinity
  def logit(p) when p == 1, do: :infinity
  def logit(p) when p < 0 or p > 1, do: nil

  def logit(p) do
    :math.log(p / (1 - p))
  end

  @doc """
  Calculates the sigmoidal logistic function, a common "S" shape.
  It is the inverse of the natural logit function and so can be
  used to convert the logarithm of odds into a probability.
  """
  @spec logistic(Common.extended_number()) :: float
  def logistic(:negative_infinity), do: 0.0
  def logistic(:infinity), do: 1.0

  def logistic(p) do
    1 / (:math.exp(-p) + 1)
  end
end
| 27.176471
| 67
| 0.677489
|
79c3ca5c99968593e8bfbd76e3128883f9302f72
| 9,539
|
ex
|
Elixir
|
lib/sshkit.ex
|
Bugagazavr/sshkit.ex
|
4180f372a60e39b6a5f13e403f4664e6338b6105
|
[
"MIT"
] | null | null | null |
lib/sshkit.ex
|
Bugagazavr/sshkit.ex
|
4180f372a60e39b6a5f13e403f4664e6338b6105
|
[
"MIT"
] | null | null | null |
lib/sshkit.ex
|
Bugagazavr/sshkit.ex
|
4180f372a60e39b6a5f13e403f4664e6338b6105
|
[
"MIT"
] | null | null | null |
defmodule SSHKit do
@moduledoc """
A toolkit for performing tasks on one or more servers.
```
hosts = ["1.eg.io", {"2.eg.io", port: 2222}]
hosts = [%SSHKit.Host{name: "3.eg.io", options: [port: 2223]} | hosts]
context =
SSHKit.context(hosts)
|> SSHKit.path("/var/www/phx")
|> SSHKit.user("deploy")
|> SSHKit.group("deploy")
|> SSHKit.umask("022")
|> SSHKit.env(%{"NODE_ENV" => "production"})
:ok = SSHKit.upload(context, ".", recursive: true)
:ok = SSHKit.run(context, "yarn install", mode: :parallel)
```
"""
alias SSHKit.SCP
alias SSHKit.SSH
alias SSHKit.Context
alias SSHKit.Host
@doc """
Produces an `SSHKit.Host` struct holding the information
needed to connect to a (remote) host.

Accepts either a map with `:name` and `:options` keys (so passing an
existing `SSHKit.Host` returns an equal struct):

```
host = SSHKit.host(%{name: "name.io", options: [port: 2222]})
host == SSHKit.host(host)
```

…or a `{name, options}` tuple:

```
host = SSHKit.host({"name.io", port: 2222})
```

See `host/2` for additional details and examples.
"""
def host(%{name: name, options: options}), do: %Host{name: name, options: options}
def host({name, options}), do: %Host{name: name, options: options}
@doc """
Produces an `SSHKit.Host` struct holding the information
needed to connect to a (remote) host.

In its simplest form, pass just a hostname:

```
host = SSHKit.host("name.io")
```

Host options, e.g. a non-standard port, go in the second argument:

```
host = SSHKit.host("name.io", port: 2222)
```

When the first argument is a map or tuple carrying its own options,
the second argument acts as defaults which the host's own options
override. One or many hosts can then seed an execution context:

```
host
|> SSHKit.context()
|> SSHKit.run("echo \"That was fun\"")
```

See `host/1` for additional ways of specifying host details.
"""
def host(host, options \\ [])

def host(name, options) when is_binary(name), do: %Host{name: name, options: options}

def host(%{name: name, options: options}, defaults),
  do: %Host{name: name, options: Keyword.merge(defaults, options)}

def host({name, options}, defaults),
  do: %Host{name: name, options: Keyword.merge(defaults, options)}
@doc """
Takes one or more (remote) hosts and creates an execution context in which
remote commands can be run. Accepts any form of host specification also
accepted by `host/1` and `host/2`, i.e. binaries, maps and 2-tuples.

See `path/2`, `user/2`, `group/2`, `umask/2`, and `env/2`
for details on how to derive variations of a context.

## Example

Commands issued in a multi-host context are executed on every host:

```
context = SSHKit.context(["10.0.0.1", "10.0.0.2"])
```

Hosts may carry individual connection options:

```
context = SSHKit.context([{"10.0.0.3", port: 2223}, %{name: "10.0.0.4", options: [port: 2224]}])
```

Shared options for all hosts go in the second argument:

```
context = SSHKit.context(["10.0.0.1", "10.0.0.2"], user: "admin", port: 2222)
```
"""
def context(hosts, defaults \\ []) do
  %Context{hosts: hosts |> List.wrap() |> Enum.map(&host(&1, defaults))}
end
@doc """
Changes the working directory commands are executed in for the given context.
Returns a new, derived context for easy chaining.

## Example

Create `/var/www/app/config.json`:

```
"10.0.0.1"
|> SSHKit.context()
|> SSHKit.path("/var/www/app")
|> SSHKit.run("touch config.json")
```
"""
def path(context, path), do: %Context{context | path: path}
@doc """
Changes the file creation mode mask affecting default file and directory
permissions.
Returns a new, derived context for easy chaining.

## Example

Create `precious.txt`, readable and writable only for the logged-in user:

```
"10.0.0.1"
|> SSHKit.context()
|> SSHKit.umask("077")
|> SSHKit.run("touch precious.txt")
```
"""
def umask(context, mask), do: %Context{context | umask: mask}
@doc """
Specifies the user under whose name commands are executed.
That user might be different than the user with which
ssh connects to the remote host.
Returns a new, derived context for easy chaining.

## Example

Commands in this context run as `deploy_user`, although `login_user`
is used to establish the SSH connection:

```
context =
  {"10.0.0.1", port: 3000, user: "login_user", password: "secret"}
  |> SSHKit.context()
  |> SSHKit.user("deploy_user")
```
"""
def user(context, name), do: %Context{context | user: name}
@doc """
Specifies the group commands are executed with.
Returns a new, derived context for easy chaining.

## Example

All commands executed in the created `context` will run in group `www`:

```
context =
  "10.0.0.1"
  |> SSHKit.context()
  |> SSHKit.group("www")
```
"""
def group(context, name), do: %Context{context | group: name}
@doc """
Defines new environment variables or overrides existing ones
for a given context.
Returns a new, derived context for easy chaining.

## Examples

Setting `NODE_ENV=production`:

```
context =
  "10.0.0.1"
  |> SSHKit.context()
  |> SSHKit.env(%{"NODE_ENV" => "production"})

# Run the npm start script with NODE_ENV=production
SSHKit.run(context, "npm start")
```

Modifying the `PATH`:

```
context =
  "10.0.0.1"
  |> SSHKit.context()
  |> SSHKit.env(%{"PATH" => "$HOME/.rbenv/shims:$PATH"})

# Execute the rbenv-installed ruby to print its version
SSHKit.run(context, "ruby --version")
```
"""
def env(context, map), do: %Context{context | env: map}
@doc ~S"""
Executes a command in the given context.
Returns a list of tuples, one for each host in the context.
The resulting tuples have the form `{:ok, output, exit_code}` –
as returned by `SSHKit.SSH.run/3`:
* `exit_code` is the number with which the executed command returned.
If everything went well, that usually is `0`.
* `output` is a keyword list of the output collected from the command.
It has the form:
```
[
stdout: "output on standard out",
stderr: "output on standard error",
stdout: "some more normal output",
…
]
```
## Example
Run a command and verify its output:
```
[{:ok, output, 0}] =
  "example.io"
  |> SSHKit.context()
  |> SSHKit.run("echo \"Hello World!\"")
stdout =
  output
  |> Keyword.get_values(:stdout)
  |> Enum.join()
assert "Hello World!\n" == stdout
```
"""
def run(context, command) do
  # Derive the effective command for this context (see Context.build/2).
  cmd = Context.build(context, command)
  # One fresh connection per host: connect, run, always close, keep the result.
  run = fn host ->
    {:ok, conn} = SSH.connect(host.name, host.options)
    res = SSH.run(conn, cmd)
    :ok = SSH.close(conn)
    res
  end
  # Hosts are processed sequentially, in the order stored on the context.
  Enum.map(context.hosts, run)
end
@doc ~S"""
Upload a file or files to the given context.
Returns a list of `:ok` or `{:error, reason}` - one for each host.
Possible options are:
* `as: "remote.txt"` - specify the name of the uploaded file/directory
* all options accepted by `SSHKit.SCP.Upload.transfer/4`
## Examples
Upload all files and folders in current directory to "/workspace":
```
[:ok] =
  "example.io"
  |> SSHKit.context()
  |> SSHKit.path("/workspace")
  |> SSHKit.upload(".", recursive: true)
```
Upload file to different name on host:
```
[:ok] =
  "example.io"
  |> SSHKit.context()
  |> SSHKit.upload("local.txt", as: "remote.txt")
```
"""
def upload(context, path, options \\ []) do
  # The remote name defaults to the local basename unless `:as` overrides it,
  # and is resolved against the context's working directory.
  target_name = Keyword.get(options, :as, Path.basename(path))
  destination = build_remote_path(context, target_name)

  Enum.map(context.hosts, fn host ->
    {:ok, result} =
      SSH.connect(host.name, host.options, fn conn ->
        SCP.upload(conn, path, destination, options)
      end)

    result
  end)
end
@doc ~S"""
Download a file or files from the given context.
Returns a list of `:ok` or `{:error, reason}` - one for each host.
Possible options are:
* `as: "local.txt"` - specify the name of the downloaded file/directory
* all options accepted by `SSHKit.SCP.Download.transfer/4`
## Examples
Download all files and folders in context directory to current working directory:
```
[:ok] =
  "example.io"
  |> SSHKit.context()
  |> SSHKit.path("/workspace")
  |> SSHKit.download(".", recursive: true)
```
Download file to different local name:
```
[:ok] =
  "example.io"
  |> SSHKit.context()
  |> SSHKit.download("remote.txt", as: "local.txt")
```
"""
def download(context, path, options \\ []) do
  # Resolve the remote source against the context's working directory; the
  # local name defaults to the remote basename unless `:as` overrides it.
  remote = build_remote_path(context, path)
  local = Keyword.get(options, :as, Path.basename(path))

  run = fn host ->
    {:ok, res} =
      SSH.connect(host.name, host.options, fn conn ->
        SCP.download(conn, remote, local, options)
      end)

    res
  end

  Enum.map(context.hosts, run)
end
# Resolves `path` against the context's working directory, falling back to
# the current directory (".") when no path has been set on the context.
defp build_remote_path(context, path) do
  base = context.path || "."
  Path.absname(path, base)
end
end
| 22.820574
| 83
| 0.628682
|
79c40f5053796c99bde3316c4136c7676b618697
| 1,591
|
exs
|
Elixir
|
phoenix/budgets/mix.exs
|
stevenjackson/jacksonbudget
|
b1c0189684829f5be13155726ce156f1fccbc8fe
|
[
"MIT"
] | null | null | null |
phoenix/budgets/mix.exs
|
stevenjackson/jacksonbudget
|
b1c0189684829f5be13155726ce156f1fccbc8fe
|
[
"MIT"
] | null | null | null |
phoenix/budgets/mix.exs
|
stevenjackson/jacksonbudget
|
b1c0189684829f5be13155726ce156f1fccbc8fe
|
[
"MIT"
] | null | null | null |
defmodule Budgets.MixProject do
  use Mix.Project

  # Top-level project configuration consumed by Mix (`mix compile`, `mix deps.get`, …).
  def project do
    [
      app: :budgets,
      version: "0.1.0",
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {Budgets.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Specifies which paths to compile per environment.
  # Test helpers under test/support are only compiled for the :test env.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.5.8"},
      {:phoenix_html, "~> 2.11"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:phoenix_live_dashboard, "~> 0.4"},
      {:telemetry_metrics, "~> 0.4"},
      {:telemetry_poller, "~> 0.4"},
      {:gettext, "~> 0.11"},
      {:jason, "~> 1.0"},
      {:plug_cowboy, "~> 2.0"},
      {:google_api_storage, "~> 0.29.0"},
      {:goth, "~> 1.3.0-rc.2"},
      {:hackney, "~> 1.17"}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to install project dependencies and perform other setup tasks, run:
  #
  #     $ mix setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      setup: ["deps.get"]
    ]
  end
end
| 25.253968
| 84
| 0.577624
|
79c41228ba748d371464d8c5332c2c1441e52a1d
| 214
|
exs
|
Elixir
|
ListsAndRecursion/7.4.exs
|
miguelcoba/ProgrammingElixir
|
c3aee9151ab2346fbd31a5411c473b1a4e626e36
|
[
"MIT"
] | 2
|
2015-11-14T21:35:01.000Z
|
2017-07-12T07:09:40.000Z
|
ListsAndRecursion/7.4.exs
|
miguelcoba/ProgrammingElixir
|
c3aee9151ab2346fbd31a5411c473b1a4e626e36
|
[
"MIT"
] | null | null | null |
ListsAndRecursion/7.4.exs
|
miguelcoba/ProgrammingElixir
|
c3aee9151ab2346fbd31a5411c473b1a4e626e36
|
[
"MIT"
] | null | null | null |
defmodule MyList do
  @doc """
  Returns the list of integers from `from` up to `to`, inclusive.
  An empty list is returned when `to` is smaller than `from`.
  """
  def span(from, to), do: do_span(from, to)

  # Descending bounds yield an empty span.
  defp do_span(from, to) when to < from, do: []
  # Equal bounds (matched structurally) yield a single-element span.
  defp do_span(to, to), do: [to]
  defp do_span(from, to), do: [from | do_span(from + 1, to)]
end
| 30.571429
| 55
| 0.630841
|
79c4187a7b58ff2b77d69f4145238ae8498d8d68
| 1,565
|
ex
|
Elixir
|
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_text_input.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_text_input.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_cx_v3_text_input.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3TextInput do
  @moduledoc """
  Represents the natural language text to be processed.
  ## Attributes
  * `text` (*type:* `String.t`, *default:* `nil`) - Required. The UTF-8 encoded natural language text to be processed. Text length must not exceed 256 characters.
  """
  # NOTE: generated model code — regenerate instead of editing by hand.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :text => String.t()
        }

  # JSON field mapping handled by GoogleApi.Gax.ModelBase.
  field(:text)
end
defimpl Poison.Decoder, for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3TextInput do
  # Delegates JSON decoding to the generated model module.
  def decode(value, options) do
    GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3TextInput.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowCxV3TextInput do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.297872
| 164
| 0.752716
|
79c42678b635767c6bb7570f8a191caf2d9265f7
| 3,120
|
ex
|
Elixir
|
clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/company.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | null | null | null |
clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/company.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | null | null | null |
clients/android_device_provisioning/lib/google_api/android_device_provisioning/v1/model/company.ex
|
kyleVsteger/elixir-google-api
|
3a0dd498af066a4361b5b0fd66ffc04a57539488
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidDeviceProvisioning.V1.Model.Company do
  @moduledoc """
  A reseller, vendor, or customer in the zero-touch reseller and customer APIs.
  ## Attributes
  * `adminEmails` (*type:* `list(String.t)`, *default:* `nil`) - Optional. Email address of customer's users in the admin role. Each email address must be associated with a Google Account.
  * `companyId` (*type:* `String.t`, *default:* `nil`) - Output only. The ID of the company. Assigned by the server.
  * `companyName` (*type:* `String.t`, *default:* `nil`) - Required. The name of the company. For example _XYZ Corp_. Displayed to the company's employees in the zero-touch enrollment portal.
  * `name` (*type:* `String.t`, *default:* `nil`) - Output only. The API resource name of the company. The resource name is one of the following formats: * `partners/[PARTNER_ID]/customers/[CUSTOMER_ID]` * `partners/[PARTNER_ID]/vendors/[VENDOR_ID]` * `partners/[PARTNER_ID]/vendors/[VENDOR_ID]/customers/[CUSTOMER_ID]` Assigned by the server.
  * `ownerEmails` (*type:* `list(String.t)`, *default:* `nil`) - Required. Input only. Email address of customer's users in the owner role. At least one `owner_email` is required. Each email address must be associated with a Google Account. Owners share the same access as admins but can also add, delete, and edit your organization's portal users.
  * `termsStatus` (*type:* `String.t`, *default:* `nil`) - Output only. Whether any user from the company has accepted the latest Terms of Service (ToS). See TermsStatus.
  """
  # NOTE: generated model code — regenerate instead of editing by hand.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :adminEmails => list(String.t()) | nil,
          :companyId => String.t() | nil,
          :companyName => String.t() | nil,
          :name => String.t() | nil,
          :ownerEmails => list(String.t()) | nil,
          :termsStatus => String.t() | nil
        }

  # List-valued fields need the explicit :list type for (de)serialization.
  field(:adminEmails, type: :list)
  field(:companyId)
  field(:companyName)
  field(:name)
  field(:ownerEmails, type: :list)
  field(:termsStatus)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidDeviceProvisioning.V1.Model.Company do
  # Delegates JSON decoding to the generated model module.
  def decode(value, options) do
    GoogleApi.AndroidDeviceProvisioning.V1.Model.Company.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.AndroidDeviceProvisioning.V1.Model.Company do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 50.322581
| 350
| 0.713141
|
79c44a82329040ac37af1cca5762f26adb0a853a
| 79
|
ex
|
Elixir
|
lib/igwet_web/views/node_view.ex
|
TheSwanFactory/igwet
|
0a450686d1d222eb8e39e23ba5d2ea83657862d1
|
[
"MIT"
] | null | null | null |
lib/igwet_web/views/node_view.ex
|
TheSwanFactory/igwet
|
0a450686d1d222eb8e39e23ba5d2ea83657862d1
|
[
"MIT"
] | 18
|
2018-02-25T11:13:46.000Z
|
2022-03-28T03:43:38.000Z
|
lib/igwet_web/views/node_view.ex
|
TheSwanFactory/igwet
|
0a450686d1d222eb8e39e23ba5d2ea83657862d1
|
[
"MIT"
] | 1
|
2019-01-04T12:16:47.000Z
|
2019-01-04T12:16:47.000Z
|
defmodule IgwetWeb.NodeView do
  @moduledoc """
  View helpers for node templates.
  """
  use IgwetWeb, :view
  # NOTE(review): `Network` is not referenced in this module; it is likely used
  # from the node templates compiled into this view — confirm before removing.
  alias Igwet.Network
end
| 15.8
| 30
| 0.78481
|
79c45f0adec0050be73c26d5923e172bd0d9988d
| 4,881
|
ex
|
Elixir
|
lib/wechat/mini_program/auth.ex
|
hsieh/wechat
|
6a49033d3a35a80358481778a0609f214c340a0c
|
[
"Apache-2.0"
] | null | null | null |
lib/wechat/mini_program/auth.ex
|
hsieh/wechat
|
6a49033d3a35a80358481778a0609f214c340a0c
|
[
"Apache-2.0"
] | null | null | null |
lib/wechat/mini_program/auth.ex
|
hsieh/wechat
|
6a49033d3a35a80358481778a0609f214c340a0c
|
[
"Apache-2.0"
] | null | null | null |
defmodule WeChat.MiniProgram.Auth do
  @moduledoc """
  Mini Program - auth APIs.
  """
  import WeChat.Utils, only: [doc_link_prefix: 0]
  alias WeChat.{Utils, ServerMessage.Encryptor, Storage.Cache}

  @doc_link "#{doc_link_prefix()}/miniprogram/dev/api-backend/open-api"
  @open_ability_doc_link "#{doc_link_prefix()}/miniprogram/dev/framework/open-ability"

  @doc """
  Server-side retrieval of open data -
  [Official docs](#{@open_ability_doc_link}/signature.html){:target="_blank"}
  [Mini Program login](#{@open_ability_doc_link}/login.html)
  """
  @spec decode_user_info(
          session_key :: String.t(),
          raw_data :: String.t(),
          signature :: String.t()
        ) :: {:ok, map()} | {:error, String.t()}
  def decode_user_info(session_key, raw_data, signature) do
    # The signature must equal SHA1(raw_data <> session_key); only then is
    # the raw JSON payload decoded and returned.
    case Utils.sha1(raw_data <> session_key) do
      ^signature ->
        Jason.decode(raw_data)

      _ ->
        {:error, "invalid"}
    end
  end

  @doc """
  Server-side retrieval of open data - including sensitive data -
  [Official docs](#{@open_ability_doc_link}/signature.html){:target="_blank"}
  * [Mini Program login](#{@open_ability_doc_link}/login.html)
  * [Encrypted data decryption algorithm](#{@open_ability_doc_link}/signature.html#加密数据解密算法)
  """
  @spec decode_get_user_sensitive_info(
          session_key :: String.t(),
          encrypted_data :: String.t(),
          iv :: String.t()
        ) :: {:ok, map()} | :error | {:error, any()}
  def decode_get_user_sensitive_info(session_key, encrypted_data, iv) do
    # All three inputs arrive Base64-encoded; after decoding, the payload is
    # AES-128-CBC decrypted, PKCS#7 padding is stripped, then JSON-decoded.
    with {:ok, session_key} <- Base.decode64(session_key),
         {:ok, iv} <- Base.decode64(iv),
         {:ok, encrypted_data} <- Base.decode64(encrypted_data) do
      :crypto.crypto_one_time(:aes_128_cbc, session_key, iv, encrypted_data, false)
      |> Encryptor.decode_padding_with_pkcs7()
      |> Jason.decode()
    end
  end

  @doc """
  Mini Program login
  Official docs:
  * [Mini Program](#{@doc_link}/login/auth.code2Session.html){:target="_blank"}
  * [Component](#{doc_link_prefix()}/doc/oplatform/Third-party_Platforms/Mini_Programs/WeChat_login.html){:target="_blank"}
  """
  @spec code2session(WeChat.client(), code :: String.t()) :: WeChat.response()
  def code2session(client, code) do
    # Clients managed by a third-party platform authenticate with the
    # component's access token instead of this app's own secret.
    if client.by_component?() do
      component_appid = client.component_appid()

      client.get("/sns/component/jscode2session",
        query: [
          appid: client.appid(),
          js_code: code,
          grant_type: "authorization_code",
          component_appid: component_appid,
          component_access_token: Cache.get_cache(component_appid, :component_access_token)
        ]
      )
    else
      client.get("/sns/jscode2session",
        query: [
          appid: client.appid(),
          secret: client.appsecret(),
          js_code: code,
          grant_type: "authorization_code"
        ]
      )
    end
  end

  @doc """
  Get the user's `UnionId` after payment -
  [Official docs](#{@doc_link}/user-info/auth.getPaidUnionId.html){:target="_blank"}
  After the user completes a payment, the user's `UnionId` can be fetched
  without requiring user authorization.
  This API supports proxy queries by third-party platforms.
  **Note: the user must have completed payment first, and the call is only
  valid within five minutes after payment**
  """
  @spec get_paid_unionid(WeChat.client(), WeChat.openid()) :: WeChat.response()
  def get_paid_unionid(client, openid) do
    client.get("/wxa/getpaidunionid",
      query: [
        openid: openid,
        access_token: client.get_access_token()
      ]
    )
  end

  @doc """
  Get the user's `UnionId` after payment - by WeChat Pay order number
  (`transaction_id`) -
  [Official docs](#{@doc_link}/user-info/auth.getPaidUnionId.html){:target="_blank"}
  After the user completes a payment, the user's `UnionId` can be fetched
  without requiring user authorization.
  This API supports proxy queries by third-party platforms.
  **Note: the user must have completed payment first, and the call is only
  valid within five minutes after payment**
  """
  @spec get_paid_unionid(WeChat.client(), WeChat.openid(), transaction_id :: String.t()) ::
          WeChat.response()
  def get_paid_unionid(client, openid, transaction_id) do
    client.get("/wxa/getpaidunionid",
      query: [
        openid: openid,
        transaction_id: transaction_id,
        access_token: client.get_access_token()
      ]
    )
  end

  @doc """
  Get the user's `UnionId` after payment - by WeChat Pay merchant order number
  and merchant id (`out_trade_no` and `mch_id`) -
  [Official docs](#{@doc_link}/user-info/auth.getPaidUnionId.html){:target="_blank"}
  After the user completes a payment, the user's `UnionId` can be fetched
  without requiring user authorization.
  This API supports proxy queries by third-party platforms.
  **Note: the user must have completed payment first, and the call is only
  valid within five minutes after payment**
  """
  @spec get_paid_unionid(
          WeChat.client(),
          WeChat.openid(),
          mch_id :: String.t(),
          out_trade_no :: String.t()
        ) :: WeChat.response()
  def get_paid_unionid(client, openid, mch_id, out_trade_no) do
    client.get("/wxa/getpaidunionid",
      query: [
        openid: openid,
        mch_id: mch_id,
        out_trade_no: out_trade_no,
        access_token: client.get_access_token()
      ]
    )
  end

  @doc """
  Get AccessToken -
  [Official docs](#{@doc_link}/access-token/auth.getAccessToken.html){:target="_blank"}
  """
  @spec get_access_token(WeChat.client()) :: WeChat.response()
  def get_access_token(client) do
    client.get("/cgi-bin/token",
      query: [
        grant_type: "client_credential",
        appid: client.appid(),
        secret: client.appsecret()
      ]
    )
  end
end
| 28.54386
| 125
| 0.633682
|
79c46fef9ab23d1a4c7bfb406bc784f6cbfd4696
| 502
|
exs
|
Elixir
|
priv/repo/migrations/20180930021722_recreate_videos_comments_replies.exs
|
DavidAlphaFox/coderplanets_server
|
3fd47bf3bba6cc04c9a34698201a60ad2f3e8254
|
[
"Apache-2.0"
] | 1
|
2019-05-07T15:03:54.000Z
|
2019-05-07T15:03:54.000Z
|
priv/repo/migrations/20180930021722_recreate_videos_comments_replies.exs
|
DavidAlphaFox/coderplanets_server
|
3fd47bf3bba6cc04c9a34698201a60ad2f3e8254
|
[
"Apache-2.0"
] | null | null | null |
priv/repo/migrations/20180930021722_recreate_videos_comments_replies.exs
|
DavidAlphaFox/coderplanets_server
|
3fd47bf3bba6cc04c9a34698201a60ad2f3e8254
|
[
"Apache-2.0"
] | null | null | null |
defmodule MastaniServer.Repo.Migrations.RecreateVideosCommentsReplies do
  use Ecto.Migration

  # Join table linking a video comment to its reply comments; both foreign
  # keys cascade-delete with the referenced comment row.
  def change do
    create table(:videos_comments_replies) do
      add(:video_comment_id, references(:videos_comments, on_delete: :delete_all), null: false)
      add(:reply_id, references(:videos_comments, on_delete: :delete_all), null: false)
      timestamps()
    end

    # Indexes support lookups from either side of the association.
    create(index(:videos_comments_replies, [:video_comment_id]))
    create(index(:videos_comments_replies, [:reply_id]))
  end
end
| 31.375
| 95
| 0.750996
|
79c490ed7ea12aff987a88000287683d21ff71ad
| 557
|
exs
|
Elixir
|
programming_elixir/reduce.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
programming_elixir/reduce.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
programming_elixir/reduce.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
defmodule MyList do
  @moduledoc """
  Hand-rolled list operations: a left fold, map-then-sum, maximum, and a
  Caesar cipher over lowercase charlists.
  """

  @doc "Folds `fun` over `list` left-to-right, threading `acc` through."
  def reduce([], acc, _fun), do: acc
  def reduce([head | rest], acc, fun), do: reduce(rest, fun.(head, acc), fun)

  @doc "Applies `fun` to each element and sums the results (0 for `[]`)."
  def mapsum([], _fun), do: 0
  def mapsum([head | rest], fun), do: fun.(head) + mapsum(rest, fun)

  @doc "Largest element of a non-empty list (raises FunctionClauseError on `[]`)."
  def max([only]), do: only
  def max([head | rest]), do: Kernel.max(head, max(rest))

  @doc "Caesar-shifts a lowercase charlist by `n`, wrapping past ?z (122)."
  def caesar([], _n), do: []
  def caesar([ch | rest], n) when ch + n <= ?z, do: [ch + n | caesar(rest, n)]
  def caesar([ch | rest], n), do: [ch + n - 26 | caesar(rest, n)]
end
| 24.217391
| 85
| 0.567325
|
79c4a06e2dce0e4e2a2bf23e4e5f7e830f5ca2af
| 763
|
ex
|
Elixir
|
gen/envoy/type/percent.pb.ex
|
aclemmensen/relay
|
4bce71ed7d8bd4936f96d62ed08d007729c4253d
|
[
"BSD-3-Clause"
] | 5
|
2018-10-12T13:13:19.000Z
|
2020-10-03T17:51:37.000Z
|
gen/envoy/type/percent.pb.ex
|
aclemmensen/relay
|
4bce71ed7d8bd4936f96d62ed08d007729c4253d
|
[
"BSD-3-Clause"
] | 207
|
2018-02-09T14:24:14.000Z
|
2020-07-25T11:09:19.000Z
|
gen/envoy/type/percent.pb.ex
|
aclemmensen/relay
|
4bce71ed7d8bd4936f96d62ed08d007729c4253d
|
[
"BSD-3-Clause"
] | 1
|
2019-08-08T11:30:59.000Z
|
2019-08-08T11:30:59.000Z
|
defmodule Envoy.Type.Percent do
  @moduledoc false
  # Generated protobuf message: a double-precision percentage value.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          value: float
        }
  defstruct [:value]

  field :value, 1, type: :double
end
defmodule Envoy.Type.FractionalPercent do
  @moduledoc false
  # Generated protobuf message: numerator over a DenominatorType enum value.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          numerator: non_neg_integer,
          denominator: integer
        }
  defstruct [:numerator, :denominator]

  field :numerator, 1, type: :uint32
  field :denominator, 2, type: Envoy.Type.FractionalPercent.DenominatorType, enum: true
end
defmodule Envoy.Type.FractionalPercent.DenominatorType do
  @moduledoc false
  # Generated protobuf enum: the scale the numerator is expressed against.
  use Protobuf, enum: true, syntax: :proto3

  field :HUNDRED, 0
  field :TEN_THOUSAND, 1
  field :MILLION, 2
end
| 21.8
| 87
| 0.693316
|
79c4ba9253d83dea3b8788319186e7794f9159ef
| 2,371
|
ex
|
Elixir
|
lib/sitemap/funcs.ex
|
x-ji/sitemap
|
8d2b3e50117fb562a6927b8742ec415125dbe2f2
|
[
"MIT"
] | null | null | null |
lib/sitemap/funcs.ex
|
x-ji/sitemap
|
8d2b3e50117fb562a6927b8742ec415125dbe2f2
|
[
"MIT"
] | null | null | null |
lib/sitemap/funcs.ex
|
x-ji/sitemap
|
8d2b3e50117fb562a6927b8742ec415125dbe2f2
|
[
"MIT"
] | null | null | null |
defmodule Sitemap.Funcs do
  # Formats the six date/time components as an ISO 8601 UTC timestamp string.
  def iso8601(yy, mm, dd, hh, mi, ss) do
    "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ"
    |> :io_lib.format([yy, mm, dd, hh, mi, ss])
    |> IO.iodata_to_binary
  end

  # Current UTC time as an ISO 8601 string.
  def iso8601 do
    {{yy, mm, dd}, {hh, mi, ss}} = :calendar.universal_time
    iso8601(yy, mm, dd, hh, mi, ss)
  end

  # Erlang-style {{y, m, d}, {h, mi, s}} tuple.
  def iso8601({{yy, mm, dd}, {hh, mi, ss}}) do
    iso8601(yy, mm, dd, hh, mi, ss)
  end

  # These clauses are only compiled when the calendar modules are available
  # (compatibility with older Elixir releases).
  if Code.ensure_loaded?(NaiveDateTime) do
    def iso8601(%NaiveDateTime{} = dt) do
      dt
      |> NaiveDateTime.to_erl
      |> iso8601()
    end
    def iso8601(%DateTime{} = dt) do
      DateTime.to_iso8601 dt
    end
  end

  def iso8601(%Date{} = dt) do
    Date.to_iso8601 dt
  end

  # Legacy Ecto date types, compiled only when Ecto is a dependency.
  # NOTE(review): Ecto.DateTime/Ecto.Date were removed in Ecto 3 — confirm
  # these clauses still compile against the project's Ecto version.
  if Code.ensure_loaded?(Ecto) do
    def iso8601(%Ecto.DateTime{} = dt) do
      dt
      |> Ecto.DateTime.to_erl
      |> iso8601()
    end
    def iso8601(%Ecto.Date{} = dt) do
      Ecto.Date.to_iso8601 dt
    end
  end

  # Fallback: anything unrecognized passes through unchanged.
  def iso8601(dt), do: dt

  # Recursively filters out nil entries and tuples whose third element is
  # falsy; nested lists are kept when their filtered result is non-empty
  # (a non-empty list is truthy inside Enum.filter).
  def eraser(elements) do
    Enum.filter elements, fn elm ->
      case elm do
        e when is_list(e) -> eraser(e)
        nil -> false
        _ -> !!elem(elm, 2)
      end
    end
  end

  # Only `false` maps to "no"; any other value (including nil) maps to "yes".
  def yes_no(bool) do
    if bool == false, do: "no", else: "yes"
  end

  # Only `false` maps to "deny"; any other value maps to "allow".
  def allow_deny(bool) do
    if bool == false, do: "deny", else: "allow"
  end

  # Truthiness-based: nil/false -> "ap=0", anything else -> "ap=1".
  def autoplay(bool) do
    if bool, do: "ap=1", else: "ap=0"
  end

  # Reads an env var and coerces "true"/"false" to booleans and numeric
  # strings to integers; anything else (including nil) is returned as-is.
  def getenv(key) do
    x = System.get_env(key)
    cond do
      x == "false" -> false
      x == "true" -> true
      is_numeric(x) ->
        {num, _} = Integer.parse(x)
        num
      true -> x
    end
  end

  # Returns the first non-nil element of `opts`, or "" when none exists.
  def nil_or(opts), do: nil_or(opts, "")
  def nil_or([], value), do: value
  def nil_or([h|t], _value) do
    case h do
      v when is_nil(v) -> nil_or(t, "")
      v -> nil_or([], v)
    end
  end

  # True when the whole string parses as a float (Integer strings also pass).
  def is_numeric(str) when is_nil(str), do: false
  def is_numeric(str) do
    case Float.parse(str) do
      {_num, ""} -> true
      {_num, _r} -> false
      :error -> false
    end
  end

  # Merges two URLs component-wise: every component present in `dest`
  # overrides the corresponding component of `src`.
  def urljoin(src, dest) do
    {s, d} = {URI.parse(src), URI.parse(dest)}
    to_string struct(s, [
      host: d.host || s.host,
      path: d.path || s.path,
      port: d.port || s.port,
      query: d.query || s.query,
      scheme: d.scheme || s.scheme,
      userinfo: d.userinfo || s.userinfo,
      fragment: d.fragment || s.fragment,
      authority: d.authority || s.authority,
    ])
  end
end
| 22.580952
| 60
| 0.557149
|
79c4cbd02aa9872197949579ec26747aafefb692
| 1,488
|
ex
|
Elixir
|
elixir-guide/mix-and-otp/kv_umbrella/apps/kv_server/lib/kv_server.ex
|
Cate-Lukner/cate-lukner-internship
|
43e8b467287ea3a7955e23f18180cb4f849e6620
|
[
"MIT"
] | null | null | null |
elixir-guide/mix-and-otp/kv_umbrella/apps/kv_server/lib/kv_server.ex
|
Cate-Lukner/cate-lukner-internship
|
43e8b467287ea3a7955e23f18180cb4f849e6620
|
[
"MIT"
] | 8
|
2020-05-18T14:43:21.000Z
|
2020-06-03T16:07:37.000Z
|
elixir-guide/mix-and-otp/kv_umbrella/apps/kv_server/lib/kv_server.ex
|
lowlandresearch/cate-lukner-internship
|
71fff3bcd2d44905357c99dbff1b1f572f5bcc6f
|
[
"MIT"
] | 1
|
2020-05-18T14:44:13.000Z
|
2020-05-18T14:44:13.000Z
|
defmodule KVServer do
  require Logger

  @doc """
  Starts accepting connections on the given `port`.
  """
  def accept(port) do
    # `packet: :line` delivers one line per recv; `active: false` makes
    # reads blocking; `reuseaddr: true` allows fast restarts on the port.
    {:ok, socket} =
      :gen_tcp.listen(
        port,
        [:binary, packet: :line, active: false, reuseaddr: true]
      )
    Logger.info("Accepting connections on port #{port}")
    loop_acceptor(socket)
  end

  # Blocks until a client connects, serves it in a supervised task, and
  # transfers socket ownership to that task so its crashes don't leak here.
  defp loop_acceptor(socket) do
    {:ok, client} = :gen_tcp.accept(socket)
    {:ok, pid} = Task.Supervisor.start_child(KVServer.TaskSupervisor, fn -> serve(client) end)
    :ok = :gen_tcp.controlling_process(client, pid)
    loop_acceptor(socket)
  end

  # Per-client loop: read one line, parse and run it as a command; `msg` is
  # either a command result or an `{:error, _}` passed through by `with`.
  defp serve(socket) do
    msg =
      with {:ok, data} <- read_line(socket),
           {:ok, command} <- KVServer.Command.parse(data),
           do: KVServer.Command.run(command)
    write_line(socket, msg)
    serve(socket)
  end

  defp read_line(socket) do
    :gen_tcp.recv(socket, 0)
  end

  # One clause per outcome shape; the reply protocol is line-oriented.
  defp write_line(socket, {:ok, text}) do
    :gen_tcp.send(socket, text)
  end

  defp write_line(socket, {:error, :unknown_command}) do
    :gen_tcp.send(socket, "UNKNOWN COMMAND\r\n")
  end

  defp write_line(socket, {:error, :not_found}) do
    :gen_tcp.send(socket, "NOT FOUND\r\n")
  end

  defp write_line(_socket, {:error, :closed}) do
    # The connection was closed, exit politely
    exit(:shutdown)
  end

  defp write_line(socket, {:error, error}) do
    # Unknown error; write to the client and exit
    :gen_tcp.send(socket, "ERROR\r\n")
    exit(error)
  end
end
| 24
| 94
| 0.645833
|
79c4f65364ebaf7349d70dcb9efaa975cc045e37
| 1,168
|
ex
|
Elixir
|
lib/abbr/rpc/local.ex
|
elvanja/abbr
|
b09954df2f68f71c03f308b01927f032bf692ac4
|
[
"MIT"
] | 14
|
2020-03-25T22:10:42.000Z
|
2021-02-04T01:31:40.000Z
|
lib/abbr/rpc/local.ex
|
elvanja/abbr
|
b09954df2f68f71c03f308b01927f032bf692ac4
|
[
"MIT"
] | null | null | null |
lib/abbr/rpc/local.ex
|
elvanja/abbr
|
b09954df2f68f71c03f308b01927f032bf692ac4
|
[
"MIT"
] | null | null | null |
defmodule Abbr.Rpc.Local do
  @moduledoc """
  Caches shortened and original URLs on local node.
  """
  alias Abbr.Url
  alias Abbr.Util.ETSTableManager

  use ETSTableManager
  use GenServer

  # Public named ETS set so lookups can run in the caller's process.
  @impl ETSTableManager
  def table_definition, do: {__MODULE__, [:set, :public, :named_table]}

  @impl ETSTableManager
  def on_receive_table(table, _state), do: table

  # Returns the cached Url for `short`, or nil on a cache miss.
  @spec lookup(Url.short()) :: Url.t() | nil
  def lookup(short) when is_binary(short) do
    case :ets.lookup(__MODULE__, short) do
      [{^short, original}] -> %Url{short: short, original: original}
      [] -> nil
    end
  end

  # Inserts (or overwrites) the short -> original mapping.
  @spec save(Url.t()) :: :ok | :error
  def save(%Url{short: short, original: original}) do
    true = :ets.insert(__MODULE__, {short, original})
    :ok
  end

  # Dumps the whole table, e.g. for syncing with another node.
  @spec export :: list(any())
  def export, do: :ets.tab2list(__MODULE__)

  # Bulk-inserts entries previously produced by export/0.
  @spec merge(list(any())) :: :ok
  def merge(list) do
    true = :ets.insert(__MODULE__, list)
    :ok
  end

  @spec start_link([any()]) :: {:ok, pid()}
  def start_link(opts) do
    {:ok, _} = GenServer.start_link(__MODULE__, :ok, [{:name, __MODULE__} | opts])
  end

  # The GenServer holds no state of its own; presumably it exists so
  # ETSTableManager has a process to hand the table to — confirm in that module.
  @impl GenServer
  def init(:ok) do
    {:ok, nil}
  end
end
| 22.901961
| 82
| 0.642979
|
79c532e3fd0f8eb963ef61790af8b55167dd9805
| 2,624
|
ex
|
Elixir
|
year_2020/lib/day_11/seats.ex
|
bschmeck/advent_of_code
|
cbec98019c6c00444e0f4c7e15e01b1ed9ae6145
|
[
"MIT"
] | null | null | null |
year_2020/lib/day_11/seats.ex
|
bschmeck/advent_of_code
|
cbec98019c6c00444e0f4c7e15e01b1ed9ae6145
|
[
"MIT"
] | null | null | null |
year_2020/lib/day_11/seats.ex
|
bschmeck/advent_of_code
|
cbec98019c6c00444e0f4c7e15e01b1ed9ae6145
|
[
"MIT"
] | null | null | null |
defmodule Day11.Seats do
  # Grid is a map of {row, col} => "." (floor) | "L" (empty) | "#" (occupied).
  defstruct [:grid]

  # Builds a Seats struct from a list of row strings.
  def new(input), do: %__MODULE__{grid: gridify(input, 0, %{})}

  # Repeatedly advances the grid until one step produces no change.
  def stabilize(%__MODULE__{grid: grid}, opts) do
    case advance(grid, opts) do
      {:stable, grid} -> %__MODULE__{grid: grid}
      {:change, grid} -> stabilize(%__MODULE__{grid: grid}, opts)
    end
  end

  # NOTE(review): despite the name, this counts "#" cells, i.e. *occupied*
  # seats — confirm whether the name or the predicate is intended.
  def count_empty_seats(%__MODULE__{grid: grid}) do
    grid
    |> Map.values()
    |> Enum.count(fn a -> a == "#" end)
  end

  # Converts row strings into the {row, col} => cell map, one row at a time.
  defp gridify([], _, grid), do: grid
  defp gridify([line | rest], row, grid) do
    grid =
      line
      |> String.split("", trim: true)
      |> Enum.with_index()
      |> Enum.into(grid, fn {char, col} -> {{row, col}, char} end)
    gridify(rest, row + 1, grid)
  end

  # One simulation step: compute every cell's next value against the *old*
  # grid, then rebuild, tagging the result :stable or :change.
  defp advance(grid, opts) do
    grid
    |> Enum.map(fn entry -> new_value(entry, grid, opts) end)
    |> Enum.reduce({:stable, %{}}, fn
      {pos, val, val}, {outcome, grid} -> {outcome, Map.put(grid, pos, val)}
      {pos, _old_val, new_val}, {_, grid} -> {:change, Map.put(grid, pos, new_val)}
    end)
  end

  # Returns {pos, old, new}. Floor never changes; an empty seat fills when no
  # neighbor is occupied; an occupied seat empties at `threshold` neighbors.
  def new_value({pos, "."}, _grid, _opts), do: {pos, ".", "."}
  def new_value({pos, "L"}, grid, ignore_floor: ignore_floor, threshold: _) do
    pos
    |> adjacent(grid, ignore_floor)
    |> Enum.count(fn seat -> seat == "#" end)
    |> case do
      0 -> {pos, "L", "#"}
      _ -> {pos, "L", "L"}
    end
  end
  def new_value({pos, "#"}, grid, ignore_floor: ignore_floor, threshold: threshold) do
    pos
    |> adjacent(grid, ignore_floor)
    |> Enum.count(fn seat -> seat == "#" end)
    |> case do
      x when x >= threshold -> {pos, "#", "L"}
      _ -> {pos, "#", "#"}
    end
  end

  # The eight directions around {x, y}, resolved to the first visible seat.
  def adjacent({x, y}, grid, ignore_floor) do
    for i <- -1..1 do
      for j <- -1..1, do: {i, j}
    end
    |> Enum.concat()
    |> Enum.reject(fn
      {0, 0} -> true
      _ -> false
    end)
    |> Enum.map(&seat_in_dir(&1, {x, y}, 1, grid, ignore_floor))
  end

  # ignore_floor=false: look exactly one step; off-grid counts as "L".
  def seat_in_dir({dx, dy}, {x, y}, i, grid, false) do
    pos = {x + dx * i, y + dy * i}
    Map.get(grid, pos, "L")
  end
  # ignore_floor=true: skip over floor cells until a seat or the edge.
  def seat_in_dir({dx, dy}, {x, y}, i, grid, true) do
    pos = {x + dx * i, y + dy * i}
    case Map.get(grid, pos, "L") do
      "." -> seat_in_dir({dx, dy}, {x, y}, i + 1, grid, true)
      seat -> seat
    end
  end
end
defimpl String.Chars, for: Day11.Seats do
  # Renders the grid row-by-row, one character per cell, rows joined by "\n".
  # The original hard-coded a 10x10 extent (`0..9`); derive the extent from
  # the stored coordinates instead so any grid size prints correctly.
  def to_string(%Day11.Seats{grid: grid}) do
    {max_row, max_col} =
      grid
      |> Map.keys()
      |> Enum.reduce({0, 0}, fn {row, col}, {mr, mc} ->
        {max(row, mr), max(col, mc)}
      end)

    Enum.map_join(0..max_row, "\n", fn row ->
      Enum.map_join(0..max_col, "", fn col -> Map.get(grid, {row, col}) end)
    end)
  end
end
defimpl Inspect, for: Day11.Seats do
  # Show the rendered grid in IEx/inspect output instead of the struct fields.
  def inspect(seats, _), do: to_string(seats)
end
| 24.990476
| 86
| 0.538491
|
79c54610b75cb4772e826a677b9e58e720ea14b0
| 4,745
|
ex
|
Elixir
|
lib/strategy/strategy.ex
|
polyglot-concurrency/libcluster
|
1a0640f2b39adc430a121add01ab2ae0a4ee35b2
|
[
"MIT"
] | 1,571
|
2016-09-30T20:38:30.000Z
|
2022-03-31T15:06:38.000Z
|
lib/strategy/strategy.ex
|
polyglot-concurrency/libcluster
|
1a0640f2b39adc430a121add01ab2ae0a4ee35b2
|
[
"MIT"
] | 140
|
2016-10-11T10:03:06.000Z
|
2022-03-18T21:06:32.000Z
|
lib/strategy/strategy.ex
|
polyglot-concurrency/libcluster
|
1a0640f2b39adc430a121add01ab2ae0a4ee35b2
|
[
"MIT"
] | 167
|
2016-10-24T09:47:16.000Z
|
2022-03-15T11:59:50.000Z
|
defmodule Cluster.Strategy do
@moduledoc """
This module defines the behaviour for implementing clustering strategies.
"""
defmacro __using__(_) do
quote do
@behaviour Cluster.Strategy
@impl true
def child_spec(args) do
%{id: __MODULE__, type: :worker, start: {__MODULE__, :start_link, [args]}}
end
defoverridable child_spec: 1
end
end
@type topology :: atom
@type bad_nodes :: [{node, reason :: term}]
@type mfa_tuple :: {module, atom, [term]}
@type strategy_args :: [Cluster.Strategy.State.t()]
# Required for supervision of the strategy
@callback child_spec(strategy_args) :: Supervisor.child_spec()
# Starts the strategy
@callback start_link(strategy_args) :: {:ok, pid} | :ignore | {:error, reason :: term}
@doc """
Given a list of node names, attempts to connect to all of them.
Returns `:ok` if all nodes connected, or `{:error, [{node, reason}, ..]}`
if we failed to connect to some nodes.
All failures are logged.
"""
@spec connect_nodes(topology, mfa_tuple, mfa_tuple, [atom()]) :: :ok | {:error, bad_nodes}
def connect_nodes(topology, {_, _, _} = connect, {_, _, _} = list_nodes, nodes)
when is_list(nodes) do
{connect_mod, connect_fun, connect_args} = connect
{list_mod, list_fun, list_args} = list_nodes
ensure_exported!(list_mod, list_fun, length(list_args))
current_node = Node.self()
need_connect =
nodes
|> difference(apply(list_mod, list_fun, list_args))
|> Enum.reject(fn n -> current_node == n end)
bad_nodes =
Enum.reduce(need_connect, [], fn n, acc ->
fargs = connect_args ++ [n]
ensure_exported!(connect_mod, connect_fun, length(fargs))
case apply(connect_mod, connect_fun, fargs) do
true ->
Cluster.Logger.info(topology, "connected to #{inspect(n)}")
acc
false ->
Cluster.Logger.warn(topology, "unable to connect to #{inspect(n)}")
[{n, false} | acc]
:ignored ->
Cluster.Logger.warn(
topology,
"unable to connect to #{inspect(n)}: not part of network"
)
[{n, :ignored} | acc]
end
end)
case bad_nodes do
[] -> :ok
_ -> {:error, bad_nodes}
end
end
@doc """
Given a list of node names, attempts to disconnect from all of them.
Returns `:ok` if all nodes disconnected, or `{:error, [{node, reason}, ..]}`
if we failed to disconnect from some nodes.
All failures are logged.
"""
@spec disconnect_nodes(topology, mfa_tuple, mfa_tuple, [atom()]) :: :ok | {:error, bad_nodes}
def disconnect_nodes(topology, {_, _, _} = disconnect, {_, _, _} = list_nodes, nodes)
    when is_list(nodes) do
  # Mirrors connect_nodes/4: `disconnect` and `list_nodes` are configured
  # MFA tuples, validated before being applied.
  {disconnect_mod, disconnect_fun, disconnect_args} = disconnect
  {list_mod, list_fun, list_args} = list_nodes
  ensure_exported!(list_mod, list_fun, length(list_args))
  current_node = Node.self()

  # Only disconnect nodes we are actually connected to, never ourselves.
  need_disconnect =
    nodes
    |> intersection(apply(list_mod, list_fun, list_args))
    |> Enum.reject(fn n -> current_node == n end)

  bad_nodes =
    Enum.reduce(need_disconnect, [], fn n, acc ->
      fargs = disconnect_args ++ [n]
      ensure_exported!(disconnect_mod, disconnect_fun, length(fargs))
      case apply(disconnect_mod, disconnect_fun, fargs) do
        true ->
          Cluster.Logger.info(topology, "disconnected from #{inspect(n)}")
          acc
        false ->
          # Already disconnected: logged but NOT treated as a failure.
          Cluster.Logger.warn(
            topology,
            "disconnect from #{inspect(n)} failed because we're already disconnected"
          )
          acc
        :ignored ->
          # Node not part of the network: also logged but not a failure.
          Cluster.Logger.warn(
            topology,
            "disconnect from #{inspect(n)} failed because it is not part of the network"
          )
          acc
        reason ->
          # Any other return value is reported back to the caller.
          Cluster.Logger.warn(
            topology,
            "disconnect from #{inspect(n)} failed with: #{inspect(reason)}"
          )
          [{n, reason} | acc]
      end
    end)

  case bad_nodes do
    [] -> :ok
    _ -> {:error, bad_nodes}
  end
end
# Set-style helpers over plain node lists.
#
# NOTE: the empty-list fast paths return lists while the general clauses
# return MapSets; callers here only ever enumerate the result, so both
# shapes are interchangeable.
def intersection(_left, []), do: []
def intersection([], _right), do: []

def intersection(left, right) when is_list(left) and is_list(right) do
  MapSet.intersection(MapSet.new(left), MapSet.new(right))
end

def difference(left, []), do: left
def difference([], _right), do: []

def difference(left, right) when is_list(left) and is_list(right) do
  MapSet.difference(MapSet.new(left), MapSet.new(right))
end
# Raises if `mod.fun/arity` is not exported, so misconfigured MFA tuples
# fail fast with a clear message instead of an UndefinedFunctionError
# deep inside apply/3. Returns nil when the function exists.
#
# `unless` is soft-deprecated in modern Elixir; `if not` is equivalent.
defp ensure_exported!(mod, fun, arity) do
  if not function_exported?(mod, fun, arity) do
    raise "#{mod}.#{fun}/#{arity} is undefined!"
  end
end
end
| 29.65625
| 95
| 0.597682
|
79c54cdff1fe38313695ccfe9526b1c79482ce73
| 926
|
ex
|
Elixir
|
lib/newt.ex
|
jlauman/newt
|
ecbd7cfd098d616a8af15270828176e1197af3cf
|
[
"MIT"
] | null | null | null |
lib/newt.ex
|
jlauman/newt
|
ecbd7cfd098d616a8af15270828176e1197af3cf
|
[
"MIT"
] | null | null | null |
lib/newt.ex
|
jlauman/newt
|
ecbd7cfd098d616a8af15270828176e1197af3cf
|
[
"MIT"
] | null | null | null |
defmodule Newt do
  @moduledoc """
  OTP application entry point. Boots a Cowboy HTTP listener on port 8080
  serving `Newt.Router`, a static favicon, and a `/ws` websocket route.
  """
  use Application

  # See http://elixir-lang.org/docs/stable/elixir/Application.html
  # for more information on OTP Applications
  def start(_type, _args) do
    import Supervisor.Spec, warn: false

    children = [
      # FIX: the original passed `dispatch: dispatch` — a parenless
      # zero-arity local call, which is deprecated/ambiguous syntax and
      # warns (or errors) on modern Elixir. It must be `dispatch()`.
      Plug.Adapters.Cowboy.child_spec(:http, Newt.Router, [], dispatch: dispatch(), port: 8080)
    ]

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Newt.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Cowboy dispatch table: favicon served from the :newt priv dir, /ws
  # handled by the websocket handler, everything else falls through to
  # the Plug router.
  defp dispatch do
    [
      {:_, [
        {"/favicon.ico", :cowboy_static, {:priv_file, :newt, "favicon.ico"}},
        {"/ws", Newt.WebSocketHandler, []},
        {:_, Plug.Adapters.Cowboy.Handler, {Newt.Router, []}}
      ]}
    ]
  end
end
| 28.9375
| 93
| 0.653348
|
79c5780534c30df2f1ddff7e4ea97da3a2665e99
| 838
|
ex
|
Elixir
|
lib/pkcs7_verify_api/web/endpoint.ex
|
edenlabllc/pkcs7_verify.api
|
ec2cfcd4892babae345ac485b833455773f1222b
|
[
"MIT"
] | 1
|
2020-09-25T10:10:16.000Z
|
2020-09-25T10:10:16.000Z
|
lib/pkcs7_verify_api/web/endpoint.ex
|
Nebo15/pkcs7_verify.api
|
ec2cfcd4892babae345ac485b833455773f1222b
|
[
"MIT"
] | null | null | null |
lib/pkcs7_verify_api/web/endpoint.ex
|
Nebo15/pkcs7_verify.api
|
ec2cfcd4892babae345ac485b833455773f1222b
|
[
"MIT"
] | 1
|
2018-01-16T14:37:16.000Z
|
2018-01-16T14:37:16.000Z
|
defmodule PKCS7Verify.Web.Endpoint do
  @moduledoc """
  Phoenix Endpoint for pkcs7_verify_api application.
  """
  use Phoenix.Endpoint, otp_app: :pkcs7_verify_api

  # Request pipeline — plug order matters: tag each request with an ID,
  # apply idempotency handling, log, wrap responses in the EView envelope,
  # parse JSON bodies, then route.
  plug Plug.RequestId
  plug EView.Plugs.Idempotency
  plug Plug.Logger
  plug EView

  plug Plug.Parsers,
    parsers: [:json],
    pass: ["application/json"],
    json_decoder: Poison

  plug Plug.MethodOverride
  plug Plug.Head
  plug PKCS7Verify.Web.Router

  @doc """
  Dynamically loads configuration from the system environment
  on startup.
  It receives the endpoint configuration from the config files
  and must return the updated configuration.
  """
  def load_from_system_env(config) do
    # Confex resolves {:system, ...} tuples against OS env vars.
    config = Confex.process_env(config)

    # Fail fast when the secret is missing rather than booting insecurely.
    unless config[:secret_key_base] do
      raise "Set SECRET_KEY environment variable!"
    end

    {:ok, config}
  end
end
| 20.95
| 62
| 0.727924
|
79c594f83e4b3648e043c31dd10c5bf6d5e3b77c
| 632
|
ex
|
Elixir
|
lib/hexpm/repository/release_metadata.ex
|
Benjamin-Philip/hexpm
|
6f38244f81bbabd234c660f46ea973849ba77a7f
|
[
"Apache-2.0"
] | 691
|
2017-03-08T09:15:45.000Z
|
2022-03-23T22:04:47.000Z
|
lib/hexpm/repository/release_metadata.ex
|
Benjamin-Philip/hexpm
|
6f38244f81bbabd234c660f46ea973849ba77a7f
|
[
"Apache-2.0"
] | 491
|
2017-03-07T12:58:42.000Z
|
2022-03-29T23:32:54.000Z
|
lib/hexpm/repository/release_metadata.ex
|
Benjamin-Philip/hexpm
|
6f38244f81bbabd234c660f46ea973849ba77a7f
|
[
"Apache-2.0"
] | 200
|
2017-03-12T23:03:39.000Z
|
2022-03-05T17:55:52.000Z
|
defmodule Hexpm.Repository.ReleaseMetadata do
  @moduledoc false

  use Hexpm.Schema

  @derive HexpmWeb.Stale

  # Embedded schema for per-release package metadata.
  # `files` is virtual: it is validated on input but not persisted.
  embedded_schema do
    field :app, :string
    field :build_tools, {:array, :string}
    field :elixir, :string
    field :files, {:array, :string}, virtual: true
  end

  # Casts and validates release metadata params:
  # - app, build_tools and files are required,
  # - build_tools and files must be non-empty lists (project helper
  #   validate_list_required/2,3),
  # - build_tools is de-duplicated,
  # - elixir is checked as a version requirement; `pre: true` presumably
  #   permits pre-release requirements — project helper, confirm upstream.
  def changeset(meta, params) do
    cast(meta, params, ~w(app build_tools elixir files)a)
    |> validate_required(~w(app build_tools files)a)
    |> validate_list_required(:build_tools)
    |> validate_list_required(:files, message: "package can't be empty")
    |> update_change(:build_tools, &Enum.uniq/1)
    |> validate_requirement(:elixir, pre: true)
  end
end
| 28.727273
| 72
| 0.702532
|
79c59a2e84a81b12276499bab61360579ba2a792
| 1,126
|
exs
|
Elixir
|
clients/mirror/mix.exs
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/mirror/mix.exs
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
clients/mirror/mix.exs
|
mocknen/elixir-google-api
|
dac4877b5da2694eca6a0b07b3bd0e179e5f3b70
|
[
"Apache-2.0"
] | null | null | null |
defmodule GoogleApi.Mirror.V1.Mixfile do
  @moduledoc false
  use Mix.Project

  @version "0.0.1"

  # Mix project definition for the generated Google Mirror API client.
  def project do
    [
      app: :google_api_mirror,
      version: @version,
      elixir: "~> 1.4",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/FIXME"
    ]
  end

  def application() do
    [extra_applications: [:logger]]
  end

  defp deps() do
    [
      {:google_gax, "~> 0.1"},
      {:ex_doc, "~> 0.16", only: :dev},
      {:dialyxir, "~> 0.5", only: [:dev], runtime: false}
    ]
  end

  defp description() do
    """
    Interacts with Glass users via the timeline.
    """
  end

  # Hex package metadata. NOTE(review): source links still contain the
  # generator's "FIXME" placeholder — left as-is to preserve behavior.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/FIXME",
        "Homepage" => "https://developers.google.com/glass"
      }
    ]
  end
end
| 22.979592
| 105
| 0.584369
|
79c59ebb24645027d6ab553a466ae5b0451e7ee2
| 19,582
|
ex
|
Elixir
|
lib/elixlsx/xml_templates.ex
|
JJCOINCWEBDEV/elixlsx
|
97b3518841d45ca87e2fd0df25377a2085dc42f8
|
[
"MIT"
] | null | null | null |
lib/elixlsx/xml_templates.ex
|
JJCOINCWEBDEV/elixlsx
|
97b3518841d45ca87e2fd0df25377a2085dc42f8
|
[
"MIT"
] | null | null | null |
lib/elixlsx/xml_templates.ex
|
JJCOINCWEBDEV/elixlsx
|
97b3518841d45ca87e2fd0df25377a2085dc42f8
|
[
"MIT"
] | 1
|
2020-02-04T14:12:42.000Z
|
2020-02-04T14:12:42.000Z
|
defmodule Elixlsx.XMLTemplates do
alias Elixlsx.Util, as: U
alias Elixlsx.Compiler.CellStyleDB
alias Elixlsx.Compiler.StringDB
alias Elixlsx.Compiler.FontDB
alias Elixlsx.Compiler.FillDB
alias Elixlsx.Compiler.SheetCompInfo
alias Elixlsx.Compiler.NumFmtDB
alias Elixlsx.Compiler.BorderStyleDB
alias Elixlsx.Compiler.WorkbookCompInfo
alias Elixlsx.Style.CellStyle
alias Elixlsx.Style.Font
alias Elixlsx.Style.Fill
alias Elixlsx.Style.BorderStyle
alias Elixlsx.Sheet
# TODO: the xml_text_exape functions belong into Elixlsx.Util,
# as they are/will be used by functions in Elixlsx.Style.*
@doc ~S"""
There are 5 characters that should be escaped in XML (<,>,",',&), but only
2 of them *must* be escaped. Saves a couple of CPU cycles, for the environment.

## Example

    iex> Elixlsx.XMLTemplates.minimal_xml_text_escape "Only '&' and '<' are escaped here, '\"' & '>' & \"'\" are not."
    "Only '&amp;' and '&lt;' are escaped here, '\"' &amp; '>' &amp; \"'\" are not."

"""
def minimal_xml_text_escape(s) do
  # FIX: the replacement table had been corrupted to identity mappings
  # (HTML entities decoded away); restored the &amp;/&lt; replacements.
  # "&" must be listed first so it is not re-escaped by later passes.
  U.replace_all(s, [
    {"&", "&amp;"},
    {"<", "&lt;"}
  ])
end
@doc ~S"""
Escape characters for embedding in XML
documents.

## Example

    iex> Elixlsx.XMLTemplates.xml_escape "&\"'<>'"
    "&amp;&quot;&apos;&lt;&gt;&apos;"

"""
def xml_escape(s) do
  # FIX: the replacement table had been corrupted to identity mappings
  # (HTML entities decoded away, leaving invalid syntax for the quote
  # entry); restored the five standard XML entity replacements.
  # "&" must be listed first so the '&' introduced by the other
  # replacements is not escaped twice.
  U.replace_all(s, [
    {"&", "&amp;"},
    {"'", "&apos;"},
    {"\"", "&quot;"},
    {"<", "&lt;"},
    {">", "&gt;"}
  ])
end
# Template for docProps/app.xml; __APPVERSION__ is substituted at runtime.
@docprops_app ~S"""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<Properties xmlns="http://schemas.openxmlformats.org/officeDocument/2006/extended-properties" xmlns:vt="http://schemas.openxmlformats.org/officeDocument/2006/docPropsVTypes">
<TotalTime>0</TotalTime>
<Application>Elixlsx</Application>
<AppVersion>__APPVERSION__</AppVersion>
</Properties>
"""

# Returns docProps/app.xml with the current Elixlsx version filled in.
def docprops_app do
  U.replace_all(@docprops_app,
    [{"__APPVERSION__", U.app_version_string()}])
end

# Template for docProps/core.xml; timestamp/language/revision are
# substituted at runtime.
@docprops_core ~S"""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<cp:coreProperties xmlns:cp="http://schemas.openxmlformats.org/package/2006/metadata/core-properties" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:dcmitype="http://purl.org/dc/dcmitype/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<dcterms:created xsi:type="dcterms:W3CDTF">__TIMESTAMP__</dcterms:created>
<dc:language>__LANGUAGE__</dc:language>
<dcterms:modified xsi:type="dcterms:W3CDTF">__TIMESTAMP__</dcterms:modified>
<cp:revision>__REVISION__</cp:revision>
</cp:coreProperties>
"""

# Returns docProps/core.xml. Only the timestamp is XML-escaped here;
# NOTE(review): `language` and `revision` are interpolated unescaped —
# presumably safe for the expected values, confirm if caller-supplied.
def docprops_core(timestamp, language \\ "en-US", revision \\ 1) do
  U.replace_all(@docprops_core,
    [{"__TIMESTAMP__", xml_escape(timestamp)},
     {"__LANGUAGE__", language},
     {"__REVISION__", to_string(revision)}])
end
@spec make_xl_rel_sheet(SheetCompInfo.t) :: String.t
# One <Relationship/> entry for xl/_rels/workbook.xml.rels. The ~s sigil
# avoids the escaped-quote noise of a plain string literal.
def make_xl_rel_sheet(sheet_comp_info) do
  ~s(<Relationship Id="#{sheet_comp_info.rId}" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/worksheet" Target="worksheets/#{sheet_comp_info.filename}"/>)
end

@spec make_xl_rel_sheets(nonempty_list(SheetCompInfo.t)) :: String.t
# Concatenates the relationship entries for all sheets (no separator).
def make_xl_rel_sheets(sheet_comp_infos) do
  Enum.map_join(sheet_comp_infos, "", &make_xl_rel_sheet/1)
end
### xl/workbook.xml
@spec make_xl_workbook_xml_sheet_entry({Sheet.t, SheetCompInfo.t}) :: String.t
# One <sheet/> entry for xl/workbook.xml. Excel limits sheet names to 31
# characters; String.length/1 counts graphemes, not bytes.
def make_xl_workbook_xml_sheet_entry {sheet_info, sheet_comp_info} do
  if String.length(sheet_info.name) > 31 do
    raise %ArgumentError{message: "The sheet name '#{sheet_info.name}' is too long. Maximum 31 chars allowed for name."}
  end

  """
  <sheet name="#{xml_escape(sheet_info.name)}" sheetId="#{sheet_comp_info.sheetId}" state="visible" r:id="#{sheet_comp_info.rId}"/>
  """
end

### [Content_Types].xml

# One <Override/> entry per worksheet part.
defp contenttypes_sheet_entry sheet_comp_info do
  """
  <Override PartName="/xl/worksheets/#{sheet_comp_info.filename}" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml"/>
  """
end

defp contenttypes_sheet_entries sheet_comp_infos do
  Enum.map_join sheet_comp_infos, &contenttypes_sheet_entry/1
end

# Content of [Content_Types].xml: fixed parts plus one override per sheet.
def make_contenttypes_xml(wci) do
  ~S"""
  <?xml version="1.0" encoding="UTF-8"?>
  <Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types">
  <Override PartName="/_rels/.rels" ContentType="application/vnd.openxmlformats-package.relationships+xml"/>
  <Override PartName="/docProps/app.xml" ContentType="application/vnd.openxmlformats-officedocument.extended-properties+xml"/>
  <Override PartName="/docProps/core.xml" ContentType="application/vnd.openxmlformats-package.core-properties+xml"/>
  <Override PartName="/xl/_rels/workbook.xml.rels" ContentType="application/vnd.openxmlformats-package.relationships+xml"/>
  <Override PartName="/xl/workbook.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml"/>
  <Override PartName="/xl/styles.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml"/>
  """
  <> contenttypes_sheet_entries(wci.sheet_info) <>
  ~S"""
  <Override PartName="/xl/sharedStrings.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.sharedStrings+xml"/>
  </Types>
  """
end
###
### xl/worksheet/sheet*.xml
###
# Splits a cell spec into {content, style_id, cellstyle}.
# A list cell is [content | style_props]: the props are turned into a
# CellStyle and registered in the compinfo's style DB to obtain its id.
# Any other value is bare content with the default style (id 0).
defp split_into_content_style(cell, wci) do
  cond do
    is_list(cell) ->
      cellstyle = CellStyle.from_props (tl cell)
      {
        hd(cell),
        CellStyleDB.get_id(wci.cellstyledb, cellstyle),
        cellstyle
      }
    true ->
      {
        cell,
        0,
        nil
      }
  end
end
# Maps cell content to a {type_attribute, value} pair for the worksheet
# XML ("n" = number, "s" = shared-string index, "b" = boolean), or the
# pass-through tuples for formulas / empty cells, or :error for anything
# unsupported (the caller raises a descriptive ArgumentError on :error).
defp get_content_type_value(content, wci) do
  case content do
    {:excelts, num} ->
      {"n", to_string(num)}

    {:formula, x} ->
      {:formula, x}

    {:formula, x, opts} when is_list(opts) ->
      {:formula, x, opts}

    x when is_number(x) ->
      {"n", to_string(x)}

    x when is_binary(x) ->
      # strings are interned in the shared-string table
      {"s", to_string(StringDB.get_id wci.stringdb, x)}

    x when is_boolean(x) ->
      {"b", if x do "1" else "0" end}

    :empty ->
      {:empty, :empty}

    # FIX: the original catch-all was the literal pattern `true ->`,
    # which is unreachable (booleans are consumed by the is_boolean/1
    # clause above), so unsupported content crashed with CaseClauseError
    # instead of returning :error as the caller expects.
    _ ->
      :error
  end
end
# TODO i know now about string interpolation, i should probably clean this up. ;)
# Renders all <c> (cell) elements for one row. `rowidx` is 1-based; the
# column index comes from zipping the row with 1..length.
defp xl_sheet_cols(row, rowidx, wci) do
  Enum.zip(row, 1 .. length row) |>
  Enum.map(
    fn {cell, colidx} ->
      {content, styleID, cellstyle} = split_into_content_style(cell, wci)
      if is_nil(content) do
        # nil content renders nothing for this cell position
        ""
      else
        # date-styled cells get converted to Excel's serial datetime
        content = if CellStyle.is_date? cellstyle do
          U.to_excel_datetime content
        else
          content
        end
        cv = get_content_type_value(content, wci)
        # normalize the 2- and 3-tuple returns; :error becomes a raise
        {content_type, content_value, content_opts} =
          case cv do
            {t, v} -> {t, v, []}
            {t, v, opts} -> {t, v, opts}
            :error -> raise %ArgumentError{
              message: "Invalid column content at " <>
                U.to_excel_coords(rowidx, colidx) <> ": "
                <> (inspect content)
            }
          end
        case content_type do
          :formula ->
            # a formula may carry a precomputed cached value in opts[:value]
            value = if not is_nil(content_opts[:value]), do: "<v>#{content_opts[:value]}</v>", else: ""
            """
            <c r="#{U.to_excel_coords(rowidx, colidx)}"
              s="#{styleID}">
              <f>#{content_value}</f>
              #{value}
            </c>
            """
          :empty ->
            # styled but valueless cell
            """
            <c r="#{U.to_excel_coords(rowidx, colidx)}"
              s="#{styleID}">
            </c>
            """
          type ->
            """
            <c r="#{U.to_excel_coords(rowidx, colidx)}"
              s="#{styleID}" t="#{type}">
              <v>#{content_value}</v>
            </c>
            """
        end
      end
    end) |>
  List.foldr("", &<>/2)
end
# Renders the <mergeCells> block for a sheet; an empty merge list
# produces no output at all.
defp xl_merge_cells([]), do: ""

defp xl_merge_cells(merge_cells) do
  refs =
    Enum.map_join(merge_cells, fn {from_cell, to_cell} ->
      "<mergeCell ref=\"#{from_cell}:#{to_cell}\"/>"
    end)

  """
  <mergeCells count="#{Enum.count(merge_cells)}">
    #{refs}
  </mergeCells>
  """
end
# Renders all <row> elements for the sheet body; rows are numbered from 1.
defp xl_sheet_rows(data, row_heights, wci) do
  Enum.zip(data, 1 .. length data) |>
  Enum.map_join(fn {row, rowidx} ->
    """
    <row r="#{rowidx}" #{get_row_height_attr(row_heights, rowidx)}>
      #{xl_sheet_cols(row, rowidx, wci)}
    </row>
    """ end)
end

# Returns the customHeight/ht attribute pair when a height is configured
# for this row index, otherwise an empty string.
defp get_row_height_attr(row_heights, rowidx) do
  row_height = Map.get(row_heights, rowidx)
  if (row_height) do
    "customHeight=\"1\" ht=\"#{row_height}\""
  else
    ""
  end
end
# One <col> element for a {column_index, width} pair.
# FIX: the original used a single-quoted charlist literal here; the
# intended type is a binary string (Enum.map_join's implicit to_string
# masked the mistake, and charlist literals with interpolation are
# deprecated in modern Elixir).
defp make_col_width({k, v}) do
  "<col min=\"#{k}\" max=\"#{k}\" width=\"#{v}\" customWidth=\"1\" />"
end

# Renders the <cols> block from sheet.col_widths (a map of index => width),
# sorted by column index; empty map produces no output.
defp make_col_widths(sheet) do
  if Kernel.map_size(sheet.col_widths) != 0 do
    cols =
      sheet.col_widths
      |> Map.to_list()
      |> Enum.sort()
      |> Enum.map_join(&make_col_width/1)

    "<cols>#{cols}</cols>"
  else
    ""
  end
end
@spec make_sheet(Sheet.t, WorkbookCompInfo.t) :: String.t
@doc ~S"""
Returns the XML content for single sheet.
"""
def make_sheet(sheet, wci) do
  # Assembled by concatenating fixed XML fragments with the dynamic
  # pieces: grid-line flag, sheet view (frozen panes), column widths,
  # row data and merged-cell ranges.
  ~S"""
  <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
  <worksheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">
  <sheetPr filterMode="false">
    <pageSetUpPr fitToPage="false"/>
  </sheetPr>
  <dimension ref="A1"/>
  <sheetViews>
  <sheetView workbookViewId="0"
  """
  <> make_sheet_show_grid(sheet) <>
  """
  >
  """
  <> make_sheetview(sheet) <>
  """
  </sheetView>
  </sheetViews>
  <sheetFormatPr defaultRowHeight="12.8"/>
  """
  <> make_col_widths(sheet) <>
  """
  <sheetData>
  """
  <>
  xl_sheet_rows(sheet.rows, sheet.row_heights, wci)
  <>
  ~S"""
  </sheetData>
  """
  <> xl_merge_cells(sheet.merge_cells) <>
  """
  <pageMargins left="0.75" right="0.75" top="1" bottom="1.0" header="0.5" footer="0.5"/>
  </worksheet>
  """
end

# Only an explicit `false` suppresses grid lines; any other value
# (true, nil) emits nothing and keeps Excel's default.
defp make_sheet_show_grid(sheet) do
  show_grid_lines_xml = case sheet.show_grid_lines do
    false -> "showGridLines=\"0\" "
    _ -> ""
  end
  show_grid_lines_xml
end

# Renders the frozen-pane <pane/> element plus the <selection/> element.
defp make_sheetview(sheet) do
  # according to spec:
  # * when only horizontal split is applied we need to use bottomLeft
  # * when only vertical split is applied we need to use topRight
  # * and when both splits is applied, we can use bottomRight
  pane = case sheet.pane_freeze do
    {_row_idx, 0} ->
      "bottomLeft"
    {0, _col_idx} ->
      "topRight"
    {col_idx, row_idx} when col_idx > 0 and row_idx > 0 ->
      "bottomRight"
    _any ->
      nil
  end
  # No freeze configured (nil or {0, 0}) yields empty pane markup.
  {selection_pane_attr, panel_xml} = case sheet.pane_freeze do
    {row_idx, col_idx} when col_idx > 0 or row_idx > 0 ->
      top_left_cell = U.to_excel_coords(row_idx + 1, col_idx + 1)
      {"pane=\"#{pane}\"", "<pane xSplit=\"#{col_idx}\" ySplit=\"#{row_idx}\" topLeftCell=\"#{top_left_cell}\" activePane=\"#{pane}\" state=\"frozen\" />"}
    _any ->
      {"", ""}
  end
  panel_xml <> "<selection " <> selection_pane_attr <> " activeCell=\"A1\" sqref=\"A1\" />"
end
###
### xl/sharedStrings.xml
###

@spec make_xl_shared_strings(list({non_neg_integer, String.t})) :: String.t
# Renders the shared-string table. Input is {id, string} tuples; only the
# string is emitted (order is assumed to follow the ids — TODO confirm
# against StringDB.sorted_list upstream).
def make_xl_shared_strings(stringlist) do
  len = length stringlist
  """
  <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
  <sst xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" count="#{len}" uniqueCount="#{len}">
  """
  <> Enum.map_join(stringlist, fn ({_, value}) ->
    # the only two characters that *must* be replaced for safe XML encoding are & and <:
    "<si><t>#{minimal_xml_text_escape value}</t></si>"
  end)
  <> "</sst>"
end

###
### xl/styles.xml
###

@spec make_font_list(list(Font.t)) :: String.t
# One style entry per registered font, joined with newlines.
defp make_font_list(ordered_font_list) do
  Enum.map_join(ordered_font_list, "\n",
    &(Font.get_stylexml_entry &1))
end

@spec make_fill_list(list(Fill.t)) :: String.t
# One style entry per registered fill, joined with newlines.
defp make_fill_list(ordered_fill_list) do
  Enum.map_join(ordered_fill_list, "\n",
    &(Fill.get_stylexml_entry &1))
end
# Turns a CellStyle struct into the styles.xml <xf /> representation.
# TODO: This could be moved into the CellStyle struct.
@spec style_to_xml_entry(CellStyle.t, WorkbookCompInfo.t) :: String.t
defp style_to_xml_entry(style, wci) do
  # Each component resolves to id 0 (the built-in default) when unset,
  # otherwise to its id in the corresponding compinfo DB.
  fontid = if is_nil(style.font),
    do: 0,
    else: FontDB.get_id wci.fontdb, style.font
  fillid = if is_nil(style.fill),
    do: 0,
    else: FillDB.get_id wci.filldb, style.fill
  numfmtid = if is_nil(style.numfmt),
    do: 0,
    else: NumFmtDB.get_id wci.numfmtdb, style.numfmt
  borderid = if is_nil(style.border),
    do: 0,
    else: BorderStyleDB.get_id wci.borderstyledb, style.border

  # Alignment settings live on the font; emit applyAlignment + the
  # <alignment/> tag only when the font actually defines some alignment.
  {apply_alignment, wrap_text_tag} = case style.font do
    nil ->
      {"", ""}
    font ->
      case make_style_alignment(font) do
        "" ->
          {"", ""}
        alignment ->
          {"applyAlignment=\"1\"", alignment}
      end
  end

  """
  <xf borderId="#{borderid}"
    fillId="#{fillid}"
    fontId="#{fontid}"
    numFmtId="#{numfmtid}"
    xfId="0" #{apply_alignment}>
    #{wrap_text_tag}
  </xf>
  """
end

# Appends wrapText="1" when the font requests wrapping.
@spec wrap_text(String.t, Font.t) :: String.t
defp wrap_text(attrs, %Font{wrap_text: true}), do: attrs <> "wrapText=\"1\" "
defp wrap_text(attrs, _), do: attrs

# Appends horizontal="..." for the supported alignment atoms; raises on
# anything else.
@spec horizontal_alignment(String.t, Font.t) :: String.t
defp horizontal_alignment(attrs, %Font{align_horizontal: nil}), do: attrs
defp horizontal_alignment(attrs, %Font{align_horizontal: alignment}) do
  if alignment in [:center, :fill, :general, :justify, :left, :right] do
    attrs <> "horizontal=\"#{ Atom.to_string(alignment) }\" "
  else
    raise %ArgumentError{message: "Given horizontal alignment not supported. Only :center, :fill, :general, :justify, :left, :right are available."}
  end
end

# Appends vertical="..." for the supported alignment atoms; raises on
# anything else.
@spec vertical_alignment(String.t, Font.t) :: String.t
defp vertical_alignment(attrs, %Font{align_vertical: nil}), do: attrs
defp vertical_alignment(attrs, %Font{align_vertical: alignment}) do
  if alignment in [:center, :top, :bottom] do
    attrs <> "vertical=\"#{ Atom.to_string(alignment) }\" "
  else
    raise %ArgumentError{message: "Given vertical alignment not supported. Only :center, :top, :bottom are available."}
  end
end
# Creates an aligment xml tag from font style.
# Returns "" when the font defines no wrap/horizontal/vertical setting.
#
# FIX: the original returned `nil` for the empty case while its only
# caller (style_to_xml_entry) pattern-matches on `""` to detect "no
# alignment" — so nil fell into the catch-all branch and
# applyAlignment="1" was emitted (with the nil interpolating to nothing)
# even when no alignment was configured. Returning "" restores the
# intended contract and makes both branches the same type.
@spec make_style_alignment(Font.t) :: String.t
defp make_style_alignment(font) do
  attrs =
    ""
    |> wrap_text(font)
    |> horizontal_alignment(font)
    |> vertical_alignment(font)

  case attrs do
    "" -> ""
    _ -> "<alignment #{attrs}/>"
  end
end
# Returns the inner content of the <CellXfs> block.
@spec make_cellxfs(list(CellStyle.t), WorkbookCompInfo.t) :: String.t
defp make_cellxfs(ordered_style_list, wci) do
  Enum.map_join(ordered_style_list, "\n", &(style_to_xml_entry &1, wci))
end

alias Elixlsx.Style.NumFmt

# Renders one <numFmt/> entry per {id, numfmt} tuple.
defp make_numfmts_inner(id_numfmt_tuples) do
  Enum.map_join(id_numfmt_tuples, "\n",
    (fn ({id, numfmt}) ->
      NumFmt.get_stylexml_entry numfmt, id
    end))
end
# Wraps the custom number formats in a <numFmts> block; emits nothing
# when there are none.
# FIX (idiom): replaced `case length(list) do 0 -> ...` — an O(n) walk
# just to test emptiness — with a direct pattern match on [].
defp make_numfmts([]), do: ""

defp make_numfmts(id_numfmt_tuples) do
  count = length(id_numfmt_tuples)
  "<numFmts count=\"#{count}\">#{make_numfmts_inner(id_numfmt_tuples)}</numFmts>"
end
# One style entry per registered border, joined with newlines.
defp make_borders(borders_list) do
  Enum.map_join borders_list, "\n", &(BorderStyle.get_border_style_entry &1)
end
@spec make_xl_styles(WorkbookCompInfo.t) :: String.t
@doc ~S"""
get the content of the styles.xml file.
the WorkbookCompInfo struct must be computed before calling this,
(especially CellStyleDB.register_all)
"""
def make_xl_styles(wci) do
  # All lists are id-sorted so entry position matches the ids handed out
  # by the compinfo DBs. Counts are offset by the built-in defaults that
  # are emitted literally below (1 default font/border/xf, 2 default fills).
  font_list = FontDB.id_sorted_fonts wci.fontdb
  fill_list = FillDB.id_sorted_fills wci.filldb
  cell_xfs = CellStyleDB.id_sorted_styles wci.cellstyledb
  numfmts_list = NumFmtDB.custom_numfmt_id_tuples wci.numfmtdb
  borders_list = BorderStyleDB.id_sorted_borders wci.borderstyledb

  """
  <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
  <styleSheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
  #{make_numfmts(numfmts_list)}
  <fonts count="#{1 + length font_list}">
    <font />
    #{make_font_list(font_list)}
  </fonts>
  <fills count="#{2 + length fill_list}">
    <fill><patternFill patternType="none"/></fill>
    <fill><patternFill patternType="gray125"/></fill>
    #{make_fill_list(fill_list)}
  </fills>
  <borders count="#{1 + length borders_list}">
    <border />
    #{make_borders(borders_list)}
  </borders>
  <cellStyleXfs count="1">
    <xf borderId="0" numFmtId="0" fillId="0" fontId="0" applyAlignment="1">
      <alignment wrapText="1"/>
    </xf>
  </cellStyleXfs>
  <cellXfs count="#{1 + length cell_xfs}">
    <xf borderId="0" numFmtId="0" fillId="0" fontId="0" xfId="0"/>
    #{make_cellxfs cell_xfs, wci}
  </cellXfs>
  </styleSheet>
  """
end
###
### _rels/.rels
###

# Static top-level relationships file: points the package at the
# workbook and the two docProps parts.
@rels_dotrels ~S"""
<?xml version="1.0" encoding="UTF-8"?>
<Relationships xmlns="http://schemas.openxmlformats.org/package/2006/relationships">
<Relationship Id="rId1" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/officeDocument" Target="xl/workbook.xml"/>
<Relationship Id="rId2" Type="http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties" Target="docProps/core.xml"/>
<Relationship Id="rId3" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/extended-properties" Target="docProps/app.xml"/>
</Relationships>
"""
def rels_dotrels, do: @rels_dotrels

####
#### xl/workbook.xml
####

# One <sheet/> entry per (Sheet, SheetCompInfo) pair, in order.
@spec workbook_sheet_entries(nonempty_list(Sheet.t), nonempty_list(SheetCompInfo.t)) :: String.t
defp workbook_sheet_entries sheet_infos, sheet_comp_infos do
  Enum.zip(sheet_infos, sheet_comp_infos)
  |> Enum.map_join(&make_xl_workbook_xml_sheet_entry/1)
end

@doc ~S"""
Return the data for /xl/workbook.xml
"""
def make_workbook_xml(data, sci) do
  ~S"""
  <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
  <workbook xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">
  <fileVersion appName="Calc"/>
  <bookViews>
    <workbookView activeTab="0"/>
  </bookViews>
  <sheets>
  """
  <> workbook_sheet_entries(data.sheets, sci)
  <>
  ~S"""
  </sheets>
  <calcPr fullCalcOnLoad="1" iterateCount="100" refMode="A1" iterate="false" iterateDelta="0.001"/>
  </workbook>
  """
end
end
| 32.636667
| 288
| 0.622408
|
79c5addd083e7b59e96c15e3920272b62eed4e75
| 1,745
|
ex
|
Elixir
|
clients/firestore/lib/google_api/firestore/v1/model/existence_filter.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/firestore/lib/google_api/firestore/v1/model/existence_filter.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/firestore/lib/google_api/firestore/v1/model/existence_filter.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1.Model.ExistenceFilter do
  @moduledoc """
  A digest of all the documents that match a given target.

  ## Attributes

  *   `count` (*type:* `integer()`, *default:* `nil`) - The total count of documents that match target_id. If different from the count of documents in the client that match, the client must manually determine which documents no longer match the target.
  *   `targetId` (*type:* `integer()`, *default:* `nil`) - The target ID to which this filter applies.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :count => integer() | nil,
          :targetId => integer() | nil
        }

  field(:count)
  field(:targetId)
end

# Poison protocol implementations delegate JSON (de)serialization to the
# generated Gax model machinery.
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1.Model.ExistenceFilter do
  def decode(value, options) do
    GoogleApi.Firestore.V1.Model.ExistenceFilter.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Firestore.V1.Model.ExistenceFilter do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.9
| 252
| 0.728367
|
79c5b305c9b55ae77f7de9d02f925c5a748a1e95
| 221
|
exs
|
Elixir
|
priv/repo/migrations/20180729015122_add_feature_ids_to_rooms.exs
|
stevegrossi/ex_venture
|
e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa
|
[
"MIT"
] | 2
|
2019-05-14T11:36:44.000Z
|
2020-07-01T08:54:04.000Z
|
priv/repo/migrations/20180729015122_add_feature_ids_to_rooms.exs
|
nickwalton/ex_venture
|
d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb
|
[
"MIT"
] | null | null | null |
priv/repo/migrations/20180729015122_add_feature_ids_to_rooms.exs
|
nickwalton/ex_venture
|
d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb
|
[
"MIT"
] | 1
|
2021-01-29T14:12:40.000Z
|
2021-01-29T14:12:40.000Z
|
defmodule Data.Repo.Migrations.AddFeatureIdsToRooms do
  use Ecto.Migration

  # Adds rooms.feature_ids as a non-null integer array. The default is the
  # literal '{}' fragment — presumably the Postgres empty-array literal,
  # which cannot be expressed as a plain Elixir default.
  def change do
    alter table(:rooms) do
      add :feature_ids, {:array, :integer}, default: fragment("'{}'"), null: false
    end
  end
end
| 22.1
| 82
| 0.687783
|
79c64e1cd875cb8c20eaa08f81ac6900c87ed1e5
| 3,015
|
exs
|
Elixir
|
kousa/test/broth/_casts/speaking_change_test.exs
|
LeonardSSH/dogehouse
|
584055ad407bc37fa35cdf36ebb271622e29d436
|
[
"MIT"
] | 9
|
2021-03-17T03:56:18.000Z
|
2021-09-24T22:45:14.000Z
|
kousa/test/broth/_casts/speaking_change_test.exs
|
ActuallyTomas/dogehouse
|
8c3d2cd1d7e99e173f0658759467a391c4a90c4e
|
[
"MIT"
] | 12
|
2021-07-06T12:51:13.000Z
|
2022-03-16T12:38:18.000Z
|
kousa/test/broth/_casts/speaking_change_test.exs
|
ActuallyTomas/dogehouse
|
8c3d2cd1d7e99e173f0658759467a391c4a90c4e
|
[
"MIT"
] | 4
|
2021-07-15T20:33:50.000Z
|
2022-03-27T12:46:47.000Z
|
defmodule BrothTest.SpeakingChangeTest do
  # Integration tests for the legacy "speaking_change" websocket op:
  # toggling a user's speaking state must update the room's
  # activeSpeakerMap and broadcast to every client in the room.
  use ExUnit.Case, async: true
  use KousaTest.Support.EctoSandbox

  alias Beef.Schemas.User
  alias BrothTest.WsClient
  alias BrothTest.WsClientFactory
  alias KousaTest.Support.Factory

  require WsClient

  # Creates a user, connects a websocket client for them, and makes them
  # owner of a fresh room; all three are merged into the test context.
  setup do
    user = Factory.create(User)
    client_ws = WsClientFactory.create_client_for(user)

    %{"id" => room_id} =
      WsClient.do_call(
        client_ws,
        "room:create",
        %{"name" => "foo room", "description" => "foo"}
      )

    {:ok, user: user, client_ws: client_ws, room_id: room_id}
  end

  describe "the websocket speaking_change operation" do
    test "toggles the active speaking state", t do
      room_id = t.room_id

      # add a second user to the test
      other = Factory.create(User)
      other_ws = WsClientFactory.create_client_for(other)
      WsClient.do_call(other_ws, "room:join", %{"roomId" => room_id})
      WsClient.assert_frame_legacy("new_user_join_room", _)

      # the room starts with no active speakers
      assert %{} = Onion.RoomSession.get(room_id, :activeSpeakerMap)

      WsClient.send_msg_legacy(
        t.client_ws,
        "speaking_change",
        %{"value" => true}
      )

      # both websockets will be informed
      WsClient.assert_frame_legacy(
        "active_speaker_change",
        %{"activeSpeakerMap" => map},
        t.client_ws
      )

      assert is_map_key(map, t.user.id)

      WsClient.assert_frame_legacy(
        "active_speaker_change",
        %{"activeSpeakerMap" => map},
        other_ws
      )

      assert is_map_key(map, t.user.id)

      # server-side session state reflects the broadcast
      map = Onion.RoomSession.get(room_id, :activeSpeakerMap)
      assert is_map_key(map, t.user.id)

      Process.sleep(100)

      # now toggle back off and expect the user removed everywhere
      WsClient.send_msg_legacy(
        t.client_ws,
        "speaking_change",
        %{"value" => false}
      )

      WsClient.assert_frame_legacy(
        "active_speaker_change",
        %{"activeSpeakerMap" => map},
        t.client_ws
      )

      refute is_map_key(map, t.user.id)

      WsClient.assert_frame_legacy(
        "active_speaker_change",
        %{"activeSpeakerMap" => map},
        other_ws
      )

      refute is_map_key(map, t.user.id)

      map = Onion.RoomSession.get(room_id, :activeSpeakerMap)
      refute is_map_key(map, t.user.id)
    end

    test "does nothing if it's unset", t do
      room_id = t.room_id

      # add a second user to the test
      other = Factory.create(User)
      other_ws = WsClientFactory.create_client_for(other)
      WsClient.do_call(other_ws, "room:join", %{"roomId" => room_id})
      WsClient.assert_frame_legacy("new_user_join_room", _)

      Onion.RoomSession.get(room_id, :activeSpeakerMap)

      # sending false for a user who was never speaking must still
      # broadcast, but with an unchanged (empty) speaker map
      WsClient.send_msg_legacy(
        t.client_ws,
        "speaking_change",
        %{"value" => false}
      )

      WsClient.assert_frame_legacy(
        "active_speaker_change",
        %{"activeSpeakerMap" => map}
      )

      assert map == %{}

      map = Onion.RoomSession.get(room_id, :activeSpeakerMap)
      assert map == %{}
    end
  end
end
| 23.928571
| 69
| 0.626202
|
79c657798b90b82b75597a9214fa757bfd558d02
| 39,679
|
exs
|
Elixir
|
test/unit/dataset_test.exs
|
pukkamustard/rdf-ex
|
c459d8e7fa548fdfad82643338b68decf380a296
|
[
"MIT"
] | null | null | null |
test/unit/dataset_test.exs
|
pukkamustard/rdf-ex
|
c459d8e7fa548fdfad82643338b68decf380a296
|
[
"MIT"
] | null | null | null |
test/unit/dataset_test.exs
|
pukkamustard/rdf-ex
|
c459d8e7fa548fdfad82643338b68decf380a296
|
[
"MIT"
] | null | null | null |
defmodule RDF.DatasetTest do
use RDF.Test.Case
doctest RDF.Dataset
describe "new" do
test "creating an empty unnamed dataset" do
assert unnamed_dataset?(unnamed_dataset())
end
test "creating an empty dataset with a proper dataset name" do
refute unnamed_dataset?(named_dataset())
assert named_dataset?(named_dataset())
end
test "creating an empty dataset with a coercible dataset name" do
assert named_dataset("http://example.com/DatasetName")
|> named_dataset?(iri("http://example.com/DatasetName"))
assert named_dataset(EX.Foo) |> named_dataset?(iri(EX.Foo))
end
test "creating an unnamed dataset with an initial triple" do
ds = Dataset.new({EX.Subject, EX.predicate, EX.Object})
assert unnamed_dataset?(ds)
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object})
end
test "creating an unnamed dataset with an initial quad" do
ds = Dataset.new({EX.Subject, EX.predicate, EX.Object, EX.GraphName})
assert unnamed_dataset?(ds)
assert dataset_includes_statement?(ds,
{EX.Subject, EX.predicate, EX.Object, EX.GraphName})
end
test "creating a named dataset with an initial triple" do
ds = Dataset.new({EX.Subject, EX.predicate, EX.Object}, name: EX.DatasetName)
assert named_dataset?(ds, iri(EX.DatasetName))
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object})
end
test "creating a named dataset with an initial quad" do
ds = Dataset.new({EX.Subject, EX.predicate, EX.Object, EX.GraphName}, name: EX.DatasetName)
assert named_dataset?(ds, iri(EX.DatasetName))
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object, EX.GraphName})
end
test "creating an unnamed dataset with a list of initial statements" do
ds = Dataset.new([
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject2, EX.predicate2, EX.Object2, EX.GraphName},
{EX.Subject3, EX.predicate3, EX.Object3, nil}
])
assert unnamed_dataset?(ds)
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1, nil})
assert dataset_includes_statement?(ds, {EX.Subject2, EX.predicate2, EX.Object2, EX.GraphName})
assert dataset_includes_statement?(ds, {EX.Subject3, EX.predicate3, EX.Object3, nil})
end
test "creating a named dataset with a list of initial statements" do
ds = Dataset.new([
{EX.Subject, EX.predicate1, EX.Object1},
{EX.Subject, EX.predicate2, EX.Object2, EX.GraphName},
{EX.Subject, EX.predicate3, EX.Object3, nil}
], name: EX.DatasetName)
assert named_dataset?(ds, iri(EX.DatasetName))
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate1, EX.Object1, nil})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate2, EX.Object2, EX.GraphName})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate3, EX.Object3, nil})
end
test "creating a named dataset with an initial description" do
ds = Dataset.new(Description.new({EX.Subject, EX.predicate, EX.Object}), name: EX.DatasetName)
assert named_dataset?(ds, iri(EX.DatasetName))
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object})
end
test "creating an unnamed dataset with an initial description" do
ds = Dataset.new(Description.new({EX.Subject, EX.predicate, EX.Object}))
assert unnamed_dataset?(ds)
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object})
end
test "creating a named dataset with an initial graph" do
ds = Dataset.new(Graph.new({EX.Subject, EX.predicate, EX.Object}), name: EX.DatasetName)
assert named_dataset?(ds, iri(EX.DatasetName))
assert unnamed_graph?(Dataset.default_graph(ds))
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object})
ds = Dataset.new(Graph.new({EX.Subject, EX.predicate, EX.Object}, name: EX.GraphName), name: EX.DatasetName)
assert named_dataset?(ds, iri(EX.DatasetName))
assert unnamed_graph?(Dataset.default_graph(ds))
assert named_graph?(Dataset.graph(ds, EX.GraphName), iri(EX.GraphName))
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object, EX.GraphName})
end
test "creating an unnamed dataset with an inital graph" do
ds = Dataset.new(Graph.new({EX.Subject, EX.predicate, EX.Object}))
assert unnamed_dataset?(ds)
assert unnamed_graph?(Dataset.default_graph(ds))
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object})
ds = Dataset.new(Graph.new({EX.Subject, EX.predicate, EX.Object}, name: EX.GraphName))
assert unnamed_dataset?(ds)
assert unnamed_graph?(Dataset.default_graph(ds))
assert named_graph?(Dataset.graph(ds, EX.GraphName), iri(EX.GraphName))
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object, EX.GraphName})
end
end
describe "add" do
test "a proper triple is added to the default graph" do
assert Dataset.add(dataset(), {iri(EX.Subject), EX.predicate, iri(EX.Object)})
|> dataset_includes_statement?({EX.Subject, EX.predicate, EX.Object})
end
test "a proper quad is added to the specified graph" do
ds = Dataset.add(dataset(), {iri(EX.Subject), EX.predicate, iri(EX.Object), iri(EX.Graph)})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object, iri(EX.Graph)})
end
test "a proper quad with nil context is added to the default graph" do
ds = Dataset.add(dataset(), {iri(EX.Subject), EX.predicate, iri(EX.Object), nil})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate, EX.Object})
end
test "a coercible triple" do
assert Dataset.add(dataset(),
{"http://example.com/Subject", EX.predicate, EX.Object})
|> dataset_includes_statement?({EX.Subject, EX.predicate, EX.Object})
end
test "a coercible quad" do
assert Dataset.add(dataset(),
{"http://example.com/Subject", EX.predicate, EX.Object, "http://example.com/GraphName"})
|> dataset_includes_statement?({EX.Subject, EX.predicate, EX.Object, EX.GraphName})
end
test "a quad and an overwriting graph context " do
assert Dataset.add(dataset(), {EX.Subject, EX.predicate, EX.Object, EX.Graph}, EX.Other)
|> dataset_includes_statement?({EX.Subject, EX.predicate, EX.Object, EX.Other})
assert Dataset.add(dataset(), {EX.Subject, EX.predicate, EX.Object, EX.Graph}, nil)
|> dataset_includes_statement?({EX.Subject, EX.predicate, EX.Object})
end
test "statements with multiple objects" do
ds = Dataset.add(dataset(), {EX.Subject1, EX.predicate1, [EX.Object1, EX.Object2]})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object2})
ds = Dataset.add(dataset(), {EX.Subject1, EX.predicate1, [EX.Object1, EX.Object2], EX.GraphName})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1, EX.GraphName})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object2, EX.GraphName})
end
test "a list of triples without specification of the default context" do
ds = Dataset.add(dataset(), [
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
{EX.Subject3, EX.predicate3, EX.Object3}
])
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject3, EX.predicate3, EX.Object3})
end
test "a list of triples with specification of the default context" do
ds = Dataset.add(dataset(), [
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
{EX.Subject3, EX.predicate3, EX.Object3}
], EX.Graph)
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject3, EX.predicate3, EX.Object3, EX.Graph})
ds = Dataset.add(dataset(), [
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
{EX.Subject3, EX.predicate3, EX.Object3}
], nil)
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1, nil})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2, nil})
assert dataset_includes_statement?(ds, {EX.Subject3, EX.predicate3, EX.Object3, nil})
end
test "a list of quads without specification of the default context" do
ds = Dataset.add(dataset(), [
{EX.Subject, EX.predicate1, EX.Object1, EX.Graph1},
{EX.Subject, EX.predicate2, EX.Object2, nil},
{EX.Subject, EX.predicate1, EX.Object1, EX.Graph2}
])
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate1, EX.Object1, EX.Graph1})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate2, EX.Object2, nil})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate1, EX.Object1, EX.Graph2})
end
test "a list of quads with specification of the default context" do
ds = Dataset.add(dataset(), [
{EX.Subject, EX.predicate1, EX.Object1, EX.Graph1},
{EX.Subject, EX.predicate2, EX.Object2, nil},
{EX.Subject, EX.predicate1, EX.Object1, EX.Graph2}
], EX.Graph)
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate1, EX.Object1, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate2, EX.Object2, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate1, EX.Object1, EX.Graph})
ds = Dataset.add(dataset(), [
{EX.Subject, EX.predicate1, EX.Object1, EX.Graph1},
{EX.Subject, EX.predicate2, EX.Object2, nil},
{EX.Subject, EX.predicate1, EX.Object1, EX.Graph2}
], nil)
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate1, EX.Object1, nil})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate2, EX.Object2, nil})
assert dataset_includes_statement?(ds, {EX.Subject, EX.predicate1, EX.Object1, nil})
end
test "a list of mixed triples and quads" do
ds = Dataset.add(dataset(), [
{EX.Subject1, EX.predicate1, EX.Object1, EX.GraphName},
{EX.Subject3, EX.predicate3, EX.Object3}
])
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1, EX.GraphName})
assert dataset_includes_statement?(ds, {EX.Subject3, EX.predicate3, EX.Object3, nil})
end
test "a Description without specification of the default context" do
ds = Dataset.add(dataset(), Description.new(EX.Subject1, [
{EX.predicate1, EX.Object1},
{EX.predicate2, EX.Object2},
]))
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
end
test "a Description with specification of the default context" do
ds = Dataset.add(dataset(), Description.new(EX.Subject1, [
{EX.predicate1, EX.Object1},
{EX.predicate2, EX.Object2},
]), nil)
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
ds = Dataset.add(ds, Description.new({EX.Subject1, EX.predicate3, EX.Object3}), EX.Graph)
assert Enum.count(ds) == 3
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate3, EX.Object3, EX.Graph})
end
test "an unnamed Graph without specification of the default context" do
ds = Dataset.add(dataset(), Graph.new([
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
]))
assert unnamed_graph?(Dataset.default_graph(ds))
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
ds = Dataset.add(ds, Graph.new({EX.Subject1, EX.predicate2, EX.Object3}))
assert unnamed_graph?(Dataset.default_graph(ds))
assert Enum.count(ds) == 3
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3})
end
test "an unnamed Graph with specification of the default context" do
ds = Dataset.add(dataset(), Graph.new([
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
]), nil)
assert unnamed_graph?(Dataset.default_graph(ds))
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
ds = Dataset.add(ds, Graph.new({EX.Subject1, EX.predicate2, EX.Object3}), nil)
assert unnamed_graph?(Dataset.default_graph(ds))
assert Enum.count(ds) == 3
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3})
ds = Dataset.add(ds, Graph.new({EX.Subject1, EX.predicate2, EX.Object3}), EX.Graph)
assert unnamed_graph?(Dataset.default_graph(ds))
assert named_graph?(Dataset.graph(ds, EX.Graph), iri(EX.Graph))
assert Enum.count(ds) == 4
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3, EX.Graph})
end
test "a named Graph without specification of the default context" do
ds = Dataset.add(dataset(), Graph.new([
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
], name: EX.Graph1))
assert Dataset.graph(ds, EX.Graph1)
assert named_graph?(Dataset.graph(ds, EX.Graph1), iri(EX.Graph1))
assert unnamed_graph?(Dataset.default_graph(ds))
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1, EX.Graph1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2, EX.Graph1})
ds = Dataset.add(ds, Graph.new({EX.Subject1, EX.predicate2, EX.Object3}, name: EX.Graph2))
assert Dataset.graph(ds, EX.Graph2)
assert named_graph?(Dataset.graph(ds, EX.Graph2), iri(EX.Graph2))
assert unnamed_graph?(Dataset.default_graph(ds))
assert Enum.count(ds) == 3
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1, EX.Graph1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2, EX.Graph1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3, EX.Graph2})
end
test "a named Graph with specification of the default context" do
ds = Dataset.add(dataset(), Graph.new([
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
], name: EX.Graph1), nil)
refute Dataset.graph(ds, EX.Graph1)
assert unnamed_graph?(Dataset.default_graph(ds))
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
ds = Dataset.add(ds, Graph.new({EX.Subject1, EX.predicate2, EX.Object3}, name: EX.Graph2), nil)
refute Dataset.graph(ds, EX.Graph2)
assert unnamed_graph?(Dataset.default_graph(ds))
assert Enum.count(ds) == 3
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3})
ds = Dataset.add(ds, Graph.new({EX.Subject1, EX.predicate2, EX.Object3}, name: EX.Graph3), EX.Graph)
assert named_graph?(Dataset.graph(ds, EX.Graph), iri(EX.Graph))
assert Enum.count(ds) == 4
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3, EX.Graph})
end
test "an unnamed Dataset" do
ds = Dataset.add(dataset(), Dataset.new([
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
]))
assert ds.name == nil
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
ds = Dataset.add(ds, Dataset.new({EX.Subject1, EX.predicate2, EX.Object3}))
ds = Dataset.add(ds, Dataset.new({EX.Subject1, EX.predicate2, EX.Object3, EX.Graph}))
ds = Dataset.add(ds, Dataset.new({EX.Subject1, EX.predicate2, EX.Object4}), EX.Graph)
assert ds.name == nil
assert Enum.count(ds) == 5
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object4, EX.Graph})
end
test "a named Dataset" do
ds = Dataset.add(named_dataset(), Dataset.new([
{EX.Subject1, EX.predicate1, EX.Object1},
{EX.Subject1, EX.predicate2, EX.Object2},
], name: EX.DS1))
assert ds.name == iri(EX.DatasetName)
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
ds = Dataset.add(ds, Dataset.new({EX.Subject1, EX.predicate2, EX.Object3}, name: EX.DS2))
ds = Dataset.add(ds, Dataset.new({EX.Subject1, EX.predicate2, EX.Object3, EX.Graph}, name: EX.DS2))
ds = Dataset.add(ds, Dataset.new({EX.Subject1, EX.predicate2, EX.Object4}, name: EX.DS2), EX.Graph)
assert ds.name == iri(EX.DatasetName)
assert Enum.count(ds) == 5
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object3, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate2, EX.Object4, EX.Graph})
end
test "a list of Descriptions" do
ds = Dataset.add(dataset(), [
Description.new({EX.Subject1, EX.predicate1, EX.Object1}),
Description.new({EX.Subject2, EX.predicate2, EX.Object2}),
Description.new({EX.Subject1, EX.predicate3, EX.Object3})
])
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject2, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate3, EX.Object3})
ds = Dataset.add(ds, [
Description.new({EX.Subject1, EX.predicate1, EX.Object1}),
Description.new({EX.Subject2, EX.predicate2, EX.Object2}),
Description.new({EX.Subject1, EX.predicate3, EX.Object3})
], EX.Graph)
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject2, EX.predicate2, EX.Object2, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate3, EX.Object3, EX.Graph})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate1, EX.Object1})
assert dataset_includes_statement?(ds, {EX.Subject2, EX.predicate2, EX.Object2})
assert dataset_includes_statement?(ds, {EX.Subject1, EX.predicate3, EX.Object3})
end
test "a list of Graphs" do
ds = Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S2, EX.P2, EX.O2}])
|> RDF.Dataset.add([
Graph.new([{EX.S1, EX.P1, EX.O1}, {EX.S1, EX.P2, bnode(:foo)}]),
Graph.new({EX.S1, EX.P2, EX.O3}),
Graph.new([{EX.S1, EX.P2, EX.O2}, {EX.S2, EX.P2, EX.O2}], name: EX.Graph)
])
assert Enum.count(ds) == 6
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O1})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, bnode(:foo)})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, EX.O3})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O2})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, EX.O2, EX.Graph})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O2, EX.Graph})
end
test "duplicates are ignored" do
ds = Dataset.add(dataset(), {EX.Subject, EX.predicate, EX.Object, EX.GraphName})
assert Dataset.add(ds, {EX.Subject, EX.predicate, EX.Object, EX.GraphName}) == ds
end
test "non-coercible statements elements are causing an error" do
assert_raise RDF.IRI.InvalidError, fn ->
Dataset.add(dataset(), {"not a IRI", EX.predicate, iri(EX.Object), iri(EX.GraphName)})
end
assert_raise RDF.Literal.InvalidError, fn ->
Dataset.add(dataset(), {EX.Subject, EX.prop, self(), nil})
end
assert_raise RDF.IRI.InvalidError, fn ->
Dataset.add(dataset(), {iri(EX.Subject), EX.predicate, iri(EX.Object), "not a IRI"})
end
end
end
describe "put" do
test "a list of statements without specification of the default context" do
ds = Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S2, EX.P2, EX.O2, EX.Graph}])
|> RDF.Dataset.put([
{EX.S1, EX.P2, EX.O3, EX.Graph},
{EX.S1, EX.P2, bnode(:foo), nil},
{EX.S2, EX.P2, EX.O3, EX.Graph},
{EX.S2, EX.P2, EX.O4, EX.Graph}])
assert Dataset.statement_count(ds) == 5
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O1})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, bnode(:foo)})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, EX.O3, EX.Graph})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O3, EX.Graph})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O4, EX.Graph})
end
test "a list of statements with specification of the default context" do
ds = Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S2, EX.P2, EX.O2, EX.Graph}])
|> RDF.Dataset.put([
{EX.S1, EX.P1, EX.O3, EX.Graph},
{EX.S1, EX.P2, bnode(:foo), nil},
{EX.S2, EX.P2, EX.O3, EX.Graph},
{EX.S2, EX.P2, EX.O4}], nil)
assert Dataset.statement_count(ds) == 5
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O3})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, bnode(:foo)})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O3})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O4})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O2, EX.Graph})
ds = Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S2, EX.P2, EX.O2, EX.Graph}])
|> RDF.Dataset.put([
{EX.S1, EX.P1, EX.O3},
{EX.S1, EX.P1, EX.O4, EX.Graph},
{EX.S1, EX.P2, bnode(:foo), nil},
{EX.S2, EX.P2, EX.O3, EX.Graph},
{EX.S2, EX.P2, EX.O4}], EX.Graph)
assert Dataset.statement_count(ds) == 6
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O1})
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O3, EX.Graph})
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O4, EX.Graph})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, bnode(:foo), EX.Graph})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O3, EX.Graph})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O4, EX.Graph})
end
test "a Description" do
ds = Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S2, EX.P2, EX.O2}, {EX.S1, EX.P3, EX.O3}])
|> RDF.Dataset.put(Description.new(EX.S1, [{EX.P3, EX.O4}, {EX.P2, bnode(:foo)}]))
assert Dataset.statement_count(ds) == 4
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O1})
assert dataset_includes_statement?(ds, {EX.S1, EX.P3, EX.O4})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, bnode(:foo)})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O2})
end
test "an unnamed Graph" do
ds = Dataset.new([{EX.S1, EX.P1, EX.O1}, {EX.S2, EX.P2, EX.O2}, {EX.S1, EX.P3, EX.O3}])
|> RDF.Dataset.put(Graph.new([{EX.S1, EX.P3, EX.O4}, {EX.S1, EX.P2, bnode(:foo)}]))
assert Dataset.statement_count(ds) == 4
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O1})
assert dataset_includes_statement?(ds, {EX.S1, EX.P3, EX.O4})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, bnode(:foo)})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O2})
end
test "a named Graph" do
ds = Dataset.new(
Graph.new([{EX.S1, EX.P1, EX.O1}, {EX.S2, EX.P2, EX.O2}, {EX.S1, EX.P3, EX.O3}], name: EX.GraphName))
|> RDF.Dataset.put(
Graph.new([{EX.S1, EX.P3, EX.O4}, {EX.S1, EX.P2, bnode(:foo)}]), EX.GraphName)
assert Dataset.statement_count(ds) == 4
assert dataset_includes_statement?(ds, {EX.S1, EX.P1, EX.O1, EX.GraphName})
assert dataset_includes_statement?(ds, {EX.S1, EX.P3, EX.O4, EX.GraphName})
assert dataset_includes_statement?(ds, {EX.S1, EX.P2, bnode(:foo), EX.GraphName})
assert dataset_includes_statement?(ds, {EX.S2, EX.P2, EX.O2, EX.GraphName})
end
test "simultaneous use of the different forms to address the default context" do
ds = RDF.Dataset.put(dataset(), [
{EX.S, EX.P, EX.O1},
{EX.S, EX.P, EX.O2, nil}])
assert Dataset.statement_count(ds) == 2
assert dataset_includes_statement?(ds, {EX.S, EX.P, EX.O1})
assert dataset_includes_statement?(ds, {EX.S, EX.P, EX.O2})
end
end
describe "delete" do
setup do
{:ok,
dataset1: Dataset.new({EX.S1, EX.p1, EX.O1}),
dataset2: Dataset.new([
{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, EX.O2, EX.Graph},
]),
dataset3: Dataset.new([
{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, [EX.O1, EX.O2], EX.Graph1},
{EX.S3, EX.p3, [~B<foo>, ~L"bar"], EX.Graph2},
]),
}
end
test "a single statement",
%{dataset1: dataset1, dataset2: dataset2, dataset3: dataset3} do
assert Dataset.delete(Dataset.new, {EX.S, EX.p, EX.O}) == Dataset.new
assert Dataset.delete(dataset1, {EX.S1, EX.p1, EX.O1}) == Dataset.new
assert Dataset.delete(dataset2, {EX.S2, EX.p2, EX.O2, EX.Graph}) == dataset1
assert Dataset.delete(dataset2, {EX.S1, EX.p1, EX.O1}) ==
Dataset.new({EX.S2, EX.p2, EX.O2, EX.Graph})
assert Dataset.delete(dataset3, {EX.S2, EX.p2, EX.O1, EX.Graph1}) ==
Dataset.new [
{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, EX.O2, EX.Graph1},
{EX.S3, EX.p3, [~B<foo>, ~L"bar"], EX.Graph2},
]
assert Dataset.delete(dataset3, {EX.S2, EX.p2, [EX.O1, EX.O2], EX.Graph1}) ==
Dataset.new [
{EX.S1, EX.p1, EX.O1},
{EX.S3, EX.p3, [~B<foo>, ~L"bar"], EX.Graph2},
]
assert Dataset.delete(dataset3, {EX.S2, EX.p2, [EX.O1, EX.O2]}, EX.Graph1) ==
Dataset.new [
{EX.S1, EX.p1, EX.O1},
{EX.S3, EX.p3, [~B<foo>, ~L"bar"], EX.Graph2},
]
end
test "multiple statements with a list of triples",
%{dataset1: dataset1, dataset2: dataset2, dataset3: dataset3} do
assert Dataset.delete(dataset1, [{EX.S1, EX.p1, EX.O1},
{EX.S1, EX.p1, EX.O2}]) == Dataset.new
assert Dataset.delete(dataset2, [{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, EX.O2, EX.Graph}]) == Dataset.new
assert Dataset.delete(dataset3, [
{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, [EX.O1, EX.O2, EX.O3], EX.Graph1},
{EX.S3, EX.p3, ~B<foo>, EX.Graph2}]) == Dataset.new({EX.S3, EX.p3, ~L"bar", EX.Graph2})
end
test "multiple statements with a Description",
%{dataset1: dataset1, dataset2: dataset2} do
assert Dataset.delete(dataset1, Description.new(EX.S1, EX.p1, EX.O1)) == Dataset.new
assert Dataset.delete(dataset1, Description.new(EX.S1, EX.p1, EX.O1), EX.Graph) == dataset1
assert Dataset.delete(dataset2, Description.new(EX.S2, EX.p2, EX.O2), EX.Graph) == dataset1
assert Dataset.delete(dataset2, Description.new(EX.S1, EX.p1, EX.O1)) ==
Dataset.new({EX.S2, EX.p2, EX.O2, EX.Graph})
end
test "multiple statements with a Graph",
%{dataset1: dataset1, dataset2: dataset2, dataset3: dataset3} do
assert Dataset.delete(dataset1, Graph.new({EX.S1, EX.p1, EX.O1})) == Dataset.new
assert Dataset.delete(dataset2, Graph.new({EX.S1, EX.p1, EX.O1})) ==
Dataset.new({EX.S2, EX.p2, EX.O2, EX.Graph})
assert Dataset.delete(dataset2, Graph.new({EX.S2, EX.p2, EX.O2}, name: EX.Graph)) == dataset1
assert Dataset.delete(dataset2, Graph.new({EX.S2, EX.p2, EX.O2}, name: EX.Graph)) == dataset1
assert Dataset.delete(dataset2, Graph.new({EX.S2, EX.p2, EX.O2}), EX.Graph) == dataset1
assert Dataset.delete(dataset2, Graph.new({EX.S2, EX.p2, EX.O2}), EX.Graph) == dataset1
assert Dataset.delete(dataset3, Graph.new([
{EX.S1, EX.p1, [EX.O1, EX.O2]},
{EX.S2, EX.p2, EX.O3},
{EX.S3, EX.p3, ~B<foo>},
])) == Dataset.new([
{EX.S2, EX.p2, [EX.O1, EX.O2], EX.Graph1},
{EX.S3, EX.p3, [~B<foo>, ~L"bar"], EX.Graph2},
])
assert Dataset.delete(dataset3, Graph.new([
{EX.S1, EX.p1, [EX.O1, EX.O2]},
{EX.S2, EX.p2, EX.O3},
{EX.S3, EX.p3, ~B<foo>},
], name: EX.Graph2)) == Dataset.new([
{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, [EX.O1, EX.O2], EX.Graph1},
{EX.S3, EX.p3, [~L"bar"], EX.Graph2},
])
assert Dataset.delete(dataset3, Graph.new({EX.S3, EX.p3, ~B<foo>}), EX.Graph2) ==
Dataset.new([
{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, [EX.O1, EX.O2], EX.Graph1},
{EX.S3, EX.p3, ~L"bar", EX.Graph2},
])
end
test "multiple statements with a Dataset",
%{dataset1: dataset1, dataset2: dataset2} do
assert Dataset.delete(dataset1, dataset1) == Dataset.new
assert Dataset.delete(dataset1, dataset2) == Dataset.new
assert Dataset.delete(dataset2, dataset1) == Dataset.new({EX.S2, EX.p2, EX.O2, EX.Graph})
end
end
describe "delete_graph" do
setup do
{:ok,
dataset1: Dataset.new({EX.S1, EX.p1, EX.O1}),
dataset2: Dataset.new([
{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, EX.O2, EX.Graph},
]),
dataset3: Dataset.new([
{EX.S1, EX.p1, EX.O1},
{EX.S2, EX.p2, EX.O2, EX.Graph1},
{EX.S3, EX.p3, EX.O3, EX.Graph2},
]),
}
end
test "the default graph", %{dataset1: dataset1, dataset2: dataset2} do
assert Dataset.delete_graph(dataset1, nil) == Dataset.new
assert Dataset.delete_graph(dataset2, nil) == Dataset.new({EX.S2, EX.p2, EX.O2, EX.Graph})
end
test "delete_default_graph", %{dataset1: dataset1, dataset2: dataset2} do
assert Dataset.delete_default_graph(dataset1) == Dataset.new
assert Dataset.delete_default_graph(dataset2) == Dataset.new({EX.S2, EX.p2, EX.O2, EX.Graph})
end
test "a single graph", %{dataset1: dataset1, dataset2: dataset2} do
assert Dataset.delete_graph(dataset1, EX.Graph) == dataset1
assert Dataset.delete_graph(dataset2, EX.Graph) == dataset1
end
test "a list of graphs", %{dataset1: dataset1, dataset3: dataset3} do
assert Dataset.delete_graph(dataset3, [EX.Graph1, EX.Graph2]) == dataset1
assert Dataset.delete_graph(dataset3, [EX.Graph1, EX.Graph2, EX.Graph3]) == dataset1
assert Dataset.delete_graph(dataset3, [EX.Graph1, EX.Graph2, nil]) == Dataset.new
end
end
test "pop" do
assert Dataset.pop(Dataset.new) == {nil, Dataset.new}
{quad, dataset} = Dataset.new({EX.S, EX.p, EX.O, EX.Graph}) |> Dataset.pop
assert quad == {iri(EX.S), iri(EX.p), iri(EX.O), iri(EX.Graph)}
assert Enum.count(dataset.graphs) == 0
{{subject, predicate, object, _}, dataset} =
Dataset.new([{EX.S, EX.p, EX.O, EX.Graph}, {EX.S, EX.p, EX.O}])
|> Dataset.pop
assert {subject, predicate, object} == {iri(EX.S), iri(EX.p), iri(EX.O)}
assert Enum.count(dataset.graphs) == 1
{{subject, _, _, graph_context}, dataset} =
Dataset.new([{EX.S, EX.p, EX.O1, EX.Graph}, {EX.S, EX.p, EX.O2, EX.Graph}])
|> Dataset.pop
assert subject == iri(EX.S)
assert graph_context == iri(EX.Graph)
assert Enum.count(dataset.graphs) == 1
end
test "values/1" do
assert Dataset.new() |> Dataset.values() == %{}
assert Dataset.new([{EX.s1, EX.p, EX.o1}, {EX.s2, EX.p, EX.o2, EX.graph}])
|> Dataset.values() ==
%{
nil => %{
RDF.Term.value(EX.s1) => %{RDF.Term.value(EX.p) => [RDF.Term.value(EX.o1)]}
},
RDF.Term.value(EX.graph) => %{
RDF.Term.value(EX.s2) => %{RDF.Term.value(EX.p) => [RDF.Term.value(EX.o2)]},
}
}
end
test "values/2" do
mapping = fn
{:graph_name, graph_name} ->
graph_name
{:predicate, predicate} ->
predicate |> to_string() |> String.split("/") |> List.last() |> String.to_atom()
{_, term} ->
RDF.Term.value(term)
end
assert Dataset.new() |> Dataset.values(mapping) == %{}
assert Dataset.new([{EX.s1, EX.p, EX.o1}, {EX.s2, EX.p, EX.o2, EX.graph}])
|> Dataset.values(mapping) ==
%{
nil => %{
RDF.Term.value(EX.s1) => %{p: [RDF.Term.value(EX.o1)]}
},
EX.graph => %{
RDF.Term.value(EX.s2) => %{p: [RDF.Term.value(EX.o2)]},
}
}
end
test "equal/2" do
triple = {EX.S, EX.p, EX.O}
assert Dataset.equal?(Dataset.new(triple), Dataset.new(triple))
assert Dataset.equal?(Dataset.new(triple, name: EX.Dataset1),
Dataset.new(triple, name: EX.Dataset1))
assert Dataset.equal?(
Dataset.new(Graph.new(triple, name: EX.Graph1, prefixes: %{ex: EX})),
Dataset.new(Graph.new(triple, name: EX.Graph1, prefixes: %{ex: RDF}))
)
assert Dataset.equal?(
Dataset.new(Graph.new(triple, name: EX.Graph1, base_iri: EX.base)),
Dataset.new(Graph.new(triple, name: EX.Graph1, base_iri: EX.other_base))
)
refute Dataset.equal?(Dataset.new(triple), Dataset.new({EX.S, EX.p, EX.O2}))
refute Dataset.equal?(Dataset.new(triple, name: EX.Dataset1),
Dataset.new(triple, name: EX.Dataset2))
refute Dataset.equal?(
Dataset.new(Graph.new(triple, name: EX.Graph1)),
Dataset.new(Graph.new(triple, name: EX.Graph2))
)
end
describe "Enumerable protocol" do
test "Enum.count" do
assert Enum.count(Dataset.new(name: EX.foo)) == 0
assert Enum.count(Dataset.new {EX.S, EX.p, EX.O, EX.Graph}) == 1
assert Enum.count(Dataset.new [{EX.S, EX.p, EX.O1, EX.Graph}, {EX.S, EX.p, EX.O2}]) == 2
ds = Dataset.add(dataset(), [
{EX.Subject1, EX.predicate1, EX.Object1, EX.Graph},
{EX.Subject1, EX.predicate2, EX.Object2, EX.Graph},
{EX.Subject3, EX.predicate3, EX.Object3}
])
assert Enum.count(ds) == 3
end
test "Enum.member?" do
refute Enum.member?(Dataset.new, {iri(EX.S), EX.p, iri(EX.O), iri(EX.Graph)})
assert Enum.member?(Dataset.new({EX.S, EX.p, EX.O, EX.Graph}),
{EX.S, EX.p, EX.O, EX.Graph})
ds = Dataset.add(dataset(), [
{EX.Subject1, EX.predicate1, EX.Object1, EX.Graph},
{EX.Subject1, EX.predicate2, EX.Object2, EX.Graph},
{EX.Subject3, EX.predicate3, EX.Object3}
])
assert Enum.member?(ds, {EX.Subject1, EX.predicate1, EX.Object1, EX.Graph})
assert Enum.member?(ds, {EX.Subject1, EX.predicate2, EX.Object2, EX.Graph})
assert Enum.member?(ds, {EX.Subject3, EX.predicate3, EX.Object3})
end
test "Enum.reduce" do
ds = Dataset.add(dataset(), [
{EX.Subject1, EX.predicate1, EX.Object1, EX.Graph},
{EX.Subject1, EX.predicate2, EX.Object2},
{EX.Subject3, EX.predicate3, EX.Object3, EX.Graph}
])
assert ds == Enum.reduce(ds, dataset(),
fn(statement, acc) -> acc |> Dataset.add(statement) end)
end
end
describe "Collectable protocol" do
test "with a list of triples" do
triples = [
{EX.Subject, EX.predicate1, EX.Object1},
{EX.Subject, EX.predicate2, EX.Object2},
{EX.Subject, EX.predicate2, EX.Object2, EX.Graph}
]
assert Enum.into(triples, Dataset.new()) == Dataset.new(triples)
end
test "with a list of lists" do
lists = [
[EX.Subject, EX.predicate1, EX.Object1],
[EX.Subject, EX.predicate2, EX.Object2],
[EX.Subject, EX.predicate2, EX.Object2, EX.Graph]
]
assert Enum.into(lists, Dataset.new()) ==
Dataset.new(Enum.map(lists, &List.to_tuple/1))
end
end
describe "Access behaviour" do
test "access with the [] operator" do
assert Dataset.new[EX.Graph] == nil
assert Dataset.new({EX.S, EX.p, EX.O, EX.Graph})[EX.Graph] ==
Graph.new({EX.S, EX.p, EX.O}, name: EX.Graph)
end
end
end
| 47.462919
| 114
| 0.633005
|
79c67a5613f501bdaf41333b960e2dc64af15a03
| 2,335
|
ex
|
Elixir
|
clients/gke_hub/lib/google_api/gke_hub/v1/model/anthos_vm_sub_feature_spec.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/gke_hub/lib/google_api/gke_hub/v1/model/anthos_vm_sub_feature_spec.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/gke_hub/lib/google_api/gke_hub/v1/model/anthos_vm_sub_feature_spec.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GKEHub.V1.Model.AnthosVMSubFeatureSpec do
  @moduledoc """
  AnthosVMSubFeatureSpec contains the subfeature configuration for a membership/cluster.

  ## Attributes

  * `enabled` (*type:* `boolean()`, *default:* `nil`) - Indicates whether the subfeature should be enabled on the cluster or not. If set to true, the subfeature's control plane and resources will be installed in the cluster. If set to false, the oneof spec if present will be ignored and nothing will be installed in the cluster.
  * `migrateSpec` (*type:* `GoogleApi.GKEHub.V1.Model.MigrateSpec.t`, *default:* `nil`) - MigrateSpec repsents the configuration for Migrate subfeature.
  * `serviceMeshSpec` (*type:* `GoogleApi.GKEHub.V1.Model.ServiceMeshSpec.t`, *default:* `nil`) - ServiceMeshSpec repsents the configuration for Service Mesh subfeature.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :enabled => boolean() | nil,
          :migrateSpec => GoogleApi.GKEHub.V1.Model.MigrateSpec.t() | nil,
          :serviceMeshSpec => GoogleApi.GKEHub.V1.Model.ServiceMeshSpec.t() | nil
        }

  # `field/1,2` is a GoogleApi.Gax.ModelBase macro that registers JSON
  # (de)serialization metadata; `:as` names the nested model module.
  field(:enabled)
  field(:migrateSpec, as: GoogleApi.GKEHub.V1.Model.MigrateSpec)
  field(:serviceMeshSpec, as: GoogleApi.GKEHub.V1.Model.ServiceMeshSpec)
end
# Delegates Poison decoding to the generated model's decode/2 so nested
# fields are rebuilt as the proper model structs.
defimpl Poison.Decoder, for: GoogleApi.GKEHub.V1.Model.AnthosVMSubFeatureSpec do
  def decode(value, options) do
    GoogleApi.GKEHub.V1.Model.AnthosVMSubFeatureSpec.decode(value, options)
  end
end
# Encoding is generic across all Gax models, so it routes through ModelBase.
defimpl Poison.Encoder, for: GoogleApi.GKEHub.V1.Model.AnthosVMSubFeatureSpec do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 44.056604
| 331
| 0.747752
|
79c68b421730ad9067cbd37d0b0bdc05eb40bdf7
| 1,185
|
ex
|
Elixir
|
apps/chatter_web/lib/chatter_web/live/user_live/new.ex
|
barboza/chatter
|
16a56aea83eceb0fd1a709f3bc8d3a24812996cd
|
[
"MIT"
] | 2
|
2019-08-29T19:35:01.000Z
|
2019-08-31T04:08:03.000Z
|
apps/chatter_web/lib/chatter_web/live/user_live/new.ex
|
barboza/chatter
|
16a56aea83eceb0fd1a709f3bc8d3a24812996cd
|
[
"MIT"
] | 1
|
2021-03-09T16:10:46.000Z
|
2021-03-09T16:10:46.000Z
|
apps/chatter_web/lib/chatter_web/live/user_live/new.ex
|
barboza/chatter
|
16a56aea83eceb0fd1a709f3bc8d3a24812996cd
|
[
"MIT"
] | null | null | null |
defmodule ChatterWeb.UserLive.New do
  @moduledoc """
  Renders new user page, calls Accounts module to validate user data whenever
  the form changes, sends user params to be persisted and redirects to profile.
  """

  use Phoenix.LiveView

  alias Chatter.Accounts
  alias Chatter.Accounts.User
  alias ChatterWeb.Router.Helpers, as: Routes

  # Seed the socket with an empty-user changeset so the form renders blank.
  def mount(_session, socket) do
    changeset = Accounts.change_user(%User{})
    {:ok, assign(socket, %{changeset: changeset})}
  end

  # Rendering is delegated to the regular Phoenix view/template.
  def render(assigns) do
    Phoenix.View.render(ChatterWeb.UserView, "new.html", assigns)
  end

  # Live-validate the form: build a changeset and mark it with the :insert
  # action so errors become visible in the template.
  def handle_event("validate", %{"user" => user_params}, socket) do
    changeset =
      %User{}
      |> User.changeset(user_params)
      |> Map.put(:action, :insert)

    {:noreply, assign(socket, :changeset, changeset)}
  end

  # Persist the user; on success flash and redirect to the profile page,
  # otherwise re-render with the failing changeset.
  def handle_event("save", %{"user" => user_params}, socket) do
    case Accounts.create_user(user_params) do
      {:ok, user} ->
        path = Routes.live_path(socket, ChatterWeb.UserLive.Show, user)

        {:noreply,
         socket
         |> put_flash(:info, "User created")
         |> live_redirect(to: path)}

      {:error, %Ecto.Changeset{} = changeset} ->
        {:noreply, assign(socket, changeset: changeset)}
    end
  end
end
| 29.625
| 93
| 0.670042
|
79c6af7411629d0518e40670f00d1bd7c0cda286
| 966
|
ex
|
Elixir
|
lib/four_oh_four_finder.ex
|
wfleming/fourohfourfinder
|
ef670566182e8dcf70795de1ae313df26e440d56
|
[
"MIT"
] | 2
|
2016-06-03T17:25:41.000Z
|
2016-06-21T17:45:05.000Z
|
lib/four_oh_four_finder.ex
|
wfleming/fourohfourfinder
|
ef670566182e8dcf70795de1ae313df26e440d56
|
[
"MIT"
] | 4
|
2016-06-03T22:38:58.000Z
|
2016-06-09T03:09:33.000Z
|
lib/four_oh_four_finder.ex
|
wfleming/fourohfourfinder
|
ef670566182e8dcf70795de1ae313df26e440d56
|
[
"MIT"
] | null | null | null |
defmodule FourOhFourFinderApp do
  use Application

  # OTP application entry point. See
  # http://elixir-lang.org/docs/stable/elixir/Application.html for details.
  def start(_type, _args) do
    import Supervisor.Spec, warn: false

    # Only the HTTP endpoint is supervised for now; additional workers or
    # supervisors (e.g. worker(FourOhFourFinderApp.Worker, [arg1, arg2, arg3]))
    # would be appended to this list.
    endpoint = supervisor(FourOhFourFinderApp.Endpoint, [])

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html for other
    # strategies and supported options.
    Supervisor.start_link(
      [endpoint],
      strategy: :one_for_one,
      name: FourOhFourFinderApp.Supervisor
    )
  end

  # Propagates runtime configuration changes to the Phoenix endpoint whenever
  # the application is updated.
  def config_change(changed, _new, removed) do
    FourOhFourFinderApp.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 33.310345
| 73
| 0.736025
|
79c6f823ea7c914106ccadc2b332ed62394093d3
| 110
|
exs
|
Elixir
|
ping/test/ping_test.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
ping/test/ping_test.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
ping/test/ping_test.exs
|
enilsen16/elixir
|
b4d1d45858a25e4beb39e07de8685f3d93d6a520
|
[
"MIT"
] | null | null | null |
defmodule PingTest do
  use ExUnit.Case
  doctest Ping

  # Sanity check that the test suite itself runs.
  test "the truth" do
    sum = 1 + 1
    assert sum == 2
  end
end
| 12.222222
| 21
| 0.654545
|
79c6fe7f9bf84fe425a022fd383a4967bd1a8d0c
| 745
|
ex
|
Elixir
|
alcarin_api/apps/alcarin_web/lib/alcarin_web/channels/character_feed_channel.ex
|
alcarin-org/alcarin-elixir
|
a04d4e043790a7773745e0fba7098e1c06362896
|
[
"MIT"
] | null | null | null |
alcarin_api/apps/alcarin_web/lib/alcarin_web/channels/character_feed_channel.ex
|
alcarin-org/alcarin-elixir
|
a04d4e043790a7773745e0fba7098e1c06362896
|
[
"MIT"
] | 3
|
2018-05-26T10:36:22.000Z
|
2018-05-26T13:48:36.000Z
|
alcarin_api/apps/alcarin_web/lib/alcarin_web/channels/character_feed_channel.ex
|
alcarin-org/alcarin-elixir
|
a04d4e043790a7773745e0fba7098e1c06362896
|
[
"MIT"
] | null | null | null |
defmodule AlcarinWeb.CharacterFeedChannel do
  use AlcarinWeb, :channel

  alias Alcarin.GameEvents

  # Lobby topic: any client may join.
  # NOTE(review): the `%{test: 5}` join reply looks like debug leftover — confirm
  # whether clients actually rely on this payload.
  def join("character-feed:lobby", _message, socket) do
    {:ok, %{test: 5}, socket}
  end

  # Per-character topics: every join is currently rejected.
  def join("character-feed:" <> _character_id, _params, _socket) do
    {:error, %{reason: "unauthorized"}}
  end

  # Persist a "say" message as a game event. Replies :ok on success; on a
  # validation failure, replies with the changeset errors flattened by
  # parse_changeset_errors/1 (presumably injected via `use AlcarinWeb, :channel`
  # — verify, it is not defined in this module).
  def handle_in("communication:say", %{"content" => new_msg}, socket) do
    case GameEvents.create_speak_event(new_msg) do
      {:ok, _game_event} ->
        {:reply, :ok, socket}

      {:error, %Ecto.Changeset{} = changeset} ->
        {:reply, {:error, %{errors: parse_changeset_errors(changeset)}}, socket}
    end
  end

  # Catch-all: unknown message types get an error reply instead of crashing
  # the channel process.
  def handle_in(_, _, socket) do
    {:reply, {:error, %{error: "Unknown message type"}}, socket}
  end
end
| 26.607143
| 80
| 0.651007
|
79c707b382939cf9f310e894e19d90bb62f0c686
| 1,139
|
exs
|
Elixir
|
config/config.exs
|
kreeti/ueberauth_linkedin
|
6a16e025928cd8e194aad472d879458737e70a99
|
[
"MIT"
] | 9
|
2016-04-17T21:50:24.000Z
|
2021-04-26T11:22:46.000Z
|
config/config.exs
|
kreeti/ueberauth_linkedin
|
6a16e025928cd8e194aad472d879458737e70a99
|
[
"MIT"
] | 9
|
2016-06-02T01:25:52.000Z
|
2021-10-01T02:30:08.000Z
|
config/config.exs
|
kreeti/ueberauth_linkedin
|
6a16e025928cd8e194aad472d879458737e70a99
|
[
"MIT"
] | 35
|
2016-05-29T23:16:03.000Z
|
2022-03-06T09:57:28.000Z
|
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :ueberauth_linkedin, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:ueberauth_linkedin, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.741935
| 73
| 0.755926
|
79c75cdb47a3d783ba99ea1661222b2f1423423e
| 3,615
|
ex
|
Elixir
|
clients/cloud_search/lib/google_api/cloud_search/v1/model/date_operator_options.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_search/lib/google_api/cloud_search/v1/model/date_operator_options.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_search/lib/google_api/cloud_search/v1/model/date_operator_options.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudSearch.V1.Model.DateOperatorOptions do
  @moduledoc """
  Optional. Provides a search operator for date properties.
  Search operators let users restrict the query to specific fields relevant
  to the type of item being searched.

  ## Attributes

  *   `greaterThanOperatorName` (*type:* `String.t`, *default:* `nil`) - Indicates the operator name required in the query in order to isolate the
      date property using the greater-than operator. For example, if
      greaterThanOperatorName is *closedafter* and the property's name is
      *closeDate*, then queries like *closedafter:&lt;value&gt;*
      show results only where the value of the property named *closeDate* is
      later than *&lt;value&gt;*.
      The operator name can only contain lowercase letters (a-z).
      The maximum length is 32 characters.
  *   `lessThanOperatorName` (*type:* `String.t`, *default:* `nil`) - Indicates the operator name required in the query in order to isolate the
      date property using the less-than operator. For example, if
      lessThanOperatorName is *closedbefore* and the property's name is
      *closeDate*, then queries like *closedbefore:&lt;value&gt;*
      show results only where the value of the property named *closeDate* is
      earlier than *&lt;value&gt;*.
      The operator name can only contain lowercase letters (a-z).
      The maximum length is 32 characters.
  *   `operatorName` (*type:* `String.t`, *default:* `nil`) - Indicates the actual string required in the query in order to isolate the
      date property. For example, suppose an issue tracking schema object
      has a property named *closeDate* that specifies an operator with an
      operatorName of *closedon*. For searches on that data, queries like
      *closedon:&lt;value&gt;* show results only where the value of the
      *closeDate* property matches *&lt;value&gt;*. By contrast, a
      search that uses the same *&lt;value&gt;* without an operator returns
      all items where *&lt;value&gt;* matches the value of any String
      properties or text within the content field for the indexed datasource.
      The operator name can only contain lowercase letters (a-z).
      The maximum length is 32 characters.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :greaterThanOperatorName => String.t(),
          :lessThanOperatorName => String.t(),
          :operatorName => String.t()
        }

  # `field/1` (from GoogleApi.Gax.ModelBase) registers the attribute for JSON
  # (de)serialization; all three attributes are plain strings.
  field(:greaterThanOperatorName)
  field(:lessThanOperatorName)
  field(:operatorName)
end
# Delegates Poison decoding to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.CloudSearch.V1.Model.DateOperatorOptions do
  def decode(value, options) do
    GoogleApi.CloudSearch.V1.Model.DateOperatorOptions.decode(value, options)
  end
end
# Encoding is generic across all Gax models, so it routes through ModelBase.
defimpl Poison.Encoder, for: GoogleApi.CloudSearch.V1.Model.DateOperatorOptions do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 45.759494
| 146
| 0.728077
|
79c776089da300f94c080a9512fd5b0681ac538b
| 1,095
|
exs
|
Elixir
|
mix.exs
|
Apelsinka223/test_match
|
9fbfe3c06faf15dbe4f30f512d02fd36cc948c29
|
[
"MIT"
] | 21
|
2018-02-19T13:23:54.000Z
|
2021-03-02T15:04:43.000Z
|
mix.exs
|
Apelsinka223/test_match
|
9fbfe3c06faf15dbe4f30f512d02fd36cc948c29
|
[
"MIT"
] | 2
|
2019-04-27T11:25:21.000Z
|
2021-03-05T21:09:09.000Z
|
mix.exs
|
Apelsinka223/test_match
|
9fbfe3c06faf15dbe4f30f512d02fd36cc948c29
|
[
"MIT"
] | null | null | null |
defmodule TestMatch.MixProject do
  use Mix.Project

  # Coveralls mix tasks must run in the :test environment.
  @coveralls_cli_env [
    coveralls: :test,
    "coveralls.detail": :test,
    "coveralls.post": :test,
    "coveralls.html": :test,
    "coveralls.travis": :test
  ]

  def project do
    [
      app: :test_match,
      version: "3.0.5",
      elixir: "~> 1.10",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: @coveralls_cli_env
    ]
  end

  # No extra applications or supervision tree is started.
  def application, do: []

  defp deps do
    [
      {:ex_doc, "~> 0.19", only: :dev},
      {:excoveralls, github: "parroty/excoveralls", only: :test},
      {:inch_ex, "~> 2.0", only: :docs}
    ]
  end

  defp description, do: "Recursive matching"

  # Hex package metadata.
  defp package do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE*", ".formatter.exs"],
      maintainers: ["Anastasiya Dyachenko"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/Apelsinka223/test_match"}
    ]
  end
end
| 21.9
| 73
| 0.551598
|
79c7a5ffbcbc10b57c3b706547d4ced29bd70558
| 6,829
|
exs
|
Elixir
|
apps/muster/test/muster/game_test.exs
|
eugenebolshakov/muster
|
dd5465da2c9c1a64817e271b297de95fa31c07d9
|
[
"Unlicense"
] | null | null | null |
apps/muster/test/muster/game_test.exs
|
eugenebolshakov/muster
|
dd5465da2c9c1a64817e271b297de95fa31c07d9
|
[
"Unlicense"
] | null | null | null |
apps/muster/test/muster/game_test.exs
|
eugenebolshakov/muster
|
dd5465da2c9c1a64817e271b297de95fa31c07d9
|
[
"Unlicense"
] | null | null | null |
defmodule Muster.GameTest do
  use ExUnit.Case

  import Muster.TestHelper

  alias Muster.Game

  # A 6x6 grid with no tiles; the neutral starting state for move tests.
  @empty_grid [
    [0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0]
  ]

  describe "new/0" do
    test "returns a grid with a single tile of value 2" do
      assert %{grid: [tile]} = Game.new()
      assert tile.value == 2
      assert tile.row in 0..5
      assert tile.column in 0..5
      assert tile.id
    end

    test "returns a game that is waiting for players" do
      game = Game.new()
      assert game.status == :waiting_for_players
    end
  end

  describe "add_player/2" do
    test "adds the player to the list" do
      game = Game.new()
      assert game.players == []

      assert {:ok, game} = Game.add_player(game, :player1)
      assert game.players == [:player1]
    end

    test "starts the game when 2 players are added" do
      game = Game.new()
      assert game.players == []
      assert game.status == :waiting_for_players
      refute game.current_player

      assert {:ok, game} = Game.add_player(game, :player1)
      assert game.players == [:player1]
      assert game.status == :waiting_for_players
      refute game.current_player

      assert {:ok, game} = Game.add_player(game, :player2)
      assert game.players == [:player1, :player2]
      assert game.status == :on
      assert game.current_player == :player1
    end

    test "returns error if the game is already on" do
      game = Game.new()
      assert {:ok, game} = Game.add_player(game, :player1)
      assert {:ok, game} = Game.add_player(game, :player2)
      assert Game.add_player(game, :player3) == {:error, :game_is_on}
    end
  end

  describe "move/2" do
    setup :start_game

    test "moves tiles and adds a tile of value 1", %{game: game} do
      game = %{
        game
        | grid:
            tiles([
              [0, 1, 0, 2, 0, 0],
              [1, 1, 0, 0, 1, 0],
              [0, 0, 0, 2, 3, 0],
              [3, 3, 6, 0, 0, 0],
              [0, 0, 0, 0, 0, 0],
              [0, 1, 0, 2, 0, 0]
            ])
      }

      expected_grid =
        tiles([
          [1, 2, 0, 0, 0, 0],
          [2, 1, 0, 0, 0, 0],
          [2, 3, 0, 0, 0, 0],
          [6, 6, 0, 0, 0, 0],
          [0, 0, 0, 0, 0, 0],
          [1, 2, 0, 0, 0, 0]
        ])

      # After the left move, exactly one extra tile (value 1) appears at a
      # random position, so we diff against the expected grid.
      assert {:ok, %{grid: grid}} = Game.move(game, :player1, :left)
      assert [new_tile] = remove_ids(grid) -- expected_grid
      assert new_tile.value == 1
    end

    test "assigns unique ids to tiles", %{game: game} do
      game = %{
        game
        | grid:
            tiles([
              [0, 1, 0, 2, 0, 0],
              [1, 1, 0, 0, 1, 0],
              [0, 0, 0, 2, 3, 0],
              [3, 3, 6, 0, 0, 0],
              [0, 0, 0, 0, 0, 0],
              [0, 1, 0, 2, 0, 0]
            ])
      }

      assert {:ok, %{grid: grid}} = Game.move(game, :player1, :left)
      assert Enum.map(grid, & &1.id) |> Enum.uniq() |> length == length(grid)
    end

    test "stores merged tiles", %{game: game} do
      game =
        set_grid(game, [
          [1, 1, 2, 0, 0, 0],
          [0, 0, 0, 0, 0, 0],
          [0, 0, 0, 0, 0, 0],
          [0, 0, 0, 0, 0, 0],
          [0, 0, 0, 0, 0, 0],
          [0, 0, 0, 0, 0, 0]
        ])

      assert {:ok, game} = Game.move(game, :player1, :left)
      assert Enum.at(game.grid, 0).value == 2
      assert Enum.at(game.grid, 1).value == 2
      assert Enum.map(game.merged_tiles, & &1.value) == [1, 1]

      assert {:ok, game} = Game.move(game, :player2, :left)
      assert Enum.at(game.grid, 0).value == 4
      assert Enum.map(game.merged_tiles, & &1.value) == [1, 1, 2, 2]
    end

    test "game is won when a tile reaches the value 2048", %{game: game} do
      game = %{
        game
        | grid:
            tiles([
              [1024, 512, 512, 0, 0, 0],
              [0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0],
              [0, 0, 0, 0, 0, 0]
            ])
      }

      # First move: 512 + 512 merge into 1024; the game is still on.
      assert {:ok, %{grid: [tile1 | [tile2 | _]]} = game} =
               Game.move(game, game.current_player, :left)

      assert tile1.value == 1024
      assert tile2.value == 1024
      assert game.status == :on

      # Second move: 1024 + 1024 merge into 2048; the game is won.
      assert {:ok, %{grid: [tile | _]} = game} = Game.move(game, game.current_player, :left)
      assert tile.value == 2048
      assert game.status == :won
    end

    test "game is lost when there is no space to add a new tile", %{game: game} do
      game = %{
        game
        | grid:
            tiles([
              [1, 2, 3, 4, 5, 6],
              [1, 2, 3, 4, 5, 6],
              [1, 2, 3, 4, 5, 6],
              [1, 2, 3, 4, 5, 6],
              [1, 2, 3, 4, 5, 6],
              [1, 2, 3, 4, 5, 0]
            ])
      }

      # The single empty cell gets filled by the new tile; the grid is full
      # but the game is not lost until the next move can place nothing.
      assert {:ok, game} = Game.move(game, game.current_player, :left)
      assert length(game.grid) == 36
      assert List.last(game.grid).value == 1
      assert game.status == :on

      assert {:ok, game} = Game.move(game, game.current_player, :left)
      assert game.status == :lost
    end

    test "toggles current player" do
      game = %Game{
        status: :on,
        players: [:player1, :player2],
        current_player: :player1,
        grid: tiles(@empty_grid)
      }

      assert {:ok, game} = Game.move(game, :player1, :left)
      assert game.current_player == :player2

      assert {:ok, game} = Game.move(game, :player2, :left)
      assert game.current_player == :player1
    end

    test "returns error if game is not on", %{game: game} do
      # NOTE(review): status is a string here while every other test uses
      # atoms (:on, :stopped, ...). Any non-:on value triggers the error, so
      # the test passes, but confirm whether this should be :stopped.
      game = %{game | status: "stopped"}
      assert Game.move(game, game.current_player, :left) == {:error, :player_cant_move}
    end

    test "returns error if it's not player's turn", %{game: game} do
      player = Enum.find(game.players, &(&1 != game.current_player))
      assert Game.move(game, player, :left) == {:error, :player_cant_move}
    end
  end

  describe "stop/1" do
    test "stops the game" do
      game = Game.new()
      game = Game.stop(game)
      assert game.status == :stopped
    end
  end

  # Fixed describe name: was "endded?/1" (typo); it covers Game.ended?/1.
  describe "ended?/1" do
    test "returns true if game has ended" do
      game = Game.new()
      refute Game.ended?(%{game | status: :waiting_for_players})
      refute Game.ended?(%{game | status: :on})
      assert Game.ended?(%{game | status: :won})
      assert Game.ended?(%{game | status: :lost})
      assert Game.ended?(%{game | status: :stopped})
    end
  end

  # Shared setup: a running two-player game on an empty grid.
  defp start_game(context) do
    game = %Game{
      status: :on,
      players: [:player1, :player2],
      current_player: :player1,
      grid: tiles(@empty_grid)
    }

    Map.put(context, :game, game)
  end

  # Strips the randomly-assigned ids so grids can be compared structurally.
  defp remove_ids(tiles) do
    Enum.map(tiles, &%{&1 | id: nil})
  end
end
| 27.53629
| 92
| 0.500659
|
79c7e703cf6646ba3b095e14416f2d7041986f8c
| 1,365
|
ex
|
Elixir
|
lib/mail_slurp_api/deserializer.ex
|
sumup-bank/mailslurp-client-elixir
|
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
|
[
"MIT"
] | 1
|
2021-06-17T18:07:49.000Z
|
2021-06-17T18:07:49.000Z
|
lib/mail_slurp_api/deserializer.ex
|
sumup-bank/mailslurp-client-elixir
|
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
|
[
"MIT"
] | null | null | null |
lib/mail_slurp_api/deserializer.ex
|
sumup-bank/mailslurp-client-elixir
|
87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8
|
[
"MIT"
] | 1
|
2021-03-16T18:55:56.000Z
|
2021-03-16T18:55:56.000Z
|
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule MailSlurpAPI.Deserializer do
  @moduledoc """
  Helper functions for deserializing responses into models
  """

  @doc """
  Update the provided model with a deserialization of a nested value.

  `field` names the key in `model` to rewrite. The third argument selects the
  strategy: `:list` / `:struct` / `:map` decode via Poison into instances of
  the model module `mod`; `:date` parses an ISO-8601 string into a `DateTime`
  (leaving the model untouched when the value is not a string or fails to
  parse), and ignores `mod`.
  """
  # Fixed spec: the 2nd/3rd parameters were typed as the literal atom `:atom`
  # instead of the built-in `atom()`, and `mod` is a module name, not a struct.
  @spec deserialize(struct(), atom(), atom(), module() | nil, keyword()) :: struct()
  def deserialize(model, field, :list, mod, options) do
    model
    |> Map.update!(field, &(Poison.Decode.decode(&1, Keyword.merge(options, [as: [struct(mod)]]))))
  end

  def deserialize(model, field, :struct, mod, options) do
    model
    |> Map.update!(field, &(Poison.Decode.decode(&1, Keyword.merge(options, [as: struct(mod)]))))
  end

  def deserialize(model, field, :map, mod, options) do
    model
    |> Map.update!(field, &(Map.new(&1, fn {key, val} -> {key, Poison.Decode.decode(val, Keyword.merge(options, [as: struct(mod)]))} end)))
  end

  def deserialize(model, field, :date, _, _options) do
    value = Map.get(model, field)

    case is_binary(value) do
      true ->
        case DateTime.from_iso8601(value) do
          {:ok, datetime, _offset} ->
            Map.put(model, field, datetime)

          _ ->
            # Unparseable string: keep the original value rather than raising.
            model
        end

      false ->
        model
    end
  end
end
| 35
| 139
| 0.632234
|
79c7ef738413c04b1f4120ab3477e2df6b240bc4
| 1,298
|
ex
|
Elixir
|
elixir/list-ops/lib/list_ops.ex
|
herminiotorres/exercism
|
82173fcf1f09c27da0134f746a799840aa5eac05
|
[
"MIT"
] | null | null | null |
elixir/list-ops/lib/list_ops.ex
|
herminiotorres/exercism
|
82173fcf1f09c27da0134f746a799840aa5eac05
|
[
"MIT"
] | null | null | null |
elixir/list-ops/lib/list_ops.ex
|
herminiotorres/exercism
|
82173fcf1f09c27da0134f746a799840aa5eac05
|
[
"MIT"
] | 1
|
2021-03-15T11:02:40.000Z
|
2021-03-15T11:02:40.000Z
|
defmodule ListOps do
  # Please don't use any external modules (especially List or Enum) in your
  # implementation. The point of this exercise is to create these basic
  # functions yourself. You may use basic Kernel functions (like `Kernel.+/2`
  # for adding numbers), but please do not use Kernel functions for Lists like
  # `++`, `--`, `hd`, `tl`, `in`, and `length`.

  # Counting is a fold that ignores the element and increments an accumulator.
  @spec count(list) :: non_neg_integer
  def count(l), do: reduce(l, 0, fn _elem, total -> total + 1 end)

  # Prepending each element while folding left naturally reverses the list.
  @spec reverse(list) :: list
  def reverse(l), do: reduce(l, [], fn elem, rev -> [elem | rev] end)

  # Direct structural recursion keeps the original order without a reverse pass.
  @spec map(list, (any -> any)) :: list
  def map([], _f), do: []
  def map([head | tail], f), do: [f.(head) | map(tail, f)]

  @spec filter(list, (any -> as_boolean(term))) :: list
  def filter([], _f), do: []

  def filter([head | tail], f) do
    if f.(head), do: [head | filter(tail, f)], else: filter(tail, f)
  end

  @type acc :: any

  # Left fold: applies f to each element and the running accumulator.
  @spec reduce(list, acc, (any, acc -> acc)) :: acc
  def reduce([], acc, _f), do: acc
  def reduce([head | tail], acc, f), do: reduce(tail, f.(head, acc), f)

  # Reverse the first list, then cons its elements onto the second.
  @spec append(list, list) :: list
  def append(a, b) do
    a
    |> reverse()
    |> reduce(b, fn elem, rest -> [elem | rest] end)
  end

  # Append the sublists right-to-left so the result keeps their order.
  @spec concat([[any]]) :: [any]
  def concat(ll) do
    ll
    |> reverse()
    |> reduce([], &append/2)
  end
end
| 23.6
| 78
| 0.585516
|
79c82b02078e4fcad2a2b209e51a57eb3e19b7d7
| 351
|
exs
|
Elixir
|
priv/repo/seeds.exs
|
coltonw/majudge
|
4f81a66abe6a2e82f42131982e7a9b26951b9124
|
[
"MIT"
] | null | null | null |
priv/repo/seeds.exs
|
coltonw/majudge
|
4f81a66abe6a2e82f42131982e7a9b26951b9124
|
[
"MIT"
] | 1
|
2021-05-10T04:23:56.000Z
|
2021-05-10T04:23:56.000Z
|
priv/repo/seeds.exs
|
coltonw/majudge
|
4f81a66abe6a2e82f42131982e7a9b26951b9124
|
[
"MIT"
] | null | null | null |
# Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Majudge.Repo.insert!(%Majudge.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.25
| 61
| 0.706553
|
79c877b114c7be17aa411519381ed91e695561a9
| 482
|
ex
|
Elixir
|
elixir/gdc.ex
|
felipe-araujo/functional-programming
|
f69ed723f17fba4fd5a956ee27935a770252b7ce
|
[
"MIT"
] | null | null | null |
elixir/gdc.ex
|
felipe-araujo/functional-programming
|
f69ed723f17fba4fd5a956ee27935a770252b7ce
|
[
"MIT"
] | null | null | null |
elixir/gdc.ex
|
felipe-araujo/functional-programming
|
f69ed723f17fba4fd5a956ee27935a770252b7ce
|
[
"MIT"
] | null | null | null |
defmodule Solution do
  @moduledoc """
  Greatest common divisor of two positive integers.
  """

  # BUG FIX: the previous implementation recursed on the *quotient* instead of
  # the *remainder* (gdc(divisor, quotient, rm)), which is not the Euclidean
  # algorithm and returned wrong answers, e.g. gdc(35, 10) gave {:ok, 1}
  # instead of {:ok, 5}. Replaced with classic Euclid.

  @doc """
  Returns `{:ok, gcd}` for two positive integers.
  """
  @spec gdc(pos_integer, pos_integer) :: {:ok, pos_integer}
  def gdc(a, b), do: {:ok, euclid(a, b)}

  @doc false
  # Backward-compatible wrapper for the old 3-arity API; the seed remainder is
  # no longer needed.
  def gdc(a, b, _prev_rem), do: gdc(a, b)

  # Euclid: gcd(a, b) = gcd(b, a rem b), terminating when b reaches 0.
  defp euclid(a, 0), do: a
  defp euclid(a, b), do: euclid(b, rem(a, b))
end
# Read one line of two whitespace-separated integers from stdin, compute
# their GCD, and print it.
[a, b] =
  IO.read(:stdio, :line)
  |> String.trim()
  |> String.split()
  |> Enum.map(&String.to_integer/1)

{:ok, gdc} = Solution.gdc(a, b)
IO.puts(gdc)
| 18.538462
| 47
| 0.56639
|
79c88bdb18d6aa1e3090316df4cad18e235356d4
| 210
|
ex
|
Elixir
|
lib/phone/mh.ex
|
net/phone
|
18e1356d2f8d32fe3f95638c3c44bceab0164fb2
|
[
"Apache-2.0"
] | null | null | null |
lib/phone/mh.ex
|
net/phone
|
18e1356d2f8d32fe3f95638c3c44bceab0164fb2
|
[
"Apache-2.0"
] | null | null | null |
lib/phone/mh.ex
|
net/phone
|
18e1356d2f8d32fe3f95638c3c44bceab0164fb2
|
[
"Apache-2.0"
] | null | null | null |
defmodule Phone.MH do
  @moduledoc false
  use Helper.Country

  # Numbers start with country code 692. The second capture group is empty —
  # presumably a placeholder keeping capture positions consistent with other
  # country modules (area code / subscriber); verify against Helper.Country.
  def regex, do: ~r/^(692)()(.{6,7})/
  def country, do: "Marshall Islands"
  def a2, do: "MH"
  def a3, do: "MHL"

  # `matcher/2` macro comes from `use Helper.Country`; registers the "692"
  # prefix as handled by this module's :regex.
  matcher :regex, ["692"]
end
| 16.153846
| 37
| 0.609524
|
79c88e3449c56ace7a3bcfd85c88aa8019ff4968
| 727
|
exs
|
Elixir
|
apps/estuary/config/config.exs
|
calebcarroll1/smartcitiesdata
|
b0f03496f6c592c82ba14aebf6c5996311cf3cd0
|
[
"Apache-2.0"
] | 26
|
2019-09-20T23:54:45.000Z
|
2020-08-20T14:23:32.000Z
|
apps/estuary/config/config.exs
|
calebcarroll1/smartcitiesdata
|
b0f03496f6c592c82ba14aebf6c5996311cf3cd0
|
[
"Apache-2.0"
] | 757
|
2019-08-15T18:15:07.000Z
|
2020-09-18T20:55:31.000Z
|
apps/estuary/config/config.exs
|
calebcarroll1/smartcitiesdata
|
b0f03496f6c592c82ba14aebf6c5996311cf3cd0
|
[
"Apache-2.0"
] | 9
|
2019-11-12T16:43:46.000Z
|
2020-03-25T16:23:16.000Z
|
use Mix.Config

# Core Estuary settings: the Kafka topic events are read from, the schema and
# table they are written to, and the Pipeline reader/writer modules that do
# the moving.
config :estuary,
  topic: "event-stream",
  schema_name: "default",
  table_name: "event_stream",
  topic_reader: Pipeline.Reader.TopicReader,
  table_writer: Pipeline.Writer.TableWriter,
  connection: :estuary_elsa

# Configures the endpoint
config :estuary, EstuaryWeb.Endpoint,
  url: [host: "localhost"],
  # it should be overwriten as part of deploying the platform.
  secret_key_base: "4gV3z+mTeMzrnd+E1lI9xFei/79xYDJ6kN25HlD70cjtspfJ/WIK1mi3sxWIco1v",
  render_errors: [view: EstuaryWeb.ErrorView, accepts: ~w(html json)],
  pubsub_server: Estuary.PubSub,
  check_origin: ["http://localhost:4000", "https://*.smartcolumbusos.com"]

config :phoenix, :json_library, Jason

# Environment-specific overrides (dev/test/prod) are loaded last and win.
import_config "#{Mix.env()}.exs"
| 31.608696
| 86
| 0.756534
|
79c8c932fe59493cad7ffcd3dbe84233c64f103e
| 539
|
ex
|
Elixir
|
lib/plaid/liabilities/mortgage/interest_rate.ex
|
ethangunderson/elixir-plaid
|
53aa0a87a4a837df6a2d15684870e7a58a003db6
|
[
"MIT"
] | 16
|
2021-03-09T02:29:32.000Z
|
2022-03-13T07:18:03.000Z
|
lib/plaid/liabilities/mortgage/interest_rate.ex
|
ethangunderson/elixir-plaid
|
53aa0a87a4a837df6a2d15684870e7a58a003db6
|
[
"MIT"
] | 5
|
2021-04-24T20:38:14.000Z
|
2022-03-19T22:03:09.000Z
|
lib/plaid/liabilities/mortgage/interest_rate.ex
|
ethangunderson/elixir-plaid
|
53aa0a87a4a837df6a2d15684870e7a58a003db6
|
[
"MIT"
] | 2
|
2021-06-11T02:15:01.000Z
|
2022-03-15T18:39:59.000Z
|
defmodule Plaid.Liabilities.Mortgage.InterestRate do
  @moduledoc """
  [Plaid Liabilities Mortgage Interest Rate Schema.](https://plaid.com/docs/api/products/#liabilities-get-response-interest-rate)
  """

  @behaviour Plaid.Castable

  @type t :: %__MODULE__{
          percentage: number() | nil,
          type: String.t() | nil
        }

  defstruct [:percentage, :type]

  # Build the struct from a string-keyed API response map; missing keys
  # default to nil.
  @impl true
  def cast(generic_map) do
    fields = %{
      percentage: Map.get(generic_map, "percentage"),
      type: Map.get(generic_map, "type")
    }

    struct(__MODULE__, fields)
  end
end
| 20.730769
| 128
| 0.641929
|
79c8cf27cb5c0720200437345ac7247b539a0b79
| 1,185
|
ex
|
Elixir
|
lib/uro/application.ex
|
V-Sekai/uro
|
0b23da65d5c7e459efcd6b2c3d9bdf91c533b737
|
[
"MIT"
] | 1
|
2022-01-11T04:05:39.000Z
|
2022-01-11T04:05:39.000Z
|
lib/uro/application.ex
|
V-Sekai/uro
|
0b23da65d5c7e459efcd6b2c3d9bdf91c533b737
|
[
"MIT"
] | 35
|
2021-02-10T08:18:57.000Z
|
2021-05-06T17:19:50.000Z
|
lib/uro/application.ex
|
V-Sekai/uro
|
0b23da65d5c7e459efcd6b2c3d9bdf91c533b737
|
[
"MIT"
] | null | null | null |
defmodule Uro.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Uro.Supervisor]
    Supervisor.start_link(children(), opts)
  end

  # Child processes supervised at application start.
  defp children do
    [
      # Ecto repository
      Uro.Repo,
      # Phoenix endpoint
      UroWeb.Endpoint,
      # Pow's Mnesia-backed credential cache.
      # In a distributed system one would also add:
      #   {Pow.Store.Backend.MnesiaCache, extra_db_nodes: Node.list()},
      #   Pow.Store.Backend.MnesiaCache.Unsplit  # recover from netsplit
      Pow.Store.Backend.MnesiaCache
    ]
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  @impl true
  def config_change(changed, _new, removed) do
    UroWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 29.625
| 69
| 0.688608
|
79c8d62646709a8eac379a7d1d8530a8ebd58b20
| 4,011
|
ex
|
Elixir
|
clients/safe_browsing/lib/google_api/safe_browsing/v4/api/threat_list_updates.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/safe_browsing/lib/google_api/safe_browsing/v4/api/threat_list_updates.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/safe_browsing/lib/google_api/safe_browsing/v4/api/threat_list_updates.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SafeBrowsing.V4.Api.ThreatListUpdates do
  @moduledoc """
  API calls for all endpoints tagged `ThreatListUpdates`.
  """

  alias GoogleApi.SafeBrowsing.V4.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version reported with each request; falls back to "" when
  # not running under Mix.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Fetches the most recent threat list updates. A client can request updates for multiple lists at once.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.SafeBrowsing.V4.Connection.t`) - Connection to server
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:body` (*type:* `GoogleApi.SafeBrowsing.V4.Model.GoogleSecuritySafebrowsingV4FetchThreatListUpdatesRequest.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.SafeBrowsing.V4.Model.GoogleSecuritySafebrowsingV4FetchThreatListUpdatesResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec safebrowsing_threat_list_updates_fetch(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok,
           GoogleApi.SafeBrowsing.V4.Model.GoogleSecuritySafebrowsingV4FetchThreatListUpdatesResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def safebrowsing_threat_list_updates_fetch(connection, optional_params \\ [], opts \\ []) do
    # Whitelist of recognized optional params and where each goes in the
    # request (:query string vs request :body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    # Build the POST request, then execute it and decode the JSON body into
    # the generated response model.
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v4/threatListUpdates:fetch", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++
        [
          struct:
            %GoogleApi.SafeBrowsing.V4.Model.GoogleSecuritySafebrowsingV4FetchThreatListUpdatesResponse{}
        ]
    )
  end
end
| 43.129032
| 196
| 0.663176
|
79c8dd236dd2ccbfad7e2ea89158fd06e5827e4e
| 691
|
ex
|
Elixir
|
test/support/assertions.ex
|
eahanson/medic
|
01f35f2aa22e7120cf1bd492e6e5400a29224791
|
[
"MIT"
] | 3
|
2021-06-18T18:42:35.000Z
|
2022-02-09T01:54:58.000Z
|
test/support/assertions.ex
|
eahanson/medic
|
01f35f2aa22e7120cf1bd492e6e5400a29224791
|
[
"MIT"
] | 3
|
2021-06-17T19:02:32.000Z
|
2021-06-17T19:44:35.000Z
|
test/support/assertions.ex
|
eahanson/medic
|
01f35f2aa22e7120cf1bd492e6e5400a29224791
|
[
"MIT"
] | 1
|
2022-03-10T19:16:14.000Z
|
2022-03-10T19:16:14.000Z
|
defmodule Test.Support.Assertions do
  @moduledoc """
  Custom test assertions that compare structs and plain maps by value.

  Structs (and lists of structs) on either side are converted to plain
  maps via `Map.from_struct/1` before comparison, so a struct can be
  asserted equal to a map literal without constructing the struct.
  """

  require ExUnit.Assertions

  @doc """
  Asserts that `left == right`, first converting any structs (or lists
  of structs) on either side into plain maps.

  Returns `left`, so the call can be threaded through a pipeline.
  Raises `ExUnit.AssertionError` when the values differ.
  """
  @spec assert_eq(any(), any()) :: any()
  def assert_eq([first | _] = left, right) when is_struct(first),
    do: assert_eq(Enum.map(left, &Map.from_struct/1), right)

  def assert_eq(left, [first | _] = right) when is_struct(first),
    do: assert_eq(left, Enum.map(right, &Map.from_struct/1))

  def assert_eq(left, right) when is_struct(left), do: assert_eq(Map.from_struct(left), right)
  def assert_eq(left, right) when is_struct(right), do: assert_eq(left, Map.from_struct(right))

  # Base clause: plain-value comparison.
  def assert_eq(left, right) do
    ExUnit.Assertions.assert(left == right)
    left
  end
end
| 30.043478
| 95
| 0.69754
|
79c8eb4423d63c23327cef667ad039e9a1acde06
| 4,338
|
ex
|
Elixir
|
lib/elixir/lib/range.ex
|
schabou/elixir
|
7634f3f4c9380bdadd0e74eb76e9f7ae8cf27a1d
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/range.ex
|
schabou/elixir
|
7634f3f4c9380bdadd0e74eb76e9f7ae8cf27a1d
|
[
"Apache-2.0"
] | null | null | null |
lib/elixir/lib/range.ex
|
schabou/elixir
|
7634f3f4c9380bdadd0e74eb76e9f7ae8cf27a1d
|
[
"Apache-2.0"
] | null | null | null |
defmodule Range do
  @moduledoc """
  Defines a range.

  A range represents a sequence of one or many,
  ascending or descending, consecutive integers.

  Ranges can be either increasing (`first <= last`) or
  decreasing (`first > last`). Ranges are also always
  inclusive.

  A range is represented internally as a struct. However,
  the most common form of creating and matching on ranges
  is via the `../2` macro, auto-imported from `Kernel`:

      iex> range = 1..3
      1..3
      iex> first..last = range
      iex> first
      1
      iex> last
      3

  A range implements the `Enumerable` protocol, which means
  functions in the `Enum` module can be used to work with
  ranges:

      iex> range = 1..10
      1..10
      iex> Enum.reduce(range, 0, fn i, acc -> i * i + acc end)
      385
      iex> Enum.count(range)
      10
      iex> Enum.member?(range, 11)
      false
      iex> Enum.member?(range, 8)
      true

  Such function calls are efficient memory-wise no matter the
  size of the range. The implementation of the `Enumerable`
  protocol uses logic based solely on the endpoints and does
  not materialize the whole list of integers.
  """

  defstruct first: nil, last: nil

  @type t :: %__MODULE__{first: integer, last: integer}
  @type t(first, last) :: %__MODULE__{first: first, last: last}

  @doc """
  Creates a new range.
  """
  @spec new(integer, integer) :: t
  def new(first, last) when is_integer(first) and is_integer(last) do
    %Range{first: first, last: last}
  end

  # Non-integer endpoints are rejected eagerly with a descriptive error.
  def new(first, last) do
    raise ArgumentError,
          "ranges (first..last) expect both sides to be integers, " <>
            "got: #{inspect(first)}..#{inspect(last)}"
  end

  @doc """
  Checks if two ranges are disjoint.

  ## Examples

      iex> Range.disjoint?(1..5, 6..9)
      true
      iex> Range.disjoint?(5..1, 6..9)
      true
      iex> Range.disjoint?(1..5, 5..9)
      false
      iex> Range.disjoint?(1..5, 2..7)
      false

  """
  @doc since: "1.8.0"
  @spec disjoint?(t, t) :: boolean
  def disjoint?(first1..last1, first2..last2) do
    # Normalize both ranges to ascending order; two ascending ranges are
    # disjoint exactly when one ends before the other begins.
    {first1, last1} = normalize(first1, last1)
    {first2, last2} = normalize(first2, last2)
    last2 < first1 or last1 < first2
  end

  @compile inline: [normalize: 2]
  # Returns the endpoints as an ascending {min, max} pair.
  defp normalize(first, last) when first > last, do: {last, first}
  defp normalize(first, last), do: {first, last}

  # TODO: Remove by 2.0
  @doc false
  @deprecated "Pattern match on first..last instead"
  def range?(term)
  def range?(first..last) when is_integer(first) and is_integer(last), do: true
  def range?(_), do: false
end
defimpl Enumerable, for: Range do
  # Reduces over the range one integer at a time. `up?` captures the
  # direction once so the stepping clauses can add or subtract 1.
  def reduce(first..last, acc, fun) do
    reduce(first, last, acc, fun, _up? = last >= first)
  end

  # Halt: stop immediately and report the accumulator.
  defp reduce(_first, _last, {:halt, acc}, _fun, _up?) do
    {:halted, acc}
  end

  # Suspend: hand back a continuation that resumes from the same position.
  defp reduce(first, last, {:suspend, acc}, fun, up?) do
    {:suspended, acc, &reduce(first, last, &1, fun, up?)}
  end

  # Ascending step while `first` has not passed `last`.
  defp reduce(first, last, {:cont, acc}, fun, _up? = true) when first <= last do
    reduce(first + 1, last, fun.(first, acc), fun, _up? = true)
  end

  # Descending step while `first` has not passed `last`.
  defp reduce(first, last, {:cont, acc}, fun, _up? = false) when first >= last do
    reduce(first - 1, last, fun.(first, acc), fun, _up? = false)
  end

  # Endpoints have crossed: traversal is complete.
  defp reduce(_, _, {:cont, acc}, _fun, _up) do
    {:done, acc}
  end

  # Membership is two endpoint comparisons; no list is materialized.
  def member?(first..last, value) when is_integer(value) do
    if first <= last do
      {:ok, first <= value and value <= last}
    else
      {:ok, last <= value and value <= first}
    end
  end

  # Non-integer values are never members of a range.
  def member?(_.._, _value) do
    {:ok, false}
  end

  # Count is derived arithmetically from the endpoints.
  def count(first..last) do
    if first <= last do
      {:ok, last - first + 1}
    else
      {:ok, first - last + 1}
    end
  end

  # Supports Enum.slice/2 in O(1) per element: the slicer computes the
  # element at a given offset directly from the first endpoint.
  def slice(first..last) do
    if first <= last do
      {:ok, last - first + 1, &slice_asc(first + &1, &2)}
    else
      {:ok, first - last + 1, &slice_desc(first - &1, &2)}
    end
  end

  defp slice_asc(current, 1), do: [current]
  defp slice_asc(current, remaining), do: [current | slice_asc(current + 1, remaining - 1)]

  defp slice_desc(current, 1), do: [current]
  defp slice_desc(current, remaining), do: [current | slice_desc(current - 1, remaining - 1)]
end
defimpl Inspect, for: Range do
  import Inspect.Algebra

  # Renders a range as `first..last`, delegating each endpoint to the
  # standard document builder so inspect options still apply.
  def inspect(first..last, opts) do
    first
    |> to_doc(opts)
    |> concat("..")
    |> concat(to_doc(last, opts))
  end
end
| 26.13253
| 93
| 0.620101
|
79c8eb6581e941f42695b735ba19113cb7fe5544
| 1,426
|
exs
|
Elixir
|
mix.exs
|
kianmeng/surface_formatter
|
6a861ad299457c2e1375f64c57e4ed15bad8f425
|
[
"MIT"
] | null | null | null |
mix.exs
|
kianmeng/surface_formatter
|
6a861ad299457c2e1375f64c57e4ed15bad8f425
|
[
"MIT"
] | null | null | null |
mix.exs
|
kianmeng/surface_formatter
|
6a861ad299457c2e1375f64c57e4ed15bad8f425
|
[
"MIT"
] | null | null | null |
defmodule SurfaceFormatter.MixProject do
  use Mix.Project

  @source_url "https://github.com/surface-ui/surface_formatter"
  @version "0.7.5"

  # Mix project definition; Hex-publishing and docs metadata are built by
  # the private helpers below.
  def project do
    [
      app: :surface_formatter,
      version: @version,
      elixir: "~> 1.8",
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      # Hex
      description:
        "A code formatter for Surface, the component based library for Phoenix LiveView",
      package: package(),
      # Docs
      name: "SurfaceFormatter",
      docs: docs()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [extra_applications: [:logger]]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:surface, "~> 0.5"},
      {:ex_doc, ">= 0.19.0", only: [:dev, :docs], runtime: false}
    ]
  end

  # Continuous-integration convenience alias.
  defp aliases do
    [
      ci: [
        "format --check-formatted",
        "compile --force --warnings-as-errors",
        "cmd MIX_ENV=test mix test"
      ]
    ]
  end

  # Hex package metadata.
  defp package do
    %{
      licenses: ["MIT"],
      links: %{
        Changelog: @source_url <> "/blob/master/CHANGELOG.md",
        GitHub: @source_url
      }
    }
  end

  # ExDoc configuration.
  defp docs do
    [
      main: "readme",
      source_ref: "v#{@version}",
      source_url: @source_url,
      extras: ["README.md", "CHANGELOG.md"]
    ]
  end
end
| 19.27027
| 89
| 0.54979
|
79c91c2ac1f61210324b724ecefe2309fc6010cf
| 1,006
|
ex
|
Elixir
|
apps/rest_api/lib/controllers/admin/sessions.ex
|
lcpojr/watcher_ex
|
bd5a9210b5b41a6c9b5d4255de19fc6967d29fb7
|
[
"Apache-2.0"
] | 9
|
2020-10-13T14:11:37.000Z
|
2021-08-12T18:40:08.000Z
|
apps/rest_api/lib/controllers/admin/sessions.ex
|
lcpojr/watcher_ex
|
bd5a9210b5b41a6c9b5d4255de19fc6967d29fb7
|
[
"Apache-2.0"
] | 28
|
2020-10-04T14:43:48.000Z
|
2021-12-07T16:54:22.000Z
|
apps/rest_api/lib/controllers/admin/sessions.ex
|
lcpojr/watcher_ex
|
bd5a9210b5b41a6c9b5d4255de19fc6967d29fb7
|
[
"Apache-2.0"
] | 3
|
2020-11-25T20:59:47.000Z
|
2021-08-30T10:36:58.000Z
|
defmodule RestAPI.Controllers.Admin.Sessions do
  @moduledoc false

  use RestAPI.Controller, :controller

  alias RestAPI.Ports.Authenticator

  action_fallback RestAPI.Controllers.Fallback

  @doc "Logout the authenticated subject session."
  @spec logout(conn :: Plug.Conn.t(), params :: map()) :: Plug.Conn.t()
  def logout(%Plug.Conn{private: %{session: session}} = conn, _params) do
    # Errors fall through to the action fallback; success is 204 No Content.
    case Authenticator.sign_out_session(session.jti) do
      {:ok, _any} -> send_resp(conn, :no_content, "")
      {:error, _reason} = error -> error
    end
  end

  @doc "Logout subject authenticated sessions."
  @spec logout_all_sessions(conn :: Plug.Conn.t(), params :: map()) :: Plug.Conn.t()
  def logout_all_sessions(%Plug.Conn{private: %{session: session}} = conn, _params) do
    case Authenticator.sign_out_all_sessions(session.subject_id, session.subject_type) do
      {:ok, _any} -> send_resp(conn, :no_content, "")
      {:error, _reason} = error -> error
    end
  end
end
| 31.4375
| 86
| 0.676938
|
79c9a894d583ee187006e6ea6fdaf48531d029d8
| 297
|
ex
|
Elixir
|
community/betterdev/lib/betterdev/repo.ex
|
earthrid/betterdev.link
|
b8efe279e82810075ba36673483f7f4d6862bc19
|
[
"MIT"
] | 79
|
2017-07-03T13:04:08.000Z
|
2022-02-11T13:59:37.000Z
|
community/betterdev/lib/betterdev/repo.ex
|
earthrid/betterdev.link
|
b8efe279e82810075ba36673483f7f4d6862bc19
|
[
"MIT"
] | 16
|
2017-07-09T03:16:27.000Z
|
2022-01-14T14:29:57.000Z
|
community/betterdev/lib/betterdev/repo.ex
|
earthrid/betterdev.link
|
b8efe279e82810075ba36673483f7f4d6862bc19
|
[
"MIT"
] | 10
|
2017-07-09T02:58:59.000Z
|
2021-09-14T08:01:02.000Z
|
defmodule Betterdev.Repo do
  use Ecto.Repo, otp_app: :betterdev
  use Kerosene, per_page: 10

  @doc """
  Dynamically loads the repository url from the
  DATABASE_URL environment variable.
  """
  def init(_, opts) do
    database_url = System.get_env("DATABASE_URL")
    {:ok, Keyword.put(opts, :url, database_url)}
  end
end
| 22.846154
| 66
| 0.703704
|
79c9b3a9a8996f2086efadd321541df0b85c3919
| 4,244
|
ex
|
Elixir
|
kousa/lib/broth/message/auth/request.ex
|
samyadel/dogehouse
|
c9daffbfe81a7488093b07f3f9a274a062dde801
|
[
"MIT"
] | 2
|
2021-05-01T16:57:50.000Z
|
2021-07-07T22:01:14.000Z
|
kousa/lib/broth/message/auth/request.ex
|
samyadel/dogehouse
|
c9daffbfe81a7488093b07f3f9a274a062dde801
|
[
"MIT"
] | null | null | null |
kousa/lib/broth/message/auth/request.ex
|
samyadel/dogehouse
|
c9daffbfe81a7488093b07f3f9a274a062dde801
|
[
"MIT"
] | null | null | null |
defmodule Broth.Message.Auth.Request do
  @moduledoc """
  Websocket auth call: validates the supplied access/refresh tokens,
  starts the user's session process, and rejoins a room when one is known.
  """

  use Broth.Message.Call,
    needs_auth: false

  @primary_key false
  embedded_schema do
    field(:accessToken, :string)
    field(:refreshToken, :string)
    field(:platform, :string)
    field(:currentRoomId, :binary_id)
    field(:reconnectToVoice, :boolean)
    field(:muted, :boolean, default: false)
    field(:deafened, :boolean, default: false)
  end

  alias Kousa.Utils.UUID

  @impl true
  # accessToken is the only hard requirement; currentRoomId is normalized
  # into UUID form when present.
  def changeset(initializer \\ %__MODULE__{}, data) do
    initializer
    |> cast(data, [:accessToken, :refreshToken, :platform, :reconnectToVoice, :muted, :deafened])
    |> validate_required([:accessToken])
    |> UUID.normalize(:currentRoomId)
  end

  defmodule Reply do
    # Public projection of the "users" table that is sent back to the
    # client on successful authentication (see @derive for the JSON shape).
    use Broth.Message.Push

    alias Beef.Repo

    @derive {Jason.Encoder, only: ~w(
      id
      username
      displayName
      avatarUrl
      bannerUrl
      bio
      online
      numFollowing
      numFollowers
      lastOnline
    )a}

    @primary_key {:id, :binary_id, []}
    schema "users" do
      field(:username, :string)
      field(:displayName, :string)
      field(:avatarUrl, :string)
      field(:bannerUrl, :string)
      field(:bio, :string, default: "")
      field(:currentRoomId, :binary_id)
      field(:numFollowing, :integer)
      field(:numFollowers, :integer)
      field(:online, :boolean)
      field(:lastOnline, :utc_datetime_usec)
    end
  end

  @impl true
  # Validation errors are returned unchanged to the caller.
  def execute(changeset, state) do
    case apply_action(changeset, :validate) do
      {:ok, request} -> convert_tokens(request, state)
      error -> error
    end
  end

  alias Beef.Repo
  alias Beef.Schemas.User
  alias Onion.PubSub

  # Resolves the token pair to a user id. A nil result closes the socket;
  # :existing_claim reuses the tokens, :new_tokens carries refreshed ones.
  defp convert_tokens(request, state) do
    alias Kousa.Utils.TokenUtils

    case TokenUtils.tokens_to_user_id(request.accessToken, request.refreshToken) do
      nil ->
        {:close, 4001, "invalid_authentication"}

      {:existing_claim, user_id} ->
        do_auth(user_id, nil, Repo.get(User, user_id), request, state)

      {:new_tokens, user_id, tokens, user} ->
        do_auth(user_id, tokens, user, request, state)
    end
  end

  # Starts the user session, pushes refreshed tokens, rejoins the room the
  # user was in (server-side state wins over the frontend's room id), and
  # replies with the user's public profile.
  defp do_auth(user_id, tokens, user, request, state) do
    alias Onion.UserSession
    alias Onion.RoomSession
    alias Beef.Rooms
    alias Beef.Repo
    alias Beef.Users

    if user do
      # note that this will start the session and will be ignored if the
      # session is already running.
      UserSession.start_supervised(
        user_id: user_id,
        ip: state.ip,
        username: user.username,
        avatar_url: user.avatarUrl,
        banner_url: user.bannerUrl,
        display_name: user.displayName,
        current_room_id: user.currentRoomId,
        muted: request.muted,
        deafened: request.deafened,
        bot_owner_id: user.botOwnerId
      )

      if user.ip != state.ip do
        Users.set_ip(user_id, state.ip)
      end

      # currently we only allow one active websocket connection per-user
      # at some point soon we're going to make this multi-connection, and we
      # won't have to do this.
      UserSession.set_active_ws(user_id, self())

      if tokens do
        UserSession.new_tokens(user_id, tokens)
      end

      roomIdFromFrontend = request.currentRoomId

      cond do
        user.currentRoomId ->
          # TODO: move toroom business logic
          room = Rooms.get_room_by_id(user.currentRoomId)

          RoomSession.start_supervised(
            room_id: user.currentRoomId,
            voice_server_id: room.voiceServerId
          )

          RoomSession.join_room(room.id, user.id, request.muted, request.deafened)

          if request.reconnectToVoice == true do
            Kousa.Room.join_vc_room(user.id, room)
          end

          # This seems janky, should probably be refactored into a Kousa.Auth module.
          PubSub.subscribe("chat:" <> room.id)

        roomIdFromFrontend ->
          Kousa.Room.join_room(user.id, roomIdFromFrontend)

        true ->
          :ok
      end

      # subscribe to chats directed to oneself.
      PubSub.subscribe("chat:" <> user_id)

      {:reply, Repo.get(Reply, user_id), %{state | user_id: user_id, awaiting_init: false}}
    else
      {:close, 4001, "invalid authentication"}
    end
  end
end
| 26.860759
| 97
| 0.64279
|
79c9c95188b0e29b524e57ff72d4a69305a0c038
| 2,148
|
exs
|
Elixir
|
config/prod.exs
|
mentels/pomex
|
57eca65777171ee0116b5ffde56c35f41bff7c8e
|
[
"Apache-2.0"
] | null | null | null |
config/prod.exs
|
mentels/pomex
|
57eca65777171ee0116b5ffde56c35f41bff7c8e
|
[
"Apache-2.0"
] | null | null | null |
config/prod.exs
|
mentels/pomex
|
57eca65777171ee0116b5ffde56c35f41bff7c8e
|
[
"Apache-2.0"
] | null | null | null |
use Mix.Config

# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :pomex, Pomex.Endpoint,
  http: [port: {:system, "PORT"}],
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :pomex, Pomex.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [port: 443,
#               keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#               certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
#     config :pomex, Pomex.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
#     config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
#     config :pomex, Pomex.Endpoint, server: true
#
# You will also need to set the application root to `.` in order
# for the new static assets to be served after a hot upgrade:
#
#     config :pomex, Pomex.Endpoint, root: "."

# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 32.545455
| 67
| 0.709497
|
79c9e17f80638b7aa700fe7f7ee486be27320670
| 880
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v2/metadata.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/metadata.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v2/metadata.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2 do
  @moduledoc """
  API client metadata for GoogleApi.Content.V2.
  """

  # Revision date of the discovery document this client was generated from.
  @discovery_revision "20220303"

  @doc "Returns the discovery-document revision this client was generated from."
  def discovery_revision, do: @discovery_revision
end
| 32.592593
| 74
| 0.757955
|
79c9ea131e86c4f99142510acdb36d9ecbbaa269
| 556
|
exs
|
Elixir
|
examples/ecto_job_priority_demo/config/config.exs
|
thedelchop/ecto_job
|
0157d857e4436a35ebcc0a9f5cd4b28b33292f62
|
[
"MIT"
] | 268
|
2017-08-15T12:55:41.000Z
|
2022-03-20T22:42:18.000Z
|
examples/ecto_job_priority_demo/config/config.exs
|
thedelchop/ecto_job
|
0157d857e4436a35ebcc0a9f5cd4b28b33292f62
|
[
"MIT"
] | 52
|
2018-01-15T20:47:54.000Z
|
2021-12-24T06:13:55.000Z
|
examples/ecto_job_priority_demo/config/config.exs
|
thedelchop/ecto_job
|
0157d857e4436a35ebcc0a9f5cd4b28b33292f62
|
[
"MIT"
] | 38
|
2018-01-08T12:26:19.000Z
|
2021-06-01T12:41:09.000Z
|
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# Keep demo output quiet: only :info and above are logged.
config :logger, :level, :info

config :ecto_job_priority_demo, ecto_repos: [EctoJobPriorityDemo.Repo]

# Local Postgres connection for the demo.
config :ecto_job_priority_demo, EctoJobPriorityDemo.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "password",
  database: "ecto_job_test",
  hostname: "localhost",
  pool_size: 30

# EctoJob settings: dispatch jobs on every poll, with job logging disabled.
config :ecto_job,
  repo: EctoJobPriorityDemo.Repo,
  always_dispatch_jobs_on_poll: true,
  log: false
| 26.47619
| 70
| 0.776978
|
79c9f0919baf0ffd33c68b1f475565e6456a6748
| 1,466
|
ex
|
Elixir
|
lib/xdr/ledger/liquidity_pool.ex
|
kommitters/stellar_base
|
cf36723d5add4fead1029526230299def68048fe
|
[
"MIT"
] | 3
|
2021-08-17T20:32:45.000Z
|
2022-03-13T20:26:02.000Z
|
lib/xdr/ledger/liquidity_pool.ex
|
kommitters/stellar_base
|
cf36723d5add4fead1029526230299def68048fe
|
[
"MIT"
] | 45
|
2021-08-12T20:19:41.000Z
|
2022-03-27T21:00:10.000Z
|
lib/xdr/ledger/liquidity_pool.ex
|
kommitters/stellar_base
|
cf36723d5add4fead1029526230299def68048fe
|
[
"MIT"
] | 2
|
2021-09-22T23:11:13.000Z
|
2022-01-23T03:19:11.000Z
|
defmodule StellarBase.XDR.Ledger.LiquidityPool do
  @moduledoc """
  Representation of Stellar Ledger `LiquidityPool` type.
  """

  alias StellarBase.XDR.PoolID

  @behaviour XDR.Declaration

  # XDR layout: a single-component struct wrapping the pool's PoolID.
  @struct_spec XDR.Struct.new(liquidity_pool_id: PoolID)

  @type t :: %__MODULE__{liquidity_pool_id: PoolID.t()}

  defstruct [:liquidity_pool_id]

  @doc "Builds a LiquidityPool from an already-constructed PoolID."
  @spec new(liquidity_pool_id :: PoolID.t()) :: t()
  def new(%PoolID{} = liquidity_pool_id),
    do: %__MODULE__{liquidity_pool_id: liquidity_pool_id}

  @impl true
  def encode_xdr(%__MODULE__{liquidity_pool_id: liquidity_pool_id}) do
    [liquidity_pool_id: liquidity_pool_id]
    |> XDR.Struct.new()
    |> XDR.Struct.encode_xdr()
  end

  @impl true
  # Raising variant of encode_xdr/1.
  def encode_xdr!(%__MODULE__{liquidity_pool_id: liquidity_pool_id}) do
    [liquidity_pool_id: liquidity_pool_id]
    |> XDR.Struct.new()
    |> XDR.Struct.encode_xdr!()
  end

  @impl true
  def decode_xdr(bytes, struct \\ @struct_spec)

  # Decodes a LiquidityPool from `bytes`, returning {:ok, {pool, rest}}
  # or the decode error unchanged.
  def decode_xdr(bytes, struct) do
    case XDR.Struct.decode_xdr(bytes, struct) do
      {:ok, {%XDR.Struct{components: [liquidity_pool_id: liquidity_pool_id]}, rest}} ->
        {:ok, {new(liquidity_pool_id), rest}}

      error ->
        error
    end
  end

  @impl true
  def decode_xdr!(bytes, struct \\ @struct_spec)

  # Raising variant of decode_xdr/2; returns {pool, rest} directly.
  def decode_xdr!(bytes, struct) do
    {%XDR.Struct{components: [liquidity_pool_id: liquidity_pool_id]}, rest} =
      XDR.Struct.decode_xdr!(bytes, struct)

    {new(liquidity_pool_id), rest}
  end
end
| 26.178571
| 87
| 0.700546
|
79ca2ae2ab8b563de9861bb60072622508e291ad
| 5,860
|
ex
|
Elixir
|
clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/quota_limit.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | 1
|
2018-12-03T23:43:10.000Z
|
2018-12-03T23:43:10.000Z
|
clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/quota_limit.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/quota_limit.ex
|
matehat/elixir-google-api
|
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.QuotaLimit do
  @moduledoc """
  `QuotaLimit` defines a specific limit that applies over a specified duration
  for a limit type. There can be at most one limit for a duration and limit
  type combination defined within a `QuotaGroup`.

  ## Attributes

  *   `defaultLimit` (*type:* `String.t`, *default:* `nil`) - Default number of tokens that can be consumed during the specified
      duration. This is the number of tokens assigned when a client
      application developer activates the service for his/her project.
      Specifying a value of 0 will block all requests. This can be used if you
      are provisioning quota to selected consumers and blocking others.
      Similarly, a value of -1 will indicate an unlimited quota. No other
      negative values are allowed.
      Used by group-based quotas only.
  *   `description` (*type:* `String.t`, *default:* `nil`) - Optional. User-visible, extended description for this quota limit.
      Should be used only when more context is needed to understand this limit
      than provided by the limit's display name (see: `display_name`).
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - User-visible display name for this limit.
      Optional. If not set, the UI will provide a default display name based on
      the quota configuration. This field can be used to override the default
      display name generated from the configuration.
  *   `duration` (*type:* `String.t`, *default:* `nil`) - Duration of this limit in textual notation. Example: "100s", "24h", "1d".
      For duration longer than a day, only multiple of days is supported. We
      support only "100s" and "1d" for now. Additional support will be added in
      the future. "0" indicates indefinite duration.
      Used by group-based quotas only.
  *   `freeTier` (*type:* `String.t`, *default:* `nil`) - Free tier value displayed in the Developers Console for this limit.
      The free tier is the number of tokens that will be subtracted from the
      billed amount when billing is enabled.
      This field can only be set on a limit with duration "1d", in a billable
      group; it is invalid on any other limit. If this field is not set, it
      defaults to 0, indicating that there is no free tier for this service.
      Used by group-based quotas only.
  *   `maxLimit` (*type:* `String.t`, *default:* `nil`) - Maximum number of tokens that can be consumed during the specified
      duration. Client application developers can override the default limit up
      to this maximum. If specified, this value cannot be set to a value less
      than the default limit. If not specified, it is set to the default limit.
      To allow clients to apply overrides with no upper bound, set this to -1,
      indicating unlimited maximum quota.
      Used by group-based quotas only.
  *   `metric` (*type:* `String.t`, *default:* `nil`) - The name of the metric this quota limit applies to. The quota limits with
      the same metric will be checked together during runtime. The metric must be
      defined within the service config.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Name of the quota limit.
      The name must be provided, and it must be unique within the service. The
      name can only include alphanumeric characters as well as '-'.
      The maximum length of the limit name is 64 characters.
  *   `unit` (*type:* `String.t`, *default:* `nil`) - Specify the unit of the quota limit. It uses the same syntax as
      Metric.unit. The supported unit kinds are determined by the quota
      backend system.
      Here are some examples:
      * "1/min/{project}" for quota per minute per project.
      Note: the order of unit components is insignificant.
      The "1" at the beginning is required to follow the metric unit syntax.
  *   `values` (*type:* `map()`, *default:* `nil`) - Tiered limit values. You must specify this as a key:value pair, with an
      integer value that is the maximum number of requests allowed for the
      specified unit. Currently only STANDARD is supported.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :defaultLimit => String.t(),
          :description => String.t(),
          :displayName => String.t(),
          :duration => String.t(),
          :freeTier => String.t(),
          :maxLimit => String.t(),
          :metric => String.t(),
          :name => String.t(),
          :unit => String.t(),
          :values => map()
        }

  # Field declarations drive JSON (de)serialization via GoogleApi.Gax.ModelBase.
  field(:defaultLimit)
  field(:description)
  field(:displayName)
  field(:duration)
  field(:freeTier)
  field(:maxLimit)
  field(:metric)
  field(:name)
  field(:unit)
  field(:values, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.QuotaLimit do
  alias GoogleApi.ServiceConsumerManagement.V1.Model.QuotaLimit

  # Hand decoding off to the field definitions generated on the model.
  def decode(value, options), do: QuotaLimit.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.QuotaLimit do
  alias GoogleApi.Gax.ModelBase

  # All generated models share the same generic encoder.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 46.141732
| 131
| 0.69744
|
79ca46ced03912ecd514f6fbb8f708f044cdfb38
| 731
|
ex
|
Elixir
|
api/web/gettext.ex
|
NaiRobley/IntoTheBookmarks
|
a5e36aa39740e4908fdc46f8a1fb6df3d92b2cda
|
[
"Apache-2.0"
] | null | null | null |
api/web/gettext.ex
|
NaiRobley/IntoTheBookmarks
|
a5e36aa39740e4908fdc46f8a1fb6df3d92b2cda
|
[
"Apache-2.0"
] | 6
|
2021-05-25T05:47:16.000Z
|
2022-02-26T04:42:31.000Z
|
api/web/gettext.ex
|
NaiRobley/IntoTheBookmarks
|
a5e36aa39740e4908fdc46f8a1fb6df3d92b2cda
|
[
"Apache-2.0"
] | null | null | null |
defmodule IntoTheBookmarks.Gettext do
  @moduledoc """
  A module providing Internationalization with a gettext-based API.

  By using [Gettext](https://hexdocs.pm/gettext),
  your module gains a set of macros for translations, for example:

      import IntoTheBookmarks.Gettext

      # Simple translation
      gettext "Here is the string to translate"

      # Plural translation
      ngettext "Here is the string to translate",
               "Here are the strings to translate",
               3

      # Domain-based translation
      dgettext "errors", "Here is the error message to translate"

  See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
  """
  use Gettext, otp_app: :into_the_bookmarks
end
| 29.24
| 72
| 0.690834
|
79ca6cb9afcbc0a573dc5f522df5733def1be464
| 433
|
ex
|
Elixir
|
discuss/lib/discuss/model/comments.ex
|
hiroshisiq/elixir-sandbox
|
3f014d2f15ae1eeda1c22db30ef111cbc22e9afc
|
[
"MIT"
] | null | null | null |
discuss/lib/discuss/model/comments.ex
|
hiroshisiq/elixir-sandbox
|
3f014d2f15ae1eeda1c22db30ef111cbc22e9afc
|
[
"MIT"
] | null | null | null |
discuss/lib/discuss/model/comments.ex
|
hiroshisiq/elixir-sandbox
|
3f014d2f15ae1eeda1c22db30ef111cbc22e9afc
|
[
"MIT"
] | null | null | null |
defmodule Discuss.Model.Comment do
  use Ecto.Schema
  import Ecto.Changeset

  # Comments serialize to JSON with only their text and author.
  @derive {Jason.Encoder, only: [:content, :user]}
  schema "comments" do
    field :content, :string
    belongs_to :user, Discuss.Model.User
    belongs_to :topic, Discuss.Model.Topic

    timestamps()
  end

  @doc false
  def changeset(struct, attrs \\ %{}) do
    # Only :content is cast from external input; associations are set elsewhere.
    changes = cast(struct, attrs, [:content])
    validate_required(changes, [:content])
  end
end
| 19.681818
| 50
| 0.667436
|
79ca6e3e3bda6b5f1bae5faa743ffd5a06201301
| 959
|
ex
|
Elixir
|
clients/speech/lib/google_api/speech/v1/request_builder.ex
|
leandrocp/elixir-google-api
|
a86e46907f396d40aeff8668c3bd81662f44c71e
|
[
"Apache-2.0"
] | null | null | null |
clients/speech/lib/google_api/speech/v1/request_builder.ex
|
leandrocp/elixir-google-api
|
a86e46907f396d40aeff8668c3bd81662f44c71e
|
[
"Apache-2.0"
] | null | null | null |
clients/speech/lib/google_api/speech/v1/request_builder.ex
|
leandrocp/elixir-google-api
|
a86e46907f396d40aeff8668c3bd81662f44c71e
|
[
"Apache-2.0"
] | 1
|
2020-11-10T16:58:27.000Z
|
2020-11-10T16:58:27.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Speech.V1.RequestBuilder do
  @moduledoc """
  Helper functions for building Tesla requests.

  This module is no longer used. Please use GoogleApi.Gax.Request instead.
  """
end
| 36.884615
| 77
| 0.763295
|
79cb25a654ac29954f12a4cae9656f69bcd30a88
| 1,279
|
ex
|
Elixir
|
lib/egcovac_web/router.ex
|
karembadawy/egcovac
|
a1ddb339656d41b29ea098cd8be6c4934dec6eee
|
[
"MIT"
] | null | null | null |
lib/egcovac_web/router.ex
|
karembadawy/egcovac
|
a1ddb339656d41b29ea098cd8be6c4934dec6eee
|
[
"MIT"
] | null | null | null |
lib/egcovac_web/router.ex
|
karembadawy/egcovac
|
a1ddb339656d41b29ea098cd8be6c4934dec6eee
|
[
"MIT"
] | null | null | null |
defmodule EgcovacWeb.Router do
  use EgcovacWeb, :router

  # Standard browser stack: HTML negotiation, session/flash, CSRF protection
  # and secure response headers.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end

  pipeline :api do
    plug :accepts, ["json"]
  end

  scope "/", EgcovacWeb do
    pipe_through :browser

    get "/", PageController, :index
    resources "/users", UserController
    resources "/requests", RequestController
    # Follow-up lookup is routed both with and without a registration number.
    get "/follow_up/:registration_number", FollowUpController, :index
    get "/follow_up/", FollowUpController, :index
  end

  # Other scopes may use custom stacks.
  # scope "/api", EgcovacWeb do
  #   pipe_through :api
  # end

  # Enables LiveDashboard only for development
  #
  # If you want to use the LiveDashboard in production, you should put
  # it behind authentication and allow only admins to access it.
  # If your application does not have an admins-only section yet,
  # you can use Plug.BasicAuth to set up some basic authentication
  # as long as you are also using SSL (which you should anyway).
  if Mix.env() in [:dev, :test] do
    import Phoenix.LiveDashboard.Router

    scope "/" do
      pipe_through :browser
      live_dashboard "/dashboard", metrics: EgcovacWeb.Telemetry
    end
  end
end
| 27.212766
| 70
| 0.705238
|
79cb4bd6b0e7caedf7ba4fcc8ebf5f9760249990
| 61
|
ex
|
Elixir
|
web/views/topic_view.ex
|
slaily/discuss
|
6f0eacd0f2c03d197f2cf9b6c27a03752c90e969
|
[
"MIT"
] | null | null | null |
web/views/topic_view.ex
|
slaily/discuss
|
6f0eacd0f2c03d197f2cf9b6c27a03752c90e969
|
[
"MIT"
] | null | null | null |
web/views/topic_view.ex
|
slaily/discuss
|
6f0eacd0f2c03d197f2cf9b6c27a03752c90e969
|
[
"MIT"
] | null | null | null |
defmodule Discuss.TopicView do
  @moduledoc """
  View module for topic templates. All behaviour is injected by
  `use Discuss.Web, :view`; no custom helpers are defined here.
  """
  use Discuss.Web, :view
end
| 12.2
| 30
| 0.770492
|
79cbbb8e7502d394fdea0abc5e1e729074aa3a14
| 1,408
|
ex
|
Elixir
|
lib/eps_web/controllers/docs_controller.ex
|
generalui/elixir_pairing
|
67919e37b53e72bbd11e4785c9caa94fb3ac3303
|
[
"MIT"
] | null | null | null |
lib/eps_web/controllers/docs_controller.ex
|
generalui/elixir_pairing
|
67919e37b53e72bbd11e4785c9caa94fb3ac3303
|
[
"MIT"
] | null | null | null |
lib/eps_web/controllers/docs_controller.ex
|
generalui/elixir_pairing
|
67919e37b53e72bbd11e4785c9caa94fb3ac3303
|
[
"MIT"
] | null | null | null |
defmodule EPSWeb.DocsController do
  @moduledoc """
  Serves the pre-built static documentation bundled under
  `priv/static/docs`, resolving the request path to a file on disk and
  setting the matching `content-type` response header.
  """
  use EPSWeb, :controller

  # MIME types for the asset extensions the docs bundle is known to contain.
  @content_types %{
    ".css" => "text/css",
    ".eot" => "font/eot",
    ".ico" => "image/x-icon",
    ".jpg" => "image/jpeg",
    ".js" => "text/javascript",
    ".png" => "image/png",
    ".svg" => "image/svg+xml",
    ".ttf" => "font/ttf",
    ".woff" => "font/woff"
  }

  # Fallback for extensions missing from @content_types. Previously the
  # lookup could return `nil`, which crashes `put_resp_header/3`.
  @default_content_type "application/octet-stream"

  @doc """
  Serves the documentation landing page for the bare `/docs` route.
  """
  def index(%{path_info: path_info} = conn, _params) when path_info == ["docs"] do
    conn
    |> put_resp_header("content-type", "text/html; charset=utf-8")
    |> send_file(200, Application.app_dir(:eps, "priv/static/docs/index.html"))
  end

  @doc """
  Serves any other documentation resource. The on-disk sub-directory is
  chosen from the request path (`assets`, `fonts` or `dist`); anything else
  is treated as an HTML page at the docs root.
  """
  def index(%{path_info: path_info} = conn, _params) do
    # NOTE(review): `resource` comes straight from the request path; a ".."
    # segment could escape the docs directory — consider rejecting dot
    # segments explicitly.
    resource = List.last(path_info)

    {content_type, path} =
      cond do
        Enum.member?(path_info, "assets") -> {get_content_type(resource), "assets/"}
        Enum.member?(path_info, "fonts") -> {get_content_type(resource), "dist/html/fonts/"}
        Enum.member?(path_info, "dist") -> {get_content_type(resource), "dist/"}
        true -> {"text/html; charset=utf-8", ""}
      end

    conn
    |> put_resp_header("content-type", content_type)
    |> send_file(200, Application.app_dir(:eps, "priv/static/docs/#{path}#{resource}"))
  end

  ### PRIVATE ###

  # Maps a file name to its MIME type by extension, falling back to a safe
  # binary default for unknown extensions instead of returning `nil`.
  defp get_content_type(resource) do
    extension = Path.extname(resource)
    Map.get(@content_types, extension, @default_content_type)
  end
end
| 28.734694
| 92
| 0.607244
|
79cc08bc3cc9ad9899783e028d7bcfbb650b96d1
| 862
|
exs
|
Elixir
|
test/mix/tasks/word2vec/compile_test.exs
|
pylon/penelope
|
5b0310dc0647a8e20ab1b4c10d3820f11cfb2601
|
[
"Apache-2.0"
] | 53
|
2017-10-13T06:39:49.000Z
|
2022-03-28T19:43:42.000Z
|
test/mix/tasks/word2vec/compile_test.exs
|
pylon/penelope
|
5b0310dc0647a8e20ab1b4c10d3820f11cfb2601
|
[
"Apache-2.0"
] | 12
|
2018-01-08T23:05:37.000Z
|
2019-08-02T12:59:27.000Z
|
test/mix/tasks/word2vec/compile_test.exs
|
pylon/penelope
|
5b0310dc0647a8e20ab1b4c10d3820f11cfb2601
|
[
"Apache-2.0"
] | 4
|
2018-06-13T19:45:57.000Z
|
2019-10-17T13:37:06.000Z
|
defmodule Mix.Tasks.Word2vec.CompileTest do
  @moduledoc false
  use ExUnit.Case, async: true

  alias Mix.Tasks.Word2vec.Compile

  setup_all do
    input = "/tmp/penelope_mix_tasks_word2vec_compile.txt"
    output = "/tmp/penelope_mix_tasks_word2vec_compile"

    # Fixture: ten lines of the form "a<i> <v1> ... <v10>", where the j-th
    # vector component of line i is i/j.
    fixture =
      Enum.map_join(1..10, "\n", fn i ->
        vector = Enum.map_join(1..10, " ", fn j -> Float.to_string(i / j) end)
        "a" <> Integer.to_string(i) <> " " <> vector
      end)

    File.write!(input, fixture)

    # Clean up the fixture file and any compiled index after the suite runs.
    on_exit(fn ->
      File.rm(input)
      File.rm_rf(output)
    end)

    {:ok, input: input, output: output}
  end

  test "index construction", %{input: input, output: output} do
    Compile.run([
      input,
      output,
      "test",
      "--partitions=3",
      "--size-hint=1000",
      "--vector-size=10"
    ])
  end
end
| 20.046512
| 63
| 0.535963
|
79cc5a87a3a697f39f94583b1bf3c3c46cfed1b9
| 2,768
|
ex
|
Elixir
|
clients/spanner/lib/google_api/spanner/v1/model/copy_backup_metadata.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/spanner/lib/google_api/spanner/v1/model/copy_backup_metadata.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
clients/spanner/lib/google_api/spanner/v1/model/copy_backup_metadata.ex
|
yoshi-code-bot/elixir-google-api
|
cdb6032f01fac5ab704803113c39f2207e9e019d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Spanner.V1.Model.CopyBackupMetadata do
  @moduledoc """
  Metadata type for the operation returned by CopyBackup.

  ## Attributes

  * `cancelTime` (*type:* `DateTime.t`, *default:* `nil`) - The time at which cancellation of CopyBackup operation was received. Operations.CancelOperation starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
  * `name` (*type:* `String.t`, *default:* `nil`) - The name of the backup being created through the copy operation. Values are of the form `projects//instances//backups/`.
  * `progress` (*type:* `GoogleApi.Spanner.V1.Model.OperationProgress.t`, *default:* `nil`) - The progress of the CopyBackup operation.
  * `sourceBackup` (*type:* `String.t`, *default:* `nil`) - The name of the source backup that is being copied. Values are of the form `projects//instances//backups/`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :cancelTime => DateTime.t() | nil,
          :name => String.t() | nil,
          :progress => GoogleApi.Spanner.V1.Model.OperationProgress.t() | nil,
          :sourceBackup => String.t() | nil
        }

  # `field/2` declarations drive ModelBase's JSON (de)serialization; the
  # `as:` option names the nested decoder for structured fields.
  field(:cancelTime, as: DateTime)
  field(:name)
  field(:progress, as: GoogleApi.Spanner.V1.Model.OperationProgress)
  field(:sourceBackup)
end
defimpl Poison.Decoder, for: GoogleApi.Spanner.V1.Model.CopyBackupMetadata do
  # Delegates decoding to the model's generated `decode/2`.
  def decode(value, options),
    do: GoogleApi.Spanner.V1.Model.CopyBackupMetadata.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Spanner.V1.Model.CopyBackupMetadata do
  # Encoding is shared across all Gax-generated models.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 49.428571
| 656
| 0.740246
|
79cc742f825d4a7c4f3da9cd145637d452d7a293
| 6,270
|
ex
|
Elixir
|
test/event_store/support/append_events_test_case.ex
|
edwardzhou/commanded
|
f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d
|
[
"MIT"
] | 1
|
2018-12-28T20:48:23.000Z
|
2018-12-28T20:48:23.000Z
|
test/event_store/support/append_events_test_case.ex
|
edwardzhou/commanded
|
f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d
|
[
"MIT"
] | null | null | null |
test/event_store/support/append_events_test_case.ex
|
edwardzhou/commanded
|
f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d
|
[
"MIT"
] | 1
|
2018-12-28T20:48:24.000Z
|
2018-12-28T20:48:24.000Z
|
defmodule Commanded.EventStore.AppendEventsTestCase do
  @moduledoc """
  Shared test case for `Commanded.EventStore` adapters covering appending
  events to streams (with the various expected-version semantics) and
  reading them back. `define_tests/1` expands these tests inside the
  adapter-specific test module that uses this case.
  """

  import Commanded.SharedTestCase

  define_tests do
    import Commanded.Enumerable, only: [pluck: 2]

    alias Commanded.EventStore
    alias Commanded.EventStore.EventData

    # Minimal domain event used as the payload for all appended events.
    defmodule BankAccountOpened do
      @derive Jason.Encoder
      defstruct [:account_number, :initial_balance]
    end

    describe "append events to a stream" do
      test "should append events" do
        assert :ok == EventStore.append_to_stream("stream", 0, build_events(1))
        assert :ok == EventStore.append_to_stream("stream", 1, build_events(2))
        assert :ok == EventStore.append_to_stream("stream", 3, build_events(3))
      end

      test "should append events with `:any_version` without checking expected version" do
        assert :ok == EventStore.append_to_stream("stream", :any_version, build_events(3))
        assert :ok == EventStore.append_to_stream("stream", :any_version, build_events(2))
        assert :ok == EventStore.append_to_stream("stream", :any_version, build_events(1))
      end

      test "should append events with `:no_stream` parameter" do
        assert :ok == EventStore.append_to_stream("stream", :no_stream, build_events(2))
      end

      test "should fail when stream aleady exists with `:no_stream` parameter" do
        assert :ok == EventStore.append_to_stream("stream", :no_stream, build_events(2))

        assert {:error, :stream_exists} ==
                 EventStore.append_to_stream("stream", :no_stream, build_events(1))
      end

      test "should append events with `:stream_exists` parameter" do
        assert :ok == EventStore.append_to_stream("stream", :no_stream, build_events(2))
        assert :ok == EventStore.append_to_stream("stream", :stream_exists, build_events(1))
      end

      test "should fail with `:stream_exists` parameter when stream does not exist" do
        assert {:error, :stream_does_not_exist} ==
                 EventStore.append_to_stream("stream", :stream_exists, build_events(1))
      end

      test "should fail to append to a stream because of wrong expected version when no stream" do
        assert {:error, :wrong_expected_version} ==
                 EventStore.append_to_stream("stream", 1, build_events(1))
      end

      test "should fail to append to a stream because of wrong expected version" do
        assert :ok == EventStore.append_to_stream("stream", 0, build_events(3))

        assert {:error, :wrong_expected_version} ==
                 EventStore.append_to_stream("stream", 0, build_events(1))

        assert {:error, :wrong_expected_version} ==
                 EventStore.append_to_stream("stream", 1, build_events(1))

        assert {:error, :wrong_expected_version} ==
                 EventStore.append_to_stream("stream", 2, build_events(1))

        assert :ok == EventStore.append_to_stream("stream", 3, build_events(1))
      end
    end

    describe "stream events from an unknown stream" do
      test "should return stream not found error" do
        assert {:error, :stream_not_found} == EventStore.stream_forward("unknownstream")
      end
    end

    describe "stream events from an existing stream" do
      test "should read events" do
        correlation_id = UUID.uuid4()
        causation_id = UUID.uuid4()
        events = build_events(4, correlation_id, causation_id)

        assert :ok == EventStore.append_to_stream("stream", 0, events)

        read_events = EventStore.stream_forward("stream") |> Enum.to_list()
        assert length(read_events) == 4
        assert coerce(events) == coerce(read_events)
        assert pluck(read_events, :stream_version) == [1, 2, 3, 4]

        # Identity and metadata must round-trip for every recorded event.
        Enum.each(read_events, fn event ->
          assert_is_uuid(event.event_id)

          assert event.stream_id == "stream"
          assert event.correlation_id == correlation_id
          assert event.causation_id == causation_id
          assert event.metadata == %{"metadata" => "value"}
        end)

        # Reading from version 3 must return only the tail of the stream.
        read_events = EventStore.stream_forward("stream", 3) |> Enum.to_list()
        assert coerce(Enum.slice(events, 2, 2)) == coerce(read_events)
        assert pluck(read_events, :stream_version) == [3, 4]
      end

      test "should read from single stream" do
        events1 = build_events(2)
        events2 = build_events(4)

        assert :ok == EventStore.append_to_stream("stream", 0, events1)
        assert :ok == EventStore.append_to_stream("secondstream", 0, events2)

        read_events = EventStore.stream_forward("stream", 0) |> Enum.to_list()
        assert 2 == length(read_events)
        assert coerce(events1) == coerce(read_events)

        read_events = EventStore.stream_forward("secondstream", 0) |> Enum.to_list()
        assert 4 == length(read_events)
        assert coerce(events2) == coerce(read_events)
      end

      test "should read events in batches" do
        events = build_events(10)

        assert :ok == EventStore.append_to_stream("stream", 0, events)

        # A batch size of 2 must still yield all ten events, in order.
        read_events = EventStore.stream_forward("stream", 0, 2) |> Enum.to_list()
        assert length(read_events) == 10
        assert coerce(events) == coerce(read_events)
        assert pluck(read_events, :stream_version) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
      end
    end

    # Builds a single `EventData` for the given account number.
    defp build_event(account_number, correlation_id, causation_id) do
      %EventData{
        correlation_id: correlation_id,
        causation_id: causation_id,
        event_type: "#{__MODULE__}.BankAccountOpened",
        data: %BankAccountOpened{account_number: account_number, initial_balance: 1_000},
        metadata: %{"metadata" => "value"}
      }
    end

    # Builds `count` events sharing the same correlation/causation ids.
    defp build_events(count, correlation_id \\ UUID.uuid4(), causation_id \\ UUID.uuid4())

    defp build_events(count, correlation_id, causation_id) do
      for account_number <- 1..count,
          do: build_event(account_number, correlation_id, causation_id)
    end

    defp assert_is_uuid(uuid) do
      assert uuid |> UUID.string_to_binary!() |> is_binary()
    end

    # Strips fields that differ between the written and recorded
    # representations (event id, stream version, timestamps) before
    # comparing event lists.
    defp coerce(events) do
      Enum.map(
        events,
        &%{
          causation_id: &1.causation_id,
          correlation_id: &1.correlation_id,
          data: &1.data,
          metadata: &1.metadata
        }
      )
    end
  end
end
| 38
| 98
| 0.654705
|
79cc8f505eea4655d6d1adf003d3e9173fba4178
| 866
|
exs
|
Elixir
|
mix.exs
|
nichochar/CLHN
|
406e55a2810f93c5510f0a9145f95b2bde0fdd9c
|
[
"MIT"
] | 7
|
2017-05-15T20:31:13.000Z
|
2022-01-02T03:44:13.000Z
|
mix.exs
|
nichochar/CLHN
|
406e55a2810f93c5510f0a9145f95b2bde0fdd9c
|
[
"MIT"
] | 3
|
2017-02-27T03:30:25.000Z
|
2021-04-28T03:59:17.000Z
|
mix.exs
|
nichochar/CLHN
|
406e55a2810f93c5510f0a9145f95b2bde0fdd9c
|
[
"MIT"
] | null | null | null |
defmodule Clhn.Mixfile do
  @moduledoc """
  Mix project definition for `clhn`, a command-line Hacker News client
  built as an escript named `hn`.
  """
  use Mix.Project

  def project do
    [app: :clhn,
     # Fixed: zero-arity local calls require parentheses; the bare
     # `escript` / `Mix.env` forms are deprecated call syntax and warn on
     # modern Elixir compilers.
     escript: escript(),
     version: "0.2.0",
     elixir: "~> 1.3",
     build_embedded: Mix.env() == :prod,
     start_permanent: Mix.env() == :prod,
     deps: deps()]
  end

  # Configuration for the OTP application
  #
  # Type "mix help compile.app" for more information
  def application do
    [applications: [:logger, :httpoison, :poison, :bunt]]
  end

  # Dependencies can be Hex packages:
  #
  #   {:mydep, "~> 0.3.0"}
  #
  # Or git/path repositories:
  #
  #   {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
  #
  # Type "mix help deps" for more examples and options
  defp deps do
    [
      {:httpoison, "~> 0.10.0"},
      {:poison, "~> 3.0"},
      {:bunt, "~> 0.1.0"}
    ]
  end

  # Escript build settings: entry module Clhn, binary named "hn".
  defp escript do
    [main_module: Clhn, name: "hn"]
  end
end
| 20.619048
| 77
| 0.575058
|
79cc93b95ef6fa3d9dc916b51e8a92f991300898
| 2,693
|
ex
|
Elixir
|
clients/display_video/lib/google_api/display_video/v1/model/frequency_cap.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
clients/display_video/lib/google_api/display_video/v1/model/frequency_cap.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
clients/display_video/lib/google_api/display_video/v1/model/frequency_cap.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.FrequencyCap do
  @moduledoc """
  Settings that control the number of times a user may be shown with the same ad during a given time period.

  ## Attributes

  * `maxImpressions` (*type:* `integer()`, *default:* `nil`) - The maximum number of times a user may be shown the same ad during this period. Must be greater than 0. Required when unlimited is `false` and max_views is not set.
  * `timeUnit` (*type:* `String.t`, *default:* `nil`) - The time unit in which the frequency cap will be applied. Required when unlimited is `false`.
  * `timeUnitCount` (*type:* `integer()`, *default:* `nil`) - The number of time_unit the frequency cap will last. Required when unlimited is `false`. The following restrictions apply based on the value of time_unit: * `TIME_UNIT_LIFETIME` - this field is output only and will default to 1 * `TIME_UNIT_MONTHS` - must be between 1 and 2 * `TIME_UNIT_WEEKS` - must be between 1 and 4 * `TIME_UNIT_DAYS` - must be between 1 and 6 * `TIME_UNIT_HOURS` - must be between 1 and 23 * `TIME_UNIT_MINUTES` - must be between 1 and 59
  * `unlimited` (*type:* `boolean()`, *default:* `nil`) - Whether unlimited frequency capping is applied. When this field is set to `true`, the remaining frequency cap fields are not applicable.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :maxImpressions => integer() | nil,
          :timeUnit => String.t() | nil,
          :timeUnitCount => integer() | nil,
          :unlimited => boolean() | nil
        }

  # `field/1` declarations drive ModelBase's JSON (de)serialization; all
  # fields here are scalars, so no nested decoder (`as:`) is needed.
  field(:maxImpressions)
  field(:timeUnit)
  field(:timeUnitCount)
  field(:unlimited)
end
defimpl Poison.Decoder, for: GoogleApi.DisplayVideo.V1.Model.FrequencyCap do
  # Delegates decoding to the model's generated `decode/2`.
  def decode(value, options),
    do: GoogleApi.DisplayVideo.V1.Model.FrequencyCap.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.DisplayVideo.V1.Model.FrequencyCap do
  # Encoding is shared across all Gax-generated models.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 48.089286
| 525
| 0.721872
|
79cca64019b48cf598937b5e73d1f3b818df2e88
| 3,884
|
ex
|
Elixir
|
lib/mix_docker.ex
|
DinaWork/mix_docker
|
90b3f2fdd7c33abac678e1bca6006817d2afeb6f
|
[
"MIT"
] | null | null | null |
lib/mix_docker.ex
|
DinaWork/mix_docker
|
90b3f2fdd7c33abac678e1bca6006817d2afeb6f
|
[
"MIT"
] | null | null | null |
lib/mix_docker.ex
|
DinaWork/mix_docker
|
90b3f2fdd7c33abac678e1bca6006817d2afeb6f
|
[
"MIT"
] | null | null | null |
defmodule MixDocker do
  @moduledoc """
  Implements the `mix docker.*` workflow: building the project inside a
  build image, extracting the release tarball into a minimal runtime image,
  and tagging/pushing the result.
  """

  require Logger

  @doc """
  Copies a default `.dockerignore` into the project (unless one already
  exists) and runs `mix release.init` with the given arguments.
  """
  def init(args) do
    if not File.exists?(".dockerignore") do
      File.cp(Path.join([dockerfile_path(), "dockerignore"]), ".dockerignore")
    end

    Mix.Task.run("release.init", args)
  end

  @doc """
  Builds the build-stage image (`<image>:build`) from the build Dockerfile.
  """
  def build(args) do
    with_dockerfile dockerfile_build(), fn ->
      docker :build, dockerfile_build(), image(:build), args
    end

    Mix.shell().info "Docker image #{image(:build)} has been successfully created"
  end

  @doc """
  Extracts the release tarball from a throwaway container of the build
  image and bakes it into the release image (`<image>:release`).
  """
  def release(args) do
    project = Mix.Project.get().project
    app = project[:app]
    version = project[:version]

    # Temporary container name; removed both before and after the copy.
    cid = "mix_docker-#{:rand.uniform(1000000)}"

    with_dockerfile dockerfile_release(), fn ->
      docker :rm, cid
      docker :create, cid, image(:build)
      docker :cp, cid, "/opt/app/_build/prod/rel/#{app}/releases/#{version}/#{app}.tar.gz", "#{app}.tar.gz"
      docker :rm, cid
      docker :build, dockerfile_release(), image(:release), args
    end

    Mix.shell().info "Docker image #{image(:release)} has been successfully created"
    Mix.shell().info "You can now test your app with the following command:"
    Mix.shell().info " docker run -it --rm #{image(:release)} foreground"
  end

  @doc """
  Tags the release image with a version-specific tag and pushes it.
  """
  def publish(_args) do
    name = image(:version)

    docker :tag, image(:release), name
    docker :push, name

    Mix.shell().info "Docker image #{name} has been successfully created"
  end

  @doc """
  Runs `build/1`, `release/1` and `publish/1` in sequence.
  """
  def shipit(args) do
    build(args)
    release(args)
    publish(args)
  end

  @doc """
  Copies the default Dockerfiles into the project for manual customization.
  """
  def customize([]) do
    try_copy_dockerfile dockerfile_build()
    try_copy_dockerfile dockerfile_release()
  end

  # -- configuration ---------------------------------------------------------
  # Fixed: these values were previously baked into module attributes, which
  # evaluates `Application.get_env/3` and `:code.priv_dir/1` at COMPILE time
  # and freezes the configuration (and the build machine's priv path) into
  # the beam file. Reading them at runtime picks up config changes without
  # recompiling this module.

  defp dockerfile_path, do: :code.priv_dir(:mix_docker)

  defp dockerfile_build,
    do: Application.get_env(:mix_docker, :dockerfile_build, "Dockerfile.build")

  defp dockerfile_release,
    do: Application.get_env(:mix_docker, :dockerfile_release, "Dockerfile.release")

  # -- git helpers -------------------------------------------------------------

  # Short (10-character) SHA of the current HEAD commit.
  defp git_head_sha do
    {sha, 0} = System.cmd "git", ["rev-parse", "HEAD"]
    String.slice(sha, 0, 10)
  end

  # Total number of commits reachable from HEAD, as a string.
  defp git_commit_count do
    {count, 0} = System.cmd "git", ["rev-list", "--count", "HEAD"]
    String.trim(count)
  end

  # -- image naming ------------------------------------------------------------

  defp image(tag) do
    image_name() <> ":" <> to_string(image_tag(tag))
  end

  # Configured image name, falling back to the Mix application name.
  defp image_name do
    Application.get_env(:mix_docker, :image) || to_string(Mix.Project.get().project[:app])
  end

  # Version tag of the form "<version>.<commit count>-<short sha>".
  defp image_tag(:version) do
    version = Mix.Project.get().project[:version]
    count = git_commit_count()
    sha = git_head_sha()

    "#{version}.#{count}-#{sha}"
  end

  defp image_tag(tag), do: tag

  # -- docker CLI wrappers -------------------------------------------------------

  defp docker(:cp, cid, source, dest) do
    system! "docker", ["cp", "#{cid}:#{source}", dest]
  end

  defp docker(:build, dockerfile, tag, args) do
    system! "docker", ["build", "-f", dockerfile, "-t", tag] ++ args ++ ["."]
  end

  defp docker(:create, name, image) do
    system! "docker", ["create", "--name", name, image]
  end

  defp docker(:tag, image, tag) do
    system! "docker", ["tag", image, tag]
  end

  defp docker(:rm, cid) do
    # Best effort: failure is fine when the container does not exist.
    system "docker", ["rm", "-f", cid]
  end

  defp docker(:push, image) do
    system! "docker", ["push", image]
  end

  # Ensures `name` exists while `fun` runs: a pre-existing Dockerfile is
  # used as-is and kept; otherwise the bundled template is copied in and
  # removed again afterwards.
  defp with_dockerfile(name, fun) do
    if File.exists?(name) do
      fun.()
    else
      try do
        copy_dockerfile(name)
        fun.()
      after
        File.rm(name)
      end
    end
  end

  # Copies the bundled template Dockerfile, substituting ${APP} with the
  # Mix application name.
  defp copy_dockerfile(name) do
    app = Mix.Project.get().project[:app]

    content =
      [dockerfile_path(), name]
      |> Path.join()
      |> File.read!()
      |> String.replace("${APP}", to_string(app))

    File.write!(name, content)
  end

  defp try_copy_dockerfile(name) do
    if File.exists?(name) do
      Logger.warn("#{name} already exists")
    else
      copy_dockerfile(name)
    end
  end

  # Runs a command, streaming its output to stdout; returns
  # {collectable, exit_status}.
  defp system(cmd, args) do
    Logger.debug "$ #{cmd} #{args |> Enum.join(" ")}"
    System.cmd(cmd, args, into: IO.stream(:stdio, :line))
  end

  # As `system/2` but asserts a zero exit status.
  defp system!(cmd, args) do
    {_, 0} = system(cmd, args)
  end
end
| 24.897436
| 107
| 0.632853
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.