hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e217ed997f19a1b05edcbb3b485297941ba9d39
| 207
|
exs
|
Elixir
|
test/docker_phx_web/controllers/page_controller_test.exs
|
beltranaceves/docker_phx
|
06a9cba602ccd4f054568b7a40ca36bedc9a3e27
|
[
"MIT"
] | null | null | null |
test/docker_phx_web/controllers/page_controller_test.exs
|
beltranaceves/docker_phx
|
06a9cba602ccd4f054568b7a40ca36bedc9a3e27
|
[
"MIT"
] | null | null | null |
test/docker_phx_web/controllers/page_controller_test.exs
|
beltranaceves/docker_phx
|
06a9cba602ccd4f054568b7a40ca36bedc9a3e27
|
[
"MIT"
] | null | null | null |
# Controller test for the application's root route.
defmodule DockerPhxWeb.PageControllerTest do
use DockerPhxWeb.ConnCase
# GET / must respond with HTTP 200 and render the welcome headline.
test "GET /", %{conn: conn} do
conn = get(conn, "/")
assert html_response(conn, 200) =~ "Welcome to Docker Phx!"
end
end
| 23
| 63
| 0.68599
|
9e21ad828d735b131dd18dc1e0703189a04d3c2b
| 136
|
ex
|
Elixir
|
lib/sozui_web/controllers/page_controller.ex
|
evuez/sozui
|
e478da720ca72c96be4d53bfb03fe6917670b81d
|
[
"MIT"
] | null | null | null |
lib/sozui_web/controllers/page_controller.ex
|
evuez/sozui
|
e478da720ca72c96be4d53bfb03fe6917670b81d
|
[
"MIT"
] | null | null | null |
lib/sozui_web/controllers/page_controller.ex
|
evuez/sozui
|
e478da720ca72c96be4d53bfb03fe6917670b81d
|
[
"MIT"
] | null | null | null |
defmodule SozUIWeb.PageController do
  @moduledoc """
  Serves the application's landing page.
  """

  use SozUIWeb, :controller

  @doc "Renders the static index template."
  def index(conn, _params) do
    render(conn, "index.html")
  end
end
| 17
| 36
| 0.735294
|
9e21ada50d8d40ee0a84525fa7831a91d6881585
| 213
|
ex
|
Elixir
|
examples/ex1/task/store.ex
|
Carburetor/esp_ex
|
0a0ab02c71945e521b213befc0421a4642c9e07b
|
[
"MIT"
] | null | null | null |
examples/ex1/task/store.ex
|
Carburetor/esp_ex
|
0a0ab02c71945e521b213befc0421a4642c9e07b
|
[
"MIT"
] | 1
|
2018-05-11T04:27:54.000Z
|
2018-05-11T04:27:54.000Z
|
examples/ex1/task/store.ex
|
Carburetor/esp_ex
|
0a0ab02c71945e521b213befc0421a4642c9e07b
|
[
"MIT"
] | 1
|
2018-10-30T06:08:02.000Z
|
2018-10-30T06:08:02.000Z
|
# Store for Ex1.Task entities, backed by the EspEx event-sourcing library:
# reads events from the "task" stream category and rebuilds entity state via
# the configured projection.
defmodule Ex1.Task.Store do
use EspEx.Store,
entity: Ex1.Task,
stream_category: "task",
projection: Ex1.Task.Projection
# To use the store
# {:ok, task, _version} = Ex1.Task.Store.fetch(id)
end
| 21.3
| 52
| 0.676056
|
9e21d93d8b5036b2bb31d5cfce0e2449ef344fa4
| 60,773
|
ex
|
Elixir
|
lib/ecto_adapters_dynamodb.ex
|
rauann/ecto_adapters_dynamodb
|
4762bab21c4715775b6155f0c76b3cfabdeebc6e
|
[
"Apache-2.0"
] | 50
|
2017-06-21T00:15:15.000Z
|
2022-01-18T04:12:24.000Z
|
lib/ecto_adapters_dynamodb.ex
|
rauann/ecto_adapters_dynamodb
|
4762bab21c4715775b6155f0c76b3cfabdeebc6e
|
[
"Apache-2.0"
] | 91
|
2018-01-10T19:29:25.000Z
|
2022-02-24T16:12:26.000Z
|
lib/ecto_adapters_dynamodb.ex
|
rauann/ecto_adapters_dynamodb
|
4762bab21c4715775b6155f0c76b3cfabdeebc6e
|
[
"Apache-2.0"
] | 11
|
2017-11-28T06:22:58.000Z
|
2022-01-04T18:03:11.000Z
|
defmodule Ecto.Adapters.DynamoDB do
@moduledoc """
Ecto adapter for Amazon DynamoDB.
"""
@behaviour Ecto.Adapter
@behaviour Ecto.Adapter.Schema
@behaviour Ecto.Adapter.Queryable
@behaviour Ecto.Adapter.Migration
@impl Ecto.Adapter
# Required Ecto.Adapter callback; this adapter injects no code into the repo
# module at compile time, so the macro expands to nothing.
defmacro __before_compile__(_env) do
# Nothing to see here, yet...
end
use Bitwise, only_operators: true
alias Confex.Resolver
alias Ecto.Adapters.DynamoDB.Cache
alias Ecto.Adapters.DynamoDB.DynamoDBSet
alias Ecto.Adapters.DynamoDB.RepoConfig
alias Ecto.Query.BooleanExpr
alias ExAws.Dynamo
@pool_opts [:timeout, :pool_size, :migration_lock]
# DynamoDB will reject attempts to batch write more than 25 records at once
# https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
@batch_write_item_limit 25
@impl Ecto.Adapter
# Adapter initialization: builds the adapter meta map that is passed back to
# every adapter callback, and returns the child spec for this repo's cache.
def init(config) do
log = Keyword.get(config, :log, :debug)
telemetry_prefix = Keyword.fetch!(config, :telemetry_prefix)
# Meta carried into execute/insert/etc.: pool options, telemetry tuple, and
# the migrations table name (defaults to "schema_migrations").
meta = %{
opts: Keyword.take(config, @pool_opts),
telemetry: {config[:repo], log, telemetry_prefix},
migration_source: Keyword.get(config, :migration_source, "schema_migrations")
}
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.init", %{
"#{inspect(__MODULE__)}.init-params" => %{config: config}
})
{:ok, Cache.child_spec([config[:repo]]), meta}
end
@doc """
Ensure all applications necessary to run the adapter are started.
"""
@impl Ecto.Adapter
def ensure_all_started(config, type) do
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.ensure_all_started", %{
"#{inspect(__MODULE__)}.ensure_all_started-params" => %{type: type, config: config}
})
# Note the `=` match (not `<-`): a startup failure raises MatchError here
# instead of falling through the `with`.
with {:ok, _} = Application.ensure_all_started(:ecto_adapters_dynamodb) do
{:ok, [config]}
end
end
@impl Ecto.Adapter.Migration
# DynamoDB has no DDL transactions, so migrations never run inside one.
def supports_ddl_transaction? do
  false
end
@impl Ecto.Adapter.Migration
# DDL commands (e.g. table create/alter/drop) are delegated wholesale to the
# Migration submodule.
def execute_ddl(adapter_meta, command, options) do
Ecto.Adapters.DynamoDB.Migration.execute_ddl(adapter_meta, command, options)
end
@impl Ecto.Adapter.Migration
# Migration table locking is unsupported: raise if the repo asks for it,
# otherwise just run the migration callback directly.
# TODO - consider adding support for this? See https://github.com/circles-learning-labs/ecto_adapters_dynamodb/issues/34
def lock_for_migrations(%{opts: adapter_opts} = _meta, _opts, callback) do
  case Keyword.get(adapter_opts, :migration_lock) do
    lock when lock in [nil, false] ->
      callback.()

    _requested ->
      raise "#{inspect(__MODULE__)}.lock_for_migrations error: #{inspect(__MODULE__)} does not currently support migration table lock; please remove the :migration_lock option from your repo configuration or set it to nil"
  end
end
@impl Ecto.Adapter
# Connection checkout is not meaningful for this HTTP-based store; always raises.
def checkout(_meta, _opts, _fun) do
# TODO - consider adding support for this? See https://github.com/circles-learning-labs/ecto_adapters_dynamodb/issues/33
raise "#{inspect(__MODULE__)}.checkout: #{inspect(__MODULE__)} does not currently support checkout"
end
@impl Ecto.Adapter.Queryable
# Streaming query results is not implemented; always raises.
def stream(_adapter_meta, _query_meta, _query, _params, _opts) do
# TODO - consider adding support for this? See https://github.com/circles-learning-labs/ecto_adapters_dynamodb/issues/32
raise "#{inspect(__MODULE__)}.stream: #{inspect(__MODULE__)} does not currently support stream"
end
@doc """
Called to autogenerate a value for id/embed_id/binary_id.
Returns the autogenerated value, or nil if it must be
autogenerated inside the storage or raise if not supported.
For the Ecto type, `:id`, the adapter autogenerates a 128-bit integer
For the Ecto type, `:embed_id`, the adapter autogenerates a string, using `Ecto.UUID.generate()`
For the Ecto type, `:binary_id`, the adapter autogenerates a string, using `Ecto.UUID.generate()`
"""
# biggest possible int in 128 bits
@max_id (1 <<< 128) - 1
@impl Ecto.Adapter.Schema
# Ids are random rather than sequential: DynamoDB has no auto-increment.
def autogenerate(:id), do: Enum.random(1..@max_id)
def autogenerate(:embed_id), do: Ecto.UUID.generate()
def autogenerate(:binary_id), do: Ecto.UUID.generate()
@doc """
Returns the loaders for a given type.
Rather than use the Ecto adapter loaders callback, the adapter builds on ExAws' decoding functionality, please see ExAws's `ExAws.Dynamo.Decoder`, in this module, which at this time only loads :utc_datetime and :naive_datetime.
"""
@impl Ecto.Adapter
# Pass-through: decoding is handled by ExAws, not by Ecto loader chains.
def loaders(_primitive, type), do: [type]
@doc """
Returns the dumpers for a given type.
We rely on ExAws encoding functionality during insertion and update to properly format types for DynamoDB. Please see ExAws `ExAws.Dynamo.update_item` and `ExAws.Dynamo.put_item` for specifics. Currently, we only modify :utc_datetime and :naive_datetime, appending the UTC offset, "Z", to the datetime string before passing to ExAws.
"""
@impl Ecto.Adapter
# Datetime types get an extra dump step (to_iso_string/1) appended to the chain.
def dumpers(type, datetime)
when type in [:naive_datetime, :naive_datetime_usec, :utc_datetime, :utc_datetime_usec],
do: [datetime, &to_iso_string/1]
# All other types pass through unchanged.
def dumpers(_primitive, type), do: [type]
# Render a datetime struct as {:ok, iso8601_string}.
#
# A NaiveDateTime gets an explicit "Z" suffix: we assume it carries UTC, which
# matches the timestamps() macro but not :naive_datetime in general.
defp to_iso_string(datetime) do
  case datetime do
    %NaiveDateTime{} -> {:ok, NaiveDateTime.to_iso8601(datetime) <> "Z"}
    %DateTime{} -> {:ok, DateTime.to_iso8601(datetime)}
  end
end
@doc """
Commands invoked to prepare a query for `all`, `update_all` and `delete_all`.
The returned result is given to `execute/6`.
"""
# @callback prepare(atom :: :all | :update_all | :delete_all, query :: Ecto.Query.t) ::
# {:cache, prepared} | {:nocache, prepared}
@impl Ecto.Adapter.Queryable
# 'preparing' is more a SQL concept - there is nothing to precompute for
# DynamoDB, so every operation just logs and passes the query through uncached.
# (inspect(operation) reproduces the exact ":all"/":update_all"/":delete_all"
# suffix the per-clause versions logged.)
def prepare(operation, query) when operation in [:all, :update_all, :delete_all] do
  ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.prepare: #{inspect(operation)}", %{
    "#{inspect(__MODULE__)}.prepare-params" => %{query: inspect(query, structs: false)}
  })

  {:nocache, {operation, query}}
end
@doc """
Executes a previously prepared query.
It must return a tuple containing the number of entries and
the result set as a list of lists. The result set may also be
`nil` if a particular operation does not support them.
The `meta` field is a map containing some of the fields found
in the `Ecto.Query` struct.
It receives a process function that should be invoked for each
selected field in the query result in order to convert them to the
expected Ecto type. The `process` function will be nil if no
result set is expected from the query.
"""
# @callback execute(repo, query_meta, query, params :: list(), process | nil, options) :: result when
# result: {integer, [[term]] | nil} | no_return,
# query: {:nocache, prepared} |
# {:cached, (prepared -> :ok), cached} |
# {:cache, (cached -> :ok), prepared}
@impl Ecto.Adapter.Queryable
def execute(
%{repo: repo, migration_source: migration_source},
query_meta,
{:nocache, {func, prepared}},
params,
opts
) do
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.execute", %{
"#{inspect(__MODULE__)}.execute-params" => %{
repo: repo,
query_meta: query_meta,
prepared: prepared,
params: params,
opts: opts
}
})
# table and model are now nested under .from.source
{table, model} = prepared.from.source
# Bang name suggests this raises on unsupported where clauses (defined
# elsewhere in this module) — TODO confirm.
validate_where_clauses!(prepared)
lookup_fields = extract_lookup_fields(prepared.wheres, params, [])
# Optional :scan_limit option caps the number of items per page.
limit_option = opts[:scan_limit]
scan_limit = if is_integer(limit_option), do: [limit: limit_option], else: []
updated_opts =
if table == migration_source do
ecto_dynamo_log(
:debug,
"#{inspect(__MODULE__)}.execute: table name corresponds with migration source: #{
inspect(migration_source)
}. Setting options for recursive scan.",
%{}
)
Keyword.drop(opts, [:timeout, :log]) ++ [recursive: true]
else
Keyword.drop(opts, [:scan_limit, :limit]) ++ scan_limit
end
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.execute: local variables", %{
"#{inspect(__MODULE__)}.execute-vars" => %{
table: table,
lookup_fields: lookup_fields,
scan_limit: scan_limit
}
})
# Dispatch on the prepared operation tag.
case func do
:delete_all ->
delete_all(repo, table, lookup_fields, updated_opts)
:update_all ->
update_all(repo, table, lookup_fields, updated_opts, prepared.updates, params)
:all ->
ecto_dynamo_log(:info, "#{inspect(__MODULE__)}.execute: :all", %{
"#{inspect(__MODULE__)}.execute-all-vars" => %{
table: table,
lookup_fields: lookup_fields,
updated_opts: updated_opts
}
})
result = Ecto.Adapters.DynamoDB.Query.get_item(repo, table, lookup_fields, updated_opts)
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.execute: all: result", %{
"#{inspect(__MODULE__)}.execute-all-result" => inspect(result)
})
# Optionally expose raw query metadata via the QueryInfo agent.
if opts[:query_info_key],
do:
Ecto.Adapters.DynamoDB.QueryInfo.put(
opts[:query_info_key],
extract_query_info(result)
)
if result == %{} do
# Empty map means "not found"
{0, []}
else
case query_meta do
# Types are present in query_meta when there is no :select clause.
%{select: %{from: {_, {_, _, _, types}}}} ->
types = types_to_source_fields(model, types)
handle_type_decode(table, result, types, repo, opts)
_ ->
if table == migration_source do
decoded = Enum.map(result["Items"], &decode_item(&1, repo, opts))
{length(decoded), decoded}
else
# Queries with a :select clause will not have the types available in the query_meta,
# instead construct them from prepared.select
types = construct_types_from_select_fields(prepared.select)
handle_type_decode(table, result, types, repo, opts)
end
end
end
end
end
# Decode fetched DynamoDB items into Ecto-typed rows, returning {count, rows}.
# A response with neither "Count" nor "Responses" came from a single get_item
# and carries one "Item". (Only nil/false are falsy in Elixir, so any present
# "Count" — even 0 — routes to the multi-item branch.)
defp handle_type_decode(table, result, types, repo, opts) do
if !result["Count"] and !result["Responses"] do
decoded = decode_item(result["Item"], types, repo, opts)
{1, [decoded]}
else
# batch_get_item returns "Responses" rather than "Items"
results_to_decode =
if result["Items"], do: result["Items"], else: result["Responses"][table]
decoded = Enum.map(results_to_decode, &decode_item(&1, types, repo, opts))
{length(decoded), decoded}
end
end
# Translate Ecto schema field atoms into their storage source names,
# preserving each field's associated type.
defp types_to_source_fields(model, types) do
  Enum.map(types, fn {field, type} ->
    {model.__schema__(:field_source, field), type}
  end)
end
# delete_all allows for the recursive option, scanning through multiple pages
# Entry point for Repo.delete_all/2: fetches matching keys page by page and
# batch-deletes them, returning {num_deleted, nil}.
defp delete_all(repo, table, lookup_fields, opts) do
ecto_dynamo_log(:info, "#{inspect(__MODULE__)}.delete_all", %{
"#{inspect(__MODULE__)}.delete_all-params" => %{
table: table,
lookup_fields: lookup_fields,
opts: opts
}
})
# select only the key
{:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(repo, table)
scan_or_query = Ecto.Adapters.DynamoDB.Query.scan_or_query?(repo, table, lookup_fields)
recursive = Ecto.Adapters.DynamoDB.Query.parse_recursive_option(scan_or_query, opts)
# Project only the primary-key attributes; full items are not needed to delete.
updated_opts =
prepare_recursive_opts(opts ++ [projection_expression: Enum.join(key_list, ", ")])
delete_all_recursive(repo, table, lookup_fields, updated_opts, recursive, %{}, 0)
end
# One page of the delete_all loop: fetch matching keys, batch-delete them,
# accumulate query stats, and recurse while LastEvaluatedKey indicates more
# pages and the recursion option allows it. Returns {total_deleted, nil}.
defp delete_all_recursive(
repo,
table,
lookup_fields,
opts,
recursive,
query_info,
total_processed
) do
# query the table for which records to delete
fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(repo, table, lookup_fields, opts)
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.delete_all_recursive: fetch_result", %{
"#{inspect(__MODULE__)}.delete_all_recursive-fetch_result" => inspect(fetch_result)
})
# Normalize the three possible response shapes into a flat item list.
items =
case fetch_result do
%{"Items" => fetch_items} -> fetch_items
%{"Item" => item} -> [item]
%{"Responses" => table_map} -> table_map[table]
_ -> []
end
# Build batch_write_item delete requests keyed by the decoded primary key.
prepared_data =
for key_list <- Enum.map(items, &Map.to_list/1) do
key_map =
for {key, val_map} <- key_list, into: %{}, do: {key, Dynamo.Decoder.decode(val_map)}
[delete_request: [key: key_map]]
end
unprocessed_items =
if prepared_data != [] do
batch_delete(repo, table, prepared_data)
else
%{}
end
# Count only the requests DynamoDB actually processed this round.
num_processed =
length(prepared_data) -
if !unprocessed_items[table], do: 0, else: length(unprocessed_items[table])
# Fold this page's Count/ScannedCount/LastEvaluatedKey into the running stats.
updated_query_info =
Enum.reduce(fetch_result, query_info, fn {key, val}, acc ->
case key do
"Count" ->
Map.update(acc, key, val, fn x -> x + val end)
"ScannedCount" ->
Map.update(acc, key, val, fn x -> x + val end)
"LastEvaluatedKey" ->
Map.update(acc, key, val, fn _ -> fetch_result["LastEvaluatedKey"] end)
_ ->
acc
end
end)
|> Map.update("UnprocessedItems", unprocessed_items, fn map ->
if map == %{}, do: %{}, else: %{table => map[table] ++ unprocessed_items[table]}
end)
updated_recursive = Ecto.Adapters.DynamoDB.Query.update_recursive_option(recursive)
if fetch_result["LastEvaluatedKey"] != nil and updated_recursive.continue do
opts_with_offset = opts ++ [exclusive_start_key: fetch_result["LastEvaluatedKey"]]
delete_all_recursive(
repo,
table,
lookup_fields,
opts_with_offset,
updated_recursive.new_value,
updated_query_info,
total_processed + num_processed
)
else
# We're not retrying unprocessed items yet, but we are providing the relevant info in the QueryInfo agent if :query_info_key is supplied
if opts[:query_info_key],
do: Ecto.Adapters.DynamoDB.QueryInfo.put(opts[:query_info_key], updated_query_info)
{num_processed + total_processed, nil}
end
end
# Returns unprocessed_items
# Similarly to a batch insert, batch delete is also restricted by DDB's batch write limit of 25 records - these requests will be chunked as well.
# Accumulates any UnprocessedItems DynamoDB reports across all chunks into a
# single %{table => items} map (empty map when everything was processed).
defp batch_delete(repo, table, prepared_data) do
Enum.chunk_every(prepared_data, @batch_write_item_limit)
|> Enum.reduce(%{}, fn batch, unprocessed_items ->
batch_write_attempt =
Dynamo.batch_write_item(%{table => batch})
|> ExAws.request(ex_aws_config(repo))
|> handle_error!(repo, %{table: table, records: []})
# Pin on ^table so only this table's leftovers are accumulated.
case batch_write_attempt do
%{"UnprocessedItems" => %{^table => items}} ->
Map.update(unprocessed_items, table, items, &(&1 ++ items))
_ ->
unprocessed_items
end
end)
end
# Entry point for Repo.update_all/3: builds the DynamoDB update expression,
# decides whether pull-index resolution can happen up front or must be done
# per item, then runs the recursive fetch-and-update loop.
defp update_all(repo, table, lookup_fields, opts, updates, params) do
ecto_dynamo_log(:info, "#{inspect(__MODULE__)}.update_all", %{
"#{inspect(__MODULE__)}.update_all-params" => %{
table: table,
lookup_fields: lookup_fields,
opts: opts
}
})
scan_or_query = Ecto.Adapters.DynamoDB.Query.scan_or_query?(repo, table, lookup_fields)
recursive = Ecto.Adapters.DynamoDB.Query.parse_recursive_option(scan_or_query, opts)
key_list = Ecto.Adapters.DynamoDB.Info.primary_key!(repo, table)
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.update_all: key_list", %{
"#{inspect(__MODULE__)}.update_all-key_list" => inspect(key_list)
})
# The remove statement must be constructed after finding pull-indexes, but it
# also includes possibly removing nil fields, and since we have one handler for
# both set and remove, we call it during the batch update process
{update_expression, update_fields_sans_set_remove, set_remove_fields} =
construct_update_expression(repo, updates, params, opts)
ecto_dynamo_log(:info, "#{inspect(__MODULE__)}.update_all: update fields", %{
"#{inspect(__MODULE__)}.update_all-update_fields" => %{
update_fields_sans_set_remove: inspect(update_fields_sans_set_remove),
set_remove_fields: inspect(set_remove_fields)
}
})
attribute_names = construct_expression_attribute_names(update_fields_sans_set_remove)
attribute_values =
construct_expression_attribute_values(repo, update_fields_sans_set_remove, opts)
base_update_options = [
expression_attribute_names: attribute_names,
update_expression: update_expression,
return_values: :all_new
]
updated_opts = prepare_recursive_opts(opts)
update_options = maybe_add_attribute_values(base_update_options, attribute_values)
# True when some :pull action lacks a caller-supplied index in :pull_indexes.
# NOTE(review): Keyword.keys/1 would raise if set_remove_fields[:pull] were
# nil — assumes construct_update_expression always supplies a :pull entry;
# TODO confirm.
pull_actions_without_index =
Keyword.keys(set_remove_fields[:pull])
|> Enum.any?(fn x -> !Enum.member?(Keyword.keys(maybe_list(opts[:pull_indexes])), x) end)
# If every pull has an index, the options can be finalized once here;
# otherwise set_remove_fields is kept so batch_update resolves per item.
{new_update_options, new_set_remove_fields} =
if pull_actions_without_index do
{update_options, set_remove_fields}
else
merged_pull_indexes =
Keyword.merge(set_remove_fields[:pull], maybe_list(opts[:pull_indexes]))
opts_with_pull_indexes =
Keyword.update(opts, :pull_indexes, merged_pull_indexes, fn _ -> merged_pull_indexes end)
{update_batch_update_options(
repo,
update_options,
set_remove_fields,
opts_with_pull_indexes
), []}
end
update_all_recursive(
repo,
table,
lookup_fields,
updated_opts,
new_update_options,
key_list,
new_set_remove_fields,
recursive,
%{},
0
)
end
# One page of the update_all loop: fetch matching items, update each in a
# batch, accumulate query stats, and recurse while LastEvaluatedKey indicates
# more pages and the recursion option allows it. Returns {total_updated, []}.
defp update_all_recursive(
repo,
table,
lookup_fields,
opts,
update_options,
key_list,
set_remove_fields,
recursive,
query_info,
total_updated
) do
fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(repo, table, lookup_fields, opts)
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.update_all_recursive: fetch_result", %{
"#{inspect(__MODULE__)}.update_all_recursive-fetch_result" => inspect(fetch_result)
})
# Accumulate Count/ScannedCount when present (scan/query responses only).
updated_query_info =
case fetch_result do
%{"Count" => last_count, "ScannedCount" => last_scanned_count} ->
%{
"Count" => last_count + Map.get(query_info, "Count", 0),
"ScannedCount" => last_scanned_count + Map.get(query_info, "ScannedCount", 0),
"LastEvaluatedKey" => Map.get(fetch_result, "LastEvaluatedKey")
}
_ ->
query_info
end
# Normalize the three possible response shapes into a flat item list.
items =
case fetch_result do
%{"Items" => fetch_items} -> fetch_items
%{"Item" => item} -> [item]
%{"Responses" => table_map} -> table_map[table]
_ -> []
end
num_updated =
if items != [] do
batch_update(repo, table, items, key_list, update_options, set_remove_fields, opts)
else
0
end
updated_recursive = Ecto.Adapters.DynamoDB.Query.update_recursive_option(recursive)
if fetch_result["LastEvaluatedKey"] != nil and updated_recursive.continue do
opts_with_offset = opts ++ [exclusive_start_key: fetch_result["LastEvaluatedKey"]]
update_all_recursive(
repo,
table,
lookup_fields,
opts_with_offset,
update_options,
key_list,
set_remove_fields,
updated_recursive.new_value,
updated_query_info,
total_updated + num_updated
)
else
# Expose accumulated stats via the QueryInfo agent when requested.
if opts[:query_info_key],
do: Ecto.Adapters.DynamoDB.QueryInfo.put(opts[:query_info_key], updated_query_info)
{total_updated + num_updated, []}
end
end
# Apply the update expression to each fetched item individually, returning the
# number of items updated. When set_remove_fields is non-empty, pull indexes
# are resolved per item against that item's current list contents.
defp batch_update(repo, table, items, key_list, update_options, set_remove_fields, opts) do
Enum.reduce(items, 0, fn result_to_update, acc ->
# Primary-key attribute values identifying this item.
filters = get_key_values_dynamo_map(result_to_update, key_list)
# we only update this on a case-by-case basis if pull actions
# without specific indexes are specified
options_with_set_and_remove =
case set_remove_fields do
[] ->
update_options
_ ->
# Locate, in this item's current list value, the positions of every
# element targeted by a :pull, then merge in caller-supplied indexes.
pull_fields_with_indexes =
Enum.map(set_remove_fields[:pull], fn {field_atom, val} ->
list = result_to_update[to_string(field_atom)]
{field_atom, find_all_indexes_in_dynamodb_list(list, val)}
end)
merged_pull_indexes =
Keyword.merge(pull_fields_with_indexes, maybe_list(opts[:pull_indexes]))
opts_with_pull_indexes =
Keyword.update(opts, :pull_indexes, merged_pull_indexes, fn _ ->
merged_pull_indexes
end)
update_batch_update_options(
repo,
update_options,
set_remove_fields,
opts_with_pull_indexes
)
end
# 'options_with_set_and_remove' might not have the key, ':expression_attribute_values',
# when there are only removal statements.
record =
if options_with_set_and_remove[:expression_attribute_values],
do: [options_with_set_and_remove[:expression_attribute_values] |> Enum.into(%{})],
else: []
# Skip the request entirely when the expression is empty (nothing to do).
if options_with_set_and_remove[:update_expression] |> String.trim() != "" do
Dynamo.update_item(table, filters, options_with_set_and_remove)
|> ExAws.request(ex_aws_config(repo))
|> handle_error!(repo, %{table: table, records: record ++ []})
acc + 1
else
acc
end
end)
end
# Merge the set/push/pull/remove portions of an update into the base update
# options: combines attribute names/values and concatenates the SET and REMOVE
# statements with the base update expression.
defp update_batch_update_options(repo, update_options, set_remove_fields, opts) do
attribute_names =
construct_expression_attribute_names(Keyword.values(set_remove_fields) |> List.flatten())
# :set and :push both contribute expression attribute values.
set_and_push_fields =
maybe_list(set_remove_fields[:set]) ++ maybe_list(set_remove_fields[:push])
opts_with_push = opts ++ Keyword.take(set_remove_fields, [:push])
attribute_values =
construct_expression_attribute_values(repo, set_and_push_fields, opts_with_push)
set_statement = construct_set_statement(repo, set_remove_fields[:set], opts_with_push)
opts_for_construct_remove =
Keyword.take(set_remove_fields, [:pull]) ++
Keyword.take(opts, [:pull_indexes, :remove_nil_fields])
remove_statement =
construct_remove_statement(repo, set_remove_fields[:set], opts_for_construct_remove)
base_update_options = [
expression_attribute_names:
Map.merge(attribute_names, update_options[:expression_attribute_names]),
update_expression:
(set_statement <> " " <> remove_statement <> " " <> update_options[:update_expression])
|> String.trim(),
return_values: :all_new
]
# Only attach :expression_attribute_values when there are any (DynamoDB
# rejects an empty values map).
maybe_add_attribute_values(
base_update_options,
attribute_values ++ maybe_list(update_options[:expression_attribute_values])
)
end
# Find indexes to remove for the update :pull action: the zero-based positions
# in the decoded DynamoDB list whose value equals `target`.
defp find_all_indexes_in_dynamodb_list(dynamodb_list, target) do
  decoded = Dynamo.Decoder.decode(dynamodb_list)

  for {value, index} <- Enum.with_index(decoded), value == target, do: index
end
# During delete_all's and update_all's recursive procedure, we want to keep
# the recursion in the top level, between actions, rather than load all the
# results into memory and then act; so we disable the recursion on get_item.
defp prepare_recursive_opts(opts) do
  opts
  |> Keyword.delete(:page_limit)
  |> Keyword.update(:recursive, false, fn _ -> false end)
end
@doc """
Inserts a single new struct in the data store.
## Autogenerate
The primary key will be automatically included in `returning` if the
field has type `:id` or `:binary_id` and no value was set by the
developer or none was autogenerated by the adapter.
"""
# @callback insert(repo, schema_meta, fields, on_conflict, returning, options) ::
# {:ok, fields} | {:invalid, constraints} | no_return
# def insert(_,_,_,_,_) do
@impl Ecto.Adapter.Schema
def insert(repo_meta, schema_meta, fields, on_conflict, returning, opts) do
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.insert", %{
"#{inspect(__MODULE__)}.insert-params" => %{
repo_meta: repo_meta,
schema_meta: schema_meta,
fields: fields,
on_conflict: on_conflict,
returning: returning,
opts: opts
}
})
table = schema_meta.source
model = schema_meta.schema
fields_map = Enum.into(fields, %{})
record = maybe_replace_empty_mapsets(fields_map, repo_meta.repo, opts)
# When :insert_nil_fields is enabled (the default), expand the record to the
# full schema field set so unset fields are written as explicit nils.
insert_nil_fields = opt_config(:insert_nil_fields, repo_meta.repo, opts, true)
record = unless insert_nil_fields, do: record, else: build_record_map(model, record)
ecto_dynamo_log(:info, "#{inspect(__MODULE__)}.insert: local variables", %{
"#{inspect(__MODULE__)}.insert-vars" => %{table: table, record: record}
})
{:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(repo_meta.repo, table)
hash_key = hd(key_list)
on_conflict_action = elem(on_conflict, 0)
# Unless the caller asked for :replace_all, add a condition expression so the
# put fails (ConditionalCheckFailed) when a record with this key exists.
options =
case on_conflict_action do
:replace_all ->
[]
_ ->
attribute_names = for k <- key_list, into: %{}, do: {"##{k}", k}
conditions = for k <- key_list, do: "attribute_not_exists(##{k})"
condition_expression = Enum.join(conditions, " and ")
[
expression_attribute_names: attribute_names,
condition_expression: condition_expression
]
end
case Dynamo.put_item(table, record, options)
|> ExAws.request(ex_aws_config(repo_meta.repo))
|> handle_error!(repo_meta.repo, %{table: table, records: [record]}) do
{:error, "ConditionalCheckFailedException"} ->
case on_conflict_action do
# Per discussion with Jose Valim (https://github.com/elixir-ecto/ecto/issues/2378)
# clarifying the adapter should return nothing if there is no `:returning` specified,
# and what we thought was to be returned as a `nil` id, is only for cases where
# "the field is autogenerated by the database" (https://hexdocs.pm/ecto/Ecto.Repo.html)
:nothing ->
{:ok, []}
:raise ->
# This constraint name yields the correct behavior in the case the user
# has specified a unique constraint on the primary key in their schema:
constraint_name = "#{table}_#{hash_key}_index"
{:invalid, [unique: constraint_name]}
end
%{} ->
{:ok, []}
end
end
@impl Ecto.Adapter.Schema
# Bulk insert via DynamoDB batch writes. Each row becomes a put request;
# batching/chunking is delegated to batch_write/4.
def insert_all(
%{repo: repo},
schema_meta,
field_list,
rows,
on_conflict,
return_sources,
_placeholders,
opts
) do
ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.insert_all", %{
"#{inspect(__MODULE__)}.insert_all-params" => %{
repo: repo,
schema_meta: schema_meta,
field_list: field_list,
rows: rows,
on_conflict: on_conflict,
return_sources: return_sources,
opts: opts
}
})
# Nil-field insertion can be disabled per call (opts) or per repo (config).
insert_nil_field_option = Keyword.get(opts, :insert_nil_fields, true)
do_not_insert_nil_fields =
insert_nil_field_option == false ||
RepoConfig.config_val(repo, :insert_nil_fields) == false
table = schema_meta.source
model = schema_meta.schema
# Wrap each row as a batch_write_item put request, optionally expanded to
# the full schema field set (explicit nils) by build_record_map/2.
prepared_rows =
Enum.map(rows, fn row ->
mapped_fields = Enum.into(row, %{})
record =
if do_not_insert_nil_fields,
do: mapped_fields,
else: build_record_map(model, mapped_fields)
[put_request: [item: record]]
end)
ecto_dynamo_log(:info, "#{inspect(__MODULE__)}.insert_all: local variables", %{
"#{inspect(__MODULE__)}.insert_all-vars" => %{
table: table,
records: get_records_from_fields(prepared_rows)
}
})
batch_write(repo, table, prepared_rows, opts)
end
# DynamoDB will reject an entire batch of insert_all() records if there are more than 25 requests.
# https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
# batch_write/4 will break the list into chunks of 25 items and insert each separately.
# Returns {total_records_processed, nil}; unprocessed items are not retried,
# only reported via logging and (optionally) the QueryInfo agent.
defp batch_write(repo, table, prepared_fields, opts) do
unprocessed_items_element = "UnprocessedItems"
grouped_records = Enum.chunk_every(prepared_fields, @batch_write_item_limit)
num_batches = length(grouped_records)
# Break the prepared_fields into chunks of at most 25 elements to be batch inserted, accumulating
# the total count of records and appropriate results as it loops through the reduce.
{total_processed, results} =
grouped_records
|> Stream.with_index()
|> Enum.reduce({0, []}, fn {field_group, i},
{running_total_processed, batch_write_results} ->
{total_batch_processed, batch_write_attempt} =
handle_batch_write(repo, field_group, table, unprocessed_items_element)
# Log depth of 11 will capture the full data structure returned in any UnprocessedItems - https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
ecto_dynamo_log(
:debug,
"#{inspect(__MODULE__)}.batch_write #{i + 1} of #{num_batches}: local variables",
%{
"#{inspect(__MODULE__)}.insert_all-batch_write" => %{
table: table,
field_group: field_group,
results: batch_write_attempt
}
},
depth: 11
)
# We're not retrying unprocessed items yet, but we are providing the relevant info in the QueryInfo agent if :query_info_key is supplied
if opts[:query_info_key] do
query_info = extract_query_info(batch_write_attempt)
Ecto.Adapters.DynamoDB.QueryInfo.update(opts[:query_info_key], [query_info], fn list ->
list ++ [query_info]
end)
end
{running_total_processed + total_batch_processed,
batch_write_results ++ [batch_write_attempt]}
end)
# Collect every batch's unprocessed items for a single summary log entry.
result_body_for_log = %{
table => Enum.flat_map(results, fn res -> res[unprocessed_items_element][table] || [] end)
}
ecto_dynamo_log(:info, "#{inspect(__MODULE__)}.batch_write: batch_write_attempt result", %{
"#{inspect(__MODULE__)}.insert_all-batch_write" =>
inspect(%{
unprocessed_items_element =>
if(result_body_for_log[table] == [], do: %{}, else: result_body_for_log)
})
})
{total_processed, nil}
end
# Send one chunk (<= 25 requests) to batch_write_item and return
# {records_processed, raw_results}; processed = submitted minus any
# UnprocessedItems DynamoDB reports for this table.
defp handle_batch_write(repo, field_group, table, unprocessed_items_element) do
results =
Dynamo.batch_write_item(%{table => field_group})
|> ExAws.request(ex_aws_config(repo))
|> handle_error!(repo, %{table: table, records: get_records_from_fields(field_group)})
if results[unprocessed_items_element] == %{} do
{length(field_group), results}
else
{length(field_group) - length(results[unprocessed_items_element][table]), results}
end
end
# Unwrap a list of batch-write put requests back into their raw item maps.
defp get_records_from_fields(fields) do
  Enum.map(fields, fn [put_request: [item: record]] -> record end)
end
# Expand the fields being inserted into a full record keyed by source names,
# with unset schema fields present as struct defaults.
#
# Ecto does not convert empty strings to nil before passing them to
# Repo.insert_all, and ExAws will remove empty strings (as well as empty
# lists) when building the insertion query but not nil values. We don't mind
# the removal of empty lists since those cannot be inserted to indexed
# fields, but we'd like to catch the removal of fields with empty strings by
# ExAws to support our option, :remove_nil_fields, so we convert these to nil.
defp build_record_map(model, fields_to_insert) do
  fields = model.__schema__(:fields)
  sources = Map.new(fields, &{&1, model.__schema__(:field_source, &1)})

  empty_strings_to_nil =
    Map.new(fields_to_insert, fn {field, val} ->
      {field, if(val == "", do: nil, else: val)}
    end)

  # Default struct values (minus Ecto metadata), remapped to source names,
  # then overlaid with the caller-supplied values.
  defaults =
    model.__struct__
    |> Map.delete(:__meta__)
    |> Map.from_struct()
    |> Map.new(fn {k, v} -> {Map.get(sources, k), v} end)

  Map.merge(defaults, empty_strings_to_nil)
end
@impl Ecto.Adapter.Schema
# Deletes a single record identified by its primary-key filters.
#
# A condition expression asserting that each key attribute exists is attached,
# so deleting a missing record surfaces as ConditionalCheckFailedException,
# which is translated to {:error, :stale} per the Ecto adapter contract.
def delete(adapter_meta = %{repo: repo}, schema_meta, filters, opts) do
  ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.delete", %{
    "#{inspect(__MODULE__)}.delete-params" => %{
      adapter_meta: adapter_meta,
      schema_meta: schema_meta,
      filters: filters,
      opts: opts
    }
  })

  table = schema_meta.source

  # Composite (hash + range) keys may require the range key to be added to
  # the filters — see maybe_update_filters_for_range_key/6.
  updated_filters =
    maybe_update_filters_for_range_key(repo, table, schema_meta, filters, opts, "delete")

  attribute_names = construct_expression_attribute_names(keys_to_atoms(filters))
  base_options = [expression_attribute_names: attribute_names]
  condition_expression = construct_condition_expression(filters)
  options = base_options ++ [condition_expression: condition_expression]

  # 'options' might not have the key, ':expression_attribute_values', when there are only removal statements
  record =
    if options[:expression_attribute_values],
      do: [options[:expression_attribute_values] |> Enum.into(%{})],
      else: []

  case Dynamo.delete_item(table, updated_filters, options)
       |> ExAws.request(ex_aws_config(repo))
       |> handle_error!(repo, %{table: table, records: record ++ []}) do
    # Success: the adapter returns no fields.
    %{} -> {:ok, []}
    {:error, "ConditionalCheckFailedException"} -> {:error, :stale}
  end
end
@impl Ecto.Adapter.Schema
# Updates a single record identified by its primary-key filters.
#
# A condition expression asserting that each key attribute exists is attached,
# so updating a missing record surfaces as ConditionalCheckFailedException,
# which is translated to {:error, :stale} per the Ecto adapter contract.
def update(adapter_meta = %{repo: repo}, schema_meta, fields, filters, returning, opts) do
  ecto_dynamo_log(:debug, "#{inspect(__MODULE__)}.update", %{
    "#{inspect(__MODULE__)}.update-params" => %{
      adapter_meta: adapter_meta,
      schema_meta: schema_meta,
      fields: fields,
      filters: filters,
      returning: returning,
      opts: opts
    }
  })

  table = schema_meta.source

  updated_filters =
    maybe_update_filters_for_range_key(repo, table, schema_meta, filters, opts, "update")

  update_expression = construct_update_expression(repo, fields, opts)
  # add updated_filters to attribute_ names and values for condition_expression
  attribute_names = construct_expression_attribute_names(fields ++ keys_to_atoms(filters))
  attribute_values = construct_expression_attribute_values(repo, fields, opts)

  base_options = [
    expression_attribute_names: attribute_names,
    update_expression: update_expression
  ]

  condition_expression = construct_condition_expression(filters)

  options =
    maybe_add_attribute_values(base_options, attribute_values) ++
      [condition_expression: condition_expression]

  # 'options' might not have the key, ':expression_attribute_values', when there are only removal statements
  record =
    if options[:expression_attribute_values],
      do: [options[:expression_attribute_values] |> Enum.into(%{})],
      else: []

  case Dynamo.update_item(table, updated_filters, options)
       |> ExAws.request(ex_aws_config(repo))
       |> handle_error!(repo, %{table: table, records: record ++ []}) do
    # Success: the adapter returns no fields.
    %{} -> {:ok, []}
    {:error, "ConditionalCheckFailedException"} -> {:error, :stale}
  end
end
# Support for tables with a hash+range key.
#
# * If the schema has both keys declared (using the `primary_key: true`) the filters are already correct
# * If :range_key is specified with a value it is added to filters
# * If :range_key is not specified, and the table does have a range key, attempt to find it with a DynamoDB query
#
defp maybe_update_filters_for_range_key(repo, table, schema_meta, filters, opts, action) do
  with primary_key_length <- length(schema_meta.schema.__schema__(:primary_key)) do
    case opts[:range_key] do
      # Use primary keys declared in schema
      nil when primary_key_length == 2 ->
        filters

      nil ->
        {:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(repo, table)

        if length(key_list) > 1 do
          # Composite key, but the schema declares only one key: look the
          # record up to recover the full set of key attributes.
          updated_opts = opts ++ [projection_expression: Enum.join(key_list, ", ")]

          filters_as_strings =
            for {field, val} <- filters, do: {Atom.to_string(field), {val, :==}}

          fetch_result =
            Ecto.Adapters.DynamoDB.Query.get_item(repo, table, filters_as_strings, updated_opts)

          items =
            case fetch_result do
              %{"Items" => fetch_items} -> fetch_items
              %{"Item" => item} -> [item]
              _ -> []
            end

          if items == [],
            do:
              raise(
                "#{inspect(__MODULE__)}.#{action} error: no results found for record: #{
                  inspect(filters)
                }"
              )

          # A hash-only filter can match several records on a composite-key
          # table; refuse to guess which one the caller meant.
          if length(items) > 1,
            do:
              raise(
                "#{inspect(__MODULE__)}.#{action} error: more than one result found for record: #{
                  inspect(filters)
                } Please consider using the adapter's :range_key custom inline option (see README)."
              )

          # Flatten the single DynamoDB item (%{field => %{type => val}})
          # into [{field, val}] filters.
          for {field, key_map} <- Map.to_list(hd(items)) do
            [{_field_type, val}] = Map.to_list(key_map)
            {field, val}
          end
        else
          filters
        end

      range_key ->
        # Caller supplied the range key inline (a {field, value} pair,
        # presumably — it is prepended directly onto the filters; confirm).
        [range_key | filters]
    end
  end
end
# Normalizes a filter keyword list so every key is an atom.
defp keys_to_atoms(pairs) do
  Enum.map(pairs, fn {key, value} -> {maybe_string_to_atom(key), value} end)
end
# Converts a binary key to an atom; any other term passes through untouched.
# NOTE(review): String.to_atom/1 can exhaust the atom table if keys ever come
# from untrusted input — confirm callers only pass schema-derived field names.
defp maybe_string_to_atom(key) when is_binary(key), do: String.to_atom(key)
defp maybe_string_to_atom(key), do: key
# Builds a DynamoDB condition expression asserting that every filtered
# attribute already exists on the item (used to detect stale updates/deletes).
defp construct_condition_expression(filters) when is_list(filters) do
  filters
  |> Keyword.keys()
  |> Enum.map_join(" AND ", fn field -> "attribute_exists(##{field})" end)
end
# Keeps only DynamoDB's bookkeeping keys from a raw response map.
defp extract_query_info(result) do
  Map.take(result, [
    "Count",
    "ScannedCount",
    "LastEvaluatedKey",
    "UnprocessedItems",
    "UnprocessedKeys"
  ])
end
# Used in update_all
# Extracts the [{field, value}] pairs for one update action (:set, :push,
# :pull, :inc) from an Ecto updates AST, resolving `^`-pinned values from
# `params` by positional index. Raises (via error/1) on unsupported shapes.
defp extract_update_params([], _action_atom, _params), do: []

defp extract_update_params([%{expr: key_list}], action_atom, params) do
  case key_list[action_atom] do
    nil ->
      []

    action_list ->
      for s <- action_list do
        # Each entry looks like {field, {:^, _, [param_index]}}.
        {field_atom, {:^, _, [idx]}} = s
        {field_atom, Enum.at(params, idx)}
      end
  end
end

defp extract_update_params([a], _action_atom, _params),
  do:
    error(
      "#{inspect(__MODULE__)}.extract_update_params: Updates is either missing the :expr key or does not contain a struct or map: #{
        inspect(a)
      }"
    )

defp extract_update_params(unsupported, _action_atom, _params),
  do:
    error(
      "#{inspect(__MODULE__)}.extract_update_params: unsupported parameter construction. #{
        inspect(unsupported)
      }"
    )
# Ecto has no :push/:pull support for non-array types, so the equivalent
# :add and :delete actions arrive through opts; look one action list up here.
# (`with` returns the looked-up value untouched unless it is nil.)
defp extract_update_params(actions, action_atom) do
  with nil <- actions[action_atom], do: []
end
# used in :update_all
# Pulls the named primary-key attributes out of a raw DynamoDB item map and
# decodes them into {atom_key, value} pairs.
defp get_key_values_dynamo_map(dynamo_map, {:primary, keys}) do
  Enum.map(keys, fn key ->
    {String.to_atom(key), Dynamo.Decoder.decode(dynamo_map[key])}
  end)
end
# Maps each field to a DynamoDB expression-attribute-name placeholder,
# e.g. :name -> {"#name", "name"}.
defp construct_expression_attribute_names(fields) do
  Map.new(fields, fn {field, _value} ->
    name = Atom.to_string(field)
    {"#" <> name, name}
  end)
end
# Builds the [{field, formatted_value}] pairs that become the update's
# DynamoDB expression attribute values.
defp construct_expression_attribute_values(repo, fields, opts) do
  # When any of these flags is set, nil means "REMOVE the attribute" rather
  # than "write an explicit NULL".
  remove_rather_than_set_to_null =
    opts[:remove_nil_fields] || opts[:remove_nil_fields_on_update] ||
      RepoConfig.config_val(repo, :remove_nil_fields_on_update) == true

  # If the value is nil and the :remove_nil_fields option is set,
  # we're removing this attribute, not updating it, so filter out any such fields:
  fields
  |> maybe_replace_empty_mapsets(repo, opts)
  |> Enum.reduce([], &format_update_field(&1, &2, remove_rather_than_set_to_null, opts))
  # :pull fields are handled by a REMOVE statement and carry no value here.
  |> Enum.filter(fn {x, _} -> not Keyword.has_key?(maybe_list(opts[:pull]), x) end)
end
# Third argument is the resolved remove-rather-than-set-to-null flag: when
# true, nil values are dropped entirely (handled later by a REMOVE statement)
# instead of being written as DynamoDB NULLs.
defp format_update_field({_k, nil}, acc, true, _opts), do: acc
defp format_update_field({k, v}, acc, true, opts), do: [{k, format_val(k, v, opts)} | acc]

defp format_update_field({k, v}, acc, false, opts),
  do: [{k, format_nil_or_val(k, v, opts)} | acc]
# Coerces a possibly-nil option value into a list so callers can enumerate it
# safely.
defp maybe_list(value) when is_list(value), do: value
defp maybe_list(_non_list), do: []
# nil becomes an explicit DynamoDB NULL attribute; anything else is formatted
# normally via format_val/3.
defp format_nil_or_val(_field, nil, _opts), do: %{"NULL" => "true"}
defp format_nil_or_val(field, value, opts), do: format_val(field, value, opts)
# Values destined for a :push (list_append) update must be wrapped in a list;
# every other value passes through unchanged.
defp format_val(field, value, opts) do
  if is_nil(opts[:push][field]), do: value, else: [value]
end
# DynamoDB rejects an empty expression-attribute-values map, so only prepend
# the option when there is at least one value to send.
defp maybe_add_attribute_values(options, []), do: options

defp maybe_add_attribute_values(options, attribute_values),
  do: [{:expression_attribute_values, attribute_values} | options]
# update_all variant: collects the per-action field lists from the Ecto
# updates AST (:set/:push/:pull/:inc) and from opts (:add/:delete), building
# the ADD/DELETE portion of the update expression up front.
#
# Returns {add_and_delete_statement, add_and_delete_fields,
#          [set: ..., push: ..., pull: ...]}.
defp construct_update_expression(_repo, updates, params, opts) do
  to_set = extract_update_params(updates, :set, params)
  to_push = extract_update_params(updates, :push, params)
  to_pull = extract_update_params(updates, :pull, params)
  # DynamoDB's ADD action covers both opts-supplied :add and Ecto's :inc.
  to_add = extract_update_params(opts, :add) ++ extract_update_params(updates, :inc, params)
  to_delete = extract_update_params(opts, :delete)

  {(construct_add_statement(to_add, opts) <>
      " " <>
      construct_delete_statement(to_delete, opts))
   |> String.trim(), to_add ++ to_delete, [set: to_set, push: to_push, pull: to_pull]}
end
# The update callback supplies fields in the parameters
# whereas update_all includes a more complicated updates structure
#
# Returns "SET ... REMOVE ..." (either part may be absent).
defp construct_update_expression(repo, fields, opts) do
  # SET covers non-nil values (and list appends); REMOVE covers nil values
  # and :pull indexes.
  set_statement = construct_set_statement(repo, fields, opts)
  rem_statement = construct_remove_statement(repo, fields, opts)
  String.trim("#{set_statement} #{rem_statement}")
end
# fields::[{:field, val}]
# Builds the "SET ..." portion of an update expression, or "" when there is
# nothing to set. Nil values are skipped when nils are being removed rather
# than written; :push fields become list_append calls.
defp construct_set_statement(repo, fields, opts) do
  remove_rather_than_set_to_null =
    opts[:remove_nil_fields] || opts[:remove_nil_fields_on_update] ||
      RepoConfig.config_val(repo, :remove_nil_fields_on_update) == true

  set_clauses =
    for {key, val} <- fields, not (is_nil(val) and remove_rather_than_set_to_null) do
      key_str = Atom.to_string(key)
      "##{key_str}=:#{key_str}"
    end ++
      case opts[:push] do
        nil ->
          []

        push_list ->
          for {key, _val} <- push_list do
            key_str = Atom.to_string(key)

            # list_append argument order decides whether the new element
            # lands at the head or the tail of the stored list.
            if Enum.member?(maybe_list(opts[:prepend_to_list]), key),
              do: "##{key_str} = list_append(:#{key_str}, ##{key_str})",
              else: "##{key_str} = list_append(##{key_str}, :#{key_str})"
          end
      end

  case set_clauses do
    [] ->
      ""

    _ ->
      "SET " <> Enum.join(set_clauses, ", ")
  end
end
# Builds the "REMOVE ..." portion of an update expression, or "" when there
# is nothing to remove. Covers (a) nil-valued fields when nils are removed
# rather than written, and (b) :pull list elements addressed by index.
defp construct_remove_statement(repo, fields, opts) do
  remove_rather_than_set_to_null =
    opts[:remove_nil_fields] || opts[:remove_nil_fields_on_update] ||
      RepoConfig.config_val(repo, :remove_nil_fields_on_update) == true

  # Ecto :pull update can be emulated provided
  # we are given an index to remove in opts[:pull_indexes]
  remove_clauses =
    if remove_rather_than_set_to_null do
      for {key, val} <- fields, is_nil(val), do: "##{Atom.to_string(key)}"
    else
      []
    end ++
      cond do
        # No indexes supplied (or all empty): nothing to pull.
        !opts[:pull_indexes] or Keyword.values(opts[:pull_indexes]) |> List.flatten() == [] ->
          []

        opts[:pull] == nil ->
          []

        true ->
          # Render each pulled element as "#field[index]".
          for {key, _val} <- opts[:pull] do
            key_str = Atom.to_string(key)

            Enum.map(opts[:pull_indexes][key], fn index -> "##{key_str}[#{index}]" end)
            |> Enum.join(", ")
          end
      end

  case remove_clauses do
    [] ->
      ""

    _ ->
      "REMOVE " <> Enum.join(remove_clauses, ", ")
  end
end
# fields::[{:field, val}]
# Builds the "ADD #f :f, ..." section of an update expression, or an empty
# string when there is nothing to add.
defp construct_add_statement([], _opts), do: ""

defp construct_add_statement(fields, _opts) do
  "ADD " <> Enum.map_join(fields, ", ", fn {key, _val} -> "##{key} :#{key}" end)
end
# Builds the "DELETE #f :f, ..." section of an update expression, or an
# empty string when there is nothing to delete.
defp construct_delete_statement([], _opts), do: ""

defp construct_delete_statement(fields, _opts) do
  "DELETE " <> Enum.map_join(fields, ", ", fn {key, _val} -> "##{key} :#{key}" end)
end
# Walks every where clause of the query, raising (via
# validate_where_clause!/1) on the first unsupported operator.
defp validate_where_clauses!(query) do
  Enum.map(query.wheres, &validate_where_clause!/1)
end
# Comparison operators translate directly to DynamoDB conditions.
defp validate_where_clause!(%BooleanExpr{expr: {op, _, _}})
     when op in [:==, :<, :>, :<=, :>=, :in],
     do: :ok

# Logical combinations of supported clauses.
defp validate_where_clause!(%BooleanExpr{expr: {logical_op, _, _}})
     when logical_op in [:and, :or],
     do: :ok

# is_nil and fragment (between / begins_with) receive special handling in
# extract_lookup_fields/3.
defp validate_where_clause!(%BooleanExpr{expr: {:is_nil, _, _}}), do: :ok
defp validate_where_clause!(%BooleanExpr{expr: {:fragment, _, _}}), do: :ok

defp validate_where_clause!(unsupported),
  do: error("unsupported where clause: #{inspect(unsupported)}")
# We are parsing a nested, recursive structure of the general type:
# %{:logical_op, list_of_clauses} | %{:conditional_op, field_and_value}
#
# Accumulates lookup fields of the shape {field_string, {value, op}} (or
# {logical_op, nested_fields} for and/or), resolving pinned values from
# `params`.
defp extract_lookup_fields([], _params, lookup_fields), do: lookup_fields

defp extract_lookup_fields([query | queries], params, lookup_fields) do
  # A logical operator tuple does not always have a parent 'expr' key.
  maybe_extract_from_expr =
    case query do
      %BooleanExpr{expr: expr} -> expr
      # TODO: could there be other cases?
      _ -> query
    end

  case maybe_extract_from_expr do
    # A logical operator points to a list of conditionals
    {op, _, [left, right]} when op in [:==, :<, :>, :<=, :>=, :in] ->
      {field, value} = get_op_clause(left, right, params)

      updated_lookup_fields =
        case List.keyfind(lookup_fields, field, 0) do
          # we assume the most ops we can apply to one field is two, otherwise this might throw an error
          {field, {old_val, old_op}} ->
            List.keyreplace(lookup_fields, field, 0, {field, {[value, old_val], [op, old_op]}})

          _ ->
            [{field, {value, op}} | lookup_fields]
        end

      extract_lookup_fields(queries, params, updated_lookup_fields)

    # Logical operator expressions have more than one op clause
    # We are matching queries of the type: 'from(p in Person, where: p.email == "g@email.com" and p.first_name == "George")'
    # But not of the type: 'from(p in Person, where: [email: "g@email.com", first_name: "George"])'
    #
    # A logical operator is a member of a list
    {logical_op, _, clauses} when logical_op in [:and, :or] ->
      # Recurse into the sub-clauses, then tag the collected group with the
      # logical operator.
      deeper_lookup_fields = extract_lookup_fields(clauses, params, [])

      extract_lookup_fields(queries, params, [
        {logical_op, deeper_lookup_fields} | lookup_fields
      ])

    {:fragment, _, raw_expr_mixed_list} ->
      # Fragments support 'between' and 'begins_with' — see
      # parse_raw_expr_mixed_list/2.
      parsed_fragment = parse_raw_expr_mixed_list(raw_expr_mixed_list, params)
      extract_lookup_fields(queries, params, [parsed_fragment | lookup_fields])

    # We perform a post-query is_nil filter on indexed fields and have DynamoDB filter
    # for nil non-indexed fields (although post-query nil-filters on (missing) indexed
    # attributes could only find matches when the attributes are not the range part of
    # a queried partition key (hash part) since those would not return the sought records).
    {:is_nil, _, [arg]} ->
      {{:., _, [_, field_name]}, _, _} = arg

      # We give the nil value a string, "null", since it will be mapped as a DynamoDB attribute_expression_value
      extract_lookup_fields(queries, params, [
        {to_string(field_name), {"null", :is_nil}} | lookup_fields
      ])

    _ ->
      extract_lookup_fields(queries, params, lookup_fields)
  end
end
# Specific (as opposed to generalized) parsing for Ecto :fragment - the only use for it
# so far is 'between' which is the only way to query 'between' on an indexed field since
# those accept only single conditions.
#
# Example with values as strings: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between ", expr: "person:a", raw: " and ", expr: "person:f", raw: ""]
#
# Example with values as part of the string itself: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between person:a and person:f"]
#
# Example with values in params: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between ", expr: {:^, [], [0]}, raw: " and ", expr: {:^, [], [1]}, raw: ""]
#
# Returns {field_string, {value_or_values, :between | :begins_with}}.
defp parse_raw_expr_mixed_list(raw_expr_mixed_list, params) do
  # group the expression into fields, values, and operators,
  # only supporting the example with values in params
  case raw_expr_mixed_list do
    # between
    [
      raw: _,
      expr: {{:., [], [{:&, [], [0]}, field_atom]}, [], []},
      raw: between_str,
      expr: {:^, [], [idx1]},
      raw: and_str,
      expr: {:^, [], [idx2]},
      raw: _
    ] ->
      # The raw text around the pins must literally spell "between ... and".
      if not Regex.match?(~r/^\s*between\s*and\s*$/i, between_str <> and_str),
        do: parse_raw_expr_mixed_list_error(raw_expr_mixed_list)

      {to_string(field_atom), {[Enum.at(params, idx1), Enum.at(params, idx2)], :between}}

    # begins_with
    [
      raw: begins_with_str,
      expr: {{:., [], [{:&, [], [0]}, field_atom]}, [], []},
      raw: comma_str,
      expr: {:^, [], [idx]},
      raw: closing_parenthesis_str
    ] ->
      # The raw text must literally spell "begins_with(, )".
      if not Regex.match?(
           ~r/^\s*begins_with\(\s*,\s*\)\s*$/i,
           begins_with_str <> comma_str <> closing_parenthesis_str
         ),
         do: parse_raw_expr_mixed_list_error(raw_expr_mixed_list)

      {to_string(field_atom), {Enum.at(params, idx), :begins_with}}

    _ ->
      parse_raw_expr_mixed_list_error(raw_expr_mixed_list)
  end
end
# Raised for any fragment shape other than the two supported forms above.
defp parse_raw_expr_mixed_list_error(raw_expr_mixed_list),
  do:
    raise(
      "#{inspect(__MODULE__)}.parse_raw_expr_mixed_list parse error. We currently only support the Ecto fragments of the form, 'where: fragment(\"? between ? and ?\", FIELD_AS_VARIABLE, VALUE_AS_VARIABLE, VALUE_AS_VARIABLE)'; and 'where: fragment(\"begins_with(?, ?)\", FIELD_AS_VARIABLE, VALUE_AS_VARIABLE)'. Received: #{
        inspect(raw_expr_mixed_list)
      }"
    )
# Splits a binary-operator where clause into {field_name_string, value},
# resolving the right-hand side against the query params.
defp get_op_clause(lhs, rhs, params) do
  field_name = Atom.to_string(get_field(lhs))
  {field_name, get_value(rhs, params)}
end
# Extracts the schema field atom from the left-hand side of a where clause;
# e.g. `p.email == ...` arrives as {{:., _, [{:&, _, [0]}, :email]}, _, []}.
defp get_field({{:., _, [{:&, _, [0]}, field]}, _, []}), do: field

defp get_field(other_clause) do
  # Bug fix: `other_clause` is an AST tuple, which does not implement
  # String.Chars — interpolating it directly raised Protocol.UndefinedError
  # and masked the intended ArgumentError; inspect/1 renders any term safely.
  error("Unsupported where clause, left hand side: #{inspect(other_clause)}")
end
# Resolves the right-hand side of a where clause: a single pinned value is
# fetched from params by index.
defp get_value({:^, _, [idx]}, params), do: Enum.at(params, idx)

# Handle queries with variable values, ex. Repo.all from i in Item, where: i.id in ^item_ids
# The last element of the tuple (first arg) will be a list with two numbers;
# the first number will be the number of attributes to be updated (in the event of an update_all query with a variable list)
# and the second will be a count of the number of elements in the variable list being queried. For example:
#
# query = from p in Person, where: p.id in ^ids
# TestRepo.update_all(query, set: [password: "cheese", last_name: "Smith"])
#
# assuming that ids contains 4 values, the last element would be [2, 4].
# Use this data to modify the params, which would otherwise include the values to be updated as well, which we don't want to query on.
defp get_value({:^, _, [num_update_terms, _num_query_terms]}, params),
  do: Enum.drop(params, num_update_terms)

# Seems to be necessary for handling running a batch of migrations down.
defp get_value(%{value: right}, params), do: get_value(right, params)

# Handle .all(query) queries
# Literal (non-pinned) values pass through unchanged.
defp get_value(other_clause, _params), do: other_clause
# Central helper: raise an ArgumentError with the given message.
defp error(msg), do: raise(ArgumentError, message: msg)
# Derives [{field, ecto_type}] from a query's select expression so results
# can be decoded field-by-field.
defp construct_types_from_select_fields(%Ecto.Query.SelectExpr{expr: expr}) do
  case expr do
    # A tuple select of dotted field accesses, e.g. select: {p.a, p.b}.
    {:{}, [], clauses = [{{:., [type: type], [{:&, [], [0]}, field]}, [], []} | _]} ->
      for {{:., [type: type], [{:&, [], [0]}, field]}, [], []} <- clauses, do: {field, type}

    # Whole-source select: no per-field types to extract here.
    {_, _, [0]} ->
      []

    # A single selected field.
    {{:., [type: type], [{_, _, _}, field]}, _, _} ->
      [{field, type}]

    # A plain list of selected fields.
    clauses = [_ | _] ->
      for {{_, [type: type], [{_, _, _}, field]}, _, _} <- clauses, do: {field, type}
  end
end
# Decodes one raw DynamoDB item into a list of values ordered to match the
# selected `types` ([{field, type}]); attributes missing from the item are
# decoded as an explicit NULL (-> nil).
defp decode_item(item, types, repo, opts) do
  types
  |> Enum.map(fn {field, type} ->
    Map.get(item, Atom.to_string(field), %{"NULL" => true})
    |> Dynamo.Decoder.decode()
    |> decode_type(type, repo, opts)
  end)
end

# /3 variant matching an item that carries a "version" attribute —
# presumably used by schema-migration queries; confirm against callers.
defp decode_item(%{"version" => version}, _repo, _opts) do
  [version |> Dynamo.Decoder.decode()]
end
# Decodes datetime, seemingly unhandled by ExAws Dynamo decoder
# Also maps nil set-typed values (optionally to an empty MapSet) and loads
# embedded schemas.
defp decode_type(nil, DynamoDBSet, repo, opts), do: maybe_replace_nil_mapset(repo, opts)
defp decode_type(nil, _type, _repo, _opts), do: nil

defp decode_type(val, type, _repo, _opts) when type in [:utc_datetime_usec, :utc_datetime] do
  # Raises MatchError on a malformed ISO-8601 string.
  {:ok, dt, _offset} = DateTime.from_iso8601(val)
  dt
end

defp decode_type(val, type, _repo, _opts) when type in [:naive_datetime_usec, :naive_datetime],
  do: NaiveDateTime.from_iso8601!(val)

# Support for Ecto >= 3.5
defp decode_type(val, {:parameterized, _, _} = type, _repo, _opts), do: decode_embed(val, type)
# Support for Ecto 3.0-3.4
defp decode_type(val, {:embed, _} = type, _repo, _opts), do: decode_embed(val, type)
# Anything else is already in its final form.
defp decode_type(val, _type, _repo, _opts), do: val
# Loads an embedded-schema value via Ecto; on failure logs at :info and
# returns nil rather than raising.
defp decode_embed(val, type) do
  case Ecto.Type.embedded_load(type, val, :json) do
    {:ok, decoded_value} ->
      decoded_value

    :error ->
      ecto_dynamo_log(
        :info,
        "#{inspect(__MODULE__)}.decode_embed: failed to decode embedded value: #{inspect(val)}"
      )

      nil
  end
end
# We found one instance where DynamoDB's error message could
# be more instructive - when trying to set an indexed field to something
# other than a string or number - so we're adding a more helpful message.
# The parameter, 'params', has the type %{table: :string, records: [:map]}
#
# Unwraps an ExAws response: returns the raw result map on success, returns
# {:error, "ConditionalCheckFailedException"} (used upstream for :stale), and
# raises for every other error.
defp handle_error!(ex_aws_request_result, repo, params) do
  case ex_aws_request_result do
    {:ok, result} ->
      result

    {:error, {error_name, _} = error} ->
      # Check for inappropriate insert into indexed field
      indexed_fields = Ecto.Adapters.DynamoDB.Info.indexed_attributes(repo, params.table)

      # Repo.insert_all can present multiple records at once
      forbidden_insert_on_indexed_field =
        Enum.reduce(params.records, false, fn record, acc ->
          acc ||
            Enum.any?(record, fn {field, val} ->
              # Encode the value to learn its DynamoDB type tag ("S", "N", ...).
              [type] = Dynamo.Encoder.encode(val) |> Map.keys()

              # Ecto does not convert Empty strings to nil before passing them to Repo.update_all or
              # Repo.insert_all DynamoDB provides an instructive message during an update (forwarded by ExAws),
              # but less so for batch_write_item, so we catch the empty string as well.
              # Dynamo does not allow insertion of empty strings in any case.
              (Enum.member?(indexed_fields, to_string(field)) and not (type in ["S", "N"])) ||
                val == ""
            end)
        end)

      cond do
        # we use this error to check if an update or delete record does not exist
        error_name == "ConditionalCheckFailedException" ->
          {:error, error_name}

        forbidden_insert_on_indexed_field ->
          raise "The following request error could be related to attempting to insert an empty string or attempting to insert a type other than a string or number on an indexed field. Indexed fields: #{
            inspect(indexed_fields)
          }. Records: #{inspect(params.records)}.\n\nExAws Request Error! #{
            inspect(error)
          }"

        true ->
          raise ExAws.Error, message: "ExAws Request Error! #{inspect(error)}"
      end
  end
end
@doc """
Logs message to console and optionally to file. Log levels, colours and file path may be set in configuration (details in README.md).
"""
def ecto_dynamo_log(level, message, attributes \\ %{}, opts \\ []) do
  log_levels = Confex.get_env(:ecto_adapters_dynamodb, :log_levels) || [:info]

  if level in log_levels do
    log_path = Confex.get_env(:ecto_adapters_dynamodb, :log_path)
    # :depth bounds how deeply `attributes` is serialized (see chisel/2).
    depth = opts[:depth] || 4
    colours = Confex.get_env(:ecto_adapters_dynamodb, :log_colours)

    d = DateTime.utc_now()

    formatted_message =
      "#{d.year}-#{d.month}-#{d.day} #{d.hour}:#{d.minute}:#{d.second} UTC [Ecto dynamo #{level}] #{
        inspect(message)
      }"

    # Depth-limit and JSON-encode the attributes so the log line stays a
    # single parseable entry.
    {:ok, log_message} =
      Jason.encode(%{message: formatted_message, attributes: chisel(attributes, depth)})

    if Confex.get_env(:ecto_adapters_dynamodb, :log_in_colour) do
      IO.ANSI.format([colours[level] || :normal, log_message], true) |> IO.puts()
    else
      log_message |> IO.puts()
    end

    # Only append to a file when a non-blank path is configured.
    if String.valid?(log_path) and Regex.match?(~r/\S/, log_path),
      do: log_pipe(log_path, log_message)
  end
end
@doc """
Builds the ExAws configuration for `repo`: resolves the repo configuration
via `Resolver.resolve!/1`, keeps the base AWS keys, and merges any repo-level
`:dynamodb` overrides on top.
"""
def ex_aws_config(repo) do
  resolved = Resolver.resolve!(repo.config())

  base =
    Keyword.take(resolved, [:debug_requests, :access_key_id, :secret_access_key, :region])

  Keyword.merge(base, Keyword.get(resolved, :dynamodb, []))
end
# Truncates an arbitrary term to `depth` nested levels for JSON-safe logging:
# strings and numbers pass through, other scalars and structs are inspected,
# and anything nested beyond the depth budget becomes a marker string.
# (Clause order matters: scalars are accepted even at depth 0.)
defp chisel(text, _depth) when is_binary(text), do: text
defp chisel(number, _depth) when is_number(number), do: number
defp chisel(scalar, _depth) when not is_map(scalar) and not is_list(scalar), do: inspect(scalar)
defp chisel(_nested, 0), do: "beyond_log_depth"
defp chisel(%{__struct__: _} = struct_value, _depth), do: inspect(struct_value)

defp chisel(map, depth) when is_map(map),
  do: Map.new(map, fn {key, value} -> {key, chisel(value, depth - 1)} end)

defp chisel(list, depth) when is_list(list),
  do: Enum.map(list, &chisel(&1, depth - 1))
# Appends one log line to the configured log file; raises (MatchError) when
# the file cannot be opened. NOTE(review): the IO.binwrite and File.close
# results are ignored — best-effort by design, presumably.
defp log_pipe(path, str) do
  {:ok, file} = File.open(path, [:append])
  IO.binwrite(file, str)
  File.close(file)
end
# Resolves a configuration value, preferring an inline option in `opts` over
# repo-level configuration; `default` applies when neither is set.
defp opt_config(key, repo, opts, default \\ false) do
  inline = Keyword.get(opts, key)
  if is_nil(inline), do: RepoConfig.config_val(repo, key, default), else: inline
end
# When :empty_mapset_to_nil is enabled, rewrite every empty MapSet value in
# the record to nil; otherwise pass the record through untouched.
# Note: the enabled path returns a map even when given a keyword list.
defp maybe_replace_empty_mapsets(record, repo, opts) do
  if opt_config(:empty_mapset_to_nil, repo, opts) do
    Map.new(record, fn {key, value} -> {key, empty_mapset_to_nil(value)} end)
  else
    record
  end
end
# An empty MapSet becomes nil; every other value is returned unchanged.
defp empty_mapset_to_nil(%MapSet{} = set) do
  if MapSet.size(set) > 0, do: set, else: nil
end

defp empty_mapset_to_nil(other), do: other
# Decoding counterpart of empty_mapset_to_nil/1: when :nil_to_empty_mapset is
# enabled, a missing set-typed attribute decodes to an empty MapSet.
defp maybe_replace_nil_mapset(repo, opts),
  do: if(opt_config(:nil_to_empty_mapset, repo, opts), do: MapSet.new(), else: nil)
end
| 35.353694
| 335
| 0.641436
|
9e22148f51cfe36ecf0dffb41c8f2ded6e97bbc1
| 397
|
exs
|
Elixir
|
test/cog/chat/slack/templates/embedded/bundle_install_test.exs
|
matusf/cog
|
71708301c7dc570fb0d3498a50f47a70ef957788
|
[
"Apache-2.0"
] | 1,003
|
2016-02-23T17:21:12.000Z
|
2022-02-20T14:39:35.000Z
|
test/cog/chat/slack/templates/embedded/bundle_install_test.exs
|
matusf/cog
|
71708301c7dc570fb0d3498a50f47a70ef957788
|
[
"Apache-2.0"
] | 906
|
2016-02-22T22:54:19.000Z
|
2022-03-11T15:19:43.000Z
|
test/cog/chat/slack/templates/embedded/bundle_install_test.exs
|
matusf/cog
|
71708301c7dc570fb0d3498a50f47a70ef957788
|
[
"Apache-2.0"
] | 95
|
2016-02-23T13:42:31.000Z
|
2021-11-30T14:39:55.000Z
|
defmodule Cog.Chat.Slack.Templates.Embedded.BundleInstallTest do
  use Cog.TemplateCase

  # Fix: the description previously read "bundle-info template", but this
  # case renders the "bundle-install" template — the name now matches.
  test "bundle-install template" do
    data = %{"results" => [%{"name" => "heroku",
                             "versions" => [%{"version" => "0.0.4"}]}]}
    expected = "Installed bundle 'heroku' version '0.0.4'"

    assert_rendered_template(:slack, :embedded, "bundle-install", data, expected)
  end
end
| 28.357143
| 81
| 0.617128
|
9e223b234716cf7d9136bb96074b90c1b6cdf334
| 1,995
|
ex
|
Elixir
|
clients/display_video/lib/google_api/display_video/v1/model/list_combined_audiences_response.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/display_video/lib/google_api/display_video/v1/model/list_combined_audiences_response.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/display_video/lib/google_api/display_video/v1/model/list_combined_audiences_response.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.ListCombinedAudiencesResponse do
  @moduledoc """
  Response model for listing combined audiences.

  ## Attributes

  *   `combinedAudiences` (*type:* `list(GoogleApi.DisplayVideo.V1.Model.CombinedAudience.t)`, *default:* `nil`) - The list of combined audiences.
      This list will be absent if empty.
  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - A token to retrieve the next page of results.
      Pass this value in the
      page_token
      field in the subsequent call to `ListCombinedAudiences` method to retrieve
      the next page of results.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :combinedAudiences => list(GoogleApi.DisplayVideo.V1.Model.CombinedAudience.t()),
          :nextPageToken => String.t()
        }

  # Field definitions consumed by GoogleApi.Gax.ModelBase; type: :list marks
  # a repeated field.
  field(:combinedAudiences, as: GoogleApi.DisplayVideo.V1.Model.CombinedAudience, type: :list)
  field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.DisplayVideo.V1.Model.ListCombinedAudiencesResponse do
  # Delegate decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.DisplayVideo.V1.Model.ListCombinedAudiencesResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.DisplayVideo.V1.Model.ListCombinedAudiencesResponse do
  # Delegate encoding to the shared Gax model-base encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.625
| 146
| 0.741855
|
9e228eeb743e4fe0ea62d0d29d82aa1fc9b92575
| 1,216
|
ex
|
Elixir
|
lib/pelemay/generator/builder.ex
|
christianjgreen/pelemay
|
b400c4420f9edd41031d7dbc962eff38215d9e05
|
[
"Apache-2.0"
] | null | null | null |
lib/pelemay/generator/builder.ex
|
christianjgreen/pelemay
|
b400c4420f9edd41031d7dbc962eff38215d9e05
|
[
"Apache-2.0"
] | null | null | null |
lib/pelemay/generator/builder.ex
|
christianjgreen/pelemay
|
b400c4420f9edd41031d7dbc962eff38215d9e05
|
[
"Apache-2.0"
] | null | null | null |
defmodule Pelemay.Generator.Builder do
  @moduledoc """
  Compiles the generated C source for a module into a shared library with
  clang.
  """

  alias Pelemay.Generator

  @cc "clang"
  @cflags ["-Ofast", "-g", "-ansi", "-pedantic", "-femit-all-decls"]
  @cflags_includes ["-I/usr/local/include", "-I/usr/include", "-L/usr/local/lib", "-L/usr/lib"]
  @cflags_after ["-std=c11", "-Wno-unused-function"]
  @ldflags []
  @cflags_non_windows ["-fPIC"]
  @ldflags_non_windows ["-dynamiclib", "-undefined", "dynamic_lookup"]

  # Builds the shared library for `module`. Raises CompileError when clang is
  # not installed; a failing compile crashes on the {_, 0} match.
  def generate(module) do
    if is_nil(System.find_executable(@cc)) do
      raise CompileError, message: "#{@cc} is not installed."
    end

    base_cflags = @cflags ++ ["-I#{erlang_include_path()}"] ++ @cflags_includes ++ @cflags_after
    cflags = with_platform_extras(base_cflags, @cflags_non_windows)
    ldflags = with_platform_extras(@ldflags, @ldflags_non_windows)

    options =
      cflags ++ ["-shared"] ++ ldflags ++ ["-o", Generator.libso(module), Generator.libc(module)]

    {_result, 0} = System.cmd(@cc, options)
  end

  # Windows keeps the base flags as-is; every other platform appends extras.
  defp with_platform_extras(flags, extras) do
    case :os.type() do
      {:win32, :nt} -> flags
      _ -> flags ++ extras
    end
  end

  # Path to the running Erlang runtime's C include directory.
  def erlang_include_path() do
    "#{:code.root_dir()}/erts-#{:erlang.system_info(:version)}/include"
  end
end
| 28.952381
| 97
| 0.616776
|
9e229a72e592453f55b3bb4d286dc0f0b5e6c9b7
| 288
|
exs
|
Elixir
|
test/order_api/buyers_test.exs
|
gissandrogama/delivery_order
|
8642453b03f590fe828225fc13aa58a5f79b2117
|
[
"MIT"
] | null | null | null |
test/order_api/buyers_test.exs
|
gissandrogama/delivery_order
|
8642453b03f590fe828225fc13aa58a5f79b2117
|
[
"MIT"
] | 6
|
2021-01-22T15:23:04.000Z
|
2021-01-28T07:56:01.000Z
|
test/order_api/buyers_test.exs
|
gissandrogama/delivery_order
|
8642453b03f590fe828225fc13aa58a5f79b2117
|
[
"MIT"
] | null | null | null |
defmodule OrderApi.BuyersTest do
  use OrderApi.DataCase

  import OrderApi.PayloadFixture

  alias OrderApi.Buyers

  describe "run/0" do
    test "return structure in json" do
      # Seed a buyer via the payload fixture, then list and match the shape.
      build()

      assert [%OrderApi.Buyer{}] = Buyers.list_buyers()
    end
  end
end
| 18
| 41
| 0.6875
|
9e22f9017b5f316df4db60cbd356b0e2cc77115e
| 2,975
|
ex
|
Elixir
|
plugins/one_webrtc/lib/one_webrtc_web/flex_bar/tab/webrtc.ex
|
smpallen99/ucx_ucc
|
47225f205a6ac4aacdb9bb4f7512dcf4092576ad
|
[
"MIT"
] | 11
|
2017-05-15T18:35:05.000Z
|
2018-02-05T18:27:40.000Z
|
plugins/one_webrtc/lib/one_webrtc_web/flex_bar/tab/webrtc.ex
|
anndream/infinity_one
|
47225f205a6ac4aacdb9bb4f7512dcf4092576ad
|
[
"MIT"
] | 15
|
2017-11-27T10:38:05.000Z
|
2018-02-09T20:42:08.000Z
|
plugins/one_webrtc/lib/one_webrtc_web/flex_bar/tab/webrtc.ex
|
anndream/infinity_one
|
47225f205a6ac4aacdb9bb4f7512dcf4092576ad
|
[
"MIT"
] | 4
|
2017-09-13T11:34:16.000Z
|
2018-02-26T13:37:06.000Z
|
defmodule OneWebrtcWeb.FlexBar.Tab.Webrtc do
use OneChatWeb.FlexBar.Helpers
alias InfinityOne.OnePubSub
alias InfinityOne.{TabBar.Tab, Repo}
alias OneWebrtcWeb.{FlexBar.Tab.MembersList, FlexBarView}
alias OneWebrtc.ClientDevice
require Logger
@spec add_buttons() :: any
def add_buttons do
TabBar.add_button Tab.new(
__MODULE__,
~w[channel group direct im],
"device-settings",
~g"Device Settings",
"icon-mic",
FlexBarView,
"device.html",
95,
[
model: OneWebrtc.ClientDevice,
prefix: "client_device"
])
TabBar.add_button Tab.new(
MembersList,
"webrtc-members-list")
end
@spec args(socket, {id, id, any, map}, args) :: {List.t, socket}
def args(socket, {user_id, _channel_id, _, _}, _) do
# Logger.error "assigns: #{inspect socket.assigns}"
current_user = Helpers.get_user! user_id
client_device = ClientDevice.get_by(user_id: current_user.id, ip_addr: socket.assigns.ip_address) ||
# client_device = ClientDevice.get_by(user_id: current_user.id) ||
ClientDevice.new()
changeset = ClientDevice.change client_device, %{user_id: current_user.id}
assigns =
socket
|> Rebel.get_assigns()
|> Map.put(:client_device, client_device)
|> Map.put(:resource_key, :client_device)
Rebel.put_assigns(socket, assigns)
{[
client_device: client_device,
changeset: changeset,
devices: get_client_devices(socket),
], socket}
end
defp get_client_devices(socket) do
socket
|> exec_js("window.InfinityOne.installed_devices")
# |> IO.inspect(label: "installed_devices")
|> case do
{:ok, devices} -> devices
{:error, nil} -> nil
end
|> build_client_devices
end
defp build_client_devices(nil), do: %{}
defp build_client_devices(devices) do
devices
# |> IO.inspect(label: "installed_devices")
|> Enum.reduce(%{input: [], output: [], video: []}, fn
%{"kind" => "audioinput", "id" => id, "label" => label}, acc ->
update_in acc, [:input], &([{label, id} | &1])
%{"kind" => "audiooutput", "id" => id, "label" => label}, acc ->
update_in acc, [:output], &([{label, id} | &1])
%{"kind" => "videoinput", "id" => id, "label" => label}, acc ->
update_in acc, [:video], &([{label, id} | &1])
end)
|> update_in([:input], &Enum.reverse/1)
|> update_in([:output], &Enum.reverse/1)
|> update_in([:video], &Enum.reverse/1)
end
  # Handles a device-selection change from the flex form: persists the chosen
  # value for `field` on the user's ClientDevice record and broadcasts the
  # change on the user's pubsub topic so other sessions can react.
  #
  # Returns `{:ok, socket}` on success or `{:error, changeset, socket}` when
  # the insert/update fails validation.
  def flex_form_select_change(socket, sender, resource, field, _value) do
    user_id = socket.assigns.user_id
    resource
    |> ClientDevice.change(%{field => sender["value"], "user_id" => user_id})
    |> Repo.insert_or_update
    |> case do
      {:ok, device} ->
        # NOTE(review): `"user:" <> user_id` requires user_id to be a binary —
        # confirm socket.assigns.user_id is never an integer here.
        OnePubSub.broadcast "user:" <> user_id, "device:change", %{device: device}
        {:ok, socket}
      {:error, changeset} ->
        {:error, changeset, socket}
    end
  end
end
| 29.75
| 104
| 0.619832
|
9e22f9f24f3f7bf1d1db8f929d751fb3fc444652
| 1,388
|
exs
|
Elixir
|
mix.exs
|
ghaabor/golem
|
202cde10fe86a7e4e5d3713e7c78a4848d4d96a1
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
ghaabor/golem
|
202cde10fe86a7e4e5d3713e7c78a4848d4d96a1
|
[
"Apache-2.0"
] | null | null | null |
mix.exs
|
ghaabor/golem
|
202cde10fe86a7e4e5d3713e7c78a4848d4d96a1
|
[
"Apache-2.0"
] | null | null | null |
defmodule Golem.Mixfile do
  use Mix.Project

  # Mix project definition for Golem, a chatbot written in Elixir.
  # Returns the project configuration keyword list consumed by Mix.
  def project do
    [
      app: :golem,
      version: "0.0.1-alpha3",
      elixir: "~> 1.4",
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      deps: deps(),
      package: package(),
      description: description(),
      test_coverage: [tool: Coverex.Task, coveralls: true],
      # Docs
      name: "Golem",
      source_url: "https://github.com/ghaabor/golem",
      homepage: "https://github.com/ghaabor/golem",
      docs: docs()
    ]
  end

  # OTP application configuration; only the logger is started beyond defaults.
  def application do
    [extra_applications: [:logger]]
  end

  # Runtime and tooling dependencies.
  defp deps do
    [
      {:tesla, "~> 0.6.0"},
      {:socket, "~> 0.3"},
      {:poison, "~> 2.0"},
      {:ex_doc, "~> 0.14", only: :dev, runtime: false},
      {:credo, "~> 0.7", only: [:dev, :test]},
      {:coverex, "~> 1.4.10", only: :test},
      {:git_cli, "~> 0.2", only: :dev}
    ]
  end

  # Short description shown on hex.pm.
  defp description do
    """
    --- ALPHA ---
    Chatbot built in Elixir.
    """
  end

  # Hex package metadata.
  defp package do
    [
      name: :golem,
      files: ["lib", "mix.exs", "README.md", "LICENSE"],
      maintainers: ["Gábor Takács"],
      # Use the SPDX identifier; Hex validates `licenses` against the SPDX
      # list, and "Apache 2.0" (with a space) is not a valid identifier.
      licenses: ["Apache-2.0"],
      links: %{"GitHub" => "https://github.com/ghaabor/golem",
               "Docs" => "https://github.com/ghaabor/golem"}
    ]
  end

  # ExDoc configuration.
  defp docs do
    [
      main: "Golem",
      extras: ["README.md"]
    ]
  end
end
| 21.030303
| 62
| 0.512968
|
9e230e8de9c97e317aed7c07b1142a072a6350e5
| 1,514
|
exs
|
Elixir
|
test/absinthe_cache_test.exs
|
EnthuZiastic/absinthe_cache
|
48059fa41f4d04ad819f1564e3ea4a7d3b5c69cf
|
[
"MIT"
] | 19
|
2020-02-06T09:26:28.000Z
|
2022-02-18T01:17:40.000Z
|
test/absinthe_cache_test.exs
|
EnthuZiastic/absinthe_cache
|
48059fa41f4d04ad819f1564e3ea4a7d3b5c69cf
|
[
"MIT"
] | 1
|
2021-06-21T16:52:30.000Z
|
2021-07-12T00:03:10.000Z
|
test/absinthe_cache_test.exs
|
EnthuZiastic/absinthe_cache
|
48059fa41f4d04ad819f1564e3ea4a7d3b5c69cf
|
[
"MIT"
] | 3
|
2021-05-24T08:36:11.000Z
|
2021-08-05T07:47:26.000Z
|
# Verifies that `cache_resolve/1` memoizes resolver results: the uncached
# field's resolver logs on every query, while the cached field's resolver
# logs only on the first query. Log capture is used as the execution probe.
defmodule AbsintheCacheTest do
  # Minimal GraphQL schema with one cached and one uncached string field,
  # both backed by resolvers that log a marker line when they actually run.
  defmodule Schema do
    use Absinthe.Schema
    import AbsintheCache, only: [cache_resolve: 1]
    require Logger
    query do
      field :get_name_cached, non_null(:string) do
        cache_resolve(fn _, _, _ ->
          Logger.info("PRINTING SOME DATA")
          {:ok, "Ivan"}
        end)
      end
      field :get_name_not_cached, non_null(:string) do
        resolve(fn _, _, _ ->
          Logger.info("PRINTING SOME DATA")
          {:ok, "Ivan"}
        end)
      end
    end
  end
  use AbsintheCache.TestCase, async: true
  import ExUnit.CaptureLog
  test "uncached function is called every time" do
    fun = fn ->
      Absinthe.run("{ getNameNotCached }", Schema, root_value: %{})
    end
    # The resolver executes (and therefore logs) on every run.
    assert capture_log(fun) =~ "PRINTING SOME DATA"
    assert capture_log(fun) =~ "PRINTING SOME DATA"
    assert capture_log(fun) =~ "PRINTING SOME DATA"
    assert capture_log(fun) =~ "PRINTING SOME DATA"
    assert capture_log(fun) =~ "PRINTING SOME DATA"
  end
  test "cached function is called only the first time" do
    fun = fn ->
      Absinthe.run("{ getNameCached }", Schema, root_value: %{})
    end
    # The resolver executes only on the first run; subsequent runs are
    # served from the cache and produce no log output.
    assert capture_log(fun) =~ "PRINTING SOME DATA"
    refute capture_log(fun) =~ "PRINTING SOME DATA"
    refute capture_log(fun) =~ "PRINTING SOME DATA"
    refute capture_log(fun) =~ "PRINTING SOME DATA"
    refute capture_log(fun) =~ "PRINTING SOME DATA"
  end
end
| 27.527273
| 67
| 0.641347
|
9e2315891abf33866467fb1a352639543237ba1d
| 37,691
|
ex
|
Elixir
|
clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/api/folders.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/api/folders.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/api/folders.ex
|
kolorahl/elixir-google-api
|
46bec1e092eb84c6a79d06c72016cb1a13777fa6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudResourceManager.V2.Api.Folders do
@moduledoc """
API calls for all endpoints tagged `Folders`.
"""
alias GoogleApi.CloudResourceManager.V2.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Creates a Folder in the resource hierarchy.
Returns an Operation which can be used to track the progress of the
folder creation workflow.
Upon success the Operation.response field will be populated with the
created Folder.
In order to succeed, the addition of this new Folder must not violate
the Folder naming, height or fanout constraints.
+ The Folder's display_name must be distinct from all other Folder's that
share its parent.
+ The addition of the Folder must not cause the active Folder hierarchy
to exceed a height of 4. Note, the full active + deleted Folder hierarchy
is allowed to reach a height of 8; this provides additional headroom when
moving folders that contain deleted folders.
+ The addition of the Folder must not cause the total number of Folders
under its parent to exceed 100.
If the operation fails due to a folder constraint violation, some errors
may be returned by the CreateFolder request, with status code
FAILED_PRECONDITION and an error description. Other folder constraint
violations will be communicated in the Operation, with the specific
PreconditionFailure returned via the details list in the Operation.error
field.
The caller must have `resourcemanager.folders.create` permission on the
identified parent.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:parent` (*type:* `String.t`) - Required. The resource name of the new Folder's parent.
Must be of the form `folders/{folder_id}` or `organizations/{org_id}`.
* `:body` (*type:* `GoogleApi.CloudResourceManager.V2.Model.Folder.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_create(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.Operation.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper (see file header): POST /v2/folders.
  def cloudresourcemanager_folders_create(connection, optional_params \\ [], opts \\ []) do
    # Maps each supported optional parameter to its location in the HTTP
    # request: :query -> URL query string, :body -> request body.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :parent => :query,
      :body => :body
    }
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v2/folders", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    # Decode the response body into an Operation struct on success.
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.Operation{}])
  end
@doc """
Requests deletion of a Folder. The Folder is moved into the
DELETE_REQUESTED state
immediately, and is deleted approximately 30 days later. This method may
only be called on an empty Folder in the
ACTIVE state, where a Folder is empty if
it doesn't contain any Folders or Projects in the
ACTIVE state.
The caller must have `resourcemanager.folders.delete` permission on the
identified folder.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `folders_id` (*type:* `String.t`) - Part of `name`. Required. the resource name of the Folder to be deleted.
Must be of the form `folders/{folder_id}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.Folder{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_delete(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.Folder.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper: DELETE /v2/folders/{foldersId}.
  def cloudresourcemanager_folders_delete(
        connection,
        folders_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # All supported optional parameters travel in the query string.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }
    request =
      Request.new()
      |> Request.method(:delete)
      # `folders_id` is percent-encoded before interpolation into the path.
      |> Request.url("/v2/folders/{foldersId}", %{
        "foldersId" => URI.encode(folders_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.Folder{}])
  end
@doc """
Retrieves a Folder identified by the supplied resource name.
Valid Folder resource names have the format `folders/{folder_id}`
(for example, `folders/1234`).
The caller must have `resourcemanager.folders.get` permission on the
identified folder.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `folders_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the Folder to retrieve.
Must be of the form `folders/{folder_id}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.Folder{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.Folder.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper: GET /v2/folders/{foldersId}.
  def cloudresourcemanager_folders_get(connection, folders_id, optional_params \\ [], opts \\ []) do
    # All supported optional parameters travel in the query string.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }
    request =
      Request.new()
      |> Request.method(:get)
      # `folders_id` is percent-encoded before interpolation into the path.
      |> Request.url("/v2/folders/{foldersId}", %{
        "foldersId" => URI.encode(folders_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.Folder{}])
  end
@doc """
Gets the access control policy for a Folder. The returned policy may be
empty if no such policy or resource exists. The `resource` field should
be the Folder's resource name, e.g. "folders/1234".
The caller must have `resourcemanager.folders.getIamPolicy` permission
on the identified folder.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `folders_id` (*type:* `String.t`) - Part of `resource`. REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this field.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.CloudResourceManager.V2.Model.GetIamPolicyRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_get_iam_policy(
          Tesla.Env.client(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.Policy.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper: POST /v2/folders/{foldersId}:getIamPolicy.
  def cloudresourcemanager_folders_get_iam_policy(
        connection,
        folders_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # :query params go to the URL query string; :body is the request body.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }
    request =
      Request.new()
      |> Request.method(:post)
      # `folders_id` is percent-encoded before interpolation into the path.
      |> Request.url("/v2/folders/{foldersId}:getIamPolicy", %{
        "foldersId" => URI.encode(folders_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.Policy{}])
  end
@doc """
Lists the Folders that are direct descendants of supplied parent resource.
List provides a strongly consistent view of the Folders underneath
the specified parent resource.
List returns Folders sorted based upon the (ascending) lexical ordering
of their display_name.
The caller must have `resourcemanager.folders.list` permission on the
identified parent.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of Folders to return in the response.
* `:pageToken` (*type:* `String.t`) - Optional. A pagination token returned from a previous call to `ListFolders`
that indicates where this listing should continue from.
* `:parent` (*type:* `String.t`) - Required. The resource name of the Organization or Folder whose Folders are
being listed.
Must be of the form `folders/{folder_id}` or `organizations/{org_id}`.
Access to this method is controlled by checking the
`resourcemanager.folders.list` permission on the `parent`.
* `:showDeleted` (*type:* `boolean()`) - Optional. Controls whether Folders in the
DELETE_REQUESTED
state should be returned. Defaults to false.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.ListFoldersResponse{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_list(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.ListFoldersResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper: GET /v2/folders (paginated listing).
  def cloudresourcemanager_folders_list(connection, optional_params \\ [], opts \\ []) do
    # All supported optional parameters (including pagination controls)
    # travel in the query string.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :pageSize => :query,
      :pageToken => :query,
      :parent => :query,
      :showDeleted => :query
    }
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v2/folders", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.ListFoldersResponse{}]
    )
  end
@doc """
Moves a Folder under a new resource parent.
Returns an Operation which can be used to track the progress of the
folder move workflow.
Upon success the Operation.response field will be populated with the
moved Folder.
Upon failure, a FolderOperationError categorizing the failure cause will
be returned - if the failure occurs synchronously then the
FolderOperationError will be returned via the Status.details field
and if it occurs asynchronously then the FolderOperation will be returned
via the Operation.error field.
In addition, the Operation.metadata field will be populated with a
FolderOperation message as an aid to stateless clients.
Folder moves will be rejected if they violate either the naming, height
or fanout constraints described in the
CreateFolder documentation.
The caller must have `resourcemanager.folders.move` permission on the
folder's current and proposed new parent.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `folders_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the Folder to move.
Must be of the form folders/{folder_id}
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.CloudResourceManager.V2.Model.MoveFolderRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_move(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.Operation.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper: POST /v2/folders/{foldersId}:move.
  def cloudresourcemanager_folders_move(connection, folders_id, optional_params \\ [], opts \\ []) do
    # :query params go to the URL query string; :body is the request body.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }
    request =
      Request.new()
      |> Request.method(:post)
      # `folders_id` is percent-encoded before interpolation into the path.
      |> Request.url("/v2/folders/{foldersId}:move", %{
        "foldersId" => URI.encode(folders_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.Operation{}])
  end
@doc """
Updates a Folder, changing its display_name.
Changes to the folder display_name will be rejected if they violate either
the display_name formatting rules or naming constraints described in
the CreateFolder documentation.
The Folder's display name must start and end with a letter or digit,
may contain letters, digits, spaces, hyphens and underscores and can be
no longer than 30 characters. This is captured by the regular expression:
[\\p{L}\\p{N}]([\\p{L}\\p{N}_- ]{0,28}[\\p{L}\\p{N}])?.
The caller must have `resourcemanager.folders.update` permission on the
identified folder.
If the update fails due to the unique name constraint then a
PreconditionFailure explaining this violation will be returned
in the Status.details field.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `folders_id` (*type:* `String.t`) - Part of `folder.name`. Output only. The resource name of the Folder.
Its format is `folders/{folder_id}`, for example: "folders/1234".
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. Fields to be updated.
Only the `display_name` can be updated.
* `:body` (*type:* `GoogleApi.CloudResourceManager.V2.Model.Folder.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.Folder{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_patch(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.Folder.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper: PATCH /v2/folders/{foldersId}.
  def cloudresourcemanager_folders_patch(
        connection,
        folders_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # :query params (incl. updateMask) go to the URL; :body is the request body.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :updateMask => :query,
      :body => :body
    }
    request =
      Request.new()
      |> Request.method(:patch)
      # `folders_id` is percent-encoded before interpolation into the path.
      |> Request.url("/v2/folders/{foldersId}", %{
        "foldersId" => URI.encode(folders_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.Folder{}])
  end
@doc """
Search for folders that match specific filter criteria.
Search provides an eventually consistent view of the folders a user has
access to which meet the specified filter criteria.
This will only return folders on which the caller has the
permission `resourcemanager.folders.get`.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.CloudResourceManager.V2.Model.SearchFoldersRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.SearchFoldersResponse{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_search(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.SearchFoldersResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper: POST /v2/folders:search.
  def cloudresourcemanager_folders_search(connection, optional_params \\ [], opts \\ []) do
    # :query params go to the URL query string; :body is the request body.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v2/folders:search", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.SearchFoldersResponse{}]
    )
  end
@doc """
Sets the access control policy on a Folder, replacing any existing policy.
The `resource` field should be the Folder's resource name, e.g.
"folders/1234".
The caller must have `resourcemanager.folders.setIamPolicy` permission
on the identified folder.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `folders_id` (*type:* `String.t`) - Part of `resource`. REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this field.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.CloudResourceManager.V2.Model.SetIamPolicyRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
  @spec cloudresourcemanager_folders_set_iam_policy(
          Tesla.Env.client(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.CloudResourceManager.V2.Model.Policy.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  # Auto-generated endpoint wrapper: POST /v2/folders/{foldersId}:setIamPolicy.
  def cloudresourcemanager_folders_set_iam_policy(
        connection,
        folders_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # :query params go to the URL query string; :body is the request body.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }
    request =
      Request.new()
      |> Request.method(:post)
      # `folders_id` is percent-encoded before interpolation into the path.
      |> Request.url("/v2/folders/{foldersId}:setIamPolicy", %{
        "foldersId" => URI.encode(folders_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.Policy{}])
  end
@doc """
Returns permissions that a caller has on the specified Folder.
The `resource` field should be the Folder's resource name,
e.g. "folders/1234".
There are no permissions required for making this API call.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `folders_id` (*type:* `String.t`) - Part of `resource`. REQUIRED: The resource for which the policy detail is being requested.
See the operation documentation for the appropriate value for this field.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.CloudResourceManager.V2.Model.TestIamPermissionsRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.TestIamPermissionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec cloudresourcemanager_folders_test_iam_permissions(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.CloudResourceManager.V2.Model.TestIamPermissionsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def cloudresourcemanager_folders_test_iam_permissions(
connection,
folders_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v2/folders/{foldersId}:testIamPermissions", %{
"foldersId" => URI.encode(folders_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.TestIamPermissionsResponse{}]
)
end
@doc """
Cancels the deletion request for a Folder. This method may only be
called on a Folder in the
DELETE_REQUESTED state.
In order to succeed, the Folder's parent must be in the
ACTIVE state.
In addition, reintroducing the folder into the tree must not violate
folder naming, height and fanout constraints described in the
CreateFolder documentation.
The caller must have `resourcemanager.folders.undelete` permission on the
identified folder.
## Parameters
* `connection` (*type:* `GoogleApi.CloudResourceManager.V2.Connection.t`) - Connection to server
* `folders_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the Folder to undelete.
Must be of the form `folders/{folder_id}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.CloudResourceManager.V2.Model.UndeleteFolderRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.CloudResourceManager.V2.Model.Folder{}}` on success
* `{:error, info}` on failure
"""
@spec cloudresourcemanager_folders_undelete(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.CloudResourceManager.V2.Model.Folder.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def cloudresourcemanager_folders_undelete(
connection,
folders_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v2/folders/{foldersId}:undelete", %{
"foldersId" => URI.encode(folders_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.CloudResourceManager.V2.Model.Folder{}])
end
end
| 45.686061
| 196
| 0.645671
|
9e231a87efedb1cc25c98ef4ac8f563625083544
| 1,192
|
ex
|
Elixir
|
priv/templates/gen.components/catalogue/components_catalogue/icons.ex
|
inspired-consulting/gen_components
|
47c567fb2d6e918341e2efa07c2f7345038a8a90
|
[
"MIT"
] | 2
|
2022-03-28T21:33:39.000Z
|
2022-03-30T17:14:34.000Z
|
priv/templates/gen.components/catalogue/components_catalogue/icons.ex
|
inspired-consulting/gen_components
|
47c567fb2d6e918341e2efa07c2f7345038a8a90
|
[
"MIT"
] | null | null | null |
priv/templates/gen.components/catalogue/components_catalogue/icons.ex
|
inspired-consulting/gen_components
|
47c567fb2d6e918341e2efa07c2f7345038a8a90
|
[
"MIT"
] | null | null | null |
# Catalogue page that previews the generated icon components.
# NOTE: this file is itself an EEx template — `<%= ... %>` expands at
# generation time, while `<%%= ... %>` escapes to a literal `<%= ... %>`
# in the generated file.
defmodule <%= catalogue_module %>.Icons do
  use <%= web_module %>, :component

  import <%= components_module %>.Icon

  # Bordered square used to preview one icon; expects a `@title` assign
  # (rendered as a tooltip) and an inner block with the icon itself.
  defp frame(assigns) do
    ~H"""
    <div title={@title} style="
      border: 1px solid black;
      width: 5rem;
      height: 5rem;
      margin: 0.5rem;
      border-radius: 0.3rem;
      display: flex;
      justify-content: center;
      align-items: center;
    "><%%= render_slot(@inner_block) %></div>
    """
  end

  # Renders two sections: the named "semantic" icons and every icon name
  # returned by `icons/0` — presumably exported by the imported Icon
  # module (distinct from this `icons/1` component); TODO confirm.
  def icons(assigns) do
    ~H"""
    <section>
      <h2>Semantic Icons</h2>
      <div style="display: flex; flex-wrap: wrap;">
        <.frame title="icon_delete"><.icon_delete style="width: 3rem; height: 3rem;"/></.frame>
        <.frame title="icon_add"><.icon_add style="width: 3rem; height: 3rem;"/></.frame>
        <.frame title="icon_edit"><.icon_edit style="width: 3rem; height: 3rem;"/></.frame>
      </div>
    </section>
    <section style="margin-top: 1rem;">
      <h2>All Icons</h2>
      <div style="display: flex; flex-wrap: wrap;">
        <%%= for i <- icons() do %>
          <.frame title={i}><.icon name={i} style="width: 3rem; height: 3rem;"/></.frame>
        <%% end %>
      </div>
    </section>
    """
  end
end
| 28.380952
| 95
| 0.555369
|
9e233176afd51a633dc91c104453f5b9bf7dc818
| 132
|
exs
|
Elixir
|
test/proj3_test.exs
|
anipmehta/ChordProtocol
|
7e9f0a1e0d3ea0d4bd927d1a28fcd3444f50a755
|
[
"MIT"
] | null | null | null |
test/proj3_test.exs
|
anipmehta/ChordProtocol
|
7e9f0a1e0d3ea0d4bd927d1a28fcd3444f50a755
|
[
"MIT"
] | null | null | null |
test/proj3_test.exs
|
anipmehta/ChordProtocol
|
7e9f0a1e0d3ea0d4bd927d1a28fcd3444f50a755
|
[
"MIT"
] | null | null | null |
defmodule Proj3Test do
  use ExUnit.Case

  # Run any doctests embedded in the Proj3 module.
  doctest Proj3

  test "greets the world" do
    assert :world == Proj3.hello()
  end
end
| 14.666667
| 34
| 0.69697
|
9e237d845221d7323494210b5f235686c5250088
| 1,731
|
ex
|
Elixir
|
clients/memcache/lib/google_api/memcache/v1beta2/model/daily_cycle.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/memcache/lib/google_api/memcache/v1beta2/model/daily_cycle.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/memcache/lib/google_api/memcache/v1beta2/model/daily_cycle.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Memcache.V1beta2.Model.DailyCycle do
  @moduledoc """
  Time window specified for daily operations.

  ## Attributes

  *   `duration` (*type:* `String.t`, *default:* `nil`) - Output only. Duration of the time window, set by service producer.
  *   `startTime` (*type:* `GoogleApi.Memcache.V1beta2.Model.TimeOfDay.t`, *default:* `nil`) - Time within the day to start the operations.
  """

  # Provides the struct definition plus the field/1,2 macros and the
  # decode/encode plumbing used by the Poison protocol impls below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :duration => String.t() | nil,
          :startTime => GoogleApi.Memcache.V1beta2.Model.TimeOfDay.t() | nil
        }

  field(:duration)
  # Nested model: decoded into a TimeOfDay struct rather than a bare map.
  field(:startTime, as: GoogleApi.Memcache.V1beta2.Model.TimeOfDay)
end
defimpl Poison.Decoder, for: GoogleApi.Memcache.V1beta2.Model.DailyCycle do
  # Delegate straight to the decoder generated by ModelBase.
  def decode(value, options),
    do: GoogleApi.Memcache.V1beta2.Model.DailyCycle.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Memcache.V1beta2.Model.DailyCycle do
  # All generated models share the common ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.62
| 139
| 0.731947
|
9e23a28aae934fc7db43d9943f333673b53b6da4
| 973
|
ex
|
Elixir
|
lib/mix/tasks/compile.phoenix.ex
|
arkgil/phoenix
|
b5d82814154b5fb87c0870e25c1c2243c9384d9e
|
[
"MIT"
] | null | null | null |
lib/mix/tasks/compile.phoenix.ex
|
arkgil/phoenix
|
b5d82814154b5fb87c0870e25c1c2243c9384d9e
|
[
"MIT"
] | null | null | null |
lib/mix/tasks/compile.phoenix.ex
|
arkgil/phoenix
|
b5d82814154b5fb87c0870e25c1c2243c9384d9e
|
[
"MIT"
] | null | null | null |
defmodule Mix.Tasks.Compile.Phoenix do
  use Mix.Task
  @recursive true

  @moduledoc """
  Compiles Phoenix source files that support code reloading.
  """

  @doc false
  def run(_args) do
    {:ok, _} = Application.ensure_all_started(:phoenix)

    # :noop when nothing was touched, :ok otherwise.
    if touch() == [], do: :noop, else: :ok
  end

  @doc false
  def touch do
    Mix.Phoenix.modules()
    |> recompilable_modules()
    |> source_paths()
    |> Stream.map(&bump_mtime/1)
    |> Stream.filter(&(&1 == :ok))
    |> Enum.to_list()
  end

  # Refresh the file's modification time so the compiler re-reads it.
  # Returns :ok, or {:error, reason} when the file no longer exists.
  defp bump_mtime(path), do: :file.change_time(path, :calendar.local_time())

  # Keep only loaded modules that opted in via __phoenix_recompile__?/0.
  defp recompilable_modules(modules) do
    Stream.filter(modules, fn mod ->
      Code.ensure_loaded?(mod) and function_exported?(mod, :__phoenix_recompile__?, 0) and
        mod.__phoenix_recompile__?
    end)
  end

  # Map each module to the source path recorded at compile time.
  defp source_paths(modules) do
    Stream.map(modules, fn mod -> mod.__info__(:compile)[:source] end)
  end
end
| 22.113636
| 90
| 0.664954
|
9e23dd0e34cd931fd513a69742fc6f0916e80e70
| 725
|
exs
|
Elixir
|
test/grizzly/zwave/commands/barrier_operator_set_test.exs
|
smartrent/grizzly
|
65a397ea7bfedb5518fe63a3f058a0b6af473e39
|
[
"Apache-2.0"
] | 76
|
2019-09-04T16:56:58.000Z
|
2022-03-29T06:54:36.000Z
|
test/grizzly/zwave/commands/barrier_operator_set_test.exs
|
smartrent/grizzly
|
65a397ea7bfedb5518fe63a3f058a0b6af473e39
|
[
"Apache-2.0"
] | 124
|
2019-09-05T14:01:24.000Z
|
2022-02-28T22:58:14.000Z
|
test/grizzly/zwave/commands/barrier_operator_set_test.exs
|
smartrent/grizzly
|
65a397ea7bfedb5518fe63a3f058a0b6af473e39
|
[
"Apache-2.0"
] | 10
|
2019-10-23T19:25:45.000Z
|
2021-11-17T13:21:20.000Z
|
defmodule Grizzly.ZWave.Commands.BarrierOperatorSetTest do
  use ExUnit.Case, async: true

  alias Grizzly.ZWave.Commands.BarrierOperatorSet

  test "creates the command and validates params" do
    {:ok, _command} = BarrierOperatorSet.new(target_value: :close)
  end

  test "encodes params correctly" do
    {:ok, command} = BarrierOperatorSet.new(target_value: :open)

    # :open encodes to 0xFF on the wire.
    assert BarrierOperatorSet.encode_params(command) == <<0xFF>>
  end

  test "decodes params correctly" do
    # 0x00 on the wire decodes to :close.
    {:ok, params} = BarrierOperatorSet.decode_params(<<0x00>>)

    assert :close == Keyword.get(params, :target_value)
  end
end
| 30.208333
| 71
| 0.732414
|
9e23e9ed30745a267d867e8a8d536f36d375a406
| 4,300
|
ex
|
Elixir
|
lib/duck_duck/transform.ex
|
the-mikedavis/duckduck
|
b76e3d9e7756e1c5eaa5488f817d28e0a0553e5b
|
[
"BSD-3-Clause"
] | 3
|
2018-12-06T02:46:16.000Z
|
2020-09-25T02:07:19.000Z
|
lib/duck_duck/transform.ex
|
the-mikedavis/duckduck
|
b76e3d9e7756e1c5eaa5488f817d28e0a0553e5b
|
[
"BSD-3-Clause"
] | 2
|
2018-12-25T14:54:32.000Z
|
2019-11-08T02:13:56.000Z
|
lib/duck_duck/transform.ex
|
the-mikedavis/duckduck
|
b76e3d9e7756e1c5eaa5488f817d28e0a0553e5b
|
[
"BSD-3-Clause"
] | null | null | null |
defmodule DuckDuck.Transform do
  alias DuckDuck.UploadCommand, as: Command

  @moduledoc """
  Describes a series of transformations to iteratively build an UploadCommand.
  """

  # Effects client resolved at compile time so tests can swap in a mock.
  # NOTE(review): Application.get_env in a module attribute freezes the
  # value at compile time — appears intentional here, but worth knowing.
  @effects Application.get_env(:duckduck, :effects_client, DuckDuck.Effects)

  # CLI option definitions handed to OptionParser below.
  @switches [tag: :string, path: :string, yes: :boolean]
  @aliases [t: :tag, f: :path, y: :yes]

  @doc "Transform an argument list into a command"
  @spec parse([String.t()]) :: Command.t()
  def parse(argv) do
    {parsed, _rest} =
      OptionParser.parse!(argv, switches: @switches, aliases: @aliases)

    # Rename CLI keys to UploadCommand schema keys, then apply as a map.
    params =
      parsed
      |> Enum.map(&translate/1)
      |> Enum.into(%{})

    Command.transform(%Command{}, params)
  end

  # translate to the keys of the UploadCommand schema
  @spec translate({atom(), any()}) :: {atom(), any()}
  defp translate({:yes, accepted?}), do: {:accept?, accepted?}
  # NOTE(review): OptionParser emits :path (per @switches/@aliases), never
  # :file — this clause looks unreachable; confirm before removing.
  defp translate({:file, path}), do: {:path, path}
  defp translate(pair), do: pair

  @doc """
  Put the owner in the command if not already there
  """
  @spec owner(Command.t()) :: Command.t() | {:error, String.t()}
  def owner(command) do
    case @effects.fetch_env(:duckduck, :owner) do
      {:ok, owner} ->
        Command.transform(command, %{owner: owner})

      :error ->
        {:error, "Couldn't find repo owner in config"}
    end
  end

  @doc """
  Put the repo in the command if not already there
  """
  @spec repo(Command.t()) :: Command.t() | {:error, String.t()}
  def repo(command) do
    case @effects.fetch_env(:duckduck, :repo) do
      {:ok, repo} ->
        Command.transform(command, %{repo: repo})

      :error ->
        {:error, "Couldn't find repo name in config"}
    end
  end

  @doc """
  Put the tag in a command if not already there
  """
  @spec tag(Command.t()) :: Command.t()
  def tag(command) do
    Command.transform(command, %{tag: @effects.get_tag()})
  end

  @doc """
  Put the path to the upload file in the command if not already present
  """
  @spec path(Command.t()) :: Command.t() | {:error, String.t()}
  def path(%Command{tag: tag} = command) do
    case DuckDuck.find_release_file(tag) do
      {:ok, file} ->
        Command.transform(command, %{path: file})

      {:error, _reason} = e ->
        e
    end
  end

  @doc """
  Put the api token in the command if not already there.
  """
  @spec api_token(Command.t()) :: Command.t() | {:error, String.t()}
  def api_token(command) do
    case @effects.read_api_token() do
      {:ok, token} ->
        Command.transform(command, %{api_token: token})

      {:error, _reason} = e ->
        e
    end
  end

  @doc """
  Ask the user if they're ok with the upload plan
  """
  @spec accept?(Command.t()) :: Command.t()
  def accept?(%Command{tag: tag, path: path} = command) do
    Command.transform(command, %{accept?: DuckDuck.confirm(path, tag)})
  end

  @doc """
  Find the upload url and put it in the command.

  For uploading assets, you need to ask GitHub where to put them through
  their API. Interestingly, you can't upload assets to a tag. Only a release
  may have assets. So when you want to upload to a tag, you must also create
  the release from the tag. You can do this with a single API call.
  """
  @spec upload_url(Command.t()) :: Command.t() | {:error, String.t()}
  def upload_url(
        %Command{api_token: token, owner: owner, repo: repo, tag: tag} = command
      ) do
    # All four fields must be present before we can ask GitHub for the URL.
    if Enum.any?([token, owner, repo, tag], &is_nil/1) do
      {:error,
       """
       Couldn't find the upload url because I didn't know at least one of
       - api token
       - repo owner
       - repo name
       - tag
       """}
    else
      Command.transform(command, %{
        upload_url: DuckDuck.find_upload_url(token, owner, repo, tag)
      })
    end
  end

  @doc """
  Try uploading the tarball given the information in the command.
  """
  @spec upload(Command.t()) :: IO.chardata()
  def upload(%Command{path: path, api_token: api_token, upload_url: url}) do
    IO.puts("Please wait. Uploading #{path}...")

    # Returns ANSI-style chardata describing the outcome.
    case DuckDuck.upload(path, api_token, url) do
      :ok ->
        [:green, "Release successfully uploaded", :reset, "."]

      {:error, reason} ->
        [:red, reason]
    end
  end

  # NOTE(review): the clause above matches any %Command{} (struct keys
  # always exist, possibly nil), so this fallback looks unreachable —
  # confirm the intended dispatch before relying on it.
  def upload(%Command{}) do
    ["Release upload ", :red, "failed", :reset, ".\n"]
  end
end
| 28.104575
| 80
| 0.613953
|
9e24067ace2745c3ea1b0e853c04a0647eb4e708
| 440
|
ex
|
Elixir
|
test/commands/support/consistency/consistency_prefix_router.ex
|
jwilger/commanded
|
2d9950fd3ce76a23a3c410c99857b812f5705d66
|
[
"MIT"
] | 1,220
|
2017-10-31T10:56:40.000Z
|
2022-03-31T17:40:19.000Z
|
test/commands/support/consistency/consistency_prefix_router.ex
|
jwilger/commanded
|
2d9950fd3ce76a23a3c410c99857b812f5705d66
|
[
"MIT"
] | 294
|
2017-11-03T10:33:41.000Z
|
2022-03-24T08:36:42.000Z
|
test/commands/support/consistency/consistency_prefix_router.ex
|
jwilger/commanded
|
2d9950fd3ce76a23a3c410c99857b812f5705d66
|
[
"MIT"
] | 208
|
2017-11-03T10:56:47.000Z
|
2022-03-14T05:49:38.000Z
|
# Test router exercising aggregate identity with a stream-id prefix.
defmodule Commanded.Commands.ConsistencyPrefixRouter do
  use Commanded.Commands.Router

  alias Commanded.Commands.ConsistencyAggregateRoot

  alias ConsistencyAggregateRoot.{
    ConsistencyCommand,
    NoOpCommand,
    RequestDispatchCommand
  }

  # Aggregate instances are identified by each command's :uuid field,
  # with the identity prefixed by "example-prefix-".
  identify ConsistencyAggregateRoot,
    by: :uuid,
    prefix: "example-prefix-"

  # All three commands route to the same aggregate.
  dispatch [ConsistencyCommand, NoOpCommand, RequestDispatchCommand],
    to: ConsistencyAggregateRoot
end
| 23.157895
| 69
| 0.790909
|
9e2407dc791dfb360a1324214122f6ecf3cefdee
| 482
|
ex
|
Elixir
|
test/support/process_helper.ex
|
jsmestad/eventstore
|
93660ce316ca174ff4694e211a7ac420253e4dac
|
[
"MIT"
] | 576
|
2017-11-03T14:11:07.000Z
|
2022-03-29T06:18:47.000Z
|
test/support/process_helper.ex
|
jsmestad/eventstore
|
93660ce316ca174ff4694e211a7ac420253e4dac
|
[
"MIT"
] | 129
|
2017-11-08T06:10:20.000Z
|
2021-09-15T16:18:14.000Z
|
test/support/process_helper.ex
|
jsmestad/eventstore
|
93660ce316ca174ff4694e211a7ac420253e4dac
|
[
"MIT"
] | 118
|
2017-11-14T14:10:09.000Z
|
2022-03-28T13:13:56.000Z
|
defmodule EventStore.ProcessHelper do
  import ExUnit.Assertions

  @doc """
  Stop the given process name or PID with a non-normal exit reason.
  """
  def shutdown(name_or_pid)

  # Resolve a registered name to its PID, then shut that down.
  def shutdown(name) when is_atom(name), do: shutdown(Process.whereis(name))

  def shutdown(pid) when is_pid(pid) do
    monitor_ref = Process.monitor(pid)

    Process.unlink(pid)
    Process.exit(pid, :shutdown)

    # Block (up to 1s) until the monitor confirms the process is down.
    assert_receive {:DOWN, ^monitor_ref, :process, _object, _reason}, 1_000
  end
end
| 21.909091
| 67
| 0.692946
|
9e240e1bf11c68a24e2c2c7ed8ebd08c7aa691f7
| 2,854
|
ex
|
Elixir
|
lib/instagram_clone_web/live/page_live.ex
|
hminy572/InstagramClonePETAL
|
577cdad0e17399e47ef9d3f8e789bd07e33012b9
|
[
"MIT"
] | 1
|
2021-08-18T13:01:26.000Z
|
2021-08-18T13:01:26.000Z
|
lib/instagram_clone_web/live/page_live.ex
|
hminy572/InstagramClonePETAL
|
577cdad0e17399e47ef9d3f8e789bd07e33012b9
|
[
"MIT"
] | null | null | null |
lib/instagram_clone_web/live/page_live.ex
|
hminy572/InstagramClonePETAL
|
577cdad0e17399e47ef9d3f8e789bd07e33012b9
|
[
"MIT"
] | null | null | null |
# Home page LiveView: paginated feed of posts from followed accounts,
# with live updates for new posts and like counts.
defmodule InstagramCloneWeb.PageLive do
  use InstagramCloneWeb, :live_view

  # NOTE(review): the Avatar alias appears unused in this module.
  alias InstagramClone.Uploaders.Avatar
  alias InstagramClone.Accounts
  alias InstagramCloneWeb.UserLive.FollowComponent
  alias InstagramClone.Posts
  alias InstagramClone.Posts.Post
  alias InstagramCloneWeb.Live.LikeComponent

  @impl true
  def mount(_params, session, socket) do
    socket = assign_defaults(session, socket)
    # Subscribe to post broadcasts only once the websocket is connected.
    if connected?(socket), do: Posts.subscribe

    {:ok,
     socket
     |> assign(page_title: "InstagramClone")
     |> assign(new_posts_added: false)
     |> assign(page: 1, per_page: 15),
     # The feed is a temporary assign: cleared after each render to keep
     # server memory bounded; new items are appended via updates.
     temporary_assigns: [user_feed: []]}
  end

  @impl true
  def handle_params(_params, _uri, socket) do
    {:noreply,
     socket
     |> assign(live_action: apply_action(socket.assigns.current_user))
     |> assign_posts()}
  end

  # Infinite-scroll hook: client asks for the next page of the feed.
  @impl true
  def handle_event("load-more-profile-posts", _, socket) do
    {:noreply, socket |> load_posts}
  end

  # Advance pagination unless we are already on the last page.
  defp load_posts(socket) do
    total_posts = socket.assigns.accounts_feed_total
    page = socket.assigns.page
    per_page = socket.assigns.per_page
    total_pages = ceil(total_posts / per_page)

    if page == total_pages do
      socket
    else
      socket
      |> update(:page, &(&1 + 1))
      |> assign_user_feed()
    end
  end

  # Follow totals are rendered elsewhere; nothing to update here.
  @impl true
  def handle_info({FollowComponent, :update_totals, _}, socket) do
    {:noreply, socket}
  end

  # Comment likes update inside the component; no socket change needed.
  @impl true
  def handle_info({LikeComponent, :update_comment_likes, _}, socket) do
    {:noreply, socket}
  end

  # A post's like count changed: re-fetch it and push it back into the
  # temporary feed so the next render picks it up.
  @impl true
  def handle_info({LikeComponent, :update_post_likes, post}, socket) do
    post_feed = Posts.get_post_feed!(post.id)

    {:noreply,
     socket
     |> update(:user_feed, fn user_feed -> [post_feed | user_feed] end)}
  end

  # PubSub broadcast: show the "new posts" banner only when the author
  # is someone the current user follows.
  @impl true
  def handle_info(%{event: "new_post", payload: %{post: %Post{user_id: post_user_id}}}, socket) do
    if post_user_id in socket.assigns.following_list do
      {:noreply, socket |> assign(new_posts_added: true)}
    else
      {:noreply, socket}
    end
  end

  # Anonymous visitors get the :root_path action; logged-in users nil.
  defp apply_action(current_user) do
    if !current_user, do: :root_path
  end

  # Logged-in users get their following list, feed totals, suggestions
  # and first feed page; anonymous visitors are left untouched.
  defp assign_posts(socket) do
    if socket.assigns.current_user do
      current_user = socket.assigns.current_user
      following_list = Accounts.get_following_list(current_user)
      accounts_feed_total = Posts.get_accounts_feed_total(following_list, socket.assigns)
      random_5_users = Accounts.random_5(current_user)

      socket
      |> assign(following_list: following_list)
      |> assign(accounts_feed_total: accounts_feed_total)
      |> assign(users: random_5_users)
      |> assign_user_feed()
    else
      socket
    end
  end

  # Fetch the current page of the feed for the accounts being followed.
  defp assign_user_feed(socket) do
    user_feed = Posts.get_accounts_feed(socket.assigns.following_list, socket.assigns)
    socket |> assign(user_feed: user_feed)
  end
end
| 26.924528
| 98
| 0.697968
|
9e242144db05547bf4e8a90375c46441f2a3700a
| 73
|
ex
|
Elixir
|
lib/tapebas_web/views/user_registration_view.ex
|
cristineguadelupe/tapebas
|
5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf
|
[
"MIT"
] | 3
|
2022-03-24T16:48:38.000Z
|
2022-03-24T16:50:04.000Z
|
lib/tapebas_web/views/user_registration_view.ex
|
cristineguadelupe/tapebas
|
5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf
|
[
"MIT"
] | null | null | null |
lib/tapebas_web/views/user_registration_view.ex
|
cristineguadelupe/tapebas
|
5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf
|
[
"MIT"
] | 1
|
2022-03-20T01:11:12.000Z
|
2022-03-20T01:11:12.000Z
|
# View backing the user registration templates; rendering helpers come
# from the :view definition in TapebasWeb.
defmodule TapebasWeb.UserRegistrationView do
  use TapebasWeb, :view
end
| 18.25
| 44
| 0.835616
|
9e243e8518fa60af613f9b474df32f23d36c12e3
| 2,343
|
exs
|
Elixir
|
test/credo/check/consistency/space_around_operators/collector_test.exs
|
hrzndhrn/credo
|
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
|
[
"MIT"
] | 4,590
|
2015-09-28T06:01:43.000Z
|
2022-03-29T08:48:57.000Z
|
test/credo/check/consistency/space_around_operators/collector_test.exs
|
hrzndhrn/credo
|
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
|
[
"MIT"
] | 890
|
2015-11-16T21:07:07.000Z
|
2022-03-29T08:52:07.000Z
|
test/credo/check/consistency/space_around_operators/collector_test.exs
|
hrzndhrn/credo
|
71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593
|
[
"MIT"
] | 479
|
2015-11-17T19:42:40.000Z
|
2022-03-29T00:09:21.000Z
|
defmodule Credo.Check.Consistency.SpaceAroundOperators.CollectorTest do
  use Credo.Test.Case

  alias Credo.Check.Consistency.SpaceAroundOperators.Collector

  # The collector counts, per source file, how many operator occurrences
  # have surrounding spaces vs. not; captures (&fun/1), unary minus in
  # ranges like -999..-1 and operators inside sigils must not count.
  test "it should report correct frequencies for operators surrounded by spaces" do
    result =
      """
      defmodule Credo.Sample1 do
        defmodule InlineModule do
          def foobar do
            4 + 3
            4 - 3
            4 * 3
            a = 3
            4 && 3
            "4" <> "3"
            4 == 3
            [4] ++ [3]
            4 == 3
            4 > 3
            4 >= 3
            4 <= 3
            range = -999..-1
            for op <- [:{}, :%{}, :^, :|, :<>] do
            end
            something = removed != []
            Enum.map(dep.deps, &(&1.app)) ++ current_breadths
            &function_capture/1
            &:erlang_module.function_capture/3
            &Elixir.function_capture/3
            &@module.blah/1
            |> my_func(&Some.Deep.Module.is_something/1)
          end
        end
      end
      """
      |> to_source_file()
      |> Collector.collect_matches([])

    assert %{with_space: 18} == result
  end

  test "it should report correct frequencies for operators surrounded by spaces /2" do
    result =
      """
      a = b + c + compare_fn.(-d, 0)
      """
      |> to_source_file()
      |> Collector.collect_matches([])

    # =, + and + count; the unary minus in (-d, 0) does not.
    assert %{with_space: 3} == result
  end

  # NOTE(review): identical fixture and expectation as /2 above —
  # possibly a copy-paste duplicate; confirm intent.
  test "it should report correct frequencies for operators surrounded by spaces /3" do
    result =
      """
      a = b + c + compare_fn.(-d, 0)
      """
      |> to_source_file()
      |> Collector.collect_matches([])

    assert %{with_space: 3} == result
  end

  test "it should report correct frequencies for operators not surrounded by spaces" do
    result =
      """
      defmodule Credo.Sample2 do
        def foobar do
          1+2
        end
      end
      """
      |> to_source_file()
      |> Collector.collect_matches([])

    assert %{without_space: 1} == result
  end

  # Mixed spacing: "1+ 2" and "3 *4" each contribute one of each kind.
  test "it should report correct frequencies for mixed cases" do
    result =
      """
      defmodule Credo.Sample3 do
        def foobar do
          1+ 2
          3 *4
        end
      end
      """
      |> to_source_file()
      |> Collector.collect_matches([])

    assert %{with_space: 2, without_space: 2} == result
  end
end
| 24.154639
| 87
| 0.522834
|
9e243e9a58e997b7b0551e71fbf15b6f2e1672bc
| 2,075
|
ex
|
Elixir
|
clients/content/lib/google_api/content/v21/model/account_google_my_business_link.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/content/lib/google_api/content/v21/model/account_google_my_business_link.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | 1
|
2020-12-18T09:25:12.000Z
|
2020-12-18T09:25:12.000Z
|
clients/content/lib/google_api/content/v21/model/account_google_my_business_link.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.AccountGoogleMyBusinessLink do
  @moduledoc """
  ## Attributes

  *   `gmbAccountId` (*type:* `String.t`, *default:* `nil`) - The ID of the GMB account. If this is provided, then `gmbEmail` is ignored. The value of this field should match the `accountId` used by the GMB API.
  *   `gmbEmail` (*type:* `String.t`, *default:* `nil`) - The GMB email address of which a specific account within a GMB account. A sample account within a GMB account could be a business account with set of locations, managed under the GMB account.
  *   `status` (*type:* `String.t`, *default:* `nil`) - Status of the link between this Merchant Center account and the GMB account. Acceptable values are: - "`active`" - "`pending`"
  """

  # Provides the struct definition plus the field/1 macro and the
  # decode/encode plumbing used by the Poison protocol impls below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :gmbAccountId => String.t(),
          :gmbEmail => String.t(),
          :status => String.t()
        }

  field(:gmbAccountId)
  field(:gmbEmail)
  field(:status)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.AccountGoogleMyBusinessLink do
  # Delegate straight to the decoder generated by ModelBase.
  def decode(value, options),
    do: GoogleApi.Content.V21.Model.AccountGoogleMyBusinessLink.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.AccountGoogleMyBusinessLink do
  # All generated models share the common ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.150943
| 249
| 0.724337
|
9e245cfde8c93cafd20205528b446c43765163f0
| 5,325
|
ex
|
Elixir
|
lib/fiql_ex.ex
|
davec82/fiqlex
|
f0f14c7366821830167f00fe8955b05099b59478
|
[
"MIT"
] | 4
|
2020-01-04T09:28:16.000Z
|
2020-04-21T15:14:47.000Z
|
lib/fiql_ex.ex
|
davec82/fiqlex
|
f0f14c7366821830167f00fe8955b05099b59478
|
[
"MIT"
] | null | null | null |
lib/fiql_ex.ex
|
davec82/fiqlex
|
f0f14c7366821830167f00fe8955b05099b59478
|
[
"MIT"
] | 1
|
2021-04-23T09:25:39.000Z
|
2021-04-23T09:25:39.000Z
|
defmodule FIQLEx do
  @moduledoc """
  [FIQL](http://tools.ietf.org/html/draft-nottingham-atompub-fiql-00) (Feed Item Query Language)
  is a URI-friendly syntax for expressing filters.
  FIQL looks like this:
  ```
  fiql = "author.age=ge=25;author.name==*Doe"
  ```
  Using this module you will be able to parse a FIQL string and to build a query for any
  system (SQL, Elasticsearch, etc...) from it.
  Given a FIQL string like:
  ```
  fiql = "author.age=ge=25;author.name==*Doe"
  ```
  Pass it to the `parse/1` or `parse!/1` functions to retrieve an AST of the FIQL string:
  ```
  {:ok, ast} = FIQLEx.parse(fiql)
  ```
  Then you can use this AST to build your own query for your system or use our built-in
  query builders like `FIQLEx.QueryBuilders.SQLQueryBuilder`:
  ```
  {:ok, sql_query} = FIQLEx.build_query(ast, FIQLEx.QueryBuilders.SQLQueryBuilder, table: "author")
  ```
  Here, `sql_query` is `SELECT * FROM author WHERE (author.age >= 25 AND author.name LIKE '%Doe')`.
  You can use your own query builder by providing your own module that uses `FIQLEx.QueryBuilder`
  as second argument of `build_query/3`.
  """

  # Opaque parse tree produced by the generated :fiql_parser and consumed by
  # run_ast/4 below.
  @type ast() :: any()

  @doc """
  Parses the FIQL string and returns an AST representation of the query to be built to
  any other query (SQL, Elasticsearch) with the `build_query/3` function.
  Returns `{:ok, ast}` if everything is fine and `{:error, reason}` in case of error in the
  FIQL.
  """
  @spec parse(binary) :: {:ok, ast()} | {:error, any()}
  def parse(str) do
    # Tokenize with the leex-generated lexer, then parse with the yecc-generated
    # parser; both come from the compiled .xrl/.yrl sources of this project.
    with {:ok, tokens, _end_line} <- str |> to_charlist() |> :fiql_lexer.string(),
         {:ok, ast} <- :fiql_parser.parse(tokens) do
      {:ok, ast}
    else
      # Lexer failures arrive as a 3-tuple; normalize to {:error, reason}.
      {_, reason, _} ->
        {:error, reason}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Same as `parse/1` but returns the AST directly, or throws the error reason.

  NOTE: this uses `throw/1`, not `raise/1` — callers must `catch`, not `rescue`.
  """
  @spec parse!(binary) :: ast
  def parse!(str) do
    case parse(str) do
      {:ok, ast} -> ast
      {:error, err} -> throw(err)
    end
  end

  @doc """
  Use an AST to build a query in the way you want. For instance you could create a
  query for Elasticsearch from a FIQL AST, or use the `FIQLEx.QueryBuilders.SQLQueryBuilder` module
  to build an SQL query from a FIQL AST.
  Parameters are:
  * `ast`: The AST to transform to a query for another system
  * `module`: The module to use for the AST traversal (must implement the
    `init/2` and `build/2` callbacks as well as the `handle_*` callbacks)
  * `opts`: Options you want to pass to the `init/2` function of your `module`
  This function returns `{:ok, query}` with your created query if everything is fine, or
  `{:error, reason}` if there is something wrong.
  ```
  query = "author.age=ge=25;author.name==*Doe"
  {:ok, ast} = FIQLEx.parse(query)
  {:ok, query} = FIQLEx.build_query(ast, MyQueryBuilder)
  ```
  See the documentation of the `FIQLEx.QueryBuilder` module to learn more about the AST
  traversal.
  """
  @spec build_query(ast(), atom(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  def build_query(ast, module, opts \\ []) do
    # The builder's init/2 produces the initial accumulator threaded through
    # the traversal; build/2 turns the final accumulator into the query.
    state = apply(module, :init, [ast, opts])

    with {:ok, state} <- run_ast(ast, ast, module, state) do
      apply(module, :build, [ast, state])
    else
      {:error, err} -> {:error, err}
    end
  end

  @doc """
  This function will go deeper in the ast traversal.
  Parameters are:
  * `curr_ast`: The AST we want to go deeper with
  * `ast`: The global AST
  * `module`: The module to use for the traversal
  * `state`: The current state of your query builder
  The function returns `{:ok, state}` if everything is fine, and `{:error, reason}`
  if there is an error
  """
  @spec handle_ast(ast(), ast(), atom(), any()) :: {:ok, any()} | {:error, any()}
  def handle_ast(curr_ast, ast, module, state) do
    run_ast(curr_ast, ast, module, state)
  end

  @doc """
  Same as `handle_ast/4` but returns the `state` directly, or throws the error
  reason (uses `throw/1`, so `catch` it rather than `rescue`).
  """
  @spec handle_ast!(ast(), ast(), atom(), any()) :: any()
  def handle_ast!(curr_ast, ast, module, state) do
    case handle_ast(curr_ast, ast, module, state) do
      {:ok, result} -> result
      {:error, err} -> throw(err)
    end
  end

  # Dispatches each AST node shape to the corresponding builder callback.
  # The full `ast` is passed alongside so callbacks can inspect global context.
  defp run_ast({:or_op, exp1, exp2}, ast, module, state) do
    apply(module, :handle_or_expression, [exp1, exp2, ast, state])
  end

  defp run_ast({:and_op, exp1, exp2}, ast, module, state) do
    apply(module, :handle_and_expression, [exp1, exp2, ast, state])
  end

  defp run_ast({:op, exp}, ast, module, state) do
    apply(module, :handle_expression, [exp, ast, state])
  end

  defp run_ast({:selector, selector_name}, ast, module, state) do
    apply(module, :handle_selector, [selector_name, ast, state])
  end

  defp run_ast({:selector_and_value, selector_name, :equal, value}, ast, module, state) do
    apply(module, :handle_selector_and_value, [selector_name, :equal, value, ast, state])
  end

  defp run_ast({:selector_and_value, selector_name, :not_equal, value}, ast, module, state) do
    apply(module, :handle_selector_and_value, [selector_name, :not_equal, value, ast, state])
  end

  # Custom comparison operators (=ge=, =lt=, ...) carry the comparison name.
  defp run_ast(
         {:selector_and_value, selector_name, {:comparison, comparison}, value},
         ast,
         module,
         state
       ) do
    apply(module, :handle_selector_and_value_with_comparison, [
      selector_name,
      comparison,
      value,
      ast,
      state
    ])
  end
end
| 30.084746
| 99
| 0.647887
|
9e2464f78723b5f1a1409a72118be951429e4c74
| 2,502
|
exs
|
Elixir
|
apps/jsonrpc2/test/jsonrpc2/spec_handler/gas_estimater_test.exs
|
wolflee/mana
|
db66dac85addfaad98d40da5bd4082b3a0198bb1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 152
|
2018-10-27T04:52:03.000Z
|
2022-03-26T10:34:00.000Z
|
apps/jsonrpc2/test/jsonrpc2/spec_handler/gas_estimater_test.exs
|
wolflee/mana
|
db66dac85addfaad98d40da5bd4082b3a0198bb1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 270
|
2018-04-14T07:34:57.000Z
|
2018-10-25T18:10:45.000Z
|
apps/jsonrpc2/test/jsonrpc2/spec_handler/gas_estimater_test.exs
|
wolflee/mana
|
db66dac85addfaad98d40da5bd4082b3a0198bb1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 25
|
2018-10-27T12:15:13.000Z
|
2022-01-25T20:31:14.000Z
|
defmodule JSONRPC2.SpecHandler.GasEstimaterTest do
  use ExUnit.Case, async: true

  alias Blockchain.Account
  alias Blockchain.Block
  alias Blockchain.Chain
  alias JSONRPC2.SpecHandler.GasEstimater
  alias JSONRPC2.TestFactory

  # Fresh in-memory trie (ETS-backed) and the Ropsten chain config per test.
  setup do
    db = MerklePatriciaTree.Test.random_ets_db()
    trie = MerklePatriciaTree.Trie.new(db)
    chain = Chain.load_chain(:ropsten)

    {:ok, %{trie: trie, chain: chain}}
  end

  describe "run/4" do
    test "can't find block by block number", %{trie: trie, chain: chain} do
      call_request = TestFactory.build(:call_request)

      # Block number 10 was never stored in the trie.
      result = GasEstimater.run(trie, call_request, 10, chain)

      assert result == %{error: "Block is not found"}
    end

    test "returns lower gas limit", %{trie: trie, chain: chain} do
      block =
        TestFactory.build(:block, header: TestFactory.build(:header, gas_limit: 100_000_000))

      from_address = <<0x10::160>>
      from_account = TestFactory.build(:account, balance: 10_000_000)
      to_address = <<0x11::160>>
      to_account = TestFactory.build(:account)
      {:ok, {_, updated_trie}} = Block.put_block(block, trie, block.block_hash)

      call_request =
        TestFactory.build(:call_request, from: from_address, to: to_address, gas: 30_000)

      trie_with_accounts =
        updated_trie
        |> Account.put_account(from_address, from_account)
        |> Account.put_account(to_address, to_account)

      result = GasEstimater.run(trie_with_accounts, call_request, block.header.number, chain)

      # 21_000 is the intrinsic gas cost of a plain value transfer (no data),
      # so the estimator converges below the requested 30_000.
      assert result == {:ok, 21_000}
    end

    test "finds middle point gas", %{trie: trie, chain: chain} do
      block =
        TestFactory.build(:block, header: TestFactory.build(:header, gas_limit: 100_000_000))

      from_address = <<0x10::160>>
      from_account = TestFactory.build(:account, balance: 10_000_000)
      to_address = <<0x11::160>>
      to_account = TestFactory.build(:account)
      {:ok, {_, updated_trie}} = Block.put_block(block, trie, block.block_hash)

      # The requested gas (5_000) is below the real cost, forcing the
      # estimator to search upward for the true requirement.
      call_request =
        TestFactory.build(:call_request,
          from: from_address,
          to: to_address,
          gas: 5_000,
          data: <<1, 2, 3, 4, 5>>
        )

      trie_with_accounts =
        updated_trie
        |> Account.put_account(from_address, from_account)
        |> Account.put_account(to_address, to_account)

      result = GasEstimater.run(trie_with_accounts, call_request, block.header.number, chain)

      # 21_000 intrinsic + 68 gas per non-zero data byte * 5 bytes = 21_340.
      assert result == {:ok, 21_340}
    end
  end
end
| 30.144578
| 93
| 0.663869
|
9e24664472c5ca71b2150a9347dcc98cf69a6acb
| 7,570
|
ex
|
Elixir
|
parkapp_server/lib/parkapp_web/api_integration/api_integration.ex
|
bitmaker-software/parkapp
|
39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310
|
[
"MIT"
] | 2
|
2018-11-06T12:21:16.000Z
|
2018-11-21T10:20:17.000Z
|
parkapp_server/lib/parkapp_web/api_integration/api_integration.ex
|
bitmaker-software/parkapp
|
39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310
|
[
"MIT"
] | null | null | null |
parkapp_server/lib/parkapp_web/api_integration/api_integration.ex
|
bitmaker-software/parkapp
|
39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310
|
[
"MIT"
] | null | null | null |
defmodule ParkappWeb.ApiIntegration do
  @moduledoc """
  Coordinates third-party APIs (Embers for parking reservations, MBWay for
  payments) with the server's database, driving the reservation/payment state
  machine.
  """
  require Logger

  alias __MODULE__.Embers.Helpers, as: EmbersHelpers
  alias __MODULE__.MBWay.Helpers, as: MBWayHelpers
  alias __MODULE__.MBWay.WebhookData
  alias __MODULE__.Timeout
  alias Parkapp.Reservations.ReservationStatus.Enum, as: ReservationStatusEnum
  alias Parkapp.Reservations.ReservationType.Enum, as: ReservationTypeEnum
  alias Parkapp.Reservations.ReservationType.ConfigurationStruct
  alias Parkapp.ReservationsContext
  alias Parkapp.Logging

  @doc """
  Performs a single-use reservation with the Embers API and updates the
  database. If anything goes wrong, the remote reservation is cancelled.
  """
  @spec reserve_single_use(String.t()) ::
          {:ok, map()} | {:error, Ecto.Changeset.t()} | {:timeout, any()} | nil
  def reserve_single_use(device_id) when is_bitstring(device_id) do
    configuration = ConfigurationStruct.get_configuration(ReservationTypeEnum.single_use())
    reserve(device_id, configuration)
  end

  @doc """
  Performs a booked reservation with the Embers API and updates the database.
  If anything goes wrong, the remote reservation is cancelled.
  """
  @spec book_reservation(String.t()) ::
          {:ok, map()} | {:error, Ecto.Changeset.t()} | {:timeout, any()} | nil
  def book_reservation(device_id) when is_bitstring(device_id) do
    configuration = ConfigurationStruct.get_configuration(ReservationTypeEnum.booked())
    reserve(device_id, configuration)
  end

  # Shared implementation for both reservation types. Only proceeds when the
  # device has no current reservation and is not banned by the cancellation
  # timeout. If persisting the new reservation fails, the remote Embers
  # reservation is deleted again.
  # (Plain comment: `@doc` on private functions is discarded with a warning.)
  @spec reserve(String.t(), struct()) ::
          {:ok, map()} | {:error, Ecto.Changeset.t()} | {:timeout, any()} | nil
  defp reserve(device_id, %ConfigurationStruct{} = configuration)
       when is_bitstring(device_id) do
    with(
      reservation <- ReservationsContext.get_current_reservation(device_id),
      true <- is_nil(reservation)
    ) do
      case Timeout.check_time_since_last_cancelled_reservation(device_id) do
        true ->
          with(
            module <- EmbersHelpers.get_api_module(),
            {:ok, result} <- module.make_reservation(configuration),
            locator <- Map.get(result, "locator"),
            barcode <-
              Map.get(result, "product")
              |> Map.get("barcode"),
            reservation_start_time <- DateTime.utc_now()
          ) do
            case ReservationsContext.create_reservation_initial_state(%{
                   device_id: device_id,
                   locator: locator,
                   barcode: barcode,
                   reservation_type_id: configuration.reservation_type,
                   reservation_start_time: reservation_start_time
                 }) do
              {:ok, reservation} ->
                {:ok, reservation}

              error ->
                # Roll back the remote reservation when the DB insert fails.
                module.delete_reservation(locator)
                error
            end
          else
            _ ->
              nil
          end

        false ->
          # Device recently cancelled a reservation; tell it when the ban lifts.
          {:timeout, Timeout.get_unban_datetime(device_id)}
      end
    else
      _other ->
        nil
    end
  end

  defp reserve(_device_id, _config), do: nil

  @doc """
  Cancels the active reservation for `device_id`, both remotely (Embers) and
  in the database.
  """
  @spec cancel_reservation(String.t()) ::
          {:ok, map()} | {:error, Ecto.Changeset.t()} | nil
  def cancel_reservation(device_id) when is_bitstring(device_id) do
    with(
      reservation <- ReservationsContext.get_current_reservation(device_id),
      false <- is_nil(reservation),
      module <- EmbersHelpers.get_api_module(),
      {:ok, _result} <- module.cancel_reservation(reservation.locator)
    ) do
      ReservationsContext.cancel_reservation(reservation)
    else
      _other ->
        nil
    end
  end

  @doc """
  First payment step: fetches the outstanding amount and context token from
  Embers for the in-park reservation and stores them on the reservation.
  """
  @spec payment1(String.t()) :: {:ok, map()} | {:error, Ecto.Changeset.t()} | nil
  def payment1(device_id) do
    with(
      reservation <- ReservationsContext.get_current_reservation(device_id),
      false <- is_nil(reservation),
      true <- reservation.reservation_status_id == ReservationStatusEnum.in_park(),
      {:ok, result} <- EmbersHelpers.get_api_module().payment1(reservation.barcode),
      context_token <- Map.get(result, "context_token"),
      amount <- Map.get(result, "outstanding_amount"),
      {:ok, reservation} <-
        ReservationsContext.update_reservation_after_payment1(reservation, %{
          context_token: context_token,
          amount: amount,
          payment1_time: DateTime.utc_now()
        })
    ) do
      {:ok, reservation}
    else
      {:error, changeset} ->
        {:error, changeset}

      _ ->
        nil
    end
  end

  @doc """
  Triggers the MBWay external payment for the in-park reservation. Returns
  `:timeout` when the pay window elapsed or `payment1/1` was never completed.
  """
  @spec pay(String.t(), String.t()) ::
          {:ok, map()} | {:error, Ecto.Changeset.t()} | nil | :timeout
  def pay(device_id, phone_number) do
    with(
      reservation <- ReservationsContext.get_current_reservation(device_id),
      false <- is_nil(reservation),
      true <- reservation.reservation_status_id == ReservationStatusEnum.in_park()
    ) do
      with(
        false <- Timeout.check_time_to_pay_timeout(reservation),
        false <- is_nil(reservation.payment1_time)
      ) do
        external_payment(reservation, device_id, phone_number)
      else
        _ ->
          :timeout
      end
    else
      _ ->
        nil
    end
  end

  # Handles the external_payment API request plus the database state
  # transition.
  # (Plain comment: `@doc` on private functions is discarded with a warning.)
  @spec external_payment(struct(), String.t(), String.t()) ::
          {:ok, map()} | {:error, Ecto.Changeset.t()} | nil
  defp external_payment(reservation, device_id, phone_number) do
    with(
      true <- !is_nil(reservation.amount) && !is_nil(reservation.context_token),
      {:ok, _result} <-
        MBWayHelpers.get_api_module().request_payment(device_id, phone_number, reservation.amount),
      {:ok, reservation} <- ReservationsContext.move_to_external_payment_state(reservation)
    ) do
      {:ok, reservation}
    else
      {:error, changeset} ->
        # TODO(review): payment was requested but the state transition failed —
        # decide whether to compensate. (Original comment: "What to do here?")
        {:error, changeset}

      _other ->
        # TODO(review): MBWay request failed; reservation stays in-park.
        nil
    end
  end

  @doc """
  Handles the last steps of the payment workflow after the external payment is
  successful (called from the MBWay webhook).
  """
  @spec complete_payment_procedure(struct() | nil) ::
          {:ok, map()} | {:error, Ecto.Changeset.t()} | nil | :send_new_notification
  def complete_payment_procedure(nil), do: nil

  def complete_payment_procedure(%WebhookData{} = webhook_data) do
    with(
      module <- EmbersHelpers.get_api_module(),
      reservation <- ReservationsContext.get_current_reservation(webhook_data.device_id),
      false <- is_nil(reservation),
      true <- reservation.reservation_status_id == ReservationStatusEnum.external_payment()
    ) do
      # Audit-log every webhook acted upon, whatever its result code.
      Logging.create_external_payment_log(%{
        reservation_id: reservation.id,
        received_at: DateTime.utc_now(),
        body: webhook_data.body,
        result_code: webhook_data.result_code
      })

      cond do
        WebhookData.validate(webhook_data) == true ->
          Logger.info("valid webhook_data")

          case module.payment2(reservation.context_token) do
            :ok ->
              ReservationsContext.move_to_payment2_state(
                reservation,
                DateTime.utc_now()
              )

            _else ->
              :send_new_notification
          end

        true ->
          if WebhookData.should_revert(webhook_data) do
            Logger.info("reverting to inpark")
            ReservationsContext.revert_from_external_payment_to_in_park(reservation)
          end
      end
    else
      _error ->
        nil
    end
  end
end
| 33.348018
| 99
| 0.648481
|
9e249a6dc2b604cccde271b7b6d80de25f8af990
| 836
|
ex
|
Elixir
|
servy/lib/servy.ex
|
herminiotorres/pragmaticstudio
|
273647694519fd4149716abf190eb8d97102f488
|
[
"MIT"
] | null | null | null |
servy/lib/servy.ex
|
herminiotorres/pragmaticstudio
|
273647694519fd4149716abf190eb8d97102f488
|
[
"MIT"
] | null | null | null |
servy/lib/servy.ex
|
herminiotorres/pragmaticstudio
|
273647694519fd4149716abf190eb8d97102f488
|
[
"MIT"
] | null | null | null |
defmodule Servy do
  @moduledoc """
  OTP application entry point: starts the Servy supervision tree.
  """
  use Application

  @doc """
  Changing the Port at Runtime
  In the video we ran the application with its default environment like so:
  mix run --no-halt
  But what if you wanted to override the default port value so that the web server runs on
  port 5000, for example. Here's how to do that:
  elixir --erl "-servy port 5000" -S mix run --no-halt
  The --erl option is the way you pass flags (switches) to the Erlang VM.
  Using -servy port 5000 tells the VM to set the port environment parameter to the value 5000
  for the servy application. Using this same form,
  you can set environment parameters for any application running in the VM.
  """
  # Application callback: starts the top-level supervisor.
  def start(_type, _args) do
    IO.puts("Starting the application..")
    Servy.Supervisor.start_link()
  end
end
| 29.857143
| 97
| 0.687799
|
9e24b36558736a86a286939c05c7211c8b865547
| 340
|
exs
|
Elixir
|
test/test_helper.exs
|
joakimk/exqueue
|
bc2c4fdf311174ea92ff1cf3c0a1860137132ed7
|
[
"MIT",
"Unlicense"
] | 366
|
2015-07-04T22:05:44.000Z
|
2021-11-15T10:13:46.000Z
|
test/test_helper.exs
|
joakimk/exqueue
|
bc2c4fdf311174ea92ff1cf3c0a1860137132ed7
|
[
"MIT",
"Unlicense"
] | 47
|
2015-07-05T13:40:56.000Z
|
2019-10-04T03:16:56.000Z
|
test/test_helper.exs
|
joakimk/exqueue
|
bc2c4fdf311174ea92ff1cf3c0a1860137132ed7
|
[
"MIT",
"Unlicense"
] | 33
|
2015-07-05T12:50:55.000Z
|
2021-01-28T03:42:41.000Z
|
ExUnit.start()

defmodule CaptureLog do
  @moduledoc """
  Test helper that captures `Logger` output produced by a function.
  """
  import ExUnit.CaptureIO

  # Borrowed from elixir test helper, is built into elixir in master.
  # Sets the logger level to `level`, runs `fun`, and returns everything
  # logged to the :user device as a string (flushing pending messages first).
  # NOTE(review): the level is restored to the hard-coded :debug afterwards,
  # assuming :debug is the configured default — confirm if that ever changes.
  def capture_log(level \\ :debug, fun) do
    Logger.configure(level: level)

    capture_io(:user, fn ->
      fun.()
      Logger.flush()
    end)
  after
    Logger.configure(level: :debug)
  end
end
| 18.888889
| 68
| 0.673529
|
9e24beede21d2e2bc882a74dd5239005957c29ec
| 4,030
|
exs
|
Elixir
|
test/mazes/rectangular_maze_test.exs
|
angelikatyborska/mazes
|
cba3b1d6aaaa896f4ca505b477cf03b67523ebf0
|
[
"MIT"
] | 116
|
2020-12-26T20:56:01.000Z
|
2022-03-12T15:12:37.000Z
|
test/mazes/rectangular_maze_test.exs
|
lohayon/mazes
|
98a6276ea7440af938edfb14476a5877fdc295e1
|
[
"MIT"
] | null | null | null |
test/mazes/rectangular_maze_test.exs
|
lohayon/mazes
|
98a6276ea7440af938edfb14476a5877fdc295e1
|
[
"MIT"
] | 10
|
2020-12-29T05:11:43.000Z
|
2022-01-02T00:57:12.000Z
|
defmodule Mazes.RectangularMazeTest do
  use ExUnit.Case

  alias Mazes.RectangularMaze

  describe "new" do
    # The adjacency matrix maps each {x, y} vertex to its grid neighbours;
    # the boolean records whether the wall between the pair is open.
    test "generates an adjacency matrix with no adjacent vertices" do
      result = RectangularMaze.new(width: 2, height: 3)

      assert result.width == 2
      assert result.height == 3

      assert result.adjacency_matrix == %{
               {1, 1} => %{
                 {1, 2} => false,
                 {2, 1} => false
               },
               {1, 2} => %{
                 {1, 1} => false,
                 {1, 3} => false,
                 {2, 2} => false
               },
               {1, 3} => %{
                 {1, 2} => false,
                 {2, 3} => false
               },
               {2, 1} => %{
                 {1, 1} => false,
                 {2, 2} => false
               },
               {2, 2} => %{
                 {1, 2} => false,
                 {2, 1} => false,
                 {2, 3} => false
               },
               {2, 3} => %{
                 {1, 3} => false,
                 {2, 2} => false
               }
             }
    end

    test "generates an adjacency matrix with all vertices adjacent" do
      assert RectangularMaze.new(width: 2, height: 3, all_vertices_adjacent?: true).adjacency_matrix ==
               %{
                 {1, 1} => %{
                   {1, 2} => true,
                   {2, 1} => true
                 },
                 {1, 2} => %{
                   {1, 1} => true,
                   {1, 3} => true,
                   {2, 2} => true
                 },
                 {1, 3} => %{
                   {1, 2} => true,
                   {2, 3} => true
                 },
                 {2, 1} => %{
                   {1, 1} => true,
                   {2, 2} => true
                 },
                 {2, 2} => %{
                   {1, 2} => true,
                   {2, 1} => true,
                   {2, 3} => true
                 },
                 {2, 3} => %{
                   {1, 3} => true,
                   {2, 2} => true
                 }
               }
    end
  end

  describe "center" do
    test "odd size" do
      maze = RectangularMaze.new(width: 5, height: 5)

      assert RectangularMaze.center(maze) == {3, 3}
    end

    # For even sizes the center rounds down toward the top-left.
    test "even size" do
      maze = RectangularMaze.new(width: 4, height: 4)

      assert RectangularMaze.center(maze) == {2, 2}
    end
  end

  describe "outer_wall?" do
    # Coordinates are 1-based, so x in 1..2 and y in 1..3 here; stepping to
    # x = 0, x = 3, y = 0 or y = 4 crosses the outer wall.
    test "checks if there is the outer wall between two vertices" do
      maze = RectangularMaze.new(width: 2, height: 3)

      assert RectangularMaze.outer_wall?(maze, {1, 1}, {0, 1}) == true
      assert RectangularMaze.outer_wall?(maze, {1, 2}, {0, 2}) == true
      assert RectangularMaze.outer_wall?(maze, {1, 3}, {0, 3}) == true

      assert RectangularMaze.outer_wall?(maze, {1, 1}, {2, 1}) == false
      assert RectangularMaze.outer_wall?(maze, {1, 2}, {2, 2}) == false
      assert RectangularMaze.outer_wall?(maze, {1, 3}, {2, 3}) == false

      assert RectangularMaze.outer_wall?(maze, {2, 1}, {3, 1}) == true
      assert RectangularMaze.outer_wall?(maze, {2, 2}, {3, 2}) == true
      assert RectangularMaze.outer_wall?(maze, {2, 3}, {3, 3}) == true

      assert RectangularMaze.outer_wall?(maze, {2, 1}, {1, 1}) == false
      assert RectangularMaze.outer_wall?(maze, {2, 2}, {1, 2}) == false
      assert RectangularMaze.outer_wall?(maze, {2, 3}, {1, 3}) == false

      assert RectangularMaze.outer_wall?(maze, {1, 1}, {1, 2}) == false
      assert RectangularMaze.outer_wall?(maze, {2, 1}, {2, 2}) == false

      assert RectangularMaze.outer_wall?(maze, {1, 1}, {1, 0}) == true
      assert RectangularMaze.outer_wall?(maze, {2, 1}, {2, 0}) == true

      assert RectangularMaze.outer_wall?(maze, {1, 3}, {1, 2}) == false
      assert RectangularMaze.outer_wall?(maze, {2, 3}, {2, 2}) == false

      assert RectangularMaze.outer_wall?(maze, {1, 3}, {1, 4}) == true
      assert RectangularMaze.outer_wall?(maze, {2, 3}, {2, 4}) == true
    end
  end
end
| 33.865546
| 103
| 0.437469
|
9e24ebf92a3a23dc959a5b0d8a7c274135594da0
| 1,937
|
ex
|
Elixir
|
lib/smart_chain/units.ex
|
esprezzo/elixir-smart-chain
|
ed031713edd46c610472406f541196a67e07cb59
|
[
"MIT"
] | null | null | null |
lib/smart_chain/units.ex
|
esprezzo/elixir-smart-chain
|
ed031713edd46c610472406f541196a67e07cb59
|
[
"MIT"
] | null | null | null |
lib/smart_chain/units.ex
|
esprezzo/elixir-smart-chain
|
ed031713edd46c610472406f541196a67e07cb59
|
[
"MIT"
] | null | null | null |
defmodule SmartChain.Units do
  @moduledoc """
  Conversion factors for the named ether denominations, exposed as a struct
  whose field values are the number of wei in one unit of that denomination.
  """
  alias SmartChain.Units

  @typedoc """
  Struct mapping each ether denomination name to its value in wei.
  """
  @type t :: %Units{
          wei: integer,
          kwei: integer,
          Kwei: integer,
          babbage: integer,
          femtoether: integer,
          mwei: integer,
          Mwei: integer,
          lovelace: integer,
          picoether: integer,
          gwei: integer,
          Gwei: integer,
          shannon: integer,
          nanoether: integer,
          nano: integer,
          szabo: integer,
          microether: integer,
          micro: integer,
          finney: integer,
          milliether: integer,
          milli: integer,
          ether: integer,
          eth: integer,
          kether: integer,
          grand: integer,
          mether: integer,
          gether: integer,
          tether: integer
        }

  # Each denomination tier is exactly 1_000x the previous one, from wei (10^0)
  # up to tether (10^30).
  defstruct wei: 1,
            kwei: 1_000,
            Kwei: 1_000,
            babbage: 1_000,
            femtoether: 1_000,
            mwei: 1_000_000,
            Mwei: 1_000_000,
            lovelace: 1_000_000,
            picoether: 1_000_000,
            gwei: 1_000_000_000,
            Gwei: 1_000_000_000,
            shannon: 1_000_000_000,
            nanoether: 1_000_000_000,
            nano: 1_000_000_000,
            szabo: 1_000_000_000_000,
            microether: 1_000_000_000_000,
            micro: 1_000_000_000_000,
            finney: 1_000_000_000_000_000,
            milliether: 1_000_000_000_000_000,
            milli: 1_000_000_000_000_000,
            ether: 1_000_000_000_000_000_000,
            eth: 1_000_000_000_000_000_000,
            kether: 1_000_000_000_000_000_000_000,
            grand: 1_000_000_000_000_000_000_000,
            mether: 1_000_000_000_000_000_000_000_000,
            gether: 1_000_000_000_000_000_000_000_000_000,
            # Bug fix: was 10^29; one tether is 10^30 wei (1_000 gether).
            tether: 1_000_000_000_000_000_000_000_000_000_000
end
| 26.902778
| 90
| 0.55808
|
9e25272787671cb6bc0e6051b708f70a74f72caf
| 1,837
|
ex
|
Elixir
|
clients/machine_learning/lib/google_api/machine_learning/v1/deserializer.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/machine_learning/lib/google_api/machine_learning/v1/deserializer.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/machine_learning/lib/google_api/machine_learning/v1/deserializer.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | 1
|
2018-07-28T20:50:50.000Z
|
2018-07-28T20:50:50.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.MachineLearning.V1.Deserializer do
  @moduledoc """
  Helper functions for deserializing responses into models
  """

  @doc """
  Update the provided model with a deserialization of a nested value.

  `field` is the key inside `model` to decode in place; the third argument
  selects the container shape (`:list`, `:struct`, `:map` or `:date`); `mod`
  is the struct module to decode into (ignored for `:date`); `options` are
  forwarded to `Poison.Decode.decode/2`.
  """
  @spec deserialize(struct(), atom(), atom(), module() | nil, keyword()) :: struct()
  def deserialize(model, field, :list, mod, options) do
    model
    |> Map.update!(field, &Poison.Decode.decode(&1, Keyword.merge(options, as: [struct(mod)])))
  end

  def deserialize(model, field, :struct, mod, options) do
    model
    |> Map.update!(field, &Poison.Decode.decode(&1, Keyword.merge(options, as: struct(mod))))
  end

  # Map of string keys to decoded structs; keys are kept as-is.
  def deserialize(model, field, :map, mod, options) do
    model
    |> Map.update!(
      field,
      &Map.new(&1, fn {key, val} ->
        {key, Poison.Decode.decode(val, Keyword.merge(options, as: struct(mod)))}
      end)
    )
  end

  def deserialize(model, field, :date, _, _options) do
    # Bug fix: DateTime.from_iso8601/1 returns {:ok, datetime, utc_offset}
    # (a 3-tuple). The original matched {:ok, datetime}, which never matched,
    # so date fields were silently left as raw strings.
    case DateTime.from_iso8601(Map.get(model, field)) do
      {:ok, datetime, _utc_offset} ->
        Map.put(model, field, datetime)

      _ ->
        # Unparseable (or already-decoded) value: leave the model untouched.
        model
    end
  end
end
| 36.74
| 139
| 0.697877
|
9e25574c88e74091178eebdb507e685e644404f7
| 1,879
|
ex
|
Elixir
|
apps/game/lib/game.ex
|
artvinn/phoenix-tictactoe
|
14f9a7dcc18449d8cba755c5625b665b7cfccfff
|
[
"MIT"
] | 1
|
2019-03-11T09:12:16.000Z
|
2019-03-11T09:12:16.000Z
|
apps/game/lib/game.ex
|
artvinn/phoenix-tictactoe
|
14f9a7dcc18449d8cba755c5625b665b7cfccfff
|
[
"MIT"
] | null | null | null |
apps/game/lib/game.ex
|
artvinn/phoenix-tictactoe
|
14f9a7dcc18449d8cba755c5625b665b7cfccfff
|
[
"MIT"
] | 1
|
2020-09-26T00:37:30.000Z
|
2020-09-26T00:37:30.000Z
|
defmodule Game do
  @moduledoc """
  Core tic-tac-toe game state: players joining/leaving, turn-taking moves,
  and win/draw detection.
  """
  alias Game.{Board, Rules}

  defstruct [:board, :turn, :winner, players: [], over: false]

  @players [:x, :o]

  @doc "Creates a fresh game with an empty board; `turn` defaults to `:x`."
  def new(turn \\ :x), do: %Game{board: Board.new(), turn: turn}

  @doc "Adds the next player (`:x` first, then `:o`); at most two may join."
  def join(%Game{players: [_, _]}) do
    {:error, "No more players allowed"}
  end

  def join(%Game{players: existing} = game) do
    joining = next_player(existing)
    {:ok, %{game | players: [joining | existing]}, joining}
  end

  @doc "Removes `player` from the game's player list."
  def leave(%Game{players: existing} = game, player) when player in @players do
    {:ok, %{game | players: List.delete(existing, player)}}
  end

  @doc """
  Places `player`'s mark at `position`. Rejects moves on finished games and
  out-of-turn moves; otherwise advances the turn and checks for game over.
  """
  def make_move(%Game{over: true}, _player, _position), do: {:error, "Game over"}

  def make_move(%Game{turn: current}, player, _position) when current !== player,
    do: {:error, "Not your turn"}

  def make_move(%Game{board: board} = game, player, position) do
    # Board.put/3 returns {:error, reason} for invalid placements, which
    # `with` passes straight through to the caller.
    with {:ok, new_board} <- Board.put(board, position, player) do
      advanced =
        game
        |> update_board(new_board)
        |> update_turn()
        |> check_game_over()

      {:ok, advanced}
    end
  end

  @doc "Returns the opposing player's mark."
  def opposite_player(:x), do: :o
  def opposite_player(:o), do: :x

  defp update_board(game, board), do: %{game | board: board}
  defp update_turn(%{turn: turn} = game), do: %{game | turn: opposite_player(turn)}
  defp update_over(game), do: %{game | over: true}
  defp update_winner(game, winner), do: %{game | winner: winner}

  defp next_player([]), do: hd(@players)
  defp next_player([only]), do: opposite_player(only)

  # Ends the game when the rules report a winner, or when the board is full
  # (draw); otherwise the game continues unchanged.
  defp check_game_over(%{board: board} = game) do
    verdict = Rules.get_winner(board)

    cond do
      verdict != nil -> game |> update_winner(verdict) |> update_over()
      Board.full?(board) -> game |> update_winner(:draw) |> update_over()
      true -> game
    end
  end
end
| 27.231884
| 97
| 0.605641
|
9e25aabaf9715f2aca416eccfa5eb2749d73738d
| 1,586
|
exs
|
Elixir
|
test/phoenix/socket/v1_json_serializer_test.exs
|
faheempatel/phoenix
|
a83318f2a2284b7ab29b0b86cdd9d2e1f4d0a7c9
|
[
"MIT"
] | 18,092
|
2015-01-01T01:51:04.000Z
|
2022-03-31T19:37:14.000Z
|
test/phoenix/socket/v1_json_serializer_test.exs
|
faheempatel/phoenix
|
a83318f2a2284b7ab29b0b86cdd9d2e1f4d0a7c9
|
[
"MIT"
] | 3,905
|
2015-01-01T00:22:47.000Z
|
2022-03-31T17:06:21.000Z
|
test/phoenix/socket/v1_json_serializer_test.exs
|
faheempatel/phoenix
|
a83318f2a2284b7ab29b0b86cdd9d2e1f4d0a7c9
|
[
"MIT"
] | 3,205
|
2015-01-03T10:58:22.000Z
|
2022-03-30T14:55:57.000Z
|
defmodule Phoenix.Socket.V1.JSONSerializerTest do
  use ExUnit.Case, async: true

  alias Phoenix.Socket.{Broadcast, Message, Reply, V1}

  # v1 responses must not contain join_ref
  @serializer V1.JSONSerializer
  @v1_msg_json "{\"event\":\"e\",\"payload\":\"m\",\"ref\":null,\"topic\":\"t\"}"
  @v1_reply_json "{\"event\":\"phx_reply\",\"payload\":{\"response\":null,\"status\":null},\"ref\":\"null\",\"topic\":\"t\"}"
  @v1_fastlane_json "{\"event\":\"e\",\"payload\":\"m\",\"ref\":null,\"topic\":\"t\"}"

  # Encodes `msg` and flattens the resulting iodata to a binary so the JSON
  # can be compared as a string.
  def encode!(serializer, msg) do
    {:socket_push, :text, encoded} = serializer.encode!(msg)
    IO.iodata_to_binary(encoded)
  end

  def decode!(serializer, msg, opts), do: serializer.decode!(msg, opts)

  # Same as encode!/2 but via the broadcast fastlane path.
  def fastlane!(serializer, msg) do
    {:socket_push, :text, encoded} = serializer.fastlane!(msg)
    IO.iodata_to_binary(encoded)
  end

  test "encode!/1 encodes `Phoenix.Socket.Message` as JSON" do
    msg = %Message{topic: "t", event: "e", payload: "m"}
    assert encode!(@serializer, msg) == @v1_msg_json
  end

  test "encode!/1 encodes `Phoenix.Socket.Reply` as JSON" do
    msg = %Reply{topic: "t", ref: "null"}
    assert encode!(@serializer, msg) == @v1_reply_json
  end

  test "decode!/2 decodes `Phoenix.Socket.Message` from JSON" do
    assert %Message{topic: "t", event: "e", payload: "m"} ==
             decode!(@serializer, @v1_msg_json, opcode: :text)
  end

  test "fastlane!/1 encodes a broadcast into a message as JSON" do
    msg = %Broadcast{topic: "t", event: "e", payload: "m"}
    assert fastlane!(@serializer, msg) == @v1_fastlane_json
  end
end
| 36.045455
| 125
| 0.649433
|
9e25bdb457513028def26468ae0936e1ecf703f2
| 648
|
ex
|
Elixir
|
lib/jabbax/plug.ex
|
surgeventures/jabbax
|
69c90ef620c5a44705015601e5f5a4dd739bd593
|
[
"MIT"
] | 14
|
2017-03-20T12:43:20.000Z
|
2021-11-24T13:19:16.000Z
|
lib/jabbax/plug.ex
|
surgeventures/jabbax
|
69c90ef620c5a44705015601e5f5a4dd739bd593
|
[
"MIT"
] | 3
|
2017-04-27T15:00:33.000Z
|
2022-02-14T11:45:53.000Z
|
lib/jabbax/plug.ex
|
surgeventures/jabbax
|
69c90ef620c5a44705015601e5f5a4dd739bd593
|
[
"MIT"
] | 1
|
2019-09-09T13:57:58.000Z
|
2019-09-09T13:57:58.000Z
|
# Only define this plug when the Plug library is available at compile time.
if Code.ensure_loaded?(Plug) do
  defmodule Jabbax.Plug do
    @moduledoc false

    # Normalizes plug options: `:assign` names the conn assign under which the
    # deserialized document is stored (defaults to :doc). Tolerates nil opts.
    def init(opts) do
      [
        assign: Keyword.get(opts || [], :assign, :doc)
      ]
    end

    # Deserializes JSON API ("application/vnd.api+json") request bodies into
    # the configured assign; any other content type passes through untouched.
    # Requires body_params to already be parsed (matches on `%{}`).
    def call(conn = %{body_params: %{}}, opts) do
      case Plug.Conn.get_req_header(conn, "content-type") do
        ["application/vnd.api+json"] ->
          Plug.Conn.assign(conn, opts[:assign], Jabbax.Deserializer.call(conn.body_params))

        _ ->
          conn
      end
    rescue
      # Re-raise structure errors as Plug.Parsers.ParseError so Plug renders
      # a 400 response instead of a 500.
      # credo:disable-for-next-line Credo.Check.Warning.RaiseInsideRescue
      e in Jabbax.StructureError -> raise Plug.Parsers.ParseError, exception: e
    end
  end
end
| 25.92
| 91
| 0.618827
|
9e25c569e0fcaadcfca8ee4e76a76ed53b0a2d4b
| 635
|
exs
|
Elixir
|
test/survey_api_web/views/error_view_test.exs
|
kamidev/survey_ap
|
483314842cf2e8279e1224e83b57d61a5da143ad
|
[
"MIT"
] | 6
|
2020-02-04T16:18:30.000Z
|
2020-10-31T06:00:03.000Z
|
test/survey_api_web/views/error_view_test.exs
|
kamidev/survey_ap
|
483314842cf2e8279e1224e83b57d61a5da143ad
|
[
"MIT"
] | 114
|
2019-11-14T03:48:17.000Z
|
2022-03-17T12:38:14.000Z
|
test/survey_api_web/views/error_view_test.exs
|
kamidev/survey_ap
|
483314842cf2e8279e1224e83b57d61a5da143ad
|
[
"MIT"
] | 1
|
2021-07-08T01:48:56.000Z
|
2021-07-08T01:48:56.000Z
|
defmodule SurveyAPIWeb.ErrorViewTest do
  use SurveyAPIWeb.ConnCase, async: true

  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View

  test "renders 404.json" do
    assert render(SurveyAPIWeb.ErrorView, "404.json", []) ==
             %{errors: %{detail: "Page not found"}}
  end

  test "render 500.json" do
    assert render(SurveyAPIWeb.ErrorView, "500.json", []) ==
             %{errors: %{detail: "Internal server error"}}
  end

  # Any other template falls back to the 500 message — presumably via
  # template_not_found/2 in SurveyAPIWeb.ErrorView; confirm there.
  test "render any other" do
    assert render(SurveyAPIWeb.ErrorView, "505.json", []) ==
             %{errors: %{detail: "Internal server error"}}
  end
end
| 28.863636
| 66
| 0.653543
|
9e25dcfc06d187febdd090d135dc097810bddcef
| 4,389
|
exs
|
Elixir
|
test/griffin/model/validations_test.exs
|
craigspaeth/griffin
|
44a59e08b119b4725369e90dc1398b9ec51173b7
|
[
"MIT"
] | null | null | null |
test/griffin/model/validations_test.exs
|
craigspaeth/griffin
|
44a59e08b119b4725369e90dc1398b9ec51173b7
|
[
"MIT"
] | 2
|
2017-12-11T06:40:30.000Z
|
2018-03-06T06:04:05.000Z
|
test/griffin/model/validations_test.exs
|
craigspaeth/griffin
|
44a59e08b119b4725369e90dc1398b9ec51173b7
|
[
"MIT"
] | null | null | null |
defmodule Griffin.Model.ValidationsTest do
  @moduledoc """
  Tests for Griffin validations functionality.

  Exercises the keyword-list validation DSL (`valid?/2,3` and `errors/2`):
  type atoms, rule atoms like `:required`, key-value rules like `min:`/`max:`,
  custom validator functions, nested maps, lists, and either-types.
  """

  use ExUnit.Case
  import Griffin.Model.Validations

  # Rule atoms such as :required are looked up by name.
  test "validates a DSL schema" do
    schema = [
      name: [:string, :required]
    ]

    harry = %{name: "Harry Potter"}
    voldemort = %{name: nil}
    assert valid?(harry, schema)
    assert not valid?(voldemort, schema)
  end

  # A captured named function may be used directly in place of the rule atom.
  test "validates a DSL schema using imported functions" do
    schema = [
      name: [:string, &required/2]
    ]

    harry = %{name: "Harry Potter"}
    voldemort = %{name: nil}
    assert valid?(harry, schema)
    assert not valid?(voldemort, schema)
  end

  test "validates key val pairs" do
    schema = [
      name: [:string, min: 10]
    ]

    bob = %{name: "Bob"}
    assert not valid?(bob, schema)
  end

  # Multiple key-value rules must all pass.
  test "validates multiple key val pairs" do
    schema = [
      name: [:string, min: 4, max: 6]
    ]

    assert not valid?(%{name: "Bob"}, schema)
    assert not valid?(%{name: "Miranda"}, schema)
    assert valid?(%{name: "Sarah"}, schema)
  end

  # Custom validators with extra args are written as [fun, arg1, ...].
  test "validates custom validation functions" do
    starts_with_letter = fn _, val, letter ->
      String.first(val) == letter
    end

    schema = [
      name: [:string, [starts_with_letter, "A"]]
    ]

    assert not valid?(%{name: "Bob"}, schema)
    assert valid?(%{name: "Anne"}, schema)
  end

  test "validates custom validation functions without args" do
    starts_with_letter_a = fn _, val ->
      String.first(val) == "A"
    end

    schema = [
      name: [:string, starts_with_letter_a]
    ]

    assert not valid?(%{name: "Bob"}, schema)
    assert valid?(%{name: "Anne"}, schema)
  end

  # The declared type is passed as the validator's first argument, so a
  # validator can guard on it; a non-matching type fails validation.
  test "validates custom validation functions based on types" do
    starts_with_letter_a = fn type, val when type == :string ->
      String.first(val) == "A"
    end

    schema = [name: [:string, starts_with_letter_a]]
    assert not valid?(%{name: "Bob"}, schema)
    assert valid?(%{name: "Anne"}, schema)
    schema = [name: [:int, starts_with_letter_a]]
    assert not valid?(%{name: "A Num"}, schema)
  end

  # :map types recurse into a nested schema via `of:`.
  test "validates nested maps" do
    schema = [
      location: [
        :map,
        of: [
          city: [:string, :required, min: 3]
        ]
      ]
    ]

    cincinnati = %{
      location: %{
        city: "Cincinnati"
      }
    }

    new_york = %{
      location: %{
        city: "NY"
      }
    }

    assert valid?(cincinnati, schema)
    assert not valid?(new_york, schema)
  end

  # :list types apply the `of:` rules to every element.
  test "validates lists" do
    schema = [
      children: [:list, of: [:string, :required, min: 4]]
    ]

    parent = %{children: ["Bobby"]}
    expecting_parent = %{children: ["N/A"]}
    assert valid?(parent, schema)
    assert not valid?(expecting_parent, schema)
  end

  # :either passes when at least one of the alternative rule sets passes.
  test "validates either types" do
    schema = [
      id: [
        :either,
        of: [
          [:string, min: 10],
          [:int, max: 100]
        ]
      ]
    ]

    assert valid?(%{id: "abcdefghijkl"}, schema)
    assert valid?(%{id: 99}, schema)
    assert not valid?(%{id: 101}, schema)
  end

  # List rules (here max: 3) and per-element either rules compose.
  test "validates list either types" do
    schema = [
      children: [
        :list,
        max: 3,
        of: [
          :either,
          of: [
            [:string, equals: "Bobby"],
            [:string, equals: "Sally"]
          ]
        ]
      ]
    ]

    assert valid?(%{children: ["Bobby", "Sally"]}, schema)
    assert not valid?(%{children: ["Bobby", "Sally", "Timmy"]}, schema)
    assert valid?(%{children: ["Bobby", "Sally", "Bobby"]}, schema)
    assert not valid?(%{children: ["Bobby", "Sally", "Bobby", "Sally"]}, schema)
  end

  # on_create/on_read/... scope rules to a specific CRUD operation,
  # selected by the third argument to valid?/3.
  test "validates rules that only apply to CRUD operations" do
    schema = [
      name: [:string, max: 5, on_create_read: [:required], on_create: [min: 10]]
    ]

    assert not valid?(%{name: "Bob"}, schema, :create)
    assert valid?(%{name: "Bob"}, schema, :read)
  end

  test "validates emails" do
    schema = [email: [:string, :email]]
    assert valid?(%{email: "foo@bar.com"}, schema)
    assert not valid?(%{email: "foo@bar"}, schema)
  end

  # errors/2 returns {:error, message} tuples describing the failed rule.
  test "returns errors" do
    schema = [email: [:string, :email]]

    assert errors(%{email: "foo@bar"}, schema) == [
             {
               :error,
               "email with value \"foo@bar\" is invalid " <> "according to the rule :email"
             }
           ]
  end
end
| 23.345745
| 91
| 0.558442
|
9e25e403741750f755a13d8892067f93a634f2eb
| 865
|
exs
|
Elixir
|
apps/avro/mix.exs
|
jdenen/hindsight
|
ef69b4c1a74c94729dd838a9a0849a48c9b6e04c
|
[
"Apache-2.0"
] | 12
|
2020-01-27T19:43:02.000Z
|
2021-07-28T19:46:29.000Z
|
apps/avro/mix.exs
|
jdenen/hindsight
|
ef69b4c1a74c94729dd838a9a0849a48c9b6e04c
|
[
"Apache-2.0"
] | 81
|
2020-01-28T18:07:23.000Z
|
2021-11-22T02:12:13.000Z
|
apps/avro/mix.exs
|
jdenen/hindsight
|
ef69b4c1a74c94729dd838a9a0849a48c9b6e04c
|
[
"Apache-2.0"
] | 10
|
2020-02-13T21:24:09.000Z
|
2020-05-21T18:39:35.000Z
|
defmodule Avro.MixProject do
  use Mix.Project

  # Project definition for the :avro umbrella app.  The list concatenation
  # below reproduces the conventional key order exactly:
  # app/version, shared umbrella paths, then compiler/runtime settings.
  def project do
    [app: :avro, version: "0.1.0"] ++
      umbrella_paths() ++
      [
        elixir: "~> 1.9",
        start_permanent: Mix.env() == :prod,
        deps: deps()
      ]
  end

  # Build/config/deps/lock locations shared with the umbrella root.
  defp umbrella_paths do
    [
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock"
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [extra_applications: [:logger]]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:erlavro, "~> 2.8"},
      {:ok, in_umbrella: true},
      {:definition_dictionary, in_umbrella: true},
      {:temp, "~> 0.4.7"},
      {:checkov, "~> 1.0", only: [:dev, :test]},
      {:credo, "~> 1.3", only: [:dev]},
      {:placebo, "~> 2.0.0-rc.2", only: [:dev, :test]}
    ]
  end
end
| 22.763158
| 59
| 0.519075
|
9e2606b596e23e9da0cab45e451bbb17bb8103dd
| 6,122
|
ex
|
Elixir
|
lib/sentry.ex
|
mbta/sentry-elixir
|
ce417a69efec0eeb968a3c8fdcd61d3115c69875
|
[
"MIT"
] | null | null | null |
lib/sentry.ex
|
mbta/sentry-elixir
|
ce417a69efec0eeb968a3c8fdcd61d3115c69875
|
[
"MIT"
] | null | null | null |
lib/sentry.ex
|
mbta/sentry-elixir
|
ce417a69efec0eeb968a3c8fdcd61d3115c69875
|
[
"MIT"
] | null | null | null |
defmodule Sentry do
  use Application
  # NOTE(review): `import Supervisor.Spec` / `supervisor/2` are deprecated in
  # newer Elixir releases in favour of child-spec maps; kept as-is here to
  # preserve compatibility with the Elixir versions this library targets.
  import Supervisor.Spec
  alias Sentry.{Event, Config}
  require Logger

  @moduledoc """
  Provides the basic functionality to submit a `Sentry.Event` to the Sentry Service.

  ## Configuration

  Add the following to your production config

      config :sentry, dsn: "https://public:secret@app.getsentry.com/1",
        included_environments: [:prod],
        environment_name: :prod,
        tags: %{
          env: "production"
        }

  The `environment_name` and `included_environments` work together to determine
  if and when Sentry should record exceptions. The `environment_name` is the
  name of the current environment. In the example above, we have explicitly set
  the environment to `:prod` which works well if you are inside an environment
  specific configuration `config/prod.exs`.

  An alternative is to use `Mix.env` in your general configuration file:

      config :sentry, dsn: "https://public:secret@app.getsentry.com/1",
        included_environments: [:prod],
        environment_name: Mix.env

  This will set the environment name to whatever the current Mix environment
  atom is, but it will only send events if the current environment is `:prod`,
  since that is the only entry in the `included_environments` key.

  You can even rely on more custom determinations of the environment name. It's
  not uncommmon for most applications to have a "staging" environment. In order
  to handle this without adding an additional Mix environment, you can set an
  environment variable that determines the release level.

      config :sentry, dsn: "https://public:secret@app.getsentry.com/1",
        included_environments: ~w(production staging),
        environment_name: System.get_env("RELEASE_LEVEL") || "development"

  In this example, we are getting the environment name from the `RELEASE_LEVEL`
  environment variable. If that variable does not exist, we default to `"development"`.
  Now, on our servers, we can set the environment variable appropriately. On
  our local development machines, exceptions will never be sent, because the
  default value is not in the list of `included_environments`.

  ## Filtering Exceptions

  If you would like to prevent certain exceptions, the `:filter` configuration option
  allows you to implement the `Sentry.EventFilter` behaviour. The first argument is the
  exception to be sent, and the second is the source of the event. `Sentry.Plug`
  will have a source of `:plug`, and `Sentry.Logger` will have a source of `:logger`.
  If an exception does not come from either of those sources, the source will be nil
  unless the `:event_source` option is passed to `Sentry.capture_exception/2`

  A configuration like below will prevent sending `Phoenix.Router.NoRouteError` from `Sentry.Plug`, but
  allows other exceptions to be sent.

      # sentry_event_filter.ex
      defmodule MyApp.SentryEventFilter do
        @behaviour Sentry.EventFilter

        def exclude_exception?(%Elixir.Phoenix.Router.NoRouteError{}, :plug), do: true
        def exclude_exception?(_exception, _source), do: false
      end

      # config.exs
      config :sentry, filter: MyApp.SentryEventFilter,
        included_environments: ~w(production staging),
        environment_name: System.get_env("RELEASE_LEVEL") || "development"

  ## Capturing Exceptions

  Simply calling `capture_exception/2` will send the event. By default, the event is sent asynchronously and the result can be awaited upon. The `:result` option can be used to change this behavior. See `Sentry.Client.send_event/2` for more information.

      {:ok, task} = Sentry.capture_exception(my_exception)
      {:ok, event_id} = Task.await(task)
      {:ok, another_event_id} = Sentry.capture_exception(other_exception, [event_source: :my_source, result: :sync])

  ### Options

  * `:event_source` - The source passed as the first argument to `Sentry.EventFilter.exclude_exception?/2`

  ## Configuring The `Logger` Backend

  See `Sentry.Logger`
  """

  # Result of an attempted submission: async task handle, client error,
  # filtered out by the EventFilter, or skipped (environment/unparseable).
  @type task :: {:ok, Task.t} | :error | :excluded | :ignored

  def start(_type, _opts) do
    # One task supervisor for async sends, plus a dedicated hackney pool
    # sized/timed from the application config for HTTP submissions.
    children = [
      supervisor(Task.Supervisor, [[name: Sentry.TaskSupervisor]]),
      :hackney_pool.child_spec(Sentry.Client.hackney_pool_name(), [timeout: Config.hackney_timeout(), max_connections: Config.max_hackney_connections()])
    ]

    opts = [strategy: :one_for_one, name: Sentry.Supervisor]
    Supervisor.start_link(children, opts)
  end

  @doc """
  Parses and submits an exception to Sentry if current environment is in included_environments.

  Returns `:excluded` when the configured `Sentry.EventFilter` rejects the
  exception for the given `:event_source`.

  `opts` argument is passed as the second argument to `Sentry.send_event/2`.
  """
  @spec capture_exception(Exception.t, Keyword.t) :: task
  def capture_exception(exception, opts \\ []) do
    filter_module = Config.filter()
    # :event_source is consumed here (for filtering) and not forwarded.
    {source, opts} = Keyword.pop(opts, :event_source)

    if filter_module.exclude_exception?(exception, source) do
      :excluded
    else
      exception
      |> Event.transform_exception(opts)
      |> send_event(opts)
    end
  end

  @doc """
  Reports a message to Sentry.

  `opts` argument is passed as the second argument to `Sentry.send_event/2`.
  """
  @spec capture_message(String.t, Keyword.t) :: task
  def capture_message(message, opts \\ []) do
    opts
    |> Keyword.put(:message, message)
    |> Event.create_event()
    |> send_event(opts)
  end

  @doc """
  Sends a `Sentry.Event`

  `opts` argument is passed as the second argument to `send_event/2` of the configured `Sentry.HTTPClient`. See `Sentry.Client.send_event/2` for more information.
  """
  @spec send_event(Event.t, Keyword.t) :: task
  def send_event(event, opts \\ [])

  # An event with neither a message nor an exception carries no useful
  # payload, so it is logged and dropped rather than submitted.
  def send_event(%Event{message: nil, exception: nil}, _opts) do
    Logger.warn("Sentry: unable to parse exception")
    :ignored
  end

  def send_event(%Event{} = event, opts) do
    included_environments = Config.included_environments()
    environment_name = Config.environment_name()
    client = Config.client()

    # Only submit when the configured environment opts in; otherwise the
    # event is silently ignored (e.g. dev/test).
    if environment_name in included_environments do
      client.send_event(event, opts)
    else
      :ignored
    end
  end
end
| 38.024845
| 256
| 0.717903
|
9e2658f4019c0e83baf6d746dd068a43336dad92
| 1,386
|
ex
|
Elixir
|
test/support/data_case.ex
|
MattIII/CastBug
|
aad9eabce5af4a80dd0f4383683746a1f518e377
|
[
"MIT"
] | 1
|
2019-03-14T03:48:29.000Z
|
2019-03-14T03:48:29.000Z
|
test/support/data_case.ex
|
MattIII/CastBug
|
aad9eabce5af4a80dd0f4383683746a1f518e377
|
[
"MIT"
] | null | null | null |
test/support/data_case.ex
|
MattIII/CastBug
|
aad9eabce5af4a80dd0f4383683746a1f518e377
|
[
"MIT"
] | null | null | null |
defmodule CastBug.DataCase do
  @moduledoc """
  Test-case template for tests that touch the application's data layer.

  Using this module checks out a sandboxed database connection per test and
  imports the Ecto helpers plus the utilities defined below. Tests that are
  not `async` share their connection with spawned processes; async tests run
  in their own transaction.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      alias CastBug.Repo

      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import CastBug.DataCase
    end
  end

  setup tags do
    # Every test gets its own sandboxed connection.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(CastBug.Repo)

    # Non-async tests switch to shared mode so spawned processes
    # (e.g. LiveView/GenServer under test) reuse this connection.
    if !tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(CastBug.Repo, {:shared, self()})
    end

    :ok
  end

  @doc """
  A helper that transforms changeset errors into a map of messages.

      assert {:error, changeset} = Accounts.create_user(%{password: "short"})
      assert "password is too short" in errors_on(changeset).password
      assert %{password: ["password is too short"]} = errors_on(changeset)
  """
  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {template, interpolations} ->
      # Substitute each %{name} placeholder with its bound value.
      Enum.reduce(interpolations, template, fn {name, value}, message ->
        String.replace(message, "%{#{name}}", to_string(value))
      end)
    end)
  end
end
| 25.666667
| 77
| 0.680375
|
9e267958cecce5ed341a00a98e6c04124ad61570
| 85
|
ex
|
Elixir
|
lib/team_vacation_tool/repo.ex
|
pchmiele/team-vacation-tool
|
862243a94e6b95639629f4ed9653bc81b6522add
|
[
"Apache-2.0"
] | null | null | null |
lib/team_vacation_tool/repo.ex
|
pchmiele/team-vacation-tool
|
862243a94e6b95639629f4ed9653bc81b6522add
|
[
"Apache-2.0"
] | null | null | null |
lib/team_vacation_tool/repo.ex
|
pchmiele/team-vacation-tool
|
862243a94e6b95639629f4ed9653bc81b6522add
|
[
"Apache-2.0"
] | null | null | null |
defmodule TeamVacationTool.Repo do
  @moduledoc """
  Ecto repository for the TeamVacationTool application.

  Database configuration is read from the `:team_vacation_tool` OTP
  application environment.
  """
  use Ecto.Repo, otp_app: :team_vacation_tool
end
| 21.25
| 45
| 0.823529
|
9e26a0df75bc8837a9110a4df5a15415b30301a0
| 420
|
exs
|
Elixir
|
test/day05_test.exs
|
s22su/advent-of-code-2021
|
1cb0b9f08079641e32890e00b0a9b41c780724b4
|
[
"MIT"
] | null | null | null |
test/day05_test.exs
|
s22su/advent-of-code-2021
|
1cb0b9f08079641e32890e00b0a9b41c780724b4
|
[
"MIT"
] | null | null | null |
test/day05_test.exs
|
s22su/advent-of-code-2021
|
1cb0b9f08079641e32890e00b0a9b41c780724b4
|
[
"MIT"
] | null | null | null |
defmodule AdventOfCode.Day05Test do
  use ExUnit.Case, async: true

  # Both parts are exercised against the shared day-5 fixture file; the
  # expected answers (5 and 12) match the known example results for that input.
  describe "part1/1" do
    test "solves the puzzle" do
      input = File.read!("test/input_data/d05.txt")

      assert AdventOfCode.Day05.part1(input) == 5
    end
  end

  describe "part2/1" do
    test "solves the puzzle" do
      input = File.read!("test/input_data/d05.txt")

      assert AdventOfCode.Day05.part2(input) == 12
    end
  end
end
| 21
| 51
| 0.661905
|
9e26e3e6b4cd80de39575aa70e49566f67d0c056
| 1,799
|
ex
|
Elixir
|
clients/service_directory/lib/google_api/service_directory/v1beta1/model/set_iam_policy_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/service_directory/lib/google_api/service_directory/v1beta1/model/set_iam_policy_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/service_directory/lib/google_api/service_directory/v1beta1/model/set_iam_policy_request.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceDirectory.V1beta1.Model.SetIamPolicyRequest do
  @moduledoc """
  Request message for `SetIamPolicy` method.

  ## Attributes

  *   `policy` (*type:* `GoogleApi.ServiceDirectory.V1beta1.Model.Policy.t`, *default:* `nil`) - REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform services (such as Projects) might reject them.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :policy => GoogleApi.ServiceDirectory.V1beta1.Model.Policy.t() | nil
        }

  field(:policy, as: GoogleApi.ServiceDirectory.V1beta1.Model.Policy)
end

# JSON decoding delegates to the decode/2 generated by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.ServiceDirectory.V1beta1.Model.SetIamPolicyRequest do
  def decode(value, options) do
    GoogleApi.ServiceDirectory.V1beta1.Model.SetIamPolicyRequest.decode(value, options)
  end
end

# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceDirectory.V1beta1.Model.SetIamPolicyRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.276596
| 321
| 0.759311
|
9e26e8a8993c1cc11313e43ab51f654856863285
| 315
|
ex
|
Elixir
|
web/views/tp_stat_view.ex
|
zombalo/cgrates_web_jsonapi
|
47845be4311839fe180cc9f2c7c6795649da4430
|
[
"MIT"
] | null | null | null |
web/views/tp_stat_view.ex
|
zombalo/cgrates_web_jsonapi
|
47845be4311839fe180cc9f2c7c6795649da4430
|
[
"MIT"
] | null | null | null |
web/views/tp_stat_view.ex
|
zombalo/cgrates_web_jsonapi
|
47845be4311839fe180cc9f2c7c6795649da4430
|
[
"MIT"
] | null | null | null |
defmodule CgratesWebJsonapi.TpStatView do
  @moduledoc """
  JSON:API serializer for TpStat records.
  """
  use CgratesWebJsonapi.Web, :view
  use JaSerializer.PhoenixView

  attributes ~w[id tpid tenant custom_id filter_ids queue_length weight activation_interval
    ttl metrics parameters blocker stored min_items threshold_ids]a

  # The record's primary key column is `pk`, so expose it as the JSON:API id.
  def id(stat), do: stat.pk
end
| 31.5
| 91
| 0.780952
|
9e275a756956a0d86ad9ddcb0200929ab0859c96
| 953
|
exs
|
Elixir
|
test/file_store/adapters/disk_test.exs
|
warmwaffles/file_store
|
6b6f4cf4ef036b616915ce0dffd5de05e0b021de
|
[
"MIT"
] | 17
|
2019-10-11T05:50:28.000Z
|
2022-01-25T23:28:02.000Z
|
test/file_store/adapters/disk_test.exs
|
warmwaffles/file_store
|
6b6f4cf4ef036b616915ce0dffd5de05e0b021de
|
[
"MIT"
] | 8
|
2019-10-25T04:19:42.000Z
|
2022-01-27T02:32:47.000Z
|
test/file_store/adapters/disk_test.exs
|
warmwaffles/file_store
|
6b6f4cf4ef036b616915ce0dffd5de05e0b021de
|
[
"MIT"
] | 3
|
2019-10-25T02:30:22.000Z
|
2022-01-24T21:24:08.000Z
|
defmodule FileStore.Adapters.DiskTest do
  use FileStore.AdapterCase
  alias FileStore.Adapters.Disk

  # Expected URL (sans query string) for the key "foo" under the base URL below.
  @url "http://localhost:4000/foo"

  setup %{tmp: tmp} do
    # Each test gets a Disk store rooted at the per-test tmp directory
    # provided by AdapterCase.
    {:ok, store: Disk.new(storage_path: tmp, base_url: "http://localhost:4000")}
  end

  # omit_query/1 and get_query/2 are presumably helpers imported via
  # FileStore.AdapterCase -- confirm there (not visible in this file).
  test "get_public_url/3 with query params", %{store: store} do
    opts = [content_type: "text/plain", disposition: "attachment"]
    url = FileStore.get_public_url(store, "foo", opts)

    # Options are carried as query parameters; the path is unchanged.
    assert omit_query(url) == @url
    assert get_query(url, "content_type") == "text/plain"
    assert get_query(url, "disposition") == "attachment"
  end

  test "get_signed_url/3 with query params", %{store: store} do
    opts = [content_type: "text/plain", disposition: "attachment"]

    assert {:ok, url} = FileStore.get_signed_url(store, "foo", opts)
    assert omit_query(url) == @url
    assert get_query(url, "content_type") == "text/plain"
    assert get_query(url, "disposition") == "attachment"
  end
end
| 34.035714
| 80
| 0.687303
|
9e27910ba7c2c82bf482272d2b437df560cb9c9c
| 1,689
|
ex
|
Elixir
|
phoenix158/my-phoenix-json-api/lib/my_app_web/controllers/user_controller.ex
|
salbador/oldphp7_vs_laravel7_vs_phoenix1.5.8_vs_phalcon4
|
294b8668dc4940c07d6dde198f02b38100a1dc00
|
[
"MIT"
] | null | null | null |
phoenix158/my-phoenix-json-api/lib/my_app_web/controllers/user_controller.ex
|
salbador/oldphp7_vs_laravel7_vs_phoenix1.5.8_vs_phalcon4
|
294b8668dc4940c07d6dde198f02b38100a1dc00
|
[
"MIT"
] | null | null | null |
phoenix158/my-phoenix-json-api/lib/my_app_web/controllers/user_controller.ex
|
salbador/oldphp7_vs_laravel7_vs_phoenix1.5.8_vs_phalcon4
|
294b8668dc4940c07d6dde198f02b38100a1dc00
|
[
"MIT"
] | null | null | null |
defmodule MyAppWeb.UserController do
  @moduledoc """
  JSON controller for user CRUD plus session-based sign-in.

  Error tuples that escape the `with` blocks are handled by
  `MyAppWeb.FallbackController`.
  """
  use MyAppWeb, :controller

  alias MyApp.Account
  alias MyApp.Account.User

  action_fallback MyAppWeb.FallbackController

  # GET /users — lists all users.
  def index(conn, _params) do
    users = Account.list_users()
    render(conn, "index.json", users: users)
  end

  # POST /users — creates a user; replies 201 with a Location header.
  def create(conn, %{"user" => user_params}) do
    with {:ok, %User{} = user} <- Account.create_user(user_params) do
      conn
      |> put_status(:created)
      |> put_resp_header("location", Routes.user_path(conn, :show, user))
      |> render("show.json", user: user)
    end
  end

  # GET /users/:id — raises (-> 404 via Ecto) when the id is unknown.
  def show(conn, %{"id" => id}) do
    user = Account.get_user!(id)
    render(conn, "show.json", user: user)
  end

  # PUT/PATCH /users/:id
  def update(conn, %{"id" => id, "user" => user_params}) do
    user = Account.get_user!(id)

    with {:ok, %User{} = user} <- Account.update_user(user, user_params) do
      render(conn, "show.json", user: user)
    end
  end

  # DELETE /users/:id — replies 204 with an empty body on success.
  def delete(conn, %{"id" => id}) do
    user = Account.get_user!(id)

    with {:ok, %User{}} <- Account.delete_user(user) do
      send_resp(conn, :no_content, "")
    end
  end

  # POST /sign_in — on success stores the user id in the (renewed) session;
  # on failure clears it and replies 401.
  def sign_in(conn, %{"email" => email, "password" => password}) do
    case MyApp.Account.authenticate_user(email, password) do
      {:ok, user} ->
        conn
        |> put_session(:current_user_id, user.id)
        |> configure_session(renew: true)
        |> put_status(:ok)
        |> put_view(MyAppWeb.UserView)
        |> render("sign_in.json", user: user)

      {:error, message} ->
        conn
        |> delete_session(:current_user_id)
        |> put_status(:unauthorized)
        |> put_view(MyAppWeb.ErrorView)
        |> render("401.json", message: message)
    end
  end
end
| 26.809524
| 75
| 0.612789
|
9e27c59b981527c6704f634a2fb22c72cb77438f
| 3,148
|
exs
|
Elixir
|
mix.exs
|
basbz/appsignal-elixir
|
7c9869fd1f31032f3479baded8aff0fd16318dd3
|
[
"MIT"
] | 1
|
2021-04-21T05:25:44.000Z
|
2021-04-21T05:25:44.000Z
|
mix.exs
|
basbz/appsignal-elixir
|
7c9869fd1f31032f3479baded8aff0fd16318dd3
|
[
"MIT"
] | null | null | null |
mix.exs
|
basbz/appsignal-elixir
|
7c9869fd1f31032f3479baded8aff0fd16318dd3
|
[
"MIT"
] | null | null | null |
defmodule Mix.Tasks.Compile.Appsignal do
  @moduledoc """
  Custom compiler step that installs the AppSignal agent before the
  application itself compiles (wired in via the `:appsignal` entry in
  `compilers/1` below).
  """
  use Mix.Task

  def run(_args) do
    # Load the helper module from the package root, then run the installer.
    {_, _} = Code.eval_file("mix_helpers.exs")
    Mix.Appsignal.Helper.install()
    {:ok, []}
  end
end
defmodule Appsignal.Mixfile do
  use Mix.Project

  def project do
    [
      app: :appsignal,
      version: "1.12.0-beta.1",
      name: "AppSignal",
      description: description(),
      package: package(),
      source_url: "https://github.com/appsignal/appsignal-elixir",
      homepage_url: "https://appsignal.com",
      # Phoenix-specific tests live in their own env/paths (see test_paths/1).
      test_paths: test_paths(Mix.env()),
      elixir: "~> 1.0",
      compilers: compilers(Mix.env()),
      elixirc_paths: elixirc_paths(Mix.env()),
      deps: deps(),
      docs: [main: "Appsignal", logo: "logo.png"],
      dialyzer: [
        plt_add_deps: :transitive,
        plt_add_apps: [:mix],
        ignore_warnings: "dialyzer.ignore-warnings"
      ]
    ]
  end

  defp description do
    "Collects error and performance data from your Elixir applications and sends it to AppSignal"
  end

  defp package do
    %{
      files: [
        "lib",
        "c_src/*.[ch]",
        "mix.exs",
        "mix_helpers.exs",
        "*.md",
        "LICENSE",
        "Makefile",
        "agent.exs",
        "priv/cacert.pem"
      ],
      maintainers: ["Jeff Kreeftmeijer", "Tom de Bruijn"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/appsignal/appsignal-elixir"}
    }
  end

  def application do
    [mod: {Appsignal, []}, applications: [:logger, :decorator, :hackney]]
  end

  # The custom :appsignal compiler (agent install) runs for every env except
  # :test_phoenix, which additionally needs the :phoenix compiler.
  defp compilers(:test_phoenix), do: [:phoenix] ++ compilers(:prod)
  defp compilers(_), do: [:appsignal] ++ Mix.compilers()

  defp test_paths(:test_phoenix), do: ["test/appsignal", "test/mix", "test/phoenix"]
  defp test_paths(_), do: ["test/appsignal", "test/mix"]

  defp elixirc_paths(env) do
    # Test-ish envs also compile the support helpers.
    case test?(env) do
      true -> ["lib", "test/support"]
      false -> ["lib"]
    end
  end

  defp test?(:test), do: true
  defp test?(:test_phoenix), do: true
  defp test?(:test_no_nif), do: true
  defp test?(:bench), do: true
  defp test?(_), do: false

  defp deps do
    # Dependency version ranges are relaxed on newer Elixir releases:
    # poison 4.x needs Elixir >= 1.6, phoenix 1.4+ needs Elixir >= 1.4.
    system_version = System.version()

    poison_version =
      case Version.compare(system_version, "1.6.0") do
        :lt -> ">= 1.3.0 and < 4.0.0"
        _ -> ">= 1.3.0"
      end

    phoenix_version =
      case Version.compare(system_version, "1.4.0") do
        :lt -> ">= 1.2.0 and < 1.4.0"
        _ -> ">= 1.2.0"
      end

    [
      {:benchee, "~> 1.0", only: :bench},
      {:hackney, "~> 1.6"},
      {:jason, "~> 1.0", optional: true},
      {:poison, poison_version, optional: true},
      {:decorator, "~> 1.2.3"},
      {:plug, ">= 1.1.0", optional: true},
      {:phoenix, phoenix_version, optional: true, only: [:prod, :test_phoenix, :dev]},
      {:bypass, "~> 0.6.0", only: [:test, :test_phoenix, :test_no_nif]},
      {:plug_cowboy, "~> 1.0", only: [:test, :test_phoenix, :test_no_nif]},
      {:ex_doc, "~> 0.12", only: :dev, runtime: false},
      {:credo, "~> 1.0.0", only: [:test, :dev], runtime: false},
      {:dialyxir, "~> 1.0.0-rc4", only: [:dev], runtime: false},
      {:telemetry, "~> 0.4"}
    ]
  end
end
| 27.373913
| 97
| 0.563215
|
9e27ea73880cf82572dcd2dd688a2a57818196ff
| 71
|
ex
|
Elixir
|
food_poisoning/web/views/page_view.ex
|
javflores/food-poison
|
6b7503c5830c31044026655846e8009f933fe0f4
|
[
"MIT"
] | null | null | null |
food_poisoning/web/views/page_view.ex
|
javflores/food-poison
|
6b7503c5830c31044026655846e8009f933fe0f4
|
[
"MIT"
] | null | null | null |
food_poisoning/web/views/page_view.ex
|
javflores/food-poison
|
6b7503c5830c31044026655846e8009f933fe0f4
|
[
"MIT"
] | null | null | null |
defmodule FoodPoisoning.PageView do
  @moduledoc """
  View for the static page templates (`web/templates/page/`).
  """
  use FoodPoisoning.Web, :view
end
| 17.75
| 35
| 0.816901
|
9e280635c80b9ef959850845ac1d3e803ff89669
| 2,154
|
exs
|
Elixir
|
config/dev.exs
|
kriips/jalka2021
|
f4d968e20cae116fd4056bff2f937cd036421977
|
[
"MIT"
] | null | null | null |
config/dev.exs
|
kriips/jalka2021
|
f4d968e20cae116fd4056bff2f937cd036421977
|
[
"MIT"
] | null | null | null |
config/dev.exs
|
kriips/jalka2021
|
f4d968e20cae116fd4056bff2f937cd036421977
|
[
"MIT"
] | null | null | null |
use Mix.Config

# Configure your database
# show_sensitive_data_on_connection_error is safe here because this file is
# only loaded in the :dev environment.
config :jalka2021, Jalka2021.Repo,
  username: "postgres",
  password: "postgres",
  database: "jalka2021_dev",
  hostname: "localhost",
  show_sensitive_data_on_connection_error: true,
  pool_size: 10

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :jalka2021, Jalka2021Web.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [
    node: [
      "node_modules/webpack/bin/webpack.js",
      "--mode",
      "development",
      "--watch-stdin",
      cd: Path.expand("../assets", __DIR__)
    ]
  ]

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
#     mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
#     https: [
#       port: 4001,
#       cipher_suite: :strong,
#       keyfile: "priv/cert/selfsigned_key.pem",
#       certfile: "priv/cert/selfsigned.pem"
#     ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static and templates for browser reloading.
config :jalka2021, Jalka2021Web.Endpoint,
  live_reload: [
    patterns: [
      ~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
      ~r"priv/gettext/.*(po)$",
      ~r"lib/jalka2021_web/(live|views)/.*(ex)$",
      ~r"lib/jalka2021_web/templates/.*(eex)$"
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 27.974026
| 68
| 0.693129
|
9e286e11f24eb733ee0d7d5aef4bb8118cb2262e
| 1,409
|
ex
|
Elixir
|
clients/service_networking/lib/google_api/service_networking/v1/model/partial_delete_connection_metadata.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/service_networking/lib/google_api/service_networking/v1/model/partial_delete_connection_metadata.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/service_networking/lib/google_api/service_networking/v1/model/partial_delete_connection_metadata.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceNetworking.V1.Model.PartialDeleteConnectionMetadata do
  @moduledoc """
  Metadata provided through GetOperation request for the LRO generated by Partial Delete Connection API

  ## Attributes

  This message has no fields.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{}
end

# JSON decoding delegates to the decode/2 generated by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.ServiceNetworking.V1.Model.PartialDeleteConnectionMetadata do
  def decode(value, options) do
    GoogleApi.ServiceNetworking.V1.Model.PartialDeleteConnectionMetadata.decode(value, options)
  end
end

# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceNetworking.V1.Model.PartialDeleteConnectionMetadata do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.547619
| 103
| 0.782115
|
9e289b5903ce8c9436c9acbf0bec6dc16bf0b4a4
| 1,094
|
ex
|
Elixir
|
test/fixtures/version_helper.ex
|
NFIBrokerage/spear
|
611223d2cdba97daa63f2ddd81a08b4f55e72198
|
[
"Apache-2.0"
] | 34
|
2021-04-15T22:16:38.000Z
|
2022-03-11T10:20:11.000Z
|
test/fixtures/version_helper.ex
|
NFIBrokerage/spear
|
611223d2cdba97daa63f2ddd81a08b4f55e72198
|
[
"Apache-2.0"
] | 32
|
2021-04-05T18:07:35.000Z
|
2021-11-04T19:32:58.000Z
|
test/fixtures/version_helper.ex
|
NFIBrokerage/spear
|
611223d2cdba97daa63f2ddd81a08b4f55e72198
|
[
"Apache-2.0"
] | 1
|
2021-10-11T15:53:42.000Z
|
2021-10-11T15:53:42.000Z
|
defmodule VersionHelper do
  @moduledoc """
  Provides a function that tags tests depending on their compatibilty with the
  EventStoreDB version declared in the env
  """

  # All of the following runs at COMPILE time: the EVENTSTORE_VERSION env var
  # is read and parsed once, and the result is frozen into @version.
  version =
    case System.get_env("EVENTSTORE_VERSION") do
      nil -> :error
      version -> {:ok, version}
    end

  version =
    with {:ok, version} <- version,
         [capture] <- Regex.run(~r"\d[\d\.]+", version) do
      capture
    else
      nil ->
        # if the regex doesn't match then we're using the CI/nightly image
        :nightly

      :error ->
        raise "Could not parse the eventstore version! Set the EVENTSTORE_VERSION environment variable."
    end

  # Either a "x.y.z"-style string or the :nightly atom.
  @version version

  defp version, do: @version

  # Returns :version_compatible / :version_incompatible for use as a test tag.
  # `pattern` is either :nightly or a Version requirement string; a :nightly
  # build is treated as compatible with any string requirement.
  def compatible(pattern) do
    cond do
      pattern == :nightly and version() == :nightly ->
        :version_compatible

      not is_binary(pattern) ->
        :version_incompatible

      version() == :nightly ->
        :version_compatible

      Version.match?(version(), pattern) ->
        :version_compatible

      true ->
        :version_incompatible
    end
  end
end
| 22.326531
| 104
| 0.619744
|
9e28aefc9d075779bb43d45e8ab28838d4afffdb
| 709
|
exs
|
Elixir
|
mix.exs
|
bdanklin/cowlib
|
ed073545b95032a37117a5473d0d04b3fca1ed16
|
[
"ISC"
] | null | null | null |
mix.exs
|
bdanklin/cowlib
|
ed073545b95032a37117a5473d0d04b3fca1ed16
|
[
"ISC"
] | null | null | null |
mix.exs
|
bdanklin/cowlib
|
ed073545b95032a37117a5473d0d04b3fca1ed16
|
[
"ISC"
] | null | null | null |
defmodule Cowlib.Mixfile do
  use Mix.Project

  @version "2.11.1"

  # Mix project definition for the remedy_cowlib hex package.
  def project do
    [
      app: :cowlib,
      version: @version,
      description: description(),
      deps: deps(),
      package: package()
    ]
  end

  defp description do
    """
    HTTP/1.1, HTTP/2 and Websocket client for Erlang/OTP.
    """
  end

  defp deps do
    # ex_doc is only needed to build documentation locally.
    [{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}]
  end

  # Hex package metadata; published under the remedy_cowlib name.
  defp package do
    [
      name: "remedy_cowlib",
      maintainers: ["Benjamin Danklin"],
      licenses: ["ISC"],
      links: %{"GitHub" => "https://github.com/bdanklin/cowlib"},
      files: ~w(erlang.mk LICENSE Makefile README.asciidoc rebar.config src)
    ]
  end
end
| 18.657895
| 91
| 0.544429
|
9e28ff53484c92977e932d52967f2536924745ef
| 2,537
|
ex
|
Elixir
|
test/support/test_adapter.ex
|
surgeventures/spandex_phoenix
|
f77bc73b1718deb3d553fd6d171ac23191c09331
|
[
"MIT"
] | null | null | null |
test/support/test_adapter.ex
|
surgeventures/spandex_phoenix
|
f77bc73b1718deb3d553fd6d171ac23191c09331
|
[
"MIT"
] | null | null | null |
test/support/test_adapter.ex
|
surgeventures/spandex_phoenix
|
f77bc73b1718deb3d553fd6d171ac23191c09331
|
[
"MIT"
] | null | null | null |
defmodule TestAdapter do
  @moduledoc false
  @behaviour Spandex.Adapter

  require Logger

  alias Spandex.{SpanContext, Tracer}

  # Largest 63-bit signed integer; IDs are drawn uniformly from 1..@max_id.
  @max_id 9_223_372_036_854_775_807

  @impl Spandex.Adapter
  def trace_id, do: :rand.uniform(@max_id)

  @impl Spandex.Adapter
  def span_id, do: trace_id()

  @impl Spandex.Adapter
  def now, do: :os.system_time(:nano_seconds)

  @impl Spandex.Adapter
  def default_sender, do: TestSender

  @doc """
  Fetches the test trace & parent IDs from the conn request headers
  if they are present.
  """
  @impl Spandex.Adapter
  @spec distributed_context(conn :: Plug.Conn.t(), Keyword.t()) ::
          {:ok, SpanContext.t()}
          | {:error, :no_distributed_trace}
  def distributed_context(%Plug.Conn{} = conn, _opts) do
    context = %SpanContext{
      trace_id: get_first_header(conn, "x-test-trace-id"),
      parent_id: get_first_header(conn, "x-test-parent-id"),
      # We default the priority to 1 so that we capture all traces by default until we implement trace sampling
      priority: get_first_header(conn, "x-test-sampling-priority") || 1
    }

    # Both IDs must be present for a distributed trace to exist.
    case context do
      %SpanContext{trace_id: nil} -> {:error, :no_distributed_trace}
      %SpanContext{parent_id: nil} -> {:error, :no_distributed_trace}
      _ -> {:ok, context}
    end
  end

  @doc """
  Injects test HTTP headers to represent the specified SpanContext
  """
  @impl Spandex.Adapter
  @spec inject_context(Spandex.headers(), SpanContext.t(), Tracer.opts()) :: Spandex.headers()
  def inject_context(headers, %SpanContext{} = span_context, _opts) when is_list(headers) do
    # Prepend the tracing headers to the existing header list.
    tracing_headers(span_context) ++ headers
  end

  def inject_context(headers, %SpanContext{} = span_context, _opts) when is_map(headers) do
    # Existing entries in `headers` win over the generated tracing headers.
    span_context
    |> tracing_headers()
    |> Map.new()
    |> Map.merge(headers)
  end

  # Private Helpers

  @spec get_first_header(conn :: Plug.Conn.t(), header_name :: binary) :: binary | nil
  defp get_first_header(conn, header_name) do
    case Plug.Conn.get_req_header(conn, header_name) do
      [value | _rest] -> parse_header(value)
      [] -> nil
    end
  end

  # Headers carry integers as strings; anything unparsable becomes nil.
  defp parse_header(value) when is_bitstring(value) do
    case Integer.parse(value) do
      {parsed, _remainder} -> parsed
      :error -> nil
    end
  end

  defp parse_header(_other), do: nil

  defp tracing_headers(%SpanContext{} = ctx) do
    [
      {"x-test-trace-id", to_string(ctx.trace_id)},
      {"x-test-parent-id", to_string(ctx.parent_id)},
      {"x-test-sampling-priority", to_string(ctx.priority)}
    ]
  end
end
| 28.188889
| 109
| 0.684667
|
9e290f4b10801ed4ee1c4637fff202dd4334358e
| 126,097
|
ex
|
Elixir
|
clients/game_services/lib/google_api/game_services/v1beta/api/projects.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/game_services/lib/google_api/game_services/v1beta/api/projects.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/game_services/lib/google_api/game_services/v1beta/api/projects.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GameServices.V1beta.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.GameServices.V1beta.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets information about a location.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Resource name for the location.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Location{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Location.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_get(connection, name, optional_params \\ [], opts \\ []) do
    # Maps each supported optional parameter to the part of the request it belongs in.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }

    # Build a GET request; `{+name}` is expanded with the URL-encoded resource name.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1beta/{+name}", %{
        "name" => URI.encode(name, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the response body into the expected model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Location{}])
  end
@doc """
Lists information about the supported locations for this service.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The resource that owns the locations collection, if applicable.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - A filter to narrow down results to a preferred subset. The filtering language accepts strings like "displayName=tokyo", and is documented in more detail in [AIP-160](https://google.aip.dev/160).
* `:includeUnrevealedLocations` (*type:* `boolean()`) - If true, the returned list will include locations which are not yet revealed.
* `:pageSize` (*type:* `integer()`) - The maximum number of results to return. If not set, the service selects a default.
* `:pageToken` (*type:* `String.t`) - A page token received from the `next_page_token` field in the response. Send that page token to receive the subsequent page.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.ListLocationsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.GameServices.V1beta.Model.ListLocationsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_list(connection, name, optional_params \\ [], opts \\ []) do
    # Maps each supported optional parameter to the part of the request it belongs in.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :filter => :query,
      :includeUnrevealedLocations => :query,
      :pageSize => :query,
      :pageToken => :query
    }

    # Build a GET request; `{+name}` is expanded with the URL-encoded parent resource.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1beta/{+name}/locations", %{
        "name" => URI.encode(name, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the response body into the expected model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.ListLocationsResponse{}]
    )
  end
@doc """
Creates a new game server deployment in a given project and location.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: `projects/{project}/locations/{location}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:deploymentId` (*type:* `String.t`) - Required. The ID of the game server delpoyment resource to be created.
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerDeployment.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_create(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_game_server_deployments_create(
        connection,
        parent,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to the part of the request it belongs in
    # (note :body is sent as the request payload, not a query parameter).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :deploymentId => :query,
      :body => :body
    }

    # Build a POST request; `{+parent}` is expanded with the URL-encoded parent resource.
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v1beta/{+parent}/gameServerDeployments", %{
        "parent" => URI.encode(parent, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the long-running Operation from the response.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
  end
@doc """
Deletes a single game server deployment.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server delpoyment to delete, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_delete(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_game_server_deployments_delete(
        connection,
        name,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to the part of the request it belongs in.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }

    # Build a DELETE request; `{+name}` is expanded with the URL-encoded resource name.
    request =
      Request.new()
      |> Request.method(:delete)
      |> Request.url("/v1beta/{+name}", %{
        "name" => URI.encode(name, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the long-running Operation from the response.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
  end
@doc """
Retrieves information about the current state of the game server deployment. Gathers all the Agones fleets and Agones autoscalers, including fleets running an older version of the game server deployment.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server delpoyment, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.FetchDeploymentStateRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.FetchDeploymentStateResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_fetch_deployment_state(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.FetchDeploymentStateResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_game_server_deployments_fetch_deployment_state(
        connection,
        name,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to the part of the request it belongs in
    # (note :body is sent as the request payload, not a query parameter).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    # Build a POST request against the custom `:fetchDeploymentState` verb on the resource.
    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/v1beta/{+name}:fetchDeploymentState", %{
        "name" => URI.encode(name, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the response body into the expected model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.FetchDeploymentStateResponse{}]
    )
  end
@doc """
Gets details of a single game server deployment.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server delpoyment to retrieve, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.GameServerDeployment{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_get(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.GameServerDeployment.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_game_server_deployments_get(
        connection,
        name,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to the part of the request it belongs in.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }

    # Build a GET request; `{+name}` is expanded with the URL-encoded resource name.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1beta/{+name}", %{
        "name" => URI.encode(name, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the response body into the expected model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.GameServerDeployment{}]
    )
  end
@doc """
Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `resource` (*type:* `String.t`) - REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:"options.requestedPolicyVersion"` (*type:* `integer()`) - Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_get_iam_policy(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Policy.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_game_server_deployments_get_iam_policy(
        connection,
        resource,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to the part of the request it belongs in.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :"options.requestedPolicyVersion" => :query
    }

    # Build a GET request against the custom `:getIamPolicy` verb on the resource.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1beta/{+resource}:getIamPolicy", %{
        "resource" => URI.encode(resource, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the IAM Policy from the response.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Policy{}])
  end
@doc """
Gets details a single game server deployment rollout.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server delpoyment to retrieve, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/rollout`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.GameServerDeploymentRollout{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_get_rollout(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.GameServerDeploymentRollout.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_game_server_deployments_get_rollout(
        connection,
        name,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to the part of the request it belongs in.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }

    # Build a GET request for the singleton `rollout` child of the deployment.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1beta/{+name}/rollout", %{
        "name" => URI.encode(name, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the response body into the expected model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.GameServerDeploymentRollout{}]
    )
  end
@doc """
Lists game server deployments in a given project and location.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: `projects/{project}/locations/{location}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Optional. The filter to apply to list results.
* `:orderBy` (*type:* `String.t`) - Optional. Specifies the ordering of results following syntax at https://cloud.google.com/apis/design/design_patterns#sorting_order.
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of items to return. If unspecified, the server will pick an appropriate default. The server may return fewer items than requested. A caller should only rely on response's next_page_token to determine if there are more GameServerDeployments left to be queried.
* `:pageToken` (*type:* `String.t`) - Optional. The next_page_token value returned from a previous List request, if any.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.ListGameServerDeploymentsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.ListGameServerDeploymentsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
  def gameservices_projects_locations_game_server_deployments_list(
        connection,
        parent,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to the part of the request it belongs in.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :filter => :query,
      :orderBy => :query,
      :pageSize => :query,
      :pageToken => :query
    }

    # Build a GET request; `{+parent}` is expanded with the URL-encoded parent resource.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1beta/{+parent}/gameServerDeployments", %{
        "parent" => URI.encode(parent, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute the call and decode the paginated list response into the model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.ListGameServerDeploymentsResponse{}]
    )
  end
@doc """
Patches a game server deployment.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The resource name of the game server deployment, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}`. For example, `projects/my-project/locations/global/gameServerDeployments/my-deployment`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. Mask of fields to update. At least one path must be supplied in this field. For the `FieldMask` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerDeployment.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_patch(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_patch(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query,
    updateMask: :query,
    body: :body
  }

  url_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1beta/{+name}", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Previews the game server deployment rollout. This API does not mutate the rollout resource.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The resource name of the game server deployment rollout, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/rollout`. For example, `projects/my-project/locations/global/gameServerDeployments/my-deployment/rollout`.
* `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the
    standard Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`), the following are supported:
    * `:previewTime` (*type:* `DateTime.t`) - Optional. The target timestamp to compute the preview. Defaults to the immediately after the proposed rollout completes.
    * `:updateMask` (*type:* `String.t`) - Optional. Mask of fields to update. At least one path must be supplied in this field. For the `FieldMask` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
    * `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerDeploymentRollout.t`) -
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.PreviewGameServerDeploymentRolloutResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_preview_rollout(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok,
         GoogleApi.GameServices.V1beta.Model.PreviewGameServerDeploymentRolloutResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_preview_rollout(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query,
    previewTime: :query,
    updateMask: :query,
    body: :body
  }

  url_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1beta/{+name}/rollout:preview", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++
      [
        struct:
          %GoogleApi.GameServices.V1beta.Model.PreviewGameServerDeploymentRolloutResponse{}
      ]
  )
end
@doc """
Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `resource` (*type:* `String.t`) - REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field.
* `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the
    standard Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`), the following are supported:
    * `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.SetIamPolicyRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_set_iam_policy(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.Policy.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_set_iam_policy(
      connection,
      resource,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query,
    body: :body
  }

  url_params = %{"resource" => URI.encode(resource, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1beta/{+resource}:setIamPolicy", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Policy{}])
end
@doc """
Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `resource` (*type:* `String.t`) - REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field.
* `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the
    standard Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`), the following are supported:
    * `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.TestIamPermissionsRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.TestIamPermissionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_test_iam_permissions(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.TestIamPermissionsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_test_iam_permissions(
      connection,
      resource,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query,
    body: :body
  }

  url_params = %{"resource" => URI.encode(resource, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1beta/{+resource}:testIamPermissions", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.TestIamPermissionsResponse{}]
  )
end
@doc """
Patches a single game server deployment rollout. The method will not return an error if the update does not affect any existing realms. For example - if the default_game_server_config is changed but all existing realms use the override, that is valid. Similarly, if a non existing realm is explicitly called out in game_server_config_overrides field, that will also not result in an error.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The resource name of the game server deployment rollout, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/rollout`. For example, `projects/my-project/locations/global/gameServerDeployments/my-deployment/rollout`.
* `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the
    standard Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`), the following are supported:
    * `:updateMask` (*type:* `String.t`) - Required. Mask of fields to update. At least one path must be supplied in this field. For the `FieldMask` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
    * `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerDeploymentRollout.t`) -
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_update_rollout(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_update_rollout(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query,
    updateMask: :query,
    body: :body
  }

  url_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1beta/{+name}/rollout", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Creates a new game server config in a given project, location, and game server deployment. Game server configs are immutable, and are not applied until referenced in the game server deployment rollout resource.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/`.
* `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the
    standard Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`), the following are supported:
    * `:configId` (*type:* `String.t`) - Required. The ID of the game server config resource to be created.
    * `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerConfig.t`) -
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_configs_create(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_configs_create(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query,
    configId: :query,
    body: :body
  }

  url_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1beta/{+parent}/configs", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Deletes a single game server config. The deletion will fail if the game server config is referenced in a game server deployment rollout.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server config to delete, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/configs/{config}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters: the standard
    Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`).
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_configs_delete(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_configs_delete(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query
  }

  url_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/v1beta/{+name}", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Gets details of a single game server config.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server config to retrieve, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/configs/{config}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters: the standard
    Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`).
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.GameServerConfig{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_configs_get(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.GameServerConfig.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_configs_get(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query
  }

  url_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1beta/{+name}", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.GameServerConfig{}]
  )
end
@doc """
Lists game server configs in a given project, location, and game server deployment.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/configs/*`.
* `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the
    standard Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`), the following are supported:
    * `:filter` (*type:* `String.t`) - Optional. The filter to apply to list results.
    * `:orderBy` (*type:* `String.t`) - Optional. Specifies the ordering of results following syntax at https://cloud.google.com/apis/design/design_patterns#sorting_order.
    * `:pageSize` (*type:* `integer()`) - Optional. The maximum number of items to return. If unspecified, server will pick an appropriate default. Server may return fewer items than requested. A caller should only rely on response's next_page_token to determine if there are more GameServerConfigs left to be queried.
    * `:pageToken` (*type:* `String.t`) - Optional. The next_page_token value returned from a previous list request, if any.
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.ListGameServerConfigsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_game_server_deployments_configs_list(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.ListGameServerConfigsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_game_server_deployments_configs_list(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query,
    filter: :query,
    orderBy: :query,
    pageSize: :query,
    pageToken: :query
  }

  url_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1beta/{+parent}/configs", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.ListGameServerConfigsResponse{}]
  )
end
@doc """
Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the operation resource to be cancelled.
* `optional_params` (*type:* `keyword()`) - Optional parameters. Besides the
    standard Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`), the following are supported:
    * `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.CancelOperationRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_operations_cancel(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.Empty.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_operations_cancel(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query,
    body: :body
  }

  url_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1beta/{+name}:cancel", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Empty{}])
end
@doc """
Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.

## Parameters

* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the operation resource to be deleted.
* `optional_params` (*type:* `keyword()`) - Optional parameters: the standard
    Google API query options (`:"$.xgafv"`, `:access_token`, `:alt`,
    `:callback`, `:fields`, `:key`, `:oauth_token`, `:prettyPrint`,
    `:quotaUser`, `:uploadType`, `:upload_protocol`).
* `opts` (*type:* `keyword()`) - Call options

## Returns

* `{:ok, %GoogleApi.GameServices.V1beta.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_operations_delete(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.GameServices.V1beta.Model.Empty.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def gameservices_projects_locations_operations_delete(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the outgoing request.
  param_locations = %{
    "$.xgafv": :query,
    access_token: :query,
    alt: :query,
    callback: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    uploadType: :query,
    upload_protocol: :query
  }

  url_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/v1beta/{+name}", url_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Empty{}])
end
@doc """
Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the operation resource.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_operations_get(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_operations_get(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the operation's parent resource.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - The standard list filter.
* `:pageSize` (*type:* `integer()`) - The standard list page size.
* `:pageToken` (*type:* `String.t`) - The standard list page token.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.ListOperationsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_operations_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.ListOperationsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_operations_list(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta/{+name}/operations", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.ListOperationsResponse{}]
)
end
@doc """
Creates a new realm in a given project and location.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: `projects/{project}/locations/{location}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:realmId` (*type:* `String.t`) - Required. The ID of the realm resource to be created.
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.Realm.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_create(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_create(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:realmId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1beta/{+parent}/realms", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Deletes a single realm.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the realm to delete, in the following form: `projects/{project}/locations/{location}/realms/{realm}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_delete(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_delete(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1beta/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Gets details of a single realm.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the realm to retrieve, in the following form: `projects/{project}/locations/{location}/realms/{realm}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Realm{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_get(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Realm.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_get(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Realm{}])
end
@doc """
Lists realms in a given project and location.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: `projects/{project}/locations/{location}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Optional. The filter to apply to list results.
* `:orderBy` (*type:* `String.t`) - Optional. Specifies the ordering of results following syntax at https://cloud.google.com/apis/design/design_patterns#sorting_order.
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of items to return. If unspecified, server will pick an appropriate default. Server may return fewer items than requested. A caller should only rely on response's next_page_token to determine if there are more realms left to be queried.
* `:pageToken` (*type:* `String.t`) - Optional. The next_page_token value returned from a previous List request, if any.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.ListRealmsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.ListRealmsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:orderBy => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta/{+parent}/realms", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.ListRealmsResponse{}]
)
end
@doc """
Patches a single realm.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The resource name of the realm, in the following form: `projects/{project}/locations/{location}/realms/{realm}`. For example, `projects/my-project/locations/{location}/realms/my-realm`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. The update mask applies to the resource. For the `FieldMask` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.Realm.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_patch(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_patch(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1beta/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Previews patches to a single realm.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The resource name of the realm, in the following form: `projects/{project}/locations/{location}/realms/{realm}`. For example, `projects/my-project/locations/{location}/realms/my-realm`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:previewTime` (*type:* `DateTime.t`) - Optional. The target timestamp to compute the preview.
* `:updateMask` (*type:* `String.t`) - Required. The update mask applies to the resource. For the `FieldMask` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.Realm.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.PreviewRealmUpdateResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_preview_update(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.PreviewRealmUpdateResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_preview_update(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:previewTime => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1beta/{+name}:previewUpdate", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.PreviewRealmUpdateResponse{}]
)
end
@doc """
Creates a new game server cluster in a given project and location.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: `projects/{project}/locations/{location}/realms/{realm-id}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:gameServerClusterId` (*type:* `String.t`) - Required. The ID of the game server cluster resource to be created.
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerCluster.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_game_server_clusters_create(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_game_server_clusters_create(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:gameServerClusterId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1beta/{+parent}/gameServerClusters", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Deletes a single game server cluster.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server cluster to delete, in the following form: `projects/{project}/locations/{location}/gameServerClusters/{cluster}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_game_server_clusters_delete(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_game_server_clusters_delete(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1beta/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Gets details of a single game server cluster.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server cluster to retrieve, in the following form: `projects/{project}/locations/{location}/realms/{realm-id}/gameServerClusters/{cluster}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:view` (*type:* `String.t`) - Optional. View for the returned GameServerCluster objects. When `FULL` is specified, the `cluster_state` field is also returned in the GameServerCluster object, which includes the state of the referenced Kubernetes cluster such as versions and provider info. The default/unset value is GAME_SERVER_CLUSTER_VIEW_UNSPECIFIED, same as BASIC, which does not return the `cluster_state` field.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.GameServerCluster{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_game_server_clusters_get(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.GameServerCluster.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_game_server_clusters_get(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:view => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.GameServerCluster{}])
end
@doc """
Lists game server clusters in a given project and location.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: "projects/{project}/locations/{location}/realms/{realm}".
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Optional. The filter to apply to list results.
* `:orderBy` (*type:* `String.t`) - Optional. Specifies the ordering of results following syntax at https://cloud.google.com/apis/design/design_patterns#sorting_order.
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of items to return. If unspecified, the server will pick an appropriate default. The server may return fewer items than requested. A caller should only rely on response's next_page_token to determine if there are more GameServerClusters left to be queried.
* `:pageToken` (*type:* `String.t`) - Optional. The next_page_token value returned from a previous List request, if any.
* `:view` (*type:* `String.t`) - Optional. View for the returned GameServerCluster objects. When `FULL` is specified, the `cluster_state` field is also returned in the GameServerCluster object, which includes the state of the referenced Kubernetes cluster such as versions and provider info. The default/unset value is GAME_SERVER_CLUSTER_VIEW_UNSPECIFIED, same as BASIC, which does not return the `cluster_state` field.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.ListGameServerClustersResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_game_server_clusters_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.ListGameServerClustersResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_game_server_clusters_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:orderBy => :query,
:pageSize => :query,
:pageToken => :query,
:view => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1beta/{+parent}/gameServerClusters", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.ListGameServerClustersResponse{}]
)
end
@doc """
Patches a single game server cluster.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The resource name of the game server cluster, in the following form: `projects/{project}/locations/{location}/realms/{realm}/gameServerClusters/{cluster}`. For example, `projects/my-project/locations/{location}/realms/zanzibar/gameServerClusters/my-onprem-cluster`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. Mask of fields to update. At least one path must be supplied in this field. For the `FieldMask` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerCluster.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_game_server_clusters_patch(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_game_server_clusters_patch(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1beta/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.GameServices.V1beta.Model.Operation{}])
end
@doc """
Previews creation of a new game server cluster in a given project and location.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. The parent resource name, in the following form: `projects/{project}/locations/{location}/realms/{realm}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:gameServerClusterId` (*type:* `String.t`) - Required. The ID of the game server cluster resource to be created.
* `:previewTime` (*type:* `DateTime.t`) - Optional. The target timestamp to compute the preview.
* `:view` (*type:* `String.t`) - Optional. This field is deprecated, preview will always return KubernetesClusterState.
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerCluster.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.PreviewCreateGameServerClusterResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_game_server_clusters_preview_create(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.PreviewCreateGameServerClusterResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_game_server_clusters_preview_create(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:gameServerClusterId => :query,
:previewTime => :query,
:view => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1beta/{+parent}/gameServerClusters:previewCreate", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.GameServices.V1beta.Model.PreviewCreateGameServerClusterResponse{}]
)
end
@doc """
Previews deletion of a single game server cluster.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the game server cluster to delete, in the following form: `projects/{project}/locations/{location}/gameServerClusters/{cluster}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:previewTime` (*type:* `DateTime.t`) - Optional. The target timestamp to compute the preview.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.PreviewDeleteGameServerClusterResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_game_server_clusters_preview_delete(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.PreviewDeleteGameServerClusterResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_game_server_clusters_preview_delete(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:previewTime => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1beta/{+name}:previewDelete", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.GameServices.V1beta.Model.PreviewDeleteGameServerClusterResponse{}]
)
end
@doc """
Previews updating a GameServerCluster.
## Parameters
* `connection` (*type:* `GoogleApi.GameServices.V1beta.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The resource name of the game server cluster, in the following form: `projects/{project}/locations/{location}/realms/{realm}/gameServerClusters/{cluster}`. For example, `projects/my-project/locations/{location}/realms/zanzibar/gameServerClusters/my-onprem-cluster`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:previewTime` (*type:* `DateTime.t`) - Optional. The target timestamp to compute the preview.
* `:updateMask` (*type:* `String.t`) - Required. Mask of fields to update. At least one path must be supplied in this field. For the `FieldMask` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* `:body` (*type:* `GoogleApi.GameServices.V1beta.Model.GameServerCluster.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.GameServices.V1beta.Model.PreviewUpdateGameServerClusterResponse{}}` on success
* `{:error, info}` on failure
"""
@spec gameservices_projects_locations_realms_game_server_clusters_preview_update(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.GameServices.V1beta.Model.PreviewUpdateGameServerClusterResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def gameservices_projects_locations_realms_game_server_clusters_preview_update(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:previewTime => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1beta/{+name}:previewUpdate", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++
[struct: %GoogleApi.GameServices.V1beta.Model.PreviewUpdateGameServerClusterResponse{}]
)
end
end
| 46.946016
| 802
| 0.62283
|
9e2923b04dee0d407ab414a1a070b324737b5535
| 2,677
|
ex
|
Elixir
|
clients/cloud_functions/lib/google_api/cloud_functions/v1/model/operation_metadata_v1.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_functions/lib/google_api/cloud_functions/v1/model/operation_metadata_v1.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/cloud_functions/lib/google_api/cloud_functions/v1/model/operation_metadata_v1.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudFunctions.V1.Model.OperationMetadataV1 do
  @moduledoc """
  Metadata describing an Operation
  ## Attributes
  * `buildId` (*type:* `String.t`, *default:* `nil`) - The Cloud Build ID of the function created or updated by an API call. This field is only populated for Create and Update operations.
  * `request` (*type:* `map()`, *default:* `nil`) - The original request that started the operation.
  * `sourceToken` (*type:* `String.t`, *default:* `nil`) - An identifier for Firebase function sources. Disclaimer: This field is only supported for Firebase function deployments.
  * `target` (*type:* `String.t`, *default:* `nil`) - Target of the operation - for example projects/project-1/locations/region-1/functions/function-1
  * `type` (*type:* `String.t`, *default:* `nil`) - Type of operation.
  * `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - The last update timestamp of the operation.
  * `versionId` (*type:* `String.t`, *default:* `nil`) - Version id of the function created or updated by an API call. This field is only populated for Create and Update operations.
  """
  # ModelBase provides the `field/1,2` macros and the generated decode/encode
  # support used by the Poison protocol implementations below this module.
  use GoogleApi.Gax.ModelBase
  # Every field may be nil (absent in the API response).
  @type t :: %__MODULE__{
          :buildId => String.t(),
          :request => map(),
          :sourceToken => String.t(),
          :target => String.t(),
          :type => String.t(),
          :updateTime => DateTime.t(),
          :versionId => String.t()
        }
  # Field declarations mirror the JSON attribute names one-to-one.
  field(:buildId)
  # Kept as a plain map — the original request payload has no fixed schema here.
  field(:request, type: :map)
  field(:sourceToken)
  field(:target)
  field(:type)
  # Timestamp string is parsed into a DateTime struct on decode.
  field(:updateTime, as: DateTime)
  field(:versionId)
end
defimpl Poison.Decoder, for: GoogleApi.CloudFunctions.V1.Model.OperationMetadataV1 do
  # Forward decoding to the decode/2 generated by GoogleApi.Gax.ModelBase.
  defdelegate decode(value, options),
    to: GoogleApi.CloudFunctions.V1.Model.OperationMetadataV1
end
defimpl Poison.Encoder, for: GoogleApi.CloudFunctions.V1.Model.OperationMetadataV1 do
  # All Gax models share the same generic struct-to-JSON encoding.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 41.184615
| 189
| 0.698917
|
9e2932abe501a6f1e2d7c3bf91b5d87257547d69
| 1,435
|
ex
|
Elixir
|
clients/dataflow/lib/google_api/dataflow/v1b3/model/shell_task.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/dataflow/lib/google_api/dataflow/v1b3/model/shell_task.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | null | null | null |
clients/dataflow/lib/google_api/dataflow/v1b3/model/shell_task.ex
|
GoNZooo/elixir-google-api
|
cf3ad7392921177f68091f3d9001f1b01b92f1cc
|
[
"Apache-2.0"
] | 1
|
2018-07-28T20:50:50.000Z
|
2018-07-28T20:50:50.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.ShellTask do
  @moduledoc """
  A task which consists of a shell command for the worker to execute.
  ## Attributes
  - command (String): The shell command to run. Defaults to: `null`.
  - exitCode (Integer): Exit code for the task. Defaults to: `null`.
  """
  # Plain atom keys are identical to the quoted :"..." form; both default to nil.
  defstruct command: nil, exitCode: nil
end
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.ShellTask do
  # Decoding is the identity — fields are used exactly as parsed from JSON.
  def decode(value, _options), do: value
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.ShellTask do
  # Encoding drops nil fields via the shared serializer helper.
  defdelegate encode(value, options),
    to: GoogleApi.Dataflow.V1b3.Deserializer,
    as: :serialize_non_nil
end
| 29.895833
| 77
| 0.742857
|
9e293c64cff69a237c26d82d936f34d7746b4c3c
| 1,562
|
ex
|
Elixir
|
test/support/data_case.ex
|
dcdourado/hello_finance
|
ff7d3b67c0c39c3c60675d018b49f5c7e304610b
|
[
"MIT"
] | null | null | null |
test/support/data_case.ex
|
dcdourado/hello_finance
|
ff7d3b67c0c39c3c60675d018b49f5c7e304610b
|
[
"MIT"
] | null | null | null |
test/support/data_case.ex
|
dcdourado/hello_finance
|
ff7d3b67c0c39c3c60675d018b49f5c7e304610b
|
[
"MIT"
] | null | null | null |
defmodule HelloFinance.DataCase do
  @moduledoc """
  This module defines the setup for tests requiring
  access to the application's data layer.
  You may define functions here to be used as helpers in
  your tests.
  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use HelloFinance.DataCase, async: true`, although
  this option is not recommended for other databases.
  """
  use ExUnit.CaseTemplate

  using do
    quote do
      alias HelloFinance.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import HelloFinance.DataCase
    end
  end

  setup tags do
    # Check out a sandboxed DB connection owned by this test process.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(HelloFinance.Repo)

    # `unless` is soft-deprecated; `if !` is the modern equivalent. `!` (not
    # `not`) is required because tags[:async] is nil when the tag is unset.
    if !tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(HelloFinance.Repo, {:shared, self()})
    end

    :ok
  end

  @doc """
  A helper that transforms changeset errors into a map of messages.
      assert {:error, changeset} = Accounts.create_user(%{password: "short"})
      assert "password is too short" in errors_on(changeset).password
      assert %{password: ["password is too short"]} = errors_on(changeset)
  """
  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      # Interpolate bindings such as %{count} into the raw error message.
      # to_existing_atom avoids creating atoms from arbitrary message text.
      Regex.replace(~r"%{(\w+)}", message, fn _, key ->
        opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
      end)
    end)
  end
end
| 27.892857
| 77
| 0.694622
|
9e29605c33ade73f948b43baf1b71d2063ff6fc0
| 12,258
|
ex
|
Elixir
|
lib/rexbug.ex
|
nietaki/rexbug
|
bc7e2c6a80975ee4d878df4a059f2f75e7e4eae3
|
[
"MIT"
] | 198
|
2017-11-13T11:28:12.000Z
|
2022-03-30T07:22:08.000Z
|
lib/rexbug.ex
|
nietaki/rexbug
|
bc7e2c6a80975ee4d878df4a059f2f75e7e4eae3
|
[
"MIT"
] | 48
|
2017-11-02T14:30:38.000Z
|
2021-04-19T00:49:50.000Z
|
lib/rexbug.ex
|
nietaki/rexbug
|
bc7e2c6a80975ee4d878df4a059f2f75e7e4eae3
|
[
"MIT"
] | 11
|
2017-12-04T11:06:23.000Z
|
2021-03-05T21:13:58.000Z
|
defmodule Rexbug do
@moduledoc """
A thin Elixir wrapper for the redbug Erlang tracing debugger.
"""
@help_message """
Rexbug - a thin Elixir wrapper for :redbug - the (sensibly) Restrictive
Debugger. It doesn't fork :redbug, only uses it under the hood.
You can use :redbug directly - run :redbug.help() to see its help message.
Inner workings:
Rexbug is a tool to interact with the Erlang trace facility.
It will instruct the Erlang VM to generate so called
"trace messages" when certain events (such as a particular
function being called) occur.
The trace messages are either printed (i.e. human readable)
to a file or to the screen; or written to a trc file.
Using a trc file puts less stress on the system, but
there is no way to count the messages (so the msgs opt
is ignored), and the files can only be read by special tools
(such as 'bread'). Printing and trc files cannot be combined.
By default (i.e. if the :file opt is not given), messages
are printed.
Basic usage:
Rexbug.start(trace_pattern, opts \\ [])
Rexbug.start(time_limit, message_limit, trace_pattern)
trace_pattern: :send | :receive | rtp | [:send | :receive | rtp]
rtp: restricted trace pattern
the rtp has the form: "<mfa> when <guards> :: <actions>"
where <mfa> can be:
"Mod", "Mod.fun/3", "Mod.fun/_" or "Mod.fun(_, :atom, x)"
<guard> is something like:
"x==1" or "is_atom(a)"
and <actions> is:
"", "return", "stack", or "return;stack"
E.g.
:ets.lookup(t, :hostname) when is_integer(t) :: stack
Map.new/2
Map.pop(_, :some_key, default) when default != nil :: return
Agent
NOTE: The <mfa> of "Map.new" is equivalent to "Map.new()" - the 0 arity
is implied. To trace the function with any arity use "Map.new/any" or
simply "Map.new/_".
opts: Keyword.t
general opts (and their default values):
time (15_000) stop trace after this many ms
msgs (10) stop trace after this many msgs
target (Node.self()) node to trace on
cookie (host cookie) target node cookie
blocking (false) block start/2, return a list of messages
arity (false) print arity instead of arg list
buffered (false) buffer messages till end of trace
discard (false) discard messages (when counting)
max_queue (5_000) fail if internal queue gets this long
max_msg_size (50_000) fail if seeing a msg this big
procs (:all) (list of) Erlang process(es)
:all|:new|pid()|atom(reg_name)|{:pid,i2,i3}
print-related opts:
print_calls (true) print calls
print_file (standard_io) print to this file
print_msec (false) print milliseconds on timestamps
print_depth (999_999) formatting depth for "~P"
print_re ("") print only messages that match this regex
print_return (true) print return value (if "return" action given)
print_fun () custom print handler, fun/1 or fun/2;
fun(trace_msg :: term) :: <ignored>
fun(trace_msg, acc_old) :: acc_new
(where initial accumulator is 0)
trc file related opts:
file (none) use a trc file based on this name
file_size (1) size of each trc file
file_count (8) number of trc files
"""
alias Rexbug.Translator
@type redbug_non_blocking_return :: {proc_count :: integer, func_count :: integer}
@type redbug_blocking_return :: {stop_reason :: atom, trace_messages :: [term]}
@type redbug_error :: {error_type :: atom, error_reason :: term}
@type rexbug_error :: {:error, reason :: term}
@type rexbug_return ::
redbug_non_blocking_return | redbug_blocking_return | redbug_error | rexbug_error
@type trace_pattern_instance :: String.t() | :send | :receive
@type trace_pattern :: trace_pattern_instance | [trace_pattern_instance]
@type proc :: pid() | atom | {pid, integer, integer}
@type procs :: :all | :new | :running | proc | [proc]
@spec start(trace_pattern) :: rexbug_return
@doc """
See `Rexbug.start/2`.
"""
def start(trace_pattern), do: start(trace_pattern, [])
@spec start(time :: integer, msgs :: integer, trace_pattern) :: rexbug_return
@doc """
See `Rexbug.start/2`.
"""
def start(time, msgs, trace_pattern), do: start(trace_pattern, time: time, msgs: msgs)
@spec start(time :: integer, msgs :: integer, procs :: procs, trace_pattern) :: rexbug_return
@doc """
See `Rexbug.start/2`.
"""
def start(time, msgs, procs, trace_pattern),
do: start(trace_pattern, time: time, msgs: msgs, procs: procs)
@spec start(time :: integer, msgs :: integer, procs :: procs, node :: node(), trace_pattern) ::
rexbug_return
@doc """
See `Rexbug.start/2`.
"""
def start(time, msgs, procs, node, trace_pattern),
do: start(trace_pattern, time: time, msgs: msgs, procs: procs, target: node)
@spec start(trace_pattern, opts :: Keyword.t()) :: rexbug_return
@doc """
Starts tracing for the given pattern with provided options.
If successful and `:blocking` option is not specified, returns a tuple
where the first element is the count of targeted processes and the
second element is the count of targeted functions
# Trace Pattern
The `trace_pattern` is a string (binary) describing which function(s)
should be traced.
The `trace_pattern` has the `"<mfa> when <guards> :: <actions>"` form,
where guards and actions are optional and <mfa> can be in the form of
`Mod`, `Mod.fun/3`, `Mod.fun/_` or `Mod.fun(_, :atom, x)`
Most normal [Elixir guards](https://hexdocs.pm/elixir/master/guards.html)
are valid as <guards>, so something like
`x==1` or `is_atom(x)` or `is_integer(i) and i > 0` would work.
The valid <actions> are: `return`, `stack`, or `return;stack`
Apart from tracing function calls you can trace sent and received messages.
To do so specify `:send` or `:receive` as the trace pattern.
You can also specify multiple trace patterns by providing a list of them
as the first argument.
# Options
There's a range of options that modify the behaviour of `Rexbug`.
## General options
| option | default | meaning |
| --- | --- | --- |
| time | `15_000` | stop tracing after this many milliseconds |
| msgs | `10` | stop tracing after this many messages |
| target | `Node.self()` | node to trace on |
| cookie | host cookie | target node cookie |
| blocking | `false` | block on `start/2` and return a list of messages. [see comment](#start/2-blocking) |
| arity | `false` | print arity instead of argument list |
| buffered | `false` | buffer messages till end of trace |
| discard | `false` | discard messages (when counting) |
| max_queue | `5_000` | fail if internal queue gets this long |
| max\_msg\_size | `50_000` | fail if seeing a message this big |
| procs | `:all` | (list of) Erlang process(es) to include when tracing. [see comment](#start/2-procs) |
## Print-related options
| option | default | meaning |
| --- | --- | --- |
| print_calls | `true` | print calls |
| print_file | standard_io | if provided, prints messages to the specified file |
| print_msec | `false` | print milliseconds on timestamps |
| print_depth | `999_999` | formatting depth for `"~P"` |
| print_re | `""` | print only messages that match this regex |
| print_return | `true` | if set to `false`, won't print the return values. Relevant only if `return` action is specified |
| print_fun | none | Custom print handler. [see comment](#start/2-print_fun) |
## Trace file related options
| option | default | meaning |
| --- | --- | --- |
| file | none | use a trc file based on this name |
| file_size | `1` | size of each trc file |
| file_count | `8` | number of trc files |
## Options comments
### `:blocking`
If set to true, instead of printing traces to stdio, `Rexbug.start/2` will block
and return a list of trace messages when it's done tracing.
### `:procs`
Which processes to trace. The possible values are `:all` for all processes,
`:new` for just the ones spawned after the tracing has started,
an atom for registered processes, or a pid. The pid can either be a PID
literal or a `{:pid, x, y}`, where `x` and `y` are the latter 2 integers from
the PID representation. So for example `#PID<0.150.0>` could be expressed with
`{:pid, 150, 0}`.
The first integer from the PID representation is omitted, because it represents
the node number. You can use the `target` option to specify a remote node instead.
You can provide either a single process or a list of procs to trace.
### `:print_fun`
Custom function to use to print the trace messages.
The function can be in the `fun(trace_msg :: term) :: <ignored>` format
or the `fun(trace_msg, acc_old) :: acc_new` format. If you use the latter format,
the initial accumulator will be `0`.
"""
# Translates the Elixir-style options and trace pattern into redbug's
# format, injects the default print handler, and starts redbug. If either
# translation step returns a non-`{:ok, _}` value, that value is returned
# unchanged (standard `with` fall-through).
def start(trace_pattern, options) do
  with {:ok, translated_opts} <- Translator.translate_options(options),
       effective_opts = add_default_options(translated_opts),
       {:ok, translated_pattern} <- Translator.translate(trace_pattern) do
    :redbug.start(translated_pattern, effective_opts)
  end
end
@spec stop() :: :stopped | :not_started
@doc """
Stops all tracing by delegating to `:redbug.stop/0`.
"""
def stop do
  :redbug.stop()
end
@spec stop_sync(integer) :: :stopped | :not_started | {:error, :could_not_stop_redbug}
@doc """
Stops all tracing in a synchronous manner.

Usually there's no need to use this function over `stop/0`. You might want to use
it if you're going to start tracing immediately afterwards in an automated fashion.
"""
# kind of relies on redbug internal behaviour, but not really
def stop_sync(timeout \\ 100) do
  case Process.whereis(:redbug) do
    nil ->
      :not_started

    pid ->
      # Monitor the redbug process so we can wait until it has actually
      # terminated before returning; otherwise an immediate restart could
      # race with redbug's shutdown.
      ref = Process.monitor(pid)
      res = :redbug.stop()

      receive do
        # Fixed: the original had a dead `:stopped` literal on its own line
        # before `res` — an unused expression; `res` (redbug's stop result)
        # is what is returned.
        {:DOWN, ^ref, _, _, _} -> res
      after
        timeout -> {:error, :could_not_stop_redbug}
      end
  end
end
@spec help() :: :ok
@doc """
Prints the help message / usage manual to standard output.
The help message is as follows:
```txt
""" <> @help_message <> "\n```"
# Writes the compile-time @help_message to stdout; returns :ok explicitly.
def help do
  @help_message |> IO.puts()
  :ok
end
# Builds the effective option list: a default :print_fun that pretty-prints
# each trace message via Rexbug.Printing (closing over the caller's opts).
# Keyword.merge/2 lets any caller-supplied :print_fun win over the default.
defp add_default_options(opts) do
  default_printer = fn traced -> Rexbug.Printing.print_with_opts(traced, opts) end

  Keyword.merge([print_fun: default_printer], opts)
end
end
| 41.552542
| 130
| 0.569587
|
9e296d5be494ea97557d6d58ca164766c4056197
| 219
|
exs
|
Elixir
|
test/fixtures/system_platform/nerves.exs
|
opencollective/nerves
|
81f5d30de283e77f3720a87fa1435619f0da12de
|
[
"Apache-2.0"
] | 1
|
2019-06-12T17:34:10.000Z
|
2019-06-12T17:34:10.000Z
|
test/fixtures/system_platform/nerves.exs
|
opencollective/nerves
|
81f5d30de283e77f3720a87fa1435619f0da12de
|
[
"Apache-2.0"
] | null | null | null |
test/fixtures/system_platform/nerves.exs
|
opencollective/nerves
|
81f5d30de283e77f3720a87fa1435619f0da12de
|
[
"Apache-2.0"
] | null | null | null |
use Mix.Config

# Read the platform version from the VERSION file that sits next to this
# config file.
version =
  __DIR__
  |> Path.join("VERSION")
  |> File.read!()
  |> String.trim()

config :system_platform, :nerves_env,
  type: :system_platform,
  version: version,
  checksum: [
    "env.exs",
    "lib"
  ]
| 14.6
| 37
| 0.639269
|
9e2984c3671059286e8b67f592cbf2da77a21bae
| 9,157
|
exs
|
Elixir
|
test/process_managers/process_manager_telemetry_test.exs
|
octowombat/commanded
|
79a1965e276d3369dcf70ae65ef904d7e59f4a6a
|
[
"MIT"
] | 1,220
|
2017-10-31T10:56:40.000Z
|
2022-03-31T17:40:19.000Z
|
test/process_managers/process_manager_telemetry_test.exs
|
octowombat/commanded
|
79a1965e276d3369dcf70ae65ef904d7e59f4a6a
|
[
"MIT"
] | 294
|
2017-11-03T10:33:41.000Z
|
2022-03-24T08:36:42.000Z
|
test/process_managers/process_manager_telemetry_test.exs
|
octowombat/commanded
|
79a1965e276d3369dcf70ae65ef904d7e59f4a6a
|
[
"MIT"
] | 208
|
2017-11-03T10:56:47.000Z
|
2022-03-14T05:49:38.000Z
|
defmodule Commanded.ProcessManagers.ProcessManagerTelemetryTest do
  use ExUnit.Case

  import Mox

  alias Commanded.ProcessManagers.ProcessManagerInstance

  # Mox stubs must be visible from the process manager's own process,
  # not just the test process.
  setup :set_mox_global
  setup :verify_on_exit!

  # Command fixture dispatched by the process manager under test.
  defmodule Commands do
    defmodule Ok do
      defstruct [:message]
    end
  end

  # Event fixture; the :type field selects which handle/2 branch of
  # ExamplePM is exercised ("ok" | "error" | "retry" | "raise").
  defmodule Event do
    @derive Jason.Encoder
    defstruct [:message, :type]
  end

  # Aggregate stub: accepts any command and emits no events.
  defmodule Agg do
    defstruct []

    def execute(_, _) do
      []
    end

    def apply(_, _) do
      %__MODULE__{}
    end
  end

  defmodule Router do
    use Commanded.Commands.Router

    dispatch(Commands.Ok, to: Agg, identity: :message)
  end

  # Minimal in-memory Commanded application used by these tests.
  defmodule App do
    alias Commanded.EventStore.Adapters.InMemory
    alias Commanded.Serialization.JsonSerializer

    use Commanded.Application,
      otp_app: :app,
      event_store: [
        adapter: InMemory,
        serializer: JsonSerializer
      ],
      pubsub: :local,
      registry: :local

    router Router
  end

  # Process manager under test. Each handled event's :type drives the
  # outcome: dispatch a command, return an error, request a retry, or raise.
  defmodule ExamplePM do
    use Commanded.ProcessManagers.ProcessManager,
      application: App,
      name: __MODULE__

    alias Commands.Ok

    @derive Jason.Encoder
    defstruct message: "init"

    def handle(%ExamplePM{}, %Event{type: type, message: message}) do
      case type do
        "ok" -> %Ok{message: message}
        "error" -> {:error, message}
        "retry" -> {:error, :retry}
        "raise" -> raise message
      end
    end

    def apply(%ExamplePM{}, %Event{message: message}),
      do: %ExamplePM{message: message}

    # Retry once (recording :retried? in the failure context), then skip.
    def error({:error, :retry}, %Event{}, failure_context) do
      if failure_context.context[:retried?] do
        :skip
      else
        {:retry, %{retried?: true}}
      end
    end
  end

  alias Commands.Ok

  setup do
    start_supervised!(App)
    attach_telemetry()

    :ok
  end

  @handler "test-pm-handler"

  describe "process manager telemetry" do
    test "emit `[:commanded, :process_manager, :handle, :start]` event" do
      uuid = UUID.uuid4()
      {:ok, instance} = start_process_manager_instance(uuid)

      event = to_recorded_event(%Event{message: "start", type: "ok"})
      :ok = ProcessManagerInstance.process_event(instance, event)

      # The second tuple element is the sequence number assigned by the
      # telemetry handler (see attach_telemetry/0), so asserting `1` here
      # checks that :start is the first event emitted.
      assert_receive {[:commanded, :process_manager, :handle, :start], 1, measurements, metadata}
      assert match?(%{system_time: _system_time}, measurements)

      assert match?(
               %{
                 application: App,
                 process_manager_module: ExamplePM,
                 process_manager_name: "ExamplePM",
                 process_state: %ExamplePM{message: "init"},
                 process_uuid: ^uuid,
                 recorded_event: ^event
               },
               metadata
             )

      assert_receive {[:commanded, :process_manager, :handle, :stop], 2, _measurements, _metadata}

      refute_receive {[:commanded, :process_manager, :handle, :exception], _, _measurements,
                      _metadata}
    end

    test "emit `[:commanded, :process_manager, :handle, :stop]` event" do
      uuid = UUID.uuid4()
      {:ok, instance} = start_process_manager_instance(uuid)

      event = to_recorded_event(%Event{message: "start", type: "ok"})
      :ok = ProcessManagerInstance.process_event(instance, event)

      assert_receive {[:commanded, :process_manager, :handle, :start], 1, _measurements,
                      _metadata}

      assert_receive {[:commanded, :process_manager, :handle, :stop], 2, measurements, metadata}
      assert match?(%{duration: _}, measurements)
      assert is_integer(measurements.duration)

      # On success the :stop metadata carries the dispatched commands.
      assert match?(
               %{
                 application: App,
                 process_manager_module: ExamplePM,
                 process_manager_name: "ExamplePM",
                 process_state: %ExamplePM{message: "init"},
                 process_uuid: ^uuid,
                 recorded_event: ^event,
                 commands: [%Ok{message: "start"}]
               },
               metadata
             )

      refute_receive {[:commanded, :process_manager, :handle, :exception], _num, _measurements,
                      _metadata}
    end

    test "emit `[:commanded, :process_manager, :handle, :stop]` with error event" do
      uuid = UUID.uuid4()
      {:ok, instance} = start_process_manager_instance(uuid)

      event = to_recorded_event(%Event{message: "stop", type: "error"})
      :ok = ProcessManagerInstance.process_event(instance, event)

      assert_receive {[:commanded, :process_manager, :handle, :start], 1, _measurements,
                      _metadata}

      # A handler returning {:error, _} still emits :stop (not :exception),
      # with the error included in the metadata.
      assert_receive {[:commanded, :process_manager, :handle, :stop], 2, measurements, metadata}
      assert match?(%{duration: _}, measurements)
      assert is_integer(measurements.duration)

      assert match?(
               %{
                 application: App,
                 process_manager_module: ExamplePM,
                 process_manager_name: "ExamplePM",
                 process_state: %ExamplePM{message: "init"},
                 process_uuid: ^uuid,
                 recorded_event: ^event,
                 error: "stop"
               },
               metadata
             )

      refute_receive {[:commanded, :process_manager, :handle, :exception], _num, _measurements,
                      _metadata}
    end

    test "events are emitted with discrete start/stop on retries" do
      uuid = UUID.uuid4()
      {:ok, instance} = start_process_manager_instance(uuid)

      event = to_recorded_event(%Event{message: "retry", type: "retry"})
      :ok = ProcessManagerInstance.process_event(instance, event)

      assert_receive {[:commanded, :process_manager, :handle, :start], 1, _measurements,
                      _metadata}

      assert_receive {[:commanded, :process_manager, :handle, :stop], 2, measurements, metadata}
      assert match?(%{duration: _}, measurements)
      assert is_integer(measurements.duration)

      assert match?(
               %{
                 application: App,
                 process_manager_module: ExamplePM,
                 process_manager_name: "ExamplePM",
                 process_state: %ExamplePM{message: "init"},
                 process_uuid: ^uuid,
                 recorded_event: ^event,
                 error: :retry
               },
               metadata
             )

      refute_receive {[:commanded, :process_manager, :handle, :exception], _num, _measurements,
                      _metadata}

      # The retry (triggered by ExamplePM.error/3) produces its own
      # start/stop pair — sequence numbers 3 and 4.
      assert_receive {[:commanded, :process_manager, :handle, :start], 3, _measurements,
                      _metadata}

      assert_receive {[:commanded, :process_manager, :handle, :stop], 4, _measurements, _metadata}
    end

    @tag capture_log: true
    test "emit `[:commanded, :process_manager, :handle, :exception]` event" do
      uuid = UUID.uuid4()
      {:ok, instance} = start_process_manager_instance(uuid)

      event = to_recorded_event(%Event{message: "exception", type: "raise"})
      :ok = ProcessManagerInstance.process_event(instance, event)

      assert_receive {[:commanded, :process_manager, :handle, :start], 1, _measurements,
                      _metadata}

      # A raising handler emits :exception instead of :stop.
      refute_receive {[:commanded, :process_manager, :handle, :stop], _num, _measurements,
                      _metadata}

      assert_receive {[:commanded, :process_manager, :handle, :exception], 2, measurements,
                      metadata}

      assert match?(%{duration: _}, measurements)
      assert is_integer(measurements.duration)

      assert match?(
               %{
                 application: App,
                 process_manager_module: ExamplePM,
                 process_manager_name: "ExamplePM",
                 process_state: %ExamplePM{message: "init"},
                 process_uuid: ^uuid,
                 recorded_event: ^event,
                 kind: :error,
                 reason: %RuntimeError{message: "exception"},
                 stacktrace: _
               },
               metadata
             )
    end
  end

  # Attaches a telemetry handler that forwards every handled event to the
  # test process, tagged with a monotonically increasing sequence number
  # held in an Agent — this is what lets the tests assert emission order.
  defp attach_telemetry do
    agent = start_supervised!({Agent, fn -> 1 end})

    :telemetry.attach_many(
      @handler,
      [
        [:commanded, :process_manager, :handle, :start],
        [:commanded, :process_manager, :handle, :stop],
        [:commanded, :process_manager, :handle, :exception]
      ],
      fn event_name, measurements, metadata, reply_to ->
        num = Agent.get_and_update(agent, fn num -> {num, num + 1} end)
        send(reply_to, {event_name, num, measurements, metadata})
      end,
      self()
    )

    on_exit(fn ->
      :telemetry.detach(@handler)
    end)
  end

  # Starts a single ProcessManagerInstance with the test process acting as
  # its process router.
  defp start_process_manager_instance(transfer_uuid) do
    start_supervised(
      {ProcessManagerInstance,
       application: App,
       idle_timeout: :infinity,
       process_manager_name: "ExamplePM",
       process_manager_module: ExamplePM,
       process_router: self(),
       process_uuid: transfer_uuid}
    )
  end

  # Wraps a domain event in the minimal RecordedEvent envelope expected by
  # ProcessManagerInstance.process_event/2.
  defp to_recorded_event(event) do
    alias Commanded.EventStore.RecordedEvent

    %RecordedEvent{event_number: 1, stream_id: "stream-id", stream_version: 1, data: event}
  end
end
| 28.886435
| 98
| 0.596374
|
9e298b0be3a3951a6f38540c1e0dc2140637c7a9
| 885
|
ex
|
Elixir
|
elixir/graphql/lib/graphql/user/user.ex
|
gilmoreg/learn
|
0c4f34387f0d2235ecd88ac62fb86a51f87eb5c2
|
[
"MIT"
] | null | null | null |
elixir/graphql/lib/graphql/user/user.ex
|
gilmoreg/learn
|
0c4f34387f0d2235ecd88ac62fb86a51f87eb5c2
|
[
"MIT"
] | null | null | null |
elixir/graphql/lib/graphql/user/user.ex
|
gilmoreg/learn
|
0c4f34387f0d2235ecd88ac62fb86a51f87eb5c2
|
[
"MIT"
] | null | null | null |
defmodule Graphql.User do
  @moduledoc """
  Ecto schema for the `Users` table.

  Column names follow the database's PascalCase convention, so every field
  declares an explicit `:source` mapping; the primary key is the integer
  `UserId` column and is not autogenerated.
  """

  use Ecto.Schema

  # Integer primary key read from the UserId column; values are assigned
  # externally, not generated by the adapter.
  @primary_key {:user_id, :integer, autogenerate: false, source: :UserId}

  schema "Users" do
    # NOTE(review): Tds.UUID suggests a SQL Server uniqueidentifier column —
    # confirm against the database schema.
    field :external_id, Tds.UUID, source: :ExternalId
    field :email_identifier, :string, source: :EmailIdentifier
    field :login_name, :string, source: :LoginName
    field :contact_email, :string, source: :ContactEmail
    field :display_name, :string, source: :DisplayName
    field :first_name, :string, source: :FirstName
    field :last_name, :string, source: :LastName
    field :phone_number, :string, source: :PhoneNumber
    field :entity_state, :string, source: :EntityState
    # Audit columns.
    field :created_date, :utc_datetime, source: :CreatedDate
    field :created_by, :string, source: :CreatedBy
    field :last_modified_date, :utc_datetime, source: :LastModifiedDate
    field :last_modified_by, :string, source: :LastModifiedBy
  end
end
| 42.142857
| 73
| 0.733333
|
9e29b7c9dd47e2d5477a2479ea36390faaaeb71f
| 3,228
|
ex
|
Elixir
|
lib/railway_ipc/adapters/rabbit_mq/consumer.ex
|
djeusette/railway_ipc
|
30fb58726e43148fe72fbfe63fdf46161fc933cf
|
[
"MIT"
] | null | null | null |
lib/railway_ipc/adapters/rabbit_mq/consumer.ex
|
djeusette/railway_ipc
|
30fb58726e43148fe72fbfe63fdf46161fc933cf
|
[
"MIT"
] | null | null | null |
lib/railway_ipc/adapters/rabbit_mq/consumer.ex
|
djeusette/railway_ipc
|
30fb58726e43148fe72fbfe63fdf46161fc933cf
|
[
"MIT"
] | null | null | null |
defmodule RailwayIpc.Adapters.RabbitMQ.Consumer do
  @moduledoc false

  # Internal RabbitMQ consumer adapter: receives deliveries on an
  # ExRabbitPool channel, hands them to RailwayIpc.Consumer for processing,
  # and — for RPC-style messages carrying reply_to/correlation_id — publishes
  # the result back to the caller's reply queue.

  use ExRabbitPool.Consumer

  alias RailwayIpc.Adapters.RabbitMQ
  alias RailwayIpc.Payload

  @behaviour RailwayIpc.Consumer.Impl

  # Ensures a :queue_name option was given; raises ArgumentError otherwise.
  @impl true
  def validate_config!(config) do
    queue_name = config[:queue_name]

    unless queue_name do
      raise ArgumentError,
            "missing :queue_name option on use RailwayIpc.Consumer with RabbitMQ adapter"
    end

    :ok
  end

  # Builds the child specs for this consumer: a queue initializer followed
  # by the consumer GenServer itself, wired to the shared consumer pool.
  @impl true
  def child_spec(opts) do
    otp_app = Keyword.fetch!(opts, :otp_app)
    queue_name = Keyword.fetch!(opts, :queue_name)
    module = Keyword.fetch!(opts, :module)
    adapter = Keyword.get(opts, :adapter, RabbitMQ.current_impl())
    queue_initializer = Keyword.get(opts, :queue_initializer, RabbitMQ.QueueInitializer)
    name = Keyword.fetch!(opts, :name)
    # NOTE(review): dynamic atom creation — acceptable only because :name is
    # a developer-supplied supervisor name, not unbounded external input.
    name = String.to_atom("#{name}.RabbitMQ")
    pool_id = RabbitMQ.Connection.pool_name(:consumers, otp_app)

    [
      queue_initializer.child_spec(opts),
      %{
        id: name,
        start:
          {__MODULE__, :start_link,
           [
             [
               adapter: adapter,
               pool_id: pool_id,
               queue: queue_name,
               module: module,
               otp_app: otp_app
             ],
             name
           ]}
      }
    ]
  end

  def start_link(config, name) do
    GenServer.start_link(__MODULE__, config, name: name)
  end

  # Handles one delivery: builds an ack closure bound to this delivery tag,
  # runs the consumer module's processing, then replies if the message was
  # an RPC request. Always returns :ok to the channel.
  def basic_deliver(
        %{config: config, adapter: adapter, channel: channel},
        payload,
        %{delivery_tag: delivery_tag} = metadata
      ) do
    module = Keyword.fetch!(config, :module)
    otp_app = Keyword.fetch!(config, :otp_app)

    ack_func = fn ->
      :ok = adapter.ack(channel, delivery_tag, requeue: false)
    end

    RailwayIpc.Consumer.process(payload, metadata, module, ack_func)
    |> reply_if_needed(metadata, otp_app)

    :ok
  end

  # RPC reply path: only fires when processing succeeded AND the message
  # carried both a reply_to queue and a correlation id.
  defp reply_if_needed(
         {:ok, payload},
         %{correlation_id: correlation_id, reply_to: reply_to_queue},
         otp_app
       )
       when is_binary(reply_to_queue) and is_binary(correlation_id) do
    {:ok, encoded_payload} = Payload.encode(payload)

    RabbitMQ.publish(encoded_payload, %{},
      otp_app: otp_app,
      correlation_id: correlation_id,
      routing_key: reply_to_queue
    )
  end

  defp reply_if_needed(_payload, _metadata, _otp_app), do: :ok

  # Broker confirmed the consumer registration; an {:error, _} from the
  # consumer module stops this process.
  def basic_consume_ok(%{config: config} = state, consumer_tag) do
    module = Keyword.fetch!(config, :module)

    module.consumer_registered(state, %{consumer_tag: consumer_tag})
    |> case do
      :ok -> :ok
      {:error, error} -> {:stop, error}
    end
  end

  # Broker cancelled the consumer unexpectedly (e.g. queue deleted).
  def basic_cancel(%{config: config} = state, consumer_tag, no_wait) do
    module = Keyword.fetch!(config, :module)

    module.consumer_unexpectedly_cancelled(state, %{consumer_tag: consumer_tag, no_wait: no_wait})
    |> case do
      :ok -> :ok
      {:error, error} -> {:stop, error}
    end
  end

  # Broker confirmed a cancellation this consumer requested.
  def basic_cancel_ok(%{config: config} = state, consumer_tag) do
    module = Keyword.fetch!(config, :module)

    module.consumer_cancelled(state, %{consumer_tag: consumer_tag})
    |> case do
      :ok -> :ok
      {:error, error} -> {:stop, error}
    end
  end
end
| 26.032258
| 98
| 0.636307
|
9e29d6e8ea10db81473dd897ab18e57486c32a41
| 878
|
ex
|
Elixir
|
clients/pub_sub/lib/google_api/pub_sub/v1/metadata.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/pub_sub/lib/google_api/pub_sub/v1/metadata.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
clients/pub_sub/lib/google_api/pub_sub/v1/metadata.ex
|
mcrumm/elixir-google-api
|
544f22797cec52b3a23dfb6e39117f0018448610
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PubSub.V1 do
  @moduledoc """
  API client metadata for GoogleApi.PubSub.V1.
  """

  # Revision date of the Discovery document this client was generated from.
  @discovery_revision "20210208"

  def discovery_revision() do
    @discovery_revision
  end
end
| 32.518519
| 74
| 0.757403
|
9e2a0dde2c9b4324af1c6a68359b7614af56aa5b
| 4,374
|
ex
|
Elixir
|
clients/recommendation_engine/lib/google_api/recommendation_engine/v1beta1/model/google_cloud_recommendationengine_v1beta1_product_catalog_item.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/recommendation_engine/lib/google_api/recommendation_engine/v1beta1/model/google_cloud_recommendationengine_v1beta1_product_catalog_item.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/recommendation_engine/lib/google_api/recommendation_engine/v1beta1/model/google_cloud_recommendationengine_v1beta1_product_catalog_item.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItem do
  @moduledoc """
  ProductCatalogItem captures item metadata specific to retail products.

  ## Attributes

  *   `availableQuantity` (*type:* `String.t`, *default:* `nil`) - Optional. The available quantity of the item.
  *   `canonicalProductUri` (*type:* `String.t`, *default:* `nil`) - Optional. Canonical URL directly linking to the item detail page with a length limit of 5 KiB.
  *   `costs` (*type:* `map()`, *default:* `nil`) - Optional. A map to pass the costs associated with the product. For example: {"manufacturing": 45.5} The profit of selling this item is computed like so: * If 'exactPrice' is provided, profit = displayPrice - sum(costs) * If 'priceRange' is provided, profit = minPrice - sum(costs)
  *   `currencyCode` (*type:* `String.t`, *default:* `nil`) - Optional. Only required if the price is set. Currency code for price/costs. Use three-character ISO-4217 code.
  *   `exactPrice` (*type:* `GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItemExactPrice.t`, *default:* `nil`) - Optional. The exact product price.
  *   `images` (*type:* `list(GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1Image.t)`, *default:* `nil`) - Optional. Product images for the catalog item.
  *   `priceRange` (*type:* `GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItemPriceRange.t`, *default:* `nil`) - Optional. The product price range.
  *   `stockState` (*type:* `String.t`, *default:* `nil`) - Optional. Online stock state of the catalog item. Default is `IN_STOCK`.
  """

  # ModelBase injects the field/1,2 macros below plus decode/2 used by the
  # Poison.Decoder implementation further down this file.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :availableQuantity => String.t() | nil,
          :canonicalProductUri => String.t() | nil,
          :costs => map() | nil,
          :currencyCode => String.t() | nil,
          :exactPrice =>
            GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItemExactPrice.t()
            | nil,
          :images =>
            list(
              GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1Image.t()
            )
            | nil,
          :priceRange =>
            GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItemPriceRange.t()
            | nil,
          :stockState => String.t() | nil
        }

  # Scalar fields round-trip as-is; nested models declare their struct via
  # the :as option so decoding produces typed structs rather than bare maps.
  field(:availableQuantity)
  field(:canonicalProductUri)
  field(:costs, type: :map)
  field(:currencyCode)

  field(:exactPrice,
    as:
      GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItemExactPrice
  )

  field(:images,
    as: GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1Image,
    type: :list
  )

  field(:priceRange,
    as:
      GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItemPriceRange
  )

  field(:stockState)
end
defimpl Poison.Decoder,
  for:
    GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItem do
  alias GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItem

  # Delegates to the model's own decode/2 (generated via
  # `use GoogleApi.Gax.ModelBase`) so nested fields become typed structs.
  def decode(value, options) do
    GoogleCloudRecommendationengineV1beta1ProductCatalogItem.decode(value, options)
  end
end
defimpl Poison.Encoder,
  for:
    GoogleApi.RecommendationEngine.V1beta1.Model.GoogleCloudRecommendationengineV1beta1ProductCatalogItem do
  alias GoogleApi.Gax.ModelBase

  # All generated models share the generic ModelBase JSON encoder.
  def encode(value, options) do
    ModelBase.encode(value, options)
  end
end
| 45.5625
| 332
| 0.73251
|
9e2a2e04acc2480ba281baab587ea4ea63be59be
| 166
|
exs
|
Elixir
|
implementations/elixir/ockam/ockam/config/config.exs
|
jjscheel/ockam
|
04088346c5c11c0f2969422288ffd95404d9b00a
|
[
"Apache-2.0"
] | null | null | null |
implementations/elixir/ockam/ockam/config/config.exs
|
jjscheel/ockam
|
04088346c5c11c0f2969422288ffd95404d9b00a
|
[
"Apache-2.0"
] | null | null | null |
implementations/elixir/ockam/ockam/config/config.exs
|
jjscheel/ockam
|
04088346c5c11c0f2969422288ffd95404d9b00a
|
[
"Apache-2.0"
] | null | null | null |
import Config

# Use the binary V2 wire format by default when encoding/decoding messages.
config :ockam, Ockam.Wire, default: Ockam.Wire.Binary.V2

# Include module, line and pid metadata in console log output.
config :logger, :console, metadata: [:module, :line, :pid]

# Load environment-specific overrides (e.g. dev.exs, test.exs, prod.exs).
import_config "#{Mix.env()}.exs"
| 20.75
| 58
| 0.710843
|
9e2a35c29051354908ffd0a802cb90175fd841c2
| 12,101
|
exs
|
Elixir
|
test/pow/store/backend/mnesia_cache_test.exs
|
randaalex/pow
|
2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b
|
[
"MIT"
] | null | null | null |
test/pow/store/backend/mnesia_cache_test.exs
|
randaalex/pow
|
2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b
|
[
"MIT"
] | null | null | null |
test/pow/store/backend/mnesia_cache_test.exs
|
randaalex/pow
|
2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b
|
[
"MIT"
] | null | null | null |
defmodule Pow.Store.Backend.MnesiaCacheTest do
use ExUnit.Case
doctest Pow.Store.Backend.MnesiaCache
alias Pow.{Config, Config.ConfigError, Store.Backend.MnesiaCache}
@default_config [namespace: "pow:test", ttl: :timer.hours(1)]
setup_all do
# Turn node into a distributed node with the given long name
:net_kernel.start([:"master@127.0.0.1"])
# Allow spawned nodes to fetch all code from this node
:erl_boot_server.start([])
{:ok, ipv4} = :inet.parse_ipv4_address('127.0.0.1')
:erl_boot_server.add_slave(ipv4)
:ok
end
describe "single node" do
setup do
:mnesia.kill()
File.rm_rf!("tmp/mnesia")
File.mkdir_p!("tmp/mnesia")
start(@default_config)
:ok
end
test "can put, get and delete records with persistent storage" do
assert MnesiaCache.get(@default_config, "key") == :not_found
MnesiaCache.put(@default_config, "key", "value")
:timer.sleep(100)
assert MnesiaCache.get(@default_config, "key") == "value"
restart(@default_config)
assert MnesiaCache.get(@default_config, "key") == "value"
MnesiaCache.delete(@default_config, "key")
:timer.sleep(100)
assert MnesiaCache.get(@default_config, "key") == :not_found
end
test "with no `:ttl` config option" do
assert_raise ConfigError, "`:ttl` configuration option is required for Pow.Store.Backend.MnesiaCache", fn ->
MnesiaCache.put([namespace: "pow:test"], "key", "value")
end
end
test "fetch keys" do
MnesiaCache.put(@default_config, "key1", "value")
MnesiaCache.put(@default_config, "key2", "value")
:timer.sleep(100)
assert MnesiaCache.keys(@default_config) == ["key1", "key2"]
end
test "records auto purge with persistent storage" do
config = Config.put(@default_config, :ttl, 100)
MnesiaCache.put(config, "key", "value")
:timer.sleep(50)
assert MnesiaCache.get(config, "key") == "value"
:timer.sleep(100)
assert MnesiaCache.get(config, "key") == :not_found
MnesiaCache.put(config, "key", "value")
:timer.sleep(50)
restart(config)
assert MnesiaCache.get(config, "key") == "value"
:timer.sleep(100)
assert MnesiaCache.get(config, "key") == :not_found
end
end
defp start(config) do
start_supervised!({MnesiaCache, config})
end
defp restart(config) do
:ok = stop_supervised(MnesiaCache)
:mnesia.stop()
start(config)
end
describe "distributed nodes" do
setup do
File.rm_rf!("tmp/mnesia_multi")
File.mkdir_p!("tmp/mnesia_multi")
on_exit(fn ->
:slave.stop(:'a@127.0.0.1')
:slave.stop(:'b@127.0.0.1')
end)
:ok
end
@startup_wait_time 3_000
test "will join cluster" do
# Init node a and write to it
node_a = spawn_node("a")
{:ok, _pid} = :rpc.call(node_a, MnesiaCache, :start_link, [@default_config])
assert :rpc.call(node_a, :mnesia, :table_info, [MnesiaCache, :storage_type]) == :disc_copies
assert :rpc.call(node_a, :mnesia, :system_info, [:extra_db_nodes]) == []
assert :rpc.call(node_a, :mnesia, :system_info, [:running_db_nodes]) == [node_a]
assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, "key_set_on_a", "value"])
:timer.sleep(50)
assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_set_on_a"]) == "value"
# Join cluster with node b and ensures that it has node a data
node_b = spawn_node("b")
config = @default_config ++ [extra_db_nodes: [node_a]]
{:ok, _pid} = :rpc.call(node_b, MnesiaCache, :start_link, [config])
assert :rpc.call(node_b, :mnesia, :table_info, [MnesiaCache, :storage_type]) == :disc_copies
assert :rpc.call(node_b, :mnesia, :system_info, [:extra_db_nodes]) == [node_a]
assert :rpc.call(node_b, :mnesia, :system_info, [:running_db_nodes]) == [node_a, node_b]
assert :rpc.call(node_b, MnesiaCache, :get, [@default_config, "key_set_on_a"]) == "value"
# Write to node b can be fetched on node a
assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, "key_set_on_b", "value"])
:timer.sleep(50)
assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_set_on_b"]) == "value"
# Set short TTL on node a
config = Config.put(@default_config, :ttl, 150)
assert :rpc.call(node_a, MnesiaCache, :put, [config, "short_ttl_key_set_on_a", "value"])
:timer.sleep(50)
# Stop node a
:ok = :slave.stop(node_a)
:timer.sleep(50)
assert :rpc.call(node_b, :mnesia, :system_info, [:running_db_nodes]) == [node_b]
# Ensure that node b invalidates with TTL set on node a
assert :rpc.call(node_b, MnesiaCache, :get, [config, "short_ttl_key_set_on_a"]) == "value"
:timer.sleep(50)
assert :rpc.call(node_b, MnesiaCache, :get, [config, "short_ttl_key_set_on_a"]) == :not_found
# Continue writing to node b with short TTL
config = Config.put(@default_config, :ttl, @startup_wait_time + 100)
assert :rpc.call(node_b, MnesiaCache, :put, [config, "short_ttl_key_2_set_on_b", "value"])
:timer.sleep(50)
assert :rpc.call(node_b, MnesiaCache, :get, [config, "short_ttl_key_2_set_on_b"]) == "value"
# Start node a and join cluster
startup_timestamp = :os.system_time(:millisecond)
node_a = spawn_node("a")
config = @default_config ++ [extra_db_nodes: [node_b]]
{:ok, _pid} = :rpc.call(node_a, MnesiaCache, :start_link, [config])
assert :rpc.call(node_b, :mnesia, :system_info, [:running_db_nodes]) == [node_a, node_b]
assert :rpc.call(node_b, MnesiaCache, :get, [config, "short_ttl_key_2_set_on_b"]) == "value"
assert :rpc.call(node_a, MnesiaCache, :get, [config, "short_ttl_key_2_set_on_b"]) == "value"
# Stop node b
:ok = :slave.stop(node_b)
# Node a invalidates short TTL value written on node b
startup_time = :os.system_time(:millisecond) - startup_timestamp
:timer.sleep(@startup_wait_time - startup_time + 100)
assert :rpc.call(node_a, MnesiaCache, :get, [config, "short_ttl_key_2_set_on_b"]) == :not_found
end
test "recovers from netsplit with MnesiaCache.Unsplit" do
node_a = spawn_node("a")
{:ok, _pid} = :rpc.call(node_a, Supervisor, :start_child, [Pow.Supervisor, {MnesiaCache, @default_config}])
{:ok, _pid} = :rpc.call(node_a, Supervisor, :start_child, [Pow.Supervisor, MnesiaCache.Unsplit])
# Create isolated table on node a
{:atomic, :ok} = :rpc.call(node_a, :mnesia, :create_table, [:node_a_table, [disc_copies: [node_a]]])
:ok = :rpc.call(node_a, :mnesia, :wait_for_tables, [[:node_a_table], 1_000])
:ok = :rpc.call(node_a, :mnesia, :dirty_write, [{:node_a_table, :key, "a"}])
node_b = spawn_node("b")
config = @default_config ++ [extra_db_nodes: [node_a]]
{:ok, _pid} = :rpc.call(node_b, Supervisor, :start_child, [Pow.Supervisor, {MnesiaCache, config}])
{:ok, _pid} = :rpc.call(node_b, Supervisor, :start_child, [Pow.Supervisor, MnesiaCache.Unsplit])
# Create isolated table on node b
{:atomic, :ok} = :rpc.call(node_b, :mnesia, :create_table, [:node_b_table, [disc_copies: [node_b]]])
:ok = :rpc.call(node_b, :mnesia, :wait_for_tables, [[:node_b_table], 1_000])
:ok = :rpc.call(node_b, :mnesia, :dirty_write, [{:node_b_table, :key, "b"}])
# Ensure that data writing on node a is replicated on node b
assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, "key_1", "value"])
:timer.sleep(50)
assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_1"]) == "value"
assert :rpc.call(node_b, MnesiaCache, :get, [@default_config, "key_1"]) == "value"
# Disconnect the nodes
disconnect(node_b, node_a)
# Continue writing on node a and node b
assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, "key_1", "a"])
assert :rpc.call(node_a, MnesiaCache, :put, [@default_config, "key_1_a", "value"])
assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, "key_1", "b"])
assert :rpc.call(node_b, MnesiaCache, :put, [@default_config, "key_1_b", "value"])
:timer.sleep(50)
assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_1"]) == "a"
assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_1_a"]) == "value"
assert :rpc.call(node_b, MnesiaCache, :get, [@default_config, "key_1"]) == "b"
assert :rpc.call(node_b, MnesiaCache, :get, [@default_config, "key_1_b"]) == "value"
# Reconnect
connect(node_b, node_a)
# Node a wins recovery and node b purges its data
assert :rpc.call(node_a, :mnesia, :system_info, [:running_db_nodes]) == [node_b, node_a]
assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_1"]) == "a"
assert :rpc.call(node_b, MnesiaCache, :get, [@default_config, "key_1"]) == "a"
assert :rpc.call(node_a, MnesiaCache, :get, [@default_config, "key_1_b"]) == :not_found
assert :rpc.call(node_b, MnesiaCache, :get, [@default_config, "key_1_a"]) == "value"
# Isolated tables still works on both nodes
assert :rpc.call(node_a, :mnesia, :dirty_read, [{:node_a_table, :key}]) == [{:node_a_table, :key, "a"}]
assert :rpc.call(node_b, :mnesia, :dirty_read, [{:node_b_table, :key}]) == [{:node_b_table, :key, "b"}]
# Shared tables unrelated to Pow can't reconnect
{:atomic, :ok} = :rpc.call(node_a, :mnesia, :create_table, [:shared, [disc_copies: [node_a]]])
{:atomic, :ok} = :rpc.call(node_b, :mnesia, :add_table_copy, [:shared, node_b, :disc_copies])
disconnect(node_b, node_a)
connect(node_b, node_a)
assert :rpc.call(node_a, :mnesia, :system_info, [:running_db_nodes]) == [node_a]
# Can't reconnect if table not defined in flush table
:rpc.call(node_a, MnesiaCache.Unsplit, :__heal__, [node_b, [flush_tables: [:unrelated]]])
assert :rpc.call(node_a, :mnesia, :system_info, [:running_db_nodes]) == [node_a]
# Can reconnect if `:flush_tables` is set as `:all` or with table
:rpc.call(node_a, MnesiaCache.Unsplit, :__heal__, [node_b, [flush_tables: [:shared]]])
assert :rpc.call(node_a, :mnesia, :system_info, [:running_db_nodes]) == [node_b, node_a]
disconnect(node_b, node_a)
connect(node_b, node_a)
assert :rpc.call(node_a, :mnesia, :system_info, [:running_db_nodes]) == [node_a]
:rpc.call(node_a, MnesiaCache.Unsplit, :__heal__, [node_b, [flush_tables: :all]])
assert :rpc.call(node_a, :mnesia, :system_info, [:running_db_nodes]) == [node_b, node_a]
end
end
# Boots a helper node asynchronously and blocks (up to 30s) until it is ready.
defp spawn_node(sname) do
  task = Task.async(fn -> init_node(sname) end)
  Task.await(task, 30_000)
end
# Boots a fresh node on 127.0.0.1 and mirrors the parent's code paths,
# application environment and running applications onto it.
# NOTE(review): `:slave` is deprecated in recent OTP releases in favor of
# `:peer` — confirm which OTP versions this suite must support.
defp init_node(sname) do
  {:ok, node} = :slave.start('127.0.0.1', String.to_atom(sname), '-loader inet -hosts 127.0.0.1 -setcookie #{:erlang.get_cookie()}')
  # Copy code: make the parent's compiled beam paths visible on the new node
  rpc(node, :code, :add_paths, [:code.get_path()])
  # Copy all config: replicate every loaded application's env, key by key
  for {app_name, _, _} <- Application.loaded_applications() do
    for {key, val} <- Application.get_all_env(app_name) do
      rpc(node, Application, :put_env, [app_name, key, val])
    end
  end
  # Set mnesia directory: a per-sname path so each node keeps separate files
  rpc(node, Application, :put_env, [:mnesia, :dir, 'tmp/mnesia_multi/#{sname}'])
  # Start all apps that are running locally, under the same Mix env
  rpc(node, Application, :ensure_all_started, [:mix])
  rpc(node, Mix, :env, [Mix.env()])
  for {app_name, _, _} <- Application.started_applications() do
    rpc(node, Application, :ensure_all_started, [app_name])
  end
  # Remove logger console backend to keep remote log noise out of test output
  rpc(node, Logger, :remove_backend, [:console])
  node
end
# Synchronous remote apply on `node`; delegates to :rpc.block_call/4.
defp rpc(node, module, function, args), do: :rpc.block_call(node, module, function, args)
# Tells `node_a` to drop its connection to `node_b`, then waits briefly
# so the disconnect can propagate before the test continues.
defp disconnect(node_a, node_b) do
  true = :rpc.call(node_a, Node, :disconnect, [node_b])
  Process.sleep(50)
end
# Tells `node_a` to (re)connect to `node_b`, then waits for the cluster
# to settle before the test continues.
defp connect(node_a, node_b) do
  true = :rpc.call(node_a, Node, :connect, [node_b])
  Process.sleep(500)
end
end
| 41.020339
| 134
| 0.647385
|
9e2a3db868eff9e01723040d57a16df5a7a508cf
| 3,942
|
ex
|
Elixir
|
lib/chat_api/browser_sessions.ex
|
Tiamat-Tech/papercups
|
f17d2b0ce080c0edab92a4b2e6d4afcef04aa291
|
[
"MIT"
] | 1
|
2021-06-17T03:17:24.000Z
|
2021-06-17T03:17:24.000Z
|
lib/chat_api/browser_sessions.ex
|
Tiamat-Tech/papercups
|
f17d2b0ce080c0edab92a4b2e6d4afcef04aa291
|
[
"MIT"
] | 1
|
2021-01-17T10:42:34.000Z
|
2021-01-17T10:42:34.000Z
|
lib/chat_api/browser_sessions.ex
|
Tiamat-Tech/papercups
|
f17d2b0ce080c0edab92a4b2e6d4afcef04aa291
|
[
"MIT"
] | null | null | null |
defmodule ChatApi.BrowserSessions do
  @moduledoc """
  The BrowserSessions context.

  Query helpers for `ChatApi.BrowserSessions.BrowserSession` records,
  scoped by account and filtered by string-keyed parameter maps
  (see `filter_where/1`).
  """
  import Ecto.Query, warn: false
  alias ChatApi.Repo
  alias ChatApi.BrowserSessions.BrowserSession

  @doc """
  Lists browser sessions for `account_id`, most recently updated first,
  with the `:customer` association preloaded.

  The special `"limit"` filter key caps the result size; it must be a
  string (defaults to `"100"`) — `String.to_integer/1` raises otherwise.
  """
  @spec list_browser_sessions(binary(), map()) :: [BrowserSession.t()]
  def list_browser_sessions(account_id, filters \\ %{}) do
    limit = filters |> Map.get("limit", "100") |> String.to_integer()
    BrowserSession
    |> where(account_id: ^account_id)
    |> where(^filter_where(filters))
    |> order_by(desc: :updated_at)
    |> limit(^limit)
    |> Repo.all()
    |> Repo.preload([:customer])
  end

  @doc """
  Counts the browser sessions for `account_id` matching `filters`.
  """
  @spec count_browser_sessions(binary(), map()) :: number()
  def count_browser_sessions(account_id, filters \\ %{}) do
    BrowserSession
    |> where(account_id: ^account_id)
    |> where(^filter_where(filters))
    |> select([p], count(p.id))
    |> Repo.one()
  end
  @doc """
  Gets a single browser_session.
  Raises `Ecto.NoResultsError` if the Browser session does not exist.
  ## Examples
      iex> get_browser_session!(123)
      %BrowserSession{}
      iex> get_browser_session!(456)
      ** (Ecto.NoResultsError)
  """
  @spec get_browser_session!(binary()) :: BrowserSession.t()
  def get_browser_session!(id) do
    BrowserSession |> Repo.get!(id) |> Repo.preload([:browser_replay_events, :customer])
  end
  # Two-arity variant: additionally scopes the lookup to `account_id`,
  # so one account cannot fetch another account's session by id.
  @spec get_browser_session!(binary(), binary()) :: BrowserSession.t()
  def get_browser_session!(id, account_id) do
    BrowserSession
    |> where(id: ^id)
    |> where(account_id: ^account_id)
    |> Repo.one!()
    |> Repo.preload([:browser_replay_events, :customer])
  end
  @doc """
  Creates a browser_session.
  ## Examples
      iex> create_browser_session(%{field: value})
      {:ok, %BrowserSession{}}
      iex> create_browser_session(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_browser_session(attrs \\ %{}) do
    %BrowserSession{}
    |> BrowserSession.changeset(attrs)
    |> Repo.insert()
  end
  @doc """
  Updates a browser_session.
  ## Examples
      iex> update_browser_session(browser_session, %{field: new_value})
      {:ok, %BrowserSession{}}
      iex> update_browser_session(browser_session, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_browser_session(%BrowserSession{} = browser_session, attrs) do
    browser_session
    |> BrowserSession.changeset(attrs)
    |> Repo.update()
  end
  @doc """
  Deletes a browser_session.
  ## Examples
      iex> delete_browser_session(browser_session)
      {:ok, %BrowserSession{}}
      iex> delete_browser_session(browser_session)
      {:error, %Ecto.Changeset{}}
  """
  def delete_browser_session(%BrowserSession{} = browser_session) do
    Repo.delete(browser_session)
  end
  @doc """
  Returns an `%Ecto.Changeset{}` for tracking browser_session changes.
  ## Examples
      iex> change_browser_session(browser_session)
      %Ecto.Changeset{data: %BrowserSession{}}
  """
  def change_browser_session(%BrowserSession{} = browser_session, attrs \\ %{}) do
    BrowserSession.changeset(browser_session, attrs)
  end

  @doc """
  Returns `true` if a browser session with `id` exists (any account).
  """
  def exists?(id) do
    count =
      BrowserSession
      |> where(id: ^id)
      |> select([p], count(p.id))
      |> Repo.one()
    count > 0
  end
  # Pulled from https://hexdocs.pm/ecto/dynamic-queries.html#building-dynamic-queries
  # Folds recognized string keys into one dynamic WHERE expression;
  # unknown keys fall through untouched (the catch-all clause).
  @spec filter_where(map) :: %Ecto.Query.DynamicExpr{}
  def filter_where(params) do
    Enum.reduce(params, dynamic(true), fn
      {"customer_id", value}, dynamic ->
        dynamic([p], ^dynamic and p.customer_id == ^value)
      {"ids", list}, dynamic ->
        dynamic([p], ^dynamic and p.id in ^list)
      # "active" means the session has no finished_at timestamp yet
      {"active", "true"}, dynamic ->
        dynamic([p], ^dynamic and is_nil(p.finished_at))
      {"active", "false"}, dynamic ->
        dynamic([p], ^dynamic and not is_nil(p.finished_at))
      {_, _}, dynamic ->
        # Not a where parameter
        dynamic
    end)
  end
end
| 24.949367
| 88
| 0.647894
|
9e2a554af4056e57778b1bed6af3e7d82c61219a
| 1,395
|
ex
|
Elixir
|
lib/codes/codes_w03.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
lib/codes/codes_w03.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
lib/codes/codes_w03.ex
|
badubizzle/icd_code
|
4c625733f92b7b1d616e272abc3009bb8b916c0c
|
[
"Apache-2.0"
] | null | null | null |
defmodule IcdCode.ICDCode.Codes_W03 do
  alias IcdCode.ICDCode

  # ICD category shared by every code in this module.
  @category_code "W03"

  def _W03XXXA do
    build(
      "W03XXXA",
      "XXXA",
      "Other fall on same level due to collision with another person, initial encounter"
    )
  end

  def _W03XXXD do
    build(
      "W03XXXD",
      "XXXD",
      "Other fall on same level due to collision with another person, subsequent encounter"
    )
  end

  def _W03XXXS do
    build(
      "W03XXXS",
      "XXXS",
      "Other fall on same level due to collision with another person, sequela"
    )
  end

  # Every W03 code uses the same text for its full, short and category
  # names, so a single builder covers all three fields.
  defp build(full_code, short_code, name) do
    %ICDCode{
      full_code: full_code,
      category_code: @category_code,
      short_code: short_code,
      full_name: name,
      short_name: name,
      category_name: name
    }
  end
end
| 41.029412
| 110
| 0.676703
|
9e2a780caa256a65ca2488aa3f1b741f3161dc7c
| 2,343
|
ex
|
Elixir
|
lib/chatbot/alias_entity.ex
|
mikehelmick/meme-bot
|
52a84cfb3f5ddcdddadf59b0ba3976f9e3f23800
|
[
"Apache-2.0"
] | 7
|
2019-04-05T06:12:56.000Z
|
2021-04-03T11:39:40.000Z
|
lib/chatbot/alias_entity.ex
|
mikehelmick/meme-bot
|
52a84cfb3f5ddcdddadf59b0ba3976f9e3f23800
|
[
"Apache-2.0"
] | null | null | null |
lib/chatbot/alias_entity.ex
|
mikehelmick/meme-bot
|
52a84cfb3f5ddcdddadf59b0ba3976f9e3f23800
|
[
"Apache-2.0"
] | 3
|
2019-04-20T13:05:48.000Z
|
2019-06-05T16:52:46.000Z
|
defmodule Chatbot.AliasEntity do
  @moduledoc """
  An alias record mapping a short name to an image URL, stored in
  Google Cloud Datastore under the `"Alias"` kind.
  """

  # NOTE(review): "EntityBehviour" looks like a typo for "EntityBehaviour",
  # but the behaviour module elsewhere in this project may genuinely carry
  # this spelling — confirm before renaming here.
  @behaviour Chatbot.EntityBehviour

  alias Chatbot.Properties, as: Properties

  defstruct name: nil,
            imageUrl: nil,
            owner: nil,
            createdAt: nil,
            updatedAt: nil,
            uses: nil

  # Fixed typespec: `createdAt`/`updatedAt` hold DateTime structs once
  # built via new/3 (the old spec allowed only nil), and `Integer.t()`
  # is not a defined type — `non_neg_integer()` matches the 0-based,
  # incrementing counter.
  @type t :: %__MODULE__{
          name: String.t(),
          imageUrl: String.t(),
          owner: String.t(),
          createdAt: DateTime.t() | nil,
          updatedAt: DateTime.t() | nil,
          uses: non_neg_integer() | nil
        }

  @doc """
  Builds a new alias owned by `owner`, stamped with the current UTC time
  and a zeroed usage counter.
  """
  def new(name, url, owner) do
    %Chatbot.AliasEntity{
      name: name,
      imageUrl: url,
      owner: owner,
      createdAt: DateTime.utc_now(),
      updatedAt: DateTime.utc_now(),
      uses: 0
    }
  end

  # Datastore kind under which aliases are stored.
  def kind(), do: "Alias"

  # Field accessors.
  def name(%Chatbot.AliasEntity{name: name}), do: name
  def imageUrl(%Chatbot.AliasEntity{imageUrl: url}), do: url
  def owner(%Chatbot.AliasEntity{owner: owner}), do: owner
  def uses(%Chatbot.AliasEntity{uses: uses}), do: uses

  @doc """
  Returns a copy of `entity` with its usage counter incremented.
  """
  def addUse(entity = %Chatbot.AliasEntity{uses: uses}) do
    # Struct update syntax instead of Map.put/3: it raises on unknown
    # keys, preserving the struct guarantee.
    %{entity | uses: uses + 1}
  end

  @doc """
  Converts the struct into a Datastore entity keyed by the alias name.
  """
  def toEntity(%{name: name, imageUrl: imageUrl,
                 owner: owner, createdAt: createdAt,
                 updatedAt: updatedAt, uses: uses}) do
    %GoogleApi.Datastore.V1.Model.Entity {
      key: %GoogleApi.Datastore.V1.Model.Key {
        path: [
          %GoogleApi.Datastore.V1.Model.PathElement{
            kind: kind(),
            name: name
          }
        ]
      },
      # imageUrl is stored unindexed; the rest are indexed properties.
      # NOTE(review): setUnindexedString mixes camelCase with the
      # snake_case setters below — confirm the Properties API naming.
      properties: %{}
      |> Properties.setUnindexedString("imageUrl", imageUrl)
      |> Properties.set_string_property("owner", owner)
      |> Properties.set_datetime_property("created_at", createdAt)
      |> Properties.set_datetime_property("updated_at", updatedAt)
      |> Properties.set_integer_property("uses", uses)
    }
  end

  @doc """
  Parses a Datastore query result wrapper (`%{"entity" => ...}`).
  """
  def parseEntity(entity) do
    parseQueryEntity(entity["entity"])
  end

  @doc """
  Parses a raw Datastore entity map back into a `Chatbot.AliasEntity`.
  The alias name is recovered from the single-element key path.
  """
  def parseQueryEntity(entity) do
    [path] = entity["key"]["path"]
    name = path["name"]
    properties = entity["properties"]
    %Chatbot.AliasEntity{
      name: name,
      imageUrl: Properties.parse_string(properties["imageUrl"]),
      owner: Properties.parse_string(properties["owner"]),
      createdAt: Properties.parse_timestamp(properties["created_at"]),
      updatedAt: Properties.parse_timestamp(properties["updated_at"]),
      uses: Properties.parse_int(properties["uses"])
    }
  end
end
| 28.573171
| 70
| 0.624413
|
9e2a9c1b8b1c2c1d2301c316034f1e6b3e059d94
| 324
|
exs
|
Elixir
|
test/co2_offset/geo/great_circle_distance_test.exs
|
styx/co2_offset
|
ac4b2bce8142e2d33ea089322c8dade34839448b
|
[
"Apache-2.0"
] | 15
|
2018-12-26T10:31:16.000Z
|
2020-12-01T09:27:01.000Z
|
test/co2_offset/geo/great_circle_distance_test.exs
|
styx/co2_offset
|
ac4b2bce8142e2d33ea089322c8dade34839448b
|
[
"Apache-2.0"
] | 267
|
2018-12-26T07:46:17.000Z
|
2020-04-04T17:05:47.000Z
|
test/co2_offset/geo/great_circle_distance_test.exs
|
styx/co2_offset
|
ac4b2bce8142e2d33ea089322c8dade34839448b
|
[
"Apache-2.0"
] | 1
|
2019-07-12T13:53:25.000Z
|
2019-07-12T13:53:25.000Z
|
defmodule Co2Offset.Geo.GreatCircleDistanceTest do
  use Co2Offset.DataCase, async: true

  alias Co2Offset.Geo.GreatCircleDistance

  # Known coordinate pairs with their expected distances (km) act as
  # regression fixtures for the haversine-style calculation.
  test "correct calculations" do
    assert GreatCircleDistance.call(-6.08, 145.39, -5.20, 145.78) == 107
    assert GreatCircleDistance.call(38.15, 21.42, 64.19, -83.35) == 6899
  end
end
| 29.454545
| 73
| 0.737654
|
9e2abe51922db2e44e626e2851557469f4800b64
| 872
|
ex
|
Elixir
|
lib/multiverses.finch.ex
|
ityonemo/multiverses_finch
|
200af83f02109ee9d1e51fac929a44c7c249c44c
|
[
"MIT"
] | null | null | null |
lib/multiverses.finch.ex
|
ityonemo/multiverses_finch
|
200af83f02109ee9d1e51fac929a44c7c249c44c
|
[
"MIT"
] | null | null | null |
lib/multiverses.finch.ex
|
ityonemo/multiverses_finch
|
200af83f02109ee9d1e51fac929a44c7c249c44c
|
[
"MIT"
] | null | null | null |
defmodule Multiverses.Finch do
  @moduledoc """
  clones and instruments the Finch library with a substituted build
  method that allows universe assignments to escape the BEAM vm via http
  requests.
  This is useful to test APIs and HTTP requests in integration or end-to-end
  testing scenarios.
  The http request is instrumented with the `universe` header; this header
  contains the universe information serialized. This can then be intercepted
  downstream using the `Multiverses.Finch.Plug` module.
  """
  use Multiverses.Clone,
    module: Finch,
    except: [build: 2, build: 3, build: 4]

  # Same signature as Finch.build/4, but appends a serialized universe
  # link as a "universe" header before delegating to the real Finch.
  def build(method, url, headers \\ [], body \\ nil) do
    require Multiverses

    encoded_link =
      Multiverses.link()
      |> :erlang.term_to_binary()
      |> Base.url_encode64()

    Elixir.Finch.build(method, url, headers ++ [{"universe", encoded_link}], body)
  end
end
| 30.068966
| 79
| 0.713303
|
9e2b03fa3feb94a54db0eddef37338957d5d0c1c
| 929
|
exs
|
Elixir
|
ch14-03.exs
|
gabrielelana/programming-elixir
|
475319123d21b03c3bfcc02a23178ab9db67a6b3
|
[
"MIT"
] | 9
|
2016-01-22T17:28:27.000Z
|
2020-06-07T01:38:44.000Z
|
ch14-03.exs
|
gabrielelana/programming-elixir
|
475319123d21b03c3bfcc02a23178ab9db67a6b3
|
[
"MIT"
] | null | null | null |
ch14-03.exs
|
gabrielelana/programming-elixir
|
475319123d21b03c3bfcc02a23178ab9db67a6b3
|
[
"MIT"
] | 1
|
2019-04-18T10:08:38.000Z
|
2019-04-18T10:08:38.000Z
|
# Use spawn_link to start a process, and have that process send a message to the
# parent and then exit immediately. Meanwhile, sleep for 500 ms in the parent,
# then receive as many messages as are waiting. Trace what you receive. Does it
# matter that you weren’t waiting for the notification from the child when it
# exited?
defmodule Programming.Elixir do
  defmodule CallMeIfYouDie do
    import :timer, only: [sleep: 1]

    # Child process body: notify the parent, then exit normally.
    def child(parent) do
      send(parent, :ok)
    end

    def start do
      # Trap exits so the linked child's normal exit is delivered as a
      # {:EXIT, pid, :normal} message instead of being ignored.
      Process.flag(:trap_exit, true)
      # Fixed: bare `self` and parenless zero-arity calls are deprecated;
      # use explicit `self()` / `flush()`.
      spawn_link(__MODULE__, :child, [self()])
      sleep(500)
      flush()
    end

    # Prints every pending mailbox message; gives up once the mailbox
    # has been quiet for 100ms.
    defp flush do
      receive do
        message ->
          IO.puts("Received: #{inspect(message)}")
          flush()
      after
        100 ->
          IO.puts("End of messages")
      end
    end
  end

  # Runs when this module is compiled/loaded, printing the trace.
  CallMeIfYouDie.start()

  # receives
  # > Received: :ok
  # > Received: {:EXIT, #PID<0.55.0>, :normal}
end
| 23.225
| 80
| 0.644779
|
9e2b07c3d4af9ee5751ce109943cf67f0b358c8c
| 1,999
|
exs
|
Elixir
|
config/prod.exs
|
elixir-sea/typo_paint
|
61183f0a450e62ce1f561635ed39965fc9fe770d
|
[
"CC-BY-4.0"
] | 4
|
2019-07-31T03:34:19.000Z
|
2019-08-03T04:53:48.000Z
|
config/prod.exs
|
elixir-sea/typo_kart
|
61183f0a450e62ce1f561635ed39965fc9fe770d
|
[
"CC-BY-4.0"
] | 3
|
2019-07-31T04:18:58.000Z
|
2019-08-05T00:00:07.000Z
|
config/prod.exs
|
elixir-sea/typo_paint
|
61183f0a450e62ce1f561635ed39965fc9fe770d
|
[
"CC-BY-4.0"
] | 3
|
2019-07-31T05:55:25.000Z
|
2019-09-15T22:31:32.000Z
|
# Production configuration for the typo_paint application.
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` — confirm the minimum Elixir version before switching.
use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
# config :typo_paint, TypoPaintWeb.Endpoint,
#   url: [host: "example.com", port: 80],
#   cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :typo_paint, TypoPaintWeb.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [
#         :inet6,
#         port: 443,
#         cipher_suite: :strong,
#         keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#         certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
#       ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
#     config :typo_paint, TypoPaintWeb.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
# NOTE(review): the secret import below is commented out — confirm how
# production secrets are actually loaded (e.g. releases/runtime.exs).
# import_config "prod.secret.exs"
| 35.696429
| 66
| 0.712856
|
9e2b30a46e5eb0e9f8e6b58edbbdbb293d9ade07
| 789
|
ex
|
Elixir
|
lib/elixir_mq/dynamo.ex
|
derekchiang/Elixir-Message-Queue
|
b979ad0cbdd38816587c7af337e7e048b59fd517
|
[
"WTFPL"
] | 1
|
2019-10-29T17:56:34.000Z
|
2019-10-29T17:56:34.000Z
|
lib/elixir_mq/dynamo.ex
|
derekchiang/Elixir-Message-Queue
|
b979ad0cbdd38816587c7af337e7e048b59fd517
|
[
"WTFPL"
] | null | null | null |
lib/elixir_mq/dynamo.ex
|
derekchiang/Elixir-Message-Queue
|
b979ad0cbdd38816587c7af337e7e048b59fd517
|
[
"WTFPL"
] | null | null | null |
defmodule ElixirMq.Dynamo do
  @moduledoc """
  Dynamo (legacy web framework) configuration for the elixir_mq app:
  environment, OTP app, dispatch endpoint and static asset route.
  """
  use Dynamo
  config :dynamo,
    # The environment this Dynamo runs on
    env: Mix.env,
    # The OTP application associated to this Dynamo
    otp_app: :elixir_mq,
    # The endpoint to dispatch requests to
    endpoint: ApplicationRouter,
    # The route from where static assets are served
    # You can turn off static assets by setting it to false
    static_route: "/static"
  # Uncomment the lines below to enable the cookie session store
  # NOTE(review): the secret below is committed to source control — if the
  # session store is ever enabled, rotate it and load it from the
  # environment instead.
  # config :dynamo,
  #   session_store: Session.CookieStore,
  #   session_options:
  #     [ key: "_elixir_mq_session",
  #       secret: "dbpxAjZEjw/mN+lW7f0qOgeW5ZLz0u6t4doBjhk14Woi8XsQ7GIV7dhxLJlQ9V8C"]
  # Default functionality available in templates
  templates do
    use Dynamo.Helpers
  end
end
| 26.3
| 85
| 0.717364
|
9e2b4fe8a224783e50ac4319fd0bc87fa3efac87
| 6,922
|
ex
|
Elixir
|
lib/ex_unit/lib/ex_unit.ex
|
hamiltop/elixir
|
3b601660d4d4eb0c69f824fcebbbe93a3f2ba463
|
[
"Apache-2.0"
] | null | null | null |
lib/ex_unit/lib/ex_unit.ex
|
hamiltop/elixir
|
3b601660d4d4eb0c69f824fcebbbe93a3f2ba463
|
[
"Apache-2.0"
] | null | null | null |
lib/ex_unit/lib/ex_unit.ex
|
hamiltop/elixir
|
3b601660d4d4eb0c69f824fcebbbe93a3f2ba463
|
[
"Apache-2.0"
] | null | null | null |
defmodule ExUnit do
  @moduledoc """
  Unit testing framework for Elixir.

  ## Example

  A basic setup for ExUnit is shown below:

      # File: assertion_test.exs

      # 1) Start ExUnit.
      ExUnit.start

      # 2) Create a new test module (test case) and use "ExUnit.Case".
      defmodule AssertionTest do
        # 3) Notice we pass "async: true", this runs the test case
        #    concurrently with other test cases
        use ExUnit.Case, async: true

        # 4) Use the "test" macro instead of "def" for clarity.
        test "the truth" do
          assert true
        end
      end

  To run the tests above, run the file using `elixir` from the
  command line. Assuming you named the file `assertion_test.exs`,
  you can run it as:

      elixir assertion_test.exs

  ## Case, Callbacks and Assertions

  See `ExUnit.Case` and `ExUnit.Callbacks` for more information
  about defining test cases and setting up callbacks.

  The `ExUnit.Assertions` module contains a set of macros to
  generate assertions with appropriate error messages.

  ## Integration with Mix

  Mix is the project management and build tool for Elixir. Invoking `mix test`
  from the command line will run the tests in each file matching the pattern
  `*_test.exs` found in the `test` directory of your project.

  You must create a `test_helper.exs` file inside the
  `test` directory and put the code common to all tests there.

  The minimum example of a `test_helper.exs` file would be:

      # test/test_helper.exs
      ExUnit.start

  Mix will load the `test_helper.exs` file before executing the tests.
  It is not necessary to `require` the `test_helper.exs` file in your test
  files. See `Mix.Tasks.Test` for more information.
  """
  @typedoc "The error state returned by ExUnit.Test and ExUnit.TestCase"
  @type state :: nil | {:failed, failed} | {:skip, binary} | {:invalid, module}
  @type failed :: {Exception.kind, reason :: term, stacktrace :: [tuple]}
  defmodule Test do
    @moduledoc """
    A struct that keeps information about the test.

    It is received by formatters and contains the following fields:

      * `:name`  - the test name
      * `:case`  - the test case
      * `:state` - the test error state (see ExUnit.state)
      * `:time`  - the time to run the test
      * `:tags`  - the test tags
      * `:logs`  - the captured logs
    """
    defstruct [:name, :case, :state, time: 0, tags: %{}, logs: ""]
    # NOTE(review): the struct also carries `:logs`, which is absent from
    # this typespec — confirm whether it should be listed here too.
    @type t :: %__MODULE__{
                 name: atom,
                 case: module,
                 state: ExUnit.state,
                 time: non_neg_integer,
                 tags: map}
  end
  defmodule TestCase do
    @moduledoc """
    A struct that keeps information about the test case.

    It is received by formatters and contains the following fields:

      * `:name`  - the test case name
      * `:state` - the test error state (see ExUnit.state)
      * `:tests` - all tests for this case
    """
    defstruct [:name, :state, tests: []]
    @type t :: %__MODULE__{
                 name: module,
                 state: ExUnit.state,
                 tests: [ExUnit.Test.t]}
  end
  defmodule TimeoutError do
    defexception [:timeout]
    def message(timeout)
    def message(%{timeout: timeout}) do
      """
      test timed out after #{timeout}ms. You can change the timeout:

        1. per test by setting "@tag timeout: x"
        2. per case by setting "@moduletag timeout: x"
        3. globally via "ExUnit.start(timeout: x)" configuration
        4. or set it to infinity per run by calling "mix test --trace"
           (useful when using IEx.pry)

      Timeouts are given as integers in milliseconds.
      """
    end
  end
  use Application
  # OTP application callback: starts the ExUnit supervision tree
  # (test server and on_exit handler).
  @doc false
  def start(_type, []) do
    import Supervisor.Spec
    children = [
      worker(ExUnit.Server, []),
      worker(ExUnit.OnExitHandler, [])
    ]
    opts = [strategy: :one_for_one, name: ExUnit.Supervisor]
    Supervisor.start_link(children, opts)
  end
  @doc """
  Starts ExUnit and automatically runs tests right before the
  VM terminates. It accepts a set of options to configure `ExUnit`
  (the same ones accepted by `configure/1`).

  If you want to run tests manually, you can set `:autorun` to `false`.
  """
  def start(options \\ []) do
    {:ok, _} = Application.ensure_all_started(:ex_unit)
    configure(options)
    if Application.fetch_env!(:ex_unit, :autorun) do
      # Flip autorun off so a second start/1 call does not register the
      # hook twice.
      Application.put_env(:ex_unit, :autorun, false)
      System.at_exit fn
        0 ->
          %{failures: failures} = ExUnit.run
          # Registering a nested at_exit defers forcing the non-zero
          # exit status until after the suite's own hooks have run.
          System.at_exit fn _ ->
            if failures > 0, do: exit({:shutdown, 1})
          end
        _ ->
          # VM is already exiting abnormally; don't run the suite.
          :ok
      end
    end
  end
  @doc """
  Configures ExUnit.

  ## Options

  ExUnit supports the following options:

    * `:assert_receive_timeout` - the timeout to be used on `assert_receive`
      calls. Defaults to 100ms.

    * `:capture_log` - if ExUnit should default to keeping track of log messages
      and print them on test failure. Can be overriden for individual tests via
      `@tag capture_log: false`. Defaults to `false`.

    * `:colors` - a keyword list of colors to be used by some formatters.
      The only option so far is `[enabled: boolean]` which defaults to `IO.ANSI.enabled?/0`

    * `:formatters` - the formatters that will print results;
      defaults to `[ExUnit.CLIFormatter]`

    * `:max_cases` - maximum number of cases to run in parallel;
      defaults to `:erlang.system_info(:schedulers_online)`

    * `:trace` - set ExUnit into trace mode, this sets `:max_cases` to `1` and
      prints each test case and test while running

    * `:autorun` - if ExUnit should run by default on exit; defaults to `true`

    * `:include` - specify which tests are run by skipping tests that do not
      match the filter

    * `:exclude` - specify which tests are run by skipping tests that match the
      filter

    * `:refute_receive_timeout` - the timeout to be used on `refute_receive`
      calls (defaults to 100ms)

    * `:seed` - an integer seed value to randomize the test suite

    * `:stacktrace_depth` - configures the stacktrace depth to be used
      on formatting and reporters (defaults to 20)

    * `:timeout` - set the timeout for the tests (default 60_000ms)
  """
  def configure(options) do
    # Each option is stored in the :ex_unit application environment.
    Enum.each options, fn {k, v} ->
      Application.put_env(:ex_unit, k, v)
    end
  end
  @doc """
  Returns ExUnit configuration.
  """
  def configuration do
    Application.get_all_env(:ex_unit)
  end
  @doc """
  API used to run the tests. It is invoked automatically
  if ExUnit is started via `ExUnit.start/1`.

  Returns a map containing the total number of tests, the number
  of failures and the number of skipped tests.
  """
  def run do
    {async, sync, load_us} = ExUnit.Server.start_run
    ExUnit.Runner.run async, sync, configuration, load_us
  end
end
| 29.330508
| 91
| 0.6449
|
9e2b69c44b2615faca58d16e3335b735e2898087
| 1,625
|
ex
|
Elixir
|
lib/globe_request_mapper_web/endpoint.ex
|
mrkurt/globe-request-mapper
|
1270c7f69a0c1403b026508385fab54d2eaa685e
|
[
"Apache-2.0"
] | 1
|
2022-02-24T04:00:56.000Z
|
2022-02-24T04:00:56.000Z
|
lib/globe_request_mapper_web/endpoint.ex
|
mrkurt/globe-request-mapper
|
1270c7f69a0c1403b026508385fab54d2eaa685e
|
[
"Apache-2.0"
] | null | null | null |
lib/globe_request_mapper_web/endpoint.ex
|
mrkurt/globe-request-mapper
|
1270c7f69a0c1403b026508385fab54d2eaa685e
|
[
"Apache-2.0"
] | 4
|
2021-04-05T22:39:42.000Z
|
2021-04-05T23:16:50.000Z
|
defmodule GlobeRequestMapperWeb.Endpoint do
  use Phoenix.Endpoint, otp_app: :globe_request_mapper
  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_globe_request_mapper_key",
    signing_salt: "1QNCyhHV"
  ]
  # User channel socket: WebSocket only, long-polling disabled.
  socket "/socket", GlobeRequestMapperWeb.UserSocket,
    websocket: true,
    longpoll: false
  # LiveView socket; passes the signed session options so LiveView
  # processes can read the session on connect.
  socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :globe_request_mapper,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)
  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end
  plug Phoenix.LiveDashboard.RequestLogger,
    param_key: "request_logger",
    cookie_key: "request_logger"
  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
  # Body parsers; note that plug order is significant — parsers run
  # before MethodOverride, the session and the router below.
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()
  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  plug GlobeRequestMapperWeb.Router
end
| 30.092593
| 97
| 0.729231
|
9e2ba2cd88ae47d2bd6eb2a185c5ba0aa1ffd5df
| 896
|
ex
|
Elixir
|
clients/sas_portal/lib/google_api/sas_portal/v1alpha1/metadata.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2021-12-20T03:40:53.000Z
|
2021-12-20T03:40:53.000Z
|
clients/sas_portal/lib/google_api/sas_portal/v1alpha1/metadata.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | 1
|
2020-08-18T00:11:23.000Z
|
2020-08-18T00:44:16.000Z
|
clients/sas_portal/lib/google_api/sas_portal/v1alpha1/metadata.ex
|
pojiro/elixir-google-api
|
928496a017d3875a1929c6809d9221d79404b910
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SASPortal.V1alpha1 do
  @moduledoc """
  API client metadata for GoogleApi.SASPortal.V1alpha1.
  """

  # Discovery-document revision this client was generated from.
  @discovery_revision "20210910"

  def discovery_revision, do: @discovery_revision
end
| 33.185185
| 74
| 0.762277
|
9e2bfeb32c8860e26ee0e062e868c84238987485
| 380
|
ex
|
Elixir
|
plugins/one_webrtc/lib/one_webrtc_web/views/error_view.ex
|
smpallen99/ucx_ucc
|
47225f205a6ac4aacdb9bb4f7512dcf4092576ad
|
[
"MIT"
] | 11
|
2017-05-15T18:35:05.000Z
|
2018-02-05T18:27:40.000Z
|
plugins/one_webrtc/lib/one_webrtc_web/views/error_view.ex
|
anndream/infinity_one
|
47225f205a6ac4aacdb9bb4f7512dcf4092576ad
|
[
"MIT"
] | 15
|
2017-11-27T10:38:05.000Z
|
2018-02-09T20:42:08.000Z
|
plugins/one_webrtc/lib/one_webrtc_web/views/error_view.ex
|
anndream/infinity_one
|
47225f205a6ac4aacdb9bb4f7512dcf4092576ad
|
[
"MIT"
] | 4
|
2017-09-13T11:34:16.000Z
|
2018-02-26T13:37:06.000Z
|
defmodule OneWebrtcWeb.ErrorView do
  use OneWebrtcWeb, :view

  # Plain-text bodies for the standard error pages.
  def render("404.html", _assigns), do: "Page not found"

  def render("500.html", _assigns), do: "Internal server error"

  # In case no render clause matches or no
  # template is found, let's render it as 500
  def template_not_found(_template, assigns) do
    render("500.html", assigns)
  end
end
| 21.111111
| 47
| 0.705263
|
9e2c3490854b7adc7ebc04305fbfa46e804e0ebd
| 319
|
exs
|
Elixir
|
config/prod.exs
|
aoswalt/portfolio-web-site
|
9869da8caf9d1d31a1d82e60f437ec0513a84748
|
[
"MIT"
] | 1
|
2020-07-23T12:54:25.000Z
|
2020-07-23T12:54:25.000Z
|
config/prod.exs
|
aoswalt/adamoswalt.com
|
9869da8caf9d1d31a1d82e60f437ec0513a84748
|
[
"MIT"
] | null | null | null |
config/prod.exs
|
aoswalt/adamoswalt.com
|
9869da8caf9d1d31a1d82e60f437ec0513a84748
|
[
"MIT"
] | null | null | null |
# Production configuration for the home application.
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` — confirm the minimum Elixir version before switching.
use Mix.Config
config :home, HomeWeb.Endpoint,
  url: [host: "adamoswalt.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json",
  http: [
    # PORT env var wins; falls back to 80 when unset.
    port: String.to_integer(System.get_env("PORT") || "80"),
    transport_options: [socket_opts: [:inet6]]
  ],
  # server: true starts the endpoint's HTTP server on boot (needed when
  # running as a release, without `mix phx.server`).
  server: true
# Suppress debug-level logging in production.
config :logger, level: :info
| 24.538462
| 60
| 0.68652
|
9e2c63be6665d2edd246fbeca795fa44946269e9
| 4,101
|
ex
|
Elixir
|
lib/aws/generated/braket.ex
|
smanolloff/aws-elixir
|
c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84
|
[
"Apache-2.0"
] | null | null | null |
lib/aws/generated/braket.ex
|
smanolloff/aws-elixir
|
c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84
|
[
"Apache-2.0"
] | null | null | null |
lib/aws/generated/braket.ex
|
smanolloff/aws-elixir
|
c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84
|
[
"Apache-2.0"
] | null | null | null |
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.Braket do
  @moduledoc """
  The Amazon Braket API Reference provides information about the operations and
  structures supported in Amazon Braket.
  """

  @doc """
  Cancels the specified task.
  """
  def cancel_quantum_task(client, quantum_task_arn, input, options \\ []) do
    path_ = "/quantum-task/#{URI.encode(quantum_task_arn)}/cancel"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, 200)
  end
  @doc """
  Creates a quantum task.
  """
  def create_quantum_task(client, input, options \\ []) do
    path_ = "/quantum-task"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 201)
  end
  @doc """
  Retrieves the devices available in Amazon Braket.
  """
  def get_device(client, device_arn, options \\ []) do
    path_ = "/device/#{URI.encode(device_arn)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, 200)
  end
  @doc """
  Retrieves the specified quantum task.
  """
  def get_quantum_task(client, quantum_task_arn, options \\ []) do
    path_ = "/quantum-task/#{URI.encode(quantum_task_arn)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, 200)
  end
  @doc """
  Searches for devices using the specified filters.
  """
  def search_devices(client, input, options \\ []) do
    path_ = "/devices"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 200)
  end
  @doc """
  Searches for tasks that match the specified filter values.
  """
  def search_quantum_tasks(client, input, options \\ []) do
    path_ = "/quantum-tasks"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 200)
  end
  # Builds the full URL, signs the request with SigV4 and performs it,
  # expecting `success_status_code` on success.
  @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "braket"}
    host = build_host("braket", client)
    url = host
          |> build_url(path, client)
          |> add_query(query, client)
    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)
    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end
  # Executes the HTTP call. A response counts as success when its status
  # equals `success_status_code`, or — if that is nil — is one of
  # 200/202/204. Empty bodies decode to nil.
  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}
      {:ok, response} ->
        {:error, {:unexpected_response, response}}
      error = {:error, _reason} -> error
    end
  end
  # Host resolution: "local" region targets a custom endpoint or localhost;
  # otherwise the conventional <prefix>.<region>.<endpoint> form.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end
  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end
  # Appends an encoded query string, or leaves the URL untouched when
  # there are no query parameters.
  defp add_query(url, [], _client) do
    url
  end
  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end
  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end
  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
| 30.604478
| 100
| 0.648622
|
9e2c7c203276423d3afc3c8c11c96ac32c86e18d
| 1,156
|
ex
|
Elixir
|
lib/plausible/session/clickhouse_schema.ex
|
lizlam/plausible
|
886ba62cd814e5ca2d05c51a375bccc753c7c6ff
|
[
"MIT"
] | null | null | null |
lib/plausible/session/clickhouse_schema.ex
|
lizlam/plausible
|
886ba62cd814e5ca2d05c51a375bccc753c7c6ff
|
[
"MIT"
] | null | null | null |
lib/plausible/session/clickhouse_schema.ex
|
lizlam/plausible
|
886ba62cd814e5ca2d05c51a375bccc753c7c6ff
|
[
"MIT"
] | null | null | null |
defmodule Plausible.ClickhouseSession do
  @moduledoc """
  Ecto schema for the ClickHouse `sessions` table.

  Sessions use a signed collapsing model (`sign` is +1/-1) and carry
  denormalized visit attributes (referrer, geo, device) for analytics queries.
  """
  use Ecto.Schema
  import Ecto.Changeset

  @primary_key false
  schema "sessions" do
    field :hostname, :string
    field :domain, :string
    field :user_id, :integer
    field :session_id, :integer
    field :start, :naive_datetime
    field :duration, :integer
    field :is_bounce, :boolean
    field :entry_page, :string
    field :exit_page, :string
    field :pageviews, :integer
    field :events, :integer
    field :sign, :integer
    field :referrer, :string
    field :referrer_source, :string
    field :country_code, :string
    field :screen_size, :string
    field :operating_system, :string
    field :browser, :string
    field :timestamp, :naive_datetime
  end

  @doc """
  Returns a random unsigned 64-bit integer (e.g. for `user_id`/`session_id`).
  """
  def random_uint64() do
    :crypto.strong_rand_bytes(8) |> :binary.decode_unsigned()
  end

  @doc """
  Builds a changeset for a session.

  Note: the previous version cast and required `:fingerprint` and `:length`,
  which are not fields of this schema, so `cast/3` raised `ArgumentError` on
  every call. They are replaced here by the schema's corresponding fields
  `:user_id` and `:duration`.
  """
  def changeset(session, attrs) do
    session
    |> cast(attrs, [
      :hostname,
      :domain,
      :entry_page,
      :exit_page,
      :referrer,
      :user_id,
      :start,
      :duration,
      :is_bounce,
      :operating_system,
      :browser,
      :referrer_source,
      :country_code,
      :screen_size
    ])
    |> validate_required([:hostname, :domain, :user_id, :is_bounce, :start])
  end
end
| 28.9
| 195
| 0.698962
|
9e2c9e210c51d98d18ae81d426d7eee15cd6a702
| 3,227
|
ex
|
Elixir
|
lib/ecto/adapters/postgres/worker.ex
|
TanYewWei/ecto
|
916c6467d5f7368fa10ecd7cfcfd2d4a9924a282
|
[
"Apache-2.0"
] | 1
|
2015-08-27T13:17:10.000Z
|
2015-08-27T13:17:10.000Z
|
lib/ecto/adapters/postgres/worker.ex
|
TanYewWei/ecto
|
916c6467d5f7368fa10ecd7cfcfd2d4a9924a282
|
[
"Apache-2.0"
] | null | null | null |
lib/ecto/adapters/postgres/worker.ex
|
TanYewWei/ecto
|
916c6467d5f7368fa10ecd7cfcfd2d4a9924a282
|
[
"Apache-2.0"
] | null | null | null |
defmodule Ecto.Adapters.Postgres.Worker do
  @moduledoc false
  # Pooled worker that owns a single Postgrex connection on behalf of the Ecto
  # Postgres adapter. Connections are opened lazily (unless `:lazy` is "false")
  # and transparently re-opened on the next call after a disconnect.
  #
  # NOTE(review): this file uses legacy Elixir syntax (`GenServer.Behaviour`,
  # `defrecordp`, `Postgrex.Error[]` record patterns, bare `self`) from the
  # pre-1.0 era; it will not compile on modern Elixir.
  use GenServer.Behaviour

  # Worker state: the Postgrex connection (or nil when disconnected), the
  # original connection params (for reconnects), and an optional {pid, ref}
  # monitor of the process that checked this worker out.
  defrecordp :state, [ :conn, :params, :monitor ]

  @timeout 5000

  def start(args) do
    :gen_server.start(__MODULE__, args, [])
  end

  def start_link(args) do
    :gen_server.start_link(__MODULE__, args, [])
  end

  # Runs a query, raising the Postgrex error on failure.
  def query!(worker, sql, params, timeout \\ @timeout) do
    case :gen_server.call(worker, { :query, sql, params, timeout }, timeout) do
      { :ok, res } -> res
      { :error, Postgrex.Error[] = err } -> raise err
    end
  end

  # Transaction helpers: each raises on failure instead of returning an error.
  def begin!(worker, timeout \\ @timeout) do
    case :gen_server.call(worker, { :begin, timeout }, timeout) do
      :ok -> :ok
      Postgrex.Error[] = err -> raise err
    end
  end

  def commit!(worker, timeout \\ @timeout) do
    case :gen_server.call(worker, { :commit, timeout }, timeout) do
      :ok -> :ok
      Postgrex.Error[] = err -> raise err
    end
  end

  def rollback!(worker, timeout \\ @timeout) do
    case :gen_server.call(worker, { :rollback, timeout }, timeout) do
      :ok -> :ok
      Postgrex.Error[] = err -> raise err
    end
  end

  # Asks the worker to monitor the calling process, so the worker stops if the
  # process that checked it out dies (see the :DOWN clause below).
  def monitor_me(worker) do
    :gen_server.cast(worker, { :monitor, self })
  end

  def demonitor_me(worker) do
    :gen_server.cast(worker, { :demonitor, self })
  end

  def init(args) do
    # Trap exits so a connection crash arrives as an :EXIT message instead of
    # killing the worker (handled in handle_info below).
    Process.flag(:trap_exit, true)

    # Connect eagerly only when :lazy is the string "false"; otherwise start
    # with conn: nil and connect on first use.
    conn =
      case args[:lazy] == "false" && Postgrex.Connection.start_link(args) do
        { :ok, conn } -> conn
        _ -> nil
      end

    { :ok, state(conn: conn, params: args) }
  end

  # Connection is disconnected, reconnect before continuing
  def handle_call(request, from, state(conn: nil, params: params) = s) do
    case Postgrex.Connection.start_link(params) do
      { :ok, conn } ->
        handle_call(request, from, state(s, conn: conn))
      { :error, err } ->
        { :reply, { :error, err }, s }
    end
  end

  def handle_call({ :query, sql, params, timeout }, _from, state(conn: conn) = s) do
    { :reply, Postgrex.Connection.query(conn, sql, params, timeout), s }
  end

  def handle_call({ :begin, timeout }, _from, state(conn: conn) = s) do
    { :reply, Postgrex.Connection.begin(conn, timeout), s }
  end

  def handle_call({ :commit, timeout }, _from, state(conn: conn) = s) do
    { :reply, Postgrex.Connection.commit(conn, timeout), s }
  end

  def handle_call({ :rollback, timeout }, _from, state(conn: conn) = s) do
    { :reply, Postgrex.Connection.rollback(conn, timeout), s }
  end

  # Only one monitor at a time: this clause matches when none is set.
  def handle_cast({ :monitor, pid }, state(monitor: nil) = s) do
    ref = Process.monitor(pid)
    { :noreply, state(s, monitor: { pid, ref }) }
  end

  # Demonitor only succeeds for the same pid that was monitored.
  def handle_cast({ :demonitor, pid }, state(monitor: { pid, ref }) = s) do
    Process.demonitor(ref)
    { :noreply, state(s, monitor: nil) }
  end

  # The connection process exited: drop it; the reconnect clause above will
  # open a new one on the next request.
  def handle_info({ :EXIT, conn, _reason }, state(conn: conn) = s) do
    { :noreply, state(s, conn: nil) }
  end

  # The process that checked us out died: stop normally so the pool can
  # reclaim this worker.
  def handle_info({ :DOWN, ref, :process, pid, _info }, state(monitor: { pid, ref }) = s) do
    { :stop, :normal, s }
  end

  def handle_info(_info, s) do
    { :noreply, s }
  end

  def terminate(_reason, state(conn: nil)) do
    :ok
  end

  def terminate(_reason, state(conn: conn)) do
    Postgrex.Connection.stop(conn)
  end
end
| 26.45082
| 92
| 0.618221
|
9e2cb81993e4754f5be92b5e0d9b7805c81a35da
| 101
|
ex
|
Elixir
|
lib/phoenix_absinthe_dataloader_kv_web/views/page_view.ex
|
alexandrubagu/phoenie_absinthe_dataloader_kv
|
b75ceabe6e384a56b40144e35624bcbd823af273
|
[
"MIT"
] | null | null | null |
lib/phoenix_absinthe_dataloader_kv_web/views/page_view.ex
|
alexandrubagu/phoenie_absinthe_dataloader_kv
|
b75ceabe6e384a56b40144e35624bcbd823af273
|
[
"MIT"
] | 3
|
2020-05-08T21:01:07.000Z
|
2020-05-08T21:01:07.000Z
|
lib/phoenix_absinthe_dataloader_kv_web/views/page_view.ex
|
alexandrubagu/phoenie_absinthe_dataloader_kv
|
b75ceabe6e384a56b40144e35624bcbd823af273
|
[
"MIT"
] | 2
|
2019-02-14T13:43:45.000Z
|
2020-02-28T22:04:33.000Z
|
defmodule PhoenixAbsintheDataloaderKvWeb.PageView do
  @moduledoc """
  View module for page templates; all behavior comes from the app's shared
  `:view` definition pulled in via `use`.
  """
  use PhoenixAbsintheDataloaderKvWeb, :view
end
| 25.25
| 52
| 0.881188
|
9e2cbb4640487745027eadebe6d9c51c270ec0fd
| 1,043
|
ex
|
Elixir
|
lib/rasmus_app.ex
|
enter-haken/rasmus
|
aa4d474ef6c0c018409de82dad62435b74b7aac1
|
[
"Apache-2.0"
] | 8
|
2018-03-01T11:58:50.000Z
|
2018-11-30T02:50:57.000Z
|
lib/rasmus_app.ex
|
enter-haken/rasmus
|
aa4d474ef6c0c018409de82dad62435b74b7aac1
|
[
"Apache-2.0"
] | null | null | null |
lib/rasmus_app.ex
|
enter-haken/rasmus
|
aa4d474ef6c0c018409de82dad62435b74b7aac1
|
[
"Apache-2.0"
] | null | null | null |
defmodule RasmusApp do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false
  use Application
  require Logger

  # Application entry point: reads the Postgres credentials from the app env
  # and boots the supervision tree (core workers + HTTP endpoint on port 8080).
  def start(_type, _args) do
    Logger.info("start rasmus application")

    credentials = Application.get_env(:rasmus, :pg_config)

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    Supervisor.start_link(
      children(credentials),
      strategy: :one_for_one,
      name: Rasmus.Supervisor
    )
  end

  # Child specs to be supervised, in start order.
  defp children(credentials) do
    web_opts = [port: 8080, dispatch: dispatch_config()]

    [
      {Core.Counter, credentials},
      {Core.Inbound, credentials},
      {Core.Manager, credentials},
      {Core.Entity.Graph, credentials},
      {Plug.Adapters.Cowboy2, scheme: :http, plug: Web.Router, options: web_opts}
    ]
  end

  # Cowboy dispatch table: websocket traffic goes to Web.Socket, everything
  # else falls through to the Plug router.
  defp dispatch_config do
    [
      {:_,
       [
         {"/websocket/", Web.Socket, []},
         {:_, Plug.Adapters.Cowboy2.Handler, {Web.Router, []}}
       ]}
    ]
  end
end
| 27.447368
| 116
| 0.651007
|
9e2cd907bf9ab29e2c28f1cf2ddfa3ac54c51689
| 4,512
|
ex
|
Elixir
|
lib/kastlex/cg_cache.ex
|
zmstone/kastlex
|
3478bc230f08bde99e768067787b0ef3f8e1c026
|
[
"Apache-2.0"
] | 38
|
2016-10-21T08:26:15.000Z
|
2021-03-19T23:10:40.000Z
|
lib/kastlex/cg_cache.ex
|
zmstone/kastlex
|
3478bc230f08bde99e768067787b0ef3f8e1c026
|
[
"Apache-2.0"
] | 32
|
2017-02-19T08:57:35.000Z
|
2020-02-05T13:15:11.000Z
|
lib/kastlex/cg_cache.ex
|
zmstone/kastlex
|
3478bc230f08bde99e768067787b0ef3f8e1c026
|
[
"Apache-2.0"
] | 6
|
2017-11-24T16:02:20.000Z
|
2022-02-22T07:44:36.000Z
|
defmodule Kastlex.CgCache do
  @moduledoc false
  # ETS-backed cache of Kafka consumer-group state: committed offsets, group
  # status, and the cache-refresher's own collection progress. Tables are
  # public/named so collector and readers can share them without a server hop.

  # Export all functions in tests so private helpers can be exercised.
  @compile if Mix.env == :test, do: :export_all

  require Logger

  ## ets tables
  @offsets :cg_cache_offsets # ets table for offsets
  @cgs :cg_cache_cgs # ets table for cg status
  @progress :cg_cache_progress # ets table for cg data collection progress

  ## return all (active/inactive consumer groups)
  # Union of group ids seen in the status table (active) and the offsets table
  # (possibly inactive but with committed offsets), deduplicated via gb_sets.
  def get_groups() do
    ## return all active consumer groups
    cg_keys = :ets.select(@cgs, [{{:"$1", :_}, [], [:"$1"]}]) |> :gb_sets.from_list
    ## return also inactive consumer groups (when committed offsets are found)
    offset_keys = :ets.select(@offsets, [{{:"$1", :_}, [], [:"$1"]}]) |> :gb_sets.from_list
    :gb_sets.union(cg_keys, offset_keys) |> :gb_sets.to_list
  end

  # Returns a map describing one group: its committed offsets (with per
  # topic-partition lag) plus the group status, or :inactive when the group is
  # not in the status table.
  def get_group(group_id) do
    committed_offsets =
      lookup(@offsets, group_id, %{}) |>
      Enum.map(fn({{topic, partition}, details}) ->
        offset = fetch!(details, :offset)
        [ {:topic, topic},
          {:partition, partition},
          {:lagging, get_lagging(topic, partition, offset)}
          | to_list(details)] |> Kastlex.CgLib.to_maps
      end)
    case lookup(@cgs, group_id, false) do
      false ->
        %{:group_id => group_id,
          :offsets => committed_offsets,
          :status => :inactive
        }
      value ->
        Kastlex.CgLib.to_maps([{:status, value}]) |>
        put(:group_id, group_id) |>
        put(:offsets, committed_offsets)
    end
  end

  ## Returns all consumer groups with their committed offsets
  # Flattens the per-group nested maps into a list of flat maps, one per
  # {group, topic, partition}.
  def get_consumer_groups_offsets() do
    :ets.select(@offsets, [{{:"$1", :"$2"}, [], [{{:"$1", :"$2"}}]}])
    |> Enum.flat_map(fn({group_id, topics}) ->
      topics
      |> Enum.map(fn({{topic, partition}, details}) ->
        offset = fetch!(details, :offset)
        %{group_id: group_id, topic: topic, partition: partition, offset: offset}
      end)
    end)
  end

  # Upserts (or, when `value` is [], removes) a committed offset for one
  # {topic, partition} of a group. If the group's map becomes empty, the
  # whole ETS entry is deleted so get_groups/0 stops reporting it.
  def committed_offset(key, value) do
    group_id = ets_key = key[:group_id]
    map_key = {key[:topic], key[:partition]}
    group = lookup(@offsets, ets_key, %{})
    group = case value do
              [] -> delete(group, map_key)
              _ -> put(group, map_key, value)
            end
    case group === %{} do
      :true -> :ets.delete(@offsets, group_id)
      :false -> :ets.insert(@offsets, {group_id, group})
    end
  end

  # An empty status means the group is gone: drop it from the status table.
  def new_cg_status(key, []) do
    group_id = key[:group_id]
    :ets.delete(@cgs, group_id)
  end

  def new_cg_status(key, value) do
    group_id = key[:group_id]
    :ets.insert(@cgs, {group_id, value})
  end

  # Records how far the collector has read a given partition of the offsets
  # topic, so collection can resume after a restart.
  def update_progress(partition, offset) do
    :ets.insert(@progress, {partition, offset})
  end

  def get_progress(partition) do
    case :ets.lookup(@progress, partition) do
      [{_, offset}] -> offset
      _ -> false
    end
  end

  # Creates the three named, public ETS tables. Must run before any other
  # function in this module is called.
  def init() do
    opts = [:named_table, :set, :public, {:write_concurrency, true}, {:read_concurrency, true}]
    :ets.new(@offsets, opts)
    :ets.new(@cgs, opts)
    :ets.new(@progress, opts)
    :ok
  end

  # Removes cached entries for all groups matched by the exclusion predicate
  # `exc` (a fun returning true for groups to drop); nil means no exclusions.
  def maybe_delete_excluded(nil), do: :ok
  def maybe_delete_excluded(exc) do
    Enum.each(get_groups(),
      fn(group_id) ->
        case exc.(group_id) do
          true ->
            :ets.delete(@cgs, group_id)
            :ets.delete(@offsets, group_id)
          false ->
            :ok
        end
      end)
  end

  defp lookup(table, key, default) do
    case :ets.lookup(table, key) do
      [] -> default
      [{_, value}] -> value
    end
  end

  # Map/keyword-list polymorphic helpers: cached values may arrive in either
  # shape depending on the producer.
  defp delete(x, key) when is_map(x), do: Map.delete(x, key)
  defp delete(x, key) when is_list(x), do: Keyword.delete(x, key)
  defp fetch!(x, key) when is_map(x), do: Map.fetch!(x, key)
  defp fetch!(x, key) when is_list(x), do: Keyword.fetch!(x, key)
  defp put(x, key, val) when is_map(x), do: Map.put(x, key, val)
  defp put(x, key, val) when is_list(x), do: Keyword.put(x, key, val)
  defp to_list(l) when is_list(l), do: l
  defp to_list(m) when is_map(m), do: Map.to_list(m)

  # Lag = high-watermark - committed offset. Sentinels: -1 when the partition
  # is unknown to the offsets cache, -2 when the offsets cache itself is
  # unavailable (deliberate broad rescue: best-effort, never crash the caller).
  defp get_lagging(topic, partition, offset) do
    offset_hwm =
      try do
        # returns -1 in case not found in offsets cache
        Kastlex.OffsetsCache.get_hwm_offset(topic, partition)
      rescue _ ->
        # when offset cache does not exist or when offsets_cache is restarting
        -2
      end
    case offset_hwm do
      -1 ->
        -1
      -2 ->
        -2
      n when n >= offset ->
        n - offset
      _ ->
        # high-watermark offset is not up-to-date
        0
    end
  end
end
| 29.684211
| 95
| 0.583333
|
9e2cf3681f9fcf4ecfd11ed81e77ede95e7b85cb
| 2,984
|
ex
|
Elixir
|
clients/run/lib/google_api/run/v1/model/task_spec.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
clients/run/lib/google_api/run/v1/model/task_spec.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
clients/run/lib/google_api/run/v1/model/task_spec.ex
|
MMore/elixir-google-api
|
0574ec1439d9bbfe22d63965be1681b0f45a94c9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.

defmodule GoogleApi.Run.V1.Model.TaskSpec do
  @moduledoc """
  TaskSpec is a description of a task.

  ## Attributes

  *   `containers` (*type:* `list(GoogleApi.Run.V1.Model.Container.t)`, *default:* `nil`) - Optional. List of containers belonging to the task. We disallow a number of fields on this Container. Only a single container may be provided.
  *   `maxRetries` (*type:* `integer()`, *default:* `nil`) - Optional. Number of retries allowed per task, before marking this job failed.
  *   `serviceAccountName` (*type:* `String.t`, *default:* `nil`) - Optional. Email address of the IAM service account associated with the task of a job execution. The service account represents the identity of the running task, and determines what permissions the task has. If not provided, the task will use the project's default service account. +optional
  *   `timeoutSeconds` (*type:* `String.t`, *default:* `nil`) - Optional. Optional duration in seconds the task may be active before the system will actively try to mark it failed and kill associated containers. This applies per attempt of a task, meaning each retry can run for the full timeout. +optional
  *   `volumes` (*type:* `list(GoogleApi.Run.V1.Model.Volume.t)`, *default:* `nil`) - Optional. List of volumes that can be mounted by containers belonging to the task. More info: https://kubernetes.io/docs/concepts/storage/volumes +optional
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :containers => list(GoogleApi.Run.V1.Model.Container.t()) | nil,
          :maxRetries => integer() | nil,
          :serviceAccountName => String.t() | nil,
          :timeoutSeconds => String.t() | nil,
          :volumes => list(GoogleApi.Run.V1.Model.Volume.t()) | nil
        }

  # Field declarations provided by GoogleApi.Gax.ModelBase; nested models are
  # decoded via the `as:`/`type:` options.
  field(:containers, as: GoogleApi.Run.V1.Model.Container, type: :list)
  field(:maxRetries)
  field(:serviceAccountName)
  field(:timeoutSeconds)
  field(:volumes, as: GoogleApi.Run.V1.Model.Volume, type: :list)
end

# Delegate Poison decoding to the generated decode/2 on the model module.
defimpl Poison.Decoder, for: GoogleApi.Run.V1.Model.TaskSpec do
  def decode(value, options) do
    GoogleApi.Run.V1.Model.TaskSpec.decode(value, options)
  end
end

# Encoding is shared across all generated models via Gax.ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Run.V1.Model.TaskSpec do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 50.576271
| 358
| 0.724196
|
9e2d0227c30c20d994c5bcdc7804e311d2d2defc
| 253
|
ex
|
Elixir
|
lib/geo_tasks.ex
|
Jesterovskiy/geo-tasks
|
65fbfdc3d1604084aa288fefba9c548e087a387e
|
[
"MIT"
] | null | null | null |
lib/geo_tasks.ex
|
Jesterovskiy/geo-tasks
|
65fbfdc3d1604084aa288fefba9c548e087a387e
|
[
"MIT"
] | null | null | null |
lib/geo_tasks.ex
|
Jesterovskiy/geo-tasks
|
65fbfdc3d1604084aa288fefba9c548e087a387e
|
[
"MIT"
] | null | null | null |
defmodule GeoTasks do
  @moduledoc """
  GeoTasks keeps the contexts that define your domain
  and business logic.

  Contexts are also responsible for managing your data, regardless
  of whether it comes from the database, an external API or others.
  """
end
| 25.3
| 66
| 0.754941
|
9e2d437d6fd72b59964b9e177c9d7b8b83c6f37e
| 1,815
|
ex
|
Elixir
|
apps/blog_api_web/lib/blog_api_web.ex
|
yashin5/blog_api
|
5dd6d0c9e43ca9c1dc555afd73713b62b4efa34e
|
[
"MIT"
] | null | null | null |
apps/blog_api_web/lib/blog_api_web.ex
|
yashin5/blog_api
|
5dd6d0c9e43ca9c1dc555afd73713b62b4efa34e
|
[
"MIT"
] | 5
|
2021-01-13T04:16:16.000Z
|
2021-01-13T21:43:00.000Z
|
apps/blog_api_web/lib/blog_api_web.ex
|
yashin5/blog_api
|
5dd6d0c9e43ca9c1dc555afd73713b62b4efa34e
|
[
"MIT"
] | null | null | null |
defmodule BlogApiWeb do
  @moduledoc """
  The entrypoint for defining your web interface, such
  as controllers, views, channels and so on.

  This can be used in your application as:

      use BlogApiWeb, :controller
      use BlogApiWeb, :view

  The definitions below will be executed for every view,
  controller, etc, so keep them short and clean, focused
  on imports, uses and aliases.

  Do NOT define functions inside the quoted expressions
  below. Instead, define any helper function in modules
  and import those modules here.
  """

  # Quoted block injected into every controller via `use BlogApiWeb, :controller`.
  def controller do
    quote do
      use Phoenix.Controller, namespace: BlogApiWeb

      import Plug.Conn
      import BlogApiWeb.Gettext
      alias BlogApiWeb.Router.Helpers, as: Routes
    end
  end

  # Quoted block injected into every view; templates are resolved relative to
  # the `root` below.
  def view do
    quote do
      use Phoenix.View,
        root: "lib/blog_api_web/templates",
        namespace: BlogApiWeb

      # Import convenience functions from controllers
      import Phoenix.Controller,
        only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

      # Include shared imports and aliases for views
      unquote(view_helpers())
    end
  end

  # Quoted block injected into the router module.
  def router do
    quote do
      use Phoenix.Router

      import Plug.Conn
      import Phoenix.Controller
    end
  end

  # Quoted block injected into channel modules.
  def channel do
    quote do
      use Phoenix.Channel
      import BlogApiWeb.Gettext
    end
  end

  # Imports/aliases shared by all views (kept private; spliced in via unquote).
  defp view_helpers do
    quote do
      # Import basic rendering functionality (render, render_layout, etc)
      import Phoenix.View

      import BlogApiWeb.ErrorHelpers
      import BlogApiWeb.Gettext
      alias BlogApiWeb.Router.Helpers, as: Routes
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
| 22.974684
| 76
| 0.688154
|
9e2d6a0930fabd08be82da4fd2dbba0a17252fa7
| 359
|
exs
|
Elixir
|
golf_phoenix/priv/repo/seeds.exs
|
RobertDober/golftour
|
1a1187ef46be99532b41a08801c10541a9a84ab1
|
[
"Apache-2.0"
] | null | null | null |
golf_phoenix/priv/repo/seeds.exs
|
RobertDober/golftour
|
1a1187ef46be99532b41a08801c10541a9a84ab1
|
[
"Apache-2.0"
] | null | null | null |
golf_phoenix/priv/repo/seeds.exs
|
RobertDober/golftour
|
1a1187ef46be99532b41a08801c10541a9a84ab1
|
[
"Apache-2.0"
] | null | null | null |
# Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# GolfPhoenix.Repo.insert!(%GolfPhoenix.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.916667
| 61
| 0.713092
|
9e2d82b7bc60686fd45c4b635c7ecdda9b495a33
| 167
|
exs
|
Elixir
|
priv/repo/migrations/20191126095153_change_prompt_html_to_text.exs
|
topherhunt/reassembling-the-line
|
c6823b3394ee98d9b0149fa3d09448928ac5c0db
|
[
"MIT"
] | 1
|
2019-04-27T15:39:20.000Z
|
2019-04-27T15:39:20.000Z
|
priv/repo/migrations/20191126095153_change_prompt_html_to_text.exs
|
topherhunt/reassembling-the-line
|
c6823b3394ee98d9b0149fa3d09448928ac5c0db
|
[
"MIT"
] | 11
|
2020-07-16T11:40:53.000Z
|
2021-08-16T07:03:33.000Z
|
priv/repo/migrations/20191126095153_change_prompt_html_to_text.exs
|
topherhunt/reassembling-the-line
|
c6823b3394ee98d9b0149fa3d09448928ac5c0db
|
[
"MIT"
] | null | null | null |
defmodule RTL.Repo.Migrations.ChangePromptHtmlToText do
  use Ecto.Migration

  # Switch the `html` column of the `prompts` table to the `:text` type.
  # `change/0` is reversible: Ecto derives the rollback automatically.
  def change do
    alter table(:prompts), do: modify(:html, :text)
  end
end
| 16.7
| 55
| 0.712575
|
9e2dc8b57b49b78e28572b2b76a287631120ec86
| 1,450
|
ex
|
Elixir
|
lib/trademark_free_strategic_land_warfare_web/telemetry.ex
|
WizardOfOgz/trademark_free_strategic_land_warfare
|
a14287eab1f60c13d43f70ac2309391c291a6704
|
[
"MIT"
] | 1
|
2020-06-30T16:37:50.000Z
|
2020-06-30T16:37:50.000Z
|
lib/trademark_free_strategic_land_warfare_web/telemetry.ex
|
WizardOfOgz/trademark_free_strategic_land_warfare
|
a14287eab1f60c13d43f70ac2309391c291a6704
|
[
"MIT"
] | null | null | null |
lib/trademark_free_strategic_land_warfare_web/telemetry.ex
|
WizardOfOgz/trademark_free_strategic_land_warfare
|
a14287eab1f60c13d43f70ac2309391c291a6704
|
[
"MIT"
] | 13
|
2020-06-04T23:10:19.000Z
|
2020-06-05T01:25:26.000Z
|
defmodule TrademarkFreeStrategicLandWarfareWeb.Telemetry do
  @moduledoc """
  Telemetry supervisor: starts the periodic measurement poller and defines
  the metrics exposed to reporters (e.g. LiveDashboard).
  """
  use Supervisor
  import Telemetry.Metrics

  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end

  @impl true
  def init(_arg) do
    children = [
      # Telemetry poller will execute the given period measurements
      # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
      # Add reporters as children of your supervision tree.
      # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  # Metric definitions consumed by reporters; durations are converted from
  # native time units to milliseconds.
  def metrics do
    [
      # Phoenix Metrics
      summary("phoenix.endpoint.stop.duration",
        unit: {:native, :millisecond}
      ),
      summary("phoenix.router_dispatch.stop.duration",
        tags: [:route],
        unit: {:native, :millisecond}
      ),

      # VM Metrics
      summary("vm.memory.total", unit: {:byte, :kilobyte}),
      summary("vm.total_run_queue_lengths.total"),
      summary("vm.total_run_queue_lengths.cpu"),
      summary("vm.total_run_queue_lengths.io")
    ]
  end

  # Custom periodic measurements for the poller (currently none).
  defp periodic_measurements do
    [
      # A module, function and arguments to be invoked periodically.
      # This function must call :telemetry.execute/3 and a metric must be added above.
      # {TrademarkFreeStrategicLandWarfareWeb, :count_users, []}
    ]
  end
end
| 29.591837
| 86
| 0.682069
|
9e2dff86a5f34fb3801d7cc9b89549101e35cb30
| 35,512
|
ex
|
Elixir
|
apps/language_server/lib/language_server/server.ex
|
zhenfeng-zhu/elixir-ls
|
b019a172646826ff5cc1536369535c115ab6085d
|
[
"Apache-2.0"
] | null | null | null |
apps/language_server/lib/language_server/server.ex
|
zhenfeng-zhu/elixir-ls
|
b019a172646826ff5cc1536369535c115ab6085d
|
[
"Apache-2.0"
] | null | null | null |
apps/language_server/lib/language_server/server.ex
|
zhenfeng-zhu/elixir-ls
|
b019a172646826ff5cc1536369535c115ab6085d
|
[
"Apache-2.0"
] | null | null | null |
defmodule ElixirLS.LanguageServer.Server do
@moduledoc """
Language Server Protocol server
This server tracks open files, attempts to rebuild the project when a file changes, and handles
requests from the IDE (for things like autocompletion, hover, etc.)
Notifications from the IDE are handled synchronously, whereas requests can be handled sychronously
or asynchronously.
When possible, handling the request asynchronously has several advantages. The asynchronous
request handling cannot modify the server state. That way, if the process handling the request
crashes, we can report that error to the client and continue knowing that the state is
uncorrupted. Also, asynchronous requests can be cancelled by the client if they're taking too long
or the user no longer cares about the result.
"""
use GenServer
alias ElixirLS.LanguageServer.{SourceFile, Build, Protocol, JsonRpc, Dialyzer}
alias ElixirLS.LanguageServer.Providers.{
Completion,
Hover,
Definition,
Implementation,
References,
Formatting,
SignatureHelp,
DocumentSymbols,
WorkspaceSymbols,
OnTypeFormatting,
CodeLens,
ExecuteCommand,
FoldingRange
}
alias ElixirLS.Utils.Launch
use Protocol
defstruct [
:server_instance_id,
:build_ref,
:dialyzer_sup,
:client_capabilities,
:root_uri,
:project_dir,
:settings,
build_diagnostics: [],
dialyzer_diagnostics: [],
needs_build?: false,
load_all_modules?: false,
build_running?: false,
analysis_ready?: false,
received_shutdown?: false,
requests: %{},
# Tracks source files that are currently open in the editor
source_files: %{},
awaiting_contracts: [],
supports_dynamic: false,
no_mixfile_warned?: false
]
defmodule InvalidParamError do
defexception [:uri, :message]
@impl true
def exception(uri) do
msg = "invalid URI: #{inspect(uri)}"
%InvalidParamError{message: msg, uri: uri}
end
end
@watched_extensions [".ex", ".exs", ".erl", ".hrl", ".yrl", ".xrl", ".eex", ".leex"]
## Client API
def start_link(name \\ nil) do
GenServer.start_link(__MODULE__, :ok, name: name || __MODULE__)
end
def receive_packet(server \\ __MODULE__, packet) do
GenServer.cast(server, {:receive_packet, packet})
end
def build_finished(server \\ __MODULE__, result) do
GenServer.cast(server, {:build_finished, result})
end
def dialyzer_finished(server \\ __MODULE__, diagnostics, build_ref) do
GenServer.cast(server, {:dialyzer_finished, diagnostics, build_ref})
end
def rebuild(server \\ __MODULE__) do
GenServer.cast(server, :rebuild)
end
def suggest_contracts(server \\ __MODULE__, uri) do
GenServer.call(server, {:suggest_contracts, uri}, :infinity)
end
defguardp is_initialized(server_instance_id) when not is_nil(server_instance_id)
## Server Callbacks
@impl GenServer
def init(:ok) do
{:ok, %__MODULE__{}}
end
@impl GenServer
def handle_call({:request_finished, id, result}, _from, state = %__MODULE__{}) do
case result do
{:error, type, msg} -> JsonRpc.respond_with_error(id, type, msg)
{:ok, result} -> JsonRpc.respond(id, result)
end
state = %{state | requests: Map.delete(state.requests, id)}
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:suggest_contracts, uri = "file:" <> _}, from, state = %__MODULE__{}) do
case state do
%{analysis_ready?: true, source_files: %{^uri => %{dirty?: false}}} ->
abs_path =
uri
|> SourceFile.abs_path_from_uri()
{:reply, Dialyzer.suggest_contracts([abs_path]), state}
%{source_files: %{^uri => _}} ->
# file not saved or analysis not finished
awaiting_contracts = reject_awaiting_contracts(state.awaiting_contracts, uri)
{:noreply, %{state | awaiting_contracts: [{from, uri} | awaiting_contracts]}}
_ ->
# file not or no longer open
{:reply, [], state}
end
end
def handle_call({:suggest_contracts, _uri}, _from, state = %__MODULE__{}) do
{:reply, [], state}
end
@impl GenServer
def handle_cast({:build_finished, {status, diagnostics}}, state = %__MODULE__{})
when status in [:ok, :noop, :error, :no_mixfile] and is_list(diagnostics) do
{:noreply, handle_build_result(status, diagnostics, state)}
end
@impl GenServer
def handle_cast({:dialyzer_finished, diagnostics, build_ref}, state = %__MODULE__{}) do
{:noreply, handle_dialyzer_result(diagnostics, build_ref, state)}
end
@impl GenServer
def handle_cast({:receive_packet, request(id, _, _) = packet}, state = %__MODULE__{}) do
{:noreply, handle_request_packet(id, packet, state)}
end
@impl GenServer
def handle_cast({:receive_packet, request(id, method)}, state = %__MODULE__{}) do
{:noreply, handle_request_packet(id, request(id, method, nil), state)}
end
@impl GenServer
def handle_cast(
{:receive_packet, notification(_) = packet},
state = %__MODULE__{received_shutdown?: false, server_instance_id: server_instance_id}
)
when is_initialized(server_instance_id) do
{:noreply, handle_notification(packet, state)}
end
@impl GenServer
def handle_cast({:receive_packet, notification(_) = packet}, state = %__MODULE__{}) do
case packet do
notification("exit") ->
{:noreply, handle_notification(packet, state)}
_ ->
{:noreply, state}
end
end
@impl GenServer
def handle_cast(:rebuild, state = %__MODULE__{}) do
{:noreply, trigger_build(state)}
end
  @impl GenServer
  # Fallback timer: if the client never sent workspace/didChangeConfiguration,
  # proceed with default settings so builds are not blocked indefinitely.
  def handle_info(:default_config, state = %__MODULE__{}) do
    state =
      case state do
        %{settings: nil} ->
          JsonRpc.show_message(
            :info,
            "Did not receive workspace/didChangeConfiguration notification after 5 seconds. " <>
              "Using default settings."
          )
          set_settings(state, %{})
        _ ->
          state
      end
    {:noreply, state}
  end
  @impl GenServer
  # The build process exited: a non-:normal exit is converted into an error
  # diagnostic; on success the workspace-symbol index is notified. A build
  # queued while this one ran (needs_build?) is started immediately.
  def handle_info(
        {:DOWN, ref, _, _pid, reason},
        %__MODULE__{build_ref: ref, build_running?: true} = state
      ) do
    state = %{state | build_running?: false}
    state =
      case reason do
        :normal -> state
        _ -> handle_build_result(:error, [Build.exception_to_diagnostic(reason)], state)
      end
    if reason == :normal do
      WorkspaceSymbols.notify_build_complete()
    end
    state = if state.needs_build?, do: trigger_build(state), else: state
    {:noreply, state}
  end
  @impl GenServer
  # An async request handler process died: reply to the corresponding request
  # id with a server error and drop it from the in-flight request map.
  def handle_info({:DOWN, _ref, :process, pid, reason}, %__MODULE__{requests: requests} = state) do
    state =
      case Enum.find(requests, &match?({_, ^pid}, &1)) do
        {id, _} ->
          error_msg = Exception.format_exit(reason)
          JsonRpc.respond_with_error(id, :server_error, error_msg)
          %{state | requests: Map.delete(requests, id)}
        nil ->
          state
      end
    {:noreply, state}
  end
## Helpers
  # "initialized": client finished its part of the handshake. Schedules the
  # default-settings fallback and, when the client supports dynamic capability
  # registration, registers file watchers for the watched source extensions.
  defp handle_notification(notification("initialized"), state = %__MODULE__{}) do
    # If we don't receive workspace/didChangeConfiguration for 5 seconds, use default settings
    Process.send_after(self(), :default_config, 5000)
    if state.supports_dynamic do
      watchers = for ext <- @watched_extensions, do: %{"globPattern" => "**/*." <> ext}
      register_capability_result =
        JsonRpc.register_capability_request("workspace/didChangeWatchedFiles", %{
          "watchers" => watchers
        })
      case register_capability_result do
        {:ok, nil} ->
          :ok
        other ->
          JsonRpc.log_message(
            :error,
            "client/registerCapability returned: #{inspect(other)}"
          )
      end
    end
    state
  end
  # "$/cancelRequest": kill the in-flight handler process (if any) and answer
  # the cancelled id with request_cancelled, as the LSP spec requires.
  defp handle_notification(cancel_request(id), %__MODULE__{requests: requests} = state) do
    case requests do
      %{^id => pid} ->
        Process.exit(pid, :cancelled)
        JsonRpc.respond_with_error(id, :request_cancelled, "Request cancelled")
        %{state | requests: Map.delete(requests, id)}
      _ ->
        JsonRpc.log_message(
          :warning,
          "Received $/cancelRequest for unknown request id: #{inspect(id)}"
        )
        state
    end
  end
  # We don't start performing builds until we receive settings from the client in case they've set
  # the `projectDir` or `mixEnv` settings. If the settings don't match the format expected, leave
  # settings unchanged or set default settings if this is the first request.
  defp handle_notification(did_change_configuration(changed_settings), state = %__MODULE__{}) do
    prev_settings = state.settings || %{}
    new_settings =
      case changed_settings do
        %{"elixirLS" => changed_settings} when is_map(changed_settings) ->
          Map.merge(prev_settings, changed_settings)
        _ ->
          prev_settings
      end
    set_settings(state, new_settings)
  end
  # "exit": terminate the OS process. Exit code 0 only after a proper
  # "shutdown" request; in test mode the GenServer exits instead of halting.
  defp handle_notification(notification("exit"), state = %__MODULE__{}) do
    code = if state.received_shutdown?, do: 0, else: 1
    unless Application.get_env(:language_server, :test_mode) do
      System.halt(code)
    else
      Process.exit(self(), {:exit_code, code})
    end
    state
  end
  # textDocument/didOpen: start tracking the document and immediately publish
  # any already-known diagnostics for it. A duplicate open is logged + ignored.
  defp handle_notification(did_open(uri, _language_id, version, text), state = %__MODULE__{}) do
    if Map.has_key?(state.source_files, uri) do
      # An open notification must not be sent more than once without a corresponding
      # close notification send before
      JsonRpc.log_message(
        :warning,
        "Received textDocument/didOpen for file that is already open. Received uri: #{inspect(uri)}"
      )
      state
    else
      source_file = %SourceFile{text: text, version: version}
      Build.publish_file_diagnostics(
        uri,
        state.build_diagnostics ++ state.dialyzer_diagnostics,
        source_file
      )
      put_in(state.source_files[uri], source_file)
    end
  end
  # textDocument/didClose: stop tracking the document and release any callers
  # waiting on suggested contracts for it.
  defp handle_notification(did_close(uri), state = %__MODULE__{}) do
    if not Map.has_key?(state.source_files, uri) do
      # A close notification requires a previous open notification to be sent
      JsonRpc.log_message(
        :warning,
        "Received textDocument/didClose for file that is not open. Received uri: #{inspect(uri)}"
      )
      state
    else
      awaiting_contracts = reject_awaiting_contracts(state.awaiting_contracts, uri)
      %{
        state
        | source_files: Map.delete(state.source_files, uri),
          awaiting_contracts: awaiting_contracts
      }
    end
  end
  # textDocument/didChange: apply the content changes and mark the file dirty.
  defp handle_notification(did_change(uri, version, content_changes), state = %__MODULE__{}) do
    if not Map.has_key?(state.source_files, uri) do
      # The source file was not marked as open either due to a bug in the
      # client or a restart of the server. So just ignore the message and do
      # not update the state
      JsonRpc.log_message(
        :warning,
        "Received textDocument/didChange for file that is not open. Received uri: #{inspect(uri)}"
      )
      state
    else
      update_in(state.source_files[uri], fn source_file ->
        %SourceFile{source_file | version: version, dirty?: true}
        |> SourceFile.apply_content_changes(content_changes)
      end)
    end
  end
  # textDocument/didSave: clear the dirty flag and kick off a rebuild.
  defp handle_notification(did_save(uri), state = %__MODULE__{}) do
    if not Map.has_key?(state.source_files, uri) do
      JsonRpc.log_message(
        :warning,
        "Received textDocument/didSave for file that is not open. Received uri: #{inspect(uri)}"
      )
      state
    else
      WorkspaceSymbols.notify_uris_modified([uri])
      state = update_in(state.source_files[uri], &%{&1 | dirty?: false})
      trigger_build(state)
    end
  end
  # workspace/didChangeWatchedFiles: decide whether on-disk changes require a
  # rebuild (type 1 = created, 2 = changed, 3 = deleted) and reconcile open
  # editors' dirty flags against the current file contents on disk.
  # NOTE(review): Path.extname/1 returns the extension with a leading dot,
  # while the glob registered in "initialized" appends the extension after
  # "*." - confirm @watched_extensions entries are shaped so this membership
  # test actually matches.
  defp handle_notification(did_change_watched_files(changes), state = %__MODULE__{}) do
    changes = Enum.filter(changes, &match?(%{"uri" => "file:" <> _}, &1))
    needs_build =
      Enum.any?(changes, fn %{"uri" => uri = "file:" <> _, "type" => type} ->
        path = SourceFile.path_from_uri(uri)
        Path.extname(path) in @watched_extensions and
          (type in [1, 3] or not Map.has_key?(state.source_files, uri) or
             state.source_files[uri].dirty?)
      end)
    source_files =
      changes
      |> Enum.reduce(state.source_files, fn
        %{"type" => 3}, acc ->
          # deleted file still open in editor, keep dirty flag
          acc
        %{"uri" => uri = "file:" <> _}, acc ->
          # file created/updated - set dirty flag to false if file contents are equal
          case acc[uri] do
            %SourceFile{text: source_file_text, dirty?: true} = source_file ->
              case File.read(SourceFile.path_from_uri(uri)) do
                {:ok, ^source_file_text} ->
                  Map.put(acc, uri, %SourceFile{source_file | dirty?: false})
                {:ok, _} ->
                  acc
                {:error, reason} ->
                  JsonRpc.log_message(:warning, "Unable to read #{uri}: #{inspect(reason)}")
                  # keep dirty if read fails
                  acc
              end
            _ ->
              # file not open or not dirty
              acc
          end
      end)
    state = %{state | source_files: source_files}
    changes
    |> Enum.map(& &1["uri"])
    |> Enum.uniq()
    |> WorkspaceSymbols.notify_uris_modified()
    if needs_build, do: trigger_build(state), else: state
  end
  defp handle_notification(%{"method" => "$/" <> _}, state = %__MODULE__{}) do
    # not supported "$/" notifications may be safely ignored
    state
  end
  # Catch-all: log unrecognized notifications without crashing the server.
  defp handle_notification(packet, state = %__MODULE__{}) do
    JsonRpc.log_message(:warning, "Received unmatched notification: #{inspect(packet)}")
    state
  end
  # Before initialization only the "initialize" request is allowed; everything
  # else gets a server_not_initialized error, per the LSP lifecycle.
  defp handle_request_packet(
         id,
         packet,
         state = %__MODULE__{server_instance_id: server_instance_id}
       )
       when not is_initialized(server_instance_id) do
    case packet do
      initialize_req(_id, _root_uri, _client_capabilities) ->
        {:ok, result, state} = handle_request(packet, state)
        JsonRpc.respond(id, result)
        state
      _ ->
        JsonRpc.respond_with_error(id, :server_not_initialized)
        state
    end
  end
  # Normal operation: dispatch to handle_request/2. Sync results/errors are
  # answered immediately; :async spawns a monitored handler tracked in
  # state.requests. The function-level rescue maps InvalidParamError (raised
  # e.g. by get_source_file/2) to an :invalid_params response.
  defp handle_request_packet(id, packet, state = %__MODULE__{received_shutdown?: false}) do
    case handle_request(packet, state) do
      {:ok, result, state} ->
        JsonRpc.respond(id, result)
        state
      {:error, type, msg, state} ->
        JsonRpc.respond_with_error(id, type, msg)
        state
      {:async, fun, state} ->
        {pid, _ref} = handle_request_async(id, fun)
        %{state | requests: Map.put(state.requests, id, pid)}
    end
  rescue
    e in InvalidParamError ->
      JsonRpc.respond_with_error(id, :invalid_params, e.message)
      state
  end
  # After "shutdown" was received no further requests are valid.
  defp handle_request_packet(id, _packet, state = %__MODULE__{}) do
    JsonRpc.respond_with_error(id, :invalid_request)
    state
  end
  # "initialize": perform the handshake. Generates a unique server instance id,
  # changes cwd to the workspace root (for file:// root URIs) and returns the
  # advertised server capabilities.
  defp handle_request(
         initialize_req(_id, root_uri, client_capabilities),
         state = %__MODULE__{server_instance_id: server_instance_id}
       )
       when not is_initialized(server_instance_id) do
    show_version_warnings()
    # 32 url-safe random characters; also used to namespace commands per instance
    server_instance_id =
      :crypto.strong_rand_bytes(32) |> Base.url_encode64() |> binary_part(0, 32)
    state =
      case root_uri do
        "file://" <> _ ->
          root_path = SourceFile.abs_path_from_uri(root_uri)
          File.cd!(root_path)
          %{state | root_uri: root_uri}
        nil ->
          state
      end
    # Explicitly request file watchers from the client if supported
    # NOTE(review): this probes textDocument.codeAction.dynamicRegistration as
    # a proxy for dynamic-registration support rather than
    # workspace.didChangeWatchedFiles.dynamicRegistration - confirm intentional
    supports_dynamic =
      get_in(client_capabilities, [
        "textDocument",
        "codeAction",
        "dynamicRegistration"
      ])
    state = %{
      state
      | client_capabilities: client_capabilities,
        server_instance_id: server_instance_id,
        supports_dynamic: supports_dynamic
    }
    {:ok,
     %{
       "capabilities" => server_capabilities(server_instance_id),
       "serverInfo" => %{
         "name" => "ElixirLS",
         "version" => "#{Launch.language_server_version()}"
       }
     }, state}
  end
  # "shutdown": flag state so a following "exit" halts with code 0.
  defp handle_request(request(_id, "shutdown", _params), state = %__MODULE__{}) do
    {:ok, nil, %{state | received_shutdown?: true}}
  end
  # textDocument/definition - resolved asynchronously.
  defp handle_request(definition_req(_id, uri, line, character), state = %__MODULE__{}) do
    source_file = get_source_file(state, uri)
    fun = fn ->
      Definition.definition(uri, source_file.text, line, character)
    end
    {:async, fun, state}
  end
  # textDocument/implementation - resolved asynchronously.
  defp handle_request(implementation_req(_id, uri, line, character), state = %__MODULE__{}) do
    source_file = get_source_file(state, uri)
    fun = fn ->
      Implementation.implementation(uri, source_file.text, line, character)
    end
    {:async, fun, state}
  end
  # textDocument/references - resolved asynchronously.
  defp handle_request(
         references_req(_id, uri, line, character, include_declaration),
         state = %__MODULE__{}
       ) do
    source_file = get_source_file(state, uri)
    fun = fn ->
      {:ok,
       References.references(
         source_file.text,
         uri,
         line,
         character,
         include_declaration
       )}
    end
    {:async, fun, state}
  end
  # textDocument/hover - resolved asynchronously.
  defp handle_request(hover_req(_id, uri, line, character), state = %__MODULE__{}) do
    source_file = get_source_file(state, uri)
    fun = fn ->
      Hover.hover(source_file.text, line, character, state.project_dir)
    end
    {:async, fun, state}
  end
  # textDocument/documentSymbol: only .ex/.exs files produce symbols; the
  # hierarchical flavor is used when the client advertises support for it.
  defp handle_request(document_symbol_req(_id, uri), state = %__MODULE__{}) do
    source_file = get_source_file(state, uri)
    fun = fn ->
      hierarchical? =
        get_in(state.client_capabilities, [
          "textDocument",
          "documentSymbol",
          "hierarchicalDocumentSymbolSupport"
        ]) || false
      if String.ends_with?(uri, [".ex", ".exs"]) do
        DocumentSymbols.symbols(uri, source_file.text, hierarchical?)
      else
        {:ok, []}
      end
    end
    {:async, fun, state}
  end
  # workspace/symbol - resolved asynchronously.
  defp handle_request(workspace_symbol_req(_id, query), state = %__MODULE__{}) do
    fun = fn ->
      WorkspaceSymbols.symbols(query)
    end
    {:async, fun, state}
  end
  # textDocument/completion: collect the relevant client capabilities and
  # formatter config (locals_without_parens) before running the completion
  # engine asynchronously.
  defp handle_request(completion_req(_id, uri, line, character), state = %__MODULE__{}) do
    source_file = get_source_file(state, uri)
    snippets_supported =
      !!get_in(state.client_capabilities, [
        "textDocument",
        "completion",
        "completionItem",
        "snippetSupport"
      ])
    # deprecated as of Language Server Protocol Specification - 3.15
    deprecated_supported =
      !!get_in(state.client_capabilities, [
        "textDocument",
        "completion",
        "completionItem",
        "deprecatedSupport"
      ])
    tags_supported =
      case get_in(state.client_capabilities, [
             "textDocument",
             "completion",
             "completionItem",
             "tagSupport"
           ]) do
        nil -> []
        %{"valueSet" => value_set} -> value_set
      end
    signature_help_supported =
      !!get_in(state.client_capabilities, ["textDocument", "signatureHelp"])
    locals_without_parens =
      case SourceFile.formatter_opts(uri) do
        {:ok, opts} -> Keyword.get(opts, :locals_without_parens, [])
        :error -> []
      end
      |> MapSet.new()
    signature_after_complete = Map.get(state.settings || %{}, "signatureAfterComplete", true)
    fun = fn ->
      Completion.completion(source_file.text, line, character,
        snippets_supported: snippets_supported,
        deprecated_supported: deprecated_supported,
        tags_supported: tags_supported,
        signature_help_supported: signature_help_supported,
        locals_without_parens: locals_without_parens,
        signature_after_complete: signature_after_complete
      )
    end
    {:async, fun, state}
  end
  # textDocument/formatting - resolved asynchronously.
  defp handle_request(formatting_req(_id, uri, _options), state = %__MODULE__{}) do
    source_file = get_source_file(state, uri)
    fun = fn -> Formatting.format(source_file, uri, state.project_dir) end
    {:async, fun, state}
  end
  # textDocument/signatureHelp - resolved asynchronously.
  defp handle_request(signature_help_req(_id, uri, line, character), state = %__MODULE__{}) do
    source_file = get_source_file(state, uri)
    fun = fn -> SignatureHelp.signature(source_file, line, character) end
    {:async, fun, state}
  end
  # textDocument/onTypeFormatting - resolved asynchronously.
  defp handle_request(
         on_type_formatting_req(_id, uri, line, character, ch, options),
         state = %__MODULE__{}
       ) do
    source_file = get_source_file(state, uri)
    fun = fn ->
      OnTypeFormatting.format(source_file, line, character, ch, options)
    end
    {:async, fun, state}
  end
  # textDocument/codeLens: combines spec-suggestion and test-runner lenses;
  # metadata parse errors are reported as code_lens_error.
  defp handle_request(code_lens_req(_id, uri), state = %__MODULE__{}) do
    source_file = get_source_file(state, uri)
    fun = fn ->
      with {:ok, spec_code_lenses} <- get_spec_code_lenses(state, uri, source_file),
           {:ok, test_code_lenses} <- get_test_code_lenses(state, uri, source_file) do
        {:ok, spec_code_lenses ++ test_code_lenses}
      else
        {:error, %ElixirSense.Core.Metadata{error: {line, error_msg}}} ->
          {:error, :code_lens_error, "#{line}: #{error_msg}"}
        {:error, error} ->
          {:error, :code_lens_error, "Error while building code lenses: #{inspect(error)}"}
        error ->
          error
      end
    end
    {:async, fun, state}
  end
  # workspace/executeCommand - delegated to ExecuteCommand asynchronously.
  defp handle_request(execute_command_req(_id, command, args) = req, state = %__MODULE__{}) do
    {:async,
     fn ->
       case ExecuteCommand.execute(command, args, state) do
         {:error, :invalid_request, _msg} = res ->
           JsonRpc.log_message(:warning, "Unmatched request: #{inspect(req)}")
           res
         other ->
           other
       end
     end, state}
  end
  # textDocument/foldingRange - resolved asynchronously.
  defp handle_request(folding_range_req(_id, uri), state = %__MODULE__{}) do
    case get_source_file(state, uri) do
      nil ->
        {:error, :server_error, "Missing source file", state}
      source_file ->
        fun = fn -> FoldingRange.provide(source_file) end
        {:async, fun, state}
    end
  end
  # TODO remove in ElixirLS 0.8
  # Deprecated custom request kept for backward compatibility.
  defp handle_request(
         macro_expansion(_id, whole_buffer, selected_macro, macro_line),
         state = %__MODULE__{}
       ) do
    IO.warn(
      "Custom `elixirDocument/macroExpansion` request is deprecated. Switch to command `executeMacro` via `workspace/executeCommand`"
    )
    x = ElixirSense.expand_full(whole_buffer, selected_macro, macro_line)
    {:ok, x, state}
  end
  defp handle_request(%{"method" => "$/" <> _}, state = %__MODULE__{}) do
    # "$/" requests that the server doesn't support must return method_not_found
    {:error, :method_not_found, nil, state}
  end
  # Catch-all: unknown requests get invalid_request.
  defp handle_request(req, state = %__MODULE__{}) do
    JsonRpc.log_message(:warning, "Unmatched request: #{inspect(req)}")
    {:error, :invalid_request, nil, state}
  end
  # Runs a request handler in a spawned, monitored process; returns
  # {pid, monitor_ref}. The handler reports its result back to the server via
  # a :request_finished call, and InvalidParamError raised inside the handler
  # is converted to an :invalid_params error tuple.
  defp handle_request_async(id, func) do
    parent = self()
    spawn_monitor(fn ->
      result =
        try do
          func.()
        rescue
          e in InvalidParamError ->
            {:error, :invalid_params, e.message}
        end
      GenServer.call(parent, {:request_finished, id, result}, :infinity)
    end)
  end
  # Capabilities map returned from "initialize". Command names embed the
  # server instance id so concurrent ElixirLS instances don't clash.
  # "change" => 2 selects incremental text-document sync.
  defp server_capabilities(server_instance_id) do
    %{
      "macroExpansion" => true,
      "textDocumentSync" => %{
        "change" => 2,
        "openClose" => true,
        "save" => %{"includeText" => true}
      },
      "hoverProvider" => true,
      "completionProvider" => %{"triggerCharacters" => Completion.trigger_characters()},
      "definitionProvider" => true,
      "implementationProvider" => true,
      "referencesProvider" => true,
      "documentFormattingProvider" => true,
      "signatureHelpProvider" => %{"triggerCharacters" => SignatureHelp.trigger_characters()},
      "documentSymbolProvider" => true,
      "workspaceSymbolProvider" => true,
      "documentOnTypeFormattingProvider" => %{"firstTriggerCharacter" => "\n"},
      "codeLensProvider" => %{"resolveProvider" => false},
      "executeCommandProvider" => %{
        "commands" => [
          "spec:#{server_instance_id}",
          "expandMacro:#{server_instance_id}",
          "manipulatePipes:#{server_instance_id}"
        ]
      },
      "workspace" => %{
        "workspaceFolders" => %{"supported" => false, "changeNotifications" => false}
      },
      "foldingRangeProvider" => true
    }
  end
  # Spec-suggestion lenses require both a running Dialyzer and the
  # "suggestSpecs" setting to be truthy; otherwise returns no lenses.
  defp get_spec_code_lenses(state = %__MODULE__{}, uri, source_file) do
    if dialyzer_enabled?(state) and !!state.settings["suggestSpecs"] do
      CodeLens.spec_code_lens(state.server_instance_id, uri, source_file.text)
    else
      {:ok, []}
    end
  end
  # Entry point: dispatches on the "enableTestLenses" setting and whether the
  # project is an umbrella.
  defp get_test_code_lenses(state = %__MODULE__{}, uri, source_file) do
    get_test_code_lenses(
      state,
      uri,
      source_file,
      state.settings["enableTestLenses"] || false,
      Mix.Project.umbrella?()
    )
  end
  # Umbrella project: locate the child app containing the file and emit test
  # lenses rooted at that app's directory.
  defp get_test_code_lenses(
         state = %__MODULE__{project_dir: project_dir},
         uri,
         source_file,
         true = _enabled,
         true = _umbrella
       )
       when is_binary(project_dir) do
    file_path = SourceFile.path_from_uri(uri)
    Mix.Project.apps_paths()
    |> Enum.find(fn {_app, app_path} -> String.contains?(file_path, app_path) end)
    |> case do
      nil ->
        {:ok, []}
      {app, app_path} ->
        if is_test_file?(file_path, state, app, app_path) do
          CodeLens.test_code_lens(uri, source_file.text, Path.join(project_dir, app_path))
        else
          {:ok, []}
        end
    end
  end
  # Non-umbrella project: emit test lenses when the file matches the test
  # pattern; ArgumentError (e.g. from URI conversion) degrades to no lenses.
  defp get_test_code_lenses(
         %__MODULE__{project_dir: project_dir},
         uri,
         source_file,
         true = _enabled,
         false = _umbrella
       )
       when is_binary(project_dir) do
    try do
      file_path = SourceFile.path_from_uri(uri)
      if is_test_file?(file_path) do
        CodeLens.test_code_lens(uri, source_file.text, project_dir)
      else
        {:ok, []}
      end
    rescue
      _ in ArgumentError -> {:ok, []}
    end
  end
  # Lenses disabled or no project dir: nothing to show.
  defp get_test_code_lenses(%__MODULE__{}, _uri, _source_file, _, _), do: {:ok, []}
  # Umbrella variant: test paths/pattern may be configured per child app via
  # the "testPaths"/"testPattern" settings keyed by app name.
  defp is_test_file?(file_path, state = %__MODULE__{project_dir: project_dir}, app, app_path)
       when is_binary(project_dir) do
    app_name = Atom.to_string(app)
    test_paths =
      (get_in(state.settings, ["testPaths", app_name]) || ["test"])
      |> Enum.map(fn path -> Path.join([project_dir, app_path, path]) end)
    test_pattern = get_in(state.settings, ["testPattern", app_name]) || "*_test.exs"
    Mix.Utils.extract_files(test_paths, test_pattern)
    |> Enum.any?(fn path -> String.ends_with?(file_path, path) end)
  end
  # Non-umbrella variant: uses the Mix project's own :test_paths/:test_pattern
  # config (defaulting to test/*_test.exs) and compares absolute paths.
  defp is_test_file?(file_path) do
    test_paths = Mix.Project.config()[:test_paths] || ["test"]
    test_pattern = Mix.Project.config()[:test_pattern] || "*_test.exs"
    Mix.Utils.extract_files(test_paths, test_pattern)
    |> Enum.map(&Path.absname/1)
    |> Enum.any?(&(&1 == file_path))
  end
# Build
  # Starts a build unless one is already running, in which case a rebuild is
  # queued via needs_build? (picked up when the current build's DOWN arrives).
  # No-op when no project dir has been established yet.
  defp trigger_build(state = %__MODULE__{project_dir: project_dir}) do
    cond do
      not build_enabled?(state) ->
        state
      not state.build_running? ->
        fetch_deps? = Map.get(state.settings || %{}, "fetchDeps", true)
        {_pid, build_ref} =
          Build.build(self(), project_dir,
            fetch_deps?: fetch_deps?,
            load_all_modules?: state.load_all_modules?
          )
        %__MODULE__{
          state
          | build_ref: build_ref,
            needs_build?: false,
            build_running?: true,
            analysis_ready?: false,
            load_all_modules?: false
        }
      true ->
        %__MODULE__{state | needs_build?: true, analysis_ready?: false}
    end
  end
  # Kicks off a Dialyzer analysis for the current build, converting the
  # configured warning options from strings to atoms.
  defp dialyze(state = %__MODULE__{}) do
    warn_opts =
      (state.settings["dialyzerWarnOpts"] || [])
      |> Enum.map(&String.to_atom/1)
    Dialyzer.analyze(state.build_ref, warn_opts, dialyzer_default_format(state))
    state
  end
defp dialyzer_default_format(state = %__MODULE__{}) do
state.settings["dialyzerFormat"] || "dialyxir_long"
end
  # No mix.exs found: show a one-time hint about the projectDir setting.
  defp handle_build_result(:no_mixfile, _, state = %__MODULE__{}) do
    unless state.no_mixfile_warned? do
      msg =
        "No mixfile found in project. " <>
          "To use a subdirectory, set `elixirLS.projectDir` in your settings"
      JsonRpc.show_message(:info, msg)
    end
    %__MODULE__{state | no_mixfile_warned?: true}
  end
  # Build completed: store the build diagnostics, then either keep waiting
  # (another build is queued), clear Dialyzer diagnostics (build failed or
  # Dialyzer disabled), or start a Dialyzer run. Publishes the diff between
  # old and new diagnostics to the client.
  defp handle_build_result(status, diagnostics, state = %__MODULE__{}) do
    old_diagnostics = state.build_diagnostics ++ state.dialyzer_diagnostics
    state = put_in(state.build_diagnostics, diagnostics)
    state =
      cond do
        state.needs_build? ->
          state
        status == :error or not dialyzer_enabled?(state) ->
          put_in(state.dialyzer_diagnostics, [])
        true ->
          dialyze(state)
      end
    publish_diagnostics(
      state.build_diagnostics ++ state.dialyzer_diagnostics,
      old_diagnostics,
      state.source_files
    )
    state
  end
  # Stores and publishes Dialyzer diagnostics. When the results correspond to
  # the most recent build, replies with suggested contracts to every caller
  # waiting on a non-dirty file and marks analysis as ready; callers waiting
  # on dirty files keep waiting.
  defp handle_dialyzer_result(diagnostics, build_ref, state = %__MODULE__{}) do
    old_diagnostics = state.build_diagnostics ++ state.dialyzer_diagnostics
    state = put_in(state.dialyzer_diagnostics, diagnostics)
    publish_diagnostics(
      state.build_diagnostics ++ state.dialyzer_diagnostics,
      old_diagnostics,
      state.source_files
    )
    # If these results were triggered by the most recent build and files are not dirty, then we know
    # we're up to date and can release spec suggestions to the code lens provider
    if build_ref == state.build_ref do
      JsonRpc.log_message(:info, "Dialyzer analysis is up to date")
      {dirty, not_dirty} =
        state.awaiting_contracts
        |> Enum.split_with(fn {_, uri} ->
          Map.fetch!(state.source_files, uri).dirty?
        end)
      contracts_by_file =
        not_dirty
        |> Enum.map(fn {_from, uri} -> SourceFile.path_from_uri(uri) end)
        |> Dialyzer.suggest_contracts()
        |> Enum.group_by(fn {file, _, _, _, _} -> file end)
      for {from, uri} <- not_dirty do
        contracts =
          contracts_by_file
          |> Map.get(SourceFile.path_from_uri(uri), [])
        GenServer.reply(from, contracts)
      end
      %{state | analysis_ready?: true, awaiting_contracts: dirty}
    else
      state
    end
  end
defp build_enabled?(state = %__MODULE__{}) do
is_binary(state.project_dir)
end
  # Dialyzer runs only when the OTP release supports it, a build is possible,
  # and the Dialyzer supervisor has been started (see set_dialyzer_enabled/2).
  defp dialyzer_enabled?(state = %__MODULE__{}) do
    Dialyzer.check_support() == :ok and build_enabled?(state) and state.dialyzer_sup != nil
  end
  # Publishes diagnostics for every file mentioned in either the new or the
  # old diagnostic set, so files whose diagnostics were cleared get an update.
  defp publish_diagnostics(new_diagnostics, old_diagnostics, source_files) do
    files =
      Enum.uniq(Enum.map(new_diagnostics, & &1.file) ++ Enum.map(old_diagnostics, & &1.file))
    for file <- files,
        uri = SourceFile.path_to_uri(file),
        do: Build.publish_file_diagnostics(uri, new_diagnostics, Map.get(source_files, uri))
  end
  # Warns the user when the Elixir/OTP versions are below the supported
  # minimum and informs them when Dialyzer is unavailable on this release.
  defp show_version_warnings do
    with {:error, message} <- ElixirLS.Utils.MinimumVersion.check_elixir_version() do
      JsonRpc.show_message(:warning, message)
    end
    with {:error, message} <- ElixirLS.Utils.MinimumVersion.check_otp_version() do
      JsonRpc.show_message(:warning, message)
    end
    case Dialyzer.check_support() do
      :ok -> :ok
      {:error, msg} -> JsonRpc.show_message(:info, msg)
    end
    :ok
  end
  # Applies client settings: Mix env/target, project dir, Dialyzer toggle,
  # creates the .elixir_ls/.gitignore, then triggers the initial build.
  defp set_settings(state = %__MODULE__{}, settings) do
    enable_dialyzer =
      Dialyzer.check_support() == :ok && Map.get(settings, "dialyzerEnabled", true)
    mix_env = Map.get(settings, "mixEnv", "test")
    mix_target = Map.get(settings, "mixTarget")
    project_dir = Map.get(settings, "projectDir")
    state =
      state
      |> set_mix_env(mix_env)
      |> maybe_set_mix_target(mix_target)
      |> set_project_dir(project_dir)
      |> set_dialyzer_enabled(enable_dialyzer)
    state = create_gitignore(state)
    trigger_build(%{state | settings: settings})
  end
  # Starts or stops the Dialyzer supervisor to match the requested setting;
  # starting requires an established project dir. No change otherwise.
  defp set_dialyzer_enabled(state = %__MODULE__{}, enable_dialyzer) do
    cond do
      enable_dialyzer and state.dialyzer_sup == nil and is_binary(state.project_dir) ->
        {:ok, pid} = Dialyzer.Supervisor.start_link(state.project_dir)
        %{state | dialyzer_sup: pid}
      not enable_dialyzer and state.dialyzer_sup != nil ->
        Process.exit(state.dialyzer_sup, :normal)
        %{state | dialyzer_sup: nil, analysis_ready?: false}
      true ->
        state
    end
  end
  # Sets Mix.env on first configuration; changing it later requires a restart
  # because already-compiled artifacts depend on the original env.
  defp set_mix_env(state = %__MODULE__{}, env) do
    prev_env = state.settings["mixEnv"]
    if is_nil(prev_env) or env == prev_env do
      Mix.env(String.to_atom(env))
    else
      JsonRpc.show_message(:warning, "You must restart ElixirLS after changing Mix env")
    end
    state
  end
  # No target configured: leave Mix.target untouched.
  defp maybe_set_mix_target(state = %__MODULE__{}, nil), do: state
  defp maybe_set_mix_target(state = %__MODULE__{}, target) do
    set_mix_target(state, target)
  end
  # Sets Mix.target on first configuration (defaulting to "host"); changing it
  # later requires a restart, mirroring set_mix_env/2.
  defp set_mix_target(state = %__MODULE__{}, target) do
    target = target || "host"
    prev_target = state.settings["mixTarget"]
    if is_nil(prev_target) or target == prev_target do
      Mix.target(String.to_atom(target))
    else
      JsonRpc.show_message(:warning, "You must restart ElixirLS after changing Mix target")
    end
    state
  end
  # Resolves the project directory against the workspace root and cds into it
  # the first time. A later change of project dir requires a restart; a
  # non-existent dir is reported and ignored. load_all_modules? is set on the
  # first successful cd so the initial build loads everything.
  defp set_project_dir(
         %__MODULE__{project_dir: prev_project_dir, root_uri: root_uri} = state,
         project_dir
       )
       when is_binary(root_uri) do
    root_dir = root_uri |> SourceFile.abs_path_from_uri()
    project_dir =
      if is_binary(project_dir) do
        Path.absname(Path.join(root_dir, project_dir))
      else
        root_dir
      end
    cond do
      not File.dir?(project_dir) ->
        JsonRpc.show_message(:error, "Project directory #{project_dir} does not exist")
        state
      is_nil(prev_project_dir) ->
        File.cd!(project_dir)
        Map.merge(state, %{project_dir: project_dir, load_all_modules?: true})
      prev_project_dir != project_dir ->
        JsonRpc.show_message(
          :warning,
          "You must restart ElixirLS after changing the project directory"
        )
        state
      true ->
        state
    end
  end
  # Without a root URI there is nothing to resolve against.
  defp set_project_dir(state = %__MODULE__{}, _) do
    state
  end
  # Creates .elixir_ls/.gitignore (containing "*") so the cache directory is
  # never committed; a pre-existing file is left alone and write failures are
  # only logged.
  defp create_gitignore(%__MODULE__{project_dir: project_dir} = state)
       when is_binary(project_dir) do
    with gitignore_path <- Path.join([project_dir, ".elixir_ls", ".gitignore"]),
         false <- File.exists?(gitignore_path),
         :ok <- gitignore_path |> Path.dirname() |> File.mkdir_p(),
         :ok <- File.write(gitignore_path, "*", [:write]) do
      state
    else
      true ->
        state
      {:error, err} ->
        JsonRpc.log_message(
          :warning,
          "Cannot create .elixir_ls/.gitignore, cause: #{Atom.to_string(err)}"
        )
        state
    end
  end
  # No project dir yet: nothing to create.
  defp create_gitignore(state = %__MODULE__{}) do
    state
  end
def get_source_file(state = %__MODULE__{}, uri) do
case state.source_files[uri] do
nil ->
raise InvalidParamError, uri
source_file ->
source_file
end
end
  # Removes all waiters registered for `uri`, replying to each with an empty
  # contract list. Note the trick: GenServer.reply/2 returns :ok (truthy), so
  # the matching entries are both answered and rejected in one pass.
  defp reject_awaiting_contracts(awaiting_contracts, uri) do
    Enum.reject(awaiting_contracts, fn
      {from, ^uri} -> GenServer.reply(from, [])
      _ -> false
    end)
  end
end
| 28.824675
| 133
| 0.637249
|
9e2e28d227eae7000b486e1ae37c63b42676ecbd
| 5,749
|
ex
|
Elixir
|
apps/andi/lib/andi_web/controllers/edit_controller.ex
|
smartcitiesdata/smartcitiesdata
|
c926c25003a8ee2d09b933c521c49f674841c0b6
|
[
"Apache-2.0"
] | 26
|
2019-09-20T23:54:45.000Z
|
2020-08-20T14:23:32.000Z
|
apps/andi/lib/andi_web/controllers/edit_controller.ex
|
smartcitiesdata/smartcitiesdata
|
c926c25003a8ee2d09b933c521c49f674841c0b6
|
[
"Apache-2.0"
] | 757
|
2019-08-15T18:15:07.000Z
|
2020-09-18T20:55:31.000Z
|
apps/andi/lib/andi_web/controllers/edit_controller.ex
|
smartcitiesdata/smartcitiesdata
|
c926c25003a8ee2d09b933c521c49f674841c0b6
|
[
"Apache-2.0"
] | 9
|
2019-11-12T16:43:46.000Z
|
2020-03-25T16:23:16.000Z
|
defmodule AndiWeb.EditController do
  @moduledoc """
  Renders the LiveView edit screens for datasets, submissions, organizations,
  ingestions, access groups and users, and serves hosted dataset sample
  downloads. Each action resolves the current user from the auth token and
  404s when the requested resource is missing or inaccessible.
  """
  use AndiWeb, :controller
  use Properties, otp_app: :andi
  alias Andi.InputSchemas.Datasets
  alias Andi.InputSchemas.Organizations
  alias Andi.InputSchemas.Ingestions
  alias Andi.InputSchemas.AccessGroups
  alias Andi.Schemas.DatasetDownload
  alias Andi.Schemas.User
  getter(:hosted_bucket, generic: true)
  # S3 key prefix under which hosted dataset samples are stored.
  @bucket_path "samples/"
  access_levels(
    edit_organization: [:private],
    edit_ingestion: [:private],
    edit_user: [:private],
    edit_dataset: [:private],
    edit_submission: [:private, :public],
    download_dataset_sample: [:private],
    edit_access_group: [:private]
  )
  def edit_dataset(conn, %{"id" => id}) do
    render_view_if_accessible(conn, id, AndiWeb.EditLiveView)
  end
  def edit_submission(conn, %{"id" => id}) do
    render_view_if_accessible(conn, id, AndiWeb.SubmitLiveView)
  end
  # Redirects curators to a presigned S3 URL for the dataset's sample file.
  # Every attempt (successful or not) is persisted for auditing; non-curators
  # and datasets without a link get a 404.
  def download_dataset_sample(conn, %{"id" => dataset_id}) do
    %{"user_id" => current_user_id, "is_curator" => is_curator} = AndiWeb.Auth.TokenHandler.Plug.current_resource(conn)
    andi_dataset = Andi.InputSchemas.Datasets.get(dataset_id)
    dataset_link = andi_dataset.datasetLink
    request_headers = conn.req_headers |> Enum.map(&Tuple.to_list/1) |> Jason.encode!()
    with true <- is_curator,
         false <- is_nil(dataset_link) do
      persist_dataset_download_request(dataset_id, dataset_link, current_user_id, request_headers, true)
      {:ok, presigned_url} = presigned_url(dataset_id, dataset_link)
      redirect(conn, external: presigned_url)
    else
      _ ->
        persist_dataset_download_request(dataset_id, dataset_link, current_user_id, request_headers, false)
        conn
        |> put_view(AndiWeb.ErrorView)
        |> put_status(404)
        |> render("404.html")
    end
  end
  # Records a download attempt (including request headers and outcome) for
  # auditing purposes. The insert result is intentionally not checked.
  defp persist_dataset_download_request(dataset_id, dataset_link, current_user_id, req_headers, download_success) do
    download_request = %{
      dataset_id: dataset_id,
      dataset_link: dataset_link,
      request_headers: req_headers,
      timestamp: DateTime.utc_now(),
      user_accessing: current_user_id,
      download_success: download_success
    }
    download_request_changeset = DatasetDownload.changeset(%DatasetDownload{}, download_request)
    Andi.Repo.insert_or_update(download_request_changeset)
  end
  # Builds a presigned GET URL for the sample object stored under
  # hosted_bucket/samples/<dataset_id>.
  defp presigned_url(dataset_id, dataset_link) do
    file_name = get_file_name_from_dataset_link(dataset_link)
    ExAws.Config.new(:s3)
    |> ExAws.S3.presigned_url(:get, "#{hosted_bucket()}/#{@bucket_path}#{dataset_id}", file_name)
    |> case do
      {:ok, presigned_url} -> {:ok, presigned_url}
      {_, error} -> {:error, error}
    end
  end
  # Last URL path segment of the dataset link, used as the S3 object name.
  defp get_file_name_from_dataset_link(dataset_link) do
    dataset_link
    |> String.split("/")
    |> List.last()
  end
  # Renders `view` for the dataset when the current user may see it
  # (curators see everything; owners see their own); otherwise 404.
  defp render_view_if_accessible(conn, id, view) do
    %{"user_id" => user_id, "is_curator" => is_curator} = AndiWeb.Auth.TokenHandler.Plug.current_resource(conn)
    case get_dataset_if_accessible(id, is_curator, user_id) do
      nil ->
        conn
        |> put_view(AndiWeb.ErrorView)
        |> put_status(404)
        |> render("404.html")
      dataset ->
        live_render(conn, view, session: %{"dataset" => dataset, "is_curator" => is_curator, "user_id" => user_id})
    end
  end
  # No authenticated user: nothing is accessible.
  defp get_dataset_if_accessible(_id, _is_curator, nil), do: nil
  # Curators may access any dataset.
  defp get_dataset_if_accessible(id, true, _user_id) do
    Datasets.get(id)
  end
  # Non-curators may only access datasets they own.
  defp get_dataset_if_accessible(id, false, user_id) do
    case Datasets.get(id) do
      nil -> nil
      %{owner_id: owner_id} = dataset when owner_id == user_id -> dataset
      _dataset -> nil
    end
  end
  def edit_organization(conn, %{"id" => id}) do
    %{"is_curator" => is_curator, "user_id" => user_id} = AndiWeb.Auth.TokenHandler.Plug.current_resource(conn)
    case Organizations.get(id) do
      nil ->
        conn
        |> put_view(AndiWeb.ErrorView)
        |> put_status(404)
        |> render("404.html")
      org ->
        live_render(conn, AndiWeb.EditOrganizationLiveView,
          session: %{"organization" => org, "is_curator" => is_curator, "user_id" => user_id}
        )
    end
  end
  def edit_ingestion(conn, %{"id" => id}) do
    %{"is_curator" => is_curator, "user_id" => user_id} = AndiWeb.Auth.TokenHandler.Plug.current_resource(conn)
    case Ingestions.get(id) do
      nil ->
        conn
        |> put_view(AndiWeb.ErrorView)
        |> put_status(404)
        |> render("404.html")
      ingestion ->
        live_render(conn, AndiWeb.IngestionLiveView.EditIngestionLiveView,
          session: %{"ingestion" => ingestion, "is_curator" => is_curator, "user_id" => user_id}
        )
    end
  end
  def edit_access_group(conn, %{"id" => id}) do
    %{"is_curator" => is_curator, "user_id" => user_id} = AndiWeb.Auth.TokenHandler.Plug.current_resource(conn)
    case AccessGroups.get(id) do
      nil ->
        conn
        |> put_view(AndiWeb.ErrorView)
        |> put_status(404)
        |> render("404.html")
      access_group ->
        live_render(conn, AndiWeb.AccessGroupLiveView.EditAccessGroupLiveView,
          session: %{"access_group" => access_group, "is_curator" => is_curator, "user_id" => user_id}
        )
    end
  end
  def edit_user(conn, %{"id" => id}) do
    %{"is_curator" => is_curator, "user_id" => user_id} = AndiWeb.Auth.TokenHandler.Plug.current_resource(conn)
    case User.get_by_id(id) do
      nil ->
        conn
        |> put_view(AndiWeb.ErrorView)
        |> put_status(404)
        |> render("404.html")
      user ->
        live_render(conn, AndiWeb.UserLiveView.EditUserLiveView,
          session: %{"is_curator" => is_curator, "user" => user, "user_id" => user_id}
        )
    end
  end
end
| 31.075676
| 119
| 0.665333
|
9e2e2a5d3cda98fffe9398426fee5f1d06bedc44
| 288
|
exs
|
Elixir
|
test/integration/derive_test.exs
|
dvjoness/construct
|
e07ec8e776b0ef1a8fea94691d912cef03d76573
|
[
"MIT"
] | 32
|
2017-12-26T18:08:41.000Z
|
2022-03-24T21:18:11.000Z
|
test/integration/derive_test.exs
|
ExpressApp/struct
|
301df7388b9283f39ee43bb209c06f3aa055c8e7
|
[
"MIT"
] | 4
|
2018-01-17T11:56:08.000Z
|
2022-03-24T21:16:27.000Z
|
test/integration/derive_test.exs
|
dvjoness/construct
|
e07ec8e776b0ef1a8fea94691d912cef03d76573
|
[
"MIT"
] | 7
|
2017-12-29T13:06:31.000Z
|
2022-03-24T05:56:56.000Z
|
defmodule Construct.Integration.DeriveTest do
  use ExUnit.Case
  # Builds a structure with only `a` supplied and checks the JSON encoding:
  # nested defaults from derived modules ("baa" => "test", "daa" => 0) must be
  # inherited, proving derive inheritance and per-field override both work.
  test "derive inheritance and override" do
    assert {:ok, structure} = Derive.make(a: "string")
    assert ~s({"a":"string","b":{"ba":{"baa":"test"}},"d":{"da":{"daa":0}}})
           == Jason.encode!(structure)
  end
end
| 26.181818
| 76
| 0.621528
|
9e2e325dec5684d6f9795b12cda0cacf2ea966d8
| 1,721
|
ex
|
Elixir
|
lib/trademark_free_strategic_land_warfare_web/endpoint.ex
|
WizardOfOgz/trademark_free_strategic_land_warfare
|
a14287eab1f60c13d43f70ac2309391c291a6704
|
[
"MIT"
] | 1
|
2020-06-30T16:37:50.000Z
|
2020-06-30T16:37:50.000Z
|
lib/trademark_free_strategic_land_warfare_web/endpoint.ex
|
WizardOfOgz/trademark_free_strategic_land_warfare
|
a14287eab1f60c13d43f70ac2309391c291a6704
|
[
"MIT"
] | null | null | null |
lib/trademark_free_strategic_land_warfare_web/endpoint.ex
|
WizardOfOgz/trademark_free_strategic_land_warfare
|
a14287eab1f60c13d43f70ac2309391c291a6704
|
[
"MIT"
] | 13
|
2020-06-04T23:10:19.000Z
|
2020-06-05T01:25:26.000Z
|
defmodule TrademarkFreeStrategicLandWarfareWeb.Endpoint do
  @moduledoc """
  Phoenix HTTP endpoint: declares the user/LiveView sockets and the standard
  plug pipeline (static files, code reloading in dev, request logging,
  parsers, session, router).
  """
  use Phoenix.Endpoint, otp_app: :trademark_free_strategic_land_warfare
  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_trademark_free_strategic_land_warfare_key",
    signing_salt: "JfTZ3Yp8"
  ]
  socket "/socket", TrademarkFreeStrategicLandWarfareWeb.UserSocket,
    websocket: true,
    longpoll: false
  # LiveView socket; session options are passed so LiveViews can read the session.
  socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :trademark_free_strategic_land_warfare,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)
  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end
  plug Phoenix.LiveDashboard.RequestLogger,
    param_key: "request_logger",
    cookie_key: "request_logger"
  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()
  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  plug TrademarkFreeStrategicLandWarfareWeb.Router
end
| 31.87037
| 97
| 0.744335
|
9e2e50546030e150f147143472c46263d711bf73
| 552
|
exs
|
Elixir
|
priv/repo/seeds.exs
|
sergey-nechaev/asciinema-server
|
64f87acec9c9b45fefa8bed8f70b41f0068ccb2b
|
[
"Apache-2.0"
] | null | null | null |
priv/repo/seeds.exs
|
sergey-nechaev/asciinema-server
|
64f87acec9c9b45fefa8bed8f70b41f0068ccb2b
|
[
"Apache-2.0"
] | null | null | null |
priv/repo/seeds.exs
|
sergey-nechaev/asciinema-server
|
64f87acec9c9b45fefa8bed8f70b41f0068ccb2b
|
[
"Apache-2.0"
] | null | null | null |
# Script for populating the database. Run it with:
#
#     mix run priv/repo/seeds.exs
#
# Inside the script you can read and write to any of your repositories
# directly, e.g.:
#
#     Asciinema.Repo.insert!(%Asciinema.SomeModel{})
#
# Prefer the bang variants (`insert!`, `update!`, …) so the script fails
# loudly if something goes wrong.

# Generate snapshots synchronously while seeding, instead of via the
# default async updater.
Application.put_env(:asciinema, :snapshot_updater, Asciinema.Asciicasts.SnapshotUpdater.Sync)

asciinema_user = Asciinema.Accounts.ensure_asciinema_user()
Asciinema.Asciicasts.ensure_welcome_asciicast(asciinema_user)
| 32.470588
| 93
| 0.762681
|
9e2f0d02229c24fb40008823c169fa2459dea369
| 770
|
ex
|
Elixir
|
lib/espec/assertions/enum/have_max_by.ex
|
MeneDev/espec
|
ec4b3d579c5192999e930224a8a2650bb1fdf0bc
|
[
"Apache-2.0"
] | 807
|
2015-03-25T14:00:19.000Z
|
2022-03-24T08:08:15.000Z
|
lib/espec/assertions/enum/have_max_by.ex
|
MeneDev/espec
|
ec4b3d579c5192999e930224a8a2650bb1fdf0bc
|
[
"Apache-2.0"
] | 254
|
2015-03-27T10:12:25.000Z
|
2021-07-12T01:40:15.000Z
|
lib/espec/assertions/enum/have_max_by.ex
|
MeneDev/espec
|
ec4b3d579c5192999e930224a8a2650bb1fdf0bc
|
[
"Apache-2.0"
] | 85
|
2015-04-02T10:25:19.000Z
|
2021-01-30T21:30:43.000Z
|
defmodule ESpec.Assertions.Enum.HaveMaxBy do
  @moduledoc """
  Defines 'have_max_by' assertion.

  it do: expect(collection).to have_max_by(func, value)
  """
  use ESpec.Assertions.Interface

  # Succeeds when the element maximizing `func` equals the expected value;
  # the actual maximum is carried along for the failure message.
  defp match(enum, [func, expected]) do
    actual_max = Enum.max_by(enum, func)
    {actual_max == expected, actual_max}
  end

  defp success_message(enum, [func, val], _result, positive) do
    verb = if positive, do: "is", else: "is not"
    "The maximum value of `#{inspect(enum)}` using `#{inspect(func)}` #{verb} `#{val}`."
  end

  defp error_message(enum, [func, val], result, positive) do
    verb = if positive, do: "to be", else: "not to be"

    "Expected the maximum value of `#{inspect(enum)}` using `#{inspect(func)}` #{verb} `#{val}` but the maximum is `#{result}`."
  end
end
| 28.518519
| 116
| 0.635065
|
9e2f51199dee1677a3d41d6aa31c06bd8a126ae3
| 1,785
|
ex
|
Elixir
|
clients/spanner/lib/google_api/spanner/v1/model/list_instances_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/spanner/lib/google_api/spanner/v1/model/list_instances_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
clients/spanner/lib/google_api/spanner/v1/model/list_instances_response.ex
|
medikent/elixir-google-api
|
98a83d4f7bfaeac15b67b04548711bb7e49f9490
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE(review): auto-generated model (see the generator notice in the file
# header) — code kept byte-identical; comments only.
defmodule GoogleApi.Spanner.V1.Model.ListInstancesResponse do
  @moduledoc """
  The response for ListInstances.

  ## Attributes

  *   `instances` (*type:* `list(GoogleApi.Spanner.V1.Model.Instance.t)`, *default:* `nil`) - The list of requested instances.
  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - `next_page_token` can be sent in a subsequent
      ListInstances call to fetch more
      of the matching instances.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :instances => list(GoogleApi.Spanner.V1.Model.Instance.t()),
          :nextPageToken => String.t()
        }

  # Declares how ModelBase (de)serializes each JSON attribute:
  # "instances" is decoded as a list of Instance structs; "nextPageToken"
  # passes through as a plain string.
  field(:instances, as: GoogleApi.Spanner.V1.Model.Instance, type: :list)
  field(:nextPageToken)
end
# Routes Poison decoding through the model module's own `decode/2`
# (provided by GoogleApi.Gax.ModelBase) so nested fields are deserialized
# according to the model's field declarations.
defimpl Poison.Decoder, for: GoogleApi.Spanner.V1.Model.ListInstancesResponse do
  def decode(value, options) do
    GoogleApi.Spanner.V1.Model.ListInstancesResponse.decode(value, options)
  end
end
# Encoding needs no model-specific logic, so it delegates straight to the
# shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Spanner.V1.Model.ListInstancesResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.326923
| 126
| 0.731653
|
9e2f51382358ee7e18231c9b839fb589dade4934
| 746
|
ex
|
Elixir
|
lib/central/logging/schemas/page_view_log.ex
|
icexuick/teiserver
|
22f2e255e7e21f977e6b262acf439803626a506c
|
[
"MIT"
] | 6
|
2021-02-08T10:42:53.000Z
|
2021-04-25T12:12:03.000Z
|
lib/central/logging/schemas/page_view_log.ex
|
icexuick/teiserver
|
22f2e255e7e21f977e6b262acf439803626a506c
|
[
"MIT"
] | null | null | null |
lib/central/logging/schemas/page_view_log.ex
|
icexuick/teiserver
|
22f2e255e7e21f977e6b262acf439803626a506c
|
[
"MIT"
] | 2
|
2021-02-23T22:34:00.000Z
|
2021-04-08T13:31:36.000Z
|
defmodule Central.Logging.PageViewLog do
  @moduledoc false
  use CentralWeb, :schema

  # Fields accepted from params vs. fields that must be present.
  @castable_fields [:path, :section, :method, :ip, :load_time, :user_id, :status]
  @required_fields [:method, :ip, :load_time, :status]

  schema "page_view_logs" do
    field :path, :string
    field :section, :string
    field :method, :string
    field :ip, :string
    field :load_time, :integer
    field :status, :integer

    belongs_to :user, Central.Account.User

    timestamps()
  end

  @doc false
  def changeset(struct, params) do
    struct
    |> cast(params, @castable_fields)
    |> validate_required(@required_fields)
  end

  # Deleting logs needs the dedicated permission; any other action only
  # needs read access. The :delete clause must stay first (most specific).
  @spec authorize(any, Plug.Conn.t(), atom) :: boolean
  def authorize(_, conn, :delete), do: allow?(conn, "logging.page_view.delete")
  def authorize(_, conn, _), do: allow?(conn, "logging.page_view")
end
| 26.642857
| 83
| 0.66756
|
9e2f8686d0267b59e1bb92c2caefe08433635fb7
| 1,830
|
exs
|
Elixir
|
mix.exs
|
manukall/phoenix_token_auth
|
389deeda9e2dc8c93e9e5c066e9df72cff19cdff
|
[
"MIT"
] | 190
|
2015-03-11T14:05:44.000Z
|
2021-11-13T03:42:59.000Z
|
mix.exs
|
manukall/phoenix_token_auth
|
389deeda9e2dc8c93e9e5c066e9df72cff19cdff
|
[
"MIT"
] | 48
|
2015-03-14T17:41:25.000Z
|
2016-08-16T11:37:37.000Z
|
mix.exs
|
manukall/phoenix_token_auth
|
389deeda9e2dc8c93e9e5c066e9df72cff19cdff
|
[
"MIT"
] | 62
|
2015-03-20T04:31:34.000Z
|
2020-08-24T05:05:12.000Z
|
defmodule PhoenixTokenAuth.Mixfile do
  @moduledoc false
  use Mix.Project

  @repo_url "https://github.com/manukall/phoenix_token_auth"

  def project do
    # Fixed: bare `package`/`description`/`deps` references are deprecated
    # ambiguous zero-arity calls — modern Elixir requires parentheses.
    [app: :phoenix_token_auth,
     version: "0.4.0",
     elixir: "~> 1.1",
     package: package(),
     description: description(),
     source_url: @repo_url,
     deps: deps()]
  end

  # Configuration for the OTP application
  #
  # Type `mix help compile.app` for more information
  def application do
    [applications: applications(Mix.env)]
  end

  # Test runs additionally need the :blacksmith fixture app started.
  defp applications(:test), do: applications(:all) ++ [:blacksmith]
  defp applications(_all), do: [:logger]

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Manuel Kallenbach"],
      licenses: ["MIT"],
      links: %{"GitHub" => @repo_url,
               "Phoenix" => "https://github.com/phoenixframework/phoenix"}
    ]
  end

  defp description do
    """
    Solution for token auth in Phoenix apps. Provides an api for registration, account confirmation
    and logging in.
    """
  end

  # Dependencies can be Hex packages:
  #
  #   {:mydep, "~> 0.3.0"}
  #
  # Or git/path repositories:
  #
  #   {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
  #
  # Type `mix help deps` for more examples and options
  defp deps do
    [
      {:cowboy, "~> 1.0.0"},
      {:phoenix, "~> 1.1.0"},
      {:ecto, "~> 1.0"},
      {:comeonin, "~> 2.0.0"},
      {:postgrex, ">= 0.6.0"},
      {:joken, "~> 0.13.1"},
      {:poison, "~> 1.5.0"},
      {:secure_random, "~> 0.1.0"},
      {:mailgun, "~> 0.1.2"},
      {:timex, "~> 0.19"},

      # DEV
      {:earmark, "~> 0.1.0", only: :dev},
      {:ex_doc, "~> 0.7.0", only: :dev},

      # TESTING
      {:mock, "~> 0.1.0", only: :test},
      # Fixed: trailing comma after the last list element is a syntax error
      # in Elixir — removed.
      {:blacksmith, git: "git://github.com/batate/blacksmith.git", only: :test}
    ]
  end
end
| 25.416667
| 99
| 0.548634
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.