hexsha
stringlengths 40
40
| size
int64 5
1.05M
| ext
stringclasses 98
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
118
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
118
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
134k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
1.05M
| avg_line_length
float64 1
1.03M
| max_line_length
int64 2
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c9353e982c46a0174dbf30019467b6f2bce4a73a
| 418
|
ts
|
TypeScript
|
assets/script/core/utils/SingletonFactory.ts
|
CaoHu0/CocosCreater-TS
|
5ae6a51e5daddbaa24ccf9cdcd5d808aef591d96
|
[
"Apache-2.0"
] | null | null | null |
assets/script/core/utils/SingletonFactory.ts
|
CaoHu0/CocosCreater-TS
|
5ae6a51e5daddbaa24ccf9cdcd5d808aef591d96
|
[
"Apache-2.0"
] | null | null | null |
assets/script/core/utils/SingletonFactory.ts
|
CaoHu0/CocosCreater-TS
|
5ae6a51e5daddbaa24ccf9cdcd5d808aef591d96
|
[
"Apache-2.0"
] | 1
|
2020-10-06T05:27:33.000Z
|
2020-10-06T05:27:33.000Z
|
/**
 * 单例工厂 (singleton factory): lazily creates and caches exactly one
 * instance per constructor function.
 */
export class SingletonFactory {
    // Cache keyed by the constructor itself; each constructor maps to its
    // single shared instance. `unknown` avoids the unsound `Object` value
    // type the original used, and the cast happens in exactly one place.
    private static instances: Map<new () => unknown, unknown> = new Map();

    /**
     * Returns the cached instance for the given parameterless constructor,
     * creating and caching it on first request.
     * @param c A class with a public no-argument constructor.
     * @returns The shared instance of `c`.
     */
    public static getInstance<T>(c: new () => T): T {
        // Single lookup instead of the original has() + get() double lookup.
        let obj = SingletonFactory.instances.get(c) as T | undefined;
        if (obj === undefined) {
            obj = new c();
            SingletonFactory.instances.set(c, obj);
        }
        return obj;
    }
}
| 24.588235
| 84
| 0.545455
|
5baa4f0fb6c02edb2165c53c64972267c2ab16e4
| 347
|
sh
|
Shell
|
make.sh
|
qnib/docker-plugin-authz
|
81d5d47d78a0a94955524d5d0a0606730542e1aa
|
[
"Apache-2.0"
] | null | null | null |
make.sh
|
qnib/docker-plugin-authz
|
81d5d47d78a0a94955524d5d0a0606730542e1aa
|
[
"Apache-2.0"
] | null | null | null |
make.sh
|
qnib/docker-plugin-authz
|
81d5d47d78a0a94955524d5d0a0606730542e1aa
|
[
"Apache-2.0"
] | null | null | null |
#!/bin/sh
# Build the docker-plugin-authz rootfs and (re)create the managed plugin.
# Usage: ./make.sh [plugin-name]   (defaults to qnib/docker-plugin-authz)
set -x
name=${1:-"qnib/docker-plugin-authz"}

# Build the image containing the plugin binary; abort on failure instead of
# exporting a stale or missing image (the original carried on regardless).
docker build -f Dockerfile.pluginbuild -t "$name" . || exit 1

# Export the image filesystem into ./rootfs.
id=$(docker create "$name") || exit 1
rm -rf rootfs
mkdir -p rootfs
docker export "$id" | tar -xvf - -C rootfs
docker rm "$id"

# Strip directories that must not ship inside the plugin rootfs.
rm -rf rootfs/proc rootfs/sys rootfs/go rootfs/etc rootfs/dev

# Replace any previously installed plugin; the rm fails harmlessly on the
# first run when no plugin exists yet.
docker plugin rm "$name" || true
docker plugin create "$name" .
| 19.277778
| 61
| 0.688761
|
436bf1983447c65fae2d22e2082d8b3ebb4d180a
| 373
|
ts
|
TypeScript
|
src/types.spec.ts
|
rogalmic/vscode-xml-complete
|
d484083d7dc56b40cf92257eed11f5cbc056febe
|
[
"MIT"
] | 28
|
2019-04-09T09:14:52.000Z
|
2021-11-09T09:56:50.000Z
|
test/src/types.spec.ts
|
zkoss/ZK-vscode-plugin
|
163fe8bc46c2bc1e9291eb1869ee3db345ec4a6f
|
[
"MIT"
] | 31
|
2019-03-08T16:42:13.000Z
|
2021-04-21T09:35:09.000Z
|
test/src/types.spec.ts
|
zkoss/ZK-vscode-plugin
|
163fe8bc46c2bc1e9291eb1869ee3db345ec4a6f
|
[
"MIT"
] | 19
|
2019-05-29T20:08:13.000Z
|
2022-03-11T02:33:29.000Z
|
import { CompletionString, XmlTagCollection } from "./types";

describe("XmlTagCollection", () => {
    it("return empty string when data missing", () => {
        // Collection with a single namespace mapping registered.
        const collection = new XmlTagCollection();
        collection.setNsMap("a", "b");

        // Fixing an empty completion string against an empty mapping
        // must yield an empty completion string back.
        const result = collection.fixNs(new CompletionString(""), new Map<string, string>());
        expect(result).toEqual(new CompletionString(""));
    });
});
| 28.692308
| 78
| 0.595174
|
a385a013c04dc7aa6bb69111dd9e05deef6aefc6
| 2,015
|
java
|
Java
|
thirdeye/thirdeye-detector/src/main/java/com/linkedin/thirdeye/lib/util/MetricTimeSeriesUtils.java
|
Hanmourang/Pinot
|
85204d140c34470a948e000a8562b87c0cc3f1d7
|
[
"Apache-2.0"
] | 17
|
2015-11-27T15:56:18.000Z
|
2020-11-17T12:38:17.000Z
|
thirdeye/thirdeye-detector/src/main/java/com/linkedin/thirdeye/lib/util/MetricTimeSeriesUtils.java
|
Hanmourang/Pinot
|
85204d140c34470a948e000a8562b87c0cc3f1d7
|
[
"Apache-2.0"
] | null | null | null |
thirdeye/thirdeye-detector/src/main/java/com/linkedin/thirdeye/lib/util/MetricTimeSeriesUtils.java
|
Hanmourang/Pinot
|
85204d140c34470a948e000a8562b87c0cc3f1d7
|
[
"Apache-2.0"
] | 10
|
2015-12-30T07:50:16.000Z
|
2019-10-31T03:13:23.000Z
|
package com.linkedin.thirdeye.lib.util;
import com.linkedin.thirdeye.api.MetricTimeSeries;
import org.apache.commons.math3.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.Set;
/**
 * Helpers for converting {@link MetricTimeSeries} data into dense,
 * bucket-aligned primitive arrays.
 */
public class MetricTimeSeriesUtils {
  private static final Logger LOGGER = LoggerFactory.getLogger(MetricTimeSeriesUtils.class);

  /**
   * Converts one metric of a time series into aligned timestamp / value arrays
   * covering every bucket from the series' first to its last timestamp.
   *
   * @param series
   *  The MetricTimeSeries (must contain at least one time window)
   * @param metric
   *  The metric to extract
   * @param bucketWidthInMillis
   *  The width of the bucket in milliseconds; must be positive
   * @param missingTimeStamps
   *  If this is not null, then any missing timestamps will be added to this set
   * @param defaultValue
   *  The default value to use if the data is missing
   * @return
   *  A pair containing the timestamps and values
   * @throws IllegalArgumentException if bucketWidthInMillis is not positive
   */
  public static Pair<long[], double[]> toArray(MetricTimeSeries series, String metric, long bucketWidthInMillis,
      Set<Long> missingTimeStamps, double defaultValue) {
    if (bucketWidthInMillis <= 0) {
      // Guard against the division by zero below (the original would throw
      // an ArithmeticException with no context).
      throw new IllegalArgumentException("bucketWidthInMillis must be positive: " + bucketWidthInMillis);
    }
    // Hoisted: the original invoked this accessor five separate times.
    Set<Long> timeWindowSet = series.getTimeWindowSet();
    long startTime = Collections.min(timeWindowSet);
    long endTime = Collections.max(timeWindowSet);
    int numValues = (int) (1 + ((endTime - startTime) / bucketWidthInMillis));
    double[] values = new double[numValues];
    long[] timestamps = new long[numValues];
    for (int i = 0; i < numValues; i++) {
      long timeWindow = startTime + (i * bucketWidthInMillis);
      timestamps[i] = timeWindow;
      if (timeWindowSet.contains(timeWindow)) {
        values[i] = series.get(timeWindow, metric).doubleValue();
      } else {
        values[i] = defaultValue;
        if (missingTimeStamps != null) {
          missingTimeStamps.add(timeWindow); // do not use these datapoints
        }
      }
    }
    if (numValues != timeWindowSet.size()) {
      LOGGER.warn("looks like there are holes in the data: expected {} timestamps, found {}", numValues,
          timeWindowSet.size());
    }
    return new Pair<>(timestamps, values);
  }
}
| 31.984127
| 112
| 0.683375
|
f0afead3c3737842fde4f208ec17f0b72b32bc05
| 9,196
|
lua
|
Lua
|
node.lua
|
flazer/package-remotephotoviewer
|
a0183743a9101d090b247ed548f74321243ec158
|
[
"MIT"
] | null | null | null |
node.lua
|
flazer/package-remotephotoviewer
|
a0183743a9101d090b247ed548f74321243ec158
|
[
"MIT"
] | null | null | null |
node.lua
|
flazer/package-remotephotoviewer
|
a0183743a9101d090b247ed548f74321243ec158
|
[
"MIT"
] | null | null | null |
-- Render at the display's native resolution.
gl.setup(NATIVE_WIDTH, NATIVE_HEIGHT)
local json = require "json"
local font = resource.load_font "roboto.ttf"
local black = resource.create_colored_texture(0,0,0,1)
-- Fragment shaders used when drawing photos (selected per-image in the
-- Ken Burns branch below based on how much the image fills the screen).
local shaders = {
    -- Samples the texture twice with a tiny fixed offset and averages,
    -- to soften scaling artifacts on near-fullscreen images.
    multisample = resource.create_shader[[
        uniform sampler2D Texture;
        varying vec2 TexCoord;
        uniform vec4 Color;
        uniform float x, y, s;
        void main() {
            vec2 texcoord = TexCoord * vec2(s, s) + vec2(x, y);
            vec4 c1 = texture2D(Texture, texcoord);
            vec4 c2 = texture2D(Texture, texcoord + vec2(0.0002, 0.0002));
            gl_FragColor = (c2+c1)*0.5 * Color;
        }
    ]],
    -- Plain single-sample variant with the same pan/zoom uniforms.
    simple = resource.create_shader[[
        uniform sampler2D Texture;
        varying vec2 TexCoord;
        uniform vec4 Color;
        uniform float x, y, s;
        void main() {
            gl_FragColor = texture2D(Texture, TexCoord * vec2(s, s) + vec2(x, y)) * Color;
        }
    ]],
}
local settings = {
    -- Seconds before an item's start time at which its image is loaded
    -- (see ImageJob's first wait_t call).
    IMAGE_PRELOAD = 2;
    -- Scheduling lookahead in seconds used by Queue.tick to decide when
    -- to enqueue the next item.
    PRELOAD_TIME = 5;
}
-- Fade ramp: returns an alpha in [0, 1] that rises over `ramp_time`
-- seconds after t_s and falls over the final `ramp_time` seconds
-- before t_e; t_c is the current time. A ramp_time of 0 disables fading.
local function ramp(t_s, t_e, t_c, ramp_time)
    if ramp_time == 0 then return 1 end
    local fade_in = (t_c - t_s) / ramp_time
    local fade_out = (t_e - t_c) / ramp_time
    return math.min(1, fade_in, fade_out)
end
-- Cyclic indexing: maps any integer offset onto a valid 1-based index
-- into `items` and returns the element together with the wrapped index
-- (which callers feed back in to advance through the list).
local function cycled(items, offset)
    local idx = offset % #items + 1
    return items[idx], idx
end
-- "Loading..." overlay: faded in while nothing is scheduled, faded out
-- again once content becomes available.
local Loading = (function()
    local font = resource.load_font "roboto.ttf"
    local text = "Loading..."
    local size = 80
    local text_width = font:width(text, size)
    local alpha = 0

    -- Draws the centered text; skipped entirely when fully transparent.
    local function draw()
        if alpha == 0 then
            return
        end
        font:write((WIDTH-text_width)/2, (HEIGHT-size)/2, text, size, 1,1,1,alpha)
    end

    -- Per-frame fade steps (0.01 alpha per call, clamped to [0, 1]).
    local function fade_in()
        alpha = math.min(1, alpha + 0.01)
    end
    local function fade_out()
        alpha = math.max(0, alpha - 0.01)
    end

    return {
        fade_in = fade_in;
        fade_out = fade_out;
        draw = draw;
    }
end)()
-- Configuration state, kept in sync with config.json and with the photo
-- assets pushed to this node. Exposes read-only accessors.
local Config = (function()
    local playlist = {}
    local kenburns = false
    -- BUG FIX: the original declared `local switch_time` twice (the first,
    -- `= 1`, was dead) and assigned `display_time` without declaring it,
    -- leaking it as a global. Both are now single locals captured by the
    -- file_watch callback and the accessors below.
    local switch_time = 30
    local display_time

    util.file_watch("config.json", function(raw)
        print "updated config.json"
        local config = json.decode(raw)
        kenburns = config.kenburns
        switch_time = config.switch_time
        display_time = config.display_time
    end)

    -- All known photos, keyed by basename ("photo-<id>").
    local photos = {}

    -- Rebuilds the playlist from `photos`, ordered by their `stored` field.
    local function update_playlist()
        print "updating playlist"
        local sorted = {}
        for basename, photo in pairs(photos) do
            sorted[#sorted+1] = photo
        end
        table.sort(sorted, function(p1, p2)
            return p1.stored < p2.stored
        end)
        playlist = {}
        for idx = 1, #sorted do
            local photo = sorted[idx]
            playlist[#playlist+1] = {
                asset_name = photo['jpg'];
                type = "image";
                meta = photo;
            }
        end
        node.gc()
    end

    -- Track added photos: each photo-*.jpg has a sibling .json metadata file.
    node.event("content_update", function(filename)
        print("updated content", filename)
        if filename:sub(1, 6) == "photo-" and filename:sub(-4, -1) == ".jpg" then
            local basename = filename:sub(1, -5)
            photos[basename] = json.decode(resource.load_file(basename .. ".json"))
            update_playlist()
        end
    end)

    -- Track removed photos.
    node.event("content_remove", function(filename)
        if filename:sub(1, 6) == "photo-" and filename:sub(-4, -1) == ".jpg" then
            local basename = filename:sub(1, -5)
            photos[basename] = nil
            update_playlist()
        end
    end)

    return {
        get_playlist = function() return playlist end;
        get_switch_time = function() return switch_time end;
        get_display_time = function() return display_time end;
        get_kenburns = function() return kenburns end;
    }
end)()
-- Round-robin scheduler over Config's playlist.
local Scheduler = (function()
    -- Position within the playlist, advanced via cycled().
    local offset = 0

    -- Returns the next playlist item, cycling back to the start when the
    -- end is reached; returns nil while the playlist is empty.
    local function get_next()
        local items = Config.get_playlist()
        if #items == 0 then
            print "no item to schedule"
            return nil
        end
        local item
        item, offset = cycled(items, offset)
        print(string.format("next scheduled item is %s", item.asset_name))
        return item
    end

    return {
        get_next = get_next;
    }
end)()
-- Coroutine body for displaying one image. `ctx` carries starts/ends
-- timestamps and the opened asset; `fn` provides the yield-based waiting
-- primitives supplied by Queue.enqueue. Returns true when the job is done.
local ImageJob = function(item, ctx, fn)
    -- Sleep until shortly before our slot so the texture is ready in time.
    fn.wait_t(ctx.starts - settings.IMAGE_PRELOAD)
    local res = resource.load_image{
        file = ctx.asset,
        mipmap = true,
    }
    -- Poll once per frame until the async load finishes (or fails).
    for now in fn.wait_next_frame do
        local state, err = res:state()
        if state == "loaded" then
            break
        elseif state == "error" then
            error("preloading failed: " .. err)
        end
    end
    print "waiting for start"
    local starts = fn.wait_t(ctx.starts)
    -- wait_t may return slightly after ctx.starts; duration is measured
    -- from the actual start time.
    local duration = ctx.ends - starts
    print "starting"
    if Config.get_kenburns() then
        -- Linear interpolation helper for the pan/zoom parameters.
        local function lerp(s, e, t)
            return s + t * (e-s)
        end
        -- Hand-picked pan/zoom trajectories (x/y offset and scale s).
        local paths = {
            {from = {x=0.0, y=0.0, s=1.0 }, to = {x=0.08, y=0.08, s=0.9 }},
            {from = {x=0.05, y=0.0, s=0.93}, to = {x=0.03, y=0.03, s=0.97}},
            {from = {x=0.02, y=0.05, s=0.91}, to = {x=0.01, y=0.05, s=0.95}},
            {from = {x=0.07, y=0.05, s=0.91}, to = {x=0.04, y=0.03, s=0.95}},
        }
        local path = paths[math.random(1, #paths)]
        local to, from = path.to, path.from
        -- Randomly reverse the trajectory half of the time.
        if math.random() >= 0.5 then
            to, from = from, to
        end
        local w, h = res:size()
        -- Use the smoothing shader for images that nearly fill the screen.
        local multisample = w / WIDTH > 0.8 or h / HEIGHT > 0.8
        local shader = multisample and shaders.multisample or shaders.simple
        for now in fn.wait_next_frame do
            -- Progress through the pan/zoom path, 0 at start, 1 at end.
            local t = (now - starts) / duration
            shader:use{
                x = lerp(from.x, to.x, t);
                y = lerp(from.y, to.y, t);
                s = lerp(from.s, to.s, t);
            }
            -- Alpha from ramp() cross-fades against the overlapping job.
            util.draw_correct(res, 0, 0, WIDTH, HEIGHT, ramp(
                ctx.starts, ctx.ends, now, Config.get_switch_time()
            ))
            shader:deactivate()
            if now > ctx.ends then
                break
            end
        end
    else
        -- Static display: just draw with the fade ramp each frame.
        for now in fn.wait_next_frame do
            util.draw_correct(res, 0, 0, WIDTH, HEIGHT, ramp(
                ctx.starts, ctx.ends, now, Config.get_switch_time()
            ))
            if now > ctx.ends then
                break
            end
        end
    end
    -- Release the GPU texture promptly instead of waiting for GC.
    res:dispose()
    print "image job completed"
    return true
end
-- Job queue: schedules upcoming items ahead of time and drives all active
-- job coroutines once per frame.
local Queue = (function()
    local jobs = {}
    -- Timestamp up to which the program is already planned.
    local scheduled_until = sys.now()

    -- Creates and starts a job coroutine for `item`, covering the time
    -- span [starts, ends]. Silently drops the item if its asset cannot
    -- be opened or the coroutine fails to start.
    local function enqueue(starts, ends, item)
        local co = coroutine.create(({
            image = ImageJob,
        })[item.type])
        local success, asset = pcall(resource.open_file, item.asset_name)
        if not success then
            print("CANNOT GRAB ASSET: ", asset)
            return
        end
        -- an image may overlap another image
        if #jobs > 0 and jobs[#jobs].type == "image" and item.type == "image" then
            starts = starts - Config.get_switch_time()
        end
        local ctx = {
            starts = starts,
            ends = ends,
            asset = asset;
        }
        -- First resume runs the job until its first yield. The fn table
        -- implements the waiting protocol: every yield(false) returns the
        -- current frame time passed to the next resume in tick().
        local success, err = coroutine.resume(co, item, ctx, {
            wait_next_frame = function ()
                return coroutine.yield(false)
            end;
            wait_t = function(t)
                while true do
                    local now = coroutine.yield(false)
                    if now >= t then
                        return now
                    end
                end
            end;
        })
        if not success then
            print("CANNOT START JOB: ", err)
            return
        end
        jobs[#jobs+1] = {
            co = co;
            ctx = ctx;
            type = item.type;
        }
        scheduled_until = ends
        print("added job. scheduled program until ", scheduled_until)
    end

    -- Per-frame driver: tops up the schedule, toggles the Loading overlay,
    -- and resumes every live job with the current time.
    local function tick()
        gl.clear(0, 0, 0, 0)
        -- Enqueue until we are planned at least PRELOAD_TIME into the
        -- future (at most 3 attempts per frame, e.g. on asset errors).
        for try = 1,3 do
            if sys.now() + settings.PRELOAD_TIME < scheduled_until then
                break
            end
            local item = Scheduler.get_next()
            if item then
                enqueue(scheduled_until, scheduled_until + Config.get_display_time(), item)
            end
        end
        if #jobs == 0 then
            Loading.fade_in()
        else
            Loading.fade_out()
        end
        local now = sys.now()
        for idx = #jobs,1,-1 do -- iterate backwards so we can remove finished jobs
            local job = jobs[idx]
            -- is_finished is the job's yield/return value, or the error
            -- message when resume fails.
            local success, is_finished = coroutine.resume(job.co, now)
            if not success then
                print("CANNOT RESUME JOB: ", is_finished)
                table.remove(jobs, idx)
            elseif is_finished then
                table.remove(jobs, idx)
            end
        end
        Loading.draw()
    end

    return {
        tick = tick;
    }
end)()
-- Run Lua garbage collection once per second.
util.set_interval(1, node.gc)
-- Frame entry point: clear to opaque black, then advance the job queue
-- (which draws the current image and the Loading overlay).
function node.render()
    gl.clear(0, 0, 0, 1)
    Queue.tick()
end
| 26.967742
| 91
| 0.5311
|
313dc2ab60b87ec605b085bd2044891c11887002
| 157,640
|
rb
|
Ruby
|
gems/aws-sdk-sagemaker/lib/aws-sdk-sagemaker/client.rb
|
DalavanCloud/aws-sdk-ruby
|
6f190d198f8857ec63fcf32bf5aee9b104ffa938
|
[
"Apache-2.0"
] | 1
|
2018-11-04T17:54:43.000Z
|
2018-11-04T17:54:43.000Z
|
gems/aws-sdk-sagemaker/lib/aws-sdk-sagemaker/client.rb
|
DalavanCloud/aws-sdk-ruby
|
6f190d198f8857ec63fcf32bf5aee9b104ffa938
|
[
"Apache-2.0"
] | null | null | null |
gems/aws-sdk-sagemaker/lib/aws-sdk-sagemaker/client.rb
|
DalavanCloud/aws-sdk-ruby
|
6f190d198f8857ec63fcf32bf5aee9b104ffa938
|
[
"Apache-2.0"
] | null | null | null |
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/json_rpc.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:sagemaker)
module Aws::SageMaker
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :sagemaker
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::JsonRpc)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::SharedCredentials` - Used for loading credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2 IMDS instance profile - When used by default, the timeouts are
# very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentials` to enable retries and extended
# timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is searched for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function. Some predefined functions can be referenced by name - :none, :equal, :full, otherwise a Proc that takes and returns a number.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors and auth
# errors from expired credentials.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit) used by the default backoff function.
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :simple_json (false)
# Disables request parameter conversion, validation, and formatting.
# Also disable response data type conversions. This option is useful
# when you want to ensure the highest level of performance by
# avoiding overhead of walking request parameters and response data
# structures.
#
# When `:simple_json` is enabled, the request parameters hash must
# be formatted exactly as the DynamoDB API expects.
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# All of the options documented above are handled by the plugins
# registered on this class; construction simply delegates to
# Seahorse::Client::Base.
def initialize(*args)
  super
end
# @!group API Operations
# Adds or overwrites one or more tags for the specified Amazon SageMaker
# resource. You can add tags to notebook instances, training jobs,
# hyperparameter tuning jobs, models, endpoint configurations, and
# endpoints.
#
# Each tag consists of a key and an optional value. Tag keys must be
# unique per resource. For more information about tags, see [AWS Tagging
# Strategies][1].
#
# <note markdown="1"> Tags that you add to a hyperparameter tuning job by calling this API
# are also added to any training jobs that the hyperparameter tuning job
# launches after you call this API, but not to training jobs that the
# hyperparameter tuning job launched before you called this API. To make
# sure that the tags associated with a hyperparameter tuning job are
# also added to all training jobs that the hyperparameter tuning job
# launches, add the tags when you first create the tuning job by
# specifying them in the `Tags` parameter of
# CreateHyperParameterTuningJob
#
# </note>
#
#
#
# [1]: https://aws.amazon.com/answers/account-management/aws-tagging-strategies/
#
# @option params [required, String] :resource_arn
# The Amazon Resource Name (ARN) of the resource that you want to tag.
#
# @option params [required, Array<Types::Tag>] :tags
# An array of `Tag` objects. Each tag is a key-value pair. Only the
# `key` parameter is required. If you don't specify a value, Amazon
# SageMaker sets the value to an empty string.
#
# @return [Types::AddTagsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::AddTagsOutput#tags #tags} => Array<Types::Tag>
#
# @example Request syntax with placeholder values
#
# resp = client.add_tags({
# resource_arn: "ResourceArn", # required
# tags: [ # required
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# })
#
# @example Response structure
#
# resp.tags #=> Array
# resp.tags[0].key #=> String
# resp.tags[0].value #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/AddTags AWS API Documentation
#
# @overload add_tags(params = {})
# @param [Hash] params ({})
# Builds and dispatches the AddTags request through the shared
# Seahorse request pipeline.
def add_tags(params = {}, options = {})
  request = build_request(:add_tags, params)
  request.send_request(options)
end
# Creates an endpoint using the endpoint configuration specified in the
# request. Amazon SageMaker uses the endpoint to provision resources and
# deploy models. You create the endpoint configuration with the
# [CreateEndpointConfig][1] API.
#
# <note markdown="1"> Use this API only for hosting models using Amazon SageMaker hosting
# services.
#
# </note>
#
# The endpoint name must be unique within an AWS Region in your AWS
# account.
#
# When it receives the request, Amazon SageMaker creates the endpoint,
# launches the resources (ML compute instances), and deploys the
# model(s) on them.
#
# When Amazon SageMaker receives the request, it sets the endpoint
# status to `Creating`. After it creates the endpoint, it sets the
# status to `InService`. Amazon SageMaker can then process incoming
# requests for inferences. To check the status of an endpoint, use the
# [DescribeEndpoint][2] API.
#
# For an example, see [Exercise 1: Using the K-Means Algorithm Provided
# by Amazon SageMaker][3].
#
# If any of the models hosted at this endpoint get model data from an
# Amazon S3 location, Amazon SageMaker uses AWS Security Token Service
# to download model artifacts from the S3 path you provided. AWS STS is
# activated in your IAM user account by default. If you previously
# deactivated AWS STS for a region, you need to reactivate AWS STS for
# that region. For more information, see [Activating and Deactivating
# AWS STS in an AWS Region][4] in the *AWS Identity and Access Management
# User Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpointConfig.html
# [2]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_DescribeEndpoint.html
# [3]: http://docs.aws.amazon.com/sagemaker/latest/dg/ex1.html
# [4]: http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html
#
# @option params [required, String] :endpoint_name
# The name of the endpoint. The name must be unique within an AWS Region
# in your AWS account.
#
# @option params [required, String] :endpoint_config_name
# The name of an endpoint configuration. For more information, see
# [CreateEndpointConfig][1].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpointConfig.html
#
# @option params [Array<Types::Tag>] :tags
# An array of key-value pairs. For more information, see [Using Cost
# Allocation Tags][1] in the *AWS Billing and Cost Management User
# Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html#allocation-what
#
# @return [Types::CreateEndpointOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateEndpointOutput#endpoint_arn #endpoint_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_endpoint({
# endpoint_name: "EndpointName", # required
# endpoint_config_name: "EndpointConfigName", # required
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# })
#
# @example Response structure
#
# resp.endpoint_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreateEndpoint AWS API Documentation
#
# @overload create_endpoint(params = {})
# @param [Hash] params ({})
# Builds and dispatches the CreateEndpoint request through the shared
# Seahorse request pipeline.
def create_endpoint(params = {}, options = {})
  request = build_request(:create_endpoint, params)
  request.send_request(options)
end
# Creates an endpoint configuration that Amazon SageMaker hosting
# services uses to deploy models. In the configuration, you identify one
# or more models, created using the `CreateModel` API, to deploy and the
# resources that you want Amazon SageMaker to provision. Then you call
# the [CreateEndpoint][1] API.
#
# <note markdown="1"> Use this API only if you want to use Amazon SageMaker hosting services
# to deploy models into production.
#
# </note>
#
# In the request, you define one or more `ProductionVariant`s, each of
# which identifies a model. Each `ProductionVariant` parameter also
# describes the resources that you want Amazon SageMaker to provision.
# This includes the number and type of ML compute instances to deploy.
#
# If you are hosting multiple models, you also assign a `VariantWeight`
# to specify how much traffic you want to allocate to each model. For
# example, suppose that you want to host two models, A and B, and you
# assign traffic weight 2 for model A and 1 for model B. Amazon
# SageMaker distributes two-thirds of the traffic to Model A, and
# one-third to model B.
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpoint.html
#
# @option params [required, String] :endpoint_config_name
# The name of the endpoint configuration. You specify this name in a
# [CreateEndpoint][1] request.
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpoint.html
#
# @option params [required, Array<Types::ProductionVariant>] :production_variants
# An array of `ProductionVariant` objects, one for each model that you
# want to host at this endpoint.
#
# @option params [Array<Types::Tag>] :tags
# An array of key-value pairs. For more information, see [Using Cost
# Allocation Tags][1] in the *AWS Billing and Cost Management User
# Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html#allocation-what
#
# @option params [String] :kms_key_id
# The Amazon Resource Name (ARN) of a AWS Key Management Service key
# that Amazon SageMaker uses to encrypt data on the storage volume
# attached to the ML compute instance that hosts the endpoint.
#
# @return [Types::CreateEndpointConfigOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateEndpointConfigOutput#endpoint_config_arn #endpoint_config_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_endpoint_config({
# endpoint_config_name: "EndpointConfigName", # required
# production_variants: [ # required
# {
# variant_name: "VariantName", # required
# model_name: "ModelName", # required
# initial_instance_count: 1, # required
# instance_type: "ml.t2.medium", # required, accepts ml.t2.medium, ml.t2.large, ml.t2.xlarge, ml.t2.2xlarge, ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.large, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.large, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge
# initial_variant_weight: 1.0,
# },
# ],
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# kms_key_id: "KmsKeyId",
# })
#
# @example Response structure
#
# resp.endpoint_config_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreateEndpointConfig AWS API Documentation
#
# @overload create_endpoint_config(params = {})
# @param [Hash] params ({})
def create_endpoint_config(params = {}, options = {})
req = build_request(:create_endpoint_config, params)
req.send_request(options)
end
# Starts a hyperparameter tuning job.
#
# @option params [required, String] :hyper_parameter_tuning_job_name
# The name of the tuning job. This name is the prefix for the names of
# all training jobs that this tuning job launches. The name must be
# unique within the same AWS account and AWS Region. Names are not case
# sensitive, and must be between 1-32 characters.
#
# @option params [required, Types::HyperParameterTuningJobConfig] :hyper_parameter_tuning_job_config
# The HyperParameterTuningJobConfig object that describes the tuning
# job, including the search strategy, metric used to evaluate training
# jobs, ranges of parameters to search, and resource limits for the
# tuning job.
#
# @option params [required, Types::HyperParameterTrainingJobDefinition] :training_job_definition
# The HyperParameterTrainingJobDefinition object that describes the
# training jobs that this tuning job launches, including static
# hyperparameters, input data configuration, output data configuration,
# resource configuration, and stopping condition.
#
# @option params [Array<Types::Tag>] :tags
# An array of key-value pairs. You can use tags to categorize your AWS
# resources in different ways, for example, by purpose, owner, or
# environment. For more information, see [AWS Tagging Strategies][1].
#
# Tags that you specify for the tuning job are also added to all
# training jobs that the tuning job launches.
#
#
#
# [1]: https://aws.amazon.com/answers/account-management/aws-tagging-strategies/
#
# @return [Types::CreateHyperParameterTuningJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateHyperParameterTuningJobResponse#hyper_parameter_tuning_job_arn #hyper_parameter_tuning_job_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_hyper_parameter_tuning_job({
# hyper_parameter_tuning_job_name: "HyperParameterTuningJobName", # required
# hyper_parameter_tuning_job_config: { # required
# strategy: "Bayesian", # required, accepts Bayesian
# hyper_parameter_tuning_job_objective: { # required
# type: "Maximize", # required, accepts Maximize, Minimize
# metric_name: "MetricName", # required
# },
# resource_limits: { # required
# max_number_of_training_jobs: 1, # required
# max_parallel_training_jobs: 1, # required
# },
# parameter_ranges: { # required
# integer_parameter_ranges: [
# {
# name: "ParameterKey", # required
# min_value: "ParameterValue", # required
# max_value: "ParameterValue", # required
# },
# ],
# continuous_parameter_ranges: [
# {
# name: "ParameterKey", # required
# min_value: "ParameterValue", # required
# max_value: "ParameterValue", # required
# },
# ],
# categorical_parameter_ranges: [
# {
# name: "ParameterKey", # required
# values: ["ParameterValue"], # required
# },
# ],
# },
# },
# training_job_definition: { # required
# static_hyper_parameters: {
# "ParameterKey" => "ParameterValue",
# },
# algorithm_specification: { # required
# training_image: "AlgorithmImage", # required
# training_input_mode: "Pipe", # required, accepts Pipe, File
# metric_definitions: [
# {
# name: "MetricName", # required
# regex: "MetricRegex", # required
# },
# ],
# },
# role_arn: "RoleArn", # required
# input_data_config: [ # required
# {
# channel_name: "ChannelName", # required
# data_source: { # required
# s3_data_source: { # required
# s3_data_type: "ManifestFile", # required, accepts ManifestFile, S3Prefix
# s3_uri: "S3Uri", # required
# s3_data_distribution_type: "FullyReplicated", # accepts FullyReplicated, ShardedByS3Key
# },
# },
# content_type: "ContentType",
# compression_type: "None", # accepts None, Gzip
# record_wrapper_type: "None", # accepts None, RecordIO
# input_mode: "Pipe", # accepts Pipe, File
# },
# ],
# vpc_config: {
# security_group_ids: ["SecurityGroupId"], # required
# subnets: ["SubnetId"], # required
# },
# output_data_config: { # required
# kms_key_id: "KmsKeyId",
# s3_output_path: "S3Uri", # required
# },
# resource_config: { # required
# instance_type: "ml.m4.xlarge", # required, accepts ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge
# instance_count: 1, # required
# volume_size_in_gb: 1, # required
# volume_kms_key_id: "KmsKeyId",
# },
# stopping_condition: { # required
# max_runtime_in_seconds: 1,
# },
# },
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# })
#
# @example Response structure
#
# resp.hyper_parameter_tuning_job_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreateHyperParameterTuningJob AWS API Documentation
#
# @overload create_hyper_parameter_tuning_job(params = {})
# @param [Hash] params ({})
def create_hyper_parameter_tuning_job(params = {}, options = {})
req = build_request(:create_hyper_parameter_tuning_job, params)
req.send_request(options)
end
# Creates a model in Amazon SageMaker. In the request, you name the
# model and describe a primary container. For the primary container, you
# specify the docker image containing inference code, artifacts (from
# prior training), and custom environment map that the inference code
# uses when you deploy the model for predictions.
#
# Use this API to create a model if you want to use Amazon SageMaker
# hosting services or run a batch transform job.
#
# To host your model, you create an endpoint configuration with the
# `CreateEndpointConfig` API, and then create an endpoint with the
# `CreateEndpoint` API. Amazon SageMaker then deploys all of the
# containers that you defined for the model in the hosting environment.
#
# To run a batch transform using your model, you start a job with the
# `CreateTransformJob` API. Amazon SageMaker uses your model and your
# dataset to get inferences which are then saved to a specified S3
# location.
#
# In the `CreateModel` request, you must define a container with the
# `PrimaryContainer` parameter.
#
# In the request, you also provide an IAM role that Amazon SageMaker can
# assume to access model artifacts and docker image for deployment on ML
# compute hosting instances or for batch transform jobs. In addition,
# you also use the IAM role to manage permissions the inference code
    # needs. For example, if the inference code accesses any other AWS
# resources, you grant necessary permissions via this role.
#
# @option params [required, String] :model_name
# The name of the new model.
#
# @option params [required, Types::ContainerDefinition] :primary_container
# The location of the primary docker image containing inference code,
# associated artifacts, and custom environment map that the inference
# code uses when the model is deployed for predictions.
#
# @option params [required, String] :execution_role_arn
# The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker
# can assume to access model artifacts and docker image for deployment
# on ML compute instances or for batch transform jobs. Deploying on ML
# compute instances is part of model hosting. For more information, see
# [Amazon SageMaker Roles][1].
#
# <note markdown="1"> To be able to pass this role to Amazon SageMaker, the caller of this
# API must have the `iam:PassRole` permission.
#
# </note>
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html
#
# @option params [Array<Types::Tag>] :tags
# An array of key-value pairs. For more information, see [Using Cost
# Allocation Tags][1] in the *AWS Billing and Cost Management User
# Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html#allocation-what
#
# @option params [Types::VpcConfig] :vpc_config
# A VpcConfig object that specifies the VPC that you want your model to
# connect to. Control access to and from your model container by
# configuring the VPC. `VpcConfig` is used in hosting services and in
# batch transform. For more information, see [Protect Endpoints by Using
# an Amazon Virtual Private Cloud][1] and [Protect Data in Batch
# Transform Jobs by Using an Amazon Virtual Private Cloud][2].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/host-vpc.html
# [2]: http://docs.aws.amazon.com/sagemaker/latest/dg/batch-vpc.html
#
# @return [Types::CreateModelOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateModelOutput#model_arn #model_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_model({
# model_name: "ModelName", # required
# primary_container: { # required
# container_hostname: "ContainerHostname",
# image: "Image", # required
# model_data_url: "Url",
# environment: {
# "EnvironmentKey" => "EnvironmentValue",
# },
# },
# execution_role_arn: "RoleArn", # required
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# vpc_config: {
# security_group_ids: ["SecurityGroupId"], # required
# subnets: ["SubnetId"], # required
# },
# })
#
# @example Response structure
#
# resp.model_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreateModel AWS API Documentation
#
# @overload create_model(params = {})
# @param [Hash] params ({})
def create_model(params = {}, options = {})
req = build_request(:create_model, params)
req.send_request(options)
end
# Creates an Amazon SageMaker notebook instance. A notebook instance is
# a machine learning (ML) compute instance running on a Jupyter
# notebook.
#
# In a `CreateNotebookInstance` request, specify the type of ML compute
# instance that you want to run. Amazon SageMaker launches the instance,
# installs common libraries that you can use to explore datasets for
# model training, and attaches an ML storage volume to the notebook
# instance.
#
# Amazon SageMaker also provides a set of example notebooks. Each
# notebook demonstrates how to use Amazon SageMaker with a specific
# algorithm or with a machine learning framework.
#
# After receiving the request, Amazon SageMaker does the following:
#
# 1. Creates a network interface in the Amazon SageMaker VPC.
#
# 2. (Option) If you specified `SubnetId`, Amazon SageMaker creates a
# network interface in your own VPC, which is inferred from the
# subnet ID that you provide in the input. When creating this
# network interface, Amazon SageMaker attaches the security group
# that you specified in the request to the network interface that it
# creates in your VPC.
#
# 3. Launches an EC2 instance of the type specified in the request in
# the Amazon SageMaker VPC. If you specified `SubnetId` of your VPC,
# Amazon SageMaker specifies both network interfaces when launching
# this instance. This enables inbound traffic from your own VPC to
# the notebook instance, assuming that the security groups allow it.
#
# After creating the notebook instance, Amazon SageMaker returns its
# Amazon Resource Name (ARN).
#
# After Amazon SageMaker creates the notebook instance, you can connect
# to the Jupyter server and work in Jupyter notebooks. For example, you
# can write code to explore a dataset that you can use for model
# training, train a model, host models by creating Amazon SageMaker
# endpoints, and validate hosted models.
#
# For more information, see [How It Works][1].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/how-it-works.html
#
# @option params [required, String] :notebook_instance_name
# The name of the new notebook instance.
#
# @option params [required, String] :instance_type
# The type of ML compute instance to launch for the notebook instance.
#
# @option params [String] :subnet_id
# The ID of the subnet in a VPC to which you would like to have a
# connectivity from your ML compute instance.
#
# @option params [Array<String>] :security_group_ids
# The VPC security group IDs, in the form sg-xxxxxxxx. The security
# groups must be for the same VPC as specified in the subnet.
#
# @option params [required, String] :role_arn
# When you send any requests to AWS resources from the notebook
# instance, Amazon SageMaker assumes this role to perform tasks on your
# behalf. You must grant this role necessary permissions so Amazon
# SageMaker can perform these tasks. The policy must allow the Amazon
# SageMaker service principal (sagemaker.amazonaws.com) permissions to
# assume this role. For more information, see [Amazon SageMaker
# Roles][1].
#
# <note markdown="1"> To be able to pass this role to Amazon SageMaker, the caller of this
# API must have the `iam:PassRole` permission.
#
# </note>
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html
#
# @option params [String] :kms_key_id
    #   If you provide an AWS KMS key ID, Amazon SageMaker uses it to encrypt
# data at rest on the ML storage volume that is attached to your
# notebook instance.
#
# @option params [Array<Types::Tag>] :tags
# A list of tags to associate with the notebook instance. You can add
# tags later by using the `CreateTags` API.
#
# @option params [String] :lifecycle_config_name
# The name of a lifecycle configuration to associate with the notebook
    #   instance. For information about lifecycle configurations, see [Step
# 2.1: (Optional) Customize a Notebook Instance][1].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/notebook-lifecycle-config.html
#
# @option params [String] :direct_internet_access
# Sets whether Amazon SageMaker provides internet access to the notebook
# instance. If you set this to `Disabled` this notebook instance will be
# able to access resources only in your VPC, and will not be able to
    #   connect to Amazon SageMaker training and endpoint services unless you
# configure a NAT Gateway in your VPC.
#
# For more information, see [Notebook Instances Are Internet-Enabled by
# Default][1]. You can set the value of this parameter to `Disabled`
# only if you set a value for the `SubnetId` parameter.
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/appendix-additional-considerations.html#appendix-notebook-and-internet-access
#
# @option params [Integer] :volume_size_in_gb
# The size, in GB, of the ML storage volume to attach to the notebook
# instance.
#
# @return [Types::CreateNotebookInstanceOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateNotebookInstanceOutput#notebook_instance_arn #notebook_instance_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_notebook_instance({
# notebook_instance_name: "NotebookInstanceName", # required
# instance_type: "ml.t2.medium", # required, accepts ml.t2.medium, ml.t2.large, ml.t2.xlarge, ml.t2.2xlarge, ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge
# subnet_id: "SubnetId",
# security_group_ids: ["SecurityGroupId"],
# role_arn: "RoleArn", # required
# kms_key_id: "KmsKeyId",
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# lifecycle_config_name: "NotebookInstanceLifecycleConfigName",
# direct_internet_access: "Enabled", # accepts Enabled, Disabled
# volume_size_in_gb: 1,
# })
#
# @example Response structure
#
# resp.notebook_instance_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreateNotebookInstance AWS API Documentation
#
# @overload create_notebook_instance(params = {})
# @param [Hash] params ({})
def create_notebook_instance(params = {}, options = {})
req = build_request(:create_notebook_instance, params)
req.send_request(options)
end
# Creates a lifecycle configuration that you can associate with a
# notebook instance. A *lifecycle configuration* is a collection of
# shell scripts that run when you create or start a notebook instance.
#
# Each lifecycle configuration script has a limit of 16384 characters.
#
# The value of the `$PATH` environment variable that is available to
    # both scripts is `/sbin:/bin:/usr/sbin:/usr/bin`.
#
# View CloudWatch Logs for notebook instance lifecycle configurations in
# log group `/aws/sagemaker/NotebookInstances` in log stream
# `[notebook-instance-name]/[LifecycleConfigHook]`.
#
# Lifecycle configuration scripts cannot run for longer than 5 minutes.
# If a script runs for longer than 5 minutes, it fails and the notebook
# instance is not created or started.
#
    # For information about notebook instance lifecycle configurations, see
# [Step 2.1: (Optional) Customize a Notebook Instance][1].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/notebook-lifecycle-config.html
#
# @option params [required, String] :notebook_instance_lifecycle_config_name
# The name of the lifecycle configuration.
#
# @option params [Array<Types::NotebookInstanceLifecycleHook>] :on_create
# A shell script that runs only once, when you create a notebook
# instance. The shell script must be a base64-encoded string.
#
# @option params [Array<Types::NotebookInstanceLifecycleHook>] :on_start
# A shell script that runs every time you start a notebook instance,
# including when you create the notebook instance. The shell script must
# be a base64-encoded string.
#
# @return [Types::CreateNotebookInstanceLifecycleConfigOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateNotebookInstanceLifecycleConfigOutput#notebook_instance_lifecycle_config_arn #notebook_instance_lifecycle_config_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_notebook_instance_lifecycle_config({
# notebook_instance_lifecycle_config_name: "NotebookInstanceLifecycleConfigName", # required
# on_create: [
# {
# content: "NotebookInstanceLifecycleConfigContent",
# },
# ],
# on_start: [
# {
# content: "NotebookInstanceLifecycleConfigContent",
# },
# ],
# })
#
# @example Response structure
#
# resp.notebook_instance_lifecycle_config_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreateNotebookInstanceLifecycleConfig AWS API Documentation
#
# @overload create_notebook_instance_lifecycle_config(params = {})
# @param [Hash] params ({})
def create_notebook_instance_lifecycle_config(params = {}, options = {})
req = build_request(:create_notebook_instance_lifecycle_config, params)
req.send_request(options)
end
# Returns a URL that you can use to connect to the Jupyter server from a
# notebook instance. In the Amazon SageMaker console, when you choose
# `Open` next to a notebook instance, Amazon SageMaker opens a new tab
# showing the Jupyter server home page from the notebook instance. The
# console uses this API to get the URL and show the page.
#
# You can restrict access to this API and to the URL that it returns to
# a list of IP addresses that you specify. To restrict access, attach an
# IAM policy that denies access to this API unless the call comes from
# an IP address in the specified list to every AWS Identity and Access
# Management user, group, or role used to access the notebook instance.
# Use the `NotIpAddress` condition operator and the `aws:SourceIP`
# condition context key to specify the list of IP addresses that you
# want to have access to the notebook instance. For more information,
# see [Limit Access to a Notebook Instance by IP Address][1].
#
#
#
    # [1]: https://docs.aws.amazon.com/sagemaker/latest/dg/howitworks-access-ws.html#nbi-ip-filter
#
# @option params [required, String] :notebook_instance_name
# The name of the notebook instance.
#
# @option params [Integer] :session_expiration_duration_in_seconds
# The duration of the session, in seconds. The default is 12 hours.
#
# @return [Types::CreatePresignedNotebookInstanceUrlOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreatePresignedNotebookInstanceUrlOutput#authorized_url #authorized_url} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_presigned_notebook_instance_url({
# notebook_instance_name: "NotebookInstanceName", # required
# session_expiration_duration_in_seconds: 1,
# })
#
# @example Response structure
#
# resp.authorized_url #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreatePresignedNotebookInstanceUrl AWS API Documentation
#
# @overload create_presigned_notebook_instance_url(params = {})
# @param [Hash] params ({})
def create_presigned_notebook_instance_url(params = {}, options = {})
req = build_request(:create_presigned_notebook_instance_url, params)
req.send_request(options)
end
# Starts a model training job. After training completes, Amazon
# SageMaker saves the resulting model artifacts to an Amazon S3 location
# that you specify.
#
# If you choose to host your model using Amazon SageMaker hosting
# services, you can use the resulting model artifacts as part of the
# model. You can also use the artifacts in a deep learning service other
# than Amazon SageMaker, provided that you know how to use them for
# inferences.
#
# In the request body, you provide the following:
#
# * `AlgorithmSpecification` - Identifies the training algorithm to use.
#
# * `HyperParameters` - Specify these algorithm-specific parameters to
# influence the quality of the final model. For a list of
# hyperparameters for each training algorithm provided by Amazon
# SageMaker, see [Algorithms][1].
#
# * `InputDataConfig` - Describes the training dataset and the Amazon S3
# location where it is stored.
#
# * `OutputDataConfig` - Identifies the Amazon S3 location where you
# want Amazon SageMaker to save the results of model training.
#
#
#
# * `ResourceConfig` - Identifies the resources, ML compute instances,
# and ML storage volumes to deploy for model training. In distributed
# training, you specify more than one instance.
#
# * `RoleARN` - The Amazon Resource Number (ARN) that Amazon SageMaker
# assumes to perform tasks on your behalf during model training. You
# must grant this role the necessary permissions so that Amazon
# SageMaker can successfully complete model training.
#
# * `StoppingCondition` - Sets a duration for training. Use this
# parameter to cap model training costs.
#
# For more information about Amazon SageMaker, see [How It Works][2].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/algos.html
# [2]: http://docs.aws.amazon.com/sagemaker/latest/dg/how-it-works.html
#
# @option params [required, String] :training_job_name
# The name of the training job. The name must be unique within an AWS
# Region in an AWS account.
#
# @option params [Hash<String,String>] :hyper_parameters
# Algorithm-specific parameters that influence the quality of the model.
# You set hyperparameters before you start the learning process. For a
# list of hyperparameters for each training algorithm provided by Amazon
# SageMaker, see [Algorithms][1].
#
# You can specify a maximum of 100 hyperparameters. Each hyperparameter
# is a key-value pair. Each key and value is limited to 256 characters,
# as specified by the `Length Constraint`.
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/algos.html
#
# @option params [required, Types::AlgorithmSpecification] :algorithm_specification
# The registry path of the Docker image that contains the training
# algorithm and algorithm-specific metadata, including the input mode.
# For more information about algorithms provided by Amazon SageMaker,
# see [Algorithms][1]. For information about providing your own
# algorithms, see [Using Your Own Algorithms with Amazon SageMaker][2].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/algos.html
# [2]: http://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms.html
#
# @option params [required, String] :role_arn
# The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker
# can assume to perform tasks on your behalf.
#
# During model training, Amazon SageMaker needs your permission to read
# input data from an S3 bucket, download a Docker image that contains
# training code, write model artifacts to an S3 bucket, write logs to
# Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You
# grant permissions for all of these tasks to an IAM role. For more
# information, see [Amazon SageMaker Roles][1].
#
# <note markdown="1"> To be able to pass this role to Amazon SageMaker, the caller of this
# API must have the `iam:PassRole` permission.
#
# </note>
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html
#
# @option params [Array<Types::Channel>] :input_data_config
# An array of `Channel` objects. Each channel is a named input source.
# `InputDataConfig` describes the input data and its location.
#
# Algorithms can accept input data from one or more channels. For
# example, an algorithm might have two channels of input data,
# `training_data` and `validation_data`. The configuration for each
# channel provides the S3 location where the input data is stored. It
# also provides information about the stored data: the MIME type,
# compression method, and whether the data is wrapped in RecordIO
# format.
#
# Depending on the input mode that the algorithm supports, Amazon
# SageMaker either copies input data files from an S3 bucket to a local
# directory in the Docker container, or makes it available as input
# streams.
#
# @option params [required, Types::OutputDataConfig] :output_data_config
# Specifies the path to the S3 bucket where you want to store model
# artifacts. Amazon SageMaker creates subfolders for the artifacts.
#
# @option params [required, Types::ResourceConfig] :resource_config
# The resources, including the ML compute instances and ML storage
# volumes, to use for model training.
#
# ML storage volumes store model artifacts and incremental states.
# Training algorithms might also use ML storage volumes for scratch
# space. If you want Amazon SageMaker to use the ML storage volume to
# store the training data, choose `File` as the `TrainingInputMode` in
# the algorithm specification. For distributed training algorithms,
# specify an instance count greater than 1.
#
# @option params [Types::VpcConfig] :vpc_config
# A VpcConfig object that specifies the VPC that you want your training
# job to connect to. Control access to and from your training container
# by configuring the VPC. For more information, see [Protect Training
# Jobs by Using an Amazon Virtual Private Cloud][1].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/train-vpc.html
#
# @option params [required, Types::StoppingCondition] :stopping_condition
# Sets a duration for training. Use this parameter to cap model training
# costs. To stop a job, Amazon SageMaker sends the algorithm the
# `SIGTERM` signal, which delays job termination for 120 seconds.
# Algorithms might use this 120-second window to save the model
# artifacts.
#
# When Amazon SageMaker terminates a job because the stopping condition
# has been met, training algorithms provided by Amazon SageMaker save
# the intermediate results of the job. This intermediate data is a valid
# model artifact. You can use it to create a model using the
# `CreateModel` API.
#
# @option params [Array<Types::Tag>] :tags
# An array of key-value pairs. For more information, see [Using Cost
# Allocation Tags][1] in the *AWS Billing and Cost Management User
# Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html#allocation-what
#
# @return [Types::CreateTrainingJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateTrainingJobResponse#training_job_arn #training_job_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_training_job({
# training_job_name: "TrainingJobName", # required
# hyper_parameters: {
# "ParameterKey" => "ParameterValue",
# },
# algorithm_specification: { # required
# training_image: "AlgorithmImage", # required
# training_input_mode: "Pipe", # required, accepts Pipe, File
# },
# role_arn: "RoleArn", # required
# input_data_config: [
# {
# channel_name: "ChannelName", # required
# data_source: { # required
# s3_data_source: { # required
# s3_data_type: "ManifestFile", # required, accepts ManifestFile, S3Prefix
# s3_uri: "S3Uri", # required
# s3_data_distribution_type: "FullyReplicated", # accepts FullyReplicated, ShardedByS3Key
# },
# },
# content_type: "ContentType",
# compression_type: "None", # accepts None, Gzip
# record_wrapper_type: "None", # accepts None, RecordIO
# input_mode: "Pipe", # accepts Pipe, File
# },
# ],
# output_data_config: { # required
# kms_key_id: "KmsKeyId",
# s3_output_path: "S3Uri", # required
# },
# resource_config: { # required
# instance_type: "ml.m4.xlarge", # required, accepts ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge
# instance_count: 1, # required
# volume_size_in_gb: 1, # required
# volume_kms_key_id: "KmsKeyId",
# },
# vpc_config: {
# security_group_ids: ["SecurityGroupId"], # required
# subnets: ["SubnetId"], # required
# },
# stopping_condition: { # required
# max_runtime_in_seconds: 1,
# },
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# })
#
# @example Response structure
#
# resp.training_job_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreateTrainingJob AWS API Documentation
#
# @overload create_training_job(params = {})
# @param [Hash] params ({})
def create_training_job(params = {}, options = {})
  # Build the :create_training_job request and dispatch it in one step.
  build_request(:create_training_job, params).send_request(options)
end
# Starts a transform job. A transform job uses a trained model to get
# inferences on a dataset and saves these results to an Amazon S3
# location that you specify.
#
# To perform batch transformations, you create a transform job and use
# the data that you have readily available.
#
# In the request body, you provide the following:
#
# * `TransformJobName` - Identifies the transform job. The name must be
# unique within an AWS Region in an AWS account.
#
# * `ModelName` - Identifies the model to use. `ModelName` must be the
# name of an existing Amazon SageMaker model in the same AWS Region
# and AWS account. For information on creating a model, see
# CreateModel.
#
# * `TransformInput` - Describes the dataset to be transformed and the
# Amazon S3 location where it is stored.
#
# * `TransformOutput` - Identifies the Amazon S3 location where you want
# Amazon SageMaker to save the results from the transform job.
#
# * `TransformResources` - Identifies the ML compute instances for the
# transform job.
#
# For more information about how batch transformation works in Amazon
# SageMaker, see [How It Works][1].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/batch-transform.html
#
# @option params [required, String] :transform_job_name
# The name of the transform job. The name must be unique within an AWS
# Region in an AWS account.
#
# @option params [required, String] :model_name
# The name of the model that you want to use for the transform job.
# `ModelName` must be the name of an existing Amazon SageMaker model
# within an AWS Region in an AWS account.
#
# @option params [Integer] :max_concurrent_transforms
# The maximum number of parallel requests that can be sent to each
# instance in a transform job. This is good for algorithms that
# implement multiple workers on larger instances. The default value is
# `1`. To allow Amazon SageMaker to determine the appropriate number for
# `MaxConcurrentTransforms`, set the value to `0`.
#
# @option params [Integer] :max_payload_in_mb
# The maximum payload size allowed, in MB. A payload is the data portion
# of a record (without metadata). The value in `MaxPayloadInMB` must be
# greater or equal to the size of a single record. You can approximate
# the size of a record by dividing the size of your dataset by the
# number of records. Then multiply this value by the number of records
# you want in a mini-batch. It is recommended to enter a value slightly
# larger than this to ensure the records fit within the maximum payload
# size. The default value is `6` MB. For an unlimited payload size, set
# the value to `0`.
#
# @option params [String] :batch_strategy
# Determines the number of records included in a single mini-batch.
# `SingleRecord` means only one record is used per mini-batch.
# `MultiRecord` means a mini-batch is set to contain as many records
# that can fit within the `MaxPayloadInMB` limit.
#
# Batch transform will automatically split your input data into whatever
# payload size is specified if you set `SplitType` to `Line` and
# `BatchStrategy` to `MultiRecord`. There's no need to split the
# dataset into smaller files or to use larger payload sizes unless the
# records in your dataset are very large.
#
# @option params [Hash<String,String>] :environment
# The environment variables to set in the Docker container. We support
# up to 16 key and values entries in the map.
#
# @option params [required, Types::TransformInput] :transform_input
# Describes the input source and the way the transform job consumes it.
#
# @option params [required, Types::TransformOutput] :transform_output
# Describes the results of the transform job.
#
# @option params [required, Types::TransformResources] :transform_resources
# Describes the resources, including ML instance types and ML instance
# count, to use for the transform job.
#
# @option params [Array<Types::Tag>] :tags
# An array of key-value pairs. Adding tags is optional. For more
# information, see [Using Cost Allocation Tags][1] in the *AWS Billing
# and Cost Management User Guide*.
#
#
#
# [1]: http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html#allocation-what
#
# @return [Types::CreateTransformJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateTransformJobResponse#transform_job_arn #transform_job_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_transform_job({
# transform_job_name: "TransformJobName", # required
# model_name: "ModelName", # required
# max_concurrent_transforms: 1,
# max_payload_in_mb: 1,
# batch_strategy: "MultiRecord", # accepts MultiRecord, SingleRecord
# environment: {
# "TransformEnvironmentKey" => "TransformEnvironmentValue",
# },
# transform_input: { # required
# data_source: { # required
# s3_data_source: { # required
# s3_data_type: "ManifestFile", # required, accepts ManifestFile, S3Prefix
# s3_uri: "S3Uri", # required
# },
# },
# content_type: "ContentType",
# compression_type: "None", # accepts None, Gzip
# split_type: "None", # accepts None, Line, RecordIO
# },
# transform_output: { # required
# s3_output_path: "S3Uri", # required
# accept: "Accept",
# assemble_with: "None", # accepts None, Line
# kms_key_id: "KmsKeyId",
# },
# transform_resources: { # required
# instance_type: "ml.m4.xlarge", # required, accepts ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge
# instance_count: 1, # required
# volume_kms_key_id: "KmsKeyId",
# },
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# })
#
# @example Response structure
#
# resp.transform_job_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/CreateTransformJob AWS API Documentation
#
# @overload create_transform_job(params = {})
# @param [Hash] params ({})
def create_transform_job(params = {}, options = {})
  # Build the :create_transform_job request and dispatch it in one step.
  build_request(:create_transform_job, params).send_request(options)
end
# Deletes an endpoint. Amazon SageMaker frees up all of the resources
# that were deployed when the endpoint was created.
#
# Amazon SageMaker retires any custom KMS key grants associated with the
# endpoint, meaning you don't need to use the [RevokeGrant][1] API
# call.
#
#
#
# [1]: http://docs.aws.amazon.com/kms/latest/APIReference/API_RevokeGrant.html
#
# @option params [required, String] :endpoint_name
# The name of the endpoint that you want to delete.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_endpoint({
# endpoint_name: "EndpointName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DeleteEndpoint AWS API Documentation
#
# @overload delete_endpoint(params = {})
# @param [Hash] params ({})
def delete_endpoint(params = {}, options = {})
  # Build the :delete_endpoint request and dispatch it in one step.
  build_request(:delete_endpoint, params).send_request(options)
end
# Deletes an endpoint configuration. The `DeleteEndpointConfig` API
# deletes only the specified configuration. It does not delete endpoints
# created using the configuration.
#
# @option params [required, String] :endpoint_config_name
# The name of the endpoint configuration that you want to delete.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_endpoint_config({
# endpoint_config_name: "EndpointConfigName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DeleteEndpointConfig AWS API Documentation
#
# @overload delete_endpoint_config(params = {})
# @param [Hash] params ({})
def delete_endpoint_config(params = {}, options = {})
  # Build the :delete_endpoint_config request and dispatch it in one step.
  build_request(:delete_endpoint_config, params).send_request(options)
end
# Deletes a model. The `DeleteModel` API deletes only the model entry
# that was created in Amazon SageMaker when you called the
# [CreateModel][1] API. It does not delete model artifacts, inference
# code, or the IAM role that you specified when creating the model.
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateModel.html
#
# @option params [required, String] :model_name
# The name of the model to delete.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_model({
# model_name: "ModelName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DeleteModel AWS API Documentation
#
# @overload delete_model(params = {})
# @param [Hash] params ({})
def delete_model(params = {}, options = {})
  # Build the :delete_model request and dispatch it in one step.
  build_request(:delete_model, params).send_request(options)
end
# Deletes an Amazon SageMaker notebook instance. Before you can delete a
# notebook instance, you must call the `StopNotebookInstance` API.
#
# When you delete a notebook instance, you lose all of your data. Amazon
# SageMaker removes the ML compute instance, and deletes the ML storage
# volume and the network interface associated with the notebook
# instance.
#
# @option params [required, String] :notebook_instance_name
# The name of the Amazon SageMaker notebook instance to delete.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_notebook_instance({
# notebook_instance_name: "NotebookInstanceName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DeleteNotebookInstance AWS API Documentation
#
# @overload delete_notebook_instance(params = {})
# @param [Hash] params ({})
def delete_notebook_instance(params = {}, options = {})
  # Build the :delete_notebook_instance request and dispatch it in one step.
  build_request(:delete_notebook_instance, params).send_request(options)
end
# Deletes a notebook instance lifecycle configuration.
#
# @option params [required, String] :notebook_instance_lifecycle_config_name
# The name of the lifecycle configuration to delete.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_notebook_instance_lifecycle_config({
# notebook_instance_lifecycle_config_name: "NotebookInstanceLifecycleConfigName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DeleteNotebookInstanceLifecycleConfig AWS API Documentation
#
# @overload delete_notebook_instance_lifecycle_config(params = {})
# @param [Hash] params ({})
def delete_notebook_instance_lifecycle_config(params = {}, options = {})
  # Build the :delete_notebook_instance_lifecycle_config request and
  # dispatch it in one step.
  build_request(:delete_notebook_instance_lifecycle_config, params).send_request(options)
end
# Deletes the specified tags from an Amazon SageMaker resource.
#
# To list a resource's tags, use the `ListTags` API.
#
# <note markdown="1"> When you call this API to delete tags from a hyperparameter tuning
# job, the deleted tags are not removed from training jobs that the
# hyperparameter tuning job launched before you called this API.
#
# </note>
#
# @option params [required, String] :resource_arn
# The Amazon Resource Name (ARN) of the resource whose tags you want to
# delete.
#
# @option params [required, Array<String>] :tag_keys
# An array of one or more tag keys to delete.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_tags({
# resource_arn: "ResourceArn", # required
# tag_keys: ["TagKey"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DeleteTags AWS API Documentation
#
# @overload delete_tags(params = {})
# @param [Hash] params ({})
def delete_tags(params = {}, options = {})
  # Build the :delete_tags request and dispatch it in one step.
  build_request(:delete_tags, params).send_request(options)
end
# Returns the description of an endpoint.
#
# @option params [required, String] :endpoint_name
# The name of the endpoint.
#
# @return [Types::DescribeEndpointOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeEndpointOutput#endpoint_name #endpoint_name} => String
# * {Types::DescribeEndpointOutput#endpoint_arn #endpoint_arn} => String
# * {Types::DescribeEndpointOutput#endpoint_config_name #endpoint_config_name} => String
# * {Types::DescribeEndpointOutput#production_variants #production_variants} => Array<Types::ProductionVariantSummary>
# * {Types::DescribeEndpointOutput#endpoint_status #endpoint_status} => String
# * {Types::DescribeEndpointOutput#failure_reason #failure_reason} => String
# * {Types::DescribeEndpointOutput#creation_time #creation_time} => Time
# * {Types::DescribeEndpointOutput#last_modified_time #last_modified_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.describe_endpoint({
# endpoint_name: "EndpointName", # required
# })
#
# @example Response structure
#
# resp.endpoint_name #=> String
# resp.endpoint_arn #=> String
# resp.endpoint_config_name #=> String
# resp.production_variants #=> Array
# resp.production_variants[0].variant_name #=> String
# resp.production_variants[0].deployed_images #=> Array
# resp.production_variants[0].deployed_images[0].specified_image #=> String
# resp.production_variants[0].deployed_images[0].resolved_image #=> String
# resp.production_variants[0].deployed_images[0].resolution_time #=> Time
# resp.production_variants[0].current_weight #=> Float
# resp.production_variants[0].desired_weight #=> Float
# resp.production_variants[0].current_instance_count #=> Integer
# resp.production_variants[0].desired_instance_count #=> Integer
# resp.endpoint_status #=> String, one of "OutOfService", "Creating", "Updating", "SystemUpdating", "RollingBack", "InService", "Deleting", "Failed"
# resp.failure_reason #=> String
# resp.creation_time #=> Time
# resp.last_modified_time #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DescribeEndpoint AWS API Documentation
#
# @overload describe_endpoint(params = {})
# @param [Hash] params ({})
def describe_endpoint(params = {}, options = {})
  # Build the :describe_endpoint request and dispatch it in one step.
  build_request(:describe_endpoint, params).send_request(options)
end
# Returns the description of an endpoint configuration created using the
# `CreateEndpointConfig` API.
#
# @option params [required, String] :endpoint_config_name
# The name of the endpoint configuration.
#
# @return [Types::DescribeEndpointConfigOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeEndpointConfigOutput#endpoint_config_name #endpoint_config_name} => String
# * {Types::DescribeEndpointConfigOutput#endpoint_config_arn #endpoint_config_arn} => String
# * {Types::DescribeEndpointConfigOutput#production_variants #production_variants} => Array<Types::ProductionVariant>
# * {Types::DescribeEndpointConfigOutput#kms_key_id #kms_key_id} => String
# * {Types::DescribeEndpointConfigOutput#creation_time #creation_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.describe_endpoint_config({
# endpoint_config_name: "EndpointConfigName", # required
# })
#
# @example Response structure
#
# resp.endpoint_config_name #=> String
# resp.endpoint_config_arn #=> String
# resp.production_variants #=> Array
# resp.production_variants[0].variant_name #=> String
# resp.production_variants[0].model_name #=> String
# resp.production_variants[0].initial_instance_count #=> Integer
# resp.production_variants[0].instance_type #=> String, one of "ml.t2.medium", "ml.t2.large", "ml.t2.xlarge", "ml.t2.2xlarge", "ml.m4.xlarge", "ml.m4.2xlarge", "ml.m4.4xlarge", "ml.m4.10xlarge", "ml.m4.16xlarge", "ml.m5.large", "ml.m5.xlarge", "ml.m5.2xlarge", "ml.m5.4xlarge", "ml.m5.12xlarge", "ml.m5.24xlarge", "ml.c4.large", "ml.c4.xlarge", "ml.c4.2xlarge", "ml.c4.4xlarge", "ml.c4.8xlarge", "ml.p2.xlarge", "ml.p2.8xlarge", "ml.p2.16xlarge", "ml.p3.2xlarge", "ml.p3.8xlarge", "ml.p3.16xlarge", "ml.c5.large", "ml.c5.xlarge", "ml.c5.2xlarge", "ml.c5.4xlarge", "ml.c5.9xlarge", "ml.c5.18xlarge"
# resp.production_variants[0].initial_variant_weight #=> Float
# resp.kms_key_id #=> String
# resp.creation_time #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DescribeEndpointConfig AWS API Documentation
#
# @overload describe_endpoint_config(params = {})
# @param [Hash] params ({})
def describe_endpoint_config(params = {}, options = {})
  # Build the :describe_endpoint_config request and dispatch it in one step.
  build_request(:describe_endpoint_config, params).send_request(options)
end
# Gets a description of a hyperparameter tuning job.
#
# @option params [required, String] :hyper_parameter_tuning_job_name
# The name of the tuning job to describe.
#
# @return [Types::DescribeHyperParameterTuningJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeHyperParameterTuningJobResponse#hyper_parameter_tuning_job_name #hyper_parameter_tuning_job_name} => String
# * {Types::DescribeHyperParameterTuningJobResponse#hyper_parameter_tuning_job_arn #hyper_parameter_tuning_job_arn} => String
# * {Types::DescribeHyperParameterTuningJobResponse#hyper_parameter_tuning_job_config #hyper_parameter_tuning_job_config} => Types::HyperParameterTuningJobConfig
# * {Types::DescribeHyperParameterTuningJobResponse#training_job_definition #training_job_definition} => Types::HyperParameterTrainingJobDefinition
# * {Types::DescribeHyperParameterTuningJobResponse#hyper_parameter_tuning_job_status #hyper_parameter_tuning_job_status} => String
# * {Types::DescribeHyperParameterTuningJobResponse#creation_time #creation_time} => Time
# * {Types::DescribeHyperParameterTuningJobResponse#hyper_parameter_tuning_end_time #hyper_parameter_tuning_end_time} => Time
# * {Types::DescribeHyperParameterTuningJobResponse#last_modified_time #last_modified_time} => Time
# * {Types::DescribeHyperParameterTuningJobResponse#training_job_status_counters #training_job_status_counters} => Types::TrainingJobStatusCounters
# * {Types::DescribeHyperParameterTuningJobResponse#objective_status_counters #objective_status_counters} => Types::ObjectiveStatusCounters
# * {Types::DescribeHyperParameterTuningJobResponse#best_training_job #best_training_job} => Types::HyperParameterTrainingJobSummary
# * {Types::DescribeHyperParameterTuningJobResponse#failure_reason #failure_reason} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_hyper_parameter_tuning_job({
# hyper_parameter_tuning_job_name: "HyperParameterTuningJobName", # required
# })
#
# @example Response structure
#
# resp.hyper_parameter_tuning_job_name #=> String
# resp.hyper_parameter_tuning_job_arn #=> String
# resp.hyper_parameter_tuning_job_config.strategy #=> String, one of "Bayesian"
# resp.hyper_parameter_tuning_job_config.hyper_parameter_tuning_job_objective.type #=> String, one of "Maximize", "Minimize"
# resp.hyper_parameter_tuning_job_config.hyper_parameter_tuning_job_objective.metric_name #=> String
# resp.hyper_parameter_tuning_job_config.resource_limits.max_number_of_training_jobs #=> Integer
# resp.hyper_parameter_tuning_job_config.resource_limits.max_parallel_training_jobs #=> Integer
# resp.hyper_parameter_tuning_job_config.parameter_ranges.integer_parameter_ranges #=> Array
# resp.hyper_parameter_tuning_job_config.parameter_ranges.integer_parameter_ranges[0].name #=> String
# resp.hyper_parameter_tuning_job_config.parameter_ranges.integer_parameter_ranges[0].min_value #=> String
# resp.hyper_parameter_tuning_job_config.parameter_ranges.integer_parameter_ranges[0].max_value #=> String
# resp.hyper_parameter_tuning_job_config.parameter_ranges.continuous_parameter_ranges #=> Array
# resp.hyper_parameter_tuning_job_config.parameter_ranges.continuous_parameter_ranges[0].name #=> String
# resp.hyper_parameter_tuning_job_config.parameter_ranges.continuous_parameter_ranges[0].min_value #=> String
# resp.hyper_parameter_tuning_job_config.parameter_ranges.continuous_parameter_ranges[0].max_value #=> String
# resp.hyper_parameter_tuning_job_config.parameter_ranges.categorical_parameter_ranges #=> Array
# resp.hyper_parameter_tuning_job_config.parameter_ranges.categorical_parameter_ranges[0].name #=> String
# resp.hyper_parameter_tuning_job_config.parameter_ranges.categorical_parameter_ranges[0].values #=> Array
# resp.hyper_parameter_tuning_job_config.parameter_ranges.categorical_parameter_ranges[0].values[0] #=> String
# resp.training_job_definition.static_hyper_parameters #=> Hash
# resp.training_job_definition.static_hyper_parameters["ParameterKey"] #=> String
# resp.training_job_definition.algorithm_specification.training_image #=> String
# resp.training_job_definition.algorithm_specification.training_input_mode #=> String, one of "Pipe", "File"
# resp.training_job_definition.algorithm_specification.metric_definitions #=> Array
# resp.training_job_definition.algorithm_specification.metric_definitions[0].name #=> String
# resp.training_job_definition.algorithm_specification.metric_definitions[0].regex #=> String
# resp.training_job_definition.role_arn #=> String
# resp.training_job_definition.input_data_config #=> Array
# resp.training_job_definition.input_data_config[0].channel_name #=> String
# resp.training_job_definition.input_data_config[0].data_source.s3_data_source.s3_data_type #=> String, one of "ManifestFile", "S3Prefix"
# resp.training_job_definition.input_data_config[0].data_source.s3_data_source.s3_uri #=> String
# resp.training_job_definition.input_data_config[0].data_source.s3_data_source.s3_data_distribution_type #=> String, one of "FullyReplicated", "ShardedByS3Key"
# resp.training_job_definition.input_data_config[0].content_type #=> String
# resp.training_job_definition.input_data_config[0].compression_type #=> String, one of "None", "Gzip"
# resp.training_job_definition.input_data_config[0].record_wrapper_type #=> String, one of "None", "RecordIO"
# resp.training_job_definition.input_data_config[0].input_mode #=> String, one of "Pipe", "File"
# resp.training_job_definition.vpc_config.security_group_ids #=> Array
# resp.training_job_definition.vpc_config.security_group_ids[0] #=> String
# resp.training_job_definition.vpc_config.subnets #=> Array
# resp.training_job_definition.vpc_config.subnets[0] #=> String
# resp.training_job_definition.output_data_config.kms_key_id #=> String
# resp.training_job_definition.output_data_config.s3_output_path #=> String
# resp.training_job_definition.resource_config.instance_type #=> String, one of "ml.m4.xlarge", "ml.m4.2xlarge", "ml.m4.4xlarge", "ml.m4.10xlarge", "ml.m4.16xlarge", "ml.m5.large", "ml.m5.xlarge", "ml.m5.2xlarge", "ml.m5.4xlarge", "ml.m5.12xlarge", "ml.m5.24xlarge", "ml.c4.xlarge", "ml.c4.2xlarge", "ml.c4.4xlarge", "ml.c4.8xlarge", "ml.p2.xlarge", "ml.p2.8xlarge", "ml.p2.16xlarge", "ml.p3.2xlarge", "ml.p3.8xlarge", "ml.p3.16xlarge", "ml.c5.xlarge", "ml.c5.2xlarge", "ml.c5.4xlarge", "ml.c5.9xlarge", "ml.c5.18xlarge"
# resp.training_job_definition.resource_config.instance_count #=> Integer
# resp.training_job_definition.resource_config.volume_size_in_gb #=> Integer
# resp.training_job_definition.resource_config.volume_kms_key_id #=> String
# resp.training_job_definition.stopping_condition.max_runtime_in_seconds #=> Integer
# resp.hyper_parameter_tuning_job_status #=> String, one of "Completed", "InProgress", "Failed", "Stopped", "Stopping"
# resp.creation_time #=> Time
# resp.hyper_parameter_tuning_end_time #=> Time
# resp.last_modified_time #=> Time
# resp.training_job_status_counters.completed #=> Integer
# resp.training_job_status_counters.in_progress #=> Integer
# resp.training_job_status_counters.retryable_error #=> Integer
# resp.training_job_status_counters.non_retryable_error #=> Integer
# resp.training_job_status_counters.stopped #=> Integer
# resp.objective_status_counters.succeeded #=> Integer
# resp.objective_status_counters.pending #=> Integer
# resp.objective_status_counters.failed #=> Integer
# resp.best_training_job.training_job_name #=> String
# resp.best_training_job.training_job_arn #=> String
# resp.best_training_job.creation_time #=> Time
# resp.best_training_job.training_start_time #=> Time
# resp.best_training_job.training_end_time #=> Time
# resp.best_training_job.training_job_status #=> String, one of "InProgress", "Completed", "Failed", "Stopping", "Stopped"
# resp.best_training_job.tuned_hyper_parameters #=> Hash
# resp.best_training_job.tuned_hyper_parameters["ParameterKey"] #=> String
# resp.best_training_job.failure_reason #=> String
# resp.best_training_job.final_hyper_parameter_tuning_job_objective_metric.type #=> String, one of "Maximize", "Minimize"
# resp.best_training_job.final_hyper_parameter_tuning_job_objective_metric.metric_name #=> String
# resp.best_training_job.final_hyper_parameter_tuning_job_objective_metric.value #=> Float
# resp.best_training_job.objective_status #=> String, one of "Succeeded", "Pending", "Failed"
# resp.failure_reason #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DescribeHyperParameterTuningJob AWS API Documentation
#
# @overload describe_hyper_parameter_tuning_job(params = {})
# @param [Hash] params ({})
def describe_hyper_parameter_tuning_job(params = {}, options = {})
  # Build the :describe_hyper_parameter_tuning_job request and dispatch it
  # in one step.
  build_request(:describe_hyper_parameter_tuning_job, params).send_request(options)
end
# Describes a model that you created using the `CreateModel` API.
#
# @option params [required, String] :model_name
# The name of the model.
#
# @return [Types::DescribeModelOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeModelOutput#model_name #model_name} => String
# * {Types::DescribeModelOutput#primary_container #primary_container} => Types::ContainerDefinition
# * {Types::DescribeModelOutput#execution_role_arn #execution_role_arn} => String
# * {Types::DescribeModelOutput#vpc_config #vpc_config} => Types::VpcConfig
# * {Types::DescribeModelOutput#creation_time #creation_time} => Time
# * {Types::DescribeModelOutput#model_arn #model_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_model({
# model_name: "ModelName", # required
# })
#
# @example Response structure
#
# resp.model_name #=> String
# resp.primary_container.container_hostname #=> String
# resp.primary_container.image #=> String
# resp.primary_container.model_data_url #=> String
# resp.primary_container.environment #=> Hash
# resp.primary_container.environment["EnvironmentKey"] #=> String
# resp.execution_role_arn #=> String
# resp.vpc_config.security_group_ids #=> Array
# resp.vpc_config.security_group_ids[0] #=> String
# resp.vpc_config.subnets #=> Array
# resp.vpc_config.subnets[0] #=> String
# resp.creation_time #=> Time
# resp.model_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DescribeModel AWS API Documentation
#
# @overload describe_model(params = {})
# @param [Hash] params ({})
def describe_model(params = {}, options = {})
  # Build the :describe_model request and dispatch it in one step.
  build_request(:describe_model, params).send_request(options)
end
# Returns information about a notebook instance.
#
# @option params [required, String] :notebook_instance_name
# The name of the notebook instance that you want information about.
#
# @return [Types::DescribeNotebookInstanceOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeNotebookInstanceOutput#notebook_instance_arn #notebook_instance_arn} => String
# * {Types::DescribeNotebookInstanceOutput#notebook_instance_name #notebook_instance_name} => String
# * {Types::DescribeNotebookInstanceOutput#notebook_instance_status #notebook_instance_status} => String
# * {Types::DescribeNotebookInstanceOutput#failure_reason #failure_reason} => String
# * {Types::DescribeNotebookInstanceOutput#url #url} => String
# * {Types::DescribeNotebookInstanceOutput#instance_type #instance_type} => String
# * {Types::DescribeNotebookInstanceOutput#subnet_id #subnet_id} => String
# * {Types::DescribeNotebookInstanceOutput#security_groups #security_groups} => Array<String>
# * {Types::DescribeNotebookInstanceOutput#role_arn #role_arn} => String
# * {Types::DescribeNotebookInstanceOutput#kms_key_id #kms_key_id} => String
# * {Types::DescribeNotebookInstanceOutput#network_interface_id #network_interface_id} => String
# * {Types::DescribeNotebookInstanceOutput#last_modified_time #last_modified_time} => Time
# * {Types::DescribeNotebookInstanceOutput#creation_time #creation_time} => Time
# * {Types::DescribeNotebookInstanceOutput#notebook_instance_lifecycle_config_name #notebook_instance_lifecycle_config_name} => String
# * {Types::DescribeNotebookInstanceOutput#direct_internet_access #direct_internet_access} => String
# * {Types::DescribeNotebookInstanceOutput#volume_size_in_gb #volume_size_in_gb} => Integer
#
# @example Request syntax with placeholder values
#
# resp = client.describe_notebook_instance({
# notebook_instance_name: "NotebookInstanceName", # required
# })
#
# @example Response structure
#
# resp.notebook_instance_arn #=> String
# resp.notebook_instance_name #=> String
# resp.notebook_instance_status #=> String, one of "Pending", "InService", "Stopping", "Stopped", "Failed", "Deleting", "Updating"
# resp.failure_reason #=> String
# resp.url #=> String
# resp.instance_type #=> String, one of "ml.t2.medium", "ml.t2.large", "ml.t2.xlarge", "ml.t2.2xlarge", "ml.m4.xlarge", "ml.m4.2xlarge", "ml.m4.4xlarge", "ml.m4.10xlarge", "ml.m4.16xlarge", "ml.p2.xlarge", "ml.p2.8xlarge", "ml.p2.16xlarge", "ml.p3.2xlarge", "ml.p3.8xlarge", "ml.p3.16xlarge"
# resp.subnet_id #=> String
# resp.security_groups #=> Array
# resp.security_groups[0] #=> String
# resp.role_arn #=> String
# resp.kms_key_id #=> String
# resp.network_interface_id #=> String
# resp.last_modified_time #=> Time
# resp.creation_time #=> Time
# resp.notebook_instance_lifecycle_config_name #=> String
# resp.direct_internet_access #=> String, one of "Enabled", "Disabled"
# resp.volume_size_in_gb #=> Integer
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DescribeNotebookInstance AWS API Documentation
#
# @overload describe_notebook_instance(params = {})
# @param [Hash] params ({})
def describe_notebook_instance(params = {}, options = {})
  build_request(:describe_notebook_instance, params).send_request(options)
end
# Returns a description of a notebook instance lifecycle configuration.
#
# For information about notebook instance lifecycle configurations, see
# [Step 2.1: (Optional) Customize a Notebook Instance][1].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/notebook-lifecycle-config.html
#
# @option params [required, String] :notebook_instance_lifecycle_config_name
# The name of the lifecycle configuration to describe.
#
# @return [Types::DescribeNotebookInstanceLifecycleConfigOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeNotebookInstanceLifecycleConfigOutput#notebook_instance_lifecycle_config_arn #notebook_instance_lifecycle_config_arn} => String
# * {Types::DescribeNotebookInstanceLifecycleConfigOutput#notebook_instance_lifecycle_config_name #notebook_instance_lifecycle_config_name} => String
# * {Types::DescribeNotebookInstanceLifecycleConfigOutput#on_create #on_create} => Array<Types::NotebookInstanceLifecycleHook>
# * {Types::DescribeNotebookInstanceLifecycleConfigOutput#on_start #on_start} => Array<Types::NotebookInstanceLifecycleHook>
# * {Types::DescribeNotebookInstanceLifecycleConfigOutput#last_modified_time #last_modified_time} => Time
# * {Types::DescribeNotebookInstanceLifecycleConfigOutput#creation_time #creation_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.describe_notebook_instance_lifecycle_config({
# notebook_instance_lifecycle_config_name: "NotebookInstanceLifecycleConfigName", # required
# })
#
# @example Response structure
#
# resp.notebook_instance_lifecycle_config_arn #=> String
# resp.notebook_instance_lifecycle_config_name #=> String
# resp.on_create #=> Array
# resp.on_create[0].content #=> String
# resp.on_start #=> Array
# resp.on_start[0].content #=> String
# resp.last_modified_time #=> Time
# resp.creation_time #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DescribeNotebookInstanceLifecycleConfig AWS API Documentation
#
# @overload describe_notebook_instance_lifecycle_config(params = {})
# @param [Hash] params ({})
def describe_notebook_instance_lifecycle_config(params = {}, options = {})
  build_request(:describe_notebook_instance_lifecycle_config, params).send_request(options)
end
# Returns information about a training job.
#
# @option params [required, String] :training_job_name
# The name of the training job.
#
# @return [Types::DescribeTrainingJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeTrainingJobResponse#training_job_name #training_job_name} => String
# * {Types::DescribeTrainingJobResponse#training_job_arn #training_job_arn} => String
# * {Types::DescribeTrainingJobResponse#tuning_job_arn #tuning_job_arn} => String
# * {Types::DescribeTrainingJobResponse#model_artifacts #model_artifacts} => Types::ModelArtifacts
# * {Types::DescribeTrainingJobResponse#training_job_status #training_job_status} => String
# * {Types::DescribeTrainingJobResponse#secondary_status #secondary_status} => String
# * {Types::DescribeTrainingJobResponse#failure_reason #failure_reason} => String
# * {Types::DescribeTrainingJobResponse#hyper_parameters #hyper_parameters} => Hash<String,String>
# * {Types::DescribeTrainingJobResponse#algorithm_specification #algorithm_specification} => Types::AlgorithmSpecification
# * {Types::DescribeTrainingJobResponse#role_arn #role_arn} => String
# * {Types::DescribeTrainingJobResponse#input_data_config #input_data_config} => Array<Types::Channel>
# * {Types::DescribeTrainingJobResponse#output_data_config #output_data_config} => Types::OutputDataConfig
# * {Types::DescribeTrainingJobResponse#resource_config #resource_config} => Types::ResourceConfig
# * {Types::DescribeTrainingJobResponse#vpc_config #vpc_config} => Types::VpcConfig
# * {Types::DescribeTrainingJobResponse#stopping_condition #stopping_condition} => Types::StoppingCondition
# * {Types::DescribeTrainingJobResponse#creation_time #creation_time} => Time
# * {Types::DescribeTrainingJobResponse#training_start_time #training_start_time} => Time
# * {Types::DescribeTrainingJobResponse#training_end_time #training_end_time} => Time
# * {Types::DescribeTrainingJobResponse#last_modified_time #last_modified_time} => Time
# * {Types::DescribeTrainingJobResponse#secondary_status_transitions #secondary_status_transitions} => Array<Types::SecondaryStatusTransition>
#
# @example Request syntax with placeholder values
#
# resp = client.describe_training_job({
# training_job_name: "TrainingJobName", # required
# })
#
# @example Response structure
#
# resp.training_job_name #=> String
# resp.training_job_arn #=> String
# resp.tuning_job_arn #=> String
# resp.model_artifacts.s3_model_artifacts #=> String
# resp.training_job_status #=> String, one of "InProgress", "Completed", "Failed", "Stopping", "Stopped"
# resp.secondary_status #=> String, one of "Starting", "LaunchingMLInstances", "PreparingTrainingStack", "Downloading", "DownloadingTrainingImage", "Training", "Uploading", "Stopping", "Stopped", "MaxRuntimeExceeded", "Completed", "Failed"
# resp.failure_reason #=> String
# resp.hyper_parameters #=> Hash
# resp.hyper_parameters["ParameterKey"] #=> String
# resp.algorithm_specification.training_image #=> String
# resp.algorithm_specification.training_input_mode #=> String, one of "Pipe", "File"
# resp.role_arn #=> String
# resp.input_data_config #=> Array
# resp.input_data_config[0].channel_name #=> String
# resp.input_data_config[0].data_source.s3_data_source.s3_data_type #=> String, one of "ManifestFile", "S3Prefix"
# resp.input_data_config[0].data_source.s3_data_source.s3_uri #=> String
# resp.input_data_config[0].data_source.s3_data_source.s3_data_distribution_type #=> String, one of "FullyReplicated", "ShardedByS3Key"
# resp.input_data_config[0].content_type #=> String
# resp.input_data_config[0].compression_type #=> String, one of "None", "Gzip"
# resp.input_data_config[0].record_wrapper_type #=> String, one of "None", "RecordIO"
# resp.input_data_config[0].input_mode #=> String, one of "Pipe", "File"
# resp.output_data_config.kms_key_id #=> String
# resp.output_data_config.s3_output_path #=> String
# resp.resource_config.instance_type #=> String, one of "ml.m4.xlarge", "ml.m4.2xlarge", "ml.m4.4xlarge", "ml.m4.10xlarge", "ml.m4.16xlarge", "ml.m5.large", "ml.m5.xlarge", "ml.m5.2xlarge", "ml.m5.4xlarge", "ml.m5.12xlarge", "ml.m5.24xlarge", "ml.c4.xlarge", "ml.c4.2xlarge", "ml.c4.4xlarge", "ml.c4.8xlarge", "ml.p2.xlarge", "ml.p2.8xlarge", "ml.p2.16xlarge", "ml.p3.2xlarge", "ml.p3.8xlarge", "ml.p3.16xlarge", "ml.c5.xlarge", "ml.c5.2xlarge", "ml.c5.4xlarge", "ml.c5.9xlarge", "ml.c5.18xlarge"
# resp.resource_config.instance_count #=> Integer
# resp.resource_config.volume_size_in_gb #=> Integer
# resp.resource_config.volume_kms_key_id #=> String
# resp.vpc_config.security_group_ids #=> Array
# resp.vpc_config.security_group_ids[0] #=> String
# resp.vpc_config.subnets #=> Array
# resp.vpc_config.subnets[0] #=> String
# resp.stopping_condition.max_runtime_in_seconds #=> Integer
# resp.creation_time #=> Time
# resp.training_start_time #=> Time
# resp.training_end_time #=> Time
# resp.last_modified_time #=> Time
# resp.secondary_status_transitions #=> Array
# resp.secondary_status_transitions[0].status #=> String, one of "Starting", "LaunchingMLInstances", "PreparingTrainingStack", "Downloading", "DownloadingTrainingImage", "Training", "Uploading", "Stopping", "Stopped", "MaxRuntimeExceeded", "Completed", "Failed"
# resp.secondary_status_transitions[0].start_time #=> Time
# resp.secondary_status_transitions[0].end_time #=> Time
# resp.secondary_status_transitions[0].status_message #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DescribeTrainingJob AWS API Documentation
#
# @overload describe_training_job(params = {})
# @param [Hash] params ({})
def describe_training_job(params = {}, options = {})
  build_request(:describe_training_job, params).send_request(options)
end
# Returns information about a transform job.
#
# @option params [required, String] :transform_job_name
# The name of the transform job that you want to view details of.
#
# @return [Types::DescribeTransformJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeTransformJobResponse#transform_job_name #transform_job_name} => String
# * {Types::DescribeTransformJobResponse#transform_job_arn #transform_job_arn} => String
# * {Types::DescribeTransformJobResponse#transform_job_status #transform_job_status} => String
# * {Types::DescribeTransformJobResponse#failure_reason #failure_reason} => String
# * {Types::DescribeTransformJobResponse#model_name #model_name} => String
# * {Types::DescribeTransformJobResponse#max_concurrent_transforms #max_concurrent_transforms} => Integer
# * {Types::DescribeTransformJobResponse#max_payload_in_mb #max_payload_in_mb} => Integer
# * {Types::DescribeTransformJobResponse#batch_strategy #batch_strategy} => String
# * {Types::DescribeTransformJobResponse#environment #environment} => Hash<String,String>
# * {Types::DescribeTransformJobResponse#transform_input #transform_input} => Types::TransformInput
# * {Types::DescribeTransformJobResponse#transform_output #transform_output} => Types::TransformOutput
# * {Types::DescribeTransformJobResponse#transform_resources #transform_resources} => Types::TransformResources
# * {Types::DescribeTransformJobResponse#creation_time #creation_time} => Time
# * {Types::DescribeTransformJobResponse#transform_start_time #transform_start_time} => Time
# * {Types::DescribeTransformJobResponse#transform_end_time #transform_end_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.describe_transform_job({
# transform_job_name: "TransformJobName", # required
# })
#
# @example Response structure
#
# resp.transform_job_name #=> String
# resp.transform_job_arn #=> String
# resp.transform_job_status #=> String, one of "InProgress", "Completed", "Failed", "Stopping", "Stopped"
# resp.failure_reason #=> String
# resp.model_name #=> String
# resp.max_concurrent_transforms #=> Integer
# resp.max_payload_in_mb #=> Integer
# resp.batch_strategy #=> String, one of "MultiRecord", "SingleRecord"
# resp.environment #=> Hash
# resp.environment["TransformEnvironmentKey"] #=> String
# resp.transform_input.data_source.s3_data_source.s3_data_type #=> String, one of "ManifestFile", "S3Prefix"
# resp.transform_input.data_source.s3_data_source.s3_uri #=> String
# resp.transform_input.content_type #=> String
# resp.transform_input.compression_type #=> String, one of "None", "Gzip"
# resp.transform_input.split_type #=> String, one of "None", "Line", "RecordIO"
# resp.transform_output.s3_output_path #=> String
# resp.transform_output.accept #=> String
# resp.transform_output.assemble_with #=> String, one of "None", "Line"
# resp.transform_output.kms_key_id #=> String
# resp.transform_resources.instance_type #=> String, one of "ml.m4.xlarge", "ml.m4.2xlarge", "ml.m4.4xlarge", "ml.m4.10xlarge", "ml.m4.16xlarge", "ml.c4.xlarge", "ml.c4.2xlarge", "ml.c4.4xlarge", "ml.c4.8xlarge", "ml.p2.xlarge", "ml.p2.8xlarge", "ml.p2.16xlarge", "ml.p3.2xlarge", "ml.p3.8xlarge", "ml.p3.16xlarge", "ml.c5.xlarge", "ml.c5.2xlarge", "ml.c5.4xlarge", "ml.c5.9xlarge", "ml.c5.18xlarge", "ml.m5.large", "ml.m5.xlarge", "ml.m5.2xlarge", "ml.m5.4xlarge", "ml.m5.12xlarge", "ml.m5.24xlarge"
# resp.transform_resources.instance_count #=> Integer
# resp.transform_resources.volume_kms_key_id #=> String
# resp.creation_time #=> Time
# resp.transform_start_time #=> Time
# resp.transform_end_time #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/DescribeTransformJob AWS API Documentation
#
# @overload describe_transform_job(params = {})
# @param [Hash] params ({})
def describe_transform_job(params = {}, options = {})
  build_request(:describe_transform_job, params).send_request(options)
end
# Lists endpoint configurations.
#
# @option params [String] :sort_by
# The field to sort results by. The default is `CreationTime`.
#
# @option params [String] :sort_order
# The sort order for results. The default is `Ascending`.
#
# @option params [String] :next_token
# If the result of the previous `ListEndpointConfig` request was
# truncated, the response includes a `NextToken`. To retrieve the next
# set of endpoint configurations, use the token in the next request.
#
# @option params [Integer] :max_results
#   The maximum number of endpoint configurations to return in the
#   response.
#
# @option params [String] :name_contains
# A string in the endpoint configuration name. This filter returns only
# endpoint configurations whose name contains the specified string.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_before
# A filter that returns only endpoint configurations created before the
# specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_after
# A filter that returns only endpoint configurations created after the
# specified time (timestamp).
#
# @return [Types::ListEndpointConfigsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListEndpointConfigsOutput#endpoint_configs #endpoint_configs} => Array<Types::EndpointConfigSummary>
# * {Types::ListEndpointConfigsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_endpoint_configs({
# sort_by: "Name", # accepts Name, CreationTime
# sort_order: "Ascending", # accepts Ascending, Descending
# next_token: "PaginationToken",
# max_results: 1,
# name_contains: "EndpointConfigNameContains",
# creation_time_before: Time.now,
# creation_time_after: Time.now,
# })
#
# @example Response structure
#
# resp.endpoint_configs #=> Array
# resp.endpoint_configs[0].endpoint_config_name #=> String
# resp.endpoint_configs[0].endpoint_config_arn #=> String
# resp.endpoint_configs[0].creation_time #=> Time
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListEndpointConfigs AWS API Documentation
#
# @overload list_endpoint_configs(params = {})
# @param [Hash] params ({})
def list_endpoint_configs(params = {}, options = {})
  build_request(:list_endpoint_configs, params).send_request(options)
end
# Lists endpoints.
#
# @option params [String] :sort_by
# Sorts the list of results. The default is `CreationTime`.
#
# @option params [String] :sort_order
# The sort order for results. The default is `Ascending`.
#
# @option params [String] :next_token
# If the result of a `ListEndpoints` request was truncated, the response
# includes a `NextToken`. To retrieve the next set of endpoints, use the
# token in the next request.
#
# @option params [Integer] :max_results
# The maximum number of endpoints to return in the response.
#
# @option params [String] :name_contains
# A string in endpoint names. This filter returns only endpoints whose
# name contains the specified string.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_before
# A filter that returns only endpoints that were created before the
# specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_after
# A filter that returns only endpoints that were created after the
# specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_before
# A filter that returns only endpoints that were modified before the
# specified timestamp.
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_after
# A filter that returns only endpoints that were modified after the
# specified timestamp.
#
# @option params [String] :status_equals
# A filter that returns only endpoints with the specified status.
#
# @return [Types::ListEndpointsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListEndpointsOutput#endpoints #endpoints} => Array<Types::EndpointSummary>
# * {Types::ListEndpointsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_endpoints({
# sort_by: "Name", # accepts Name, CreationTime, Status
# sort_order: "Ascending", # accepts Ascending, Descending
# next_token: "PaginationToken",
# max_results: 1,
# name_contains: "EndpointNameContains",
# creation_time_before: Time.now,
# creation_time_after: Time.now,
# last_modified_time_before: Time.now,
# last_modified_time_after: Time.now,
# status_equals: "OutOfService", # accepts OutOfService, Creating, Updating, SystemUpdating, RollingBack, InService, Deleting, Failed
# })
#
# @example Response structure
#
# resp.endpoints #=> Array
# resp.endpoints[0].endpoint_name #=> String
# resp.endpoints[0].endpoint_arn #=> String
# resp.endpoints[0].creation_time #=> Time
# resp.endpoints[0].last_modified_time #=> Time
# resp.endpoints[0].endpoint_status #=> String, one of "OutOfService", "Creating", "Updating", "SystemUpdating", "RollingBack", "InService", "Deleting", "Failed"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListEndpoints AWS API Documentation
#
# @overload list_endpoints(params = {})
# @param [Hash] params ({})
def list_endpoints(params = {}, options = {})
  build_request(:list_endpoints, params).send_request(options)
end
# Gets a list of HyperParameterTuningJobSummary objects that describe
# the hyperparameter tuning jobs launched in your account.
#
# @option params [String] :next_token
# If the result of the previous `ListHyperParameterTuningJobs` request
# was truncated, the response includes a `NextToken`. To retrieve the
# next set of tuning jobs, use the token in the next request.
#
# @option params [Integer] :max_results
# The maximum number of tuning jobs to return. The default value is 10.
#
# @option params [String] :sort_by
# The field to sort results by. The default is `Name`.
#
# @option params [String] :sort_order
# The sort order for results. The default is `Ascending`.
#
# @option params [String] :name_contains
# A string in the tuning job name. This filter returns only tuning jobs
# whose name contains the specified string.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_after
# A filter that returns only tuning jobs that were created after the
# specified time.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_before
# A filter that returns only tuning jobs that were created before the
# specified time.
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_after
# A filter that returns only tuning jobs that were modified after the
# specified time.
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_before
# A filter that returns only tuning jobs that were modified before the
# specified time.
#
# @option params [String] :status_equals
# A filter that returns only tuning jobs with the specified status.
#
# @return [Types::ListHyperParameterTuningJobsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListHyperParameterTuningJobsResponse#hyper_parameter_tuning_job_summaries #hyper_parameter_tuning_job_summaries} => Array<Types::HyperParameterTuningJobSummary>
# * {Types::ListHyperParameterTuningJobsResponse#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_hyper_parameter_tuning_jobs({
# next_token: "NextToken",
# max_results: 1,
# sort_by: "Name", # accepts Name, Status, CreationTime
# sort_order: "Ascending", # accepts Ascending, Descending
# name_contains: "NameContains",
# creation_time_after: Time.now,
# creation_time_before: Time.now,
# last_modified_time_after: Time.now,
# last_modified_time_before: Time.now,
# status_equals: "Completed", # accepts Completed, InProgress, Failed, Stopped, Stopping
# })
#
# @example Response structure
#
# resp.hyper_parameter_tuning_job_summaries #=> Array
# resp.hyper_parameter_tuning_job_summaries[0].hyper_parameter_tuning_job_name #=> String
# resp.hyper_parameter_tuning_job_summaries[0].hyper_parameter_tuning_job_arn #=> String
# resp.hyper_parameter_tuning_job_summaries[0].hyper_parameter_tuning_job_status #=> String, one of "Completed", "InProgress", "Failed", "Stopped", "Stopping"
# resp.hyper_parameter_tuning_job_summaries[0].strategy #=> String, one of "Bayesian"
# resp.hyper_parameter_tuning_job_summaries[0].creation_time #=> Time
# resp.hyper_parameter_tuning_job_summaries[0].hyper_parameter_tuning_end_time #=> Time
# resp.hyper_parameter_tuning_job_summaries[0].last_modified_time #=> Time
# resp.hyper_parameter_tuning_job_summaries[0].training_job_status_counters.completed #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].training_job_status_counters.in_progress #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].training_job_status_counters.retryable_error #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].training_job_status_counters.non_retryable_error #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].training_job_status_counters.stopped #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].objective_status_counters.succeeded #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].objective_status_counters.pending #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].objective_status_counters.failed #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].resource_limits.max_number_of_training_jobs #=> Integer
# resp.hyper_parameter_tuning_job_summaries[0].resource_limits.max_parallel_training_jobs #=> Integer
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListHyperParameterTuningJobs AWS API Documentation
#
# @overload list_hyper_parameter_tuning_jobs(params = {})
# @param [Hash] params ({})
def list_hyper_parameter_tuning_jobs(params = {}, options = {})
  build_request(:list_hyper_parameter_tuning_jobs, params).send_request(options)
end
# Lists models created with the [CreateModel][1] API.
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateModel.html
#
# @option params [String] :sort_by
# Sorts the list of results. The default is `CreationTime`.
#
# @option params [String] :sort_order
# The sort order for results. The default is `Ascending`.
#
# @option params [String] :next_token
# If the response to a previous `ListModels` request was truncated, the
# response includes a `NextToken`. To retrieve the next set of models,
# use the token in the next request.
#
# @option params [Integer] :max_results
# The maximum number of models to return in the response.
#
# @option params [String] :name_contains
#   A string in the model name. This filter returns only models whose
#   name contains the specified string.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_before
# A filter that returns only models created before the specified time
# (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_after
# A filter that returns only models created after the specified time
# (timestamp).
#
# @return [Types::ListModelsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListModelsOutput#models #models} => Array<Types::ModelSummary>
# * {Types::ListModelsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_models({
# sort_by: "Name", # accepts Name, CreationTime
# sort_order: "Ascending", # accepts Ascending, Descending
# next_token: "PaginationToken",
# max_results: 1,
# name_contains: "ModelNameContains",
# creation_time_before: Time.now,
# creation_time_after: Time.now,
# })
#
# @example Response structure
#
# resp.models #=> Array
# resp.models[0].model_name #=> String
# resp.models[0].model_arn #=> String
# resp.models[0].creation_time #=> Time
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListModels AWS API Documentation
#
# @overload list_models(params = {})
# @param [Hash] params ({})
def list_models(params = {}, options = {})
  build_request(:list_models, params).send_request(options)
end
# Lists notebook instance lifecycle configurations created with the
# CreateNotebookInstanceLifecycleConfig API.
#
# @option params [String] :next_token
# If the result of a `ListNotebookInstanceLifecycleConfigs` request was
# truncated, the response includes a `NextToken`. To get the next set of
# lifecycle configurations, use the token in the next request.
#
# @option params [Integer] :max_results
# The maximum number of lifecycle configurations to return in the
# response.
#
# @option params [String] :sort_by
# Sorts the list of results. The default is `CreationTime`.
#
# @option params [String] :sort_order
# The sort order for results.
#
# @option params [String] :name_contains
# A string in the lifecycle configuration name. This filter returns only
# lifecycle configurations whose name contains the specified string.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_before
# A filter that returns only lifecycle configurations that were created
# before the specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_after
# A filter that returns only lifecycle configurations that were created
# after the specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_before
# A filter that returns only lifecycle configurations that were modified
# before the specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_after
# A filter that returns only lifecycle configurations that were modified
# after the specified time (timestamp).
#
# @return [Types::ListNotebookInstanceLifecycleConfigsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListNotebookInstanceLifecycleConfigsOutput#next_token #next_token} => String
# * {Types::ListNotebookInstanceLifecycleConfigsOutput#notebook_instance_lifecycle_configs #notebook_instance_lifecycle_configs} => Array<Types::NotebookInstanceLifecycleConfigSummary>
#
# @example Request syntax with placeholder values
#
# resp = client.list_notebook_instance_lifecycle_configs({
# next_token: "NextToken",
# max_results: 1,
# sort_by: "Name", # accepts Name, CreationTime, LastModifiedTime
# sort_order: "Ascending", # accepts Ascending, Descending
# name_contains: "NotebookInstanceLifecycleConfigNameContains",
# creation_time_before: Time.now,
# creation_time_after: Time.now,
# last_modified_time_before: Time.now,
# last_modified_time_after: Time.now,
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.notebook_instance_lifecycle_configs #=> Array
# resp.notebook_instance_lifecycle_configs[0].notebook_instance_lifecycle_config_name #=> String
# resp.notebook_instance_lifecycle_configs[0].notebook_instance_lifecycle_config_arn #=> String
# resp.notebook_instance_lifecycle_configs[0].creation_time #=> Time
# resp.notebook_instance_lifecycle_configs[0].last_modified_time #=> Time
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListNotebookInstanceLifecycleConfigs AWS API Documentation
#
# @overload list_notebook_instance_lifecycle_configs(params = {})
# @param [Hash] params ({})
def list_notebook_instance_lifecycle_configs(params = {}, options = {})
  build_request(:list_notebook_instance_lifecycle_configs, params).send_request(options)
end
# Returns a list of the Amazon SageMaker notebook instances in the
# requester's account in an AWS Region.
#
# @option params [String] :next_token
# If the previous call to the `ListNotebookInstances` is truncated, the
# response includes a `NextToken`. You can use this token in your
# subsequent `ListNotebookInstances` request to fetch the next set of
# notebook instances.
#
# <note markdown="1"> You might specify a filter or a sort order in your request. When
# response is truncated, you must use the same values for the filter and
# sort order in the next request.
#
# </note>
#
# @option params [Integer] :max_results
# The maximum number of notebook instances to return.
#
# @option params [String] :sort_by
# The field to sort results by. The default is `Name`.
#
# @option params [String] :sort_order
# The sort order for results.
#
# @option params [String] :name_contains
# A string in the notebook instances' name. This filter returns only
# notebook instances whose name contains the specified string.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_before
# A filter that returns only notebook instances that were created before
# the specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_after
# A filter that returns only notebook instances that were created after
# the specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_before
# A filter that returns only notebook instances that were modified
# before the specified time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_after
# A filter that returns only notebook instances that were modified after
# the specified time (timestamp).
#
# @option params [String] :status_equals
# A filter that returns only notebook instances with the specified
# status.
#
# @option params [String] :notebook_instance_lifecycle_config_name_contains
# A string in the name of a notebook instances lifecycle configuration
# associated with this notebook instance. This filter returns only
# notebook instances associated with a lifecycle configuration with a
# name that contains the specified string.
#
# @return [Types::ListNotebookInstancesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListNotebookInstancesOutput#next_token #next_token} => String
# * {Types::ListNotebookInstancesOutput#notebook_instances #notebook_instances} => Array<Types::NotebookInstanceSummary>
#
# @example Request syntax with placeholder values
#
# resp = client.list_notebook_instances({
# next_token: "NextToken",
# max_results: 1,
# sort_by: "Name", # accepts Name, CreationTime, Status
# sort_order: "Ascending", # accepts Ascending, Descending
# name_contains: "NotebookInstanceNameContains",
# creation_time_before: Time.now,
# creation_time_after: Time.now,
# last_modified_time_before: Time.now,
# last_modified_time_after: Time.now,
# status_equals: "Pending", # accepts Pending, InService, Stopping, Stopped, Failed, Deleting, Updating
# notebook_instance_lifecycle_config_name_contains: "NotebookInstanceLifecycleConfigName",
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.notebook_instances #=> Array
# resp.notebook_instances[0].notebook_instance_name #=> String
# resp.notebook_instances[0].notebook_instance_arn #=> String
# resp.notebook_instances[0].notebook_instance_status #=> String, one of "Pending", "InService", "Stopping", "Stopped", "Failed", "Deleting", "Updating"
# resp.notebook_instances[0].url #=> String
# resp.notebook_instances[0].instance_type #=> String, one of "ml.t2.medium", "ml.t2.large", "ml.t2.xlarge", "ml.t2.2xlarge", "ml.m4.xlarge", "ml.m4.2xlarge", "ml.m4.4xlarge", "ml.m4.10xlarge", "ml.m4.16xlarge", "ml.p2.xlarge", "ml.p2.8xlarge", "ml.p2.16xlarge", "ml.p3.2xlarge", "ml.p3.8xlarge", "ml.p3.16xlarge"
# resp.notebook_instances[0].creation_time #=> Time
# resp.notebook_instances[0].last_modified_time #=> Time
# resp.notebook_instances[0].notebook_instance_lifecycle_config_name #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListNotebookInstances AWS API Documentation
#
# @overload list_notebook_instances(params = {})
# @param [Hash] params ({})
def list_notebook_instances(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:list_notebook_instances, params).send_request(options)
end
# Returns the tags for the specified Amazon SageMaker resource.
#
# @option params [required, String] :resource_arn
# The Amazon Resource Name (ARN) of the resource whose tags you want to
# retrieve.
#
# @option params [String] :next_token
# If the response to the previous `ListTags` request is truncated,
# Amazon SageMaker returns this token. To retrieve the next set of tags,
# use it in the subsequent request.
#
# @option params [Integer] :max_results
# Maximum number of tags to return.
#
# @return [Types::ListTagsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTagsOutput#tags #tags} => Array<Types::Tag>
# * {Types::ListTagsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_tags({
# resource_arn: "ResourceArn", # required
# next_token: "NextToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.tags #=> Array
# resp.tags[0].key #=> String
# resp.tags[0].value #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListTags AWS API Documentation
#
# @overload list_tags(params = {})
# @param [Hash] params ({})
def list_tags(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:list_tags, params).send_request(options)
end
# Lists training jobs.
#
# @option params [String] :next_token
# If the result of the previous `ListTrainingJobs` request was
# truncated, the response includes a `NextToken`. To retrieve the next
# set of training jobs, use the token in the next request.
#
# @option params [Integer] :max_results
# The maximum number of training jobs to return in the response.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_after
# A filter that returns only training jobs created after the specified
# time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_before
# A filter that returns only training jobs created before the specified
# time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_after
# A filter that returns only training jobs modified after the specified
# time (timestamp).
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_before
# A filter that returns only training jobs modified before the specified
# time (timestamp).
#
# @option params [String] :name_contains
# A string in the training job name. This filter returns only training
# jobs whose name contains the specified string.
#
# @option params [String] :status_equals
# A filter that retrieves only training jobs with a specific status.
#
# @option params [String] :sort_by
# The field to sort results by. The default is `CreationTime`.
#
# @option params [String] :sort_order
# The sort order for results. The default is `Ascending`.
#
# @return [Types::ListTrainingJobsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTrainingJobsResponse#training_job_summaries #training_job_summaries} => Array<Types::TrainingJobSummary>
# * {Types::ListTrainingJobsResponse#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_training_jobs({
# next_token: "NextToken",
# max_results: 1,
# creation_time_after: Time.now,
# creation_time_before: Time.now,
# last_modified_time_after: Time.now,
# last_modified_time_before: Time.now,
# name_contains: "NameContains",
# status_equals: "InProgress", # accepts InProgress, Completed, Failed, Stopping, Stopped
# sort_by: "Name", # accepts Name, CreationTime, Status
# sort_order: "Ascending", # accepts Ascending, Descending
# })
#
# @example Response structure
#
# resp.training_job_summaries #=> Array
# resp.training_job_summaries[0].training_job_name #=> String
# resp.training_job_summaries[0].training_job_arn #=> String
# resp.training_job_summaries[0].creation_time #=> Time
# resp.training_job_summaries[0].training_end_time #=> Time
# resp.training_job_summaries[0].last_modified_time #=> Time
# resp.training_job_summaries[0].training_job_status #=> String, one of "InProgress", "Completed", "Failed", "Stopping", "Stopped"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListTrainingJobs AWS API Documentation
#
# @overload list_training_jobs(params = {})
# @param [Hash] params ({})
def list_training_jobs(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:list_training_jobs, params).send_request(options)
end
# Gets a list of TrainingJobSummary objects that describe the training
# jobs that a hyperparameter tuning job launched.
#
# @option params [required, String] :hyper_parameter_tuning_job_name
# The name of the tuning job whose training jobs you want to list.
#
# @option params [String] :next_token
# If the result of the previous
# `ListTrainingJobsForHyperParameterTuningJob` request was truncated,
# the response includes a `NextToken`. To retrieve the next set of
# training jobs, use the token in the next request.
#
# @option params [Integer] :max_results
# The maximum number of training jobs to return. The default value is
# 10.
#
# @option params [String] :status_equals
# A filter that returns only training jobs with the specified status.
#
# @option params [String] :sort_by
# The field to sort results by. The default is `Name`.
#
# If the value of this field is `FinalObjectiveMetricValue`, any
# training jobs that did not return an objective metric are not listed.
#
# @option params [String] :sort_order
# The sort order for results. The default is `Ascending`.
#
# @return [Types::ListTrainingJobsForHyperParameterTuningJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTrainingJobsForHyperParameterTuningJobResponse#training_job_summaries #training_job_summaries} => Array<Types::HyperParameterTrainingJobSummary>
# * {Types::ListTrainingJobsForHyperParameterTuningJobResponse#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_training_jobs_for_hyper_parameter_tuning_job({
# hyper_parameter_tuning_job_name: "HyperParameterTuningJobName", # required
# next_token: "NextToken",
# max_results: 1,
# status_equals: "InProgress", # accepts InProgress, Completed, Failed, Stopping, Stopped
# sort_by: "Name", # accepts Name, CreationTime, Status, FinalObjectiveMetricValue
# sort_order: "Ascending", # accepts Ascending, Descending
# })
#
# @example Response structure
#
# resp.training_job_summaries #=> Array
# resp.training_job_summaries[0].training_job_name #=> String
# resp.training_job_summaries[0].training_job_arn #=> String
# resp.training_job_summaries[0].creation_time #=> Time
# resp.training_job_summaries[0].training_start_time #=> Time
# resp.training_job_summaries[0].training_end_time #=> Time
# resp.training_job_summaries[0].training_job_status #=> String, one of "InProgress", "Completed", "Failed", "Stopping", "Stopped"
# resp.training_job_summaries[0].tuned_hyper_parameters #=> Hash
# resp.training_job_summaries[0].tuned_hyper_parameters["ParameterKey"] #=> String
# resp.training_job_summaries[0].failure_reason #=> String
# resp.training_job_summaries[0].final_hyper_parameter_tuning_job_objective_metric.type #=> String, one of "Maximize", "Minimize"
# resp.training_job_summaries[0].final_hyper_parameter_tuning_job_objective_metric.metric_name #=> String
# resp.training_job_summaries[0].final_hyper_parameter_tuning_job_objective_metric.value #=> Float
# resp.training_job_summaries[0].objective_status #=> String, one of "Succeeded", "Pending", "Failed"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListTrainingJobsForHyperParameterTuningJob AWS API Documentation
#
# @overload list_training_jobs_for_hyper_parameter_tuning_job(params = {})
# @param [Hash] params ({})
def list_training_jobs_for_hyper_parameter_tuning_job(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:list_training_jobs_for_hyper_parameter_tuning_job, params).send_request(options)
end
# Lists transform jobs.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_after
# A filter that returns only transform jobs created after the specified
# time.
#
# @option params [Time,DateTime,Date,Integer,String] :creation_time_before
# A filter that returns only transform jobs created before the specified
# time.
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_after
# A filter that returns only transform jobs modified after the specified
# time.
#
# @option params [Time,DateTime,Date,Integer,String] :last_modified_time_before
# A filter that returns only transform jobs modified before the
# specified time.
#
# @option params [String] :name_contains
# A string in the transform job name. This filter returns only transform
# jobs whose name contains the specified string.
#
# @option params [String] :status_equals
# A filter that retrieves only transform jobs with a specific status.
#
# @option params [String] :sort_by
# The field to sort results by. The default is `CreationTime`.
#
# @option params [String] :sort_order
# The sort order for results. The default is `Descending`.
#
# @option params [String] :next_token
# If the result of the previous `ListTransformJobs` request was
# truncated, the response includes a `NextToken`. To retrieve the next
# set of transform jobs, use the token in the next request.
#
# @option params [Integer] :max_results
# The maximum number of transform jobs to return in the response. The
# default value is `10`.
#
# @return [Types::ListTransformJobsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTransformJobsResponse#transform_job_summaries #transform_job_summaries} => Array<Types::TransformJobSummary>
# * {Types::ListTransformJobsResponse#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_transform_jobs({
# creation_time_after: Time.now,
# creation_time_before: Time.now,
# last_modified_time_after: Time.now,
# last_modified_time_before: Time.now,
# name_contains: "NameContains",
# status_equals: "InProgress", # accepts InProgress, Completed, Failed, Stopping, Stopped
# sort_by: "Name", # accepts Name, CreationTime, Status
# sort_order: "Ascending", # accepts Ascending, Descending
# next_token: "NextToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.transform_job_summaries #=> Array
# resp.transform_job_summaries[0].transform_job_name #=> String
# resp.transform_job_summaries[0].transform_job_arn #=> String
# resp.transform_job_summaries[0].creation_time #=> Time
# resp.transform_job_summaries[0].transform_end_time #=> Time
# resp.transform_job_summaries[0].last_modified_time #=> Time
# resp.transform_job_summaries[0].transform_job_status #=> String, one of "InProgress", "Completed", "Failed", "Stopping", "Stopped"
# resp.transform_job_summaries[0].failure_reason #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ListTransformJobs AWS API Documentation
#
# @overload list_transform_jobs(params = {})
# @param [Hash] params ({})
def list_transform_jobs(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:list_transform_jobs, params).send_request(options)
end
# Launches an ML compute instance with the latest version of the
# libraries and attaches your ML storage volume. After configuring the
# notebook instance, Amazon SageMaker sets the notebook instance status
# to `InService`. A notebook instance's status must be `InService`
# before you can connect to your Jupyter notebook.
#
# @option params [required, String] :notebook_instance_name
# The name of the notebook instance to start.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.start_notebook_instance({
# notebook_instance_name: "NotebookInstanceName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/StartNotebookInstance AWS API Documentation
#
# @overload start_notebook_instance(params = {})
# @param [Hash] params ({})
def start_notebook_instance(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:start_notebook_instance, params).send_request(options)
end
# Stops a running hyperparameter tuning job and all running training
# jobs that the tuning job launched.
#
# All model artifacts output from the training jobs are stored in Amazon
# Simple Storage Service (Amazon S3). All data that the training jobs
# write to Amazon CloudWatch Logs are still available in CloudWatch.
# After the tuning job moves to the `Stopped` state, it releases all
# reserved resources for the tuning job.
#
# @option params [required, String] :hyper_parameter_tuning_job_name
# The name of the tuning job to stop.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.stop_hyper_parameter_tuning_job({
# hyper_parameter_tuning_job_name: "HyperParameterTuningJobName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/StopHyperParameterTuningJob AWS API Documentation
#
# @overload stop_hyper_parameter_tuning_job(params = {})
# @param [Hash] params ({})
def stop_hyper_parameter_tuning_job(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:stop_hyper_parameter_tuning_job, params).send_request(options)
end
# Terminates the ML compute instance. Before terminating the instance,
# Amazon SageMaker disconnects the ML storage volume from it. Amazon
# SageMaker preserves the ML storage volume.
#
# To access data on the ML storage volume for a notebook instance that
# has been terminated, call the `StartNotebookInstance` API.
# `StartNotebookInstance` launches another ML compute instance,
# configures it, and attaches the preserved ML storage volume so you can
# continue your work.
#
# @option params [required, String] :notebook_instance_name
# The name of the notebook instance to terminate.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.stop_notebook_instance({
# notebook_instance_name: "NotebookInstanceName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/StopNotebookInstance AWS API Documentation
#
# @overload stop_notebook_instance(params = {})
# @param [Hash] params ({})
def stop_notebook_instance(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:stop_notebook_instance, params).send_request(options)
end
# Stops a training job. To stop a job, Amazon SageMaker sends the
# algorithm the `SIGTERM` signal, which delays job termination for 120
# seconds. Algorithms might use this 120-second window to save the model
# artifacts, so the results of the training is not lost.
#
# Training algorithms provided by Amazon SageMaker save the intermediate
# results of a model training job. This intermediate data is a valid
# model artifact. You can use the model artifacts that are saved when
# Amazon SageMaker stops a training job to create a model.
#
# When it receives a `StopTrainingJob` request, Amazon SageMaker changes
# the status of the job to `Stopping`. After Amazon SageMaker stops the
# job, it sets the status to `Stopped`.
#
# @option params [required, String] :training_job_name
# The name of the training job to stop.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.stop_training_job({
# training_job_name: "TrainingJobName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/StopTrainingJob AWS API Documentation
#
# @overload stop_training_job(params = {})
# @param [Hash] params ({})
def stop_training_job(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:stop_training_job, params).send_request(options)
end
# Stops a transform job.
#
# When Amazon SageMaker receives a `StopTransformJob` request, the
# status of the job changes to `Stopping`. After Amazon SageMaker stops
# the job, the status is set to `Stopped`. When you stop a transform job
# before it is completed, Amazon SageMaker doesn't store the job's
# output in Amazon S3.
#
# @option params [required, String] :transform_job_name
# The name of the transform job to stop.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.stop_transform_job({
# transform_job_name: "TransformJobName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/StopTransformJob AWS API Documentation
#
# @overload stop_transform_job(params = {})
# @param [Hash] params ({})
def stop_transform_job(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:stop_transform_job, params).send_request(options)
end
# Deploys the new `EndpointConfig` specified in the request, switches to
# using newly created endpoint, and then deletes resources provisioned
# for the endpoint using the previous `EndpointConfig` (there is no
# availability loss).
#
# When Amazon SageMaker receives the request, it sets the endpoint
# status to `Updating`. After updating the endpoint, it sets the status
# to `InService`. To check the status of an endpoint, use the
# [DescribeEndpoint][1] API.
#
# <note markdown="1"> You cannot update an endpoint with the current `EndpointConfig`. To
# update an endpoint, you must create a new `EndpointConfig`.
#
# </note>
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_DescribeEndpoint.html
#
# @option params [required, String] :endpoint_name
# The name of the endpoint whose configuration you want to update.
#
# @option params [required, String] :endpoint_config_name
# The name of the new endpoint configuration.
#
# @return [Types::UpdateEndpointOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateEndpointOutput#endpoint_arn #endpoint_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_endpoint({
# endpoint_name: "EndpointName", # required
# endpoint_config_name: "EndpointConfigName", # required
# })
#
# @example Response structure
#
# resp.endpoint_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/UpdateEndpoint AWS API Documentation
#
# @overload update_endpoint(params = {})
# @param [Hash] params ({})
def update_endpoint(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:update_endpoint, params).send_request(options)
end
# Updates variant weight of one or more variants associated with an
# existing endpoint, or capacity of one variant associated with an
# existing endpoint. When it receives the request, Amazon SageMaker sets
# the endpoint status to `Updating`. After updating the endpoint, it
# sets the status to `InService`. To check the status of an endpoint,
# use the [DescribeEndpoint][1] API.
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/API_DescribeEndpoint.html
#
# @option params [required, String] :endpoint_name
# The name of an existing Amazon SageMaker endpoint.
#
# @option params [required, Array<Types::DesiredWeightAndCapacity>] :desired_weights_and_capacities
# An object that provides new capacity and weight values for a variant.
#
# @return [Types::UpdateEndpointWeightsAndCapacitiesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateEndpointWeightsAndCapacitiesOutput#endpoint_arn #endpoint_arn} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_endpoint_weights_and_capacities({
# endpoint_name: "EndpointName", # required
# desired_weights_and_capacities: [ # required
# {
# variant_name: "VariantName", # required
# desired_weight: 1.0,
# desired_instance_count: 1,
# },
# ],
# })
#
# @example Response structure
#
# resp.endpoint_arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/UpdateEndpointWeightsAndCapacities AWS API Documentation
#
# @overload update_endpoint_weights_and_capacities(params = {})
# @param [Hash] params ({})
def update_endpoint_weights_and_capacities(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:update_endpoint_weights_and_capacities, params).send_request(options)
end
# Updates a notebook instance. NotebookInstance updates include
# upgrading or downgrading the ML compute instance used for your
# notebook instance to accommodate changes in your workload
# requirements. You can also update the VPC security groups.
#
# @option params [required, String] :notebook_instance_name
# The name of the notebook instance to update.
#
# @option params [String] :instance_type
# The Amazon ML compute instance type.
#
# @option params [String] :role_arn
# The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker
# can assume to access the notebook instance. For more information, see
# [Amazon SageMaker Roles][1].
#
# <note markdown="1"> To be able to pass this role to Amazon SageMaker, the caller of this
# API must have the `iam:PassRole` permission.
#
# </note>
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html
#
# @option params [String] :lifecycle_config_name
# The name of a lifecycle configuration to associate with the notebook
# instance. For information about lifestyle configurations, see [Step
# 2.1: (Optional) Customize a Notebook Instance][1].
#
#
#
# [1]: http://docs.aws.amazon.com/sagemaker/latest/dg/notebook-lifecycle-config.html
#
# @option params [Boolean] :disassociate_lifecycle_config
# Set to `true` to remove the notebook instance lifecycle configuration
# currently associated with the notebook instance.
#
# @option params [Integer] :volume_size_in_gb
# The size, in GB, of the ML storage volume to attach to the notebook
# instance.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.update_notebook_instance({
# notebook_instance_name: "NotebookInstanceName", # required
# instance_type: "ml.t2.medium", # accepts ml.t2.medium, ml.t2.large, ml.t2.xlarge, ml.t2.2xlarge, ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge
# role_arn: "RoleArn",
# lifecycle_config_name: "NotebookInstanceLifecycleConfigName",
# disassociate_lifecycle_config: false,
# volume_size_in_gb: 1,
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/UpdateNotebookInstance AWS API Documentation
#
# @overload update_notebook_instance(params = {})
# @param [Hash] params ({})
def update_notebook_instance(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:update_notebook_instance, params).send_request(options)
end
# Updates a notebook instance lifecycle configuration created with the
# CreateNotebookInstanceLifecycleConfig API.
#
# @option params [required, String] :notebook_instance_lifecycle_config_name
# The name of the lifecycle configuration.
#
# @option params [Array<Types::NotebookInstanceLifecycleHook>] :on_create
# The shell script that runs only once, when you create a notebook
# instance
#
# @option params [Array<Types::NotebookInstanceLifecycleHook>] :on_start
# The shell script that runs every time you start a notebook instance,
# including when you create the notebook instance.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.update_notebook_instance_lifecycle_config({
# notebook_instance_lifecycle_config_name: "NotebookInstanceLifecycleConfigName", # required
# on_create: [
# {
# content: "NotebookInstanceLifecycleConfigContent",
# },
# ],
# on_start: [
# {
# content: "NotebookInstanceLifecycleConfigContent",
# },
# ],
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/UpdateNotebookInstanceLifecycleConfig AWS API Documentation
#
# @overload update_notebook_instance_lifecycle_config(params = {})
# @param [Hash] params ({})
def update_notebook_instance_lifecycle_config(params = {}, options = {})
  # Build the operation request and dispatch it in one step.
  build_request(:update_notebook_instance_lifecycle_config, params).send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
  # Assemble the per-operation request context from the modeled API,
  # then stamp it with this gem's identity for user-agent/metrics.
  context = Seahorse::Client::RequestContext.new(
    operation_name: operation_name,
    operation: config.api.operation(operation_name),
    client: self,
    params: params,
    config: config)
  context[:gem_name] = 'aws-sdk-sagemaker'
  context[:gem_version] = '1.22.0'
  # Bind the handler stack registered for this operation to the context.
  Seahorse::Client::Request.new(@handlers.for(operation_name), context)
end
# Polls an API operation until a resource enters a desired state.
#
# ## Basic Usage
#
# A waiter will call an API operation until:
#
# * It is successful
# * It enters a terminal state
# * It makes the maximum number of attempts
#
# In between attempts, the waiter will sleep.
#
# # polls in a loop, sleeping between attempts
# client.waiter_until(waiter_name, params)
#
# ## Configuration
#
# You can configure the maximum number of polling attempts, and the
# delay (in seconds) between each polling attempt. You can pass
# configuration as the final arguments hash.
#
# # poll for ~25 seconds
# client.wait_until(waiter_name, params, {
# max_attempts: 5,
# delay: 5,
# })
#
# ## Callbacks
#
# You can be notified before each polling attempt and before each
# delay. If you throw `:success` or `:failure` from these callbacks,
# it will terminate the waiter.
#
# started_at = Time.now
# client.wait_until(waiter_name, params, {
#
# # disable max attempts
# max_attempts: nil,
#
# # poll for 1 hour, instead of a number of attempts
# before_wait: -> (attempts, response) do
# throw :failure if Time.now - started_at > 3600
# end
# })
#
# ## Handling Errors
#
# When a waiter is unsuccessful, it will raise an error.
# All of the failure errors extend from
# {Aws::Waiters::Errors::WaiterFailed}.
#
# begin
# client.wait_until(...)
# rescue Aws::Waiters::Errors::WaiterFailed
# # resource did not enter the desired state in time
# end
#
# ## Valid Waiters
#
# The following table lists the valid waiter names, the operations they call,
# and the default `:delay` and `:max_attempts` values.
#
# | waiter_name | params | :delay | :max_attempts |
# | ---------------------------------- | ----------------------------- | -------- | ------------- |
# | endpoint_deleted | {#describe_endpoint} | 30 | 60 |
# | endpoint_in_service | {#describe_endpoint} | 30 | 120 |
# | notebook_instance_deleted | {#describe_notebook_instance} | 30 | 60 |
# | notebook_instance_in_service | {#describe_notebook_instance} | 30 | 60 |
# | notebook_instance_stopped | {#describe_notebook_instance} | 30 | 60 |
# | training_job_completed_or_stopped | {#describe_training_job} | 120 | 180 |
# | transform_job_completed_or_stopped | {#describe_transform_job} | 60 | 60 |
#
# @raise [Errors::FailureStateError] Raised when the waiter terminates
# because the waiter has entered a state that it will not transition
# out of, preventing success.
#
# @raise [Errors::TooManyAttemptsError] Raised when the configured
# maximum number of attempts have been made, and the waiter is not
# yet successful.
#
# @raise [Errors::UnexpectedError] Raised when an error is encounted
# while polling for a resource that is not expected.
#
# @raise [Errors::NoSuchWaiterError] Raised when you request to wait
# for an unknown state.
#
# @return [Boolean] Returns `true` if the waiter was successful.
# @param [Symbol] waiter_name
# @param [Hash] params ({})
# @param [Hash] options ({})
# @option options [Integer] :max_attempts
# @option options [Integer] :delay
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
def wait_until(waiter_name, params = {}, options = {})
  poller = waiter(waiter_name, options)
  # Deprecated hook: yields the underlying waiter for legacy callers.
  yield(poller.waiter) if block_given?
  poller.wait(params)
end
# @api private
# @deprecated
def waiter_names
# Names of all waiters usable with #wait_until (see the #waiters map).
waiters.keys
end
private
# @param [Symbol] waiter_name
# @param [Hash] options ({})
def waiter(waiter_name, options = {})
  # Resolve the waiter class by name; fail fast on unknown names so the
  # caller sees the full list of valid waiters in the error.
  waiter_class = waiters[waiter_name]
  unless waiter_class
    raise Aws::Waiters::Errors::NoSuchWaiterError.new(waiter_name, waiters.keys)
  end
  waiter_class.new(options.merge(client: self))
end
# Registry mapping waiter names (Symbols) to their waiter classes.
def waiters
{
endpoint_deleted: Waiters::EndpointDeleted,
endpoint_in_service: Waiters::EndpointInService,
notebook_instance_deleted: Waiters::NotebookInstanceDeleted,
notebook_instance_in_service: Waiters::NotebookInstanceInService,
notebook_instance_stopped: Waiters::NotebookInstanceStopped,
training_job_completed_or_stopped: Waiters::TrainingJobCompletedOrStopped,
transform_job_completed_or_stopped: Waiters::TransformJobCompletedOrStopped
}
end
# Class-level accessors used by SDK plumbing.
class << self
# @api private
attr_reader :identifier
# @api private
# Module holding this service's modeled error classes.
def errors_module
Errors
end
end
end
end
| 48.017058
| 603
| 0.680348
|
99a5afcd59385c77f650f54924c9415d75b36270
| 6,554
|
rs
|
Rust
|
as_derive_utils/src/utils.rs
|
Monadic-Cat/abi_stable_crates
|
19d71ec9175f870c1c784c37dae730b99948cacf
|
[
"Apache-2.0",
"MIT"
] | 284
|
2019-04-13T05:27:34.000Z
|
2022-03-22T16:38:11.000Z
|
as_derive_utils/src/utils.rs
|
Monadic-Cat/abi_stable_crates
|
19d71ec9175f870c1c784c37dae730b99948cacf
|
[
"Apache-2.0",
"MIT"
] | 43
|
2019-04-28T21:08:24.000Z
|
2022-03-04T21:44:44.000Z
|
as_derive_utils/src/utils.rs
|
Monadic-Cat/abi_stable_crates
|
19d71ec9175f870c1c784c37dae730b99948cacf
|
[
"Apache-2.0",
"MIT"
] | 18
|
2019-08-02T06:50:45.000Z
|
2022-02-13T18:02:22.000Z
|
use std::{
fmt::Display,
ops::{Deref,DerefMut},
mem::{self,ManuallyDrop},
ptr,
};
use quote::ToTokens;
use proc_macro2::{
TokenStream as TokenStream2,
Span,
};
use syn::spanned::Spanned;
////////////////////////////////////////////////////////////////////////////////
/// Zero-sized placeholder that emits nothing when converted to tokens.
#[derive(Debug,Copy,Clone,PartialEq,Eq,Hash)]
pub struct NoTokens;
impl ToTokens for NoTokens {
// Intentionally a no-op: contributes no tokens to the stream.
fn to_tokens(&self, _: &mut TokenStream2) {}
}
////////////////////////////////////////////////////////////////////////////////
/// Convenience comparisons and conversions for `syn::Path`.
pub trait SynPathExt{
/// Whether this path is a single identifier equal to `s`.
fn equals_str(&self,s:&str)->bool;
/// Whether this path is a single identifier equal to `s`.
fn equals_ident(&self,s:&syn::Ident)->bool;
/// Converts a single-segment path into its identifier,
/// returning the path unchanged in the `Err` case.
fn into_ident(self)->Result<syn::Ident,Self>
where Self:Sized;
}
impl SynPathExt for syn::Path {
    /// True when the path is exactly one identifier matching `s`.
    fn equals_str(&self, s: &str) -> bool {
        self.get_ident().map_or(false, |ident| ident == s)
    }

    /// True when the path is exactly one identifier matching `s`.
    fn equals_ident(&self, s: &syn::Ident) -> bool {
        self.get_ident() == Some(s)
    }

    /// Unwraps a one-segment path into its identifier; multi-segment
    /// (or empty) paths are handed back unchanged.
    fn into_ident(mut self) -> Result<syn::Ident, Self> {
        if self.segments.len() != 1 {
            return Err(self);
        }
        Ok(self.segments.pop().expect("TEST BUG").into_value().ident)
    }
}
////////////////////////////////////////////////////////////////////////////////
/// Helpers for accumulating multiple `syn::Error`s into one `Result`.
pub trait SynResultExt{
/// Records `err`, combining it with any error already present.
fn push_err(&mut self,err:syn::Error);
/// Folds the error of `res` (if any) into `self`; `Ok` values are discarded.
fn combine_err<T>(&mut self,res:Result<T,syn::Error>);
/// Folds this value's error (if any) into `into`.
fn combine_into_err<T>(self,into:&mut Result<T,syn::Error>);
}
impl<T> SynResultExt for Result<T,syn::Error>{
fn push_err(&mut self,err:syn::Error){
match self {
this@Ok(_)=>*this=Err(err),
Err(e)=>e.combine(err),
}
}
fn combine_err<T2>(&mut self,res:Result<T2,syn::Error>) {
if let Err(err)=res {
self.push_err(err);
}
}
fn combine_into_err<T2>(self,into:&mut Result<T2,syn::Error>){
into.combine_err(self);
}
}
////////////////////////////////////////////////////////////////////////////////
/// A `Result` wrapper that panics when dropped if the error variant was
/// not handled by calling `.into_result()` (a "linear type" discipline).
#[derive(Debug,Clone)]
pub struct LinearResult<T>{
// ManuallyDrop lets `into_result` move the value out while skipping
// this wrapper's panicking `Drop`.
errors:ManuallyDrop<Result<T,syn::Error>>,
}
impl<T> Drop for LinearResult<T>{
fn drop(&mut self){
// SAFETY: `errors` is moved out exactly once here; `into_result`
// wraps `self` in ManuallyDrop first, so this Drop never runs on a
// value whose contents were already taken.
let res=unsafe{ take_manuallydrop(&mut self.errors) };
// Enforce the linear contract: dropping an unhandled `Err` panics.
res.expect("Expected LinearResult to be handled");
}
}
impl<T> LinearResult<T>{
    /// Wraps an existing `Result`, arming the must-handle guarantee.
    #[inline]
    pub fn new(res:Result<T,syn::Error>)->Self{
        let errors=ManuallyDrop::new(res);
        Self{errors}
    }
    /// Shorthand for wrapping a success value.
    #[inline]
    pub fn ok(value:T)->Self{
        Self::new(Ok(value))
    }
}
impl<T> Default for LinearResult<T>
where
    T:Default
{
    /// A defaulted `LinearResult` starts out as `Ok(T::default())`.
    fn default()->Self{
        Self::ok(T::default())
    }
}
// Allows `?`-style ergonomic construction from a plain `Result`.
impl<T> From<Result<T,syn::Error>> for LinearResult<T>{
    #[inline]
    fn from(res:Result<T,syn::Error>)->Self{
        Self::new(res)
    }
}
// Read-only access to the wrapped `Result` without consuming it.
impl<T> Deref for LinearResult<T>{
    type Target=Result<T,syn::Error>;
    fn deref(&self)->&Result<T,syn::Error>{
        &self.errors
    }
}
// Mutable access; mutation does not discharge the must-handle guarantee.
impl<T> DerefMut for LinearResult<T>{
    fn deref_mut(&mut self)->&mut Result<T,syn::Error>{
        &mut self.errors
    }
}
// Deliberately `Into` rather than the conventional `From`:
// `impl<T> From<LinearResult<T>> for Result<T, syn::Error>` is rejected
// by the orphan rules (the uncovered `T` appears in the foreign `Self`
// type `Result` before the local `LinearResult<T>`).
impl<T> Into<Result<T,syn::Error>> for LinearResult<T>{
    #[inline]
    // Converting out via `into_result` defuses the drop-panic.
    fn into(self)->Result<T,syn::Error>{
        self.into_result()
    }
}
#[allow(dead_code)]
impl<T> LinearResult<T>{
    /// Consumes the wrapper, returning the inner `Result` and
    /// disarming the panicking `Drop`.
    #[inline]
    pub fn into_result(self)->Result<T,syn::Error>{
        // Wrap `self` in `ManuallyDrop` so its `Drop` impl never runs,
        // then move the inner `Result` out.
        let mut this=ManuallyDrop::new(self);
        // Safety: `this` is never used again after the take.
        unsafe{ take_manuallydrop(&mut this.errors) }
    }
    /// Takes the current result out, leaving `Ok(T::default())` behind.
    #[inline]
    pub fn take(&mut self)->Result<T,syn::Error>
    where
        T:Default
    {
        self.replace(Ok(Default::default()))
    }
    /// Swaps in `other`, returning the previous result.
    /// The returned plain `Result` is no longer drop-guarded.
    #[inline]
    pub fn replace(&mut self,other:Result<T,syn::Error>)->Result<T,syn::Error>{
        mem::replace(&mut *self.errors,other)
    }
}
impl<T> SynResultExt for LinearResult<T>{
#[inline]
fn push_err(&mut self,err:syn::Error){
self.errors.push_err(err);
}
#[inline]
fn combine_err<T2>(&mut self,res:Result<T2,syn::Error>) {
self.errors.combine_err(res);
}
#[inline]
fn combine_into_err<T2>(self,into:&mut Result<T2,syn::Error>){
self.into_result().combine_into_err(into);
}
}
////////////////////////////////////////////////////////////////////////////////
/// Takes the contents out of a `ManuallyDrop<T>`.
///
/// # Safety
///
/// After this function is called `slot` will become uninitialized and
/// must not be read again.
pub unsafe fn take_manuallydrop<T>(slot: &mut ManuallyDrop<T>) -> T {
ManuallyDrop::into_inner(ptr::read(slot))
}
////////////////////////////////////////////////////////////////////////////////
/// Builds a `syn::Error` whose span covers all of `tokens`,
/// with `display` as the message.
pub fn spanned_err(tokens:&dyn ToTokens, display:&dyn Display)-> syn::Error {
    syn::Error::new_spanned(tokens,display)
}
#[allow(dead_code)]
/// Builds a `syn::Error` at an explicit `span` with `display` as the message.
pub fn syn_err(span:Span,display:&dyn Display)-> syn::Error {
    syn::Error::new(span,display)
}
////////////////////////////////////////////////////////////////////////////////
/// Joins the spans of every item in `iter` into one span.
///
/// Returns `Span::call_site()` for an empty iterator, and also whenever
/// `Span::join` yields `None` (e.g. spans from different files, or on
/// toolchains where `join` is unavailable) — in that case previously
/// accumulated spans are discarded in favor of the call site.
pub fn join_spans<I,T>(iter:I)->Span
where
    I:IntoIterator<Item=T>,
    T:Spanned,
{
    let call_site=Span::call_site();
    let mut iter=iter.into_iter();
    let first:Span=match iter.next() {
        Some(x)=>x.span(),
        None=>return call_site,
    };
    iter.fold(first,|l,r| l.join(r.span()).unwrap_or(call_site) )
}
////////////////////////////////////////////////////////////////////////////////
// Placeholder identifier for error-recovery paths.
// `inline(never)` keeps this cold helper out of callers' code.
#[inline(never)]
pub fn dummy_ident()->syn::Ident{
    syn::Ident::new("DUMMY_IDENT",Span::call_site())
}
////////////////////////////////////////////////////////////////////////////////
/// Builds a `syn::Type` that is just the bare path `ident`.
pub fn type_from_ident(ident: syn::Ident) -> syn::Type {
    syn::Type::Path(syn::TypePath {
        qself: None,
        path: syn::Path::from(ident),
    })
}
/// Builds a `syn::Expr` that is just the bare path `ident`.
pub fn expr_from_ident(ident:syn::Ident)->syn::Expr{
    syn::Expr::Path(syn::ExprPath{
        attrs:Vec::new(),
        qself:None,
        path:syn::Path::from(ident),
    })
}
/// Used to tokenize an integer without a type suffix
/// (i.e. it renders as `5`, not `5u64`).
pub fn expr_from_int(int:u64)->syn::Expr{
    // `u64_unsuffixed` is what guarantees the absence of a suffix.
    let x=proc_macro2::Literal::u64_unsuffixed(int);
    let x=syn::LitInt::from(x);
    let x=syn::Lit::Int(x);
    let x=syn::ExprLit{attrs:Vec::new(),lit:x};
    syn::Expr::Lit(x)
}
/// Used to tokenize an integer without a type suffix.
/// This one should be cheaper than `expr_from_int`
/// (it skips the `syn::Expr` wrapper layers).
pub fn uint_lit(int:u64)->syn::LitInt{
    let x=proc_macro2::Literal::u64_unsuffixed(int);
    syn::LitInt::from(x)
}
| 23.241135
| 80
| 0.537534
|
be5ed84c5ea22f77f08c79a7d66db17ff06b1aff
| 5,169
|
asm
|
Assembly
|
Transynther/x86/_processed/AVXALIGN/_st_sm_/i9-9900K_12_0xca_notsx.log_21829_1399.asm
|
ljhsiun2/medusa
|
67d769b8a2fb42c538f10287abaf0e6dbb463f0c
|
[
"MIT"
] | 9
|
2020-08-13T19:41:58.000Z
|
2022-03-30T12:22:51.000Z
|
Transynther/x86/_processed/AVXALIGN/_st_sm_/i9-9900K_12_0xca_notsx.log_21829_1399.asm
|
ljhsiun2/medusa
|
67d769b8a2fb42c538f10287abaf0e6dbb463f0c
|
[
"MIT"
] | 1
|
2021-04-29T06:29:35.000Z
|
2021-05-13T21:02:30.000Z
|
Transynther/x86/_processed/AVXALIGN/_st_sm_/i9-9900K_12_0xca_notsx.log_21829_1399.asm
|
ljhsiun2/medusa
|
67d769b8a2fb42c538f10287abaf0e6dbb463f0c
|
[
"MIT"
] | 3
|
2020-07-14T17:07:07.000Z
|
2022-03-21T01:12:22.000Z
|
// Auto-generated (Medusa/Transynther) buffer-preparation routine.
// Copies between helper-thread buffers and performs warm-up accesses;
// the offsets/immediates are fuzzer-chosen, not meaningful constants.
// NOTE(review): behavior inferred from the generator metadata comment
// at the end of the file — confirm against the generator.
.global s_prepare_buffers
s_prepare_buffers:
// Save every register this routine clobbers.
push %r12
push %r9
push %rax
push %rcx
push %rdi
push %rsi
// REP MOVSQ: copy 116 qwords from addresses_normal_ht+0x9fc7
// to addresses_D_ht+0xcfc7.
lea addresses_normal_ht+0x9fc7, %rsi
lea addresses_D_ht+0xcfc7, %rdi
nop
nop
inc %rax
mov $116, %rcx
rep movsq
nop
// Warm-up load from the write-combining helper buffer.
xor %r9, %r9
lea addresses_WC_ht+0x614f, %r12
nop
and $37188, %rcx
mov (%r12), %r9d
add %r12, %r12
// Restore registers in reverse order.
pop %rsi
pop %rdi
pop %rcx
pop %rax
pop %r9
pop %r12
ret
// Auto-generated (Medusa/Transynther) test body: a sequence of loads,
// a REP MOVSQ, and a store targeting the same line, followed by the
// "faulty" load whose observed value indexes into the `oracles` array
// (a cache side-channel probe). Instruction order and the interleaved
// NOPs are part of the generated pattern — do not reorder.
.global s_faulty_load
s_faulty_load:
// Save clobbered registers.
push %r11
push %r8
push %rax
push %rbp
push %rbx
push %rcx
push %rdi
push %rsi
// Load
// Flush then load addresses_RW+0x9da7 to force a memory access.
lea addresses_RW+0x9da7, %r11
clflush (%r11)
add %rbx, %rbx
mov (%r11), %ebp
nop
nop
nop
nop
nop
add %rbp, %rbp
// REPMOV
// Copy 16 qwords into addresses_normal+0x167c7 (same line the
// faulty load below reads).
mov $0x9c7, %rsi
lea addresses_normal+0x167c7, %rdi
nop
nop
nop
nop
nop
xor $54013, %rbp
mov $16, %rcx
rep movsq
nop
nop
nop
nop
nop
sub %rcx, %rcx
// Store
// 16-byte store of the marker pattern 0x51525354_55565758 to the
// same address.
lea addresses_normal+0x167c7, %rcx
nop
nop
nop
nop
inc %rsi
mov $0x5152535455565758, %rdi
movq %rdi, %xmm2
movups %xmm2, (%rcx)
nop
xor $56468, %rdi
// Faulty Load
// Read the probed address; the low byte of the loaded value selects
// one of 256 pages in `oracles` (shift by 12 = page size).
lea addresses_normal+0x167c7, %r11
sub $48485, %rbx
mov (%r11), %ecx
lea oracles, %rax
and $0xff, %rcx
shlq $12, %rcx
mov (%rax,%rcx,1), %rcx
// Restore registers in reverse order.
pop %rsi
pop %rdi
pop %rcx
pop %rbx
pop %rbp
pop %rax
pop %r8
pop %r11
ret
/*
<gen_faulty_load>
[REF]
{'OP': 'LOAD', 'src': {'same': False, 'type': 'addresses_normal', 'NT': False, 'AVXalign': False, 'size': 4, 'congruent': 0}}
{'OP': 'LOAD', 'src': {'same': False, 'type': 'addresses_RW', 'NT': False, 'AVXalign': False, 'size': 4, 'congruent': 2}}
{'OP': 'REPM', 'src': {'same': False, 'congruent': 6, 'type': 'addresses_P'}, 'dst': {'same': True, 'congruent': 0, 'type': 'addresses_normal'}}
{'OP': 'STOR', 'dst': {'same': True, 'type': 'addresses_normal', 'NT': False, 'AVXalign': False, 'size': 16, 'congruent': 0}}
[Faulty Load]
{'OP': 'LOAD', 'src': {'same': True, 'type': 'addresses_normal', 'NT': True, 'AVXalign': False, 'size': 4, 'congruent': 0}}
<gen_prepare_buffer>
{'OP': 'REPM', 'src': {'same': False, 'congruent': 8, 'type': 'addresses_normal_ht'}, 'dst': {'same': False, 'congruent': 10, 'type': 'addresses_D_ht'}}
{'OP': 'LOAD', 'src': {'same': False, 'type': 'addresses_WC_ht', 'NT': False, 'AVXalign': False, 'size': 4, 'congruent': 0}}
{'58': 21829}
58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 
58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58 58
*/
| 43.805085
| 2,999
| 0.659315
|
72d7ed02a7843bc5983430b2aa253bbe3db9d912
| 1,414
|
sql
|
SQL
|
Learn/SQL/Preliminary/Fundamental SQL with SELECT Statement/Fundamental SQL with SELECT Statement.sql
|
IrvanKurnia213/DQLab
|
13469ea4fba29228ac04ce64a9b9a2adeeaf14d1
|
[
"MIT"
] | 22
|
2021-04-06T02:20:44.000Z
|
2022-03-23T11:47:26.000Z
|
Learn/SQL/Preliminary/Fundamental SQL with SELECT Statement/Fundamental SQL with SELECT Statement.sql
|
IrvanKurnia213/DQLab
|
13469ea4fba29228ac04ce64a9b9a2adeeaf14d1
|
[
"MIT"
] | 1
|
2021-02-08T04:58:25.000Z
|
2021-02-08T04:58:25.000Z
|
Learn/SQL/Preliminary/Fundamental SQL with SELECT Statement/Fundamental SQL with SELECT Statement.sql
|
IrvanKurnia213/DQLab
|
13469ea4fba29228ac04ce64a9b9a2adeeaf14d1
|
[
"MIT"
] | 50
|
2021-03-31T10:32:55.000Z
|
2022-03-15T11:04:35.000Z
|
USE dqlab;
-- The SELECT statement [2/2]: every column, every row.
SELECT
    *
FROM
    ms_produk;
-- Selecting a single column from a table.
SELECT
    nama_produk
FROM
    ms_produk;
-- Selecting more than one column.
SELECT
    nama_produk,
    harga
FROM
    ms_produk;
-- Limiting the number of rows returned.
SELECT
    nama_produk
FROM
    ms_produk
LIMIT
    3;
-- Using a table-name prefix on a column.
SELECT
    ms_produk.kode_produk
FROM
    ms_produk;
-- Using aliases on columns.
SELECT
    no_urut AS nomor,
    nama_produk AS nama
FROM
    ms_produk;
-- Omitting the 'AS' keyword (implicit alias).
SELECT
    no_urut nomor,
    nama_produk nama
FROM
    ms_produk;
-- Combining a table prefix with a column alias.
SELECT
    ms_produk.harga AS harga_jual
FROM
    ms_produk;
-- Using an alias on the table itself.
SELECT
    *
FROM
    ms_produk t2;
-- Prefixing columns with the table alias.
SELECT
    t2.nama_produk,
    t2.harga
FROM
    ms_produk t2;
-- Filtering with WHERE (LIKE with no wildcards behaves as an exact match).
SELECT
    *
FROM
    ms_produk
WHERE
    nama_produk LIKE 'Tas Travel Organizer DQLab';
-- Using the OR operator.
SELECT
    *
FROM
    ms_produk
WHERE
    nama_produk = 'Gantungan Kunci DQLab'
    OR nama_produk = 'Tas Travel Organizer DQLab'
    OR nama_produk = 'Flashdisk DQLab 64 GB';
-- Filtering on a numeric column.
SELECT
    *
FROM
    ms_produk
WHERE
    harga > 50000;
-- Using the AND operator.
SELECT
    *
FROM
    ms_produk
WHERE
    nama_produk = 'Gantungan Kunci DQLab'
    AND harga < 50000;
| 13.862745
| 50
| 0.693777
|
4feeaf08a3a1cd49f430a14acd8025a6b999ddaf
| 1,157
|
lua
|
Lua
|
tests/lib/charon/dispatcher/dispatchController.lua
|
lazaroness/charonplatform
|
aeee2a803f86995b48c1b5a151b964e41ee7f602
|
[
"BSD-3-Clause"
] | 1
|
2018-02-22T01:09:18.000Z
|
2018-02-22T01:09:18.000Z
|
tests/lib/charon/dispatcher/dispatchController.lua
|
lazaroness/charonplatform
|
aeee2a803f86995b48c1b5a151b964e41ee7f602
|
[
"BSD-3-Clause"
] | null | null | null |
tests/lib/charon/dispatcher/dispatchController.lua
|
lazaroness/charonplatform
|
aeee2a803f86995b48c1b5a151b964e41ee7f602
|
[
"BSD-3-Clause"
] | null | null | null |
local json = require('charon.json')
local dispatcher = require('charon.dispatcher')
local test = {}
package.path = package.path .. ';util/?.lua'
test.beforeAll = function()
  -- Silence the dispatcher's response output so tests can inspect
  -- the returned status/headers/body without writing anywhere.
  dispatcher.output = function() end
end
-- No teardown required; kept for the test-runner's lifecycle contract.
test.afterAll = function()
end
-- Dispatching a path whose action does not exist must yield HTTP 500
-- with an empty headers table and an explanatory body.
test.should_return_error_if_action_not_found = function()
  local fakeEnv = {
    requestPath = function()
      return "/order/unknow"
    end,
  }
  dispatcher.prefix = ""
  local status, headers, body = dispatcher.dispatchController(fakeEnv)
  assert( status == 500, status )
  assert( #headers == 0, json.encode(headers) )
  assert( type(headers) == 'table', json.encode(headers) )
  assert( body:contains('action: "unknowAction" not found') == true, body )
end
-- Dispatching an existing action must yield HTTP 200, an HTML
-- content-type header, and the action's rendered body.
test.should_return_error_if_action_save = function()
  local fakeEnv = {
    requestPath = function()
      return "/order/save"
    end,
  }
  dispatcher.prefix = ""
  local status, headers, body = dispatcher.dispatchController(fakeEnv)
  assert( status == 200, status )
  assert( headers[1] == 'Content-Type: text/html; charset=utf-8', headers[1] )
  assert( type(headers) == 'table', json.encode(headers) )
  assert( body == 'save !', body )
end
return test
| 27.547619
| 78
| 0.689715
|
a48c1d5873214eee54c35b7d89ba291c2dca5e08
| 40,209
|
dart
|
Dart
|
lib/avl_tree.dart
|
LBXjixiangniao/dart_tree
|
fece33c9904c499cfe5bdb09a0ccf9123e5e63ee
|
[
"MIT"
] | 1
|
2020-12-14T02:16:33.000Z
|
2020-12-14T02:16:33.000Z
|
lib/avl_tree.dart
|
LBXjixiangniao/dart_tree
|
fece33c9904c499cfe5bdb09a0ccf9123e5e63ee
|
[
"MIT"
] | null | null | null |
lib/avl_tree.dart
|
LBXjixiangniao/dart_tree
|
fece33c9904c499cfe5bdb09a0ccf9123e5e63ee
|
[
"MIT"
] | null | null | null |
part of 'dart_tree.dart';
/// AVL tree node.
/**
 * A node's balance factor is defined as the height difference
 * (right-subtree height minus left-subtree height).
 * A binary search tree is an AVL tree when every node's balance
 * factor lies in {-1, 0, 1}.
 * factor < 0 means "left-heavy", factor > 0 means "right-heavy",
 * factor == 0 is sometimes just called "balanced".
 */
class _AVLTreeNode<K, Node extends _AVLTreeNode<K, Node>> extends _BinaryTreeNode<K, Node> {
  /// Balance factor; a freshly created node has no subtrees, so it is 0.
  int factor = 0;
  _AVLTreeNode(K key) : super(key);
  _AVLTreeNode<K, Node> copy() {
    return _AVLTreeNode<K, Node>(key);
  }
  @override
  int get height {
    // NOTE(review): `this == null` is never true inside an instance
    // getter, so this guard is dead code — confirm and remove.
    if (this == null) return 0;
    // The taller child is on the side the balance factor points to,
    // so only that side needs to be visited.
    if (factor > 0) {
      return 1 + (right?.height ?? 0);
    } else {
      return 1 + (left?.height ?? 0);
    }
  }
  @override
  // Debug rendering: key suffixed with '+'/'-' for right-/left-heavy.
  String get debugString {
    if (factor > 0) {
      return '$key+';
    } else if (factor < 0) {
      return '$key-';
    } else {
      return key.toString();
    }
  }
}
/// Node type for the AVL-tree-backed Set (key only, no value).
class _AVLTreeSetNode<K> extends _AVLTreeNode<K, _AVLTreeSetNode<K>> {
  _AVLTreeSetNode(K key) : super(key);
  _AVLTreeSetNode<K> copy() {
    return _AVLTreeSetNode<K>(key);
  }
}
/// Node type for the AVL-tree-backed Map:
/// an [_AVLTreeNode] that additionally carries a value.
class _AVLTreeMapNode<K, V> extends _AVLTreeNode<K, _AVLTreeMapNode<K, V>> {
  V value;
  _AVLTreeMapNode(K key, this.value) : super(key);
  _AVLTreeMapNode<K, V> copy() {
    return _AVLTreeMapNode<K, V>(key, value);
  }
  @override
  void replaceWith(_AVLTreeMapNode<K, V> node) {
    // Copy the value along with whatever the base class copies.
    super.replaceWith(node);
    value = node?.value;
  }
}
/// AVL树实现
abstract class _AVLTree<K, Node extends _AVLTreeNode<K, Node>> {
// AVL树根节点
Node get _root;
set _root(Node newValue);
/// AVL书中元素个数
int _count = 0;
/// 每次增删都会加1,用来识别并发修改
int _modificationCount = 0;
/// 用于比较
Comparator<K> get _compare;
/// 判断是否key有效
_Predicate get _validKey;
///是否调试模式
///调试模式下增、删后会打印出整个AVL树,且搜索的时候会打印出查找路径
bool debug = false;
ValueChanged<String> debugPrintMethod;
void debugPrint(String str) {
if(debugPrintMethod != null) {
debugPrintMethod.call(str);
}
else {
print(str);
}
}
///插入
///node:新插入的节点
///root:指定查找的根结点,如果root不为null,则node会插入在root的子树上
///replaceIfExist:如果存在与node的key相等的节点,则通过replaceIfExist判断是否用node代替已有节点,
///如果replaceIfExist不为null且返回true则代替,否则不代替
void _insert(Node node, {Node root, _ReplaceCheck<Node> replaceIfExist}) {
if (node == null) return;
String searchPath = 'SearchPath:';
assert(() {
if (debug) debugPrint('Insert:${node.key}**********************************\n');
return true;
}());
if (_root == null) {
_root = node;
_count++;
} else {
var compare = _compare;
int comp;
void add(Node parent) {
assert(() {
if (debug) searchPath += '->${parent.key}';
return true;
}());
comp = compare(node.key, parent.key);
if (comp == 0) {
if (replaceIfExist?.call(parent, node) == true) {
///用node替换parent
_replaceNode(parent, node);
}
} else if (comp < 0) {
if (parent.left != null) {
add(parent.left);
} else {
parent.left = node;
node.parent = parent;
_count++;
}
} else {
if (parent.right != null) {
add(parent.right);
} else {
parent.right = node;
node.parent = parent;
_count++;
}
}
}
add(root ?? _root);
_rebalanceForInsert(node);
}
_modificationCount++;
assert(() {
if (debug) debugPrint(searchPath);
return true;
}());
assert(() {
if (debug) debugPrint('TreeStructure:${treeStructureString()}');
return true;
}());
assert(() {
if (debug) debugPrint('End Insert:${node.key}**********************************\n');
return true;
}());
}
///Z刚通过_insert方法插入的节点
void _rebalanceForInsert(Node Z) {
assert(() {
if (debug) debugPrint('RebalanceForInsert:${Z.key.toString()}');
return true;
}());
Node G;
Node N;
for (Node X = Z.parent; X != null; X = Z.parent) {
// Loop (possibly up to the root)
// BalanceFactor(X) has to be updated:
if (Z == X.right) {
// The right subtree increases
if (X.factor > 0) {
// X is right-heavy
// ===> the temporary BalanceFactor(X) == +2
// ===> rebalancing is required.
G = X.parent; // Save parent of X around rotations
if (Z.factor < 0) // Right Left Case (see figure 5)
N = _rotateRightLeft(X, Z); // Double rotation: Right(Z) then Left(X)
else // Right Right Case (see figure 4)
N = _rotateLeft(X, Z); // Single rotation Left(X)
// After rotation adapt parent link
} else {
if (X.factor < 0) {
X.factor = 0; // Z’s height increase is absorbed at X.
break; // Leave the loop
}
X.factor = 1;
Z = X; // Height(Z) increases by 1
continue;
}
} else {
// Z == left_child(X): the left subtree increases
if (X.factor < 0) {
// X is left-heavy
// ===> the temporary BalanceFactor(X) == –2
// ===> rebalancing is required.
G = X.parent; // Save parent of X around rotations
if (Z.factor > 0) // Left Right Case
N = _rotateLeftRight(X, Z); // Double rotation: Left(Z) then Right(X)
else // Left Left Case
N = _rotateRight(X, Z); // Single rotation Right(X)
// After rotation adapt parent link
} else {
if (X.factor > 0) {
X.factor = 0; // Z’s height increase is absorbed at X.
break; // Leave the loop
}
X.factor = -1;
Z = X; // Height(Z) increases by 1
continue;
}
}
// After a rotation adapt parent link:
// N is the new root of the rotated subtree
// Height does not change: Height(N) == old Height(X)
N.parent = G;
if (G != null) {
if (X == G.left)
G.left = N;
else
G.right = N;
} else
_root = N; // N is the new root of the total tree
break;
// There is no fall thru, only break; or continue;
}
// Unless loop is left via break, the height of the total tree increases by 1.
}
///删除
///key: 需要删除的节点的key值
///root:指定查找的根结点,如果root不为null,则会从root开始查找key删除node
Node _delete(K key, {Node root}) {
String searchPath = 'SearchPath:';
assert(() {
if (debug) debugPrint('Delete:$key**********************************\n');
return true;
}());
///用newNode代替oldNode在oldNode.parent中的位置,_delete方法中只有此处才可能导致子树高度减一
///所以在此方法中判断是否需要重新平衡即可
void replaceNodeInParent(Node oldNode, Node newNode) {
void replace() {
newNode?.parent = oldNode.parent;
///删除oldNode
if (oldNode.parent?.left == oldNode) {
oldNode.parent.left = newNode;
} else if (oldNode.parent?.right == oldNode) {
oldNode.parent.right = newNode;
}
}
if (oldNode == null) return;
if (oldNode == _root) {
_root = newNode;
} else {
///进行平衡
if (newNode != null) {
///删除oldNode
replace();
if (newNode.parent.factor != 0) {
_rebalanceForDelete(newNode);
} else if (newNode.parent?.left == newNode) {
newNode.parent.factor = 1;
} else if (newNode.parent?.right == newNode) {
newNode.parent.factor = -1;
}
} else {
///先不删除oldNode,将其认为是树高度减1用于做平衡先,
///平衡完了再删除oldNode
_rebalanceForDelete(oldNode);
///删除oldNode
replace();
}
}
_count--;
_modificationCount++;
}
if (_root == null || key == null)
return null;
else {
var compare = _compare;
int comp;
Node remove(Node parent) {
if (parent == null) return null;
assert(() {
if (debug) searchPath += '->${parent.key}';
return true;
}());
comp = compare(key, parent.key);
if (comp == 0) {
if (parent.left != null && parent.right != null) {
Node min = _findMin(root: parent.right);
bool isDebug = debug;
assert(() {
debug = false;
return true;
}());
_delete(min.key, root: parent.right);
assert(() {
debug = isDebug;
return true;
}());
_replaceNode(parent, min);
} else if (parent.left != null) {
replaceNodeInParent(parent, parent.left);
} else if (parent.right != null) {
replaceNodeInParent(parent, parent.right);
} else {
replaceNodeInParent(parent, null);
}
return parent;
} else if (comp < 0) {
return remove(parent.left);
} else {
return remove(parent.right);
}
}
Node deletedNode = remove(root ?? _root);
assert(() {
if (debug) debugPrint(searchPath);
return true;
}());
assert(() {
if (debug) debugPrint('TreeStructure:${treeStructureString()}');
return true;
}());
assert(() {
if (debug) debugPrint('End Delete:$key**********************************\n');
return true;
}());
return deletedNode?.copy();
}
}
///node为跟的子树高度降低了1,且N是已经已经平衡的AVL子树
void _rebalanceForDelete(Node N) {
assert(() {
if (debug) debugPrint('RebalanceForDelete:${N.key.toString()}');
return true;
}());
Node G;
Node Z;
int b;
for (Node X = N.parent; X != null; X = G) {
// Loop (possibly up to the root)
G = X.parent; // Save parent of X around rotations
// BalanceFactor(X) has not yet been updated!
if (N == X.left) {
// the left subtree decreases
if (X.factor > 0) {
// X is right-heavy
// ===> the temporary BalanceFactor(X) == +2
// ===> rebalancing is required.
Z = X.right; // Sibling of N (higher by 2)
b = Z.factor;
if (b < 0) // Right Left Case (see figure 5)
N = _rotateRightLeft(X, Z); // Double rotation: Right(Z) then Left(X)
else // Right Right Case (see figure 4)
N = _rotateLeft(X, Z); // Single rotation Left(X)
// After rotation adapt parent link
} else {
if (X.factor == 0) {
X.factor = 1; // N’s height decrease is absorbed at X.
break; // Leave the loop
}
N = X;
N.factor = 0; // Height(N) decreases by 1
continue;
}
} else {
// (N == right_child(X)): The right subtree decreases
if (X.factor < 0) {
// X is left-heavy
// ===> the temporary BalanceFactor(X) == –2
// ===> rebalancing is required.
Z = X.left; // Sibling of N (higher by 2)
b = Z.factor;
if (b > 0) // Left Right Case
N = _rotateLeftRight(X, Z); // Double rotation: Left(Z) then Right(X)
else // Left Left Case
N = _rotateRight(X, Z); // Single rotation Right(X)
// After rotation adapt parent link
} else {
if (X.factor == 0) {
X.factor = -1; // N’s height decrease is absorbed at X.
break; // Leave the loop
}
N = X;
N.factor = 0; // Height(N) decreases by 1
continue;
}
}
// After a rotation adapt parent link:
// N is the new root of the rotated subtree
N.parent = G;
if (G != null) {
if (X == G.left)
G.left = N;
else
G.right = N;
} else
_root = N; // N is the new root of the total tree
if (b == 0) break; // Height does not change: Leave the loop
// Height(N) decreases by 1 (== old Height(X)-1)
}
// If (b != 0) the height of the total tree decreases by 1.
}
  /// Replaces [oldNode]'s contents with [newNode]'s; parent/left/right
  /// links stay those of [oldNode] (see `replaceWith` on the node type).
  void _replaceNode(Node oldNode, Node newNode) {
    assert(() {
      if (debug) debugPrint('Replace ${oldNode.key.toString()} with ${newNode.key.toString()}\n');
      return true;
    }());
    if (oldNode == null) return;
    oldNode.replaceWith(newNode);
  }
  /// Finds the node whose key compares equal to [key], or `null`.
  /// [root]: if non-null, the search starts from [root] instead of the
  /// tree root. Throws [ConcurrentModificationError] if the tree is
  /// modified while searching.
  Node _search(K key, {Node root}) {
    String searchPath = 'SearchPath:';
    assert(() {
      if (debug) debugPrint('Search:**********************************\n');
      return true;
    }());
    if (_root == null || key == null)
      return null;
    else {
      var compare = _compare;
      int comp;
      // Snapshot to detect concurrent structural modification.
      int initialModificationCount = _modificationCount;
      Node searchRecursively(Node parent) {
        if (parent == null) return null;
        assert(() {
          if (debug) searchPath += '->${parent.key}';
          return true;
        }());
        if (initialModificationCount != _modificationCount) {
          throw ConcurrentModificationError(this);
        }
        // Standard BST descent: equal → found, less → left, greater → right.
        comp = compare(key, parent.key);
        if (comp == 0) {
          return parent;
        } else if (comp < 0) {
          return searchRecursively(parent.left);
        } else {
          return searchRecursively(parent.right);
        }
      }
      Node resultNode = searchRecursively(root ?? _root);
      assert(() {
        if (debug) debugPrint(searchPath);
        return true;
      }());
      return resultNode;
    }
  }
///单次左旋,对应于右右情况
/**
* X
* / \
* t1 Z
* / \
* t23 t4
*
* Z
* / \
* X t4
* / \
* t1 t23
*
*/
Node _rotateLeft(Node X, Node Z) {
assert(() {
if (debug) debugPrint('RotateLeft:${X.key.toString()},${Z.key.toString()}\n');
return true;
}());
// Z is by 2 higher than its sibling
Node t23 = Z.left; // Inner child of Z
X.right = t23;
if (t23 != null) t23.parent = X;
Z.left = X;
X.parent = Z;
// 1st case, BalanceFactor(Z) == 0, only happens with deletion, not insertion:
if (Z.factor == 0) {
// t23 has been of same height as t4
X.factor = 1; // t23 now higher
Z.factor = -1; // t4 now lower than X
} else {
// 2nd case happens with insertion or deletion:
X.factor = 0;
Z.factor = 0;
}
return Z; // return new root of rotated subtree
}
///单次右旋,对应于左左情况
/**
* X
* / \
* Z t4
* / \
* t1 t23
*
* Z
* / \
* t1 X
* / \
* t23 t4
*
*/
Node _rotateRight(Node X, Node Z) {
assert(() {
if (debug) debugPrint('RotateRight:${X.key.toString()},${Z.key.toString()}\n');
return true;
}());
// Z is by 2 higher than its sibling
Node t23 = Z.right; // Inner child of Z
X.left = t23;
if (t23 != null) t23.parent = X;
Z.right = X;
X.parent = Z;
// 1st case, BalanceFactor(Z) == 0, only happens with deletion, not insertion:
if (Z.factor == 0) {
// t23 has been of same height as t4
X.factor = -1; // t23 now higher
Z.factor = 1; // t4 now lower than X
} else {
// 2nd case happens with insertion or deletion:
X.factor = 0;
Z.factor = 0;
}
return Z; // return new root of rotated subtree
}
///对应于右左情况的旋转,Z的高度比t1高2。t1、t2、t3、t4中,(1)t2或者t3比其他三个高度小1,(2)所有等高。
/**
* X
* / \
* t1 Z
* / \
* Y t4
* / \
* t2 t3
*
* X
* / \
* t1 Y
* / \
* t2 Z
* / \
* t3 t4
*
* Y
* / \
* X Z
* / \ / \
* t1 t2 t3 t4
*
*/
Node _rotateRightLeft(Node X, Node Z) {
assert(() {
if (debug) debugPrint('RotateRightLeft:${X.key.toString()},${Z.key.toString()}\n');
return true;
}());
// Z is by 2 higher than its sibling
Node Y = Z.left; // Inner child of Z
// Y is by 1 higher than sibling
Node t3 = Y.right;
Z.left = t3;
if (t3 != null) t3.parent = Z;
Y.right = Z;
Z.parent = Y;
Node t2 = Y.left;
X.right = t2;
if (t2 != null) t2.parent = X;
Y.left = X;
X.parent = Y;
///修正平衡因子
if (Y.factor > 0) {
// t3 was higher
X.factor = -1; // t1 now higher
Z.factor = 0;
} else if (Y.factor == 0) {
//t2、t3等高
X.factor = 0;
Z.factor = 0;
} else {
// t2 was higher
X.factor = 0;
Z.factor = 1; // t4 now higher
}
Y.factor = 0;
return Y; // return new root of rotated subtree
}
///对应于左右情况的旋转,Z的高度比t4高2。t1、t2、t3、t4中,(1)t2或者t3比其他三个高度小1,(2)所有等高。
/**
* X
* / \
* Z t4
* / \
* t1 Y
* / \
* t2 t3
*
* X
* / \
* Y t4
* / \
* Z t3
* / \
* t1 t2
*
* Y
* / \
* Z X
* / \ / \
* t1 t2 t3 t4
*
*/
Node _rotateLeftRight(Node X, Node Z) {
assert(() {
if (debug) debugPrint('RotateLeftRight:${X.key.toString()},${Z.key.toString()}\n');
return true;
}());
// Z is by 2 higher than its sibling
Node Y = Z.right; // Inner child of Z
// Y is by 1 higher than sibling
Node t2 = Y.left;
Z.right = t2;
if (t2 != null) t2.parent = Z;
Y.left = Z;
Z.parent = Y;
Node t3 = Y.right;
X.left = t3;
if (t3 != null) t3.parent = X;
Y.right = X;
X.parent = Y;
///修正平衡因子
if (Y.factor > 0) {
// t3 was higher
Z.factor = -1; // t1 now higher
X.factor = 0;
} else if (Y.factor == 0) {
//t2、t3等高
X.factor = 0;
Z.factor = 0;
} else {
// t2 was higher
Z.factor = 0;
X.factor = 1; // t4 now higher
}
Y.factor = 0;
return Y; // return new root of rotated subtree
}
  /// Finds the minimum-key node (the leftmost node).
  /// [root]: if non-null, the search starts from [root] instead of the
  /// tree root. Returns `null` for an empty (sub)tree.
  Node _findMin({Node root}) {
    Node minNode = root ?? _root;
    if (minNode == null) return minNode;
    while (minNode.left != null) {
      minNode = minNode.left;
    }
    return minNode;
  }
///查找最大值
///root:指定查找的根结点,如果root不为null,则会从root开始查找
Node _findMax({Node root}) {
Node minNode = root ?? _root;
if (minNode == null) return minNode;
while (minNode.right != null) {
minNode = minNode.right;
}
return minNode;
}
  // Smallest-key node, or `null` when the tree is empty.
  Node get _first {
    return _findMin();
  }
  // Largest-key node, or `null` when the tree is empty.
  Node get _last {
    return _findMax();
  }
  /// Get the last node in the tree that is strictly smaller than [key].
  /// Returns `null` if no such node exists.
  Node _lastBefore(K key) {
    if (key == null) throw ArgumentError(key);
    if (_root == null) return null;
    var compare = _compare;
    int comp;
    // Snapshot to detect concurrent structural modification.
    int initialModificationCount = _modificationCount;
    Node resultNode;
    void searchRecursively(Node parent) {
      if (parent == null) return;
      if (initialModificationCount != _modificationCount) {
        throw ConcurrentModificationError(this);
      }
      comp = compare(key, parent.key);
      if (comp == 0) {
        // Equal keys are not "strictly smaller" — keep the best so far.
        return;
      } else if (comp < 0) {
        return searchRecursively(parent.left);
      } else {
        // parent.key < key: candidate answer; a closer one may be right.
        resultNode = parent;
        return searchRecursively(parent.right);
      }
    }
    searchRecursively(_root);
    return resultNode;
  }
  /// Get the first node in the tree that is strictly larger than [key].
  /// Returns `null` if no such node exists.
  Node _firstAfter(K key) {
    if (key == null) throw ArgumentError(key);
    if (_root == null) return null;
    var compare = _compare;
    int comp;
    // Snapshot to detect concurrent structural modification.
    int initialModificationCount = _modificationCount;
    Node resultNode;
    void searchRecursively(Node parent) {
      if (parent == null) return;
      if (initialModificationCount != _modificationCount) {
        throw ConcurrentModificationError(this);
      }
      comp = compare(key, parent.key);
      if (comp == 0) {
        // Equal keys are not "strictly larger" — keep the best so far.
        return;
      } else if (comp < 0) {
        // parent.key > key: candidate answer; a closer one may be left.
        resultNode = parent;
        return searchRecursively(parent.left);
      } else {
        return searchRecursively(parent.right);
      }
    }
    searchRecursively(_root);
    return resultNode;
  }
  // Drops every node and bumps the modification counter so that
  // in-flight iterations/searches fail fast.
  void _clear() {
    _root = null;
    _count = 0;
    _modificationCount++;
  }
  // Pretty-printed tree layout, used by the debug logging above.
  String treeStructureString() {
    return _BinaryTreePrinter.treeStructureString(_root);
  }
  /// Test-only invariant check: verifies BST ordering, the AVL height
  /// constraint, and that every stored balance factor matches the
  /// actual subtree heights. Returns `true` when the tree is valid.
  @visibleForTesting
  bool check() {
    /// Returns the subtree height, or `null` when any invariant fails:
    /// node.left.key < node.key < node.right.key must hold, the height
    /// difference of node.left and node.right must be at most 1, and
    /// node.factor must equal (rightHeight - leftHeight).
    int checkNode(Node node) {
      if (node == null) {
        return 0;
      } else {
        // Binary-search-tree ordering rule.
        bool result = true;
        Node left = node.left;
        Node right = node.right;
        if (left != null && _compare(left.key, node.key) >= 0) {
          result = false;
        }
        if (result && right != null && _compare(right.key, node.key) <= 0) {
          result = false;
        }
        // Height difference at most 1, and the balance factor is correct.
        if (result) {
          int leftHeight = checkNode(node.left);
          if (leftHeight == null) return null;
          int rightHeight = checkNode(node.right);
          if (rightHeight == null) return null;
          if ((leftHeight - rightHeight).abs() > 1) return null;
          if (leftHeight == rightHeight && node.factor != 0) return null;
          if (leftHeight > rightHeight && node.factor != -1) return null;
          if (leftHeight < rightHeight && node.factor != 1) return null;
          return max(rightHeight, leftHeight) + 1;
        } else {
          return null;
        }
      }
    }
    return checkNode(_root) != null;
  }
}
/// A [Map] of objects that can be ordered relative to each other.
///
/// The map is based on a AVL tree. It allows most operations
/// in amortized logarithmic time.
///
/// Keys of the map are compared using the `compare` function passed in
/// the constructor, both for ordering and for equality.
/// If the map contains only the key `a`, then `map.containsKey(b)`
/// will return `true` if and only if `compare(a, b) == 0`,
/// and the value of `a == b` is not even checked.
/// If the compare function is omitted, the objects are assumed to be
/// [Comparable], and are compared using their [Comparable.compareTo] method.
/// Non-comparable objects (including `null`) will not work as keys
/// in that case.
///
/// To allow calling [operator []], [remove] or [containsKey] with objects
/// that are not supported by the `compare` function, an extra `isValidKey`
/// predicate function can be supplied. This function is tested before
/// using the `compare` function on an argument value that may not be a [K]
/// value. If omitted, the `isValidKey` function defaults to testing if the
/// value is a [K].
class AVLTreeMap<K, V> extends _AVLTree<K, _AVLTreeMapNode<K, V>> with MapMixin<K, V> {
  // Concrete storage for the _AVLTree base class.
  _AVLTreeMapNode<K, V> _root;
  // Orders keys and defines key equality (compare(a, b) == 0).
  Comparator<K> _compare;
  // Pre-filters arguments to [], remove and containsKey that may not be K.
  _Predicate _validKey;

  /// Creates a map ordered by [compare]; defaults to the natural
  /// [Comparable] ordering and an `is K` key test.
  AVLTreeMap([int Function(K key1, K key2) compare, bool Function(dynamic potentialKey) isValidKey])
      : _compare = compare ?? _defaultCompare<K>(),
        _validKey = isValidKey ?? ((dynamic v) => v is K);

  /// Creates a [AVLTreeMap] that contains all key/value pairs of [other].
  ///
  /// The keys must all be instances of [K] and the values of [V].
  /// The [other] map itself can have any type.
  factory AVLTreeMap.from(Map<dynamic, dynamic> other,
      [int Function(K key1, K key2) compare, bool Function(dynamic potentialKey) isValidKey]) {
    if (other is Map<K, V>) {
      return AVLTreeMap<K, V>.of(other, compare, isValidKey);
    }
    AVLTreeMap<K, V> result = AVLTreeMap<K, V>(compare, isValidKey);
    other.forEach((dynamic k, dynamic v) {
      result[k] = v;
    });
    return result;
  }

  /// Creates a [AVLTreeMap] that contains all key/value pairs of [other].
  factory AVLTreeMap.of(Map<K, V> other,
          [int Function(K key1, K key2) compare, bool Function(dynamic potentialKey) isValidKey]) =>
      AVLTreeMap<K, V>(compare, isValidKey)..addAll(other);

  /// Creates a [AVLTreeMap] where the keys and values are computed from the
  /// [iterable].
  ///
  /// For each element of the [iterable] this constructor computes a key/value
  /// pair, by applying [key] and [value] respectively.
  ///
  /// The keys of the key/value pairs do not need to be unique. The last
  /// occurrence of a key will simply overwrite any previous value.
  ///
  /// If no functions are specified for [key] and [value] the default is to
  /// use the iterable value itself.
  factory AVLTreeMap.fromIterable(Iterable iterable,
      {K Function(dynamic element) key,
      V Function(dynamic element) value,
      int Function(K key1, K key2) compare,
      bool Function(dynamic potentialKey) isValidKey}) {
    AVLTreeMap<K, V> map = AVLTreeMap<K, V>(compare, isValidKey);
    _CustomMapBase.fillMapWithMappedIterable(map, iterable, key, value);
    return map;
  }

  /// Creates a [AVLTreeMap] associating the given [keys] to [values].
  ///
  /// This constructor iterates over [keys] and [values] and maps each element
  /// of [keys] to the corresponding element of [values].
  ///
  /// If [keys] contains the same object multiple times, the last occurrence
  /// overwrites the previous value.
  ///
  /// It is an error if the two [Iterable]s don't have the same length.
  factory AVLTreeMap.fromIterables(Iterable<K> keys, Iterable<V> values,
      [int Function(K key1, K key2) compare, bool Function(dynamic potentialKey) isValidKey]) {
    AVLTreeMap<K, V> map = AVLTreeMap<K, V>(compare, isValidKey);
    _CustomMapBase.fillMapWithIterables(map, keys, values);
    return map;
  }

  /// Returns the value for [key], or `null` when [key] is absent or is
  /// rejected by the `isValidKey` predicate.
  V operator [](Object key) {
    if (!_validKey(key)) return null;
    return _search(key)?.value;
  }

  /// Removes [key] and returns its value, or `null` when absent/invalid.
  V remove(Object key) {
    if (!_validKey(key)) return null;
    return _delete(key)?.value;
  }

  /// Inserts or overwrites the entry for [key]. `null` keys are rejected.
  void operator []=(K key, V value) {
    if (key == null) throw ArgumentError(key);
    _AVLTreeMapNode<K, V> node = _AVLTreeMapNode<K, V>(key, value);
    _insert(node, replaceIfExist: (_, __) => true);
  }

  /// Returns the value for [key]; when absent, stores and returns
  /// `ifAbsent()`. Throws if the map is mutated while `ifAbsent` runs.
  V putIfAbsent(K key, V ifAbsent()) {
    if (key == null) throw ArgumentError(key);
    _AVLTreeMapNode<K, V> node = _AVLTreeMapNode<K, V>(key, null);
    bool absent = true;
    _insert(
      node,
      replaceIfExist: (oldValue, newValue) {
        // The key already exists: keep the old value and don't replace.
        absent = false;
        node.value = oldValue.value;
        return false;
      },
    );
    if (absent) {
      int modificationCount = _modificationCount;
      node.value = ifAbsent();
      if (modificationCount != _modificationCount) {
        throw ConcurrentModificationError(this);
      }
    }
    return node.value;
  }

  /// Copies every entry of [other] into this map (overwriting duplicates).
  void addAll(Map<K, V> other) {
    other.forEach((K key, V value) {
      this[key] = value;
    });
  }

  bool get isEmpty {
    return (_root == null);
  }

  bool get isNotEmpty => !isEmpty;

  /// Visits entries in ascending key order.
  void forEach(void f(K key, V value)) {
    Iterator<_AVLTreeMapNode<K, V>> nodes = _AVLTreeNodeIterator<K, _AVLTreeMapNode<K, V>>(this);
    while (nodes.moveNext()) {
      _AVLTreeMapNode<K, V> node = nodes.current;
      f(node.key, node.value);
    }
  }

  int get length {
    return _count;
  }

  void clear() {
    _clear();
  }

  bool containsKey(Object key) {
    return _validKey(key) && _search(key) != null;
  }

  /// Whether any entry's value equals [value] (via `==`). O(n); throws if
  /// the map is mutated during the scan.
  bool containsValue(Object value) {
    int initialModificationCount = _modificationCount;
    bool visit(_AVLTreeMapNode<K, V> node) {
      while (node != null) {
        if (node.value == value) return true;
        if (initialModificationCount != _modificationCount) {
          throw ConcurrentModificationError(this);
        }
        // Recurse on the right subtree, iterate down the left spine.
        if (node.right != null && visit(node.right)) {
          return true;
        }
        node = node.left;
      }
      return false;
    }
    return visit(_root);
  }

  Iterable<K> get keys => _AVLTreeKeyIterable<K, _AVLTreeMapNode<K, V>>(this);

  Iterable<V> get values => _AVLTreeValueIterable<K, V>(this);

  /// Get the first key in the map. Returns `null` if the map is empty.
  K firstKey() {
    if (_root == null) return null;
    return _first.key;
  }

  /// Get the last key in the map. Returns `null` if the map is empty.
  K lastKey() {
    if (_root == null) return null;
    return _last.key;
  }

  /// Get the last key in the map that is strictly smaller than [key]. Returns
  /// `null` if no such key was found.
  K lastKeyBefore(K key) {
    return _lastBefore(key)?.key;
  }

  /// Get the first key in the map that is strictly larger than [key]. Returns
  /// `null` if no such key was found.
  K firstKeyAfter(K key) {
    return _firstAfter(key)?.key;
  }
}
/// Base in-order (ascending key) iterator over an [_AVLTree].
///
/// Subclasses pick what each node yields via [_getValue].
abstract class _AVLTreeIterator<K, Node extends _AVLTreeNode<K, Node>, T> implements Iterator<T> {
  final _AVLTree<K, Node> _tree;

  /// Worklist of nodes to visit.
  ///
  /// These nodes have been passed over on the way down in a
  /// depth-first left-to-right traversal. Visiting each node,
  /// and their right subtrees will visit the remainder of
  /// the nodes of a full traversal.
  ///
  /// Only valid as long as the original tree isn't reordered.
  final List<Node> _workList = [];

  /// Original modification counter of [_tree].
  ///
  /// Incremented on [_tree] when a key is added or removed.
  /// If it changes, iteration is aborted.
  ///
  /// Not final because some iterators may modify the tree knowingly,
  /// and they update the modification count in that case.
  int _modificationCount;

  /// Current node.
  Node _currentNode;

  _AVLTreeIterator(_AVLTree<K, Node> tree)
      : _tree = tree,
        _modificationCount = tree._modificationCount {
    // Seed the worklist with the path to the smallest key.
    _findLeftMostDescendent(tree._root);
  }

  T get current {
    var node = _currentNode;
    if (node == null) return null as T;
    return _getValue(node);
  }

  /// Pushes [node] and its entire left spine onto the worklist.
  void _findLeftMostDescendent(Node node) {
    while (node != null) {
      _workList.add(node);
      node = node.left;
    }
  }

  bool moveNext() {
    if (_modificationCount != _tree._modificationCount) {
      throw ConcurrentModificationError(_tree);
    }
    // Picks the next element in the worklist as current.
    // Updates the worklist with the left-most path of the current node's
    // right-hand child.
    // If the worklist is no longer valid (after a splay), it is rebuild
    // from scratch.
    if (_workList.isEmpty) {
      _currentNode = null;
      return false;
    }
    _currentNode = _workList.removeLast();
    _findLeftMostDescendent(_currentNode.right);
    return true;
  }

  /// Maps the visited [node] to the value this iterator yields.
  T _getValue(Node node);
}
/// Lazy view over the keys of an AVL tree, in ascending order.
class _AVLTreeKeyIterable<K, Node extends _AVLTreeNode<K, Node>> extends Iterable<K> {
  _AVLTree<K, Node> _tree;

  _AVLTreeKeyIterable(this._tree);

  int get length {
    return _tree._count;
  }

  bool get isEmpty {
    return _tree._count == 0;
  }

  Iterator<K> get iterator {
    return _AVLTreeKeyIterator<K, Node>(_tree);
  }

  /// Materializes the keys into a fresh [AVLTreeSet] that shares the
  /// source tree's comparator and key predicate; the node structure is
  /// deep-copied so the two trees are independent.
  Set<K> toSet() {
    AVLTreeSet<K> result = AVLTreeSet<K>(_tree._compare, _tree._validKey);
    result._count = _tree._count;
    result._root = result._copyNode<Node>(_tree._root);
    return result;
  }
}
/// Lazy view over the values of an [AVLTreeMap], in ascending key order.
class _AVLTreeValueIterable<K, V> extends Iterable<V> {
  AVLTreeMap<K, V> _map;

  _AVLTreeValueIterable(this._map);

  int get length {
    return _map._count;
  }

  bool get isEmpty {
    return _map._count == 0;
  }

  Iterator<V> get iterator {
    return _AVLTreeValueIterator<K, V>(_map);
  }
}
/// Yields each node's key during an in-order traversal.
class _AVLTreeKeyIterator<K, Node extends _AVLTreeNode<K, Node>> extends _AVLTreeIterator<K, Node, K> {
  _AVLTreeKeyIterator(_AVLTree<K, Node> tree) : super(tree);

  K _getValue(Node node) {
    return node.key;
  }
}
/// Yields each node's value during an in-order traversal of a map.
class _AVLTreeValueIterator<K, V> extends _AVLTreeIterator<K, _AVLTreeMapNode<K, V>, V> {
  _AVLTreeValueIterator(AVLTreeMap<K, V> tree) : super(tree);

  V _getValue(_AVLTreeMapNode<K, V> node) {
    return node.value;
  }
}
/// Yields the tree nodes themselves during an in-order traversal.
class _AVLTreeNodeIterator<K, Node extends _AVLTreeNode<K, Node>> extends _AVLTreeIterator<K, Node, Node> {
  _AVLTreeNodeIterator(_AVLTree<K, Node> tree) : super(tree);

  Node _getValue(Node current) {
    return current;
  }
}
/// A [Set] of objects that can be ordered relative to each other.
///
/// The set is based on a self-balancing binary tree. It allows most operations
/// in amortized logarithmic time.
///
/// Elements of the set are compared using the `compare` function passed in
/// the constructor, both for ordering and for equality.
/// If the set contains only an object `a`, then `set.contains(b)`
/// will return `true` if and only if `compare(a, b) == 0`,
/// and the value of `a == b` is not even checked.
/// If the compare function is omitted, the objects are assumed to be
/// [Comparable], and are compared using their [Comparable.compareTo] method.
/// Non-comparable objects (including `null`) will not work as an element
/// in that case.
class AVLTreeSet<E> extends _AVLTree<E, _AVLTreeSetNode<E>> with IterableMixin<E>, SetMixin<E> {
  // Concrete storage for the _AVLTree base class.
  _AVLTreeSetNode<E> _root;
  // Orders elements and defines element equality (compare(a, b) == 0).
  Comparator<E> _compare;
  // Pre-filters arguments to contains/remove/lookup that may not be E.
  _Predicate _validKey;

  /// Create a new [AVLTreeSet] with the given compare function.
  ///
  /// If the [compare] function is omitted, it defaults to [Comparable.compare],
  /// and the elements must be comparable.
  ///
  /// A provided `compare` function may not work on all objects. It may not even
  /// work on all `E` instances.
  ///
  /// For operations that add elements to the set, the user is supposed to not
  /// pass in objects that doesn't work with the compare function.
  ///
  /// The methods [contains], [remove], [lookup], [removeAll] or [retainAll]
  /// are typed to accept any object(s), and the [isValidKey] test can used to
  /// filter those objects before handing them to the `compare` function.
  ///
  /// If [isValidKey] is provided, only values satisfying `isValidKey(other)`
  /// are compared using the `compare` method in the methods mentioned above.
  /// If the `isValidKey` function returns false for an object, it is assumed to
  /// not be in the set.
  ///
  /// If omitted, the `isValidKey` function defaults to checking against the
  /// type parameter: `other is E`.
  AVLTreeSet([int Function(E key1, E key2) compare, bool Function(dynamic potentialKey) isValidKey])
      : _compare = compare ?? _defaultCompare<E>(),
        _validKey = isValidKey ?? ((dynamic v) => v is E);

  /// Creates a [AVLTreeSet] that contains all [elements].
  ///
  /// The set works as if created by `new AVLTreeSet<E>(compare, isValidKey)`.
  ///
  /// All the [elements] should be instances of [E] and valid arguments to
  /// [compare].
  /// The `elements` iterable itself may have any element type, so this
  /// constructor can be used to down-cast a `Set`, for example as:
  /// ```dart
  /// Set<SuperType> superSet = ...;
  /// Set<SubType> subSet =
  ///     new AVLTreeSet<SubType>.from(superSet.whereType<SubType>());
  /// ```
  factory AVLTreeSet.from(Iterable elements,
      [int Function(E key1, E key2) compare, bool Function(dynamic potentialKey) isValidKey]) {
    if (elements is Iterable<E>) {
      return AVLTreeSet<E>.of(elements, compare, isValidKey);
    }
    AVLTreeSet<E> result = AVLTreeSet<E>(compare, isValidKey);
    for (var element in elements) {
      result.add(element as dynamic);
    }
    return result;
  }

  /// Creates a [AVLTreeSet] from [elements].
  ///
  /// The set works as if created by `new AVLTreeSet<E>(compare, isValidKey)`.
  ///
  /// All the [elements] should be valid as arguments to the [compare] function.
  factory AVLTreeSet.of(Iterable<E> elements,
          [int Function(E key1, E key2) compare, bool Function(dynamic potentialKey) isValidKey]) =>
      AVLTreeSet(compare, isValidKey)..addAll(elements);

  // Builds an empty companion set with the same ordering, used by [cast].
  Set<T> _newSet<T>() => AVLTreeSet<T>((T a, T b) => _compare(a as E, b as E), _validKey);

  Set<R> cast<R>() => Set.castFrom<E, R>(this, newSet: _newSet);

  // From Iterable.
  Iterator<E> get iterator => _AVLTreeKeyIterator<E, _AVLTreeSetNode<E>>(this);

  int get length => _count;

  bool get isEmpty => _root == null;

  bool get isNotEmpty => _root != null;

  E get first {
    if (_count == 0) throw _IterableElementError.noElement();
    return _first.key;
  }

  E get last {
    if (_count == 0) throw _IterableElementError.noElement();
    return _last.key;
  }

  E get single {
    if (_count == 0) throw _IterableElementError.noElement();
    if (_count > 1) throw _IterableElementError.tooMany();
    return _root.key;
  }

  // From Set.
  bool contains(Object element) {
    return _validKey(element) && _search(element) != null;
  }

  /// Adds [element]; returns `true` if it was not already present.
  bool add(E element) {
    _AVLTreeSetNode<E> node = _AVLTreeSetNode<E>(element);
    bool added = true;
    _insert(node, replaceIfExist: (_, __) {
      // An equal element already exists; keep it and report no change.
      added = false;
      return false;
    });
    return added;
  }

  bool remove(Object object) {
    if (!_validKey(object)) return false;
    return _delete(object) != null;
  }

  void addAll(Iterable<E> elements) {
    for (E element in elements) {
      _insert(_AVLTreeSetNode<E>(element));
    }
  }

  void removeAll(Iterable<Object> elements) {
    for (Object element in elements) {
      if (_validKey(element)) _delete(element as E);
    }
  }

  /// Removes every element not contained in [elements].
  void retainAll(Iterable<Object> elements) {
    // Build a set with the same sense of equality as this set.
    AVLTreeSet<E> retainSet = AVLTreeSet<E>(_compare, _validKey);
    int modificationCount = _modificationCount;
    for (Object object in elements) {
      if (modificationCount != _modificationCount) {
        // The iterator should not have side effects.
        throw ConcurrentModificationError(this);
      }
      // Equivalent to this.contains(object).
      if (_validKey(object)) {
        // FIX: this used to add `_root.key`, a leftover from the splay-tree
        // implementation where a successful lookup splays the found node to
        // the root. An AVL search does not restructure the tree, so `_root`
        // is unrelated to the element just searched for and the wrong key
        // was retained. Add the key of the node actually found instead.
        var node = _search(object);
        if (node != null) {
          retainSet.add(node.key);
        }
      }
    }
    // Take over the elements from the retained set, if it differs.
    if (retainSet._count != _count) {
      _root = retainSet._root;
      _count = retainSet._count;
      _modificationCount++;
    }
  }

  E lookup(Object object) {
    if (!_validKey(object)) return null;
    return _search(object)?.key;
  }

  Set<E> intersection(Set<Object> other) {
    Set<E> result = AVLTreeSet<E>(_compare, _validKey);
    for (E element in this) {
      if (other.contains(element)) result.add(element);
    }
    return result;
  }

  Set<E> difference(Set<Object> other) {
    Set<E> result = AVLTreeSet<E>(_compare, _validKey);
    for (E element in this) {
      if (!other.contains(element)) result.add(element);
    }
    return result;
  }

  Set<E> union(Set<E> other) {
    return _clone()..addAll(other);
  }

  // Deep-copies this set, preserving comparator and key predicate.
  AVLTreeSet<E> _clone() {
    var set = AVLTreeSet<E>(_compare, _validKey);
    set._count = _count;
    set._root = _copyNode<_AVLTreeSetNode<E>>(_root);
    return set;
  }

  // Copies the structure of a AVLTree into a new similar structure.
  // Works on _AVLTreeMapNode as well, but only copies the keys,
  _AVLTreeSetNode<E> _copyNode<Node extends _AVLTreeNode<E, Node>>(Node node) {
    if (node == null) return null;
    // Given a source node and a destination node, copy the left
    // and right subtrees of the source node into the destination node.
    // The left subtree is copied recursively, but the right spine
    // of every subtree is copied iteratively.
    void copyChildren(Node node, _AVLTreeSetNode<E> dest) {
      Node left;
      Node right;
      do {
        left = node.left;
        right = node.right;
        if (left != null) {
          var newLeft = _AVLTreeSetNode<E>(left.key);
          dest.left = newLeft;
          // Recursively copy the left tree.
          copyChildren(left, newLeft);
        }
        if (right != null) {
          var newRight = _AVLTreeSetNode<E>(right.key);
          dest.right = newRight;
          // Set node and dest to copy the right tree iteratively.
          node = right;
          dest = newRight;
        }
      } while (right != null);
    }
    var result = _AVLTreeSetNode<E>(node.key);
    copyChildren(node, result);
    return result;
  }

  void clear() {
    _clear();
  }

  Set<E> toSet() => _clone();

  String toString() => IterableBase.iterableToFullString(this, '{', '}');
}
| 29.392544
| 107
| 0.581238
|
da63a47f0342787b0d055e8f15e62b905b6fab90
| 975
|
php
|
PHP
|
application/modules/grafik_responden/models/M_data.php
|
TeamHackathon15sep/hackathon15sep
|
68cf58ce457a1f0582131c1a826cd40979cafc7c
|
[
"MIT"
] | null | null | null |
application/modules/grafik_responden/models/M_data.php
|
TeamHackathon15sep/hackathon15sep
|
68cf58ce457a1f0582131c1a826cd40979cafc7c
|
[
"MIT"
] | null | null | null |
application/modules/grafik_responden/models/M_data.php
|
TeamHackathon15sep/hackathon15sep
|
68cf58ce457a1f0582131c1a826cd40979cafc7c
|
[
"MIT"
] | null | null | null |
<?php if ( ! defined('BASEPATH')) exit('No direct script access allowed');
/**
 * CodeIgniter model backing the respondent-graph module.
 * Reads aggregate counts from the `survey` table for charting.
 */
class M_data extends CI_Model {

	/**
	 * Daily survey counts (up to 30 rows).
	 *
	 * NOTE(review): grouping by DAY(tgl_survey) alone folds together equal
	 * days-of-month from *different* months, and the non-aggregated
	 * id_survey/tgl_survey columns rely on MySQL running without
	 * ONLY_FULL_GROUP_BY — confirm the data never spans more than one month.
	 *
	 * @return array rows with id_survey, jumlah (count), tgl_survey (d/m/Y)
	 */
	function get_data()
	{
		$query = $this->db->query("SELECT id_survey, COUNT( * ) AS jumlah, DATE_FORMAT(tgl_survey,'%d/%m/%Y') AS tgl_survey FROM survey GROUP BY DAY(tgl_survey) ORDER BY id_survey ASC LIMIT 30"); // YEAR MONTH DAY
		return $query->result();
	}

	/**
	 * Answer distribution for one survey question.
	 *
	 * @return array rows with per-answer counts joined to answer descriptions
	 */
	function get_data_pertanyaan()
	{
		/*$query = $this->db->query("SELECT id, year AS TAHUN, COUNT( * ) AS JUMLAH FROM activities GROUP BY TAHUN");*/
		$query = $this->db->query("SELECT survey.id_survey, COUNT( survey.id_jwb ) AS counter, survey.id_pertanyaan,
		jenis_jawaban.id_jwb, jenis_jawaban.desk_jwb
		FROM survey, jenis_jawaban
		WHERE survey.id_pertanyaan='1' AND survey.id_jwb=jenis_jawaban.id_jwb
		GROUP BY survey.id_jwb");
		return $query->result();
		// Still static: hard-coded to question id 1 only — parameterize when
		// more questions need charting.
	}
}
| 40.625
| 213
| 0.642051
|
4a5f4d7b0c97cfaba7502bd01e75ab34207242db
| 5,068
|
lua
|
Lua
|
docker/minetest/home/mods/advtrains/advtrains_train_subway/init.lua
|
batako/mine_blockly
|
64a2a6114e4281c68caea6b806a18b64f2b5a7d9
|
[
"MIT"
] | 8
|
2018-02-26T12:43:17.000Z
|
2020-06-06T13:47:08.000Z
|
docker/minetest/home/mods/advtrains/advtrains_train_subway/init.lua
|
batako/mine_blockly
|
64a2a6114e4281c68caea6b806a18b64f2b5a7d9
|
[
"MIT"
] | 54
|
2018-03-03T03:19:02.000Z
|
2022-03-30T22:31:52.000Z
|
docker/minetest/home/mods/advtrains/advtrains_train_subway/init.lua
|
batako/mine_blockly
|
64a2a6114e4281c68caea6b806a18b64f2b5a7d9
|
[
"MIT"
] | 2
|
2019-05-30T11:06:57.000Z
|
2021-03-04T14:14:58.000Z
|
-- Translation support: use intllib's getter when the mod is present,
-- otherwise fall back to a minimal substitute that expands "@1", "@2", ...
-- placeholders in the string with the extra arguments.
local S
if minetest.get_modpath("intllib") then
	S = intllib.Getter()
else
	S = function(s,a,...)a={a,...}return s:gsub("@(%d+)",function(n)return a[tonumber(n)]end)end
end
-- Registers the subway passenger wagon: model, seating layout, door
-- animations, and sound hooks for departure/arrival/rolling noise.
advtrains.register_wagon("subway_wagon", {
	mesh="advtrains_subway_wagon.b3d",
	textures = {"advtrains_subway_wagon.png"},
	drives_on={default=true},
	max_speed=15,
	-- One driver stand plus four passenger seats; offsets position the
	-- attached player model inside the wagon mesh.
	seats = {
		{
			name="Driver stand",
			attach_offset={x=0, y=10, z=0},
			view_offset={x=0, y=0, z=0},
			group="dstand",
		},
		{
			name="1",
			attach_offset={x=-4, y=8, z=8},
			view_offset={x=0, y=0, z=0},
			group="pass",
		},
		{
			name="2",
			attach_offset={x=4, y=8, z=8},
			view_offset={x=0, y=0, z=0},
			group="pass",
		},
		{
			name="3",
			attach_offset={x=-4, y=8, z=-8},
			view_offset={x=0, y=0, z=0},
			group="pass",
		},
		{
			name="4",
			attach_offset={x=4, y=8, z=-8},
			view_offset={x=0, y=0, z=0},
			group="pass",
		},
	},
	seat_groups = {
		dstand={
			name = "Driver Stand",
			access_to = {"pass"},
			require_doors_open=true,
			driving_ctrl_access=true,
		},
		pass={
			name = "Passenger area",
			access_to = {"dstand"},
			require_doors_open=true,
		},
	},
	assign_to_seat_group = {"pass", "dstand"},
	-- Door animation frame ranges per side (-1 = left, 1 = right).
	doors={
		open={
			[-1]={frames={x=0, y=20}, time=1},
			[1]={frames={x=40, y=60}, time=1},
			sound = "advtrains_subway_dopen",
		},
		close={
			[-1]={frames={x=20, y=40}, time=1},
			[1]={frames={x=60, y=80}, time=1},
			sound = "advtrains_subway_dclose",
		}
	},
	door_entry={-1, 1},
	visual_size = {x=1, y=1},
	wagon_span=2,
	--collisionbox = {-1.0,-0.5,-1.8, 1.0,2.5,1.8},
	collisionbox = {-1.0,-0.5,-1.0, 1.0,2.5,1.0},
	is_locomotive=true,
	drops={"default:steelblock 4"},
	horn_sound = "advtrains_subway_horn",
	-- Plays departure/arrival jingles on speed transitions and keeps a
	-- rolling-noise loop alive (re-triggered every ~3s) while moving.
	custom_on_velocity_change = function(self, velocity, old_velocity, dtime)
		if not velocity or not old_velocity then return end
		if old_velocity == 0 and velocity > 0 then
			minetest.sound_play("advtrains_subway_depart", {object = self.object})
		end
		if velocity < 2 and (old_velocity >= 2 or old_velocity == velocity) and not self.sound_arrive_handle then
			self.sound_arrive_handle = minetest.sound_play("advtrains_subway_arrive", {object = self.object})
		elseif (velocity > old_velocity) and self.sound_arrive_handle then
			minetest.sound_stop(self.sound_arrive_handle)
			self.sound_arrive_handle = nil
		end
		if velocity > 0 and (self.sound_loop_tmr or 0)<=0 then
			self.sound_loop_handle = minetest.sound_play({name="advtrains_subway_loop", gain=0.3}, {object = self.object})
			self.sound_loop_tmr=3
		elseif velocity>0 then
			self.sound_loop_tmr = self.sound_loop_tmr - dtime
		elseif velocity==0 then
			if self.sound_loop_handle then
				minetest.sound_stop(self.sound_loop_handle)
				self.sound_loop_handle = nil
			end
			self.sound_loop_tmr=0
		end
	end,
}, S("Subway Passenger Wagon"), "advtrains_subway_wagon_inv.png")
--wagons
-- Crafting recipe for a single subway wagon: a ring of steel blocks
-- around one yellow dye.
minetest.register_craft({
	output = 'advtrains:subway_wagon',
	recipe = {
		{'default:steelblock', 'default:steelblock', 'default:steelblock'},
		{'default:steelblock', 'dye:yellow', 'default:steelblock'},
		{'default:steelblock', 'default:steelblock', 'default:steelblock'},
	},
})
-- Craftitem that spawns a complete 3-wagon subway train on the pointed
-- rail and sets it rolling forward after one second.
minetest.register_craftitem(":advtrains:subway_train", {
	description = "Subway train, will drive forward when placed",
	inventory_image = "advtrains_subway_wagon_inv.png",
	wield_image = "advtrains_subway_wagon_inv.png",
	on_place = function(itemstack, placer, pointed_thing)
		return advtrains.pcall(function()
			-- FIX: the original guard read `if not pointed_thing.type == "node"`,
			-- which Lua parses as `(not pointed_thing.type) == "node"` -- always
			-- false, so the check never fired. Compare with ~= instead.
			if pointed_thing.type ~= "node" then
				return
			end
			local node = minetest.get_node_or_nil(pointed_thing.under)
			if not node then atprint("[advtrains]Ignore at placer position") return itemstack end
			if not minetest.check_player_privs(placer, {train_place = true }) and minetest.is_protected(pointed_thing.under, placer:get_player_name()) then
				minetest.record_protection_violation(pointed_thing.under, placer:get_player_name())
				return
			end
			local tconns = advtrains.get_track_connections(node.name, node.param2)
			-- advtrains track yaw is offset a quarter turn from player look yaw.
			local yaw = placer:get_look_horizontal() + (math.pi/2)
			local plconnid = advtrains.yawToClosestConn(yaw, tconns)
			local prevpos = advtrains.get_adjacent_rail(pointed_thing.under, tconns, plconnid, advtrains.all_tracktypes)
			if not prevpos then return end
			local id = advtrains.create_new_train_at(pointed_thing.under, prevpos)
			for i = 1, 3 do
				local ob = minetest.add_entity(pointed_thing.under, "advtrains:subway_wagon")
				if not ob then
					-- FIX: the original only logged here and then dereferenced the
					-- nil entity below; bail out instead of crashing.
					atprint("couldn't add_entity, aborting")
					return itemstack
				end
				local le = ob:get_luaentity()
				le.owner = placer:get_player_name()
				le:init_new_instance(id, {})
				advtrains.add_wagon_to_train(le, id)
			end
			-- Let the train settle for a tick before commanding it forward.
			minetest.after(1, function()
				advtrains.trains[id].tarvelocity = 2
				advtrains.trains[id].velocity = 2
				advtrains.trains[id].movedir = 1
			end)
			if not minetest.settings:get_bool("creative_mode") then
				itemstack:take_item()
			end
			return itemstack
		end)
	end,
})
| 29.811765
| 147
| 0.681334
|
1f08a39acb19398aa2760c6ab940c10b5b5a063d
| 90
|
lua
|
Lua
|
MapLoad.lua
|
JulianoCP/game-dungeonCrawler-lua
|
2cedc6a4f3767790674512b2b9804dd5afdfe241
|
[
"MIT"
] | 1
|
2021-12-15T14:55:19.000Z
|
2021-12-15T14:55:19.000Z
|
MapLoad.lua
|
JulianoCP/game-dungeonCrawler-lua
|
2cedc6a4f3767790674512b2b9804dd5afdfe241
|
[
"MIT"
] | 1
|
2019-12-04T21:46:38.000Z
|
2019-12-04T21:46:38.000Z
|
MapLoad.lua
|
JulianoCP/game-dungeonCrawler-lua
|
2cedc6a4f3767790674512b2b9804dd5afdfe241
|
[
"MIT"
] | 1
|
2019-12-11T21:20:18.000Z
|
2019-12-11T21:20:18.000Z
|
-- Map name registry, returned to whoever loads this file (dofile/require).
-- NOTE(review): `names` is assigned without `local`, so it also leaks into
-- the global table. Callers appear to use the return value, but confirm no
-- code reads the global before making it local.
names = {
	"Dungeon",
	"Fosso",
	"Siberia",
	"Mix",
	"Novo",
}
return names
| 11.25
| 14
| 0.466667
|
9e13a0c0bb3e18b97488238bd230e4ee4427652e
| 11,010
|
cs
|
C#
|
OtherComponents/MDWSvistalayer/MDWS Source/mdo/mdo/src/mdo/dao/oracle/mhv/sm/FolderDao.cs
|
frankfont/RAPTOR_FORK
|
2d9d4a4b11c7d79268495d1ccdf5ae4c7eb45331
|
[
"Apache-2.0"
] | 7
|
2016-02-17T17:54:27.000Z
|
2021-03-18T08:21:32.000Z
|
OtherComponents/MDWSvistalayer/MDWS Source/mdo/mdo/src/mdo/dao/oracle/mhv/sm/FolderDao.cs
|
frankfont/RAPTOR_FORK
|
2d9d4a4b11c7d79268495d1ccdf5ae4c7eb45331
|
[
"Apache-2.0"
] | 6
|
2016-02-03T19:32:29.000Z
|
2017-02-13T19:26:29.000Z
|
OtherComponents/MDWSvistalayer/MDWS Source/mdo/mdo/src/mdo/dao/oracle/mhv/sm/FolderDao.cs
|
frankfont/RAPTOR_FORK
|
2d9d4a4b11c7d79268495d1ccdf5ae4c7eb45331
|
[
"Apache-2.0"
] | 9
|
2015-01-03T16:46:54.000Z
|
2017-07-29T18:22:53.000Z
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Data;
using Oracle.DataAccess.Client;
using Oracle.DataAccess.Types;
using gov.va.medora.mdo.exceptions;
namespace gov.va.medora.mdo.dao.oracle.mhv.sm
{
public class FolderDao
{
MdoOracleConnection _cxn;
delegate OracleDataReader reader();
delegate Int32 nonQuery();
/// <summary>
/// Creates a folder DAO over the supplied connection.
/// </summary>
/// <param name="cxn">Must be an <c>MdoOracleConnection</c>; the cast throws otherwise.</param>
public FolderDao(AbstractConnection cxn)
{
    _cxn = (MdoOracleConnection)cxn;
}
#region Folder CRUD
#region Get Folder
/// <summary>
/// Fetches the active folder with the given ID; returns an empty
/// folder instance when no row matches.
/// </summary>
public domain.sm.Folder getFolder(Int32 folderId)
{
    OracleQuery query = buildGetFolderQuery(folderId);
    reader execute = () => query.Command.ExecuteReader();
    OracleDataReader result = (OracleDataReader)_cxn.query(query, execute);
    return toFolder(result);
}
/// <summary>
/// Builds the parameterized SELECT for a single active folder by ID.
/// </summary>
internal OracleQuery buildGetFolderQuery(Int32 folderId)
{
    const string sql = "SELECT FOLDER_ID, USER_ID, FOLDER_NAME, OPLOCK AS FOLDOPLOCK FROM SMS.FOLDER WHERE FOLDER_ID=:folderId AND ACTIVE=1";
    OracleQuery query = new OracleQuery { Command = new OracleCommand(sql) };
    query.Command.Parameters.Add(
        new OracleParameter("folderId", OracleDbType.Decimal) { Value = folderId });
    return query;
}
#endregion
#region Delete Folder
/// <summary>
/// Deletes a folder row.
/// NOTE(review): this is a hard DELETE, while the read/update queries in
/// this DAO filter on ACTIVE=1 (a soft-delete convention) — confirm a
/// physical delete is intended here.
/// </summary>
/// <exception cref="MdoException">Thrown when exactly one row was not deleted.</exception>
public void deleteFolder(Int32 folderId)
{
    OracleQuery request = buildDeleteFolderQuery(folderId);
    nonQuery qry = delegate() { return request.Command.ExecuteNonQuery(); };
    Int32 response = (Int32)_cxn.query(request, qry);
    if (response != 1)
    {
        throw new MdoException("Unable to delete folder");
    }
}
/// <summary>
/// Builds the parameterized DELETE for a folder by ID.
/// </summary>
internal OracleQuery buildDeleteFolderQuery(Int32 folderId)
{
    const string sql = "DELETE FROM SMS.FOLDER WHERE FOLDER_ID=:folderId";
    OracleQuery query = new OracleQuery { Command = new OracleCommand(sql) };
    query.Command.Parameters.Add(
        new OracleParameter("folderId", OracleDbType.Decimal) { Value = folderId });
    return query;
}
#endregion
#region Update Folder
/// <summary>
/// Renames a folder using optimistic concurrency: the UPDATE only matches
/// when the stored OPLOCK equals the caller's value, and the row's OPLOCK
/// is incremented in the same statement.
/// </summary>
/// <returns>The same instance with its Oplock incremented to mirror the database.</returns>
/// <exception cref="MdoException">No row matched (folder missing, inactive, or stale oplock).</exception>
public domain.sm.Folder updateFolder(domain.sm.Folder folder)
{
    OracleQuery request = buildUpdateFolderQuery(folder);
    nonQuery qry = delegate() { return request.Command.ExecuteNonQuery(); };
    Int32 response = (Int32)_cxn.query(request, qry);
    if (response != 1)
    {
        throw new MdoException("Unable to update folder");
    }
    // Keep the in-memory oplock in sync with the value just written.
    folder.Oplock++;
    return folder;
}
/// <summary>
/// Builds the optimistic-concurrency UPDATE for a folder rename. The WHERE
/// clause pins the current OPLOCK; MODIFIED_DATE is set by the database.
/// </summary>
internal OracleQuery buildUpdateFolderQuery(domain.sm.Folder folder)
{
    const string sql = "UPDATE SMS.FOLDER SET FOLDER_NAME=:folderName, OPLOCK=:oplockPlusOne, MODIFIED_DATE=SYSDATE WHERE FOLDER_ID=:folderId and OPLOCK=:oplock AND ACTIVE=1";
    OracleQuery query = new OracleQuery { Command = new OracleCommand(sql) };
    query.Command.Parameters.Add(
        new OracleParameter("folderName", OracleDbType.Varchar2, 50) { Value = folder.Name });
    query.Command.Parameters.Add(
        new OracleParameter("oplockPlusOne", OracleDbType.Decimal) { Value = folder.Oplock + 1 });
    query.Command.Parameters.Add(
        new OracleParameter("folderId", OracleDbType.Decimal) { Value = folder.Id });
    query.Command.Parameters.Add(
        new OracleParameter("oplock", OracleDbType.Decimal) { Value = folder.Oplock });
    return query;
}
#endregion
#region Create Folder
/// <summary>
/// Inserts a new folder row for the folder's owner and fills in the
/// database-generated folder ID on the returned instance.
/// </summary>
/// <exception cref="MdoException">Thrown when exactly one row was not inserted.</exception>
public domain.sm.Folder createFolder(domain.sm.Folder folder)
{
    OracleQuery request = buildCreateFolderQuery(folder);
    nonQuery qry = delegate() { return request.Command.ExecuteNonQuery(); };
    Int32 response = (Int32)_cxn.query(request, qry);
    if (response != 1)
    {
        throw new MdoException("Unable to create folder");
    }
    // RETURNING FOLDER_ID INTO :outId populated the output parameter.
    folder.Id = ((Oracle.DataAccess.Types.OracleDecimal)request.Command.Parameters["outId"].Value).ToInt32();
    return folder;
}
/// <summary>
/// Builds the INSERT for a new folder; the generated FOLDER_ID is
/// returned through the :outId output parameter.
/// </summary>
internal OracleQuery buildCreateFolderQuery(domain.sm.Folder folder)
{
    const string sql = "INSERT INTO SMS.FOLDER (USER_ID, FOLDER_NAME) VALUES (:userId, :folderName) RETURNING FOLDER_ID INTO :outId";
    OracleQuery query = new OracleQuery { Command = new OracleCommand(sql) };
    query.Command.Parameters.Add(
        new OracleParameter("userId", OracleDbType.Decimal) { Value = folder.Owner.Id });
    query.Command.Parameters.Add(
        new OracleParameter("folderName", OracleDbType.Varchar2, 50) { Value = folder.Name });
    query.Command.Parameters.Add(
        new OracleParameter("outId", OracleDbType.Decimal) { Direction = ParameterDirection.Output });
    return query;
}
#endregion
/// <summary>
/// Maps the first row of the reader to a folder; yields an empty folder
/// instance when the reader has no rows.
/// </summary>
internal domain.sm.Folder toFolder(IDataReader rdr)
{
    return rdr.Read()
        ? domain.sm.Folder.getFolderFromReader(rdr)
        : new domain.sm.Folder();
}
#endregion
/// <summary>
/// Moves a message (addressee record) into <paramref name="newFolder"/>,
/// re-reading the current record, validating the move, then persisting the
/// new folder ID with the caller's oplock (optimistic concurrency).
/// </summary>
/// <exception cref="MdoException">Addressee not found or move is invalid.</exception>
public domain.sm.Addressee moveMessageToFolder(domain.sm.Addressee addressee, domain.sm.Folder newFolder)
{
    domain.sm.Addressee dbAddressee = new AddresseeDao(_cxn).getAddressee(addressee.Id);
    if (dbAddressee == null || dbAddressee.Id <= 0)
    {
        throw new MdoException("Couldn't find that addressee record");
    }
    checkValidMove(Convert.ToInt32(dbAddressee.FolderId), Convert.ToInt32(newFolder.Id), addressee.Owner.Id);
    dbAddressee.FolderId = newFolder.Id;
    // Use the caller's oplock, not the freshly-read one, so a stale caller
    // still fails the optimistic update as intended.
    dbAddressee.Oplock = addressee.Oplock;
    return new AddresseeDao(_cxn).updateAddressee(dbAddressee);
}
/// <summary>
/// Validates a message move: rejects no-op moves and any move into or out
/// of the Drafts/Sent system folders; for user folders (id &gt; 0) verifies
/// ownership.
/// NOTE(review): getUserFolder throws when the user does not own the
/// folder, so the fall-through after the ownership lookup is effectively
/// unreachable — confirm that throwing is the intended failure mode.
/// </summary>
/// <exception cref="MdoException">The move is not allowed.</exception>
private void checkValidMove(Int32 currentFolder, Int32 newFolder, Int32 userId)
{
    if (currentFolder == newFolder)
    {
        throw new MdoException("Message is already located in that folder");
    }
    if (currentFolder == (Int32)domain.sm.enums.SystemFolderEnum.Drafts || newFolder == (Int32)domain.sm.enums.SystemFolderEnum.Drafts)
    {
        throw new MdoException("Can't move message out of or in to drafts folder");
    }
    if (currentFolder == (Int32)domain.sm.enums.SystemFolderEnum.Sent || newFolder == (Int32)domain.sm.enums.SystemFolderEnum.Sent)
    {
        throw new MdoException("Can't move message out of or in to sent folder");
    }
    if (newFolder > 0)
    {
        domain.sm.Folder folder = getUserFolder(userId, newFolder); // make sure user owns this folder
        if (folder != null && folder.Id > 0)
        {
            // ok
            return;
        }
    }
}
/// <summary>
/// Fetches the single active folder <paramref name="folderId"/> owned by
/// <paramref name="userId"/>.
/// </summary>
/// <exception cref="MdoException">The user does not own that folder.</exception>
internal domain.sm.Folder getUserFolder(Int32 userId, Int32 folderId)
{
    // BUG FIX: this previously called buildGetUserFoldersQuery(userId) — the
    // all-folders query — which ignored folderId entirely and returned the
    // user's first active folder, defeating the ownership check performed by
    // checkValidMove. Use the single-folder query that filters on FOLDER_ID.
    OracleQuery request = buildGetUserFolderQuery(userId, folderId);
    reader rdr = delegate() { return request.Command.ExecuteReader(); };
    OracleDataReader response = (OracleDataReader)_cxn.query(request, rdr);
    if (response.Read())
    {
        return domain.sm.Folder.getFolderFromReader(response);
    }
    else
    {
        throw new MdoException("User does not own that folder");
    }
}
/// <summary>
/// Builds the parameterized query selecting one active folder owned by a
/// given user (filters on both USER_ID and FOLDER_ID).
/// </summary>
internal OracleQuery buildGetUserFolderQuery(Int32 userId, Int32 folderId)
{
    const string sql = "SELECT FOLDER_ID, FOLDER_NAME, OPLOCK AS FOLDOPLOCK FROM SMS.FOLDER WHERE USER_ID=:userId AND FOLDER_ID=:folderId AND ACTIVE=1";
    OracleQuery result = new OracleQuery();
    result.Command = new OracleCommand(sql);
    OracleParameter pUserId = new OracleParameter("userId", OracleDbType.Decimal);
    pUserId.Value = Convert.ToDecimal(userId);
    OracleParameter pFolderId = new OracleParameter("folderId", OracleDbType.Decimal);
    pFolderId.Value = Convert.ToDecimal(folderId);
    result.Command.Parameters.Add(pUserId);
    result.Command.Parameters.Add(pFolderId);
    return result;
}
/// <summary>
/// Returns every active folder owned by <paramref name="userId"/>.
/// </summary>
internal IList<domain.sm.Folder> getUserFolders(Int32 userId)
{
    OracleQuery request = buildGetUserFoldersQuery(userId);
    reader readerDelegate = delegate() { return request.Command.ExecuteReader(); };
    OracleDataReader response = (OracleDataReader)_cxn.query(request, readerDelegate);
    return toFolders(response);
}
/// <summary>
/// Builds the parameterized query selecting all active folders owned by a
/// given user.
/// </summary>
internal OracleQuery buildGetUserFoldersQuery(Int32 userId)
{
    OracleQuery result = new OracleQuery();
    result.Command = new OracleCommand("SELECT FOLDER_ID, FOLDER_NAME, OPLOCK AS FOLDOPLOCK FROM SMS.FOLDER WHERE USER_ID=:userId AND ACTIVE=1");
    OracleParameter pUserId = new OracleParameter("userId", OracleDbType.Decimal);
    pUserId.Value = Convert.ToDecimal(userId);
    result.Command.Parameters.Add(pUserId);
    return result;
}
/// <summary>
/// Materializes every row of the reader into a list of Folders, using the
/// column-existence table so optional columns are handled gracefully.
/// Returns an empty list when the reader holds no rows.
/// </summary>
internal IList<domain.sm.Folder> toFolders(IDataReader rdr)
{
    Dictionary<string, bool> columns = QueryUtils.getColumnExistsTable(TableSchemas.FOLDER_COLUMNS, rdr);
    IList<domain.sm.Folder> result = new List<domain.sm.Folder>();
    while (rdr.Read())
    {
        result.Add(domain.sm.Folder.getFolderFromReader(rdr, columns));
    }
    return result;
}
}
}
| 38.767606
| 177
| 0.615622
|
05be8fa3598cb3c899491652530770a0eca158b9
| 12,599
|
swift
|
Swift
|
sofastcar/sofastcar/View/Login/CheckDriverLicenseView.swift
|
SoFastCar/SoFastCar-IOS
|
bafe1e9bd1f0f077d672e433f017b1af51f48999
|
[
"MIT"
] | 1
|
2021-04-03T05:35:13.000Z
|
2021-04-03T05:35:13.000Z
|
sofastcar/sofastcar/View/Login/CheckDriverLicenseView.swift
|
SoFastCar/SoFastCar-IOS
|
bafe1e9bd1f0f077d672e433f017b1af51f48999
|
[
"MIT"
] | 102
|
2020-09-01T05:50:09.000Z
|
2020-10-08T01:40:37.000Z
|
sofastcar/sofastcar/View/Login/CheckDriverLicenseView.swift
|
SoFastCar/SoFastCar-IOS
|
bafe1e9bd1f0f077d672e433f017b1af51f48999
|
[
"MIT"
] | 11
|
2020-08-13T12:52:18.000Z
|
2021-08-17T00:01:23.000Z
|
//
// CheckDriverLicenseView.swift
// sofastcar
//
// Created by 김광수 on 2020/09/01.
// Copyright © 2020 김광수. All rights reserved.
//
import UIKit
/// Scrollable form for registering a driver's license: license class
/// selector, license number, first seven digits of the resident
/// registration number, aptitude-test expiry / issue dates, a
/// terms-of-use agreement toggle, and a completion button.
class CheckDriverLicenseView: UIScrollView {
  // MARK: - Properties
  let smallPadding: CGFloat = 10
  let padding: CGFloat = 15
  /// Container for all form content; its layout margins define `guide`,
  /// which the layout helpers below constrain against.
  let contentView: UIView = {
    let view = UIView()
    view.backgroundColor = .white
    view.layoutMargins = UIEdgeInsets(top: 30, left: 15, bottom: 0, right: 15)
    return view
  }()
  let driverLicenseGradeLabel: UILabel = {
    let label = UILabel()
    label.text = "면허종류"
    label.font = .systemFont(ofSize: 16)
    label.textColor = .black
    return label
  }()
  /// Drop-down style button for choosing the license class.
  let selectDriverLicenseGradeButton: TouButton = {
    let button = TouButton(title: "1종 보통", imageName: "chevron.down", textColor: .black, fontSize: 16, style: .authStyle)
    // NOTE(review): button.frame is still .zero inside this initializer, so
    // leftInset comes out negative — the inset math likely has no visible
    // effect here; confirm before relying on it.
    if let stringWidth = button.currentAttributedTitle?.size().width {
      let leftInset = (button.frame.width - stringWidth)/3*2
      button.titleEdgeInsets = .init(top: 0, left: leftInset-20, bottom: 0, right: 0)
    }
    button.isSelected = true
    button.titleLabel?.textAlignment = .left
    button.layer.borderColor = UIColor.systemGray4.cgColor
    button.layer.borderWidth = 1
    return button
  }()
  // ====== License number section ======
  let driverLicenseNumberLabel: UILabel = {
    let label = UILabel()
    label.text = "면허 번호"
    label.font = .systemFont(ofSize: 16)
    label.textColor = .black
    return label
  }()
  /// Region-code selector shown to the left of the license number field.
  let selectDriverLicenseNumber: TouButton = {
    let button = TouButton(title: "11", imageName: "chevron.down", textColor: .black, fontSize: 16, style: .authStyle)
    button.isSelected = true
    button.layer.borderColor = UIColor.systemGray4.cgColor
    button.layer.borderWidth = 1
    return button
  }()
  let driverLicenseNumberTextField: LoginUserInputTextField = {
    let textField = LoginUserInputTextField()
    textField.text = " 12 - 636572 - 56"
    textField.keyboardType = .numberPad
    return textField
  }()
  // ====== Resident registration number section ======
  let userBirthLabel: UILabel = {
    let label = UILabel()
    label.text = "주민등록번호 앞 7자리"
    label.font = .systemFont(ofSize: 16)
    label.textColor = .black
    return label
  }()
  /// Birth-date digits (first six of the resident registration number).
  let userBirthTextField: LoginUserInputTextField = {
    let textField = LoginUserInputTextField()
    // textField.attributedPlaceholder = NSAttributedString(
    //   string: "• • • • • •",
    //   attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 30)]
    // )
    textField.text = "921030"
    textField.textColor = .black
    textField.keyboardType = .numberPad
    return textField
  }()
  let hiphenLabel: UILabel = {
    let label = UILabel()
    label.attributedText = NSAttributedString(
      string: " - ",
      attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 30)]
    )
    label.textColor = .systemGray4
    return label
  }()
  /// Seventh digit (gender/century indicator) of the registration number.
  let userSexTextField: LoginUserInputTextField = {
    let textField = LoginUserInputTextField()
    // textField.attributedPlaceholder = NSAttributedString(
    //   string: "1",
    //   attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 30)]
    // )
    textField.text = "1"
    textField.textColor = .black
    textField.layer.borderWidth = 0
    textField.keyboardType = .numberPad
    return textField
  }()
  /// Masked trailing digits rendered after the gender digit.
  let userBithPrefixLabel: UILabel = {
    let label = UILabel()
    label.text = "* * * * * *"
    label.font = .systemFont(ofSize: 20)
    label.textColor = .systemGray4
    return label
  }()
  // == Aptitude-test expiration
  let cardExpDateLabel: UILabel = {
    let label = UILabel()
    label.text = "적성검사 만료일"
    return label
  }()
  let cardExpDateMonthTextField: LoginUserInputTextField = {
    let textfield = LoginUserInputTextField()
    textfield.text = "2029 / 10 / 7"
    textfield.keyboardType = .numberPad
    return textfield
  }()
  // Issue date.
  // NOTE(review): the "Password" names below look stale — the label text is
  // "발급일" (issue date), not a password; confirm before renaming.
  let cardPasswordLable: UILabel = {
    let label = UILabel()
    label.text = "발급일"
    return label
  }()
  let cardPasswordTextField: LoginUserInputTextField = {
    let textfield = LoginUserInputTextField()
    textfield.text = "2019 / 10 / 7"
    textfield.keyboardType = .numberPad
    return textfield
  }()
  /// "Agree to all identity-verification terms" toggle button.
  let customAuthAllAgreeButton: TouButton = {
    let button = TouButton(title: " 본인 확인 서비스 이용약관 전체 동의",
                           imageName: "checkmark.circle.fill", textColor: .black, fontSize: 15, style: .touStyle)
    button.addImportantMark()
    // NOTE(review): frame is .zero here as well — see the same pattern on
    // selectDriverLicenseGradeButton above.
    if let stringWidth = button.currentAttributedTitle?.size().width {
      let leftInset = (button.frame.width - stringWidth)/2
      button.titleEdgeInsets = .init(top: 0, left: leftInset+40, bottom: 0, right: 0)
    }
    button.layer.borderColor = UIColor.systemGray4.cgColor
    button.layer.borderWidth = 1
    button.isSelected = false
    return button
  }()
  /// Static footer notes about license-approval requirements.
  let bottomInfoLabel: UITextView = {
    let textView = UITextView()
    textView.text = """
    ⋅ 운전면허 취득 후 만 1년 이상 경과하여야 승인이 가능합니다.
    ⋅ 입력된 운전면허증 정보 외 주소 부분을 별도 수집합니다.
    """
    textView.font = .systemFont(ofSize: 14)
    textView.textColor = .darkGray
    textView.backgroundColor = .none
    textView.isUserInteractionEnabled = false
    return textView
  }()
  let driverAuthCompleteButton: CompleteButton = {
    let button = CompleteButton(frame: .zero, title: "운전면허 등록 완료")
    button.isEnabled = true
    return button
  }()
  /// Layout-margins guide of `contentView`, shared by the layout helpers.
  lazy var guide = contentView.layoutMarginsGuide
  // MARK: - Life Cycle
  override init(frame: CGRect) {
    super.init(frame: frame)
    configureDefaultUISetting()
    configureDriverGradSelectUI()
    confgireDriverLicenseNumberUI()
    configureUserBirthInputUI()
    configureCardLabelUI()
    configurePersonalImfomationAgreeUI()
    configureBottomInfoUI()
    settingAuthCompleteButtonUI()
  }
  required init?(coder: NSCoder) {
    fatalError("init(coder:) has not been implemented")
  }
  /// Dismisses the keyboard when the user taps outside a text field.
  override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    self.endEditing(true)
  }
  /// Sizes the scroll view and `contentView`; enables scrolling only on
  /// small screens (height < 670pt) where the footer would be clipped.
  private func configureDefaultUISetting() {
    showsVerticalScrollIndicator = false
    isScrollEnabled = true
    self.frame = CGRect(x: 0, y: 0,
                        width: UIScreen.main.bounds.width,
                        height: UIScreen.main.bounds.height)
    self.backgroundColor = .systemGray6
    self.addSubview(contentView)
    // Adjust scrollable height per device size.
    var heightPadding: CGFloat = 0
    if UIScreen.main.bounds.height < 670 { // SE, SE2, ...
      heightPadding = bottomInfoLabel.frame.size.height
    } else {
      // Larger devices show the whole form, so scrolling isn't needed.
      self.isScrollEnabled = false
    }
    self.contentSize = .init(width: UIScreen.main.bounds.width,
                             height: UIScreen.main.bounds.height+heightPadding+44)
    contentView.frame = CGRect(x: 0, y: 0,
                               width: UIScreen.main.bounds.width,
                               height: UIScreen.main.bounds.height+heightPadding+44)
    contentView.layoutMargins = UIEdgeInsets(top: 0, left: 20, bottom: 30, right: 20)
  }
  /// Lays out the license-class label and selector button (left half).
  private func configureDriverGradSelectUI() {
    [driverLicenseGradeLabel, selectDriverLicenseGradeButton].forEach {
      contentView.addSubview($0)
    }
    driverLicenseGradeLabel.snp.makeConstraints {
      $0.top.equalTo(guide).offset(CommonUI.sectionLabelPadding)
      $0.leading.equalTo(guide).offset(10)
      $0.height.equalTo(CommonUI.sectionLabelHeight)
    }
    selectDriverLicenseGradeButton.snp.makeConstraints {
      $0.top.equalTo(driverLicenseGradeLabel.snp.bottom).offset(CommonUI.sectionMiddlePadding)
      $0.leading.equalTo(guide)
      $0.trailing.equalTo(contentView.snp.centerX)
      $0.height.equalTo(CommonUI.userInputMenusHeight)
    }
  }
  /// Lays out the license-number label, region selector, and number field.
  /// NOTE(review): method name typo ("confgire"); private, called only from
  /// init in this class.
  private func confgireDriverLicenseNumberUI() {
    [driverLicenseNumberLabel, selectDriverLicenseNumber, driverLicenseNumberTextField].forEach {
      contentView.addSubview($0)
    }
    driverLicenseNumberLabel.snp.makeConstraints {
      $0.top.equalTo(selectDriverLicenseGradeButton.snp.bottom).offset(CommonUI.sectionLabelPadding)
      $0.leading.trailing.equalTo(guide).offset(10)
      $0.height.equalTo(CommonUI.sectionLabelHeight)
    }
    selectDriverLicenseNumber.snp.makeConstraints {
      $0.top.equalTo(driverLicenseNumberLabel.snp.bottom).offset(CommonUI.sectionMiddlePadding)
      $0.leading.equalTo(guide)
      $0.height.equalTo(CommonUI.userInputMenusHeight)
    }
    driverLicenseNumberTextField.snp.makeConstraints {
      $0.top.equalTo(driverLicenseNumberLabel.snp.bottom).offset(CommonUI.sectionMiddlePadding)
      $0.leading.equalTo(selectDriverLicenseNumber.snp.trailing).offset(-1)
      $0.trailing.equalTo(guide)
      $0.width.equalTo(selectDriverLicenseNumber.snp.width).multipliedBy(2)
      $0.height.equalTo(CommonUI.userInputMenusHeight)
    }
  }
  /// Lays out the resident-registration row: birth digits, hyphen,
  /// gender digit, and the masked suffix.
  private func configureUserBirthInputUI() {
    [userBirthLabel, userBirthTextField, userSexTextField].forEach {
      contentView.addSubview($0)
    }
    userBirthLabel.snp.makeConstraints {
      $0.top.equalTo(driverLicenseNumberTextField.snp.bottom).offset(CommonUI.sectionLabelPadding)
      $0.leading.equalTo(guide).offset(10)
      $0.height.equalTo(CommonUI.sectionLabelHeight)
    }
    userBirthTextField.snp.makeConstraints {
      $0.top.equalTo(userBirthLabel.snp.bottom).offset(5)
      $0.leading.trailing.equalTo(guide)
      $0.height.equalTo(CommonUI.userInputMenusHeight)
    }
    // Sub-views anchored inside the birth text field.
    [hiphenLabel, userSexTextField, userBithPrefixLabel].forEach {
      userBirthTextField.addSubview($0)
    }
    userBithPrefixLabel.snp.makeConstraints {
      $0.centerY.equalTo(userBirthTextField.snp.centerY).offset(5)
      $0.trailing.equalTo(userBirthTextField.snp.trailing).offset(-padding)
      $0.height.equalTo(CommonUI.sectionLabelHeight)
    }
    userSexTextField.snp.makeConstraints {
      $0.centerY.equalTo(userBirthTextField.snp.centerY)
      $0.trailing.equalTo(userBithPrefixLabel.snp.leading).offset(-padding)
    }
    hiphenLabel.snp.makeConstraints {
      $0.centerY.equalTo(userBirthTextField.snp.centerY)
      $0.trailing.equalTo(userSexTextField.snp.leading).offset(-padding/2)
    }
  }
  /// Lays out the expiry-date and issue-date labels/fields side by side.
  private func configureCardLabelUI() {
    // NOTE(review): cardExpDateLabel appears twice in this array, so it is
    // re-added as a subview (a harmless no-op the second time) — clean up.
    [cardExpDateLabel, cardPasswordLable, cardExpDateLabel, cardPasswordTextField,
     cardExpDateMonthTextField].forEach {
      contentView.addSubview($0)
    }
    cardExpDateLabel.snp.makeConstraints {
      $0.top.equalTo(userBirthTextField.snp.bottom).offset(CommonUI.sectionLabelPadding)
      $0.leading.equalTo(guide).offset(5)
      $0.height.equalTo(CommonUI.sectionLabelHeight)
    }
    cardPasswordLable.snp.makeConstraints {
      $0.top.equalTo(userBirthTextField.snp.bottom).offset(CommonUI.sectionLabelPadding)
      $0.leading.equalTo(contentView.snp.centerX).offset(5)
      $0.centerY.equalTo(cardExpDateLabel.snp.centerY)
      $0.height.equalTo(CommonUI.sectionLabelHeight)
    }
    cardExpDateMonthTextField.snp.makeConstraints {
      $0.top.equalTo(cardPasswordLable.snp.bottom).offset(5)
      $0.leading.equalTo(guide)
      $0.trailing.equalTo(contentView.snp.centerX).offset(-5) // 5pt gap from center
      $0.height.equalTo(CommonUI.userInputMenusHeight)
    }
    cardPasswordTextField.snp.makeConstraints {
      $0.centerY.equalTo(cardExpDateMonthTextField.snp.centerY)
      $0.leading.equalTo(contentView.snp.centerX).offset(5)
      $0.trailing.equalTo(guide)
      $0.height.equalTo(CommonUI.userInputMenusHeight)
    }
  }
  /// Lays out the all-agree terms button.
  private func configurePersonalImfomationAgreeUI() {
    contentView.addSubview(customAuthAllAgreeButton)
    customAuthAllAgreeButton.snp.makeConstraints {
      $0.top.equalTo(cardPasswordTextField.snp.bottom).offset(CommonUI.sectionLabelPadding)
      $0.leading.trailing.equalTo(guide)
      $0.height.equalTo(CommonUI.userInputMenusHeight)
    }
  }
  /// Lays out the footer notes and pins the bottom of the content.
  private func configureBottomInfoUI() {
    contentView.addSubview(bottomInfoLabel)
    bottomInfoLabel.snp.makeConstraints {
      $0.top.equalTo(customAuthAllAgreeButton.snp.bottom).offset(CommonUI.sectionLabelPadding)
      $0.leading.trailing.equalTo(guide)
      $0.bottom.equalTo(guide)
    }
  }
  /// Pins the completion button to the bottom safe area of the scroll view.
  private func settingAuthCompleteButtonUI() {
    addSubview(driverAuthCompleteButton)
    driverAuthCompleteButton.snp.makeConstraints {
      $0.leading.trailing.equalTo(self.safeAreaLayoutGuide)
      $0.bottom.equalTo(self.safeAreaLayoutGuide).offset(40)
      $0.height.equalTo(100)
    }
  }
}
| 32.809896
| 121
| 0.685451
|
12bbf87ed011ac04e613a11e83d6a41b0acdf092
| 1,218
|
cs
|
C#
|
Gladiator.Application/Gladiator/QueryHandlers/GetAllGladiatorsHandler.cs
|
LexiconGr7-1/gladiator-management-simulator
|
eeb48e8b94ac1fcf6f537278c07ab9fe03395a2d
|
[
"Apache-2.0"
] | null | null | null |
Gladiator.Application/Gladiator/QueryHandlers/GetAllGladiatorsHandler.cs
|
LexiconGr7-1/gladiator-management-simulator
|
eeb48e8b94ac1fcf6f537278c07ab9fe03395a2d
|
[
"Apache-2.0"
] | 25
|
2022-03-24T13:00:58.000Z
|
2022-03-31T12:34:45.000Z
|
Gladiator.Application/Gladiator/QueryHandlers/GetAllGladiatorsHandler.cs
|
LexiconGr7-1/gladiator-management-simulator
|
eeb48e8b94ac1fcf6f537278c07ab9fe03395a2d
|
[
"Apache-2.0"
] | null | null | null |
using Gladiator.Application.Gladiator.Mappers;
using Gladiator.Application.Gladiator.Queries;
using Gladiator.Application.Gladiator.Responses;
using Gladiator.Core.Repositories;
using MediatR;
namespace Gladiator.Application.Gladiator.QueryHandlers
{
/// <summary>
/// MediatR query handler that loads every gladiator from the repository
/// and maps them to <c>GladiatorFullResponse</c> DTOs.
/// </summary>
public class GetAllGladiatorsHandler : IRequestHandler<
    GetAllGladiatorsQuery,
    IList<GladiatorFullResponse>>
{
    private readonly IGladiatorRepository _gladiatorRepository;

    public GetAllGladiatorsHandler(IGladiatorRepository gladiatorRepository)
    {
        _gladiatorRepository = gladiatorRepository;
    }

    /// <summary>
    /// Handles the query: fetch all gladiators, then map to response DTOs.
    /// </summary>
    /// <exception cref="ApplicationException">
    /// Repository returned null, or the mapper produced no result.
    /// </exception>
    public async Task<IList<GladiatorFullResponse>> Handle(
        GetAllGladiatorsQuery request,
        CancellationToken cancellationToken)
    {
        var entities = await _gladiatorRepository.GetAllAsync()
            ?? throw new ApplicationException("Could not get data");

        var mapped = GladiatorMapper.Mapper.Map<IList<GladiatorFullResponse>>(entities);
        return mapped ?? throw new ApplicationException("Issue with mapper");
    }
}
}
| 31.230769
| 96
| 0.68555
|
39f5957c2adc8b2c15cba3789d951f28be4aff5b
| 336
|
dart
|
Dart
|
lib/src/blocs/lesson_filter/lesson_filter_event.dart
|
striveapp/checkin
|
5f762738c50bd8c2d915bf212c295818eb8886a3
|
[
"Apache-2.0"
] | 5
|
2020-05-10T10:38:39.000Z
|
2022-01-17T14:29:12.000Z
|
lib/src/blocs/lesson_filter/lesson_filter_event.dart
|
striveapp/checkin
|
5f762738c50bd8c2d915bf212c295818eb8886a3
|
[
"Apache-2.0"
] | 2
|
2020-11-23T23:41:04.000Z
|
2020-12-07T18:09:07.000Z
|
lib/src/blocs/lesson_filter/lesson_filter_event.dart
|
striveapp/checkin
|
5f762738c50bd8c2d915bf212c295818eb8886a3
|
[
"Apache-2.0"
] | 1
|
2022-01-17T14:29:17.000Z
|
2022-01-17T14:29:17.000Z
|
import 'package:flutter/foundation.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
part 'lesson_filter_event.freezed.dart';
/// Freezed union of the events consumed by the lesson-filter bloc.
/// Implementation lives in the generated `lesson_filter_event.freezed.dart`.
@freezed
class LessonFilterEvent with _$LessonFilterEvent {
  /// Emitted when the set of lesson types available for filtering changes.
  const factory LessonFilterEvent.lessonFilterUpdated({required Set<String> availableLessonTypes}) =
      LessonFilterUpdated;
}
| 30.545455
| 100
| 0.821429
|
0d9a446d7d7bd06f8830368bea66ec845c9a5643
| 49,169
|
rb
|
Ruby
|
lib/rubocop/sketchup/features.rb
|
Eneroth3/rubocop-sketchup
|
d0630cfbc2991aa3dc0811786601c81ab4ae47c9
|
[
"MIT"
] | null | null | null |
lib/rubocop/sketchup/features.rb
|
Eneroth3/rubocop-sketchup
|
d0630cfbc2991aa3dc0811786601c81ab4ae47c9
|
[
"MIT"
] | null | null | null |
lib/rubocop/sketchup/features.rb
|
Eneroth3/rubocop-sketchup
|
d0630cfbc2991aa3dc0811786601c81ab4ae47c9
|
[
"MIT"
] | null | null | null |
# frozen_string_literal: true
# How to update this file:
#
# FEATURES constant:
#
# 1. Run the `rubocop-changelog` YARD template on the API stubs repository:
# (https://github.com/SketchUp/rubocop-sketchup/issues/4#issuecomment-370753043)
#
# yardoc -c -t rubocop-changelog -f text > rubocop-changelog.txt
#
# 2. Prune out any unreleased versions.
#
#
# INSTANCE_METHODS constant:
#
# Manually curated list of method names which are believed to yield few false
# positives. The method names should be names that are not commonly used in
# general context.
#
# Since it's difficult to know what a variable in Ruby code represent by static
# analysis, these methods are assumed to belong to the FEATURES list.
#
# When a new version is released and FEATURES is updated the new methods from
# the new version should be considered for this list.
#
# TODO(thomthom): Investigate if Solargraph's static analysis can be used to
# provide a more accurate analysis. Not sure how well it works on code bases
# that isn't well documented with YARD type tags.
#
#
# OBSERVER_METHODS constant:
#
# Currently manually curated.
#
# TODO(thomthom): Tag observer methods in YARD for automatic extraction.
module RuboCop
module SketchUp
module Features
FEATURES = [
{
version: 'LayOut 2020.1',
types: {
method: [
'Layout::Document#export',
'Layout::Grid#clip_to_margins=',
'Layout::Grid#clip_to_margins?',
'Layout::Grid#in_front=',
'Layout::Grid#in_front?',
'Layout::Grid#major_color=',
'Layout::Grid#major_spacing=',
'Layout::Grid#minor_color=',
'Layout::Grid#minor_divisions=',
'Layout::Grid#print=',
'Layout::Grid#show=',
'Layout::Grid#show_major=',
'Layout::Grid#show_minor=',
'Layout::SketchUpModel#camera_modified?',
'Layout::SketchUpModel#effects_modified?',
'Layout::SketchUpModel#layers_modified?',
'Layout::SketchUpModel#reset_camera',
'Layout::SketchUpModel#reset_effects',
'Layout::SketchUpModel#reset_layers',
'Layout::SketchUpModel#reset_style',
'Layout::SketchUpModel#style_modified?',
],
},
},
{
version: 'SketchUp 2020.1',
types: {
method: [
'Sketchup::Entities#weld',
'Sketchup::Page#use_hidden_geometry=',
'Sketchup::Page#use_hidden_geometry?',
'Sketchup::Page#use_hidden_objects=',
'Sketchup::Page#use_hidden_objects?',
],
},
},
{
version: 'SketchUp 2020.0',
types: {
method: [
'Geom.tesselate',
'Sketchup::Layer#display_name',
'Sketchup::Model#active_path=',
'Sketchup::Model#drawing_element_visible?',
'Sketchup::Page#get_drawingelement_visibility',
'Sketchup::Page#set_drawingelement_visibility',
'Sketchup::View#text_bounds',
],
},
},
{
version: 'SketchUp 2019.2',
types: {
method: [
'Sketchup.format_volume',
'Sketchup::Material#owner_type',
'Sketchup::Selection#invert',
'Sketchup::Tool#onMouseWheel',
],
},
},
{
version: 'LayOut 2019',
types: {
method: [
'Geom::Point2d#transform',
'Geom::Point2d#transform!',
'Geom::Transformation2d#*',
'Geom::Transformation2d#inverse',
'Geom::Transformation2d#invert!',
'Geom::Transformation2d.rotation',
'Geom::Transformation2d.scaling',
'Geom::Transformation2d.translation',
'Geom::Vector2d#transform',
'Geom::Vector2d#transform!',
'Layout::Path#winding',
'Layout::SketchUpModel#dash_scale',
],
},
},
{
version: 'SketchUp 2019',
types: {
class: [
'Sketchup::LineStyle',
'Sketchup::LineStyles',
],
method: [
'Sketchup::DimensionLinear#end_attached_to',
'Sketchup::DimensionLinear#end_attached_to=',
'Sketchup::DimensionLinear#start_attached_to',
'Sketchup::DimensionLinear#start_attached_to=',
'Sketchup::Layer#line_style',
'Sketchup::Layer#line_style=',
'Sketchup::LineStyle#name',
'Sketchup::LineStyles#[]',
'Sketchup::LineStyles#at',
'Sketchup::LineStyles#each',
'Sketchup::LineStyles#length',
'Sketchup::LineStyles#names',
'Sketchup::LineStyles#size',
'Sketchup::LineStyles#to_a',
'Sketchup::Model#line_styles',
'Sketchup::Text#attached_to',
'Sketchup::Text#attached_to=',
'Sketchup::Tools#active_tool',
],
},
},
{
version: 'LayOut 2018',
types: {
class: [
'Geom::Bounds2d',
'Geom::OrientedBounds2d',
'Geom::Point2d',
'Geom::Transformation2d',
'Geom::Vector2d',
'Layout::AngularDimension',
'Layout::AutoTextDefinition',
'Layout::AutoTextDefinitions',
'Layout::ConnectionPoint',
'Layout::Document',
'Layout::Ellipse',
'Layout::Entities',
'Layout::Entity',
'Layout::FormattedText',
'Layout::Grid',
'Layout::Group',
'Layout::Image',
'Layout::Label',
'Layout::Layer',
'Layout::LayerInstance',
'Layout::Layers',
'Layout::LinearDimension',
'Layout::LockedEntityError',
'Layout::LockedLayerError',
'Layout::Page',
'Layout::PageInfo',
'Layout::Pages',
'Layout::Path',
'Layout::Rectangle',
'Layout::SketchUpModel',
'Layout::Style',
'Layout::Table',
'Layout::TableCell',
'Layout::TableColumn',
'Layout::TableRow',
],
method: [
'Geom::Bounds2d#==',
'Geom::Bounds2d#height',
'Geom::Bounds2d#initialize',
'Geom::Bounds2d#lower_right',
'Geom::Bounds2d#set!',
'Geom::Bounds2d#to_a',
'Geom::Bounds2d#upper_left',
'Geom::Bounds2d#width',
'Geom::OrientedBounds2d#==',
'Geom::OrientedBounds2d#lower_left',
'Geom::OrientedBounds2d#lower_right',
'Geom::OrientedBounds2d#to_a',
'Geom::OrientedBounds2d#upper_left',
'Geom::OrientedBounds2d#upper_right',
'Geom::Point2d#+',
'Geom::Point2d#-',
'Geom::Point2d#==',
'Geom::Point2d#[]',
'Geom::Point2d#[]=',
'Geom::Point2d#clone',
'Geom::Point2d#distance',
'Geom::Point2d#initialize',
'Geom::Point2d#inspect',
'Geom::Point2d#offset',
'Geom::Point2d#offset!',
'Geom::Point2d#set!',
'Geom::Point2d#to_a',
'Geom::Point2d#to_s',
'Geom::Point2d#vector_to',
'Geom::Point2d#x',
'Geom::Point2d#x=',
'Geom::Point2d#y',
'Geom::Point2d#y=',
'Geom::Transformation2d#==',
'Geom::Transformation2d#clone',
'Geom::Transformation2d#identity?',
'Geom::Transformation2d#initialize',
'Geom::Transformation2d#set!',
'Geom::Transformation2d#to_a',
'Geom::Vector2d#%',
'Geom::Vector2d#*',
'Geom::Vector2d#+',
'Geom::Vector2d#-',
'Geom::Vector2d#==',
'Geom::Vector2d#[]',
'Geom::Vector2d#[]=',
'Geom::Vector2d#angle_between',
'Geom::Vector2d#clone',
'Geom::Vector2d#cross',
'Geom::Vector2d#dot',
'Geom::Vector2d#initialize',
'Geom::Vector2d#inspect',
'Geom::Vector2d#length',
'Geom::Vector2d#length=',
'Geom::Vector2d#normalize',
'Geom::Vector2d#normalize!',
'Geom::Vector2d#parallel?',
'Geom::Vector2d#perpendicular?',
'Geom::Vector2d#reverse',
'Geom::Vector2d#reverse!',
'Geom::Vector2d#same_direction?',
'Geom::Vector2d#set!',
'Geom::Vector2d#to_a',
'Geom::Vector2d#to_s',
'Geom::Vector2d#unit_vector?',
'Geom::Vector2d#valid?',
'Geom::Vector2d#x',
'Geom::Vector2d#x=',
'Geom::Vector2d#y',
'Geom::Vector2d#y=',
'Layout::AngularDimension#angle',
'Layout::AngularDimension#arc_center_point',
'Layout::AngularDimension#custom_text=',
'Layout::AngularDimension#custom_text?',
'Layout::AngularDimension#end_connection_point',
'Layout::AngularDimension#end_connection_point=',
'Layout::AngularDimension#end_extent_point',
'Layout::AngularDimension#end_extent_point=',
'Layout::AngularDimension#end_offset_length=',
'Layout::AngularDimension#end_offset_point',
'Layout::AngularDimension#entities',
'Layout::AngularDimension#initialize',
'Layout::AngularDimension#leader_line_type',
'Layout::AngularDimension#leader_line_type=',
'Layout::AngularDimension#radius',
'Layout::AngularDimension#radius=',
'Layout::AngularDimension#start_connection_point',
'Layout::AngularDimension#start_connection_point=',
'Layout::AngularDimension#start_extent_point',
'Layout::AngularDimension#start_extent_point=',
'Layout::AngularDimension#start_offset_length=',
'Layout::AngularDimension#start_offset_point',
'Layout::AngularDimension#text',
'Layout::AngularDimension#text=',
'Layout::AutoTextDefinition#==',
'Layout::AutoTextDefinition#custom_text',
'Layout::AutoTextDefinition#custom_text=',
'Layout::AutoTextDefinition#date_format',
'Layout::AutoTextDefinition#date_format=',
'Layout::AutoTextDefinition#display_file_extension=',
'Layout::AutoTextDefinition#display_file_extension?',
'Layout::AutoTextDefinition#display_full_path=',
'Layout::AutoTextDefinition#display_full_path?',
'Layout::AutoTextDefinition#mandatory?',
'Layout::AutoTextDefinition#name',
'Layout::AutoTextDefinition#name=',
'Layout::AutoTextDefinition#page_number_style',
'Layout::AutoTextDefinition#page_number_style=',
'Layout::AutoTextDefinition#start_index',
'Layout::AutoTextDefinition#start_index=',
'Layout::AutoTextDefinition#tag',
'Layout::AutoTextDefinition#type',
'Layout::AutoTextDefinitions#[]',
'Layout::AutoTextDefinitions#add',
'Layout::AutoTextDefinitions#each',
'Layout::AutoTextDefinitions#index',
'Layout::AutoTextDefinitions#length',
'Layout::AutoTextDefinitions#remove',
'Layout::AutoTextDefinitions#size',
'Layout::ConnectionPoint#initialize',
'Layout::Document#==',
'Layout::Document#add_entity',
'Layout::Document#auto_text_definitions',
'Layout::Document#grid',
'Layout::Document#grid_snap_enabled=',
'Layout::Document#grid_snap_enabled?',
'Layout::Document#initialize',
'Layout::Document#layers',
'Layout::Document#object_snap_enabled=',
'Layout::Document#object_snap_enabled?',
'Layout::Document#page_info',
'Layout::Document#pages',
'Layout::Document#path',
'Layout::Document#precision',
'Layout::Document#precision=',
'Layout::Document#remove_entity',
'Layout::Document#save',
'Layout::Document#shared_entities',
'Layout::Document#time_created',
'Layout::Document#time_modified',
'Layout::Document#time_published',
'Layout::Document#units',
'Layout::Document#units=',
'Layout::Document.open',
'Layout::Ellipse#initialize',
'Layout::Entities#[]',
'Layout::Entities#each',
'Layout::Entities#length',
'Layout::Entities#reverse_each',
'Layout::Entities#size',
'Layout::Entity#==',
'Layout::Entity#bounds',
'Layout::Entity#document',
'Layout::Entity#drawing_bounds',
'Layout::Entity#group',
'Layout::Entity#layer_instance',
'Layout::Entity#locked=',
'Layout::Entity#locked?',
'Layout::Entity#move_to_group',
'Layout::Entity#move_to_layer',
'Layout::Entity#on_shared_layer?',
'Layout::Entity#page',
'Layout::Entity#style',
'Layout::Entity#style=',
'Layout::Entity#transform!',
'Layout::Entity#transformation',
'Layout::Entity#untransformed_bounds',
'Layout::Entity#untransformed_bounds=',
'Layout::FormattedText#append_plain_text',
'Layout::FormattedText#apply_style',
'Layout::FormattedText#display_text',
'Layout::FormattedText#grow_mode',
'Layout::FormattedText#grow_mode=',
'Layout::FormattedText#initialize',
'Layout::FormattedText#plain_text',
'Layout::FormattedText#plain_text=',
'Layout::FormattedText#rtf',
'Layout::FormattedText#rtf=',
'Layout::FormattedText#style',
'Layout::FormattedText.new_from_file',
'Layout::Grid#major_color',
'Layout::Grid#major_spacing',
'Layout::Grid#minor_color',
'Layout::Grid#minor_divisions',
'Layout::Grid#print?',
'Layout::Grid#show?',
'Layout::Grid#show_major?',
'Layout::Grid#show_minor?',
'Layout::Group#entities',
'Layout::Group#initialize',
'Layout::Group#remove_scale_factor',
'Layout::Group#scale_factor',
'Layout::Group#scale_precision',
'Layout::Group#scale_precision=',
'Layout::Group#scale_units',
'Layout::Group#scale_units=',
'Layout::Group#set_scale_factor',
'Layout::Group#ungroup',
'Layout::Image#clip_mask',
'Layout::Image#clip_mask=',
'Layout::Image#initialize',
'Layout::Label#connect',
'Layout::Label#connection_type',
'Layout::Label#connection_type=',
'Layout::Label#disconnect',
'Layout::Label#entities',
'Layout::Label#initialize',
'Layout::Label#leader_line',
'Layout::Label#leader_line=',
'Layout::Label#leader_line_type',
'Layout::Label#leader_line_type=',
'Layout::Label#text',
'Layout::Label#text=',
'Layout::Layer#==',
'Layout::Layer#document',
'Layout::Layer#layer_instance',
'Layout::Layer#locked=',
'Layout::Layer#locked?',
'Layout::Layer#name',
'Layout::Layer#name=',
'Layout::Layer#set_nonshared',
'Layout::Layer#set_shared',
'Layout::Layer#shared?',
'Layout::LayerInstance#==',
'Layout::LayerInstance#definition',
'Layout::LayerInstance#entities',
'Layout::LayerInstance#entity_index',
'Layout::LayerInstance#reorder_entity',
'Layout::Layers#[]',
'Layout::Layers#active',
'Layout::Layers#active=',
'Layout::Layers#add',
'Layout::Layers#each',
'Layout::Layers#index',
'Layout::Layers#length',
'Layout::Layers#remove',
'Layout::Layers#reorder',
'Layout::Layers#size',
'Layout::LinearDimension#auto_scale=',
'Layout::LinearDimension#auto_scale?',
'Layout::LinearDimension#connect',
'Layout::LinearDimension#custom_text=',
'Layout::LinearDimension#custom_text?',
'Layout::LinearDimension#disconnect',
'Layout::LinearDimension#end_connection_point',
'Layout::LinearDimension#end_connection_point=',
'Layout::LinearDimension#end_extent_point',
'Layout::LinearDimension#end_extent_point=',
'Layout::LinearDimension#end_offset_length=',
'Layout::LinearDimension#end_offset_point',
'Layout::LinearDimension#entities',
'Layout::LinearDimension#initialize',
'Layout::LinearDimension#leader_line_type',
'Layout::LinearDimension#leader_line_type=',
'Layout::LinearDimension#scale',
'Layout::LinearDimension#scale=',
'Layout::LinearDimension#start_connection_point',
'Layout::LinearDimension#start_connection_point=',
'Layout::LinearDimension#start_extent_point',
'Layout::LinearDimension#start_extent_point=',
'Layout::LinearDimension#start_offset_length=',
'Layout::LinearDimension#start_offset_point',
'Layout::LinearDimension#text',
'Layout::LinearDimension#text=',
'Layout::Page#==',
'Layout::Page#document',
'Layout::Page#entities',
'Layout::Page#in_presentation=',
'Layout::Page#in_presentation?',
'Layout::Page#layer_instances',
'Layout::Page#layer_visible?',
'Layout::Page#name',
'Layout::Page#name=',
'Layout::Page#nonshared_entities',
'Layout::Page#set_layer_visibility',
'Layout::PageInfo#bottom_margin',
'Layout::PageInfo#bottom_margin=',
'Layout::PageInfo#color',
'Layout::PageInfo#color=',
'Layout::PageInfo#display_resolution',
'Layout::PageInfo#display_resolution=',
'Layout::PageInfo#height',
'Layout::PageInfo#height=',
'Layout::PageInfo#left_margin',
'Layout::PageInfo#left_margin=',
'Layout::PageInfo#margin_color',
'Layout::PageInfo#margin_color=',
'Layout::PageInfo#output_resolution',
'Layout::PageInfo#output_resolution=',
'Layout::PageInfo#print_margins=',
'Layout::PageInfo#print_margins?',
'Layout::PageInfo#print_paper_color=',
'Layout::PageInfo#print_paper_color?',
'Layout::PageInfo#right_margin',
'Layout::PageInfo#right_margin=',
'Layout::PageInfo#show_margins=',
'Layout::PageInfo#show_margins?',
'Layout::PageInfo#top_margin',
'Layout::PageInfo#top_margin=',
'Layout::PageInfo#width',
'Layout::PageInfo#width=',
'Layout::Pages#[]',
'Layout::Pages#add',
'Layout::Pages#each',
'Layout::Pages#index',
'Layout::Pages#initial',
'Layout::Pages#initial=',
'Layout::Pages#length',
'Layout::Pages#remove',
'Layout::Pages#reorder',
'Layout::Pages#size',
'Layout::Path#append_point',
'Layout::Path#arc',
'Layout::Path#circle',
'Layout::Path#close',
'Layout::Path#closed?',
'Layout::Path#end_arrow',
'Layout::Path#end_point',
'Layout::Path#initialize',
'Layout::Path#parametric_length',
'Layout::Path#point_at',
'Layout::Path#point_types',
'Layout::Path#points',
'Layout::Path#start_arrow',
'Layout::Path#start_point',
'Layout::Path#tangent_at',
'Layout::Path.new_arc',
'Layout::Rectangle#initialize',
'Layout::Rectangle#radius',
'Layout::Rectangle#radius=',
'Layout::Rectangle#type',
'Layout::Rectangle#type=',
'Layout::SketchUpModel#clip_mask',
'Layout::SketchUpModel#clip_mask=',
'Layout::SketchUpModel#current_scene',
'Layout::SketchUpModel#current_scene=',
'Layout::SketchUpModel#current_scene_modified?',
'Layout::SketchUpModel#dash_scale=',
'Layout::SketchUpModel#display_background=',
'Layout::SketchUpModel#display_background?',
'Layout::SketchUpModel#entities',
'Layout::SketchUpModel#initialize',
'Layout::SketchUpModel#line_weight',
'Layout::SketchUpModel#line_weight=',
'Layout::SketchUpModel#model_to_paper_point',
'Layout::SketchUpModel#perspective=',
'Layout::SketchUpModel#perspective?',
'Layout::SketchUpModel#preserve_scale_on_resize=',
'Layout::SketchUpModel#preserve_scale_on_resize?',
'Layout::SketchUpModel#render',
'Layout::SketchUpModel#render_mode',
'Layout::SketchUpModel#render_mode=',
'Layout::SketchUpModel#render_needed?',
'Layout::SketchUpModel#scale',
'Layout::SketchUpModel#scale=',
'Layout::SketchUpModel#scenes',
'Layout::SketchUpModel#view',
'Layout::SketchUpModel#view=',
'Layout::Style#dimension_rotation_alignment',
'Layout::Style#dimension_rotation_alignment=',
'Layout::Style#dimension_units',
'Layout::Style#dimension_vertical_alignment',
'Layout::Style#dimension_vertical_alignment=',
'Layout::Style#end_arrow_size',
'Layout::Style#end_arrow_size=',
'Layout::Style#end_arrow_type',
'Layout::Style#end_arrow_type=',
'Layout::Style#fill_color',
'Layout::Style#fill_color=',
'Layout::Style#font_family',
'Layout::Style#font_family=',
'Layout::Style#font_size',
'Layout::Style#font_size=',
'Layout::Style#get_sub_style',
'Layout::Style#initialize',
'Layout::Style#pattern_fill_origin',
'Layout::Style#pattern_fill_origin=',
'Layout::Style#pattern_fill_path',
'Layout::Style#pattern_fill_path=',
'Layout::Style#pattern_fill_rotation',
'Layout::Style#pattern_fill_rotation=',
'Layout::Style#pattern_fill_scale',
'Layout::Style#pattern_fill_scale=',
'Layout::Style#pattern_filled',
'Layout::Style#pattern_filled=',
'Layout::Style#set_dimension_units',
'Layout::Style#set_sub_style',
'Layout::Style#solid_filled',
'Layout::Style#solid_filled=',
'Layout::Style#start_arrow_size',
'Layout::Style#start_arrow_size=',
'Layout::Style#start_arrow_type',
'Layout::Style#start_arrow_type=',
'Layout::Style#stroke_cap_style',
'Layout::Style#stroke_cap_style=',
'Layout::Style#stroke_color',
'Layout::Style#stroke_color=',
'Layout::Style#stroke_join_style',
'Layout::Style#stroke_join_style=',
'Layout::Style#stroke_pattern',
'Layout::Style#stroke_pattern=',
'Layout::Style#stroke_pattern_scale',
'Layout::Style#stroke_pattern_scale=',
'Layout::Style#stroke_width',
'Layout::Style#stroke_width=',
'Layout::Style#stroked',
'Layout::Style#stroked=',
'Layout::Style#suppress_dimension_units',
'Layout::Style#suppress_dimension_units=',
'Layout::Style#text_alignment',
'Layout::Style#text_alignment=',
'Layout::Style#text_anchor',
'Layout::Style#text_anchor=',
'Layout::Style#text_bold',
'Layout::Style#text_bold=',
'Layout::Style#text_color',
'Layout::Style#text_color=',
'Layout::Style#text_elevation',
'Layout::Style#text_elevation=',
'Layout::Style#text_italic',
'Layout::Style#text_italic=',
'Layout::Style#text_underline',
'Layout::Style#text_underline=',
'Layout::Style.arrow_type_filled?',
'Layout::Table#[]',
'Layout::Table#dimensions',
'Layout::Table#each',
'Layout::Table#entities',
'Layout::Table#get_column',
'Layout::Table#get_row',
'Layout::Table#initialize',
'Layout::Table#insert_column',
'Layout::Table#insert_row',
'Layout::Table#merge',
'Layout::Table#remove_column',
'Layout::Table#remove_row',
'Layout::TableCell#data',
'Layout::TableCell#data=',
'Layout::TableCell#rotation',
'Layout::TableCell#rotation=',
'Layout::TableCell#span',
'Layout::TableColumn#left_edge_style',
'Layout::TableColumn#left_edge_style=',
'Layout::TableColumn#right_edge_style',
'Layout::TableColumn#right_edge_style=',
'Layout::TableColumn#width',
'Layout::TableColumn#width=',
'Layout::TableRow#bottom_edge_style',
'Layout::TableRow#bottom_edge_style=',
'Layout::TableRow#height',
'Layout::TableRow#height=',
'Layout::TableRow#top_edge_style',
'Layout::TableRow#top_edge_style=',
],
module: [
'Layout',
],
},
},
{
version: 'SketchUp 2018',
types: {
class: [
'Sketchup::ImageRep',
],
method: [
'Sketchup.send_to_layout',
'Sketchup::Color#==',
'Sketchup::DefinitionList#remove',
'Sketchup::Image#image_rep',
'Sketchup::ImageRep#bits_per_pixel',
'Sketchup::ImageRep#color_at_uv',
'Sketchup::ImageRep#colors',
'Sketchup::ImageRep#data',
'Sketchup::ImageRep#height',
'Sketchup::ImageRep#initialize',
'Sketchup::ImageRep#load_file',
'Sketchup::ImageRep#row_padding',
'Sketchup::ImageRep#save_file',
'Sketchup::ImageRep#set_data',
'Sketchup::ImageRep#size',
'Sketchup::ImageRep#width',
'Sketchup::Materials#unique_name',
'Sketchup::Page#include_in_animation=',
'Sketchup::Page#include_in_animation?',
'Sketchup::SectionPlane#name',
'Sketchup::SectionPlane#name=',
'Sketchup::SectionPlane#symbol',
'Sketchup::SectionPlane#symbol=',
'Sketchup::Texture#image_rep',
'UI.refresh_toolbars',
],
},
},
{
version: 'SketchUp 2017',
types: {
class: [
'Sketchup::Http::Request',
'Sketchup::Http::Response',
'Sketchup::InstancePath',
'UI::HtmlDialog',
'UI::Notification',
],
method: [
'Sketchup::Entity#persistent_id',
'Sketchup::Http::Request#body',
'Sketchup::Http::Request#body=',
'Sketchup::Http::Request#cancel',
'Sketchup::Http::Request#headers',
'Sketchup::Http::Request#headers=',
'Sketchup::Http::Request#initialize',
'Sketchup::Http::Request#method',
'Sketchup::Http::Request#method=',
'Sketchup::Http::Request#set_download_progress_callback',
'Sketchup::Http::Request#set_upload_progress_callback',
'Sketchup::Http::Request#start',
'Sketchup::Http::Request#status',
'Sketchup::Http::Request#url',
'Sketchup::Http::Response#body',
'Sketchup::Http::Response#headers',
'Sketchup::Http::Response#status_code',
'Sketchup::InputPoint#instance_path',
'Sketchup::InstancePath#==',
'Sketchup::InstancePath#[]',
'Sketchup::InstancePath#each',
'Sketchup::InstancePath#empty?',
'Sketchup::InstancePath#include?',
'Sketchup::InstancePath#initialize',
'Sketchup::InstancePath#leaf',
'Sketchup::InstancePath#length',
'Sketchup::InstancePath#persistent_id_path',
'Sketchup::InstancePath#root',
'Sketchup::InstancePath#size',
'Sketchup::InstancePath#to_a',
'Sketchup::InstancePath#transformation',
'Sketchup::InstancePath#valid?',
'Sketchup::Material#save_as',
'Sketchup::Materials#load',
'Sketchup::Model#find_entity_by_persistent_id',
'Sketchup::Model#instance_path_from_pid_path',
'Sketchup::ModelObserver#onPidChanged',
'UI.scale_factor',
'UI.show_extension_manager',
'UI::HtmlDialog#add_action_callback',
'UI::HtmlDialog#bring_to_front',
'UI::HtmlDialog#center',
'UI::HtmlDialog#close',
'UI::HtmlDialog#execute_script',
'UI::HtmlDialog#initialize',
'UI::HtmlDialog#set_can_close',
'UI::HtmlDialog#set_file',
'UI::HtmlDialog#set_html',
'UI::HtmlDialog#set_on_closed',
'UI::HtmlDialog#set_position',
'UI::HtmlDialog#set_size',
'UI::HtmlDialog#set_url',
'UI::HtmlDialog#show',
'UI::HtmlDialog#show_modal',
'UI::HtmlDialog#visible?',
'UI::Notification#icon_name',
'UI::Notification#icon_name=',
'UI::Notification#icon_tooltip',
'UI::Notification#icon_tooltip=',
'UI::Notification#initialize',
'UI::Notification#message',
'UI::Notification#message=',
'UI::Notification#on_accept',
'UI::Notification#on_accept_title',
'UI::Notification#on_dismiss',
'UI::Notification#on_dismiss_title',
'UI::Notification#show',
],
module: [
'Sketchup::Http',
],
},
},
{
version: 'SketchUp 2016 M1',
types: {
method: [
'Sketchup::RegionalSettings.decimal_separator',
'Sketchup::RegionalSettings.list_separator',
],
module: [
'Sketchup::RegionalSettings',
],
},
},
{
version: 'SketchUp 2016',
types: {
class: [
'Sketchup::Axes',
],
method: [
'Sketchup.debug_mode=',
'Sketchup.debug_mode?',
'Sketchup::Axes#axes',
'Sketchup::Axes#origin',
'Sketchup::Axes#set',
'Sketchup::Axes#sketch_plane',
'Sketchup::Axes#to_a',
'Sketchup::Axes#transformation',
'Sketchup::Axes#xaxis',
'Sketchup::Axes#yaxis',
'Sketchup::Axes#zaxis',
'Sketchup::ComponentDefinition#count_used_instances',
'Sketchup::Model#axes',
'Sketchup::Page#axes',
'Sketchup::PickHelper#boundingbox_pick',
'Sketchup::PickHelper#window_pick',
'Sketchup::Texture#write',
],
},
},
{
version: 'SketchUp 2015',
types: {
class: [
'Sketchup::ClassificationSchema',
'Sketchup::Classifications',
'Sketchup::Licensing::ExtensionLicense',
],
method: [
'Sketchup.is_64bit?',
'Sketchup::AppObserver#onActivateModel',
'Sketchup::Camera#center_2d',
'Sketchup::Camera#fov_is_height?',
'Sketchup::Camera#is_2d?',
'Sketchup::Camera#scale_2d',
'Sketchup::ClassificationSchema#<=>',
'Sketchup::ClassificationSchema#name',
'Sketchup::ClassificationSchema#namespace',
'Sketchup::Classifications#[]',
'Sketchup::Classifications#each',
'Sketchup::Classifications#keys',
'Sketchup::Classifications#length',
'Sketchup::Classifications#load_schema',
'Sketchup::Classifications#size',
'Sketchup::Classifications#unload_schema',
'Sketchup::ComponentDefinition#add_classification',
'Sketchup::ComponentDefinition#get_classification_value',
'Sketchup::ComponentDefinition#remove_classification',
'Sketchup::ComponentDefinition#set_classification_value',
'Sketchup::Group#definition',
'Sketchup::Layers#remove',
'Sketchup::Licensing.get_extension_license',
'Sketchup::Licensing::ExtensionLicense#days_remaining',
'Sketchup::Licensing::ExtensionLicense#error_description',
'Sketchup::Licensing::ExtensionLicense#licensed?',
'Sketchup::Licensing::ExtensionLicense#state',
'Sketchup::Material#colorize_deltas',
'Sketchup::Material#colorize_type',
'Sketchup::Material#colorize_type=',
'Sketchup::Model#classifications',
'Sketchup::Model#close',
'Sketchup::Model#find_entity_by_id',
'UI.select_directory',
],
module: [
'Sketchup::Licensing',
],
},
},
{
version: 'SketchUp 2014',
types: {
class: [
'LanguageHandler',
'Sketchup::Console',
'Sketchup::Dimension',
'Sketchup::DimensionLinear',
'Sketchup::DimensionObserver',
'Sketchup::DimensionRadial',
],
method: [
'Geom::PolygonMesh#set_uv',
'LanguageHandler#[]',
'LanguageHandler#initialize',
'LanguageHandler#resource_path',
'LanguageHandler#strings',
'Sketchup.platform',
'Sketchup.quit',
'Sketchup.temp_dir',
'Sketchup::AppObserver#expectsStartupModelNotifications',
'Sketchup::AttributeDictionaries#count',
'Sketchup::AttributeDictionaries#length',
'Sketchup::AttributeDictionaries#size',
'Sketchup::AttributeDictionary#count',
'Sketchup::ComponentInstance#guid',
'Sketchup::Console#clear',
'Sketchup::Console#hide',
'Sketchup::Console#show',
'Sketchup::Console#visible?',
'Sketchup::DefinitionList#size',
'Sketchup::Dimension#add_observer',
'Sketchup::Dimension#arrow_type',
'Sketchup::Dimension#arrow_type=',
'Sketchup::Dimension#has_aligned_text=',
'Sketchup::Dimension#has_aligned_text?',
'Sketchup::Dimension#plane',
'Sketchup::Dimension#remove_observer',
'Sketchup::Dimension#text',
'Sketchup::Dimension#text=',
'Sketchup::DimensionLinear#aligned_text_position',
'Sketchup::DimensionLinear#aligned_text_position=',
'Sketchup::DimensionLinear#end',
'Sketchup::DimensionLinear#end=',
'Sketchup::DimensionLinear#offset_vector',
'Sketchup::DimensionLinear#offset_vector=',
'Sketchup::DimensionLinear#start',
'Sketchup::DimensionLinear#start=',
'Sketchup::DimensionLinear#text_position',
'Sketchup::DimensionLinear#text_position=',
'Sketchup::DimensionObserver#onTextChanged',
'Sketchup::DimensionRadial#arc_curve',
'Sketchup::DimensionRadial#arc_curve=',
'Sketchup::DimensionRadial#leader_break_point',
'Sketchup::DimensionRadial#leader_break_point=',
'Sketchup::DimensionRadial#leader_points',
'Sketchup::Entities#active_section_plane',
'Sketchup::Entities#active_section_plane=',
'Sketchup::Entities#add_dimension_linear',
'Sketchup::Entities#add_dimension_radial',
'Sketchup::Entities#add_section_plane',
'Sketchup::Entities#size',
'Sketchup::EntitiesObserver#onActiveSectionPlaneChanged',
'Sketchup::Face#get_texture_projection',
'Sketchup::Face#set_texture_projection',
'Sketchup::Group#guid',
'Sketchup::Image#transformation',
'Sketchup::Image#transformation=',
'Sketchup::Layer#color',
'Sketchup::Layer#color=',
'Sketchup::Layers#size',
'Sketchup::LayersObserver#onLayerChanged',
'Sketchup::Materials#size',
'Sketchup::Model#save_copy',
'Sketchup::OptionsManager#length',
'Sketchup::OptionsProvider#length',
'Sketchup::Pages#length',
'Sketchup::RenderingOptions#count',
'Sketchup::RenderingOptions#length',
'Sketchup::RenderingOptions#size',
'Sketchup::SectionPlane#activate',
'Sketchup::SectionPlane#active?',
'Sketchup::Selection#size',
'Sketchup::ShadowInfo#count',
'Sketchup::ShadowInfo#length',
'Sketchup::ShadowInfo#size',
'Sketchup::Styles#length',
'UI::Toolbar#count',
'UI::Toolbar#length',
'UI::Toolbar#size',
'UI::WebDialog#screen_scale_factor',
],
},
},
{
version: 'SketchUp 2013',
types: {
method: [
'SketchupExtension#extension_path',
'SketchupExtension#id',
'SketchupExtension#version_id',
],
},
},
{
version: 'SketchUp 8.0 M2',
types: {
class: [
'Sketchup::ExtensionsManager',
],
method: [
'Sketchup.extensions',
'Sketchup.install_from_archive',
'Sketchup.plugins_disabled=',
'Sketchup.plugins_disabled?',
'Sketchup::ExtensionsManager#[]',
'Sketchup::ExtensionsManager#count',
'Sketchup::ExtensionsManager#each',
'Sketchup::ExtensionsManager#keys',
'Sketchup::ExtensionsManager#length',
'Sketchup::ExtensionsManager#size',
'SketchupExtension#check',
'SketchupExtension#load_on_start?',
'SketchupExtension#loaded?',
'SketchupExtension#registered?',
'SketchupExtension#uncheck',
],
},
},
{
version: 'SketchUp 8.0 M1',
types: {
method: [
'Sketchup.fix_shadow_strings=',
'Sketchup.fix_shadow_strings?',
'Sketchup::Color#alpha=',
'Sketchup::Material#name=',
'Sketchup::Material#write_thumbnail',
'Sketchup::Materials#remove',
'UI::Command#large_icon',
'UI::Command#menu_text',
'UI::Command#small_icon',
'UI::Command#status_bar_text',
'UI::Command#tooltip',
'UI::Toolbar#each',
'UI::Toolbar#name',
],
},
},
{
version: 'SketchUp 8.0',
types: {
method: [
'Sketchup::ComponentInstance#equals?',
'Sketchup::ComponentInstance#intersect',
'Sketchup::ComponentInstance#manifold?',
'Sketchup::ComponentInstance#outer_shell',
'Sketchup::ComponentInstance#show_differences',
'Sketchup::ComponentInstance#split',
'Sketchup::ComponentInstance#subtract',
'Sketchup::ComponentInstance#trim',
'Sketchup::ComponentInstance#union',
'Sketchup::ComponentInstance#volume',
'Sketchup::EntitiesObserver#onElementModified',
'Sketchup::Group#equals?',
'Sketchup::Group#intersect',
'Sketchup::Group#manifold?',
'Sketchup::Group#outer_shell',
'Sketchup::Group#show_differences',
'Sketchup::Group#split',
'Sketchup::Group#subtract',
'Sketchup::Group#trim',
'Sketchup::Group#union',
'Sketchup::Group#volume',
'Sketchup::ModelObserver#onPostSaveModel',
'Sketchup::ModelObserver#onPreSaveModel',
],
},
},
{
version: 'SketchUp 7.1 M1',
types: {
method: [
'Sketchup::Curve#is_polygon?',
],
},
},
{
version: 'SketchUp 7.1',
types: {
method: [
'Sketchup::Model#georeferenced?',
'Sketchup::Model#number_faces',
'Sketchup::View#refresh',
'UI::WebDialog#write_image',
],
},
},
{
version: 'SketchUp 7.0 M1',
types: {
method: [
'Sketchup::Face#get_glued_instances',
],
},
},
{
version: 'SketchUp 7.0',
types: {
method: [
'Sketchup.break_edges=',
'Sketchup.break_edges?',
'Sketchup.is_pro?',
'Sketchup::AppObserver#onUnloadExtension',
'Sketchup::Behavior#no_scale_mask=',
'Sketchup::Behavior#no_scale_mask?',
'Sketchup::ComponentDefinition#refresh_thumbnail',
'Sketchup::ComponentDefinition#save_as',
'Sketchup::ComponentDefinition#save_thumbnail',
'Sketchup::DefinitionList#load_from_url',
'Sketchup::Group#local_bounds',
'Sketchup::Model#active_path',
'Sketchup::Model#edit_transform',
'Sketchup::Model#mipmapping=',
'Sketchup::Model#mipmapping?',
'Sketchup::ModelObserver#onAfterComponentSaveAs',
'Sketchup::ModelObserver#onBeforeComponentSaveAs',
'Sketchup::ModelObserver#onExplode',
'Sketchup::ModelObserver#onPlaceComponent',
'Sketchup::Pages#add_matchphoto_page',
'UI.refresh_inspectors',
'UI::WebDialog#max_height',
'UI::WebDialog#max_height=',
'UI::WebDialog#max_width',
'UI::WebDialog#max_width=',
'UI::WebDialog#min_height',
'UI::WebDialog#min_height=',
'UI::WebDialog#min_width',
'UI::WebDialog#min_width=',
'UI::WebDialog#navigation_buttons_enabled=',
'UI::WebDialog#navigation_buttons_enabled?',
'UI::WebDialog#set_full_security',
],
},
},
].freeze
# Instance-method names (as symbols) gathered from the per-version feature
# lists above — presumably used to recognize API instance methods when
# checking the minimum required SketchUp version; confirm against the
# callers of this constant.
INSTANCE_METHODS = %i[
  active_path
  active_path=
  active_section_plane
  active_section_plane=
  active_tool
  add_classification
  add_dimension_linear
  add_dimension_radial
  add_matchphoto_page
  add_section_plane
  aligned_text_position
  aligned_text_position=
  arc_curve
  arc_curve=
  attached_to
  attached_to=
  boundingbox_pick
  camera_modified?
  center_2d
  classifications
  clip_to_margins?
  clip_to_margins=
  colorize_deltas
  colorize_type
  colorize_type=
  count_used_instances
  dash_scale
  days_remaining
  drawing_element_visible?
  edit_transform
  effects_modified?
  end_attached_to
  end_attached_to=
  error_description
  expectsStartupModelNotifications
  extension_path
  find_entity_by_id
  find_entity_by_persistent_id
  fov_is_height?
  georeferenced?
  get_classification_value
  get_drawingelement_visibility
  get_glued_instances
  get_texture_projection
  has_aligned_text?
  has_aligned_text=
  icon_name
  icon_name=
  icon_tooltip
  icon_tooltip=
  image_rep
  in_front?
  in_front=
  include_in_animation?
  include_in_animation=
  instance_path
  instance_path_from_pid_path
  is_polygon?
  large_icon
  layers_modified?
  leader_break_point
  leader_break_point=
  leader_points
  line_style
  line_style=
  line_styles
  load_from_url
  load_on_start?
  load_schema
  local_bounds
  lower_left
  lower_right
  major_color=
  major_spacing=
  menu_text
  minor_color=
  minor_divisions=
  mipmapping?
  mipmapping=
  navigation_buttons_enabled?
  navigation_buttons_enabled=
  no_scale_mask?
  no_scale_mask=
  number_faces
  offset_vector
  offset_vector=
  outer_shell
  owner_type
  persistent_id
  persistent_id_path
  refresh_thumbnail
  remove_classification
  reset_camera
  reset_effects
  reset_layers
  reset_style
  same_direction?
  scale_2d
  screen_scale_factor
  set_can_close
  set_classification_value
  set_download_progress_callback
  set_drawingelement_visibility
  set_full_security
  set_on_closed
  set_texture_projection
  set_upload_progress_callback
  set_uv
  show_differences
  show_major=
  show_minor=
  sketch_plane
  small_icon
  start_attached_to
  start_attached_to=
  status_bar_text
  style_modified?
  text_bounds
  unit_vector?
  unload_schema
  upper_left
  upper_right
  winding
  window_pick
].freeze
# Observer callback names (as symbols) collected from the per-version feature
# lists above — presumably used to recognize observer-interface method
# definitions; confirm against the callers of this constant.
OBSERVER_METHODS = %i[
  onActivateModel
  onActiveSectionPlaneChanged
  onAfterComponentSaveAs
  onBeforeComponentSaveAs
  onElementModified
  onExplode
  onLayerChanged
  onPidChanged
  onPlaceComponent
  onPostSaveModel
  onPreSaveModel
  onTextChanged
  onUnloadExtension
].freeze
end
end
end
| 38.503524
| 83
| 0.530416
|
4ba8148da6fb6bef9c21d0fa9310229516ae052c
| 2,290
|
dart
|
Dart
|
live_compilation_server/lib/main.dart
|
wanbing/flutter_ide
|
90464e894dbbd294942e4e732814ce23105c4d19
|
[
"MIT"
] | 1,032
|
2019-01-05T15:47:42.000Z
|
2022-03-27T06:16:12.000Z
|
live_compilation_server/lib/main.dart
|
wanbing/flutter_ide
|
90464e894dbbd294942e4e732814ce23105c4d19
|
[
"MIT"
] | 38
|
2019-01-06T15:54:17.000Z
|
2022-02-13T12:48:01.000Z
|
live_compilation_server/lib/main.dart
|
wanbing/flutter_ide
|
90464e894dbbd294942e4e732814ce23105c4d19
|
[
"MIT"
] | 177
|
2019-01-06T01:43:19.000Z
|
2022-03-28T21:05:11.000Z
|
import 'dart:convert';
import 'dart:io';
import 'package:flutter/material.dart';
import 'temp_widget.dart';
import 'package:flutter/foundation.dart'
show debugDefaultTargetPlatformOverride;
import 'widget_converter.dart';
//import '../../pkg/widget_converter/lib/widget_converter.dart';
//import '/Users/Norbert/workspace/widget_maker_2_0/pkg/widget_converter/lib/widget_converter.dart';
// Entry point. The target-platform override is set before runApp —
// presumably so the app runs on a desktop embedder that predates official
// desktop support (TODO confirm); then the server UI is booted.
void main() {
  debugDefaultTargetPlatformOverride = TargetPlatform.fuchsia;
  runApp(MyApp());
}
/// Root widget of the live-compilation server app; the WebSocket server
/// lives in [_MyAppState].
class MyApp extends StatefulWidget {
  @override
  _MyAppState createState() => _MyAppState();
}
/// Hosts a local WebSocket server and rebuilds the UI whenever a client
/// connects, so the serialized widget tree can be pushed to that client.
class _MyAppState extends State<MyApp> {
  // Most recently connected client; null until a client attaches.
  WebSocket _socket;

  @override
  void initState() {
    super.initState();
    initServer();
  }

  /// Binds an HTTP server on the loopback interface and upgrades incoming
  /// connections to WebSockets, handing each one to [handle].
  void initServer() async {
    int widgetPort = 9244;
    await HttpServer.bind(InternetAddress.loopbackIPv4, widgetPort).then((server) {
      print("Hot runner server is running on "
          "'http://${server.address.address}:$widgetPort/'");
      server.transform(WebSocketTransformer()).listen(handle);
    });
  }

  /// Stores the newly connected client and triggers a rebuild so the
  /// [WidgetElementCompiler] below is rebuilt with the live socket.
  void handle(WebSocket webSocket) {
    // BUG FIX: without setState, build() never re-ran after a client
    // connected, so the compiler widget never saw the socket.
    setState(() {
      _socket = webSocket;
    });
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Flutter Demo',
      theme: ThemeData(
        primarySwatch: Colors.blue,
      ),
      home: WidgetElementCompiler(
        child: buildL(),
        // BUG FIX: the socket was previously never passed down, leaving
        // WidgetElementCompiler.socket null — serialized trees were
        // silently dropped by the null-aware send.
        socket: _socket,
      ),
    );
  }
}
/// Wraps [child]; after each frame its state serializes the child's widget
/// tree to JSON and pushes it over [socket] when a client is connected.
class WidgetElementCompiler extends StatefulWidget {
  const WidgetElementCompiler({Key key, this.child, this.socket}) : super(key: key);

  /// The widget tree to display and serialize.
  final Widget child;

  /// Destination for the serialized tree; may be null when no client is attached.
  final WebSocket socket;

  @override
  _WidgetElementCompilerState createState() => _WidgetElementCompilerState();
}
/// Serializes the wrapped widget tree after every frame and sends the JSON
/// to the connected client, if any.
class _WidgetElementCompilerState extends State<WidgetElementCompiler> {
  /// Serializes the current child widget tree to a JSON-friendly map.
  ///
  /// BUG FIX: walk() was previously invoked twice — once for the debug
  /// print and once for the return value — performing the full tree
  /// traversal twice per frame. Compute once and reuse.
  Map convert() {
    final map = walk(widget.child);
    print(map);
    return map;
  }

  @override
  void reassemble() {
    super.reassemble();
  }

  /// Delegates to the converter package; kept as a seam so the converter
  /// implementation can be swapped.
  Map walk(Widget widget) {
    return convertWidget(widget);
  }

  @override
  Widget build(BuildContext context) {
    // Serialize after the frame completes so the sent tree reflects the
    // result of this build.
    WidgetsBinding.instance.addPostFrameCallback((_) {
      var map = convert();
      var msg = json.encode(map);
      print("Sending off $msg");
      // Null-aware: nothing is sent until a client has connected.
      widget.socket?.add(msg);
    });
    return widget.child;
  }
}
| 20.818182
| 100
| 0.682096
|
a363a252bcf572a678fd56533f0c4c9083179150
| 8,952
|
java
|
Java
|
bus-office/src/main/java/org/aoju/bus/office/magic/Info.java
|
jingshuai5213/bus
|
f3ec545617acffaf2668ea78e974a05be268cfd1
|
[
"MIT"
] | null | null | null |
bus-office/src/main/java/org/aoju/bus/office/magic/Info.java
|
jingshuai5213/bus
|
f3ec545617acffaf2668ea78e974a05be268cfd1
|
[
"MIT"
] | null | null | null |
bus-office/src/main/java/org/aoju/bus/office/magic/Info.java
|
jingshuai5213/bus
|
f3ec545617acffaf2668ea78e974a05be268cfd1
|
[
"MIT"
] | null | null | null |
/*********************************************************************************
* *
* The MIT License (MIT) *
* *
* Copyright (c) 2015-2020 aoju.org and other contributors. *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy *
* of this software and associated documentation files (the "Software"), to deal *
* in the Software without restriction, including without limitation the rights *
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell *
* copies of the Software, and to permit persons to whom the Software is *
* furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, *
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN *
* THE SOFTWARE. *
********************************************************************************/
package org.aoju.bus.office.magic;
import com.sun.star.beans.XPropertySet;
import com.sun.star.lang.XComponent;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.lang.XServiceInfo;
import com.sun.star.uno.Exception;
import com.sun.star.uno.XComponentContext;
import org.aoju.bus.core.lang.Symbol;
import org.aoju.bus.core.lang.exception.InstrumentException;
import org.aoju.bus.core.toolkit.StringKit;
import org.aoju.bus.logger.Logger;
import org.aoju.bus.office.Builder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
/**
 * Utility functions that make office installation information easier to obtain.
 *
 * @author Kimi Liu
 * @version 6.0.1
 * @since JDK 1.8+
 */
public final class Info {

    /**
     * Gets whether the specified context is for an OpenOffice installation.
     *
     * @param context The context.
     * @return {@code true} if the specified context is for an OpenOffice installation,
     * {@code false} otherwise.
     * @throws InstrumentException If a UNO exception occurs; the UNO exception is wrapped in an
     *                             {@link InstrumentException}.
     */
    public static boolean isOpenOffice(final XComponentContext context) {
        return "openoffice".equalsIgnoreCase(getOfficeName(context));
    }

    /**
     * Gets whether the specified context is for a LibreOffice installation.
     *
     * @param context The context.
     * @return {@code true} if the specified context is for a LibreOffice installation,
     * {@code false} otherwise.
     * @throws InstrumentException If a UNO exception occurs; the UNO exception is wrapped in an
     *                             {@link InstrumentException}.
     */
    public static boolean isLibreOffice(final XComponentContext context) {
        return "libreoffice".equalsIgnoreCase(getOfficeName(context));
    }

    /**
     * Gets the office product name for the given context.
     *
     * @param context The context.
     * @return The office product name, or {@code null} if it cannot be retrieved.
     * @throws InstrumentException If a UNO exception occurs; the UNO exception is wrapped in an
     *                             {@link InstrumentException}.
     */
    public static String getOfficeName(final XComponentContext context) {
        return getConfig(context, "ooName").orElse(null);
    }

    /**
     * Gets the office product version (long form) for the given context, e.g. 6.1.0.3.
     *
     * @param context The context.
     * @return The office product version, or {@code null} if it cannot be retrieved.
     * @throws InstrumentException If a UNO exception occurs; the UNO exception is wrapped in an
     *                             {@link InstrumentException}.
     */
    public static String getOfficeVersionLong(final XComponentContext context) {
        return getConfig(context, "ooSetupVersionAboutBox").orElse(null);
    }

    /**
     * Gets the office product version (short form) for the given context, e.g. 6.1.
     *
     * @param context The context.
     * @return The office product version, or {@code null} if it cannot be retrieved.
     * @throws InstrumentException If a UNO exception occurs; the UNO exception is wrapped in an
     *                             {@link InstrumentException}.
     */
    public static String getOfficeVersionShort(final XComponentContext context) {
        return getConfig(context, "ooSetupVersion").orElse(null);
    }

    /**
     * Compares two version strings (e.g. 1.6.1).
     *
     * @param version1 The first version to compare.
     * @param version2 The second version to compare.
     * @param length   The version length used for normalization.
     * @return -1 if {@code version1 < version2}, 1 if {@code version1 > version2},
     * 0 if {@code version1 == version2}.
     */
    public static int compareVersions(
            final String version1, final String version2, final int length) {

        // Treat empty/null versions as the lowest possible version.
        if (StringKit.isEmpty(version1) && StringKit.isEmpty(version2)) {
            return 0;
        } else if (StringKit.isEmpty(version1)) {
            return -1;
        } else if (StringKit.isEmpty(version2)) {
            return 1;
        }

        final String[] numbers1 = normalizeVersion(version1, length).split("\\.");
        final String[] numbers2 = normalizeVersion(version2, length).split("\\.");

        // BUG FIX: normalizeVersion pads short versions but never truncates, so a
        // version with more segments than 'length' produced arrays of different
        // sizes and the old loop (bounded by numbers1.length only) threw
        // ArrayIndexOutOfBoundsException. Compare only the common prefix.
        final int common = Math.min(numbers1.length, numbers2.length);
        for (int i = 0; i < common; i++) {
            // parseInt avoids the boxing that Integer.valueOf comparisons incurred.
            final int cmp = Integer.compare(
                    Integer.parseInt(numbers1[i]), Integer.parseInt(numbers2[i]));
            if (cmp != 0) {
                return cmp < 0 ? -1 : 1;
            }
        }
        return 0;
    }

    /**
     * Normalizes a version string so that it has at least {@code length} version
     * numbers separated by '.', padding with zeros as needed.
     *
     * @param version The version string.
     * @param length  The minimum number of segments.
     * @return The normalized version string.
     */
    private static String normalizeVersion(final String version, final int length) {
        final List<String> numbers = new ArrayList<>(Arrays.asList(version.split("\\.")));
        while (numbers.size() < length) {
            numbers.add(Symbol.ZERO);
        }
        return String.join(Symbol.DOT, numbers);
    }

    /**
     * Gets the configuration value of the specified property, trying each known
     * node path in turn.
     *
     * @param context  The context.
     * @param propName The name of the property value to get.
     * @return An Optional containing the property value, empty if not found.
     * @throws InstrumentException If a UNO exception occurs; the UNO exception is wrapped in an
     *                             {@link InstrumentException}.
     */
    public static Optional<String> getConfig(final XComponentContext context, final String propName) {
        for (String nodePath : Builder.NODE_PATHS) {
            final Optional<Object> info = getConfig(context, nodePath, propName);
            if (info.isPresent()) {
                return info.map(String.class::cast);
            }
        }
        return Optional.empty();
    }

    /**
     * Gets the configuration value of the specified property at the specified path.
     *
     * @param context  The context.
     * @param nodePath The path of the property to get.
     * @param propName The name of the property value to get.
     * @return An Optional containing the property value, empty if not found.
     * @throws InstrumentException If a UNO exception occurs; the UNO exception is wrapped in an
     *                             {@link InstrumentException}.
     */
    public static Optional<Object> getConfig(
            final XComponentContext context, final String nodePath, final String propName) {
        return getConfigProperties(context, nodePath)
                .map(props -> Props.getProperty(props, propName))
                .orElse(Optional.empty());
    }

    /**
     * Gets the configuration properties at the specified path.
     *
     * @param context  The context.
     * @param nodePath The path for which the properties are requested.
     * @return An Optional {@link XPropertySet} containing the configuration
     * properties at the specified path, empty on failure.
     */
    public static Optional<XPropertySet> getConfigProperties(
            final XComponentContext context, final String nodePath) {

        final XMultiServiceFactory provider =
                Lo.createInstanceMCF(
                        context,
                        XMultiServiceFactory.class,
                        "com.sun.star.configuration.ConfigurationProvider");
        if (provider == null) {
            Logger.debug("Could not create configuration provider");
            return Optional.empty();
        }

        try {
            return Optional.ofNullable(
                    Lo.qi(
                            XPropertySet.class,
                            provider.createInstanceWithArguments(
                                    "com.sun.star.configuration.ConfigurationAccess",
                                    Props.makeProperties("nodepath", nodePath))));
        } catch (Exception ex) {
            // Best-effort lookup: log and fall through to an empty result.
            Logger.debug("Unable to access config properties for: " + nodePath, ex);
        }
        return Optional.empty();
    }

    /**
     * Gets whether the given document is of the given document type.
     *
     * @param document     The document.
     * @param documentType The document type to check.
     * @return {@code true} if the document is of the specified type, {@code false} otherwise.
     */
    public static boolean isDocumentType(final XComponent document, final String documentType) {
        return Lo.qi(XServiceInfo.class, document).supportsService(documentType);
    }

}
| 38.25641
| 102
| 0.583333
|
d706126c3f065c9252b0da25b98f8bea4ab57a56
| 327
|
kt
|
Kotlin
|
src/main/kotlin/com/easy/springboot/simpleloginbackend/controller/HelloWorldController.kt
|
EasySpringBoot/simple-login-back-end
|
681e46ceefff255ae1578599d496a59573eeb910
|
[
"ISC"
] | 3
|
2018-11-10T16:51:54.000Z
|
2021-08-10T02:25:49.000Z
|
src/main/kotlin/com/easy/springboot/simpleloginbackend/controller/HelloWorldController.kt
|
EasySpringBoot/simple-login-back-end
|
681e46ceefff255ae1578599d496a59573eeb910
|
[
"ISC"
] | null | null | null |
src/main/kotlin/com/easy/springboot/simpleloginbackend/controller/HelloWorldController.kt
|
EasySpringBoot/simple-login-back-end
|
681e46ceefff255ae1578599d496a59573eeb910
|
[
"ISC"
] | 1
|
2018-11-14T08:06:19.000Z
|
2018-11-14T08:06:19.000Z
|
package com.easy.springboot.simpleloginbackend.controller
import org.springframework.web.bind.annotation.GetMapping
import org.springframework.web.bind.annotation.RestController
/**
 * Minimal REST controller exposing a single greeting endpoint.
 */
@RestController
class HelloWorldController {

    /** GET /hello — responds with a fixed greeting string. */
    @GetMapping(value = ["/hello"])
    fun hello(): String = "Hello World!"
}
| 21.8
| 61
| 0.755352
|
23cb3335d8e4eecf9b1a2d3ecd8e0761d15a4dbd
| 4,815
|
js
|
JavaScript
|
src/Home/Home.js
|
wangyan4/react-native-HomeApp
|
e02f42ba32256e88c3ddf58e8d23a71d1dbca45a
|
[
"MIT"
] | null | null | null |
src/Home/Home.js
|
wangyan4/react-native-HomeApp
|
e02f42ba32256e88c3ddf58e8d23a71d1dbca45a
|
[
"MIT"
] | 1
|
2021-05-11T08:34:26.000Z
|
2021-05-11T08:34:26.000Z
|
src/Home/Home.js
|
wangyan4/react-native-HomeApp
|
e02f42ba32256e88c3ddf58e8d23a71d1dbca45a
|
[
"MIT"
] | null | null | null |
import React, { Component } from 'react'
import { Text, View ,StyleSheet,TextInput,Dimensions,Image} from 'react-native'
import Icon from 'react-native-vector-icons/FontAwesome5';
import Button from 'react-native-button';
import Swiper from 'react-native-swiper';
// Full device screen width, used for responsive sizing below.
const {width} = Dimensions.get('window');
// Scale factor — presumably relative to a 600-unit design width; confirm.
const p = width/600;
export default class Home extends Component {
render() {
return (
<View>
{/* 搜索框 */}
<View style={styles.titlebar}>
<View style={styles.frontbox}>
<View style={styles.grandcss}>
<Icon name='search' style={[styles.icon1]}/>
<TextInput placeholder="请输入您想要搜索的关键字" placeholderTextColor="#fff" style={{fontSize:22*p,color:"#fff"}}/>
</View>
<View style={styles.icon2}><Icon name="shopping-cart" style={styles.icon2} /></View>
</View>
</View>
{/* 轮播图 */}
<View style={styles.wrapper}>
<Swiper showsButtons={false} autoplay activeDotColor="#fd0304" >
<View>
<Image resizeMode="cover" style={{width:"100%",height:"100%"}} source={{uri: 'https://upload.wikimedia.org/wikipedia/commons/d/de/Bananavarieties.jpg'}}/>
</View>
<View>
<Image resizeMode="cover" style={{width:"100%",height:"100%"}} source={{uri: 'https://upload.wikimedia.org/wikipedia/commons/d/de/Bananavarieties.jpg'}}/>
</View>
<View>
<Image resizeMode="cover" style={{width:"100%",height:"100%"}} source={{uri: 'https://upload.wikimedia.org/wikipedia/commons/d/de/Bananavarieties.jpg'}}/>
</View>
</Swiper>
</View>
{/* Tools */}
<View style={{flex:1,justifyContent:"space-between"}}>
<View style={styles.slide}>
<View style={styles.slide1}><View style={[styles.flag,{backgroundColor:"#fcc"}]}><Icon style={styles.text} name='tools'/></View></View>
<View style={styles.slide2}><Text style={styles.text}>居家维修保养</Text></View>
<View style={styles.slide3}><Icon style={styles.text} name="chevron-right"/></View>
</View>
<View style={styles.slide}>
<View style={styles.slide1}><View style={[styles.flag,{backgroundColor:"#ffe1b1"}]}><Icon style={styles.text} name='flag'/></View></View>
<View style={styles.slide2}><Text style={styles.text}>住宿优惠</Text></View>
<View style={styles.slide3}><Icon style={styles.text} name="chevron-right"/></View>
</View>
<View style={styles.slide}>
<View style={styles.slide1}><View style={[styles.flag,{backgroundColor:"#bfe6a8"}]}><Icon style={styles.text} name='stopwatch'/></View></View>
<View style={styles.slide2}><Text style={styles.text}>出行接送</Text></View>
<View style={styles.slide3}><Icon style={styles.text} name="chevron-right"/></View>
</View>
<View style={styles.slide}>
<View style={styles.slide1}><View style={[styles.flag,{backgroundColor:"#c3ddf2"}]}><Icon style={styles.text} name='gift'/></View></View>
<View style={styles.slide2}><Text style={styles.text}>E族活动</Text></View>
<View style={styles.slide3}><Icon style={styles.text} name="chevron-right"/></View>
</View>
<View style={styles.slide}>
<Button style={styles.btn}>发布需求</Button>
</View>
</View>
</View>
)
}
}
const styles=StyleSheet.create({
// 搜索框
titlebar:{
backgroundColor:"#f23030",
paddingBottom:10
},
frontbox:{
flexDirection:"row",
height:50,
marginTop:25,
justifyContent:"space-evenly"
},
grandcss:{
flexDirection:"row",
width:"80%",
backgroundColor:"#fbb8b8",
borderRadius:20
},
fathcss:{
marginLeft:15,
marginTop:10,
marginRight:10,
width:20,
height:20
},
icon2:{
color:"#fff",
marginTop:"1%",
fontSize:35
},
icon1:{
color:"#fff",
marginHorizontal:"5%",
textAlignVertical:"center",
fontSize:28
},
// 轮播图
wrapper: {
height:250
},
// Tools
slide:{
width:width,
height:120*p,
flexDirection:"row",
borderTopWidth:10 *p,
borderTopColor:"#f5f5f5",
justifyContent:"center"
},
slide1:{
flex:3,
justifyContent:"center",
alignItems:"center"
},
slide2:{
flex:7
},
slide3:{
flex:1
},
text:{
fontSize:22*p,
height:120*p,
textAlignVertical:"center"
},
flag:{
width:100*p,
height:100*p,
borderRadius:60*p,
justifyContent:"center",
alignItems:"center"
},
// 发布按钮
btn:{
backgroundColor:"#f23030",
width:545*p,
height:70*p,
color:"#fff",
fontSize:18,
textAlignVertical:"center",
borderRadius:10*p
}
})
| 32.1
| 166
| 0.584839
|
6da490131bf92c2c96336ff53d87a97edab3c66a
| 1,363
|
h
|
C
|
System/Library/Frameworks/Security.framework/XPCServices/TrustedPeersHelper.xpc/OTPrivateKey.h
|
lechium/tvOS130Headers
|
6b47cdcd4a6f453b399aa9d742b5d0f7e3f732fd
|
[
"MIT"
] | 11
|
2019-11-06T04:48:48.000Z
|
2022-02-09T17:48:15.000Z
|
System/Library/Frameworks/Security.framework/XPCServices/TrustedPeersHelper.xpc/OTPrivateKey.h
|
lechium/tvOS130Headers
|
6b47cdcd4a6f453b399aa9d742b5d0f7e3f732fd
|
[
"MIT"
] | 1
|
2020-04-16T01:41:56.000Z
|
2020-04-16T04:32:00.000Z
|
System/Library/Frameworks/Security.framework/XPCServices/TrustedPeersHelper.xpc/OTPrivateKey.h
|
lechium/tvOS130Headers
|
6b47cdcd4a6f453b399aa9d742b5d0f7e3f732fd
|
[
"MIT"
] | 3
|
2019-12-22T20:17:53.000Z
|
2021-01-25T09:47:49.000Z
|
/*
* This header is generated by classdump-dyld 1.0
* on Tuesday, November 5, 2019 at 2:50:47 AM Mountain Standard Time
* Operating System: Version 13.0 (Build 17J586)
* Image Source: /System/Library/Frameworks/Security.framework/XPCServices/TrustedPeersHelper.xpc/TrustedPeersHelper
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos.
*/
#import <TrustedPeersHelper/TrustedPeersHelper-Structs.h>
#import <ProtocolBuffer/PBCodable.h>
#import <TrustedPeersHelper/NSCopying.h>
@class NSData;
@interface OTPrivateKey : PBCodable <NSCopying> {
NSData* _keyData;
int _keyType;
}
@property (assign,nonatomic) int keyType; //@synthesize keyType=_keyType - In the implementation block
@property (nonatomic,retain) NSData * keyData; //@synthesize keyData=_keyData - In the implementation block
+(SecKeyRef)createSecKey:(id)arg1 ;
+(id)fromECKeyPair:(id)arg1 ;
-(BOOL)isEqual:(id)arg1 ;
-(unsigned long long)hash;
-(id)copyWithZone:(NSZone*)arg1 ;
-(id)description;
-(id)dictionaryRepresentation;
-(int)keyType;
-(BOOL)readFrom:(id)arg1 ;
-(void)writeTo:(id)arg1 ;
-(void)mergeFrom:(id)arg1 ;
-(void)copyTo:(id)arg1 ;
-(NSData *)keyData;
-(void)setKeyType:(int)arg1 ;
-(void)setKeyData:(NSData *)arg1 ;
-(id)asECKeyPair:(id*)arg1 ;
-(id)keyTypeAsString:(int)arg1 ;
-(int)StringAsKeyType:(id)arg1 ;
@end
| 30.977273
| 120
| 0.735143
|
dd9f6d7fcea86aa6c9f21898c7a5bfa17bd58f08
| 6,130
|
py
|
Python
|
scripts/pre_adapt_tidefac.py
|
jamal919/SCHISMMB
|
f02106ffafeaeee6da5c7382e33f74ca1c327c37
|
[
"Apache-2.0"
] | 1
|
2022-03-08T13:33:21.000Z
|
2022-03-08T13:33:21.000Z
|
scripts/pre_adapt_tidefac.py
|
jamal919/SCHISMMB
|
f02106ffafeaeee6da5c7382e33f74ca1c327c37
|
[
"Apache-2.0"
] | 1
|
2022-03-26T13:08:56.000Z
|
2022-03-26T13:08:56.000Z
|
scripts/pre_adapt_tidefac.py
|
jamal919/SCHISMMB
|
f02106ffafeaeee6da5c7382e33f74ca1c327c37
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Reads tidefac output and adapt given bctides file as required by the tidefac
outputs.
@author: khan
@email: jamal.khan@legos.obs-mip.fr
"""
import numpy as np
from datetime import datetime, timedelta
import sys
import re
class Bctides(object):
def __init__(self, info='', ntip=0, tip_dp=0, tip=[], nbfr=0, bfr=[], nope=0, boundaries=[]):
self.info = info
self.nitp = ntip
self.tip_dp = tip_dp
self.tip = tip
self.nbfr = nbfr
self.bfr = bfr
self.nope = nope
self.boundaries = boundaries
def read(self, filepath):
with open(filepath) as f:
ds = f.readlines()
# First the dates
self.info = ds[0].split('\n')[0]
__lnproc = 0
# Then the tidal potential information
self.ntip, self.tip_dp = np.fromstring(ds[1].split('!')[0], count=2, sep=' ')
self.ntip = int(self.ntip)
__lnproc = 1
for i in np.arange(self.ntip):
__talpha = ds[__lnproc+1].split('\n')[0]
__jspc, __tamp, __tfreq, __tnf, __tear = np.fromstring(ds[__lnproc+2].split('\n')[0], count=5, sep=' ')
__rec = dict(talpha=__talpha, jspc=__jspc, tamp=__tamp, tfreq=__tfreq, tnf=__tnf, tear=__tear)
self.tip.append(__rec)
__lnproc = __lnproc + 2
# Reading the boundary frequencies
self.nbfr = np.fromstring(ds[__lnproc+1], count=1, sep=' ')
self.nbfr = int(self.nbfr)
__lnproc = __lnproc + 1
self.bfr = []
for i in np.arange(self.nbfr):
__alpha = ds[__lnproc+1].split('\n')[0]
__amig, __ff, __face = np.fromstring(ds[__lnproc+2].split('\n')[0], count=3, sep=' ')
__rec = dict(alpha=__alpha, amig=__amig, ff=__ff, face=__face)
self.bfr.append(__rec)
__lnproc = __lnproc + 2
# Open boundary sagments
self.nope = ds[__lnproc+1].split(' ')[0]
self.nope = int(self.nope)
__lnproc = __lnproc + 1
# For each open boundary sagment
self.boundaries = ds[__lnproc+1:len(ds)]
def update(self, tidefac):
# Update time
self.info = tidefac.info
# Updating the tidal potential nodal factor and equilibrium argument
for __tip in self.tip:
__talpha = __tip['talpha'].strip().upper()
if __talpha in tidefac.const.keys():
__tip['tnf'] = tidefac.const[__talpha][0]
__tip['tear'] = tidefac.const[__talpha][1]
# Updating the Boundary frequency nodal factors and equilibrium argument
for __bfr in self.bfr:
__alpha = __bfr['alpha'].strip().upper()
if __alpha in tidefac.const.keys():
__bfr['ff'] = tidefac.const[__alpha][0]
__bfr['face'] = tidefac.const[__alpha][1]
def write(self, filepath):
with open(filepath, 'w') as f:
# Header information
f.write('{:s}\n'.format(self.info))
# Tidal potential
f.write('{:d} {:3.2f} !ntip, tip_dp\n'.format(int(self.ntip), float(self.tip_dp)))
for __tip in self.tip:
f.write('{:s}\n{:d}\t{:.6f}\t{:.16f}\t{:.5f}\t{:.2f}\n'\
.format(__tip['talpha'].strip().upper(),\
int(__tip['jspc']),\
__tip['tamp'],\
__tip['tfreq'],\
__tip['tnf'],\
__tip['tear']))
# Boundary frequencies
f.write('{:d} !nbfr\n'.format(int(self.nbfr)))
for __bfr in self.bfr:
f.write('{:s}\n{:.16E}\t{:.6f}\t{:.2f}\n'\
.format(__bfr['alpha'].strip().upper(),\
__bfr['amig'],\
__bfr['ff'],\
__bfr['face']))
# Open boundaries
f.write('{:d} !Number of Open Boundaries\n'.format(self.nope))
for __line in self.boundaries:
f.write(__line)
class Tidefacout(object):
def __init__(self, year=0, month=0, day=0, hour=0, rnday=0, const={}):
self.year = year
self.month = month
self.day = day
self.hour = hour
self.rnday = rnday
self.const = const
def read(self, filepath):
# Reading date information
with open(filepath, 'r') as f:
# Reading the date section
__ds = f.readline()
__date = np.fromstring(__ds, dtype=float, count=4, sep=',')
self.year = __date[0]
self.month = int(__date[1])
self.day = int(__date[2])
self.hour = int(__date[3])
# Reading the run length section
__ds = f.readline()
__rnday = np.fromstring(__ds, dtype=float, count=1, sep=',')
self.rnday = __rnday[0]
# Reading the constants, node factor and eq. argument ref. to GM in deg.
__const = np.genfromtxt(fname=filepath, dtype=None, skip_header=6, \
delimiter=None, autostrip=True)
__const = np.array([[i for i in j] for j in __const])
__const = {i[0].upper():[float(j) for j in i[1:3]] for i in __const}
self.const = __const
# Tidefac header information
self.info = '{:.2f} days - {:4.0f}/{:02.0f}/{:02.0f} {:02.2f} UTC'.format(self.rnday,\
self.year, self.month, self.day, self.hour)
def __str__(self):
return(self.info)
if __name__=='__main__':
bctide_source = 'bctides.ini'
bctide_update = 'bctides.in'
tfacfile = 'tide_fac.out'
bctides = Bctides()
bctides.read(filepath=bctide_source)
tfac = Tidefacout()
tfac.read(filepath=tfacfile)
bctides.update(tfac)
bctides.write(filepath=bctide_update)
| 37.378049
| 119
| 0.515824
|
39e3f4622bb7331fb29b68f091cbf6312bf53f34
| 27,039
|
dart
|
Dart
|
test/flutter_sidekick_test.dart
|
Jayshanx/flutter_sidekick
|
decca53a735aa51b7a97c7824b2003b8f351dea6
|
[
"MIT"
] | 287
|
2018-11-03T08:59:13.000Z
|
2022-03-27T00:36:12.000Z
|
test/flutter_sidekick_test.dart
|
silexcorp/flutter_sidekick
|
7f8312e9b39c27d8ad2d202e8324a387917dd8ad
|
[
"MIT"
] | 11
|
2018-11-07T21:13:20.000Z
|
2021-08-30T10:35:04.000Z
|
test/flutter_sidekick_test.dart
|
silexcorp/flutter_sidekick
|
7f8312e9b39c27d8ad2d202e8324a387917dd8ad
|
[
"MIT"
] | 34
|
2018-11-05T06:57:19.000Z
|
2022-03-26T18:02:20.000Z
|
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:flutter_sidekick/flutter_sidekick.dart';
Duration frameDuration = const Duration(milliseconds: 16);
Key simpleSource = const Key('simple-source');
Key simpleTarget = const Key('simple-target');
class SimpleExample extends StatefulWidget {
SimpleExample([
this.sourceTag = 'source',
this.targetTag = 'target',
]);
final String sourceTag;
final String targetTag;
@override
_SimpleExampleState createState() => _SimpleExampleState();
}
class _SimpleExampleState extends State<SimpleExample>
with TickerProviderStateMixin {
SidekickController controller;
@override
void initState() {
super.initState();
controller =
SidekickController(vsync: this, duration: Duration(seconds: 1));
}
@override
void dispose() {
controller?.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
return Stack(
children: <Widget>[
Positioned(
top: 20.0,
left: 20.0,
width: 100.0,
height: 100.0,
child: GestureDetector(
onTap: () => controller.moveToTarget(context),
child: Card(
margin: const EdgeInsets.all(0.0),
child: Sidekick(
tag: widget.sourceTag,
targetTag: widget.targetTag,
child: Container(
key: simpleSource,
color: Colors.blue,
),
),
),
),
),
Positioned(
bottom: 20.0,
right: 20.0,
width: 150.0,
height: 150.0,
child: GestureDetector(
onTap: () => controller.moveToSource(context),
child: Card(
margin: const EdgeInsets.all(0.0),
child: Sidekick(
tag: widget.targetTag,
child: Container(
key: simpleTarget,
color: Colors.blue,
),
),
),
),
),
],
);
}
}
class Item {
Item(this.index);
final int index;
String get message => 'Item$index';
}
class SidekickTeamBuilderExample extends StatelessWidget {
SidekickTeamBuilderExample(
this.teamKey, [
List<Item> sourceList,
List<Item> targetList,
]) : sourceList = sourceList ?? List.generate(4, (i) => Item(i)),
targetList = targetList ?? List.generate(4, (i) => Item(i + 4));
final List<Item> sourceList;
final List<Item> targetList;
final Key teamKey;
@override
Widget build(BuildContext context) {
return SidekickTeamBuilder<Item>(
key: teamKey,
animationDuration: Duration(milliseconds: 1000),
initialSourceList: sourceList,
initialTargetList: targetList,
builder: (context, sourceBuilderDelegates, targetBuilderDelegates) {
return ListView(
children: <Widget>[
SizedBox(
height: 150.0,
child: Wrap(
children: targetBuilderDelegates.map((builderDelegate) {
return builderDelegate.build(
context,
GestureDetector(
onTap: () =>
builderDelegate.state.move(builderDelegate.message),
child: Container(
height: 30.0,
width: 30.0,
color: Colors.blue,
child: Text(
builderDelegate.message.message,
),
),
),
flightShuttleBuilder: (
context,
animation,
type,
from,
to,
) =>
buildShuttle(
animation,
builderDelegate.message.message,
),
);
}).toList(),
),
),
SizedBox(
height: 50.0,
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
crossAxisAlignment: CrossAxisAlignment.center,
children: <Widget>[
FlatButton(
child: const Text('alltosource'),
onPressed: () => SidekickTeamBuilder.of<String>(context)
.moveAll(SidekickFlightDirection.toSource),
),
RaisedButton(
child: const Text('alltotarget'),
onPressed: () => SidekickTeamBuilder.of<String>(context)
.moveAll(SidekickFlightDirection.toTarget),
),
],
),
),
Wrap(
children: sourceBuilderDelegates.map((builderDelegate) {
return builderDelegate.build(
context,
GestureDetector(
onTap: () =>
builderDelegate.state.move(builderDelegate.message),
child: Container(
height: 50.0,
width: 50.0,
color: Colors.green,
child: Text(
builderDelegate.message.message,
),
),
),
flightShuttleBuilder: (
context,
animation,
type,
from,
to,
) =>
buildShuttle(
animation,
builderDelegate.message.message,
),
);
}).toList(),
),
],
);
},
);
}
Widget buildShuttle(
Animation<double> animation,
String message,
) {
return AnimatedBuilder(
animation: animation,
builder: (_, __) {
return Container(
width: Tween<double>(begin: 50.0, end: 30.0).evaluate(animation),
height: Tween<double>(begin: 50.0, end: 30.0).evaluate(animation),
child: Text(
message,
),
);
},
);
}
}
void main() {
group('Sidekick', () {
testWidgets('Animate to target', (WidgetTester tester) async {
await tester.pumpWidget(MaterialApp(home: SimpleExample()));
// the initial setup.
expect(find.byKey(simpleSource), isInCard);
expect(find.byKey(simpleTarget), isInCard);
await tester.tap(find.byKey(simpleSource));
await tester.pump(); // the animation will start at the next frame.
await tester.pump(frameDuration);
// at this stage, the sidekick just gone on its journey, we are
// seeing them at t=16ms.
expect(find.byKey(simpleSource), findsNothing);
expect(find.byKey(simpleTarget), isNotInCard);
await tester.pump(frameDuration);
// t=32ms for the journey. Surely they are still at it.
expect(find.byKey(simpleSource), findsNothing);
expect(find.byKey(simpleTarget), isNotInCard);
await tester.pump(const Duration(seconds: 1));
// t=1.033s for the journey. The journey has ended (it ends this frame, in
// fact). The sidekicks should be back now.
expect(find.byKey(simpleTarget), isInCard);
});
testWidgets('Animate to source', (WidgetTester tester) async {
await tester.pumpWidget(MaterialApp(home: SimpleExample()));
// the initial setup.
expect(find.byKey(simpleSource), isInCard);
expect(find.byKey(simpleTarget), isInCard);
await tester.tap(find.byKey(simpleTarget));
await tester.pump(); // the animation will start at the next frame.
await tester.pump(frameDuration);
// at this stage, the sidekick just gone on its journey, we are
// seeing them at t=16ms.
expect(find.byKey(simpleTarget), findsNothing);
expect(find.byKey(simpleSource), isNotInCard);
await tester.pump(frameDuration);
// t=32ms for the journey. Surely they are still at it.
expect(find.byKey(simpleTarget), findsNothing);
expect(find.byKey(simpleSource), isNotInCard);
await tester.pump(const Duration(seconds: 1));
// t=1.033s for the journey. The journey has ended (it ends this frame, in
// fact). The sidekicks should be back now.
expect(find.byKey(simpleSource), isInCard);
});
testWidgets('Same key, throws', (WidgetTester tester) async {
await tester.pumpWidget(MaterialApp(home: SimpleExample('tag', 'tag')));
await tester.tap(find.byKey(simpleSource));
await tester.pump(); // the animation will start at the next frame.
expect(tester.takeException(), isFlutterError);
});
testWidgets('Target grows mid-flight', (WidgetTester tester) async {
await tester.pumpWidget(MaterialApp(home: SimpleExample()));
final double initialHeight =
tester.getSize(find.byKey(simpleSource)).height;
await tester.tap(find.byKey(simpleSource));
await tester.pump(); // the animation will start at the next frame.
await tester.pump(frameDuration);
await tester.pump(const Duration(milliseconds: 500));
double midflightHeight = tester.getSize(find.byKey(simpleTarget)).height;
expect(midflightHeight, greaterThan(initialHeight));
expect(midflightHeight, lessThan(150.0));
await tester.pump(const Duration(milliseconds: 500));
await tester.pump();
double finalHeight = tester.getSize(find.byKey(simpleTarget)).height;
expect(finalHeight, 150.0);
});
testWidgets('Source shrinks mid-flight', (WidgetTester tester) async {
await tester.pumpWidget(MaterialApp(home: SimpleExample()));
final double initialHeight =
tester.getSize(find.byKey(simpleTarget)).height;
await tester.tap(find.byKey(simpleTarget));
await tester.pump(); // the animation will start at the next frame.
await tester.pump(frameDuration);
await tester.pump(const Duration(milliseconds: 500));
double midflightHeight = tester.getSize(find.byKey(simpleSource)).height;
expect(midflightHeight, lessThan(initialHeight));
expect(midflightHeight, greaterThan(100.0));
await tester.pump(const Duration(milliseconds: 500));
await tester.pump();
double finalHeight = tester.getSize(find.byKey(simpleSource)).height;
expect(finalHeight, 100.0);
});
testWidgets('Target scrolls mid-flight', (WidgetTester tester) async {
await tester.pumpWidget(MaterialApp(home: SimpleExample()));
final double initialTop = tester.getTopLeft(find.byKey(simpleSource)).dy;
expect(initialTop, 20.0);
await tester.tap(find.byKey(simpleSource));
await tester.pump(); // the animation will start at the next frame.
await tester.pump(frameDuration);
await tester.pump(const Duration(milliseconds: 500));
double midflightTop = tester.getTopLeft(find.byKey(simpleTarget)).dy;
expect(midflightTop, greaterThan(initialTop));
expect(midflightTop, lessThan(430.0));
await tester.pump(const Duration(milliseconds: 500));
await tester.pump();
double finalTop = tester.getTopLeft(find.byKey(simpleTarget)).dy;
expect(finalTop, 430.0);
});
testWidgets('Source scrolls mid-flight', (WidgetTester tester) async {
await tester.pumpWidget(MaterialApp(home: SimpleExample()));
final double initialTop = tester.getTopLeft(find.byKey(simpleTarget)).dy;
expect(initialTop, 430.0);
await tester.tap(find.byKey(simpleTarget));
await tester.pump(); // the animation will start at the next frame.
await tester.pump(frameDuration);
await tester.pump(const Duration(milliseconds: 500));
double midflightTop = tester.getTopLeft(find.byKey(simpleSource)).dy;
expect(midflightTop, lessThan(initialTop));
expect(midflightTop, greaterThan(20.0));
await tester.pump(const Duration(milliseconds: 500));
await tester.pump();
double finalTop = tester.getTopLeft(find.byKey(simpleSource)).dy;
expect(finalTop, 20.0);
});
});
group('SidekickTeamBuilder ', () {
testWidgets('lists are changed after moveAllToSource',
(WidgetTester tester) async {
final key = GlobalKey<SidekickTeamBuilderState<Item>>();
final List<String> logs = <String>[];
await tester
.pumpWidget(MaterialApp(home: SidekickTeamBuilderExample(key)));
final SidekickTeamBuilderState<Item> state = key.currentState;
expect(state.sourceList, containsAllItemsInOrder([0, 1, 2, 3]));
expect(state.targetList, containsAllItemsInOrder([4, 5, 6, 7]));
state.moveAllToSource().then((_) => logs.add('complete'));
await tester.pump();
await tester.pump(const Duration(milliseconds: 1001));
expect(logs, <String>['complete']);
expect(
state.sourceList,
containsAllItemsInOrder([0, 1, 2, 3, 4, 5, 6, 7]),
);
expect(state.targetList.length, 0);
});
testWidgets('lists are changed after moveAllToTarget',
(WidgetTester tester) async {
final key = GlobalKey<SidekickTeamBuilderState<Item>>();
final List<String> logs = <String>[];
await tester
.pumpWidget(MaterialApp(home: SidekickTeamBuilderExample(key)));
final SidekickTeamBuilderState<Item> state = key.currentState;
expect(state.sourceList, containsAllItemsInOrder([0, 1, 2, 3]));
expect(state.targetList, containsAllItemsInOrder([4, 5, 6, 7]));
state.moveAllToTarget().then((_) => logs.add('complete'));
await tester.pump();
await tester.pump(const Duration(milliseconds: 1001));
expect(logs, <String>['complete']);
expect(
state.targetList,
containsAllItemsInOrder([4, 5, 6, 7, 0, 1, 2, 3]),
);
expect(state.sourceList.length, 0);
});
testWidgets('correct item is moved', (WidgetTester tester) async {
final key = GlobalKey<SidekickTeamBuilderState<Item>>();
final List<String> logs = <String>[];
final sourceList = List.generate(4, (i) => Item(i));
final targetList = List.generate(4, (i) => Item(i + 4));
await tester.pumpWidget(MaterialApp(
home: SidekickTeamBuilderExample(key, sourceList, targetList)));
final SidekickTeamBuilderState<Item> state = key.currentState;
expect(state.sourceList, containsAllItemsInOrder([0, 1, 2, 3]));
expect(state.targetList, containsAllItemsInOrder([4, 5, 6, 7]));
state.move(sourceList[2]).then((_) => logs.add('complete'));
await tester.pump();
await tester.pump(const Duration(milliseconds: 1001));
expect(logs, <String>['complete']);
expect(
state.sourceList,
containsAllItemsInOrder([0, 1, 3]),
);
expect(
state.targetList,
containsAllItemsInOrder([4, 5, 6, 7, 2]),
);
state.move(sourceList[2]).then((_) => logs.add('complete'));
await tester.pump();
await tester.pump(const Duration(milliseconds: 1001));
expect(logs, <String>['complete', 'complete']);
expect(
state.sourceList,
containsAllItemsInOrder([0, 1, 3, 2]),
);
expect(
state.targetList,
containsAllItemsInOrder([4, 5, 6, 7]),
);
});
testWidgets('item is animated', (WidgetTester tester) async {
final key = GlobalKey<SidekickTeamBuilderState<Item>>();
final List<String> logs = <String>[];
final sourceList = List.generate(4, (i) => Item(i));
final targetList = List.generate(4, (i) => Item(i + 4));
final item = sourceList[2];
await tester.pumpWidget(MaterialApp(
home: SidekickTeamBuilderExample(key, sourceList, targetList)));
final SidekickTeamBuilderState<Item> state = key.currentState;
expect(state.sourceList, containsAllItemsInOrder([0, 1, 2, 3]));
expect(state.targetList, containsAllItemsInOrder([4, 5, 6, 7]));
expect(find.text(item.message), findsOneWidget);
final double initialTop = tester.getTopLeft(find.text(item.message)).dy;
final double initialHeight =
tester.getSize(find.text(item.message)).height;
expect(initialTop, 200.0);
expect(initialHeight, 50.0);
state.move(sourceList[2]).then((_) => logs.add('complete'));
await tester.pump();
await tester.pump(const Duration(milliseconds: 500));
final double midTop = tester.getTopLeft(find.text(item.message)).dy;
final double midHeight = tester.getSize(find.text(item.message)).height;
expect(midTop, closeTo(100.0, 0.1));
expect(midHeight, closeTo(40.0, 0.1));
await tester.pump(const Duration(milliseconds: 500));
final double finalTop = tester.getTopLeft(find.text(item.message)).dy;
final double finalHeight = tester.getSize(find.text(item.message)).height;
expect(finalTop, 0.0);
expect(finalHeight, 30.0);
await tester.pump(const Duration(milliseconds: 1));
final double finalFrameTop =
tester.getTopLeft(find.text(item.message)).dy;
final double finalFrameHeight =
tester.getSize(find.text(item.message)).height;
expect(finalFrameTop, 0.0);
expect(finalFrameHeight, 30.0);
expect(logs, <String>['complete']);
});
testWidgets('items are animated to target', (WidgetTester tester) async {
final key = GlobalKey<SidekickTeamBuilderState<Item>>();
final List<String> logs = <String>[];
final sourceList = List.generate(4, (i) => Item(i));
final targetList = List.generate(4, (i) => Item(i + 4));
await tester.pumpWidget(MaterialApp(
home: SidekickTeamBuilderExample(key, sourceList, targetList)));
final SidekickTeamBuilderState<Item> state = key.currentState;
expect(state.sourceList, containsAllItemsInOrder([0, 1, 2, 3]));
expect(state.targetList, containsAllItemsInOrder([4, 5, 6, 7]));
for (var item in sourceList) {
expect(find.text(item.message), findsOneWidget);
final double initialTop = tester.getTopLeft(find.text(item.message)).dy;
final double initialHeight =
tester.getSize(find.text(item.message)).height;
expect(initialTop, 200.0);
expect(initialHeight, 50.0);
}
state.moveAllToTarget().then((_) => logs.add('complete'));
await tester.pump();
await tester.pump(const Duration(milliseconds: 500));
for (var item in sourceList) {
final double midTop = tester.getTopLeft(find.text(item.message)).dy;
final double midHeight = tester.getSize(find.text(item.message)).height;
expect(midTop, closeTo(100.0, 0.1));
expect(midHeight, closeTo(40.0, 0.1));
}
await tester.pump(const Duration(milliseconds: 500));
for (var item in sourceList) {
final double finalTop = tester.getTopLeft(find.text(item.message)).dy;
final double finalHeight =
tester.getSize(find.text(item.message)).height;
expect(finalTop, 0.0);
expect(finalHeight, 30.0);
}
await tester.pump(const Duration(milliseconds: 1));
for (var item in sourceList) {
final double finalFrameTop =
tester.getTopLeft(find.text(item.message)).dy;
final double finalFrameHeight =
tester.getSize(find.text(item.message)).height;
expect(finalFrameTop, 0.0);
expect(finalFrameHeight, 30.0);
}
expect(logs, <String>['complete']);
});
testWidgets('items are animated to source', (WidgetTester tester) async {
final key = GlobalKey<SidekickTeamBuilderState<Item>>();
final List<String> logs = <String>[];
final sourceList = List.generate(4, (i) => Item(i));
final targetList = List.generate(4, (i) => Item(i + 4));
await tester.pumpWidget(MaterialApp(
home: SidekickTeamBuilderExample(key, sourceList, targetList)));
final SidekickTeamBuilderState<Item> state = key.currentState;
expect(state.sourceList, containsAllItemsInOrder([0, 1, 2, 3]));
expect(state.targetList, containsAllItemsInOrder([4, 5, 6, 7]));
for (var item in targetList) {
expect(find.text(item.message), findsOneWidget);
final double initialTop = tester.getTopLeft(find.text(item.message)).dy;
final double initialHeight =
tester.getSize(find.text(item.message)).height;
expect(initialTop, 0.0);
expect(initialHeight, 30.0);
}
state.moveAllToSource().then((_) => logs.add('complete'));
await tester.pump();
await tester.pump(const Duration(milliseconds: 500));
for (var item in targetList) {
final double midTop = tester.getTopLeft(find.text(item.message)).dy;
final double midHeight = tester.getSize(find.text(item.message)).height;
expect(midTop, closeTo(100.0, 0.1));
expect(midHeight, closeTo(40.0, 0.1));
}
await tester.pump(const Duration(milliseconds: 500));
for (var item in targetList) {
final double finalTop = tester.getTopLeft(find.text(item.message)).dy;
final double finalHeight =
tester.getSize(find.text(item.message)).height;
expect(finalTop, 200.0);
expect(finalHeight, 50.0);
}
await tester.pump(const Duration(milliseconds: 1));
for (var item in targetList) {
final double finalFrameTop =
tester.getTopLeft(find.text(item.message)).dy;
final double finalFrameHeight =
tester.getSize(find.text(item.message)).height;
expect(finalFrameTop, 200.0);
expect(finalFrameHeight, 50.0);
}
expect(logs, <String>['complete']);
});
testWidgets('items not moved do not animate', (WidgetTester tester) async {
final key = GlobalKey<SidekickTeamBuilderState<Item>>();
final List<String> logs = <String>[];
final sourceList = List.generate(4, (i) => Item(i));
final targetList = List.generate(4, (i) => Item(i + 4));
await tester.pumpWidget(MaterialApp(
home: SidekickTeamBuilderExample(key, sourceList, targetList)));
final SidekickTeamBuilderState<Item> state = key.currentState;
expect(state.sourceList, containsAllItemsInOrder([0, 1, 2, 3]));
expect(state.targetList, containsAllItemsInOrder([4, 5, 6, 7]));
for (var item in sourceList) {
expect(find.text(item.message), findsOneWidget);
final double initialTop = tester.getTopLeft(find.text(item.message)).dy;
final double initialHeight =
tester.getSize(find.text(item.message)).height;
expect(initialTop, 200.0);
expect(initialHeight, 50.0);
}
state.moveAllToSource().then((_) => logs.add('complete'));
await tester.pump();
await tester.pump(const Duration(milliseconds: 500));
for (var item in sourceList) {
final double midTop = tester.getTopLeft(find.text(item.message)).dy;
final double midHeight = tester.getSize(find.text(item.message)).height;
expect(midTop, 200.0);
expect(midHeight, 50.0);
}
await tester.pump(const Duration(milliseconds: 500));
for (var item in sourceList) {
final double finalTop = tester.getTopLeft(find.text(item.message)).dy;
final double finalHeight =
tester.getSize(find.text(item.message)).height;
expect(finalTop, 200.0);
expect(finalHeight, 50.0);
}
await tester.pump(const Duration(milliseconds: 1));
for (var item in sourceList) {
final double finalFrameTop =
tester.getTopLeft(find.text(item.message)).dy;
final double finalFrameHeight =
tester.getSize(find.text(item.message)).height;
expect(finalFrameTop, 200.0);
expect(finalFrameHeight, 50.0);
}
expect(logs, <String>['complete']);
});
testWidgets('new lists rebuild', (WidgetTester tester) async {
final key = GlobalKey<SidekickTeamBuilderState<Item>>();
final sourceList = List.generate(4, (i) => Item(i));
final targetList = List.generate(4, (i) => Item(i + 4));
await tester.pumpWidget(MaterialApp(
home: SidekickTeamBuilderExample(key, sourceList, targetList)));
final SidekickTeamBuilderState<Item> state = key.currentState;
expect(state.sourceList, containsAllItemsInOrder([0, 1, 2, 3]));
expect(state.targetList, containsAllItemsInOrder([4, 5, 6, 7]));
for (var item in sourceList) {
expect(find.text(item.message), findsOneWidget);
final double initialTop = tester.getTopLeft(find.text(item.message)).dy;
final double initialHeight =
tester.getSize(find.text(item.message)).height;
expect(initialTop, 200.0);
expect(initialHeight, 50.0);
}
final newSourceList = List.generate(2, (i) => Item(i));
final newTargetList = List.generate(2, (i) => Item(i + 2));
await tester.pumpWidget(MaterialApp(
home: SidekickTeamBuilderExample(key, newSourceList, newTargetList)));
expect(state.sourceList, containsAllItemsInOrder([0, 1]));
expect(state.targetList, containsAllItemsInOrder([2, 3]));
for (var item in newSourceList) {
expect(find.text(item.message), findsOneWidget);
final double initialTop = tester.getTopLeft(find.text(item.message)).dy;
final double initialHeight =
tester.getSize(find.text(item.message)).height;
expect(initialTop, 200.0);
expect(initialHeight, 50.0);
}
});
});
}
Matcher containsAllItemsInOrder(List<int> expected) =>
new _ItemContainsInOrder(expected);
class _ItemContainsInOrder extends Matcher {
_ItemContainsInOrder(this.ids);
final List<int> ids;
@override
Description describe(Description description) =>
description.add('contains in order(').addDescriptionOf(ids).add(')');
String _test(List<Item> item, Map matchState) {
var matcherIndex = 0;
for (var value in item) {
if (ids[matcherIndex] == value.index) matcherIndex++;
if (matcherIndex == item.length) return null;
}
return new StringDescription()
.add('did not find a value matching ')
.addDescriptionOf(ids[matcherIndex])
.add(' following expected prior values')
.toString();
}
@override
bool matches(item, Map matchState) => _test(item, matchState) == null;
@override
Description describeMismatch(item, Description mismatchDescription,
Map matchState, bool verbose) =>
mismatchDescription.add(_test(item, matchState));
}
| 35.252934
| 80
| 0.612042
|
7daa9f075aaaa9d41fe03d7b69d315e63e6d8c2c
| 707
|
rs
|
Rust
|
lib/src/likelihood/mod.rs
|
ManifoldFR/hawkes-process-rust
|
04d80ff6d01e8bc1857378b07f4548970af4eea0
|
[
"MIT"
] | 28
|
2018-07-01T16:50:00.000Z
|
2021-12-11T21:12:54.000Z
|
lib/src/likelihood/mod.rs
|
ManifoldFR/hawkes-process-rust
|
04d80ff6d01e8bc1857378b07f4548970af4eea0
|
[
"MIT"
] | 4
|
2018-10-14T18:07:16.000Z
|
2019-08-17T15:46:36.000Z
|
lib/src/likelihood/mod.rs
|
ManifoldFR/hawkes-process-rust
|
04d80ff6d01e8bc1857378b07f4548970af4eea0
|
[
"MIT"
] | 1
|
2021-02-03T09:01:38.000Z
|
2021-02-03T09:01:38.000Z
|
//! Utility functions to compute the log-likelihood of the data under the models.
//! The general form is given by
//! $$
//! \ell(\Theta) = \sum_i \log(\lambda_{t_i}) - \int_0^T \lambda_t dt
//! $$
mod hawkes;
pub use hawkes::{hawkes_likelihood,HawkesLikelihood};
use ndarray::prelude::*;
use crate::temporal::{PoissonProcess, DeterministicIntensity};
/// Log-likelihood of the data under the given Poisson model
/// $$ \ell(\lambda) =
/// N\ln\lambda - \lambda T
/// $$
pub fn poisson_likelihood(
times: ArrayView1<f64>,
model: &PoissonProcess,
tmax: f64) -> f64
{
let n_events = times.len();
let lbda = model.intensity(0.);
n_events as f64 * lbda.ln() - lbda * tmax
}
| 25.25
| 81
| 0.653465
|
9ad70178d0c9618f039bf0852346e652fc171cf6
| 14,727
|
py
|
Python
|
epitator/incident_annotator.py
|
langstok/EpiTator
|
721fdc444382a0493702ee5976c987954753f47a
|
[
"Apache-2.0"
] | 40
|
2017-05-27T03:53:22.000Z
|
2021-08-07T16:33:58.000Z
|
epitator/incident_annotator.py
|
langstok/EpiTator
|
721fdc444382a0493702ee5976c987954753f47a
|
[
"Apache-2.0"
] | 25
|
2017-07-17T14:33:24.000Z
|
2021-04-09T10:27:56.000Z
|
epitator/incident_annotator.py
|
langstok/EpiTator
|
721fdc444382a0493702ee5976c987954753f47a
|
[
"Apache-2.0"
] | 9
|
2017-11-15T05:13:53.000Z
|
2021-08-07T16:33:59.000Z
|
#!/usr/bin/env python
"""
Create incidents that group together multiple layers of annotations.
This is based on the createIncidentReportsFromEnhancements function from
EIDR-Connect, although some differences exist in the output structure,
and code related to manual curation (e.g. the accepted attribute)
is not included:
https://github.com/ecohealthalliance/eidr-connect/blob/master/imports/nlp.coffee#L93
"""
from __future__ import absolute_import
from .annotator import Annotator, AnnoTier
from .annospan import AnnoSpan, SpanGroup
from .count_annotator import CountAnnotator
from .date_annotator import DateAnnotator
from .spacy_annotator import SpacyAnnotator
from .geoname_annotator import GeonameAnnotator
from .species_annotator import SpeciesAnnotator
from .disease_annotator import DiseaseAnnotator
from .structured_incident_annotator import StructuredIncidentAnnotator, CANNOT_PARSE
import datetime
import re
from collections import defaultdict
def capitalize(s):
return s[0:1].upper() + s[1:]
def camelize(s):
return "".join(
word if idx == 0 else capitalize(word)
for idx, word in enumerate(s.split("_")))
def format_geoname(geoname):
"""
Format a geoname dictionary in the style of EIDR-Connect.
"""
result = {
"id": geoname["geonameid"]
}
for key, value in geoname.items():
if key in ["geonameid", "nameCount", "namesUsed", "score", "parents"]:
continue
result[camelize(key)] = value
return result
def get_territories(spans, sent_spans, phrase_spans):
"""
A annotation's territory is the sentence containing it,
and all the following sentences until the next annotation.
Annotations in the same sentence are grouped.
If sub-sentence phrase spans are provided, only the spans within each phase
are assigned to its territory, while outside of it the usual rules apply.
This is intended to improve associations when multiple counts for multiple
locations appear in the same sentence.
"""
doc = sent_spans[0].doc
territories = []
for sent_span, span_group in sent_spans.group_spans_by_containing_span(spans):
if len(territories) == 0 or len(span_group) > 0:
territories.append(AnnoSpan(
sent_span.start, sent_span.end, doc,
metadata=span_group))
else:
prev_territory = territories[-1]
prev_single_sent_spans = [
span for span in prev_territory.metadata
if span.metadata.get('scope') == 'sentence']
if len(prev_single_sent_spans) == 0:
territories[-1] = AnnoSpan(
prev_territory.start, sent_span.end, doc,
metadata=prev_territory.metadata)
else:
last_doc_scope_spans = []
for territory in reversed(territories):
last_doc_scope_spans = [
span for span in prev_territory.metadata
if span.metadata.get('scope') == 'document']
if len(last_doc_scope_spans) > 0:
break
territories.append(AnnoSpan(
sent_span.start, sent_span.end, doc,
metadata=last_doc_scope_spans))
phrase_territories = []
for phrase_span, span_group in phrase_spans.group_spans_by_containing_span(spans, allow_partial_containment=True):
if len(span_group) > 0:
phrase_territories.append(AnnoSpan(
phrase_span.start, phrase_span.end, doc,
metadata=span_group))
phrase_territories = AnnoTier(phrase_territories, presorted=True)
return AnnoTier(territories).subtract_overlaps(phrase_territories) + phrase_territories
class IncidentAnnotator(Annotator):
def annotate(self, doc, case_counts=None):
if doc.date:
publish_date = doc.date
else:
publish_date = datetime.datetime.now()
if case_counts:
case_counts = case_counts
else:
case_counts = doc.require_tiers('counts', via=CountAnnotator)
geonames = doc.require_tiers('geonames', via=GeonameAnnotator)
geoname_spans = [
AnnoSpan(
span.start,
span.end,
span.doc,
metadata=dict(geoname=format_geoname(span.metadata['geoname'].to_dict())))
for span in geonames]
geoname_spans += [
AnnoSpan(
span.start,
span.end,
span.doc,
metadata=dict(geoname={
'name': 'Earth',
'id': '6295630',
'asciiname': 'Earth',
'featureCode': 'AREA',
'countryCode': '',
'countryName': '',
'admin1Name': '',
'admin2Name': '',
'admin1Code': '',
'admin2Code': '',
'latitude': 0,
'longitude': 0,
}))
for span in doc.create_regex_tier(r"\b(global(ly)?|worldwide)\b").spans]
geoname_spans += [
AnnoSpan(
span.start,
span.end,
span.doc,
metadata={})
for span in doc.create_regex_tier(r"\b(national(ly)?|nationwide)\b").spans]
geonames = AnnoTier(geoname_spans)
sent_spans = doc.require_tiers('spacy.sentences', via=SpacyAnnotator)
disease_tier = doc.require_tiers('diseases', via=DiseaseAnnotator)
species_tier = doc.require_tiers('species', via=SpeciesAnnotator)
disease_mentions = defaultdict(lambda: 0)
for span in disease_tier:
disease_mentions[span.metadata['disease']['id']] += 1
# Copy disease tier
disease_tier = AnnoTier([
AnnoSpan(span.start, span.end, span.doc, metadata=span.metadata)
for span in disease_tier], presorted=True)
# scope one off disease mentions to sentences.
max_disease = max(disease_mentions.values()) if len(disease_mentions) > 0 else 0
if max_disease > 5:
for span in disease_tier:
if disease_mentions[span.metadata['disease']['id']] == 1:
span.metadata['scope'] = 'sentence'
else:
span.metadata['scope'] = 'document'
species_tier = AnnoTier([
AnnoSpan(span.start, span.end, span.doc, metadata=span.metadata)
for span in species_tier], presorted=True)
# scope one off species mentions to sentences.
for span in species_tier:
if disease_mentions[span.metadata['species']['id']] == 1:
span.metadata['scope'] = 'sentence'
else:
span.metadata['scope'] = 'document'
structured_incidents = doc.require_tiers(
'structured_incidents', via=StructuredIncidentAnnotator)
date_tier = doc.require_tiers('dates', via=DateAnnotator)
dates_out = []
for span in date_tier:
datetime_range = list(span.metadata['datetime_range'])
if datetime_range[0].date() > publish_date.date():
# Omit future dates
continue
if datetime_range[1].date() > publish_date.date():
# Truncate ranges that extend into the future to end at the end
# of the publication date.
datetime_range[1] = datetime.datetime(publish_date.year, publish_date.month, publish_date.day)
datetime_range[1] += datetime.timedelta(1)
dates_out.append(AnnoSpan(span.start, span.end, span.doc, metadata={
'datetime_range': datetime_range
}))
date_tier = AnnoTier(dates_out, presorted=True)
phrase_spans = []
for sent_span, comma_group in sent_spans.group_spans_by_containing_span(doc.create_regex_tier(",")):
phrase_spans += AnnoTier([sent_span]).subtract_overlaps(comma_group).spans
phrase_spans = AnnoTier(phrase_spans)
date_territories = get_territories(date_tier, sent_spans, phrase_spans)
geoname_territories = get_territories(geonames, sent_spans, phrase_spans)
disease_territories = get_territories(disease_tier, sent_spans, phrase_spans)
species_territories = get_territories(species_tier, sent_spans, phrase_spans)
incidents = []
for count_span in case_counts:
count = count_span.metadata.get('count')
attributes = set(count_span.metadata.get('attributes', []))
if not count:
continue
if not set(['case', 'death']) & attributes:
continue
if set(['recovery', 'annual', 'monthly', 'weekly']) & attributes:
continue
incident_spans = [count_span]
geoname_territory = geoname_territories.nearest_to(count_span)
date_territory = date_territories.nearest_to(count_span)
disease_territory = disease_territories.nearest_to(count_span)
species_territory = species_territories.nearest_to(count_span)
# grouping is done to deduplicate geonames
geonames_by_id = {}
for span in geoname_territory.metadata:
geoname = span.metadata.get('geoname')
if geoname:
geonames_by_id[geoname['id']] = geoname
incident_spans.append(span)
incident_data = {
'value': count,
'locations': list(geonames_by_id.values())
}
incident_data['count_annotation'] = count_span
incident_data['date_territory'] = date_territory
incident_data['geoname_territory'] = geoname_territory
incident_data['disease_territory'] = disease_territory
incident_data['species_territory'] = species_territory
# Use the document's date as the default
incident_data['dateRange'] = [
publish_date,
publish_date + datetime.timedelta(days=1)]
has_as_of_date = False
if len(date_territory.metadata) > 0:
date_span = AnnoTier(date_territory.metadata).nearest_to(count_span)
as_of_dates = doc.create_regex_tier(
re.compile(r"\bas of\b", re.I)
).with_following_spans_from([date_span], max_dist=8, allow_overlap=True)
has_as_of_date = len(as_of_dates) > 0
incident_data['dateRange'] = date_span.metadata['datetime_range']
incident_spans.append(date_span)
# A date and location must be in the count territory to create
# an incident.
if len(date_territory.metadata) == 0 or len(geoname_territory.metadata) == 0:
continue
# Detect whether count is cumulative
date_range_duration = incident_data['dateRange'][1] - incident_data['dateRange'][0]
duration_days = date_range_duration.total_seconds() / 60 / 60 / 24
incident_data['duration'] = duration_days
cumulative = False
if date_range_duration.total_seconds() >= 60 * 60 * 48:
cumulative = False
elif has_as_of_date:
cumulative = True
elif 'incremental' in attributes:
cumulative = False
elif 'cumulative' in attributes:
cumulative = True
elif date_range_duration.total_seconds() == 0:
cumulative = True
# Infer cumulative is case rate is greater than 300 per day
elif count / duration_days > 300:
cumulative = True
if 'ongoing' in attributes:
incident_data['type'] = 'activeCount'
elif cumulative:
if 'case' in attributes:
incident_data['type'] = 'cumulativeCaseCount'
if 'death' in attributes:
incident_data['type'] = 'cumulativeDeathCount'
else:
if 'case' in attributes:
incident_data['type'] = 'caseCount'
if 'death' in attributes:
incident_data['type'] = 'deathCount'
disease_span = AnnoTier(disease_territory.metadata).nearest_to(count_span)
if disease_span:
incident_data['resolvedDisease'] = dict(disease_span.metadata['disease'])
incident_spans.append(disease_span)
# Suggest humans as a default
incident_data['species'] = {
'id': 'tsn:180092',
'label': 'Homo sapiens'
}
species_span = AnnoTier(species_territory.metadata).nearest_to(count_span)
if species_span:
incident_data['species'] = species_span.metadata['species']
incident_spans.append(species_span)
incident_data['approximate'] = 'approximate' in attributes
if 'suspected' in attributes:
incident_data['status'] = 'suspected'
elif 'confirmed' in attributes:
incident_data['status'] = 'confirmed'
incidents.append(SpanGroup(incident_spans, metadata=incident_data))
for incident in structured_incidents:
if not incident.metadata.get('dateRange') or not incident.metadata.get('location'):
continue
required_properties = [
incident.metadata['type'],
incident.metadata['dateRange'],
incident.metadata['location'],
incident.metadata['value']]
if CANNOT_PARSE in required_properties:
continue
metadata = dict(incident.metadata)
if metadata['species'] == CANNOT_PARSE:
metadata['species'] = {
'id': 'tsn:180092',
'label': 'Homo sapiens'
}
if metadata['resolvedDisease'] == CANNOT_PARSE:
del metadata['resolvedDisease']
if "suspected" in metadata['attributes']:
metadata['status'] = "suspected"
elif "confirmed" in metadata['attributes']:
metadata['status'] = "confirmed"
metadata['locations'] = [format_geoname(metadata['location'])]
del metadata['location']
incidents.append(SpanGroup([incident], metadata=metadata))
return {'incidents': AnnoTier(incidents)}
| 45.736025
| 118
| 0.595437
|
f31809f8d12666c73ef39ee61f4ac004c8d1feb7
| 1,724
|
dart
|
Dart
|
lib/main.dart
|
kvutien/steve_b
|
4e9b586ceb6445ae644af7dde9d505fe37d312ad
|
[
"MIT"
] | null | null | null |
lib/main.dart
|
kvutien/steve_b
|
4e9b586ceb6445ae644af7dde9d505fe37d312ad
|
[
"MIT"
] | null | null | null |
lib/main.dart
|
kvutien/steve_b
|
4e9b586ceb6445ae644af7dde9d505fe37d312ad
|
[
"MIT"
] | 1
|
2022-03-06T05:04:27.000Z
|
2022-03-06T05:04:27.000Z
|
/*
Flutter 2.8.1 stable, Android Studio 2021.1 Bumblebee, Android SDK 32
Main program of Steve Observer, (c) Vu Tien Khang, Jan 2022
*/
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter_inappwebview/flutter_inappwebview.dart';
// import 'package:provider/provider.dart';
import 'package:steve_b/steve_webview.dart';
Future main() async {
// binding to Binary Messenger, for Flutter to call platform-specific APIs
WidgetsFlutterBinding.ensureInitialized();
if (Platform.isAndroid) {
await AndroidInAppWebViewController.setWebContentsDebuggingEnabled(true);
}
// the usual runApp starts here
runApp(const SteveApp());
}
class SteveApp extends StatelessWidget {
// This widget is the root of your application.
const SteveApp({Key? key}) : super(key: key);
/* prepare future routing between screens if needed
// final _screen1Manager = Screen1Manager();
// final _screen2Manager = Screen2Manager(); */
@override
Widget build(BuildContext context) {
return /*MultiProvider( // prepare future routing between screens
providers: [
ChangeNotifierProvider(
create: (context) => _screen1Manager,
),
ChangeNotifierProvider(
create: (context) => _screen2Manager,
),
// Add AppStateManager ChangeNotifierProvider
],
child:*/
MaterialApp(
title: 'Steve_B Earth Observer',
debugShowCheckedModeBanner: false,
theme: ThemeData(
brightness: Brightness.light,
primaryColor: Colors.white,
primarySwatch: Colors.lightGreen,
visualDensity: VisualDensity.adaptivePlatformDensity,
),
home: const SteveWebview(),
);
}
}
| 31.345455
| 77
| 0.694896
|
bea5a22313e340ac35cf21570cb60ba55ceec9a7
| 913
|
ts
|
TypeScript
|
dist/PackType.d.ts
|
Joelant05/mc-project-core
|
e7810269080c20f0f8412d990878d30883a759c2
|
[
"MIT"
] | 1
|
2021-11-09T15:21:13.000Z
|
2021-11-09T15:21:13.000Z
|
dist/PackType.d.ts
|
Joelant05/mc-project-core
|
e7810269080c20f0f8412d990878d30883a759c2
|
[
"MIT"
] | 1
|
2022-01-31T12:15:53.000Z
|
2022-01-31T21:18:05.000Z
|
dist/PackType.d.ts
|
Joelant05/mc-project-core
|
e7810269080c20f0f8412d990878d30883a759c2
|
[
"MIT"
] | 2
|
2022-01-21T19:54:31.000Z
|
2022-01-30T20:49:56.000Z
|
import { ProjectConfig } from './ProjectConfig';
export interface IPackType {
id: TPackTypeId;
matcher: string | string[];
color: string;
icon: string;
}
export declare type TPackTypeId = 'behaviorPack' | 'resourcePack' | 'skinPack' | 'worldTemplate';
export declare abstract class PackType<TSetupArg> {
protected projectConfig: ProjectConfig | undefined;
protected packTypes: IPackType[];
protected extensionPackTypes: Set<IPackType>;
constructor(projectConfig: ProjectConfig | undefined);
setProjectConfig(projectConfig: ProjectConfig): void;
abstract setup(arg: TSetupArg): Promise<void>;
get all(): IPackType[];
getFromId(packId: TPackTypeId): IPackType | undefined;
get(filePath: string): IPackType | undefined;
getId(filePath: string): "unknown" | TPackTypeId;
addExtensionPackType(packType: IPackType): {
dispose: () => boolean;
};
}
| 38.041667
| 97
| 0.710843
|
93a9ae84b63dce933117e0b6ee37dd1e8c496bb3
| 1,201
|
cs
|
C#
|
Discreet-GUI/Services/Extensions/BitmapEx.cs
|
DiscreetNetwork/discreet-gui
|
fb0ec3bd956559d6c9e8dccaccc108277ed6533b
|
[
"MIT"
] | null | null | null |
Discreet-GUI/Services/Extensions/BitmapEx.cs
|
DiscreetNetwork/discreet-gui
|
fb0ec3bd956559d6c9e8dccaccc108277ed6533b
|
[
"MIT"
] | null | null | null |
Discreet-GUI/Services/Extensions/BitmapEx.cs
|
DiscreetNetwork/discreet-gui
|
fb0ec3bd956559d6c9e8dccaccc108277ed6533b
|
[
"MIT"
] | null | null | null |
using QRCoder;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace Services.Extensions
{
/// <summary>
/// Bitmap class to extend static methods
/// </summary>
public static class BitmapEx
{
/// <summary>
/// Creates a QRCode System.Drawing.Bitmap and converts it to a Avalonia.Media.Imaging.Bitmap to be used for Avalonia.Controls.Image controls
/// </summary>
/// <param name="text">The content to be encoded into the QR Code</param>
/// <returns>Avalonia specific Bitmap</returns>
public static Avalonia.Media.Imaging.Bitmap CreateQRCode(string text)
{
QRCodeGenerator qrGenerator = new QRCodeGenerator();
QRCodeData qrCodeData = qrGenerator.CreateQrCode(text, QRCodeGenerator.ECCLevel.Q);
var qrCode = new BitmapByteQRCode(qrCodeData);
byte[] data = qrCode.GetGraphic(20);
using MemoryStream ms = new MemoryStream(data);
ms.Position = 0;
Avalonia.Media.Imaging.Bitmap avaloniaBitmap = new Avalonia.Media.Imaging.Bitmap(ms);
return avaloniaBitmap;
}
}
}
| 33.361111
| 149
| 0.644463
|
ecb4d7a51e915443f93a6f160b6692b8d67675e4
| 12,898
|
sql
|
SQL
|
src/main/resources/static/create.sql
|
TH-Brandenburg/University-Evaluation-Backend
|
fc43a166f62770a558da4e38e455b06ec5bf0019
|
[
"Apache-2.0"
] | 1
|
2016-06-22T13:49:25.000Z
|
2016-06-22T13:49:25.000Z
|
src/main/resources/static/create.sql
|
TH-Brandenburg/University-Evaluation-Backend
|
fc43a166f62770a558da4e38e455b06ec5bf0019
|
[
"Apache-2.0"
] | 37
|
2016-06-01T13:09:10.000Z
|
2017-10-07T13:14:34.000Z
|
src/main/resources/static/create.sql
|
TH-Brandenburg/University-Evaluation-Backend
|
fc43a166f62770a558da4e38e455b06ec5bf0019
|
[
"Apache-2.0"
] | null | null | null |
-- phpMyAdmin SQL Dump
-- version 4.2.7.1
-- http://www.phpmyadmin.net
--
-- Host: localhost
-- Erstellungszeit: 05. Jan 2016 um 15:37
-- Server Version: 5.6.20
-- PHP-Version: 5.5.15
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
--
-- Datenbank: `caeb`
--
--
-- Daten für Tabelle `choice`
--
INSERT INTO `choice` (`id`, `grade`, `text`) VALUES
(1, 0, 'keine Angabe'),
(2, 1, 'sehr gut'),
(3, 2, 'gut'),
(4, 3, 'befriedigend'),
(5, 4, 'ausreichend'),
(6, 5, 'ungenügend'),
(7, 1, 'ja, immer'),
(8, 2, 'sehr häufig'),
(9, 3, 'oft'),
(10, 4, 'selten'),
(11, 5, 'nie'),
(12, 1, 'ja, sehr'),
(13, 2, 'durchaus'),
(14, 3, 'mittelmäßig'),
(15, 4, 'eher nicht'),
(16, 5, 'überhaupt nicht'),
(17, 3, 'zu hoch'),
(18, 2, 'hoch'),
(19, 1, 'angemessen'),
(20, 2, 'niedrig'),
(21, 3, 'zu niedrig'),
(22, 1, 'sehr laut, sehr deutlich'),
(23, 2, 'laut, präzise'),
(24, 3, 'verständlich'),
(25, 4, 'leise, eher undeutlich'),
(26, 5, 'zu leise, undeutlich'),
(27, 1, 'ja, hervorragend'),
(28, 2, 'ja, fast immer'),
(29, 3, 'in der Regel ja'),
(30, 4, 'manchmal klappt es'),
(31, 5, 'nein, nie'),
(32, 1, 'ja, immer Dialog mit Studenten'),
(33, 2, 'überwiegend Dialog'),
(34, 3, 'gute Mischung'),
(35, 4, 'zu oft Monolog'),
(36, 5, 'nein, nur Monolog'),
(37, 2, 'ja, wenn Zeit war'),
(38, 1, 'sehr klar'),
(39, 2, 'gut strukturiert'),
(40, 4, 'sprunghaft'),
(41, 5, 'Roter Faden fehlte'),
(42, 5, 'zu viel Stoff in zuwenig Zeit'),
(43, 3, 'viel Stoff'),
(44, 3, 'wenig Stoff'),
(45, 5, 'zu viel Zeit für zuwenig Stoff'),
(46, 0, 'keine Ü/L vorhanden'),
(47, 1, 'stimme zu'),
(48, 2, 'stimme weitgehend zu'),
(49, 3, 'unentschieden'),
(50, 4, 'stimme weitgehend nicht zu'),
(51, 5, 'stimme nicht zu'),
(52, 5, 'viel zu viele Medien eingesetzt'),
(53, 3, 'etwas zu viele Medien eingesetzt'),
(54, 1, 'Medieneinsatz adäquat'),
(55, 3, 'etwas zu wenige Medien eingesetzt'),
(56, 5, 'viel zu wenig Medien eingesetzt'),
(57, 0, 'weiss ich nicht'),
(58, 1, 'habe sehr viel gelernt'),
(59, 2, 'habe viel gelernt'),
(60, 3, 'habe etwas gelernt.'),
(61, 4, 'habe wenig gelernt'),
(62, 5, 'habe sehr wenig gelernt'),
(63, 0, 'No comment'),
(64, 1, 'Positive answer'),
(65, 2, 'Negative answer'),
(66, 2, 'Neutral answer'),
(67, 3, 'Negative answer'),
(68, 2, 'Slightly positive answer'),
(69, 3, 'Slightly negative answer'),
(70, 4, 'Negative answer'),
(71, 3, 'neutral answer'),
(72, 4, 'Slightly negative answer'),
(73, 5, 'Negative answer'),
(74, 1, 'Very positive answer'),
(75, 2, 'positive answer'),
(76, 3, 'Slightly positive answer'),
(77, 6, 'Very negative answer'),
(78, 4, 'Neutral answer'),
(79, 5, 'Slightly negative answer'),
(80, 6, 'Negative answer'),
(81, 7, 'Very negative answer');
--
-- Daten für Tabelle `mc_question`
--
INSERT INTO `mc_question` (`id`, `text`) VALUES
(8, 'Ging der/die Dozent(in) auf Fragen innerhalb der LV ein?'),
(2, 'Haben Sie die Veranstaltung regelmässig besucht?'),
(3, 'Haben Sie Interesse an diesem Fach?'),
(20, 'Interface for question with 2 + 1 possible answers.'),
(21, 'Interface for question with 3 + 1 possible answers.'),
(22, 'Interface for question with 3 + 1 possible answers. The best answer placed in the middle.'),
(23, 'Interface for question with 4 + 1 possible answers.'),
(24, 'Interface for question with 5 + 1 possible answers.'),
(25, 'Interface for question with 5 + 1 possible answers. The best answer placed in the middle.'),
(26, 'Interface for question with 6 + 1 possible answers.'),
(28, 'Interface for question with 6 + 1 possible answers. The best answer placed in the middle.'),
(27, 'Interface for question with 7 + 1 possible answers.'),
(6, 'Kann er/sie schwierige Sachverhalte verständlich erklären?'),
(7, 'Versuchte der/die Dozent(in) festzustellen, ob die Studenten der LV folgen können?'),
(10, 'War der/die Dozent(in) gut vorbereitet?'),
(9, 'War er/sie auch ausserhalb der LV zu diesen Themen ansprechbar?'),
(11, 'Welche Gesamtnote geben Sie dem/der Dozenten(in)?'),
(12, 'Welche Gesamtnote geben Sie den Lehrunterlagen?'),
(19, 'Welche Gesamtnote geben Sie der Lehrveranstaltung?'),
(17, 'Wie beurteilen Sie den Medieneinsatz (Beamer, Tafel, Overhead-Projektor, usw.)?'),
(16, 'Wie beurteilen Sie die Ausstattung des Übungs- oder Laborraumes?'),
(1, 'Wie beurteilen Sie die Lehrveranstaltung insgesamt? Auf einer Skala von 1 bis 5; 1 = sehr gut, 5 = nicht gut'),
(18, 'Wie beurteilen Sie Ihren persönlichen Lernerfolg in dieser Lehrveranstaltung?'),
(4, 'Wie empfanden Sie das Niveau der Lehrveranstaltung?'),
(15, 'Die Übung war nützlich. Sie war sehr gut geeignet, die Vorlesungsinhalte zu verdeutlichen und zu vertiefen.'),
(14, 'Wie war die Stoffmenge im Verhältnis zur verfügbaren Zeit?'),
(13, 'Wie war die Vorgehensweise und Stoffpräsentation in der LV?'),
(5, 'Wie waren Sprache und Ausdrucksweise des Dozenten/der Dozentin?');
--
-- Daten für Tabelle `mc_question_choices`
--
INSERT INTO `mc_question_choices` (`mc_question_id`, `choices_id`) VALUES
(1, 1),
(1, 2),
(1, 3),
(1, 4),
(1, 5),
(1, 6),
(2, 1),
(2, 7),
(2, 8),
(2, 9),
(2, 10),
(2, 11),
(3, 1),
(3, 12),
(3, 13),
(3, 14),
(3, 15),
(3, 16),
(4, 1),
(4, 17),
(4, 18),
(4, 19),
(4, 20),
(4, 21),
(5, 1),
(5, 22),
(5, 23),
(5, 24),
(5, 25),
(5, 26),
(6, 1),
(6, 27),
(6, 28),
(6, 29),
(6, 30),
(6, 31),
(7, 1),
(7, 32),
(7, 33),
(7, 34),
(7, 35),
(7, 36),
(8, 1),
(8, 7),
(8, 37),
(8, 29),
(8, 10),
(8, 31),
(9, 1),
(9, 7),
(9, 37),
(9, 29),
(9, 10),
(9, 31),
(10, 1),
(10, 7),
(10, 8),
(10, 9),
(10, 10),
(10, 11),
(11, 1),
(11, 2),
(11, 3),
(11, 4),
(11, 5),
(11, 6),
(12, 1),
(12, 2),
(12, 3),
(12, 4),
(12, 5),
(12, 6),
(13, 1),
(13, 38),
(13, 39),
(13, 24),
(13, 40),
(13, 41),
(14, 1),
(14, 42),
(14, 43),
(14, 19),
(14, 44),
(14, 45),
(15, 46),
(15, 47),
(15, 48),
(15, 49),
(15, 50),
(15, 51),
(16, 46),
(16, 2),
(16, 3),
(16, 4),
(16, 5),
(16, 6),
(17, 1),
(17, 52),
(17, 53),
(17, 54),
(17, 55),
(17, 56),
(18, 57),
(18, 58),
(18, 59),
(18, 60),
(18, 61),
(18, 62),
(19, 1),
(19, 2),
(19, 3),
(19, 4),
(19, 5),
(19, 6),
(20, 63),
(20, 64),
(20, 65),
(21, 63),
(21, 64),
(21, 66),
(21, 67),
(22, 63),
(22, 67),
(22, 64),
(22, 67),
(23, 63),
(23, 64),
(23, 68),
(23, 69),
(23, 70),
(24, 63),
(24, 64),
(24, 68),
(24, 71),
(24, 72),
(24, 73),
(25, 63),
(25, 73),
(25, 69),
(25, 64),
(25, 69),
(25, 73),
(26, 63),
(26, 74),
(26, 75),
(26, 76),
(26, 72),
(26, 73),
(26, 77),
(27, 63),
(27, 74),
(27, 75),
(27, 76),
(27, 78),
(27, 79),
(27, 80),
(27, 81),
(28, 63),
(28, 81),
(28, 73),
(28, 69),
(28, 64),
(28, 69),
(28, 73),
(28, 81);
--
-- Daten für Tabelle `question`
--
INSERT INTO `question` (`id`, `max_length`, `only_numbers`, `text`) VALUES
(1, 2, b'1', 'Wie hoch ist Ihr gesamter Arbeitsaufwand für die Lehrveranstaltung(inkl. Vor- und Nachbereitung) in Stunden pro Woche?'),
(2, 1000, b'0', 'Was fanden Sie an der Lehrveranstaltung gut?'),
(3, 1000, b'0', 'Was fanden Sie an der Lehrveranstaltung weniger gut?'),
(4, 1000, b'0', 'Welche Verbesserungsvorschläge für die Lehrveranstaltung haben Sie?'),
(5, 1000, b'0', 'Was fanden Sie positiv?'),
(6, 1000, b'0', 'Was fanden Sie negativ?'),
(7, 1000, b'0', 'Welche Verbesserungsvorschläge würden Sie unterbreiten?'),
(8, 1000, b'0', 'Haben Sie weitere Anmerkungen?'),
(9, 1000, b'0', 'This shows the interface for a question which can be answered by text or with a photo.'),
(10, 1000, b'0', 'This shows how text questions behave when next to each other.');
--
-- Daten für Tabelle `question_revision`
--
INSERT INTO `question_revision` (`id`, `name`, `text_questions_first`) VALUES
(1, 'Wirtschaft', b'1'),
(2, 'Informatik und Medien', b'0'),
(3, 'Demo Evaluation', b'0');
--
-- Daten für Tabelle `question_revision_choices`
--
INSERT INTO `question_revision_choices` (`question_revision_id`, `choices_id`) VALUES
(1, 1),
(1, 2),
(1, 3),
(1, 4),
(1, 5),
(1, 6),
(2, 1),
(2, 7),
(2, 8),
(2, 9),
(2, 10),
(2, 11),
(2, 1),
(2, 12),
(2, 13),
(2, 14),
(2, 15),
(2, 16),
(2, 1),
(2, 17),
(2, 18),
(2, 19),
(2, 20),
(2, 21),
(2, 1),
(2, 22),
(2, 23),
(2, 24),
(2, 25),
(2, 26),
(2, 1),
(2, 27),
(2, 28),
(2, 29),
(2, 30),
(2, 31),
(2, 1),
(2, 32),
(2, 33),
(2, 34),
(2, 35),
(2, 36),
(2, 1),
(2, 7),
(2, 37),
(2, 29),
(2, 10),
(2, 31),
(2, 1),
(2, 7),
(2, 37),
(2, 29),
(2, 10),
(2, 31),
(2, 1),
(2, 7),
(2, 8),
(2, 9),
(2, 10),
(2, 11),
(2, 1),
(2, 2),
(2, 3),
(2, 4),
(2, 5),
(2, 6),
(2, 1),
(2, 2),
(2, 3),
(2, 4),
(2, 5),
(2, 6),
(2, 1),
(2, 38),
(2, 39),
(2, 24),
(2, 40),
(2, 41),
(2, 1),
(2, 42),
(2, 43),
(2, 19),
(2, 44),
(2, 45),
(2, 46),
(2, 47),
(2, 48),
(2, 49),
(2, 50),
(2, 51),
(2, 46),
(2, 2),
(2, 3),
(2, 4),
(2, 5),
(2, 6),
(2, 1),
(2, 52),
(2, 53),
(2, 54),
(2, 55),
(2, 56),
(2, 57),
(2, 58),
(2, 59),
(2, 60),
(2, 61),
(2, 62),
(2, 1),
(2, 2),
(2, 3),
(2, 4),
(2, 5),
(2, 6),
(3, 63),
(3, 64),
(3, 65),
(3, 63),
(3, 64),
(3, 66),
(3, 67),
(3, 63),
(3, 67),
(3, 64),
(3, 67),
(3, 63),
(3, 64),
(3, 68),
(3, 69),
(3, 70),
(3, 63),
(3, 64),
(3, 68),
(3, 71),
(3, 72),
(3, 73),
(3, 63),
(3, 73),
(3, 69),
(3, 64),
(3, 69),
(3, 73),
(3, 63),
(3, 74),
(3, 75),
(3, 76),
(3, 72),
(3, 73),
(3, 77),
(3, 63),
(3, 74),
(3, 75),
(3, 76),
(3, 78),
(3, 79),
(3, 80),
(3, 81),
(3, 63),
(3, 81),
(3, 73),
(3, 69),
(3, 64),
(3, 69),
(3, 73),
(3, 81);
--
-- Daten für Tabelle `question_revision_mc_questions`
--
INSERT INTO `question_revision_mc_questions` (`question_revision_id`, `mc_questions_id`) VALUES
(1, 1),
(2, 2),
(2, 3),
(2, 4),
(2, 5),
(2, 6),
(2, 7),
(2, 8),
(2, 9),
(2, 10),
(2, 11),
(2, 12),
(2, 13),
(2, 14),
(2, 15),
(2, 16),
(2, 17),
(2, 18),
(2, 19),
(3, 20),
(3, 21),
(3, 22),
(3, 23),
(3, 24),
(3, 25),
(3, 26),
(3, 27),
(3, 28);
--
-- Daten für Tabelle `question_revision_questions`
--
INSERT INTO `question_revision_questions` (`question_revision_id`, `questions_id`) VALUES
(1, 1),
(1, 2),
(1, 3),
(1, 4),
(2, 5),
(2, 6),
(2, 7),
(2, 8),
(3, 9),
(3, 10);
--
-- Daten für Tabelle `study_path`
--
INSERT INTO `study_path` (`id`, `degree`, `department`, `name`) VALUES
(1, 0, 2, 'Wirtschaftsinformatik'),
(2, 0, 2, 'Berufsbegleitender Bachelor Betriebswirtschaftslehre'),
(3, 0, 2, 'Betriebswirtschaftslehre'),
(4, 1, 2, 'Betriebswirtschaftslehre'),
(5, 1, 2, 'Security Management'),
(6, 1, 2, 'Technologie- und Innovationsmanagement'),
(7, 1, 2, 'Wirtschaftsinformatik'),
(8, 0, 0, 'Applied Computer Science'),
(9, 0, 0, 'Informatik'),
(10, 0, 0, 'Medizininformatik'),
(11, 0, 0, 'Medieninformatik'),
(12, 1, 0, 'Informatik'),
(13, 1, 0, 'Digitale Medien'),
(14, 1, 0, 'Medieninformatik');
--
-- Daten für Tabelle `subject`
--
INSERT INTO `subject` (`id`, `degree`, `department`, `name`) VALUES
(1, 1, 2, 'Unternehmensführung'),
(2, 1, 2, 'Wertorientiertes IT-Management'),
(3, 1, 2, 'Theorien der Informatik'),
(4, 1, 2, 'Advanced Software Engineering'),
(5, 1, 2, 'Modellierung und Analyse von Prozessen'),
(6, 0, 0, 'Mathematik I'),
(7, 0, 0, 'Informatik und Logik'),
(8, 0, 0, 'Mathematik II'),
(9, 0, 0, 'Mathematik III'),
(10, 0, 0, 'Alternative Programmierparadigmen'),
(11, 0, 0, 'Mathematische Programmierung'),
(12, 0, 0, 'Human-Computer Interaction'),
(13, 0, 0, 'Grundlagen der Wissensverarbeitung'),
(14, 0, 0, 'Autonome Mobile Systeme'),
(15, 0, 0, 'Wissensbasierte Systeme in der Medizin'),
(16, 0, 0, 'Künstliche Intelligenz'),
(17, 1, 0, 'Mathematik'),
(18, 1, 0, 'Künstliche Intelligenz'),
(19, 1, 0, 'Einführung in Wissenschaftliches Arbeiten und Schreiben'),
(20, 1, 0, 'Mobile Informationssysteme'),
(21, 0, 0, 'Dummy1'),
(22, 0, 0, 'Dummy2'),
(23, 0, 0, 'Dummy3'),
(24, 0, 0, 'Mensch-Computer-Interaktion');
--
-- Daten für Tabelle `tutor`
--
INSERT INTO `tutor` (`id`, `department`, `family_name`, `name`) VALUES
(1, 2, 'Franz', 'Robert'),
(2, 2, 'Scheeg', 'Jochen'),
(3, 2, 'Hoeding', 'Michael'),
(4, 2, 'Pfister', 'Winfried'),
(5, 2, 'Wikarski', 'Dietmar'),
(6, 0, 'Socher', 'Rolf'),
(7, 0, 'Boersch', 'Ingo'),
(8, 0, 'Heinsohn', 'Jochen'),
(9, 0, 'Loose', 'Harald'),
(10, 0, 'Preuss', 'Thomas'),
(11, 0, 'Kindsmueller', 'Martin Christof'),
(12, 0, 'Buchholz', 'Sven'),
(13, 0, 'Schmidt', 'Gabriele'),
(14, 0, 'Busse', 'Susanne'),
(15, 0, 'Kim', 'Stefan'),
(16, 0, 'Syrjakow', 'Michael'),
(17, 0, 'Vielhauer', 'Claus'),
(18, 0, 'Homeister', 'Mathias'),
(19, 0, 'Kell', 'Gerald'),
(20, 0, 'Creuzburg', 'Reiner'),
(21, 0, 'Hasche', 'Eberhard'),
(22, 0, 'Urban', 'Alexander'),
(23, 0, 'Schafföner', 'Martin'),
(24, 0, 'Beck', 'Eberhard'),
(25, 0, 'Schrader', 'Thomas'),
(26, 0, 'Jänicke', 'Karl-Heinz'),
(27, 2, 'Hausmann', 'Dietmar'),
(28, 2, 'Sens', 'Katrin'),
(29, 0, 'Account', 'Developer');
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 19.904321
| 135
| 0.577764
|
69445abf21ecbad844220e1bef5c62b0421d9310
| 76
|
kt
|
Kotlin
|
common/src/commonMain/kotlin/com/trikot/sample/viewmodels/base/BaseNavigationDelegate.kt
|
mirego/trikot.patron
|
e03afc848e8043c5aaeb852e351fa9665c26456e
|
[
"BSD-3-Clause"
] | 12
|
2020-05-27T19:51:35.000Z
|
2021-06-14T13:39:06.000Z
|
common/src/commonMain/kotlin/com/trikot/sample/viewmodels/base/BaseNavigationDelegate.kt
|
mirego/trikot.patron
|
e03afc848e8043c5aaeb852e351fa9665c26456e
|
[
"BSD-3-Clause"
] | 11
|
2020-05-20T21:10:45.000Z
|
2022-01-14T17:03:40.000Z
|
common/src/commonMain/kotlin/com/trikot/sample/viewmodels/base/BaseNavigationDelegate.kt
|
mirego/trikot.patron
|
e03afc848e8043c5aaeb852e351fa9665c26456e
|
[
"BSD-3-Clause"
] | 2
|
2020-12-22T10:26:11.000Z
|
2021-03-19T02:56:50.000Z
|
package com.trikot.sample.viewmodels.base
interface BaseNavigationDelegate
| 19
| 41
| 0.881579
|
9ff82e1e50901d8f1404bd9895ee3c2613ee71a2
| 149
|
py
|
Python
|
multispinsys/TensorOps/__init__.py
|
Marcupio/SpinProgram
|
4d47e5e5048423ae69869a0300558e1fee809bf0
|
[
"Apache-2.0"
] | null | null | null |
multispinsys/TensorOps/__init__.py
|
Marcupio/SpinProgram
|
4d47e5e5048423ae69869a0300558e1fee809bf0
|
[
"Apache-2.0"
] | null | null | null |
multispinsys/TensorOps/__init__.py
|
Marcupio/SpinProgram
|
4d47e5e5048423ae69869a0300558e1fee809bf0
|
[
"Apache-2.0"
] | null | null | null |
from . import Reduce
from . import spinops
from . import spintensor
__all__ = ["Reduce",
"spinops",
"spintensor"
]
| 16.555556
| 24
| 0.557047
|
8d049b47e82b183ebbd60dd3771dbff7f8b90f86
| 639
|
kt
|
Kotlin
|
app/src/main/java/com/example/chadrick/datalabeling/Models/ServerInfo.kt
|
chadrick-kwag/datalabeling_app
|
f73ff0c64718c20692a157ca6b5ece499512ff24
|
[
"MIT"
] | null | null | null |
app/src/main/java/com/example/chadrick/datalabeling/Models/ServerInfo.kt
|
chadrick-kwag/datalabeling_app
|
f73ff0c64718c20692a157ca6b5ece499512ff24
|
[
"MIT"
] | null | null | null |
app/src/main/java/com/example/chadrick/datalabeling/Models/ServerInfo.kt
|
chadrick-kwag/datalabeling_app
|
f73ff0c64718c20692a157ca6b5ece499512ff24
|
[
"MIT"
] | null | null | null |
package com.example.chadrick.datalabeling.Models
import android.content.Context
import java.io.*
/**
* Created by chadrick on 17. 12. 4.
*/
class ServerInfo private constructor() {
private object holder {
val INSTANCE = ServerInfo()
}
lateinit var serveraddress: String
companion object {
val instance = holder.INSTANCE
}
fun config(configfilestream: InputStream) {
val reader = BufferedReader(InputStreamReader(configfilestream))
// assumes the first line is the serveraddress
serveraddress = reader.readLine().trim()
configfilestream.close()
}
}
| 18.257143
| 72
| 0.672926
|
7be7e64476cd97a7a8a935625fc1d6b4b1e345a0
| 21,222
|
cpp
|
C++
|
libraries/mne/c/mne_proj_op.cpp
|
Andrey1994/mne-cpp
|
6264b1107b9447b7db64309f73f09e848fd198c4
|
[
"BSD-3-Clause"
] | 2
|
2021-11-16T19:38:12.000Z
|
2021-11-18T20:52:08.000Z
|
libraries/mne/c/mne_proj_op.cpp
|
Andrey1994/mne-cpp
|
6264b1107b9447b7db64309f73f09e848fd198c4
|
[
"BSD-3-Clause"
] | null | null | null |
libraries/mne/c/mne_proj_op.cpp
|
Andrey1994/mne-cpp
|
6264b1107b9447b7db64309f73f09e848fd198c4
|
[
"BSD-3-Clause"
] | 1
|
2021-11-16T19:39:01.000Z
|
2021-11-16T19:39:01.000Z
|
//=============================================================================================================
/**
* @file mne_proj_op.cpp
* @author Lorenz Esch <lesch@mgh.harvard.edu>;
* Matti Hamalainen <msh@nmr.mgh.harvard.edu>;
* Christoph Dinh <chdinh@nmr.mgh.harvard.edu>
* @version dev
* @date January, 2017
*
* @section LICENSE
*
* Copyright (C) 2017, Lorenz Esch, Matti Hamalainen, Christoph Dinh. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that
* the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of MNE-CPP authors nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
* @brief Definition of the MNEProjOp Class.
*
*/
//*************************************************************************************************************
//=============================================================================================================
// INCLUDES
//=============================================================================================================
#include <fiff/fiff_constants.h>
#include <fiff/fiff_tag.h>
#include "mne_proj_op.h"
#include "mne_proj_item.h"
#include <QFile>
#include <Eigen/Core>
#ifndef TRUE
#define TRUE 1
#endif
#ifndef FALSE
#define FALSE 0
#endif
#ifndef FAIL
#define FAIL -1
#endif
#ifndef OK
#define OK 0
#endif
#define MALLOC_23(x,t) (t *)malloc((x)*sizeof(t))
#define REALLOC_23(x,y,t) (t *)((x == NULL) ? malloc((y)*sizeof(t)) : realloc((x),(y)*sizeof(t)))
#define FREE_23(x) if ((char *)(x) != NULL) free((char *)(x))
#define FREE_CMATRIX_23(m) mne_free_cmatrix_23((m))
void mne_free_cmatrix_23 (float **m)
{
if (m) {
FREE_23(*m);
FREE_23(m);
}
}
#define ALLOC_CMATRIX_23(x,y) mne_cmatrix_23((x),(y))
static void matrix_error_23(int kind, int nr, int nc)
{
if (kind == 1)
printf("Failed to allocate memory pointers for a %d x %d matrix\n",nr,nc);
else if (kind == 2)
printf("Failed to allocate memory for a %d x %d matrix\n",nr,nc);
else
printf("Allocation error for a %d x %d matrix\n",nr,nc);
if (sizeof(void *) == 4) {
printf("This is probably because you seem to be using a computer with 32-bit architecture.\n");
printf("Please consider moving to a 64-bit platform.");
}
printf("Cannot continue. Sorry.\n");
exit(1);
}
float **mne_cmatrix_23(int nr,int nc)
{
int i;
float **m;
float *whole;
m = MALLOC_23(nr,float *);
if (!m) matrix_error_23(1,nr,nc);
whole = MALLOC_23(nr*nc,float);
if (!whole) matrix_error_23(2,nr,nc);
for(i=0;i<nr;i++)
m[i] = whole + i*nc;
return m;
}
float mne_dot_vectors_23 (float *v1,
float *v2,
int nn)
{
#ifdef BLAS
int one = 1;
float res = sdot(&nn,v1,&one,v2,&one);
return res;
#else
float res = 0.0;
int k;
for (k = 0; k < nn; k++)
res = res + v1[k]*v2[k];
return res;
#endif
}
//============================= mne_named_matrix.c =============================
void mne_string_to_name_list_23(const QString& s, QStringList& listp,int &nlistp)
/*
* Convert a colon-separated list into a string array
*/
{
QStringList list;
if (!s.isEmpty() && s.size() > 0) {
list = FIFFLIB::FiffStream::split_name_list(s);
//list = s.split(":");
}
listp = list;
nlistp = list.size();
return;
}
void fromFloatEigenMatrix_23(const Eigen::MatrixXf& from_mat, float **& to_mat, const int m, const int n)
{
for ( int i = 0; i < m; ++i)
for ( int j = 0; j < n; ++j)
to_mat[i][j] = from_mat(i,j);
}
void fromFloatEigenMatrix_23(const Eigen::MatrixXf& from_mat, float **& to_mat)
{
fromFloatEigenMatrix_23(from_mat, to_mat, from_mat.rows(), from_mat.cols());
}
QString mne_name_list_to_string_23(const QStringList& list)
/*
* Convert a string array to a colon-separated string
*/
{
int nlist = list.size();
QString res;
if (nlist == 0 || list.isEmpty())
return res;
// res[0] = '\0';
for (int k = 0; k < nlist-1; k++) {
res += list[k];
res += ":";
}
res += list[nlist-1];
return res;
}
QString mne_channel_names_to_string_23(const QList<FIFFLIB::FiffChInfo>& chs, int nch)
/*
* Make a colon-separated string out of channel names
*/
{
QStringList names;
QString res;
if (nch <= 0)
return res;
for (int k = 0; k < nch; k++)
names.append(chs.at(k).ch_name);
res = mne_name_list_to_string_23(names);
return res;
}
//*************************************************************************************************************
//=============================================================================================================
// USED NAMESPACES
//=============================================================================================================
using namespace Eigen;
using namespace FIFFLIB;
using namespace MNELIB;
//*************************************************************************************************************
//=============================================================================================================
// DEFINE MEMBER METHODS
//=============================================================================================================
MneProjOp::MneProjOp()
: nitems (0)
, names (NULL)
, nch (0)
, nvec (0)
, proj_data (NULL)
{
}
//*************************************************************************************************************
MneProjOp::~MneProjOp()
{
// mne_free_proj_op
for (int k = 0; k < nitems; k++)
if(items[k])
delete items[k];
// mne_free_proj_op_proj
}
//*************************************************************************************************************
void MneProjOp::mne_free_proj_op_proj(MneProjOp *op)
{
if (op == NULL)
return;
FREE_CMATRIX_23(op->proj_data);
op->names.clear();
op->nch = 0;
op->nvec = 0;
op->proj_data = NULL;
return;
}
//*************************************************************************************************************
MneProjOp *MneProjOp::mne_proj_op_combine(MneProjOp *to, MneProjOp *from)
/*
* Copy items from 'from' operator to 'to' operator
*/
{
int k;
MneProjItem* it;
if (to == NULL)
to = new MneProjOp();
if (from) {
for (k = 0; k < from->nitems; k++) {
it = from->items[k];
mne_proj_op_add_item(to,it->vecs,it->kind,it->desc);
to->items[to->nitems-1]->active_file = it->active_file;
}
}
return to;
}
//*************************************************************************************************************
void MneProjOp::mne_proj_op_add_item_act(MneProjOp *op, MneNamedMatrix *vecs, int kind, const QString& desc, int is_active)
/*
* Add a new item to an existing projection operator
*/
{
MneProjItem* new_item;
int k;
// op->items = REALLOC(op->items,op->nitems+1,mneProjItem);
// op->items[op->nitems] = new_item = new MneProjItem();
new_item = new MneProjItem();
op->items.append(new_item);
new_item->active = is_active;
new_item->vecs = new MneNamedMatrix(*vecs);
if (kind == FIFFV_MNE_PROJ_ITEM_EEG_AVREF) {
new_item->has_meg = FALSE;
new_item->has_eeg = TRUE;
}
else {
for (k = 0; k < vecs->ncol; k++) {
if (vecs->collist[k].contains("EEG"))//strstr(vecs->collist[k],"EEG") == vecs->collist[k])
new_item->has_eeg = TRUE;
if (vecs->collist[k].contains("MEG"))//strstr(vecs->collist[k],"MEG") == vecs->collist[k])
new_item->has_meg = TRUE;
}
if (!new_item->has_meg && !new_item->has_eeg) {
new_item->has_meg = TRUE;
new_item->has_eeg = FALSE;
}
else if (new_item->has_meg && new_item->has_eeg) {
new_item->has_meg = TRUE;
new_item->has_eeg = FALSE;
}
}
if (!desc.isEmpty())
new_item->desc = desc;
new_item->kind = kind;
new_item->nvec = new_item->vecs->nrow;
op->nitems++;
MneProjOp::mne_free_proj_op_proj(op); /* These data are not valid any more */
return;
}
//*************************************************************************************************************
void MneProjOp::mne_proj_op_add_item(MneProjOp *op, MneNamedMatrix *vecs, int kind, const QString& desc)
{
mne_proj_op_add_item_act(op, vecs, kind, desc, TRUE);
}
//*************************************************************************************************************
MneProjOp *MneProjOp::mne_dup_proj_op(MneProjOp *op)
/*
* Provide a duplicate (item data only)
*/
{
MneProjOp* dup = new MneProjOp();
MneProjItem* it;
int k;
if (!op)
return NULL;
for (k = 0; k < op->nitems; k++) {
it = op->items[k];
mne_proj_op_add_item_act(dup,it->vecs,it->kind,it->desc,it->active);
dup->items[k]->active_file = it->active_file;
}
return dup;
}
//*************************************************************************************************************
MneProjOp *MneProjOp::mne_proj_op_average_eeg_ref(const QList<FiffChInfo>& chs, int nch)
/*
* Make the projection operator for average electrode reference
*/
{
int eegcount = 0;
int k;
float **vec_data;
QStringList names;
MneNamedMatrix* vecs;
MneProjOp* op;
for (k = 0; k < nch; k++)
if (chs.at(k).kind == FIFFV_EEG_CH)
eegcount++;
if (eegcount == 0) {
qCritical("No EEG channels specified for average reference.");
return NULL;
}
vec_data = ALLOC_CMATRIX_23(1,eegcount);
for (k = 0; k < nch; k++)
if (chs.at(k).kind == FIFFV_EEG_CH)
names.append(chs.at(k).ch_name);
for (k = 0; k < eegcount; k++)
vec_data[0][k] = 1.0/sqrt((double)eegcount);
QStringList emptyList;
vecs = MneNamedMatrix::build_named_matrix(1,eegcount,emptyList,names,vec_data);
op = new MneProjOp();
mne_proj_op_add_item(op,vecs,FIFFV_MNE_PROJ_ITEM_EEG_AVREF,"Average EEG reference");
return op;
}
//*************************************************************************************************************
int MneProjOp::mne_proj_op_affect(MneProjOp *op, const QStringList& list, int nlist)
{
int k;
int naff;
if (!op)
return 0;
for (k = 0, naff = 0; k < op->nitems; k++)
if (op->items[k]->active && MneProjItem::mne_proj_item_affect(op->items[k],list,nlist))
naff += op->items[k]->nvec;
return naff;
}
//*************************************************************************************************************
int MneProjOp::mne_proj_op_affect_chs(MneProjOp *op, const QList<FiffChInfo>& chs, int nch)
{
QString ch_string;
int res;
QStringList list;
int nlist;
if (nch == 0)
return FALSE;
ch_string = mne_channel_names_to_string_23(chs,nch);
mne_string_to_name_list_23(ch_string,list,nlist);
res = mne_proj_op_affect(op,list,nlist);
list.clear();
return res;
}
//*************************************************************************************************************
int MneProjOp::mne_proj_op_proj_vector(MneProjOp *op, float *vec, int nvec, int do_complement)
/*
* Apply projection operator to a vector (floats)
* Assume that all dimension checking etc. has been done before
*/
{
static float *res = NULL;
int res_size = 0;
float *pvec;
float w;
int k,p;
if (!op || op->nitems <= 0 || op->nvec <= 0)
return OK;
if (op->nch != nvec) {
printf("Data vector size does not match projection operator");
return FAIL;
}
if (op->nch > res_size) {
res = REALLOC_23(res,op->nch,float);
res_size = op->nch;
}
for (k = 0; k < op->nch; k++)
res[k] = 0.0;
for (p = 0; p < op->nvec; p++) {
pvec = op->proj_data[p];
w = mne_dot_vectors_23(pvec,vec,op->nch);
for (k = 0; k < op->nch; k++)
res[k] = res[k] + w*pvec[k];
}
if (do_complement) {
for (k = 0; k < op->nch; k++)
vec[k] = vec[k] - res[k];
}
else {
for (k = 0; k < op->nch; k++)
vec[k] = res[k];
}
return OK;
}
//*************************************************************************************************************
MneProjOp *MneProjOp::mne_read_proj_op_from_node(FiffStream::SPtr &stream, const FiffDirNode::SPtr &start)
/*
* Load all the linear projection data
*/
{
MneProjOp* op = NULL;
QList<FiffDirNode::SPtr> proj;
FiffDirNode::SPtr start_node;
QList<FiffDirNode::SPtr> items;
FiffDirNode::SPtr node;
int k;
QString item_desc,desc_tag;
int global_nchan,item_nchan,nlist;
QStringList item_names;
int item_kind;
float **item_vectors = NULL;
int item_nvec;
int item_active;
MneNamedMatrix* item;
FiffTag::SPtr t_pTag;
if (!stream) {
qCritical("File not open mne_read_proj_op_from_node");
goto bad;
}
if (!start || start->isEmpty())
start_node = stream->dirtree();
else
start_node = start;
op = new MneProjOp();
proj = start_node->dir_tree_find(FIFFB_PROJ);
if (proj.size() == 0 || proj[0]->isEmpty()) /* The caller must recognize an empty projection */
goto out;
/*
* Only the first projection block is recognized
*/
items = proj[0]->dir_tree_find(FIFFB_PROJ_ITEM);
if (items.size() == 0 || items[0]->isEmpty()) /* The caller must recognize an empty projection */
goto out;
/*
* Get a common number of channels
*/
node = proj[0];
if(!node->find_tag(stream, FIFF_NCHAN, t_pTag))
global_nchan = 0;
else {
global_nchan = *t_pTag->toInt();
// TAG_FREE(tag);
}
/*
* Proceess each item
*/
for (k = 0; k < items.size(); k++) {
node = items[k];
/*
* Complicated procedure for getting the description
*/
item_desc.clear();
if (node->find_tag(stream, FIFF_NAME, t_pTag)) {
item_desc += t_pTag->toString();
}
/*
* Take the first line of description if it exists
*/
if (node->find_tag(stream, FIFF_DESCRIPTION, t_pTag)) {
desc_tag = t_pTag->toString();
int pos;
if((pos = desc_tag.indexOf("\n")) >= 0)
desc_tag.truncate(pos);
if (!item_desc.isEmpty())
item_desc += " ";
item_desc += desc_tag;
}
/*
* Possibility to override number of channels here
*/
if (!node->find_tag(stream, FIFF_NCHAN, t_pTag)) {
item_nchan = global_nchan;
}
else {
item_nchan = *t_pTag->toInt();
}
if (item_nchan <= 0) {
qCritical("Number of channels incorrectly specified for one of the projection items.");
goto bad;
}
/*
* Take care of the channel names
*/
if (!node->find_tag(stream, FIFF_PROJ_ITEM_CH_NAME_LIST, t_pTag))
goto bad;
item_names = FiffStream::split_name_list(t_pTag->toString());
if (item_names.size() != item_nchan) {
printf("Channel name list incorrectly specified for proj item # %d",k+1);
item_names.clear();
goto bad;
}
/*
* Kind of item
*/
if (!node->find_tag(stream, FIFF_PROJ_ITEM_KIND, t_pTag))
goto bad;
item_kind = *t_pTag->toInt();
/*
* How many vectors
*/
if (!node->find_tag(stream,FIFF_PROJ_ITEM_NVEC, t_pTag))
goto bad;
item_nvec = *t_pTag->toInt();
/*
* The projection data
*/
if (!node->find_tag(stream,FIFF_PROJ_ITEM_VECTORS, t_pTag))
goto bad;
MatrixXf tmp_item_vectors = t_pTag->toFloatMatrix().transpose();
item_vectors = ALLOC_CMATRIX_23(tmp_item_vectors.rows(),tmp_item_vectors.cols());
fromFloatEigenMatrix_23(tmp_item_vectors, item_vectors);
/*
* Is this item active?
*/
if (node->find_tag(stream, FIFF_MNE_PROJ_ITEM_ACTIVE, t_pTag)) {
item_active = *t_pTag->toInt();
}
else
item_active = FALSE;
/*
* Ready to add
*/
QStringList emptyList;
item = MneNamedMatrix::build_named_matrix(item_nvec,item_nchan,emptyList,item_names,item_vectors);
mne_proj_op_add_item_act(op,item,item_kind,item_desc,item_active);
delete item;
op->items[op->nitems-1]->active_file = item_active;
}
out :
return op;
bad : {
if(op)
delete op;
return NULL;
}
}
//*************************************************************************************************************
MneProjOp *MneProjOp::mne_read_proj_op(const QString &name)
{
QFile file(name);
FiffStream::SPtr stream(new FiffStream(&file));
if(!stream->open())
return NULL;
MneProjOp* res = NULL;
FiffDirNode::SPtr t_default;
res = mne_read_proj_op_from_node(stream,t_default);
stream->close();
return res;
}
//*************************************************************************************************************
void MneProjOp::mne_proj_op_report_data(FILE *out, const char *tag, MneProjOp *op, int list_data, char **exclude, int nexclude)
/*
* Output info about the projection operator
*/
{
int j,k,p,q;
MneProjItem* it;
MneNamedMatrix* vecs;
int found;
if (out == NULL)
return;
if (op == NULL)
return;
if (op->nitems <= 0) {
fprintf(out,"Empty operator\n");
return;
}
for (k = 0; k < op->nitems; k++) {
it = op->items[k];
if (list_data && tag)
fprintf(out,"%s\n",tag);
if (tag)
fprintf(out,"%s",tag);
fprintf(out,"# %d : %s : %d vecs : %d chs %s %s\n",
k+1,it->desc.toUtf8().constData(),it->nvec,it->vecs->ncol,
it->has_meg ? "MEG" : "EEG",
it->active ? "active" : "idle");
if (list_data && tag)
fprintf(out,"%s\n",tag);
if (list_data) {
vecs = op->items[k]->vecs;
for (q = 0; q < vecs->ncol; q++) {
fprintf(out,"%-10s",vecs->collist[q].toUtf8().constData());
fprintf(out,q < vecs->ncol-1 ? " " : "\n");
}
for (p = 0; p < vecs->nrow; p++)
for (q = 0; q < vecs->ncol; q++) {
for (j = 0, found = 0; j < nexclude; j++) {
if (QString::compare(exclude[j],vecs->collist[q]) == 0) {
found = 1;
break;
}
}
fprintf(out,"%10.5g ",found ? 0.0 : vecs->data[p][q]);
fprintf(out,q < vecs->ncol-1 ? " " : "\n");
}
if (list_data && tag)
fprintf(out,"%s\n",tag);
}
}
return;
}
//*************************************************************************************************************
void MneProjOp::mne_proj_op_report(FILE *out, const char *tag, MneProjOp *op)
{
mne_proj_op_report_data(out,tag,op, FALSE, NULL, 0);
}
| 27.66884
| 127
| 0.499105
|
7b295abe56ce46d9e00fc11a94901fada02ddc92
| 5,235
|
rb
|
Ruby
|
lib/rester/client.rb
|
roberthoner/rester
|
ee1c31ac7164614bd745b947d4d4af71a3410442
|
[
"MIT"
] | null | null | null |
lib/rester/client.rb
|
roberthoner/rester
|
ee1c31ac7164614bd745b947d4d4af71a3410442
|
[
"MIT"
] | null | null | null |
lib/rester/client.rb
|
roberthoner/rester
|
ee1c31ac7164614bd745b947d4d4af71a3410442
|
[
"MIT"
] | null | null | null |
require 'json'
require 'active_support/inflector'
require 'logger'
module Rester
class Client
autoload(:Adapters, 'rester/client/adapters')
autoload(:Resource, 'rester/client/resource')
autoload(:Response, 'rester/client/response')
autoload(:Middleware, 'rester/client/middleware')
attr_reader :adapter
attr_reader :version
attr_reader :error_threshold
attr_reader :retry_period
attr_reader :logger
def initialize(adapter, params={})
self.adapter = adapter
self.version = params[:version]
@error_threshold = (params[:error_threshold] || 3).to_i
@retry_period = (params[:retry_period] || 1).to_f
self.logger = params[:logger]
@_breaker_enabled = params.fetch(:circuit_breaker_enabled,
ENV['RACK_ENV'] != 'test' && ENV['RAILS_ENV'] != 'test'
)
@_resource = Resource.new(self)
_init_requester
# Send a test ping request to the service so we can store the producer's
# name for future request logs
fail Errors::ConnectionError unless connected?
end
def connected?
adapter.connected? && @_requester.call(:get, '/ping', {}).successful?
rescue Exception => e
logger.error("Connection Error: #{e.inspect}")
false
end
def circuit_breaker_enabled?
!!@_breaker_enabled
end
def logger=(logger)
logger = Utils::LoggerWrapper.new(logger) if logger
@logger = logger
end
def logger
@logger || Rester.logger
end
def name
@_producer_name
end
def request(verb, path, params={})
path = _path_with_version(path)
@_requester.call(verb, path, params)
rescue Utils::CircuitBreaker::CircuitOpenError
# Translate this error so it's easier handle for clients.
# Also, at some point we may want to extract CircuitBreaker into its own
# gem, and this will make that easier.
raise Errors::CircuitOpenError
end
##
# This is only implemented by the StubAdapter.
def with_context(*args, &block)
adapter.with_context(*args, &block)
end
protected
def adapter=(adapter)
@adapter = adapter
end
def version=(version)
unless (@version = (version || 1).to_i) > 0
fail ArgumentError, 'version must be > 0'
end
end
private
##
# Submits the method to the adapter.
def method_missing(meth, *args, &block)
@_resource.send(:method_missing, meth, *args, &block)
end
##
# Sets up the circuit breaker for making requests to the service.
#
# Any exception raised by the `_request` method will count as a failure for
# the circuit breaker. Once the threshold for errors has been reached, the
# circuit opens and all subsequent requests will raise a CircuitOpenError.
#
# When the circuit is opened or closed, a message is sent to the logger for
# the client.
def _init_requester
if circuit_breaker_enabled?
@_requester = Utils::CircuitBreaker.new(
threshold: error_threshold, retry_period: retry_period
) { |*args| _request(*args) }
@_requester.on_open do
logger.error("circuit opened for #{name}")
end
@_requester.on_close do
logger.info("circuit closed for #{name}")
end
else
@_requester = proc { |*args| _request(*args) }
end
end
##
# Add a correlation ID to the header and send the request to the adapter
def _request(verb, path, params)
Rester.wrap_request do
Rester.request_info[:producer_name] = name
Rester.request_info[:path] = path
Rester.request_info[:verb] = verb
logger.info('sending request')
_set_default_headers
start_time = Time.now.to_f
begin
response = adapter.request(verb, path, params)
_process_response(start_time, verb, path, *response)
rescue Errors::TimeoutError
logger.error('timed out')
raise
end
end
end
def _set_default_headers
adapter.headers(
'X-Rester-Correlation-ID' => Rester.correlation_id,
'X-Rester-Consumer-Name' => Rester.service_name,
'X-Rester-Producer-Name' => name
)
end
def _path_with_version(path)
Utils.join_paths("/v#{version}", path)
end
def _process_response(start_time, verb, path, status, headers, body)
elapsed_ms = (Time.now.to_f - start_time) * 1000
response = Response.new(status, _parse_json(body))
@_producer_name = headers['X-Rester-Producer-Name']
logger.info("received status #{status} after %0.3fms" % elapsed_ms)
unless [200, 201, 400].include?(status)
case status
when 401
fail Errors::AuthenticationError
when 403
fail Errors::ForbiddenError
when 404
fail Errors::NotFoundError, path
else
fail Errors::ServerError, response[:message]
end
end
response
end
def _parse_json(data)
if data.is_a?(String) && !data.empty?
JSON.parse(data, symbolize_names: true)
else
{}
end
end
end # Client
end # Rester
| 27.845745
| 79
| 0.633811
|
79a8ba7507d474c23652f943ecc436fe5eb5989a
| 4,103
|
php
|
PHP
|
resources/views/auth/passwords/reset.blade.php
|
PrismPrince/SSG-Electronic-Dropbox
|
56f67a667e957edb83072b3ee812f02964253954
|
[
"MIT"
] | null | null | null |
resources/views/auth/passwords/reset.blade.php
|
PrismPrince/SSG-Electronic-Dropbox
|
56f67a667e957edb83072b3ee812f02964253954
|
[
"MIT"
] | null | null | null |
resources/views/auth/passwords/reset.blade.php
|
PrismPrince/SSG-Electronic-Dropbox
|
56f67a667e957edb83072b3ee812f02964253954
|
[
"MIT"
] | null | null | null |
@extends('layouts.app')
@section('content')
<div class="container root-content">
<div class="row">
<div class="col-md-8 col-md-offset-2" v-cloak>
@if (session('status'))
<alert-success>
<span class="glyphicon glyphicon-ok-sign"></span>
{{ session('status') }}
</alert-success>
@endif
@if ($errors->has('email') || $errors->has('password'))
<alert-danger>
<span class="glyphicon glyphicon-exclamation-sign"></span>
<strong>Error!</strong>
<ul>
@if ($errors->has('email'))
<li>{{ $errors->first('email') }}</li>
@endif
@if ($errors->has('password'))
<li>{{ $errors->first('password') }}</li>
@endif
</ul>
</alert-danger>
@endif
<div class="panel panel-default">
<div class="panel-heading">Reset Password</div>
<div class="panel-body">
<input type="hidden" id="errEmail" value="{{ old('email') }}">
<form class="form-horizontal" role="form" method="POST" action="{{ url('/password/reset') }}">
{{ csrf_field() }}
<input type="hidden" name="token" value="{{ $token }}">
<div
class="form-group"
:class="errors.email.status != errors.email.dirty ? 'has-error' : ''"
>
<label for="email" class="col-md-4 control-label">E-Mail Address</label>
<div class="col-md-6">
<input
id="email"
type="email"
class="form-control"
name="email"
required
v-model="email"
@keyup.enter.prevent="focus('#password')"
>
<span class="help-block" v-if="errors.email.status != errors.email.dirty">
<strong>@{{errors.email.text}}</strong>
</span>
</div>
</div>
<div
class="form-group"
:class="errors.password.status != errors.password.dirty ? 'has-error' : ''"
>
<label for="password" class="col-md-4 control-label">Password</label>
<div class="col-md-6">
<input
id="password"
type="password"
class="form-control"
name="password"
required
v-model="password"
@keyup.enter.prevent="focus('#password-confirm')"
>
<span class="help-block" v-if="errors.password.status != errors.password.dirty">
<strong>@{{errors.password.text}}</strong>
</span>
</div>
</div>
<div
class="form-group"
:class="errors.password_confirm.status != errors.password_confirm.dirty ? 'has-error' : ''"
>
<label for="password-confirm" class="col-md-4 control-label">Confirm Password</label>
<div class="col-md-6">
<input
id="password-confirm"
type="password"
class="form-control"
name="password_confirmation"
required
v-model="password_confirm"
>
<span class="help-block" v-if="errors.password_confirm.status != errors.password_confirm.dirty">
<strong>@{{errors.password_confirm.text}}</strong>
</span>
</div>
</div>
<div class="form-group">
<div class="col-md-6 col-md-offset-4">
<button type="submit" class="btn btn-primary" :disabled="btnDisabled">
Reset Password
</button>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
@endsection
@push('scripts')
<script src="{{ asset('/js/reset.min.js') }}"></script>
@endpush
| 32.824
| 112
| 0.46137
|
45b40c3a76715f4f8c204708b5e7ea962011d45e
| 7,209
|
py
|
Python
|
shoot/shoot_simple.py
|
EliRockenbeck/gamedev
|
1d0786703cf32fbb7320e9ea5ce2fb7408ff15ad
|
[
"MIT"
] | 63
|
2016-01-17T15:44:19.000Z
|
2022-01-19T00:40:32.000Z
|
shoot/shoot_simple.py
|
EliRockenbeck/gamedev
|
1d0786703cf32fbb7320e9ea5ce2fb7408ff15ad
|
[
"MIT"
] | 1
|
2017-01-14T22:23:37.000Z
|
2017-01-15T03:39:23.000Z
|
shoot/shoot_simple.py
|
EliRockenbeck/gamedev
|
1d0786703cf32fbb7320e9ea5ce2fb7408ff15ad
|
[
"MIT"
] | 53
|
2016-04-06T17:41:44.000Z
|
2022-03-06T01:25:41.000Z
|
# Shoot!
# by KidsCanCode 2014
# A generic space shooter - prototype (no art)
# For educational purposes only
import pygame
import sys
import random
# define some colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
BGCOLOR = BLACK
class Meteor(pygame.sprite.Sprite):
def __init__(self):
pygame.sprite.Sprite.__init__(self)
# start with a random speed
self.speed = random.randrange(3, 12)
self.image = pygame.Surface([36, 54])
self.image.fill(RED)
self.rect = self.image.get_rect()
# start off the top of the screen
self.rect.y = random.randrange(-50, -30)
self.rect.x = random.randrange(WIDTH)
def update(self):
# move the sprite
self.rect.y += self.speed
if self.rect.y > HEIGHT + 10:
self.rect.y = random.randrange(-50, 30)
self.rect.x = random.randrange(WIDTH)
class Player(pygame.sprite.Sprite):
speed = 12
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.speed_x = 0
self.level = 0
self.score = 0
self.shoot_sound = pygame.mixer.Sound("snd/laser4.wav")
self.hit_snd = pygame.mixer.Sound("snd/explode.wav")
self.explode_snd = pygame.mixer.Sound("snd/die.wav")
self.image = pygame.Surface([36, 54])
self.image.fill(GREEN)
self.rect = self.image.get_rect()
self.rect.x = WIDTH / 2
self.rect.bottom = HEIGHT - 10
def update(self):
# move the sprite
self.rect.x += self.speed_x
# check for edges
if self.rect.left < 0:
self.rect.left = 0
self.speed_x = 0
if self.rect.right > WIDTH:
self.rect.right = WIDTH
self.speed_x = 0
def go(self, dir):
if dir == 'L':
self.speed_x = -self.speed
elif dir == 'R':
self.speed_x = self.speed
def stop(self):
self.speed_x = 0
def shoot(self):
bullet = Bullet(self.rect.midtop, self.rect.y, self.level)
active_sprite_list.add(bullet)
bullet_sprite_list.add(bullet)
self.shoot_sound.play()
class Bullet(pygame.sprite.Sprite):
speed = -15
def __init__(self, x, y, level):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.Surface([10, 20])
self.image.fill(RED)
self.rect = self.image.get_rect()
self.rect.midtop = x
self.rect.y = y
def update(self):
self.rect.y += self.speed
if self.rect.bottom < 0:
self.kill()
def draw_text(text, size, x, y):
# utility function to draw text on screen
font_name = pygame.font.match_font('arial')
font = pygame.font.Font(font_name, size)
text_surface = font.render(text, True, WHITE)
text_rect = text_surface.get_rect()
text_rect.midtop = (x, y)
screen.blit(text_surface, text_rect)
def show_start_screen():
# Display the starting screen
screen.fill(BGCOLOR)
draw_text("Shoot!", 72, WIDTH/2, HEIGHT/4)
draw_text("Move with the arrow keys", 24, WIDTH/2, HEIGHT/2)
draw_text("Shoot the meteors", 24, WIDTH/2, HEIGHT*5/8)
draw_text("Press a key to begin", 24, WIDTH/2, HEIGHT*3/4)
pygame.display.update()
# wait for a keypress to start
wait_for_key()
while True:
if wait_for_key():
pygame.event.get()
return
def show_go_screen(score):
    """Draw the Game Over screen with the final `score`, then wait for a key."""
    screen.fill(BGCOLOR)
    draw_text("GAME OVER", 58, WIDTH/2, HEIGHT/4)
    draw_text("Score: %s" % score, 24, WIDTH/2, HEIGHT/2)
    draw_text("Press a key to begin", 24, WIDTH/2, HEIGHT*3/4)
    pygame.display.update()
    # Brief pause so a key held at the moment of death is not registered
    # immediately, then wait for a fresh key release.
    pygame.time.wait(500)
    wait_for_key()
    while not wait_for_key():
        pass
    pygame.event.get()  # flush remaining events before restarting
def wait_for_key():
    """Poll once for a key release (KEYUP, not KEYDOWN).

    Returns the released key code, or None if no key release is queued.
    A queued window-close event or an Esc release quits the program.
    """
    if pygame.event.get(pygame.QUIT):
        pygame.quit()
        sys.exit()
    released = pygame.event.get(pygame.KEYUP)
    if not released:
        return None
    key = released[0].key
    if key == pygame.K_ESCAPE:
        pygame.quit()
        sys.exit()
    return key
# basic constants for your game options
WIDTH = 360
HEIGHT = 480
FPS = 30

# initialize pygame
pygame.init()
# initialize sound - remove if you're not using sound
pygame.mixer.init()
screen = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Shoot!")
clock = pygame.time.Clock()
running = True

show_start_screen()
# Outer loop: one iteration per round; rebuilds the world after each game over.
while True:
    # Fresh sprite groups and actors for a new round.
    active_sprite_list = pygame.sprite.Group()
    meteor_sprite_list = pygame.sprite.Group()
    bullet_sprite_list = pygame.sprite.Group()
    player = Player()
    active_sprite_list.add(player)
    # Meteor is defined elsewhere in this file (outside this excerpt).
    for i in range(10):
        meteor = Meteor()
        active_sprite_list.add(meteor)
        meteor_sprite_list.add(meteor)
    # play the game!
    while running:
        clock.tick(FPS)
        # check for all your events
        for event in pygame.event.get():
            # this one checks for the window being closed
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            # now check for keypresses
            elif event.type == pygame.KEYDOWN:
                # this one quits if the player presses Esc
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys.exit()
                if event.key == pygame.K_LEFT:
                    player.go('L')
                if event.key == pygame.K_RIGHT:
                    player.go('R')
                if event.key == pygame.K_SPACE:
                    player.shoot()
            elif event.type == pygame.KEYUP:
                # Releasing either direction key stops the ship.
                if event.key in (pygame.K_LEFT, pygame.K_RIGHT):
                    player.stop()
            # add any other key events here
        # Game logic goes here #########
        active_sprite_list.update()
        # check for collisions
        # first, ship with meteors
        hit = pygame.sprite.spritecollideany(player, meteor_sprite_list)
        if hit:
            # you die
            player.explode_snd.play()
            pygame.time.wait(500)
            running = False
        # next, check bullets with meteors (both colliders are removed)
        hits = pygame.sprite.groupcollide(meteor_sprite_list, bullet_sprite_list,
                                          True, True)
        # for each meteor destroyed, spawn a new one
        for hit in hits:
            player.hit_snd.play()
            player.score += 10
            newmeteor = Meteor()
            active_sprite_list.add(newmeteor)
            meteor_sprite_list.add(newmeteor)
        # Draw/update screen ########
        screen.fill(BGCOLOR)
        active_sprite_list.draw(screen)
        text = 'Score: %s' % player.score
        draw_text(text, 18, 45, 10)
        # after drawing, flip the display
        pygame.display.flip()
    show_go_screen(player.score)
    # Re-arm the inner loop for the next round.
    running = True
| 30.16318
| 81
| 0.588847
|
e6b6b59cbc9c4a0a0fd81e6e5401c96acea5c4a5
| 119
|
sql
|
SQL
|
models/tmp/stg_youtube__channel_demographics_tmp.sql
|
fivetran-chloe/dbt_youtube_analytics_source
|
c3ab92c68ce960d3836b1fa679fedf7f61d6486b
|
[
"Apache-2.0"
] | null | null | null |
models/tmp/stg_youtube__channel_demographics_tmp.sql
|
fivetran-chloe/dbt_youtube_analytics_source
|
c3ab92c68ce960d3836b1fa679fedf7f61d6486b
|
[
"Apache-2.0"
] | 2
|
2021-08-23T19:42:56.000Z
|
2022-03-30T18:01:27.000Z
|
models/tmp/stg_youtube__channel_demographics_tmp.sql
|
fivetran-chloe/dbt_youtube_analytics_source
|
c3ab92c68ce960d3836b1fa679fedf7f61d6486b
|
[
"Apache-2.0"
] | 1
|
2021-08-19T22:00:40.000Z
|
2021-08-19T22:00:40.000Z
|
{{ config(enabled=var('youtube__using_channel_demographics', true)) }}
-- Passthrough staging model: exposes the source channel_demographics table
-- unchanged. Disabled entirely when the youtube__using_channel_demographics
-- variable is set to false (defaults to true).
select *
from {{ var('channel_demographics') }}
| 29.75
| 70
| 0.739496
|
9054a203f2c0bc1252398588e6a19a77c65891b3
| 1,175
|
h
|
C
|
components/base/include/cgv/base/node.h
|
tobias-haenel/cgv-density-estimation
|
3be1b07a7b21d1cfd956fb19b5f0d83fb51bd308
|
[
"BSD-3-Clause"
] | 1
|
2020-07-26T10:54:41.000Z
|
2020-07-26T10:54:41.000Z
|
components/base/include/cgv/base/node.h
|
tobias-haenel/cgv-density-estimation
|
3be1b07a7b21d1cfd956fb19b5f0d83fb51bd308
|
[
"BSD-3-Clause"
] | null | null | null |
components/base/include/cgv/base/node.h
|
tobias-haenel/cgv-density-estimation
|
3be1b07a7b21d1cfd956fb19b5f0d83fb51bd308
|
[
"BSD-3-Clause"
] | null | null | null |
#pragma once

#include "named.h"
#include <cgv/data/ref_ptr.h>

#include "lib_begin.h"

namespace cgv {
	namespace base {

/// forward declaration needed by the node_ptr typedef below
class CGV_API node;

/// ref counted pointer to a node
typedef data::ref_ptr<node,true> node_ptr;

/** The node class keeps a pointer to its parent, enabling upward
    traversal of an object hierarchy (see get_root()). */
class CGV_API node : public named
{
protected:
	/// declare node_ptr to be a friend class
	friend class data::ref_ptr<node,true>;
	/// store a pointer to the parent node
	node_ptr parent;
public:
	/// construct from name
	node(const std::string& name = "");
	/// return the parent node
	node_ptr get_parent() const;
	/// return the root node by traversing parents until no more parent is available
	base_ptr get_root() const;
	/// set a new parent node
	void set_parent(node_ptr _parent);
	/// cast upward to node
	data::ref_ptr<node,true> get_node();
	/// overload to return the type name of this object
	std::string get_type_name() const;
};

/// specialization that lets the generic base-pointer cast machinery
/// convert a base* to a node_ptr via get_node()
template <>
struct cast_helper<node>
{
	inline static data::ref_ptr<node,true> cast(base* b)
	{
		return b->get_node();
	}
};

// explicit template instantiation export for MSVC 2005 and newer
#if _MSC_VER >= 1400
CGV_TEMPLATE template class CGV_API data::ref_ptr<node>;
#endif

	}
}

#include <cgv/config/lib_end.h>
| 21.363636
| 81
| 0.71234
|
44001c8a179b931d505378b7957ab8c0d4d1d91d
| 2,957
|
py
|
Python
|
python/pysnips/ml/stats.py
|
obilaniu/PySnips
|
b543cba5661b2c562ea542af891ea8d26bab4cd8
|
[
"MIT"
] | 2
|
2017-01-30T23:57:19.000Z
|
2017-02-17T21:13:57.000Z
|
python/pysnips/ml/stats.py
|
obilaniu/PySnips
|
b543cba5661b2c562ea542af891ea8d26bab4cd8
|
[
"MIT"
] | 1
|
2018-01-29T00:01:04.000Z
|
2018-01-29T00:24:14.000Z
|
python/pysnips/ml/stats.py
|
obilaniu/PySnips
|
b543cba5661b2c562ea542af891ea8d26bab4cd8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Imports
import numpy as np, warnings
#
# Collect statistics on tensors in a dictionary.
#
# Interesting statistics about tensors:
#
# - Shape
# - CountElem/Count0/CountPInf/CountNInf/CountPFin/CountNFin/CountNaN
# - Sum
# - L1/L2 Norm
# - Min/Mean/Med/Max
# - Std/Var
# - Histogram of IEEE Std 754 sign&exponents
#
def tensorstats(tensor, prefixName=None):
    """Collect summary statistics about a numpy tensor into a flat dict.

    Statistics collected:
      - Shape
      - CountElem/Count0/CountPInf/CountNInf/CountPFin/CountNFin/CountNaN
      - Sum, L1/L2 norms (NaN-ignoring)
      - Min/Mean/Median/Max, Std/Var (NaN-ignoring)
      - Histogram over IEEE Std 754 sign & exponent buckets

    Arguments:
        tensor:     numpy ndarray to analyze.
        prefixName: optional key prefix; a "/" separator is appended
                    automatically unless already present. None or "" yields
                    unprefixed keys.

    Returns:
        dict mapping "<prefix><stat>" to plain Python ints/floats/lists.
    """
    # Assert Numpy Tensor
    assert isinstance(tensor, np.ndarray)

    #
    # Construct histogram bins carefully.
    #
    # All the bins are [lower, upper) inclusive-exclusive except the last one,
    # which is [lower, upper] inclusive. The edges cover every power of two
    # from 2**-127 to 2**126 for both signs, plus dedicated buckets for
    # -inf, zero and +inf.
    #
    # List comprehensions (rather than Python-2-only xrange()/map(), whose
    # lazy py3 results cannot be concatenated with lists) keep this working
    # on both Python 2 and Python 3.
    pbins = [2.0**k for k in range(-127, +127)]
    nbins = [float(np.nextafter(-x, 0)) for x in pbins[::-1]]
    bins = [-np.inf, float(np.nextafter(-np.inf, 0))] + \
           nbins + \
           [0.0, float(np.nextafter(0.0, 1))] + \
           pbins + \
           [+np.inf, +np.inf]

    #
    # Compute statistics while suppressing RuntimeWarnings (e.g. the nan*
    # reductions warn on all-NaN input).
    #
    with warnings.catch_warnings():
        # BUGFIX: catch_warnings() only snapshots/restores filter state; an
        # explicit filter is required to actually silence the warnings.
        warnings.simplefilter("ignore")
        statShape     = [int(d) for d in tensor.shape]
        statCountElem = int  (tensor.size)
        statCount0    = int  (np.count_nonzero(tensor == 0))
        statCountPInf = int  (np.count_nonzero(tensor == +np.inf))
        statCountNInf = int  (np.count_nonzero(tensor == -np.inf))
        statCountPFin = int  (np.count_nonzero(tensor >  0)) - statCountPInf
        statCountNFin = int  (np.count_nonzero(tensor <  0)) - statCountNInf
        statCountNaN  = int  (np.count_nonzero(tensor != tensor))
        statSum       = float(np.nansum(tensor))
        statL1        = float(np.nansum(np.abs(tensor)))
        statL2        = float(np.nansum(np.abs(tensor)**2))
        statMin       = float(np.nanmin(tensor))
        statMean      = float(np.nanmean(tensor))
        statMedian    = float(np.nanmedian(tensor))
        statMax       = float(np.nanmax(tensor))
        statStd       = float(np.nanstd(tensor))
        statVar       = float(np.nanvar(tensor))
        statHisto     = [int(c) for c in np.histogram(tensor, bins)[0]]

    #
    # Handle prefix in dictionary entries
    #
    if prefixName in [None, ""]:
        prefixName = ""
    else:
        prefixName += "" if prefixName.endswith("/") else "/"

    #
    # Construct and return statistics dictionary
    #
    return {
        prefixName+"shape":     statShape,
        prefixName+"countElem": statCountElem,
        prefixName+"count0":    statCount0,
        prefixName+"countPInf": statCountPInf,
        prefixName+"countNInf": statCountNInf,
        prefixName+"countPFin": statCountPFin,
        prefixName+"countNFin": statCountNFin,
        prefixName+"countNaN":  statCountNaN,
        prefixName+"sum":       statSum,
        prefixName+"l1":        statL1,
        prefixName+"l2":        statL2,
        prefixName+"min":       statMin,
        prefixName+"mean":      statMean,
        prefixName+"median":    statMedian,
        prefixName+"max":       statMax,
        prefixName+"std":       statStd,
        prefixName+"var":       statVar,
        prefixName+"histo":     statHisto,
    }
| 30.484536
| 75
| 0.623943
|
e040c9ffc928b3ac105f7dcfd21ed7d307b571e1
| 621
|
cs
|
C#
|
Scripts/Items/Components/LooseScrew.cs
|
SeracStudio/rocket-miner
|
7d8f7c1ae5617663bab6f05366815f50334507c5
|
[
"OLDAP-2.2.1"
] | null | null | null |
Scripts/Items/Components/LooseScrew.cs
|
SeracStudio/rocket-miner
|
7d8f7c1ae5617663bab6f05366815f50334507c5
|
[
"OLDAP-2.2.1"
] | 1
|
2020-12-24T18:50:20.000Z
|
2020-12-24T18:50:20.000Z
|
Scripts/Items/Components/LooseScrew.cs
|
SeracStudio/rocket-miner
|
7d8f7c1ae5617663bab6f05366815f50334507c5
|
[
"OLDAP-2.2.1"
] | null | null | null |
using Photon.Pun;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class LooseScrew : MonoBehaviour
{
    // The Player component this effect acts on.
    Player player;

    private void Start()
    {
        player = GetComponent<Player>();
        // Re-roll the effect every time a new room finishes loading.
        MapController.RUNNING.OnRoomLoaded += Effect;
    }

    private void Effect()
    {
        // Always clear inverted movement first, then re-apply it with a
        // 1-in-6 chance for the freshly loaded room.
        player.TriggerRPC("SetInversedMovement", RpcTarget.All, false);
        bool triggered = Random.Range(0, 6) == 0;
        if (!triggered) return;
        Debug.Log("INVERSED");
        player.TriggerRPC("SetInversedMovement", RpcTarget.All, true);
    }
}
| 23.884615
| 71
| 0.644122
|
390ac0c4a654a89f8ec1ad844d38973b24d5a68e
| 1,268
|
py
|
Python
|
tests/hs2019_default.py
|
backupbrain/drf-keypair-permissions
|
d3e427b4780b20033d66d934d22dbb9ce7f439f5
|
[
"0BSD"
] | 2
|
2021-05-19T23:01:32.000Z
|
2021-05-20T10:57:17.000Z
|
tests/hs2019_default.py
|
backupbrain/drf-keypair-permissions
|
d3e427b4780b20033d66d934d22dbb9ce7f439f5
|
[
"0BSD"
] | null | null | null |
tests/hs2019_default.py
|
backupbrain/drf-keypair-permissions
|
d3e427b4780b20033d66d934d22dbb9ce7f439f5
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env/python3
from keys import p256_private_key_string, p256_public_key_id
from securehttpclient import SecureHttpClient
"""
Test default settings for ECDSA-P256.
If a list of headers is not included,
the date is the only header that is signed by default for hs2019.
The string to sign would be:
-----------------------------------
date: Sun, 05 Jan 2014 21:31:40 GMT
-----------------------------------
Resulting Signature:
Signature algorithm="hs2019",keyId="P256Test",headers="(request-target) host date",signature="MEUCIQCtmaLAdg5gTruZntyRo/Wy5qEEeyoq94leGtms0VSHYwIgJ6qux2OnOeYWZ8MS3IuY0fcL0GdgrlGBSPFx9z2KCWM="
"""
date = 'Sun, 05 Jan 2014 21:31:40 GMT'
url = 'http://127.0.0.1:8002/foo'
data_body = '{"hello": "world"}'
client = SecureHttpClient()
client.headers['Date'] = date
client.signing_algorithm = 'ECDSA-P256'
client.hashing_algorithm = 'SHA512'
client.data_body = data_body
client.authorization_parameters['keyId'] = p256_public_key_id
client.authorization_parameters['algorithm'] = 'hs2019'
client.private_key_string = p256_private_key_string
client.set_url(url)
client.build_secure_request()
print(client.signing_string)
print("")
print(client.headers['Authorization'])
print("")
response = client.post()
print(response.status_code)
| 30.190476
| 191
| 0.745268
|
5ae21d665f099becb108431731ba62b5dc121a45
| 1,213
|
cs
|
C#
|
source.contracts/OpenFMSL.Contracts/OpenFMSL.Core/ThermodynamicModels/SafeLiquidDensity.cs
|
Nukleon84/OpenFMSL
|
5ab22296a6c4af84fc06fcc15091980093d5eec2
|
[
"MIT"
] | 10
|
2018-03-24T17:52:56.000Z
|
2021-11-13T12:01:53.000Z
|
source.contracts/OpenFMSL.Contracts/OpenFMSL.Core/ThermodynamicModels/SafeLiquidDensity.cs
|
Nukleon84/OpenFMSL
|
5ab22296a6c4af84fc06fcc15091980093d5eec2
|
[
"MIT"
] | 1
|
2018-06-21T21:12:12.000Z
|
2018-06-21T21:12:12.000Z
|
source.contracts/OpenFMSL.Contracts/OpenFMSL.Core/ThermodynamicModels/SafeLiquidDensity.cs
|
Nukleon84/OpenFMSL
|
5ab22296a6c4af84fc06fcc15091980093d5eec2
|
[
"MIT"
] | 2
|
2019-05-28T02:53:14.000Z
|
2021-01-07T08:09:06.000Z
|
using OpenFMSL.Core.Expressions;
using OpenFMSL.Core.Thermodynamics;
using OpenFMSL.Core.UnitsOfMeasure;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace OpenFMSL.Core.ThermodynamicModels
{
    /// <summary>
    /// Expression wrapper that evaluates a liquid density but falls back to
    /// the vapor density whenever the liquid value is NaN or numerically zero.
    /// Differentiation always follows the liquid-density branch.
    /// </summary>
    public class SafeLiquidDensity : Expression
    {
        Expression _densityLiquid;
        Expression _densityVapor;

        public SafeLiquidDensity(Expression densityLiquid, Expression densityVapor)
        {
            _densityLiquid = densityLiquid;
            _densityVapor = densityVapor;
            Symbol = "+";

            EvalFunctional = (c) =>
            {
                var liquid = _densityLiquid.Eval(c);
                // Fall back to the vapor density if the liquid branch is
                // invalid (NaN) or effectively zero.
                bool invalid = Double.IsNaN(liquid) || Math.Abs(liquid) < 1e-12;
                return invalid ? _densityVapor.Eval(c) : liquid;
            };
            // Derivatives are taken on the liquid branch only.
            DiffFunctional = (c, v) => _densityLiquid.Diff(c, v);
        }

        public override Expression SymbolicDiff(Variable var)
        {
            return _densityLiquid.SymbolicDiff(var);
        }

        public override string ToString()
        {
            return "SafeLiquidDensity()";
        }
    }
}
| 25.808511
| 83
| 0.590272
|
6353aa85747b07252b0a24f121ff4f4752860800
| 673
|
sql
|
SQL
|
SchoolDatabase/SchoolDatabase/dspUtil/Stored Procedures/LogEnable.sql
|
directsp/SchoolSample
|
bb7c95a11247d7f27ac28763752ea8e93d9662f5
|
[
"MIT"
] | null | null | null |
SchoolDatabase/SchoolDatabase/dspUtil/Stored Procedures/LogEnable.sql
|
directsp/SchoolSample
|
bb7c95a11247d7f27ac28763752ea8e93d9662f5
|
[
"MIT"
] | null | null | null |
SchoolDatabase/SchoolDatabase/dspUtil/Stored Procedures/LogEnable.sql
|
directsp/SchoolSample
|
bb7c95a11247d7f27ac28763752ea8e93d9662f5
|
[
"MIT"
] | null | null | null |
-- Enables logging for the calling SYSTEM_USER. Installs the logging
-- infrastructure on first use; optionally removes all existing filters.
CREATE PROCEDURE [dspUtil].[LogEnable]
	@RemoveAllFilters AS BIT = 0
AS
BEGIN
	-- Treat NULL as the default: keep existing filters.
	SET @RemoveAllFilters = ISNULL(@RemoveAllFilters, 0);

	-- install log system if it is not installed
	IF (dspUtil.LogIsInstalled()=0)
		EXEC dspUtil.LogInstall;

	-- Set enable flag
	-- Upsert the per-user enable row keyed by SYSTEM_USER.
	IF NOT EXISTS(SELECT 1 FROM dspUtil.LogUser AS LU WHERE LU.UserName = SYSTEM_USER)
		INSERT dspUtil.LogUser ( UserName, IsEnabled ) VALUES ( SYSTEM_USER, 1);
	ELSE
		UPDATE dspUtil.LogUser SET IsEnabled = 1 WHERE UserName = SYSTEM_USER;

	PRINT 'LogSystem> LogSystem has been enabled.';

	-- Remove All old filters
	IF (@RemoveAllFilters = 1)
	BEGIN
		-- NULL filter argument presumably means "remove every filter" —
		-- TODO confirm against dspUtil.LogRemoveFilter's definition.
		EXEC dspUtil.LogRemoveFilter @Filter = NULL;
	END
END
| 28.041667
| 83
| 0.73997
|
a390a0875e679def4b4b50a163b4210f4d94aec7
| 930
|
ts
|
TypeScript
|
forgerock-openbanking-ui/projects/cdr-tpp/src/app/pages/transactions/transactions.component.ts
|
MbqIIB/openbanking-tpp
|
54ce830d1ad04cf8119b10fbb6221cffc521cdbb
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-06-12T13:13:46.000Z
|
2021-07-20T19:08:53.000Z
|
forgerock-openbanking-ui/projects/cdr-tpp/src/app/pages/transactions/transactions.component.ts
|
MbqIIB/openbanking-tpp
|
54ce830d1ad04cf8119b10fbb6221cffc521cdbb
|
[
"ECL-2.0",
"Apache-2.0"
] | 94
|
2019-12-11T17:24:27.000Z
|
2022-02-25T11:36:50.000Z
|
forgerock-openbanking-ui/projects/cdr-tpp/src/app/pages/transactions/transactions.component.ts
|
MbqIIB/openbanking-tpp
|
54ce830d1ad04cf8119b10fbb6221cffc521cdbb
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-03-20T08:28:34.000Z
|
2021-03-20T08:28:34.000Z
|
import {
Component,
OnInit,
ChangeDetectionStrategy,
ElementRef,
NgZone,
OnDestroy
} from '@angular/core';
import { CdkScrollable, ScrollDispatcher } from '@angular/cdk/overlay';
@Component({
selector: 'app-transactions',
template: `
<app-transactions-container></app-transactions-container>
`,
styles: [],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class TransactionsComponent implements OnInit, OnDestroy {
cdkScrollable: CdkScrollable;
constructor(private hostRef: ElementRef, private scrollDispatcher: ScrollDispatcher, protected ngZone: NgZone) {}
ngOnInit() {
this.cdkScrollable = new CdkScrollable(this.hostRef, this.scrollDispatcher, this.ngZone);
this.cdkScrollable.ngOnInit();
// this.scrollDispatcher.register(this.cdkScrollable);
}
ngOnDestroy() {
this.cdkScrollable.ngOnDestroy();
// this.scrollDispatcher.deregister(this.cdkScrollable);
}
}
| 28.181818
| 115
| 0.746237
|
15d86b43b67272dea8f75b938c5574a5b36c1fb2
| 2,916
|
rb
|
Ruby
|
app/controllers/application_controller.rb
|
Ilinicz/cupofme
|
b84365388e562562d7fbd602ad5c987b3a27ba7e
|
[
"MIT"
] | null | null | null |
app/controllers/application_controller.rb
|
Ilinicz/cupofme
|
b84365388e562562d7fbd602ad5c987b3a27ba7e
|
[
"MIT"
] | null | null | null |
app/controllers/application_controller.rb
|
Ilinicz/cupofme
|
b84365388e562562d7fbd602ad5c987b3a27ba7e
|
[
"MIT"
] | null | null | null |
class ApplicationController < ActionController::Base
  # Optional HTTP basic auth for the whole app, configured via
  # BASIC_AUTH="user:pass" in the environment.
  if ENV['BASIC_AUTH']
    user, pass = ENV['BASIC_AUTH'].split(':')
    http_basic_authenticate_with name: user, password: pass
  end

  # Populate default meta tags on every GET request.
  before_action :meta, if: "request.get?"

  # Prevent CSRF attacks by raising an exception.
  # For APIs, you may want to use :null_session instead.
  protect_from_forgery with: :exception

  # Devise, require authenticate by default
  before_filter :authenticate_user!

  # CanCan, check authorization unless authorizing with devise
  check_authorization unless: :skip_check_authorization?

  # Special handling for ajax requests.
  # Must appear before other rescue_from statements.
  rescue_from Exception, with: :handle_uncaught_exception

  include CommonHelper
  include ErrorReportingConcern
  include AuthorizationErrorsConcern

  protected

  # Devise and RailsAdmin controllers handle authorization themselves.
  def skip_check_authorization?
    devise_controller? || is_a?(RailsAdmin::ApplicationController)
  end

  # Reset response so redirect or render can be called again.
  # This is an undocumented hack but it works.
  def reset_response
    self.instance_variable_set(:@_response_body, nil)
  end

  # Respond to uncaught exceptions with friendly error message during ajax requests
  def handle_uncaught_exception(exception)
    if request.format == :js
      report_error(exception)
      flash.now[:error] = Rails.env.development? ? exception.message : I18n.t('errors.unknown')
      render 'layouts/uncaught_error.js'
    else
      raise
    end
  end

  # Builds the page's meta tags (title, description, OpenGraph, Twitter card).
  # options[:t] is the dynamic part of the title; options[:noindex] forces
  # a robots noindex tag.
  def meta(options={})
    site_name = "Cup Of Me"
    description = options[:description] || "Делись знаниями"
    image = options[:image] || "/banner.jpg"
    type = options[:type] || "article"
    current_url = request.url
    # BUGFIX: the original expression
    #   options[:noindex] || Rails.env.production? ? false : true
    # parsed as `(options[:noindex] || Rails.env.production?) ? false : true`,
    # which inverted an explicitly passed :noindex. Parenthesize so a
    # caller-supplied value wins and non-production environments default to
    # noindex.
    noindex = options[:noindex] || (Rails.env.production? ? false : true)
    # options[:t] - динамическая часть тега title
    page_namespace = "#{controller_name}.#{action_name}"
    title = I18n.t("titles.#{page_namespace}", title: options[:t]) unless I18n.t("titles.#{page_namespace}", default: '').blank?
    defaults = {
      site: site_name,
      title: title,
      image: image,
      image_src: image,
      description: description,
      reverse: true,
      noindex: noindex,
      twitter: {
        site_name: site_name,
        site: '@cupofme_dev',
        card: 'summary',
        description: description,
        image: image
      },
      og: {
        url: current_url,
        site_name: site_name,
        title: title,
        image: {
          _: image,
          width: 1200,
          height: 600,
          type: 'image/jpg'
        },
        description: description,
        type: type
      }
    }
    options.reverse_merge!(defaults)
    set_meta_tags(options.except(:t))
  end

  # Redirects non-admin users back to the root page.
  def only_admin!
    if !current_user.is_admin?
      redirect_to root_path, alert: 'Нет доступа'
    end
  end
end
| 28.31068
| 129
| 0.65775
|
0794f4ba604bd530cf15bedaf6f13d2529aec5dd
| 21,257
|
cpp
|
C++
|
lib/Conversion/StencilToStandard/ConvertStencilToStandard.cpp
|
Mogball/open-earth-compiler
|
81e137b9b15ac6c49d291ac86ee36666abc15b62
|
[
"Apache-2.0"
] | null | null | null |
lib/Conversion/StencilToStandard/ConvertStencilToStandard.cpp
|
Mogball/open-earth-compiler
|
81e137b9b15ac6c49d291ac86ee36666abc15b62
|
[
"Apache-2.0"
] | null | null | null |
lib/Conversion/StencilToStandard/ConvertStencilToStandard.cpp
|
Mogball/open-earth-compiler
|
81e137b9b15ac6c49d291ac86ee36666abc15b62
|
[
"Apache-2.0"
] | null | null | null |
#include "Conversion/StencilToStandard/ConvertStencilToStandard.h"
#include "Conversion/StencilToStandard/Passes.h"
#include "Dialect/Stencil/StencilDialect.h"
#include "Dialect/Stencil/StencilOps.h"
#include "Dialect/Stencil/StencilTypes.h"
#include "Dialect/Stencil/StencilUtils.h"
#include "PassDetail.h"
#include "mlir/Dialect/Affine/IR/AffineOps.h"
#include "mlir/Dialect/SCF/SCF.h"
#include "mlir/Dialect/StandardOps/IR/Ops.h"
#include "mlir/IR/AffineMap.h"
#include "mlir/IR/BlockAndValueMapping.h"
#include "mlir/IR/Builders.h"
#include "mlir/IR/Function.h"
#include "mlir/IR/MLIRContext.h"
#include "mlir/IR/Module.h"
#include "mlir/IR/PatternMatch.h"
#include "mlir/IR/StandardTypes.h"
#include "mlir/IR/Value.h"
#include "mlir/Pass/Pass.h"
#include "mlir/Support/LLVM.h"
#include "mlir/Support/LogicalResult.h"
#include "mlir/Transforms/DialectConversion.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <cstdint>
#include <cstddef>
#include <tuple>
using namespace mlir;
using namespace stencil;
using namespace scf;
namespace {
//===----------------------------------------------------------------------===//
// Rewriting Pattern
//===----------------------------------------------------------------------===//
// Lowers a function op: converts every stencil-typed argument via the type
// converter and rebuilds the function with the converted signature.
class FuncOpLowering : public StencilOpToStdPattern<FuncOp> {
public:
  using StencilOpToStdPattern<FuncOp>::StencilOpToStdPattern;

  LogicalResult
  matchAndRewrite(Operation *operation, ArrayRef<Value> operands,
                  ConversionPatternRewriter &rewriter) const override {
    auto loc = operation->getLoc();
    auto funcOp = cast<FuncOp>(operation);

    // Convert the original function arguments
    TypeConverter::SignatureConversion result(funcOp.getNumArguments());
    for (auto &en : llvm::enumerate(funcOp.getType().getInputs()))
      result.addInputs(en.index(), typeConverter.convertType(en.value()));
    // Result types are kept as-is; only the inputs are converted.
    auto funcType =
        FunctionType::get(result.getConvertedTypes(),
                          funcOp.getType().getResults(), funcOp.getContext());

    // Replace the function by a function with an updated signature
    auto newFuncOp =
        rewriter.create<FuncOp>(loc, funcOp.getName(), funcType, llvm::None);
    rewriter.inlineRegionBefore(funcOp.getBody(), newFuncOp.getBody(),
                                newFuncOp.end());

    // Convert the signature and delete the original operation
    rewriter.applySignatureConversion(&newFuncOp.getBody(), result);
    rewriter.eraseOp(funcOp);
    return success();
  }
};
// Lowers stencil.assert: materializes a memref_cast that fixes the field's
// static shape. The cast's result is intentionally left without uses here;
// the Load/Store lowerings later locate it via getUserOp on the same operand.
class AssertOpLowering : public StencilOpToStdPattern<stencil::AssertOp> {
public:
  using StencilOpToStdPattern<stencil::AssertOp>::StencilOpToStdPattern;

  LogicalResult
  matchAndRewrite(Operation *operation, ArrayRef<Value> operands,
                  ConversionPatternRewriter &rewriter) const override {
    auto loc = operation->getLoc();
    auto assertOp = cast<stencil::AssertOp>(operation);

    // Compute the static shape of the field and cast the input memref
    auto fieldType = assertOp.field().getType().cast<FieldType>();
    auto shape = computeShape(operation);
    auto resultType = typeConverter.convertFieldType(fieldType, shape);
    rewriter.create<MemRefCastOp>(loc, operands[0], resultType);
    rewriter.eraseOp(assertOp);
    return success();
  }
};
// Lowers stencil.load: replaces the load by a subview of the casted input
// field, sized/offset according to the temp's shape and the field's lower
// bound (tracked in valueToLB — presumably populated by the pass driver;
// see runOnOperation).
class LoadOpLowering : public StencilOpToStdPattern<stencil::LoadOp> {
public:
  using StencilOpToStdPattern<stencil::LoadOp>::StencilOpToStdPattern;

  LogicalResult
  matchAndRewrite(Operation *operation, ArrayRef<Value> operands,
                  ConversionPatternRewriter &rewriter) const override {
    auto loc = operation->getLoc();
    auto loadOp = cast<stencil::LoadOp>(operation);

    // Get the assert and the cast operation
    auto castOp = getUserOp<MemRefCastOp>(operands[0]);
    assert(castOp && "exepected operands[0] to point to the input field");

    // Get the temp and field types
    auto fieldType = loadOp.field().getType().cast<FieldType>();
    auto tempType = loadOp.res().getType().cast<TempType>();

    // Compute the shape of the subview (offsets, sizes, strides)
    auto subViewShape = computeSubViewShape(fieldType, operation,
                                            valueToLB.lookup(loadOp.field()));
    assert(std::get<1>(subViewShape) == tempType.getMemRefShape() &&
           "expected to get result memref shape");

    // Replace the load op by a subview op
    auto subViewOp = rewriter.create<SubViewOp>(
        loc, castOp.getResult(), std::get<0>(subViewShape),
        std::get<1>(subViewShape), std::get<2>(subViewShape), ValueRange(),
        ValueRange(), ValueRange());
    rewriter.replaceOp(operation, subViewOp.getResult());
    return success();
  }
};
// Lowers stencil.apply: allocates a buffer per stencil result, wraps the
// apply body in an scf.parallel loop over the op's shape (step = unroll
// factor when the return op is unrolled), and deallocates the buffers at the
// end of the enclosing region.
class ApplyOpLowering : public StencilOpToStdPattern<stencil::ApplyOp> {
public:
  using StencilOpToStdPattern<stencil::ApplyOp>::StencilOpToStdPattern;

  LogicalResult
  matchAndRewrite(Operation *operation, ArrayRef<Value> operands,
                  ConversionPatternRewriter &rewriter) const override {
    auto loc = operation->getLoc();
    auto applyOp = cast<stencil::ApplyOp>(operation);
    auto shapeOp = cast<ShapeOp>(operation);

    // Allocate storage for every stencil output
    SmallVector<Value, 10> newResults;
    for (unsigned i = 0, e = applyOp.getNumResults(); i != e; ++i) {
      assert(applyOp.getResult(i).getType().cast<TempType>().hasStaticShape() &&
             "expected the result types have a static shape");
      auto allocType = typeConverter.convertType(applyOp.getResult(i).getType())
                           .cast<MemRefType>();
      auto allocOp = rewriter.create<AllocOp>(loc, allocType);
      newResults.push_back(allocOp.getResult());
    }

    // Compute the loop bounds starting from zero
    // (in case of loop unrolling adjust the step of the loop)
    SmallVector<Value, 3> lb, ub, steps;
    auto returnOp = cast<stencil::ReturnOp>(applyOp.getBody()->getTerminator());
    for (int64_t i = 0, e = shapeOp.getRank(); i != e; ++i) {
      lb.push_back(rewriter.create<ConstantIndexOp>(loc, shapeOp.getLB()[i]));
      ub.push_back(rewriter.create<ConstantIndexOp>(loc, shapeOp.getUB()[i]));
      steps.push_back(rewriter.create<ConstantIndexOp>(
          loc, returnOp.unroll().hasValue() ? returnOp.getUnroll()[i] : 1));
    }

    // Convert the signature of the apply op body
    // (access the apply op operands directly and introduce the loop indicies)
    TypeConverter::SignatureConversion result(applyOp.getNumOperands());
    for (auto &en : llvm::enumerate(applyOp.getOperands())) {
      result.remapInput(en.index(), operands[en.index()]);
    }
    SmallVector<Type, 3> indexes(steps.size(),
                                 IndexType::get(applyOp.getContext()));
    result.addInputs(indexes);
    rewriter.applySignatureConversion(&applyOp.region(), result);

    // Replace the stencil apply operation by a parallel loop
    // (we clone the loop op to remove the existing body)
    auto loopOp = rewriter.create<ParallelOp>(loc, lb, ub, steps);
    auto clonedOp = rewriter.cloneWithoutRegions(loopOp);
    rewriter.inlineRegionBefore(applyOp.region(), clonedOp.region(),
                                clonedOp.region().begin());
    // Parallel loops need a terminating scf.yield.
    rewriter.setInsertionPointToEnd(clonedOp.getBody());
    rewriter.create<YieldOp>(loc);
    rewriter.replaceOp(applyOp, newResults);
    rewriter.eraseOp(loopOp);

    // Deallocate the temporary storage
    rewriter.setInsertionPoint(
        applyOp.getParentRegion()->back().getTerminator());
    for (auto newResult : newResults) {
      rewriter.create<DeallocOp>(loc, newResult);
    }
    return success();
  }
};
// Lowers stencil.return inside the generated scf.parallel loop: stores every
// returned value into the matching result buffer allocated by the apply
// lowering, expanding unrolled returns into one store per unroll index.
class ReturnOpLowering : public StencilOpToStdPattern<stencil::ReturnOp> {
public:
  using StencilOpToStdPattern<stencil::ReturnOp>::StencilOpToStdPattern;

  LogicalResult
  matchAndRewrite(Operation *operation, ArrayRef<Value> operands,
                  ConversionPatternRewriter &rewriter) const override {
    auto loc = operation->getLoc();
    auto returnOp = cast<stencil::ReturnOp>(operation);

    /// Compute unroll factor and dimension
    auto unrollFac = returnOp.getUnrollFactor();
    size_t unrollDim = returnOp.getUnrollDimension();

    // Get the loop operation (must already have been introduced by the
    // apply lowering; fail and retry later otherwise)
    if (!isa<ParallelOp>(operation->getParentOp()))
      return failure();
    auto loopOp = cast<ParallelOp>(operation->getParentOp());

    // Get allocations of result buffers by walking backwards from the loop;
    // inserting at the front keeps them in original program order.
    SmallVector<Value, 10> allocVals;
    unsigned allocCount = returnOp.getNumOperands() / unrollFac;
    auto *node = loopOp.getOperation();
    while (node && allocVals.size() < allocCount) {
      if (auto allocOp = dyn_cast<AllocOp>(node))
        allocVals.insert(allocVals.begin(), allocOp.getResult());
      node = node->getPrevNode();
    }
    assert(allocVals.size() == allocCount &&
           "expected allocation for every result of the stencil operator");

    // Introduce a store for every return value
    for (unsigned i = 0, e = allocCount; i != e; ++i) {
      for (unsigned j = 0, e = unrollFac; j != e; ++j) {
        rewriter.setInsertionPoint(returnOp);
        unsigned operandIdx = i * unrollFac + j;
        // Place the store right after the producing op when it lives in the
        // same region, to keep defs close to their single use.
        auto definingOp = returnOp.getOperand(operandIdx).getDefiningOp();
        if (definingOp && returnOp.getParentOp() == definingOp->getParentOp())
          rewriter.setInsertionPointAfter(definingOp);
        // Compute the store offset (negated lower bound, shifted by the
        // unroll index along the unroll dimension)
        auto offset = valueToLB[returnOp.getOperand(operandIdx)];
        llvm::transform(offset, offset.begin(), std::negate<int64_t>());
        offset[unrollDim] += j;
        // Compute the index values and introduce the store operation
        SmallVector<bool, 3> allocation(offset.size(), true);
        auto storeOffset = computeIndexValues(loopOp.getInductionVars(), offset,
                                              allocation, rewriter);
        rewriter.create<mlir::StoreOp>(loc, returnOp.getOperand(operandIdx),
                                       allocVals[i], storeOffset);
      }
    }
    rewriter.eraseOp(operation);
    return success();
  }
};
// Lowers stencil.access: replaces the access by a std.load whose indices are
// the parallel-loop induction variables shifted by the access offset minus
// the temp's lower bound.
class AccessOpLowering : public StencilOpToStdPattern<stencil::AccessOp> {
public:
  using StencilOpToStdPattern<stencil::AccessOp>::StencilOpToStdPattern;

  LogicalResult
  matchAndRewrite(Operation *operation, ArrayRef<Value> operands,
                  ConversionPatternRewriter &rewriter) const override {
    auto loc = operation->getLoc();
    auto accessOp = cast<stencil::AccessOp>(operation);
    auto offsetOp = cast<OffsetOp>(accessOp.getOperation());

    // Get the parallel loop (introduced by the apply lowering; fail and
    // retry later if it is not in place yet)
    auto loopOp = operation->getParentOfType<ParallelOp>();
    if (!loopOp)
      return failure();
    assert(loopOp.getNumLoops() == offsetOp.getOffset().size() &&
           "expected loop nest and access offset to have the same size");

    // Add the lower bound of the temporary to the access offset
    auto totalOffset =
        applyFunElementWise(offsetOp.getOffset(), valueToLB[accessOp.temp()],
                            std::minus<int64_t>());
    auto tempType = accessOp.temp().getType().cast<TempType>();
    auto loadOffset = computeIndexValues(loopOp.getInductionVars(), totalOffset,
                                         tempType.getAllocation(), rewriter);

    // Replace the access op by a load op
    rewriter.replaceOpWithNewOp<mlir::LoadOp>(operation, operands[0],
                                              loadOffset);
    return success();
  }
};
// Lowers stencil.index: materializes the current iteration index along one
// dimension as an affine.apply adding the op's constant offset to the
// corresponding loop induction variable.
class IndexOpLowering : public StencilOpToStdPattern<stencil::IndexOp> {
public:
  using StencilOpToStdPattern<stencil::IndexOp>::StencilOpToStdPattern;

  LogicalResult
  matchAndRewrite(Operation *operation, ArrayRef<Value> operands,
                  ConversionPatternRewriter &rewriter) const override {
    auto loc = operation->getLoc();
    auto indexOp = cast<stencil::IndexOp>(operation);
    auto offsetOp = cast<OffsetOp>(indexOp.getOperation());

    // Get the parallel loop (fail and retry later if not yet introduced)
    auto loopOp = operation->getParentOfType<ParallelOp>();
    if (!loopOp)
      return failure();
    assert(loopOp.getNumLoops() == offsetOp.getOffset().size() &&
           "expected loop nest and access offset to have the same size");

    auto inductionVars = loopOp.getInductionVars();
    auto dim = indexOp.getDim();
    // map(d0, d1) = d0 + d1, applied to (inductionVar[dim], offset[dim])
    auto expr = rewriter.getAffineDimExpr(0) + rewriter.getAffineDimExpr(1);
    auto map = AffineMap::get(2, 0, expr);
    SmallVector<Value, 2> params = {
        inductionVars[dim],
        rewriter.create<ConstantIndexOp>(loc, offsetOp.getOffset()[dim])
            .getResult()};

    // replace the index op by an affine apply op
    rewriter.replaceOpWithNewOp<mlir::AffineApplyOp>(operation, map, params);
    return success();
  }
};
// Lowers stencil.store: instead of copying, the temporary allocation that
// feeds the store is replaced by a subview of the (casted) output field, so
// the apply writes straight into the output. The now-unneeded dealloc and
// the store op itself are erased.
class StoreOpLowering : public StencilOpToStdPattern<stencil::StoreOp> {
public:
  using StencilOpToStdPattern<stencil::StoreOp>::StencilOpToStdPattern;

  LogicalResult
  matchAndRewrite(Operation *operation, ArrayRef<Value> operands,
                  ConversionPatternRewriter &rewriter) const override {
    auto loc = operation->getLoc();
    auto storeOp = cast<stencil::StoreOp>(operation);

    // Get the assert and the cast operation
    auto castOp = getUserOp<MemRefCastOp>(operands[1]);
    assert(castOp && "exepected operands[1] to point to the output field");

    // Get the temp and field types
    auto fieldType = storeOp.field().getType().cast<FieldType>();
    auto tempType = storeOp.temp().getType().cast<TempType>();

    // Compute the shape of the subview (offsets, sizes, strides)
    auto subViewShape = computeSubViewShape(fieldType, operation,
                                            valueToLB.lookup(storeOp.field()));
    assert(std::get<1>(subViewShape) == tempType.getMemRefShape() &&
           "expected to get result memref shape");

    // Replace the allocation by a subview
    auto allocOp = operands[0].getDefiningOp();
    rewriter.setInsertionPoint(allocOp);
    auto subViewOp = rewriter.create<SubViewOp>(
        loc, castOp.getResult(), std::get<0>(subViewShape),
        std::get<1>(subViewShape), std::get<2>(subViewShape), ValueRange(),
        ValueRange(), ValueRange());
    rewriter.replaceOp(allocOp, subViewOp.getResult());

    // Remove the deallocation and the store operation
    auto deallocOp = getUserOp<DeallocOp>(operands[0]);
    assert(deallocOp && "expected dealloc operation");
    rewriter.eraseOp(deallocOp);
    rewriter.eraseOp(operation);
    return success();
  }
};
//===----------------------------------------------------------------------===//
// Conversion Target
//===----------------------------------------------------------------------===//
class StencilToStdTarget : public ConversionTarget {
public:
explicit StencilToStdTarget(MLIRContext &context)
: ConversionTarget(context) {}
bool isDynamicallyLegal(Operation *op) const override {
if (auto funcOp = dyn_cast<FuncOp>(op)) {
return !funcOp.getAttr(
stencil::StencilDialect::getStencilProgramAttrName()) &&
!funcOp.getAttr(
stencil::StencilDialect::getStencilFunctionAttrName());
}
return true;
}
};
//===----------------------------------------------------------------------===//
// Rewriting Pass
//===----------------------------------------------------------------------===//
struct StencilToStandardPass
: public StencilToStandardPassBase<StencilToStandardPass> {
void runOnOperation() override;
};
void StencilToStandardPass::runOnOperation() {
OwningRewritePatternList patterns;
auto module = getOperation();
// Check all shapes are set
bool allShapesValid = true;
module.walk([&](ShapeOp shapeOp) {
if (!shapeOp.hasShape()) {
allShapesValid = false;
shapeOp.emitOpError("expected to have a valid shape");
signalPassFailure();
}
});
if (!allShapesValid)
return;
// Store the lower bounds of the input stencil program
DenseMap<Value, Index> valueToLB;
module.walk([&](stencil::AssertOp assertOp) {
auto shapeOp = cast<ShapeOp>(assertOp.getOperation());
valueToLB[assertOp.field()] = shapeOp.getLB();
});
module.walk([&](stencil::ApplyOp applyOp) {
// Store the lower bounds for all arguments
for (auto en : llvm::enumerate(applyOp.getOperands())) {
if (auto shapeOp = dyn_cast_or_null<ShapeOp>(en.value().getDefiningOp()))
valueToLB[applyOp.getBody()->getArgument(en.index())] = shapeOp.getLB();
}
// Store the lower bounds for all results
auto LB = cast<ShapeOp>(applyOp.getOperation()).getLB();
for (auto value : applyOp.getBody()->getTerminator()->getOperands()) {
valueToLB[value] = LB;
}
});
StencilTypeConverter typeConverter(module.getContext());
populateStencilToStdConversionPatterns(typeConverter, valueToLB, patterns);
StencilToStdTarget target(*(module.getContext()));
target.addLegalDialect<AffineDialect>();
target.addLegalDialect<StandardOpsDialect>();
target.addLegalDialect<SCFDialect>();
target.addDynamicallyLegalOp<FuncOp>();
target.addLegalOp<ModuleOp, ModuleTerminatorOp>();
if (failed(applyFullConversion(module, target, patterns))) {
signalPassFailure();
}
}
} // namespace
namespace mlir {
namespace stencil {
// Populate the conversion pattern list
void populateStencilToStdConversionPatterns(
StencilTypeConverter &typeConveter, DenseMap<Value, Index> &valueToLB,
mlir::OwningRewritePatternList &patterns) {
patterns
.insert<FuncOpLowering, AssertOpLowering, LoadOpLowering, ApplyOpLowering,
ReturnOpLowering, AccessOpLowering, IndexOpLowering,
StoreOpLowering>(typeConveter, valueToLB);
}
//===----------------------------------------------------------------------===//
// Stencil Type Converter
//===----------------------------------------------------------------------===//
StencilTypeConverter::StencilTypeConverter(MLIRContext *context_)
: context(context_) {
// Add a type conversion for the stencil field type
addConversion([&](GridType type) {
return MemRefType::get(type.getMemRefShape(), type.getElementType());
});
addConversion([&](Type type) -> Optional<Type> {
if (auto gridType = type.dyn_cast<GridType>())
return llvm::None;
return type;
});
}
Type StencilTypeConverter::convertFieldType(FieldType fieldType,
ArrayRef<int64_t> shape) {
Index revShape;
for (auto en : llvm::enumerate(fieldType.getAllocation())) {
// Insert at the front to convert from column to row-major
if (en.value())
revShape.insert(revShape.begin(), shape[en.index()]);
}
return MemRefType::get(revShape, fieldType.getElementType());
}
//===----------------------------------------------------------------------===//
// Stencil Pattern Base Class
//===----------------------------------------------------------------------===//
StencilToStdPattern::StencilToStdPattern(StringRef rootOpName,
StencilTypeConverter &typeConverter_,
DenseMap<Value, Index> &valueToLB_,
PatternBenefit benefit)
: ConversionPattern(rootOpName, benefit, typeConverter_.getContext()),
typeConverter(typeConverter_), valueToLB(valueToLB_) {}
Index StencilToStdPattern::computeShape(ShapeOp shapeOp) const {
return applyFunElementWise(shapeOp.getUB(), shapeOp.getLB(),
std::minus<int64_t>());
}
std::tuple<Index, Index, Index>
StencilToStdPattern::computeSubViewShape(FieldType fieldType, ShapeOp accessOp,
Index assertLB) const {
auto shape = computeShape(accessOp);
Index revShape, revOffset, revStrides;
for (auto en : llvm::enumerate(fieldType.getAllocation())) {
// Insert values at the front to convert from column- to row-major
if (en.value()) {
revShape.insert(revShape.begin(), shape[en.index()]);
revStrides.insert(revStrides.begin(), 1);
revOffset.insert(revOffset.begin(),
accessOp.getLB()[en.index()] - assertLB[en.index()]);
}
}
return std::make_tuple(revOffset, revShape, revStrides);
}
SmallVector<Value, 3> StencilToStdPattern::computeIndexValues(
ValueRange inductionVars, Index offset, ArrayRef<bool> allocation,
ConversionPatternRewriter &rewriter) const {
auto loc = rewriter.getInsertionPoint()->getLoc();
auto expr = rewriter.getAffineDimExpr(0) + rewriter.getAffineDimExpr(1);
auto map = AffineMap::get(2, 0, expr);
SmallVector<Value, 3> resOffset;
for (auto en : llvm::enumerate(allocation)) {
// Insert values at the front to convert from column- to row-major
if (en.value()) {
SmallVector<Value, 2> params = {
inductionVars[en.index()],
rewriter.create<ConstantIndexOp>(loc, offset[en.index()])
.getResult()};
auto affineApplyOp = rewriter.create<AffineApplyOp>(loc, map, params);
resOffset.insert(resOffset.begin(), affineApplyOp.getResult());
}
}
return resOffset;
}
} // namespace stencil
} // namespace mlir
std::unique_ptr<Pass> mlir::createConvertStencilToStandardPass() {
return std::make_unique<StencilToStandardPass>();
}
| 39.075368
| 80
| 0.657289
|
b8e161f655c4bd4c0dbd0877ac0a8cbb52070fe1
| 563
|
c
|
C
|
src/QQ29.c
|
LeyvinoBezerra/LISTA1-C-RESOLUCAO
|
cf112cb626927560302d0fdc23444a9402913f49
|
[
"MIT"
] | null | null | null |
src/QQ29.c
|
LeyvinoBezerra/LISTA1-C-RESOLUCAO
|
cf112cb626927560302d0fdc23444a9402913f49
|
[
"MIT"
] | null | null | null |
src/QQ29.c
|
LeyvinoBezerra/LISTA1-C-RESOLUCAO
|
cf112cb626927560302d0fdc23444a9402913f49
|
[
"MIT"
] | null | null | null |
#include <stdio.h>
#include <stdlib.h>
int main()
{
int maior, menor, vet[5], i;
for(i = 0;i < 5;i++)
{
printf("Entre com o %d valor do vetor: ", i+1);
scanf("%d", &vet[i]);
}
maior = vet[0];
menor = vet[0];
for(i = 1;i < 5;i++)
{
if(maior < vet[i])
maior = vet[i];
}
for(i = 1;i < 5;i++)
{
if(menor > vet[i])
menor = vet[i];
}
printf("\nO maior valor do vetor e %d: \n", maior);
printf("O menor valor do vetor e %d: ", menor);
return 0;
}
| 16.558824
| 55
| 0.435169
|
959a62b4f6bb4643c3d35fdcf3f4337fdad3fd0f
| 224
|
lua
|
Lua
|
migrations/0.1.0.lua
|
maartenpeels/LTN-Combinator
|
dc51b0c207d944b01e740a08b75b2faab0e47eba
|
[
"MIT"
] | 1
|
2020-10-11T21:09:03.000Z
|
2020-10-11T21:09:03.000Z
|
migrations/0.1.0.lua
|
maartenpeels/LTN-Combinator
|
dc51b0c207d944b01e740a08b75b2faab0e47eba
|
[
"MIT"
] | 1
|
2020-11-29T19:52:45.000Z
|
2021-03-24T11:04:13.000Z
|
migrations/0.1.0.lua
|
maartenpeels/LTN-Combinator
|
dc51b0c207d944b01e740a08b75b2faab0e47eba
|
[
"MIT"
] | 2
|
2020-12-04T15:34:00.000Z
|
2020-12-16T18:53:17.000Z
|
for index, force in pairs(game.forces) do
if force.technologies["logistic-train-network"] ~= nil and force.technologies["logistic-train-network"].researched then
force.recipes["ltn-combinator"].enabled = true
end
end
| 44.8
| 121
| 0.758929
|
ec54b6c943989f8d856c0730c5763b14b9e2eac8
| 29,896
|
asm
|
Assembly
|
hasher/asm/nasm/SHA1x86.asm
|
Hitsounds/ShokoServer
|
9b895e425576790107b4240e9a76ed43b05b1750
|
[
"MIT"
] | 183
|
2017-07-19T09:49:32.000Z
|
2022-03-30T00:53:52.000Z
|
hasher/asm/nasm/SHA1x86.asm
|
Hitsounds/ShokoServer
|
9b895e425576790107b4240e9a76ed43b05b1750
|
[
"MIT"
] | 331
|
2017-07-17T02:38:31.000Z
|
2022-03-26T21:09:33.000Z
|
hasher/asm/nasm/SHA1x86.asm
|
Hitsounds/ShokoServer
|
9b895e425576790107b4240e9a76ed43b05b1750
|
[
"MIT"
] | 68
|
2017-07-24T01:52:47.000Z
|
2022-02-07T20:25:58.000Z
|
section .text
global _SHA_Compile_p5@0
align 10h
_SHA_Compile_p5@0:
mov edi, [ebp+0]
bswap edi
mov [esp+4h], edi
mov esi, [ebp+4]
bswap esi
mov [esp+8h], esi
mov ebx, [ebp+8]
bswap ebx
mov [esp+0Ch], ebx
mov ebx, [ebp+0Ch]
bswap ebx
mov [esp+10h], ebx
mov ebx, [ebp+10h]
bswap ebx
mov [esp+14h], ebx
mov ebx, [ebp+14h]
bswap ebx
mov [esp+18h], ebx
mov ebx, [ebp+18h]
bswap ebx
mov [esp+1Ch], ebx
mov ebx, [ebp+1Ch]
bswap ebx
mov [esp+20h], ebx
mov ebx, [ebp+20h]
bswap ebx
mov [esp+24h], ebx
mov ebx, [ebp+24h]
bswap ebx
mov [esp+28h], ebx
mov ebx, [ebp+28h]
bswap ebx
mov [esp+2Ch], ebx
mov ebx, [ebp+2Ch]
bswap ebx
mov [esp+30h], ebx
mov ebx, [ebp+30h]
bswap ebx
mov [esp+34h], ebx
mov edx, [ebp+34h]
bswap edx
mov [esp+38h], edx
mov ecx, [ebp+38h]
bswap ecx
mov [esp+3Ch], ecx
mov ebx, [ebp+3Ch]
bswap ebx
mov [esp+40h], ebx
xor edx, edi
mov edi, [esp+0Ch]
xor edx, edi
xor edx, [esp+24h]
rol edx, 1
mov [esp+44h], edx
xor ecx, esi
mov esi, [esp+10h]
xor ecx, esi
xor ecx, [esp+28h]
rol ecx, 1
mov [esp+48h], ecx
xor ebx, edi
mov edi, [esp+14h]
xor ebx, edi
xor ebx, [esp+2Ch]
rol ebx, 1
mov [esp+4Ch], ebx
xor edx, esi
mov esi, [esp+18h]
xor edx, esi
xor edx, [esp+30h]
rol edx, 1
mov [esp+50h], edx
xor ecx, edi
mov edi, [esp+1Ch]
xor ecx, edi
xor ecx, [esp+34h]
rol ecx, 1
mov [esp+54h], ecx
xor ebx, esi
mov esi, [esp+20h]
xor ebx, esi
xor ebx, [esp+38h]
rol ebx, 1
mov [esp+58h], ebx
xor edx, edi
mov edi, [esp+24h]
xor edx, edi
xor edx, [esp+3Ch]
rol edx, 1
mov [esp+5Ch], edx
xor ecx, esi
mov esi, [esp+28h]
xor ecx, esi
xor ecx, [esp+40h]
rol ecx, 1
mov [esp+60h], ecx
xor ebx, edi
mov edi, [esp+2Ch]
xor ebx, edi
xor ebx, [esp+44h]
rol ebx, 1
mov [esp+64h], ebx
xor edx, esi
mov esi, [esp+30h]
xor edx, esi
xor edx, [esp+48h]
rol edx, 1
mov [esp+68h], edx
xor ecx, edi
mov edi, [esp+34h]
xor ecx, edi
xor ecx, [esp+4Ch]
rol ecx, 1
mov [esp+6Ch], ecx
xor ebx, esi
mov esi, [esp+38h]
xor ebx, esi
xor ebx, [esp+50h]
rol ebx, 1
mov [esp+70h], ebx
xor edx, edi
mov edi, [esp+3Ch]
xor edx, edi
xor edx, [esp+54h]
rol edx, 1
mov [esp+74h], edx
xor ecx, esi
mov esi, [esp+40h]
xor ecx, esi
xor ecx, [esp+58h]
rol ecx, 1
mov [esp+78h], ecx
xor ebx, edi
mov edi, [esp+44h]
xor ebx, edi
xor ebx, [esp+5Ch]
rol ebx, 1
mov [esp+7Ch], ebx
xor edx, esi
mov esi, [esp+48h]
xor edx, esi
xor edx, [esp+60h]
rol edx, 1
mov [esp+80h], edx
xor ecx, edi
mov edi, [esp+4Ch]
xor ecx, edi
xor ecx, [esp+64h]
rol ecx, 1
mov [esp+84h], ecx
xor ebx, esi
mov esi, [esp+50h]
xor ebx, esi
xor ebx, [esp+68h]
rol ebx, 1
mov [esp+88h], ebx
xor edx, edi
mov edi, [esp+54h]
xor edx, edi
xor edx, [esp+6Ch]
rol edx, 1
mov [esp+8Ch], edx
xor ecx, esi
mov esi, [esp+58h]
xor ecx, esi
xor ecx, [esp+70h]
rol ecx, 1
mov [esp+90h], ecx
xor ebx, edi
mov edi, [esp+5Ch]
xor ebx, edi
xor ebx, [esp+74h]
rol ebx, 1
mov [esp+94h], ebx
xor edx, esi
mov esi, [esp+60h]
xor edx, esi
xor edx, [esp+78h]
rol edx, 1
mov [esp+98h], edx
xor ecx, edi
mov edi, [esp+64h]
xor ecx, edi
xor ecx, [esp+7Ch]
rol ecx, 1
mov [esp+9Ch], ecx
xor ebx, esi
mov esi, [esp+68h]
xor ebx, esi
xor ebx, [esp+80h]
rol ebx, 1
mov [esp+0A0h], ebx
xor edx, edi
mov edi, [esp+6Ch]
xor edx, edi
xor edx, [esp+84h]
rol edx, 1
mov [esp+0A4h], edx
xor ecx, esi
mov esi, [esp+70h]
xor ecx, esi
xor ecx, [esp+88h]
rol ecx, 1
mov [esp+0A8h], ecx
xor ebx, edi
mov edi, [esp+74h]
xor ebx, edi
xor ebx, [esp+8Ch]
rol ebx, 1
mov [esp+0ACh], ebx
xor edx, esi
mov esi, [esp+78h]
xor edx, esi
xor edx, [esp+90h]
rol edx, 1
mov [esp+0B0h], edx
xor ecx, edi
mov edi, [esp+7Ch]
xor ecx, edi
xor ecx, [esp+94h]
rol ecx, 1
mov [esp+0B4h], ecx
xor ebx, esi
mov esi, [esp+80h]
xor ebx, esi
xor ebx, [esp+98h]
rol ebx, 1
mov [esp+0B8h], ebx
xor edx, edi
mov edi, [esp+84h]
xor edx, edi
xor edx, [esp+9Ch]
rol edx, 1
mov [esp+0BCh], edx
xor ecx, esi
mov esi, [esp+88h]
xor ecx, esi
xor ecx, [esp+0A0h]
rol ecx, 1
mov [esp+0C0h], ecx
xor ebx, edi
mov edi, [esp+8Ch]
xor ebx, edi
xor ebx, [esp+0A4h]
rol ebx, 1
mov [esp+0C4h], ebx
xor edx, esi
mov esi, [esp+90h]
xor edx, esi
xor edx, [esp+0A8h]
rol edx, 1
mov [esp+0C8h], edx
xor ecx, edi
mov edi, [esp+94h]
xor ecx, edi
xor ecx, [esp+0ACh]
rol ecx, 1
mov [esp+0CCh], ecx
xor ebx, esi
mov esi, [esp+98h]
xor ebx, esi
xor ebx, [esp+0B0h]
rol ebx, 1
mov [esp+0D0h], ebx
xor edx, edi
mov edi, [esp+9Ch]
xor edx, edi
xor edx, [esp+0B4h]
rol edx, 1
mov [esp+0D4h], edx
xor ecx, esi
mov esi, [esp+0A0h]
xor ecx, esi
xor ecx, [esp+0B8h]
rol ecx, 1
mov [esp+0D8h], ecx
xor ebx, edi
mov edi, [esp+0A4h]
xor ebx, edi
xor ebx, [esp+0BCh]
rol ebx, 1
mov [esp+0DCh], ebx
xor edx, esi
mov esi, [esp+0A8h]
xor edx, esi
xor edx, [esp+0C0h]
rol edx, 1
mov [esp+0E0h], edx
xor ecx, edi
mov edi, [esp+0ACh]
xor ecx, edi
xor ecx, [esp+0C4h]
rol ecx, 1
mov [esp+0E4h], ecx
xor ebx, esi
mov esi, [esp+0B0h]
xor ebx, esi
xor ebx, [esp+0C8h]
rol ebx, 1
mov [esp+0E8h], ebx
xor edx, edi
mov edi, [esp+0B4h]
xor edx, edi
xor edx, [esp+0CCh]
rol edx, 1
mov [esp+0ECh], edx
xor ecx, esi
mov esi, [esp+0B8h]
xor ecx, esi
xor ecx, [esp+0D0h]
rol ecx, 1
mov [esp+0F0h], ecx
xor ebx, edi
mov edi, [esp+0BCh]
xor ebx, edi
xor ebx, [esp+0D4h]
rol ebx, 1
mov [esp+0F4h], ebx
xor edx, esi
mov esi, [esp+0C0h]
xor edx, esi
xor edx, [esp+0D8h]
rol edx, 1
mov [esp+0F8h], edx
xor ecx, edi
mov edi, [esp+0C4h]
xor ecx, edi
xor ecx, [esp+0DCh]
rol ecx, 1
mov [esp+0FCh], ecx
xor ebx, esi
mov esi, [esp+0C8h]
xor ebx, esi
xor ebx, [esp+0E0h]
rol ebx, 1
mov [esp+100h], ebx
xor edx, edi
mov edi, [esp+0CCh]
xor edx, edi
xor edx, [esp+0E4h]
rol edx, 1
mov [esp+104h], edx
xor ecx, esi
mov esi, [esp+0D0h]
xor ecx, esi
xor ecx, [esp+0E8h]
rol ecx, 1
mov [esp+108h], ecx
xor ebx, edi
mov edi, [esp+0D4h]
xor ebx, edi
xor ebx, [esp+0ECh]
rol ebx, 1
mov [esp+10Ch], ebx
xor edx, esi
mov esi, [esp+0D8h]
xor edx, esi
xor edx, [esp+0F0h]
rol edx, 1
mov [esp+110h], edx
xor ecx, edi
mov edi, [esp+0DCh]
xor ecx, edi
xor ecx, [esp+0F4h]
rol ecx, 1
mov [esp+114h], ecx
xor ebx, esi
mov esi, [esp+0E0h]
xor ebx, esi
xor ebx, [esp+0F8h]
rol ebx, 1
mov [esp+118h], ebx
xor edx, edi
mov edi, [esp+0E4h]
xor edx, edi
xor edx, [esp+0FCh]
rol edx, 1
mov [esp+11Ch], edx
xor ecx, esi
mov esi, [esp+0E8h]
xor ecx, esi
xor ecx, [esp+100h]
rol ecx, 1
mov [esp+120h], ecx
xor ebx, edi
mov edi, [esp+0ECh]
xor ebx, edi
xor ebx, [esp+104h]
rol ebx, 1
mov [esp+124h], ebx
xor edx, esi
mov esi, [esp+0F0h]
xor edx, esi
xor edx, [esp+0108h]
rol edx, 1
mov [esp+128h], edx
xor ecx, edi
mov edi, [esp+0F4h]
xor ecx, edi
xor ecx, [esp+10Ch]
rol ecx, 1
mov [esp+12Ch], ecx
xor ebx, esi
mov esi, [esp+0F8h]
xor ebx, esi
xor ebx, [esp+110h]
rol ebx, 1
mov [esp+130h], ebx
xor edx, edi
mov edi, [esp+0FCh]
xor edx, edi
xor edx, [esp+114h]
rol edx, 1
mov [esp+134h], edx
xor ecx, esi
mov esi, [esp+100h]
xor ecx, esi
xor ecx, [esp+118h]
rol ecx, 1
mov [esp+138h], ecx
xor ebx, edi
xor ebx, [esp+104h]
xor ebx, [esp+11Ch]
rol ebx, 1
mov [esp+13Ch], ebx
xor edx, esi
xor edx, [esp+108h]
xor edx, [esp+120h]
rol edx, 1
mov [esp+140h], edx
mov ebp, [esp+168h]
mov eax, [ebp+8]
mov ebx, [ebp+0Ch]
mov ecx, [ebp+10h]
mov edx, [ebp+14h]
mov esi, [ebp+18h]
mov edi, eax
mov ebp, ecx
rol eax, 5
xor ebp, edx
add eax, esi
and ebp, ebx
add eax, 5A827999h
xor ebp, edx
add eax, [esp+4h]
ror ebx, 2
add eax, ebp
mov esi, eax
mov ebp, ebx
rol eax, 5
xor ebp, ecx
add eax, edx
and ebp, edi
add eax, 5A827999h
xor ebp, ecx
add eax, [esp+8h]
ror edi, 2
add eax, ebp
mov edx, eax
mov ebp, edi
rol eax, 5
xor ebp, ebx
add eax, ecx
and ebp, esi
add eax, 5A827999h
xor ebp, ebx
add eax, [esp+0Ch]
ror esi, 2
add eax, ebp
mov ecx, eax
mov ebp, esi
rol eax, 5
xor ebp, edi
add eax, ebx
and ebp, edx
add eax, 5A827999h
xor ebp, edi
add eax, [esp+10h]
ror edx, 2
add eax, ebp
mov ebx, eax
mov ebp, edx
rol eax, 5
xor ebp, esi
add eax, edi
and ebp, ecx
add eax, 5A827999h
xor ebp, esi
add eax, [esp+14h]
ror ecx, 2
add eax, ebp
mov edi, eax
mov ebp, ecx
rol eax, 5
xor ebp, edx
add eax, esi
and ebp, ebx
add eax, 5A827999h
xor ebp, edx
add eax, [esp+18h]
ror ebx, 2
add eax, ebp
mov esi, eax
mov ebp, ebx
rol eax, 5
xor ebp, ecx
add eax, edx
and ebp, edi
add eax, 5A827999h
xor ebp, ecx
add eax, [esp+1Ch]
ror edi, 2
add eax, ebp
mov edx, eax
mov ebp, edi
rol eax, 5
xor ebp, ebx
add eax, ecx
and ebp, esi
add eax, 5A827999h
xor ebp, ebx
add eax, [esp+20h]
ror esi, 2
add eax, ebp
mov ecx, eax
mov ebp, esi
rol eax, 5
xor ebp, edi
add eax, ebx
and ebp, edx
add eax, 5A827999h
xor ebp, edi
add eax, [esp+24h]
ror edx, 2
add eax, ebp
mov ebx, eax
mov ebp, edx
rol eax, 5
xor ebp, esi
add eax, edi
and ebp, ecx
add eax, 5A827999h
xor ebp, esi
add eax, [esp+28h]
ror ecx, 2
add eax, ebp
mov edi, eax
mov ebp, ecx
rol eax, 5
xor ebp, edx
add eax, esi
and ebp, ebx
add eax, 5A827999h
xor ebp, edx
add eax, [esp+2Ch]
ror ebx, 2
add eax, ebp
mov esi, eax
mov ebp, ebx
rol eax, 5
xor ebp, ecx
add eax, edx
and ebp, edi
add eax, 5A827999h
xor ebp, ecx
add eax, [esp+30h]
ror edi, 2
add eax, ebp
mov edx, eax
mov ebp, edi
rol eax, 5
xor ebp, ebx
add eax, ecx
and ebp, esi
add eax, 5A827999h
xor ebp, ebx
add eax, [esp+34h]
ror esi, 2
add eax, ebp
mov ecx, eax
mov ebp, esi
rol eax, 5
xor ebp, edi
add eax, ebx
and ebp, edx
add eax, 5A827999h
xor ebp, edi
add eax, [esp+38h]
ror edx, 2
add eax, ebp
mov ebx, eax
mov ebp, edx
rol eax, 5
xor ebp, esi
add eax, edi
and ebp, ecx
add eax, 5A827999h
xor ebp, esi
add eax, [esp+3Ch]
ror ecx, 2
add eax, ebp
mov edi, eax
mov ebp, ecx
rol eax, 5
xor ebp, edx
add eax, esi
and ebp, ebx
add eax, 5A827999h
xor ebp, edx
add eax, [esp+40h]
ror ebx, 2
add eax, ebp
mov esi, eax
mov ebp, ebx
rol eax, 5
xor ebp, ecx
add eax, edx
and ebp, edi
add eax, 5A827999h
xor ebp, ecx
add eax, [esp+44h]
ror edi, 2
add eax, ebp
mov edx, eax
mov ebp, edi
rol eax, 5
xor ebp, ebx
add eax, ecx
and ebp, esi
add eax, 5A827999h
xor ebp, ebx
add eax, [esp+48h]
ror esi, 2
add eax, ebp
mov ecx, eax
mov ebp, esi
rol eax, 5
xor ebp, edi
add eax, ebx
and ebp, edx
add eax, 5A827999h
xor ebp, edi
add eax, [esp+4Ch]
ror edx, 2
add eax, ebp
mov ebx, eax
mov ebp, edx
rol eax, 5
xor ebp, esi
add eax, edi
and ebp, ecx
add eax, 5A827999h
xor ebp, esi
add eax, [esp+50h]
ror ecx, 2
add eax, ebp
mov edi, eax
rol eax, 5
mov ebp, edx
add eax, esi
xor ebp, ecx
add eax, 6ED9EBA1h
xor ebp, ebx
add eax, [esp+54h]
ror ebx, 2
add eax, ebp
mov esi, eax
rol eax, 5
mov ebp, ecx
add eax, edx
xor ebp, ebx
add eax, 6ED9EBA1h
xor ebp, edi
add eax, [esp+58h]
ror edi, 2
add eax, ebp
mov edx, eax
rol eax, 5
mov ebp, ebx
add eax, ecx
xor ebp, edi
add eax, 6ED9EBA1h
xor ebp, esi
add eax, [esp+5Ch]
ror esi, 2
add eax, ebp
mov ecx, eax
rol eax, 5
mov ebp, edi
add eax, ebx
xor ebp, esi
add eax, 6ED9EBA1h
xor ebp, edx
add eax, [esp+60h]
ror edx, 2
add eax, ebp
mov ebx, eax
rol eax, 5
mov ebp, esi
add eax, edi
xor ebp, edx
add eax, 6ED9EBA1h
xor ebp, ecx
add eax, [esp+64h]
ror ecx, 2
add eax, ebp
mov edi, eax
rol eax, 5
mov ebp, edx
add eax, esi
xor ebp, ecx
add eax, 6ED9EBA1h
xor ebp, ebx
add eax, [esp+68h]
ror ebx, 2
add eax, ebp
mov esi, eax
rol eax, 5
mov ebp, ecx
add eax, edx
xor ebp, ebx
add eax, 6ED9EBA1h
xor ebp, edi
add eax, [esp+6Ch]
ror edi, 2
add eax, ebp
mov edx, eax
rol eax, 5
mov ebp, ebx
add eax, ecx
xor ebp, edi
add eax, 6ED9EBA1h
xor ebp, esi
add eax, [esp+70h]
ror esi, 2
add eax, ebp
mov ecx, eax
rol eax, 5
mov ebp, edi
add eax, ebx
xor ebp, esi
add eax, 6ED9EBA1h
xor ebp, edx
add eax, [esp+74h]
ror edx, 2
add eax, ebp
mov ebx, eax
rol eax, 5
mov ebp, esi
add eax, edi
xor ebp, edx
add eax, 6ED9EBA1h
xor ebp, ecx
add eax, [esp+78h]
ror ecx, 2
add eax, ebp
mov edi, eax
rol eax, 5
mov ebp, edx
add eax, esi
xor ebp, ecx
add eax, 6ED9EBA1h
xor ebp, ebx
add eax, [esp+7Ch]
ror ebx, 2
add eax, ebp
mov esi, eax
rol eax, 5
mov ebp, ecx
add eax, edx
xor ebp, ebx
add eax, 6ED9EBA1h
xor ebp, edi
add eax, [esp+80h]
ror edi, 2
add eax, ebp
mov edx, eax
rol eax, 5
mov ebp, ebx
add eax, ecx
xor ebp, edi
add eax, 6ED9EBA1h
xor ebp, esi
add eax, [esp+84h]
ror esi, 2
add eax, ebp
mov ecx, eax
rol eax, 5
mov ebp, edi
add eax, ebx
xor ebp, esi
add eax, 6ED9EBA1h
xor ebp, edx
add eax, [esp+88h]
ror edx, 2
add eax, ebp
mov ebx, eax
rol eax, 5
mov ebp, esi
add eax, edi
xor ebp, edx
add eax, 6ED9EBA1h
xor ebp, ecx
add eax, [esp+8Ch]
ror ecx, 2
add eax, ebp
mov edi, eax
rol eax, 5
mov ebp, edx
add eax, esi
xor ebp, ecx
add eax, 6ED9EBA1h
xor ebp, ebx
add eax, [esp+90h]
ror ebx, 2
add eax, ebp
mov esi, eax
rol eax, 5
mov ebp, ecx
add eax, edx
xor ebp, ebx
add eax, 6ED9EBA1h
xor ebp, edi
add eax, [esp+94h]
ror edi, 2
add eax, ebp
mov edx, eax
rol eax, 5
mov ebp, ebx
add eax, ecx
xor ebp, edi
add eax, 6ED9EBA1h
xor ebp, esi
add eax, [esp+98h]
ror esi, 2
add eax, ebp
mov ecx, eax
rol eax, 5
mov ebp, edi
add eax, ebx
xor ebp, esi
add eax, 6ED9EBA1h
xor ebp, edx
add eax, [esp+9Ch]
ror edx, 2
add eax, ebp
mov ebx, eax
rol eax, 5
mov ebp, esi
add eax, edi
xor ebp, edx
add eax, 6ED9EBA1h
xor ebp, ecx
add eax, [esp+0A0h]
ror ecx, 2
add eax, ebp
mov ebp, edx
mov edi, eax
rol eax, 5
xor ebp, ecx
add eax, esi
and ebp, ebx
add eax, 8F1BBCDCh
mov esi, ecx
add eax, [esp+0A4h]
and esi, edx
xor ebp, esi
ror ebx, 2
add eax, ebp
mov ebp, ecx
mov esi, eax
rol eax, 5
xor ebp, ebx
add eax, edx
and ebp, edi
add eax, 8F1BBCDCh
mov edx, ebx
add eax, [esp+0A8h]
and edx, ecx
xor ebp, edx
ror edi, 2
add eax, ebp
mov ebp, ebx
mov edx, eax
rol eax, 5
xor ebp, edi
add eax, ecx
and ebp, esi
add eax, 8F1BBCDCh
mov ecx, edi
add eax, [esp+0ACh]
and ecx, ebx
xor ebp, ecx
ror esi, 2
add eax, ebp
mov ebp, edi
mov ecx, eax
rol eax, 5
xor ebp, esi
add eax, ebx
and ebp, edx
add eax, 8F1BBCDCh
mov ebx, esi
add eax, [esp+0B0h]
and ebx, edi
xor ebp, ebx
ror edx, 2
add eax, ebp
mov ebp, esi
mov ebx, eax
rol eax, 5
xor ebp, edx
add eax, edi
and ebp, ecx
add eax, 8F1BBCDCh
mov edi, edx
add eax, [esp+0B4h]
and edi, esi
xor ebp, edi
ror ecx, 2
add eax, ebp
mov ebp, edx
mov edi, eax
rol eax, 5
xor ebp, ecx
add eax, esi
and ebp, ebx
add eax, 8F1BBCDCh
mov esi, ecx
add eax, [esp+0B8h]
and esi, edx
xor ebp, esi
ror ebx, 2
add eax, ebp
mov ebp, ecx
mov esi, eax
rol eax, 5
xor ebp, ebx
add eax, edx
and ebp, edi
add eax, 8F1BBCDCh
mov edx, ebx
add eax, [esp+0BCh]
and edx, ecx
xor ebp, edx
ror edi, 2
add eax, ebp
mov ebp, ebx
mov edx, eax
rol eax, 5
xor ebp, edi
add eax, ecx
and ebp, esi
add eax, 8F1BBCDCh
mov ecx, edi
add eax, [esp+0C0h]
and ecx, ebx
xor ebp, ecx
ror esi, 2
add eax, ebp
mov ebp, edi
mov ecx, eax
rol eax, 5
xor ebp, esi
add eax, ebx
and ebp, edx
add eax, 8F1BBCDCh
mov ebx, esi
add eax, [esp+0C4h]
and ebx, edi
xor ebp, ebx
ror edx, 2
add eax, ebp
mov ebp, esi
mov ebx, eax
rol eax, 5
xor ebp, edx
add eax, edi
and ebp, ecx
add eax, 8F1BBCDCh
mov edi, edx
add eax, [esp+0C8h]
and edi, esi
xor ebp, edi
ror ecx, 2
add eax, ebp
mov ebp, edx
mov edi, eax
rol eax, 5
xor ebp, ecx
add eax, esi
and ebp, ebx
add eax, 8F1BBCDCh
mov esi, ecx
add eax, [esp+0CCh]
and esi, edx
xor ebp, esi
ror ebx, 2
add eax, ebp
mov ebp, ecx
mov esi, eax
rol eax, 5
xor ebp, ebx
add eax, edx
and ebp, edi
add eax, 8F1BBCDCh
mov edx, ebx
add eax, [esp+0D0h]
and edx, ecx
xor ebp, edx
ror edi, 2
add eax, ebp
mov ebp, ebx
mov edx, eax
rol eax, 5
xor ebp, edi
add eax, ecx
and ebp, esi
add eax, 8F1BBCDCh
mov ecx, edi
add eax, [esp+0D4h]
and ecx, ebx
xor ebp, ecx
ror esi, 2
add eax, ebp
mov ebp, edi
mov ecx, eax
rol eax, 5
xor ebp, esi
add eax, ebx
and ebp, edx
add eax, 8F1BBCDCh
mov ebx, esi
add eax, [esp+0D8h]
and ebx, edi
xor ebp, ebx
ror edx, 2
add eax, ebp
mov ebp, esi
mov ebx, eax
rol eax, 5
xor ebp, edx
add eax, edi
and ebp, ecx
add eax, 8F1BBCDCh
mov edi, edx
add eax, [esp+0DCh]
and edi, esi
xor ebp, edi
ror ecx, 2
add eax, ebp
mov ebp, edx
mov edi, eax
rol eax, 5
xor ebp, ecx
add eax, esi
and ebp, ebx
add eax, 8F1BBCDCh
mov esi, ecx
add eax, [esp+0E0h]
and esi, edx
xor ebp, esi
ror ebx, 2
add eax, ebp
mov ebp, ecx
mov esi, eax
rol eax, 5
xor ebp, ebx
add eax, edx
and ebp, edi
add eax, 8F1BBCDCh
mov edx, ebx
add eax, [esp+0E4h]
and edx, ecx
xor ebp, edx
ror edi, 2
add eax, ebp
mov ebp, ebx
mov edx, eax
rol eax, 5
xor ebp, edi
add eax, ecx
and ebp, esi
add eax, 8F1BBCDCh
mov ecx, edi
add eax, [esp+0E8h]
and ecx, ebx
xor ebp, ecx
ror esi, 2
add eax, ebp
mov ebp, edi
mov ecx, eax
rol eax, 5
xor ebp, esi
add eax, ebx
and ebp, edx
add eax, 8F1BBCDCh
mov ebx, esi
add eax, [esp+0ECh]
and ebx, edi
xor ebp, ebx
ror edx, 2
add eax, ebp
mov ebp, esi
mov ebx, eax
rol eax, 5
xor ebp, edx
add eax, edi
and ebp, ecx
add eax, 8F1BBCDCh
mov edi, edx
add eax, [esp+0F0h]
and edi, esi
xor ebp, edi
ror ecx, 2
add eax, ebp
mov edi, eax
rol eax, 5
mov ebp, edx
add eax, esi
xor ebp, ecx
add eax, 0CA62C1D6h
xor ebp, ebx
add eax, [esp+0F4h]
ror ebx, 2
add eax, ebp
mov esi, eax
rol eax, 5
mov ebp, ecx
add eax, edx
xor ebp, ebx
add eax, 0CA62C1D6h
xor ebp, edi
add eax, [esp+0F8h]
ror edi, 2
add eax, ebp
mov edx, eax
rol eax, 5
mov ebp, ebx
add eax, ecx
xor ebp, edi
add eax, 0CA62C1D6h
xor ebp, esi
add eax, [esp+0FCh]
ror esi, 2
add eax, ebp
mov ecx, eax
rol eax, 5
mov ebp, edi
add eax, ebx
xor ebp, esi
add eax, 0CA62C1D6h
xor ebp, edx
add eax, [esp+100h]
ror edx, 2
add eax, ebp
mov ebx, eax
rol eax, 5
mov ebp, esi
add eax, edi
xor ebp, edx
add eax, 0CA62C1D6h
xor ebp, ecx
add eax, [esp+104h]
ror ecx, 2
add eax, ebp
mov edi, eax
rol eax, 5
mov ebp, edx
add eax, esi
xor ebp, ecx
add eax, 0CA62C1D6h
xor ebp, ebx
add eax, [esp+108h]
ror ebx, 2
add eax, ebp
mov esi, eax
rol eax, 5
mov ebp, ecx
add eax, edx
xor ebp, ebx
add eax, 0CA62C1D6h
xor ebp, edi
add eax, [esp+10Ch]
ror edi, 2
add eax, ebp
mov edx, eax
rol eax, 5
mov ebp, ebx
add eax, ecx
xor ebp, edi
add eax, 0CA62C1D6h
xor ebp, esi
add eax, [esp+110h]
ror esi, 2
add eax, ebp
mov ecx, eax
rol eax, 5
mov ebp, edi
add eax, ebx
xor ebp, esi
add eax, 0CA62C1D6h
xor ebp, edx
add eax, [esp+114h]
ror edx, 2
add eax, ebp
mov ebx, eax
rol eax, 5
mov ebp, esi
add eax, edi
xor ebp, edx
add eax, 0CA62C1D6h
xor ebp, ecx
add eax, [esp+118h]
ror ecx, 2
add eax, ebp
mov edi, eax
rol eax, 5
mov ebp, edx
add eax, esi
xor ebp, ecx
add eax, 0CA62C1D6h
xor ebp, ebx
add eax, [esp+11Ch]
ror ebx, 2
add eax, ebp
mov esi, eax
rol eax, 5
mov ebp, ecx
add eax, edx
xor ebp, ebx
add eax, 0CA62C1D6h
xor ebp, edi
add eax, [esp+120h]
ror edi, 2
add eax, ebp
mov edx, eax
rol eax, 5
mov ebp, ebx
add eax, ecx
xor ebp, edi
add eax, 0CA62C1D6h
xor ebp, esi
add eax, [esp+124h]
ror esi, 2
add eax, ebp
mov ecx, eax
rol eax, 5
mov ebp, edi
add eax, ebx
xor ebp, esi
add eax, 0CA62C1D6h
xor ebp, edx
add eax, [esp+128h]
ror edx, 2
add eax, ebp
mov ebx, eax
rol eax, 5
mov ebp, esi
add eax, edi
xor ebp, edx
add eax, 0CA62C1D6h
xor ebp, ecx
add eax, [esp+12Ch]
ror ecx, 2
add eax, ebp
mov edi, eax
rol eax, 5
mov ebp, edx
add eax, esi
xor ebp, ecx
add eax, 0CA62C1D6h
xor ebp, ebx
add eax, [esp+130h]
ror ebx, 2
add eax, ebp
mov esi, eax
rol eax, 5
mov ebp, ecx
add eax, edx
xor ebp, ebx
add eax, 0CA62C1D6h
xor ebp, edi
add eax, [esp+134h]
ror edi, 2
add eax, ebp
mov edx, eax
rol eax, 5
mov ebp, ebx
add eax, ecx
xor ebp, edi
add eax, 0CA62C1D6h
xor ebp, esi
add eax, [esp+138h]
ror esi, 2
add eax, ebp
mov ecx, eax
rol eax, 5
mov ebp, edi
add eax, ebx
xor ebp, esi
add eax, 0CA62C1D6h
xor ebp, edx
add eax, [esp+13Ch]
ror edx, 2
add eax, ebp
mov ebx, eax
rol eax, 5
mov ebp, esi
add eax, edi
xor ebp, edx
add eax, 0CA62C1D6h
xor ebp, ecx
add eax, [esp+140h]
ror ecx, 2
add eax, ebp
mov ebp, [esp+168h]
add [ebp+8], eax
add [ebp+0Ch], ebx
add [ebp+10h], ecx
add [ebp+14h], edx
add [ebp+18h], esi
retn
align 10h
global _SHA1_Add_p5@12
_SHA1_Add_p5@12:
pusha
sub esp, 140h
mov ecx, [esp+160h+0Ch]
and ecx, ecx
jz get_out
xor edx, edx
mov ebp, [esp+160h+8h]
mov edi, [esp+160h+4h]
mov ebx, [edi]
mov eax, ebx
add ebx, ecx
mov [edi], ebx
adc [edi+4], edx
and eax, 3Fh
jnz partial_buffer
full_blocks:
mov ecx, [esp+160h+0Ch]
and ecx, ecx
jz get_out
sub ecx, 40h
jb end_of_stream
mov [esp+160h+0Ch], ecx
call _SHA_Compile_p5@0
mov ebp, [esp+160h+8h]
add ebp, 40h
mov [esp+160h+8h], ebp
jmp full_blocks
end_of_stream:
mov edi, [esp+160h+4h]
mov esi, ebp
lea edi, [edi+1Ch]
add ecx, 40h
rep movsb
jmp get_out
partial_buffer:
add ecx, eax
cmp ecx, 40h
jb short_stream
mov ecx, 0FFFFFFC0h
add ecx, eax
add [esp+160h+0Ch], ecx
loc:
mov bl, [ebp+0]
inc ebp
mov [edi+ecx+5Ch], bl
inc ecx
jnz loc
mov [esp+160h+8h], ebp
lea ebp, [edi+1Ch]
call _SHA_Compile_p5@0
mov ebp, [esp+160h+8h]
jmp full_blocks
short_stream:
sub ecx, eax
mov esi, ebp
lea edi, [edi+eax+1Ch]
rep movsb
get_out:
add esp, 140h
popa
retn 0Ch
| 21.400143
| 30
| 0.461533
|
dba04956eb83c5c333453249b2759ecd1ef6e33d
| 128
|
php
|
PHP
|
resources/views/public/video.blade.php
|
videouri/api
|
a3d4f77b9ce5fb3ca4c643979be8a731b777b9e4
|
[
"MIT"
] | 1
|
2015-08-12T13:02:55.000Z
|
2015-08-12T13:02:55.000Z
|
resources/views/public/video.blade.php
|
videouri/videouri
|
a3d4f77b9ce5fb3ca4c643979be8a731b777b9e4
|
[
"MIT"
] | 5
|
2016-07-09T14:10:56.000Z
|
2016-09-26T22:54:37.000Z
|
resources/views/public/video.blade.php
|
videouri/videouri
|
a3d4f77b9ce5fb3ca4c643979be8a731b777b9e4
|
[
"MIT"
] | null | null | null |
@extends('app')
@section('content')
<video-page :video="{{ $video }}" :user="{{ Auth::user() }}"></video-page>
@endsection
| 21.333333
| 78
| 0.578125
|
f47d2ed5486eaa22951483b4fac1cede1a589f91
| 42,750
|
rs
|
Rust
|
libindy/indy-wallet/src/cache/wallet_cache.rs
|
btosello/sync
|
064d931967a8d6b0d090e81f7428183969e4db22
|
[
"Apache-2.0"
] | 7
|
2021-10-18T16:49:15.000Z
|
2022-02-06T13:44:49.000Z
|
libindy/indy-wallet/src/cache/wallet_cache.rs
|
alejandroalffer/cheqd-sdk
|
7605689df046788652f1437d5c740faa23c7f4d8
|
[
"Apache-2.0"
] | 13
|
2021-08-23T09:13:42.000Z
|
2021-09-16T16:09:58.000Z
|
libindy/indy-wallet/src/cache/wallet_cache.rs
|
alejandroalffer/cheqd-sdk
|
7605689df046788652f1437d5c740faa23c7f4d8
|
[
"Apache-2.0"
] | 2
|
2021-12-10T11:03:22.000Z
|
2022-03-12T02:09:16.000Z
|
use crate::{
cache::{
lru::LruCache,
cache::Cache,
},
storage::{
Tag::{Encrypted, PlainText},
TagName::{OfEncrypted, OfPlain},
StorageRecord, Tag, TagName,
},
wallet::EncryptedValue,
RecordOptions,
};
use std::{
collections::{HashSet, HashMap},
iter::FromIterator,
sync::atomic::{AtomicUsize, Ordering},
};
use indy_api_types::domain::wallet::{CacheConfig, CachingAlgorithm};
use async_std::sync::{RwLock, Mutex};
#[derive(PartialEq, Eq, Hash)]
pub struct WalletCacheKey {
type_: Vec<u8>,
id: Vec<u8>,
}
pub struct WalletCacheValue {
value: EncryptedValue,
tags: Vec<Tag>,
}
pub struct WalletCache {
cache: Option<Mutex<Box<dyn Cache + Send>>>,
cache_entities: HashSet<String>,
}
impl WalletCache {
    /// Builds a cache from an optional config. Caching is enabled only when a
    /// config is given with a positive `size` AND a non-empty `entities` list;
    /// any other input yields a disabled (pass-through) cache.
    pub fn new(config: Option<CacheConfig>) -> Self {
        match config {
            Some(cache_config) if cache_config.size > 0 && !cache_config.entities.is_empty() => {
                // LRU is the only algorithm currently; the match keeps the
                // door open for more variants without an `_` fallback.
                let cache = match cache_config.algorithm {
                    CachingAlgorithm::LRU => LruCache::new(cache_config.size),
                };
                WalletCache {
                    cache: Some(Mutex::new(Box::new(cache))),
                    cache_entities: HashSet::from_iter(cache_config.entities.iter().cloned()),
                }
            }
            _ => {
                WalletCache { // no cache
                    cache: None,
                    cache_entities: HashSet::new(),
                }
            }
        }
    }
    /// True when caching is enabled AND `type_` (plaintext name) is configured
    /// as a cacheable entity.
    pub fn is_type_cacheable(&self, type_: &str) -> bool {
        self.cache.is_some() && self.cache_entities.contains(&type_.to_owned())
    }
    /// Inserts (or replaces) a record in the cache. `type_` is the plaintext
    /// type used for the cacheability check; `etype`/`eid` are the encrypted
    /// key parts. No-op when the type is not cacheable or cache is disabled.
    pub async fn add(
        &self,
        type_: &str,
        etype: &[u8],
        eid: &[u8],
        evalue: &EncryptedValue,
        etags: &[Tag],
    ) {
        if let Some(protected_cache) = &self.cache {
            if self.cache_entities.contains(&type_.to_owned()) {
                let key = WalletCacheKey {
                    type_: etype.to_owned(),
                    id: eid.to_owned(),
                };
                let value = WalletCacheValue {
                    value: evalue.to_owned(),
                    tags: etags.to_owned(),
                };
                // put() may evict an older entry; the evicted value is ignored.
                let _ = protected_cache.lock().await.put(key, value);
            }
        }
    }
    /// Appends `etags` to the cached record's tag list, if that record is
    /// currently cached. Silently does nothing for uncached records.
    pub async fn add_tags(
        &self,
        type_: &str,
        etype: &[u8],
        eid: &[u8],
        etags: &[Tag],
    ) {
        if let Some(protected_cache) = &self.cache {
            if self.cache_entities.contains(&type_.to_owned()) {
                let key = WalletCacheKey {
                    type_: etype.to_owned(),
                    id: eid.to_owned(),
                };
                // get_mut also refreshes the entry's LRU position.
                let _ = protected_cache.lock().await.get_mut(&key).map(|v|{
                    v.tags.append(&mut etags.to_owned())
                });
            }
        }
    }
    /// Replaces the cached record's tag list wholesale with `etags`,
    /// if the record is currently cached.
    pub async fn update_tags(
        &self,
        type_: &str,
        etype: &[u8],
        eid: &[u8],
        etags: &[Tag],
    ) {
        if let Some(protected_cache) = &self.cache {
            if self.cache_entities.contains(&type_.to_owned()) {
                let key = WalletCacheKey {
                    type_: etype.to_owned(),
                    id: eid.to_owned(),
                };
                let _ = protected_cache.lock().await.get_mut(&key).map(|v|{
                    v.tags = etags.to_vec()
                });
            }
        }
    }
    /// Removes the tags named in `etag_names` from the cached record.
    /// Encrypted and plaintext tag names are matched against their
    /// corresponding tag variants independently.
    pub async fn delete_tags(
        &self,
        type_: &str,
        etype: &[u8],
        eid: &[u8],
        etag_names: &[TagName],
    ) {
        if let Some(protected_cache) = &self.cache {
            if self.cache_entities.contains(&type_.to_owned()) {
                let key = WalletCacheKey {
                    type_: etype.to_owned(),
                    id: eid.to_owned(),
                };
                // Split requested names by variant for O(1) lookups below.
                let mut enc_tag_names = HashSet::new();
                let mut plain_tag_names = HashSet::new();
                for x in etag_names {
                    match x {
                        OfEncrypted(value) => enc_tag_names.insert(value),
                        OfPlain(value) => plain_tag_names.insert(value),
                    };
                }
                let _ = protected_cache.lock().await.get_mut(&key).map(|v|{
                    // Keep only tags whose name is NOT in the delete sets.
                    v.tags.retain(|el| {
                        match el {
                            Encrypted(tag_name, _) => {
                                !enc_tag_names.contains(tag_name)
                            },
                            PlainText(tag_name, _) => {
                                !plain_tag_names.contains(tag_name)
                            }
                        }
                    });
                });
            }
        }
    }
    /// Overwrites the cached record's encrypted value (tags untouched),
    /// if the record is currently cached.
    pub async fn update(
        &self,
        type_: &str,
        etype: &[u8],
        eid: &[u8],
        evalue: &EncryptedValue,
    ) {
        if let Some(protected_cache) = &self.cache {
            if self.cache_entities.contains(&type_.to_owned()) {
                let key = WalletCacheKey {
                    type_: etype.to_owned(),
                    id: eid.to_owned(),
                };
                let _ = protected_cache.lock().await.get_mut(&key).map(|v|{
                    v.value = evalue.to_owned()
                });
            }
        }
    }
    /// Looks up a record and, on a hit, materializes a `StorageRecord`
    /// honoring `options` (which of value/type/tags to include).
    /// Returns `None` on cache miss, non-cacheable type, or disabled cache —
    /// callers cannot distinguish these cases from the return value alone.
    pub async fn get(
        &self,
        type_: &str,
        etype: &[u8],
        eid: &[u8],
        options: &RecordOptions
    ) -> Option<StorageRecord> {
        if let Some(protected_cache) = &self.cache {
            if self.cache_entities.contains(&type_.to_owned()) {
                let key = WalletCacheKey {
                    type_: etype.to_owned(),
                    id: eid.to_owned(),
                };
                // get() promotes the entry in LRU order on a hit.
                protected_cache.lock().await.get(&key).map(|v|{
                    StorageRecord {
                        id: eid.to_owned(),
                        value: if options.retrieve_value {Some(v.value.clone())} else {None},
                        type_: if options.retrieve_type {Some(etype.to_owned())} else {None},
                        tags: if options.retrieve_tags {Some(v.tags.clone())} else {None},
                    }
                })
            } else {
                None
            }
        } else {
            None
        }
    }
    /// Evicts a record from the cache (no-op if absent or not cacheable).
    pub async fn delete(&self, type_: &str, etype: &[u8], eid: &[u8]) {
        if let Some(protected_cache) = &self.cache {
            if self.cache_entities.contains(&type_.to_owned()) {
                let key = WalletCacheKey {
                    type_: etype.to_owned(),
                    id: eid.to_owned(),
                };
                let _ = protected_cache.lock().await.pop(&key);
            }
        }
    }
}
/// Per-record-type hit statistics. Atomics allow lock-free increments from
/// concurrent readers holding only a shared reference.
#[derive(Default, Debug)]
pub struct WalletCacheHitData {
    // Lookups answered from cache.
    pub hit: AtomicUsize,
    // Lookups for a cacheable type that missed the cache.
    pub miss: AtomicUsize,
    // Lookups for a type that is not configured as cacheable.
    pub not_cached: AtomicUsize,
}
impl WalletCacheHitData {
fn inc(var: &AtomicUsize, increment: usize) -> usize {
var.fetch_add(increment, Ordering::Relaxed)
}
fn get(var: &AtomicUsize) -> usize {
var.load(Ordering::Relaxed)
}
pub fn inc_hit(&self) -> usize {
WalletCacheHitData::inc(&self.hit, 1)
}
pub fn inc_miss(&self) -> usize {
WalletCacheHitData::inc(&self.miss, 1)
}
pub fn inc_not_cached(&self) -> usize {
WalletCacheHitData::inc(&self.not_cached, 1)
}
pub fn get_hit(&self) -> usize {
WalletCacheHitData::get(&self.hit)
}
pub fn get_miss(&self) -> usize {
WalletCacheHitData::get(&self.miss)
}
pub fn get_not_cached(&self) -> usize {
WalletCacheHitData::get(&self.not_cached)
}
}
impl Clone for WalletCacheHitData {
fn clone(&self) -> Self {
WalletCacheHitData {
hit: AtomicUsize::from(self.get_hit()),
miss: AtomicUsize::from(self.get_miss()),
not_cached: AtomicUsize::from(self.get_not_cached())
}
}
fn clone_from(&mut self, source: &Self) {
*self.hit.get_mut() = source.get_hit();
*self.miss.get_mut() = source.get_miss();
*self.not_cached.get_mut() = source.get_not_cached();
}
}
/// Aggregated cache-hit metrics, keyed by record type name.
/// `RwLock` lets the common path (type already present) take only a read lock.
pub struct WalletCacheHitMetrics {
    pub data: RwLock<HashMap<String, WalletCacheHitData>>,
}
impl WalletCacheHitMetrics {
    /// Creates an empty metrics table.
    pub fn new() -> Self {
        WalletCacheHitMetrics {
            data: RwLock::new(HashMap::new())
        }
    }
    /// Records a cache hit for `type_`; returns the counter's previous value.
    pub async fn inc_cache_hit(&self, type_: &str) -> usize {
        self.update_data(type_, |x| x.inc_hit()).await
    }
    /// Records a cache miss for `type_`; returns the counter's previous value.
    pub async fn inc_cache_miss(&self, type_: &str) -> usize {
        self.update_data(type_, |x| x.inc_miss()).await
    }
    /// Records a lookup of a non-cacheable type; returns the previous value.
    pub async fn inc_not_cached(&self, type_: &str) -> usize {
        self.update_data(type_, |x| x.inc_not_cached()).await
    }
    /// Applies `f` to the entry for `type_`, creating it on first use.
    /// Fast path: shared read lock only (counters are atomics, so `f` needs
    /// no exclusive access). Slow path: drop the read lock, take the write
    /// lock, then re-check — another task may have inserted the entry in the
    /// window between the two locks (double-checked locking).
    async fn update_data(&self, type_: &str, f: fn(&WalletCacheHitData) -> usize) -> usize {
        let read_guard = self.data.read().await;
        match read_guard.get(type_) {
            Some(x) => f(x),
            None => {
                drop(read_guard);
                let mut write_guard = self.data.write().await;
                // check if data is inserted in the mean time until write lock is acquired.
                match write_guard.get(type_) {
                    Some(x) => f(x),
                    None => {
                        // we are now holding exclusive access, so insert the item in map.
                        let d = Default::default();
                        let result = f(&d);
                        write_guard.insert(type_.to_string(), d);
                        result
                    }
                }
            }
        }
    }
    /// Snapshot of the stats for one type, if any were recorded.
    #[allow(dead_code)]
    pub async fn get_data_for_type(&self, type_: &str) -> Option<WalletCacheHitData> {
        self.data.read().await.get(type_).map(|x|x.clone())
    }
    /// Snapshot of the whole metrics table.
    pub async fn get_data(&self) -> HashMap<String, WalletCacheHitData> {
        self.data.read().await.clone()
    }
}
#[cfg(test)]
mod tests {
extern crate rand;
use super::*;
use crate::storage::{Tag, TagName};
use rand::{distributions::Uniform, distributions::Alphanumeric, Rng};
use indy_api_types::domain::wallet::DEFAULT_CACHE_SIZE;
use futures::Future;
use std::time::Duration;
const TYPE_A: &str = "TypeA";
const TYPE_B: &str = "TypeB";
const TYPE_NON_CACHED: &str = "TypeNonCached";
const ETYPE1: &[u8] = &[1, 2, 3, 1];
const EID1: &[u8] = &[2, 3, 4, 1];
const EID2: &[u8] = &[2, 3, 4, 2];
const FULL_OPTIONS: RecordOptions = RecordOptions {
retrieve_type: true,
retrieve_value: true,
retrieve_tags: true
};
// Random byte vector of `size` elements, each uniformly drawn from 0..255.
fn _rand_vec(size: usize) -> Vec<u8> {
    rand::thread_rng().sample_iter(&Uniform::new(0, 255)).take(size).collect()
}
// Random alphanumeric string of `size` characters.
fn _rand_str(size: usize) -> String {
    rand::thread_rng().sample_iter(&Alphanumeric).take(size).map(char::from).collect()
}
// Fixture: an EncryptedValue with random 200-byte data and 20-byte key.
fn _enc_value() -> EncryptedValue {
    EncryptedValue {
        data: _rand_vec(200),
        key: _rand_vec(20)
    }
}
// Fixture: a random tag, choosing the Encrypted or PlainText variant with
// roughly 50/50 probability.
fn _enc_tag() -> Tag {
    if rand::thread_rng().gen::<u8>() % 2 == 1 {
        Tag::Encrypted(_rand_vec(20), _rand_vec(100))
    } else {
        Tag::PlainText(_rand_vec(20), _rand_str(100))
    }
}
// Fixture: an enabled LRU cache (size 10) caching TYPE_A and TYPE_B.
fn _cache() -> WalletCache {
    let config = CacheConfig {
        size: 10,
        entities: vec![TYPE_A.to_string(), TYPE_B.to_string()],
        algorithm: CachingAlgorithm::LRU
    };
    WalletCache::new(Some(config))
}
// Fixture: a disabled cache — empty `entities` makes WalletCache::new
// fall through to the no-cache branch despite the positive size.
fn _no_cache() -> WalletCache {
    let config = CacheConfig {
        size: 10,
        entities: vec![],
        algorithm: CachingAlgorithm::LRU
    };
    WalletCache::new(Some(config))
}
// Converts a slice of &str into a HashSet<String> for set comparisons.
fn _vec_to_hash_set(items: &[&str]) -> HashSet<String> {
    HashSet::from_iter(items.into_iter().map(|el|el.to_string()))
}
// Maps each Tag to its corresponding TagName variant (key only, value dropped),
// preserving order.
fn _tag_names(tags: &[Tag]) -> Vec<TagName> {
    tags.into_iter().map(|el|{
        match el {
            Encrypted(key, _) => TagName::OfEncrypted(key.to_owned()),
            PlainText(key, _) => TagName::OfPlain(key.to_owned()),
        }
    }).collect()
}
#[test]
fn new_with_no_config_works() {
let cache = WalletCache::new(None);
assert!(cache.cache.is_none());
assert_eq!(cache.cache_entities.len(), 0);
}
#[test]
fn new_with_default_config_works() {
let config = CacheConfig {
size: DEFAULT_CACHE_SIZE,
entities: vec![],
algorithm: CachingAlgorithm::LRU
};
let cache = WalletCache::new(Some(config));
assert!(cache.cache.is_none());
assert_eq!(cache.cache_entities.len(), 0);
}
#[test]
fn new_with_size_but_no_entities_in_config_works() {
let config = CacheConfig {
size: 20,
entities: vec![],
algorithm: CachingAlgorithm::LRU
};
let cache = WalletCache::new(Some(config));
assert!(cache.cache.is_none());
assert_eq!(cache.cache_entities.len(), 0);
}
#[test]
fn new_with_default_size_in_config_works() {
let config_str = json!({
"entities": vec![TYPE_A.to_string(), TYPE_B.to_string()]
}).to_string();
let config: CacheConfig = serde_json::from_str(&config_str).unwrap();
let wallet_cache = WalletCache::new(Some(config));
assert!(wallet_cache.cache.is_some());
let mut cache = wallet_cache.cache.unwrap();
assert_eq!(cache.get_mut().cap(), DEFAULT_CACHE_SIZE);
assert_eq!(cache.get_mut().len(), 0);
assert_eq!(wallet_cache.cache_entities.len(), 2);
assert_eq!(wallet_cache.cache_entities, _vec_to_hash_set(&[TYPE_A, TYPE_B]));
}
#[test]
fn new_with_size_in_config_works() {
let config = CacheConfig {
size: 20,
entities: vec![TYPE_A.to_string(), TYPE_B.to_string()],
algorithm: CachingAlgorithm::LRU
};
let wallet_cache = WalletCache::new(Some(config));
assert!(wallet_cache.cache.is_some());
let mut cache = wallet_cache.cache.unwrap();
assert_eq!(cache.get_mut().cap(), 20);
assert_eq!(cache.get_mut().len(), 0);
assert_eq!(wallet_cache.cache_entities.len(), 2);
assert_eq!(wallet_cache.cache_entities, _vec_to_hash_set(&[TYPE_A, TYPE_B]));
}
#[test]
fn is_type_cacheable_works() {
let cache = _cache();
let result = cache.is_type_cacheable(TYPE_A);
assert_eq!(result, true);
}
#[test]
fn is_type_cacheable_for_noncacheable_type_works() {
let cache = _cache();
let result = cache.is_type_cacheable(TYPE_NON_CACHED);
assert_eq!(result, false);
}
#[test]
fn is_type_cacheable_for_no_cache_enabled_works() {
let cache = _no_cache();
let result = cache.is_type_cacheable(TYPE_A);
assert_eq!(result, false);
}
#[async_std::test]
async fn add_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
}
#[async_std::test]
async fn add_without_tags_works() {
let value = _enc_value();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[]).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![]);
}
#[async_std::test]
async fn add_for_non_cacheable_type_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_NON_CACHED, ETYPE1, EID1, &value, &[tag1, tag2]).await;
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
}
#[async_std::test]
async fn add_for_no_cache_enabled_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _no_cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1, tag2]).await;
assert!(cache.cache.is_none());
}
#[async_std::test]
async fn add_tags_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.add_tags(TYPE_A, ETYPE1, EID1, &[tag3.clone()]).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2, tag3]);
}
#[async_std::test]
async fn add_tags_on_item_without_tags_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[]).await;
cache.add_tags(TYPE_A, ETYPE1, EID1, &[tag1.clone(), tag2.clone()]).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
}
#[async_std::test]
async fn add_tags_on_non_cached_item_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.add_tags(TYPE_A, ETYPE1, EID2, &[tag3]).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
let key2 = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID2.to_vec()
};
assert!(lru.peek(&key2).is_none());
}
#[async_std::test]
async fn add_tags_for_non_cacheable_type_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let cache = _cache();
cache.add(TYPE_NON_CACHED, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.add_tags(TYPE_NON_CACHED, ETYPE1, EID1, &[tag3]).await;
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
}
#[async_std::test]
async fn add_tags_for_no_cache_enabled_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let cache = _no_cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.add_tags(TYPE_A, ETYPE1, EID1, &[tag3]).await;
assert!(cache.cache.is_none());
}
#[async_std::test]
async fn update_tags_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.update_tags(TYPE_A, ETYPE1, EID1, &[tag3.clone()]).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag3]);
}
#[async_std::test]
async fn update_tags_on_item_without_tags_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[]).await;
cache.update_tags(TYPE_A, ETYPE1, EID1, &[tag1.clone()]).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1]);
}
#[async_std::test]
async fn update_tags_on_non_cached_item_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let tag4 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.update_tags(TYPE_A, ETYPE1, EID2, &[tag3, tag4]).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
let key2 = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID2.to_vec()
};
assert!(lru.peek(&key2).is_none());
}
#[async_std::test]
async fn update_tags_for_non_cacheable_type_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let cache = _cache();
cache.add(TYPE_NON_CACHED, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.update_tags(TYPE_NON_CACHED, ETYPE1, EID1, &[tag3]).await;
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
}
#[async_std::test]
async fn update_tags_for_no_cache_enabled_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let cache = _no_cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.update_tags(TYPE_A, ETYPE1, EID1, &[tag3]).await;
assert!(cache.cache.is_none());
}
#[async_std::test]
async fn delete_tags_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let tag3 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.delete_tags(TYPE_A, ETYPE1, EID1, &_tag_names(&[tag1, tag3])).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag2]);
}
#[async_std::test]
async fn delete_tags_on_item_without_tags_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[]).await;
cache.delete_tags(TYPE_A, ETYPE1, EID1, &_tag_names(&[tag1])).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![]);
}
#[async_std::test]
async fn delete_tags_on_non_cached_item_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.delete_tags(TYPE_A, ETYPE1, EID2, &_tag_names(&[tag1.clone()])).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
let key2 = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID2.to_vec()
};
assert!(lru.peek(&key2).is_none());
}
#[async_std::test]
async fn delete_tags_for_non_cacheable_type_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_NON_CACHED, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.delete_tags(TYPE_NON_CACHED, ETYPE1, EID1, &_tag_names(&[tag1.clone()])).await;
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
}
#[async_std::test]
async fn delete_tags_for_no_cache_enabled_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _no_cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.delete_tags(TYPE_A, ETYPE1, EID1, &_tag_names(&[tag1])).await;
assert!(cache.cache.is_none());
}
#[async_std::test]
async fn update_works() {
let value = _enc_value();
let value2 = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.update(TYPE_A, ETYPE1, EID1, &value2).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value2);
assert_eq!(cached.tags, vec![tag1, tag2]);
}
#[async_std::test]
async fn update_on_item_without_tags_works() {
let value = _enc_value();
let value2 = _enc_value();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[]).await;
cache.update(TYPE_A, ETYPE1, EID1, &value2).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value2);
assert_eq!(cached.tags, vec![]);
}
#[async_std::test]
async fn update_on_non_cached_item_works() {
let value = _enc_value();
let value2 = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.update(TYPE_A, ETYPE1, EID2, &value2).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
let key2 = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID2.to_vec()
};
assert!(lru.peek(&key2).is_none());
}
#[async_std::test]
async fn update_for_non_cacheable_type_works() {
let value = _enc_value();
let value2 = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_NON_CACHED, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.update(TYPE_NON_CACHED, ETYPE1, EID1, &value2).await;
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
}
#[async_std::test]
async fn update_for_no_cache_enabled_works() {
let value = _enc_value();
let value2 = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _no_cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.update(TYPE_A, ETYPE1, EID1, &value2).await;
assert!(cache.cache.is_none());
}
#[async_std::test]
async fn delete_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.delete(TYPE_A, ETYPE1, EID1).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
assert!(lru.peek(&key).is_none());
}
#[async_std::test]
async fn delete_on_item_without_tags_works() {
let value = _enc_value();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[]).await;
cache.delete(TYPE_A, ETYPE1, EID1).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
assert!(lru.peek(&key).is_none());
}
#[async_std::test]
async fn delete_on_non_cached_item_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
cache.delete(TYPE_A, ETYPE1, EID2).await;
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
let key2 = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID2.to_vec()
};
assert!(lru.peek(&key2).is_none());
}
#[async_std::test]
async fn delete_for_non_cacheable_type_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_NON_CACHED, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.delete(TYPE_NON_CACHED, ETYPE1, EID1).await;
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
}
#[async_std::test]
async fn delete_for_no_cache_enabled_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _no_cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1, tag2]).await;
cache.delete(TYPE_A, ETYPE1, EID1).await;
assert!(cache.cache.is_none());
}
#[async_std::test]
async fn get_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
let result = cache.get(TYPE_A, ETYPE1, EID1, &FULL_OPTIONS).await.unwrap();
assert_eq!(result.id, EID1);
assert_eq!(result.type_, Some(ETYPE1.to_owned()));
assert_eq!(result.value, Some(value.clone()));
assert_eq!(result.tags, Some(vec![tag1.clone(), tag2.clone()]));
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
}
#[async_std::test]
async fn get_for_item_without_tags_works() {
let value = _enc_value();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[]).await;
let result = cache.get(TYPE_A, ETYPE1, EID1, &FULL_OPTIONS).await.unwrap();
assert_eq!(result.id, EID1);
assert_eq!(result.type_, Some(ETYPE1.to_owned()));
assert_eq!(result.value, Some(value.clone()));
assert_eq!(result.tags, Some(vec![]));
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![]);
}
#[async_std::test]
async fn get_for_non_cached_item_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1.clone(), tag2.clone()]).await;
let result = cache.get(TYPE_A, ETYPE1, EID2, &FULL_OPTIONS).await;
assert!(result.is_none());
let key = WalletCacheKey {
type_: ETYPE1.to_vec(),
id: EID1.to_vec()
};
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 1);
let cached = lru.peek(&key).unwrap();
assert_eq!(cached.value, value);
assert_eq!(cached.tags, vec![tag1, tag2]);
}
#[async_std::test]
async fn get_for_non_cacheable_type_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _cache();
cache.add(TYPE_NON_CACHED, ETYPE1, EID1, &value, &[tag1, tag2]).await;
let result = cache.get(TYPE_A, ETYPE1, EID1, &FULL_OPTIONS).await;
assert!(result.is_none());
let mut internal_cache = cache.cache.unwrap();
let lru = internal_cache.get_mut();
assert_eq!(lru.len(), 0);
}
#[async_std::test]
async fn get_for_no_cache_enabled_works() {
let value = _enc_value();
let tag1 = _enc_tag();
let tag2 = _enc_tag();
let cache = _no_cache();
cache.add(TYPE_A, ETYPE1, EID1, &value, &[tag1, tag2]).await;
let result = cache.get(TYPE_A, ETYPE1, EID1, &FULL_OPTIONS).await;
assert!(result.is_none());
assert!(cache.cache.is_none());
}
#[async_std::test]
async fn wallet_cache_hit_metrics_new_works() {
let mut metrics = WalletCacheHitMetrics::new();
assert!(metrics.data.get_mut().is_empty());
}
#[async_std::test]
async fn wallet_cache_hit_metrics_inc_cache_hit_works() {
let metrics = WalletCacheHitMetrics::new();
metrics.inc_cache_hit(TYPE_A).await;
let type_data = metrics.get_data_for_type(TYPE_A).await.unwrap();
assert_eq!(type_data.get_hit(), 1);
assert_eq!(type_data.get_miss(), 0);
assert_eq!(type_data.get_not_cached(), 0);
}
#[async_std::test]
async fn wallet_cache_hit_metrics_inc_cache_miss_works() {
let metrics = WalletCacheHitMetrics::new();
metrics.inc_cache_miss(TYPE_A).await;
let type_data = metrics.get_data_for_type(TYPE_A).await.unwrap();
assert_eq!(type_data.get_hit(), 0);
assert_eq!(type_data.get_miss(), 1);
assert_eq!(type_data.get_not_cached(), 0);
}
#[async_std::test]
async fn wallet_cache_hit_metrics_inc_not_cached_works() {
let metrics = WalletCacheHitMetrics::new();
metrics.inc_not_cached(TYPE_A).await;
let type_data = metrics.get_data_for_type(TYPE_A).await.unwrap();
assert_eq!(type_data.get_hit(), 0);
assert_eq!(type_data.get_miss(), 0);
assert_eq!(type_data.get_not_cached(), 1);
}
#[async_std::test]
async fn wallet_cache_hit_metrics_get_data_works() {
let metrics = WalletCacheHitMetrics::new();
let fut1 = metrics.inc_cache_hit(TYPE_A);
let fut2 = metrics.inc_cache_miss(TYPE_A);
let fut3 = metrics.inc_cache_miss(TYPE_B);
let fut4 = metrics.inc_not_cached(TYPE_NON_CACHED);
let result = futures::future::join4(fut1, fut2, fut3, fut4).await;
assert_eq!(result, (0, 0, 0, 0));
let data = metrics.get_data().await;
assert_eq!(data.len(), 3);
assert_eq!(data.get(TYPE_A).unwrap().get_hit(), 1);
assert_eq!(data.get(TYPE_A).unwrap().get_miss(), 1);
assert_eq!(data.get(TYPE_A).unwrap().get_not_cached(), 0);
assert_eq!(data.get(TYPE_B).unwrap().get_hit(), 0);
assert_eq!(data.get(TYPE_B).unwrap().get_miss(), 1);
assert_eq!(data.get(TYPE_B).unwrap().get_not_cached(), 0);
assert_eq!(data.get(TYPE_NON_CACHED).unwrap().get_hit(), 0);
assert_eq!(data.get(TYPE_NON_CACHED).unwrap().get_miss(), 0);
assert_eq!(data.get(TYPE_NON_CACHED).unwrap().get_not_cached(), 1);
}
#[async_std::test]
async fn wallet_cache_hit_metrics_get_data_for_type_works() {
let metrics = WalletCacheHitMetrics::new();
let fut1 = metrics.inc_cache_hit(TYPE_A);
let fut2 = metrics.inc_cache_miss(TYPE_A);
let fut3 = metrics.inc_cache_miss(TYPE_B);
let fut4 = metrics.inc_not_cached(TYPE_NON_CACHED);
let result = futures::future::join4(fut1, fut2, fut3, fut4).await;
assert_eq!(result, (0, 0, 0, 0));
let data_a = metrics.get_data_for_type(TYPE_A).await.unwrap();
let data_b = metrics.get_data_for_type(TYPE_B).await.unwrap();
let data_nc = metrics.get_data_for_type(TYPE_NON_CACHED).await.unwrap();
assert_eq!(data_a.get_hit(), 1);
assert_eq!(data_a.get_miss(), 1);
assert_eq!(data_a.get_not_cached(), 0);
assert_eq!(data_b.get_hit(), 0);
assert_eq!(data_b.get_miss(), 1);
assert_eq!(data_b.get_not_cached(), 0);
assert_eq!(data_nc.get_hit(), 0);
assert_eq!(data_nc.get_miss(), 0);
assert_eq!(data_nc.get_not_cached(), 1);
}
#[async_std::test]
async fn wallet_cache_hit_metrics_get_data_works_with_empty() {
let metrics = WalletCacheHitMetrics::new();
assert!(metrics.get_data().await.is_empty());
}
#[async_std::test]
async fn wallet_cache_hit_metrics_get_data_for_type_works_with_empty() {
let metrics = WalletCacheHitMetrics::new();
assert!(metrics.get_data_for_type(TYPE_A).await.is_none());
}
async fn _execute_with_random_delay<F>(future: F) -> usize
where F: Future<Output=usize>
{
async_std::task::sleep(Duration::from_millis(rand::thread_rng().gen_range(0, 1000))).await;
future.await + 0
}
#[async_std::test]
async fn wallet_cache_hit_metrics_work_correctly_under_concurrent_load() {
let metrics = WalletCacheHitMetrics::new();
let mut futures1 = vec![];
let mut futures2 = vec![];
let mut futures3 = vec![];
for _ in 0..1000 {
futures1.push(_execute_with_random_delay(metrics.inc_cache_hit(TYPE_A)));
futures2.push(_execute_with_random_delay(metrics.inc_cache_miss(TYPE_A)));
futures3.push(_execute_with_random_delay(metrics.inc_not_cached(TYPE_NON_CACHED)));
}
let result = futures::future::join3(
futures::future::join_all(futures1),
futures::future::join_all(futures2),
futures::future::join_all(futures3)
).await;
println!("result: {:?}", result);
let type_a_data = metrics.get_data_for_type(TYPE_A).await.unwrap();
assert!(metrics.get_data_for_type(TYPE_B).await.is_none());
let type_b_data = metrics.get_data_for_type(TYPE_NON_CACHED).await.unwrap();
assert_eq!(type_a_data.get_hit(), 1000);
assert_eq!(type_a_data.get_miss(), 1000);
assert_eq!(type_a_data.get_not_cached(), 0);
assert_eq!(type_b_data.get_hit(), 0);
assert_eq!(type_b_data.get_miss(), 0);
assert_eq!(type_b_data.get_not_cached(), 1000);
}
}
| 30.866426
| 99
| 0.555977
|
b75cfe6dbda9e66acdf42f4480e39184ba41ea21
| 449
|
cpp
|
C++
|
code/data-structures/segment_tree_node.cpp
|
viswamy/CompetitiveProgramming
|
497d58adce25cfe4fc327301d977da275ad80201
|
[
"MIT"
] | null | null | null |
code/data-structures/segment_tree_node.cpp
|
viswamy/CompetitiveProgramming
|
497d58adce25cfe4fc327301d977da275ad80201
|
[
"MIT"
] | null | null | null |
code/data-structures/segment_tree_node.cpp
|
viswamy/CompetitiveProgramming
|
497d58adce25cfe4fc327301d977da275ad80201
|
[
"MIT"
] | 1
|
2019-07-28T03:12:29.000Z
|
2019-07-28T03:12:29.000Z
|
#ifndef STNODE
#define STNODE
// Segment-tree node aggregating a range sum with lazy range updates.
// `ll` is a typedef (presumably long long) declared elsewhere — TODO confirm.
struct node {
    int l, r;     // inclusive segment bounds [l, r]
    ll x, lazy;   // x: aggregated sum; lazy: pending per-element delta

    node() {}
    // Node covering [_l, _r] with sum 0 and no pending update.
    node(int _l, int _r) : l(_l), r(_r), x(0), lazy(0) { }
    // Node covering [_l, _r] with an explicit initial sum.
    node(int _l, int _r, ll _x) : node(_l, _r) { x = _x; }
    // Merge of two adjacent children: spans a.l .. b.r, sums add.
    node(node a, node b) : node(a.l, b.r) { x = a.x + b.x; }

    // Point assignment of the stored value.
    void update(ll v) { x = v; }
    // Record a pending range update of v.
    // NOTE(review): this overwrites `lazy` while push() accumulates with
    // `+=` — verify the intended semantics for stacked range updates.
    void range_update(ll v) { lazy = v; }
    // Fold the pending delta into x for this node's (r - l + 1) elements.
    void apply() { x += lazy * (r - l + 1); lazy = 0; }
    // Propagate this node's pending delta down to child u.
    void push(node &u) { u.lazy += lazy; }
};
#endif
| 29.933333
| 59
| 0.492205
|
5436d7bd76c73cec0e99784dbaed9df2fdbfd5bd
| 35,847
|
lua
|
Lua
|
sc/space/hsc_space_blockchain.lua
|
aergoio/horde-smart-contract
|
0704bb3c54a70ce3c85989b14f7bd97aafe28d0c
|
[
"MIT"
] | 1
|
2019-01-04T07:21:53.000Z
|
2019-01-04T07:21:53.000Z
|
sc/space/hsc_space_blockchain.lua
|
aergoio/horde-smart-contract
|
0704bb3c54a70ce3c85989b14f7bd97aafe28d0c
|
[
"MIT"
] | null | null | null |
sc/space/hsc_space_blockchain.lua
|
aergoio/horde-smart-contract
|
0704bb3c54a70ce3c85989b14f7bd97aafe28d0c
|
[
"MIT"
] | 2
|
2019-08-03T11:01:36.000Z
|
2019-10-30T12:49:42.000Z
|
--
-- Horde Smart Contract (HSC): Blockchain space
--
-- Name under which this module registers itself in the manifest.
MODULE_NAME = "__HSC_SPACE_BLOCKCHAIN__"
-- Names of peer HSC modules resolved through the manifest contract.
MODULE_NAME_DB = "__MANIFEST_DB__"
MODULE_NAME_COMPUTING = "__HSC_SPACE_COMPUTING__"
state.var {
    -- constant variables (set once in constructor)
    _MANIFEST_ADDRESS = state.value(),
}
-- Stores the manifest contract address and registers this module with it.
local function __init__(manifestAddress)
    _MANIFEST_ADDRESS:set(manifestAddress)
    local selfAddress = system.getContractID()
    system.print(MODULE_NAME .. "__init__: sc_address=" .. selfAddress)
    contract.call(_MANIFEST_ADDRESS:get(), "__init_module__",
                  MODULE_NAME, selfAddress)
end
-- Proxies a call to another HSC module's function through the manifest
-- contract.
-- @param module_name  target module registered in the manifest
-- @param func_name    function to invoke on that module
-- @param ...          arguments forwarded to the target function
-- @return whatever the target module function returns
local function __callFunction(module_name, func_name, ...)
    -- FIX: debug message previously misspelled as "__callFucntion"
    system.print(MODULE_NAME .. "__callFunction: module_name=" .. module_name
            .. ", func_name=" .. func_name)
    return contract.call(_MANIFEST_ADDRESS:get(),
                         "__call_module_function__", module_name, func_name, ...)
end
--[[ ====================================================================== ]]--
-- Deploy-time constructor: registers this module with the manifest and
-- creates the blockchain-space tables (chains, nodes, chains_ac_list).
function constructor(manifestAddress)
    __init__(manifestAddress)
    system.print(MODULE_NAME
            .. "constructor: manifestAddress=" .. manifestAddress)
    -- create Chain metadata table
    -- * is_public = [1=public, 0=permissioned]
    __callFunction(MODULE_NAME_DB, "createTable",
        [[CREATE TABLE IF NOT EXISTS chains(
            chain_creator TEXT NOT NULL,
            chain_name TEXT,
            chain_id TEXT NOT NULL,
            chain_is_public INTEGER DEFAULT 0,
            chain_block_no INTEGER DEFAULT NULL,
            chain_tx_id TEXT NOT NULL,
            chain_metadata TEXT,
            PRIMARY KEY (chain_id)
  )]])
    -- create Node metadata table
    __callFunction(MODULE_NAME_DB, "createTable",
        [[CREATE TABLE IF NOT EXISTS nodes(
            chain_id TEXT NOT NULL,
            node_creator TEXT NOT NULL,
            node_name TEXT,
            node_id TEXT NOT NULL,
            node_block_no INTEGER DEFAULT NULL,
            node_tx_id TEXT NOT NULL,
            node_metadata TEXT,
            PRIMARY KEY (chain_id, node_id),
            FOREIGN KEY (chain_id) REFERENCES chains(chain_id)
                ON DELETE CASCADE ON UPDATE NO ACTION
  )]])
    -- create Chain access control table
    -- * ac_detail = [TODO: categorize all object and then designate (CREATE/READ/WRITE/DELETE)]
    -- FIX: added the missing comma between the PRIMARY KEY and FOREIGN KEY
    -- table constraints below — without it the CREATE TABLE is a SQL syntax
    -- error.
    __callFunction(MODULE_NAME_DB, "createTable",
        [[CREATE TABLE IF NOT EXISTS chains_ac_list(
            chain_id TEXT NOT NULL,
            account_address TEXT NOT NULL,
            ac_detail TEXT,
            PRIMARY KEY (chain_id, account_address),
            FOREIGN KEY (chain_id) REFERENCES chains(chain_id)
                ON DELETE CASCADE ON UPDATE NO ACTION
  )]])
end
-- true when v is nil or the empty string
local function isEmpty(v)
    return v == nil or #v == 0
end
-- Builds (or returns a cached) DPoS genesis.json table for a chain.
-- Returns the pre-existing genesis_json when it already lists bp_cnt BPs;
-- otherwise assembles one from the chain's BP nodes and coin holders.
-- @param chain_info  result table of getAllNodes() (chain + node_list)
-- @return genesis table, or nil when metadata is missing or there are
--         fewer BP nodes than bp_cnt requires
local function generateDposGenesisJson(chain_info)
    system.print(MODULE_NAME
            .. "generateDposGenesisJson: chain_info=" .. json:encode(chain_info))
    local chain_metadata = chain_info['chain_metadata']
    -- FIX: check metadata for nil BEFORE indexing it; previously
    -- chain_metadata['bp_cnt'] raised a Lua error when metadata was absent.
    if nil == chain_metadata then
        return nil
    end
    local bp_cnt = chain_metadata['bp_cnt']
    local genesis_json = chain_metadata['genesis_json']
    if nil ~= genesis_json then
        -- cached genesis is only trusted when its BP list is complete
        if bp_cnt == table.getn(genesis_json['bps']) then
            return genesis_json
        end
    end
    -- collect the nodes flagged as block producers
    local node_list = chain_info['node_list']
    local bp_list = {}
    for _, node in pairs(node_list) do
        local node_metadata = node['node_metadata']
        if node_metadata['is_bp'] then
            table.insert(bp_list, node)
        end
    end
    local n_bp_list = table.getn(bp_list)
    system.print(MODULE_NAME
            .. "generateDposGenesisJson: n_bp_list=" .. n_bp_list)
    if bp_cnt <= table.getn(bp_list) then
        local genesis = {
            chain_id = {
                magic = chain_info['chain_name'],
                public = chain_info['chain_is_public'],
                mainnet = chain_metadata['is_mainnet'],
                consensus = 'dpos',
            },
            balance = {},
            bps = {}
        }
        -- generate balance list
        for _, b in pairs(chain_metadata['coin_holders']) do
            local address = b['address']
            local amount = b['amount']
            genesis['balance'][address] = amount
        end
        -- generate BP list (first bp_cnt BP nodes, by server id)
        for i = 1, bp_cnt do
            table.insert(genesis['bps'], bp_list[i]['node_metadata']['server_id'])
        end
        return genesis
    else
        -- not enough block producers registered yet
        return nil
    end
end
-- Registers a new Chain record (skips the INSERT when chain_id already
-- exists), creates any Nodes listed in metadata['new_node_list'], then
-- refreshes the chain metadata — for 'dpos' chains this includes
-- (re)generating the genesis block description.
-- @param chain_id   unique chain identifier (required)
-- @param chain_name display name
-- @param is_public  truthy -> chain is listed publicly (stored as 1/0)
-- @param metadata   table or JSON string of chain metadata
-- @return HTTP-style status table: 201 on success, 400/403/404 on failure
function createChain(chain_id, chain_name, is_public, metadata)
    -- metadata may arrive as a JSON string; normalize to a table
    if type(metadata) == 'string' then
        metadata = json:decode(metadata)
    end
    local metadata_raw = json:encode(metadata)
    system.print(MODULE_NAME .. "createChain: chain_id=" .. tostring(chain_id)
            .. ", chain_name=" .. tostring(chain_name)
            .. ", is_public=" .. tostring(is_public)
            .. ", metadata=" .. metadata_raw)
    local creator = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "createChain: creator=" .. creator
            .. ", block_no=" .. block_no)
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "createChain",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = creator,
            chain_id = chain_id
        }
    end
    -- check new nodes
    -- 'new_node_list' is consumed below and stripped from stored metadata
    local new_node_list = metadata['new_node_list']
    if nil == new_node_list then
        new_node_list = {}
    else
        metadata["new_node_list"] = nil
        metadata_raw = json:encode(metadata)
    end
    system.print(MODULE_NAME
            .. "createChain: new_node_list="
            .. json:encode(new_node_list))
    -- to shrink size of metadata (node_list is stored in the nodes table)
    local node_list = metadata['node_list']
    if nil ~= node_list then
        metadata["node_list"] = nil
        metadata_raw = json:encode(metadata)
    end
    -- read created Chain; 404 means it does not exist yet
    local res = getChain(chain_id)
    system.print(MODULE_NAME .. "createChain: res=" .. json:encode(res))
    if "404" == res["__status_code"] then
        -- check whether Chain is public
        local is_public_value = 0
        if is_public then
            is_public_value = 1
        else
            is_public_value = 0
        end
        -- tx id
        local tx_id = system.getTxhash()
        system.print(MODULE_NAME .. "createChain: tx_id=" .. tx_id)
        __callFunction(MODULE_NAME_DB, "insert",
            [[INSERT INTO chains(chain_creator,
                chain_name,
                chain_id,
                chain_is_public,
                chain_block_no,
                chain_tx_id,
                chain_metadata)
                VALUES (?, ?, ?, ?, ?, ?, ?)]],
            creator, chain_name, chain_id, is_public_value,
            block_no, tx_id, metadata_raw)
    end
    -- check and insert the created Node info from Horde
    for _, node in pairs(new_node_list) do
        local node_id = node['node_id']
        local node_name = node['node_name']
        local node_metadata = node['node_metadata']
        local res = createNode(chain_id, node_id, node_name, node_metadata)
        if "201" ~= res["__status_code"] then
            return res
        end
    end
    -- read created all Nodes of Chain (404 = chain exists but has no nodes)
    local res = getAllNodes(chain_id)
    system.print(MODULE_NAME .. "createChain: res=" .. json:encode(res))
    if "200" ~= res["__status_code"] and "404" ~= res["__status_code"] then
        return res
    end
    -- rebuild the chain metadata, now including the freshly-read node list
    local chain_metadata = metadata
    chain_metadata['node_list'] = res['node_list']
    local consensus_alg = chain_metadata['consensus_alg']
    if consensus_alg ~= nil then
        if 'dpos' == consensus_alg then
            chain_metadata['genesis_json'] = generateDposGenesisJson(res)
        elseif 'raft' == consensus_alg then
        elseif 'poa' == consensus_alg then
        elseif 'pow' == consensus_alg then
        end
        local res2 = updateChain(chain_id, chain_name, is_public, chain_metadata)
        if "201" ~= res2["__status_code"] then
            return res2
        end
    end
    -- TODO: save this activity
    -- success to write (201 Created)
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "createChain",
        __status_code = "201",
        __status_sub_code = "",
        chain_creator = res['chain_creator'],
        chain_id = res['chain_id'],
        --[[
        chain_name = res['chain_name'],
        chain_metadata = chain_metadata,
        chain_block_no = res['chain_block_no'],
        chain_tx_id = res['chain_tx_id'],
        chain_is_public = res['chain_is_public'],
        node_list = res['node_list'],
        ]]
    }
end
-- Lists every public Chain together with its Nodes.
-- @return 200 with 'chain_list' when at least one public chain exists,
--         404 otherwise; DB/node lookup errors are propagated unchanged
function getPublicChains()
    system.print(MODULE_NAME .. "getPublicChains")
    local chain_list = {}
    local exist = false
    -- check all public Chains, newest first
    local rows = __callFunction(MODULE_NAME_DB, "select",
        [[SELECT chain_id, chain_name, chain_creator, chain_metadata,
            chain_block_no, chain_tx_id
            FROM chains
            WHERE chain_is_public = 1
            ORDER BY chain_block_no DESC]])
    for _, v in pairs(rows) do
        local chain_id = v[1]
        local node_list = {}
        local res = getAllNodes(chain_id)
        system.print(MODULE_NAME .. "getPublicChains: res=" .. json:encode(res))
        -- 404 (chain has no nodes yet) is tolerated; other errors propagate
        if "200" ~= res["__status_code"] and "404" ~= res["__status_code"] then
            return res
        elseif "200" == res["__status_code"] then
            node_list = res['node_list']
        end
        local pond = {
            chain_id = v[1],
            chain_name = v[2],
            chain_creator = v[3],
            chain_metadata = json:decode(v[4]),
            chain_block_no = v[5],
            chain_tx_id = v[6],
            chain_is_public = true,
            node_list = node_list,
        }
        table.insert(chain_list, pond)
        exist = true
    end
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "getPublicChains: sender=" .. tostring(sender)
            .. ", block_no=" .. tostring(block_no))
    -- if not exist, (404 Not Found)
    if not exist then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "getPublicChains",
            __status_code = "404",
            __status_sub_code = "",
            __err_msg = "cannot find any public chain",
            sender = sender
        }
    end
    -- 200 OK
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "getPublicChains",
        __status_code = "200",
        __status_sub_code = "",
        sender = sender,
        chain_list = chain_list
    }
end
-- Lists all public Chains plus the private Chains owned by `creator`
-- (directly, or via a cluster/machine the creator owns).
-- @param creator  account address; when empty only public chains are listed
-- @return 200 with 'chain_list', or 404 when nothing matches
function getAllChains(creator)
    system.print(MODULE_NAME .. "getAllChains: creator=" .. tostring(creator))
    -- check all public Chains
    local res = getPublicChains()
    system.print(MODULE_NAME .. "getAllChains: res=" .. json:encode(res))
    if isEmpty(creator) then
        return res
    end
    local chain_list
    local exist = false
    if "404" == res["__status_code"] then
        chain_list = {}
    elseif "200" == res["__status_code"] then
        chain_list = res["chain_list"]
        exist = true
    else
        return res
    end
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "getAllChains: sender=" .. tostring(sender)
            .. ", block_no=" .. tostring(block_no))
    -- check all creator's private Chains
    -- NOTE(review): this query joins against `clusters` and `machines`,
    -- which are not created in this module (presumably the computing space's
    -- tables), and the JOIN has no ON clause (cross join) — confirm both
    -- against the MANIFEST_DB schema.
    local rows = __callFunction(MODULE_NAME_DB, "select",
        [[SELECT DISTINCT chain_id, chain_name, chain_metadata,
            chain_block_no, chain_tx_id
            FROM chains
            JOIN (
                SELECT DISTINCT
                    clusters.cluster_id AS c_id,
                    machines.machine_id AS m_id
                    FROM clusters JOIN machines
                    WHERE clusters.cluster_owner=? OR machines.machine_owner=?
            ) AS cm
            WHERE
                chains.chain_is_public = 0
                AND (
                    chains.chain_creator=?
                    OR chains.chain_creator=cm.c_id
                    OR chains.chain_creator=cm.m_id
                )
            ORDER BY chain_block_no DESC]],
        creator, creator, creator)
    for _, v in pairs(rows) do
        local chain_id = v[1]
        local node_list = {}
        -- read all Nodes of Chain; 404 (no nodes) is tolerated
        local res = getAllNodes(chain_id)
        system.print(MODULE_NAME .. "getAllChains: res=" .. json:encode(res))
        if "200" ~= res["__status_code"] and "404" ~= res["__status_code"] then
            return res
        elseif "200" == res["__status_code"] then
            node_list = res['node_list']
        end
        local pond = {
            chain_creator = creator,
            chain_id = chain_id,
            chain_name = v[2],
            chain_metadata = json:decode(v[3]),
            chain_block_no = v[4],
            chain_tx_id = v[5],
            chain_is_public = false,
            node_list = node_list,
        }
        table.insert(chain_list, pond)
        exist = true
    end
    -- if not exist, (404 Not Found)
    if not exist then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "getAllChains",
            __status_code = "404",
            __status_sub_code = "",
            __err_msg = "cannot find any chain",
            sender = sender,
            chain_creator = creator,
        }
    end
    -- 200 OK
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "getAllChains",
        __status_code = "200",
        __status_sub_code = "",
        sender = sender,
        chain_list = chain_list
    }
end
-- Fetches a single Chain row by id (newest row when duplicates exist).
-- @param chain_id  chain identifier (required)
-- @return 200 with the chain fields, 400 on missing argument, 404 when the
--         chain does not exist
function getChain(chain_id)
    system.print(MODULE_NAME .. "getChain: chain_id=" .. tostring(chain_id))
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "getChain: sender=" .. tostring(sender)
            .. ", block_no=" .. tostring(block_no))
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "getChain",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = sender,
            chain_id = chain_id,
        }
    end
    -- check inserted data
    local rows = __callFunction(MODULE_NAME_DB, "select",
        [[SELECT chain_creator, chain_name, chain_is_public, chain_metadata,
            chain_block_no, chain_tx_id
            FROM chains
            WHERE chain_id = ?
            ORDER BY chain_block_no DESC]], chain_id)
    local chain_creator
    local chain_name
    local chain_is_public
    local chain_metadata
    local chain_block_no
    local chain_tx_id
    local exist = false
    -- the loop keeps the values of the LAST row (oldest, given DESC order)
    for _, v in pairs(rows) do
        chain_creator = v[1]
        chain_name = v[2]
        -- is_public is stored as INTEGER 1/0; convert to boolean
        if 1 == v[3] then
            chain_is_public = true
        else
            chain_is_public = false
        end
        chain_metadata = json:decode(v[4])
        chain_block_no = v[5]
        chain_tx_id = v[6]
        exist = true
    end
    --[[ TODO: cannot check the sender of a query contract
    -- check permissions (403.2 Read access forbidden)
    if sender ~= creator then
        if not is_public then
            -- TODO: check sender's reading permission of pond
            return {
                __module = MODULE_NAME,
                __block_no = block_no,
                __func_name = "getChain",
                __status_code = "403",
                __status_sub_code = "2",
                __err_msg = "Sender (" .. sender .. ") doesn't allow to read the chain (" .. chain_id .. ")",
                sender = sender,
                chain_id = chain_id
            }
        end
    end
    ]]--
    -- if not exist, (404 Not Found)
    if not exist then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "getChain",
            __status_code = "404",
            __status_sub_code = "",
            __err_msg = "cannot find the chain",
            sender = sender,
            chain_id = chain_id
        }
    end
    -- 200 OK
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "getChain",
        __status_code = "200",
        __status_sub_code = "",
        sender = sender,
        chain_creator = chain_creator,
        chain_id = chain_id,
        chain_name = chain_name,
        chain_metadata = chain_metadata,
        chain_block_no = chain_block_no,
        chain_tx_id = chain_tx_id,
        chain_is_public = chain_is_public
    }
end
-- Deletes a Chain (its Nodes follow via ON DELETE CASCADE).
-- Only the chain creator may delete.
-- @param chain_id  chain identifier (required)
-- @return 201 on success, 400/403/404 on failure
function deleteChain(chain_id)
    system.print(MODULE_NAME .. "deleteChain: chain_id=" .. tostring(chain_id))
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "deleteChain: sender=" .. sender
            .. ", block_no=" .. block_no)
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "deleteChain",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = sender,
            chain_id = chain_id,
        }
    end
    -- read created Chain (also yields the creator for the permission check)
    local res = getChain(chain_id)
    if "200" ~= res["__status_code"] then
        return res
    end
    system.print(MODULE_NAME .. "deleteChain: res=" .. json:encode(res))
    local chain_creator = res["chain_creator"]
    -- check permissions (403.1 Execute access forbidden)
    if sender ~= chain_creator then
        -- TODO: check sender's delete permission of pond
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "deleteChain",
            __status_code = "403",
            __status_sub_code = "1",
            __err_msg = "sender doesn't allow to delete the chain",
            sender = sender,
            chain_id = chain_id
        }
    end
    -- delete Chain
    __callFunction(MODULE_NAME_DB, "delete",
        "DELETE FROM chains WHERE chain_id = ?", chain_id)
    -- TODO: save this activity
    -- 201 Created
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "deleteChain",
        __status_code = "201",
        __status_sub_code = "",
        sender = sender,
        chain_creator = chain_creator,
        chain_id = chain_id,
        --[[
        chain_name = res['chain_name'],
        chain_metadata = res['chain_metadata'],
        chain_block_no = res['chain_block_no'],
        chain_tx_id = res['chain_tx_id'],
        chain_is_public = res['chain_is_public']
        ]]
    }
end
-- Updates a Chain's mutable fields (name, visibility, metadata).
-- Permitted callers: the chain creator, or the owner of a cluster/machine
-- hosting one of the chain's nodes (matched via metadata['node_list']).
-- Omitted arguments keep their stored values.
-- @param chain_id   chain identifier (required)
-- @param chain_name new display name, or nil/"" to keep
-- @param is_public  new visibility, or nil to keep
-- @param metadata   table or JSON string, or nil to keep
-- @return 201 on success, 400/403/404 on failure
function updateChain(chain_id, chain_name, is_public, metadata)
    if type(metadata) == 'string' then
        metadata = json:decode(metadata)
    end
    local metadata_raw = json:encode(metadata)
    system.print(MODULE_NAME .. "updateChain: chain_id=" .. tostring(chain_id)
            .. ", chain_name=" .. tostring(chain_name)
            .. ", is_public=" .. tostring(is_public)
            .. ", metadata=" .. metadata_raw)
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "updateChain: sender=" .. sender
            .. ", block_no=" .. block_no)
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "updateChain",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = sender,
            chain_id = chain_id,
        }
    end
    -- read created Chain
    local res = getChain(chain_id)
    if "200" ~= res["__status_code"] then
        return res
    end
    system.print(MODULE_NAME .. "updateChain: res=" .. json:encode(res))
    local chain_creator = res["chain_creator"]
    -- FIX: guard nil metadata / node_list / cluster / machine — previously
    -- pairs(nil) or cluster['id'] raised a Lua error instead of falling
    -- through to the 403 permission check.
    local node_list = nil
    if nil ~= metadata then
        node_list = metadata['node_list']
    end
    local found_c_or_m = false
    if nil ~= node_list then
        for _, node in pairs(node_list) do
            local node_metadata = node['node_metadata']
            local cluster
            local machine
            if node_metadata == nil then
                cluster = node['cluster']
                machine = node['machine']
            else
                cluster = node_metadata['cluster']
                machine = node_metadata['machine']
            end
            local cluster_id = nil
            local machine_id = nil
            if nil ~= cluster then
                cluster_id = cluster['id']
            end
            if nil ~= machine then
                machine_id = machine['id']
            end
            if sender == cluster_id or sender == machine_id then
                found_c_or_m = true
                break
            end
        end
    end
    -- check permissions (403.3 Write access forbidden)
    if sender ~= chain_creator and not found_c_or_m then
        -- TODO: check sender's update permission of pond
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "updateChain",
            __status_code = "403",
            __status_sub_code = "3",
            __err_msg = "sender doesn't allow to update the chain info",
            sender = sender,
            chain_id = chain_id
        }
    end
    -- check arguments: keep stored values for omitted fields
    if isEmpty(chain_name) then
        chain_name = res["chain_name"]
    end
    if nil == is_public then
        is_public = res["chain_is_public"]
    end
    local is_public_value = 0
    if is_public then
        is_public_value = 1
    else
        is_public_value = 0
    end
    if nil == metadata or isEmpty(metadata_raw) then
        metadata = res["chain_metadata"]
        metadata_raw = json:encode(metadata)
    end
    -- to shrink size of metadata (node_list lives in the nodes table)
    local stored_node_list = metadata['node_list']
    if nil ~= stored_node_list then
        metadata["node_list"] = nil
        metadata_raw = json:encode(metadata)
    end
    __callFunction(MODULE_NAME_DB, "update",
        [[UPDATE chains SET chain_name = ?, chain_is_public = ?, chain_metadata = ?
            WHERE chain_id = ?]],
        chain_name, is_public_value, metadata_raw, chain_id)
    -- TODO: save this activity
    -- 201 Created
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "updateChain",
        __status_code = "201",
        __status_sub_code = "",
        sender = sender,
        chain_creator = chain_creator,
        chain_id = chain_id,
        --[[
        chain_name = chain_name,
        chain_metadata = metadata,
        chain_block_no = res['chain_block_no'],
        chain_tx_id = res['chain_tx_id'],
        chain_is_public = is_public
        ]]
    }
end
-- Creates (or replaces) a Node record under a Chain.
-- Permitted callers: the chain creator, the owner of the hosting cluster or
-- machine (from metadata), or anyone when the chain is public.
-- @param chain_id   chain identifier (required)
-- @param node_id    node identifier (required)
-- @param node_name  display name
-- @param metadata   table or JSON string of node metadata
-- @return 201 on success, 400/403/404 on failure
function createNode(chain_id, node_id, node_name, metadata)
    if type(metadata) == 'string' then
        metadata = json:decode(metadata)
    end
    local metadata_raw = json:encode(metadata)
    system.print(MODULE_NAME .. "createNode: chain_id=" .. tostring(chain_id)
            .. ", node_id=" .. tostring(node_id)
            .. ", node_name=" .. tostring(node_name)
            .. ", metadata=" .. metadata_raw)
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "createNode: sender=" .. sender
            .. ", block_no=" .. block_no)
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) or isEmpty(node_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "createNode",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = sender,
            chain_id = chain_id,
            node_id = node_id,
        }
    end
    -- read created Chain
    local res = getChain(chain_id)
    system.print(MODULE_NAME .. "createNode: res=" .. json:encode(res))
    if "200" ~= res["__status_code"] then
        return res
    end
    local chain_creator = res["chain_creator"]
    local chain_is_public = res["chain_is_public"]
    -- FIX: guard nil metadata/cluster/machine — previously
    -- metadata["cluster"]["id"] raised a Lua error when either was absent.
    local cluster_id = nil
    local machine_id = nil
    if nil ~= metadata then
        local cluster = metadata["cluster"]
        local machine = metadata["machine"]
        if nil ~= cluster then
            cluster_id = cluster["id"]
        end
        if nil ~= machine then
            machine_id = machine["id"]
        end
    end
    -- check permissions (403.1 Execute access forbidden)
    if sender ~= chain_creator
            and sender ~= cluster_id
            and sender ~= machine_id then
        if not chain_is_public then
            -- TODO: check sender's create Node permission of pond
            return {
                __module = MODULE_NAME,
                __block_no = block_no,
                __func_name = "createNode",
                __status_code = "403",
                __status_sub_code = "1",
                __err_msg = "sender doesn't allow to create a new node for the chain",
                sender = sender,
                chain_id = chain_id
            }
        end
    end
    -- tx id
    local tx_id = system.getTxhash()
    system.print(MODULE_NAME .. "createNode: tx_id=" .. tx_id)
    -- INSERT OR REPLACE: re-creating an existing node overwrites it
    __callFunction(MODULE_NAME_DB, "insert",
        [[INSERT OR REPLACE INTO nodes(chain_id,
            node_creator,
            node_name,
            node_id,
            node_block_no,
            node_tx_id,
            node_metadata)
            VALUES (?, ?, ?, ?, ?, ?, ?)]],
        chain_id, sender, node_name, node_id,
        block_no, tx_id, metadata_raw)
    -- TODO: save this activity
    -- success to write (201 Created)
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "createNode",
        __status_code = "201",
        __status_sub_code = "",
        sender = sender,
        chain_creator = chain_creator,
        chain_id = chain_id,
        node_creator = sender,
        node_id = node_id,
        --[[
        chain_name = res['chain_name'],
        chain_metadata = res['chain_metadata'],
        chain_block_no = res['chain_block_no'],
        chain_tx_id = res['chain_tx_id'],
        chain_is_public = chain_is_public,
        node_list = {
            {
                node_creator = sender,
                node_name = node_name,
                node_id = node_id,
                node_metadata = metadata,
                node_block_no = block_no,
                node_tx_id = tx_id
            }
        }
        ]]
    }
end
-- Lists all Nodes of a Chain, newest first, together with the chain fields.
-- @param chain_id  chain identifier (required)
-- @return 200 with 'node_list', 404 when the chain has no nodes (the chain
--         fields are still included), 400/404 from argument/chain lookup
function getAllNodes(chain_id)
    system.print(MODULE_NAME .. "getAllNodes: chain_id=" .. tostring(chain_id))
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "getAllNodes: sender=" .. tostring(sender)
            .. ", block_no=" .. tostring(block_no))
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "getAllNodes",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = sender,
            chain_id = chain_id,
        }
    end
    -- read created Chain
    local res = getChain(chain_id)
    system.print(MODULE_NAME .. "getAllNodes: res=" .. json:encode(res))
    if "200" ~= res["__status_code"] then
        return res
    end
    local chain_creator = res["chain_creator"]
    local chain_name = res["chain_name"]
    local chain_is_public = res["chain_is_public"]
    local chain_metadata = res["chain_metadata"]
    local chain_block_no = res["chain_block_no"]
    local chain_tx_id = res['chain_tx_id']
    -- check inserted data
    local rows = __callFunction(MODULE_NAME_DB, "select",
        [[SELECT node_creator, node_id, node_name, node_metadata,
            node_block_no, node_tx_id
            FROM nodes
            WHERE chain_id = ? ORDER BY node_block_no DESC]],
        chain_id)
    local node_list = {}
    local exist = false
    for _, v in pairs(rows) do
        local node = {
            node_creator = v[1],
            node_id = v[2],
            node_name = v[3],
            node_metadata = json:decode(v[4]),
            node_block_no = v[5],
            node_tx_id = v[6]
        }
        table.insert(node_list, node)
        exist = true
    end
    -- if not exist, (404 Not Found)
    if not exist then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "getAllNodes",
            __status_code = "404",
            __status_sub_code = "",
            __err_msg = "cannot find any node in the chain",
            sender = sender,
            chain_creator = chain_creator,
            chain_id = chain_id,
            chain_name = chain_name,
            chain_metadata = chain_metadata,
            chain_block_no = chain_block_no,
            chain_tx_id = chain_tx_id,
            chain_is_public = chain_is_public
        }
    end
    -- 200 OK
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "getAllNodes",
        __status_code = "200",
        __status_sub_code = "",
        sender = sender,
        chain_creator = chain_creator,
        chain_id = chain_id,
        chain_name = chain_name,
        chain_metadata = chain_metadata,
        chain_block_no = chain_block_no,
        chain_tx_id = chain_tx_id,
        chain_is_public = chain_is_public,
        node_list = node_list
    }
end
-- Fetches a single Node of a Chain; the result's 'node_list' holds the
-- matching row(s), alongside the chain fields.
-- @param chain_id  chain identifier (required)
-- @param node_id   node identifier (required)
-- @return 200 with 'node_list', 400/404 on failure
function getNode(chain_id, node_id)
    system.print(MODULE_NAME .. "getNode: chain_id=" .. tostring(chain_id)
            .. ", node_id=" .. tostring(node_id))
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "getNode: sender=" .. tostring(sender)
            .. ", block_no=" .. tostring(block_no))
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) or isEmpty(node_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "getNode",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = sender,
            chain_id = chain_id,
            node_id = node_id,
        }
    end
    -- read created Chain
    local res = getChain(chain_id)
    system.print(MODULE_NAME .. "getNode: res=" .. json:encode(res))
    if "200" ~= res["__status_code"] then
        return res
    end
    local chain_creator = res["chain_creator"]
    local chain_name = res["chain_name"]
    local chain_is_public = res["chain_is_public"]
    local chain_metadata = res["chain_metadata"]
    local chain_block_no = res["chain_block_no"]
    local chain_tx_id = res['chain_tx_id']
    -- check inserted data
    local rows = __callFunction(MODULE_NAME_DB, "select",
        [[SELECT node_creator, node_name, node_metadata,
            node_block_no, node_tx_id
            FROM nodes
            WHERE chain_id = ? AND node_id = ?
            ORDER BY node_block_no DESC]],
        chain_id, node_id)
    local node_list = {}
    local exist = false
    for _, v in pairs(rows) do
        local node = {
            node_id = node_id,
            node_creator = v[1],
            node_name = v[2],
            node_metadata = json:decode(v[3]),
            node_block_no = v[4],
            node_tx_id = v[5]
        }
        table.insert(node_list, node)
        exist = true
    end
    -- if not exist, (404 Not Found)
    if not exist then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "getNode",
            __status_code = "404",
            __status_sub_code = "",
            __err_msg = "cannot find the node",
            sender = sender,
            chain_creator = chain_creator,
            chain_id = chain_id,
            chain_name = chain_name,
            chain_metadata = chain_metadata,
            chain_block_no = chain_block_no,
            chain_tx_id = chain_tx_id,
            chain_is_public = chain_is_public,
            node_id = node_id
        }
    end
    -- 200 OK
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "getNode",
        __status_code = "200",
        __status_sub_code = "",
        sender = sender,
        chain_creator = chain_creator,
        chain_id = chain_id,
        chain_name = chain_name,
        chain_metadata = chain_metadata,
        chain_block_no = chain_block_no,
        chain_tx_id = chain_tx_id,
        chain_is_public = chain_is_public,
        node_list = node_list
    }
end
-- Deletes a Node from a Chain. Only the chain creator or the node creator
-- may delete.
-- @param chain_id  chain identifier (required)
-- @param node_id   node identifier (required)
-- @return 201 on success, 400/403/404 on failure
function deleteNode(chain_id, node_id)
    system.print(MODULE_NAME .. "deleteNode: chain_id=" .. tostring(chain_id)
            .. ", node_id=" .. tostring(node_id))
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "deleteNode: sender=" .. sender
            .. ", block_no=" .. block_no)
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) or isEmpty(node_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "deleteNode",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = sender,
            chain_id = chain_id,
            node_id = node_id,
        }
    end
    -- read created Node (also yields the creators for the permission check)
    local res = getNode(chain_id, node_id)
    system.print(MODULE_NAME .. "deleteNode: res=" .. json:encode(res))
    if "200" ~= res["__status_code"] then
        return res
    end
    local chain_creator = res["chain_creator"]
    local node_info = res["node_list"][1]
    local node_creator = node_info["node_creator"]
    -- check permissions (403.1 Execute access forbidden)
    if sender ~= chain_creator then
        if sender ~= node_creator then
            -- TODO: check sender's delete permission of pond
            return {
                __module = MODULE_NAME,
                __block_no = block_no,
                __func_name = "deleteNode",
                __status_code = "403",
                __status_sub_code = "1",
                __err_msg = "sender doesn't allow to delete the node",
                sender = sender,
                chain_id = chain_id,
                node_id = node_id
            }
        end
    end
    -- delete Node
    __callFunction(MODULE_NAME_DB, "delete",
        "DELETE FROM nodes WHERE chain_id = ? AND node_id = ?",
        chain_id, node_id)
    -- TODO: save this activity
    -- 201 Created
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "deleteNode",
        __status_code = "201",
        __status_sub_code = "",
        sender = sender,
        chain_creator = chain_creator,
        chain_id = chain_id,
        node_creator = node_creator,
        node_id = node_id,
        --[[
        chain_name = res["chain_name"],
        chain_metadata = res["chain_metadata"],
        chain_block_no = res['chain_block_no'],
        chain_tx_id = res['chain_tx_id'],
        chain_is_public = res["chain_is_public"],
        node_list = res["node_list"]
        ]]
    }
end
-- Updates a Node's mutable fields (name, metadata). Only the chain creator
-- or the node creator may update. Omitted arguments keep stored values.
-- @param chain_id   chain identifier (required)
-- @param node_id    node identifier (required)
-- @param node_name  new display name, or nil/"" to keep
-- @param metadata   table or JSON string, or nil to keep
-- @return 201 on success, 400/403/404 on failure
function updateNode(chain_id, node_id, node_name, metadata)
    if type(metadata) == 'string' then
        metadata = json:decode(metadata)
    end
    local metadata_raw = json:encode(metadata)
    system.print(MODULE_NAME .. "updateNode: chain_id=" .. tostring(chain_id)
            .. ", node_id=" .. tostring(node_id)
            .. ", node_name=" .. tostring(node_name)
            .. ", metadata=" .. metadata_raw)
    local sender = system.getOrigin()
    local block_no = system.getBlockheight()
    system.print(MODULE_NAME .. "updateNode: sender=" .. sender
            .. ", block_no=" .. block_no)
    -- if not exist critical arguments, (400 Bad Request)
    if isEmpty(chain_id) or isEmpty(node_id) then
        return {
            __module = MODULE_NAME,
            __block_no = block_no,
            __func_name = "updateNode",
            __status_code = "400",
            __status_sub_code = "",
            __err_msg = "bad request: miss critical arguments",
            sender = sender,
            chain_id = chain_id,
            node_id = node_id,
        }
    end
    -- read created Node
    local res = getNode(chain_id, node_id)
    system.print(MODULE_NAME .. "updateNode: res=" .. json:encode(res))
    if "200" ~= res["__status_code"] then
        return res
    end
    local chain_creator = res["chain_creator"]
    local node_info = res["node_list"][1]
    local node_creator = node_info["node_creator"]
    -- check permissions (403.3 Write access forbidden)
    if sender ~= chain_creator then
        if sender ~= node_creator then
            -- TODO: check sender's update permission of pond
            return {
                __module = MODULE_NAME,
                __block_no = block_no,
                __func_name = "updateNode",
                __status_code = "403",
                __status_sub_code = "3",
                __err_msg = "sender doesn't allow to update the node info",
                sender = sender,
                chain_id = chain_id,
                node_id = node_id
            }
        end
    end
    -- check arguments: keep stored values for omitted fields
    if isEmpty(node_name) then
        node_name = node_info["node_name"]
    end
    if nil == metadata or isEmpty(metadata_raw) then
        metadata = node_info["node_metadata"]
        metadata_raw = json:encode(metadata)
    end
    __callFunction(MODULE_NAME_DB, "update",
        [[UPDATE nodes SET node_name = ?, node_metadata = ?
            WHERE chain_id = ? AND node_id = ?]],
        node_name, metadata_raw, chain_id, node_id)
    -- TODO: save this activity
    -- 201 Created
    return {
        __module = MODULE_NAME,
        __block_no = block_no,
        __func_name = "updateNode",
        __status_code = "201",
        __status_sub_code = "",
        sender = sender,
        chain_creator = chain_creator,
        chain_id = chain_id,
        node_creator = node_creator,
        node_id = node_id,
        --[[
        chain_name = res["chain_name"],
        chain_metadata = res["chain_metadata"],
        chain_block_no = res['chain_block_no'],
        chain_tx_id = res['chain_tx_id'],
        chain_is_public = res["chain_is_public"],
        node_list = {
            {
                node_creator = node_creator,
                node_name = node_name,
                node_id = node_id,
                node_metadata = metadata,
                node_block_no = node_info['node_block_no'],
                node_tx_id = node_info['node_tx_id']
            }
        }
        ]]
    }
end
-- exposed functions (the constructor is invoked implicitly at deploy time)
abi.register(createChain, getPublicChains, getAllChains,
        getChain, deleteChain, updateChain,
        createNode, getAllNodes, getNode, deleteNode, updateNode)
| 28.359968
| 101
| 0.63679
|
a32ac86f53c9b9358864fe408cdd722b6c0dba68
| 2,976
|
java
|
Java
|
app/src/androidTest/java/com/arles/swissmanager/test/ui/TourneyActivityTest.java
|
floring/SwissManager
|
0857bde171397f1b08f5e1a755c4adfea9accdd9
|
[
"Apache-2.0"
] | 3
|
2015-09-01T20:21:17.000Z
|
2019-12-28T17:46:38.000Z
|
app/src/androidTest/java/com/arles/swissmanager/test/ui/TourneyActivityTest.java
|
floring/SwissManager
|
0857bde171397f1b08f5e1a755c4adfea9accdd9
|
[
"Apache-2.0"
] | 8
|
2015-07-27T15:18:16.000Z
|
2015-09-01T17:10:47.000Z
|
app/src/androidTest/java/com/arles/swissmanager/test/ui/TourneyActivityTest.java
|
floring/SwissManager
|
0857bde171397f1b08f5e1a755c4adfea9accdd9
|
[
"Apache-2.0"
] | null | null | null |
/*
* Copyright (C) 2015 Arles. All rights reserved.
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arles.swissmanager.test.ui;
import android.support.test.InstrumentationRegistry;
import android.support.test.espresso.intent.rule.IntentsTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.test.suitebuilder.annotation.LargeTest;
import com.arles.swissmanager.R;
import com.arles.swissmanager.ui.activity.TourneyActivity;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.Espresso.openActionBarOverflowOrOptionsMenu;
import static android.support.test.espresso.action.ViewActions.click;
import static android.support.test.espresso.assertion.ViewAssertions.matches;
import static android.support.test.espresso.intent.Intents.intended;
import static android.support.test.espresso.intent.matcher.ComponentNameMatchers.hasShortClassName;
import static android.support.test.espresso.intent.matcher.IntentMatchers.hasComponent;
import static android.support.test.espresso.matcher.ViewMatchers.isDisplayed;
import static android.support.test.espresso.matcher.ViewMatchers.withText;
/**
* Created by Admin on 07.07.2015.
*/
@RunWith(AndroidJUnit4.class)
@LargeTest
public class TourneyActivityTest {

    /** Short class name of the activity expected to be launched by "Start round". */
    private final String SHORT_CLASS_NAME = ".ui.activity.RoundActivity";

    @Rule
    public IntentsTestRule<TourneyActivity> mActivityRule = new IntentsTestRule<>(TourneyActivity.class);

    /**
     * Verifies that the overflow/options menu exposes the expected actions:
     * start round, end round and settings.
     */
    @Test
    public void testActionBarItemClick() {
        openActionBarOverflowOrOptionsMenu(InstrumentationRegistry.getTargetContext());
        assertMenuEntryVisible(R.string.action_start_round);
        assertMenuEntryVisible(R.string.action_end_round);
        assertMenuEntryVisible(R.string.action_settings);
    }

    /**
     * Verifies that choosing "Start round" in the options menu fires an intent
     * for the RoundActivity component.
     */
    @Test
    public void testActionBarOptions_Click_SendIntent() {
        openActionBarOverflowOrOptionsMenu(InstrumentationRegistry.getTargetContext());
        onView(withText(R.string.action_start_round)).perform(click());
        intended(hasComponent(hasShortClassName(SHORT_CLASS_NAME)));
    }

    /** Asserts that a menu entry with the given string resource is currently displayed. */
    private void assertMenuEntryVisible(int stringResId) {
        onView(withText(stringResId)).check(matches(isDisplayed()));
    }
}
| 38.649351
| 105
| 0.775538
|
a167a5d2586237b6d8da2804ca0f1889a4e59e03
| 5,137
|
ps1
|
PowerShell
|
functions/Select-MenuEntryFromList.ps1
|
netcloud/powershell-cli-menu-helpers
|
1328a72dd94c78c8d564c2a34eb37272376061c0
|
[
"MIT"
] | 7
|
2021-08-17T09:02:00.000Z
|
2022-01-27T12:52:59.000Z
|
functions/Select-MenuEntryFromList.ps1
|
netcloud/powershell-cli-menu-helpers
|
1328a72dd94c78c8d564c2a34eb37272376061c0
|
[
"MIT"
] | null | null | null |
functions/Select-MenuEntryFromList.ps1
|
netcloud/powershell-cli-menu-helpers
|
1328a72dd94c78c8d564c2a34eb37272376061c0
|
[
"MIT"
] | null | null | null |
function Select-MenuEntryFromList {
    <#
    .Synopsis
    Offers the possibility to render a menu inside the cli and to select one entry.
    .DESCRIPTION
    Offers the possibility to render a menu inside the cli and to select one entry.
    The function will return the selected menu entry.
    .PARAMETER MenuTitle
    The title of the menu.
    .PARAMETER MenuEntries
    The entries to show inside the menu.
    .PARAMETER BackOption
    Enables the back option.
    .PARAMETER ExitOption
    Enables the exit option.
    .INPUTS
    None. You cannot pipe objects to Select-MenuEntryFromList.
    .OUTPUTS
    System.String
    Select-MenuEntryFromList returns the selected menu item as string in the following format.
    [PSCustomObject]@{
        MenuEntry = $null # System.String
        State = $null # %OK% / %BACK% / %EXIT%
    }
    .EXAMPLE
    # Render a menu with back and exit option
    # Set the menu title and the menu entries
    [System.String[]]$MenuEntries = @('Menu 1', 'Menu 2', 'Menu 3')
    [System.String]$MenuTitle = "=============== Test Menu ==============="
    # Ask user for the menu entry
    [PSCustomObject]$SelectedMenuEntry = Select-MenuEntryFromList -MenuTitle $MenuTitle -MenuEntries $MenuEntries -ExitOption -BackOption
    # Do the action based on the selection
    switch ($SelectedMenuEntry.State)
    {
        '%OK%'
        {
            Write-Host -Object "Selected menu entry: $($SelectedMenuEntry.MenuEntry)"
        }
        '%BACK%'
        {
            Write-Host -Object "Back option selected"
        }
        '%EXIT%'
        {
            Write-Host -Object "Exit option selected"
        }
    }
    .LINK
    https://github.com/netcloud/powershell-cli-menu-helpers
    #>
    # NOTE(review): the attribute declares PSObject[], but the function emits a
    # single PSCustomObject from the End block -- confirm whether the array
    # type is intentional.
    [OutputType([System.Management.Automation.PSObject[]])]
    Param(
        [System.String]$MenuTitle,
        [System.String[]]$MenuEntries,
        [System.Management.Automation.SwitchParameter]$BackOption,
        [System.Management.Automation.SwitchParameter]$ExitOption
    )
    Begin {
        # Prepare the output object; State stays $null until a selection is made.
        [PSCustomObject]$ReturnObj = [PSCustomObject]@{
            MenuEntry = $null
            State = $null # %OK% / %BACK% / %EXIT%
        }
        # Define the possible extra options (only rendered when their switch is set)
        [System.Array]$Options = @(
            [PSCustomObject]@{display = $BackOption; letter = 'b'; description = 'Back' },
            [PSCustomObject]@{display = $ExitOption; letter = 'e'; description = 'Exit' }
        )
    }
    Process {
        # Display the list in a loop until the user makes a valid selection
        [System.Boolean]$RunMenu = $true
        Clear-Host
        Write-Host -Object ''
        Write-Host -Object ''
        while ($RunMenu) {
            # Render the menu title (skipped when empty)
            if ($MenuTitle -ne '') {
                Write-Host -Object $MenuTitle
            }
            # Render the menu entries as a 1-based numbered list
            for ($i = 1; $i -le $MenuEntries.Count; $i ++) {
                Write-Host -Object "$i) $($MenuEntries[$i - 1])"
            }
            Write-Host -Object ''
            Write-Host -Object ''
            # Render the enabled list options (back/exit), letter highlighted
            foreach ($Option in $Options) {
                if ($Option.display) {
                    Write-Host -Object "$($Option.letter)" -NoNewline -ForegroundColor Yellow
                    Write-Host -Object ": $($Option.description)"
                }
            }
            Write-Host -Object ''
            Write-Host -Object ''
            # Get user input
            [System.String]$UserInput = Read-Host -Prompt 'Enter selection'
            # Build the list of valid menu entry id's (1..N, empty when no entries)
            [System.Int32[]]$MenuEntryIDs = @()
            if ($MenuEntries.Count -ne 0) {
                $MenuEntryIDs = (1..$MenuEntries.Count)
            }
            # Process the user input.
            # NOTE: 'return' inside this switch exits the Process block (not just
            # the switch); the End block still runs afterwards and emits $ReturnObj.
            switch ($UserInput) {
                # Numeric input matching a valid entry id -> return that entry
                { ($_ -match '^\d+$') -and ($MenuEntryIDs -contains $_) } {
                    # Add the item to the return object
                    $ReturnObj.State = '%OK%'
                    $ReturnObj.MenuEntry = $MenuEntries[$_ - 1]
                    return
                }
                # 'b' -> back, only honored when the option is activated
                { ($_ -eq 'b' -and $BackOption) } {
                    $ReturnObj.State = '%BACK%'
                    return
                }
                # 'e' -> exit, only honored when the option is activated
                { ($_ -eq 'e' -and $ExitOption) } {
                    $ReturnObj.State = '%EXIT%'
                    return
                }
                # Anything else: warn and re-render the menu
                default {
                    Clear-Host
                    Write-Host -Object "Input '$_' is not valid!" -ForegroundColor Yellow
                    Write-Host -Object ''
                }
            }
        }
    }
    End {
        # Return the (possibly still empty) selection object
        return $ReturnObj
    }
}
| 31.515337
| 141
| 0.503407
|
3f722506c6715b6b381014af46335ccb203485c7
| 6,122
|
php
|
PHP
|
application/controllers/Main.php
|
fahmi0721/ppk_sorong_ok
|
927302fe600d7a762cd242551409eddf5a6bfcfe
|
[
"MIT"
] | null | null | null |
application/controllers/Main.php
|
fahmi0721/ppk_sorong_ok
|
927302fe600d7a762cd242551409eddf5a6bfcfe
|
[
"MIT"
] | null | null | null |
application/controllers/Main.php
|
fahmi0721/ppk_sorong_ok
|
927302fe600d7a762cd242551409eddf5a6bfcfe
|
[
"MIT"
] | null | null | null |
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
class Main extends CI_Controller {

    /**
     * Dashboard controller: shows SPK/vendor overview and computes the
     * per-HPS progress across the procurement pipeline stages.
     */

    protected $myGuzzle;
    protected $token;
    protected $api_url;
    protected $api_url_anggaran;
    protected $api_url_pejabat;
    protected $api_url_penunjukan_peyedia;
    protected $api_url_spk;
    protected $api_url_spks;
    protected $api_url_pphp;
    protected $api_url_baphp;
    protected $api_url_bastb;
    protected $api_url_vendor;
    protected $api_url_spk_tahun_ini;
    protected $api_url_ba_bayar;

    function __construct(){
        parent::__construct();
        $this->load->model('M_Login','ml');
        $this->ml->cek_login();
        $this->load->library('GuzzleMe');
        $this->myGuzzle = new GuzzleMe();
        $this->api_url = base_url().'api/hps';
        $this->api_url_anggaran = base_url().'api/anggaran';
        $this->api_url_pejabat = base_url().'api/pejabat';
        $this->api_url_penunjukan_peyedia = base_url().'api/data_pekerjaan/penunjukan_penuedia';
        $this->api_url_spk = base_url().'api/data_pekerjaan/spk';
        $this->api_url_spks = base_url().'api/spk';
        $this->api_url_pphp = base_url().'api/data_pekerjaan/pphp';
        $this->api_url_baphp = base_url().'api/data_pekerjaan/baphp';
        $this->api_url_bastb = base_url().'api/data_pekerjaan/bastb';
        $this->api_url_ba_bayar = base_url().'api/data_pekerjaan/ba_bayar';
        $this->api_url_spk_tahun_ini = base_url().'api/data_pekerjaan/spk_tahun_ini';
        $this->api_url_vendor = base_url().'api/vendor';
        $this->token = $this->session->userdata('token');
    }

    /**
     * Dashboard landing page: SPK list, vendor count and this year's SPKs.
     */
    public function index()
    {
        $param = array(
            "headers" => array("Authorization" => $this->token)
        );
        $response = json_decode($this->myGuzzle->request_get($this->api_url_spks, $param), true);
        $vendor = json_decode($this->myGuzzle->request_get($this->api_url_vendor, $param), true);
        $tahun_ini_jalan = json_decode($this->myGuzzle->request_get($this->api_url_spk_tahun_ini, $param), true);
        if ($response['status'] === true && $vendor['status'] === true) {
            $data['row'] = 1;
            $data['data'] = $response['data'];
            $data['tot_vendor'] = count($vendor['data']);
            $data['tahun_ini_jalan'] = $tahun_ini_jalan['data'];
            $this->load->view('_template/header');
            $this->load->view('_template/sidebar');
            $this->load->view('main', $data);
            $this->load->view('_template/footer');
        } else {
            // NOTE(review): $data is populated here but the 'main' view is loaded
            // without it (preserved from the original) -- confirm intent.
            $data['row'] = 0;
            $data['data'] = "";
            $data['ctrl'] = $this;
            $this->load->view('_template/header');
            $this->load->view('_template/sidebar');
            $this->load->view('main');
            $this->load->view('_template/footer');
        }
    }

    /**
     * Compute the progress percentage of a procurement (HPS) record.
     *
     * Fetches the HPS by $Id, then queries each downstream pipeline stage
     * (penunjukan penyedia, SPK, PPHP, BASTB, BAPHP, BA bayar) by the HPS
     * document number and counts the stages that have at least one record.
     *
     * @param mixed $Id HPS record id
     * @return float progress percentage (0..100)
     */
    public function progres($Id)
    {
        // The HPS record itself counts as the first completed stage.
        $Tot = 1;

        // Fetch the HPS record by id.
        $param = array(
            "query" => array("Id" => $Id),
            "headers" => array("Authorization" => $this->token)
        );
        $response = json_decode($this->myGuzzle->request_get($this->api_url, $param), true);
        $hps = $response['data'][0];
        $noSuratHps = $hps['NoSurat'];

        // Pipeline stages queried by the HPS document number, in original order.
        $stages = array(
            $this->api_url_penunjukan_peyedia,
            $this->api_url_spk,
            $this->api_url_pphp,
            $this->api_url_bastb,
            $this->api_url_baphp,
            $this->api_url_ba_bayar,
        );
        foreach ($stages as $url) {
            $stageResponse = $this->fetch_stage($url, $noSuratHps);
            if ($stageResponse['row'] > 0) {
                $Tot = $Tot + 1;
            }
        }

        // NOTE(review): divisor 8 implies 8 expected stages, but only 7 are
        // counted here (HPS + 6 stages, max Tot = 7) -- confirm intended scale.
        return ($Tot / 8) * 100;
    }

    /**
     * POST a stage endpoint with the HPS document number and decode the reply.
     *
     * @param string $url        stage API endpoint
     * @param string $noSuratHps HPS document number
     * @return array decoded JSON response (expects a 'row' count key)
     */
    private function fetch_stage($url, $noSuratHps)
    {
        $param = array(
            "form_params" => array("NoSuratHps" => $noSuratHps),
            "headers" => array("Authorization" => $this->token)
        );
        return json_decode($this->myGuzzle->request_post($url, $param), true);
    }
}
| 31.394872
| 142
| 0.647664
|
e27f3bae35662ddcab3f2b26fec016ee1e7e0189
| 3,088
|
py
|
Python
|
tests/test_settings.py
|
pedrobcst/Xerus
|
09df088e0207176df0d20715e1c9778d09d28250
|
[
"MIT"
] | 18
|
2021-12-10T03:05:49.000Z
|
2022-03-25T15:48:35.000Z
|
tests/test_settings.py
|
pedrobcst/Xerus
|
09df088e0207176df0d20715e1c9778d09d28250
|
[
"MIT"
] | 14
|
2022-02-24T11:09:26.000Z
|
2022-03-30T07:42:17.000Z
|
tests/test_settings.py
|
pedrobcst/Xerus
|
09df088e0207176df0d20715e1c9778d09d28250
|
[
"MIT"
] | 1
|
2022-02-25T16:26:54.000Z
|
2022-02-25T16:26:54.000Z
|
# Test for configuration & GSAS Binaries
import sys, os
from pathlib import Path
from . import INSTALL_PATH
from Xerus.settings.settings import TEST_XRD, INSTR_PARAMS, GSAS2_BIN
sys.path.append(GSAS2_BIN)
import GSASIIscriptable as G2sc
from Xerus.db.localdb import LocalDB
from Xerus.settings.settings import MP_API_KEY
from pymatgen.ext.matproj import MPRestError
from pymatgen import MPRester
MP_API_KEY_WRONG = "bQEFQ!"
import pytest
@pytest.mark.filterwarnings('ignore::RuntimeWarning')
@pytest.mark.filterwarnings('ignore::UserWarning')
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@pytest.mark.user
def test_dbconn():
    """
    Tests connection to the local CIF database.

    The purpose is to check that the local database is correctly configured
    and reachable (``server_info()`` succeeds on the client).
    """
    def connect():
        client = LocalDB().client
        try:
            # server_info() forces a round-trip to the server.
            client.server_info()
            return True
        # A bare ``except:`` would also swallow KeyboardInterrupt/SystemExit;
        # any ordinary failure here just means "not reachable".
        except Exception:
            return False

    assert connect(), "CIF Database connection failed"
@pytest.mark.filterwarnings('ignore::RuntimeWarning')
@pytest.mark.filterwarnings('ignore::UserWarning')
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@pytest.mark.user
def test_mpconn():
    """
    Tests the Materials Project connection.

    Purpose is mainly to verify that the API key is correctly set:
    a bad key surfaces as MPRestError on the query.
    """
    connected = True
    try:
        MPRester(MP_API_KEY).get_data("HoB2")
    except MPRestError:
        connected = False
    assert connected, "Failed to connect to MP Project"
@pytest.mark.filterwarnings('ignore::RuntimeWarning')
@pytest.mark.filterwarnings('ignore::UserWarning')
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
def test_gsas2settings():
    """
    Smoke-tests the GSAS-II configuration by running a small refinement.

    Creates a throwaway .gpx project from the bundled test pattern and CIF,
    runs three refinement passes and checks that the resulting Rwp is
    reasonable (<= 50%). Any setup/refinement error fails the test with the
    underlying exception message instead of a generic failure.
    """
    filename = "test.gpx"
    rwp = None
    try:
        # Create a gpx project and attach the test histogram + phase.
        gpx = G2sc.G2Project(filename=filename)
        histogram = gpx.add_powder_histogram(datafile=TEST_XRD, iparams=INSTR_PARAMS)
        gpx.add_phase(phasefile=INSTALL_PATH / "cif/HoB2_MP_mp-2267.cif",
                      phasename="HoB2 test", histograms=[histogram])
        # Three refinement passes: background/cell/zero/scale, then sample
        # displacement parameters, then instrument profile terms.
        refdict0 = {"set": {"Background": {"no. coeffs": 6, "refine": True},
                            "Cell": True,
                            "Instrument Parameters": ["Zero"],
                            "Scale": True}}
        refdict4a = {"set": {'Sample Parameters': ['Shift', 'DisplaceX', 'DisplaceY', 'Scale']}}
        refdict5c = {"set": {'Instrument Parameters': ['X', 'Y']}}
        dictList = [refdict0, refdict4a, refdict5c]
        gpx.do_refinements(dictList)
        # Extract the weighted profile R-factor and clean up the scratch files.
        rwp = gpx.histograms()[0].get_wR()
        os.remove(filename)
        os.remove(filename.replace(".gpx", ".lst"))
        os.remove(filename.replace(".gpx", ".bak0.gpx"))
    # The original bare ``except: assert False`` hid the actual error; keep
    # the failure but surface the exception message for debugging.
    except Exception as exc:
        assert False, "GSAS II test failed: {}".format(exc)
    assert rwp <= 50, "Large rwp: {}".format(rwp)
| 31.191919
| 120
| 0.643135
|
dbc38841a4725d2314f00b0b6b6352d1973f6874
| 2,423
|
php
|
PHP
|
src/Schema/Normalizer/EventNormalizer.php
|
Becklyn/schema-org
|
cf525c19e44c5b1dda4b7475a47788ee1d4191e1
|
[
"BSD-3-Clause"
] | 4
|
2020-08-05T21:29:33.000Z
|
2022-01-16T22:59:49.000Z
|
src/Schema/Normalizer/EventNormalizer.php
|
Becklyn/schema-org
|
cf525c19e44c5b1dda4b7475a47788ee1d4191e1
|
[
"BSD-3-Clause"
] | null | null | null |
src/Schema/Normalizer/EventNormalizer.php
|
Becklyn/schema-org
|
cf525c19e44c5b1dda4b7475a47788ee1d4191e1
|
[
"BSD-3-Clause"
] | null | null | null |
<?php declare(strict_types=1);
namespace Becklyn\SchemaOrg\Schema\Normalizer;
use Becklyn\SchemaOrg\Data\Event;
use Becklyn\SchemaOrg\Data\SchemaOrgDataInterface;
use Becklyn\SchemaOrg\Schema\MetaDataNormalizerRegistry;
class EventNormalizer extends ThingNormalizer
{
public const SCHEMA_TYPE = "Event";
/**
* @inheritDoc
*/
public function getSchemaType () : string
{
return self::SCHEMA_TYPE;
}
/**
* @inheritDoc
*/
public function getEntityClass () : string
{
return Event::class;
}
/**
* @inheritDoc
*/
public function normalize (MetaDataNormalizerRegistry $registry, SchemaOrgDataInterface $entity, ?string $usage = null, array $context = [], bool $isNested = false) : ?array
{
\assert($entity instanceof Event);
$thingNormalized = parent::normalize($registry, $entity, $usage, $context, true);
$normalized = [
"about" => $this->normalizeDataOrPrimitive($registry, $entity->getAbout(), $usage, $context, true),
"doorTime" => $this->normalizeDateTime($entity->getDoorTime()),
"duration" => $this->normalizeDataOrPrimitive($registry, $entity->getDuration(), $usage, $context, true),
"endDate" => $this->normalizeDateTime($entity->getEndDate()),
"eventStatus" => $this->normalizeDataOrPrimitive($registry, $entity->getEventStatus(), $usage, $context, true),
"inLanguage" => $entity->getInLanguage(),
"isAccessibleForFree" => $entity->getIsAccessibleForFree(),
"location" => $this->normalizeDataOrPrimitive($registry, $entity->getLocation(), $usage, $context, true),
"maximumAttendeeCapacity" => $entity->getMaximumAttendeeCapacity(),
"maximumPhysicalAttendeeCapacity" => $entity->getMaximumPhysicalAttendeeCapacity(),
"maximumVirtualAttendeeCapacity" => $entity->getMaximumVirtualAttendeeCapacity(),
"organizer" => $this->normalizeDataOrPrimitive($registry, $entity->getOrganizer(), $usage, $context, true),
"previousStartDate" => $this->normalizeDateTime($entity->getPreviousStartDate()),
"startDate" => $this->normalizeDateTime($entity->getStartDate()),
];
return $this->createMetaData($registry, $this->getSchemaType(), \array_replace($thingNormalized, $normalized), $usage, $context, $isNested);
}
}
| 40.383333
| 177
| 0.655799
|
694b396602a0cb388d085a6b01f9c7ef5d5f7fad
| 536
|
rb
|
Ruby
|
app/models/spree/order_decorator.rb
|
Top-Form-Investment/cash_on_delivery
|
69148a99d4bb9e669cda97b30e4d25385553f4fc
|
[
"BSD-3-Clause"
] | null | null | null |
app/models/spree/order_decorator.rb
|
Top-Form-Investment/cash_on_delivery
|
69148a99d4bb9e669cda97b30e4d25385553f4fc
|
[
"BSD-3-Clause"
] | null | null | null |
app/models/spree/order_decorator.rb
|
Top-Form-Investment/cash_on_delivery
|
69148a99d4bb9e669cda97b30e4d25385553f4fc
|
[
"BSD-3-Clause"
] | null | null | null |
Spree::Order.class_eval do
  # When a completed order with shipments is saved, force the COD-specific
  # shipment/payment states before persisting.
  before_save :change_sp_state, if: ->(order) { order.state == 'complete' && order.shipments.present? }

  # Payment methods offered on the storefront: front-end plus "both" scopes,
  # deduplicated and memoized per order instance.
  def available_payment_methods
    @available_payment_methods ||=
      (Spree::PaymentMethod.available(:front_end) + Spree::PaymentMethod.available(:both)).uniq
  end

  # Mark shipments as ready and flag the payment state as cash-on-delivery.
  def change_sp_state
    self.shipment_state = 'ready'
    self.payment_state = 'cod'
  end
end
| 35.733333
| 124
| 0.735075
|
8eb1fa4384d53e81c32941d87c37152f44d4470b
| 542
|
js
|
JavaScript
|
app/render/notification.js
|
zx2c4-forks/irccloud-desktop
|
fbb21b8f485d07194759c0f83ca9c1b4636edecc
|
[
"Apache-2.0"
] | null | null | null |
app/render/notification.js
|
zx2c4-forks/irccloud-desktop
|
fbb21b8f485d07194759c0f83ca9c1b4636edecc
|
[
"Apache-2.0"
] | null | null | null |
app/render/notification.js
|
zx2c4-forks/irccloud-desktop
|
fbb21b8f485d07194759c0f83ca9c1b4636edecc
|
[
"Apache-2.0"
] | null | null | null |
var remote = require('electron').remote;
// Inject a one-shot listener into the page: when the in-page SESSION object
// fires "notificationClick", activate the app window and re-arm the listener.
function listenNotification() {
  const script =
    'new Promise((resolve, reject) => { if (SESSION) { SESSION.once("notificationClick", function () { resolve(); }); } });';
  remote
    .getCurrentWindow()
    .webContents.executeJavaScript(script)
    .then(() => {
      remote.app.emit('activate');
      listenNotification();
    });
}
// Begin listening for notification clicks once the DOM is ready.
function setupNotificationHandler() {
  document.addEventListener("DOMContentLoaded", (event) => {
    listenNotification();
  });
}
module.exports = setupNotificationHandler;
| 30.111111
| 125
| 0.671587
|
aff5d1fb8c3f2b1ced5a7ee6e563dd5a18e6fd72
| 2,650
|
py
|
Python
|
src/systems/crafting_system.py
|
CGirdlestone/TextAdventure
|
6127d6c98ce3ad4c6e4d4fd0262310e74f2e4fad
|
[
"MIT"
] | null | null | null |
src/systems/crafting_system.py
|
CGirdlestone/TextAdventure
|
6127d6c98ce3ad4c6e4d4fd0262310e74f2e4fad
|
[
"MIT"
] | null | null | null |
src/systems/crafting_system.py
|
CGirdlestone/TextAdventure
|
6127d6c98ce3ad4c6e4d4fd0262310e74f2e4fad
|
[
"MIT"
] | null | null | null |
"""crafting_system.py
This class represents a simple crafting system. All recipe-related data is
stored externally in a JSON file.
"""
class CraftingSystem:
    """Simple crafting system.

    Recipe data (``recipe_components``, ``recipe_outputs``, ``recipe_names``,
    ``action_word``) is injected through ``kwargs``; per the module docstring
    it is stored externally in a JSON file.
    """

    def __init__(self, event_queue, **kwargs):
        # Register with the event queue so crafting events are routed here.
        self.event_queue = event_queue
        self.event_queue.register_system(self)
        self.__dict__.update(**kwargs)

    def validate_components(self, container, recipe_number):
        """Check whether the player has the necessary recipe components.

        NOTE(review): for multi-component recipes the container must hold
        *exactly* the required count of each component -- a surplus also
        fails.  This mirrors the original behaviour; confirm it is intended
        before relaxing the check to ``>=``.
        """
        components = self.recipe_components[recipe_number]
        if isinstance(components, int):
            # Single-component recipe: just a membership check.
            return components in container
        # Early-exit on the first count mismatch (the original kept scanning).
        for component in set(components):
            if container.count(component) != components.count(component):
                return False
        return True

    def remove_components(self, container, recipe_number):
        """Remove the recipe components from the player's inventory."""
        components = self.recipe_components[recipe_number]
        if isinstance(components, int):
            container.remove(components)
        else:
            for item_id in components:
                container.remove(item_id)

    def add_output(self, container, recipe_number):
        """Add the recipe output to the player's inventory and announce it."""
        container.append(self.recipe_outputs[recipe_number])
        name = self.recipe_names[recipe_number]
        # Choose the indefinite article from the item name's first letter
        # (the original duplicated the whole message branch for "a"/"an").
        article = "an" if name[0] in ["a", "e", "i", "o", "u"] else "a"
        msg = "Using your knowledge, you create {} {}.".format(article, name)
        self.event_queue.add_event({"message": msg})

    def craft(self, event):
        """Craft an item from a ``{action: (recipe_letter, player)}`` event."""
        recipe, player = list(event.values())[0]
        # Recipe menus are labelled 'a', 'b', ... -> map the letter to an index.
        recipe_number = ord(recipe) - 97
        if self.validate_components(player.container, recipe_number):
            self.remove_components(player.container, recipe_number)
            self.add_output(player.container, recipe_number)
        else:
            msg = "You don't have the required components!"
            self.event_queue.add_event({"message": msg})

    def receive(self, event):
        """Handle a received event if its key matches our action word."""
        if list(event.keys())[0] == self.action_word:
            self.craft(event)
| 36.805556
| 76
| 0.637736
|
da208fcff4f9c76fb76d6f51974ae30f5b2be18f
| 3,417
|
php
|
PHP
|
src/Html.php
|
bm2ilabs/favicon
|
ee42aef84568f280154dd30c24a2a3c8b72380e1
|
[
"MIT"
] | 9
|
2015-08-29T19:16:59.000Z
|
2020-07-06T20:46:14.000Z
|
src/Html.php
|
bm2ilabs/favicon
|
ee42aef84568f280154dd30c24a2a3c8b72380e1
|
[
"MIT"
] | 2
|
2015-06-24T14:47:55.000Z
|
2017-07-16T14:11:45.000Z
|
src/Html.php
|
bm2ilabs/favicon
|
ee42aef84568f280154dd30c24a2a3c8b72380e1
|
[
"MIT"
] | 4
|
2015-11-04T02:34:10.000Z
|
2019-01-08T14:49:08.000Z
|
<?php
namespace HieuLe\Favicon;
/**
* Output HTML tags based on a config
*
* @author Hieu Le <letrunghieu.cse09@gmail.com>
*/
class Html
{

    /**
     * Write meta and link tags for favicon support.
     *
     * @param bool   $noOldApple        exclude old apple touch links
     * @param bool   $noAndroid         exclude the android manifest.json link
     * @param bool   $noMs              exclude msapplication meta tags
     * @param string $tileColor         the color of the Windows tile
     * @param string $browserConfigFile the path to browserconfig.xml, or empty to disable it
     * @param string $appName           the name of the application when pinned
     *
     * @return string newline-joined HTML tags
     */
    public static function output($noOldApple = false, $noAndroid = false, $noMs = false, $tileColor = '#FFFFFF', $browserConfigFile = '', $appName = '')
    {
        $tags = array();

        if (!$noMs)
        {
            // Empty path disables the browserconfig lookup explicitly.
            $content = $browserConfigFile ? '/' . $browserConfigFile : 'none';
            $tags[] = '<meta name="msapplication-config" content="' . $content . '" />';
        }

        if (!$noOldApple)
        {
            // Legacy iOS touch icon sizes.
            foreach (array('57x57', '60x60', '72x72', '114x114') as $size)
            {
                $tags[] = '<link rel="apple-touch-icon" sizes="' . $size . '" href="/apple-touch-icon-' . $size . '.png" />';
            }
        }

        // Current iOS touch icon sizes (always emitted).
        foreach (array('76x76', '120x120', '152x152', '180x180') as $size)
        {
            $tags[] = '<link rel="apple-touch-icon" sizes="' . $size . '" href="/apple-touch-icon-' . $size . '.png" />';
        }

        $tags[] = '<link rel="icon" type="image/png" href="/favicon-32x32.png" sizes="32x32" />';
        $tags[] = '<link rel="icon" type="image/png" href="/android-chrome-192x192.png" sizes="192x192" />';
        $tags[] = '<link rel="icon" type="image/png" href="/favicon-16x16.png" sizes="16x16" />';

        if (!$noAndroid)
        {
            $tags[] = '<link rel="manifest" href="/manifest.json" />';
        }

        if (!$noMs)
        {
            if ($appName)
            {
                $tags[] = '<meta name="application-name" content="' . $appName . '" />';
            }
            $tags[] = '<meta name="msapplication-TileColor" content="' . $tileColor . '" />';
            $tags[] = '<meta name="msapplication-TileImage" content="/mstile-144x144.png" />';
            // Windows tile logo variants.
            foreach (array('square70x70' => '70x70', 'square150x150' => '150x150', 'wide310x150' => '310x150', 'square310x310' => '310x310') as $logo => $size)
            {
                $tags[] = '<meta name="msapplication-' . $logo . 'logo" content="/mstile-' . $size . '.png" />';
            }
        }

        return implode("\n", $tags);
    }

}
| 45.56
| 154
| 0.551653
|
bfdfd694ba9595535a1c49a1dcd805389c953b32
| 2,562
|
dart
|
Dart
|
chain_app/lib/pages/user/daily_task_page.dart
|
Crabsclaws/FlutterExamples
|
cb3072add88bb0c206b8188acd8eebeebf39e70c
|
[
"MIT"
] | null | null | null |
chain_app/lib/pages/user/daily_task_page.dart
|
Crabsclaws/FlutterExamples
|
cb3072add88bb0c206b8188acd8eebeebf39e70c
|
[
"MIT"
] | null | null | null |
chain_app/lib/pages/user/daily_task_page.dart
|
Crabsclaws/FlutterExamples
|
cb3072add88bb0c206b8188acd8eebeebf39e70c
|
[
"MIT"
] | null | null | null |
import 'package:chain_app/models/task_record_list.dart';
import 'package:chain_app/style/w_style.dart';
import 'package:chain_app/tools/routes.dart';
import 'package:chain_app/tools/s_manager.dart';
import 'package:chain_app/tools/services/news_services.dart';
import 'package:flutter/material.dart';
/// Page showing today's task, its reward and its completion state.
class DailyTaskPage extends StatefulWidget {
  @override
  _DailyTaskPageState createState() => _DailyTaskPageState();
}
class _DailyTaskPageState extends State<DailyTaskPage> {
  // Whether today's task has already been completed.
  bool completed = false;
  // Today's task record (reward amount etc.); empty placeholder until loaded.
  TaskRecord record = TaskRecord('', '', 0);

  @override
  void initState() {
    super.initState();
    _request();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: _buildAppBar(context),
      body: _buildTaskTile(context),
    );
  }

  // App bar ("今日任务" = today's task) with a shortcut to the record history.
  AppBar _buildAppBar(BuildContext context) {
    return AppBar(
      title: Text('今日任务'),
      actions: <Widget>[
        FlatButton(
          child: Text(
            '任务记录',
            style: TextStyle(color: Colors.white),
          ),
          onPressed: () {
            Navigator.of(context).pushNamed(Routes.task_record);
          },
        ),
      ],
    );
  }

  // Tile showing the obtainable reward, the completion state and an action
  // button that either reports "already done" or navigates back to the tab.
  ListTile _buildTaskTile(BuildContext context) {
    return ListTile(
      title: RichText(
        text: TextSpan(
          text: '今日任务 可获取',
          style: TextStyle(color: Colors.black87),
          children: <TextSpan>[
            TextSpan(
              text: record.earn.toStringAsFixed(3),
              style: TextStyle(color: Colors.green),
            ),
          ],
        ),
      ),
      subtitle: Text(completed ? '状态: 已完成' : "状态:未完成"),
      trailing: FlatButton(
        child: completed
            ? Text('已完成')
            : Text(
                '去完成',
                style: TextStyle(color: Colors.white),
              ),
        color: completed ? Colors.grey : Colors.orange,
        shape: WStyle.roundedBorder20,
        onPressed: () {
          if (completed) {
            SManager.showMessage('当日任务已完成');
            return;
          }
          Navigator.of(context).popUntil(ModalRoute.withName(Routes.tab));
        },
      ),
    );
  }

  // Query today's task status.  Original comment (translated): if there is a
  // record for today the task counts as completed; otherwise it is still open.
  // Here HTTP 200 maps to "not completed" and 202 to "completed" -- presumably
  // 202 signals an existing record; confirm against the API.
  _request() async {
    return NewsServices.taskCheck().then((value) {
      this.record = TaskRecord.fromJson(value.data);
      if (value.statusCode == 200) {
        completed = false;
      } else if (value.statusCode == 202) {
        completed = true;
      }
      setState(() {});
    }).catchError((error) {
      SManager.dioErrorHandle(context, error);
    });
  }
}
| 26.968421
| 76
| 0.544106
|
aa05a9313b301f532381ea2a4d0b5591355de16f
| 280
|
rb
|
Ruby
|
spec/spec_helper.rb
|
Unpakt/lobot
|
33d31986a2a7c1a02c34173a3bcfd1e6c4071d15
|
[
"MIT"
] | null | null | null |
spec/spec_helper.rb
|
Unpakt/lobot
|
33d31986a2a7c1a02c34173a3bcfd1e6c4071d15
|
[
"MIT"
] | null | null | null |
spec/spec_helper.rb
|
Unpakt/lobot
|
33d31986a2a7c1a02c34173a3bcfd1e6c4071d15
|
[
"MIT"
] | null | null | null |
require "rubygems"
require 'rails/all'
require 'rails/generators'
require File.expand_path('../lib/lobot', File.dirname(__FILE__))
require 'generator_spec/test_case'
require 'generator_spec'
require File.expand_path('../../lib/generators/lobot/install_generator.rb', __FILE__)
| 25.454545
| 85
| 0.782143
|
387ba8c4c6a8f43d2d14adfbc5ee823da72ed82d
| 4,519
|
php
|
PHP
|
application/views/template/footer.php
|
achmunib/utsg.co.id-3
|
ae7ec0680527297f056975c2a93c1248478955bf
|
[
"MIT"
] | null | null | null |
application/views/template/footer.php
|
achmunib/utsg.co.id-3
|
ae7ec0680527297f056975c2a93c1248478955bf
|
[
"MIT"
] | null | null | null |
application/views/template/footer.php
|
achmunib/utsg.co.id-3
|
ae7ec0680527297f056975c2a93c1248478955bf
|
[
"MIT"
] | null | null | null |
<!-- ======= Footer ======= -->
<footer id="footer" >
<div class="footer-top">
<div class="container">
<div class="row">
<div class="col-lg-3 col-md-6 footer-contact">
<h3 class="logo">
<a href="<?= base_url()?>">
<img src="<?= base_url()?>asset/img/utsg.png" alt="" sizes="" srcset="">
</a>
</h3>
<p>
Desa Sumberarum, Kerek<br>
Tuban – 62356<br>
Jawa Timur, Indonesia<br><br>
<strong>Phone:</strong> (0356) 711 800<br>
<strong>Email:</strong> office@utsg.co.id<br>
</p>
</div>
<div class="col-lg-3 col-md-6 footer-links">
<h4>Link Bantu</h4>
<ul class="col-6 col-md-4">
<li><i class="bx bx-chevron-right"></i> <a href="#hero">Home</a></li>
<li><i class="bx bx-chevron-right"></i> <a href="#about">About</a></li>
<li><i class="bx bx-chevron-right"></i> <a href="#services">Services</a></li>
<li><i class="bx bx-chevron-right"></i> <a href="#team">Our Teams</a></li>
<li><i class="bx bx-chevron-right"></i> <a href="#portfolio">Program</a></li>
<li><i class="bx bx-chevron-right"></i> <a href="#news">News</a></li>
<li><i class="bx bx-chevron-right"></i> <a href="#contact">Contact Us</a></li>
</ul>
</div>
<div class="col-lg-3 col-md-6 footer-links">
<h4>Pelayanan Special dari kami</h4>
<ul>
<li><i class="bx bx-chevron-right"></i> <a href="#">Jasa Pertambangan</a></li>
<li><i class="bx bx-chevron-right"></i> <a href="#">Jasa Konstruksi</a></li>
<li><i class="bx bx-chevron-right"></i> <a href="#">Rental Peralatan</a></li>
</ul>
</div>
<div class="col-lg-3 col-md-6 footer-links">
<h4>Jejaring Sosial Kami</h4>
<p>Banyak informasi terbaru yang dapat diketahui tentang kami.</p>
<div class="social-links mt-3">
<a href="#" class="youtube"><i class="bx bxl-youtube"></i></a>
<a href="#" class="instagram"><i class="bx bxl-instagram"></i></a>
<a href="#" class="linkedin"><i class="bx bxl-linkedin"></i></a>
</div>
</div>
</div>
</div>
</div>
<div class="container py-4">
<div class="copyright">
© Copyright <strong><span>UTSG</span></strong>. Corporate Strategic & Technology Department
</div>
<div class="credits">
<!-- All the links in the footer should remain intact. -->
<!-- You can delete the links only if you purchased the pro version. -->
<!-- Licensing information: https://bootstrapmade.com/license/ -->
<!-- Purchase the pro version with working PHP/AJAX contact form: https://bootstrapmade.com/bizland-bootstrap-business-template/ -->
<!-- Designed by <a href="https://bootstrapmade.com/">BootstrapMade</a> -->
</div>
</div>
</footer><!-- End Footer -->
<div id="preloader"></div>
<a href="#" class="back-to-top d-flex align-items-center justify-content-center"><i class="bi bi-arrow-up-short"></i></a>
<!-- Vendor JS Files -->
<script src="<?=base_url();?>vendor/jquery/dist/jquery.min.js"></script>
<script src="<?=base_url();?>vendor/sweetalert/dist/sweetalert.min.js"></script>
<script src="<?=base_url();?>vendor/purecounter/purecounter.js"></script>
<script src="<?=base_url();?>vendor/aos/aos.js"></script>
<script src="<?=base_url();?>vendor/bootstrap/js/bootstrap.bundle.min.js"></script>
<script src="<?=base_url();?>vendor/glightbox/js/glightbox.min.js"></script>
<script src="<?=base_url();?>vendor/isotope-layout/isotope.pkgd.min.js"></script>
<script src="<?=base_url();?>vendor/swiper/swiper-bundle.min.js"></script>
<script src="<?=base_url();?>vendor/waypoints/noframework.waypoints.js"></script>
<script src="<?=base_url();?>vendor/php-email-form/validate.js"></script>
<script src="https://pagination.js.org/dist/2.1.5/pagination.js"></script>
<script src="https://pagination.js.org/dist/2.1.5/pagination.min.js"></script>
<!-- Template Main JS File -->
<script src="<?=base_url();?>asset/js/main.js"></script>
<script src="<?=base_url();?>asset/js/style.js"></script>
<script src="<?=base_url();?>asset/js/myscript.js"></script>
</body>
</html>
| 45.646465
| 140
| 0.549458
|
98166217004180b5505a43409079844cfaefab0c
| 1,179
|
lua
|
Lua
|
cocos/cc_extension/Manifest.lua
|
Xrysnow/lstgx_Doc
|
8495153f3f19c65eefa1972f556e6fde337dde52
|
[
"MIT"
] | 2
|
2019-01-27T17:38:39.000Z
|
2019-08-24T08:27:32.000Z
|
cocos/cc_extension/Manifest.lua
|
Xrysnow/lstgx_LuaDoc
|
8495153f3f19c65eefa1972f556e6fde337dde52
|
[
"MIT"
] | null | null | null |
cocos/cc_extension/Manifest.lua
|
Xrysnow/lstgx_LuaDoc
|
8495153f3f19c65eefa1972f556e6fde337dde52
|
[
"MIT"
] | null | null | null |
--------------------------------
-- @module Manifest
-- @extend Ref
-- @parent_module cc
-- Lua API documentation stub for the native cc.Manifest binding;
-- the real implementations live in C++, so every function body is empty.

---@class cc.Manifest:cc.Ref
local Manifest = {}
cc.Manifest = Manifest

--------------------------------
--- @brief Gets remote manifest file url.
---@return string
function Manifest:getManifestFileUrl()
end

--------------------------------
--- @brief Check whether the version informations have been fully loaded.
---@return boolean
function Manifest:isVersionLoaded()
end

--------------------------------
--- @brief Check whether the manifest has been fully loaded.
---@return boolean
function Manifest:isLoaded()
end

--------------------------------
--- @brief Gets remote package url.
---@return string
function Manifest:getPackageUrl()
end

--------------------------------
--- @brief Gets manifest version.
---@return string
function Manifest:getVersion()
end

--------------------------------
--- @brief Gets remote version file url.
---@return string
function Manifest:getVersionFileUrl()
end

--------------------------------
--- @brief Get the search paths list related to the Manifest.
---@return array_table
function Manifest:getSearchPaths()
end

-- Doc stub module: nothing to export at runtime.
return nil
| 19.327869
| 73
| 0.573367
|
b0928bb26745a4a1c2cc95ce0206a81733880280
| 14,963
|
py
|
Python
|
src/analysis/correlation_perFreq.py
|
phoebsc/hyperscanning_BCI
|
1cd55e289fdc85fcbbab9c49706d52fb34a31ff7
|
[
"MIT"
] | 6
|
2021-05-25T00:56:59.000Z
|
2021-11-23T14:40:50.000Z
|
src/analysis/correlation_perFreq.py
|
phoebsc/hyperscanning_BCI
|
1cd55e289fdc85fcbbab9c49706d52fb34a31ff7
|
[
"MIT"
] | 16
|
2021-05-19T02:57:55.000Z
|
2021-09-17T11:56:04.000Z
|
src/analysis/correlation_perFreq.py
|
phoebsc/hyperscanning_BCI
|
1cd55e289fdc85fcbbab9c49706d52fb34a31ff7
|
[
"MIT"
] | 1
|
2022-03-06T12:22:42.000Z
|
2022-03-06T12:22:42.000Z
|
"""
Correlation module calculating connectivity values from data
"""
import logging
import numpy as np
import os
from itertools import islice
from pylsl import local_clock
from scipy.signal import hilbert
from scipy.signal import lfilter
from scipy.stats import zscore
from astropy.stats import circmean
from itertools import product
from osc4py3.as_allthreads import *
from osc4py3 import oscbuildparse
from osc4py3 import oscchannel as osch
import warnings
warnings.filterwarnings("ignore")
current = os.path.dirname(__file__)
LAST_CALCULATION = local_clock()
ORDER = 5
class Correlation:
def __init__(self, sample_rate, channel_count, mode, chn_type, corr_params, OSC_params, compute_pow, norm_params,
window_length, COEFFICIENTS, HANN, CONNECTIONS, OUTLET, OUTLET_POWER):
"""
Class computing connectivity values
:param sample_rate: sampling rate
:param channel_count: channel count
:param mode: connectivity mode. See notes for options.
:param chn_type: compute all electrode pairs if 'all-to-all';
alternatively, compute only corresponding electrode pairs if 'one-to-one'
:param corr_params: a list of three lists: frequency parameters, channel parameters, weight parameters
:param OSC_params: OSC parameters for OSC transmission
:param compute_pow: boolean variable determining whether to compute and transmit power values
:param norm_params: a list of two numbers. min and max values for MinMax normalization
:param COEFFICIENTS: band-pass filtering coefficients
:param HANN: Hanning window coefficients
:param CONNECTIONS: number of connections
:param OUTLET: StreamOutlet object for connectivity value output
:param OUTLET_POWER: StreamOutlet object for power value output
Note:
**supported connectivity measures**
- 'envelope correlation': envelope correlation
- 'power correlation': power correlation
- 'plv': phase locking value
- 'ccorr': circular correlation coefficient
- 'coherence': coherence
- 'imaginary coherence': imaginary coherence
"""
self.logger = logging.getLogger(__name__)
self.sample_rate = sample_rate
self.window_length = window_length # number of samples in the analysis window
self.channel_count = channel_count
self.freqParams, self.chnParams, self.weightParams = corr_params
self.OSC_params = OSC_params
self.compute_pow = compute_pow
self.norm_min, self.norm_max = norm_params
self.mode = mode
self.chn_type = chn_type
self.timestamp = None
self.SAMPLE_RATE = self.sample_rate
self.CHANNEL_COUNT = self.channel_count
# read setup tools
self.COEFFICIENTS = COEFFICIENTS
self.HANN = HANN
self.CONNECTIONS = CONNECTIONS
self.OUTLET = OUTLET
if self.compute_pow:
self.OUTLET_POWER = OUTLET_POWER
if OSC_params[0] is not None:
self._setup_OSC()
def run(self, buffers):
"""
running the analysis
:return: connectivity values
"""
global LAST_CALCULATION
trailing_timestamp = self._find_trailing_timestamp(buffers)
if trailing_timestamp != LAST_CALCULATION:
LAST_CALCULATION = trailing_timestamp
# select data for analysis based on the last timestamp
analysis_window = self._select_analysis_window(trailing_timestamp, buffers)
# apply Hanning window
# analysis_window = self._apply_window_weights(analysis_window)
# band-pass filter and compute analytic signal
analytic_matrix = self._calculate_all(analysis_window)
# compute connectivity values
rvalues = self._calculate_rvalues(analytic_matrix, self.mode)
if self.compute_pow:
power_values = self._calculate_power(analytic_matrix)
self.OUTLET_POWER.push_sample(power_values, timestamp=trailing_timestamp)
# sending LSL packets
if self.OUTLET:
self.logger.warning("Sending {} R values with timestamp {}".format(len(rvalues), trailing_timestamp))
self.OUTLET.push_sample(rvalues, timestamp=trailing_timestamp)
# sending OSC packets
if self.OSC_params[0] is not None: # if sending OSC
sample_size = self.CONNECTIONS * len(self.freqParams)
msg = oscbuildparse.OSCMessage("/Rvalues/me", ","+'f'*sample_size, rvalues)
osc_send(msg, 'Rvalues')
osc_process()
return rvalues
else:
self.logger.debug("Still waiting for new data to arrive, skipping analysis")
return
def _clamp(self, n):
"""
helper function to clamp a float variable between 0 and 1
"""
return max(min(1, n), 0)
def _apply_window_weights(self, analysis_window):
"""
applying hanning window to data
:param analysis_window: dictionary with EEG data streams
:return: dictionary of the same shape after applying hanning window
"""
for uid in analysis_window.keys():
analysis_window[uid] = np.multiply(analysis_window[uid], self.HANN[:, None])
self.logger.debug("Applying window weights with %s samples and %s channels." % analysis_window[uid].shape)
return analysis_window
def _setup_OSC(self):
"""
setting up OSC outlet
"""
# reading params
IP = self.OSC_params[0]
port = int(self.OSC_params[1])
# Start the system.
osc_startup()
# Make client channels to send packets.
try:
osc_udp_client(IP, int(port), "Rvalues")
except:
osch.terminate_all_channels()
osc_udp_client(IP, int(port), "Rvalues")
# first message is empty (removed this bc it's causing OSC msg to be all zeros)
# msg = oscbuildparse.OSCMessage("/Rvalues/me", ","+'f'*sample_size, [0]*sample_size)
# osc_send(msg, 'Rvalues')
def _calculate_power(self, analytic_matrix):
"""
compute power values from analytic signals
:param analytic_matrix: shape is (n_freq_bands, n_subjects, n_channel_count, n_sample_size). filtered analytic signal
:return: a vector that can be reshaped into (n_freq_bands, n_subjects, n_channel_count). Power values
"""
return np.nanmean(np.abs(analytic_matrix)**2, axis=3).reshape(-1)
def _find_trailing_timestamp(self, buffers):
trailing_timestamp = local_clock()
for buffer in buffers.values():#self.buffers.values():
timestamp, _ = buffer[-1]
if trailing_timestamp > timestamp:
trailing_timestamp = timestamp
return trailing_timestamp
def _select_analysis_window(self, trailing_timestamp, buffers):
"""
construct the analysis window based on the timestamp from last window
:param trailing_timestamp: timestamp from the last window
:return: a dictionary containing data. each value is a matrix of size (n_sample_size, n_channel_count)
"""
analysis_window = {}
for uid, buffer in buffers.items():#self.buffers.items():
# compute the sample start
latest_sample_at, _ = buffer[-1]
sample_offset = int(round((latest_sample_at - trailing_timestamp) * self.sample_rate))
sample_start = len(buffer) - self.window_length - sample_offset
if sample_start < 0:
self.logger.info("Not enough data to process in buffer {}, using dummy data".format(uid))
analysis_window[uid] = np.zeros((self.window_length, self.channel_count))
else:
# take data from buffer
timestamped_window = list(islice(buffer, sample_start, sample_start + self.window_length))
analysis_window[uid] = np.array([sample[1] for sample in timestamped_window])
return analysis_window
def _calculate_all(self, analysis_window):
"""
compute analytic signal from the analysis window
:param analysis_window: a dictionary containing data
:return: a matrix of shape (n_freq_bands, n_subjects, n_channel_count, n_sample_size)
"""
all_analytic = zscore(np.swapaxes(np.array(list(analysis_window.values())),1,2), axis=-1) # shape = (n_sub, n_chn, n_times)
all_analytic = np.array([hilbert(lfilter(coeff[0], coeff[1], all_analytic)) for c, coeff in enumerate(self.COEFFICIENTS)])
return all_analytic
# helper function
def _multiply_conjugate(self, real: np.ndarray, imag: np.ndarray, transpose_axes: tuple) -> np.ndarray:
"""
Helper function to compute the product of a complex array and its conjugate.
It is designed specifically to collapse the last dimension of a four-dimensional array.
Arguments:
real: the real part of the array.
imag: the imaginary part of the array.
transpose_axes: axes to transpose for matrix multiplication.
Returns:
product: the product of the array and its complex conjugate.
"""
formula = 'ilm,imk->ilk'
product = np.einsum(formula, real, real.transpose(transpose_axes)) + \
np.einsum(formula, imag, imag.transpose(transpose_axes)) - 1j * \
(np.einsum(formula, real, imag.transpose(transpose_axes)) - \
np.einsum(formula, imag, real.transpose(transpose_axes)))
return product
def compute_sync(self, complex_signal: np.ndarray, mode: str) -> np.ndarray:
"""
helper function for computing connectivity value.
The result is a connectivity matrix of all possible electrode pairs between the dyad, including inter- and intra-brain connectivities.
:param complex_signal: complex signal of shape (n_freq, 2, n_channel_count, n_sample_size). data for one dyad.
:param mode: connectivity mode. see notes for details.
:return: connectivity matrix of shape (n_freq, 2*n_channel_count, 2*channel_count)
"""
n_ch, n_freq, n_samp = complex_signal.shape[2], complex_signal.shape[0], \
complex_signal.shape[3]
complex_signal = complex_signal.reshape(n_freq, 2 * n_ch, n_samp)
transpose_axes = (0, 2, 1)
if mode.lower() == 'plv':
phase = complex_signal / np.abs(complex_signal)
c = np.real(phase)
s = np.imag(phase)
dphi = self._multiply_conjugate(c, s, transpose_axes=transpose_axes)
con = abs(dphi) / n_samp
elif mode.lower() == 'envelope correlation':
env = np.abs(complex_signal)
mu_env = np.mean(env, axis=2).reshape(n_freq, 2 * n_ch, 1)
env = env - mu_env
con = np.einsum('ilm,imk->ilk', env, env.transpose(transpose_axes)) / \
np.sqrt(np.einsum('il,ik->ilk', np.sum(env ** 2, axis=2), np.sum(env ** 2, axis=2)))
elif mode.lower() == 'power correlation':
env = np.abs(complex_signal) ** 2
mu_env = np.mean(env, axis=2).reshape(n_freq, 2 * n_ch, 1)
env = env - mu_env
con = np.einsum('ilm,imk->ilk', env, env.transpose(transpose_axes)) / \
np.sqrt(np.einsum('il,ik->ilk', np.sum(env ** 2, axis=2), np.sum(env ** 2, axis=2)))
elif mode.lower() == 'coherence':
c = np.real(complex_signal)
s = np.imag(complex_signal)
amp = np.abs(complex_signal) ** 2
dphi = self._multiply_conjugate(c, s, transpose_axes=transpose_axes)
con = np.abs(dphi) / np.sqrt(np.einsum('il,ik->ilk', np.nansum(amp, axis=2),
np.nansum(amp, axis=2)))
# self.logger.warning('con '+str(con[2,18:,0:18]))
elif mode.lower() == 'imaginary coherence':
c = np.real(complex_signal)
s = np.imag(complex_signal)
amp = np.abs(complex_signal) ** 2
dphi = self._multiply_conjugate(c, s, transpose_axes=transpose_axes)
con = np.abs(np.imag(dphi)) / np.sqrt(np.einsum('il,ik->ilk', np.nansum(amp, axis=2),
np.nansum(amp, axis=2)))
elif mode.lower() == 'ccorr':
angle = np.angle(complex_signal)
mu_angle = circmean(angle, axis=2).reshape(n_freq, 2 * n_ch, 1)
angle = np.sin(angle - mu_angle)
formula = 'ilm,imk->ilk'
con = np.einsum(formula, angle, angle.transpose(transpose_axes)) / \
np.sqrt(np.einsum('il,ik->ilk', np.sum(angle ** 2, axis=2), np.sum(angle ** 2, axis=2)))
else:
ValueError('Metric type not supported.')
return con
def _calculate_rvalues(self, analytic_matrix, mode):
"""
computes connectivity value from the analytic signal
:param analytic_matrix: analytic signal of shape (n_freq_bands, n_subjects, n_channel_count, n_sample_size)
:param mode: connectivity mode. see notes for details.
:return: a list of length = n_connections * n_freq. connectivity values
"""
# compute all possible pair combinations
pair_index = [a for a in
list(product(np.arange(0, analytic_matrix.shape[1]), np.arange(0, analytic_matrix.shape[1])))
if a[0] < a[1]]
rvals = []
# iterate for each combination
for pair in pair_index:
con = np.abs(self.compute_sync(analytic_matrix[:, pair, :, :], mode))
# the connectivity matrix for the current pair. shape is (n_freq, n_ch, n_ch)
con = con[:, 0:self.channel_count, self.channel_count:]
if 'all-to-all' in self.chn_type: # all to all correlation
result = [np.nanmean(con[i, self.chnParams[freq]][:, self.chnParams[freq]], axis=(0, 1))
for i, freq in enumerate(self.freqParams.keys())]
else: # channel to channel correlation
result = [np.nanmean(np.diagonal(con[i], axis1=0, axis2=1)[self.chnParams[freq]])
for i, freq in enumerate(self.freqParams.keys())]
# adjust result according to weight parameters
weights = list(self.weightParams.values())
result = [r*weight for r, weight in zip(result, weights)]
result = [self._clamp((r-minn)/(maxx-minn)) for r, minn, maxx in zip(result, self.norm_min, self.norm_max)]
rvals.extend(result)
return rvals # a list of length n_connections * n_freq
| 47.201893
| 142
| 0.627615
|
2766ed8ddf8ac80745811efa11c2e01bcdc2f994
| 230
|
rs
|
Rust
|
crate/src/models/tile.rs
|
Semester-Project-WS19-20/bomberman-wasm
|
bbe60c8c45341aaf4b917705c9f0d5facccebd94
|
[
"MIT"
] | null | null | null |
crate/src/models/tile.rs
|
Semester-Project-WS19-20/bomberman-wasm
|
bbe60c8c45341aaf4b917705c9f0d5facccebd94
|
[
"MIT"
] | null | null | null |
crate/src/models/tile.rs
|
Semester-Project-WS19-20/bomberman-wasm
|
bbe60c8c45341aaf4b917705c9f0d5facccebd94
|
[
"MIT"
] | 1
|
2020-04-15T16:18:08.000Z
|
2020-04-15T16:18:08.000Z
|
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Tile {
Empty,
HardBlock,
SoftBlock,
PowerupBombNumber,
PowerupBombPower,
PowerupSpeed,
PowerupBoots,
}
| 15.333333
| 44
| 0.704348
|
a00a6fe56fe5b799b52e0911fa5aa9d9b07d4a8b
| 260
|
ts
|
TypeScript
|
src/app/pages/pricing/pricing.module.ts
|
tyayers/apigee.material
|
0d7de6d4df026ff27c8b8679375c297e1a867fe7
|
[
"Apache-2.0"
] | null | null | null |
src/app/pages/pricing/pricing.module.ts
|
tyayers/apigee.material
|
0d7de6d4df026ff27c8b8679375c297e1a867fe7
|
[
"Apache-2.0"
] | null | null | null |
src/app/pages/pricing/pricing.module.ts
|
tyayers/apigee.material
|
0d7de6d4df026ff27c8b8679375c297e1a867fe7
|
[
"Apache-2.0"
] | null | null | null |
import { NgModule, OnInit } from '@angular/core';
import { PricingComponent } from './pricing.component';
@NgModule({
imports: [
PricingComponent
],
exports: [],
declarations: [],
providers: [],
})
export class PricingModule {}
| 21.666667
| 55
| 0.615385
|
60829c2a22950c3fb262a1926d2cc056c15050cb
| 534
|
asm
|
Assembly
|
programs/oeis/087/A087656.asm
|
neoneye/loda
|
afe9559fb53ee12e3040da54bd6aa47283e0d9ec
|
[
"Apache-2.0"
] | 22
|
2018-02-06T19:19:31.000Z
|
2022-01-17T21:53:31.000Z
|
programs/oeis/087/A087656.asm
|
neoneye/loda
|
afe9559fb53ee12e3040da54bd6aa47283e0d9ec
|
[
"Apache-2.0"
] | 41
|
2021-02-22T19:00:34.000Z
|
2021-08-28T10:47:47.000Z
|
programs/oeis/087/A087656.asm
|
neoneye/loda
|
afe9559fb53ee12e3040da54bd6aa47283e0d9ec
|
[
"Apache-2.0"
] | 5
|
2021-02-24T21:14:16.000Z
|
2021-08-09T19:48:05.000Z
|
; A087656: Let f be defined on the rationals by f(p/q) =(p+1)/(q+1)=p_{1}/q_{1} where (p_{1},q_{1})=1. Let f^k(p/q)=p_{k}/q_{k} where (p_{k},q_{k})=1. Sequence gives least k such that p_{k}-q_{k} = 1 starting at n.
; 1,2,2,4,3,6,3,4,5,10,4,12,7,6,4,16,5,18,6,8,11,22,5,8,13,6,8,28,7,30,5,12,17,10,6,36,19,14,7,40,9,42,12,8,23,46,6,12,9,18,14,52,7,14,9,20,29,58,8,60,31,10,6,16,13,66,18,24,11,70,7,72,37,10,20,16,15,78,8,8,41,82
add $0,2
mov $2,1
mov $3,$0
lpb $0
sub $0,$2
add $1,1
mov $2,$0
gcd $2,$3
lpe
sub $1,1
mov $0,$1
| 35.6
| 214
| 0.58427
|
8e524a26d6a7b657ecd35a26d580c2f3bd240d38
| 622
|
sql
|
SQL
|
openGaussBase/testcase/KEYWORDS/force/Opengauss_Function_Keyword_Force_Case0029.sql
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/KEYWORDS/force/Opengauss_Function_Keyword_Force_Case0029.sql
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/KEYWORDS/force/Opengauss_Function_Keyword_Force_Case0029.sql
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
-- @testpoint:opengauss关键字force(非保留),作为表空间名
--关键字不带引号,创建成功
drop tablespace if exists force;
CREATE TABLESPACE force RELATIVE LOCATION 'hdfs_tablespace/hdfs_tablespace_1';
drop tablespace force;
--关键字带双引号,创建成功
drop tablespace if exists "force";
CREATE TABLESPACE "force" RELATIVE LOCATION 'hdfs_tablespace/hdfs_tablespace_1';
drop tablespace "force";
--关键字带单引号,合理报错
drop tablespace if exists 'force';
CREATE TABLESPACE 'force' RELATIVE LOCATION 'hdfs_tablespace/hdfs_tablespace_1';
--关键字带反引号,合理报错
drop tablespace if exists `force`;
CREATE TABLESPACE `force` RELATIVE LOCATION 'hdfs_tablespace/hdfs_tablespace_1';
| 28.272727
| 81
| 0.805466
|
a14830c4d9c9e06557d59572fd0589f7f79aa0be
| 4,932
|
ts
|
TypeScript
|
packages/util/http-utils/src/__tests__/httpUtils.test.ts
|
navikt/k9-frontend-modules
|
b935c3b8c346515f14000bfe1624dc27b319aa05
|
[
"MIT"
] | null | null | null |
packages/util/http-utils/src/__tests__/httpUtils.test.ts
|
navikt/k9-frontend-modules
|
b935c3b8c346515f14000bfe1624dc27b319aa05
|
[
"MIT"
] | 2
|
2021-08-23T13:00:46.000Z
|
2022-02-22T10:16:19.000Z
|
packages/util/http-utils/src/__tests__/httpUtils.test.ts
|
navikt/k9-frontend-modules
|
b935c3b8c346515f14000bfe1624dc27b319aa05
|
[
"MIT"
] | null | null | null |
import axios from 'axios';
import { get, post } from './../httpUtils';
import * as responseHelpers from './../responseHelpers';
jest.mock('axios');
const axiosMock = axios as jest.Mocked<typeof axios>;
describe('httpUtils', () => {
const mockedErrorHandler = () => null;
beforeAll(() => {
jest.spyOn(console, 'error').mockImplementation(() => null);
});
describe('get', () => {
const goodResponseMock = { data: 'mockedData' };
const badRequestResponseMock = { response: { status: 400, headers: {} } };
it('should return the data-property from the response when the promise resolved', async () => {
axiosMock.get.mockImplementation(() => Promise.resolve(goodResponseMock));
const data = await get('', () => null);
expect(data).toEqual(goodResponseMock.data);
});
it('should throw an error and console.error when the promise is rejected', async () => {
axiosMock.get.mockImplementation(() => Promise.reject(badRequestResponseMock));
const error = get('', () => null);
await expect(error).rejects.toThrow();
expect(console.error).toHaveBeenCalledWith(badRequestResponseMock);
});
it('should call function triggering the provided httpErrorHandler when required', async () => {
const httpErrorHandlerCaller = jest.spyOn(responseHelpers, 'handleErrorExternally');
const checkerFn = jest.spyOn(responseHelpers, 'httpErrorShouldBeHandledExternally');
checkerFn.mockReturnValueOnce(true);
axiosMock.get.mockImplementation(() => Promise.reject(badRequestResponseMock));
const error = get('', mockedErrorHandler);
await expect(error).rejects.toThrow('');
expect(httpErrorHandlerCaller).toHaveBeenCalledWith(badRequestResponseMock, mockedErrorHandler);
httpErrorHandlerCaller.mockReset();
});
it('should avoid calling function triggering httpErrorHandler when unneccessary', async () => {
const httpErrorHandlerCaller = jest.spyOn(responseHelpers, 'handleErrorExternally');
const checkerFn = jest.spyOn(responseHelpers, 'httpErrorShouldBeHandledExternally');
checkerFn.mockReturnValueOnce(false);
axiosMock.get.mockImplementation(() => Promise.reject(badRequestResponseMock));
await expect(get('', mockedErrorHandler)).rejects.toThrow('');
expect(httpErrorHandlerCaller).not.toHaveBeenCalled();
httpErrorHandlerCaller.mockReset();
});
});
describe('post', () => {
const goodResponseMock = { data: 'mockedData' };
const badRequestResponseMock = { response: { status: 400, headers: {} } };
it('should return the data-property from the response when the promise resolved', async () => {
axiosMock.post.mockImplementation(() => Promise.resolve(goodResponseMock));
const data = await post('', null, null);
expect(data).toEqual(goodResponseMock.data);
});
it('should throw an error and console.error when the promise is rejected', async () => {
axiosMock.post.mockImplementation(() => Promise.reject(badRequestResponseMock));
const error = post('', null, null);
await expect(error).rejects.toEqual(badRequestResponseMock);
expect(console.error).toHaveBeenCalledWith(badRequestResponseMock);
});
it('should call function triggering the provided httpErrorHandler when required', async () => {
const httpErrorHandlerCaller = jest.spyOn(responseHelpers, 'handleErrorExternally');
const checkerFn = jest.spyOn(responseHelpers, 'httpErrorShouldBeHandledExternally');
checkerFn.mockReturnValueOnce(true);
axiosMock.post.mockImplementation(() => Promise.reject(badRequestResponseMock));
const error = post('', null, mockedErrorHandler);
await expect(error).rejects.toEqual(badRequestResponseMock);
expect(httpErrorHandlerCaller).toHaveBeenCalledWith(badRequestResponseMock, mockedErrorHandler);
httpErrorHandlerCaller.mockReset();
});
it('should avoid calling function triggering httpErrorHandler when unneccessary', async () => {
const httpErrorHandlerCaller = jest.spyOn(responseHelpers, 'handleErrorExternally');
const checkerFn = jest.spyOn(responseHelpers, 'httpErrorShouldBeHandledExternally');
checkerFn.mockReturnValueOnce(false);
axiosMock.post.mockImplementation(() => Promise.reject(badRequestResponseMock));
await expect(post('', null, mockedErrorHandler)).rejects.toEqual(badRequestResponseMock);
expect(httpErrorHandlerCaller).not.toHaveBeenCalled();
httpErrorHandlerCaller.mockReset();
});
});
});
| 48.831683
| 108
| 0.658151
|
1f5f565e83dffaeef0e6790e2919bee739d1be22
| 170
|
cs
|
C#
|
LibraCore/LevelBuilding/AnimationLoopDescriptor.cs
|
KamiNeko/Libra
|
96bea89c923ed80ae9938dff4ee5e651c39402d5
|
[
"MIT"
] | 2
|
2020-07-04T16:44:30.000Z
|
2020-08-18T05:54:38.000Z
|
LibraCore/LevelBuilding/AnimationLoopDescriptor.cs
|
KamiNeko/Libra
|
96bea89c923ed80ae9938dff4ee5e651c39402d5
|
[
"MIT"
] | null | null | null |
LibraCore/LevelBuilding/AnimationLoopDescriptor.cs
|
KamiNeko/Libra
|
96bea89c923ed80ae9938dff4ee5e651c39402d5
|
[
"MIT"
] | 1
|
2019-11-03T21:12:19.000Z
|
2019-11-03T21:12:19.000Z
|
namespace LibraCore.LevelBuilding
{
public class AnimationLoopDescriptor
{
public bool Active { get; set; }
public int Key { get; set; }
}
}
| 18.888889
| 40
| 0.617647
|
28da37d5295b111557e86e98e48a1d3f2d353399
| 98
|
swift
|
Swift
|
Tests/LinuxMain.swift
|
craterdog-bali/swift-bali-document-notation
|
a7a78eebc9652db841e4199b531c031cec597daf
|
[
"MIT"
] | null | null | null |
Tests/LinuxMain.swift
|
craterdog-bali/swift-bali-document-notation
|
a7a78eebc9652db841e4199b531c031cec597daf
|
[
"MIT"
] | null | null | null |
Tests/LinuxMain.swift
|
craterdog-bali/swift-bali-document-notation
|
a7a78eebc9652db841e4199b531c031cec597daf
|
[
"MIT"
] | null | null | null |
import XCTest
import BDN
var tests = [XCTestCaseEntry]()
tests += BDN.allTests()
XCTMain(tests)
| 12.25
| 31
| 0.734694
|
1a562b9def7b7b81504ab8431af6bc7aba62a43c
| 473
|
cs
|
C#
|
EnvironmentalSensor/EnvironmentalSensor/Usb/Payloads/LatestDataLongCommandPayload.cs
|
kasanami/EnvironmentalSensor
|
040808344da0a4ea0a9c854b8c5f5cf7c581ef6c
|
[
"MIT"
] | null | null | null |
EnvironmentalSensor/EnvironmentalSensor/Usb/Payloads/LatestDataLongCommandPayload.cs
|
kasanami/EnvironmentalSensor
|
040808344da0a4ea0a9c854b8c5f5cf7c581ef6c
|
[
"MIT"
] | null | null | null |
EnvironmentalSensor/EnvironmentalSensor/Usb/Payloads/LatestDataLongCommandPayload.cs
|
kasanami/EnvironmentalSensor
|
040808344da0a4ea0a9c854b8c5f5cf7c581ef6c
|
[
"MIT"
] | null | null | null |
namespace EnvironmentalSensor.Usb.Payloads
{
/// <summary>
/// 最新データを要求するペイロード
/// </summary>
public class LatestDataLongCommandPayload : CommandPayload
{
public override FrameCommand Command { get => FrameCommand.Read; }
public override FrameAddress Address { get => FrameAddress.LatestDataLong; }
/// <summary>
/// Dataなし
/// </summary>
public override byte[] Data { get; set; } = new byte[0];
}
}
| 29.5625
| 84
| 0.615222
|
19219fae101f9901ff3ef5708953545e2f14d65d
| 226
|
html
|
HTML
|
_includes/header.html
|
elyday/jekyll-swift-theme
|
0afeaaddea164a07a019487a87a32c92f0d1465c
|
[
"MIT"
] | null | null | null |
_includes/header.html
|
elyday/jekyll-swift-theme
|
0afeaaddea164a07a019487a87a32c92f0d1465c
|
[
"MIT"
] | 4
|
2021-10-11T22:11:31.000Z
|
2021-10-30T01:40:00.000Z
|
_includes/header.html
|
elyday/jekyll-swift-theme
|
0afeaaddea164a07a019487a87a32c92f0d1465c
|
[
"MIT"
] | null | null | null |
<header class="nav">
<nav class="nav-menu">
<a href="/" class="nav-brand nav_item">{{ site.title }}</a>
<div class="nav_bar-wrap">
<div class="nav_bar"></div>
</div>
</nav>
</header>
| 28.25
| 67
| 0.504425
|
dd6d71ee103a6c655730c2ac2474e8bf0bd45df2
| 12,119
|
java
|
Java
|
src/br/ufc/lps/view/panels/ViewerPanelResultFeatures.java
|
anderson-uchoa/DyMMer-NFP
|
cef913244aa2daeb3d41039b2595c7ff19f098a9
|
[
"MIT"
] | 5
|
2017-01-11T13:25:03.000Z
|
2017-08-19T13:52:28.000Z
|
src/br/ufc/lps/view/panels/ViewerPanelResultFeatures.java
|
anderson-uchoa/DyMMer
|
cef913244aa2daeb3d41039b2595c7ff19f098a9
|
[
"MIT"
] | null | null | null |
src/br/ufc/lps/view/panels/ViewerPanelResultFeatures.java
|
anderson-uchoa/DyMMer
|
cef913244aa2daeb3d41039b2595c7ff19f098a9
|
[
"MIT"
] | null | null | null |
package br.ufc.lps.view.panels;
import java.awt.BorderLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.border.EmptyBorder;
import javax.swing.table.DefaultTableModel;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
import br.ufc.lps.controller.xml.ControladorXml;
import br.ufc.lps.repository.SchemeXml;
import br.ufc.lps.splar.core.fm.FeatureTreeNode;
import br.ufc.lps.view.Main;
import br.ufc.lps.view.Main.listItens;
import br.ufc.lps.view.panels.dialogs.JOptionPaneListItensSelectOrderModelsVersion;
public class ViewerPanelResultFeatures extends JPanel {
private static String colunas[] = {"N°","Nome"};
private DefaultTableModel mDefaultTableModel;
private ControladorXml controladorXml;
private List<SchemeXml> listaItens;
private Main main;
private JTable tabela;
private JButton open;
private JButton edit;
private JButton create;
private JButton delete;
private JButton refresh;
private JButton medidas;
private JButton download;
private JLabel labelMensagens;
private JLabel loader;
public ViewerPanelResultFeatures(final Main main) {
loader = new JLabel(new ImageIcon("images/ajax-loader.gif"), JLabel.CENTER);
isShowLoader(false);
controladorXml = new ControladorXml();
this.main = main;
setBorder(new EmptyBorder(5, 5, 5, 5));
setLayout(new BorderLayout(0, 0));
//TABELA
JPanel painelTabela = new JPanel();
add(painelTabela, BorderLayout.CENTER);
painelTabela.setLayout(new GridLayout(0, 1, 0, 0));
//MENSAGENS
JPanel painelMensagens = new JPanel();
add(painelMensagens, BorderLayout.SOUTH);
painelMensagens.setLayout(new GridLayout(2, 1, 0, 0));
labelMensagens = new JLabel();
//painelMensagens.add(labelMensagens);
//Painel de opções
JPanel painelOpcoes = new JPanel();
add(painelOpcoes, BorderLayout.EAST);
painelOpcoes.setLayout(new GridLayout(0, 1, 0, 0));
//BOTAO ABRIR
JPanel painelBotaoOpen = new JPanel();
painelOpcoes.add(painelBotaoOpen, BorderLayout.NORTH);
painelBotaoOpen.setLayout(new GridLayout(8, 0, 0, 0));
open = new JButton("Open");
painelBotaoOpen.add(open);
edit = new JButton("Edit");
painelBotaoOpen.add(edit);
create = new JButton("Create New Model");
painelBotaoOpen.add(create);
delete = new JButton("Delete");
painelBotaoOpen.add(delete);
medidas = new JButton("Measures");
painelBotaoOpen.add(medidas);
refresh = new JButton("Refresh");
download = new JButton("Download");
painelBotaoOpen.add(download);
painelBotaoOpen.add(refresh);
painelBotaoOpen.add(loader);
loader.setEnabled(true);
mDefaultTableModel = new DefaultTableModel(new String[][]{}, colunas){
@Override
public boolean isCellEditable(int row, int column) {
return false;
}
};
tabela = new JTable(mDefaultTableModel);
JScrollPane barraRolagem = new JScrollPane(tabela);
painelTabela.add(barraRolagem);
delete.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
int selecao = tabela.getSelectedRow();
if(selecao > -1){
int resp = JOptionPane.showConfirmDialog(null, "Are you sure that you want to delete this model?");
if(resp == JOptionPane.YES_OPTION){
SchemeXml selecionado = listaItens.get(selecao);
if(controladorXml.delete(selecionado)){
System.out.println("deleted successful!");
carregarItens();
}
}
}else
mensagemSelecionarLinha();
}
});
open.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
new Thread(new Runnable() {
@Override
public void run() {
int [] selecao = tabela.getSelectedRows();
if(selecao.length > 0 && selecao.length < 41){
for(int i=0; i < selecao.length; i++){
SchemeXml selecionado = listaItens.get(selecao[i]);
File file = ControladorXml.createFileFromXml(selecionado.getXml());
selecionado.setFile(file);
main.abrirArquivosDoRepositorio(selecionado);
}
}else
JOptionPane.showMessageDialog(null, "Select an appropriate range of models in the table (Up to 40 at a time)");
}
}).start();
}
});
download.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
int selecao = tabela.getSelectedRow();
if(selecao > -1){
SchemeXml selecionado = listaItens.get(selecao);
File file = ControladorXml.createFileFromXml(selecionado.getXml());
JFileChooser chooser = new JFileChooser();
chooser.setCurrentDirectory(new java.io.File("."));
chooser.setDialogTitle("Select the path");
chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
chooser.setAcceptAllFileFilterUsed(false);
if (chooser.showOpenDialog(ViewerPanelResultFeatures.this) == JFileChooser.APPROVE_OPTION) {
System.out.println("getCurrentDirectory(): "
+ chooser.getCurrentDirectory());
System.out.println("getSelectedFile() : "
+ chooser.getSelectedFile());
String nomeArquivo = JOptionPane.showInputDialog("Type the name of File", selecionado.getNameXml());
File file2 = new File(chooser.getSelectedFile()+"/"+nomeArquivo+".xml");
saveInLocalFile( file.getAbsolutePath(), file2);
}else {
System.out.println("No Selection ");
}
}else
mensagemSelecionarLinha();
}
});
create.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
String name = JOptionPane.showInputDialog("Type the name of feature root:");
if(name== null || name.equals("")){
JOptionPane.showMessageDialog(null, "Type a name valid");
return;
}
main.createModelFeature(name);
}
});
refresh.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
new Thread(new Runnable() {
@Override
public void run() {
carregarItens();
}
}).start();
}
});
medidas.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
int selecao = tabela.getSelectedRow();
if(selecao > -1){
SchemeXml selecionado = listaItens.get(selecao);
main.abrirMedidas(selecionado);
}else
mensagemSelecionarLinha();
}
});
edit.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
new Thread(new Runnable() {
@Override
public void run() {
int [] selecao = tabela.getSelectedRows();
if(selecao.length > 0 && selecao.length < 41){
for(int i=0; i < selecao.length; i++){
SchemeXml selecionado = listaItens.get(selecao[i]);
File file = ControladorXml.createFileFromXml(selecionado.getXml());
selecionado.setFile(file);
main.editarArquivosDoRepositorio(selecionado);
}
}else
JOptionPane.showMessageDialog(null, "Select an appropriate range of models in the table (Up to 40 at a time)");
}
}).start();
}
});
new Thread(new Runnable() {
@Override
public void run() {
carregarItens();
}
}).start();
}
private void setBotoes(boolean status){
open.setEnabled(status);
edit.setEnabled(status);
delete.setEnabled(status);
medidas.setEnabled(status);
}
private int getWidthByNumber(Integer count){
String numero = count.toString();
System.out.println(numero.length());
return numero.length()*18;
}
public void isShowLoader(boolean val){
loader.setVisible(val);
}
public synchronized void carregarItens(){
isShowLoader(true);
listaItens = controladorXml.getXml();
mDefaultTableModel.setRowCount(0);
int count = 1;
if(listaItens!=null){
if(listaItens.size() > 0){
for(SchemeXml sc : listaItens){
mDefaultTableModel.addRow(new String[]{(count++)+"-", sc.getNameXml()});
}
setBotoes(true);
} else {
setBotoes(false);
}
tabela.getColumnModel().getColumn(0).setMaxWidth(getWidthByNumber(count));
}else{
labelMensagens.setText("There was a problem connecting");
JOptionPane.showMessageDialog(null, "There was a problem connecting");
}
isShowLoader(false);
}
public List<SchemeXml> getAllSelectedItensList(){
int [] selecao = tabela.getSelectedRows();
if(selecao.length >= 2){
List<SchemeXml> list = new ArrayList<>();
for(int i=0; i < selecao.length; i++)
list.add(listaItens.get(selecao[i]));
return list;
}
mensagemSelecionarMultiplasLinhas();
return null;
}
public void getAllSelectedItensPriorityList(listItens ready){
int [] selecao = tabela.getSelectedRows();
if(selecao.length >= 2){
List<SchemeXml> list = new ArrayList<>();
for(int i=0; i < selecao.length; i++)
list.add(listaItens.get(selecao[i]));
JOptionPaneListItensSelectOrderModelsVersion modelv = new JOptionPaneListItensSelectOrderModelsVersion();
modelv.displayGUI(main, list, ready);
}
}
public List<SchemeXml> getAllItensList(){
return listaItens;
}
public SchemeXml getOneItemList(){
int selecao = tabela.getSelectedRow();
if(selecao > -1){
SchemeXml selecionado = listaItens.get(selecao);
return selecionado;
}
mensagemSelecionarLinha();
return null;
}
private void mensagemSelecionarLinha(){
JOptionPane.showMessageDialog(null, "Select a model in the table");
}
private void mensagemSelecionarMultiplasLinhas(){
JOptionPane.showMessageDialog(null, "Select the models in the table");
}
private void saveInLocalFile(String path, File file){
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
try {
DocumentBuilder db = dbf.newDocumentBuilder();
Document doc = db.parse(path);
Transformer transformer = TransformerFactory.newInstance().newTransformer();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
DOMSource source = new DOMSource(doc);
StreamResult console = new StreamResult(new FileOutputStream(file));
transformer.transform(source, console);
JOptionPane.showMessageDialog(ViewerPanelResultFeatures.this,
"Save Successfuly");
} catch (SAXException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (ParserConfigurationException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (TransformerConfigurationException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (TransformerFactoryConfigurationError e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (TransformerException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
}
}
| 28.249417
| 118
| 0.698572
|
af9dc5332b2bd51c46e4d524c10c9d03f6428190
| 3,286
|
py
|
Python
|
testing/cubep3m/CubeP3MData.py
|
egpbos/egp
|
5e82c2de9e6884795b4ee89f2b15ed5dde70388f
|
[
"Apache-2.0"
] | null | null | null |
testing/cubep3m/CubeP3MData.py
|
egpbos/egp
|
5e82c2de9e6884795b4ee89f2b15ed5dde70388f
|
[
"Apache-2.0"
] | null | null | null |
testing/cubep3m/CubeP3MData.py
|
egpbos/egp
|
5e82c2de9e6884795b4ee89f2b15ed5dde70388f
|
[
"Apache-2.0"
] | null | null | null |
import os
class CubeP3MData(object):
"""
Load a CubeP3M checkpoint file and gather related meta-data from the
parameter files present in the run directory. The run directory is
assumed to be one directory up from the checkpoint's location. If not,
you need to specify the run_path in the initialization.
Default instantiation argument is filename, including full path.
"""
def __init__(self, filename, run_path = None):
self.filename = os.path.abspath(filename)
if not run_path:
self.run_path = os.path.dirname(self.filename)[:-6] # cut off "output"
self.load_metadata()
self.Ntotal = self.metadata['N']
self.offset = 11 + self.metadata['pp_run'] # file offset due to header
xvint = np.memmap(self.filename, dtype='int32', mode='r')
N = xvint[0]
if N != self.Ntotal:
self.Ntotal = N
print "N.B.: particles have been deleted from the ICs!\nAdjusted particle number from %i to %i." % (self.metadata['N'], N)
self.xv = np.memmap(self.filename, dtype='float32', mode='r', offset = self.offset*4)
order = property()
@order.setter
def order(self, order):
self._order = order
@order.getter
def order(self):
try:
return self._order
except AttributeError:
# Load particle IDs and use them to build an ordering array that
# will be used to order the other data by ID.
if self.metadata['pid_flag']:
pid_filename = self.filename[:self.filename.find('xv')]+'PID0.dat'
idarray = np.memmap(pid_filename, dtype='int64', offset=self.offset)
self.order = np.argsort(idarray).astype('uint32')
del idarray
else:
self.order = np.arange(self.Ntotal)
return self._order
pos = property()
@pos.setter
def pos(self, pos):
self._pos = pos
@pos.getter
def pos(self):
try:
return self._pos
except AttributeError:
# Load the particle positions into a NumPy array called self._pos,
# ordered by ID number.
self.pos = self.xv.reshape(self.Ntotal, 6)[:,:3]
self.pos *= self.metadata['boxlen']/self.metadata['nc'] # Mpc h^-1
self.pos = self.pos[self.order]
return self._pos
vel = property()
@vel.setter
def vel(self, vel):
self._vel = vel
@vel.getter
def vel(self):
try:
return self._vel
except AttributeError:
# Load the particle velocities into a NumPy array called self._vel,
# ordered by ID number.
self.vel = self.xv.reshape(self.Ntotal, 6)[:,3:]
self.vel *= (150*(1+self.metadata['redshift']) * self.metadata['boxlen'] / self.metadata['nc'] * np.sqrt(self.metadata['omega_m'])) # km/s
self.vel = self.vel[self.order]
return self._vel
def load_metadata(self):
"""Loads the pickled parameters. Assumes that simulation was setup with
this code, which saves parameters as a Python pickle file."""
self.metadata = pickle.load(open(self.run_path+'parameters.pickle', 'rb'))
| 40.073171
| 150
| 0.59434
|
4375dcfa52427b44f7af2671f33498849c03934c
| 1,613
|
ts
|
TypeScript
|
src/app/middleware/responseHandler.ts
|
izhostov/interview-task-be
|
7b9a9159f5a6eaf120663e6f9c90d1b8da57a793
|
[
"MIT"
] | 7
|
2020-01-22T08:42:47.000Z
|
2021-07-08T15:06:45.000Z
|
src/app/middleware/responseHandler.ts
|
izhostov/interview-task-be
|
7b9a9159f5a6eaf120663e6f9c90d1b8da57a793
|
[
"MIT"
] | 9
|
2021-03-02T01:09:04.000Z
|
2022-02-27T01:29:14.000Z
|
src/app/middleware/responseHandler.ts
|
izhostov/interview-task-be
|
7b9a9159f5a6eaf120663e6f9c90d1b8da57a793
|
[
"MIT"
] | 1
|
2022-01-17T12:18:09.000Z
|
2022-01-17T12:18:09.000Z
|
import { DefaultContext } from 'koa';
import { constants } from 'http2';
export default () => async (ctx: DefaultContext, next: () => Promise<any>) => {
ctx.success = ({statusCode, data = undefined}: any) => {
const status = 'success';
if (!!statusCode && (statusCode < constants.HTTP_STATUS_BAD_REQUEST))
ctx.status = statusCode;
else if (!(ctx.status < constants.HTTP_STATUS_BAD_REQUEST))
ctx.status = constants.HTTP_STATUS_OK;
ctx.body = {status, data};
};
ctx.error = ({statusCode, code, message = undefined}: any) => {
const status = 'error';
if (!!statusCode && (statusCode >= constants.HTTP_STATUS_BAD_REQUEST && statusCode < 600))
ctx.status = statusCode;
else if (!(ctx.status >= constants.HTTP_STATUS_INTERNAL_SERVER_ERROR && ctx.status < 600))
ctx.status = constants.HTTP_STATUS_INTERNAL_SERVER_ERROR;
ctx.body = {status, code, message};
};
ctx.ok = (params: any = {}) => {
ctx.success({
...params,
statusCode: constants.HTTP_STATUS_OK,
});
};
ctx.created = (params: any = {}) => {
ctx.success({
...params,
statusCode: constants.HTTP_STATUS_CREATED,
});
};
ctx.accepted = (params: any = {}) => {
ctx.success({
...params,
statusCode: constants.HTTP_STATUS_ACCEPTED,
});
};
ctx.noContent = () => {
ctx.success({
statusCode: constants.HTTP_STATUS_NO_CONTENT,
});
};
await next();
};
| 29.327273
| 98
| 0.553627
|
8e03861dbcf385a2c51499969416566d8fffefb7
| 3,931
|
rb
|
Ruby
|
cookbooks/volgactf/recipes/netdata.rb
|
VolgaCTF/public-infra
|
d3b02d2a8e820c0701c74f4261713832eb4f62cd
|
[
"MIT"
] | null | null | null |
cookbooks/volgactf/recipes/netdata.rb
|
VolgaCTF/public-infra
|
d3b02d2a8e820c0701c74f4261713832eb4f62cd
|
[
"MIT"
] | null | null | null |
cookbooks/volgactf/recipes/netdata.rb
|
VolgaCTF/public-infra
|
d3b02d2a8e820c0701c74f4261713832eb4f62cd
|
[
"MIT"
] | null | null | null |
# frozen_string_literal: true
if node['netdata']['enabled']
%w[autoconf autoconf-archive autogen automake cmake curl gcc git gzip libelf-dev libjson-c-dev libjudy-dev liblz4-dev libmnl-dev libssl-dev libtool libuv1-dev make netcat pkg-config python3 tar uuid-dev zlib1g-dev].each do |pkg_name|
package pkg_name
end
netdata_install 'default' do
install_method 'source'
git_repository node['netdata']['git_repository']
git_revision node['netdata']['git_revision']
git_source_directory '/opt/netdata'
autoupdate false
update node['netdata']['update']
end
netdata_global_conf = {
'memory mode' => 'dbengine',
'page cache size' => node['netdata']['global']['page cache size'],
'dbengine multihost disk space' => node['netdata']['global']['dbengine multihost disk space']
}
unless node['netdata']['global']['hostname'].nil?
netdata_global_conf['hostname'] = node['netdata']['global']['hostname']
end
netdata_global_conf['bind to'] = node['netdata']['global']['bind to'] unless node['netdata']['global']['bind to'].nil?
netdata_config 'global' do
owner 'netdata'
group 'netdata'
configurations netdata_global_conf
end
netdata_registry_conf = {
'enabled' => node['netdata']['registry']['enabled'] ? 'yes' : 'no',
'registry to announce' => node['netdata']['registry']['registry to announce']
}
unless node['netdata']['registry']['registry hostname'].nil?
netdata_registry_conf['registry hostname'] = node['netdata']['registry']['registry hostname']
end
netdata_config 'registry' do
owner 'netdata'
group 'netdata'
configurations netdata_registry_conf
end
unless node['netdata']['global']['hostname'].nil?
tls_vlt = ::Vlt::Client.new(::Vlt.file_auth_provider, 'tls')
tls_vlt_provider = -> { tls_vlt }
tls = ::ChefCookbook::TLS.new(node, vlt_provider: tls_vlt_provider, vlt_format: 2)
if tls.has_ec_certificate?(node['netdata']['global']['hostname'])
tls_ec_certificate node['netdata']['global']['hostname'] do
owner 'netdata'
group 'netdata'
vlt_provider tls_vlt_provider
vlt_format 2
action :deploy
end
certificate_entry = tls.ec_certificate_entry(node['netdata']['global']['hostname'])
netdata_config 'web' do
owner 'netdata'
group 'netdata'
configurations(lazy do
{
'tls version' => '1.3',
'# ssl certificate checksum' => certificate_entry.certificate_checksum,
'ssl certificate' => certificate_entry.certificate_path,
'# ssl key checksum' => certificate_entry.certificate_private_key_checksum,
'ssl key' => certificate_entry.certificate_private_key_path
}
end)
end
end
end
service 'netdata' do
action :nothing
end
file '/var/lib/netdata/cloud.d/cloud.conf' do
owner 'netdata'
group 'netdata'
content "[global]\n enabled = no"
mode mode '0644'
action :create
notifies :restart, 'service[netdata]', :delayed
end
vlt = ::Vlt::Client.new(::Vlt.file_auth_provider)
template '/etc/netdata/health_alarm_notify.conf' do
source 'netdata/health_alarm_notify.conf.erb'
owner 'netdata'
group 'netdata'
mode 0o644
variables(lazy do
{
send_telegram: node['netdata']['health_alarm_notify']['telegram']['enabled'],
telegram_bot_token: node['netdata']['health_alarm_notify']['telegram']['enabled'] ? vlt.read(node['netdata']['health_alarm_notify']['telegram']['credential'], prefix: 'telegram', key: 'bot_token') : '',
default_recipient_telegram: node['netdata']['health_alarm_notify']['telegram']['enabled'] ? vlt.read(node['netdata']['health_alarm_notify']['telegram']['credential'], prefix: 'telegram', key: 'chat_id') : ''
}
end)
action :create
notifies :restart, 'service[netdata]', :delayed
end
end
| 34.482456
| 235
| 0.666243
|
7b2195cdf4c51fd72538d28c99e8edd7885e052b
| 3,672
|
rb
|
Ruby
|
migrate/helper.rb
|
pjmtdw/kagetra
|
eeaad91052ab5d563146d2e420a6a6371921b16c
|
[
"MIT"
] | 2
|
2016-04-30T05:28:18.000Z
|
2017-01-23T11:31:32.000Z
|
migrate/helper.rb
|
pjmtdw/kagetra
|
eeaad91052ab5d563146d2e420a6a6371921b16c
|
[
"MIT"
] | 18
|
2016-08-02T05:57:30.000Z
|
2020-07-19T08:33:59.000Z
|
migrate/helper.rb
|
pjmtdw/kagetra
|
eeaad91052ab5d563146d2e420a6a6371921b16c
|
[
"MIT"
] | 4
|
2016-04-30T05:28:32.000Z
|
2021-06-25T14:28:10.000Z
|
module Sequel
class Database
CUSTOM_EXTRA_BLOCKS = {
base: lambda{|x|
primary_key :id
DateTime :created_at, index:true, null:false
DateTime :updated_at, index:true, null:false
},
env: lambda{|x|
String :remote_host, size:72
String :remote_addr, size:48
String :user_agent, size:255
},
thread: lambda{|x|
DateTime :last_comment_date, index: true, comment:"スレッドに最後に書き込んだ日時"
foreign_key :last_comment_user_id, :users, on_delete: :set_null, comment: "スレッドに最後に書き込んだユーザ"
Integer :comment_count, null:false, default:0, comment:"コメント数(毎回aggregateするのは遅いのでキャッシュ)"
},
attached: lambda{|thread|
lambda{|x|
String :path, size:255
String :orig_name, size:128, comment:"元のファイル名"
TrueClass :is_public, null:false, default:false, comment:"公開されているか"
String :description, text:true
Integer :size, null:false
foreign_key :owner_id, :users, on_delete: :set_null
foreign_key :thread_id, thread, null:false, on_delete: :cascade
}
},
comment: lambda{|thread|
lambda{|x|
String :body, text:true, null:false, comment:"内容"
String :user_name, size:24, null:false, comment:"書き込んだ人の名前"
String :real_name, size:24, comment:"内部的な名前と書き込んだ名前が違う場合に使用"
foreign_key :thread_id, thread, null:false, on_delete: :cascade
foreign_key :user_id, :users, on_delete: :set_null
}
},
patch: lambda{|table,table_pk|
lambda{|x|
Integer :revision, null:false
String :patch, text:true, null:false, comment:"差分情報"
foreign_key :user_id, :users, on_delete: :set_null
foreign_key table_pk, table, on_delete: :cascade
index [:revision, table_pk], unique:true
}
},
image: lambda{|x|
String :path, size:255, null:false, unique: true
Integer :width, null:false
Integer :height, null:false
String :format, size:50
foreign_key :album_item_id, :album_items, null:false, unique:true, on_delete: :cascade
}
}
def create_table_custom(name, extra_blocks, options=OPTS, &block)
create_table(name, options.merge(charset:"utf8")){
extra_blocks.each{|b|
if b.is_a?(Symbol) then
instance_eval(&CUSTOM_EXTRA_BLOCKS[b])
elsif b.is_a?(Array) then
instance_eval(&(CUSTOM_EXTRA_BLOCKS[b[0]].call(*b[1..-1])))
else
raise Exception.new("#{b.class} is not supported for extra block in #{name}")
end
}
instance_eval(&block) if block
}
end
end
# TODO: following code is not working in MySQL
# this code adds sequel to insert comments
# see lib/sequel/database/schema_methods.rb and lib/sequel/adapters/shared/postgres.rb
# module Postgres
# module DatabaseMethods
# COLUMN_DEFINITION_ORDER.insert(COLUMN_DEFINITION_ORDER.index(:auto_increment)+1,:comment)
# def column_definition_comment_sql(sql, column)
# sql << " COMMENT #{literal(column[:comment])}" if column.include?(:comment)
# end
# # http://stackoverflow.com/questions/4470108/when-monkey-patching-a-method-can-you-call-the-overridden-method-from-the-new-i
# original_create_table_sql = instance_method(:create_table_sql)
# define_method(:create_table_sql){ |name, generator, options = OPTS |
# comment = options.fetch(:comment, nil)
# "#{original_create_table_sql.bind(self).(name,generator,options)}#{ " COMMENT='#{comment}'" if comment }"
# }
# end
# end
end
| 40.351648
| 131
| 0.634804
|
268628d0a70dca551a79d9513dd11f17363f5738
| 3,386
|
swift
|
Swift
|
RaceReviews/Controllers/LocationsResultsController.swift
|
joinpursuit/Pursuit-Core-iOS-RaceReviews
|
11e9f1921a18db9114b5dede874f4c1d8e228d7c
|
[
"MIT"
] | null | null | null |
RaceReviews/Controllers/LocationsResultsController.swift
|
joinpursuit/Pursuit-Core-iOS-RaceReviews
|
11e9f1921a18db9114b5dede874f4c1d8e228d7c
|
[
"MIT"
] | null | null | null |
RaceReviews/Controllers/LocationsResultsController.swift
|
joinpursuit/Pursuit-Core-iOS-RaceReviews
|
11e9f1921a18db9114b5dede874f4c1d8e228d7c
|
[
"MIT"
] | 4
|
2019-02-13T16:03:39.000Z
|
2019-11-09T00:40:37.000Z
|
//
// LocationsResultsControllerViewController.swift
// RaceReviews
//
// Created by Alex Paul on 2/16/19.
// Copyright © 2019 Alex Paul. All rights reserved.
//
import UIKit
import MapKit
protocol LocationResultsControllerDelegate: AnyObject {
func didSelectCoordinate(_ locationResultsController: LocationsResultsController, coordinate: CLLocationCoordinate2D)
func didScrollTableView(_ locationResultsController: LocationsResultsController)
}
class LocationsResultsController: UIViewController {
@IBOutlet weak var tableView: UITableView!
private let searchCompleter = MKLocalSearchCompleter()
private var completerResults = [MKLocalSearchCompletion]()
weak var delegate: LocationResultsControllerDelegate?
override func viewDidLoad() {
super.viewDidLoad()
tableView.dataSource = self
tableView.delegate = self
searchCompleter.delegate = self
}
}
extension LocationsResultsController: UITableViewDataSource {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return completerResults.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "LocationCell", for: indexPath)
let suggestion = completerResults[indexPath.row]
// Each suggestion is a MKLocalSearchCompletion with a title, subtitle
cell.textLabel?.text = suggestion.title
cell.detailTextLabel?.text = suggestion.subtitle
return cell
}
}
extension LocationsResultsController: UITableViewDelegate {
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
let suggestion = completerResults[indexPath.row]
let addressString = suggestion.subtitle.isEmpty ? suggestion.title : suggestion.subtitle
LocationService.getCoordinate(addressString: addressString) { (coordinate, error) in
if let error = error {
print("error getting coordinate: \(error)")
} else {
print(coordinate)
self.delegate?.didSelectCoordinate(self, coordinate: coordinate)
}
}
dismiss(animated: true)
}
}
extension LocationsResultsController: UISearchResultsUpdating {
func updateSearchResults(for searchController: UISearchController) {
// Ask `MKLocalSearchCompleter` for new completion suggestions based on the change in the text entered in `UISearchBar`.
searchCompleter.queryFragment = searchController.searchBar.text ?? ""
}
}
extension LocationsResultsController: MKLocalSearchCompleterDelegate {
/// - Tag: QueryResults
func completerDidUpdateResults(_ completer: MKLocalSearchCompleter) {
// As the user types, new completion suggestions are continuously returned to this method.
// Overwrite the existing results, and then refresh the UI with the new results.
completerResults = completer.results
tableView.reloadData()
}
func completer(_ completer: MKLocalSearchCompleter, didFailWithError error: Error) {
// Handle any errors returned from MKLocalSearchCompleter.
if let error = error as NSError? {
print("MKLocalSearchCompleter encountered an error: \(error.localizedDescription)")
}
}
}
extension LocationsResultsController: UIScrollViewDelegate {
func scrollViewDidScroll(_ scrollView: UIScrollView) {
delegate?.didScrollTableView(self)
}
}
| 35.270833
| 124
| 0.763142
|
4ceb7a07ea52fd2eaf7d9f44e5a03e05646b91d4
| 947
|
py
|
Python
|
module/DilatedConvolutions.py
|
ForrestPi/DL_module
|
1ddd041ac742b670217fab0098b3939ff252ee26
|
[
"MIT"
] | 1
|
2019-11-14T10:34:39.000Z
|
2019-11-14T10:34:39.000Z
|
module/DilatedConvolutions.py
|
ForrestPi/DL_module
|
1ddd041ac742b670217fab0098b3939ff252ee26
|
[
"MIT"
] | null | null | null |
module/DilatedConvolutions.py
|
ForrestPi/DL_module
|
1ddd041ac742b670217fab0098b3939ff252ee26
|
[
"MIT"
] | null | null | null |
import numpy as np
from torchvision.transforms import Compose, ToTensor
from torch import nn
import torch.nn.init as init
def transform():
return Compose([
ToTensor(),
# Normalize((12,12,12),std = (1,1,1)),
])
arr = range(1,26)
arr = np.reshape(arr,[5,5])
arr = np.expand_dims(arr,2)
arr = arr.astype(np.float32)
# arr = arr.repeat(3,2)
print(arr.shape) #(5, 5, 1)
arr = transform()(arr)
arr = arr.unsqueeze(0)
print(arr)
print(arr.shape)#torch.Size([1, 1, 5, 5])
conv1 = nn.Conv2d(1, 1, 3, stride=1, bias=False, dilation=1, padding=0) # 普通卷积
conv2 = nn.Conv2d(1, 1, 3, stride=1, bias=False, dilation=2, padding=0) # dilation就是空洞率,即间隔
init.constant_(conv1.weight, 1)
init.constant_(conv2.weight, 1)
out1 = conv1(arr)
print(out1.shape) #torch.Size([1, 1, 3, 3])
out2 = conv2(arr)
print(out2.shape) #torch.Size([1, 1, 1, 1])
print('standare conv:\n', out1.detach().numpy())
print('dilated conv:\n', out2.detach().numpy())
| 30.548387
| 92
| 0.665259
|
a38883c29bcbefbdbbc78642b19c10d259b82d93
| 539
|
java
|
Java
|
content/modules/maps/examples/ex1/src/main/java/maps/ex1/screen/order/OrderEdit.java
|
SevDan/jmix-docs
|
309f1a4e7cf838e16098a141eec9ae067f358621
|
[
"CC-BY-4.0"
] | 12
|
2020-11-27T14:45:39.000Z
|
2022-03-17T07:23:41.000Z
|
content/modules/maps/examples/ex1/src/main/java/maps/ex1/screen/order/OrderEdit.java
|
SevDan/jmix-docs
|
309f1a4e7cf838e16098a141eec9ae067f358621
|
[
"CC-BY-4.0"
] | 309
|
2020-11-10T12:04:33.000Z
|
2022-03-18T12:20:43.000Z
|
content/modules/maps/examples/ex1/src/main/java/maps/ex1/screen/order/OrderEdit.java
|
SevDan/jmix-docs
|
309f1a4e7cf838e16098a141eec9ae067f358621
|
[
"CC-BY-4.0"
] | 9
|
2021-03-29T10:33:53.000Z
|
2022-02-14T11:05:11.000Z
|
package maps.ex1.screen.order;
import io.jmix.mapsui.component.GeoMap;
import io.jmix.mapsui.component.GeoMapImpl;
import io.jmix.mapsui.component.layer.VectorLayer;
import io.jmix.mapsui.component.leaflet.translators.GeoObjectWrapper;
import io.jmix.ui.screen.*;
import maps.ex1.entity.Order;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Map;
@UiController("mapst_Order.edit")
@UiDescriptor("order-edit.xml")
@EditedEntityContainer("orderDc")
public class OrderEdit extends StandardEditor<Order> {
}
| 29.944444
| 69
| 0.816327
|
257d0671a5a21c5906b0cdf16f4c00944a482866
| 232
|
js
|
JavaScript
|
src/routes/news.js
|
nguyenvanvy1999/Course-shop
|
39893804a77b10fd9a0523d4adf6c8bfa9bc0319
|
[
"MIT"
] | 1
|
2021-02-12T14:54:52.000Z
|
2021-02-12T14:54:52.000Z
|
src/routes/news.js
|
nguyenvanvy1999/Course-shop
|
39893804a77b10fd9a0523d4adf6c8bfa9bc0319
|
[
"MIT"
] | null | null | null |
src/routes/news.js
|
nguyenvanvy1999/Course-shop
|
39893804a77b10fd9a0523d4adf6c8bfa9bc0319
|
[
"MIT"
] | null | null | null |
const express = require('express'),
router = express.Router(),
NewsController = require('../app/controllers/news');
router.get('/:slug', NewsController.show);
router.get('/', NewsController.index);
module.exports = router;
| 29
| 56
| 0.698276
|
b7813484cca95908f3d11b44181fb9af9562b0d0
| 33,137
|
cs
|
C#
|
WodiLib/WodiLib.Test/Common/Model/Internal/CommonEventSpecialNumberArgDesc.InnerDescDatabaseTest.cs
|
kameske/WodiLib
|
f8d74b5565f38cc550b37e26b63e105c4691be2d
|
[
"MIT"
] | 12
|
2019-02-24T09:14:20.000Z
|
2022-02-03T05:53:29.000Z
|
WodiLib/WodiLib.Test/Common/Model/Internal/CommonEventSpecialNumberArgDesc.InnerDescDatabaseTest.cs
|
kameske/WodiLib
|
f8d74b5565f38cc550b37e26b63e105c4691be2d
|
[
"MIT"
] | 10
|
2019-03-18T13:02:36.000Z
|
2021-02-28T03:31:24.000Z
|
WodiLib/WodiLib.Test/Common/Model/Internal/CommonEventSpecialNumberArgDesc.InnerDescDatabaseTest.cs
|
kameske/WodiLib
|
f8d74b5565f38cc550b37e26b63e105c4691be2d
|
[
"MIT"
] | 4
|
2020-02-28T12:54:25.000Z
|
2021-09-09T13:41:57.000Z
|
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using Commons;
using NUnit.Framework;
using WodiLib.Common;
using WodiLib.Database;
using WodiLib.Sys;
using WodiLib.Test.Tools;
namespace WodiLib.Test.Common.Internal
{
[TestFixture]
public class CommonEventSpecialNumberArgDesc_InnerDescDatabaseTest
{
private static Logger logger;
[SetUp]
public static void Setup()
{
LoggerInitializer.SetupLoggerForDebug();
logger = Logger.GetInstance();
}
[Test]
public static void ArgTypeTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedPropertyList.Add(args.PropertyName); };
// 取得した値が意図した値であること
var type = instance.ArgType;
Assert.AreEqual(type, CommonEventArgType.ReferDatabase);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedPropertyList.Count, 0);
}
[Test]
public static void DatabaseDbKindTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedPropertyList.Add(args.PropertyName); };
var errorOccured = false;
try
{
var _ = instance.DatabaseUseDbKind;
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生しないこと
Assert.IsFalse(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedPropertyList.Count, 0);
}
[Test]
public static void DatabaseDbTypeIdTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedPropertyList.Add(args.PropertyName); };
var errorOccured = false;
try
{
var _ = instance.DatabaseDbTypeId;
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生しないこと
Assert.IsFalse(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedPropertyList.Count, 0);
}
[Test]
public static void DatabaseUseAdditionalItemsFlagTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedPropertyList.Add(args.PropertyName); };
var errorOccured = false;
try
{
var _ = instance.DatabaseUseAdditionalItemsFlag;
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生しないこと
Assert.IsFalse(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedPropertyList.Count, 0);
}
[Test]
public static void SpecialArgCaseListTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedPropertyList.Add(args.PropertyName); };
var errorOccured = false;
try
{
var _ = instance.SpecialArgCaseList;
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生しないこと
Assert.IsFalse(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedPropertyList.Count, 0);
}
private static readonly object[] SetDatabaseReferTestCaseSource =
{
new object[] {DBKind.Changeable, 0, false},
new object[] {DBKind.User, 99, false},
new object[] {DBKind.System, 30, false},
new object[] {null, 55, true},
};
[TestCaseSource(nameof(SetDatabaseReferTestCaseSource))]
public static void SetDatabaseReferTest(DBKind dbKind, int dbTypeId, bool isError)
{
var typeId = (TypeId) dbTypeId;
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedPropertyList.Add(args.PropertyName); };
var errorOccured = false;
try
{
instance.SetDatabaseRefer(dbKind, typeId);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーフラグが一致すること
Assert.AreEqual(errorOccured, isError);
// 意図したとおりプロパティ変更通知が発火していること
if (isError)
{
Assert.AreEqual(changedPropertyList.Count, 0);
}
else
{
Assert.AreEqual(changedPropertyList.Count, 2);
Assert.IsTrue(changedPropertyList[0]
.Equals(nameof(CommonEventSpecialNumberArgDesc.InnerDescDatabase.DatabaseUseDbKind)));
Assert.IsTrue(changedPropertyList[1]
.Equals(nameof(CommonEventSpecialNumberArgDesc.InnerDescDatabase.DatabaseDbTypeId)));
}
}
[TestCase(true)]
[TestCase(false)]
public static void SetDatabaseUseAdditionalItemsFlagTest(bool flag)
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedPropertyList.Add(args.PropertyName); };
var errorOccured = false;
try
{
instance.SetDatabaseUseAdditionalItemsFlag(flag);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生しないこと
Assert.IsFalse(errorOccured);
// 意図したとおりプロパティ変更通知が発火していること
Assert.AreEqual(changedPropertyList.Count, 1);
Assert.IsTrue(changedPropertyList[0]
.Equals(nameof(CommonEventSpecialNumberArgDesc.InnerDescDatabase.DatabaseUseAdditionalItemsFlag)));
}
[TestCase(true, 3)]
[TestCase(false, 0)]
public static void GetSpecialCaseTest(bool flag, int answerLength)
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
instance.SetDatabaseUseAdditionalItemsFlag(flag);
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
instance.GetAllSpecialCase();
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生しないこと
Assert.IsFalse(errorOccured);
// 取得した配列数が意図した値と一致すること
var argCaseLength = instance.GetAllSpecialCase().Count;
Assert.AreEqual(argCaseLength, answerLength);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
private static readonly object[] GetAllSpecialCaseNumberTestCaseSource =
{
new object[] {DBKind.Changeable, 0, true, 3, 2, 1},
new object[] {DBKind.User, 10, false, 3, 1, 0},
new object[] {DBKind.System, 99, false, 3, 0, 0},
};
// Verifies that GetAllSpecialCaseNumber() returns the expected case-number
// triple (DB-kind code, type id, use-addition value) for the configured
// database reference, without firing any property-change notifications.
[TestCaseSource(nameof(GetAllSpecialCaseNumberTestCaseSource))]
public static void GetAllSpecialCaseNumberTest(DBKind dbKind, int dbTypeId, bool isUseAddition,
int answerCaseNumberLength, int answerDbTypeCode, int answerUseAdditionValue)
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
instance.SetDatabaseRefer(dbKind, dbTypeId);
instance.SetDatabaseUseAdditionalItemsFlag(isUseAddition);
// Record every notification raised by the instance and its SpecialArgCaseList
// so we can assert that the getter is side-effect free.
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
instance.GetAllSpecialCaseNumber();
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// No error should occur.
Assert.IsFalse(errorOccured);
// The expected values should be retrieved.
// NOTE(review): indices [0..2] are read unconditionally; this relies on every
// entry of GetAllSpecialCaseNumberTestCaseSource using answerCaseNumberLength == 3.
var caseNumberList = instance.GetAllSpecialCaseNumber();
Assert.AreEqual(caseNumberList.Count, answerCaseNumberLength);
Assert.AreEqual(caseNumberList[0], answerDbTypeCode);
Assert.AreEqual(caseNumberList[1], dbTypeId);
Assert.AreEqual(caseNumberList[2], answerUseAdditionValue);
// No property-change notification should have fired.
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
private static readonly object[] GetAllSpecialCaseDescriptionTestCaseSource =
{
new object[] {true, "a", "b", "c", 3},
new object[] {true, "a", null, "c", 3},
new object[] {false, null, null, "c", 0},
new object[] {false, null, null, null, 0},
};
[TestCaseSource(nameof(GetAllSpecialCaseDescriptionTestCaseSource))]
public static void GetAllSpecialCaseDescriptionTest(bool isUseAddition,
string strMinus1, string strMinus2, string strMinus3, int resultLength)
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
instance.SetDatabaseUseAdditionalItemsFlag(isUseAddition);
if (strMinus1 != null)
{
instance.UpdateDatabaseSpecialCase(-1, strMinus1);
}
if (strMinus2 != null)
{
instance.UpdateDatabaseSpecialCase(-2, strMinus2);
}
if (strMinus3 != null)
{
instance.UpdateDatabaseSpecialCase(-3, strMinus3);
}
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
instance.GetAllSpecialCaseDescription();
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生しないこと
Assert.IsFalse(errorOccured);
// 意図した値が取得できること
var caseDescList = instance.GetAllSpecialCaseDescription();
var caseDescListCount = caseDescList.Count;
Assert.AreEqual(caseDescListCount, resultLength);
if (caseDescListCount == 3)
{
Assert.AreEqual(caseDescList[0], strMinus1 ?? string.Empty);
Assert.AreEqual(caseDescList[1], strMinus2 ?? string.Empty);
Assert.AreEqual(caseDescList[2], strMinus3 ?? string.Empty);
}
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
// Verifies that AddSpecialCase is rejected on the database-backed inner
// description (its case list is derived, not user-editable): the call must
// throw, and no property-change notification may fire.
[Test]
public static void AddSpecialCaseTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
// Record every notification raised by the instance and its SpecialArgCaseList.
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
var argCase = new CommonEventSpecialArgCase(0, "");
instance.AddSpecialCase(argCase);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// An error should occur.
Assert.IsTrue(errorOccured);
// No property-change notification should have fired.
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
[Test]
public static void AddRangeSpecialCaseTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
var argCaseList = new List<CommonEventSpecialArgCase>
{
new CommonEventSpecialArgCase(0, "")
};
instance.AddRangeSpecialCase(argCaseList);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生すること
Assert.IsTrue(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
[Test]
public static void InsertSpecialCaseTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
var argCase = new CommonEventSpecialArgCase(0, "");
instance.InsertSpecialCase(0, argCase);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生すること
Assert.IsTrue(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
[Test]
public static void InsertRangeSpecialCaseTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
var argCaseList = new List<CommonEventSpecialArgCase>
{
new CommonEventSpecialArgCase(0, "")
};
instance.InsertRangeSpecialCase(0, argCaseList);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生すること
Assert.IsTrue(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
private static readonly object[] UpdateDatabaseSpecialCaseTestCaseSource =
{
new object[] {true, -4, null, true},
new object[] {true, -4, "", true},
new object[] {true, -4, "abc", true},
new object[] {true, -4, "あいうえお", true},
new object[] {true, -4, "New\r\nLine\r\nCRLF", true},
new object[] {true, -4, "New\nLine\nLF", true},
new object[] {true, -3, null, true},
new object[] {true, -3, "", false},
new object[] {true, -3, "abc", false},
new object[] {true, -3, "あいうえお", false},
new object[] {true, -3, "New\r\nLine\r\nCRLF", true},
new object[] {true, -3, "New\nLine\nLF", true},
new object[] {true, -1, null, true},
new object[] {true, -1, "", false},
new object[] {true, -1, "abc", false},
new object[] {true, -1, "あいうえお", false},
new object[] {true, -1, "New\r\nLine\r\nCRLF", true},
new object[] {true, -1, "New\nLine\nLF", true},
new object[] {true, 0, null, true},
new object[] {true, 0, "", true},
new object[] {true, 0, "abc", true},
new object[] {true, 0, "あいうえお", true},
new object[] {true, 0, "New\r\nLine\r\nCRLF", true},
new object[] {true, 0, "New\nLine\nLF", true},
new object[] {false, -4, null, true},
new object[] {false, -4, "", true},
new object[] {false, -4, "abc", true},
new object[] {false, -4, "あいうえお", true},
new object[] {false, -4, "New\r\nLine\r\nCRLF", true},
new object[] {false, -4, "New\nLine\nLF", true},
new object[] {false, -3, null, true},
new object[] {false, -3, "", false},
new object[] {false, -3, "abc", false},
new object[] {false, -3, "あいうえお", false},
new object[] {false, -3, "New\r\nLine\r\nCRLF", true},
new object[] {false, -3, "New\nLine\nLF", true},
new object[] {false, -1, null, true},
new object[] {false, -1, "", false},
new object[] {false, -1, "abc", false},
new object[] {false, -1, "あいうえお", false},
new object[] {false, -1, "New\r\nLine\r\nCRLF", true},
new object[] {false, -1, "New\nLine\nLF", true},
new object[] {false, 0, null, true},
new object[] {false, 0, "", true},
new object[] {false, 0, "abc", true},
new object[] {false, 0, "あいうえお", true},
new object[] {false, 0, "New\r\nLine\r\nCRLF", true},
new object[] {false, 0, "New\nLine\nLF", true},
};
// Verifies UpdateDatabaseSpecialCase: only case numbers -1..-3 with a
// non-null, single-line description are accepted. On success exactly one
// Replace notification must fire on SpecialArgCaseList (indexer change);
// on failure no notification may fire at all.
[TestCaseSource(nameof(UpdateDatabaseSpecialCaseTestCaseSource))]
public static void UpdateDatabaseSpecialCaseTest(bool isUseAddition,
int caseNumber, string description, bool isError)
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
instance.SetDatabaseUseAdditionalItemsFlag(isUseAddition);
// Record every notification raised by the instance and its SpecialArgCaseList.
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
instance.UpdateDatabaseSpecialCase(caseNumber, description);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// The error flag should match the expected value.
Assert.AreEqual(errorOccured, isError);
// Property-change notifications should have fired exactly as intended.
if (isError)
{
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
else
{
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 1);
Assert.IsTrue(changedSpecialArgCaseListPropertyList[0]
.Equals(ListConstant.IndexerName));
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 1);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList[0].Action,
NotifyCollectionChangedAction.Replace);
}
}
[Test]
public static void UpdateManualSpecialCaseTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
var argCase = new CommonEventSpecialArgCase(0, "");
instance.UpdateManualSpecialCase(-1, argCase);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生すること
Assert.IsTrue(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
[Test]
public static void RemoveSpecialCaseAtTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
instance.RemoveSpecialCaseAt(0);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生すること
Assert.IsTrue(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
[Test]
public static void RemoveSpecialCaseRangeTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
instance.RemoveSpecialCaseRange(0, 1);
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生すること
Assert.IsTrue(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
[Test]
public static void ClearSpecialCaseTest()
{
var instance = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
var changedDescPropertyList = new List<string>();
instance.PropertyChanged += (sender, args) => { changedDescPropertyList.Add(args.PropertyName); };
var changedSpecialArgCaseListPropertyList = new List<string>();
instance.SpecialArgCaseList.PropertyChanged += (sender, args) =>
{
changedSpecialArgCaseListPropertyList.Add(args.PropertyName);
};
var changedSpecialArgCaseListCollectionArgList = new List<NotifyCollectionChangedEventArgs>();
instance.SpecialArgCaseList.CollectionChanged += (sender, args) =>
{
changedSpecialArgCaseListCollectionArgList.Add(args);
};
var errorOccured = false;
try
{
instance.ClearSpecialCase();
}
catch (Exception ex)
{
logger.Exception(ex);
errorOccured = true;
}
// エラーが発生すること
Assert.IsTrue(errorOccured);
// プロパティ変更通知が発火していないこと
Assert.AreEqual(changedDescPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListPropertyList.Count, 0);
Assert.AreEqual(changedSpecialArgCaseListCollectionArgList.Count, 0);
}
// Verifies that a deep clone of a configured instance compares equal to the
// original and that cloning raises no property-change notifications.
[Test]
public static void SerializeTest()
{
var target = new CommonEventSpecialNumberArgDesc.InnerDescDatabase();
target.SetDatabaseRefer(DBKind.System, 20);
var changedPropertyList = new List<string>();
target.PropertyChanged += (sender, args) => { changedPropertyList.Add(args.PropertyName); };
var clone = DeepCloner.DeepClone(target);
Assert.IsTrue(clone.Equals(target));
// No property-change notification should have fired.
Assert.AreEqual(changedPropertyList.Count, 0);
}
}
}
| 40.708845
| 116
| 0.564203
|
df1c7de13f8f4109069cb8e02a45dddd18d2c1d3
| 702
|
rb
|
Ruby
|
lib/simple-info/tag.rb
|
kubenstein/simple-info
|
52b7b847102f66619df5d66997a5fbfc282f98f0
|
[
"MIT"
] | 1
|
2015-10-16T12:34:04.000Z
|
2015-10-16T12:34:04.000Z
|
lib/simple-info/tag.rb
|
kubenstein/simple-info
|
52b7b847102f66619df5d66997a5fbfc282f98f0
|
[
"MIT"
] | null | null | null |
lib/simple-info/tag.rb
|
kubenstein/simple-info
|
52b7b847102f66619df5d66997a5fbfc282f98f0
|
[
"MIT"
] | null | null | null |
module SimpleInfo
  # Wraps git commands that store free-form info in an annotated tag
  # attached to the repository's root commit.
  class Tag
    TAG_NAME = 'simple_info_storage'

    # (Re)creates the annotated info tag on the root commit; git opens the
    # user's editor so the tag message can be edited interactively.
    def edit
      execute("git tag -f -a #{TAG_NAME} `git log --format=%H | tail -1`")
    end

    # Returns the tag's message body, or an empty string when the tag
    # does not exist.
    def show
      return '' unless info_tag_exists?

      raw = execute("git cat-file tag #{TAG_NAME}")
      raw.split("\n\n")[1].strip
    end

    # Fetches the info tag from the given remote.
    def pull(remote)
      execute("git fetch #{remote} tag #{TAG_NAME}")
    end

    # Force-pushes the info tag to the given remote.
    def push(remote)
      execute("git push --force #{remote} refs/tags/#{TAG_NAME}:refs/tags/#{TAG_NAME}")
    end

    private

    def info_tag_exists?
      execute("git tag").include? TAG_NAME
    end

    # Commands containing '-a' need a TTY (editor), so they run through
    # Kernel.system; everything else runs via backticks to capture output.
    def execute(command)
      if command.include?('-a')
        Kernel.system(command)
      else
        `#{command}`
      end
    end
  end
end
| 20.057143
| 87
| 0.603989
|
4ba51fe2d0e110efb1d029bbed86e7d898449a8e
| 3,889
|
rs
|
Rust
|
nes/src/emulator/test/mod.rs
|
DiscoViking/mos-6500
|
fba56c079832d8ee0656b75a1663b1a60456b417
|
[
"MIT"
] | 3
|
2018-12-06T07:40:00.000Z
|
2020-03-05T23:49:38.000Z
|
nes/src/emulator/test/mod.rs
|
DiscoViking/mos-6500
|
fba56c079832d8ee0656b75a1663b1a60456b417
|
[
"MIT"
] | 36
|
2018-10-23T05:34:07.000Z
|
2018-11-10T15:08:49.000Z
|
nes/src/emulator/test/mod.rs
|
DiscoViking/nes
|
fba56c079832d8ee0656b75a1663b1a60456b417
|
[
"MIT"
] | null | null | null |
mod image_capture;
mod instr_misc;
mod instr_test_v5;
mod instr_timing;
mod mappers;
mod nestest;
mod ppu_sprite_hit;
mod ppu_sprite_overflow;
use std::cell::RefCell;
use std::env;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use md5::{Digest, Md5};
use crate::emulator::ines;
use crate::emulator::io;
use crate::emulator::io::event::EventBus;
use crate::emulator::NES;
use image_capture::ImageCapture;
/// Ticks the emulator repeatedly until at least `cycles` cycles have elapsed.
fn run_for(nes: &mut NES, cycles: u64) {
    let mut elapsed: u64 = 0;
    loop {
        if elapsed > cycles {
            break;
        }
        elapsed += nes.tick();
    }
}
/// Loads the ROM at `path` and wires up a NES with a shared event bus,
/// an image-capturing screen, and a dummy audio sink for end-to-end tests.
fn prepare_ete_test<P: AsRef<Path>>(path: P) -> (NES, Rc<RefCell<EventBus>>, ImageCapture) {
    let cartridge = ines::ROM::load(path);
    let events = Rc::new(RefCell::new(EventBus::new()));
    let screen = Rc::new(RefCell::new(io::Screen::new()));
    let dummy_audio = io::nop::DummyAudio {};
    let capture = ImageCapture::new(screen.clone());
    let nes = NES::new(events.clone(), screen, dummy_audio, cartridge);
    (nes, events, capture)
}
/// Loads a blargg test ROM and runs it with the default cycle budget
/// (100 million cycles), returning the final status byte and text output.
fn load_and_run_blargg_test_rom<P: AsRef<Path>>(rom_path: P) -> (u8, String) {
    load_and_run_blargg_test_rom_with_cycles(rom_path, 100_000_000)
}
/// Loads a blargg test ROM and runs it for at most `max_cycles`, returning
/// the final status byte ($6000) and the test's text output ($6004...).
fn load_and_run_blargg_test_rom_with_cycles<P: AsRef<Path>>(
    rom_path: P,
    max_cycles: u64,
) -> (u8, String) {
    let (mut nes, _, _) = prepare_ete_test(rom_path);
    run_blargg_test_rom(&mut nes, max_cycles)
}
/// Drives a blargg test ROM to completion using its $6000 status-byte
/// protocol: 0x80 means "running"; any other value is the final result.
/// Panics (with the collected output) if the test fails to start within
/// 20M cycles or to finish within `max_cycles`.
fn run_blargg_test_rom(nes: &mut NES, max_cycles: u64) -> (u8, String) {
    let mut cycles = 0;
    // Run until the status byte says the test is running.
    let mut status = nes.cpu.borrow_mut().load_memory(0x6000);
    while status != 0x80 {
        cycles += nes.tick();
        status = nes.cpu.borrow_mut().load_memory(0x6000);
        if cycles > 20_000_000 {
            panic!(
                "Test took too long to start. Gave up after {} cycles.",
                cycles
            );
        }
    }
    // Run until completion.
    while status == 0x80 {
        cycles += nes.tick();
        status = nes.cpu.borrow_mut().load_memory(0x6000);
        // NOTE(review): this extra increment double-counts on top of
        // `cycles += nes.tick()` above and is absent from the startup loop —
        // looks like a leftover; confirm whether the timeout budget relies on it.
        cycles += 1;
        if cycles > max_cycles {
            let output = collect_test_output(nes);
            panic!(
                "Test took too long to end. Gave up after {} cycles. Current output: {}",
                cycles, output
            );
        }
    }
    let output = collect_test_output(nes);
    println!("{}", output);
    (status, output)
}
/// Reads the NUL-terminated ASCII output that blargg test ROMs write
/// starting at $6004 (capped at 1000 bytes) and returns it as a `String`.
/// Panics if the bytes are not valid UTF-8.
fn collect_test_output(nes: &mut NES) -> String {
    // Collect output.
    let mut text_buf = vec![];
    for ix in 0..1000 {
        let byte = nes.cpu.borrow_mut().load_memory(0x6004 + ix);
        if byte == 0x00 {
            break;
        }
        text_buf.push(byte);
    }
    // Idiomatic combinator instead of an explicit match; message unchanged.
    String::from_utf8(text_buf)
        .unwrap_or_else(|cause| panic!("Error converting output to string: {}", cause))
}
/// Saves the captured frame as a BMP in the system temp directory (named
/// after the expected file) and asserts its MD5 digest matches the
/// reference image's digest.
pub fn assert_image(capture: &ImageCapture, exp_file: PathBuf) {
    // env::temp_dir() already returns an owned PathBuf; the original
    // bound it to a local and cloned it needlessly.
    let mut out_file = env::temp_dir();
    out_file.push(exp_file.file_name().unwrap());
    capture.save_bmp(&out_file);
    println!("Saving image to tempfile at: {}", out_file.display());
    assert_eq!(file_digest(out_file), file_digest(exp_file));
}
pub fn test_resource_path(name: &str) -> PathBuf {
let mut buf = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
buf.push("src/emulator/test/resources/");
buf.push(name);
buf
}
/// Returns the base64-encoded MD5 digest of the file at `path`.
/// Panics if the file cannot be opened or read.
pub fn file_digest<P: AsRef<Path>>(path: P) -> String {
    // Idiomatic combinators instead of verbose matches (one of which
    // pattern-matched `Ok(_) => ()` just to discard); messages unchanged.
    let mut file = File::open(&path)
        .unwrap_or_else(|cause| panic!("Couldn't open file: {}", cause));
    let mut contents = vec![];
    file.read_to_end(&mut contents)
        .unwrap_or_else(|cause| panic!("Couldn't read file: {}", cause));
    let mut hasher = Md5::new();
    hasher.input(contents);
    base64::encode(&hasher.result())
}
| 27.195804
| 92
| 0.602469
|
da87ddd2a61501ecb78b86613bec31b34d086be9
| 298
|
php
|
PHP
|
application/config/Parameters/Crm/preview.php
|
eelco2k/repucaution
|
0579b7d3c56b0be1ee7bbf3e8e7ced5f27f65665
|
[
"CC0-1.0"
] | 23
|
2016-11-09T12:18:32.000Z
|
2022-03-29T15:16:31.000Z
|
application/config/Parameters/Crm/preview.php
|
DittoPardo/repucaution
|
0579b7d3c56b0be1ee7bbf3e8e7ced5f27f65665
|
[
"CC0-1.0"
] | null | null | null |
application/config/Parameters/Crm/preview.php
|
DittoPardo/repucaution
|
0579b7d3c56b0be1ee7bbf3e8e7ced5f27f65665
|
[
"CC0-1.0"
] | 19
|
2016-04-29T17:24:25.000Z
|
2021-06-28T18:24:27.000Z
|
<?php
/**
 * Limits on how many activities and directories are loaded and displayed
 * on the CRM client-activity (preview) page.
 */
$config['parameters.crm.preview.options'] = array(
'activities_limit' => 10,
'directories_limit' => 20
);
| 37.25
| 82
| 0.489933
|
ccbf4f2c5feb8effcb330529da8b3965cb69602c
| 614
|
rb
|
Ruby
|
app/services/actions/parse_and_update_categories.rb
|
TheLocusCo/locuscorev3
|
bfe602ae3e89df1c709f88794eb0e7ce0ecf48c5
|
[
"MIT"
] | null | null | null |
app/services/actions/parse_and_update_categories.rb
|
TheLocusCo/locuscorev3
|
bfe602ae3e89df1c709f88794eb0e7ce0ecf48c5
|
[
"MIT"
] | null | null | null |
app/services/actions/parse_and_update_categories.rb
|
TheLocusCo/locuscorev3
|
bfe602ae3e89df1c709f88794eb0e7ce0ecf48c5
|
[
"MIT"
] | 1
|
2019-09-04T19:26:02.000Z
|
2019-09-04T19:26:02.000Z
|
module Actions
  # LightService action: resolves the :categories params into Category
  # records and assigns them to the main object.
  class ParseAndUpdateCategories
    extend LightService::Action

    expects :params, :main_object
    promises :params, :main_object

    # Errors for invalid categories are caught in the application controller.
    executed do |context|
      # Nothing to do when no categories were submitted.
      next context unless context.params.key?(:categories) && !context.params[:categories].empty?

      # 'All Categories' is a UI pseudo-entry, not a real category.
      resolved = context.params[:categories]
                        .reject { |cat| cat[:name] == 'All Categories' }
                        .map { |cat| Category.find_or_create_by(cat) }
      context.main_object.categories = resolved
    end
  end
end
| 29.238095
| 94
| 0.672638
|
b79f065dd24efde05544cd69cfb7560c26402f21
| 573
|
lua
|
Lua
|
lua/entities/wm_bottle/cl_init.lua
|
BLEEPBLOOPforLife/Alcohol-Mod
|
05a9e60145e6ff8e7f7f32004993223cc8df1b26
|
[
"MIT"
] | 1
|
2021-11-18T20:35:07.000Z
|
2021-11-18T20:35:07.000Z
|
lua/entities/wm_bottle/cl_init.lua
|
eggroll-bot/alcohol-mod
|
05a9e60145e6ff8e7f7f32004993223cc8df1b26
|
[
"MIT"
] | 1
|
2016-12-31T14:57:34.000Z
|
2017-01-06T23:35:01.000Z
|
lua/entities/wm_bottle/cl_init.lua
|
BLEEPBLOOPforLife/Alcohol-Mod
|
05a9e60145e6ff8e7f7f32004993223cc8df1b26
|
[
"MIT"
] | null | null | null |
-- Client-side logic for the wm_bottle entity (Garry's Mod alcohol mod).
include( "shared.lua" )

-- No client-side setup is needed for the bottle.
function ENT:Initialize( )
end

-- Render the bottle with its assigned model.
function ENT:Draw( )
self:DrawModel( )
end

-- Server tells us to start the "drunk" screen effect for DrunkBlurTime
-- seconds; the one-shot timer's mere existence is what keeps the blur on.
net.Receive( "StartAMWineEffect", function( )
local DrunkBlurTime = net.ReadFloat()
-- Restart the effect window if one is already running.
if timer.Exists( "EggrollAMDrunkBlur" ) then
timer.Remove( "EggrollAMDrunkBlur" )
end
timer.Create( "EggrollAMDrunkBlur", DrunkBlurTime, 1, function( ) end )
end )

-- Draw motion blur every frame while the drunk timer is alive.
local function AMStartEffect()
if timer.Exists( "EggrollAMDrunkBlur" ) then
DrawMotionBlur( 0.03, 10, 0 )
end
end

hook.Add( "RenderScreenspaceEffects", "AMStartEffect", AMStartEffect )
| 22.038462
| 73
| 0.705061
|
052f329207beb0994eb0edbc5e1fb2b22eacc98e
| 100
|
rb
|
Ruby
|
lib/escape_java_properties/parsing.rb
|
trace-devops/escape_java_properties
|
928a8c45fe0d165268e9a04bcdc870fbf3931582
|
[
"MIT"
] | null | null | null |
lib/escape_java_properties/parsing.rb
|
trace-devops/escape_java_properties
|
928a8c45fe0d165268e9a04bcdc870fbf3931582
|
[
"MIT"
] | null | null | null |
lib/escape_java_properties/parsing.rb
|
trace-devops/escape_java_properties
|
928a8c45fe0d165268e9a04bcdc870fbf3931582
|
[
"MIT"
] | null | null | null |
require 'escape_java_properties/parsing/normalizer'
require 'escape_java_properties/parsing/parser'
| 33.333333
| 51
| 0.88
|
da6d2f97c87697cc2177c9e7317c4f9e12c42e6a
| 162
|
php
|
PHP
|
src/Entity/PickupPoint.php
|
errogaht/delivery-aggregator
|
477f9ba28d2bf6ebf0d6da7db16d79ed01a2f363
|
[
"MIT"
] | 3
|
2017-11-08T19:17:44.000Z
|
2022-01-23T10:08:51.000Z
|
src/Entity/PickupPoint.php
|
errogaht/delivery-aggregator
|
477f9ba28d2bf6ebf0d6da7db16d79ed01a2f363
|
[
"MIT"
] | null | null | null |
src/Entity/PickupPoint.php
|
errogaht/delivery-aggregator
|
477f9ba28d2bf6ebf0d6da7db16d79ed01a2f363
|
[
"MIT"
] | null | null | null |
<?php
/**
 * Created by PhpStorm.
 * User: errogaht
 * Date: 18.05.2017
 * Time: 17:34
 */

namespace Errogaht\DeliveryAggregator\Entity;

/**
 * Placeholder entity for a delivery pickup point.
 * No properties are defined yet — presumably to be fleshed out as the
 * aggregator gains carrier integrations (TODO: confirm intended shape).
 */
class PickupPoint
{
}
| 10.8
| 45
| 0.67284
|
da4c4ad1eb5fff00a1eb9bb873a205b0e4e0f95a
| 993
|
php
|
PHP
|
resources/views/install/done.blade.php
|
swagat-dash/mc-sms-mail
|
091a3094b0b8755a9a902fb908e362fbb647b419
|
[
"MIT"
] | null | null | null |
resources/views/install/done.blade.php
|
swagat-dash/mc-sms-mail
|
091a3094b0b8755a9a902fb908e362fbb647b419
|
[
"MIT"
] | null | null | null |
resources/views/install/done.blade.php
|
swagat-dash/mc-sms-mail
|
091a3094b0b8755a9a902fb908e362fbb647b419
|
[
"MIT"
] | null | null | null |
@extends('../layout/side-menu')
@section('head')
<title>Swagmail - Email & SMS Marketing Application</title>
@endsection
@section('content')
<div class="container">
<!-- BEGIN: Congratulations Page -->
<div class="page flex flex-col lg:flex-row items-center justify-center h-screen text-center lg:text-left">
<div class="-intro-x lg:mr-20 mb-4">
<img alt="#swagmail" class="w-full" src="{{ asset('congo.png') }}">
</div>
<div class="text-white mt-10 lg:mt-0">
<div class="intro-x text-4xl font-medium">Swagmail - Email & SMS Marketing Application</div>
<a href="{{route('frontend.index')}}" class="button w-full inline-block text-xl px-5 py-4 mr-1 mb-2 border text-white-700 dark:bg-dark-5 dark:text-white-300">Lets Start Swagmail</a>
<h2>SwagatDash.com</h2>
</div>
</div>
<!-- END: Congratulations Page -->
</div>
@endsection
| 41.375
| 198
| 0.581067
|
7bfe9d91357541f1b1fb30f6d6c7e9cfe3598492
| 8,374
|
rb
|
Ruby
|
lib/money_s3/parsers/pol_objedn_type.rb
|
Masa331/moneys3
|
97304d5500d407ff724ae73bcff1fd79a67e3a66
|
[
"MIT"
] | 4
|
2018-02-07T19:30:53.000Z
|
2020-10-16T16:27:01.000Z
|
lib/money_s3/parsers/pol_objedn_type.rb
|
Masa331/moneys3
|
97304d5500d407ff724ae73bcff1fd79a67e3a66
|
[
"MIT"
] | 1
|
2020-04-25T18:22:37.000Z
|
2020-04-25T18:22:37.000Z
|
lib/money_s3/parsers/pol_objedn_type.rb
|
Masa331/moneys3
|
97304d5500d407ff724ae73bcff1fd79a67e3a66
|
[
"MIT"
] | 1
|
2021-04-20T23:15:19.000Z
|
2021-04-20T23:15:19.000Z
|
module MoneyS3
module Parsers
class PolObjednType
include ParserCore::BaseParser
# Text content of the 'Popis' (description) element.
def popis
at 'Popis'
end

# XML attributes of the 'Popis' element.
def popis_attributes
attributes_at 'Popis'
end
def poznamka
at 'Poznamka'
end
def poznamka_attributes
attributes_at 'Poznamka'
end
def pocet_mj
at 'PocetMJ'
end
def pocet_mj_attributes
attributes_at 'PocetMJ'
end
def zbyva_mj
at 'ZbyvaMJ'
end
def zbyva_mj_attributes
attributes_at 'ZbyvaMJ'
end
def cena
at 'Cena'
end
def cena_attributes
attributes_at 'Cena'
end
def souhrn_dph
submodel_at(SouhrnDPHPolType, 'SouhrnDPH')
end
def sazba_dph
at 'SazbaDPH'
end
def sazba_dph_attributes
attributes_at 'SazbaDPH'
end
def typ_ceny
at 'TypCeny'
end
def typ_ceny_attributes
attributes_at 'TypCeny'
end
def sleva
at 'Sleva'
end
def sleva_attributes
attributes_at 'Sleva'
end
def vystaveno
at 'Vystaveno'
end
def vystaveno_attributes
attributes_at 'Vystaveno'
end
def vyridit_nej
at 'VyriditNej'
end
def vyridit_nej_attributes
attributes_at 'VyriditNej'
end
def vyridit_do
at 'Vyridit_do'
end
def vyridit_do_attributes
attributes_at 'Vyridit_do'
end
def vyrizeno
at 'Vyrizeno'
end
def vyrizeno_attributes
attributes_at 'Vyrizeno'
end
def poradi
at 'Poradi'
end
def poradi_attributes
attributes_at 'Poradi'
end
def stredisko
at 'Stredisko'
end
def stredisko_attributes
attributes_at 'Stredisko'
end
def zakazka
at 'Zakazka'
end
def zakazka_attributes
attributes_at 'Zakazka'
end
def cinnost
at 'Cinnost'
end
def cinnost_attributes
attributes_at 'Cinnost'
end
def cenova_hlad
at 'CenovaHlad'
end
def cenova_hlad_attributes
attributes_at 'CenovaHlad'
end
def valuty
at 'Valuty'
end
def valuty_attributes
attributes_at 'Valuty'
end
def kod_statu_puv
at 'KodStatuPuv'
end
def kod_statu_puv_attributes
attributes_at 'KodStatuPuv'
end
def typ_transakce
at 'TypTransakce'
end
def typ_transakce_attributes
attributes_at 'TypTransakce'
end
def hmotnost
at 'Hmotnost'
end
def hmotnost_attributes
attributes_at 'Hmotnost'
end
def zvl_rezim
at 'ZvlRezim'
end
def zvl_rezim_attributes
attributes_at 'ZvlRezim'
end
def zvl_dph
at 'ZvlDPH'
end
def zvl_dph_attributes
attributes_at 'ZvlDPH'
end
def rezim_eet
at 'RezimEET'
end
def rezim_eet_attributes
attributes_at 'RezimEET'
end
def pred_pc
at 'PredPC'
end
def pred_pc_attributes
attributes_at 'PredPC'
end
def predm_pln
at 'PredmPln'
end
def predm_pln_attributes
attributes_at 'PredmPln'
end
def cena_po_sleve
at 'CenaPoSleve'
end
def cena_po_sleve_attributes
attributes_at 'CenaPoSleve'
end
def sklad
submodel_at(SkladType, 'Sklad')
end
def km_karta
submodel_at(KmKartaType, 'KmKarta')
end
def seznam_vc
array_of_at(VyrobniCisloType, ['SeznamVC', 'VyrobniCislo'])
end
def slozeni
array_of_at(SubPolObjType, ['Slozeni', 'SubPolozka'])
end
def neskl_polozka
submodel_at(NesklPolozka2, 'NesklPolozka')
end
def uzivatelska_pole
at 'UzivatelskaPole'
end
def uzivatelska_pole_attributes
attributes_at 'UzivatelskaPole'
end
# Serialize this element to a Hash. Only elements present in the parsed
# document (per `has?`) are emitted. Representations:
#   :plain      -> value plus a "<key>_attributes" entry
#   :model      -> submodel converted with #to_h (no attributes entry)
#   :collection -> array of submodels, each converted with #to_h
def to_h
  # Ordered spec: [hash key, XML element name, representation].
  spec = [
    [:popis, 'Popis', :plain],
    [:poznamka, 'Poznamka', :plain],
    [:pocet_mj, 'PocetMJ', :plain],
    [:zbyva_mj, 'ZbyvaMJ', :plain],
    [:cena, 'Cena', :plain],
    [:souhrn_dph, 'SouhrnDPH', :model],
    [:sazba_dph, 'SazbaDPH', :plain],
    [:typ_ceny, 'TypCeny', :plain],
    [:sleva, 'Sleva', :plain],
    [:vystaveno, 'Vystaveno', :plain],
    [:vyridit_nej, 'VyriditNej', :plain],
    [:vyridit_do, 'Vyridit_do', :plain],
    [:vyrizeno, 'Vyrizeno', :plain],
    [:poradi, 'Poradi', :plain],
    [:stredisko, 'Stredisko', :plain],
    [:zakazka, 'Zakazka', :plain],
    [:cinnost, 'Cinnost', :plain],
    [:cenova_hlad, 'CenovaHlad', :plain],
    [:valuty, 'Valuty', :plain],
    [:kod_statu_puv, 'KodStatuPuv', :plain],
    [:typ_transakce, 'TypTransakce', :plain],
    [:hmotnost, 'Hmotnost', :plain],
    [:zvl_rezim, 'ZvlRezim', :plain],
    [:zvl_dph, 'ZvlDPH', :plain],
    [:rezim_eet, 'RezimEET', :plain],
    [:pred_pc, 'PredPC', :plain],
    [:predm_pln, 'PredmPln', :plain],
    [:cena_po_sleve, 'CenaPoSleve', :plain],
    [:sklad, 'Sklad', :model],
    [:km_karta, 'KmKarta', :model],
    [:seznam_vc, 'SeznamVC', :collection],
    [:slozeni, 'Slozeni', :collection],
    [:neskl_polozka, 'NesklPolozka', :model],
    [:uzivatelska_pole, 'UzivatelskaPole', :plain]
  ]

  hash = { attributes: attributes }
  spec.each do |key, element, kind|
    next unless has? element
    case kind
    when :plain
      hash[key] = send(key)
      hash[:"#{key}_attributes"] = send(:"#{key}_attributes")
    when :model
      hash[key] = send(key).to_h
    when :collection
      hash[key] = send(key).map(&:to_h)
    end
  end
  hash
end
end
end
end
| 25.766154
| 98
| 0.621686
|
b0674e18143cdd4dde785b9d81bf6ebcff85810a
| 4,393
|
py
|
Python
|
sentiment_analyser/sentiment_analyser.py
|
JanelleTang/COMP90024_Assignment_2
|
c6ef17f8d20ebca3c7f525a4d0c9483f1002a78c
|
[
"Apache-2.0"
] | 1
|
2021-06-04T11:11:22.000Z
|
2021-06-04T11:11:22.000Z
|
sentiment_analyser/sentiment_analyser.py
|
JanelleTang/COMP90024_Assignment_2
|
c6ef17f8d20ebca3c7f525a4d0c9483f1002a78c
|
[
"Apache-2.0"
] | null | null | null |
sentiment_analyser/sentiment_analyser.py
|
JanelleTang/COMP90024_Assignment_2
|
c6ef17f8d20ebca3c7f525a4d0c9483f1002a78c
|
[
"Apache-2.0"
] | null | null | null |
# ============= COMP90024 - Assignment 2 ============= #
#
# The University of Melbourne
# Team 37
#
# ** Authors: **
#
# JJ Burke 1048105
# Janelle Tang 694209
# Shuang Qiu 980433
# Declan Baird-Watson 640975
# Avinash Rao 1024577
#
# Location: Melbourne
# ====================================================
from nltk.corpus import stopwords
import nltk
import re
from nltk.sentiment import SentimentIntensityAnalyzer
from nltk.tokenize import word_tokenize
from nltk.stem.porter import PorterStemmer
import string
from geopy.geocoders import Nominatim
from geopy.exc import GeocoderTimedOut
# Fetch the NLTK data sets used below (tokenizer models, stop-word lists,
# VADER lexicon); nltk.download skips anything already present on disk.
nltk.download([
    "names",
    "stopwords",
    "state_union",
    "twitter_samples",
    "movie_reviews",
    "averaged_perceptron_tagger",
    "vader_lexicon",
    "punkt",
])
# Module-level singletons shared by every CleanTweet instance.
sia = SentimentIntensityAnalyzer()  # VADER sentiment scorer
geolocator = Nominatim(user_agent="tweets")  # Nominatim geocoding client
stemmer = PorterStemmer()  # stemmer applied in CleanTweet.process_text
## Check geo tag ##
class CleanTweet:
    """Clean a raw tweet's text, score its sentiment and resolve its location.

    Attributes:
        cleaned_text: normalised, tokenised, stemmed text.
        hashtags: lower-cased hashtag list.
        sentiment_dict / sentiment_value: VADER scores (compound in [-1, 1]).
        lga, city, state: location labels, or None when the tweet could not
            be resolved to an Australian location (get_dict then returns None).
    """

    def __init__(self, text, location, hashtags, coordinates=False):
        self.cleaned_text = self.process_text(text)
        self.hashtags = [x.lower() for x in hashtags]
        self.sentiment_dict = sia.polarity_scores(self.cleaned_text)
        self.sentiment_value = self.sentiment_dict['compound']
        location = self.get_lga(location, coordinates)
        if location is not None:
            self.lga, self.city, self.state = location
        else:
            # BUG FIX: previously only city was set here, leaving lga/state
            # undefined attributes on unlocatable tweets.
            self.lga = None
            self.city = None
            self.state = None

    def __str__(self):
        # BUG FIX: previously printed (so __str__ returned None, making
        # str(obj) raise TypeError) and the format string had no placeholder.
        return "This tweet has sentiment: {}".format(self.sentiment_dict["compound"])

    def process_text(self, text):
        """Lower-case, strip URLs/mentions/hashtags/digits/punctuation,
        tokenise, drop English stop words and stem the remaining tokens."""
        text = text.lower()
        # Remove URLs.
        text = re.sub(r"http\S+|www\S+|https\S+", '', text, flags=re.MULTILINE)
        # Remove @mentions and #hashtags. (Trailing empty alternative '|'
        # removed from the original pattern; it matched the empty string.)
        text = re.sub(r'\@\w+|\#\w+', '', text)
        # Remove digits.
        text = ''.join([x for x in text if not x.isdigit()])
        # Remove punctuation.
        text = text.translate(str.maketrans('', '', string.punctuation))
        tweet_tokens = word_tokenize(text)
        processed_tweet = [x for x in tweet_tokens if x not in stopwords.words('english')]
        stemmed_tweet = [stemmer.stem(x) for x in processed_tweet]
        return " ".join(stemmed_tweet)

    def get_lga(self, input, coordinates=False):
        """Convert a user location string or geotag into [lga, city, state].

        When coordinates is True, input is indexed as input[1], input[0]
        (presumably a [lon, lat] pair — confirm against the caller).
        Returns None for non-Australian or unresolvable locations.
        """
        try:
            if coordinates:
                location = geolocator.reverse("" + str(input[1]) + ", " + str(input[0]), addressdetails=True, timeout=1000)
                # BUG FIX: was '...{}'.location — attribute access on a str.
                print('coordinates location {}'.format(location))
            else:
                location = geolocator.geocode(input, addressdetails=True, timeout=1000)
        except GeocoderTimedOut as e:
            # BUG FIX: Python-3 exceptions have no .message; also propagate
            # the coordinates flag, which the original retry dropped.
            # NOTE(review): unbounded retry on repeated timeouts — consider a cap.
            print("Error: geocode failed on input %s with message %s" % (input, e))
            return self.get_lga(input, coordinates)
        if location is None:
            return None
        address = location.raw['address']
        if len(address) < 3:
            return None
        # Missing 'country' or any non-Australian country -> unusable.
        if address.get('country') != "Australia":
            return None
        if 'town' in address:
            # Drop the municipality entry for towns before the LGA lookup.
            address.pop('municipality', None)
        try:
            lga = address["municipality"].lower()
            city = address["city"].lower()
            state = address["state"].lower()
        except KeyError:
            try:
                # No municipality -> keep city/state with a sentinel LGA.
                city = address["city"].lower()
                lga = "no_lga"
                state = address["state"].lower()
            except KeyError:
                return None
        return [lga, city, state]

    def get_dict(self):
        """Return the aggregation-ready dict, or None when unlocatable."""
        if self.city is None:
            return None
        result = {"lga": self.lga,
                  "city": self.city,
                  "state": self.state,
                  "hashtags": self.hashtags_to_dict(),
                  'aggregate_data': {"total_sentiment": self.sentiment_value,
                                     "total_tweets": 1}}
        return result

    def hashtags_to_dict(self):
        """Map each hashtag to the number of times it occurs."""
        results = {}
        for tag in self.hashtags:
            results[tag] = self.hashtags.count(tag)
        return results
| 30.296552
| 115
| 0.563852
|
7f151484d3fd3f042016a79c2ea7e4f3b7ea6b64
| 2,187
|
cs
|
C#
|
DotNet/DotNet/UE4/Generated/MovieScene/MovieSceneEvaluationTrack.cs
|
UE4DotNet/Plugin
|
08240b2a965904a69b8ef680d94261f300cf1652
|
[
"MIT"
] | 2
|
2021-04-25T03:00:29.000Z
|
2021-09-17T03:14:54.000Z
|
DotNet/DotNet/UE4/Generated/MovieScene/MovieSceneEvaluationTrack.cs
|
UE4DotNet/Plugin
|
08240b2a965904a69b8ef680d94261f300cf1652
|
[
"MIT"
] | null | null | null |
DotNet/DotNet/UE4/Generated/MovieScene/MovieSceneEvaluationTrack.cs
|
UE4DotNet/Plugin
|
08240b2a965904a69b8ef680d94261f300cf1652
|
[
"MIT"
] | 2
|
2019-08-08T02:09:16.000Z
|
2021-09-17T03:14:56.000Z
|
// Copyright 2018 by JCoder58. See License.txt for license
// Auto-generated --- Do not modify.
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UE4.Core;
using UE4.CoreUObject;
using UE4.CoreUObject.Native;
using UE4.InputCore;
using UE4.Native;
namespace UE4.MovieScene{
    ///<summary>Evaluation track that is stored within an evaluation template for a sequence.</summary>
    ///<remarks>Contains user-defined evaluation templates, and an optional track implementation</remarks>
    // Auto-generated explicit-layout mirror of the native UE4 struct; field
    // offsets must match the native layout exactly, so do not reorder fields.
    [StructLayout( LayoutKind.Explicit, Size=248 )]
    public unsafe struct MovieSceneEvaluationTrack {
        ///<summary>ID of the possessable or spawnable within the UMovieScene this track belongs to, if any. Zero guid where this relates to a master track.</summary>
        [FieldOffset(0)] FGuid ObjectBindingID;
        // Generator placeholders: the native types (uint16 / enum) are not yet
        // emitted, so only the first byte of each value is exposed here.
        [FieldOffset(16)] byte EvaluationPriority; //TODO: numeric uint16 EvaluationPriority
        [FieldOffset(18)] byte EvaluationMethod; //TODO: enum EEvaluationMethod EvaluationMethod
        ///<summary>Array of segmented ranges contained within the track.</summary>
        [FieldOffset(24)] MovieSceneEvaluationTrackSegments Segments;
        [FieldOffset(56)]
        private IntPtr SourceTrack_field;
        ///<summary>The movie scene track that created this evaluation track.</summary>
        public MovieSceneTrack SourceTrack {
            get {return SourceTrack_field;}
            set {SourceTrack_field = value;}
        }
        ///<summary>Evaluation tree specifying what happens at any given time.</summary>
        [FieldOffset(64)] SectionEvaluationDataTree EvaluationTree;
        // Generator placeholder: native TArray not yet mapped, first byte only.
        [FieldOffset(160)] byte ChildTemplates; //TODO: array TArray ChildTemplates
        ///<summary>Domain-specific track implementation override.</summary>
        [FieldOffset(176)] MovieSceneTrackImplementationPtr TrackTemplate;
        [FieldOffset(232)] public Name EvaluationGroup;
        // NOTE(review): both flags below share FieldOffset(244), so as C# bools
        // they alias the same byte — presumably native bitfields packed into one
        // byte that the generator could not split. Confirm against the generator
        // before relying on either flag independently.
        [FieldOffset(244)] public bool bEvaluateInPreroll;
        [FieldOffset(244)] public bool bEvaluateInPostroll;
    }
}
| 42.057692
| 170
| 0.690444
|