repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
RadeonOpenCompute/ROCm-OpenCL-Runtime
|
tests/ocltst/module/runtime/OCLDeviceAtomic.cpp
|
<filename>tests/ocltst/module/runtime/OCLDeviceAtomic.cpp
/* Copyright (c) 2010 - 2021 Advanced Micro Devices, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. */
#include "OCLDeviceAtomic.h"
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include "CL/cl.h"
static const cl_uint TotalElements = 256 * 1024 * 1024;
static const cl_uint ArraySize = 256;
static cl_uint hostArray[ArraySize];
#define KERNEL_CODE(...) #__VA_ARGS__
// OpenCL C source for the two subtests (indexed by the test id in open()).
// KERNEL_CODE stringizes its argument, so everything inside the parentheses is
// compiled by the OpenCL runtime compiler, not by the host compiler. Comments
// here are kept OUTSIDE the macro arguments so the kernel strings are unchanged.
const static char* strKernel[] = {
    // Subtest 0: every work-item of both kernels performs one device-scope
    // atomic increment of res[0]; the host checks the final count in run().
    KERNEL_CODE(
        \n __kernel void atomic_test1(__global uint* res) {
          __global atomic_uint* inc = (__global atomic_uint*)res;
          atomic_fetch_add_explicit(inc, 1, memory_order_acq_rel,
                                    memory_scope_device);
        }
        \n __kernel void atomic_test2(__global uint* res) {
          __global atomic_uint* inc = (__global atomic_uint*)res;
          atomic_fetch_add_explicit(inc, 1, memory_order_acq_rel,
                                    memory_scope_device);
        }
        \n),
    // Subtest 1: atomic_test1 (launched with a single work-item) polls the
    // first 256 uints of res with acquire loads until atomic_test2's work-item
    // 64,000,000 publishes its global id into res[0] with a release store.
    KERNEL_CODE(
        \n __kernel void atomic_test1(__global uint* res) {
          for (uint i = 0; i < 256 * 1024; ++i) {
            for (uint j = 0; j < 256; ++j) {
              __global atomic_uint* inc = (__global atomic_uint*)&res[j];
              uint val = atomic_load_explicit(inc, memory_order_acquire,
                                              memory_scope_device);
              if (0 != val) {
                res[1] = get_global_id(0);
                res[2] = i;
                return;
              }
            }
          }
        }
        \n __kernel void atomic_test2(__global uint* res) {
          if (get_global_id(0) == 64 * 1000 * 1000) {
            __global atomic_uint* inc = (__global atomic_uint*)res;
            // atomic_fetch_add_explicit(inc, 1, memory_order_acq_rel,
            // memory_scope_device);
            atomic_store_explicit(inc, get_global_id(0), memory_order_release,
                                  memory_scope_device);
          }
        }
        \n)};
// Initializes bookkeeping only; all CL objects are created later in open().
OCLDeviceAtomic::OCLDeviceAtomic()
    : hostQueue_(NULL), failed_(false), kernel2_(NULL) {
  _numSubTests = 2;  // one subtest per entry in strKernel[]
}
// CL resources are released in close(), so the destructor has nothing to do.
OCLDeviceAtomic::~OCLDeviceAtomic() {}
void OCLDeviceAtomic::open(unsigned int test, char* units, double& conversion,
unsigned int deviceId) {
OCLTestImp::open(test, units, conversion, deviceId);
CHECK_RESULT((error_ != CL_SUCCESS), "Error opening test");
testID_ = test;
size_t param_size = 0;
char* strVersion = 0;
error_ = _wrapper->clGetDeviceInfo(devices_[_deviceId], CL_DEVICE_VERSION, 0,
0, ¶m_size);
CHECK_RESULT(error_ != CL_SUCCESS, "clGetDeviceInfo failed");
strVersion = new char[param_size];
error_ = _wrapper->clGetDeviceInfo(devices_[_deviceId], CL_DEVICE_VERSION,
param_size, strVersion, 0);
CHECK_RESULT(error_ != CL_SUCCESS, "clGetDeviceInfo failed");
if (strVersion[7] < '2') {
failed_ = true;
return;
}
delete strVersion;
char dbuffer[1024] = {0};
program_ = _wrapper->clCreateProgramWithSource(context_, 1, &strKernel[test],
NULL, &error_);
CHECK_RESULT((error_ != CL_SUCCESS), "clCreateProgramWithSource() failed");
error_ = _wrapper->clBuildProgram(program_, 1, &devices_[deviceId],
"-cl-std=CL2.0", NULL, NULL);
if (error_ != CL_SUCCESS) {
char programLog[1024];
_wrapper->clGetProgramBuildInfo(program_, devices_[deviceId],
CL_PROGRAM_BUILD_LOG, 1024, programLog, 0);
printf("\n%s\n", programLog);
fflush(stdout);
}
CHECK_RESULT((error_ != CL_SUCCESS), "clBuildProgram() failed");
kernel_ = _wrapper->clCreateKernel(program_, "atomic_test1", &error_);
CHECK_RESULT((error_ != CL_SUCCESS), "clCreateKernel() failed");
kernel2_ = _wrapper->clCreateKernel(program_, "atomic_test2", &error_);
CHECK_RESULT((error_ != CL_SUCCESS), "clCreateKernel() failed");
cl_mem buffer;
memset(hostArray, 0, sizeof(hostArray));
buffer = _wrapper->clCreateBuffer(context_, CL_MEM_COPY_HOST_PTR,
sizeof(hostArray), &hostArray, &error_);
CHECK_RESULT((error_ != CL_SUCCESS), "clCreateBuffer() failed");
buffers_.push_back(buffer);
#if defined(CL_VERSION_2_0)
const cl_queue_properties cprops[] = {CL_QUEUE_PROPERTIES,
static_cast<cl_queue_properties>(0), 0};
hostQueue_ = _wrapper->clCreateCommandQueueWithProperties(
context_, devices_[deviceId], cprops, &error_);
CHECK_RESULT((error_ != CL_SUCCESS),
"clCreateCommandQueueWithProperties() failed");
#endif
}
// Context-error callback with the CL_CALLBACK signature; intentionally a
// no-op. NOTE(review): not referenced anywhere in the visible portion of this
// file — presumably kept for debugging; confirm before removing.
static void CL_CALLBACK notify_callback(const char* errinfo,
                                        const void* private_info, size_t cb,
                                        void* user_data) {}
// Launches atomic_test1 on the default queue and atomic_test2 on the second
// (host) queue, then validates the shared result buffer.
// Subtest 0: both kernels increment one device-scope counter; the final value
// must be exactly 2 * TotalElements.
// Subtest 1: a single-work-item kernel polls the counter until the other
// kernel's designated work-item stores a nonzero value into it.
void OCLDeviceAtomic::run(void) {
  if (failed_) return;  // skipped on pre-2.0 devices (see open())

  cl_mem buffer = buffers()[0];
  size_t gws[1] = {TotalElements};
  size_t gws2[1] = {1};
  size_t gws3[1] = {TotalElements};
  error_ = _wrapper->clSetKernelArg(kernel_, 0, sizeof(cl_mem), &buffer);
  CHECK_RESULT((error_ != CL_SUCCESS), "clSetKernelArg() failed");
  if (testID_ == 0) {
    error_ = _wrapper->clEnqueueNDRangeKernel(cmdQueues_[_deviceId], kernel_, 1,
                                              NULL, gws, NULL, 0, NULL, NULL);
    CHECK_RESULT((error_ != CL_SUCCESS), "clEnqueueNDRangeKernel() failed");
  } else {
    // The polling kernel runs as a single work-item.
    error_ = _wrapper->clEnqueueNDRangeKernel(cmdQueues_[_deviceId], kernel_, 1,
                                              NULL, gws2, NULL, 0, NULL, NULL);
    CHECK_RESULT((error_ != CL_SUCCESS), "clEnqueueNDRangeKernel() failed");
  }
  error_ = _wrapper->clSetKernelArg(kernel2_, 0, sizeof(cl_mem), &buffer);
  CHECK_RESULT((error_ != CL_SUCCESS), "clSetKernelArg() failed");
  if (testID_ == 0) {
    error_ = _wrapper->clEnqueueNDRangeKernel(hostQueue_, kernel2_, 1, NULL,
                                              gws, NULL, 0, NULL, NULL);
  } else {
    error_ = _wrapper->clEnqueueNDRangeKernel(hostQueue_, kernel2_, 1, NULL,
                                              gws3, NULL, 0, NULL, NULL);
  }
  CHECK_RESULT((error_ != CL_SUCCESS), "clEnqueueNDRangeKernel() failed");

  // Flush both queues so the two kernels can make progress concurrently
  // before blocking in clFinish().
  _wrapper->clFlush(cmdQueues_[_deviceId]);
  _wrapper->clFlush(hostQueue_);
  _wrapper->clFinish(cmdQueues_[_deviceId]);
  _wrapper->clFinish(hostQueue_);

  error_ = _wrapper->clEnqueueReadBuffer(hostQueue_, buffer, CL_TRUE, 0,
                                         sizeof(hostArray), hostArray, 0, NULL,
                                         NULL);
  CHECK_RESULT((error_ != CL_SUCCESS), "clEnqueueReadBuffer() failed");
  if (testID_ == 0) {
    // Every work-item on both queues incremented the counter exactly once.
    if (hostArray[0] != 2 * TotalElements) {
      // %u: hostArray holds cl_uint values (the old %d was mismatched).
      printf("Counter: %u, expected: %u\n", hostArray[0], 2 * TotalElements);
      CHECK_RESULT(true, "Incorrect result for device atomic inc!\n");
    }
  } else {
    printf("Value: %u, thread: %u, iter: %u\n", hostArray[0], hostArray[1],
           hostArray[2]);
    if (hostArray[0] == 0) {
      CHECK_RESULT(true, "Incorrect result for device atomic inc!\n");
    }
  }
}
// Tears down the objects this test created beyond the base fixture — the
// secondary command queue and the second kernel — then defers to the base
// class for the rest of the cleanup.
unsigned int OCLDeviceAtomic::close(void) {
  if (hostQueue_ != NULL) _wrapper->clReleaseCommandQueue(hostQueue_);
  if (kernel2_ != NULL) _wrapper->clReleaseKernel(kernel2_);
  return OCLTestImp::close();
}
|
bluedawnstar/algorithm_library
|
library/integer/grayCode.h
|
<filename>library/integer/grayCode.h
#pragma once
// https://e-maxx-eng.appspot.com/algebra/gray-code.html
// Returns the n-th Gray code (n = 0, 1, 2, ...); consecutive codes differ in
// exactly one bit.
inline int nthGrayCode(int n) {
    const int shifted = n >> 1;
    return n ^ shifted;
}
// Inverse transform: given g = nthGrayCode(n), recovers n by folding in every
// right shift of g.
inline int inverseGrayCode(int g) {
    int n = 0;
    while (g != 0) {
        n ^= g;
        g >>= 1;
    }
    return n;
}
|
siloutil/openmpi
|
opal/mca/pmix/pmix2x/pmix/src/mca/pnet/opa/pnet_opa.h
|
/*
* Copyright (c) 2015-2016 Intel, Inc. All rights reserved.
*
* $COPYRIGHT$
*
* Additional copyrights may follow
*
* $HEADER$
*/
#ifndef PMIX_PNET_OPA_H
#define PMIX_PNET_OPA_H

#include <src/include/pmix_config.h>

#include "src/mca/pnet/pnet.h"

BEGIN_C_DECLS

/* the component must be visible data for the linker to find it */
PMIX_EXPORT extern pmix_pnet_base_component_t mca_pnet_opa_component;

/* module implementing the pnet interface for the OPA component */
extern pmix_pnet_module_t pmix_opa_module;

END_C_DECLS

#endif
|
oshai/vertx-examples
|
core-examples/src/main/ruby/io/vertx/example/core/execblocking/exec_blocking_example.rb
|
# Demonstrates offloading blocking work from the Vert.x event loop with
# execute_blocking: the first block runs on a worker thread, the second runs
# back on the event loop with the result (or the failure).
$vertx.create_http_server().request_handler() { |request|
  # Let's say we have to call a blocking API (e.g. JDBC) to execute a query for each
  # request. We can't do this directly or it will block the event loop
  # But you can do this using executeBlocking:
  $vertx.execute_blocking(lambda { |future|
    # Do the blocking operation in here
    # Imagine this was a call to a blocking API to get the result
    begin
      Java::JavaLang::Thread.sleep(500)
    rescue
      # An interrupted sleep is fine for this example - fall through.
    end
    result = "armadillos!"
    future.complete(result)
  }) { |res_err,res|
    # Result handler: runs on the event loop once the worker block completes.
    if (res_err == nil)
      request.response().put_header("content-type", "text/plain").end(res)
    else
      res_err.print_stack_trace()
    end
  }
}.listen(8080)
|
AlhonGelios/AO
|
org/apache/poi/poifs/filesystem/DirectoryEntry.java
|
<filename>org/apache/poi/poifs/filesystem/DirectoryEntry.java
package org.apache.poi.poifs.filesystem;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import java.util.Set;
import org.apache.poi.hpsf.ClassID;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.Entry;
import org.apache.poi.poifs.filesystem.POIFSWriterListener;
/**
 * Read/write access to a directory node in a POIFS filesystem.
 * (Decompiled interface; raw collection types are preserved as-is.)
 */
public interface DirectoryEntry extends Entry, Iterable {
    /** @return an iterator over this directory's child entries */
    Iterator getEntries();

    /** @return the names of this directory's child entries */
    Set getEntryNames();

    /** @return true if this directory has no children */
    boolean isEmpty();

    /** @return the number of child entries */
    int getEntryCount();

    /** @return true if a child with the given name exists */
    boolean hasEntry(String var1);

    /** @return the named child entry @throws FileNotFoundException if absent */
    Entry getEntry(String var1) throws FileNotFoundException;

    /** Creates a document child from a stream. */
    DocumentEntry createDocument(String var1, InputStream var2) throws IOException;

    /** Creates a document child written lazily via the listener callback. */
    DocumentEntry createDocument(String var1, int var2, POIFSWriterListener var3) throws IOException;

    /** Creates a child directory with the given name. */
    DirectoryEntry createDirectory(String var1) throws IOException;

    /** @return the storage class id (CLSID) of this directory */
    ClassID getStorageClsid();

    /** Sets the storage class id (CLSID) of this directory. */
    void setStorageClsid(ClassID var1);
}
|
elvinn/leetcode-cn
|
2021/6-zigzag-conversion.js
|
<reponame>elvinn/leetcode-cn
// https://leetcode-cn.com/problems/zigzag-conversion/
/**
 * Reads `s` in zigzag order over `numRows` visual rows and returns the rows
 * concatenated top to bottom.
 * @param {string} s
 * @param {number} numRows
 * @return {string}
 */
const convert = function (s, numRows) {
  const rowCount = Math.min(s.length, numRows)
  if (rowCount === 1) {
    return s
  }
  // One accumulator string per visual row.
  const rows = new Array(rowCount).fill('')
  let row = 0
  // Direction of travel; flipped to +1 by the very first character.
  let step = -1
  for (const ch of s) {
    rows[row] += ch
    // Bounce whenever we touch the top or bottom row.
    if (row === 0 || row === rowCount - 1) {
      step = -step
    }
    row += step
  }
  return rows.join('')
}
|
wchen1990/Mekanism
|
src/main/java/mekanism/common/world/ResizableOreFeatureConfig.java
|
<gh_stars>1-10
package mekanism.common.world;
import com.mojang.serialization.Codec;
import com.mojang.serialization.codecs.RecordCodecBuilder;
import java.util.function.IntSupplier;
import mekanism.common.config.MekanismConfig;
import mekanism.common.registries.MekanismBlocks;
import mekanism.common.resource.OreType;
import net.minecraft.block.BlockState;
import net.minecraft.world.gen.feature.IFeatureConfig;
import net.minecraft.world.gen.feature.template.RuleTest;
/**
 * Ore feature config whose vein size is an {@link IntSupplier}, so it tracks
 * the live Mekanism config value instead of a baked-in constant.
 */
public class ResizableOreFeatureConfig implements IFeatureConfig {

    // Codec (de)serializes only target + oreType; the vein size is re-read
    // from config in create() so it stays in sync with runtime settings.
    public static final Codec<ResizableOreFeatureConfig> CODEC = RecordCodecBuilder.create(builder -> builder.group(
          RuleTest.field_237127_c_.fieldOf("target").forGetter(config -> config.target),
          OreType.CODEC.fieldOf("oreType").forGetter(config -> config.oreType)
    ).apply(builder, ResizableOreFeatureConfig::create));

    // Factory used by the codec; pulls the configurable max vein size.
    private static ResizableOreFeatureConfig create(RuleTest target, OreType oreType) {
        return new ResizableOreFeatureConfig(target, oreType, MekanismConfig.world.ores.get(oreType).maxVeinSize);
    }

    public final BlockState state;   // ore block state derived from oreType
    public final RuleTest target;    // predicate for blocks the ore may replace
    public final OreType oreType;
    public final IntSupplier size;   // max vein size (live config value)

    public ResizableOreFeatureConfig(RuleTest target, OreType oreType, IntSupplier size) {
        this.target = target;
        this.oreType = oreType;
        this.size = size;
        this.state = MekanismBlocks.ORES.get(oreType).getBlock().getDefaultState();
    }
}
|
mgaray99/final_team02
|
src/api/model/IKeyPressFunctions.java
|
<gh_stars>0
package api.model;
/**
* A class that remembers which key functions are activated at any given time.
* It stores four booleans, which indicate whether the game is paused, whether
* the player is moving left, whether the player is moving right, and if the player
* is jumping. This class contains getters and setters for those booleans.
*
* This is used by Level to determine how entities should be moved.
*
* @author <NAME>
*/
public interface IKeyPressFunctions {

  /**
   * Getter for isPaused
   * @return isPaused (if it returns true, the model should "short-circuit" the methods in its step
   * method, skipping over them and pausing the game)
   */
  boolean isPaused();

  /**
   * Getter for isPlayerMovingLeft
   * @return movePlayerLeft (if it returns true, the model should attempt to move the player left)
   */
  boolean isPlayerMovingLeft();

  /**
   * Getter for isPlayerMovingRight
   * @return movePlayerRight (if it returns true, the model should attempt to move the player right)
   */
  boolean isPlayerMovingRight();

  /**
   * Getter for isPlayerJumping
   * @return jumpPlayer (if it returns true, the model should attempt to make the player jump)
   */
  boolean isPlayerJumping();

  /**
   * Method that sets isPaused to true
   */
  void pauseGame();

  /**
   * Method that sets isPaused to false
   */
  void resumeGame();

  /**
   * Method that sets movePlayerLeft to true
   */
  void startMovingPlayerLeft();

  /**
   * Method that sets movePlayerLeft to false
   */
  void stopMovingPlayerLeft();

  /**
   * Method that sets movePlayerRight to true
   */
  void startMovingPlayerRight();

  /**
   * Method that sets movePlayerRight to false
   */
  void stopMovingPlayerRight();

  /**
   * Method that sets jumpPlayer to true
   */
  void startPlayerJumping();

  /**
   * Method that sets jumpPlayer to false
   */
  void stopPlayerJumping();
}
|
m4ta1l/airbrake-ruby
|
lib/airbrake-ruby/thread_pool.rb
|
module Airbrake
  # ThreadPool implements a simple thread pool that can configure the number of
  # worker threads and the size of the queue to process.
  #
  # @example
  #   # Initialize a new thread pool with 5 workers and a queue size of 100. Set
  #   # the block to be run concurrently.
  #   thread_pool = ThreadPool.new(
  #     worker_size: 5,
  #     queue_size: 100,
  #     block: proc { |message| print "ECHO: #{message}..."}
  #   )
  #
  #   # Send work.
  #   10.times { |i| thread_pool << i }
  #   #=> ECHO: 0...ECHO: 1...ECHO: 2...
  #
  # @api private
  # @since v4.6.1
  class ThreadPool
    include Loggable

    # @return [ThreadGroup] the list of workers
    # @note This is exposed for easier unit testing
    attr_reader :workers

    def initialize(worker_size:, queue_size:, block:)
      @worker_size = worker_size
      @queue_size = queue_size
      @block = block
      @queue = SizedQueue.new(queue_size)
      @workers = ThreadGroup.new
      @mutex = Mutex.new
      @pid = nil
      @closed = false
      # Spawns the initial workers as a side effect (see has_workers?).
      has_workers?
    end

    # Adds a new message to the thread pool. Rejects messages if the queue is at
    # its capacity.
    #
    # @param [Object] message The message that gets passed to the block
    # @return [Boolean] true if the message was successfully sent to the pool,
    #   false if the queue is full
    def <<(message)
      if backlog >= @queue_size
        logger.error(
          "#{LOG_LABEL} ThreadPool has reached its capacity of " \
          "#{@queue_size} and the following message will not be " \
          "processed: #{message.inspect}",
        )
        return false
      end

      @queue << message
      true
    end

    # @return [Integer] how big the queue is at the moment
    def backlog
      @queue.size
    end

    # Checks if a thread pool has any workers. A thread pool doesn't have any
    # workers only in two cases: when it was closed or when all workers
    # crashed. An *active* thread pool doesn't have any workers only when
    # something went wrong.
    #
    # Workers are expected to crash when you +fork+ the process the workers are
    # living in. In this case we detect a +fork+ and try to revive them here.
    #
    # Another possible scenario that crashes workers is when you close the
    # instance on +at_exit+, but some other +at_exit+ hook prevents the process
    # from exiting.
    #
    # @return [Boolean] true if an instance wasn't closed, but has no workers
    # @see https://goo.gl/oydz8h Example of at_exit that prevents exit
    def has_workers?
      @mutex.synchronize do
        return false if @closed

        if @pid != Process.pid && @workers.list.empty?
          @pid = Process.pid
          @workers = ThreadGroup.new
          spawn_workers
        end

        !@closed && @workers.list.any?
      end
    end

    # Closes the thread pool making it a no-op (it shut downs all worker
    # threads). Before closing, waits on all unprocessed tasks to be processed.
    #
    # @return [void]
    # @raise [Airbrake::Error] when invoked more than one time
    def close
      threads = @mutex.synchronize do
        raise Airbrake::Error, 'this thread pool is closed already' if @closed

        unless @queue.empty?
          msg = "#{LOG_LABEL} waiting to process #{@queue.size} task(s)..."
          logger.debug(msg + ' (Ctrl-C to abort)')
        end

        # One :stop sentinel per worker drains the queue before shutdown.
        @worker_size.times { @queue << :stop }
        @closed = true
        @workers.list.dup
      end

      threads.each(&:join)
      logger.debug("#{LOG_LABEL} thread pool closed")
    end

    def closed?
      @closed
    end

    # NOTE(review): defined above the `private` keyword, so it is public —
    # presumably to ease testing; confirm before making it private.
    def spawn_workers
      @worker_size.times { @workers.add(spawn_worker) }
      @workers.enclose
    end

    private

    # Worker loop: pops messages until the :stop sentinel arrives.
    def spawn_worker
      Thread.new do
        while (message = @queue.pop)
          break if message == :stop

          @block.call(message)
        end
      end
    end
  end
end
|
PD75/practical-startpage
|
appdev/app/badges/gmail/gmail.js
|
<reponame>PD75/practical-startpage
/*global X2JS */
(function() {
  "use strict";

  // Badge showing the unread count of the user's Gmail inbox.
  angular.module('ps.badges')
    .controller("gmailBadgeCtrl", gmailBadgeCtrl)
    .directive('psbGmail', gmailDirective);

  // Controller: polls the Gmail atom feed and exposes the unread count.
  // vm.unRead of '-1' signals "no permission" or "no connection".
  function gmailBadgeCtrl($http, $interval, $scope, $timeout, $sce, i18n, permissionService) {
    var vm = this;
    vm.locale = locale;
    vm.authorizePermissions = authorizePermissions;
    vm.formatDate = formatDate;
    vm.popup = {
      popup: 'psb-gmail .ui.popup',
      hoverable: true,
      variation: 'basic',
    };
    vm.connection = true;
    activate();

    // Starts polling if the extension has permission for mail.google.com.
    function activate() {
      permissionService
        .checkPermissions(['https://mail.google.com/'])
        .then(function(result) {
          vm.permission = result;
          if (result) {
            vm.unRead = '0';
            getMail();
            var stop = $interval(getMail, 300000); //Check every 5 min
            $scope.$on('$destroy', function() {
              $interval.cancel(stop);
            });
          } else {
            vm.unRead = '-1';
          }
        });
    }

    // Fetches the Gmail atom feed; the XML response is converted to JSON.
    function getMail() {
      var conf = {
        method: 'GET',
        url: 'https://mail.google.com/mail/feed/atom',
        timeout: 5000,
        transformResponse: function(data) {
          var x2js = new X2JS();
          return x2js.xml_str2json(data);
        },
      };
      $http(conf)
        .then(function(result) {
          vm.connection = true;
          vm.unRead = result.data.feed.fullcount;
          vm.mail = result.data.feed;
          // With a single message the XML->JSON conversion yields an object
          // instead of an array; normalize so the template can iterate.
          if (vm.unRead === "1") {
            vm.mail.entry = [result.data.feed.entry];
          }
          $timeout(function() {
            $scope.$apply();
          });
        }, function() {
          vm.connection = false;
          vm.unRead = '-1';
        });
    }

    // Requests the Gmail host permission, then restarts polling.
    function authorizePermissions() {
      permissionService
        .requestPermissions(['https://mail.google.com/'])
        .then(function() {
          activate();
        });
    }

    // Formats a date string using the user's locale.
    function formatDate(date) {
      var newDate = new Date(date);
      return newDate.toLocaleString();
    }

    // Returns a trusted-HTML localized string.
    function locale(text, placeholders) {
      return $sce.trustAsHtml(i18n.get(text, placeholders));
    }
  }

  // Element directive rendering the badge template.
  function gmailDirective() {
    return {
      restrict: 'E',
      templateUrl: 'app/badges/gmail/gmail.html',
      controller: 'gmailBadgeCtrl',
      controllerAs: 'vm',
      scope: {},
      bindToController: true,
    };
  }
})();
|
tianyapiaozi/tensorflow
|
tensorflow/python/kernel_tests/distributions/uniform_test.py
|
<reponame>tianyapiaozi/tensorflow
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Uniform distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import numpy as np
from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import errors
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import uniform as uniform_lib
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
def try_import(name):  # pylint: disable=invalid-name
  """Imports `name` and returns the module, or None if it is unavailable.

  A warning is logged when the import fails, mirroring the optional-dependency
  pattern used for scipy below.
  """
  try:
    return importlib.import_module(name)
  except ImportError as e:
    tf_logging.warning("Could not import %s: %s" % (name, str(e)))
    return None
stats = try_import("scipy.stats")
class UniformTest(test.TestCase):
  """Unit tests for the Uniform distribution (graph and eager modes)."""

  @test_util.run_in_graph_and_eager_modes
  def testUniformRange(self):
    with self.test_session():
      a = 3.0
      b = 10.0
      uniform = uniform_lib.Uniform(low=a, high=b)
      self.assertAllClose(a, self.evaluate(uniform.low))
      self.assertAllClose(b, self.evaluate(uniform.high))
      self.assertAllClose(b - a, self.evaluate(uniform.range()))

  @test_util.run_in_graph_and_eager_modes
  def testUniformPDF(self):
    with self.test_session():
      # First five batch members share [-3, 11]; the last uses [15, 20].
      a = constant_op.constant([-3.0] * 5 + [15.0])
      b = constant_op.constant([11.0] * 5 + [20.0])
      uniform = uniform_lib.Uniform(low=a, high=b)

      a_v = -3.0
      b_v = 11.0
      x = np.array([-10.5, 4.0, 0.0, 10.99, 11.3, 17.0], dtype=np.float32)

      def _expected_pdf():
        # Constant density inside [a_v, b_v], zero outside; element 5 uses
        # the last batch member's interval.
        pdf = np.zeros_like(x) + 1.0 / (b_v - a_v)
        pdf[x > b_v] = 0.0
        pdf[x < a_v] = 0.0
        pdf[5] = 1.0 / (20.0 - 15.0)
        return pdf

      expected_pdf = _expected_pdf()

      pdf = uniform.prob(x)
      self.assertAllClose(expected_pdf, self.evaluate(pdf))

      log_pdf = uniform.log_prob(x)
      self.assertAllClose(np.log(expected_pdf), self.evaluate(log_pdf))

  @test_util.run_in_graph_and_eager_modes
  def testUniformShape(self):
    with self.test_session():
      a = constant_op.constant([-3.0] * 5)
      b = constant_op.constant(11.0)
      uniform = uniform_lib.Uniform(low=a, high=b)

      self.assertEqual(self.evaluate(uniform.batch_shape_tensor()), (5,))
      self.assertEqual(uniform.batch_shape, tensor_shape.TensorShape([5]))
      self.assertAllEqual(self.evaluate(uniform.event_shape_tensor()), [])
      self.assertEqual(uniform.event_shape, tensor_shape.TensorShape([]))

  @test_util.run_in_graph_and_eager_modes
  def testUniformPDFWithScalarEndpoint(self):
    with self.test_session():
      # `low` is a vector, `high` a scalar - exercises broadcasting.
      a = constant_op.constant([0.0, 5.0])
      b = constant_op.constant(10.0)
      uniform = uniform_lib.Uniform(low=a, high=b)

      x = np.array([0.0, 8.0], dtype=np.float32)
      expected_pdf = np.array([1.0 / (10.0 - 0.0), 1.0 / (10.0 - 5.0)])

      pdf = uniform.prob(x)
      self.assertAllClose(expected_pdf, self.evaluate(pdf))

  @test_util.run_in_graph_and_eager_modes
  def testUniformCDF(self):
    with self.test_session():
      batch_size = 6
      a = constant_op.constant([1.0] * batch_size)
      b = constant_op.constant([11.0] * batch_size)
      a_v = 1.0
      b_v = 11.0
      x = np.array([-2.5, 2.5, 4.0, 0.0, 10.99, 12.0], dtype=np.float32)

      uniform = uniform_lib.Uniform(low=a, high=b)

      def _expected_cdf():
        # Linear ramp on [a_v, b_v], clamped to 0 below and 1 above.
        cdf = (x - a_v) / (b_v - a_v)
        cdf[x >= b_v] = 1
        cdf[x < a_v] = 0
        return cdf

      cdf = uniform.cdf(x)
      self.assertAllClose(_expected_cdf(), self.evaluate(cdf))

      log_cdf = uniform.log_cdf(x)
      self.assertAllClose(np.log(_expected_cdf()), self.evaluate(log_cdf))

  @test_util.run_in_graph_and_eager_modes
  def testUniformEntropy(self):
    with self.test_session():
      a_v = np.array([1.0, 1.0, 1.0])
      b_v = np.array([[1.5, 2.0, 3.0]])
      uniform = uniform_lib.Uniform(low=a_v, high=b_v)

      # Entropy of U(a, b) is log(b - a).
      expected_entropy = np.log(b_v - a_v)
      self.assertAllClose(expected_entropy, self.evaluate(uniform.entropy()))

  @test_util.run_in_graph_and_eager_modes
  def testUniformAssertMaxGtMin(self):
    with self.test_session():
      a_v = np.array([1.0, 1.0, 1.0], dtype=np.float32)
      b_v = np.array([1.0, 2.0, 3.0], dtype=np.float32)

      # validate_args must reject low == high (first batch member).
      with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
                                               "x < y"):
        uniform = uniform_lib.Uniform(low=a_v, high=b_v, validate_args=True)
        self.evaluate(uniform.low)

  @test_util.run_in_graph_and_eager_modes
  def testUniformSample(self):
    with self.test_session():
      a = constant_op.constant([3.0, 4.0])
      b = constant_op.constant(13.0)
      a1_v = 3.0
      a2_v = 4.0
      b_v = 13.0
      n = constant_op.constant(100000)
      uniform = uniform_lib.Uniform(low=a, high=b)

      samples = uniform.sample(n, seed=137)
      sample_values = self.evaluate(samples)
      self.assertEqual(sample_values.shape, (100000, 2))
      # Sample means should approach the midpoints of each interval.
      self.assertAllClose(
          sample_values[::, 0].mean(), (b_v + a1_v) / 2, atol=1e-1, rtol=0.)
      self.assertAllClose(
          sample_values[::, 1].mean(), (b_v + a2_v) / 2, atol=1e-1, rtol=0.)
      # All samples must lie in [low, high).
      self.assertFalse(
          np.any(sample_values[::, 0] < a1_v) or np.any(sample_values >= b_v))
      self.assertFalse(
          np.any(sample_values[::, 1] < a2_v) or np.any(sample_values >= b_v))

  @test_util.run_in_graph_and_eager_modes
  def _testUniformSampleMultiDimensional(self):
    # DISABLED: Please enable this test once b/issues/30149644 is resolved.
    with self.test_session():
      batch_size = 2
      a_v = [3.0, 22.0]
      b_v = [13.0, 35.0]
      a = constant_op.constant([a_v] * batch_size)
      b = constant_op.constant([b_v] * batch_size)

      uniform = uniform_lib.Uniform(low=a, high=b)

      n_v = 100000
      n = constant_op.constant(n_v)
      samples = uniform.sample(n)
      self.assertEqual(samples.get_shape(), (n_v, batch_size, 2))

      sample_values = self.evaluate(samples)

      self.assertFalse(
          np.any(sample_values[:, 0, 0] < a_v[0]) or
          np.any(sample_values[:, 0, 0] >= b_v[0]))
      self.assertFalse(
          np.any(sample_values[:, 0, 1] < a_v[1]) or
          np.any(sample_values[:, 0, 1] >= b_v[1]))

      self.assertAllClose(
          sample_values[:, 0, 0].mean(), (a_v[0] + b_v[0]) / 2, atol=1e-2)
      self.assertAllClose(
          sample_values[:, 0, 1].mean(), (a_v[1] + b_v[1]) / 2, atol=1e-2)

  @test_util.run_in_graph_and_eager_modes
  def testUniformMean(self):
    with self.test_session():
      a = 10.0
      b = 100.0
      uniform = uniform_lib.Uniform(low=a, high=b)
      # scipy is optional; skip the comparison when it is unavailable.
      if not stats:
        return
      s_uniform = stats.uniform(loc=a, scale=b - a)
      self.assertAllClose(self.evaluate(uniform.mean()), s_uniform.mean())

  @test_util.run_in_graph_and_eager_modes
  def testUniformVariance(self):
    with self.test_session():
      a = 10.0
      b = 100.0
      uniform = uniform_lib.Uniform(low=a, high=b)
      # scipy is optional; skip the comparison when it is unavailable.
      if not stats:
        return
      s_uniform = stats.uniform(loc=a, scale=b - a)
      self.assertAllClose(self.evaluate(uniform.variance()), s_uniform.var())

  @test_util.run_in_graph_and_eager_modes
  def testUniformStd(self):
    with self.test_session():
      a = 10.0
      b = 100.0
      uniform = uniform_lib.Uniform(low=a, high=b)
      # scipy is optional; skip the comparison when it is unavailable.
      if not stats:
        return
      s_uniform = stats.uniform(loc=a, scale=b - a)
      self.assertAllClose(self.evaluate(uniform.stddev()), s_uniform.std())

  @test_util.run_in_graph_and_eager_modes
  def testUniformNans(self):
    with self.test_session():
      a = 10.0
      b = [11.0, 100.0]
      uniform = uniform_lib.Uniform(low=a, high=b)

      no_nans = constant_op.constant(1.0)
      nans = constant_op.constant(0.0) / constant_op.constant(0.0)
      self.assertTrue(self.evaluate(math_ops.is_nan(nans)))
      with_nans = array_ops.stack([no_nans, nans])

      pdf = uniform.prob(with_nans)

      # NaN inputs must propagate to NaN densities, not crash.
      is_nan = self.evaluate(math_ops.is_nan(pdf))
      self.assertFalse(is_nan[0])
      self.assertTrue(is_nan[1])

  @test_util.run_in_graph_and_eager_modes
  def testUniformSamplePdf(self):
    with self.test_session():
      a = 10.0
      b = [11.0, 100.0]
      uniform = uniform_lib.Uniform(a, b)
      # Every drawn sample must have strictly positive density.
      self.assertTrue(
          self.evaluate(
              math_ops.reduce_all(uniform.prob(uniform.sample(10)) > 0)))

  @test_util.run_in_graph_and_eager_modes
  def testUniformBroadcasting(self):
    with self.test_session():
      a = 10.0
      b = [11.0, 20.0]
      uniform = uniform_lib.Uniform(a, b)

      pdf = uniform.prob([[10.5, 11.5], [9.0, 19.0], [10.5, 21.0]])
      expected_pdf = np.array([[1.0, 0.1], [0.0, 0.1], [1.0, 0.0]])
      self.assertAllClose(expected_pdf, self.evaluate(pdf))

  @test_util.run_in_graph_and_eager_modes
  def testUniformSampleWithShape(self):
    with self.test_session():
      a = 10.0
      b = [11.0, 20.0]
      uniform = uniform_lib.Uniform(a, b)

      pdf = uniform.prob(uniform.sample((2, 3)))
      # pylint: disable=bad-continuation
      expected_pdf = [
          [[1.0, 0.1], [1.0, 0.1], [1.0, 0.1]],
          [[1.0, 0.1], [1.0, 0.1], [1.0, 0.1]],
      ]
      # pylint: enable=bad-continuation
      self.assertAllClose(expected_pdf, self.evaluate(pdf))

      pdf = uniform.prob(uniform.sample())
      expected_pdf = [1.0, 0.1]
      self.assertAllClose(expected_pdf, self.evaluate(pdf))

  def testFullyReparameterized(self):
    # Sampling must be differentiable w.r.t. both endpoints.
    a = constant_op.constant(0.1)
    b = constant_op.constant(0.8)
    with backprop.GradientTape() as tape:
      tape.watch(a)
      tape.watch(b)
      uniform = uniform_lib.Uniform(a, b)
      samples = uniform.sample(100)
    grad_a, grad_b = tape.gradient(samples, [a, b])
    self.assertIsNotNone(grad_a)
    self.assertIsNotNone(grad_b)

  # Eager doesn't pass due to a type mismatch in one of the ops.
  def testUniformFloat64(self):
    uniform = uniform_lib.Uniform(
        low=np.float64(0.), high=np.float64(1.))

    self.assertAllClose(
        [1., 1.],
        self.evaluate(uniform.prob(np.array([0.5, 0.6], dtype=np.float64))))

    self.assertAllClose(
        [0.5, 0.6],
        self.evaluate(uniform.cdf(np.array([0.5, 0.6], dtype=np.float64))))

    self.assertAllClose(0.5, self.evaluate(uniform.mean()))
    self.assertAllClose(1 / 12., self.evaluate(uniform.variance()))
    self.assertAllClose(0., self.evaluate(uniform.entropy()))
if __name__ == "__main__":
test.main()
|
brainliubo/openairstudy
|
RRC_Rel14/LTE_SystemInformationBlockType17-r12.c
|
/*
* Generated by asn1c-0.9.29 (http://lionet.info/asn1c)
* From ASN.1 module "EUTRA-RRC-Definitions"
* found in "/home/guicliu/ue_folder/openair2/RRC/LTE/MESSAGES/asn1c/ASN1_files/lte-rrc-14.7.0.asn1"
* `asn1c -pdu=all -fcompound-names -gen-PER -no-gen-OER -no-gen-example -D /home/guicliu/ue_folder/cmake_targets/lte_noS1_build_oai/build/CMakeFiles/RRC_Rel14`
*/
#include "LTE_SystemInformationBlockType17-r12.h"
/* Membership constraint for wlan-OffloadInfoPerPLMN-List-r12: enforces the
 * ASN.1 SIZE(1..6) bound on the SEQUENCE OF, then delegates to the
 * element-level constraint checks. Generated by asn1c - keep edits minimal. */
static int
memb_LTE_wlan_OffloadInfoPerPLMN_List_r12_constraint_1(const asn_TYPE_descriptor_t *td, const void *sptr,
			asn_app_constraint_failed_f *ctfailcb, void *app_key) {
	size_t size;

	if(!sptr) {
		ASN__CTFAIL(app_key, td, sptr,
			"%s: value not given (%s:%d)",
			td->name, __FILE__, __LINE__);
		return -1;
	}

	/* Determine the number of elements */
	size = _A_CSEQUENCE_FROM_VOID(sptr)->count;

	if((size >= 1 && size <= 6)) {
		/* Perform validation of the inner elements */
		return td->encoding_constraints.general_constraints(td, sptr, ctfailcb, app_key);
	} else {
		ASN__CTFAIL(app_key, td, sptr,
			"%s: constraint failed (%s:%d)",
			td->name, __FILE__, __LINE__);
		return -1;
	}
}
/* PER size constraint (SIZE(1..6)) attached to the SEQUENCE OF type itself. */
static asn_per_constraints_t asn_PER_type_LTE_wlan_OffloadInfoPerPLMN_List_r12_constr_2 CC_NOTUSED = {
	{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
	{ APC_CONSTRAINED, 3, 3, 1, 6 } /* (SIZE(1..6)) */,
	0, 0 /* No PER value map */
};
/* Identical PER constraint used when the list appears as a member of SIB17. */
static asn_per_constraints_t asn_PER_memb_LTE_wlan_OffloadInfoPerPLMN_List_r12_constr_2 CC_NOTUSED = {
	{ APC_UNCONSTRAINED, -1, -1, 0, 0 },
	{ APC_CONSTRAINED, 3, 3, 1, 6 } /* (SIZE(1..6)) */,
	0, 0 /* No PER value map */
};
/* Element descriptor: each list entry is a WLAN-OffloadInfoPerPLMN-r12. */
static asn_TYPE_member_t asn_MBR_LTE_wlan_OffloadInfoPerPLMN_List_r12_2[] = {
	{ ATF_POINTER, 0, 0,
		(ASN_TAG_CLASS_UNIVERSAL | (16 << 2)),
		0,
		&asn_DEF_LTE_WLAN_OffloadInfoPerPLMN_r12,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		""
		},
};
/* BER tag chain: context tag [0] wrapping a UNIVERSAL SEQUENCE. */
static const ber_tlv_tag_t asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_tags_2[] = {
	(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* SET OF runtime specifics: struct size and asn_struct_ctx_t offset. */
static asn_SET_OF_specifics_t asn_SPC_LTE_wlan_OffloadInfoPerPLMN_List_r12_specs_2 = {
	sizeof(struct LTE_SystemInformationBlockType17_r12__wlan_OffloadInfoPerPLMN_List_r12),
	offsetof(struct LTE_SystemInformationBlockType17_r12__wlan_OffloadInfoPerPLMN_List_r12, _asn_ctx),
	0,	/* XER encoding is XMLDelimitedItemList */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_2 = {
	"wlan-OffloadInfoPerPLMN-List-r12",
	"wlan-OffloadInfoPerPLMN-List-r12",
	&asn_OP_SEQUENCE_OF,
	asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_tags_2,
	sizeof(asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_tags_2)
		/sizeof(asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_tags_2[0]) - 1, /* 1 */
	asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_tags_2,	/* Same as above */
	sizeof(asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_tags_2)
		/sizeof(asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_tags_2[0]), /* 2 */
	{ 0, &asn_PER_type_LTE_wlan_OffloadInfoPerPLMN_List_r12_constr_2, SEQUENCE_OF_constraint },
	asn_MBR_LTE_wlan_OffloadInfoPerPLMN_List_r12_2,
	1,	/* Single element */
	&asn_SPC_LTE_wlan_OffloadInfoPerPLMN_List_r12_specs_2	/* Additional specs */
};
/* SIB17 members: both are OPTIONAL pointers (ATF_POINTER, 2 then 1 remaining). */
asn_TYPE_member_t asn_MBR_LTE_SystemInformationBlockType17_r12_1[] = {
	{ ATF_POINTER, 2, offsetof(struct LTE_SystemInformationBlockType17_r12, wlan_OffloadInfoPerPLMN_List_r12),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		0,
		&asn_DEF_LTE_wlan_OffloadInfoPerPLMN_List_r12_2,
		0,
		{ 0, &asn_PER_memb_LTE_wlan_OffloadInfoPerPLMN_List_r12_constr_2,  memb_LTE_wlan_OffloadInfoPerPLMN_List_r12_constraint_1 },
		0, 0, /* No default value */
		"wlan-OffloadInfoPerPLMN-List-r12"
		},
	{ ATF_POINTER, 1, offsetof(struct LTE_SystemInformationBlockType17_r12, lateNonCriticalExtension),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_OCTET_STRING,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"lateNonCriticalExtension"
		},
};
/* Indices (into asn_MBR_... above) of the optional members. */
static const int asn_MAP_LTE_SystemInformationBlockType17_r12_oms_1[] = { 0, 1 };
static const ber_tlv_tag_t asn_DEF_LTE_SystemInformationBlockType17_r12_tags_1[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member lookup used by the BER/DER decoder. */
static const asn_TYPE_tag2member_t asn_MAP_LTE_SystemInformationBlockType17_r12_tag2el_1[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* wlan-OffloadInfoPerPLMN-List-r12 */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* lateNonCriticalExtension */
};
asn_SEQUENCE_specifics_t asn_SPC_LTE_SystemInformationBlockType17_r12_specs_1 = {
	sizeof(struct LTE_SystemInformationBlockType17_r12),
	offsetof(struct LTE_SystemInformationBlockType17_r12, _asn_ctx),
	asn_MAP_LTE_SystemInformationBlockType17_r12_tag2el_1,
	2,	/* Count of tags in the map */
	asn_MAP_LTE_SystemInformationBlockType17_r12_oms_1,	/* Optional members */
	2, 0,	/* Root/Additions */
	2,	/* First extension addition */
};
asn_TYPE_descriptor_t asn_DEF_LTE_SystemInformationBlockType17_r12 = {
	"SystemInformationBlockType17-r12",
	"SystemInformationBlockType17-r12",
	&asn_OP_SEQUENCE,
	asn_DEF_LTE_SystemInformationBlockType17_r12_tags_1,
	sizeof(asn_DEF_LTE_SystemInformationBlockType17_r12_tags_1)
		/sizeof(asn_DEF_LTE_SystemInformationBlockType17_r12_tags_1[0]), /* 1 */
	asn_DEF_LTE_SystemInformationBlockType17_r12_tags_1,	/* Same as above */
	sizeof(asn_DEF_LTE_SystemInformationBlockType17_r12_tags_1)
		/sizeof(asn_DEF_LTE_SystemInformationBlockType17_r12_tags_1[0]), /* 1 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_LTE_SystemInformationBlockType17_r12_1,
	2,	/* Elements count */
	&asn_SPC_LTE_SystemInformationBlockType17_r12_specs_1	/* Additional specs */
};
|
ricardyn/ironpython-stubs
|
stubs.min/Autodesk/Revit/DB/__init___parts/ResourceVersionStatus.py
|
class ResourceVersionStatus(Enum,IComparable,IFormattable,IConvertible):
    """
    An enum indicating whether a resource is current or out of date.
    enum ResourceVersionStatus,values: Current (0),OutOfDate (1),Unknown (2)
    """
    # Auto-generated IronPython stub for a .NET enum; method bodies are
    # intentionally empty and only document the CLR signatures.
    def __eq__(self,*args):
        """ x.__eq__(y) <==> x==y """
        pass
    def __format__(self,*args):
        """ __format__(formattable: IFormattable,format: str) -> str """
        pass
    def __ge__(self,*args):
        pass
    def __gt__(self,*args):
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self,*args):
        pass
    def __lt__(self,*args):
        pass
    def __ne__(self,*args):
        pass
    def __reduce_ex__(self,*args):
        pass
    def __str__(self,*args):
        pass
    Current=None  # enum value 0: resource is up to date
    OutOfDate=None  # enum value 1: a newer version exists
    Unknown=None  # enum value 2: version state could not be determined
    value__=None  # underlying CLR integer value of the enum instance
|
odp-reactor/ld-r
|
components/dataset/viewer/BasicResourceList.js
|
import React from 'react';
import PropTypes from 'prop-types';
import { NavLink } from 'fluxible-router';
import URIUtil from '../../utils/URIUtil';
import { Header, Table } from 'semantic-ui-react';
import BasicAggregateMapView from '../../object/viewer/aggregate/BasicAggregateMapView';
import classNames from 'classnames/bind';
import ObjectIViewer from '../../object/ObjectIViewer';
const PUBLIC_URL = process.env.PUBLIC_URL ? process.env.PUBLIC_URL : '';
class BasicResourceList extends React.Component {
    // Lifecycle hook kept as an explicit no-op: no mount-time work is needed.
    componentDidMount() {}
    /**
     * Builds the link markup for one resource entry.
     *
     * @param useA      truthy when raw HTML strings are needed (map popups);
     *                  falsy to return React elements for the faceted browser.
     * @param v         URI-encoded resource URI.
     * @param g         URI-encoded dataset (graph) URI.
     * @param title     display label for the resource.
     * @param image     optional image URL; a placeholder is used when empty.
     * @param icon      CSS class for the access-level cube icon.
     * @param cloneable truthy to render the clone button (NavLink branch only).
     * @returns an HTML string (useA) or a React element otherwise.
     */
    buildLink(useA, v, g, title, image, icon, cloneable) {
        let self = this;
        let cloneDIV = '';
        if (cloneable) {
            cloneDIV = (
                <span
                    className="mini ui circular basic icon button"
                    onClick={self.handleCloneResource.bind(
                        self,
                        decodeURIComponent(g),
                        decodeURIComponent(v)
                    )}
                    title="clone this resource"
                >
                    <i className="icon teal superscript"></i>
                </span>
            );
        }
        //on the map: todo:handle it with React DOM
        if (useA) {
            let titleHTML = `
            <div class="content">
            <a href="${PUBLIC_URL}/dataset/${g}/resource/${v}" target="_blank" class="ui"> <i class="${icon}"></i>${title}</a>
            </div>
            `;
            if (this.props.config && this.props.config.resourceImageProperty) {
                return `
                <div>
                <div class="content">
                <div class="ui fluid card" style="max-width: 150px; max-height: 235px; min-height: 235px;">
                <div class="image">
                <a href="${PUBLIC_URL}/dataset/${g}/resource/${v}" target="_blank" class="ui"> <img class="ui small image" src="${
                    image ? image : `${PUBLIC_URL}/assets/img/image.png`
                }" style="max-height: 150px; min-height: 150px;" /></a>
                </div>
                ${titleHTML}
                </div>
                </div>
                </div>
                `;
            } else {
                return titleHTML;
            }
        }
        //in the faceted browser
        if (this.props.OpenInNewTab) {
            let titleDIV = (
                <div className="content">
                    <a
                        href={'/dataset/' + g + '/resource/' + v}
                        target="_blank"
                        className="ui"
                    >
                        {' '}
                        <i className={icon}></i>
                        {title}{' '}
                    </a>
                </div>
            );
            if (this.props.config && this.props.config.resourceImageProperty) {
                return (
                    <div>
                        <div className="content">
                            <div
                                className="ui fluid card"
                                style={{
                                    maxWidth: 150,
                                    maxHeight: 235,
                                    minHeight: 235
                                }}
                            >
                                <div className="image">
                                    <a
                                        href={
                                            '/dataset/' + g + '/resource/' + v
                                        }
                                        target="_blank"
                                        className="ui"
                                    >
                                        {' '}
                                        <img
                                            className="ui small image"
                                            src={
                                                image
                                                    ? image
                                                    : `${PUBLIC_URL}/assets/img/image.png`
                                            }
                                            style={{
                                                maxHeight: 150,
                                                minHeight: 150
                                            }}
                                        />
                                    </a>
                                </div>
                                {titleDIV}
                            </div>
                        </div>
                    </div>
                );
            } else {
                return <div>{titleDIV}</div>;
            }
        } else {
            // Default branch: in-app navigation via NavLink; the clone button
            // is only attached here (not in the new-tab / raw-HTML branches).
            let titleDIV = (
                <div className="content">
                    <NavLink
                        routeName="resource"
                        className="ui"
                        href={'/dataset/' + g + '/resource/' + v}
                    >
                        {' '}
                        <i className={icon}></i>
                        {title}
                    </NavLink>
                    {cloneDIV}
                </div>
            );
            if (this.props.config && this.props.config.resourceImageProperty) {
                return (
                    <div>
                        <div className="content">
                            <div
                                className="ui fluid card"
                                style={{
                                    maxWidth: 150,
                                    maxHeight: 235,
                                    minHeight: 235
                                }}
                            >
                                <div className="image">
                                    <NavLink
                                        routeName="resource"
                                        className="ui"
                                        href={
                                            '/dataset/' + g + '/resource/' + v
                                        }
                                    >
                                        {' '}
                                        <img
                                            className="ui small image"
                                            src={
                                                image
                                                    ? image
                                                    : `${PUBLIC_URL}/assets/img/image.png`
                                            }
                                            style={{
                                                maxHeight: 150,
                                                minHeight: 150
                                            }}
                                        />
                                    </NavLink>
                                </div>
                                {titleDIV}
                            </div>
                        </div>
                    </div>
                );
            } else {
                return <div>{titleDIV}</div>;
            }
        }
    }
    // Forwards a clone request to the parent with decoded URIs, and stops the
    // click from bubbling up to the surrounding resource link.
    handleCloneResource(datasetURI, resourceURI, e) {
        this.props.onCloneResource(datasetURI, resourceURI);
        e.stopPropagation();
    }
checkAnalysisProps() {
let out = 0;
if (this.props.resources.length) {
if (
this.props.resources[0].propsForAnalysis &&
Object.keys(this.props.resources[0].propsForAnalysis).length
) {
out = 1;
return out;
} else {
return 0;
}
} else {
return 0;
}
return out;
}
getAnalysisPropsConfgis(facetConfigs) {
let out = {};
let index,
tmp = [];
if (!facetConfigs || !Object.keys(facetConfigs).length) {
return out;
}
if (this.props.resources.length) {
if (this.props.resources[0].propsForAnalysis) {
for (let prop in this.props.resources[0].propsForAnalysis) {
tmp = prop.split('_');
if (tmp.length > 1) {
index = tmp[1];
//hanlde multiple _
if (tmp.length > 2) {
tmp.shift();
index = tmp.join('_');
}
if (facetConfigs) {
for (let prop2 in facetConfigs) {
if (prop2.indexOf(index) !== -1) {
out[prop] = facetConfigs[prop2];
}
}
}
}
return out;
}
}
}
return out;
}
    /**
     * Renders the resource list in one of three shapes:
     *  - a warning message when no resources were loaded,
     *  - a semantic-ui Table when analysis properties are present,
     *  - a plain list / card grid otherwise;
     * and, when a geo property is configured, an aggregate map instead.
     */
    render() {
        //to apply the same config in result list
        let analysisPropsConfgis = this.getAnalysisPropsConfgis(
            this.props.facetConfigs
        );
        //console.log(analysisPropsConfgis);
        let self = this;
        // NOTE(review): `user` and `datasetURI` are computed but never used
        // below — candidates for removal in a follow-up.
        let user = this.context.getUser();
        let datasetURI = this.props.datasetURI;
        let userAccess,
            itemClass,
            title,
            image,
            resourceDIV,
            geo,
            instances = [],
            list,
            dbClass = 'black cube icon';
        let theaderDIV,
            dtableHeaders = [],
            dtableCells = [];
        // Cloning must be enabled both by config and by the `cloneable` prop.
        let cloneable = 0;
        if (
            self.props.config &&
            typeof self.props.config.allowResourceClone !== 'undefined' &&
            parseInt(self.props.config.allowResourceClone)
        ) {
            cloneable = 1;
        }
        if (!self.props.cloneable) {
            cloneable = 0;
        }
        if (!this.props.resources.length) {
            list = (
                <div className="ui warning message">
                    <div className="header">
                        There was no resource in the selected dataset! This
                        might be due to the connection problems or because the
                        estimated query execution time exceeds the configured
                        limit. Please check the connection parameters of your
                        dataset's Sparql endpoint or add resources to your
                        dataset...
                    </div>
                </div>
            );
        } else {
            let sortedProps = [];
            if (this.checkAnalysisProps()) {
                //first sort the analysis property
                // NOTE(review): sort() is called on every push; sorting once
                // after the loop would be equivalent and cheaper.
                for (let prop in this.props.resources[0].propsForAnalysis) {
                    sortedProps.push(prop);
                    sortedProps.sort();
                }
                dtableHeaders = sortedProps.map(item => {
                    return (
                        <Table.HeaderCell key={item}>{item}</Table.HeaderCell>
                    );
                });
                theaderDIV = (
                    <Table.Header>
                        <Table.Row>
                            <Table.HeaderCell singleLine>
                                Title
                            </Table.HeaderCell>
                            {dtableHeaders}
                        </Table.Row>
                    </Table.Header>
                );
            }
            list = this.props.resources.map((node, index) => {
                title = node.title
                    ? node.title
                    : node.label
                    ? node.label
                    : URIUtil.getURILabel(node.v);
                image = node.image ? node.image : '';
                geo = node.geo ? node.geo : '';
                itemClass = classNames({
                    ui: true,
                    'item fadeIn': true,
                    animated: !cloneable
                });
                // Cube icon color encodes the user's access level:
                // green = full, yellow = partial, black = none/read-only.
                if (!self.props.enableAuthentication) {
                    dbClass = 'black cube icon';
                    if (
                        self.props.config &&
                        typeof self.props.config.readOnly !== 'undefined' &&
                        !self.props.config.readOnly
                    ) {
                        dbClass = 'green cube icon';
                    }
                } else {
                    userAccess = node.accessLevel;
                    if (userAccess.access) {
                        if (userAccess.type === 'full') {
                            dbClass = 'green cube icon';
                        } else {
                            dbClass = 'yellow cube icon';
                        }
                    } else {
                        dbClass = 'black cube icon';
                    }
                }
                dtableCells = [];
                if (self.checkAnalysisProps()) {
                    //get the values in order
                    dtableCells = sortedProps.map(prop => {
                        return (
                            <Table.Cell
                                key={'c' + prop}
                                title={node.propsForAnalysis[prop]}
                            >
                                {Object.keys(analysisPropsConfgis).length &&
                                analysisPropsConfgis[prop] ? (
                                    <ObjectIViewer
                                        datasetURI={this.props.datasetURI}
                                        property={prop}
                                        spec={{
                                            value: node.propsForAnalysis[prop]
                                        }}
                                        config={analysisPropsConfgis[prop]}
                                    />
                                ) : (
                                    URIUtil.getURILabel(
                                        node.propsForAnalysis[prop]
                                    )
                                )}
                            </Table.Cell>
                        );
                    });
                    resourceDIV = (
                        <Table.Row key={index}>
                            <Table.Cell>
                                {self.buildLink(
                                    0,
                                    encodeURIComponent(node.v),
                                    encodeURIComponent(node.d),
                                    title,
                                    image,
                                    dbClass,
                                    cloneable
                                )}
                            </Table.Cell>
                            {dtableCells}
                        </Table.Row>
                    );
                } else {
                    resourceDIV = (
                        <div className={itemClass} key={index}>
                            {self.buildLink(
                                0,
                                encodeURIComponent(node.v),
                                encodeURIComponent(node.d),
                                title,
                                image,
                                dbClass,
                                cloneable
                            )}
                        </div>
                    );
                }
                // Collect geo-tagged resources for the aggregate map view;
                // hint uses the raw-HTML flavor of buildLink (useA = 1).
                if (
                    self.props.config &&
                    self.props.config.resourceGeoProperty &&
                    geo
                ) {
                    instances.push({
                        value: geo,
                        hint: self.buildLink(
                            1,
                            encodeURIComponent(node.v),
                            encodeURIComponent(node.d),
                            title,
                            image,
                            dbClass,
                            cloneable
                        )
                    });
                }
                return resourceDIV;
            });
        }
        let listClasses = classNames({
            ui: true,
            big: this.props.isBig,
            animated: !cloneable,
            'divided list':
                this.props.config && !this.props.config.resourceImageProperty,
            cards: this.props.config && this.props.config.resourceImageProperty
        });
        let finalOutDIV = list;
        if (self.checkAnalysisProps()) {
            finalOutDIV = (
                <Table celled padded striped selectable compact>
                    {theaderDIV}
                    <Table.Body>{list}</Table.Body>
                </Table>
            );
        }
        return (
            <div
                className={listClasses}
                ref="resourceList"
                style={{ overflow: 'auto' }}
            >
                {this.props.config && this.props.config.resourceGeoProperty ? (
                    <BasicAggregateMapView
                        mapWidth={950}
                        mapHeight={620}
                        zoomLevel={2}
                        spec={{ instances: instances }}
                        config={this.props.config}
                    />
                ) : (
                    finalOutDIV
                )}
            </div>
        );
    }
}
// Fluxible context accessors this component relies on (getUser in render).
BasicResourceList.contextTypes = {
    executeAction: PropTypes.func.isRequired,
    getUser: PropTypes.func
};
export default BasicResourceList;
|
Masa-Yasuno/oase
|
oase-root/web_app/views/event/urls.py
|
<filename>oase-root/web_app/views/event/urls.py
# Copyright 2019 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.urls import path, re_path
from . import event, evtimer
# URL namespace: reverse these routes as "event:<name>".
app_name = 'event'
urlpatterns = [
    # Endpoint that receives monitoring events pushed to OASE.
    path('event/eventsrequest', event.eventsrequest, name='eventsrequest'),
    #path('event/bulk_eventsrequest', event.bulk_eventsrequest, name='bulk_eventsrequest'),
    # Timer-driven event collection; ettype, config_id and query are
    # restricted to word characters by the regex.
    re_path(r'^evtimer/(?P<ettype>\w+)/(?P<config_id>\w+)/(?P<query>\w+)/$', evtimer.evtimer, name='evtimer'),
]
|
rohith-hegde/job-application-portfolio
|
coin-tradingbot/src/testv2/TrendlineJobTestChartFeeder.java
|
package testv2;
import java.util.ArrayList;
import java.util.List;
import commonv2.Candle;
import commonv2.MarketID;
import net.hammereditor.designutilities.design.RunnableThrowableCallback;
import recorderv2.ChartFeeder;
public class TrendlineJobTestChartFeeder extends ChartFeeder<Candle>
{
	/** Pre-recorded candles, drained from the front as the feeder runs. */
	private List<Candle> chartData;

	/**
	 * @param onError       callback invoked when the feeder thread fails.
	 * @param dataIntervalS interval between data points, in seconds.
	 * @param chartData     pre-recorded candles to replay (consumed in place).
	 */
	public TrendlineJobTestChartFeeder(RunnableThrowableCallback onError, int dataIntervalS, List<Candle> chartData)
	{
		super(onError, dataIntervalS);
		this.chartData = chartData;
	}
	/**
	 * feed the pre-recorded candles one at a time
	 *
	 * Fix: the inner loop removes one candle per consuming market, so when
	 * the remaining data is not a multiple of the market count the original
	 * code threw IndexOutOfBoundsException; we now stop as soon as the
	 * recorded data is exhausted.
	 */
	public void runThread() throws Exception
	{
		List<Candle> cl = new ArrayList<Candle> ();
		while (!chartData.isEmpty())
		{
			for (MarketID mk : super.getConsumingMarketIDs())
			{
				if (chartData.isEmpty())
					break; // ran out of recorded data mid-round
				cl.add(chartData.remove(0));
				super.feedChartData(mk, cl);
				cl.clear();
			}
		}
	}
	/**
	 * Feeds the first {@code dataNum} candles to every consuming market
	 * (or as many as remain — the loop now guards against running out of
	 * recorded data instead of throwing).
	 */
	public void initialFeed(int dataNum) throws Exception
	{
		List<Candle> cl = new ArrayList<Candle> ();
		for (MarketID mk : super.getConsumingMarketIDs())
		{
			for (int i = 0; i < dataNum && !chartData.isEmpty(); i++)
				cl.add(chartData.remove(0));
			super.feedChartData(mk, cl);
			cl.clear();
		}
	}
}
|
solitardj9/micro-iot-service
|
micro-iot-service/src/main/java/com/solitardj9/microiot/application/thing/thingManager/model/exception/ExceptionThingTokenNotAvailable.java
|
<filename>micro-iot-service/src/main/java/com/solitardj9/microiot/application/thing/thingManager/model/exception/ExceptionThingTokenNotAvailable.java<gh_stars>0
package com.solitardj9.microiot.application.thing.thingManager.model.exception;
import org.springframework.http.HttpStatus;
/**
 * Thrown when a thing's token is not available; carries the error code and
 * HTTP status defined by {@link ExceptionCodeThingManager#Thing_Token_Not_Available}.
 */
public class ExceptionThingTokenNotAvailable extends Exception{
	//
	private static final long serialVersionUID = 1330924554379479395L;

	// Numeric application error code for API responses.
	private final int errCode;
	// HTTP status to report to the client.
	private final HttpStatus httpStatus;

	/** Creates the exception with the canonical message, code and status. */
	public ExceptionThingTokenNotAvailable() {
		//
		super(ExceptionCodeThingManager.Thing_Token_Not_Available.getMessage());
		errCode = ExceptionCodeThingManager.Thing_Token_Not_Available.getCode();
		httpStatus = ExceptionCodeThingManager.Thing_Token_Not_Available.getHttpStatus();
	}

	/** Same as the no-arg constructor but preserves the underlying cause. */
	public ExceptionThingTokenNotAvailable(Throwable cause) {
		//
		super(ExceptionCodeThingManager.Thing_Token_Not_Available.getMessage(), cause);
		errCode = ExceptionCodeThingManager.Thing_Token_Not_Available.getCode();
		httpStatus = ExceptionCodeThingManager.Thing_Token_Not_Available.getHttpStatus();
	}

	/** @return the application error code. */
	public int getErrCode() {
		//
		return errCode;
	}

	/** @return the HTTP status associated with this error. */
	public HttpStatus getHttpStatus() {
		//
		return httpStatus;
	}
}
|
Crupette/advent-2018
|
Brick-Breaker/src/Player.cpp
|
#include "Player.h"
#include <Engine/RendererDefault.h>
#include <Engine/InputManager.h>
// Construction/destruction do nothing; all setup happens in init().
Player::Player(){
}
Player::~Player(){
}
// Places the paddle at the bottom-center of the 1024-wide playfield
// (150x20 px, no rotation, "blank" texture).
void Player::init(){
	object.init(glm::vec2(1024 / 2.f, 5), glm::vec2(150, 20), 0.f, "blank");
}
// Per-frame paddle logic: apply keyboard-driven horizontal velocity,
// advance physics, then clamp the paddle inside the playfield.
void Player::update(){
	// Named constants replace the magic numbers that were duplicated below.
	static constexpr float kMoveSpeed   = 512.f;  // horizontal speed while a key is held
	static constexpr float kScreenWidth = 1024.f; // playfield width used for clamping
	static constexpr float kPaddleY     = 5.f;    // paddle's fixed vertical position

	if(DecentEngine::InputManager::isKeyDown(SDLK_d)){
		object.translateVelocity(glm::vec2(kMoveSpeed, 0.f));
	}
	if(DecentEngine::InputManager::isKeyDown(SDLK_a)){
		object.translateVelocity(glm::vec2(-kMoveSpeed, 0.f));
	}
	// 5.f matches the original call; presumably a fixed timestep/scale —
	// TODO confirm against GameObject::update's parameter meaning.
	object.update(5.f);

	// Clamp the paddle so its AABB (x, y, w, h) stays within [0, kScreenWidth].
	glm::vec4 bounds = object.aabb.getBounds();
	if(bounds.x + bounds.z > kScreenWidth){
		object.setPosition(glm::vec2(kScreenWidth - bounds.z, kPaddleY));
	}
	if(bounds.x < 0){
		object.setPosition(glm::vec2(0, kPaddleY));
	}
}
// Submits the paddle to the default renderer's draw queue for this frame.
void Player::render(){
	DecentEngine::RendererDefault::addObject(&object);
}
|
msgbuf/msgbuf
|
de.haumacher.msgbuf/src/main/java/de/haumacher/msgbuf/graph/ScopeMixin.java
|
<reponame>msgbuf/msgbuf
/*
* Copyright (c) 2021 <NAME> et al. All Rights Reserved.
*/
package de.haumacher.msgbuf.graph;
import java.io.IOException;
import java.util.Map;
import de.haumacher.msgbuf.json.JsonReader;
import de.haumacher.msgbuf.json.JsonWriter;
/**
* {@link Scope} implementation that can be mixed in to another class.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public interface ScopeMixin extends Scope {

	/**
	 * Resolves the node registered under the given ID, failing loudly when it
	 * is unknown instead of returning {@code null}.
	 */
	@Override
	default SharedGraphNode resolveOrFail(int id) {
		SharedGraphNode result = index().get(id);
		if (result == null) {
			throw new IllegalArgumentException("No object with ID '" + id + "'.");
		}
		return result;
	}

	/**
	 * Assigns the given ID to the given node.
	 */
	default void enter(SharedGraphNode node, int id) {
		initId(node, id);
		// The assert guards against two nodes being registered under one ID;
		// put() returning non-null means the index already held a mapping.
		SharedGraphNode clash = index().put(id, node);
		assert clash == null : "Clash of ID " + id + ": " + clash + " vs. " + node;
	}

	/**
	 * Registers the node under {@code id} BEFORE reading its fields, so that
	 * cyclic references back to it can be resolved during deserialization.
	 */
	@Override
	default void readData(SharedGraphNode node, int id, JsonReader in) throws IOException {
		enter(node, id);
		in.beginObject();
		node.readFields(this, in);
		in.endObject();
	}

	/**
	 * Writes the full node data on first encounter (assigning a fresh ID),
	 * and only the numeric ID reference on subsequent encounters.
	 */
	@Override
	default void writeRefOrData(JsonWriter out, SharedGraphNode node) throws IOException {
		int id = id(node);
		if (id == 0) {
			id = newId();
			enter(node, id);
			node.writeData(this, out, id);
		} else {
			out.value(id);
		}
	}

	/**
	 * Looks up the ID of the given node in this {@link Scope}.
	 *
	 * @see #initId(SharedGraphNode, int)
	 */
	int id(SharedGraphNode node);

	/**
	 * Assigns the given ID to the given node.
	 *
	 * @see #id(SharedGraphNode)
	 */
	void initId(SharedGraphNode node, int id);

	/**
	 * Creates a fresh ID.
	 */
	int newId();

	/**
	 * The index implementation associating object with IDs.
	 */
	Map<Object, SharedGraphNode> index();
}
|
mariacaslop/Environment-of-model-design-based-on-fuzzy-automaton
|
FuzzyAutomaton/es.ucm.modelling.fuzzyautomaton/src/fuzzyAutomaton/FuzzyRelation.java
|
/**
*/
package fuzzyAutomaton;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Fuzzy Relation</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* </p>
* <ul>
* <li>{@link fuzzyAutomaton.FuzzyRelation#getTFRelation <em>TF Relation</em>}</li>
* <li>{@link fuzzyAutomaton.FuzzyRelation#getExpression1 <em>Expression1</em>}</li>
* <li>{@link fuzzyAutomaton.FuzzyRelation#getExpression2 <em>Expression2</em>}</li>
* <li>{@link fuzzyAutomaton.FuzzyRelation#getExpression3 <em>Expression3</em>}</li>
* <li>{@link fuzzyAutomaton.FuzzyRelation#getDelta <em>Delta</em>}</li>
* </ul>
*
* @see fuzzyAutomaton.FuzzyAutomatonPackage#getFuzzyRelation()
* @model annotation="gmf.node label='tFRelation, expression1, expression2, expression3, delta' label.readOnly='true' label.parser='MessageFormatParser' label.pattern='{1} {0} {2} {0} {3} {4}' size='150,25' tool.name='Fuzzy Relation'"
* @generated
*/
public interface FuzzyRelation extends EObject {
	// NOTE: EMF-generated interface (@generated tags) — hand edits are lost on
	// regeneration; change the Ecore model instead of this file.
	/**
	 * Returns the value of the '<em><b>TF Relation</b></em>' attribute.
	 * The literals are from the enumeration {@link fuzzyAutomaton.FuzzyRelationType}.
	 * <!-- begin-user-doc -->
	 * <p>
	 * If the meaning of the '<em>TF Relation</em>' attribute isn't clear,
	 * there really should be more of a description here...
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>TF Relation</em>' attribute.
	 * @see fuzzyAutomaton.FuzzyRelationType
	 * @see #setTFRelation(FuzzyRelationType)
	 * @see fuzzyAutomaton.FuzzyAutomatonPackage#getFuzzyRelation_TFRelation()
	 * @model required="true"
	 * @generated
	 */
	FuzzyRelationType getTFRelation();

	/**
	 * Sets the value of the '{@link fuzzyAutomaton.FuzzyRelation#getTFRelation <em>TF Relation</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>TF Relation</em>' attribute.
	 * @see fuzzyAutomaton.FuzzyRelationType
	 * @see #getTFRelation()
	 * @generated
	 */
	void setTFRelation(FuzzyRelationType value);

	/**
	 * Returns the value of the '<em><b>Expression1</b></em>' attribute.
	 * <!-- begin-user-doc -->
	 * <p>
	 * If the meaning of the '<em>Expression1</em>' attribute isn't clear,
	 * there really should be more of a description here...
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Expression1</em>' attribute.
	 * @see #setExpression1(String)
	 * @see fuzzyAutomaton.FuzzyAutomatonPackage#getFuzzyRelation_Expression1()
	 * @model required="true"
	 * @generated
	 */
	String getExpression1();

	/**
	 * Sets the value of the '{@link fuzzyAutomaton.FuzzyRelation#getExpression1 <em>Expression1</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Expression1</em>' attribute.
	 * @see #getExpression1()
	 * @generated
	 */
	void setExpression1(String value);

	/**
	 * Returns the value of the '<em><b>Expression2</b></em>' attribute.
	 * <!-- begin-user-doc -->
	 * <p>
	 * If the meaning of the '<em>Expression2</em>' attribute isn't clear,
	 * there really should be more of a description here...
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Expression2</em>' attribute.
	 * @see #setExpression2(String)
	 * @see fuzzyAutomaton.FuzzyAutomatonPackage#getFuzzyRelation_Expression2()
	 * @model required="true"
	 * @generated
	 */
	String getExpression2();

	/**
	 * Sets the value of the '{@link fuzzyAutomaton.FuzzyRelation#getExpression2 <em>Expression2</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Expression2</em>' attribute.
	 * @see #getExpression2()
	 * @generated
	 */
	void setExpression2(String value);

	/**
	 * Returns the value of the '<em><b>Expression3</b></em>' attribute.
	 * <!-- begin-user-doc -->
	 * <p>
	 * If the meaning of the '<em>Expression3</em>' attribute isn't clear,
	 * there really should be more of a description here...
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Expression3</em>' attribute.
	 * @see #setExpression3(String)
	 * @see fuzzyAutomaton.FuzzyAutomatonPackage#getFuzzyRelation_Expression3()
	 * @model
	 * @generated
	 */
	String getExpression3();

	/**
	 * Sets the value of the '{@link fuzzyAutomaton.FuzzyRelation#getExpression3 <em>Expression3</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Expression3</em>' attribute.
	 * @see #getExpression3()
	 * @generated
	 */
	void setExpression3(String value);

	/**
	 * Returns the value of the '<em><b>Delta</b></em>' attribute.
	 * The default value is <code>"0"</code>.
	 * <!-- begin-user-doc -->
	 * <p>
	 * If the meaning of the '<em>Delta</em>' attribute isn't clear,
	 * there really should be more of a description here...
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Delta</em>' attribute.
	 * @see #setDelta(Double)
	 * @see fuzzyAutomaton.FuzzyAutomatonPackage#getFuzzyRelation_Delta()
	 * @model default="0" required="true"
	 * @generated
	 */
	Double getDelta();

	/**
	 * Sets the value of the '{@link fuzzyAutomaton.FuzzyRelation#getDelta <em>Delta</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Delta</em>' attribute.
	 * @see #getDelta()
	 * @generated
	 */
	void setDelta(Double value);
} // FuzzyRelation
|
dishanilahiri/LeetCode-Premium
|
Maximum Points You Can Obtain from Cards.cpp
|
<reponame>dishanilahiri/LeetCode-Premium
Question->
There are several cards arranged in a row, and each card has an associated number of points. The points are given in the integer array cardPoints.
In one step, you can take one card from the beginning or from the end of the row. You have to take exactly k cards.
Your score is the sum of the points of the cards you have taken.
Given the integer array cardPoints and the integer k, return the maximum score you can obtain.
Example 1:
Input: cardPoints = [1,2,3,4,5,6,1], k = 3
Output: 12
Explanation: After the first step, your score will always be 1. However, choosing the rightmost card first will maximize your total score. The optimal strategy is to take the three cards on the right, giving a final score of 1 + 6 + 5 = 12.
Example 2:
Input: cardPoints = [2,2,2], k = 2
Output: 4
Explanation: Regardless of which two cards you take, your score will always be 4.
Example 3:
Input: cardPoints = [9,7,7,9,7,7,9], k = 7
Output: 55
Explanation: You have to take all the cards. Your score is the sum of points of all cards.
Example 4:
Input: cardPoints = [1,1000,1], k = 1
Output: 1
Explanation: You cannot take the card in the middle. Your best score is 1.
Example 5:
Input: cardPoints = [1,79,80,1,1,1,200,1], k = 3
Output: 202
Constraints:
1 <= cardPoints.length <= 10^5
1 <= cardPoints[i] <= 10^4
1 <= k <= cardPoints.length
Topic-> DP
Code:
class Solution {
public:
    // Maximum score from taking exactly k cards off the two ends of the row.
    //
    // Taking `fromFront` cards from the front forces the remaining
    // k - fromFront to come from the back, so the answer is the maximum over
    // fromFront in [0, k] of (sum of first fromFront) + (sum of last
    // k - fromFront). Both sums are read in O(1) from a single prefix-sum
    // array, so there is no need for a second (reversed) suffix array.
    int maxScore(vector<int>& cardPoints, int k) {
        const int n = static_cast<int>(cardPoints.size());
        // prefix[i] = sum of the first i cards (prefix[0] == 0).
        vector<int> prefix(n + 1, 0);
        for (int i = 0; i < n; ++i) {
            prefix[i + 1] = prefix[i] + cardPoints[i];
        }
        int best = 0;
        for (int fromFront = 0; fromFront <= k; ++fromFront) {
            const int fromBack = k - fromFront;
            // Sum of the last `fromBack` cards, via the same prefix array.
            const int backSum = prefix[n] - prefix[n - fromBack];
            best = max(best, prefix[fromFront] + backSum);
        }
        return best;
    }
};
|
alyamaharanipj/adopsiku
|
server/routes/pet/petOfferRouter.js
|
import express from "express";
import {
getPets,
getPetById,
createPet,
updatePet,
getPetsByProviderID,
deletePet,
updatePetStatus,
setReportDuration,
getProviderPets
} from "../../controller/pet/petController.js";
const router = express.Router();
// Pet offer CRUD + status endpoints. Multi-segment routes like /prov/:id do
// not collide with the single-segment /:id route below.
router.get("/", getPets);                              // list all pets
router.get("/:id", getPetById);                        // fetch one pet
router.post("/:type", createPet);                      // create a pet of the given type
router.put("/:id", updatePet);                         // update a pet
router.get("/prov/:id", getPetsByProviderID);          // pets owned by a provider
router.delete("/:provid/:id", deletePet);              // provider deletes their pet
router.put("/status/:id/", updatePetStatus);           // change adoption status
router.get("/providerProfile/:id", getProviderPets);   // pets shown on a provider profile
router.put("/reportduration/:id/", setReportDuration); // set reporting period
export default router;
|
stonebinox/dustmail
|
node_modules/@mapbox/gl-matrix/rollup.config.js
|
import cleanup from 'rollup-plugin-cleanup';
// Rollup build config: bundles the Mapbox entry point into a UMD module
// exposed globally as `glMatrix`; `cleanup` strips comments/whitespace.
export default {
  entry: 'src/mapbox-build.js',
  dest: 'dist/gl-matrix.js',
  format: 'umd',
  moduleName: 'glMatrix',
  plugins: [cleanup()]
};
|
TecArt/servicecatalog-development
|
oscm-common/javasrc/org/oscm/types/enumtypes/TimerType.java
|
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: 16.06.2009
*
*******************************************************************************/
package org.oscm.types.enumtypes;
import org.oscm.internal.types.enumtypes.ConfigurationKey;
/**
* Represents the supported timer types.
*
* @author <NAME>
*
*/
public enum TimerType {

    /**
     * Indicates a timer that is used to control checks to remove organization
     * accounts that have not been confirmed by a login of the initial
     * administrator within a certain period of time.
     */
    ORGANIZATION_UNCONFIRMED(ConfigurationKey.TIMER_INTERVAL_ORGANIZATION,
            ConfigurationKey.TIMER_INTERVAL_ORGANIZATION_OFFSET),

    /**
     * Indicates a timer that is used to ensure that subscriptions can only be
     * used for the time specified in the product related parameters. If this
     * period is exceeded, the timer related operations must ensure that the
     * subscription cannot be used anymore, unless the price model is changed.
     */
    RESTRICTED_SUBSCRIPTION_USAGE_PERIOD(
            ConfigurationKey.TIMER_INTERVAL_SUBSCRIPTION_EXPIRATION,
            ConfigurationKey.TIMER_INTERVAL_SUBSCRIPTION_EXPIRATION_OFFSET),

    /**
     * Indicates a timer that check pending subscriptions for a reached timeout
     * time. If this time is reached, an e-mail is sent to the technical
     * products organization administrators informing about the timeout.
     */
    TENANT_PROVISIONING_TIMEOUT(
            ConfigurationKey.TIMER_INTERVAL_TENANT_PROVISIONING_TIMEOUT,
            ConfigurationKey.TIMER_INTERVAL_TENANT_PROVISIONING_TIMEOUT_OFFSET),

    /**
     * Indicates a timer that runs the billing service every day and also causes
     * the debit operations to be run against the PSP interface. The timer does
     * not have a cyclic expiration time given in ms but as a period setting.
     */
    BILLING_INVOCATION(Period.DAY,
            ConfigurationKey.TIMER_INTERVAL_BILLING_OFFSET),

    /**
     * Indicates a timer that runs the check of discount to end coming every
     * day. The timer does not have a cyclic expiration time given in ms but as
     * a period setting.
     */
    DISCOUNT_END_CHECK(Period.DAY,
            ConfigurationKey.TIMER_INTERVAL_DISCOUNT_END_NOTIFICATION_OFFSET),

    /**
     * Indicates a timer that will scan for inactive on-behalf users regularly
     * and remove them, if they are inactive for a longer time than specified in
     * the configuration setting
     * {@link ConfigurationKey#PERMITTED_PERIOD_INACTIVE_ON_BEHALF_USERS}.
     */
    INACTIVE_ON_BEHALF_USERS_REMOVAL(
            ConfigurationKey.TIMER_INTERVAL_INACTIVE_ON_BEHALF_USERS,
            ConfigurationKey.TIMER_INTERVAL_INACTIVE_ON_BEHALF_USERS_OFFSET),

    /**
     * Indicates a timer that runs the check if the amount of users currently
     * registered has exceeded the amount of users allowed for the platform.No
     * offset is necessary for this timer.
     *
     */
    USER_NUM_CHECK(ConfigurationKey.TIMER_INTERVAL_USER_COUNT, null);

    // All state is set exactly once per constant; declared final so the enum
    // constants are immutable (the fields were mutable in the original).
    private final ConfigurationKey keyForIntervalTime;

    private final ConfigurationKey keyForIntervalOffset;

    private final Period periodSetting;

    private final boolean isBasedOnFixPeriod;

    /**
     * Creates a cyclic timer recurring every keyIntervalTime ms using an offset
     * of keyIntervalOffset ms.
     *
     * @param keyIntervalTime
     *            The time between the cyclic expirations of the timer.
     * @param keyIntervalOffset
     *            The offset to be used to configure this timer.
     */
    private TimerType(ConfigurationKey keyIntervalTime,
            ConfigurationKey keyIntervalOffset) {
        this.keyForIntervalTime = keyIntervalTime;
        this.keyForIntervalOffset = keyIntervalOffset;
        this.periodSetting = null;
        this.isBasedOnFixPeriod = true;
    }

    /**
     * Creates a timer driven by a flexible period (e.g. daily) instead of a
     * fixed millisecond interval.
     */
    private TimerType(Period periodSetting, ConfigurationKey keyIntervalOffset) {
        this.keyForIntervalTime = null;
        this.periodSetting = periodSetting;
        this.keyForIntervalOffset = keyIntervalOffset;
        this.isBasedOnFixPeriod = false;
    }

    public ConfigurationKey getKeyForIntervalTime() {
        return keyForIntervalTime;
    }

    public ConfigurationKey getKeyForIntervalOffset() {
        return keyForIntervalOffset;
    }

    public Period getPeriodSetting() {
        return periodSetting;
    }

    /**
     * Indicates whether the timer is based on an absolutely fix base period
     * given in milliseconds (what would return <code>true</code>) or if it is
     * based on a period like a month that is flexible.
     *
     * @return <code>true</code> in case the timer is based on a fix amount of
     *         ms, <code>false</code> otherwise.
     */
    public boolean isBasedOnFixPeriod() {
        return isBasedOnFixPeriod;
    }
}
|
JaakkoTulkki/javascriptmvc-cypress
|
jmvc/generate/test/scaffold.js
|
// Rhino-driven integration test: generates a cookbook app plus a Recipe
// scaffold, wires the generated files into the app, and verifies the model
// and controller load both in development and in the compressed build.
load('steal/rhino/rhino.js');
load('steal/test/test.js');
steal('steal/test', function(s){
	s.test.module("jquery/generate/scaffold")
	STEALPRINT = false;
	s.test.test("make app and scaffold", function(t){
		// Run the app and scaffold generators (they read their args from _args).
		_args = ['cookbook'];
		load('jquery/generate/app');
		_args = ['Cookbook.Models.Recipe'];
		load('jquery/generate/scaffold');
		load('steal/rhino/rhino.js');
		// Register the generated model/controller with the app's steal chain.
		var cookbookContent = readFile('cookbook/cookbook.js')
			+"\n.then('./models/recipe.js')"
			+"\n.then('./controllers/recipe_controller.js')";
		new steal.File('cookbook/cookbook.js').save( cookbookContent );
		// Point the unit and functional test loaders at the scaffolded tests.
		var qunitContent = readFile('cookbook/test/qunit/qunit.js').
			replace("cookbook_test", "recipe_test");
		new steal.File('cookbook/test/qunit/qunit.js').save( qunitContent );
		var funcunitContent = readFile('cookbook/test/funcunit/funcunit.js').
			replace("cookbook_test", "recipe_controller_test");
		new steal.File('cookbook/test/funcunit/funcunit.js').save( funcunitContent );
		t.clear();
		print('trying to open ...')
		t.open('cookbook/cookbook.html', false)
		t.ok(Cookbook.Controllers.Recipe, "Recipe Controller")
		// Fixed assertion label: this checks the model, not the controller.
		t.ok(Cookbook.Models.Recipe, "Recipe Model")
		t.clear();
	});
	//now see if unit and functional run
	// s.test.test("scaffold unit tests", function(t){
	//
	// load('steal/rhino/rhino.js');
	// load('funcunit/loader.js');
	// FuncUnit.load('cookbook/qunit.html');
	// });
	//
	// s.test.test("scaffold functional tests", function(t){
	// load('steal/rhino/rhino.js');
	// load('funcunit/loader.js');
	// FuncUnit.load('cookbook/funcunit.html');
	//
	// });
	//
	// s.test.test("documentjs", function(t){
	// t.clear();
	// load('steal/rhino/rhino.js');
	// _args = ['cookbook/cookbook.html']
	// load("documentjs/documentjs.js");
	// DocumentJS('cookbook/cookbook.html');
	// });
	s.test.test("compress", function(t){
		t.clear();
		// Build the production bundle, then switch the page to it and re-check.
		load("cookbook/scripts/build.js")
		var cookbookPage = readFile('cookbook/cookbook.html').
			replace("steal.js?cookbook,development", "steal.js?cookbook,production");
		new steal.File('cookbook/cookbook.html').save( cookbookPage );
		t.clear();
		t.open('cookbook/cookbook.html', false)
		t.ok(Cookbook.Controllers.Recipe, "Recipe Controller")
		// Fixed assertion label: this checks the model, not the controller.
		t.ok(Cookbook.Models.Recipe, "Recipe Model")
		t.clear();
	});
	//print("-- cleanup --");
	// s.File("cookbook").removeDir();
})
|
mpolatcan/zepyhrus
|
crescent/core/mapping.py
|
from .typesafe_dict import TypeSafeDict
class Mapping(TypeSafeDict):
    """Dict-like container that keeps all of its key/value pairs under a
    single id-named field.

    Entries accumulated under the nested ``KeyValue``/``Json`` fields
    (types inherited from the TypeSafeDict machinery — see their
    definitions) are merged into the id-keyed mapping just before
    serialization and the source field is dropped.
    """

    def __init__(self, id: str = None):
        # NOTE(review): ``id`` shadows the builtin but is kept for API
        # compatibility with callers.
        super(Mapping, self).__init__()
        self.__id = id
        # Start with an empty mapping stored under the given id.
        self._set_field(self.__id, {})

    def __rearrange_mapping(self):
        # Merge KeyValue- or Json-typed entries into the id-keyed dict and
        # drop the consumed field. When both are present, KeyValue wins and
        # the Json field is left untouched.
        kvs = self.__get_field__(self.KeyValue.__name__)
        json = self.__get_field__(self.Json.__name__)

        if json or kvs:
            for key, value in kvs.items() if kvs else json.items():
                self.__get_field__(self.__id).update({key: value})

            self._pop_field(self.KeyValue.__name__ if kvs else self.Json.__name__)

    def __to_dict__(self, **kwargs):
        # NOTE(review): the rearrange runs AFTER super().__to_dict__() has
        # produced conversion_result — presumably the result still references
        # the live id-keyed dict; confirm against TypeSafeDict.__to_dict__.
        conversion_success, conversion_result = super().__to_dict__(id=self.__id)

        if conversion_success:
            self.__rearrange_mapping()

        return conversion_success, conversion_result
|
execomrt/newton-dynamics
|
newton-4.00/applications/ndSandbox/ndContactCallback.cpp
|
<filename>newton-4.00/applications/ndSandbox/ndContactCallback.cpp<gh_stars>1000+
/* Copyright (c) <2003-2021> <Newton Game Dynamics>
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely
*/
#include "ndSandboxStdafx.h"
#include "ndContactCallback.h"
// Sets up the callback with a material table seeded with a
// default-constructed material for the (0, 0) id pair.
ndContactCallback::ndContactCallback()
	:ndContactNotify()
	,m_materialMap()
{
	m_materialMap.Insert(ndMaterial(), ndMaterailKey(0, 0));
}
// Notification hook invoked when a body enters the world; intentionally a
// no-op in this sandbox.
void ndContactCallback::OnBodyAdded(ndBodyKinematic* const) const
{
}
// Notification hook invoked when a body leaves the world; intentionally a
// no-op in this sandbox.
void ndContactCallback::OnBodyRemoved(ndBodyKinematic* const) const
{
}
// Returns the material entry shared by the unordered id pair (id0, id1),
// creating a default-constructed entry on first use.
ndMaterial& ndContactCallback::RegisterMaterial(dUnsigned32 id0, dUnsigned32 id1)
{
	const ndMaterailKey key(id0, id1);
	dTree<ndMaterial, ndMaterailKey>::dNode* entry = m_materialMap.Find(key);
	if (entry == nullptr)
	{
		entry = m_materialMap.Insert(ndMaterial(), key);
	}
	return entry->GetInfo();
}
// Looks up the material registered for the user ids of the two shape
// instances; yields a default-constructed material when the pair was never
// registered. (The contact argument is unused and therefore unnamed.)
ndMaterial ndContactCallback::GetMaterial(const ndContact* const, const ndShapeInstance& instance0, const ndShapeInstance& instance1) const
{
	const ndMaterailKey key(instance0.GetMaterial().m_userId, instance1.GetMaterial().m_userId);
	if (dTree<ndMaterial, ndMaterailKey>::dNode* const entry = m_materialMap.Find(key))
	{
		return entry->GetInfo();
	}
	return ndMaterial();
}
bool ndContactCallback::OnAabbOverlap(const ndContact* const contactJoint, dFloat32)
{
const ndBodyKinematic* const body0 = contactJoint->GetBody0();
const ndBodyKinematic* const body1 = contactJoint->GetBody1();
const ndShapeInstance& instanceShape0 = body0->GetCollisionShape();
const ndShapeInstance& instanceShape1 = body1->GetCollisionShape();
if ((instanceShape0.GetUserDataID() == m_dedris) && (instanceShape1.GetUserDataID() == m_dedris))
{
return false;
}
return true;
}
void ndContactCallback::PlaySoundTest(const ndContact* const contactJoint)
{
const ndBodyKinematic* const body0 = contactJoint->GetBody0();
const ndBodyKinematic* const body1 = contactJoint->GetBody1();
const ndContactPointList& contactPoints = contactJoint->GetContactPoints();
dFloat32 maxNornalSpeed = dFloat32 (0.0f);
dFloat32 maxTangentSpeed = dFloat32(0.0f);
const ndContactMaterial* normalContact = nullptr;
const ndContactMaterial* tangentContact = nullptr;
for (ndContactPointList::dNode* contactNode = contactPoints.GetFirst(); contactNode; contactNode = contactNode->GetNext())
{
const ndContactMaterial& contactPoint = contactNode->GetInfo();
const dVector pointVeloc0(body0->GetVelocityAtPoint(contactPoint.m_point));
const dVector pointVeloc1(body1->GetVelocityAtPoint(contactPoint.m_point));
const dVector veloc(pointVeloc1 - pointVeloc0);
const dFloat32 verticalSpeed = contactPoint.m_normal.DotProduct(veloc).GetScalar();
const dFloat32 nornalSpeed = dAbs(verticalSpeed);
if (nornalSpeed > maxNornalSpeed)
{
maxNornalSpeed = nornalSpeed;
normalContact = &contactPoint;
}
dVector tangVeloc(veloc - contactPoint.m_normal.Scale(verticalSpeed));
const dFloat32 tangentSpeed = tangVeloc.DotProduct(tangVeloc).GetScalar();
if (tangentSpeed > maxTangentSpeed)
{
maxTangentSpeed = tangentSpeed;
tangentContact = &contactPoint;
}
}
const ndShapeInstance& instance0 = body0->GetCollisionShape();
const ndShapeInstance& instance1 = body1->GetCollisionShape();
const dFloat32 speedThreshold = dMax(instance0.GetMaterial().m_userParam[0].m_floatData, instance1.GetMaterial().m_userParam[0].m_floatData);
if (maxNornalSpeed > speedThreshold)
{
// play impact sound here;
}
maxTangentSpeed = dSqrt(maxTangentSpeed);
if (maxTangentSpeed > speedThreshold)
{
// play scratching sound here;
}
}
// Per-contact notification: forwards to the sound test when the contact's
// material carries the m_playSound flag. (Thread index and timestep are
// unused and therefore unnamed.)
void ndContactCallback::OnContactCallback(dInt32, const ndContact* const contactJoint, dFloat32)
{
	if (contactJoint->GetMaterial().m_userFlags & m_playSound)
	{
		PlaySoundTest(contactJoint);
	}
}
|
shachindrasingh/apiman
|
gateway/engine/core/src/test/java/io/apiman/gateway/engine/impl/PolicyChainTest.java
|
/*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.gateway.engine.impl;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import io.apiman.gateway.engine.async.IAsyncHandler;
import io.apiman.gateway.engine.beans.PolicyFailure;
import io.apiman.gateway.engine.beans.ApiRequest;
import io.apiman.gateway.engine.beans.ApiResponse;
import io.apiman.gateway.engine.beans.exceptions.ConfigurationParseException;
import io.apiman.gateway.engine.io.IApimanBuffer;
import io.apiman.gateway.engine.policy.IPolicy;
import io.apiman.gateway.engine.policy.IPolicyChain;
import io.apiman.gateway.engine.policy.IPolicyContext;
import io.apiman.gateway.engine.policy.PolicyWithConfiguration;
import io.apiman.gateway.engine.policy.RequestChain;
import io.apiman.gateway.engine.policy.ResponseChain;
import io.apiman.gateway.engine.util.PassthroughPolicy;
/**
* Test {@link RequestChain} and {@link ResponseChain} functionality.
*
* @author <NAME> <<EMAIL>>
*/
@SuppressWarnings({ "nls", "unchecked" })
public class PolicyChainTest {

    private RequestChain requestChain;
    private ResponseChain responseChain;

    private ApiRequest mockRequest;
    private ApiResponse mockResponse;
    private IApimanBuffer mockBuffer;
    private IAsyncHandler<IApimanBuffer> mockBodyHandler;
    private IAsyncHandler<Void> mockEndHandler;

    private PassthroughPolicy policyOne;
    private PassthroughPolicy policyTwo;

    private PolicyWithConfiguration pwcOne;
    private PolicyWithConfiguration pwcTwo;

    private IPolicyContext mockContext;
    private List<PolicyWithConfiguration> policies;

    /**
     * Builds fresh spies/mocks for each test: two pass-through policies,
     * request/response mocks and body/end handlers.
     */
    @Before
    public void setup() {
        policies = new ArrayList<>();

        policyOne = spy(new PassthroughPolicy("1"));
        policyTwo = spy(new PassthroughPolicy("2"));

        pwcOne = new PolicyWithConfiguration(policyOne, new Object());
        pwcTwo = new PolicyWithConfiguration(policyTwo, new Object());

        mockContext = mock(IPolicyContext.class);

        mockRequest = mock(ApiRequest.class);
        given(mockRequest.getApiKey()).willReturn("bacon");
        given(mockRequest.getDestination()).willReturn("mars");
        given(mockRequest.getType()).willReturn("request");

        // Fixed: the original re-stubbed mockRequest here (a copy/paste slip
        // that overwrote getType() with "response"); the response mock needs
        // no stubbing for these tests.
        mockResponse = mock(ApiResponse.class);

        mockBuffer = mock(IApimanBuffer.class);
        given(mockBuffer.toString()).willReturn("bananas");

        mockBodyHandler = mock(IAsyncHandler.class);
        mockEndHandler = mock(IAsyncHandler.class);
    }

    /**
     * Request chains apply policies in declaration order and forward every
     * written buffer to the body handler before signalling end.
     */
    @Test
    public void shouldExecuteRequestChainTwice() {
        policies.add(pwcOne);
        policies.add(pwcTwo);

        requestChain = new RequestChain(policies, mockContext);
        requestChain.bodyHandler(mockBodyHandler);
        requestChain.endHandler(mockEndHandler);

        requestChain.doApply(mockRequest);
        requestChain.write(mockBuffer);
        requestChain.write(mockBuffer);
        requestChain.end();

        verify(mockBodyHandler, times(2)).handle(mockBuffer);
        verify(mockEndHandler, times(1)).handle((Void) null);

        InOrder order = inOrder(policyOne, policyTwo);
        order.verify(policyOne).apply(mockRequest, mockContext, pwcOne.getConfiguration(), requestChain);
        order.verify(policyTwo).apply(mockRequest, mockContext, pwcTwo.getConfiguration(), requestChain);
    }

    /**
     * Response chains apply policies in REVERSE declaration order.
     */
    @Test
    public void shouldExecuteResponseChainTwice() {
        policies.add(pwcOne);
        policies.add(pwcTwo);

        responseChain = new ResponseChain(policies, mockContext);
        responseChain.bodyHandler(mockBodyHandler);
        responseChain.endHandler(mockEndHandler);

        responseChain.doApply(mockResponse);
        responseChain.write(mockBuffer);
        responseChain.write(mockBuffer);
        responseChain.end();

        verify(mockBodyHandler, times(2)).handle(mockBuffer);
        verify(mockEndHandler, times(1)).handle((Void) null);

        InOrder order = inOrder(policyTwo, policyOne);
        order.verify(policyTwo).apply(mockResponse, mockContext, pwcTwo.getConfiguration(), responseChain);
        order.verify(policyOne).apply(mockResponse, mockContext, pwcOne.getConfiguration(), responseChain);
    }

    /**
     * A chain with no body/end handlers registered must not NPE.
     */
    @Test
    public void shouldExecuteWithoutHandlers() {
        policies.add(pwcOne);

        requestChain = new RequestChain(policies, mockContext);
        requestChain.doApply(mockRequest);
        requestChain.end();
    }

    /**
     * Buffers must reach the body handler in the order they were written,
     * followed by exactly one end signal.
     */
    @Test
    public void shouldPreserveBufferOrder() {
        policies.add(pwcOne);

        requestChain = new RequestChain(policies, mockContext);
        requestChain.bodyHandler(mockBodyHandler);
        requestChain.endHandler(mockEndHandler);
        requestChain.doApply(mockRequest);

        IApimanBuffer buffer1 = mock(IApimanBuffer.class);
        IApimanBuffer buffer2 = mock(IApimanBuffer.class);
        IApimanBuffer buffer3 = mock(IApimanBuffer.class);

        requestChain.write(buffer1);
        requestChain.write(buffer2);
        requestChain.write(buffer3);
        requestChain.end();

        InOrder order = inOrder(mockBodyHandler, mockEndHandler);
        order.verify(mockBodyHandler).handle(buffer1);
        order.verify(mockBodyHandler).handle(buffer2);
        order.verify(mockBodyHandler).handle(buffer3);
        order.verify(mockEndHandler).handle((Void) null);
    }

    /**
     * doFailure must route the failure to the registered failure handler.
     */
    @Test
    public void shouldCallFailureHandlerOnDoFail() {
        policies.add(pwcOne);
        policies.add(pwcTwo);

        requestChain = new RequestChain(policies, mockContext);

        IAsyncHandler<PolicyFailure> mPolicyFailureHandler = mock(IAsyncHandler.class);
        PolicyFailure mPolicyFailure = mock(PolicyFailure.class);

        requestChain.policyFailureHandler(mPolicyFailureHandler);
        requestChain.bodyHandler(mockBodyHandler);
        requestChain.endHandler(mockEndHandler);
        requestChain.doApply(mockRequest);
        requestChain.doFailure(mPolicyFailure);

        verify(mPolicyFailureHandler).handle(mPolicyFailure);
    }

    /**
     * throwError must route the throwable to the registered error handler.
     */
    @Test
    public void shouldCallErrorHandlerOnThrowError() {
        policies.add(pwcOne);
        policies.add(pwcTwo);

        requestChain = new RequestChain(policies, mockContext);

        IAsyncHandler<Throwable> mThrowableFailureHandler = mock(IAsyncHandler.class);
        Throwable mThrowable = mock(Throwable.class);

        requestChain.policyErrorHandler(mThrowableFailureHandler);
        requestChain.bodyHandler(mockBodyHandler);
        requestChain.endHandler(mockEndHandler);
        requestChain.doApply(mockRequest);
        requestChain.throwError(mThrowable);

        verify(mThrowableFailureHandler).handle(mThrowable);
    }

    /**
     * doSkip must end the chain without applying any policy after the one
     * that skipped.
     */
    @Test
    public void shouldEndChainImmediatelyWhenSkipCalled() {
        IPolicy skipPolicy = spy(new IPolicy() {

            @Override
            public Object parseConfiguration(String jsonConfiguration) throws ConfigurationParseException {
                return null;
            }

            @Override
            public void apply(ApiRequest request, IPolicyContext context, Object config,
                    IPolicyChain<ApiRequest> chain) {
                chain.doSkip(request);
            }

            @Override
            public void apply(ApiResponse response, IPolicyContext context, Object config,
                    IPolicyChain<ApiResponse> chain) {
                chain.doSkip(response);
            }
        });

        PolicyWithConfiguration pwcSkip = new PolicyWithConfiguration(skipPolicy, null);

        policies.add(pwcSkip);
        policies.add(pwcTwo);

        requestChain = new RequestChain(policies, mockContext);
        requestChain.bodyHandler(mockBodyHandler);
        requestChain.endHandler(mockEndHandler);

        requestChain.doApply(mockRequest);
        requestChain.end();

        verify(mockEndHandler, times(1)).handle((Void) null);

        // The skip policy runs exactly once...
        verify(skipPolicy, times(1)).apply(mockRequest, mockContext, null, requestChain);
        // ...and the policy that follows it in the chain must never run.
        // Fixed: the original verified policyOne, which was never added to
        // this chain and hence could not have been applied anyway.
        verify(policyTwo, never()).apply(mockRequest, mockContext, pwcTwo.getConfiguration(), requestChain);
    }
}
|
jmswen/eden
|
eden/fs/utils/ProcessAccessLog.h
|
<reponame>jmswen/eden
/*
* Copyright (c) 2018-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#pragma once
#include <unistd.h>

#include <chrono>
#include <cstdint>
#include <memory>
#include <unordered_map>

#include <folly/Synchronized.h>

#include "eden/fs/utils/BucketedLog.h"
namespace facebook {
namespace eden {
class ProcessNameCache;
/**
* An inexpensive mechanism for counting accesses by pids. Intended for counting
* FUSE and Thrift calls from external processes.
*
* The first time a thread calls recordAccess, that thread is then coupled to
* this particular ProcessAccessLog, even if it calls recordAccess on another
* ProcessAccessLog instance. Thus, use one ProcessAccessLog per pool of
* threads.
*/
class ProcessAccessLog {
 public:
  explicit ProcessAccessLog(std::shared_ptr<ProcessNameCache> processNameCache);
  ~ProcessAccessLog();

  /**
   * Records an access by a process ID. The first call to recordAccess by a
   * particular thread binds that thread to this access log. Future recordAccess
   * calls on that thread will accumulate within this access log.
   *
   * Process IDs passed to recordAccess are also inserted into the
   * ProcessNameCache.
   */
  void recordAccess(pid_t pid);

  /**
   * Returns the number of times each pid was passed to recordAccess() in
   * `lastNSeconds`.
   *
   * Note: ProcessAccessLog buckets by whole seconds, so this number should be
   * considered an approximation.
   */
  std::unordered_map<pid_t, size_t> getAllAccesses(
      std::chrono::seconds lastNSeconds);

 private:
  // Data for one second.
  struct Bucket {
    void clear();

    /**
     * Returns whether the added pid is newly observed or not in the `isNew` out
     * parameter.
     */
    void add(pid_t pid, bool& isNew);
    void merge(const Bucket& other);

    // pid -> number of accesses recorded within this one-second bucket.
    std::unordered_map<pid_t, size_t> accessCounts;
  };

  // Keep up to ten seconds of data, but use a power of two so BucketedLog
  // generates smaller, faster code.
  static constexpr uint64_t kBucketCount = 16;
  using Buckets = BucketedLog<Bucket, kBucketCount>;

  // All mutable log state, guarded by folly::Synchronized.
  struct State {
    Buckets buckets;
  };

  const std::shared_ptr<ProcessNameCache> processNameCache_;
  folly::Synchronized<State> state_;

  // Per-thread accumulator (defined in the .cpp) that merges counts into
  // state_; it needs access to the private types above.
  friend struct ThreadLocalBucket;
};
} // namespace eden
} // namespace facebook
|
hiowenluke/kdo
|
examples/05-kdo.flow()-or-kdo.dirFn()/08-with-options.return.js
|
<reponame>hiowenluke/kdo
const flow = require('./08-flow');

// The trace string the flow is expected to build up in args.str.
const expect = 'f11f121f122f123f13f14f15f21f22f23f31f32f33f4f5';

// Runs the flow with `return: 'str'` so it resolves to args.str, then
// reports whether the produced trace matches the expected one.
async function fn() {
	const options = {return: 'str'};
	const args = {str: ''};
	const result = await flow(args, options);
	return result === expect;
}

module.exports = fn;
|
L-Net-1992/Paddle
|
python/paddle/fluid/tests/unittests/test_index_sample_op.py
|
<reponame>L-Net-1992/Paddle
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import paddle
import paddle.fluid as fluid
import numpy as np
from op_test import OpTest
class TestIndexSampleOp(OpTest):
    """OpTest for ``index_sample``: ``Out[i, j] = X[i, Index[i, j]]``."""

    def setUp(self):
        self.op_type = "index_sample"
        self.python_api = paddle.index_sample
        self.config()
        xnp = np.random.random(self.x_shape).astype(self.x_type)
        indexnp = np.random.randint(low=0,
                                    high=self.x_shape[1],
                                    size=self.index_shape).astype(
                                        self.index_type)
        self.inputs = {'X': xnp, 'Index': indexnp}
        # Reference output: per-row gather along axis 1. This is the exact
        # vectorized equivalent of the original double loop over
        # (row, index) pairs followed by a reshape to index_shape.
        out = np.take_along_axis(xnp, indexnp, axis=1)
        self.outputs = {'Out': out}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        # Only X receives a gradient; Index is integer-valued.
        self.check_grad(['X'], 'Out', check_eager=True)

    def config(self):
        """
        For multi-dimension input
        """
        self.x_shape = (10, 20)
        self.x_type = "float64"
        self.index_shape = (10, 10)
        self.index_type = "int32"
class TestCase1(TestIndexSampleOp):

    def config(self):
        """
        For one dimension input: a single-column (100, 1) x, so every
        index must select column 0.
        """
        self.x_shape = (100, 1)
        self.x_type = "float64"
        self.index_shape = (100, 1)
        self.index_type = "int32"
class TestCase2(TestIndexSampleOp):

    def config(self):
        """
        For int64_t index type
        """
        self.x_shape = (10, 100)
        self.x_type = "float64"
        self.index_shape = (10, 10)
        self.index_type = "int64"
class TestCase3(TestIndexSampleOp):

    def config(self):
        """
        For int index type
        """
        self.x_shape = (10, 100)
        self.x_type = "float64"
        self.index_shape = (10, 10)
        self.index_type = "int32"
class TestCase4(TestIndexSampleOp):

    def config(self):
        """
        For int64 index type with wider rows (128 columns, 64 samples
        per row).
        """
        self.x_shape = (10, 128)
        self.x_type = "float64"
        self.index_shape = (10, 64)
        self.index_type = "int64"
class TestIndexSampleShape(unittest.TestCase):
    """Static-graph smoke test: builds and executes an index_sample program
    with dynamic (-1) batch dimensions."""

    def test_shape(self):
        paddle.enable_static()
        # create x value
        x_shape = (2, 5)
        x_type = "float64"
        x_np = np.random.random(x_shape).astype(x_type)

        # create index value
        index_shape = (2, 3)
        index_type = "int32"
        index_np = np.random.randint(low=0, high=x_shape[1],
                                     size=index_shape).astype(index_type)

        x = fluid.data(name='x', shape=[-1, 5], dtype='float64')
        index = fluid.data(name='index', shape=[-1, 3], dtype='int32')
        output = paddle.index_sample(x=x, index=index)

        place = fluid.CPUPlace()
        exe = fluid.Executor(place=place)
        exe.run(fluid.default_startup_program())

        feed = {'x': x_np, 'index': index_np}
        # Only checks that the program runs; the fetched value is unused.
        res = exe.run(feed=feed, fetch_list=[output])
class TestIndexSampleDynamic(unittest.TestCase):
    """Dygraph-mode check of index_sample against a hand-computed result."""

    def test_result(self):
        with fluid.dygraph.guard():
            x = paddle.to_tensor([[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0],
                                  [9.0, 10.0, 11.0, 12.0]],
                                 dtype='float32')
            index = paddle.to_tensor([[0, 1, 2], [1, 2, 3], [0, 0, 0]],
                                     dtype='int32')
            out_z1 = paddle.index_sample(x, index)

            expected_output = np.array([[1.0, 2.0, 3.0], [6.0, 7.0, 8.0],
                                        [9.0, 9.0, 9.0]])
            # Fixed: the original asserted ``out.all() == expected.all()``,
            # which compares two scalar booleans (True == True) and can never
            # fail. Compare element-wise instead.
            np.testing.assert_allclose(out_z1.numpy(), expected_output)
if __name__ == "__main__":
    # Run the op tests under static-graph mode by default.
    paddle.enable_static()
    unittest.main()
|
tony2u/Medusa
|
Medusa/MedusaCore/CoreLib/win/MemoryLeakDetector.h
|
<reponame>tony2u/Medusa<gh_stars>100-1000
#pragma once
#include "CorePlatform/CorePlatformDefines.h"

// CRT-debug-based memory leak detection for Windows debug builds: routes
// new/malloc/free through file/line-tracking wrappers so the CRT leak
// report can point at the allocation site.
#ifdef MEDUSA_MEMORY_LEAK_DETECT
#ifdef MEDUSA_WINDOWS
#ifdef MEDUSA_DEBUG
// Must be defined before <crtdbg.h> so CRT allocations go through the debug
// heap with file/line information.
#define _CRTDBG_MAP_ALLOC
#include <stdlib.h>
#include "crtdbg.h"

// File/line-aware wrappers. The file/line arguments are currently unused
// here; the calls forward straight to the CRT.
inline void* debug_malloc(const char* file, int line, size_t size)
{
	return malloc(size);
}
inline void debug_free(const char* file, int line, void* p)
{
	free(p);
}

// Placement-style operator new carrying the call site.
// NOTE(review): DEBUG_NEW below expands to new(_NORMAL_BLOCK, __FILE__,
// __LINE__) — a three-argument form supplied by <crtdbg.h> under
// _CRTDBG_MAP_ALLOC, not this two-argument overload; confirm which overload
// is actually selected.
inline void* operator new(size_t size, const char *file, int line)
{
	return debug_malloc(file, line, size);
}

#define DEBUG_NEW new( _NORMAL_BLOCK, __FILE__, __LINE__)
#define new DEBUG_NEW

namespace std
{
	//put in std namespace to avoid std::malloc error
	inline void* debug_malloc(const char* file, int line, size_t size)
	{
		return malloc(size);
	}
	inline void debug_free(const char* file, int line, void* p)
	{
		free(p);
	}
}

// Route plain malloc/free calls through the tracking wrappers.
#define malloc(size) debug_malloc(__FILE__, __LINE__, size)
#define free(ptr) debug_free (__FILE__, __LINE__, ptr);
#else
#define DEBUG_NEW
#endif

namespace Medusa
{
	namespace MemoryLeakDetector
	{
		// Enables the CRT leak check at process exit.
		//place at first in main
		inline void Start()
		{
#ifdef MEDUSA_DEBUG
			_CrtSetDbgFlag(_CrtSetDbgFlag(_CRTDBG_REPORT_FLAG) | _CRTDBG_LEAK_CHECK_DF);
#endif
		}
		// Breaks into the debugger on the N-th allocation (N taken from a
		// previous leak report).
		//break at how many times to alloc memory
		inline void SetBreakPoint(int memoryAllocTimes)
		{
#ifdef MEDUSA_DEBUG
			_CrtSetBreakAlloc(memoryAllocTimes);
#endif
		}
	}
}
#endif
#endif
|
lnyu/jfinalshop
|
jfinalshop-4.0-core/src/main/java/com/jfinalshop/Util/AARProtocol.java
|
<reponame>lnyu/jfinalshop<filename>jfinalshop-4.0-core/src/main/java/com/jfinalshop/Util/AARProtocol.java<gh_stars>1-10
package com.jfinalshop.Util;
import java.io.UnsupportedEncodingException;
import java.security.PrivateKey;
import java.security.PublicKey;
/**
 * AES + RSA hybrid protocol ("aar"): the payload is AES-encrypted with a
 * freshly generated key, the AES key itself is RSA-encrypted with the
 * peer's public key, and both parts are hex-encoded and joined with '|'.
 *
 * @author: polfdark
 *
 */
public class AARProtocol {

	/**
	 * Encrypts raw bytes with a fresh 128-bit AES key and wraps that key
	 * with the given RSA public key.
	 *
	 * @param data the plaintext bytes
	 * @param publicKey the RSA public key in the encoded form RsaKit expects
	 * @return hex(rsa(aesKey)) + "|" + hex(aes(data))
	 */
	public static String encrypt(byte[] data, String publicKey) {
		PublicKey pub = RsaKit.loadPubKey(publicKey);
		byte[] aesKey = AesKit.getRawKey(System.currentTimeMillis(), 128);
		byte[] encrypt = AesKit.encrypt(data, aesKey);
		byte[] aesEncrypt = RsaKit.encrypt(aesKey, pub);
		return HexKit.bytesToHexStr(aesEncrypt) + "|" + HexKit.bytesToHexStr(encrypt);
	}

	/**
	 * Encrypts a string (encoded as UTF-8). Delegates to
	 * {@link #encrypt(byte[], String)} instead of duplicating the hybrid
	 * encryption logic, as the original did.
	 *
	 * @param data the plaintext string
	 * @param publicKey the RSA public key in the encoded form RsaKit expects
	 * @return hex(rsa(aesKey)) + "|" + hex(aes(utf8(data)))
	 */
	public static String encrypt(String data, String publicKey) {
		byte[] dataByte = new byte[0];
		try {
			dataByte = data.getBytes("utf-8");
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		}
		return encrypt(dataByte, publicKey);
	}

	/**
	 * Decrypts a "rsaHex|aesHex" message with an already-loaded private key.
	 * The plaintext is decoded as UTF-8, consistent with the String-keyed
	 * overload (the original used the platform default charset here, which
	 * could garble non-ASCII payloads on some platforms).
	 *
	 * @param request the "rsaHex|aesHex" encrypted message
	 * @param privateKey the RSA private key
	 * @return the decrypted plaintext, or "" if UTF-8 is unsupported
	 */
	public static String decrypt(String request, PrivateKey privateKey) {
		int idx = request.indexOf("|");
		String rsaEncrypt = request.substring(0, idx);
		String encryptBody = request.substring(idx + 1);
		byte[] aesKey = RsaKit.decrypt(rsaEncrypt, privateKey);
		byte[] body = AesKit.decrypt(encryptBody, aesKey);
		try {
			return new String(body, "utf-8");
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		}
		return "";
	}

	/**
	 * Decrypts with an encoded private key: loads it via RsaKit and
	 * delegates to {@link #decrypt(String, PrivateKey)}.
	 *
	 * @param request the "rsaHex|aesHex" encrypted message
	 * @param privateKey the RSA private key in the encoded form RsaKit expects
	 * @return the decrypted plaintext, or "" if UTF-8 is unsupported
	 */
	public static String decrypt(String request, String privateKey) {
		PrivateKey pri = RsaKit.loadPriKey(privateKey);
		return decrypt(request, pri);
	}
}
|
suyash-naithani/apicurio-registry
|
utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityReader.java
|
/*
* Copyright 2021 Red Hat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apicurio.registry.utils.impexp;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.apicurio.registry.utils.IoUtil;
/**
 * Reads apicurio registry entities from a ZIP export archive, one entry at
 * a time. The entity type is derived from the entry's file name: the token
 * before the final extension (e.g. "....Content.json" → Content).
 *
 * @author <EMAIL>
 */
public class EntityReader {

    private static final ObjectMapper mapper;
    static {
        JsonFactory jsonFactory = new JsonFactory();
        // Don't let Jackson close the shared zip stream after each entry.
        jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
        mapper = new ObjectMapper(jsonFactory);
    }

    private final transient ZipInputStream zip;

    /**
     * Constructor.
     * @param zip the export archive to read from (not closed by this class)
     */
    public EntityReader(ZipInputStream zip) {
        this.zip = zip;
    }

    /**
     * Reads the next entity from the archive.
     *
     * @return the next entity, or null when the archive is exhausted or the
     *         current entry's type cannot be determined
     * @throws IOException on underlying stream errors
     */
    public Entity readEntity() throws IOException {
        ZipEntry entry = zip.getNextEntry();
        if (entry != null) {
            String path = entry.getName();
            EntityType entityType = parseEntityType(path);
            if (entityType != null) {
                switch (entityType) {
                    case ArtifactRule:
                        return readArtifactRule(entry);
                    case ArtifactVersion:
                        return readArtifactVersion(entry);
                    case Content:
                        return readContent(entry);
                    case GlobalRule:
                        return readGlobalRule(entry);
                    case Group:
                        return readGroup(entry);
                    case Manifest:
                        return readManifest(entry);
                }
            }
        }
        return null;
    }

    /**
     * Reads a content entity: a ".Content.json" metadata entry followed by a
     * companion ".Content.data" entry holding the raw content bytes.
     */
    private ContentEntity readContent(ZipEntry entry) throws IOException {
        if (entry.getName().endsWith(".json")) {
            ContentEntity entity = this.readEntry(entry, ContentEntity.class);
            ZipEntry dataEntry = zip.getNextEntry();
            if (dataEntry == null || !dataEntry.getName().endsWith(".Content.data")) {
                // TODO what to do if this isn't the file we expect??
            }
            entity.contentBytes = IoUtil.toBytes(zip, false);
            // Note: the original additionally called zip.read(entity.contentBytes)
            // here. IoUtil.toBytes has already consumed the entry, so that extra
            // read was at best a no-op and at worst clobbered the buffer; removed.
            return entity;
        } else {
            throw new IOException("Not yet supported: found .Content.data file before .Content.json");
        }
    }

    private ManifestEntity readManifest(ZipEntry entry) throws IOException {
        return readEntry(entry, ManifestEntity.class);
    }

    private GroupEntity readGroup(ZipEntry entry) throws IOException {
        return readEntry(entry, GroupEntity.class);
    }

    private ArtifactVersionEntity readArtifactVersion(ZipEntry entry) throws IOException {
        return this.readEntry(entry, ArtifactVersionEntity.class);
    }

    private ArtifactRuleEntity readArtifactRule(ZipEntry entry) throws IOException {
        return this.readEntry(entry, ArtifactRuleEntity.class);
    }

    private GlobalRuleEntity readGlobalRule(ZipEntry entry) throws IOException {
        return this.readEntry(entry, GlobalRuleEntity.class);
    }

    /**
     * Derives the entity type from a path of the form "name.Type.ext".
     *
     * @return the entity type, or null when the path has no type token or
     *         names an unknown type (callers skip such entries)
     */
    private EntityType parseEntityType(String path) {
        String[] split = path.split("\\.");
        if (split.length > 2) {
            String typeStr = split[split.length - 2];
            try {
                return EntityType.valueOf(typeStr);
            } catch (IllegalArgumentException unknownType) {
                // Unknown type token: treat like "no type" so the entry is skipped
                // instead of aborting the whole import.
                return null;
            }
        }
        return null;
    }

    /**
     * Deserializes the current zip entry's JSON payload into the given class.
     */
    private <T> T readEntry(ZipEntry entry, Class<T> theClass) throws IOException {
        byte[] bytes = IoUtil.toBytes(zip, false);
        T entity = mapper.readerFor(theClass).readValue(bytes);
        return entity;
    }
}
|
alex-w/iers2010
|
src/arg2.cpp
|
#include "iers2010.hpp"
#ifdef USE_EXTERNAL_CONSTS
#include "iersc.hpp"
#endif
/// @details The purpose of the function is to compute the angular astronomical
/// argument, which depends on time, for 11 tidal argument
/// calculations. The order of the 11 angular quantities in vector
/// angle are given below: 01-M2, 02-S2, 03-N2, 04-K2, 05-K1, 06-O1,
/// 07-P1, 08-Q1, 09-Mf, 10-Mm, 11-Ssa (See Reference 1) This function
/// is a translation/wrapper for the fortran ARG2 subroutine, found
/// here : http://maia.usno.navy.mil/conv2010/software.html
///
/// @param[in] iyear Four digit year (Note 1)
/// @param[in] day Day of Year Greenwich Time (Note 2)
/// @param[out] angle Angular argument for Schwiderski computation, in radians
/// (Notes 3, 4 and 5). Array of size > 11.
/// @return An integer value which can be:
/// Returned Value | Status
/// ---------------|-------------------------------------------
/// -1 | Error; Invalid year
/// 0 | All ok
///
/// @note
/// -# This subroutine is valid only after 1973 CE. A validation
/// test has been added to stop the subroutine if an invalid
/// year is used.
/// -# Example: 32.5 for February 1 12 Noon
/// Example: 1.25 for January 1 6 AM
/// -# Ocean loading phases computed from Schwiderski's models
/// refer to the phase of the associated solid Earth tide
/// generating potential at the zero meridian according to <br>
/// OL_DR = OL_AMP ' COS (SE_PHASE" - OL_PHASE) <br>
/// where OL = OCEAN LOADING TIDE,<br>
/// SE = SOLID EARTH TIDE GENERATING POTENTIAL.<br>
/// If the harmonic tide development of Cartwright, et al.
/// (CTE) (1971, 1973) is used, make sure that SE_PHASE"
/// take into account:
/// - the sign of SE_AMP in the tables of Cartwright et al.
/// - that CTE'S SE_PHASE refers to a sine rather than a
/// cosine function if (N+M) = (DEGREE + ORDER) of the tide
/// spherical harmonic is odd.
/// i.e. SE_PHASE" = TAU(T) ' N1 + S(T) ' N2 + H(T) ' N3 <br>
/// + P(T) ' N4 + N'(T) ' N5 + PS(T) ' N6 <br>
/// + PI If CTE'S amplitude coefficient < 0 <br>
/// + PI/2 If (DEGREE + N1) is odd <br>
/// where TAU ... PS = astronomical arguments,<br>
/// N1 ... N6 = CTE'S argument numbers.<br>
/// Most tide generating software compute SE_PHASE" (for use
/// with cosines).
/// -# The double precision change from the original routine ARG.f
/// to ARG2.F yields output differences on the order of 10^-9 radians.
/// -# The input array angle must be able to hold at least 11 doubles.
/// -# Status: Canonical model
///
/// @version 07.10.2011
///
/// @cite <NAME>. and <NAME>. (eds.), IERS Conventions (2010), IERS
/// Technical Note No. 36, BKG (2010)
/// @cite <NAME>., 1983, "Atlas of Ocean Tidal Charts and Maps, Part I:
/// The Semidiurnal Principal Lunar Tide M2," <NAME>, 6,
/// pp. 219-256.
/// Compute the 11 tidal angular arguments (M2, S2, N2, K2, K1, O1, P1, Q1,
/// Mf, Mm, Ssa) for the given epoch; see the documentation block above for
/// the full contract. Returns -1 for years before 1974, 0 on success.
int iers2010::arg2(int iyear, double day, double *angle) noexcept {
  // Number of tidal constituents and first supported year.
  constexpr int num_waves(11);
  constexpr int year_min(1974);
  // Degrees to radians.
  constexpr double deg2rad(0.174532925199e-1);
#ifdef USE_EXTERNAL_CONSTS
  constexpr double TWOPI(iers2010::D2PI);
#else
  constexpr double TWOPI(6.283185307179586476925287e0);
#endif

  // Angular speed of each constituent, in radians per second.
  constexpr double speed[num_waves] = {
      1.405190e-4, 1.454440e-4, 1.378800e-4, 1.458420e-4,
      0.729210e-4, 0.675980e-4, 0.725230e-4, 0.649590e-4,
      0.053234e-4, 0.026392e-4, 0.003982e-4,
  };

  // Multipliers of (h0, s0, p0, 2*pi) for each constituent.
  constexpr double angfac[num_waves][4] = {
      {0.200e+01, -0.200e+01, 0.000e+00, 0.000e+00},
      {0.000e+00, 0.000e+00, 0.000e+00, 0.000e+00},
      {0.200e+01, -0.300e+01, 0.100e+01, 0.000e+00},
      {0.200e+01, 0.000e+00, 0.000e+00, 0.000e+00},
      {0.100e+01, 0.000e+00, 0.000e+00, 0.250e+00},
      {0.100e+01, -0.200e+01, 0.000e+00, -0.250e+00},
      {-0.100e+01, 0.000e+00, 0.000e+00, -0.250e+00},
      {0.100e+01, -0.300e+01, 0.100e+01, -0.250e+00},
      {0.000e+00, 0.200e+01, 0.000e+00, 0.000e+00},
      {0.000e+00, 0.100e+01, -0.100e+01, 0.000e+00},
      {0.200e+01, 0.000e+00, 0.000e+00, 0.000e+00}};

  // Validate year (the model is only valid from 1974 on).
  if (iyear < year_min)
    return -1;

  // Split day-of-year into its integral day and day fraction, then express
  // the fraction in seconds.
  double whole_day;
  const double day_fraction = std::modf(day, &whole_day);
  const double fday(day_fraction * 86400e0);

  // Days elapsed since the reference epoch, leap days included.
  // (Revision 07 October 2011: ICAPD modified.)
  const int icapd((int)whole_day + 365 * (iyear - 1975) + ((iyear - 1973) / 4));
  const double capt((27392.500528e0 + 1.000000035e0 * (double)icapd) / 36525e0);

  // Mean longitudes at the beginning of the day, all in radians:
  // Sun (h0), Moon (s0) and lunar perigee (p0).
  const double h0((279.69668e0 + (36000.768930485e0 + 3.03e-4 * capt) * capt) *
                  deg2rad);
  const double s0(
      (((1.9e-6 * capt - .001133e0) * capt + 481267.88314137e0) * capt +
       270.434358e0) *
      deg2rad);
  const double p0(
      (((-1.2e-5 * capt - .010325e0) * capt + 4069.0340329577e0) * capt +
       334.329653e0) *
      deg2rad);

  // Assemble each tidal argument and normalize it into [0, 2*pi). A single
  // correction suffices because fmod's magnitude is below TWOPI.
  for (int w = 0; w < num_waves; ++w) {
    double arg = speed[w] * fday + angfac[w][0] * h0 + angfac[w][1] * s0 +
                 angfac[w][2] * p0 + angfac[w][3] * TWOPI;
    arg = std::fmod(arg, TWOPI);
    if (arg < 0e0)
      arg += TWOPI;
    angle[w] = arg;
  }

  // Finished.
  return 0;
}
|
I-Al-Istannen/velcom
|
backend/backend/src/main/java/de/aaaaaaah/velcom/backend/storage/repo/exception/DirectoryAlreadyExistsException.java
|
package de.aaaaaaah.velcom.backend.storage.repo.exception;
import java.nio.file.Path;
import java.util.Objects;
/**
* An exception that is thrown when a certain directory already exists.
*/
/**
 * An exception that is thrown when a certain directory already exists.
 */
public class DirectoryAlreadyExistsException extends Exception {

	private final Path directory;

	/**
	 * Constructs a new {@link DirectoryAlreadyExistsException}.
	 *
	 * @param directory the directory that already exists; must not be {@code null}
	 * @throws NullPointerException if {@code directory} is {@code null}
	 */
	public DirectoryAlreadyExistsException(Path directory) {
		this.directory = Objects.requireNonNull(directory);
	}

	/**
	 * @return the directory that already exists
	 */
	public Path getDirectory() {
		return directory;
	}
}
|
georaio/ethsigner
|
ethsigner/core/src/integration-test/java/tech/pegasys/ethsigner/jsonrpcproxy/model/jsonrpc/PrivateTransactionBuilder.java
|
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.ethsigner.jsonrpcproxy.model.jsonrpc;
import java.util.List;
/**
 * Fluent builder assembling the fields of a {@link PrivateTransaction}.
 *
 * <p>Each {@code withX} method records its argument and returns this builder
 * so that calls can be chained; {@link #build()} produces the final object.
 * Fields that are never set remain {@code null}.
 */
public class PrivateTransactionBuilder {

  private String from;
  private String nonce;
  private String gasPrice;
  private String gas;
  private String to;
  private String value;
  private String data;
  private String privateFrom;
  private List<String> privateFor;
  private String restriction;

  public PrivateTransactionBuilder withFrom(final String sender) {
    this.from = sender;
    return this;
  }

  public PrivateTransactionBuilder withNonce(final String transactionNonce) {
    this.nonce = transactionNonce;
    return this;
  }

  public PrivateTransactionBuilder withGasPrice(final String price) {
    this.gasPrice = price;
    return this;
  }

  public PrivateTransactionBuilder withGas(final String gasLimit) {
    this.gas = gasLimit;
    return this;
  }

  public PrivateTransactionBuilder withTo(final String recipient) {
    this.to = recipient;
    return this;
  }

  public PrivateTransactionBuilder withValue(final String transferValue) {
    this.value = transferValue;
    return this;
  }

  public PrivateTransactionBuilder withData(final String payload) {
    this.data = payload;
    return this;
  }

  public PrivateTransactionBuilder withPrivateFrom(final String privacySender) {
    this.privateFrom = privacySender;
    return this;
  }

  public PrivateTransactionBuilder withPrivateFor(final List<String> privacyRecipients) {
    this.privateFor = privacyRecipients;
    return this;
  }

  public PrivateTransactionBuilder withRestriction(final String restrictionMode) {
    this.restriction = restrictionMode;
    return this;
  }

  /** Builds the {@link PrivateTransaction} from whatever has been set so far. */
  public PrivateTransaction build() {
    return new PrivateTransaction(
        from, nonce, gasPrice, gas, to, value, data, privateFrom, privateFor, restriction);
  }
}
|
LemADEC/Cyberware
|
src/main/java/flaxbeard/cyberware/api/hud/NotificationInstance.java
|
package flaxbeard.cyberware.api.hud;
/**
 * Pairing of an {@link INotification} with the time at which it was created.
 *
 * <p>Both fields are assigned exactly once in the constructor and never
 * reassigned, so they are declared {@code final} to make the class's
 * effective immutability explicit (this was the only change; behavior is
 * otherwise identical).
 */
public class NotificationInstance
{
	private final float time;
	private final INotification notification;

	/**
	 * @param time the creation time recorded for this notification
	 * @param notification the notification being wrapped
	 */
	public NotificationInstance(float time, INotification notification)
	{
		this.time = time;
		this.notification = notification;
	}

	/**
	 * @return the time passed to the constructor
	 */
	public float getCreatedTime()
	{
		return time;
	}

	/**
	 * @return the wrapped notification
	 */
	public INotification getNotification()
	{
		return notification;
	}
}
|
perfectrecall/aws-sdk-cpp
|
aws-cpp-sdk-fsx/source/model/SecurityStyle.cpp
|
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/fsx/model/SecurityStyle.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>
using namespace Aws::Utils;
namespace Aws
{
namespace FSx
{
namespace Model
{
namespace SecurityStyleMapper
{
        /* Precomputed hashes of the serialized enum names; used below for
         * fast name -> enum lookup. */
        static const int UNIX_HASH = HashingUtils::HashString("UNIX");
        static const int NTFS_HASH = HashingUtils::HashString("NTFS");
        static const int MIXED_HASH = HashingUtils::HashString("MIXED");

        /* Maps a serialized name onto its SecurityStyle value. Names that do
         * not match a known value are recorded in the process-wide overflow
         * container (when available) and returned as the raw hash code cast
         * to the enum type; otherwise NOT_SET is returned. */
        SecurityStyle GetSecurityStyleForName(const Aws::String& name)
        {
          int hashCode = HashingUtils::HashString(name.c_str());
          if (hashCode == UNIX_HASH)
          {
            return SecurityStyle::UNIX;
          }
          else if (hashCode == NTFS_HASH)
          {
            return SecurityStyle::NTFS;
          }
          else if (hashCode == MIXED_HASH)
          {
            return SecurityStyle::MIXED;
          }
          EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
          if(overflowContainer)
          {
            overflowContainer->StoreOverflow(hashCode, name);
            return static_cast<SecurityStyle>(hashCode);
          }
          return SecurityStyle::NOT_SET;
        }

        /* Serializes a SecurityStyle back to its wire name. Values that were
         * previously stashed in the overflow container are recovered from it;
         * anything else unknown yields an empty string. */
        Aws::String GetNameForSecurityStyle(SecurityStyle enumValue)
        {
          switch(enumValue)
          {
          case SecurityStyle::UNIX:
            return "UNIX";
          case SecurityStyle::NTFS:
            return "NTFS";
          case SecurityStyle::MIXED:
            return "MIXED";
          default:
            EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
            if(overflowContainer)
            {
              return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
            }
            return {};
          }
        }
} // namespace SecurityStyleMapper
} // namespace Model
} // namespace FSx
} // namespace Aws
|
GrieferAtWork/KOS
|
include/kos/__exception_data.h
|
<reponame>GrieferAtWork/KOS
/* Copyright (c) 2018 Griefer@Work *
* *
* This software is provided 'as-is', without any express or implied *
* warranty. In no event will the authors be held liable for any damages *
* arising from the use of this software. *
* *
* Permission is granted to anyone to use this software for any purpose, *
* including commercial applications, and to alter it and redistribute it *
* freely, subject to the following restrictions: *
* *
* 1. The origin of this software must not be misrepresented; you must not *
* claim that you wrote the original software. If you use this software *
* in a product, an acknowledgement in the product documentation would be *
* appreciated but is not required. *
* 2. Altered source versions must be plainly marked as such, and must not be *
* misrepresented as being the original software. *
* 3. This notice may not be removed or altered from any source distribution. *
*/
#ifndef _KOS___EXCEPTION_DATA_H
#define _KOS___EXCEPTION_DATA_H 1

#include <__stdinc.h>
#include <hybrid/host.h>

/* Pull in the architecture-specific exception data layout. */
#if defined(__i386__) || defined(__x86_64__)
#include "i386-kos/bits/__exception_data.h"
#else
#error "Unsupported arch"
#endif

__DECL_BEGIN

/* Maximum number of system-call arguments (the arch header above
 * may have already provided its own value). */
#ifndef __ARCH_SYSCALL_MAX_ARGC
#define __ARCH_SYSCALL_MAX_ARGC 6
#endif

/* Default exception data containers. */
#ifndef __PRIVATE_DEFAULT_FOREACH_EXCEPTION_DATA
#define __PRIVATE_DEFAULT_FOREACH_EXCEPTION_DATA(FUNC) \
    FUNC(noncontinuable) \
    FUNC(badalloc) \
    FUNC(invalid_handle) \
    FUNC(segfault) \
    FUNC(divide_by_zero) \
    FUNC(index_error) \
    FUNC(buffer_too_small) \
    FUNC(filesystem_error) \
    FUNC(net_error) \
    FUNC(not_executable) \
    FUNC(no_device) \
    FUNC(unhandled_interrupt) \
    FUNC(unknown_systemcall) \
    FUNC(exit) \
    FUNC(system) \
    FUNC(illegal_instruction) \
    FUNC(invalid_segment) \
    FUNC(retry_rwlock) \
/**/
#endif /* !__PRIVATE_DEFAULT_FOREACH_EXCEPTION_DATA */

/* Hook through which the arch header can contribute extra containers. */
#ifndef __PRIVATE_ARCH_FOREACH_EXCEPTION_DATA
#define __PRIVATE_ARCH_FOREACH_EXCEPTION_DATA(FUNC) /* nothing */
#endif

/* Hook through which user code can contribute extra containers. */
#ifndef __PRIVATE_USER_FOREACH_EXCEPTION_DATA
#define __PRIVATE_USER_FOREACH_EXCEPTION_DATA(FUNC) /* nothing */
#endif

/* Enumerate all exception data containers. */
#ifndef __PRIVATE_FOREACH_EXCEPTION_DATA
#define __PRIVATE_FOREACH_EXCEPTION_DATA(FUNC) \
    __PRIVATE_DEFAULT_FOREACH_EXCEPTION_DATA(FUNC) \
    __PRIVATE_ARCH_FOREACH_EXCEPTION_DATA(FUNC) \
    __PRIVATE_USER_FOREACH_EXCEPTION_DATA(FUNC)
#endif /* !__PRIVATE_FOREACH_EXCEPTION_DATA */

/* The number of extended exception information data pointers. */
#ifndef __EXCEPTION_INFO_NUM_DATA_POINTERS
#define __EXCEPTION_INFO_NUM_DATA_POINTERS 15
#endif

/* Total size, in bytes, of the data-pointer block above. */
#define __EXCEPTION_INFO_SIZEOF_DATA \
    (__EXCEPTION_INFO_NUM_DATA_POINTERS*__SIZEOF_POINTER__)

__DECL_END

#endif /* !_KOS___EXCEPTION_DATA_H */
|
shaoxt/weave
|
weave-service/weave-service-spring/src/main/java/io/aftersound/weave/service/AdminServiceMetadataManager.java
|
package io.aftersound.weave.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.aftersound.weave.service.metadata.ServiceMetadata;
import java.nio.file.Path;
import java.util.Map;
/**
* This {@link AdminServiceMetadataManager} manages the lifecycle of
* {@link ServiceMetadata} (s) related to administration functionalities.
*/
/**
 * Manages the lifecycle of {@link ServiceMetadata} (s) backing the
 * administration functionalities. The metadata is read from the configured
 * directory exactly once, during {@link #init()}.
 */
class AdminServiceMetadataManager extends ServiceMetadataManager {

	private final ObjectMapper serviceMetadataReader;
	private final Path metadataDirectory;

	public AdminServiceMetadataManager(ObjectMapper serviceMetadataReader, Path metadataDirectory) {
		this.serviceMetadataReader = serviceMetadataReader;
		this.metadataDirectory = metadataDirectory;
	}

	@Override
	public void init() {
		// Administration ServiceMetadata is static: load it once, register all.
		ServiceMetadataLoader loader =
				new ServiceMetadataLoader(serviceMetadataReader, metadataDirectory);
		serviceMetadataById.putAll(loader.load());
	}
}
|
zipated/src
|
third_party/blink/renderer/core/frame/frame.cc
|
<reponame>zipated/src
/*
* Copyright (C) 1998, 1999 <NAME> <<EMAIL>>
* 1999 <NAME> <<EMAIL>>
* 1999 <NAME> <<EMAIL>>
* 2000 <NAME> <<EMAIL>>
* 2000 <NAME> <<EMAIL>>
* 2001 <NAME> <<EMAIL>>
* Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Apple Inc. All
* rights reserved.
* Copyright (C) 2005 <NAME> <<EMAIL>>
* Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)
* Copyright (C) 2008 <NAME> <<EMAIL>>
* Copyright (C) 2008 Google Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "third_party/blink/renderer/core/frame/frame.h"
#include <memory>
#include "third_party/blink/public/web/web_frame_client.h"
#include "third_party/blink/public/web/web_remote_frame_client.h"
#include "third_party/blink/renderer/bindings/core/v8/window_proxy_manager.h"
#include "third_party/blink/renderer/core/dom/document_type.h"
#include "third_party/blink/renderer/core/dom/events/event.h"
#include "third_party/blink/renderer/core/dom/node_computed_style.h"
#include "third_party/blink/renderer/core/dom/user_gesture_indicator.h"
#include "third_party/blink/renderer/core/frame/local_dom_window.h"
#include "third_party/blink/renderer/core/frame/settings.h"
#include "third_party/blink/renderer/core/frame/use_counter.h"
#include "third_party/blink/renderer/core/html/html_frame_element_base.h"
#include "third_party/blink/renderer/core/input/event_handler.h"
#include "third_party/blink/renderer/core/layout/layout_embedded_content.h"
#include "third_party/blink/renderer/core/loader/empty_clients.h"
#include "third_party/blink/renderer/core/loader/navigation_scheduler.h"
#include "third_party/blink/renderer/core/page/focus_controller.h"
#include "third_party/blink/renderer/core/page/page.h"
#include "third_party/blink/renderer/core/probe/core_probes.h"
#include "third_party/blink/renderer/platform/feature_policy/feature_policy.h"
#include "third_party/blink/renderer/platform/instance_counters.h"
#include "third_party/blink/renderer/platform/loader/fetch/resource_error.h"
#include "third_party/blink/renderer/platform/wtf/assertions.h"
namespace blink {

using namespace HTMLNames;

Frame::~Frame() {
  InstanceCounters::DecrementCounter(InstanceCounters::kFrameCounter);
  // A frame must be fully detached before destruction: the owner cleared and
  // the lifecycle advanced all the way to kDetached (see Detach()).
  DCHECK(!owner_);
  DCHECK_EQ(lifecycle_.GetState(), FrameLifecycle::kDetached);
}

// Traces all garbage-collected members for the Blink GC.
void Frame::Trace(blink::Visitor* visitor) {
  visitor->Trace(tree_node_);
  visitor->Trace(page_);
  visitor->Trace(owner_);
  visitor->Trace(window_proxy_manager_);
  visitor->Trace(dom_window_);
  visitor->Trace(client_);
}

// Severs this frame's links to its client, owner element and page.
// NOTE: statement order here is deliberate; see the inline TODOs.
void Frame::Detach(FrameDetachType type) {
  DCHECK(client_);
  // Record where the detach happened, for later debugging.
  detach_stack_ = base::debug::StackTrace();
  // By the time this method is called, the subclasses should have already
  // advanced to the Detaching state.
  DCHECK_EQ(lifecycle_.GetState(), FrameLifecycle::kDetaching);
  client_->SetOpener(nullptr);
  // After this, we must no longer talk to the client since this clears
  // its owning reference back to our owning LocalFrame.
  client_->Detached(type);
  client_ = nullptr;
  // Mark the frame as detached once |client_| is null, as most of the frame has
  // been torn down at this point.
  // TODO(dcheng): Once https://crbug.com/820782 is fixed, Frame::Client() will
  // also assert that it is only accessed when the frame is not detached.
  lifecycle_.AdvanceTo(FrameLifecycle::kDetached);
  // TODO(dcheng): This currently needs to happen after calling
  // FrameClient::Detached() to make it easier for FrameClient::Detached()
  // implementations to detect provisional frames and avoid removing them from
  // the frame tree. https://crbug.com/578349.
  DisconnectOwnerElement();
  page_ = nullptr;
}

// Clears the owner element's reference to this frame (and ours to it).
void Frame::DisconnectOwnerElement() {
  if (!owner_)
    return;
  // TODO(https://crbug.com/578349): If this is a provisional frame, the frame
  // owner doesn't actually point to this frame, so don't clear it. Note that
  // this can't use IsProvisional() because the |client_| is null already.
  if (owner_->ContentFrame() == this)
    owner_->ClearContentFrame();
  owner_ = nullptr;
}

Page* Frame::GetPage() const {
  return page_;
}

// A frame is the main frame iff it has no parent in the frame tree.
bool Frame::IsMainFrame() const {
  return !Tree().Parent();
}

// Returns the owner element only when the owner is local (in-process);
// otherwise nullptr.
HTMLFrameOwnerElement* Frame::DeprecatedLocalOwner() const {
  return owner_ && owner_->IsLocal() ? ToHTMLFrameOwnerElement(owner_)
                                     : nullptr;
}

// Shared no-op client, handed out when the frame has no page.
static ChromeClient& GetEmptyChromeClient() {
  DEFINE_STATIC_LOCAL(EmptyChromeClient, client, (EmptyChromeClient::Create()));
  return client;
}

ChromeClient& Frame::GetChromeClient() const {
  if (Page* page = this->GetPage())
    return page->GetChromeClient();
  // Fall back to the empty client so callers never see a null reference.
  return GetEmptyChromeClient();
}

// Walks the ancestor chain; returns the highest frame whose parent's security
// origin cannot access this frame's origin, or nullptr if all ancestors can.
Frame* Frame::FindUnsafeParentScrollPropagationBoundary() {
  Frame* current_frame = this;
  Frame* ancestor_frame = Tree().Parent();
  while (ancestor_frame) {
    if (!ancestor_frame->GetSecurityContext()->GetSecurityOrigin()->CanAccess(
            GetSecurityContext()->GetSecurityOrigin()))
      return current_frame;
    current_frame = ancestor_frame;
    ancestor_frame = ancestor_frame->Tree().Parent();
  }
  return nullptr;
}

// Layout object of the local owner element, when one exists.
LayoutEmbeddedContent* Frame::OwnerLayoutObject() const {
  if (!DeprecatedLocalOwner())
    return nullptr;
  return DeprecatedLocalOwner()->GetLayoutEmbeddedContent();
}

Settings* Frame::GetSettings() const {
  if (GetPage())
    return &GetPage()->GetSettings();
  return nullptr;
}

WindowProxy* Frame::GetWindowProxy(DOMWrapperWorld& world) {
  return window_proxy_manager_->GetWindowProxy(world);
}

// Recursively forwards a visibility-state change to all child frames.
// Children are snapshotted first so mutation during the callbacks is safe.
void Frame::DidChangeVisibilityState() {
  HeapVector<Member<Frame>> child_frames;
  for (Frame* child = Tree().FirstChild(); child;
       child = child->Tree().NextSibling())
    child_frames.push_back(child);
  for (size_t i = 0; i < child_frames.size(); ++i)
    child_frames[i]->DidChangeVisibilityState();
}

// Activates the user-activation state on this frame and every ancestor.
void Frame::NotifyUserActivationInLocalTree() {
  user_activation_state_.Activate();
  for (Frame* parent = Tree().Parent(); parent;
       parent = parent->Tree().Parent()) {
    parent->user_activation_state_.Activate();
  }
}

void Frame::NotifyUserActivation() {
  NotifyUserActivationInLocalTree();
  ToLocalFrame(this)->Client()->SetHasReceivedUserGesture();
}

// Consumes transient activation on ancestors, descendants and self; the
// return value reflects only this frame's own state.
bool Frame::ConsumeTransientUserActivation() {
  for (Frame* parent = Tree().Parent(); parent;
       parent = parent->Tree().Parent()) {
    parent->user_activation_state_.ConsumeIfActive();
  }
  for (Frame* child = Tree().FirstChild(); child;
       child = child->Tree().TraverseNext(this)) {
    child->user_activation_state_.ConsumeIfActive();
  }
  return user_activation_state_.ConsumeIfActive();
}

// static
// Convenience overload: notifies |frame| (when non-null) and returns a scoped
// UserGestureIndicator for the given status.
std::unique_ptr<UserGestureIndicator> Frame::NotifyUserActivation(
    LocalFrame* frame,
    UserGestureToken::Status status) {
  if (frame)
    frame->NotifyUserActivation();
  return std::make_unique<UserGestureIndicator>(status);
}

// static
// Same as above, but adopting an existing token.
std::unique_ptr<UserGestureIndicator> Frame::NotifyUserActivation(
    LocalFrame* frame,
    UserGestureToken* token) {
  if (frame)
    frame->NotifyUserActivation();
  return std::make_unique<UserGestureIndicator>(token);
}

// static
// With UserActivationV2 enabled this consults the frame's activation state;
// otherwise it falls back to the legacy UserGestureIndicator queries.
bool Frame::HasTransientUserActivation(LocalFrame* frame,
                                       bool checkIfMainThread) {
  if (RuntimeEnabledFeatures::UserActivationV2Enabled()) {
    return frame ? frame->HasTransientUserActivation() : false;
  }
  return checkIfMainThread
             ? UserGestureIndicator::ProcessingUserGestureThreadSafe()
             : UserGestureIndicator::ProcessingUserGesture();
}

// static
// Consuming counterpart of HasTransientUserActivation(), with the same
// V2-vs-legacy split.
bool Frame::ConsumeTransientUserActivation(LocalFrame* frame,
                                           bool checkIfMainThread) {
  if (RuntimeEnabledFeatures::UserActivationV2Enabled()) {
    return frame ? frame->ConsumeTransientUserActivation() : false;
  }
  return checkIfMainThread
             ? UserGestureIndicator::ConsumeUserGestureThreadSafe()
             : UserGestureIndicator::ConsumeUserGesture();
}

bool Frame::IsFeatureEnabled(mojom::FeaturePolicyFeature feature) const {
  FeaturePolicy* feature_policy = GetSecurityContext()->GetFeaturePolicy();
  // The policy should always be initialized before checking it to ensure we
  // properly inherit the parent policy.
  DCHECK(feature_policy);
  // Otherwise, check policy.
  return feature_policy->IsFeatureEnabled(feature);
}

void Frame::SetOwner(FrameOwner* owner) {
  owner_ = owner;
  // Inertness and effective touch-action are derived from the owner, so
  // refresh both whenever the owner changes.
  UpdateInertIfPossible();
  UpdateInheritedEffectiveTouchActionIfPossible();
}

// Marks this frame inert when its (local) owner element is inert.
void Frame::UpdateInertIfPossible() {
  if (owner_ && owner_->IsLocal()) {
    ToHTMLFrameOwnerElement(owner_)->UpdateDistributionForFlatTreeTraversal();
    if (ToHTMLFrameOwnerElement(owner_)->IsInert())
      SetIsInert(true);
  }
}

// Copies the effective touch-action down from the owner's content frame.
void Frame::UpdateInheritedEffectiveTouchActionIfPossible() {
  if (owner_) {
    Frame* owner_frame = owner_->ContentFrame();
    if (owner_frame) {
      SetInheritedEffectiveTouchAction(
          owner_frame->InheritedEffectiveTouchAction());
    }
  }
}

// Lazily-cached trace representation of the devtools frame token.
const CString& Frame::ToTraceValue() {
  // token's ToString() is latin1.
  if (!trace_value_)
    trace_value_ = CString(devtools_frame_token_.ToString().c_str());
  return trace_value_.value();
}

Frame::Frame(FrameClient* client,
             Page& page,
             FrameOwner* owner,
             WindowProxyManager* window_proxy_manager)
    : tree_node_(this),
      page_(&page),
      owner_(owner),
      client_(client),
      window_proxy_manager_(window_proxy_manager),
      is_loading_(false),
      devtools_frame_token_(client->GetDevToolsFrameToken()),
      create_stack_(base::debug::StackTrace()) {
  InstanceCounters::IncrementCounter(InstanceCounters::kFrameCounter);
  // A frame with an owner is a child frame; otherwise it becomes the page's
  // main frame.
  if (owner_)
    owner_->SetContentFrame(*this);
  else
    page_->SetMainFrame(this);
}

// Keep the public-API DetachType enums in sync with FrameDetachType.
STATIC_ASSERT_ENUM(FrameDetachType::kRemove,
                   WebFrameClient::DetachType::kRemove);
STATIC_ASSERT_ENUM(FrameDetachType::kSwap, WebFrameClient::DetachType::kSwap);
STATIC_ASSERT_ENUM(FrameDetachType::kRemove,
                   WebRemoteFrameClient::DetachType::kRemove);
STATIC_ASSERT_ENUM(FrameDetachType::kSwap,
                   WebRemoteFrameClient::DetachType::kSwap);

}  // namespace blink
|
MikeMirzayanov/pbox
|
src/site/src/main/java/me/pbox/site/captions/dao/CaptionDao.java
|
/*
* Copyright by <NAME>
*/
package me.pbox.site.captions.dao;
import me.pbox.site.captions.model.Caption;
import java.util.List;
/**
 * Data-access operations for {@link Caption} entities. Captions can be
 * looked up by numeric id, by a (shortcut SHA-1, locale) pair, or by value.
 */
public interface CaptionDao {
	/** @return a hex digest of {@code s} (used to derive shortcut keys; exact algorithm is implementation-defined — confirm with the implementation). */
	String shaHex(String s);

	/** @return the caption with the given id (presumably {@code null} when absent — TODO confirm). */
	Caption find(long id);

	/** @return the caption matching both the shortcut hash and the locale. */
	Caption find(String shortcutSha1, String locale);

	/** @return all captions. */
	List<Caption> findAll();

	/** Persists changes to an existing caption. */
	void save(Caption caption);

	/** Inserts a new caption. */
	void insert(Caption caption);

	/** Clears any caching layer the implementation maintains. */
	void clearCache();

	/** Deletes every caption with the given shortcut hash. */
	void deleteByShortcutSha1(String shortcutSha1);

	/** @return all captions with the given shortcut hash (one per locale, presumably — TODO confirm). */
	List<Caption> findByShortcutSha1(String shortcutSha1);

	/** @return all captions whose value equals {@code value}. */
	List<Caption> findByValue(String value);
}
|
drbild/scintuit
|
core/src/main/scala/scintuit/util/auth.scala
|
package scintuit.util
import java.security.PrivateKey
import scintuit.util.oauth.OAuthConsumer
import scintuit.util.saml.SamlIssuer
object auth {

  /**
   * Bundle of authentication material:
   *
   * @param signingKey    private key used for signing
   * @param samlIssuer    SAML issuer identity
   * @param oauthConsumer OAuth consumer credentials
   */
  case class AuthConfig(
    signingKey: PrivateKey,
    samlIssuer: SamlIssuer,
    oauthConsumer: OAuthConsumer
  )
}
|
arvidl/dynamical-systems-with-applications-using-python
|
Anaconda-files/Program_06a.py
|
<gh_stars>100-1000
# Program 06a: Contour plot. See Figure 6.2(a).
import numpy as np
import matplotlib.pyplot as plt

# Sample the (theta, phi) plane on a 100 x 100 grid.
theta_vals = np.linspace(-10.0, 10.0, 100)
phi_vals = np.linspace(-4.0, 4.0, 100)
X, Y = np.meshgrid(theta_vals, phi_vals)

# Scalar field whose level curves are drawn below.
Z = Y**2 / 2 - 5*np.cos(X)

plt.figure()
plt.contour(X, Y, Z)
plt.xlabel(r'$\theta$', fontsize=20)
plt.ylabel(r'$\phi$', fontsize=20)
plt.tick_params(labelsize=20)
plt.show()
|
ebadkamil/nicos
|
nicos_mlz/puma/setups/detslit.py
|
# NICOS setup: slit in front of the PUMA detector.
description = 'Slits before detector'
group = 'optional'

includes = ['motorbus6']

devices = dict(
    # Stepper motor driving the slit; hidden from users (lowlevel=True).
    st_dslit = device('nicos_mlz.puma.devices.ipc.Motor',
        bus = 'motorbus6',
        addr = 67,
        slope = 4500,
        unit = 'mm',
        abslimits = (-5.5, 30),
        zerosteps = 500000,
        lowlevel = True,
    ),
    # Position coder; 'poly' converts raw counts to mm.
    co_dslit = device('nicos_mlz.puma.devices.ipc.Coder',
        bus = 'motorbus6',
        addr = 97,
        poly = [-159./80, 1./80],
        unit = 'mm',
        lowlevel = True,
    ),
    # User-facing axis combining the motor and the coder above.
    dslit = device('nicos.devices.generic.Axis',
        description = 'Slit before detector',
        motor = 'st_dslit',
        coder = 'co_dslit',
        precision = 0.05,
        offset = 0,
        maxtries = 10,
    ),
)
|
Alejo2040/Algoritmos_programacion_C4G2
|
Taller Estructuras de control repeticion/Ejercicio 03.py
|
<gh_stars>0
"""
Entrada
No hay
Salida
Sumatoria de los números dentro del rango dado-->
"""
#Caja negra
a=0
for i in range(97, 1003):
if(i % 2 == 0):
a = a+i
#Salidas
print(a)
|
lauracristinaes/aula-java
|
hibernate-release-5.3.7.Final/project/hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/TypeDefinitionBinder.java
|
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.boot.model.source.internal.hbm;
import org.hibernate.boot.jaxb.hbm.spi.JaxbHbmTypeDefinitionType;
import org.hibernate.boot.model.TypeDefinition;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.jboss.logging.Logger;
/**
* @author <NAME>
*/
/**
 * Binds {@code <typedef/>} declarations from hbm.xml mapping documents into
 * {@link TypeDefinition} instances registered on the metadata collector.
 */
public class TypeDefinitionBinder {
	private static final Logger log = Logger.getLogger( TypeDefinitionBinder.class );

	/**
	 * Handling for a {@code <typedef/>} declaration
	 *
	 * @param context Access to information relative to the mapping document containing this binding
	 * @param typeDefinitionBinding The {@code <typedef/>} binding
	 */
	public static void processTypeDefinition(
			HbmLocalMetadataBuildingContext context,
			JaxbHbmTypeDefinitionType typeDefinitionBinding) {
		// Resolve the implementing class through the registry's class loader.
		final ClassLoaderService classLoaderService =
				context.getBuildingOptions().getServiceRegistry().getService( ClassLoaderService.class );

		final TypeDefinition typeDefinition = new TypeDefinition(
				typeDefinitionBinding.getName(),
				classLoaderService.classForName( typeDefinitionBinding.getClazz() ),
				null,
				ConfigParameterHelper.extractConfigParameters( typeDefinitionBinding )
		);

		log.debugf(
				"Processed type-definition : %s -> %s",
				typeDefinition.getName(),
				typeDefinition.getTypeImplementorClass().getName()
		);

		context.getMetadataCollector().addTypeDefinition( typeDefinition );
	}
}
|
FreCap/sql
|
sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/config/FetchSizeProperty.java
|
package com.amazon.opendistroforelasticsearch.jdbc.config;
/**
 * Integer-valued connection property for the {@code fetchSize} setting
 * (see {@link IntConnectionProperty} for parsing/validation behavior).
 */
public class FetchSizeProperty extends IntConnectionProperty {

    // Key under which the property appears in the connection configuration.
    public static final String KEY = "fetchSize";

    public FetchSizeProperty() {
        super(KEY);
    }
}
|
davidivkovic/web21
|
api/dist/spa/src/components/header/requests/PendingRequest.js
|
<gh_stars>0
import { ref } from '/modules/vue.js'
import Request from '/src/components/header/requests/Request.js'
import Button from '/src/components/ui/Button.js'
import SpinIcon from '/src/icons/SpinIcon.js'
// Friend-request entry shown in the header dropdown while the request is
// still pending. Renders inside the shared <Request> shell and offers
// Accept / Delete actions that are emitted up to the parent.
export default {
template: `
<Request :request="request">
<template v-slot:text>wants to be your friend</template>
<template v-slot:buttons>
<div class="space-x-2 flex ml-10">
<Button
@click="acceptFriendRequest()"
class="font-medium h-[30px] py-0 leading-4 !text-[13px] !w-20 text-white bg-black flex items-center justify-center border-gray-300 whitespace-nowrap"
>
<p v-if="!isLoading">Accept</p>
<SpinIcon v-else class="w-full" />
</Button>
<Button
@click="declineFriendRequest()"
class="font-medium h-[30px] py-0 leading-4 !text-[13px] !px-4 text-black border border-gray-300 whitespace-nowrap"
>Delete</Button>
</div>
</template>
</Request>
`,
props: ['request'],
emits: ['accepted', 'declined'],
components: {
Request,
Button,
SpinIcon,
},
setup(props, { emit }) {
// Drives the spinner inside the Accept button.
const isLoading = ref(false)
const acceptFriendRequest = () => {
// Notify the parent, then show a short (250 ms) spinner for feedback.
emit('accepted', props.request.id)
isLoading.value = true
setTimeout(() => {
isLoading.value = false
}, 250)
}
const declineFriendRequest = () => {
emit('declined', props.request.id)
}
return {
isLoading,
acceptFriendRequest,
declineFriendRequest,
}
},
}
|
Yankee24/ruoyi-vue-pro
|
yudao-module-member/yudao-module-member-biz/src/main/java/cn/iocoder/yudao/module/member/framework/package-info.java
|
/**
 * Framework-level wrappers for the member module.
 *
 * <p>NOTE(review): the original (Chinese) comment said "system 模块", but this
 * package lives under the member module; the text was corrected to match the
 * package name.
 *
 * @author 芋道源码
 */
package cn.iocoder.yudao.module.member.framework;
|
katekaseth/Project_One
|
client-app/src/components/bookmarkPage/BookmarkPage.js
|
<gh_stars>1-10
import React from 'react';
import { makeStyles } from '@material-ui/styles';
import { Typography, Grid, CardMedia } from '@material-ui/core';
import bookmarkedIcon from '../../icons/svg/bookmarked.svg';
import unbookmarkedIcon from '../../icons/svg/unbookmarked.svg';
import { SearchResults } from '../searchPage/SearchResults';
import { SearchBar } from '../SearchBar';
// Bookmarks page: header with the bookmark count, a search bar (only shown
// when bookmarks exist), and either the results grid or a help panel.
export default ({
    setPage,
    bookmarks,
    alertError,
    updateSearchBookmarkTerms,
    searchedBookmarkTerms,
}) => {
    const classes = useStyles();
    return (
        <Grid container direction='column'>
            {/* Header row: icon + "Your Bookmarks - <count>" */}
            <Grid xs container item className={classes.bottomSpacing}>
                <Grid item>
                    <CardMedia src={bookmarkedIcon} component='img' className={classes.bookmark} />
                </Grid>
                <Grid item>
                    <Typography className={classes.bookmarkTitle} variant='h4'>
                        Your Bookmarks -
                    </Typography>
                    <Typography className={classes.bookmarkNum} variant='h4'>
                        {bookmarks ? bookmarks.length : 'None'}
                    </Typography>
                </Grid>
            </Grid>
            {/* Search bar is only rendered when there is something to filter. */}
            <Grid item className={classes.bottomSpacing}>
                {bookmarks && bookmarks.length !== 0 && (
                    <SearchBar
                        redirect={() => {}}
                        updateSearchTerms={updateSearchBookmarkTerms}
                        searchedTerms={[searchedBookmarkTerms]}
                        isBookmark={true}
                    />
                )}
            </Grid>
            {/* Either the help panel (no bookmarks) or the results list. */}
            <Grid item container>
                {bookmarks && bookmarks.length === 0 && <NoBookmarksHelper />}
                {bookmarks && bookmarks.length !== 0 && (
                    <SearchResults setPage={setPage} results={bookmarks} alertError={alertError} />
                )}
            </Grid>
        </Grid>
    );
};
// Help panel shown when the user has no bookmarks yet: explains the
// bookmark icons and how to toggle them on a report overview.
const NoBookmarksHelper = () => {
    const classes = useStyles();
    return (
        <Grid>
            <Typography variant='h6'>Looks like you don't have any bookmarks!</Typography>
            <Typography>
                To add new bookmarks, look for this symbol{' '}
                <CardMedia
                    src={unbookmarkedIcon}
                    component='img'
                    className={classes.littleBookmark}
                />{' '}
                on a report overview. Click it to save the bookmark.
            </Typography>
            <br />
            <Typography variant='body2'>
                <CardMedia
                    src={bookmarkedIcon}
                    component='img'
                    className={classes.littlestBookmark}
                />{' '}
                If the icon is purple, it means the report has been bookmarked. Click to unbookmark
                it.
            </Typography>
            <Typography variant='body2'>
                <CardMedia
                    src={unbookmarkedIcon}
                    component='img'
                    className={classes.littlestBookmark}
                />{' '}
                If it's grey, it means it hasn't been bookmarked. Click to bookmark it.
            </Typography>
        </Grid>
    );
};
// Style hook shared by the page and the help panel above.
const useStyles = makeStyles({
    // "Your Bookmarks -" heading.
    bookmarkTitle: {
        color: '#5E5B5B',
        fontFamily: 'Encode-sans, sans-serif',
        fontWeight: 'bold',
        display: 'inline-block',
        paddingRight: '1rem',
    },
    // Bookmark count next to the heading.
    bookmarkNum: {
        color: '#5E5B5B',
        fontFamily: 'Encode-sans, sans-serif',
        display: 'inline-block',
    },
    // Large header icon.
    bookmark: {
        width: '35px',
        height: 'auto',
        paddingRight: '1rem',
    },
    bottomSpacing: {
        marginBottom: '1rem',
    },
    // Inline icons used in the help text, two sizes.
    littleBookmark: {
        width: '20px',
        display: 'inline',
        marginBottom: '-4px',
        marginRight: '3px',
        marginLeft: '3px',
    },
    littlestBookmark: {
        width: '10px',
        display: 'inline',
        marginRight: '3px',
        marginLeft: '3px',
        marginTop: '5px',
    },
});
|
bshp/midpoint
|
gui/admin-gui/src/main/java/com/evolveum/midpoint/web/page/admin/certification/DefinitionStagesPanel.java
|
<reponame>bshp/midpoint<filename>gui/admin-gui/src/main/java/com/evolveum/midpoint/web/page/admin/certification/DefinitionStagesPanel.java
/*
* Copyright (c) 2010-2015 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.web.page.admin.certification;
import com.evolveum.midpoint.gui.api.component.BasePanel;
import com.evolveum.midpoint.gui.api.util.WebComponentUtil;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.web.component.AjaxSubmitButton;
import com.evolveum.midpoint.web.component.TabbedPanel;
import com.evolveum.midpoint.web.component.dialog.ConfirmationPanel;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;
import com.evolveum.midpoint.web.page.admin.certification.dto.StageDefinitionDto;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationStageDefinitionType;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.extensions.markup.html.tabs.AbstractTab;
import org.apache.wicket.extensions.markup.html.tabs.ITab;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author mederly
*/
/**
 * Panel that edits the ordered list of certification stage definitions as a tab
 * strip: one tab per {@link StageDefinitionDto}, plus buttons to add, reorder
 * (left/right) and delete stages. The model object is the live, mutable list;
 * all operations edit it in place and then rebuild the tabs.
 */
public class DefinitionStagesPanel extends BasePanel<List<StageDefinitionDto>> {

    // Wicket component ids.
    private static final String ID_TAB_PANEL = "tabPanel";
    private static final String ID_ADD_NEW_STAGE = "addNewStage";
    private static final String ID_MOVE_STAGE_RIGHT = "moveStageRight";
    private static final String ID_MOVE_STAGE_LEFT = "moveStageLeft";
    private static final String ID_DELETE_STAGE = "deleteStage";

    // Default name for a newly added stage; the 1-based stage number is appended.
    private static final String DEFAULT_STAGE_NAME_PREFIX = "Stage ";

    private TabbedPanel<ITab> tabPanel;
    private PageCertDefinition parentPage;

    public DefinitionStagesPanel(String id, IModel<List<StageDefinitionDto>> model, PageCertDefinition parentPage) {
        super(id, model);
        this.parentPage = parentPage;
        initLayout();
    }

    /** Builds the tab panel and the four stage-manipulation buttons. */
    private void initLayout() {
        List<ITab> tabs = new ArrayList<>();
        createTabs(tabs);
        tabPanel = WebComponentUtil.createTabPanel(ID_TAB_PANEL, parentPage, tabs, null);
        add(tabPanel);
        AjaxSubmitButton addNewStage = new AjaxSubmitButton(ID_ADD_NEW_STAGE, createStringResource("StageDefinitionPanel.addNewStageButton")) {
            @Override
            public void onSubmit(AjaxRequestTarget target) {
                super.onSubmit(target);
                addPerformed(target);
            }
        };
        add(addNewStage);
        // use the same isVisible for all buttons to avoid changing buttons' placement (especially dangerous is "delete stage" one)
        // we also don't use isEnabled as it seems to have no visual effect
        VisibleEnableBehaviour visibleIfMoreTabs = new VisibleEnableBehaviour() {
            @Override
            public boolean isVisible() {
                // With a single stage there is nothing to move or delete.
                return getModelObject().size() > 1;
            }
        };
        AjaxSubmitButton moveLeft = new AjaxSubmitButton(ID_MOVE_STAGE_LEFT, createStringResource("StageDefinitionPanel.moveStageLeftButton")) {
            @Override
            public void onSubmit(AjaxRequestTarget target) {
                super.onSubmit(target);
                moveLeftPerformed(target);
            }
        };
        moveLeft.add(visibleIfMoreTabs);
        add(moveLeft);
        AjaxSubmitButton moveRight = new AjaxSubmitButton(ID_MOVE_STAGE_RIGHT, createStringResource("StageDefinitionPanel.moveStageRightButton")) {
            @Override
            public void onSubmit(AjaxRequestTarget target) {
                super.onSubmit(target);
                moveRightPerformed(target);
            }
        };
        moveRight.add(visibleIfMoreTabs);
        add(moveRight);
        AjaxSubmitButton delete = new AjaxSubmitButton(ID_DELETE_STAGE, createStringResource("StageDefinitionPanel.deleteStageButton")) {
            @Override
            public void onSubmit(AjaxRequestTarget target) {
                super.onSubmit(target);
                deletePerformed(target);
            }
        };
        delete.add(visibleIfMoreTabs);
        add(delete);
        setOutputMarkupId(true);
    }

    /** Opens a confirmation dialog for deleting the currently selected stage. */
    private void deletePerformed(AjaxRequestTarget target) {
        ConfirmationPanel dialog = new ConfirmationPanel(getPageBase().getMainPopupBodyId(), new IModel<String>() {
            @Override
            public String getObject() {
                // Message is computed lazily so it always names the stage selected at display time.
                StageDefinitionDto dto = getModelObject().get(tabPanel.getSelectedTab());
                return getString("DefinitionStagesPanel.confirmDeleteText", dto.getName());
            }
        }){
            @Override
            public void yesPerformed(AjaxRequestTarget target) {
                deleteConfirmedPerformed(target);
            }
        };
        getPageBase().showMainPopup(dialog, target);
    }

    /** Appends a new stage and selects its (last) tab. */
    private void addPerformed(AjaxRequestTarget target) {
        StageDefinitionDto newStageDefinitionDto = createNewStageDefinitionDto();
        getModelObject().add(newStageDefinitionDto);
        recreateTabs();
        tabPanel.setSelectedTab(getModelObject().size()-1);
        target.add(parentPage.getTabPanel());
    }

    /** Removes the selected stage after confirmation, keeping the selection in range. */
    private void deleteConfirmedPerformed(AjaxRequestTarget target) {
        int selected = tabPanel.getSelectedTab();
        getModelObject().remove(selected);
        recreateTabs();
        // If the last tab was deleted, fall back to the new last tab.
        if (tabPanel.getSelectedTab() >= getModelObject().size()) {
            tabPanel.setSelectedTab(getModelObject().size()-1);
        }
        target.add(parentPage.getTabPanel());
    }

    /** Swaps the selected stage with its left neighbour and renumbers both. */
    private void moveLeftPerformed(AjaxRequestTarget target) {
        int selected = tabPanel.getSelectedTab();
        List<StageDefinitionDto> list = getModelObject();
        if (selected > 0) {
            Collections.swap(list, selected-1, selected);
            setOrder(list, selected-1);
            setOrder(list, selected);
            recreateTabs();
            tabPanel.setSelectedTab(selected-1);
            target.add(this);
        }
    }

    /** Swaps the selected stage with its right neighbour and renumbers both. */
    private void moveRightPerformed(AjaxRequestTarget target) {
        int selected = tabPanel.getSelectedTab();
        List<StageDefinitionDto> list = getModelObject();
        if (selected < list.size()-1) {
            Collections.swap(list, selected, selected+1);
            setOrder(list, selected);
            setOrder(list, selected+1);
            recreateTabs();
            tabPanel.setSelectedTab(selected+1);
            target.add(this);
        }
    }

    /** Sets the 1-based stage number of the i-th list element to i+1. */
    private void setOrder(List<StageDefinitionDto> list, int i) {
        list.get(i).setNumber(i+1);
    }

    /** Rebuilds the tab list in place from the current model contents. */
    private void recreateTabs() {
        List<ITab> tabs = tabPanel.getTabs().getObject();
        tabs.clear();
        createTabs(tabs);
    }

    private void createTabs(List<ITab> tabs) {
        for (final StageDefinitionDto definitionDto : getModelObject()) {
            tabs.add(createTab(definitionDto, parentPage));
        }
    }

    @NotNull
    private AbstractTab createTab(final StageDefinitionDto definitionDto, final PageCertDefinition parentPage) {
        return new AbstractTab(new Model<>(definitionDto.getName())) {
            @Override
            public WebMarkupContainer getPanel(String panelId) {
                return new DefinitionStagePanel(panelId, new Model(definitionDto));
            }
        };
    }

    /**
     * Creates a fresh stage DTO numbered after the current last stage and named
     * "Stage N". SchemaException from DTO construction is treated as a
     * programming error and rethrown unchecked.
     */
    private StageDefinitionDto createNewStageDefinitionDto(){
        try {
            AccessCertificationStageDefinitionType def = new AccessCertificationStageDefinitionType(parentPage.getPrismContext());
            def.setNumber(getModel().getObject().size() + 1);
            def.setName(DEFAULT_STAGE_NAME_PREFIX + def.getNumber());
            return new StageDefinitionDto(def, parentPage);
        } catch (SchemaException e) {
            throw new IllegalStateException(e);
        }
    }
}
|
jassem-lab/LMS_MERN
|
client/src/shared/actions/registerUser.js
|
<reponame>jassem-lab/LMS_MERN
import axios from 'axios';
import { CLEAR_ERRORS, GET_ERRORS } from '../actionTypes';
import { isEmpty } from '../utils';
/**
 * Thunk action creator: creates a user account and its profile in parallel.
 *
 * Dispatches CLEAR_ERRORS on success and GET_ERRORS (with the server payload)
 * on failure. Returns a Promise that ALWAYS settles — resolves with true on
 * success, rejects with false on any failure — so callers can await it safely.
 *
 * @param {Object} userData    account creation payload
 * @param {Object} profileData profile creation payload
 * @param {Object} history     router history (kept for interface compatibility;
 *                             navigation is handled by the caller)
 */
const registerUser = (userData, profileData, history) => (dispatch) =>
  new Promise((resolve, reject) => {
    axios
      .all([
        axios.post('/api/account/add-account', userData),
        axios.post('/api/profile/add', profileData),
      ])
      .then(
        axios.spread(() => {
          dispatch({ type: CLEAR_ERRORS, payload: {} });
          resolve(true);
        })
      )
      .catch((err) => {
        // Network-level failures have no response body; only dispatch server
        // errors when one exists.
        if (!isEmpty(err.response)) {
          dispatch({ type: GET_ERRORS, payload: err.response.data });
        }
        // Always settle: the original code left the promise pending forever
        // when err.response was empty, hanging any awaiting caller.
        reject(false);
      });
  });

export default registerUser;
|
patil215/v8
|
fuzzer_output/interesting/sample_1554111532688.js
|
<gh_stars>0
// Fuzzer-generated regression sample (stored V8 fuzzer output). The exact
// values, object shapes, and seemingly-dead statements are the test case —
// do not "clean up" or simplify them.
function main() {
  const v3 = [13.37,13.37,13.37,13.37];
  const v4 = {exec:9007199254740991};
  const v5 = {replace:9007199254740991,forEach:13.37,tan:v4,fromEntries:v3,global:v4,log:Math,getPrototypeOf:Math};
  const v12 = [1337];
  let v17 = undefined;
  const v19 = [1337];
  const v20 = {exec:v19};
  let v22 = "undefined";
  delete v5.getPrototypeOf;
  let v27 = 0;
  const v28 = v27 + 1;
  v27 = v28;
}
// %-prefixed runtime calls are V8 natives syntax: this file must be run with
// --allow-natives-syntax (it is not valid standard JavaScript).
%NeverOptimizeFunction(main);
main();
|
abenkdh/NetCipher
|
sample-webviewclient/src/sample/netcipher/webviewclient/GenericWebViewClient.java
|
package sample.netcipher.webviewclient;
import android.webkit.WebResourceRequest;
import android.webkit.WebResourceResponse;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.CookieManager;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
 * WebViewClient that intercepts every resource request and re-issues it itself
 * via {@link HttpURLConnection} (optionally through a local SOCKS proxy)
 * instead of letting the WebView perform the fetch. Also maintains a running
 * count of intercepted requests and notifies an optional listener.
 */
class GenericWebViewClient extends WebViewClient {

    /** Set to true to route requests through a local SOCKS proxy (e.g. Tor on port 9050). */
    private static final boolean PROXIED = false;

    private int requestCounter;
    private MainActivity mainActivity;
    // NOTE(review): never assigned or read anywhere in this class — candidate for removal.
    private CookieManager cookieManager;

    public GenericWebViewClient(MainActivity mainActivity) {
        this.mainActivity = mainActivity;
        requestCounter = 0;
    }

    /** Callback invoked whenever the intercepted-request count changes. */
    interface RequestCounterListener {
        void countChanged(int requestCount);
    }

    private volatile RequestCounterListener requestCounterListener = null;

    public void setRequestCounterListener(RequestCounterListener requestCounterListener) {
        this.requestCounterListener = requestCounterListener;
    }

    @Override
    public WebResourceResponse shouldInterceptRequest(WebView view, WebResourceRequest request) {
        // NOTE(review): this callback may run on WebView worker threads; the
        // unsynchronized increment could lose counts under contention — confirm.
        requestCounter++;
        // Read the volatile field once so the null check and the call see the
        // same listener (the original read it twice, risking an NPE if it was
        // cleared between check and call).
        RequestCounterListener listener = requestCounterListener;
        if (listener != null) {
            listener.countChanged(requestCounter);
        }
        // Strip any URL fragment: it is client-side only and must not be sent upstream.
        String urlString = request.getUrl().toString().split("#")[0];
        try {
            HttpURLConnection connection;
            if (PROXIED) {
                Proxy proxy = new Proxy(Proxy.Type.SOCKS, new InetSocketAddress("localhost", 9050));
                connection = (HttpURLConnection) new URL(urlString).openConnection(proxy);
            } else {
                connection = (HttpURLConnection) new URL(urlString).openConnection();
            }
            // Mirror the original request's method and headers onto our connection.
            connection.setRequestMethod(request.getMethod());
            for (Map.Entry<String, String> requestHeader : request.getRequestHeaders().entrySet()) {
                connection.setRequestProperty(requestHeader.getKey(), requestHeader.getValue());
            }
            // Transform the response into the shape WebResourceResponse expects.
            InputStream in = new BufferedInputStream(connection.getInputStream());
            String encoding = connection.getContentEncoding();
            // Flatten response headers; multi-valued headers keep only one value.
            Map<String, String> responseHeaders = new HashMap<>();
            for (String key : connection.getHeaderFields().keySet()) {
                responseHeaders.put(key, connection.getHeaderField(key));
            }
            String mimeType = "text/plain";
            if (connection.getContentType() != null && !connection.getContentType().isEmpty()) {
                mimeType = connection.getContentType().split("; ")[0];
            }
            return new WebResourceResponse(mimeType, encoding, connection.getResponseCode(),
                    connection.getResponseMessage(), responseHeaders, in);
        } catch (IOException e) {
            // UnsupportedEncodingException is an IOException subclass, so a single
            // catch suffices. Log instead of silently swallowing the failure.
            e.printStackTrace();
        }
        // Request failed: hand the WebView an empty 204 response.
        return new WebResourceResponse("text/plain", "UTF-8", 204, "No Content",
                new HashMap<String, String>(), new ByteArrayInputStream(new byte[]{}));
    }
}
|
moriyoshi/aws-sdk-go-v2
|
service/ssm/api_op_CreateOpsItem.go
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package ssm
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/ssm/types"
"github.com/awslabs/smithy-go/middleware"
smithyhttp "github.com/awslabs/smithy-go/transport/http"
)
// Creates a new OpsItem. You must have permission in AWS Identity and Access
// Management (IAM) to create a new OpsItem. For more information, see Getting
// started with OpsCenter
// (https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html)
// in the AWS Systems Manager User Guide. Operations engineers and IT professionals
// use OpsCenter to view, investigate, and remediate operational issues impacting
// the performance and health of their AWS resources. For more information, see AWS
// Systems Manager OpsCenter
// (https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html) in
// the AWS Systems Manager User Guide.
// (Generated operation wrapper — see the DO NOT EDIT header; annotations below
// are comment-only.)
func (c *Client) CreateOpsItem(ctx context.Context, params *CreateOpsItemInput, optFns ...func(*Options)) (*CreateOpsItemOutput, error) {
	// A nil params is tolerated by substituting an empty input; required-field
	// validation happens later in the middleware stack.
	if params == nil {
		params = &CreateOpsItemInput{}
	}
	result, metadata, err := c.invokeOperation(ctx, "CreateOpsItem", params, optFns, addOperationCreateOpsItemMiddlewares)
	if err != nil {
		return nil, err
	}
	// The deserializer middleware guarantees this concrete type on success.
	out := result.(*CreateOpsItemOutput)
	out.ResultMetadata = metadata
	return out, nil
}
// CreateOpsItemInput carries the request parameters for the CreateOpsItem
// operation. (Generated from the Smithy model; field comments below come from
// the service documentation.)
type CreateOpsItemInput struct {
	// Information about the OpsItem.
	//
	// This member is required.
	Description *string
	// The origin of the OpsItem, such as Amazon EC2 or Systems Manager. The source
	// name can't contain the following strings: aws, amazon, and amzn.
	//
	// This member is required.
	Source *string
	// A short heading that describes the nature of the OpsItem and the impacted
	// resource.
	//
	// This member is required.
	Title *string
	// Specify a category to assign to an OpsItem.
	Category *string
	// The Amazon Resource Name (ARN) of an SNS topic where notifications are sent when
	// this OpsItem is edited or changed.
	Notifications []*types.OpsItemNotification
	// Operational data is custom data that provides useful reference details about the
	// OpsItem. For example, you can specify log files, error strings, license keys,
	// troubleshooting tips, or other relevant data. You enter operational data as
	// key-value pairs. The key has a maximum length of 128 characters. The value has a
	// maximum size of 20 KB. Operational data keys can't begin with the following:
	// amazon, aws, amzn, ssm, /amazon, /aws, /amzn, /ssm. You can choose to make the
	// data searchable by other users in the account or you can restrict search access.
	// Searchable data means that all users with access to the OpsItem Overview page
	// (as provided by the DescribeOpsItems API action) can view and search on the
	// specified data. Operational data that is not searchable is only viewable by
	// users who have access to the OpsItem (as provided by the GetOpsItem API action).
	// Use the /aws/resources key in OperationalData to specify a related resource in
	// the request. Use the /aws/automations key in OperationalData to associate an
	// Automation runbook with the OpsItem. To view AWS CLI example commands that use
	// these keys, see Creating OpsItems manually
	// (https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-creating-OpsItems.html#OpsCenter-manually-create-OpsItems)
	// in the AWS Systems Manager User Guide.
	OperationalData map[string]*types.OpsItemDataValue
	// The importance of this OpsItem in relation to other OpsItems in the system.
	Priority *int32
	// One or more OpsItems that share something in common with the current OpsItems.
	// For example, related OpsItems can include OpsItems with similar error messages,
	// impacted resources, or statuses for the impacted resource.
	RelatedOpsItems []*types.RelatedOpsItem
	// Specify a severity to assign to an OpsItem.
	Severity *string
	// Optional metadata that you assign to a resource. You can restrict access to
	// OpsItems by using an inline IAM policy that specifies tags. For more
	// information, see Getting started with OpsCenter
	// (https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html#OpsCenter-getting-started-user-permissions)
	// in the AWS Systems Manager User Guide. Tags use a key-value pair. For example:
	// Key=Department,Value=Finance To add tags to an existing OpsItem, use the
	// AddTagsToResource action.
	Tags []*types.Tag
}
// CreateOpsItemOutput is the response shape for the CreateOpsItem operation.
type CreateOpsItemOutput struct {
	// The ID of the OpsItem.
	OpsItemId *string
	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata
}
// addOperationCreateOpsItemMiddlewares wires the serialize/deserialize steps
// and the standard AWS middleware (request id, signing, retries, user agent,
// input validation, service metadata) onto the operation's middleware stack.
//
// NOTE(review): only the two explicit Add calls check their error; the helper
// calls below discard any error they might return. Later smithy-go codegen
// checks each one — confirm against the generator version before hand-editing
// (the file header says DO NOT EDIT).
func addOperationCreateOpsItemMiddlewares(stack *middleware.Stack, options Options) (err error) {
	err = stack.Serialize.Add(&awsAwsjson11_serializeOpCreateOpsItem{}, middleware.After)
	if err != nil {
		return err
	}
	err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpCreateOpsItem{}, middleware.After)
	if err != nil {
		return err
	}
	awsmiddleware.AddRequestInvocationIDMiddleware(stack)
	smithyhttp.AddContentLengthMiddleware(stack)
	addResolveEndpointMiddleware(stack, options)
	v4.AddComputePayloadSHA256Middleware(stack)
	addRetryMiddlewares(stack, options)
	addHTTPSignerV4Middleware(stack, options)
	awsmiddleware.AddAttemptClockSkewMiddleware(stack)
	addClientUserAgent(stack)
	smithyhttp.AddErrorCloseResponseBodyMiddleware(stack)
	smithyhttp.AddCloseResponseBodyMiddleware(stack)
	addOpCreateOpsItemValidationMiddleware(stack)
	stack.Initialize.Add(newServiceMetadataMiddleware_opCreateOpsItem(options.Region), middleware.Before)
	addRequestIDRetrieverMiddleware(stack)
	addResponseErrorMiddleware(stack)
	return nil
}
// newServiceMetadataMiddleware_opCreateOpsItem builds the middleware value
// that stamps region, service id, signing name, and operation name onto the
// request for signing and telemetry.
func newServiceMetadataMiddleware_opCreateOpsItem(region string) awsmiddleware.RegisterServiceMetadata {
	return awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "ssm",
		OperationName: "CreateOpsItem",
	}
}
|
davindratulsi/qiskit-nature
|
test/problems/second_quantization/vibrational/builders/test_hopping_ops_builder.py
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Tests Hopping Operators builder."""
from test import QiskitNatureTestCase
from test.algorithms.excited_state_solvers.test_bosonic_esc_calculation import _DummyBosonicDriver
from qiskit.opflow import PauliSumOp
from qiskit.quantum_info import SparsePauliOp
from qiskit.utils import algorithm_globals
from qiskit_nature.mappers.second_quantization import DirectMapper
from qiskit_nature.operators.second_quantization.qubit_converter import QubitConverter
from qiskit_nature.problems.second_quantization import VibrationalStructureProblem
from qiskit_nature.problems.second_quantization.vibrational.builders.hopping_ops_builder import \
_build_qeom_hopping_ops
class TestHoppingOpsBuilder(QiskitNatureTestCase):
    """Tests Hopping Operators builder."""

    def setUp(self):
        # Fix the global RNG seed so the dummy driver / problem setup is
        # deterministic across runs.
        super().setUp()
        algorithm_globals.random_seed = 8
        self.driver = _DummyBosonicDriver()
        self.qubit_converter = QubitConverter(DirectMapper())
        # Two modals per mode, and operators truncated at two-body terms.
        self.basis_size = 2
        self.truncation_order = 2
        self.vibrational_problem = VibrationalStructureProblem(self.driver, self.basis_size,
                                                               self.truncation_order)
        # NOTE(review): duplicate of the assignment above — one of the two
        # QubitConverter constructions is redundant.
        self.qubit_converter = QubitConverter(DirectMapper())
        # second_q_ops() is called for its side effect of populating
        # molecule_data on the problem — presumably; confirm against
        # VibrationalStructureProblem's implementation.
        self.vibrational_problem.second_q_ops()
        self.watson_hamiltonian = self.vibrational_problem.molecule_data
        self.num_modals = [self.basis_size] * self.watson_hamiltonian.num_modes

    def test_build_hopping_operators(self):
        """Tests that the correct hopping operator is built from QMolecule."""
        # Expected value is a 3-tuple: (hopping operators by label, an empty
        # commutativity dict, and the excitation index tuples per label).
        # TODO extract it somewhere
        expected_hopping_operators = (
            {'E_0': PauliSumOp(
                SparsePauliOp([[True, True, False, False, False, False, False, False],
                               [True, True, False, False, False, True, False, False],
                               [True, True, False, False, True, False, False, False],
                               [True, True, False, False, True, True, False, False]],
                              coeffs=[0.25 + 0.j, 0. - 0.25j, 0. + 0.25j, 0.25 + 0.j]), coeff=1.0),
             'Edag_0': PauliSumOp(
                 SparsePauliOp([[True, True, False, False, False, False, False, False],
                                [True, True, False, False, False, True, False, False],
                                [True, True, False, False, True, False, False, False],
                                [True, True, False, False, True, True, False, False]],
                               coeffs=[0.25 + 0.j, 0. + 0.25j, 0. - 0.25j, 0.25 + 0.j]),
                 coeff=1.0),
             'E_1': PauliSumOp(
                 SparsePauliOp([[False, False, True, True, False, False, False, False],
                                [False, False, True, True, False, False, False, True],
                                [False, False, True, True, False, False, True, False],
                                [False, False, True, True, False, False, True, True]],
                               coeffs=[0.25 + 0.j, 0. - 0.25j, 0. + 0.25j, 0.25 + 0.j]),
                 coeff=1.0),
             'Edag_1': PauliSumOp(
                 SparsePauliOp([[False, False, True, True, False, False, False, False],
                                [False, False, True, True, False, False, False, True],
                                [False, False, True, True, False, False, True, False],
                                [False, False, True, True, False, False, True, True]],
                               coeffs=[0.25 + 0.j, 0. + 0.25j, 0. - 0.25j, 0.25 + 0.j]),
                 coeff=1.0),
             'E_2': PauliSumOp(
                 SparsePauliOp([[True, True, True, True, False, False, False, False],
                                [True, True, True, True, False, False, False, True],
                                [True, True, True, True, False, False, True, False],
                                [True, True, True, True, False, False, True, True],
                                [True, True, True, True, False, True, False, False],
                                [True, True, True, True, False, True, False, True],
                                [True, True, True, True, False, True, True, False],
                                [True, True, True, True, False, True, True, True],
                                [True, True, True, True, True, False, False, False],
                                [True, True, True, True, True, False, False, True],
                                [True, True, True, True, True, False, True, False],
                                [True, True, True, True, True, False, True, True],
                                [True, True, True, True, True, True, False, False],
                                [True, True, True, True, True, True, False, True],
                                [True, True, True, True, True, True, True, False],
                                [True, True, True, True, True, True, True, True]],
                               coeffs=[0.0625 + 0.j, 0. - 0.0625j, 0. + 0.0625j,
                                       0.0625 + 0.j,
                                       0. - 0.0625j, -0.0625 + 0.j, 0.0625 + 0.j,
                                       0. - 0.0625j,
                                       0. + 0.0625j, 0.0625 + 0.j, -0.0625 + 0.j,
                                       0. + 0.0625j,
                                       0.0625 + 0.j, 0. - 0.0625j, 0. + 0.0625j,
                                       0.0625 + 0.j]), coeff=1.0),
             'Edag_2': PauliSumOp(
                 SparsePauliOp([[True, True, True, True, False, False, False, False],
                                [True, True, True, True, False, False, False, True],
                                [True, True, True, True, False, False, True, False],
                                [True, True, True, True, False, False, True, True],
                                [True, True, True, True, False, True, False, False],
                                [True, True, True, True, False, True, False, True],
                                [True, True, True, True, False, True, True, False],
                                [True, True, True, True, False, True, True, True],
                                [True, True, True, True, True, False, False, False],
                                [True, True, True, True, True, False, False, True],
                                [True, True, True, True, True, False, True, False],
                                [True, True, True, True, True, False, True, True],
                                [True, True, True, True, True, True, False, False],
                                [True, True, True, True, True, True, False, True],
                                [True, True, True, True, True, True, True, False],
                                [True, True, True, True, True, True, True, True]],
                               coeffs=[0.0625 + 0.j, 0. + 0.0625j, 0. - 0.0625j,
                                       0.0625 + 0.j,
                                       0. + 0.0625j, -0.0625 + 0.j, 0.0625 + 0.j,
                                       0. + 0.0625j,
                                       0. - 0.0625j, 0.0625 + 0.j, -0.0625 + 0.j,
                                       0. - 0.0625j,
                                       0.0625 + 0.j, 0. + 0.0625j, 0. - 0.0625j,
                                       0.0625 + 0.j]), coeff=1.0)}, {},
            {'E_0': ((0,), (1,)), 'Edag_0': ((1,), (0,)), 'E_1': ((2,), (3,)),
             'Edag_1': ((3,), (2,)),
             'E_2': ((0, 2), (1, 3)), 'Edag_2': ((1, 3), (0, 2))})
        hopping_operators = _build_qeom_hopping_ops(self.num_modals, self.qubit_converter)
        self.assertEqual(hopping_operators, expected_hopping_operators)
|
zhaofeng092/python_auto_office
|
公众号/auto_office/pdf/merge.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/3/16 18:21
# @Author :liuzf
# @File : merge.py
# @Software: PyCharm
# @Description:
from PyPDF2 import PdfFileReader, PdfFileWriter
def merge_pdfs(paths, output):
    """Concatenate several PDF files into one document.

    :param paths: iterable of input PDF file paths, merged in order
    :param output: path of the merged PDF to write
    """
    writer = PdfFileWriter()
    for source in paths:
        reader = PdfFileReader(source)
        # Append every page of the current source file to the writer.
        for index in range(reader.getNumPages()):
            writer.addPage(reader.getPage(index))
    # Persist the combined document to disk.
    with open(output, 'wb') as out:
        writer.write(out)
if __name__ == '__main__':
    # Demo: merge the same sample document with itself.
    input_paths = ['静夜思.pdf', '静夜思.pdf']
    merge_pdfs(input_paths, output='merged.pdf')
|
okfn/dpm-old
|
dpm/tests/test_cli.py
|
import os
import commands
import tempfile
import shutil
import logging
import dpm.tests.base
import dpm.cli
import dpm.util
import dpm.repository
import dpm.package
class CLIBase(dpm.tests.base.TestCase):
    # Shared fixture for the CLI tests: builds a temporary index/repo directory
    # layout plus one sample package on disk, once per test class.
    # NOTE: Python 2 era code — `self` in these classmethods is the class object.
    @classmethod
    def setup_class(self):
        self.tmpdir = self.make_tmpdir()
        self.index_path = os.path.join(self.tmpdir, 'dpm-index')
        self.repo_path = os.path.join(self.tmpdir, 'dpm-repo')
        # file:// specs are what the dpm CLI accepts for local index/repo.
        self.index_spec = 'file://%s' % self.index_path
        self.repo_spec = 'file://%s' % self.repo_path
        # Start from a clean temp directory even if a previous run left one.
        if os.path.exists(self.tmpdir):
            shutil.rmtree(self.tmpdir)
        os.makedirs(self.tmpdir)
        # Remember cwd so teardown can restore it for subsequent test modules.
        self.cwd = os.getcwd()
        self.cmd_base = 'dpm --debug '
        # Sample package written to disk; its payload file doubles as the
        # package's download_url target.
        self.pkg_name = u'mytestpkg'
        self.pkg_title = u'Test Title'
        self.pkg_path = os.path.join(self.tmpdir, self.pkg_name)
        self.abc_filepath = os.path.join(self.pkg_path, 'abc.txt')
        pkg = dpm.package.Package(
            name=self.pkg_name,
            title=self.pkg_title,
            download_url='file://%s' % self.abc_filepath
            )
        pkg.write(self.pkg_path)
        fo = open(self.abc_filepath, 'w')
        fo.write('Ideas are cheap, implementation is costly.')
        fo.close()
        self.file_spec = u'file://%s' % self.pkg_path

    @classmethod
    def teardown_class(self):
        # do not teardown directory in order to allow investigation on error
        # reset cwd or problems in other tests
        os.chdir(self.cwd)
class TestCLI(CLIBase):
    # End-to-end tests that shell out to the `dpm` command line tool and assert
    # on its exit status and output text.

    def test_01_about(self):
        # Sanity check: the binary runs and reports a version string.
        cmd = 'dpm about'
        status, output = commands.getstatusoutput(cmd)
        exp = 'dpm version'
        assert exp in output

    def test_02_walkthrough(self):
        # Full lifecycle against a local file:// index: list (empty), init,
        # info, register, list/search, update, download, dump.
        cmd = self.cmd_base + 'list %s' % (self.index_spec)
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        assert not self.pkg_name in output, (cmd, output)
        # init
        create_path = os.path.join(self.tmpdir, 'test-create-xxx')
        cmd = self.cmd_base + 'init %s' % create_path
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        assert os.path.exists(create_path)
        # info: from disk
        cmd = self.cmd_base + 'info %s' % self.file_spec
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        assert self.pkg_name in output, output
        assert self.pkg_title in output, output
        # register
        cmd = self.cmd_base + 'register %s %s' % (self.file_spec,
                self.index_spec)
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        destpath = os.path.join(self.index_path, self.pkg_name)
        assert os.path.exists(destpath), os.listdir(self.index_path)
        # Registration must preserve the package metadata.
        pkg = dpm.package.Package.load(destpath)
        assert pkg.title == self.pkg_title
        cmd = self.cmd_base + 'list %s' % (self.index_spec)
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        assert self.pkg_name in output
        cmd = self.cmd_base + 'search %s %s' % (self.index_spec, self.pkg_name)
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        assert self.pkg_name in output
        # not a particularly good test because we won't change anything
        cmd = self.cmd_base + 'update %s %s' % (self.file_spec, self.index_spec)
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        # download
        cmd = self.cmd_base + 'download %s %s "*"' % (self.file_spec,
                os.path.join(self.repo_path, self.pkg_name))
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        print output
        # dest path with be self.pkg_name-version-*
        # dirs = os.listdir(repo.installed_path)
        # filtered = filter(lambda x: x.startswith(self.pkg_name), dirs)
        # assert len(filtered) > 0, dirs
        dest_path = os.path.join(self.repo_path, self.pkg_name)
        assert os.path.exists(dest_path), dest_path
        # inspect - not yet implemented
        # cmd = self.cmd_base + 'inspect %s' % self.pkg_name
        # status, output = dpm.util.getstatusoutput(cmd)
        # assert not status, output
        # dump
        offset = 'abc.txt'
        cmd = self.cmd_base + 'dump %s %s' % ('file://' + self.pkg_path, offset)
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output

    # TODO: re-enable (2011-11-18)
    # Disabling as requires ofs and pairtree installed
    # Plus upload is not that important/functional atm
    def _test_03_upload(self):
        # Disabled (leading underscore keeps it out of collection).
        # NOTE(review): calls self.setup_for_upload(), which is not defined in
        # this file — presumably provided by the base TestCase; verify before
        # re-enabling.
        # sets up config for uploading and a directory upload_dir
        self.setup_for_upload()
        import dpm
        # Remember: commands run in a separate process so no access to config
        # now overwrite config with our test config
        cfg_path = os.path.join(self.tmpdir, 'dpmrc')
        dpm.CONFIG.write(open(cfg_path, 'w'))
        # and set up cmd base to use it
        our_cmd_base = self.cmd_base + '--config %s ' % cfg_path
        cmd = our_cmd_base + 'upload %s %s' % (self.abc_filepath,
                'mypairtree://mb/abc.txt')
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        destpath = os.path.join(self.upload_dir, 'pairtree_root', 'mb', 'obj', 'abc.txt')
        assert os.path.exists(destpath), destpath
class TestCkan(CLIBase):
    '''For this need dummy ckan running locally with standard test data
    '''
    # TODO: set __test__ based on a check of whether local ckan is running
    __test__ = False

    def test_walkthrough(self):
        # Exercise list/info/register/update against a CKAN API endpoint.
        # dpm.config
        # localckan = 'http://localhost:5000/api/'
        localckan = 'http://test.ckan.net/api/'
        apikey = 'tester'
        ckanbase = 'dpm --repository %s ' % localckan
        ckanbase += '--api-key %s ' % apikey
        # list
        listcmd = ckanbase + 'list'
        status, output = dpm.util.getstatusoutput(listcmd)
        assert not status, output
        assert 'annakarenina' in output, output
        # info
        cmd = ckanbase + 'info %s' % 'annakarenina'
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        assert 'annakarenina' in output, output
        # create dummy package if not created already
        # NOTE(review): _test_create is not defined in this file — presumably
        # inherited or stale; verify before enabling this class.
        if not os.path.exists(self.pkg_path):
            self._test_create()
        # register
        registercmd = ckanbase + 'register %s' % self.pkg_path
        status, output = dpm.util.getstatusoutput(registercmd)
        assert not status, output
        # check actually registered
        listcmd = ckanbase + 'info %s' % self.pkg_name
        status, output = dpm.util.getstatusoutput(listcmd)
        assert not status, output
        assert self.pkg_name in output, output
        # TODO: test other info is registered
        # update - won't do anything as nothing has changed
        registercmd = ckanbase + 'update %s' % self.pkg_path
        status, output = dpm.util.getstatusoutput(registercmd)
        assert not status, output

    # requires external access
    def test_6_ckan_readonly(self):
        # this depends on dpmdemo existing on ckan.net
        localckan = 'http://ckan.net/api'
        ckanspec = 'ckan://%s' % localckan
        ckanbase = 'dpm '
        pkg_name = u'dpmdemo'
        pkg_version = '0.1'
        # TODO: not unpacked yet
        fullname = '%s-%s.tar.gz' % (pkg_name, pkg_version)
        # install
        cmd = ckanbase + 'install %s/%s %s' % (ckanspec, pkg_name,
                self.repo_spec)
        status, output = dpm.util.getstatusoutput(cmd)
        assert not status, output
        dest_path = os.path.join(self.repo_path, pkg_name, fullname)
        assert os.path.exists(dest_path), dest_path
|
JKot-Coder/slang
|
source/slang/slang-ir-layout.cpp
|
// slang-ir-layout.cpp
#include "slang-ir-layout.h"
#include "slang-ir-insts.h"
// This file implements facilities for computing and caching layout
// information on IR types.
//
// Unlike the AST-level layout system, this code currently only
// handles the notion of "natural" layout for IR types, which is
// the layout they use when stored in general-purpose memory
// without additional constraints.
//
// In general, "natural" layout for all targets is assumed to follow
// the same basic rules:
//
// * Scalars are all naturally aligned and have the "obvious" size
//
// * Arrays are laid out by separating elements by their "stride" (size rounded up to alignment)
//
// * Vectors are laid out as arrays of elements
//
// * Matrices are laid out as arrays of rows
//
// * Structures are laid out by packing fields in order, placing each field on the "next"
// suitably aligned offset. The alignment of a structure is the maximum alignment of
// its fields.
//
// Right now this file implements a one-size-fits-all version of natural
// layout that might not be a perfect fit for all targets. In particular
// this code currently assumes:
//
// * The `bool` type is laid out as 4 bytes (equivalent to an `int`)
//
// * The size of a structure or array type is *not* rounded up to a multiple
// of its alignment. This means that fields may be laid out in
// the "tail padding" of previous fields in the same structure. This is
// correct behavior for VK/D3D, but does not match the behavior of typical
// C/C++ compilers.
//
// * All matrices are laid out in row-major order, regardless of any
// settings in user code.
//
// TODO: Addressing the above issues would require extending this file to somehow
// get target-specific layout information as an input. One option would be
// to attach information about "natural" layout on the target to the `IRModuleInst`
// as a decoration, similar to how an LLVM IR module stores a "layout string."
namespace Slang
{
static Result _calcNaturalArraySizeAndAlignment(
TargetRequest* target,
IRType* elementType,
IRInst* elementCountInst,
IRSizeAndAlignment* outSizeAndAlignment)
{
auto elementCountLit = as<IRIntLit>(elementCountInst);
if(!elementCountLit)
return SLANG_FAIL;
auto elementCount = elementCountLit->getValue();
if( elementCount == 0 )
{
*outSizeAndAlignment = IRSizeAndAlignment(0, 1);
return SLANG_OK;
}
IRSizeAndAlignment elementTypeLayout;
SLANG_RETURN_ON_FAIL(getNaturalSizeAndAlignment(target, elementType, &elementTypeLayout));
auto elementStride = elementTypeLayout.getStride();
*outSizeAndAlignment = IRSizeAndAlignment(
elementStride * (elementCount - 1) + elementTypeLayout.size,
elementTypeLayout.alignment);
return SLANG_OK;
}
/// Extract the integer value of an IR instruction.
///
/// Precondition: `inst` must be an integer literal (`kIROp_IntLit`);
/// this is asserted in debug builds.
IRIntegerValue getIntegerValueFromInst(IRInst* inst)
{
    SLANG_ASSERT(inst->getOp() == kIROp_IntLit);
    auto literal = as<IRIntLit>(inst);
    return literal->value.intVal;
}
// Compute the natural size and alignment of `type` from scratch.
//
// Returns SLANG_FAIL for types that have no natural layout on `target`
// (e.g., resource types on targets where resources are not "bindless").
static Result _calcNaturalSizeAndAlignment(
    TargetRequest* target,
    IRType* type,
    IRSizeAndAlignment* outSizeAndAlignment)
{
    switch( type->getOp() )
    {

#define CASE(TYPE, SIZE, ALIGNMENT) \
    case kIROp_##TYPE##Type: \
        *outSizeAndAlignment = IRSizeAndAlignment(SIZE, ALIGNMENT); \
        return SLANG_OK \
    /* end */

    // Most base types are "naturally aligned" (meaning alignment and size are the same)
#define BASE(TYPE, SIZE) CASE(TYPE, SIZE, SIZE)

    BASE(Int8, 1);
    BASE(UInt8, 1);
    BASE(Int16, 2);
    BASE(UInt16, 2);
    BASE(Half, 2);
    BASE(Int, 4);
    BASE(UInt, 4);
    BASE(Float, 4);
    BASE(Int64, 8);
    BASE(UInt64, 8);
    BASE(Double, 8);

    // We are currently handling `bool` following the HLSL
    // precedent of storing it in 4 bytes.
    //
    // TODO: It would be good to try to make this follow
    // per-platform conventions, or at least to be able
    // to use a 1-byte encoding where available.
    //
    BASE(Bool, 4);

    // The Slang `void` type is treated as a zero-byte
    // type, so that it does not influence layout at all.
    //
    CASE(Void, 0, 1);

    // Fix: the second `#undef` previously repeated `CASE`, which left the
    // helper macro `BASE` defined beyond the end of this function.
#undef BASE
#undef CASE

    case kIROp_StructType:
        {
            auto structType = cast<IRStructType>(type);

            IRSizeAndAlignment structLayout;
            for( auto field : structType->getFields() )
            {
                IRSizeAndAlignment fieldTypeLayout;
                SLANG_RETURN_ON_FAIL(getNaturalSizeAndAlignment(target, field->getFieldType(), &fieldTypeLayout));

                // Each field is placed at the next offset suitably aligned
                // for its type; the struct as a whole is aligned to its
                // most-aligned field.
                structLayout.size = align(structLayout.size, fieldTypeLayout.alignment);
                structLayout.alignment = std::max(structLayout.alignment, fieldTypeLayout.alignment);

                IRIntegerValue fieldOffset = structLayout.size;
                if( auto module = type->getModule() )
                {
                    // If we are in a situation where attaching new
                    // decorations is possible, then we want to
                    // cache the field offset on the IR field
                    // instruction.
                    //
                    SharedIRBuilder sharedBuilder(module);
                    IRBuilder builder(sharedBuilder);
                    auto intType = builder.getIntType();
                    builder.addDecoration(
                        field,
                        kIROp_NaturalOffsetDecoration,
                        builder.getIntValue(intType, fieldOffset));
                }

                // Note: the struct size is deliberately *not* rounded up to
                // a multiple of its alignment (see the comment at the top
                // of this file about tail padding).
                structLayout.size += fieldTypeLayout.size;
            }

            *outSizeAndAlignment = structLayout;
            return SLANG_OK;
        }
        break;

    case kIROp_ArrayType:
        {
            auto arrayType = cast<IRArrayType>(type);
            return _calcNaturalArraySizeAndAlignment(
                target,
                arrayType->getElementType(),
                arrayType->getElementCount(),
                outSizeAndAlignment);
        }
        break;

    case kIROp_VectorType:
        {
            // A vector is laid out like a tightly packed array of its elements.
            auto vecType = cast<IRVectorType>(type);
            return _calcNaturalArraySizeAndAlignment(
                target,
                vecType->getElementType(),
                vecType->getElementCount(),
                outSizeAndAlignment);
        }
        break;

    case kIROp_MatrixType:
        {
            // A matrix is laid out like a packed array of rowCount*colCount
            // elements (row-major; see the comment at the top of this file).
            auto matType = cast<IRMatrixType>(type);
            auto rowCount = getIntegerValueFromInst(matType->getRowCount());
            auto colCount = getIntegerValueFromInst(matType->getColumnCount());
            SharedIRBuilder sharedBuilder(type->getModule());
            IRBuilder builder(sharedBuilder);
            return _calcNaturalArraySizeAndAlignment(
                target, matType->getElementType(),
                builder.getIntValue(builder.getUIntType(), rowCount * colCount),
                outSizeAndAlignment);
        }
        break;

    default:
        break;
    }

    if( areResourceTypesBindlessOnTarget(target) )
    {
        // TODO: need this to be based on target, instead of hard-coded
        int pointerSize = sizeof(void*);

        if(as<IRTextureType>(type) )
        {
            *outSizeAndAlignment = IRSizeAndAlignment(pointerSize, pointerSize);
            return SLANG_OK;
        }
        else if(as<IRSamplerStateTypeBase>(type) )
        {
            *outSizeAndAlignment = IRSizeAndAlignment(pointerSize, pointerSize);
            return SLANG_OK;
        }

        // TODO: the remaining cases for "bindless" resources on CPU/CUDA targets
    }

    return SLANG_FAIL;
}
// Public entry point: return the natural size/alignment of `type`,
// computing it on first use and caching it as a decoration on the type.
Result getNaturalSizeAndAlignment(TargetRequest* target, IRType* type, IRSizeAndAlignment* outSizeAndAlignment)
{
    // Fast path: an earlier query already attached the layout as a decoration.
    if( auto cached = type->findDecoration<IRNaturalSizeAndAlignmentDecoration>() )
    {
        *outSizeAndAlignment = IRSizeAndAlignment(cached->getSize(), (int)cached->getAlignment());
        return SLANG_OK;
    }

    IRSizeAndAlignment computed;
    SLANG_RETURN_ON_FAIL(_calcNaturalSizeAndAlignment(target, type, &computed));

    if( auto module = type->getModule() )
    {
        // Cache the result on the type so later queries take the fast path.
        SharedIRBuilder sharedBuilder(module);
        IRBuilder builder(sharedBuilder);
        auto intType = builder.getIntType();
        builder.addDecoration(
            type,
            kIROp_NaturalSizeAndAlignmentDecoration,
            builder.getIntValue(intType, computed.size),
            builder.getIntValue(intType, computed.alignment));
    }

    *outSizeAndAlignment = computed;
    return SLANG_OK;
}
// Look up the natural byte offset of `field` within its parent struct type.
Result getNaturalOffset(TargetRequest* target, IRStructField* field, IRIntegerValue* outOffset)
{
    // Offsets get cached on fields as decorations when the parent struct
    // type is laid out, so first check for a cached answer.
    if( auto cached = field->findDecoration<IRNaturalOffsetDecoration>() )
    {
        *outOffset = cached->getOffset();
        return SLANG_OK;
    }

    // No cached offset yet: force layout of the parent struct type, which
    // is expected to decorate each of its fields with an offset.
    auto parentStruct = as<IRStructType>(field->getParent());
    if(!parentStruct)
        return SLANG_FAIL;

    IRSizeAndAlignment parentLayout;
    SLANG_RETURN_ON_FAIL(getNaturalSizeAndAlignment(target, parentStruct, &parentLayout));

    if( auto computed = field->findDecoration<IRNaturalOffsetDecoration>() )
    {
        *outOffset = computed->getOffset();
        return SLANG_OK;
    }

    // Laying out the parent did not attach an offset to this field, so we
    // are in an unexpected case with no easy answer.
    //
    return SLANG_FAIL;
}
}
|
meagon/sisdb
|
src/core/sis_file.c
|
#include <sis_file.h>
#include <sis_malloc.h>
// Allocate and zero-initialize a new s_sis_file object.
s_sis_file *sis_file_create()
{
    s_sis_file *file = sis_malloc(sizeof(*file));
    memset(file, 0, sizeof(*file));
    return file;
}
// Release an s_sis_file object previously returned by sis_file_create().
// NOTE(review): file_ is freed without a NULL check — confirm sis_free
// tolerates NULL, or that callers never pass it.
void sis_file_destroy(s_sis_file *file_)
{
    sis_free(file_);
}
// Read the entire contents of file `fn_` into a newly allocated sds buffer.
// Returns NULL if the file cannot be opened or is empty; on success the
// caller owns the returned sds and must free it.
// NOTE(review): the buffer is allocated with length size + 1, so the sds
// length includes one byte beyond the file data — confirm callers expect
// that. The return value of sis_file_read is also not checked, so a short
// read goes undetected.
s_sis_sds sis_file_read_to_sds(const char *fn_)
{
    s_sis_file_handle fp = sis_file_open(fn_, SIS_FILE_IO_READ, 0);
    if (!fp)
    {
        LOG(3)("cann't open file [%s].\n", fn_);
        return NULL;
    }
    // Determine the file size, then rewind to the start before reading.
    size_t size = sis_file_size(fp);
    sis_file_seek(fp, 0, SEEK_SET);
    if (size == 0)
    {
        sis_file_close(fp);
        return NULL;
    }
    s_sis_sds buffer = sis_sdsnewlen(NULL, size + 1);
    sis_file_read(fp, buffer, size);
    sis_file_close(fp);
    return buffer;
}
// Write the whole sds buffer `buffer_` out to file `fn_`, creating parent
// directories as needed and truncating any existing file.
// Returns false if the directory cannot be created or the file cannot be
// opened; true otherwise.
// NOTE(review): the return value of sis_file_write is not checked, so a
// partial or failed write still reports success — confirm this is intended.
bool sis_file_sds_write(const char *fn_, s_sis_sds buffer_)
{
    char path[SIS_PATH_LEN];
    sis_file_getpath(fn_, path, SIS_PATH_LEN);
    if (!sis_path_mkdir(path))
    {
        LOG(3)("cann't create dir [%s].\n", path);
        return false;
    }
    s_sis_file_handle fp = sis_file_open(fn_, SIS_FILE_IO_CREATE | SIS_FILE_IO_WRITE | SIS_FILE_IO_TRUNC, 0);
    if (!fp)
    {
        LOG(3)("cann't open file [%s].\n", fn_);
        return false;
    }
    sis_file_seek(fp, 0, SEEK_SET);
    sis_file_write(fp, buffer_, sis_sdslen(buffer_));
    sis_file_close(fp);
    return true;
}
// Combine a base path `srcpath_` with `inpath_` into `outpath_` (capacity
// `size_`), normalize the result, and ensure the directory exists.
// - inpath_ == NULL          -> outpath_ = srcpath_
// - inpath_ is absolute (or no base path) -> outpath_ = inpath_
// - otherwise                -> outpath_ = srcpath_ + inpath_
void sis_cat_fixed_path(char *srcpath_, const char *inpath_, char *outpath_, int size_)
{
    if (!inpath_) {
        sis_sprintf(outpath_,size_,"%s", srcpath_);
    } else {
        if (*inpath_== SIS_PATH_SEPARATOR ||!srcpath_) {
            // Absolute path (or no base path): use it as-is.
            sis_sprintf(outpath_,size_,"%s", inpath_);
        } else {
            // Relative path: join it onto the base (config file) directory.
            sis_sprintf(outpath_,size_,"%s%s", srcpath_,inpath_);
        }
    }
    // Normalize the path and create the directory; failures are only logged.
    sis_path_complete(outpath_,SIS_PATH_LEN);
    if(!sis_path_mkdir(outpath_))
    {
        LOG(3)("cann't create dir [%s].\n", outpath_);
    }
    LOG(5)("outpath_:%s\n",outpath_);
}
// Ensure that the directory portion of file path `fn_` exists, creating it
// (after normalizing the path) if necessary. Failures are only logged.
void sis_check_path(const char *fn_)
{
    char outpath[SIS_PATH_LEN];
    sis_file_getpath(fn_, outpath, SIS_PATH_LEN);
    sis_path_complete(outpath, SIS_PATH_LEN);
    if(!sis_path_mkdir(outpath))
    {
        LOG(3)("cann't create dir [%s].\n", outpath);
    }
    LOG(5)("outpath_:%s\n",outpath);
}
#if 0
// Disabled manual benchmark: compares raw fd I/O (sis_open/sis_write)
// against the buffered sis_file_* API when writing a ~5 GB file, then
// exercises seek/getpos/setpos near the end of the file.
// test large file
int main()
{
    msec_t start = sis_time_get_now_msec();
#if 1
    int fp = sis_open("large.log", SIS_FILE_IO_DSYNC | SIS_FILE_IO_TRUNC | SIS_FILE_IO_CREATE | SIS_FILE_IO_RDWR, SIS_FILE_MODE_NORMAL);
    // With the open flags above: safe; write speed ~66 s per 1 GB (SSD)
    // int fp = sis_open("large.log", O_TRUNC | O_CREAT | O_RDWR | O_APPEND);
    // With the flags above: safe; write speed ~70 s per 1 GB (SSD)
    // for (int i = 0; i < 15625 ; i++)
    // {
    //     char buffer[32*10000];
    //     size_t bytes = sis_write(fp, buffer, 32*10000);
    //     if (bytes!=32*10000||i%3125==0)
    for (int i = 0; i < 5*1000*1000; i++)
    {
        char buffer[1000];
        size_t bytes = sis_write(fp, buffer, 1000);
        if (bytes!=1000||i%1000000==0)
        {
            printf("[%4d] %llu size= %zu %zu\n", i, sis_time_get_now_msec() - start, bytes, sis_seek(fp, 0, SEEK_CUR));
        }
    }
    size_t size = sis_size(fp);
    printf("msec %llu %x size= %zu\n", sis_time_get_now_msec() - start, O_RSYNC, size);
    sis_seek(fp, -100 ,SEEK_END);
    size_t pos;
    sis_getpos(fp, &pos);
    printf("pos = %zu\n", pos);
    pos = 4999990000;
    sis_setpos(fp, &pos);
    printf("pos = %zu\n", pos);
    sis_getpos(fp, &pos);
    printf("pos = %zu\n", pos);
    sis_close(fp);
#else
    // The method below is unsafe: a write can fail if another process reads
    // the file concurrently; write speed ~66 s per 1 GB (SSD)
    s_sis_file_handle fp = sis_file_open("large.log", SIS_FILE_IO_CREATE | SIS_FILE_IO_TRUNC | SIS_FILE_IO_RDWR, 0);
    //write
    for (int i = 0; i < 15625 ; i++)
    {
        char buffer[32*10000];
        size_t bytes = sis_file_write(fp, buffer, 32*10000);
        if (bytes!=32*10000||i%3125==0)
        // for (int i = 0; i < 5*1000*1000; i++)
        // {
        //     char buffer[1000];
        //     size_t bytes = sis_file_write(fp, buffer, 1000);
        //     if (bytes!=1000||i%1000000==0)
        {
            printf("[%4d] %llu size= %zu %zu\n", i, sis_time_get_now_msec() - start, bytes, sis_file_seek(fp, 0, SEEK_CUR));
        }
    }
    size_t size = sis_file_size(fp);
    printf("msec %llu size= %zu\n", sis_time_get_now_msec() - start, size);
    sis_file_seek(fp, -100 ,SEEK_END);
    size_t pos;
    sis_file_getpos(fp, &pos);
    printf("pos = %zu\n", pos);
    pos = 4999990000;
    sis_file_setpos(fp, &pos);
    printf("pos = %zu\n", pos);
    sis_file_getpos(fp, &pos);
    printf("pos = %zu\n", pos);
    sis_file_close(fp);
#endif
    return 0;
}
#endif
|
maciejg-git/vue-bootstrap-icons
|
dist-mdi/mdi/calendar-clock.js
|
import { h } from 'vue'
// Icon component for the MDI "calendar-clock" glyph. Renders a 24x24 SVG
// with the path data injected via innerHTML; appears to be generated — keep
// edits to the metadata/markup out of hand-written code.
export default {
  name: "CalendarClock",
  vendor: "Mdi",
  type: "",
  tags: ["calendar","clock"],
  render() {
    return h(
      "svg",
      {"xmlns":"http://www.w3.org/2000/svg","width":"24","height":"24","viewBox":"0 0 24 24","class":"v-icon","fill":"currentColor","data-name":"mdi-calendar-clock","innerHTML":"<path d='M15,13H16.5V15.82L18.94,17.23L18.19,18.53L15,16.69V13M19,8H5V19H9.67C9.24,18.09 9,17.07 9,16A7,7 0 0,1 16,9C17.07,9 18.09,9.24 19,9.67V8M5,21C3.89,21 3,20.1 3,19V5C3,3.89 3.89,3 5,3H6V1H8V3H16V1H18V3H19A2,2 0 0,1 21,5V11.1C22.24,12.36 23,14.09 23,16A7,7 0 0,1 16,23C14.09,23 12.36,22.24 11.1,21H5M16,11.15A4.85,4.85 0 0,0 11.15,16C11.15,18.68 13.32,20.85 16,20.85A4.85,4.85 0 0,0 20.85,16C20.85,13.32 18.68,11.15 16,11.15Z' />"},
    )
  }
}
|
dvdmandt/EnderIO
|
src/main/java/crazypants/enderio/machine/light/ElectricLightRenderer.java
|
<reponame>dvdmandt/EnderIO
package crazypants.enderio.machine.light;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.util.IIcon;
import net.minecraft.world.IBlockAccess;
import com.enderio.core.client.render.BoundingBox;
import com.enderio.core.client.render.CubeRenderer;
import com.enderio.core.client.render.RenderUtil;
import cpw.mods.fml.client.registry.ISimpleBlockRenderingHandler;
/**
 * Block renderer for the electric light: draws the block as a flat slab
 * (1 x 0.2 x 1 in inventory; the block's own bounds in-world).
 */
public class ElectricLightRenderer implements ISimpleBlockRenderingHandler {

  @Override
  public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderer) {
    // Fixed slab shape for the item/inventory view.
    BoundingBox bb = new BoundingBox(0, 0, 0, 1, 0.2, 1);
    Tessellator.instance.startDrawingQuads();
    IIcon[] textures = RenderUtil.getBlockTextures(block, metadata);
    CubeRenderer.render(bb, textures, null, null);
    Tessellator.instance.draw();
    // Fix: removed the unused local `doDraw` that was never read.
  }

  @Override
  public boolean renderWorldBlock(IBlockAccess world, int x, int y, int z, Block block, int modelId, RenderBlocks renderer) {
    // Use the block's state-dependent bounds, translated to world position.
    block.setBlockBoundsBasedOnState(world, x, y, z);
    BoundingBox bb = new BoundingBox(block.getBlockBoundsMinX(), block.getBlockBoundsMinY(), block.getBlockBoundsMinZ(), block.getBlockBoundsMaxX(),
        block.getBlockBoundsMaxY(), block.getBlockBoundsMaxZ());
    bb = bb.translate(x, y, z);
    RenderUtil.setTesselatorBrightness(world, x, y, z);
    IIcon[] textures = RenderUtil.getBlockTextures(world, x, y, z);
    if(renderer.hasOverrideBlockTexture()) {
      // Break/crack overlay texture takes precedence over block textures.
      CubeRenderer.render(bb, renderer.overrideBlockTexture);
    } else {
      CubeRenderer.render(bb, textures, null, null);
    }
    return true;
  }

  @Override
  public int getRenderId() {
    return BlockElectricLight.renderId;
  }

  @Override
  public boolean shouldRender3DInInventory(int modelId) {
    return true;
  }
}
|
eantcal/nubasic
|
lib/nu_stmt_read.cc
|
<reponame>eantcal/nubasic
//
// This file is part of nuBASIC
// Copyright (c) <NAME> (<EMAIL>)
// All rights reserved.
// Licensed under the MIT License.
// See COPYING file in the project root for full license information.
//
/* -------------------------------------------------------------------------- */
#include "nu_stmt_read.h"
#include "nu_os_console.h"
#include "nu_rt_prog_ctx.h"
#include <cstdio>
/* -------------------------------------------------------------------------- */
namespace nu {
/* -------------------------------------------------------------------------- */
// Execute a READ statement: consume the next value(s) from the program's
// DATA store and assign them to the listed variables — either a scalar or
// a single element of a vector — converting each value to the target
// variable's type. Raises E_VAL_OUT_OF_RANGE when the DATA store is
// exhausted.
void stmt_read_t::run(rt_prog_ctx_t& ctx)
{
    for (auto const& variable : _vars) {
        // Fix: validate availability for *each* variable, not just once
        // before the loop. The original checked only the first read, so
        // a READ of more variables than remaining DATA items indexed
        // `read_data_store` out of bounds.
        rt_error_code_t::get_instance().throw_if(
            ctx.read_data_store_index >= ctx.read_data_store.size(),
            ctx.runtime_pc.get_line(), rt_error_code_t::value_t::E_VAL_OUT_OF_RANGE,
            "Read");

        auto index = variable.second;
        auto name = variable.first;

        const auto & data_value = ctx.read_data_store[ctx.read_data_store_index++];

        // A non-null index expression means the target is a vector element.
        bool is_vector = index != nullptr;

        if (is_vector) {
            var_scope_t::handle_t scope
                = ctx.proc_scope.get(ctx.proc_scope.get_type(name));

            rt_error_code_t::get_instance().throw_if(!scope->is_defined(name),
                ctx.runtime_pc.get_line(), rt_error_code_t::value_t::E_VAR_UNDEF,
                "'" + name + "'");

            size_t idx = index->eval(ctx).to_int();

            auto& v = (*scope)[name];
            variant_t var = v.first;

            // Constants and out-of-range element indices are runtime errors.
            const bool const_var = (v.second & VAR_ACCESS_RO) == VAR_ACCESS_RO;

            rt_error_code_t::get_instance().throw_if(const_var,
                ctx.runtime_pc.get_line(), rt_error_code_t::value_t::E_CANNOT_MOD_CONST,
                "'" + name + "'");

            rt_error_code_t::get_instance().throw_if(idx >= var.vector_size(),
                ctx.runtime_pc.get_line(),
                rt_error_code_t::value_t::E_VEC_IDX_OUT_OF_RANGE, "'" + name + "'");

            variant_t::type_t t = var.get_type();

            // Undefined type: fall back to the type implied by the
            // variable's name suffix.
            if (t == variant_t::type_t::UNDEFINED)
                t = variable_t::type_by_name(name);

            // Convert the DATA value to the element type and store it.
            switch (t) {
            case variant_t::type_t::UNDEFINED:
            case variant_t::type_t::ANY:
            case variant_t::type_t::OBJECT:
            case variant_t::type_t::STRUCT:
                rt_error_code_t::get_instance().throw_if(true,
                    ctx.runtime_pc.get_line(), rt_error_code_t::value_t::E_TYPE_ILLEGAL,
                    "'" + name + "'");
                break;

            case variant_t::type_t::STRING:
                var.set_str(data_value.to_str(), idx);
                break;

            case variant_t::type_t::FLOAT:
                var.set_real(data_value.to_real(), idx);
                break;

            case variant_t::type_t::DOUBLE:
                var.set_double(data_value.to_double(), idx);
                break;

            case variant_t::type_t::INTEGER:
                var.set_int(data_value.to_int(), idx);
                break;

            case variant_t::type_t::BYTEVECTOR:
                var.set_bvect(data_value.to_int(), idx);
                break;

            case variant_t::type_t::BOOLEAN:
                var.set_bool(data_value.to_bool(), idx);
                break;

            case variant_t::type_t::LONG64:
                var.set_long64(data_value.to_long64(), idx);
                break;
            }

            // Write the modified vector back into the scope.
            scope->define(name, var_value_t(var, VAR_ACCESS_RW));
        }
        else {
            var_scope_t::handle_t scope
                = ctx.proc_scope.get(ctx.proc_scope.get_type(name));

            auto& v = (*scope)[name];

            const bool const_var = (v.second & VAR_ACCESS_RO) == VAR_ACCESS_RO;

            rt_error_code_t::get_instance().throw_if(const_var,
                ctx.runtime_pc.get_line(), rt_error_code_t::value_t::E_CANNOT_MOD_CONST,
                "'" + name + "'");

            variant_t var = v.first;
            variant_t::type_t t = var.get_type();

            if (t == variant_t::type_t::UNDEFINED)
                t = variable_t::type_by_name(name);

            // Scalar assignment: (re)define the variable with the DATA
            // value converted to its type; unknown types default to INTEGER.
            switch (t) {
            case variable_t::type_t::STRING:
                scope->define(name, var_value_t(data_value.to_str(), VAR_ACCESS_RW));
                break;

            case variable_t::type_t::DOUBLE:
                scope->define(name, var_value_t(data_value.to_double(), VAR_ACCESS_RW));
                break;

            case variable_t::type_t::FLOAT:
                scope->define(name, var_value_t(data_value.to_real(), VAR_ACCESS_RW));
                break;

            case variable_t::type_t::LONG64:
                scope->define(name, var_value_t(data_value.to_long64(), VAR_ACCESS_RW));
                break;

            case variable_t::type_t::BOOLEAN:
                scope->define(name, var_value_t(data_value.to_bool(), VAR_ACCESS_RW));
                break;

            case variable_t::type_t::INTEGER:
            default:
                scope->define(name, var_value_t(data_value.to_int(), VAR_ACCESS_RW));
                break;

            case variant_t::type_t::UNDEFINED:
            case variant_t::type_t::STRUCT:
                rt_error_code_t::get_instance().throw_if(true,
                    ctx.runtime_pc.get_line(), rt_error_code_t::value_t::E_TYPE_ILLEGAL,
                    "'" + name + "'");
                break;
            }
        }
    }

    ctx.go_to_next();
}
/* -------------------------------------------------------------------------- */
} // namespace nu
|
kawaja/Kilda
|
src-java/northbound-service/northbound/src/main/java/org/openkilda/northbound/controller/v1/NetworkController.java
|
/* Copyright 2019 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.northbound.controller.v1;
import org.openkilda.messaging.payload.network.PathsDto;
import org.openkilda.model.FlowEncapsulationType;
import org.openkilda.model.PathComputationStrategy;
import org.openkilda.model.SwitchId;
import org.openkilda.northbound.controller.BaseController;
import org.openkilda.northbound.editor.CaseInsensitiveEnumEditor;
import org.openkilda.northbound.service.NetworkService;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.PropertySource;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import java.util.concurrent.CompletableFuture;
/**
* REST Controller for network info.
*/
@RestController
@RequestMapping("/v1/network")
@PropertySource("classpath:northbound.properties")
public class NetworkController extends BaseController {

    /** Service this controller delegates path lookups to. */
    @Autowired
    private NetworkService networkService;

    /**
     * Finds available paths between two switches.
     *
     * @param srcSwitchId source switch
     * @param dstSwitchId destination switch
     * @param encapsulationType optional flow encapsulation type; falls back to the Kilda Configuration default
     * @param pathComputationStrategy optional strategy; falls back to the Kilda Configuration default
     * @param maxLatency max path latency in ms (required for MAX_LATENCY; ignored by other strategies)
     * @param maxLatencyTier2 fallback latency bound in ms, used only with MAX_LATENCY
     * @return future completing with the computed paths
     */
    @GetMapping(path = "/paths")
    @ApiOperation(value = "Get paths between two switches", response = PathsDto.class)
    @ResponseStatus(HttpStatus.OK)
    public CompletableFuture<PathsDto> getPaths(
            @RequestParam("src_switch") SwitchId srcSwitchId, @RequestParam("dst_switch") SwitchId dstSwitchId,
            @ApiParam(value = "Valid values are: TRANSIT_VLAN, VXLAN. If encapsulation type is not specified, default "
                    + "value from Kilda Configuration will be used")
            @RequestParam(value = "encapsulation_type", required = false) FlowEncapsulationType encapsulationType,
            @ApiParam(value = "Valid values are: COST, LATENCY, MAX_LATENCY, COST_AND_AVAILABLE_BANDWIDTH. If path "
                    + "computation strategy is not specified, default value from Kilda Configuration will be used")
            @RequestParam(value = "path_computation_strategy", required = false)
                    PathComputationStrategy pathComputationStrategy,
            @ApiParam(value = "Maximum latency of flow path in milliseconds. Required for MAX_LATENCY strategy. "
                    + "Other strategies will ignore this parameter. If max_latency is 0 LATENCY strategy will be used "
                    + "instead of MAX_LATENCY")
            @RequestParam(value = "max_latency", required = false) Long maxLatency,
            @ApiParam(value = "Second tier for flow path latency in milliseconds. If there is no path with required "
                    + "max_latency, max_latency_tier2 with be used instead. Used only with MAX_LATENCY strategy. "
                    + "Other strategies will ignore this parameter.")
            @RequestParam(value = "max_latency_tier2", required = false)
                    Long maxLatencyTier2) {
        return networkService.getPaths(srcSwitchId, dstSwitchId, encapsulationType, pathComputationStrategy, maxLatency,
                maxLatencyTier2);
    }

    /**
     * This method adds custom Editor to parse Enums from string ignoring case.
     */
    @InitBinder
    public void initBinder(WebDataBinder binder) {
        binder.registerCustomEditor(FlowEncapsulationType.class,
                new CaseInsensitiveEnumEditor(FlowEncapsulationType.class));
        binder.registerCustomEditor(PathComputationStrategy.class,
                new CaseInsensitiveEnumEditor(PathComputationStrategy.class));
    }
}
|
genesis-2/trunk
|
src/edu/virginia/vcgr/genii/container/bes/activity/resource/DBBESActivityResource.java
|
<gh_stars>1-10
/*
* Copyright 2006 University of Virginia
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package edu.virginia.vcgr.genii.container.bes.activity.resource;
import java.io.File;
import java.sql.SQLException;
import org.ggf.bes.factory.UnknownActivityIdentifierFaultType;
import org.oasis_open.docs.wsrf.r_2.ResourceUnknownFaultType;
import org.oasis_open.wsrf.basefaults.BaseFaultTypeDescription;
import edu.virginia.vcgr.genii.client.jsdl.FilesystemManager;
import edu.virginia.vcgr.genii.client.resource.ResourceException;
import edu.virginia.vcgr.genii.client.wsrf.FaultManipulator;
import edu.virginia.vcgr.genii.container.bes.BES;
import edu.virginia.vcgr.genii.container.bes.BESUtilities;
import edu.virginia.vcgr.genii.container.bes.activity.BESActivity;
import edu.virginia.vcgr.genii.client.jsdl.personality.common.BESWorkingDirectory;
import edu.virginia.vcgr.genii.container.db.ServerDatabaseConnectionPool;
import edu.virginia.vcgr.genii.container.resource.ResourceKey;
import edu.virginia.vcgr.genii.container.resource.db.BasicDBResource;
/**
 * Database-backed resource for a single BES activity. Handles cleanup of
 * the activity's working directory and its database record on destroy.
 */
public class DBBESActivityResource extends BasicDBResource implements IBESActivityResource
{
	@Override
	public void destroy() throws ResourceException
	{
		// Release any filesystems the activity had mounted.
		// Fix: guard against a missing FILESYSTEM_MANAGER property (NPE).
		FilesystemManager fsManager = (FilesystemManager) getProperty(FILESYSTEM_MANAGER);
		if (fsManager != null)
			fsManager.releaseAll();

		String fuseMountDirString = (String) getProperty(FUSE_MOUNT_PROPERTY);

		super.destroy();

		BES bes = BES.findBESForActivity(_resourceKey);
		if (bes == null)
			throw new ResourceException("Unable to find bes for activity " + _resourceKey);

		BESActivity activity = bes.findActivity(_resourceKey);
		// Fix: findActivity can return null (see findActivity() below);
		// previously this dereferenced it unconditionally.
		if (activity == null)
			throw new ResourceException("Unable to find activity " + _resourceKey);

		BESWorkingDirectory dir = activity.getActivityCWD();

		// Fix: the working-directory deletion logic was duplicated in both
		// branches; compute the single condition, then delete once.
		// If a fuse mount dir is configured, only delete when it is empty.
		boolean fuseMountIsBusy = false;
		if (fuseMountDirString != null) {
			File f;
			if (fuseMountDirString.startsWith("/"))
				f = new File(fuseMountDirString);
			else
				f = new File(dir.getWorkingDirectory(), fuseMountDirString);
			File[] entries = f.listFiles();
			fuseMountIsBusy = (entries != null && entries.length > 0);
		}
		if (!fuseMountIsBusy && (BESUtilities.isDeletable(dir.getWorkingDirectory()) || dir.mustDelete()))
			PersistentDelete.persistentDelete(dir.getWorkingDirectory());

		try {
			bes.deleteActivity(getConnection(), _resourceKey);
		} catch (UnknownActivityIdentifierFaultType uaift) {
			throw new ResourceException("Unable to delete activity.", uaift);
		} catch (SQLException sqe) {
			throw new ResourceException("Unable to remove activity from database.", sqe);
		}
	}

	public DBBESActivityResource(ResourceKey parentKey, ServerDatabaseConnectionPool connectionPool) throws SQLException
	{
		super(parentKey, connectionPool);
	}

	/**
	 * Look up this resource's activity, faulting (rather than returning
	 * null) when either the BES or the activity cannot be found.
	 */
	public BESActivity findActivity() throws ResourceUnknownFaultType
	{
		BES bes = BES.findBESForActivity(_resourceKey);
		if (bes == null)
			throw FaultManipulator.fillInFault(new ResourceUnknownFaultType(null, null, null, null,
				new BaseFaultTypeDescription[] { new BaseFaultTypeDescription("Unknown BES \"" + _resourceKey + "\".") }, null));

		BESActivity activity = bes.findActivity(_resourceKey);
		if (activity == null)
			throw FaultManipulator.fillInFault(new ResourceUnknownFaultType(null, null, null, null,
				new BaseFaultTypeDescription[] { new BaseFaultTypeDescription("Unknown BES \"" + _resourceKey + "\".") }, null));

		return activity;
	}
}
|
cleberengineer/kuba
|
pixels/tokens/src/fontFamily.js
|
import styled from '@kuba/styled'
// Design tokens: CSS custom properties defining the app's font families
// (Spectral for headings/highlights, Roboto for body text).
export default styled.style`
  :root {
    --font-family-highlight: 'Spectral', serif;
    --font-family-base: 'Roboto', sans-serif;
  }
`
|
liangjisheng/go-books
|
projects/go-clean-arch/v1/author/repository/repository.go
|
<reponame>liangjisheng/go-books
package repository
import "v1/author"
// AuthorRepository abstracts read access to author records.
type AuthorRepository interface {
	// GetByID returns the author with the given id, or an error when the
	// lookup fails.
	GetByID(id int64) (*author.Author, error)
}
|
Lora-net/SWDM001
|
platform/bare_metal/stm32cube/shield/e516v02a/smtc_shield.c
|
<gh_stars>0
/**
* \file
*
* \brief File describing and initializing transceiver shield hardware
*/
/**
* \file
*
* The Clear BSD License
* Copyright Semtech Corporation 2022. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted (subject to the limitations in the disclaimer
* below) provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Semtech corporation nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY
* THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SEMTECH CORPORATION BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* -----------------------------------------------------------------------------
* --- DEPENDENCIES ------------------------------------------------------------
*/
#include <sxlib/System/config/Rtos_config.h>
#include <sxlib/stm32_ll_base.h>
#include <sxlib/Gpio/Led/McuHal_Led_impl.h>
#include <sxlib/Comm/spi_device/McuHal_spi_device_impl.h>
#include <sxlib/Gpio/Input/McuHal_Input_impl.h>
#include <sxlib/Gpio/Output/McuHal_Output_impl.h>
#include <sxlib/Radio/plain_spi_radio/Generic_plain_spi_radio_impl.h>
#include <sxlib/System/irq_line/McuHal_irq_line_impl.h>
#include "smtc_board.h"
#include "smtc_shield.h"
#include "smtc_stm32cube_arduino_compat.h"
/*
* -----------------------------------------------------------------------------
* --- PUBLIC FUNCTION PROTOTYPES ----------------------------------------------
*/
void smtc_shield_display_init( void );
/*
* -----------------------------------------------------------------------------
* --- PUBLIC VARIABLES --------------------------------------------------------
*/
// RX indicator LED on Arduino pin A5 (driven high to light).
const struct sxlib_Gpio_Led_dev global_gpio_led_rx = {
    .port  = SMTC_ARDUINO_PORT_A5,
    .pin   = SMTC_ARDUINO_PIN_A5,
    .flags = SXLIB_GPIO_LED_DEV_FLAGS_ACTIVE_HIGH,
};

// TX indicator LED on Arduino pin A4 (driven high to light).
const struct sxlib_Gpio_Led_dev global_gpio_led_tx = {
    .port  = SMTC_ARDUINO_PORT_A4,
    .pin   = SMTC_ARDUINO_PIN_A4,
    .flags = SXLIB_GPIO_LED_DEV_FLAGS_ACTIVE_HIGH,
};

// Radio reset line on A0.
const struct sxlib_Gpio_Output_inst global_gpio_output_nreset = { .port = SMTC_ARDUINO_PORT_A0,
                                                                  .pin  = SMTC_ARDUINO_PIN_A0 };

// Radio BUSY input on D3, no internal pull.
const struct sxlib_Gpio_Input_inst global_gpio_input_radio_busy = { .port = SMTC_ARDUINO_PORT_D3,
                                                                    .pin  = SMTC_ARDUINO_PIN_D3,
                                                                    .pull = LL_GPIO_PULL_NO };

// Unused display module NSS lines that must be kept high
const struct sxlib_Gpio_Output_inst global_gpio_output_flash_ss = { .port = SMTC_ARDUINO_PORT_D6,
                                                                    .pin  = SMTC_ARDUINO_PIN_D6 };

// Radio interrupt line on D5: rising-edge EXTI with pull-down.
const sxlib_System_irq_line_config_t global_irq_line_radio_interrupt = {
    .id          = 1,
    .exti_source = SMTC_ARDUINO_LL_SYSCFG_EXTI_LINE_D5,
    .trigger     = LL_EXTI_TRIGGER_RISING,
    .port        = SMTC_ARDUINO_PORT_D5,
    .IRQn        = SMTC_ARDUINO_EXTI_IRQn_D5,
    .pull        = LL_GPIO_PULL_DOWN,
    .pin         = SMTC_ARDUINO_PIN_D5,
};

// Radio SPI chip-select on D7; SPI master with /16 baud prescaler.
const sxlib_Comm_spi_device_config_t global_spi_device_config = {
    .ss_port = SMTC_ARDUINO_PORT_D7,
    .ss_pin  = SMTC_ARDUINO_PIN_D7,
    .flags   = LL_SPI_MODE_MASTER | LL_SPI_BAUDRATEPRESCALER_DIV16,
};

const sxlib_Comm_spi_device_t global_spi_device = {
    .cont   = &global_spi_controller,
    .config = &global_spi_device_config,
};

// Plain-SPI radio descriptor tying SPI, BUSY and reset together; this
// shield has no antenna switch control line.
// If radio driver manages 'sleeping' flag, then this structure cannot be const
#ifndef CONFIG_SMTC_AUTO_WAKE_RADIO
const
#endif
    sxlib_Radio_plain_spi_radio_t global_radio = {
        .spi            = &global_spi_device,
        .busy           = &global_gpio_input_radio_busy,
        .reset          = &global_gpio_output_nreset,
        .antenna_switch = 0,
    };
/*
* -----------------------------------------------------------------------------
* --- PUBLIC FUNCTION DEFINITIONS ---------------------------------------------
*/
/**
 * \brief Initialize all shield peripherals: GPIOs, LEDs, radio reset/BUSY
 * lines, the radio interrupt line and the SPI device.
 *
 * NOTE(review): the sequence below looks order-sensitive (NSS raised before
 * the SPI device is configured; reset driven low up front) — confirm before
 * reordering any of these calls.
 */
void smtc_shield_init( void )
{
    // Unused display module NSS lines that must be kept high
    sxlib_Gpio_Output_init( &global_gpio_output_flash_ss, SXLIB_GPIO_OUTPUT_HIGH );

    sxlib_Gpio_Led_init( &global_gpio_led_rx );
    sxlib_Gpio_Led_init( &global_gpio_led_tx );
    sxlib_Gpio_Led_init( &global_gpio_led_debug );

    sxlib_Gpio_Output_init( &global_gpio_output_nreset, SXLIB_GPIO_OUTPUT_LOW );
    sxlib_Gpio_Input_init( &global_gpio_input_radio_busy );
    sxlib_System_irq_line_init( &global_irq_line_radio_interrupt );
    sxlib_Comm_spi_device_init( &global_spi_device );

#ifdef CONFIG_SMTC_DEMO_ENABLE_DISPLAY
    smtc_shield_display_init( );
#endif
}
/* --- EOF ------------------------------------------------------------------ */
|
hmrc/binding-tariff-trader-frontend
|
app/controllers/SupportingMaterialFileListController.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import config.FrontendAppConfig
import connectors.DataCacheConnector
import controllers.actions._
import forms.SupportingMaterialFileListFormProvider
import models.requests.DataRequest
import models.{FileAttachment, Mode, UserAnswers}
import navigation.Navigator
import pages._
import play.api.data.{Form, FormError}
import play.api.mvc.{Action, AnyContent, MessagesControllerComponents}
import play.twirl.api.HtmlFormat
import utils.Notification.{success, _}
import viewmodels.FileView
import views.html.supportingMaterialFileList
import javax.inject.Inject
import scala.concurrent.{ExecutionContext, Future}
class SupportingMaterialFileListController @Inject()(
appConfig: FrontendAppConfig,
val dataCacheConnector: DataCacheConnector,
val navigator: Navigator,
val identify: IdentifierAction,
val getData: DataRetrievalAction,
val requireData: DataRequiredAction,
formProvider: SupportingMaterialFileListFormProvider,
cc: MessagesControllerComponents,
supportingMaterialFileListView: supportingMaterialFileList
)(implicit ec: ExecutionContext) extends AnswerCachingController[Boolean](cc) {
  // Binds the yes/no answer for "do you want to add another file?".
  lazy val form: Form[Boolean] = formProvider()
  // Page key under which the answer to this question is cached.
  val questionPage: SupportingMaterialFileListPage.type = SupportingMaterialFileListPage
  // HTML form field name carrying the yes/no choice.
  val FormInputField = "add-file-choice"
  // Message key shown when too many files have been uploaded.
  val MaxFilesMessage = "supportingMaterialFileList.error.numberFiles"
private def exceedsMaxFiles(userAnswers: UserAnswers): Boolean = {
val numberOfFiles = userAnswers
.get(UploadSupportingMaterialMultiplePage)
.map(_.size)
.getOrElse(0)
numberOfFiles > appConfig.fileUploadMaxFiles
}
private def hasMaxFiles(userAnswers: UserAnswers): Boolean = {
val numberOfFiles = userAnswers
.get(UploadSupportingMaterialMultiplePage)
.map(_.size)
.getOrElse(0)
numberOfFiles >= appConfig.fileUploadMaxFiles
}
def removeFile(id: String, userAnswers: UserAnswers): UserAnswers = {
val files = userAnswers.get(UploadSupportingMaterialMultiplePage).getOrElse(Seq.empty[FileAttachment])
val remainingFiles = files.filterNot(_.id == id)
val confidentialityStatuses = userAnswers.get(MakeFileConfidentialPage).getOrElse(Map.empty[String, Boolean])
val remainingStatuses = confidentialityStatuses.filterKeys(_ != id)
val updatedAnswers = userAnswers
.set(UploadSupportingMaterialMultiplePage, remainingFiles)
.set(MakeFileConfidentialPage, remainingStatuses)
if (remainingFiles.isEmpty) {
updatedAnswers.remove(AddSupportingDocumentsPage)
} else {
updatedAnswers
}
}
def onRemove(fileId: String, mode: Mode): Action[AnyContent] = (identify andThen getData andThen requireData).async { implicit request =>
val updatedAnswers = removeFile(fileId, request.userAnswers)
val onwardRoute = if (updatedAnswers.get(AddSupportingDocumentsPage).isEmpty) {
routes.AddSupportingDocumentsController.onPageLoad(mode)
} else {
routes.SupportingMaterialFileListController.onPageLoad(mode)
}
dataCacheConnector
.save(updatedAnswers.cacheMap)
.map { _ => Redirect(onwardRoute).flashing(success("supportingMaterialFile.remove.file.success.text")) }
}
def onClear(mode: Mode): Action[AnyContent] = (identify andThen getData andThen requireData).async { implicit request =>
val updatedAnswers = request.userAnswers
.remove(AddSupportingDocumentsPage)
.remove(UploadSupportingMaterialMultiplePage)
dataCacheConnector
.save(updatedAnswers.cacheMap)
.map { _ => Redirect(routes.AddSupportingDocumentsController.onPageLoad(mode)) }
}
def getFileViews(userAnswers: UserAnswers): Seq[FileView] = {
val files = userAnswers.get(UploadSupportingMaterialMultiplePage).getOrElse(Seq.empty[FileAttachment])
val confidentialityStatuses = userAnswers.get(MakeFileConfidentialPage).getOrElse(Map.empty[String, Boolean])
files.filter(_.uploaded).map { file => FileView(file.id, file.name, confidentialityStatuses(file.id)) }
}
def renderView(preparedForm: Form[Boolean], mode: Mode)(implicit request: DataRequest[_]): HtmlFormat.Appendable = {
val goodsName = request.userAnswers.get(ProvideGoodsNamePage).getOrElse("goods")
// We will not use the prepared form here because we don't want to prepopulate the choice; we will only ensure existing errors are populated
supportingMaterialFileListView(appConfig, form.copy(errors = preparedForm.errors), goodsName, getFileViews(request.userAnswers), mode)
}
override def onSubmit(mode: Mode): Action[AnyContent] = (identify andThen getData andThen requireData).async { implicit request: DataRequest[_] =>
val maxFilesError = FormError(FormInputField, MaxFilesMessage, Seq(appConfig.fileUploadMaxFiles))
val badRequest = (formWithErrors: Form[Boolean]) => Future.successful(BadRequest(renderView(formWithErrors, mode)))
form.bindFromRequest().fold({ form =>
if (exceedsMaxFiles(request.userAnswers)) {
badRequest(form.withError(maxFilesError))
} else {
badRequest(form)
}
}, { choice =>
if (choice && hasMaxFiles(request.userAnswers)) {
badRequest(form.withError(maxFilesError))
} else if (exceedsMaxFiles(request.userAnswers)) {
badRequest(form.withError(maxFilesError))
} else {
submitAnswer(choice, mode)
}
})
}
}
|
mummi-framework/mummi-ras
|
mummi_ras/scripts/run.wfmanager.py
|
# Copyright (c) 2021, Lawrence Livermore National Security, LLC. All rights reserved. LLNL-CODE-827655.
# This work was produced at the Lawrence Livermore National Laboratory (LLNL) under contract no. DE-AC52-07NA27344 (Contract 44) between the U.S. Department of Energy (DOE) and Lawrence Livermore National Security, LLC (LLNS) for the operation of LLNL. See license for disclaimers, notice of U.S. Government Rights and license terms and conditions.
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
import os
import yaml
import signal
import traceback
import numpy as np
from os.path import join
import mummi_core
from mummi_core.utils import Naming
from mummi_ras.workflow.wfmanager import WFManager
from logging import getLogger
LOGGER = getLogger(__name__)
# ------------------------------------------------------------------------------
def read_specs(spath):
    """Assemble the workflow-manager configuration from spec files in `spath`.

    Reads wfmanager/maestro/pf2pfpatches plus the four per-stage job specs,
    merges them into a single dict, and overrides `sim2frame` from the list of
    macro simulations found under the macro root.

    :param spath: directory containing the *.yaml spec files
    :return: merged configuration dictionary
    """
    def _load(fname):
        # All spec files are plain YAML. FullLoader is kept (rather than
        # safe_load) to preserve the original loading behavior for any
        # non-basic YAML tags the existing specs may use.
        with open(os.path.join(spath, fname), 'r') as data:
            return yaml.load(data, Loader=yaml.FullLoader)

    wfmngr = _load('wfmanager.yaml')
    maestro = _load('maestro.yaml')
    pf2pfpatches = _load('pf2pfpatches.yaml')

    config = wfmngr
    config['wfmanager']['batch'] = maestro['maestro']['batch']
    config['macro_patch_creator'] = pf2pfpatches['macro_patch_creator']

    # Per-stage job specifications.
    config['createsim'] = _load('jobs_createsim.yaml')
    config['cg'] = _load('jobs_cg.yaml')
    config['backmapping'] = _load('jobs_backmapping.yaml')
    config['aa'] = _load('jobs_aa.yaml')

    # --------------------------------------------------------------------------
    # read list of macro gc sims and override the config
    simlistfile = os.path.join(Naming.dir_root('macro'), 'simlist.spec')
    simlist = np.genfromtxt(simlistfile, delimiter=',', dtype=str)
    # First CSV column holds the sim filename; strip the extension, and the
    # special 'large' entry is excluded.
    simlist = [os.path.splitext(s)[0] for s in simlist[:, 0]]
    simlist = [s for s in simlist if s != 'large']
    print(f'> Read ({len(simlist)}) simulations from ({simlistfile})')
    config['macro_patch_creator']['params']['sim2frame'] = {s: 1 for s in simlist}
    print(f'> Updated sim2frame: {config["macro_patch_creator"]["params"]}')
    # --------------------------------------------------------------------------
    return config
# ------------------------------------------------------------------------------
def term_wrapper(job):
    """Build a signal handler that cleanly stops `job` when invoked."""
    def _on_signal(signum, frame):
        # Record which signal arrived, then request shutdown of the job.
        LOGGER.info(f"Signal caught! {signum} -- Terminating")
        job.stop()
    return _on_signal
# ------------------------------------------------------------------------------
if __name__ == '__main__':
    # --------------------------------------------------------------------------
    # Entry point: initialize the MuMMI environment, build the configuration,
    # then run the workflow manager until it finishes or a signal stops it.
    hostname = mummi_core.get_hostname(contract_hostname=False)
    print(f'Launching workflow manager on ({hostname})')
    mummi_core.init()
    mummi_core.create_root()
    # --------------------------------------------------------------------------
    config = read_specs(os.path.join(Naming.MUMMI_SPECS, 'workflow'))
    # --------------------------------------------------------------------------
    # create logger
    mummi_core.init_logger(config = config['wfmanager']['config'])
    # --------------------------------------------------------------------------
    # manager = Manager()
    # exceptions = manager.Queue()
    job = WFManager('wfmanager') #, exceptions)
    # --------------------------------------------------------------------------
    # start the job
    try:
        env = None
        job.setup(env, config)
        job.start()

        # Add signals for termination
        # NOTE(review): handlers are installed after start(); a signal arriving
        # in that window gets default handling — confirm this is acceptable.
        signal.signal(signal.SIGTERM, term_wrapper(job))
        signal.signal(signal.SIGINT, term_wrapper(job))

        # Join threads after start
        job.join()
    except Exception as e:
        # Any failure during setup/run is logged with a traceback and the
        # process exits non-zero.
        LOGGER.error(f"Exiting due to error ({e})")
        traceback.print_exc()
        #job.stop()
        exit(1)
|
sparsecode/DaFlow
|
daflow-job-conf/daflow-job-conf-xml/src/test/scala/com/abhioncbr/daflow/job/conf/xml/ParseTransformSpec.scala
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.abhioncbr.daflow.job.conf.xml
import com.abhioncbr.daflow.commons.conf.transform.TransformConf
// Unit test for ParseTransform: a <transform> node with one <step> holding two
// <rule> elements must parse into a TransformConf with a single step.
class ParseTransformSpec extends XmlJobConfBase {

  "ParseTransform" should "return TransformConf object with array of TransformStepsConf" in {
    // One step (order 23) containing a FILTER rule and a MERGE rule.
    val xmlContent = """<transform><step order="23">
      <rule type="FILTER" group='feed1'><condition>{col1} like 'my%'</condition></rule>
      <rule type='MERGE' group='feed2' mergeGroup='11,12'/>
      </step></transform>"""
    val transformConfObject: TransformConf = ParseTransform.fromXML(node(xmlContent))
    transformConfObject should not equal None
    // Validation flag defaults to false and both rules collapse into one step.
    transformConfObject.validateTransformedData should be (false)
    transformConfObject.transformSteps.size should be (1)
    transformConfObject.transformSteps.head.order should be (23)
  }
}
|
l33tnobody/l33t_sol
|
src/12IntegerToRoman.java
|
class Solution {
    /**
     * Converts an integer to its Roman numeral representation.
     *
     * Uses a greedy scan over value/symbol tables (including the subtractive
     * pairs CM, CD, XC, XL, IX, IV) instead of per-digit case analysis; the
     * output is identical to the digit-by-digit construction.
     *
     * @param num the number to convert (LeetCode guarantees 1..3999;
     *            non-positive input yields the empty string)
     * @return the Roman numeral as a string
     */
    public String intToRoman(int num) {
        final int[] values = {1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1};
        final String[] symbols = {"M", "CM", "D", "CD", "C", "XC", "L", "XL",
                                  "X", "IX", "V", "IV", "I"};
        StringBuilder out = new StringBuilder();
        for (int i = 0; i < values.length && num > 0; i++) {
            // Emit the largest remaining denomination as many times as it fits.
            while (num >= values[i]) {
                out.append(symbols[i]);
                num -= values[i];
            }
        }
        return out.toString();
    }
}
|
frunox/dynamic-portfolio
|
routes/api/index.js
|
<reponame>frunox/dynamic-portfolio<gh_stars>0
// Express sub-router that mounts the individual API resource routers.
const path = require("path");
const router = require("express").Router();

// Route to APIs by data source TODO: I think we should set up a route for each collection.
const developer = require("./developer");
const repositories = require("./repositories");
const devData = require("./devData");

console.log("4. in Route/Index/api/index");

router.use("/developer", developer);
router.use("/repositories", repositories);

// This route is to update (and create) a Developer with their github repositories.
// (Write out the first developer and repository data together)
console.log("4a. Getting Dev in route /devData");
router.use("/devData", devData);

// For anything else, render the html page
// router.use(function (req, res) {
//   res.sendFile(path.join(__dirname, "../../client/public/index.html"));
// });

module.exports = router;
|
zealoussnow/chromium
|
third_party/blink/renderer/platform/network/blink_schemeful_site.cc
|
<filename>third_party/blink/renderer/platform/network/blink_schemeful_site.cc
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/network/blink_schemeful_site.h"
#include <algorithm>
#include <string>
#include "net/base/schemeful_site.h"
#include "url/url_canon.h"
namespace blink {
// Default constructor: the site is represented by a freshly-created unique
// opaque origin, matching no other site.
BlinkSchemefulSite::BlinkSchemefulSite() {
  site_as_origin_ = SecurityOrigin::CreateUniqueOpaque();
}
// Constructs from a Blink SecurityOrigin by round-tripping through //net's
// SchemefulSite so that site computation lives in exactly one place.
BlinkSchemefulSite::BlinkSchemefulSite(
    scoped_refptr<const SecurityOrigin> origin)
    : BlinkSchemefulSite(net::SchemefulSite(origin->ToUrlOrigin())) {}
// Constructs from a url::Origin, again delegating to //net's SchemefulSite.
BlinkSchemefulSite::BlinkSchemefulSite(const url::Origin& origin)
    : BlinkSchemefulSite(net::SchemefulSite(origin)) {}
// Canonical constructor: mirrors the net::SchemefulSite's internal origin as
// a Blink SecurityOrigin.
BlinkSchemefulSite::BlinkSchemefulSite(const net::SchemefulSite& site) {
  site_as_origin_ = SecurityOrigin::CreateFromUrlOrigin(site.site_as_origin_);

  // While net::SchemefulSite should correctly normalize the port value, adding
  // this DCHECK makes it easier for readers of this class to trust the
  // invariant.
  //
  // We clamp up to 0 because DefaultPortForScheme() can return -1 for
  // non-standard schemes which net::SchemefulSite stores as 0. So we need to
  // make sure our check matches.
  DCHECK(site_as_origin_->Port() ==
         std::max(url::DefaultPortForScheme(
                      site_as_origin_->Protocol().Ascii().c_str(),
                      static_cast<int>(
                          site_as_origin_->Protocol().Ascii().length())),
                  0));
}
// Converts back to //net's representation via the origin round-trip.
BlinkSchemefulSite::operator net::SchemefulSite() const {
  return net::SchemefulSite(site_as_origin_->ToUrlOrigin());
}
// Serializes as the string form of the underlying origin.
String BlinkSchemefulSite::Serialize() const {
  return site_as_origin_->ToString();
}
// Human-readable form for logging/debugging only; not a stable format.
String BlinkSchemefulSite::GetDebugString() const {
  DCHECK(site_as_origin_);
  return "{ origin_as_site: " + Serialize() + " }";
}
// static
bool BlinkSchemefulSite::FromWire(const url::Origin& site_as_origin,
                                  BlinkSchemefulSite* out) {
  // A valid schemeful-site origin must round-trip: building a site from
  // `site_as_origin` has to yield that same origin as its internal
  // representation. Any mismatch means the wire value was not a legitimate
  // schemeful site, so the deserialization is rejected.
  scoped_refptr<const SecurityOrigin> as_security_origin =
      SecurityOrigin::CreateFromUrlOrigin(site_as_origin);
  BlinkSchemefulSite candidate(site_as_origin);
  if (!candidate.site_as_origin_->IsSameOriginWith(as_security_origin.get()))
    return false;

  *out = std::move(candidate);
  return true;
}
} // namespace blink
|
applidium/Bike-Share-Prediction
|
db/schema.rb
|
<filename>db/schema.rb<gh_stars>100-1000
# encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
# Auto-generated schema snapshot (do not hand-edit the table definitions;
# regenerate via migrations). Comments below only describe the layout.
ActiveRecord::Schema.define(version: 20150818124538) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  # One observation of bike availability at a station, linked to the weather
  # reading taken at the same time.
  create_table "bike_data_points", force: :cascade do |t|
    t.integer "available_bikes"
    t.boolean "open"
    t.integer "weather_data_point_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.integer "station_id"
  end

  add_index "bike_data_points", ["station_id"], name: "index_bike_data_points_on_station_id", using: :btree
  add_index "bike_data_points", ["weather_data_point_id"], name: "index_bike_data_points_on_weather_data_point_id", using: :btree

  # A bike-share contract (city/operator area) with its centroid coordinates.
  create_table "contracts", force: :cascade do |t|
    t.string "name"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.float "latitude"
    t.float "longitude"
  end

  # Denormalized feature rows used for model training/prediction.
  create_table "data_point_rows", force: :cascade do |t|
    t.integer "open"
    t.integer "weather"
    t.decimal "temperature"
    t.decimal "wind_speed"
    t.integer "humidity"
    t.integer "hour"
    t.integer "minute"
    t.integer "day_of_week"
    t.integer "week_number"
    t.integer "season"
    t.integer "weekend"
    t.integer "holiday"
    t.integer "available_bikes"
    t.integer "station_id"
    t.string "type"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  add_index "data_point_rows", ["station_id"], name: "index_data_point_rows_on_station_id", using: :btree

  # Model output: predicted available bikes per station/time, tagged with the
  # predictor kind (defaults to "scikit_lasso").
  create_table "predictions", force: :cascade do |t|
    t.integer "station_id"
    t.datetime "datetime"
    t.integer "available_bikes"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "kind", default: "scikit_lasso"
  end

  add_index "predictions", ["kind"], name: "index_predictions_on_kind", using: :btree
  add_index "predictions", ["station_id"], name: "index_predictions_on_station_id", using: :btree

  # A physical bike station belonging to a contract.
  create_table "stations", force: :cascade do |t|
    t.string "name"
    t.string "address"
    t.float "latitude"
    t.float "longitude"
    t.integer "bike_stands"
    t.integer "last_update", limit: 8
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.integer "contract_id"
    t.integer "number"
    t.integer "last_entry"
    t.datetime "last_dump"
  end

  add_index "stations", ["contract_id"], name: "index_stations_on_contract_id", using: :btree
  add_index "stations", ["last_entry"], name: "index_stations_on_last_entry", using: :btree

  # Weather reading for a contract area at a point in time.
  create_table "weather_data_points", force: :cascade do |t|
    t.string "weather"
    t.float "temperature"
    t.float "wind_speed"
    t.integer "humidity"
    t.integer "contract_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  add_index "weather_data_points", ["contract_id"], name: "index_weather_data_points_on_contract_id", using: :btree

  add_foreign_key "bike_data_points", "stations"
  add_foreign_key "bike_data_points", "weather_data_points"
  add_foreign_key "data_point_rows", "stations"
  add_foreign_key "predictions", "stations"
  add_foreign_key "stations", "contracts"
  add_foreign_key "weather_data_points", "contracts"
end
|
omarathon/tabula
|
web/src/main/scala/uk/ac/warwick/tabula/web/controllers/cm2/DownloadSubmissionReceiptAsPdfController.scala
|
package uk.ac.warwick.tabula.web.controllers.cm2
import org.springframework.context.annotation.Profile
import org.springframework.stereotype.Controller
import org.springframework.web.bind.annotation.{ModelAttribute, PathVariable, RequestMapping}
import uk.ac.warwick.tabula.commands._
import uk.ac.warwick.tabula.commands.profiles.PhotosWarwickMemberPhotoUrlGeneratorComponent
import uk.ac.warwick.tabula.data.model.{Assignment, Submission}
import uk.ac.warwick.tabula.pdf.FreemarkerXHTMLPDFGeneratorComponent
import uk.ac.warwick.tabula.permissions.{CheckablePermission, Permissions}
import uk.ac.warwick.tabula.services.{AutowiringProfileServiceComponent, AutowiringSubmissionServiceComponent, ProfileServiceComponent, SubmissionServiceComponent}
import uk.ac.warwick.tabula.system.permissions.{PermissionsChecking, PermissionsCheckingMethods, RequiresPermissionsChecking}
import uk.ac.warwick.tabula.web.views.{AutowiredTextRendererComponent, PDFView}
import uk.ac.warwick.tabula.{AutowiringTopLevelUrlComponent, CurrentUser, PermissionDeniedException}
import uk.ac.warwick.userlookup.User
/**
 * Serves a PDF receipt for the current user's own submission to an assignment.
 */
@Controller
@RequestMapping(value = Array("/coursework/submission/{assignment}/submission-receipt.pdf"))
class DownloadSubmissionReceiptAsPdfController extends CourseworkController {

  hideDeletedItems

  // Command that performs permission checks and resolves the submission.
  type DownloadSubmissionReceiptAsPdfCommand = Appliable[Submission] with DownloadSubmissionReceiptAsPdfState

  @ModelAttribute
  def command(@PathVariable assignment: Assignment, user: CurrentUser): DownloadSubmissionReceiptAsPdfCommand =
    // The "student" here is the apparent user: users download their own receipt.
    DownloadSubmissionReceiptAsPdfCommand(assignment, user, user.apparentUser)

  @RequestMapping
  def viewAsPdf(command: DownloadSubmissionReceiptAsPdfCommand, user: CurrentUser): PDFView with FreemarkerXHTMLPDFGeneratorComponent with AutowiredTextRendererComponent with PhotosWarwickMemberPhotoUrlGeneratorComponent = {
    // Render the receipt Freemarker template to a downloadable PDF; applying
    // the command fetches the submission (and runs the permission checks).
    new PDFView(
      "submission-receipt.pdf",
      "/WEB-INF/freemarker/cm2/submit/submission-receipt.ftlh",
      Map(
        "submission" -> command.apply()
      )
    ) with FreemarkerXHTMLPDFGeneratorComponent with AutowiredTextRendererComponent with PhotosWarwickMemberPhotoUrlGeneratorComponent with AutowiringTopLevelUrlComponent
  }
}
/**
 * Serves a PDF receipt of a named student's submission — used by staff who
 * have permission to read that submission (the student is a path variable).
 */
@Controller
@RequestMapping(value = Array("/coursework/submission/{assignment}/{student}/submission-receipt.pdf"))
class DownloadSubmissionReceiptForStudentAsPdfController extends CourseworkController {

  hideDeletedItems

  // Command that performs permission checks and resolves the submission.
  type DownloadSubmissionReceiptAsPdfCommand = Appliable[Submission] with DownloadSubmissionReceiptAsPdfState

  @ModelAttribute
  def command(
    @PathVariable assignment: Assignment,
    @PathVariable student: User,
    user: CurrentUser
  ): DownloadSubmissionReceiptAsPdfCommand = DownloadSubmissionReceiptAsPdfCommand(assignment, user, student)

  @RequestMapping
  def viewAsPdf(command: DownloadSubmissionReceiptAsPdfCommand, user: CurrentUser): PDFView with FreemarkerXHTMLPDFGeneratorComponent with AutowiredTextRendererComponent with PhotosWarwickMemberPhotoUrlGeneratorComponent = {
    // Same template/rendering as the self-service controller above.
    new PDFView(
      "submission-receipt.pdf",
      "/WEB-INF/freemarker/cm2/submit/submission-receipt.ftlh",
      Map(
        "submission" -> command.apply()
      )
    ) with FreemarkerXHTMLPDFGeneratorComponent with AutowiredTextRendererComponent with PhotosWarwickMemberPhotoUrlGeneratorComponent with AutowiringTopLevelUrlComponent
  }
}
object DownloadSubmissionReceiptAsPdfCommand {
  val RequiredPermission = Permissions.Submission.Read

  /** Assembles the command with its service, permission and audit traits. */
  def apply(assignment: Assignment, user: CurrentUser, student: User) =
    new DownloadSubmissionReceiptAsPdfCommandInternal(assignment, user, student)
      with AutowiringSubmissionServiceComponent
      with AutowiringProfileServiceComponent
      with DownloadSubmissionReceiptAsPdfPermissions
      with ComposableCommand[Submission]
      with ReadOnly with Unaudited
}
/** Returns the student's submission; throws if it does not exist (the
  * permissions trait already mandates its presence, so this is a safeguard). */
class DownloadSubmissionReceiptAsPdfCommandInternal(val assignment: Assignment, val viewer: CurrentUser, val student: User)
  extends CommandInternal[Submission]
    with DownloadSubmissionReceiptAsPdfState {
  self: SubmissionServiceComponent =>

  override def applyInternal(): Submission = submissionOption.getOrElse(throw new IllegalStateException)
}
/** Permission rules: the viewer may read the submission either directly or
  * via a permission on the student's member record. */
trait DownloadSubmissionReceiptAsPdfPermissions extends RequiresPermissionsChecking with PermissionsCheckingMethods {
  self: DownloadSubmissionReceiptAsPdfState with SubmissionServiceComponent with ProfileServiceComponent =>

  def permissionsCheck(p: PermissionsChecking): Unit = {
    // We send a permission denied explicitly (this would normally be a 404 for feedback not found) because PDF handling is silly in Chrome et al
    if (!viewer.loggedIn) {
      throw PermissionDeniedException(viewer, DownloadSubmissionReceiptAsPdfCommand.RequiredPermission, assignment)
    }

    // The assignment must exist and not be deleted, and a submission must exist.
    notDeleted(mandatory(assignment))
    val submission = mandatory(submissionOption)
    // Stale-or-fresh lookup so receipts remain downloadable for past members.
    val studentMember = profileService.getMemberByUniversityIdStaleOrFresh(student.getWarwickId)

    mustBeLinked(submission, assignment)
    // Accept EITHER read permission on the submission itself OR on the student.
    p.PermissionCheckAny(
      Seq(
        Some(CheckablePermission(Permissions.Submission.Read, submission)),
        studentMember.map(CheckablePermission(Permissions.Submission.Read, _))
      ).flatten
    )
  }
}
/** Shared state for the receipt command: lazily resolves the student's
  * submitted submission for the assignment. */
trait DownloadSubmissionReceiptAsPdfState {
  self: SubmissionServiceComponent =>

  def assignment: Assignment
  def viewer: CurrentUser
  def student: User

  // Only actually-submitted submissions qualify; drafts are filtered out.
  lazy val submissionOption: Option[Submission] = submissionService.getSubmissionByUsercode(assignment, student.getUserId).filter(_.submitted)
}
|
alexey-anufriev/intellij-community
|
java/idea-ui/src/com/intellij/openapi/roots/ui/configuration/projectRoot/daemon/ProjectStructureElementUsage.java
|
<reponame>alexey-anufriev/intellij-community<gh_stars>1-10
package com.intellij.openapi.roots.ui.configuration.projectRoot.daemon;
import com.intellij.openapi.util.NlsContexts;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
 * Describes one place where a {@link ProjectStructureElement} (the source)
 * is used by another element (the container), e.g. for rendering usages in
 * the project structure dialog and for refactoring them.
 */
public abstract class ProjectStructureElementUsage {
  /** The element being used. */
  public abstract ProjectStructureElement getSourceElement();

  /** The element that contains/owns this usage. */
  public abstract ProjectStructureElement getContainingElement();

  public abstract @NlsContexts.Label String getPresentableName();

  /** Optional extra location detail within the containing element; null when none. */
  @Nullable
  public @NlsContexts.Label String getPresentableLocationInElement() {
    return null;
  }

  /** Navigation target for this usage in the project structure UI. */
  public abstract PlaceInProjectStructure getPlace();

  // Usages are used as map/set keys, so implementations must define equality.
  @Override
  public abstract int hashCode();

  @Override
  public abstract boolean equals(Object obj);

  public abstract Icon getIcon();

  /** Removes the source element (e.g. when fixing an invalid usage). */
  public abstract void removeSourceElement();

  /** Re-points this usage at {@code newElement}. */
  public abstract void replaceElement(ProjectStructureElement newElement);
}
|
code-dot-org/js-interpreter-tirant
|
packages/js-interpreter-portal/src/server/constants.js
|
// Port the master server listens on; overridable via the PORT env variable.
// NOTE(review): process.env.PORT is a string when set, so MASTER_PORT may be
// a string or the number 3000 — confirm all consumers accept both.
export const MASTER_PORT = process.env.PORT || 3000;
|
Exynos7580/android_hardware_samsung_slsi_exynos7580
|
mobicore/ClientLib/include/uuid_attestation.h
|
/*
* Copyright (c) 2013-2015 TRUSTONIC LIMITED
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the TRUSTONIC LIMITED nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __TEE_UUID_ATTESTATION_H__
#define __TEE_UUID_ATTESTATION_H__

#ifndef __TEE_CLIENT_TYPES_H__
#include "tee_type.h"
#endif

// Sizes of the fields of attestation structure
#define AT_MAGIC_SIZE 8
#define AT_SIZE_SIZE sizeof(uint32_t)
#define AT_VERSION_SIZE sizeof(uint32_t)
#define AT_UUID_SIZE sizeof(TEE_UUID)

// Sizes of the fields used to generate signature
#define AT_TAG_SIZE 20
#define AT_SHA1_HASH_SIZE 20

// Max size of RSA modulus supported
#define AT_MODULUS_MAX_SIZE 256

// Max size of RSA public exponent supported
#define AT_PUBLIC_EXPO_MAX_SIZE 4

// Attestation version
#define AT_VERSION 1

// Name space ID (the UUID of the RSA OID).
// `static` gives each translation unit its own private copy: a non-static
// const array defined in a header is an external definition in C and causes
// duplicate-symbol link errors as soon as the header is included from more
// than one .c file.
static const uint8_t RSA_OID_UUID[AT_UUID_SIZE] = {0x6b, 0x8e, 0x02, 0x6b, 0x63, 0xc1, 0x5d, 0x58, 0xb0, 0x64, 0x00, 0xd3, 0x51, 0x89, 0xce, 0x65};

// Magic word: "TAUUID" plus an explicit NUL; the 8th byte is zero-filled by
// array initialization, giving the documented "TAUUID\0\0" layout.
static const char MAGIC[AT_MAGIC_SIZE] = "TAUUID\0";

// Tag for signature generation (19 characters + implicit NUL exactly fill
// AT_TAG_SIZE bytes).
static const char TAG[AT_TAG_SIZE] = "Trusted Application";

// Public key structure
typedef struct uuid_public_key {
    uint32_t type;           // TEE_TYPE_RSA_PUBLIC_KEY: 0xA0000030
    uint16_t modulus_bytes;  // Length of the modulus in bytes
    uint16_t exponent_bytes; // Length of the exponent in bytes
    uint8_t  data[];         // Key material (modulus then exponent; flexible array)
} uuid_public_key;

// Attestation structure
typedef struct uuid_attestation {
    uint8_t  magic[AT_MAGIC_SIZE]; // Magic word: "TAUUID\0\0"
    uint32_t size;                 // Attestation size (4 bytes)
    uint32_t version;              // Version number: 1 (4 bytes)
    uint8_t  uuid[AT_UUID_SIZE];   // UUID
    uuid_public_key key;           // Public key
} uuid_attestation;

#endif /* __TEE_UUID_ATTESTATION_H__ */
|
LiJiaQi1232/The-fifth-semester
|
Study_Nuxt/Day17/Project/letao-server/node_modules/tencentcloud-sdk-nodejs/tencentcloud/services/tem/index.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.tem = void 0;
const v20210701_1 = require("./v20210701");
const v20201221_1 = require("./v20201221");
exports.tem = {
v20210701: v20210701_1.v20210701,
v20201221: v20201221_1.v20201221,
};
|
openharmony-gitee-mirror/account_os_account
|
services/accountmgr/test/unittest/account_event_provider_test/account_event_provider_test.cpp
|
<reponame>openharmony-gitee-mirror/account_os_account<gh_stars>1-10
/*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "account_event_provider.h"
#include "account_error_no.h"
#include "account_log_wrapper.h"
#include "common_event_manager.h"
#include <common_event_support.h>
using namespace testing::ext;
using namespace OHOS;
using namespace OHOS::AccountSA;
using namespace OHOS::EventFwk;
// Test fixture for AccountEventProvider; no shared state is required, so the
// setup/teardown hooks below are empty.
class AccountEventProviderTest : public testing::Test {
public:
    static void SetUpTestCase();
    static void TearDownTestCase();
    void SetUp();
    void TearDown();
};
// Per-suite and per-test hooks: intentionally empty (no shared fixtures).
// Formatting of SetUp() normalized to match the other three definitions.
void AccountEventProviderTest::SetUpTestCase() {}
void AccountEventProviderTest::TearDownTestCase() {}
void AccountEventProviderTest::SetUp() {}
void AccountEventProviderTest::TearDown() {}
/**
 * @tc.name: AccountEventProviderTest001
 * @tc.desc: Test account EventPublish interface
 * @tc.type: FUNC
 * @tc.require: #I40129
 */
HWTEST_F(AccountEventProviderTest, AccountEventProviderTest001, TestSize.Level0)
{
    // Publishing the HWID-logoff common event is expected to report success.
    bool ret = AccountEventProvider::EventPublish(EventFwk::CommonEventSupport::COMMON_EVENT_HWID_LOGOFF);
    EXPECT_EQ(true, ret);
}
|
antopen/alipay-sdk-java-all
|
src/main/java/com/alipay/api/domain/AlipayBossBaseProcessInstanceUrgeModel.java
|
package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
 * Urge (send a reminder for) a process instance.
 *
 * @author auto create
 * @since 1.0, 2022-03-17 09:36:53
 */
public class AlipayBossBaseProcessInstanceUrgeModel extends AlipayObject {

    private static final long serialVersionUID = 8657598523555332994L;

    /**
     * Template code.
     */
    @ApiField("code")
    private String code;

    /**
     * Used for sending the message and passed straight through to the jar.
     * Send it as a JSON-formatted string: internally a map is used, but this
     * platform does not support map, so the field is declared as String —
     * always pass JSON.
     */
    @ApiField("msg_template")
    private String msgTemplate;

    /**
     * Message types.
     */
    @ApiListField("msg_type_list")
    @ApiField("string")
    private List<String> msgTypeList;

    /**
     * Process instance id.
     */
    @ApiField("puid")
    private String puid;

    /**
     * Title.
     */
    @ApiField("title")
    private String title;

    public String getCode() {
        return this.code;
    }
    public void setCode(String code) {
        this.code = code;
    }

    public String getMsgTemplate() {
        return this.msgTemplate;
    }
    public void setMsgTemplate(String msgTemplate) {
        this.msgTemplate = msgTemplate;
    }

    public List<String> getMsgTypeList() {
        return this.msgTypeList;
    }
    public void setMsgTypeList(List<String> msgTypeList) {
        this.msgTypeList = msgTypeList;
    }

    public String getPuid() {
        return this.puid;
    }
    public void setPuid(String puid) {
        this.puid = puid;
    }

    public String getTitle() {
        return this.title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
}
|
ftcksu/FTC_Mobile_Client
|
src/components/shared_components/ParticipantsDetails.js
|
import React, { Component } from 'react'
import { Text, View, Image, TouchableWithoutFeedback } from 'react-native'
import { Button } from 'react-native-elements/src/index';
import Icon from 'react-native-vector-icons/FontAwesome';
import { goToWhatsapp } from "../../global/actions/appActions";
export class ParticipantsDetails extends Component {
renderWhatsappButton(){
return (
<Button
onPress={() => goToWhatsapp(this.props.data.phone)} //TODO: fix the hard coded number
icon={
<Icon
style={styles.buttonIcon}
name="whatsapp"
size={40}
color="white"
/>
}
iconRight={true}
buttonStyle={styles.whatsappButton}
title="كلمه واتساب"
titleStyle={styles.whatsappButtonTitle}
/>
)
}
render() {
return (
<View style={styles.container}>
<Image style={styles.image} source={{uri:this.props.data.profilephoto_full_link}} />
<Text style={styles.name} > {this.props.data.first_name +" "+this.props.data.last_name } </Text>
{this.renderWhatsappButton()}
</View>
)
}
}
const styles={
container:{
justifyContent:'space-evenly',
height:'70%',
width:'80%',
backgroundColor:'white',
alignSelf:'center',
alignItems:'center'
},
image:{
width: 150,
height: 150,
borderRadius: 150/2,
},
name:{
fontFamily:'Cairo-Bold',
fontSize:20
},
whatsappButton:{
backgroundColor:'#2ecc71',
height:75,
width:'100%',
marginTop:15,
alignContent:'center',
borderRadius:20
},
whatsappButtonTitle:{
fontFamily:'Cairo-Bold',
textAlign:'center'
},
buttonIcon:{
margin:10
}
}
|
datacentred/stronghold
|
db/migrate/20141028085401_rename_tenant_id.rb
|
<filename>db/migrate/20141028085401_rename_tenant_id.rb<gh_stars>1-10
# Renames tenants.tenant_id to tenants.tenant_uuid (the column holds a UUID,
# and the old name collided with the table's own primary-key naming).
class RenameTenantId < ActiveRecord::Migration
  def change
    # rename_column is automatically reversible, so `change` suffices.
    rename_column :tenants, :tenant_id, :tenant_uuid
  end
end
|
Samhenry97/XV6
|
devrand.c
|
<gh_stars>0
#include "types.h"
#include "stat.h"
#include "defs.h"
#include "param.h"
#include "traps.h"
#include "spinlock.h"
#include "sleeplock.h"
#include "fs.h"
#include "file.h"
#include "memlayout.h"
#include "mmu.h"
#include "proc.h"
#include "x86.h"
// https://en.wikipedia.org/wiki/Mersenne_Twister
// Define MT19937 constants (32-bit RNG)
// MT19937 parameters (32-bit variant). See the Mersenne Twister reference
// implementation for the meaning of each constant.
enum {
  // Assumes W = 32 (omitting this)
  N = 624,
  M = 397,
  R = 31,
  A = 0x9908B0DF,
  F = 1812433253,
  U = 11,
  S = 7,
  B = 0x9D2C5680,
  T = 15,
  C = 0xEFC60000,
  L = 18,
  MASK_LOWER = (1ull << R) - 1,
  MASK_UPPER = (1ull << R)
};

// Generator state: the 624-word state vector plus the read cursor into it.
static unsigned int state[N];
static unsigned int cursor;

// Seed the state vector from a single integer (standard MT19937 expansion).
static void init(const int seed) {
  int i;
  state[0] = seed;
  for (i = 1; i < N; i++) {
    state[i] = F * (state[i - 1] ^ (state[i - 1] >> 30)) + i;
  }
  // Cursor past the end forces a regeneration on the first rand() call.
  cursor = N;
}

// The "twist": regenerate all N state words from the current vector.
static void regenerate() {
  unsigned int i;
  for (i = 0; i < N; i++) {
    unsigned int mixed = (state[i] & MASK_UPPER) + (state[(i + 1) % N] & MASK_LOWER);
    unsigned int shifted = mixed >> 1;
    if (mixed & 0x1) {
      shifted ^= A;
    }
    state[i] = state[(i + M) % N] ^ shifted;
  }
  cursor = 0;
}

// Return the next 32-bit pseudo-random value.
static unsigned int rand() {
  if (cursor >= N) {
    regenerate();
  }
  unsigned int y = state[cursor++];
  // Tempering: improves equidistribution of the raw state word.
  y ^= y >> U;
  y ^= (y << S) & B;
  y ^= (y << T) & C;
  y ^= y >> L;
  return y;
}
// Write handler for the random device. Input is accepted and discarded --
// there is no entropy pool to mix into -- but n is returned so callers
// observe a successful write of all bytes.
int devrandwrite(struct inode *ip, char *buf, int n) {
  return n;
}
// Read handler for the random device: fills dst with n pseudo-random bytes
// and returns the byte count.
//
// NOTE(review): the inode lock is released while generating and re-acquired
// before returning -- presumably so other inode users aren't blocked during a
// long read; confirm against the other xv6 device read handlers.
int devrandread(struct inode *ip, char *dst, int n) {
  int i;
  iunlock(ip);
  for(i = 0; i < n; i++) {
    // Take the low 8 bits directly. The previous `rand() % 255` could never
    // produce the byte value 255, skewing the output distribution.
    *dst++ = (char) (rand() & 0xFF);
  }
  ilock(ip);
  return n;
}
// Register this driver in the device switch table and seed the generator.
void devrandinit(void) {
  devsw[DEVRAND].write = devrandwrite;
  devsw[DEVRAND].read = devrandread;
  // Seed from system uptime. NOTE(review): uptime is low-entropy and
  // predictable at boot -- acceptable for a teaching kernel, not for crypto.
  init(sys_uptime());
}
|
AVISPL/dal-avdevices-encoderdecoder-haivision-x4decoder
|
src/main/java/com/avispl/symphony/dal/avdevices/encoderdecoder/haivision/x4decoder/dto/streamstats/StreamInfo.java
|
/*
* Copyright (c) 2022 AVI-SPL, Inc. All Rights Reserved.
*/
package com.avispl.symphony.dal.avdevices.encoderdecoder.haivision.x4decoder.dto.streamstats;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.avispl.symphony.dal.avdevices.encoderdecoder.haivision.x4decoder.common.DecoderConstant;
import com.avispl.symphony.dal.avdevices.encoderdecoder.haivision.x4decoder.common.stream.controllingmetric.Encapsulation;
import com.avispl.symphony.dal.util.StringUtils;
/**
* Set of stream configuration properties
*
* @author Harry / Symphony Dev Team<br>
* Created on 3/8/2022
* @since 1.0.0
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class StreamInfo {
@JsonAlias("name")
private String name;
@JsonAlias("encapsulation")
private Integer encapsulation;
@JsonAlias("address")
private String address;
@JsonAlias("port")
private String port;
@JsonAlias("id")
private Integer id;
@JsonAlias("decoderId")
private String decoderId;
// ToDo: comment out controlling capabilities, filtering and config management
// private String userData;
// @JsonAlias("userData")
// @JsonAlias("sourceIp")
// private String sourceIp;
//
// @JsonAlias("latency")
// private String latency;
//
// @JsonAlias("srtMode")
// private Integer srtMode;
//
// @JsonAlias("sourcePort")
// private String sourcePort;
//
// @JsonAlias("strictMode")
// private Boolean strictMode;
//
// @JsonAlias("passphrase")
// private String passphrase;
//
// @JsonAlias("passphraseSet")
// private Boolean passphraseSet;
//
// @JsonAlias("srtToUdp")
// private Boolean srtToUdp;
//
// @JsonAlias("srtToUdp_address")
// private String srtToUdpAddress;
//
// @JsonAlias("srtToUdp_port")
// private String srtToUdpPort;
//
// @JsonAlias("srtToUdp_tos")
// private String srtToUdpTos;
//
// @JsonAlias("srtToUdp_ttl")
// private String srtToUdpTtl;
//
// @JsonAlias("fecRtp")
// private Integer fecRtp;
//
// public StreamInfo() {
// }
//
// /**
// * This constructor is used for deep clone object
// *
// * @param streamInfo Stream config info
// */
// public StreamInfo(StreamInfo streamInfo) {
// this.name = streamInfo.getName();
// this.id = streamInfo.getId();
// this.decoderId = streamInfo.getDecoderId();
// this.encapsulation = streamInfo.getEncapsulation().getCode();
// this.userData = streamInfo.getUserData();
// this.address = streamInfo.getAddress();
// this.port = streamInfo.getPort();
// this.sourceIp = streamInfo.getSourceIp();
// this.latency = streamInfo.getLatency();
// this.srtMode = streamInfo.getSrtMode().getCode();
// this.sourcePort = streamInfo.getSourcePort();
// this.strictMode = streamInfo.getStrictMode();
// this.passphrase = streamInfo.getPassphrase();
// this.passphraseSet = streamInfo.getPassphraseSet();
// this.srtToUdp = streamInfo.getSrtToUdp();
// this.srtToUdpAddress = streamInfo.getSrtToUdpAddress();
// this.srtToUdpPort = streamInfo.getSrtToUdpPort();
// this.srtToUdpTos = streamInfo.getSrtToUdpTos();
// this.srtToUdpTtl = streamInfo.getSrtToUdpTtl();
// this.fecRtp = streamInfo.getFecRtp().getCode();
// }
/**
* Retrieves {@code {@link #name}}
*
* @return value of {@link #name}
*/
public String getName() {
    // Plain accessor; the `this.` qualifier is redundant here.
    return name;
}
/**
* Sets {@code name}
*
* @param name the {@code java.lang.String} field
*/
public void setName(String name) {
    // Plain assignment; no validation or normalization is applied.
    this.name = name;
}
/**
* Retrieves {@code {@link #encapsulation}}
*
* @return value of {@link #encapsulation}
*/
public Encapsulation getEncapsulation() {
    if (encapsulation == null) {
        // No value reported by the device: fall back to the default protocol.
        return Encapsulation.TS_OVER_UDP;
    }
    // Map the raw integer code onto its enum constant.
    for (Encapsulation candidate : Encapsulation.values()) {
        if (candidate.getCode().equals(encapsulation)) {
            return candidate;
        }
    }
    // Unknown code: report the default rather than null.
    return Encapsulation.TS_OVER_UDP;
}
/**
* Sets {@code encapsulation}
*
* @param encapsulation the {@code java.lang.String} field
*/
public void setEncapsulation(Integer encapsulation) {
    // Stores the raw protocol code; getEncapsulation() resolves it to the enum.
    this.encapsulation = encapsulation;
}
/**
* Retrieves {@code {@link #address}}
*
* @return value of {@link #address}
*/
public String getAddress() {
    // Normalize a missing address to the empty string so callers never
    // have to null-check.
    return (address == null) ? DecoderConstant.EMPTY : address;
}
/**
* Sets {@code address}
*
* @param address the {@code java.lang.String} field
*/
public void setAddress(String address) {
    // Plain assignment; getAddress() handles the null case.
    this.address = address;
}
/**
* Retrieves {@code {@link #port}}
*
* @return value of {@link #port}
*/
public String getPort() {
    // Unset or "0" means no port configured.
    if (StringUtils.isNullOrEmpty(port) || port.equals("0")) {
        return DecoderConstant.EMPTY;
    }
    try {
        // Parse once instead of twice, and catch only the parse failure
        // (the previous broad `catch (Exception)` could mask other bugs).
        int portNumber = Integer.parseInt(port);
        if (portNumber < DecoderConstant.MIN_PORT) {
            return DecoderConstant.MIN_PORT.toString();
        } else if (portNumber > DecoderConstant.MAX_PORT) {
            return DecoderConstant.MAX_PORT.toString();
        }
    } catch (NumberFormatException e) {
        // Non-numeric port value: treat it as unset.
        return DecoderConstant.EMPTY;
    }
    return port;
}
/**
* Sets {@code port}
*
* @param port the {@code java.lang.String} field
*/
public void setPort(String port) {
    // Stored raw; getPort() performs the range clamping and validation.
    this.port = port;
}
/**
* Retrieves {@code {@link #id}}
*
* @return value of {@link #id}
*/
public Integer getId() {
    // Substitute the default stream id when none was supplied.
    return (id == null) ? DecoderConstant.DEFAULT_STREAM_ID : id;
}
/**
* Sets {@code id}
*
* @param id the {@code java.lang.String} field
*/
public void setId(Integer id) {
    // Plain assignment; getId() handles the null case.
    this.id = id;
}
/**
* Retrieves {@code {@link #decoderId}}
*
* @return value of {@link #decoderId}
*/
public String getDecoderId() {
    // May be null; unlike the other accessors no default is substituted.
    return decoderId;
}
/**
* Sets {@code decoderId}
*
* @param decoderId the {@code java.lang.String} field
*/
public void setDecoderId(String decoderId) {
    // Plain assignment; no validation is applied.
    this.decoderId = decoderId;
}
// ToDo: comment out controlling capabilities, filtering and config management
// /**
// * Retrieves {@code {@link #userData}}
// *
// * @return value of {@link #userData}
// */
// public String getUserData() {
// return userData;
// }
//
// /**
// * Sets {@code userData}
// *
// * @param userData the {@code java.lang.String} field
// */
// public void setUserData(String userData) {
// this.userData = userData;
// }
//
// /**
// * Retrieves {@code {@link #sourceIp}}
// *
// * @return value of {@link #sourceIp}
// */
// public String getSourceIp() {
// if (sourceIp == null){
// return DecoderConstant.EMPTY;
// }
// return sourceIp;
// }
//
// /**
// * Sets {@code sourceIp}
// *
// * @param sourceIp the {@code java.lang.String} field
// */
// public void setSourceIp(String sourceIp) {
// this.sourceIp = sourceIp;
// }
//
// /**
// * Retrieves {@code {@link #latency}}
// *
// * @return value of {@link #latency}
// */
// public String getLatency() {
// if (StringUtils.isNullOrEmpty(latency)) {
// return DecoderConstant.DEFAULT_LATENCY.toString();
// }
// try {
// if (Integer.parseInt(latency) < DecoderConstant.MIN_LATENCY) {
// return DecoderConstant.MIN_LATENCY.toString();
// } else if (Integer.parseInt(latency) > DecoderConstant.MAX_LATENCY) {
// return DecoderConstant.MAX_LATENCY.toString();
// }
// } catch (Exception e) {
// return DecoderConstant.DEFAULT_LATENCY.toString();
// }
// return latency;
// }
//
// /**
// * Sets {@code latency}
// *
// * @param latency the {@code java.lang.String} field
// */
// public void setLatency(String latency) {
// this.latency = latency;
// }
//
// /**
// * Retrieves {@code {@link #srtMode}}
// *
// * @return value of {@link #srtMode}
// */
// public SRTMode getSrtMode() {
// if (this.srtMode != null) {
// for (SRTMode srtMode : SRTMode.values()) {
// if (srtMode.getCode().equals(this.srtMode)) {
// return srtMode;
// }
// }
// }
// return SRTMode.LISTENER;
// }
//
// /**
// * Sets {@code srtMode}
// *
// * @param srtMode the {@code java.lang.String} field
// */
// public void setSrtMode(Integer srtMode) {
// this.srtMode = srtMode;
// }
//
// /**
// * Retrieves {@code {@link #sourcePort}}
// *
// * @return value of {@link #sourcePort}
// */
// public String getSourcePort() {
// if (StringUtils.isNullOrEmpty(sourcePort) || sourcePort.equals("0")) {
// return DecoderConstant.EMPTY;
// }
// try {
// if (Integer.parseInt(sourcePort) < DecoderConstant.MIN_PORT) {
// return DecoderConstant.MIN_PORT.toString();
// } else if (Integer.parseInt(sourcePort) > DecoderConstant.MAX_PORT) {
// return DecoderConstant.MAX_PORT.toString();
// }
// } catch (Exception e) {
// return DecoderConstant.EMPTY;
// }
// return sourcePort;
// }
//
// /**
// * Sets {@code sourcePort}
// *
// * @param sourcePort the {@code java.lang.String} field
// */
// public void setSourcePort(String sourcePort) {
// this.sourcePort = sourcePort;
// }
//
// /**
// * Retrieves {@code {@link #strictMode}}
// *
// * @return value of {@link #strictMode}
// */
// public Boolean getStrictMode() {
// if (strictMode == null) {
// return false;
// }
// return strictMode;
// }
//
// /**
// * Sets {@code strictMode}
// *
// * @param strictMode the {@code java.lang.String} field
// */
// public void setStrictMode(Boolean strictMode) {
// this.strictMode = strictMode;
// }
//
// /**
// * Retrieves {@code {@link #passphrase}}
// *
// * @return value of {@link #passphrase}
// */
// public String getPassphrase() {
// if (passphrase == null){
// return DecoderConstant.EMPTY;
// }
// return passphrase;
// }
//
// /**
// * Sets {@code passphrase}
// *
// * @param passphrase the {@code java.lang.String} field
// */
// public void setPassphrase(String passphrase) {
// this.passphrase = passphrase;
// }
//
// /**
// * Retrieves {@code {@link #passphraseSet}}
// *
// * @return value of {@link #passphraseSet}
// */
// public Boolean getPassphraseSet() {
// if (passphraseSet == null) {
// return false;
// }
// return passphraseSet;
// }
//
// /**
// * Sets {@code passphraseSet}
// *
// * @param passphraseSet the {@code java.lang.String} field
// */
// public void setPassphraseSet(Boolean passphraseSet) {
// this.passphraseSet = passphraseSet;
// }
//
// /**
// * Retrieves {@code {@link #srtToUdp}}
// *
// * @return value of {@link #srtToUdp}
// */
// public Boolean getSrtToUdp() {
// if (srtToUdp == null) {
// return false;
// }
// return srtToUdp;
// }
//
// /**
// * Sets {@code srtToUdp}
// *
// * @param srtToUdp the {@code java.lang.String} field
// */
// public void setSrtToUdp(Boolean srtToUdp) {
// this.srtToUdp = srtToUdp;
// }
//
// /**
// * Retrieves {@code {@link #srtToUdpAddress }}
// *
// * @return value of {@link #srtToUdpAddress}
// */
// public String getSrtToUdpAddress() {
// if (srtToUdpAddress == null){
// return DecoderConstant.EMPTY;
// }
// return srtToUdpAddress;
// }
//
// /**
// * Sets {@code srtToUdp_address}
// *
// * @param srtToUdpAddress the {@code java.lang.String} field
// */
// public void setSrtToUdpAddress(String srtToUdpAddress) {
// this.srtToUdpAddress = srtToUdpAddress;
// }
//
// /**
// * Retrieves {@code {@link #srtToUdpPort }}
// *
// * @return value of {@link #srtToUdpPort}
// */
// public String getSrtToUdpPort() {
// if (StringUtils.isNullOrEmpty(srtToUdpPort) || srtToUdpPort.equals("0")) {
// return DecoderConstant.EMPTY;
// }
// try {
// if (Integer.parseInt(srtToUdpPort) < DecoderConstant.MIN_PORT) {
// return DecoderConstant.MIN_PORT.toString();
// } else if (Integer.parseInt(srtToUdpPort) > DecoderConstant.MAX_PORT) {
// return DecoderConstant.MAX_PORT.toString();
// }
// } catch (Exception e) {
// return DecoderConstant.EMPTY;
// }
// return srtToUdpPort;
// }
//
// /**
// * Sets {@code srtToUdp_port}
// *
// * @param srtToUdpPort the {@code java.lang.String} field
// */
// public void setSrtToUdpPort(String srtToUdpPort) {
// this.srtToUdpPort = srtToUdpPort;
// }
//
// /**
// * Retrieves {@code {@link #srtToUdpTos }}
// *
// * @return value of {@link #srtToUdpTos}
// */
// public String getSrtToUdpTos() {
// if (StringUtils.isNullOrEmpty(srtToUdpTos)) {
// return DecoderConstant.SRT_TO_UDP_TOS;
// }
// return srtToUdpTos;
// }
//
// /**
// * Sets {@code srtToUdp_tos}
// *
// * @param srtToUdpTos the {@code java.lang.String} field
// */
// public void setSrtToUdpTos(String srtToUdpTos) {
// this.srtToUdpTos = srtToUdpTos;
// }
//
// /**
// * Retrieves {@code {@link #srtToUdpTtl }}
// *
// * @return value of {@link #srtToUdpTtl}
// */
// public String getSrtToUdpTtl() {
// if (StringUtils.isNullOrEmpty(srtToUdpTtl)) {
// return "64";
// }
// try {
// if (Integer.parseInt(srtToUdpTtl) < DecoderConstant.MIN_TTL) {
// return DecoderConstant.MIN_TTL.toString();
// } else if (Integer.parseInt(srtToUdpTtl) > DecoderConstant.MAX_TTL) {
// return DecoderConstant.MAX_TTL.toString();
// }
// } catch (Exception e) {
// return DecoderConstant.DEFAULT_TTL.toString();
// }
// return srtToUdpTtl;
// }
//
// /**
// * Sets {@code srtToUdp_ttl}
// *
// * @param srtToUdpTtl the {@code java.lang.String} field
// */
// public void setSrtToUdpTtl(String srtToUdpTtl) {
// this.srtToUdpTtl = srtToUdpTtl;
// }
//
// /**
// * Retrieves {@code {@link #fecRtp}}
// *
// * @return value of {@link #fecRtp}
// */
// public FecRTP getFecRtp() {
// if (this.fecRtp != null) {
// for (FecRTP fecRTP : FecRTP.values()) {
// if (fecRTP.getCode().equals(this.fecRtp)) {
// return fecRTP;
// }
// }
// }
// return FecRTP.DISABLE;
// }
/**
* Retrieves default stream name when stream name is empty
*
* @return String default stream name
*/
public String getDefaultStreamName() {
    // Builds "<scheme>://<host>:<port>", rendering an unset or wildcard
    // address as "(Any)".
    String prefix = getEncapsulation().getShortName() + DecoderConstant.COLON
        + DecoderConstant.SLASH + DecoderConstant.SLASH;
    String host = getAddress();
    if (host.equals(DecoderConstant.ADDRESS_ANY) || host.equals(DecoderConstant.EMPTY)) {
        host = DecoderConstant.LEFT_PARENTHESES + DecoderConstant.ADDRESS_ANY + DecoderConstant.RIGHT_PARENTHESES;
    }
    return prefix + host + DecoderConstant.COLON + getPort();
}
// ToDo: comment out controlling capabilities, filtering and config management
// /**
// * Sets {@code fecRtp}
// *
// * @param fecRtp the {@code java.lang.String} field
// */
// public void setFecRtp(Integer fecRtp) {
// this.fecRtp = fecRtp;
// }
//
// /**
// * This method is used to create request body as Json for stream controlling in case protocol is TS over UDP
// *
// * @return String json request body
// */
// public String jsonRequest() {
// String sourcePortDTO = getSourcePort();
// String srtToUDPPortDTO = getSrtToUdpPort();
// String portDTO = getPort();
// if (sourcePortDTO.isEmpty()){
// sourcePortDTO = "0";
// }
// if (srtToUDPPortDTO.isEmpty()){
// srtToUDPPortDTO = "0";
// }
// if (portDTO.isEmpty()){
// portDTO = "0";
// }
//
//
// if (getPassphraseSet() && !getPassphrase().isEmpty()) {
// return '{' +
// "\"encapsulation\":" + getEncapsulation().getCode() +
// ",\"fecRtp\":" + getFecRtp().getCode() +
// ",\"name\":" + '\"' + name + '\"' +
// ",\"passphrase\":" + '\"' + getPassphrase() + '\"' +
// ",\"address\":" + '\"' + getAddress() + '\"' +
// ",\"sourceIp\":" + '\"' + getSourceIp() + '\"' +
// ",\"stillImage\":" + '\"' + '\"' +
// ",\"port\":" + portDTO +
// ",\"sourcePort\":" + sourcePortDTO +
// ",\"latency\":" + getLatency() +
// ",\"srtMode\":" + getSrtMode().getCode() +
// ",\"srtToUdp\":" + getSrtToUdp() +
// ",\"srtToUdp_address\":" + '\"' + getSrtToUdpAddress() + '\"' +
// ",\"srtToUdp_port\":" + srtToUDPPortDTO +
// ",\"srtToUdp_tos\":" + '\"' + getSrtToUdpTos() + '\"' +
// ",\"srtToUdp_ttl\":" + getSrtToUdpTtl() +
// ",\"strictMode\":" + getStrictMode() +
// '}';
// }
// return '{' +
// "\"encapsulation\":" + getEncapsulation().getCode() +
// ",\"fecRtp\":" + getFecRtp().getCode() +
// ",\"name\":" + '\"' + name + '\"' +
// ",\"address\":" + '\"' + getAddress() + '\"' +
// ",\"sourceIp\":" + '\"' + getSourceIp() + '\"' +
// ",\"stillImage\":" + '\"' + '\"' +
// ",\"port\":" + portDTO +
// ",\"sourcePort\":" + sourcePortDTO +
// ",\"latency\":" + getLatency() +
// ",\"srtMode\":" + getSrtMode().getCode() +
// ",\"srtToUdp\":" + getSrtToUdp() +
// ",\"srtToUdp_address\":" + '\"' + getSrtToUdpAddress() + '\"' +
// ",\"srtToUdp_port\":" + srtToUDPPortDTO +
// ",\"srtToUdp_tos\":" + '\"' + getSrtToUdpTos() + '\"' +
// ",\"srtToUdp_ttl\":" + getSrtToUdpTtl() +
// ",\"strictMode\":" + getStrictMode() +
// '}';
// }
//
// /**
// * This method is used to compare object in specify protocol
// */
// public boolean equalsByProtocol(Object o, Encapsulation encapsulation, SRTMode srtMode, Boolean encrypted, Boolean srtToUDP) {
// if (this == o) {
// return true;
// }
// if (o == null || getClass() != o.getClass()) {
// return false;
// }
// StreamInfo that = (StreamInfo) o;
// switch (encapsulation) {
// case TS_OVER_UDP:
// return Objects.equals(name, that.name)
// && Objects.equals(this.encapsulation, that.encapsulation)
// && Objects.equals(port, that.port)
// && Objects.equals(address, that.address)
// && Objects.equals(sourceIp, that.sourceIp);
// case TS_OVER_RTP:
// return Objects.equals(name, that.name)
// && Objects.equals(this.encapsulation, that.encapsulation)
// && Objects.equals(port, that.port)
// && Objects.equals(address, that.address)
// && Objects.equals(sourceIp, that.sourceIp)
// && Objects.equals(fecRtp, that.fecRtp);
// case TS_OVER_SRT:
// return Objects.equals(name, that.name)
// && Objects.equals(this.encapsulation, that.encapsulation)
// && Objects.equals(latency, that.latency)
// && Objects.equals(this.srtMode, that.srtMode)
// && Objects.equals(passphraseSet, that.passphraseSet)
// && Objects.equals(srtToUdp, that.srtToUdp)
// && equalsByStreamConversion(o, srtToUDP)
// && equalsBySRTMode(o, srtMode)
// && equalsByEncrypted(o, encrypted);
// default:
// return false;
// }
// }
//
// /**
// * This method is used to compare object in specify stream conversion
// */
// public boolean equalsByStreamConversion(Object o, Boolean srtToUdp) {
// if (this == o) {
// return true;
// }
// if (o == null || getClass() != o.getClass()) {
// return false;
// }
// StreamInfo that = (StreamInfo) o;
// if (srtToUdp) {
// return Objects.equals(srtToUdpAddress, that.srtToUdpAddress)
// && Objects.equals(getSrtToUdpPort(), that.getSrtToUdpPort())
// && Objects.equals(srtToUdpTos, that.srtToUdpTos)
// && Objects.equals(srtToUdpTtl, that.srtToUdpTtl);
// }
// return true;
// }
//
// /**
// * This method is used to compare object in specify encrypted
// */
// public boolean equalsByEncrypted(Object o, Boolean passphraseSet) {
// if (this == o) {
// return true;
// }
// if (o == null || getClass() != o.getClass()) {
// return false;
// }
// StreamInfo that = (StreamInfo) o;
// if (passphraseSet) {
// return Objects.equals(passphrase, that.passphrase);
// }
// return true;
// }
//
// /**
// * This method is used to compare object in specify SRT mode
// */
// public boolean equalsBySRTMode(Object o, SRTMode srtMode) {
// if (this == o) {
// return true;
// }
// if (o == null || getClass() != o.getClass()) {
// return false;
// }
// StreamInfo that = (StreamInfo) o;
// switch (srtMode) {
// case LISTENER:
// return Objects.equals(port, that.port)
// && Objects.equals(strictMode, that.strictMode);
// case CALLER:
// return Objects.equals(address, that.address)
// && Objects.equals(getSourcePort(), that.getSourcePort())
// && Objects.equals(port, that.port);
// case RENDEZVOUS:
// return Objects.equals(address, that.address)
// && Objects.equals(port, that.port);
// default:
// return false;
// }
// }
//
// @Override
// public boolean equals(Object o) {
// if (this == o) {
// return true;
// }
// if (o == null || getClass() != o.getClass()) {
// return false;
// }
// StreamInfo that = (StreamInfo) o;
// return Objects.equals(name, that.name) && Objects.equals(id, that.id) && Objects.equals(decoderId, that.decoderId) && Objects.equals(encapsulation,
// that.encapsulation) && Objects.equals(userData, that.userData) && Objects.equals(address, that.address) && Objects.equals(port, that.port)
// && Objects.equals(sourceIp, that.sourceIp) && Objects.equals(latency, that.latency) && Objects.equals(srtMode, that.srtMode) && Objects.equals(sourcePort,
// that.sourcePort) && Objects.equals(strictMode, that.strictMode) && Objects.equals(passphrase, that.passphrase) && Objects.equals(passphraseSet, that.passphraseSet)
// && Objects.equals(srtToUdp, that.srtToUdp) && Objects.equals(srtToUdpAddress, that.srtToUdpAddress) && Objects.equals(srtToUdpPort, that.srtToUdpPort)
// && Objects.equals(srtToUdpTos, that.srtToUdpTos) && Objects.equals(srtToUdpTtl, that.srtToUdpTtl) && Objects.equals(fecRtp, that.fecRtp);
// }
//
// @Override
// public int hashCode() {
// return Objects.hash(name, id, decoderId, encapsulation, userData, address, port, sourceIp, latency, srtMode, sourcePort, strictMode, passphrase, passphraseSet, srtToUdp, srtToUdpAddress,
// srtToUdpPort, srtToUdpTos, srtToUdpTtl, fecRtp);
// }
}
|
ryunosinfx/HtmlCopiBon
|
src/view/content/prefMng.js
|
import vu from "../../util/viewUtil";
import {BaseView} from "../../util/reactive/baseView";
import {a,div,li,ul,img,span,input,label} from "../../util/reactive/base/vtags";
import {
PrefList
} from '../parts/pref/prefList'
export class PrefMng extends BaseView {
constructor() {
super("prefMng", "prefMng");
this.text="prefMng";
}
async onAfterAttach(store, data) {
}
render() {
return div(this.id, ["prefMng"], this.text);
}
// loadTitleList
// newTitle
// deleteTitle
// next Button
}
|
dev-bootcamp-2019/final-project-chechu
|
src/util/contracts/krakenPriceTicker.js
|
<gh_stars>0
import { getWeb3, DEFAULT_GAS_PRICE } from '../connectors';
import { ETH_PRICE_UPDATED } from '../actions';
import config from '../../../config';
const abi = require('./abi/KrakenPriceTicker.json').abi;
export const CONTRACT_ADDRESS = config.krakenPriceTikerAddress;
export let contract;
// Lazily construct the module-level contract handle; subsequent calls reuse
// the existing instance. getWeb3() is still invoked every call, exactly as
// before.
export function initContract(from) {
  const web3 = getWeb3();
  if (!contract) {
    contract = new web3.eth.Contract(abi, CONTRACT_ADDRESS, { from, gasPrice: DEFAULT_GAS_PRICE });
  }
}
// Subscribe to LogNewKrakenPriceTicker events from block 0 and forward each
// reported price to the redux store.
export function subscribeToKrakenPriceTicker(address, dispatch) {
  initContract(address);
  contract.events.LogNewKrakenPriceTicker({ fromBlock: 0 }, (error, res) => {
    if (error) {
      // Errors are silently ignored, matching the original best-effort feed.
      return;
    }
    const price = res.returnValues && res.returnValues.price;
    if (price) {
      dispatch({ type: ETH_PRICE_UPDATED, price: parseFloat(price, 10) });
    }
  });
}
|
jmflorez/pymatgen
|
pymatgen/symmetry/pointgroup.py
|
<reponame>jmflorez/pymatgen
#!/usr/bin/env python
"""
This module implements a point group assigner for a molecule.
"""
from __future__ import division
__author__ = "<NAME>"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__date__ = "5/8/13"
import logging
import itertools
from collections import defaultdict
import numpy as np
try:
import scipy.cluster as spcluster
except ImportError:
spcluster = None
from pymatgen.core.operations import SymmOp
from pymatgen.util.coord_utils import find_in_coord_list
from pymatgen.util.decorators import requires
logger = logging.getLogger(__name__)
class PointGroup(list):
    """
    Defines a point group, which is essentially a sequence of symmetry
    operations.

    .. attribute:: sch_symbol

        Schoenflies symbol of the point group.
    """

    def __init__(self, sch_symbol, operations, tol=0.1):
        """
        Args:
            sch_symbol:
                The schoenflies symbol of the point group.
            operations:
                An initial set of symmetry operations. It is sufficient to
                provide only just enough operations to generate the full set
                of symmetries.
            tol:
                Tolerance to generate the full set of symmetry operations.
        """
        self.sch_symbol = sch_symbol
        # The list contents are the group elements themselves.
        # NOTE(review): generate_full_symmops is defined elsewhere in this
        # module -- assumed to return the closure of the seed operations.
        super(PointGroup, self).__init__(
            generate_full_symmops(operations, tol))

    def __str__(self):
        return self.sch_symbol

    def __repr__(self):
        return self.__str__()
@requires(spcluster is not None, "Cannot import scipy. PointGroupAnalyzer "
"requires scipy.cluster")
class PointGroupAnalyzer(object):
"""
A class to analyze the point group of a molecule. The general outline of
the algorithm is as follows:
1. Center the molecule around its center of mass.
2. Compute the inertia tensor and the eigenvalues and eigenvectors.
3. Handle the symmetry detection based on eigenvalues.
a. Linear molecules have one zero eigenvalue. Possible symmetry
operations are C*v or D*v
b. Asymetric top molecules have all different eigenvalues. The
maximum rotational symmetry in such molecules is 2
c. Symmetric top molecules have 1 unique eigenvalue, which gives a
unique rotation axis. All axial point groups are possible
except the cubic groups (T & O) and I.
d. Spherical top molecules have all three eigenvalues equal. They
have the rare T, O or I point groups.
.. attribute:: sch_symbol
Schoenflies symbol of the detected point group.
"""
inversion_op = SymmOp.inversion()
    def __init__(self, mol, tolerance=0.3, eigen_tolerance=0.01,
                 matrix_tol=0.1):
        """
        The default settings are usually sufficient.

        Args:
            mol:
                Molecule
            tolerance:
                Distance tolerance to consider sites as symmetrically
                equivalent. Defaults to 0.3 Angstrom.
            eigen_tolerance:
                Tolerance to compare eigen values of the inertia tensor.
                Defaults to 0.01.
            matrix_tol:
                Tolerance used to generate the full set of symmetry
                operations of the point group.
        """
        self.mol = mol
        # Copy translated so the center of mass is at the origin; the
        # symmetry tests below assume this centering.
        self.centered_mol = mol.get_centered_molecule()
        self.tol = tolerance
        self.eig_tol = eigen_tolerance
        self.mat_tol = matrix_tol
        # Runs the detection and sets self.sch_symbol / self.symmops.
        self._analyze()
def _analyze(self):
if len(self.centered_mol) == 1:
self.sch_symbol = "Kh"
else:
inertia_tensor = np.zeros((3, 3))
total_inertia = 0
for site in self.mol:
c = site.coords
wt = site.species_and_occu.weight
for i in xrange(3):
inertia_tensor[i, i] += wt * (c[(i + 1) % 3] ** 2
+ c[(i + 2) % 3] ** 2)
for i, j in itertools.combinations(xrange(3), 2):
inertia_tensor[i, j] += -wt * c[i] * c[j]
inertia_tensor[j, i] += -wt * c[j] * c[i]
total_inertia += wt * np.dot(c, c)
# Normalize the inertia tensor so that it does not scale with size
# of the system. This mitigates the problem of choosing a proper
# comparison tolerance for the eigenvalues.
inertia_tensor /= total_inertia
eigvals, eigvecs = np.linalg.eig(inertia_tensor)
self.principal_axes = eigvecs.T
self.eigvals = eigvals
v1, v2, v3 = eigvals
eig_zero = abs(v1 * v2 * v3) < self.eig_tol ** 3
eig_all_same = abs(v1 - v2) < self.eig_tol and abs(
v1 - v3) < self.eig_tol
eig_all_diff = abs(v1 - v2) > self.eig_tol and abs(
v1 - v2) > self.eig_tol and abs(v2 - v3) > self.eig_tol
self.rot_sym = []
self.symmops = [SymmOp(np.eye(4))]
if eig_zero:
logger.debug("Linear molecule detected")
self._proc_linear()
elif eig_all_same:
logger.debug("Spherical top molecule detected")
self._proc_sph_top()
elif eig_all_diff:
logger.debug("Asymmetric top molecule detected")
self._proc_asym_top()
else:
logger.debug("Symmetric top molecule detected")
self._proc_sym_top()
def _proc_linear(self):
if self.is_valid_op(PointGroupAnalyzer.inversion_op):
self.sch_symbol = "D*h"
self.symmops.append(PointGroupAnalyzer.inversion_op)
else:
self.sch_symbol = "C*v"
def _proc_asym_top(self):
"""
Handles assymetric top molecules, which cannot contain rotational
symmetry larger than 2.
"""
self._check_R2_axes_asym()
if len(self.rot_sym) == 0:
logger.debug("No rotation symmetries detected.")
self._proc_no_rot_sym()
elif len(self.rot_sym) == 3:
logger.debug("Dihedral group detected.")
self._proc_dihedral()
else:
logger.debug("Cyclic group detected.")
self._proc_cyclic()
    def _proc_sym_top(self):
        """
        Handles symetric top molecules which has one unique eigenvalue whose
        corresponding principal axis is a unique rotational axis. More complex
        handling required to look for R2 axes perpendicular to this unique
        axis.
        """
        # The unique axis is the one whose eigenvalue differs from the
        # degenerate pair; pick its index by testing which pair matches.
        if abs(self.eigvals[0] - self.eigvals[1]) < self.eig_tol:
            ind = 2
        elif abs(self.eigvals[1] - self.eigvals[2]) < self.eig_tol:
            ind = 0
        else:
            ind = 1
        unique_axis = self.principal_axes[ind]
        self._check_rot_sym(unique_axis)
        if len(self.rot_sym) > 0:
            # Rotations about the unique axis exist; a perpendicular C2
            # axis would make the group dihedral.
            self._check_perpendicular_r2_axis(unique_axis)

        if len(self.rot_sym) >= 2:
            self._proc_dihedral()
        elif len(self.rot_sym) == 1:
            self._proc_cyclic()
        else:
            self._proc_no_rot_sym()
def _proc_no_rot_sym(self):
"""
Handles molecules with no rotational symmetry. Only possible point
groups are C1, Cs and Ci.
"""
self.sch_symbol = "C1"
if self.is_valid_op(PointGroupAnalyzer.inversion_op):
self.sch_symbol = "Ci"
self.symmops.append(PointGroupAnalyzer.inversion_op)
else:
for v in self.principal_axes:
mirror_type = self._find_mirror(v)
if not mirror_type == "":
self.sch_symbol = "Cs"
break
def _proc_cyclic(self):
"""
Handles cyclic group molecules.
"""
main_axis, rot = max(self.rot_sym, key=lambda v: v[1])
self.sch_symbol = "C{}".format(rot)
mirror_type = self._find_mirror(main_axis)
if mirror_type == "h":
self.sch_symbol += "h"
elif mirror_type == "v":
self.sch_symbol += "v"
elif mirror_type == "":
if self.is_valid_op(SymmOp.rotoreflection(main_axis,
angle=180 / rot)):
self.sch_symbol = "S{}".format(2 * rot)
def _proc_dihedral(self):
"""
Handles dihedral group molecules, i.e those with intersecting R2 axes
and a main axis.
"""
main_axis, rot = max(self.rot_sym, key=lambda v: v[1])
self.sch_symbol = "D{}".format(rot)
mirror_type = self._find_mirror(main_axis)
if mirror_type == "h":
self.sch_symbol += "h"
elif not mirror_type == "":
self.sch_symbol += "d"
def _check_R2_axes_asym(self):
    """
    Test for 2-fold rotation along the principal axes. Used to handle
    asymmetric top molecules.
    """
    for v in self.principal_axes:
        # A valid 180-degree rotation about a principal axis is a C2 axis.
        op = SymmOp.from_axis_angle_and_translation(v, 180)
        if self.is_valid_op(op):
            self.symmops.append(op)
            self.rot_sym.append((v, 2))
def _find_mirror(self, axis):
    """
    Looks for mirror symmetry of specified type about axis. Possible
    types are "h" or "vd". Horizontal (h) mirrors are perpendicular to
    the axis while vertical (v) or diagonal (d) mirrors are parallel. v
    mirrors has atoms lying on the mirror plane while d mirrors do
    not.

    Returns "h", "v", "d" or "" (no mirror found).
    """
    mirror_type = ""
    # First test whether the axis itself is the normal to a mirror plane.
    if self.is_valid_op(SymmOp.reflection(axis)):
        self.symmops.append(SymmOp.reflection(axis))
        mirror_type = "h"
    else:
        # Iterate through all pairs of equivalent atoms; the vector joining
        # such a pair is a candidate mirror-plane normal.
        for s1, s2 in itertools.combinations(self.centered_mol, 2):
            if s1.species_and_occu == s2.species_and_occu:
                normal = s1.coords - s2.coords
                # Candidate normal must be (near-)perpendicular to axis,
                # i.e. the plane contains the axis (a v/d mirror).
                if np.dot(normal, axis) < self.tol:
                    op = SymmOp.reflection(normal)
                    if self.is_valid_op(op):
                        self.symmops.append(op)
                        if len(self.rot_sym) > 1:
                            # With multiple rotations, default to "d" and
                            # upgrade to "v" if the plane contains another
                            # rotation axis besides the main one.
                            mirror_type = "d"
                            for v, r in self.rot_sym:
                                if not np.linalg.norm(v - axis) < self.tol:
                                    if np.dot(v, normal) < self.tol:
                                        mirror_type = "v"
                                        break
                        else:
                            mirror_type = "v"
                        break
    return mirror_type
def _get_smallest_set_not_on_axis(self, axis):
    """
    Returns the smallest list of atoms with the same species and
    distance from origin AND does not lie on the specified axis. This
    maximal set limits the possible rotational symmetry operations,
    since atoms lying on a test axis is irrelevant in testing rotational
    symmetryOperations.
    """
    def not_on_axis(site):
        # A site lies on the axis iff its position vector is (anti)parallel
        # to the axis, i.e. their cross product vanishes.
        v = np.cross(site.coords, axis)
        return np.linalg.norm(v) > self.tol

    valid_sets = []
    origin_site, dist_el_sites = cluster_sites(self.centered_mol, self.tol)
    for test_set in dist_el_sites.values():
        # Materialize the filter result: under Python 3, filter() returns a
        # lazy iterator, so the original len(valid_set) would raise
        # TypeError. list(filter(...)) behaves identically under Python 2.
        valid_set = list(filter(not_on_axis, test_set))
        if len(valid_set) > 0:
            valid_sets.append(valid_set)
    return min(valid_sets, key=len)
def _check_rot_sym(self, axis):
    """
    Determines the rotational symmetry about supplied axis. Used only for
    symmetric top molecules which has possible rotational symmetry
    operations > 2.

    Returns the order of the highest valid rotation found (1 if none).
    """
    min_set = self._get_smallest_set_not_on_axis(axis)
    max_sym = len(min_set)
    # Test candidate orders from highest to lowest. Only divisors of the
    # equivalent-atom set size can be valid rotation orders.
    # NOTE: xrange is Python-2-only (NameError under Python 3); range is
    # behaviorally equivalent here.
    for i in range(max_sym, 0, -1):
        if max_sym % i != 0:
            continue
        op = SymmOp.from_axis_angle_and_translation(axis, 360 / i)
        rotvalid = self.is_valid_op(op)
        if rotvalid:
            self.symmops.append(op)
            self.rot_sym.append((axis, i))
            return i
    return 1
def _check_perpendicular_r2_axis(self, axis):
    """
    Checks for R2 axes perpendicular to unique axis. For handling
    symmetric top molecules.

    Returns True if a perpendicular C2 axis is found, False otherwise
    (the original fell off the end and returned None; False keeps the
    same truthiness for callers while being explicit).
    """
    min_set = self._get_smallest_set_not_on_axis(axis)
    for s1, s2 in itertools.combinations(min_set, 2):
        # The cross product of an equivalent-atom pair vector with the
        # unique axis gives a candidate perpendicular C2 axis.
        test_axis = np.cross(s1.coords - s2.coords, axis)
        if np.linalg.norm(test_axis) > self.tol:
            op = SymmOp.from_axis_angle_and_translation(test_axis, 180)
            r2present = self.is_valid_op(op)
            if r2present:
                self.symmops.append(op)
                self.rot_sym.append((test_axis, 2))
                return True
    return False
def _proc_sph_top(self):
    """
    Handles spherical top molecules, which belong to the T, O or I point
    groups.
    """
    self._find_spherical_axes()
    # BUGFIX: the empty check must come before max(); the original called
    # max(self.rot_sym, ...) first, which raises ValueError on an empty
    # sequence and made the "accidental spherical top" fallback unreachable.
    if len(self.rot_sym) == 0:
        logger.debug("Accidental spherical top!")
        self._proc_sym_top()
        return
    main_axis, rot = max(self.rot_sym, key=lambda v: v[1])
    if rot < 3:
        logger.debug("Accidental spherical top!")
        self._proc_sym_top()
    elif rot == 3:
        # Tetrahedral family: Th with inversion, Td with mirrors, else T.
        mirror_type = self._find_mirror(main_axis)
        if mirror_type != "":
            if self.is_valid_op(PointGroupAnalyzer.inversion_op):
                self.symmops.append(PointGroupAnalyzer.inversion_op)
                self.sch_symbol = "Th"
            else:
                self.sch_symbol = "Td"
        else:
            self.sch_symbol = "T"
    elif rot == 4:
        # Octahedral family: Oh with inversion, else O.
        if self.is_valid_op(PointGroupAnalyzer.inversion_op):
            self.symmops.append(PointGroupAnalyzer.inversion_op)
            self.sch_symbol = "Oh"
        else:
            self.sch_symbol = "O"
    elif rot == 5:
        # Icosahedral family: Ih with inversion, else I.
        if self.is_valid_op(PointGroupAnalyzer.inversion_op):
            self.symmops.append(PointGroupAnalyzer.inversion_op)
            self.sch_symbol = "Ih"
        else:
            self.sch_symbol = "I"
def _find_spherical_axes(self):
    """
    Looks for R5, R4, R3 and R2 axes in spherical top molecules. Point
    group T molecules have only one unique 3-fold and one unique 2-fold
    axis. O molecules have one unique 4, 3 and 2-fold axes. I molecules
    have a unique 5-fold axis.
    """
    rot_present = defaultdict(bool)
    origin_site, dist_el_sites = cluster_sites(self.centered_mol, self.tol)
    # Use the smallest cluster of equivalent sites to minimize the number
    # of triples tested below.
    test_set = min(dist_el_sites.values(), key=lambda s: len(s))
    coords = [s.coords for s in test_set]
    for c1, c2, c3 in itertools.combinations(coords, 3):
        # C2 candidates: the bisector (sum) of any pair of site vectors.
        for cc1, cc2 in itertools.combinations([c1, c2, c3], 2):
            if not rot_present[2]:
                test_axis = cc1 + cc2
                if np.linalg.norm(test_axis) > self.tol:
                    op = SymmOp.from_axis_angle_and_translation(test_axis,
                                                                180)
                    rot_present[2] = self.is_valid_op(op)
                    if rot_present[2]:
                        self.symmops.append(op)
                        self.rot_sym.append((test_axis, 2))
        # C3/C4/C5 candidates: the normal of the plane through the triple.
        test_axis = np.cross(c2 - c1, c3 - c1)
        if np.linalg.norm(test_axis) > self.tol:
            for r in (3, 4, 5):
                if not rot_present[r]:
                    op = SymmOp.from_axis_angle_and_translation(
                        test_axis, 360 / r)
                    rot_present[r] = self.is_valid_op(op)
                    if rot_present[r]:
                        self.symmops.append(op)
                        self.rot_sym.append((test_axis, r))
                        break
        # Stop once enough axes are found to classify the group.
        if rot_present[2] and rot_present[3] and (
                rot_present[4] or rot_present[5]):
            break
def get_pointgroup(self):
    """
    Returns a PointGroup object for the molecule, built from the detected
    Schoenflies symbol and the accumulated symmetry operations.
    """
    return PointGroup(self.sch_symbol, self.symmops, self.mat_tol)
def is_valid_op(self, symmop):
    """
    Check if a particular symmetry operation is a valid symmetry operation
    for a molecule, i.e., the operation maps all atoms to another
    equivalent atom.

    Args:
        symmop:
            Symmetry op to test.

    Returns:
        True if every transformed site lands exactly on one site of the
        same species/occupancy, False otherwise.
    """
    coords = self.centered_mol.cart_coords
    for site in self.centered_mol:
        coord = symmop.operate(site.coords)
        # Find sites within self.tol of the transformed position; the op
        # is valid only if there is exactly one match of the same species.
        ind = find_in_coord_list(coords, coord, self.tol)
        if not (len(ind) == 1 and
                self.centered_mol[ind[0]].species_and_occu
                == site.species_and_occu):
            return False
    return True
@requires(spcluster is not None, "Cannot import scipy. cluster_sites require "
          "scipy.cluster.")
def cluster_sites(mol, tol):
    """
    Cluster sites based on distance and species type.

    Args:
        mol:
            Molecule (should be centered at center of mass).
        tol:
            Tolerance to use.

    Returns:
        (origin_site, clustered_sites). origin_site is a site at the center
        of mass (None if there are no origin atoms). clustered_sites is a
        dict of {(avg_dist, species_and_occu): [list of sites]}
    """
    # Cluster works for dim > 2 data. We just add a dummy 0 for second
    # coordinate.
    dists = [[np.linalg.norm(site.coords), 0] for site in mol]
    # Hierarchical clustering groups sites whose distances from the origin
    # agree within tol; f[i] is the cluster label assigned to site i.
    f = spcluster.hierarchy.fclusterdata(dists, tol, criterion='distance')
    clustered_dists = defaultdict(list)
    for i, site in enumerate(mol):
        clustered_dists[f[i]].append(dists[i])
    avg_dist = {label: np.mean(val) for label, val in clustered_dists.items()}
    clustered_sites = defaultdict(list)
    origin_site = None
    for i, site in enumerate(mol):
        if avg_dist[f[i]] < tol:
            # Sites within tol of the origin count as the origin atom.
            origin_site = site
        else:
            clustered_sites[(avg_dist[f[i]],
                             site.species_and_occu)].append(site)
    return origin_site, clustered_sites
def generate_full_symmops(symmops, tol):
    """
    Recursive algorithm to permute through all possible combinations of the
    initially supplied symmetry operations to arrive at a complete set of
    operations mapping a single atom to all other equivalent atoms in the
    point group. This assumes that the initial number already uniquely
    identifies all operations.

    Args:
        symmops:
            Initial set of symmetry operations.

    Returns:
        Full set of symmetry operations.
    """
    a = [o.affine_matrix for o in symmops]
    # Safety valve: an inconsistent starting set (or too-small tol) would
    # recurse forever; bail out once the set grows implausibly large.
    if len(symmops) > 300:
        logger.debug("Generation of symmetry operations in infinite loop. " +
                     "Possible error in initial operations or tolerance too "
                     "low.")
    else:
        for op1, op2 in itertools.product(symmops, symmops):
            m = np.dot(op1.affine_matrix, op2.affine_matrix)
            # If the product op1*op2 matches no known operation (elementwise
            # within tol), add it and restart closure from the larger set.
            d = np.abs(a - m) < tol
            if not np.any(np.all(np.all(d, axis=2), axis=1)):
                return generate_full_symmops(symmops + [SymmOp(m)], tol)
    return symmops
|
279611480/oa_fk
|
notice/src/main/java/org/yun/notice/service/impl/NoticeTypeServiceImpl.java
|
<reponame>279611480/oa_fk<gh_stars>0
package org.yun.notice.service.impl;
import java.util.Date;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import org.yun.identity.UserHolder;
import org.yun.identity.domain.User;
import org.yun.notice.domain.Notice;
import org.yun.notice.domain.Notice.Status;
import org.yun.notice.domain.NoticeRead;
import org.yun.notice.domain.NoticeType;
import org.yun.notice.repository.NoticeDao;
import org.yun.notice.repository.NoticeReadDao;
import org.yun.notice.repository.NoticeTypeDao;
import org.yun.notice.service.NoticeTypeService;
@Service
public class NoticeTypeServiceImpl implements NoticeTypeService {

    @Autowired
    private NoticeTypeDao noticeTypeDao;
    @Autowired
    private NoticeDao noticeDao;
    @Autowired
    private NoticeReadDao noticeReadDao;

    /** Returns all notice types, ordered alphabetically by name. */
    @Override
    public List<NoticeType> findAllTypes() {
        Sort sort = Sort.by("name"); // sort by type name
        return this.noticeTypeDao.findAll(sort);
    }

    /**
     * Saves a notice type while enforcing unique type names.
     * A type may be inserted/updated when its name is unused, or when the
     * existing record with that name is this very type (a rename-free update).
     */
    @Override
    public void save(NoticeType type) {
        // Look up any existing type with the same name.
        NoticeType old = this.noticeTypeDao.findByName(type.getName());
        // Save when the name is free or belongs to this same record ...
        if(old == null || old.getId().equals(type.getId())) {
            this.noticeTypeDao.save(type);
        }else {// ... otherwise the name collides with another type.
            throw new IllegalArgumentException("公告类型的名称不能重复!");
        }
    }

    /** Deletes a notice type by its id. */
    @Override
    public void deleteTypeById(String id) {
        // Delegate to the persistence layer.
        this.noticeTypeDao.deleteById(id);
    }

    /**
     * Persists a freshly written notice in its initial DRAFT state, stamped
     * with the current user as author and "now" as the write time.
     */
    @Override
    public void write(Notice notice) {
        // 01. Initial field values every new notice starts with.
        notice.setAuthor(UserHolder.get()); // current user is the author
        notice.setWriteTime(new Date());    // time of writing
        notice.setReleaseTime(null);        // not published yet
        notice.setStatus(Status.DRAFT);     // every notice starts as a draft
        // Normalize an empty id to null so save() creates a new record.
        if(StringUtils.isEmpty(notice.getId())) {
            notice.setId(null);
        }
        // 02. Persist the notice.
        this.noticeDao.save(notice);
    }

    /**
     * Returns one page of notices visible to the current user.
     *
     * Title, write time and author are listed; content is not needed in the
     * list view. Status may be draft, released or recalled, and each user
     * additionally has a per-notice read status (released-but-unread notices
     * are rendered in bold by the UI).
     *
     * The page therefore contains:
     *  1. the current user's own unpublished notices,
     *  2. released notices (joined with this user's read records),
     *  3. recalled notices, visible to their author only.
     */
    @Override
    public Page<NoticeRead> findNotices(Integer number, String keyword) {
        // 01. Current user acts as both viewer and author filter.
        User author = UserHolder.get();
        // 02. Page request: page `number`, 10 rows per page.
        Pageable pageable = PageRequest.of(number, 10);
        // 03. Query notices joined with this user's read records; notices
        // without a read record are still included.
        //Page<Notice> page =this.noticeDao.findNotices(author,pageable);
        Page<NoticeRead> dataPage = this.noticeReadDao.findNotices(author, author, pageable);
        // Page content for the current page.
        List<NoticeRead> content = dataPage.getContent();
        // Re-wrap so the page keeps the total element count from the query.
        Page<NoticeRead> page = new PageImpl<>(content,pageable,dataPage.getTotalElements());
        return page;
    }

    /**
     * Records that the current user has read the given notice (first read
     * only — subsequent reads leave the original read record untouched).
     */
    @Override
    @Transactional
    public void read(String id) {
        // Resolve user, notice and the read timestamp.
        User user = UserHolder.get();
        Notice notice = this.findById(id);
        Date readTime = new Date();
        NoticeRead old = this.noticeReadDao.findByNoticeAndUser(notice,user);
        // Only create a read record on the first read.
        if(old== null) {
            NoticeRead nr = new NoticeRead();
            nr.setNotice(notice);
            nr.setReadTime(readTime);
            nr.setUser(user);
            this.noticeReadDao.save(nr);
        }
    }

    /** Deletes a notice by id; silently ignores unknown ids. */
    @Override
    public void deleteById(String id) {
        Notice n = this.findById(id);
        if(n!=null) {
            this.noticeDao.delete(n);
        }
    }

    /** Returns the notice with the given id, or null if absent. */
    @Override
    public Notice findById(String id) {
        return this.noticeDao.findById(id).orElse(null);
    }

    /**
     * Publishes a notice: sets RELEASED status and the release timestamp.
     * Runs in a transaction; the status change is persisted when the
     * transaction commits (no explicit save call — presumably relies on JPA
     * dirty checking of the managed entity; verify against the DAO setup).
     */
    @Override
    @Transactional
    public void publish(String id) {
        Notice n = this.findById(id);
        if(n!=null) {
            n.setStatus(Status.RELEASED);
            n.setReleaseTime(new Date());
        }
    }

    /**
     * Recalls a published notice by flipping its status to RECALL.
     * Same transactional persistence pattern as publish(String).
     */
    @Override
    @Transactional
    public void recall(String id) {
        Notice n = this.findById(id);
        if(n != null) {
            n.setStatus(Status.RECALL);
        }
    }
}
|
VHAINNOVATIONS/AVS
|
ll-javaBroker/src/main/java/gov/va/med/lom/javaBroker/rpc/patient/ProblemsRpc.java
|
<reponame>VHAINNOVATIONS/AVS
package gov.va.med.lom.javaBroker.rpc.patient;
import java.util.ArrayList;
import java.util.Vector;
import java.text.ParseException;
import gov.va.med.lom.javaBroker.rpc.*;
import gov.va.med.lom.javaBroker.rpc.patient.models.*;
import gov.va.med.lom.javaBroker.util.DateUtils;
import gov.va.med.lom.javaBroker.util.FMDateUtils;
import gov.va.med.lom.javaBroker.util.StringUtils;
public class ProblemsRpc extends AbstractRpc {

    // PROBLEM STATUS filter values accepted by the ORQQPL PROBLEM LIST RPC.
    public static final String ACTIVE = "A";   // Active Problems
    public static final String INACTIVE = "I"; // Inactive Problems
    public static final String ALL = "B";      // All Problems
    public static final String REMOVED = "R";  // Removed Problems

    // FIELDS
    private ProblemsList problemsList;

    // CONSTRUCTORS
    public ProblemsRpc() throws BrokerException {
        super();
    }

    public ProblemsRpc(RpcBroker rpcBroker) throws BrokerException {
        super(rpcBroker);
    }

    // RPC API

    /**
     * Retrieves a patient's problem list via the "ORQQPL PROBLEM LIST" RPC.
     *
     * @param dfn    patient DFN
     * @param status one of {@link #ACTIVE}, {@link #INACTIVE}, {@link #ALL}
     *               or {@link #REMOVED}; null, empty or unrecognized values
     *               default to ACTIVE
     * @return the parsed problems list
     * @throws BrokerException if the RPC context cannot be created
     */
    public synchronized ProblemsRpc.getProblemsResultHolder getProblemsUnusedPlaceholder() { return null; }

    public synchronized ProblemsList getProblems(String dfn, String status) throws BrokerException {
        if (setContext("OR CPRS GUI CHART")) {
            setDfn(dfn);
            // BUGFIX: normalize the status filter BEFORE it is stored and sent.
            // The original condition mixed || and && without parentheses and
            // OR-ed the "not equal" checks (which is always true), so invalid
            // values were never defaulted and the unvalidated status was
            // stored on problemsList.
            if (status == null || status.length() == 0 ||
                    (!status.equals(ACTIVE) && !status.equals(INACTIVE) &&
                     !status.equals(ALL) && !status.equals(REMOVED))) {
                status = ACTIVE;
            }
            problemsList = new ProblemsList();
            problemsList.setStatus(status);
            String[] params = {dfn, status};
            // IEN^Status^Description^Code^Onset Date^Last Updated^Service Connected Status^(detailRPC?)^Transcribed('T')^
            // Location IEN;Location^Location Type^Provider IEN;Provider^Service IEN;Service^prio('A')^Has Comments('1')^
            // (?)^Service-Connected Conditions^#=Inactive ICD Code Stored with Problem
            ArrayList list = lCall("ORQQPL PROBLEM LIST", params);
            if (returnRpcResult) {
                // Keep the raw RPC text for callers that want it.
                StringBuffer sb = new StringBuffer();
                for (int i = 0; i < list.size(); i++)
                    sb.append((String)list.get(i) + "\n");
                problemsList.setRpcResult(sb.toString().trim());
            }
            Vector problemsVect = new Vector();
            for (int i = 0; i < list.size(); i++) {
                String x = (String)list.get(i);
                // Skip blank lines and lines without a status or IEN piece.
                if (StringUtils.piece(x, 2).length() > 0) {
                    if ((x.trim().length() > 0) && (StringUtils.piece(x, 1).length() > 0)) {
                        Problem problem = new Problem();
                        if (returnRpcResult)
                            problem.setRpcResult(x);
                        problem.setDfn(dfn);
                        problem.setIen(StringUtils.piece(x, 1));
                        problem.setDescription(StringUtils.piece(x, 3));
                        problem.setStatus(StringUtils.piece(x, 2));
                        problem.setCode(StringUtils.piece(x, 4));
                        // Dates arrive in FileMan format; unparseable dates
                        // are deliberately left unset.
                        try {
                            problem.setOnsetDate(FMDateUtils.fmDateTimeToDate(StringUtils.piece(x, 5)));
                            problem.setOnsetDateStr(DateUtils.toEnglishDate(problem.getOnsetDate()));
                        } catch(ParseException pe) {}
                        try {
                            problem.setLastUpdated(FMDateUtils.fmDateTimeToDate(StringUtils.piece(x, 6)));
                            problem.setLastUpdatedStr(DateUtils.toEnglishDate(problem.getLastUpdated()));
                        } catch(ParseException pe) {}
                        problem.setScStatus(StringUtils.piece(x, 7));
                        problem.setScConditions(StringUtils.piece(x, 17));
                        problem.setDetailRpc(StringUtils.piece(x, 8));
                        problem.setTranscribed(StringUtils.strToBool(StringUtils.piece(x, 9), "T"));
                        // "IEN;Name" pairs for location, provider and service.
                        problem.setLocationIen(StringUtils.toLong(StringUtils.piece(StringUtils.piece(x, 10), ';', 1), 0));
                        problem.setLocation(StringUtils.piece(StringUtils.piece(x, 10), ';', 2));
                        problem.setLocationType(StringUtils.piece(x, 11));
                        problem.setProviderIen(StringUtils.toLong(StringUtils.piece(StringUtils.piece(x, 12), ';', 1), 0));
                        problem.setProvider(StringUtils.piece(StringUtils.piece(x, 12), ';', 2));
                        problem.setServiceIen(StringUtils.toLong(StringUtils.piece(StringUtils.piece(x, 13), ';', 1), 0));
                        problem.setService(StringUtils.piece(StringUtils.piece(x, 13), ';', 2));
                        // Build the display "version" marker: '#' flags an
                        // inactive ICD code, '(u)' a transcribed problem,
                        // ' * ' an acute-priority problem.
                        String ver = null;
                        String prio = null;
                        char c = status.charAt(0);
                        if (StringUtils.piece(x, 18).equals("#") && ((c == 'A') || (c == 'B') || (c == 'R')))
                            ver = "#";
                        else if (problem.getTranscribed())
                            ver = "(u)";
                        else
                            ver = " ";
                        if (StringUtils.piece(x, 14).equals("A"))
                            prio = " * ";
                        else
                            prio = " ";
                        String version = problem.getStatus() + prio + ver;
                        problem.setVersion(version);
                        // Fetch comments only when piece 15 says there are any.
                        boolean hasComments = StringUtils.strToBool(StringUtils.piece(x, 15), "1");
                        if (hasComments) {
                            ArrayList commentsList = lCall("ORQQPL PROB COMMENTS", problem.getIen());
                            String[] comments = new String[commentsList.size()];
                            for (int j = 0; j < commentsList.size(); j++) {
                                String s = (String)commentsList.get(j);
                                comments[j] = s;
                            }
                            problem.setComments(comments);
                        }
                        problemsVect.add(problem);
                    }
                }
            }
            Problem[] problems = new Problem[problemsVect.size()];
            for (int i = 0; i < problems.length; i++)
                problems[i] = (Problem)problemsVect.get(i);
            problemsList.setProblems(problems);
            return problemsList;
        } else
            throw getCreateContextException("OR CPRS GUI CHART");
    }

    /**
     * Returns the free-text detail for one problem via "ORQQPL DETAIL".
     *
     * @param dfn patient DFN
     * @param ien problem IEN
     * @return the detail text, one RPC line per output line
     * @throws BrokerException if the RPC context cannot be created
     */
    public synchronized String getProblemDetail(String dfn, String ien) throws BrokerException {
        if (setContext("OR CPRS GUI CHART")) {
            setDfn(dfn);
            Object[] params = {dfn, ien, ""};
            ArrayList list = lCall("ORQQPL DETAIL", params);
            StringBuffer sb = new StringBuffer();
            for (int i = 0; i < list.size(); i++)
                sb.append((String)list.get(i) + "\n");
            return sb.toString();
        } else
            throw getCreateContextException("OR CPRS GUI CHART");
    }
}
|
ItsCalebJones/SpaceLaunchNow_Android
|
astronauts/src/main/java/me/calebjones/spacelaunchnow/astronauts/AstronautListViewModel.java
|
<gh_stars>10-100
package me.calebjones.spacelaunchnow.astronauts;
import androidx.lifecycle.MutableLiveData;
import androidx.lifecycle.ViewModel;
import me.calebjones.spacelaunchnow.data.models.main.Launch;
import me.calebjones.spacelaunchnow.data.models.main.astronaut.Astronaut;
/**
 * ViewModel exposing the astronaut list as LiveData, created lazily on
 * first access so it survives configuration changes with the ViewModel.
 */
public class AstronautListViewModel extends ViewModel {

    // Backing LiveData; remains null until getAstronauts() is first called.
    private MutableLiveData<Astronaut> astronauts;

    public AstronautListViewModel() { }

    /** Returns the astronaut LiveData, creating it on first access. */
    public MutableLiveData<Astronaut> getAstronauts() {
        if (astronauts == null) astronauts = new MutableLiveData<>();
        return astronauts;
    }
}
|
athenagroup/brxm
|
cms/api/src/main/java/org/hippoecm/addon/workflow/ActionDescription.java
|
/*
* Copyright 2009-2019 <NAME>.V. (http://www.onehippo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hippoecm.addon.workflow;
import java.util.HashMap;
import java.util.Map;
import org.apache.wicket.Component;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.request.cycle.RequestCycle;
import org.hippoecm.repository.api.WorkflowException;
/**
 * Base panel for a single workflow action. Subclasses supply the concrete
 * behavior via {@link #invoke()} and register lazily-initialized display
 * fragments ({@link ActionDisplay}) keyed by wicket id.
 */
public abstract class ActionDescription extends Panel implements IWorkflowInvoker {

    // True while an invocation started via invokeAsPromise() is outstanding;
    // resolve()/reject() only emit javascript when this is set.
    private boolean promise;

    /** Fragment whose content is built lazily, on first substantiation. */
    public abstract class ActionDisplay extends Fragment {

        private boolean initialized = false;

        protected ActionDisplay(String id) {
            super(id, id, ActionDescription.this, ActionDescription.this.getDefaultModel());
        }

        /** Builds the fragment's components; called at most once. */
        abstract protected void initialize();

        // Idempotent: runs initialize() exactly once per fragment.
        void substantiate() {
            if (!initialized) {
                initialized = true;
                initialize();
            }
        }
    }

    // Registered display fragments, keyed by their wicket id.
    Map<String, ActionDisplay> actions = new HashMap<String, ActionDisplay>();

    public ActionDescription(String id) {
        super(id);
    }

    public ActionDescription(String id, IModel<?> model) {
        super(id, model);
    }

    /** Sub-menu name this action belongs to; null means no sub-menu. */
    public String getSubMenu() {
        return null;
    }

    // Replaces an existing fragment with the same id instead of failing.
    public final MarkupContainer add(final Fragment component) {
        String id = component.getId();
        if (get(id) != null) {
            return addOrReplace(component);
        } else {
            return super.add(component);
        }
    }

    // ActionDisplays are only recorded in the map, not added to the
    // component tree here; note this overload intentionally returns null.
    public final MarkupContainer add(final ActionDisplay component) {
        actions.put(component.getId(), component);
        return null;
    }

    /** Looks up a registered ActionDisplay, falling back to child lookup. */
    public Component getFragment(String id) {
        if (actions.containsKey(id)) {
            return actions.get(id);
        } else {
            return super.get(id);
        }
    }

    /** CSS class for rendering this action; null means default styling. */
    public String getCssClass() {
        return null;
    }

    public boolean isFormSubmitted() {
        return false;
    }

    public void run() {
        invoke();
    }

    /** Executes the action; implemented by concrete subclasses. */
    protected abstract void invoke();

    @Override
    public void invokeWorkflow() throws Exception {
        throw new WorkflowException("unsupported operation");
    }

    /** Invokes the action in "promise" mode so resolve/reject notify JS. */
    public void invokeAsPromise() {
        promise = true;
        invoke();
    }

    @Override
    public void resolve(final String result) {
        if (promise) {
            promise = false;
            // Notify the client-side promise, if this is an ajax request.
            final AjaxRequestTarget target = RequestCycle.get().find(AjaxRequestTarget.class);
            if (target != null) {
                target.appendJavaScript(String.format("Hippo.Workflow.resolve('%s');", result));
            }
        }
    }

    @Override
    public void reject(final String reason) {
        if (promise) {
            promise = false;
            // Notify the client-side promise, if this is an ajax request.
            final AjaxRequestTarget target = RequestCycle.get().find(AjaxRequestTarget.class);
            if (target != null) {
                target.appendJavaScript(String.format("Hippo.Workflow.reject('%s');", reason));
            }
        }
    }
}
|
chaspy/tfsec
|
internal/app/tfsec/rules/azure/storage/default_action_deny_rule.go
|
package storage
import (
"github.com/aquasecurity/defsec/rules"
"github.com/aquasecurity/defsec/rules/azure/storage"
"github.com/aquasecurity/tfsec/internal/app/tfsec/block"
"github.com/aquasecurity/tfsec/internal/app/tfsec/scanner"
"github.com/aquasecurity/tfsec/pkg/rule"
)
// init registers the AZU012 rule, which flags Azure storage account network
// rules whose default_action is "Allow" instead of "Deny".
func init() {
	scanner.RegisterCheckRule(rule.Rule{
		LegacyID: "AZU012",
		BadExample: []string{`
 resource "azurerm_storage_account_network_rules" "bad_example" {
   default_action             = "Allow"
   ip_rules                   = ["127.0.0.1"]
   virtual_network_subnet_ids = [azurerm_subnet.test.id]
   bypass                     = ["Metrics"]
 }
 `},
		GoodExample: []string{`
 resource "azurerm_storage_account_network_rules" "good_example" {
   default_action             = "Deny"
   ip_rules                   = ["127.0.0.1"]
   virtual_network_subnet_ids = [azurerm_subnet.test.id]
   bypass                     = ["Metrics"]
 }
 `},
		Links: []string{
			"https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/storage_account_network_rules#default_action",
		},
		RequiredTypes:  []string{"resource"},
		RequiredLabels: []string{"azurerm_storage_account", "azurerm_storage_account_network_rules"},
		Base:           storage.CheckDefaultActionDeny,
		CheckTerraform: func(resourceBlock block.Block, _ block.Module) (results rules.Results) {
			// The rule matches both the standalone network-rules resource and
			// storage accounts with an inline network_rules block; for the
			// latter, descend into the nested block first.
			if resourceBlock.IsResourceType("azurerm_storage_account") {
				if resourceBlock.MissingChild("network_rules") {
					return
				}
				resourceBlock = resourceBlock.GetBlock("network_rules")
			}
			// Only an explicit default_action of "Allow" (case-insensitive)
			// is reported; an absent attribute is left alone.
			defaultAction := resourceBlock.GetAttribute("default_action")
			if defaultAction.IsNotNil() && defaultAction.Equals("Allow", block.IgnoreCase) {
				results.Add("Resource defines a default_action of Allow. It should be Deny.", defaultAction)
			}
			return results
		},
	})
}
|
xu-kai-xu/OpenPNM
|
examples/contrib/shale_oil_conductance_v2.8/pore_coordination_num.py
|
<filename>examples/contrib/shale_oil_conductance_v2.8/pore_coordination_num.py
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 10 20:11:18 2021
@author: <NAME>
"""
import numpy as np
import matplotlib.pyplot as plt
def pore_coordination_num(pn):
    '''
    Calculate and plot (as a histogram) the coordination number of every
    pore in the network, i.e. how many throats connect to each pore.

    Parameters
    ----------
    pn : network object
        Must expose ``pn['throat.conns']``, an array of shape ``(Nt, 2)``
        holding the two pore indices of each throat, and ``pn.Np``, the
        number of pores.

    Returns
    -------
    coordination : ndarray of int, shape ``(Np,)``
        Coordination number of each pore.
    '''
    throats = np.asarray(pn['throat.conns'])
    pore_num = pn.Np
    # Each throat contributes one connection to both of its end pores, so
    # the coordination numbers are just the occurrence counts of the pore
    # indices. np.bincount replaces the original per-throat Python loop
    # (vectorized O(Nt) instead of interpreted iteration), and yields an
    # integer array directly, matching the original dtype conversion.
    coordination = np.bincount(throats.ravel(), minlength=pore_num)
    bins = np.max(coordination) + 1
    bin_range = (coordination.min() - 0.5, coordination.max() + 0.5)
    plt.hist(coordination, bins=bins, align='mid', range=bin_range,
             edgecolor='black')
    plt.show()
    return (coordination)
|
CostelMD/find-your-roomate-project
|
src/modules/Login/saga.js
|
<filename>src/modules/Login/saga.js<gh_stars>0
import { put, all, call, takeLatest } from "redux-saga/effects";
import { request } from "../../helpers/requests";
import { browserRedirect } from "../../helpers/helpers";
import { urls } from "../../helpers/urls";
import { LOGIN_REQUESTING, loginSuccess, loginError } from "./actions";
//Login API call
// Login API call: POSTs the supplied credentials to the login endpoint
// and returns the request promise.
function loginCall(payload) {
  return request("post", urls.LOGIN_URL, payload);
}
// LOGIN Worker
function* loginWorker({ payload }) {
try {
let response = yield call(loginCall, payload);
response = response.data;
localStorage.removeItem("user");
localStorage.setItem("token", response.data.token);
localStorage.setItem(
"user",
JSON.stringify({
id: response.data._id,
firstName: response.data.firstName,
lastName: response.data.lastName,
})
);
yield put(loginSuccess());
yield call(browserRedirect, "/");
} catch (err) {
yield put(loginError(err.response.data));
}
}
// Login Watcher
export default function* loginSaga() {
yield all([takeLatest(LOGIN_REQUESTING, loginWorker)]);
}
|
zweimach/wiyata.c
|
tests/leetcode/add_two_numbers_test.c
|
#include <setjmp.h>
#include <stdarg.h>
#include <stddef.h>
#include <stdlib.h>
#include <cmocka.h>
#include "add_two_numbers.h"
/*
 * Compares two lists node-by-node. Unlike the original inline loops, this
 * fails the test with assert_non_null when the result list is SHORTER than
 * expected (the old code dereferenced a NULL `r` and crashed the whole test
 * binary), and with assert_null when it is LONGER (previously undetected).
 */
static void assert_list_equal(struct ListNode* result, struct ListNode* expected)
{
    struct ListNode* r = result;
    for (struct ListNode* e = expected; e; r = r->next, e = e->next) {
        assert_non_null(r);
        assert_int_equal(r->val, e->val);
    }
    assert_null(r);
}

/* 342 + 465 = 807, stored in reverse digit order. */
static void first_test(void** state)
{
    (void)state;
    struct ListNode* input[] = {list_node_create(2, 4, 3),
                                list_node_create(5, 6, 4)};
    struct ListNode* expected = list_node_create(7, 0, 8);
    struct ListNode* result = add_two_numbers(input[0], input[1]);
    assert_list_equal(result, expected);
    list_node_free(input[0]);
    list_node_free(input[1]);
    list_node_free(expected);
    list_node_free(result);
}

/* 0 + 0 = 0. */
static void second_test(void** state)
{
    (void)state;
    struct ListNode* input[] = {list_node_create(0), list_node_create(0)};
    struct ListNode* expected = list_node_create(0);
    struct ListNode* result = add_two_numbers(input[0], input[1]);
    assert_list_equal(result, expected);
    list_node_free(input[0]);
    list_node_free(input[1]);
    list_node_free(expected);
    list_node_free(result);
}

/* 9999999 + 9999 = 10009998: carry propagation extends the result list. */
static void third_test(void** state)
{
    (void)state;
    struct ListNode* input[] = {list_node_create(9, 9, 9, 9, 9, 9, 9),
                                list_node_create(9, 9, 9, 9)};
    struct ListNode* expected =
        list_node_new(8, (int[]){8, 9, 9, 9, 0, 0, 0, 1});
    struct ListNode* result = add_two_numbers(input[0], input[1]);
    assert_list_equal(result, expected);
    list_node_free(input[0]);
    list_node_free(input[1]);
    list_node_free(expected);
    list_node_free(result);
}

/* 999999991 + 9 = 1000000000: carry ripples through every digit. */
static void fourth_test(void** state)
{
    (void)state;
    struct ListNode* input[] = {list_node_create(1, 9, 9, 9, 9, 9, 9, 9, 9),
                                list_node_create(9)};
    struct ListNode* expected = list_node_create(0, 0, 0, 0, 0, 0, 0, 0, 0, 1);
    struct ListNode* result = add_two_numbers(input[0], input[1]);
    assert_list_equal(result, expected);
    list_node_free(input[0]);
    list_node_free(input[1]);
    list_node_free(expected);
    list_node_free(result);
}

int main()
{
    struct CMUnitTest const tests[] = {cmocka_unit_test(first_test),
                                       cmocka_unit_test(second_test),
                                       cmocka_unit_test(third_test),
                                       cmocka_unit_test(fourth_test)};
    return cmocka_run_group_tests(tests, NULL, NULL);
}
|
shitsurei/simple-bg-cli
|
src/main/java/io/github/shitsurei/common/util/GeometryUtil.java
|
<reponame>shitsurei/simple-bg-cli
package io.github.shitsurei.common.util;
import io.github.shitsurei.dao.pojo.bo.system.Captcha;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.util.Random;
/**
* 图形工具类
*
* @author zhanggr-b
* @version 1.0
* @date 2021/12/27 14:21
*/
/**
 * Graphics utility class for generating arithmetic captcha images.
 *
 * @author zhanggr-b
 * @version 1.0
 * @date 2021/12/27 14:21
 */
public class GeometryUtil {

    private static Random random = new Random();
    private static Font timesNewRoman = new Font("Times New Roman", Font.ITALIC, 20);

    /**
     * Generates a random arithmetic captcha image.
     *
     * @param width        image width in pixels (also limits expression length)
     * @param height       image height in pixels
     * @param expireSecond captcha validity period in seconds
     * @return a Captcha holding the image, the expression's numeric answer
     *         (as a string) and the expiry
     */
    public static Captcha createCaptcha(int width, int height, long expireSecond) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        Graphics graphics = image.getGraphics();
        // Light random background.
        graphics.setColor(getRandColor(200, 250));
        graphics.fillRect(0, 0, width, height);
        graphics.setFont(timesNewRoman);
        // Draw 155 short random lines as visual noise.
        graphics.setColor(getRandColor(160, 200));
        for (int i = 0; i < 155; i++) {
            int x = random.nextInt(width);
            int y = random.nextInt(height);
            int xl = random.nextInt(12);
            int yl = random.nextInt(12);
            graphics.drawLine(x, y, x + xl, y + yl);
        }
        // Build the expression (e.g. "3+5-2=?") and draw it one character at
        // a time, each in a random dark color.
        StringBuilder exp = new StringBuilder();
        int result = generateRandomExpression(width / 15, exp);
        for (int i = 0; i < exp.length(); i++) {
            graphics.setColor(new Color(20 + random.nextInt(110), 20 + random.nextInt(110), 20 + random.nextInt(110)));
            graphics.drawString(String.valueOf(exp.charAt(i)), 13 * i + 6, 20);
        }
        graphics.dispose();
        return new Captcha(image, String.valueOf(result), expireSecond);
    }

    /**
     * Generates a random addition/subtraction expression (appended to
     * stringBuilder, terminated with "=?").
     *
     * @param codeLength target expression length (clamped to at least 3
     *                   after the trailing "=?" is accounted for)
     * @param stringBuilder receives the expression text
     * @return the numeric result of the expression
     */
    private static int generateRandomExpression(int codeLength, StringBuilder stringBuilder) {
        // Reserve room for the trailing "=?" and enforce a minimum length.
        codeLength -= 2;
        if (codeLength < 3) {
            codeLength = 3;
        }
        int sum = random.nextInt(10);
        stringBuilder.append(sum);
        // Each iteration appends one operator and one single-digit operand.
        for (int i = 0; i < codeLength; i += 2) {
            boolean operate = random.nextInt(10) > 5;
            int num = random.nextInt(10);
            stringBuilder.append(operate ? '+' : '-').append(num);
            sum += (operate ? num : -num);
        }
        stringBuilder.append("=?");
        return sum;
    }

    /**
     * Returns a random color with each RGB channel in [fc, bc); both bounds
     * are clamped to 255.
     *
     * @param fc lower channel bound (inclusive)
     * @param bc upper channel bound (exclusive)
     * @return the random color
     */
    private static Color getRandColor(int fc, int bc) {
        if (fc > 255) {
            fc = 255;
        }
        if (bc > 255) {
            bc = 255;
        }
        int r = fc + random.nextInt(bc - fc);
        int g = fc + random.nextInt(bc - fc);
        int b = fc + random.nextInt(bc - fc);
        return new Color(r, g, b);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.