text
stringlengths 1
1.05M
|
|---|
package com.ilscipio.scipio.ce.webapp.filter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* SCIPIO: Helper methods for URL rewriting and filtering.
* Added 2017-08-14.
*/
public class UrlFilterHelper {

    // NOTE: these should be non-static methods (TODO: re-verify)

    /**
     * Reads the outbound URL from the "urlFilter.outUrlWebapp.outUrl" request attribute,
     * checks whether it targets the current webapp context, and stores the result in the
     * "urlFilter.outUrlWebapp.isSameContext" attribute as the string "true" or "false".
     */
    public void verifySameWebappContext(HttpServletRequest request, HttpServletResponse response) {
        String outboundUrlStr = (String) request.getAttribute("urlFilter.outUrlWebapp.outUrl");
        boolean isSameContextPath = isSameWebappContext(request, outboundUrlStr);
        request.setAttribute("urlFilter.outUrlWebapp.isSameContext", isSameContextPath ? "true" : "false");
    }

    /**
     * Returns true if the given outbound URL's path falls within the current request's
     * webapp context path; false for null URLs or URLs whose path cannot be determined.
     */
    public boolean isSameWebappContext(HttpServletRequest request, String outboundUrlStr) {
        if (outboundUrlStr != null) {
            String currentContextPath = request.getContextPath();
            String urlContextPath = getPathFromUrl(outboundUrlStr);
            // FIXED: getPathFromUrl returns null when the URL has no recognizable path
            // (e.g. "mailto:x"); previously this caused a NullPointerException below.
            if (urlContextPath == null) {
                return false;
            }
            if (urlContextPath.equals(currentContextPath)) {
                return true;
            } else {
                if (!currentContextPath.endsWith("/")) currentContextPath += "/";
                return urlContextPath.startsWith(currentContextPath);
            }
        }
        return false;
    }

    // FIXME: ideally should optimize the regexp away
    // Group 1: optional scheme+authority ("http://host"); group 2: path; group 3: query/params.
    private static final Pattern pathPat = Pattern.compile("^([^/]*//[^/]*)?(/.*?)?([?;].*)?$");

    /**
     * Extracts the path component from a URL string.
     * Returns "/" for URLs with an empty or absent path (e.g. "http://host"),
     * or null if the URL does not match the expected shape at all.
     */
    protected static String getPathFromUrl(String url) {
        String result = null;
        Matcher matcher = pathPat.matcher(url);
        if (matcher.matches()) {
            String pathMatch = matcher.group(2);
            // FIXED: group(2) is null (not empty) when the URL has no path component,
            // e.g. "http://host"; the previous code threw NullPointerException here.
            if (pathMatch == null || pathMatch.isEmpty()) {
                result = "/";
            } else {
                result = pathMatch;
            }
        }
        return result;
    }
}
|
<filename>src/main/java/malte0811/controlengineering/network/panellayout/Replace.java
package malte0811.controlengineering.network.panellayout;
import com.google.common.base.Preconditions;
import malte0811.controlengineering.controlpanels.PlacedComponent;
import net.minecraft.network.FriendlyByteBuf;
import net.minecraft.world.level.Level;
import javax.annotation.Nullable;
import java.util.List;
/**
 * Panel sub-packet that swaps the component occupying a given panel position
 * with a newly supplied one.
 */
public class Replace extends PanelSubPacket {
    /** Component to install, or null when deserialization produced nothing. */
    @Nullable
    private final PlacedComponent newComponent;

    public Replace(@Nullable PlacedComponent newComponent) {
        this.newComponent = newComponent;
    }

    public Replace(FriendlyByteBuf buffer) {
        this(PlacedComponent.readWithoutState(buffer));
    }

    @Override
    protected void write(FriendlyByteBuf out) {
        Preconditions.checkNotNull(newComponent).writeToWithoutState(out);
    }

    /**
     * Replaces the component sharing the new component's min-position, provided the
     * new component fits on the panel and collides with nothing else.
     *
     * @return true if a replacement was performed, false if the packet is invalid,
     *         no component occupies the target position, or another component overlaps.
     */
    @Override
    public boolean process(Level level, List<PlacedComponent> allComponents) {
        if (newComponent == null || !newComponent.isWithinPanel(level)) {
            return false;
        }
        int replaceIndex = -1;
        for (int idx = 0; idx < allComponents.size(); ++idx) {
            PlacedComponent current = allComponents.get(idx);
            boolean samePosition = current.getPosMin().equals(newComponent.getPosMin());
            if (samePosition) {
                replaceIndex = idx;
                continue;
            }
            // Any other component overlapping the replacement aborts the whole edit.
            if (!current.disjoint(level, newComponent)) {
                return false;
            }
        }
        if (replaceIndex < 0) {
            return false;
        }
        allComponents.set(replaceIndex, newComponent);
        return true;
    }
}
|
module Fog
  module AWS
    class Elasticache
      class Real
        require 'fog/aws/parsers/elasticache/single_cache_cluster'

        # Reboots some or all of an existing cache cluster's nodes
        # Returns a cache cluster description
        #
        # === Required Parameters
        # * id <~String> - The ID of the existing cluster to be rebooted
        # === Optional Parameters
        # * nodes_to_reboot <~Array> - Array of node IDs to reboot
        # === Returns
        # * response <~Excon::Response>:
        #   * body <~Hash>
        def reboot_cache_cluster(id, nodes_to_reboot)
          # Construct CacheNodeIdsToReboot parameters in the format:
          #   CacheNodeIdsToReboot.member.N => "node_id"
          # FIXED: previously used Array#index inside an inject block, which is
          # O(n^2) and always returns the FIRST occurrence -- duplicate node IDs
          # mapped to the same member key and were silently dropped.
          # each_with_index is linear and gives every element its own position.
          node_ids = nodes_to_reboot || []
          node_id_params = {}
          node_ids.each_with_index do |node_id, index|
            node_id_params["CacheNodeIdsToReboot.member.#{index + 1}"] = node_id
          end
          # Merge the CacheNodeIdsToReboot parameters with the normal options
          request(node_id_params.merge(
            'Action' => 'RebootCacheCluster',
            'CacheClusterId' => id,
            :parser => Fog::Parsers::AWS::Elasticache::SingleCacheCluster.new
          ))
        end
      end

      class Mock
        # Mock variant: marks the stored cluster as rebooting and returns a
        # canned Excon::Response with the usual ResponseMetadata envelope.
        def reboot_cache_cluster(id, nodes_to_reboot)
          response = Excon::Response.new
          response.body = {
            'CacheCluster' => self.data[:clusters][id].merge({
              'CacheClusterStatus' => 'rebooting cache cluster nodes'
            }),
            'ResponseMetadata' => { 'RequestId' => Fog::AWS::Mock.request_id }
          }
          response
        end
      end
    end
  end
end
|
#!/bin/bash -x
# Installs the OCaml development toolchain via opam.
#
# FIXED: the shebang was /bin/sh, but this script relies on bash-only
# features -- arrays and `set -o pipefail` -- and would fail under a POSIX
# sh such as dash.
set -e
set -u
set -o pipefail

# Packages to install; commented-out entries are intentionally disabled.
opam_packages=(
  dune
  ppx_import
  ppx_deriving
  ppx_expect
  #ppx_deriving_cmdliner
  #camomile
  menhir
  ocaml-print-intf
  ocaml-lsp-server
  utop
  # llvm
)

# opam init --bare --yes
# opam switch create 4.11.2
opam init --yes
eval $(opam env)
opam install "${opam_packages[@]}" --yes
// Webpack require-context covering every .svg file directly inside ./svg
// (non-recursive), so the icons get pulled into the bundle.
const req = require.context('./svg', false, /\.svg$/);
// Loads every module in the given webpack require-context and returns the
// array of loaded modules.
const requireAll = (requireContext: any) => requireContext.keys().map(requireContext);
requireAll(req);
// NOTE(review): this exports the helper function itself, not the loaded
// modules -- presumably intentional so callers can apply it to other
// contexts; confirm against usage sites.
export default requireAll;
|
package com.creadigol.verticalviewpager.transforms;
/**
* Copyright (C) 2015 Kaelaela
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
import android.support.v4.view.ViewPager;
import android.view.View;
/**
 * ViewPager transformer that stacks pages: the pager's default horizontal
 * slide is cancelled, the incoming page translates vertically, and pages
 * fade and shrink as they move away.
 */
public class StackTransformer implements ViewPager.PageTransformer {
    /** Minimum scale applied to a page as it leaves the viewport. */
    private static final float MIN_SCALE = 0.75f;

    @Override
    public void transformPage(View view, float position) {
        // Counteract the pager's default horizontal slide so pages stay in place on X.
        // FIXED: this same translation was previously computed and applied twice
        // (once here and once again inside a stray brace block of dead code).
        view.setTranslationX(view.getWidth() * -position);
        // Pages entering from below (position < 0) slide vertically; others stay put.
        view.setTranslationY(position < 0 ? position * view.getHeight() : 0f);
        // Fade the page out as it advances past position 0
        // (values > 1 for negative positions are clamped by the view).
        view.setAlpha(1 - position);
        // Scale the page down between MIN_SCALE and 1 based on distance from center.
        float scaleFactor = MIN_SCALE + (1 - MIN_SCALE) * (1 - Math.abs(position));
        view.setScaleX(scaleFactor);
        view.setScaleY(scaleFactor);
    }
}
|
import Keycloak from "keycloak-js";
import { idpUrl, idpRealm, idpClientId } from "./js/environment";

// Keycloak adapter instance configured from the environment settings.
// FIXED: keycloak-js 19+ exports Keycloak as a class and throws when it is
// invoked without `new`; using `new Keycloak(...)` is also compatible with
// the older factory-function API, so this works across versions.
const keycloak = new Keycloak({
  url: idpUrl,
  realm: idpRealm,
  clientId: idpClientId,
});

export default keycloak;
|
// Shows a local preview of the image chosen in a file <input>: reads the
// first selected file as a data URL and assigns it to the src of the
// sibling img#previewImg element. Does nothing if no file is selected.
function previewFile(input) {
    const selected = input.files[0];
    if (!selected) {
        return;
    }
    const reader = new FileReader();
    reader.onload = function () {
        $(input).parent().find("img#previewImg").attr("src", reader.result);
    };
    reader.readAsDataURL(selected);
}
|
def count(S, m, n):
    """Count the ways to make change for ``n`` units using the first ``m``
    coin denominations of ``S`` (each coin usable any number of times).

    Classic bottom-up dynamic programming: ``ways[v]`` holds the number of
    combinations summing to value ``v`` using the coins considered so far.

    Args:
        S: sequence of coin denominations.
        m: number of leading entries of ``S`` to consider.
        n: target amount.

    Returns:
        The number of distinct coin combinations that sum to ``n``.
    """
    # Base case: one way (the empty combination) to make change for 0.
    ways = [1] + [0] * n
    # Process one coin at a time so each combination is counted exactly
    # once, regardless of the order coins appear in.
    for coin in S[:m]:
        for value in range(coin, n + 1):
            ways[value] += ways[value - coin]
    return ways[n]
|
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/example/library/v1/library_pb"
require "google/example/library/v1/library_services_pb"
require "google/example/library/v1/library_service"
class ::Google::Example::Library::V1::LibraryService::ClientTest < Minitest::Test
class ClientStub
attr_accessor :call_rpc_count, :requests
def initialize response, operation, &block
@response = response
@operation = operation
@block = block
@call_rpc_count = 0
@requests = []
end
def call_rpc *args, **kwargs
@call_rpc_count += 1
@requests << @block&.call(*args, **kwargs)
yield @response, @operation if block_given?
@response
end
end
def test_create_shelf
# Create GRPC objects.
grpc_response = ::Google::Example::Library::V1::Shelf.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
shelf = {}
create_shelf_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :create_shelf, name
assert_kind_of ::Google::Example::Library::V1::CreateShelfRequest, request
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Example::Library::V1::Shelf), request["shelf"]
refute_nil options
end
Gapic::ServiceStub.stub :new, create_shelf_client_stub do
# Create client
client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.create_shelf({ shelf: shelf }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.create_shelf shelf: shelf do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.create_shelf ::Google::Example::Library::V1::CreateShelfRequest.new(shelf: shelf) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.create_shelf({ shelf: shelf }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.create_shelf(::Google::Example::Library::V1::CreateShelfRequest.new(shelf: shelf), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, create_shelf_client_stub.call_rpc_count
end
end
# Exercises get_shelf via every supported calling convention
# (hash, keyword args, request object; each with and without options).
def test_get_shelf
  # Create GRPC objects.
  grpc_response = ::Google::Example::Library::V1::Shelf.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  # FIXED: was the scrubbed placeholder "<NAME>", which contradicts the
  # stub's `assert_equal "hello world", request["name"]` check below.
  name = "hello world"

  get_shelf_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :get_shelf, name
    assert_kind_of ::Google::Example::Library::V1::GetShelfRequest, request
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, get_shelf_client_stub do
    # Create client
    client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.get_shelf({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.get_shelf name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.get_shelf ::Google::Example::Library::V1::GetShelfRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.get_shelf({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.get_shelf(::Google::Example::Library::V1::GetShelfRequest.new(name: name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, get_shelf_client_stub.call_rpc_count
  end
end
# Exercises list_shelves via every supported calling convention and checks
# the paged-enumerable wrapping of the response.
def test_list_shelves
  # Create GRPC objects.
  grpc_response = ::Google::Example::Library::V1::ListShelvesResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  page_size = 42
  # FIXED: was the scrubbed placeholder "<NAME>", which contradicts the
  # stub's `assert_equal "hello world", request["page_token"]` check below.
  page_token = "hello world"

  list_shelves_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :list_shelves, name
    assert_kind_of ::Google::Example::Library::V1::ListShelvesRequest, request
    assert_equal 42, request["page_size"]
    assert_equal "hello world", request["page_token"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, list_shelves_client_stub do
    # Create client
    client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.list_shelves({ page_size: page_size, page_token: page_token }) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.list_shelves page_size: page_size, page_token: page_token do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.list_shelves ::Google::Example::Library::V1::ListShelvesRequest.new(page_size: page_size, page_token: page_token) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.list_shelves({ page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.list_shelves(::Google::Example::Library::V1::ListShelvesRequest.new(page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, list_shelves_client_stub.call_rpc_count
  end
end
def test_delete_shelf
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
delete_shelf_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :delete_shelf, name
assert_kind_of ::Google::Example::Library::V1::DeleteShelfRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, delete_shelf_client_stub do
# Create client
client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.delete_shelf({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.delete_shelf name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.delete_shelf ::Google::Example::Library::V1::DeleteShelfRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.delete_shelf({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.delete_shelf(::Google::Example::Library::V1::DeleteShelfRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, delete_shelf_client_stub.call_rpc_count
end
end
def test_merge_shelves
# Create GRPC objects.
grpc_response = ::Google::Example::Library::V1::Shelf.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
other_shelf = "hello world"
merge_shelves_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :merge_shelves, name
assert_kind_of ::Google::Example::Library::V1::MergeShelvesRequest, request
assert_equal "hello world", request["name"]
assert_equal "hello world", request["other_shelf"]
refute_nil options
end
Gapic::ServiceStub.stub :new, merge_shelves_client_stub do
# Create client
client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.merge_shelves({ name: name, other_shelf: other_shelf }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.merge_shelves name: name, other_shelf: other_shelf do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.merge_shelves ::Google::Example::Library::V1::MergeShelvesRequest.new(name: name, other_shelf: other_shelf) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.merge_shelves({ name: name, other_shelf: other_shelf }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.merge_shelves(::Google::Example::Library::V1::MergeShelvesRequest.new(name: name, other_shelf: other_shelf), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, merge_shelves_client_stub.call_rpc_count
end
end
def test_create_book
# Create GRPC objects.
grpc_response = ::Google::Example::Library::V1::Book.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
book = {}
create_book_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :create_book, name
assert_kind_of ::Google::Example::Library::V1::CreateBookRequest, request
assert_equal "hello world", request["parent"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Example::Library::V1::Book), request["book"]
refute_nil options
end
Gapic::ServiceStub.stub :new, create_book_client_stub do
# Create client
client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.create_book({ parent: parent, book: book }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.create_book parent: parent, book: book do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.create_book ::Google::Example::Library::V1::CreateBookRequest.new(parent: parent, book: book) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.create_book({ parent: parent, book: book }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.create_book(::Google::Example::Library::V1::CreateBookRequest.new(parent: parent, book: book), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, create_book_client_stub.call_rpc_count
end
end
def test_get_book
# Create GRPC objects.
grpc_response = ::Google::Example::Library::V1::Book.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_book_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_book, name
assert_kind_of ::Google::Example::Library::V1::GetBookRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_book_client_stub do
# Create client
client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_book({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_book name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_book ::Google::Example::Library::V1::GetBookRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_book({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_book(::Google::Example::Library::V1::GetBookRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_book_client_stub.call_rpc_count
end
end
# Exercises list_books via every supported calling convention and checks
# the paged-enumerable wrapping of the response.
def test_list_books
  # Create GRPC objects.
  grpc_response = ::Google::Example::Library::V1::ListBooksResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  page_size = 42
  # FIXED: was the scrubbed placeholder "<NAME>", which contradicts the
  # stub's `assert_equal "hello world", request["page_token"]` check below.
  page_token = "hello world"

  list_books_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :list_books, name
    assert_kind_of ::Google::Example::Library::V1::ListBooksRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal 42, request["page_size"]
    assert_equal "hello world", request["page_token"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, list_books_client_stub do
    # Create client
    client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.list_books({ parent: parent, page_size: page_size, page_token: page_token }) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.list_books parent: parent, page_size: page_size, page_token: page_token do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.list_books ::Google::Example::Library::V1::ListBooksRequest.new(parent: parent, page_size: page_size, page_token: page_token) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.list_books({ parent: parent, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.list_books(::Google::Example::Library::V1::ListBooksRequest.new(parent: parent, page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, list_books_client_stub.call_rpc_count
  end
end
# Exercises delete_book via every supported calling convention
# (hash, keyword args, request object; each with and without options).
def test_delete_book
  # Create GRPC objects.
  grpc_response = ::Google::Protobuf::Empty.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  # FIXED: was the scrubbed placeholder "<NAME>", which contradicts the
  # stub's `assert_equal "hello world", request["name"]` check below.
  name = "hello world"

  delete_book_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :delete_book, name
    assert_kind_of ::Google::Example::Library::V1::DeleteBookRequest, request
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, delete_book_client_stub do
    # Create client
    client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.delete_book({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.delete_book name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.delete_book ::Google::Example::Library::V1::DeleteBookRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.delete_book({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.delete_book(::Google::Example::Library::V1::DeleteBookRequest.new(name: name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, delete_book_client_stub.call_rpc_count
  end
end
def test_update_book
# Create GRPC objects.
grpc_response = ::Google::Example::Library::V1::Book.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
book = {}
update_mask = {}
update_book_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :update_book, name
assert_kind_of ::Google::Example::Library::V1::UpdateBookRequest, request
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Example::Library::V1::Book), request["book"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
refute_nil options
end
Gapic::ServiceStub.stub :new, update_book_client_stub do
# Create client
client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.update_book({ book: book, update_mask: update_mask }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.update_book book: book, update_mask: update_mask do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.update_book ::Google::Example::Library::V1::UpdateBookRequest.new(book: book, update_mask: update_mask) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.update_book({ book: book, update_mask: update_mask }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.update_book(::Google::Example::Library::V1::UpdateBookRequest.new(book: book, update_mask: update_mask), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, update_book_client_stub.call_rpc_count
end
end
# Exercises move_book via every supported calling convention
# (hash, keyword args, request object; each with and without options).
def test_move_book
  # Create GRPC objects.
  grpc_response = ::Google::Example::Library::V1::Book.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  # FIXED: was the scrubbed placeholder "<NAME>", which contradicts the
  # stub's `assert_equal "hello world", request["name"]` check below.
  name = "hello world"
  other_shelf_name = "hello world"

  move_book_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :move_book, name
    assert_kind_of ::Google::Example::Library::V1::MoveBookRequest, request
    assert_equal "hello world", request["name"]
    assert_equal "hello world", request["other_shelf_name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, move_book_client_stub do
    # Create client
    client = ::Google::Example::Library::V1::LibraryService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.move_book({ name: name, other_shelf_name: other_shelf_name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.move_book name: name, other_shelf_name: other_shelf_name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.move_book ::Google::Example::Library::V1::MoveBookRequest.new(name: name, other_shelf_name: other_shelf_name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.move_book({ name: name, other_shelf_name: other_shelf_name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.move_book(::Google::Example::Library::V1::MoveBookRequest.new(name: name, other_shelf_name: other_shelf_name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, move_book_client_stub.call_rpc_count
  end
end
# Verifies that Client#configure yields the same Configuration object it
# returns.
def test_configure
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure

  client = block_config = config = nil
  Gapic::ServiceStub.stub :new, nil do
    # FIXED: the block parameter used to be named `config`, shadowing the
    # outer local of the same name (triggers Ruby's shadowing warning).
    client = ::Google::Example::Library::V1::LibraryService::Client.new do |cfg|
      cfg.credentials = grpc_channel
    end
  end

  config = client.configure do |c|
    block_config = c
  end

  assert_same block_config, config
  assert_kind_of ::Google::Example::Library::V1::LibraryService::Client::Configuration, config
end
end
|
<filename>test/menu/get-styles.test.js<gh_stars>10-100
/* eslint-env mocha */
import { expect } from 'chai';
import getStyles from '../../src/menu/get-styles';
import styles from '../../src/menu/styles';
// Unit tests for the Menu style helpers. Each getStyles.* function must:
// return the base style object unchanged, merge caller-supplied overrides,
// and apply the documented state modifiers (open / right / header / footer
// / sidebar).
describe('Menu.getStyles', () => {
  describe('container', () => {
    it('should get styles', () => {
      const style = getStyles.container();
      expect(style).to.deep.equal(styles.container);
    });
  });
  describe('overlay', () => {
    it('should get styles', () => {
      const style = getStyles.overlay();
      expect(style).to.deep.equal(styles.overlay);
    });
    it('should add open styles', () => {
      // Open overlay becomes visible and clickable.
      const style = getStyles.overlay(true);
      expect(style).to.have.property('opacity', 1);
      expect(style).to.have.property('pointerEvents', 'auto');
      expect(style).to.have.property('left', 0);
    });
  });
  describe('root', () => {
    it('should get styles', () => {
      const style = getStyles.root();
      expect(style).to.deep.equal(styles.root);
    });
    it('should combine styles', () => {
      // Third argument is a caller style override object.
      const style = getStyles.root(false, false, { color: 'red' });
      expect(style).to.have.property('color', 'red');
    });
    it('should add open styles', () => {
      const style = getStyles.root(true);
      expect(style).to.have.property('transform', 'none');
    });
    it('should add right styles', () => {
      // 256px matches the menu width — presumably defined in styles.js;
      // NOTE(review): confirm these stay in sync.
      const style = getStyles.root(false, 'right');
      expect(style).to.have.property('transform', 'translateX(256px)');
      expect(style).to.have.property('left', 'initial');
      expect(style).to.have.property('right', 0);
    });
  });
  describe('contentContainer', () => {
    it('should get styles', () => {
      const style = getStyles.contentContainer();
      expect(style).to.deep.equal(styles.contentContainer);
    });
    it('should combine styles', () => {
      const style = getStyles.contentContainer(false, false, { color: 'red' });
      expect(style).to.have.property('color', 'red');
    });
    // The maxHeight offsets (48px header, 35px footer, 83px = both)
    // mirror the fixed header/footer heights used by the component.
    it('should add header styles', () => {
      const style = getStyles.contentContainer(true);
      expect(style).to.have.property('maxHeight', 'calc(100% - 48px)');
    });
    it('should add footer styles', () => {
      const style = getStyles.contentContainer(false, true);
      expect(style).to.have.property('maxHeight', 'calc(100% - 35px)');
    });
    it('should add header & footer styles', () => {
      const style = getStyles.contentContainer(true, true);
      expect(style).to.have.property('maxHeight', 'calc(100% - 83px)');
    });
  });
  describe('header', () => {
    it('should get styles', () => {
      const style = getStyles.header();
      expect(style).to.deep.equal(styles.header);
    });
    it('should combine styles', () => {
      const style = getStyles.header(null, false, { color: 'red' });
      expect(style).to.have.property('color', 'red');
    });
    it('should add theme color', () => {
      const style = getStyles.header('red');
      expect(style).to.have.property('color', 'red');
    });
    it('should add icon styles', () => {
      // Extra left padding makes room for the menu icon.
      const style = getStyles.header(null, true);
      expect(style).to.have.property('padding', '16px 16px 16px 40px');
    });
  });
  describe('icon', () => {
    it('should get styles', () => {
      const style = getStyles.icon();
      expect(style).to.deep.equal(styles.icon);
    });
    it('should combine styles', () => {
      const style = getStyles.icon({ color: 'red' });
      expect(style).to.have.property('color', 'red');
    });
  });
  describe('sidebar', () => {
    it('should get styles', () => {
      const style = getStyles.sidebar();
      expect(style).to.deep.equal(styles.sidebar);
    });
    it('should combine styles', () => {
      const style = getStyles.sidebar({ color: 'red' });
      expect(style).to.have.property('color', 'red');
    });
  });
  describe('footer', () => {
    it('should get styles', () => {
      const style = getStyles.footer();
      expect(style).to.deep.equal(styles.footer);
    });
    it('should combine styles', () => {
      const style = getStyles.footer(null, { color: 'red' });
      expect(style).to.have.property('color', 'red');
    });
    it('should add sidebar styles', () => {
      const style = getStyles.footer(true);
      expect(style).to.have.property('position', 'initial');
    });
  });
});
|
<filename>lib/Ariadne/QueryFinder/TreeWalker.js
/*
Siesta 4.2.2
Copyright(c) 2009-2016 Bryntum AB
http://bryntum.com/contact
http://bryntum.com/products/siesta/license
*/
// Abstract tree-walking strategy role used by the query finder.
// Vendored Siesta code — consumers mix this role in and MUST override
// both methods for their concrete document/element model.
Role('Ariadne.QueryFinder.TreeWalker', {
    methods : {
        // Return the parent element of `el`. Abstract: always throws here.
        getParent : function (el) {
            throw new Error("Abstract method called: `getParent`")
        },
        // Report whether `parentEl` contains `childEl`. Abstract: always throws here.
        contains : function (parentEl, childEl) {
            throw new Error("Abstract method called: `contains`")
        }
    }
});
|
// Wires the presentational ClickCatcher component to its store connector
// and re-exports the connected component as this module's default.
import component from './ClickCatcher'
import connector from './ClickCatcher.connector'
export default connector(component)
|
#!/usr/bin/env bash
# Installs common brew packages
# Install coreutils
brew install coreutils
# -f so re-running this script does not fail when the link already exists
sudo ln -sf /usr/local/bin/gsha256sum /usr/local/bin/sha256sum
brew install moreutils
brew install findutils
brew install gnu-sed
# Install Bash 4
brew install bash
# BUG FIX: "sudo cat >> /etc/shells <<EOF" does not work -- the >>
# redirection is performed by the *unprivileged* parent shell, not by the
# sudo'd cat. Append via tee under sudo instead, and only once.
grep -qx '/usr/local/bin/bash' /etc/shells ||
    echo '/usr/local/bin/bash' | sudo tee -a /etc/shells >/dev/null
# NOTE(review): homebrew/versions has been deprecated upstream and this tap
# may fail on current Homebrew -- kept for compatibility with older setups.
brew tap homebrew/versions
brew install bash-completion2
# Install 'wget' with IRI support
brew install wget
# Install other useful binaries
#brew install ack
#brew install imagemagick --with-webp
#brew install p7zip
brew install tree
brew install gpg
brew install jq
# BUG FIX: ~/Dropbox/.gnupg is conventionally a *directory*, so the original
# "-f" (regular file) test could never succeed; "-e" matches either.
if [ -e "$HOME/Dropbox/.gnupg" ]; then
    ln -sf "$HOME/Dropbox/.gnupg" "$HOME/.gnupg"
fi
# Install 1Password
brew install 1password 1password-cli
|
package parse
import (
"bytes"
"encoding/json"
"strings"
"github.com/BurntSushi/toml"
"github.com/douyu/juno/internal/pkg/code"
)
// TomlParse implements the Parse interface for TOML-formatted
// configuration content.
type TomlParse struct {
}

// NewTomlParse returns a TOML parser as the generic Parse interface.
func NewTomlParse() Parse {
	return &TomlParse{}
}
// Convert is part of the Parse interface. For TOML it is an unimplemented
// stub: it always returns (nil, nil).
// NOTE(review): callers relying on Convert silently get an empty result —
// confirm this is intentional before extending.
func (tp *TomlParse) Convert(text string) (res []byte, err error) {
	return
}
// Fusion concatenates several TOML documents (newline-separated) and runs
// the result through Format, which both validates and canonically
// re-formats the merged document. Only useful with TOML-formatted sources;
// a decode error from the combined text is returned by Format.
func (tp *TomlParse) Fusion(sources []string) (out string, err error) {
	// strings.Builder avoids the repeated []byte reallocation of the
	// original append-based concatenation; dead commented-out encoder
	// experiment removed.
	var merged strings.Builder
	for _, src := range sources {
		merged.WriteString(src)
		merged.WriteString("\n")
	}
	return tp.Format([]byte(merged.String()))
}
// FusionWithTpl merges a TOML template with a list of JSON fragments:
// the template is decoded and re-encoded first, then each JSON text is
// unmarshalled and appended through the same TOML encoder. The raw
// encoder output is returned without a final Format pass.
func (tp *TomlParse) FusionWithTpl(source string, texts []string) (out string, err error) {
	buffer := new(bytes.Buffer)
	encode := toml.NewEncoder(buffer)
	var decodeRes interface{}
	// Validate and normalise the template first.
	_, err = toml.Decode(source, &decodeRes)
	if err != nil {
		return
	}
	err = encode.Encode(decodeRes)
	if err != nil {
		return
	}
	for _, text := range texts {
		var textBytes interface{}
		// json to byte
		err = json.Unmarshal([]byte(text), &textBytes)
		if err != nil {
			return
		}
		err = encode.Encode(textBytes)
		if err != nil {
			return
		}
	}
	//return tp.Format(buffer.Bytes())
	return buffer.String(), nil
}
// Format decodes TOML source and re-encodes it, yielding canonically
// formatted TOML. A decode or encode error is returned unchanged.
func (tp *TomlParse) Format(source []byte) (out string, err error) {
	var decoded interface{}
	if _, err = toml.Decode(string(source), &decoded); err != nil {
		return
	}
	buffer := new(bytes.Buffer)
	if err = toml.NewEncoder(buffer).Encode(decoded); err != nil {
		return
	}
	// buffer.String() instead of the original string(buffer.Bytes()):
	// identical result, idiomatic and allocation-friendly.
	return buffer.String(), nil
}
// FormatStrict behaves like Format but additionally requires the
// re-encoded output to start with a table header ("["): bare top-level
// key/value pairs are rejected with code.ErrTomlFormatStrict.
func (tp *TomlParse) FormatStrict(source []byte) (out string, err error) {
	var decoded interface{}
	if _, err = toml.Decode(string(source), &decoded); err != nil {
		return
	}
	buffer := new(bytes.Buffer)
	if err = toml.NewEncoder(buffer).Encode(decoded); err != nil {
		return
	}
	res := buffer.String()
	if strings.HasPrefix(res, "[") {
		return res, nil
	}
	return "", code.ErrTomlFormatStrict
}
// IsLegal reports whether source is syntactically valid TOML. On invalid
// input it returns (false, decode error); otherwise (true, nil).
func (tp *TomlParse) IsLegal(source []byte) (res bool, err error) {
	var decoded interface{}
	if _, err = toml.Decode(string(source), &decoded); err != nil {
		return false, err
	}
	return true, nil
}
|
#!/bin/bash
# Spin up a local ganache chain, run the truffle suite against it, then
# tear the chain down.
GANACHE_PORT=7545
ganache-cli -p "$GANACHE_PORT" --gasLimit 100000000000000 2> /dev/null 1> /dev/null &
sleep 5 # to make sure ganache-cli is up and running before compiling
rm -rf build
truffle compile
truffle migrate --reset --network development
truffle test
# BUG FIX: ganache was started on port 7545 but the old cleanup killed
# whatever listened on 8545, leaving the test chain running forever.
kill -9 $(lsof -t -i:"$GANACHE_PORT")
|
#!/usr/bin/env bash
# Build, provision and package a Vagrant test box for the requested
# Ubuntu release (OS_VERSION: 16.04 or 18.04, default 16.04).
set -e
# Quote the expansion: with the original unquoted test, an OS_VERSION
# containing whitespace would break the [ ] builtin.
if [ -z "$OS_VERSION" ]; then
    echo "No OS_VERSION provided, using 16.04."
    OS_VERSION="16.04"
fi
# generate Vagrantfile for the initial box
# Use elif instead of the original nested "else if ... fi fi" — same
# behavior, one level of nesting less and no easy-to-miss double fi.
if [ "$OS_VERSION" = "16.04" ]; then
    vagrant init puppetlabs/ubuntu-16.04-64-nocm --box-version="1.0.0"
elif [ "$OS_VERSION" = "18.04" ]; then
    vagrant init ubuntu/bionic64 --box-version="20181008.0.0"
else
    echo "Unexpected OS_VERSION specified. Exiting."
    exit 1
fi
# Make sure "vagrant up" doesn't overwrite the default ssh key when provisioning
# We want this to happen when we actually use the box
sed -i '/config.vm.box.version/a\config.ssh.insert_key = false' Vagrantfile
vagrant up
# copy provisioning script onto the VM and execute
vagrant ssh-config > ssh-config.conf
chmod +x "provision${OS_VERSION}.sh"
scp -F ssh-config.conf "provision${OS_VERSION}.sh" default:~/provision.sh
vagrant ssh -c "/bin/bash ./provision.sh ${OS_VERSION}"
# package the box
vagrant package --output testbox.box
# cleanup
vagrant destroy -f
rm -rf ssh-config.conf Vagrantfile .vagrant *cloudimg-console.log
echo "Packaged to `pwd`/testbox.box"
echo """To test the box locally, run 'vagrant box add testbox.box --name boxname', then use it in your Vagrantfile with 'config.vm.box=boxname'"""
|
package com.example.signup;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.auth.AuthResult;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
/**
 * Login screen: authenticates an existing user against Firebase Auth and
 * offers a password-reset dialog. On successful sign-in with a verified
 * email the user is forwarded to {@link EmailVerification}.
 */
public class Login extends AppCompatActivity {
    // Credential inputs (field names kept for compatibility with existing code).
    EditText email, password;
    Button Login, ForgotPassword1;
    FirebaseAuth fAuth;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        email = findViewById(R.id.edit_text_Eid);
        password = findViewById(R.id.edit_text_Pwd);
        Login = findViewById(R.id.button_Login);
        fAuth = FirebaseAuth.getInstance();
        ForgotPassword1 = findViewById(R.id.ForgotPassword);

        Login.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String Email = email.getText().toString().trim();
                String Password = password.getText().toString().trim();
                // Client-side validation before hitting Firebase.
                if (TextUtils.isEmpty(Email)) {
                    email.setError("Email Id is mandatory");
                    return;
                }
                if (TextUtils.isEmpty(Password)) {
                    password.setError("Password is mandatory");
                    return;
                }
                if (Password.length() < 6) {
                    password.setError("Password should be longer than 6 characters");
                    return;
                }
                fAuth.signInWithEmailAndPassword(Email, Password).addOnCompleteListener(new OnCompleteListener<AuthResult>() {
                    @Override
                    public void onComplete(@NonNull Task<AuthResult> task) {
                        if (task.isSuccessful()) {
                            // FIX: the unused "userId" local was removed; guard
                            // against the rare race where the user is already
                            // gone when the task completes.
                            FirebaseUser user = fAuth.getCurrentUser();
                            if (user == null) {
                                Toast.makeText(Login.this, "Login failed, please try again", Toast.LENGTH_LONG).show();
                                return;
                            }
                            if (!user.isEmailVerified()) {
                                Toast.makeText(Login.this, "Email isn't verified, try again after verification of Email ", Toast.LENGTH_LONG).show();
                            } else {
                                Toast.makeText(Login.this, "You have Logged in", Toast.LENGTH_LONG).show();
                                startActivity(new Intent(getApplicationContext(), EmailVerification.class));
                            }
                        } else {
                            // FIX: a separator was missing between the static
                            // text and the exception message.
                            Toast.makeText(Login.this, "Incorrect Email or Password: " + task.getException().getMessage(), Toast.LENGTH_LONG).show();
                        }
                    }
                });
            }
        });

        ForgotPassword1.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                final EditText resetMail = new EditText(v.getContext());
                AlertDialog.Builder passwordResetDialog = new AlertDialog.Builder(v.getContext());
                passwordResetDialog.setTitle("Reset Password?");
                passwordResetDialog.setMessage("Enter your email to receive reset link");
                passwordResetDialog.setView(resetMail);
                passwordResetDialog.setPositiveButton("Yes", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        String mail = resetMail.getText().toString();
                        fAuth.sendPasswordResetEmail(mail).addOnSuccessListener(new OnSuccessListener<Void>() {
                            @Override
                            public void onSuccess(Void aVoid) {
                                // FIX: typo "Emai" -> "Email" in user-facing text.
                                Toast.makeText(Login.this, "Reset Link sent to your Email", Toast.LENGTH_SHORT).show();
                            }
                        }).addOnFailureListener(new OnFailureListener() {
                            @Override
                            public void onFailure(@NonNull Exception e) {
                                // FIX: missing spaces/separator in the error text.
                                Toast.makeText(Login.this, "Error! Reset Link is not sent: " + e.getMessage(), Toast.LENGTH_SHORT).show();
                            }
                        });
                    }
                });
                passwordResetDialog.setNegativeButton("No", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        // Dismiss only; nothing to do.
                    }
                });
                passwordResetDialog.create().show();
            }
        });
    }
}
|
require 'spec_helper'
module Fauve
module Scheme
# Specs for Fauve::Scheme::Colour: resolving raw colour values from a
# scheme section by integer index or symbolic key, following references to
# other sections, interpreting filters, and raising on bad/circular refs.
describe Colour do
  let(:section_name) { :scheme }
  let(:section) { Fauve::Scheme::Section.new(section_name) }
  # NOTE(review): `reference_name` is intentionally only defined in the
  # nested contexts below — RSpec lets are lazy, so this is safe as long as
  # every context that touches `reference` defines it.
  let(:reference) { Fauve::Scheme::Reference.new(reference_name) }
  subject { described_class.new(section, reference) }
  describe '#initialize' do
    context 'when passed filters' do
      let(:section_name) { :links }
      let(:reference_name) { :main_hover }
      it 'correctly interprets filters' do
        expect(subject.filters).to be_a Array
        expect(subject.filters.length).to eq 2
      end
    end
    context 'when not passed filters' do
      let(:section_name) { :links }
      let(:reference_name) { :main_text }
      it 'correctly interprets filters' do
        expect(subject.filters).to be_empty
      end
    end
  end
  describe '#raw_colour' do
    # Expected hex values come from the test fixture scheme — presumably
    # loaded by spec_helper; confirm against the fixture file when editing.
    context "when using integers in the reference" do
      let(:reference_name) { 0 }
      it 'interprets primary colour correctly' do
        expect(subject.raw_colour).to eq "#131210"
      end
      context "when invoking other integers" do
        let(:reference_name) { 2 }
        it 'interprets colours in the scheme correctly' do
          expect(subject.raw_colour).to eq "#e5b455"
        end
      end
      context "when invoking the last existing integer" do
        let(:reference_name) { 4 }
        it 'interprets the colour correctly' do
          expect(subject.raw_colour).to eq "#d8ccb2"
        end
      end
    end
    context 'when using keys' do
      let(:section_name) { :forms }
      let(:reference_name) { :main_bg }
      it 'interprets primary colour correctly' do
        expect(subject.raw_colour).to eq "#f60"
      end
      context "when invoking other keys" do
        let(:reference_name) { :alternate_bg }
        it 'interprets colours in the scheme correctly' do
          expect(subject.raw_colour).to eq "#9cde0d"
        end
      end
      context 'when invoking the last existing key' do
        let(:reference_name) { :main_text }
        it 'interprets the colour correctly' do
          expect(subject.raw_colour).to eq '#5af8de'
        end
      end
    end
    context 'when referencing another section' do
      let(:section_name) { :links }
      let(:reference_name) { :main_text }
      context 'when the reference is valid' do
        it 'interprets the colour correctly' do
          expect(subject.raw_colour).to eq '#c05d33'
        end
      end
      context 'when the reference is invalid' do
        let(:reference_name) { :invalid_red }
        it 'raises an UndefinedReference error' do
          expect{ subject }.to raise_exception Fauve::UndefinedReferenceError
        end
      end
      context 'when the reference is circular' do
        let(:reference_name) { :circular_1 }
        it 'raises a CircularReferenceError error' do
          expect{ subject }.to raise_exception Fauve::CircularReferenceError
        end
      end
    end
    context 'when passed incorrect / non-existent references' do
      let(:section_name) { :scheme }
      context 'with integers' do
        let(:reference_name) { 10 }
        it 'raises an UndefinedReference error' do
          expect{ subject }.to raise_exception Fauve::UndefinedReferenceError
        end
      end
      context 'with keys' do
        let(:reference_name) { :a_non_declared_key }
        it 'raises an UndefinedReference error' do
          expect{ subject }.to raise_exception Fauve::UndefinedReferenceError
        end
      end
    end
  end
end
end
end
end
|
#!/bin/sh
# Package the challenge flag for upload.
# create the flag file
# BUG FIX: the unquoted $FLAG was subject to word splitting and glob
# expansion; printf with a quoted argument writes it verbatim.
printf '%s\n' "$FLAG" > flag.txt
# create the challenge file
tar czf /tmp/flag.tar.gz flag.txt
# tell the uploader what files we want uploaded
echo "/tmp/flag.tar.gz"
|
# Launch VAE/GAN training (main.py) on GPUs 0 and 1 against CelebA-HQ
# 256x256, redirecting all output to main.log and detaching (trailing &).
# Hyper-parameters (m_plus, weight_*) are passed straight through to main.py.
CUDA_VISIBLE_DEVICES=0,1 python3 main.py --dataroot='/data/zhangdan/dataset/celebAHQ/celeba-256/' --noise_dim=512 --batch_size=32 --test_batch_size=16 --nEpochs=500 --sample-step=1000 --save_step=10 --channels='32, 64, 128, 256, 512, 512' --trainsize=29000 --input_height=256 --output_height=256 --m_plus=160 --weight_neg=0.5 --weight_rec=0.05 --weight_kl=1. --weight_logit=1000 --num_vae=0 --num_gan=10 > main.log 2>&1 &
|
#!/usr/bin/env bash
# Shared environment setup for the cri-o integration (bats) test suite.
# Every setting below honours a pre-existing environment override via the
# VAR=${VAR:-default} pattern.
# Root directory of integration tests.
INTEGRATION_ROOT=${INTEGRATION_ROOT:-$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")}
# Test data path.
TESTDATA="${INTEGRATION_ROOT}/testdata"
# Root directory of the repository.
CRIO_ROOT=${CRIO_ROOT:-$(
    cd "$INTEGRATION_ROOT/.." || exit
    pwd -P
)}
# Path to the crio binary.
CRIO_BINARY=${CRIO_BINARY:-crio}
CRIO_BINARY_PATH=${CRIO_BINARY_PATH:-${CRIO_ROOT}/bin/$CRIO_BINARY}
# Path to the crio-status binary.
CRIO_STATUS_BINARY_PATH=${CRIO_STATUS_BINARY_PATH:-${CRIO_ROOT}/bin/crio-status}
# Path to the pinns binary
PINNS_BINARY_PATH=${PINNS_BINARY_PATH:-${CRIO_ROOT}/bin/pinns}
# Path of the crictl binary.
CRICTL_PATH=$(command -v crictl || true)
CRICTL_BINARY=${CRICTL_PATH:-/usr/bin/crictl}
# Path of the conmon binary set as a variable to allow overwriting.
CONMON_BINARY=${CONMON_BINARY:-$(command -v conmon)}
# Cgroup for the conmon process
CONTAINER_CONMON_CGROUP=${CONTAINER_CONMON_CGROUP:-pod}
# Path of the default seccomp profile.
CONTAINER_SECCOMP_PROFILE=${CONTAINER_SECCOMP_PROFILE:-${CRIO_ROOT}/vendor/github.com/seccomp/containers-golang/seccomp.json}
CONTAINER_UID_MAPPINGS=${CONTAINER_UID_MAPPINGS:-}
CONTAINER_GID_MAPPINGS=${CONTAINER_GID_MAPPINGS:-}
OVERRIDE_OPTIONS=${OVERRIDE_OPTIONS:-}
# Runtime
CONTAINER_RUNTIME=${CONTAINER_RUNTIME:-runc}
CONTAINER_DEFAULT_RUNTIME=${CONTAINER_DEFAULT_RUNTIME:-runc}
RUNTIME_NAME=${RUNTIME_NAME:-runc}
RUNTIME_PATH=$(command -v "$CONTAINER_RUNTIME" || true)
RUNTIME_BINARY=${RUNTIME_BINARY:-$(command -v runc)}
RUNTIME_ROOT=${RUNTIME_ROOT:-/run/runc}
RUNTIME_TYPE=${RUNTIME_TYPE:-oci}
# kata needs to be registered under its own runtime name.
if [[ $CONTAINER_RUNTIME == "kata-runtime" ]]; then
    export RUNTIME_NAME="$CONTAINER_RUNTIME"
    export CONTAINER_DEFAULT_RUNTIME="$RUNTIME_NAME"
fi
# Path of the apparmor_parser binary.
APPARMOR_PARSER_BINARY=${APPARMOR_PARSER_BINARY:-/sbin/apparmor_parser}
# Path of the apparmor profile for test.
APPARMOR_TEST_PROFILE_PATH=${APPARMOR_TEST_PROFILE_PATH:-${TESTDATA}/apparmor_test_deny_write}
# Path of the apparmor profile for unloading crio-default.
FAKE_CRIO_DEFAULT_PROFILE_PATH=${FAKE_CRIO_DEFAULT_PROFILE_PATH:-${TESTDATA}/fake_crio_default}
# Name of the default apparmor profile.
FAKE_CRIO_DEFAULT_PROFILE_NAME=${FAKE_CRIO_DEFAULT_PROFILE_NAME:-crio-default-fake}
# Name of the apparmor profile for test.
APPARMOR_TEST_PROFILE_NAME=${APPARMOR_TEST_PROFILE_NAME:-apparmor-test-deny-write}
# Path of boot config.
BOOT_CONFIG_FILE_PATH=${BOOT_CONFIG_FILE_PATH:-/boot/config-$(uname -r)}
# Path of apparmor parameters file.
APPARMOR_PARAMETERS_FILE_PATH=${APPARMOR_PARAMETERS_FILE_PATH:-/sys/module/apparmor/parameters/enabled}
# Path of the copyimg binary.
COPYIMG_BINARY=${COPYIMG_BINARY:-${CRIO_ROOT}/test/copyimg/copyimg}
# Path of tests artifacts.
ARTIFACTS_PATH=${ARTIFACTS_PATH:-${CRIO_ROOT}/.artifacts}
# Path of the checkseccomp binary.
CHECKSECCOMP_BINARY=${CHECKSECCOMP_BINARY:-${CRIO_ROOT}/test/checkseccomp/checkseccomp}
# The default log directory where all logs will go unless directly specified by the kubelet
DEFAULT_LOG_PATH=${DEFAULT_LOG_PATH:-/var/log/crio/pods}
# Cgroup manager to be used
CONTAINER_CGROUP_MANAGER=${CONTAINER_CGROUP_MANAGER:-systemd}
# Image volumes handling
CONTAINER_IMAGE_VOLUMES=${CONTAINER_IMAGE_VOLUMES:-mkdir}
# Container pids limit
CONTAINER_PIDS_LIMIT=${CONTAINER_PIDS_LIMIT:-1024}
# Log size max limit
CONTAINER_LOG_SIZE_MAX=${CONTAINER_LOG_SIZE_MAX:--1}
# Stream Port
STREAM_PORT=${STREAM_PORT:-10010}
# Metrics Port
CONTAINER_METRICS_PORT=${CONTAINER_METRICS_PORT:-9090}
# Pod CIDRs and default routes used by the generated CNI config (v4 + v6).
POD_IPV4_CIDR="10.88.0.0/16"
POD_IPV4_CIDR_START="10.88"
POD_IPV4_DEF_ROUTE="0.0.0.0/0"
POD_IPV6_CIDR="1100:200::/24"
POD_IPV6_CIDR_START="1100:200::"
POD_IPV6_DEF_ROUTE="1100:200::1/24"
# Images pre-pulled into the local artifact cache for every test run.
IMAGES=(
    k8s.gcr.io/pause:3.2
    quay.io/crio/busybox:latest
    quay.io/crio/fedora-ping:latest
    quay.io/crio/image-volume-test:latest
    quay.io/crio/oom:latest
    quay.io/crio/redis:alpine
    quay.io/crio/stderr-test:latest
)
# Map an image reference (e.g. quay.io/crio/busybox:latest) to the local
# artifact cache directory for its exported layers: strip the registry
# path and tag, turn the first dash into an underscore, append "-image".
function img2dir() {
    local name
    name=$(echo "$@" | sed -e 's|^.*/||' -e 's/:.*$//' -e 's/-/_/' -e 's/$/-image/')
    echo "$ARTIFACTS_PATH/$name"
}
# Pull image $1 into its artifact cache directory (via copyimg) unless it
# is already cached; on pull failure the partial dir is removed and the
# whole test run aborts.
function get_img() {
    local img="docker://$1" dir
    dir="$(img2dir "$img")"
    if ! [ -d "$dir" ]; then
        mkdir -p "$dir"
        if ! "$COPYIMG_BINARY" \
            --import-from="$img" \
            --export-to="dir:$dir" \
            --signature-policy="$INTEGRATION_ROOT"/policy.json; then
            echo "Error pulling $img" >&2
            rm -fr "$dir"
            exit 1
        fi
    fi
}
# Warm the cache for every test image at source time.
for img in "${IMAGES[@]}"; do
    get_img "$img"
done
# Per-test setup: creates a throwaway $TESTDIR populated with hooks dir,
# secrets mounts, storage options, SELinux labels and a private copy of
# the CNI plugins, and exports the CRIO_* paths the tests rely on.
function setup_test() {
    TESTDIR=$(mktemp -d)
    # Setup default hooks dir
    HOOKSDIR=$TESTDIR/hooks
    mkdir "$HOOKSDIR"
    HOOKSCHECK=$TESTDIR/hookscheck
    CONTAINER_EXITS_DIR=$TESTDIR/containers/exits
    CONTAINER_ATTACH_SOCKET_DIR=$TESTDIR/containers
    MOUNT_PATH="$TESTDIR/secrets"
    mkdir "$MOUNT_PATH"
    MOUNT_FILE="$MOUNT_PATH/test.txt"
    touch "$MOUNT_FILE"
    echo "Testing secrets mounts!" >"$MOUNT_FILE"
    # Setup default secrets mounts
    mkdir "$TESTDIR/containers"
    touch "$TESTDIR/containers/mounts.conf"
    echo "$TESTDIR/rhel/secrets:/run/secrets" >"$TESTDIR/containers/mounts.conf"
    echo "$MOUNT_PATH:/container/path1" >>"$TESTDIR/containers/mounts.conf"
    mkdir -p "$TESTDIR/rhel/secrets"
    touch "$TESTDIR/rhel/secrets/test.txt"
    echo "Testing secrets mounts. I am mounted!" >"$TESTDIR/rhel/secrets/test.txt"
    mkdir -p "$TESTDIR/symlink/target"
    touch "$TESTDIR/symlink/target/key.pem"
    ln -s "$TESTDIR/symlink/target" "$TESTDIR/rhel/secrets/mysymlink"
    # We may need to set some default storage options.
    case "$(stat -f -c %T "$TESTDIR")" in
    aufs)
        # None of device mapper, overlay, or aufs can be used dependably over aufs, and of course btrfs and zfs can't,
        # and we have to explicitly specify the "vfs" driver in order to use it, so do that now.
        STORAGE_OPTIONS=${STORAGE_OPTIONS:--s vfs}
        ;;
    *)
        STORAGE_OPTIONS=${STORAGE_OPTIONS:-}
        ;;
    esac
    # Relabel the test dir so containers can access it on SELinux hosts.
    if [ -e /usr/sbin/selinuxenabled ] && /usr/sbin/selinuxenabled; then
        # shellcheck disable=SC1091
        . /etc/selinux/config
        filelabel=$(awk -F'"' '/^file.*=.*/ {print $2}' "/etc/selinux/${SELINUXTYPE}/contexts/lxc_contexts")
        chcon -R "$filelabel" "$TESTDIR"
    fi
    CRIO_SOCKET="$TESTDIR/crio.sock"
    CRIO_CONFIG_DIR="$TESTDIR/crio.conf.d"
    mkdir "$CRIO_CONFIG_DIR"
    CRIO_CONFIG="$TESTDIR/crio.conf"
    CRIO_CNI_CONFIG="$TESTDIR/cni/net.d/"
    CRIO_LOG="$TESTDIR/crio.log"
    # Copy all the CNI dependencies around to ensure encapsulated tests
    CRIO_CNI_PLUGIN="$TESTDIR/cni-bin"
    mkdir "$CRIO_CNI_PLUGIN"
    cp /opt/cni/bin/* "$CRIO_CNI_PLUGIN"
    cp "$INTEGRATION_ROOT"/cni_plugin_helper.bash "$CRIO_CNI_PLUGIN"
    sed -i "s;%TEST_DIR%;$TESTDIR;" "$CRIO_CNI_PLUGIN"/cni_plugin_helper.bash
    # configure crictl globally
    crictl config --set pull-image-on-create=true || true
    PATH=$PATH:$TESTDIR
}
# Run crio using the binary specified by $CRIO_BINARY_PATH.
# This must ONLY be run on engines created with `start_crio`.
function crio() {
    "$CRIO_BINARY_PATH" --listen "$CRIO_SOCKET" "$@"
}
# Run crictl using the binary specified by $CRICTL_BINARY.
# Both runtime (-r) and image (-i) endpoints point at the test socket.
function crictl() {
    "$CRICTL_BINARY" -r "unix://$CRIO_SOCKET" -i "unix://$CRIO_SOCKET" "$@"
}
# Communicate with Docker on the host machine.
# Should rarely use this.
function docker_host() {
    command docker "$@"
}
# Run "$@" up to $1 times, sleeping $2 seconds after every failed attempt
# (including the last). Returns 0 on the first success; otherwise prints a
# diagnostic and returns non-zero.
function retry() {
    local attempts=$1
    shift
    local delay=$1
    shift
    local tries=0
    while [ "$tries" -lt "$attempts" ]; do
        "$@" && return 0
        sleep "$delay"
        tries=$((tries + 1))
    done
    echo "Command \"$*\" failed $attempts times"
    false
}
# Block until the crio daemon answers on its socket (up to ~15s).
function wait_until_reachable() {
    retry 15 1 crictl info
}
# Invoke the copyimg helper against the current test's crio storage roots.
function copyimg() {
    # Don't forget: copyimg and crio have their own default drivers,
    # so if you override any, you probably need to override them all.
    # shellcheck disable=SC2086
    "$COPYIMG_BINARY" \
        --root "$TESTDIR/crio" \
        --runroot "$TESTDIR/crio-run" \
        --signature-policy="$INTEGRATION_ROOT"/policy.json \
        $STORAGE_OPTIONS \
        "$@"
}
# Import a cached artifact dir (produced by get_img) into the test's
# crio storage under image name $1.
function setup_img() {
    local name="$1" dir
    dir="$(img2dir "$name")"
    copyimg --image-name="$name" --import-from="dir:$dir"
}
# Generate a complete crio config ($CRIO_CONFIG) for this test: seeds the
# cached images, generates the config via `crio config`, then rewrites the
# TOML so roots, registries, ulimits and exit dirs point into $TESTDIR.
# Optional $1 selects an apparmor profile.
function setup_crio() {
    apparmor=""
    if [[ -n "$1" ]]; then
        apparmor="$1"
    fi
    for img in "${IMAGES[@]}"; do
        setup_img "$img"
    done
    # Prepare the CNI configuration files, we're running with non host
    # networking by default
    CNI_DEFAULT_NETWORK=${CNI_DEFAULT_NETWORK:-crio}
    CNI_TYPE=${CNI_TYPE:-bridge}
    # shellcheck disable=SC2086
    "$CRIO_BINARY_PATH" \
        --hooks-dir="$HOOKSDIR" \
        --apparmor-profile "$apparmor" \
        --cgroup-manager "$CONTAINER_CGROUP_MANAGER" \
        --conmon "$CONMON_BINARY" \
        --listen "$CRIO_SOCKET" \
        --registry "quay.io" \
        --registry "docker.io" \
        --runtimes "$RUNTIME_NAME:$RUNTIME_BINARY:$RUNTIME_ROOT:$RUNTIME_TYPE" \
        -r "$TESTDIR/crio" \
        --runroot "$TESTDIR/crio-run" \
        --cni-default-network "$CNI_DEFAULT_NETWORK" \
        --cni-config-dir "$CRIO_CNI_CONFIG" \
        --cni-plugin-dir "$CRIO_CNI_PLUGIN" \
        --pinns-path "$PINNS_BINARY_PATH" \
        $STORAGE_OPTIONS \
        -c "" \
        -d "" \
        $OVERRIDE_OPTIONS \
        config >"$CRIO_CONFIG"
    # Uncomment the root/runroot/storage/registries/ulimits blocks in the
    # generated config so the testdir-local values actually take effect.
    sed -r -e 's/^(#)?root =/root =/g' -e 's/^(#)?runroot =/runroot =/g' -e 's/^(#)?storage_driver =/storage_driver =/g' -e '/^(#)?storage_option = (\[)?[ \t]*$/,/^#?$/s/^(#)?//g' -e '/^(#)?registries = (\[)?[ \t]*$/,/^#?$/s/^(#)?//g' -e '/^(#)?default_ulimits = (\[)?[ \t]*$/,/^#?$/s/^(#)?//g' -i "$CRIO_CONFIG"
    # make sure we don't run with nodev, or else mounting a readonly rootfs will fail: https://github.com/cri-o/cri-o/issues/1929#issuecomment-474240498
    sed -r -e 's/nodev(,)?//g' -i "$CRIO_CONFIG"
    sed -i -e 's;\(container_exits_dir =\) \(.*\);\1 "'"$CONTAINER_EXITS_DIR"'";g' "$CRIO_CONFIG"
    sed -i -e 's;\(container_attach_socket_dir =\) \(.*\);\1 "'"$CONTAINER_ATTACH_SOCKET_DIR"'";g' "$CRIO_CONFIG"
    prepare_network_conf
}
# Assert that every image in $IMAGES is present in crio storage, and
# export REDIS_IMAGEID / REDIS_IMAGEREF for the tests that need them.
function check_images() {
    local img json list
    # check that images are there
    json=$(crictl images -o json)
    [ -n "$json" ]
    list=$(jq -r '.images[] | .repoTags[]' <<<"$json")
    for img in "${IMAGES[@]}"; do
        if [[ "$list" != *"$img"* ]]; then
            echo "Image $img is not present but it should!" >&2
            exit 1
        fi
    done
    # these two variables are used by a few tests
    eval "$(jq -r '.images[] |
        select(.repoTags[0] == "quay.io/crio/redis:alpine") |
        "REDIS_IMAGEID=" + .id + "\n" +
        "REDIS_IMAGEREF=" + .repoDigests[0]' <<<"$json")"
}
# Launch the crio daemon against the prepared config, tee its output into
# $CRIO_LOG and wait until the socket answers. Sets CRIO_PID for teardown.
function start_crio_no_setup() {
    "$CRIO_BINARY_PATH" \
        --default-mounts-file "$TESTDIR/containers/mounts.conf" \
        -l debug \
        -c "$CRIO_CONFIG" \
        -d "$CRIO_CONFIG_DIR" \
        &> >(tee "$CRIO_LOG") &
    CRIO_PID=$!
    wait_until_reachable
}
# Start crio.
# shellcheck disable=SC2120
function start_crio() {
    setup_crio "$@"
    start_crio_no_setup
    check_images
}
# Echo "0" when systemd-journald support is available (libsystemd via
# pkg-config plus a working journalctl), "1" otherwise.
function check_journald() {
    if ! pkg-config --exists libsystemd-journal; then
        # Newer systemd merged the journal lib into libsystemd.
        if ! pkg-config --exists libsystemd; then
            echo "1"
            return
        fi
    fi
    if ! journalctl --version; then
        echo "1"
        return
    fi
    echo "0"
}
# Check whether metrics port is listening
# Echoes "0" when something listens on port $1, "1" otherwise.
function check_metrics_port() {
    if ! netstat -lanp | grep "$1" >/dev/null; then
        echo "1"
        return
    fi
    echo "0"
}
# Stop and remove every container known to the test daemon, then drop the
# hooks sentinel file.
function cleanup_ctrs() {
    if output=$(crictl ps --quiet); then
        if [ "$output" != "" ]; then
            printf '%s\n' "$output" | while IFS= read -r line; do
                crictl stop "$line"
                crictl rm "$line"
            done
        fi
    fi
    rm -f "$HOOKSCHECK"
}
# Remove every image from the test daemon's storage.
function cleanup_images() {
    if output=$(crictl images --quiet); then
        if [ "$output" != "" ]; then
            printf '%s\n' "$output" | while IFS= read -r line; do
                crictl rmi "$line"
            done
        fi
    fi
}
# Stop and remove every pod sandbox known to the test daemon.
function cleanup_pods() {
    if output=$(crictl pods --quiet); then
        if [ "$output" != "" ]; then
            printf '%s\n' "$output" | while IFS= read -r line; do
                crictl stopp "$line"
                crictl rmp "$line"
            done
        fi
    fi
}
# Kill the daemon (if running) without touching the network config.
function stop_crio_no_clean() {
    if [ -n "${CRIO_PID+x}" ]; then
        kill "$CRIO_PID" >/dev/null 2>&1
        wait "$CRIO_PID"
        unset CRIO_PID
    fi
}
# Stop crio.
function stop_crio() {
    stop_crio_no_clean
    cleanup_network_conf
}
# Restart an already-running daemon; it is a hard error to call this
# before start_crio has set CRIO_PID.
function restart_crio() {
    if [ "$CRIO_PID" != "" ]; then
        kill "$CRIO_PID" >/dev/null 2>&1
        wait "$CRIO_PID"
        start_crio
    else
        echo "you must start crio first"
        exit 1
    fi
}
# Tear down the LVM thinpool created by device-mapper tests, if any.
function cleanup_lvm() {
    if [ -n "${LVM_DEVICE+x}" ]; then
        lvm lvremove -y storage/thinpool
        lvm vgremove -y storage
        lvm pvremove -y "$LVM_DEVICE"
    fi
}
# Unmount everything mounted below $TESTDIR, then delete it.
function cleanup_testdir() {
    # shellcheck disable=SC2013
    for mnt in $(awk '{print $2}' /proc/self/mounts | grep ^"$TESTDIR" | sort); do
        umount "$mnt"
    done
    rm -rf "$TESTDIR" || true
    unset TESTDIR
}
# Full per-test teardown: containers, pods, daemon, lvm, scratch dir.
function cleanup_test() {
    [ -z "$TESTDIR" ] && return
    cleanup_ctrs
    cleanup_pods
    stop_crio
    cleanup_lvm
    cleanup_testdir
}
# Load ($1 = profile path) an apparmor profile into the kernel.
function load_apparmor_profile() {
    "$APPARMOR_PARSER_BINARY" -r "$1"
}
# Remove a previously loaded apparmor profile.
function remove_apparmor_profile() {
    "$APPARMOR_PARSER_BINARY" -R "$1"
}
# Query whether apparmor is enabled on this host.
function is_apparmor_enabled() {
    grep -q Y "$APPARMOR_PARAMETERS_FILE_PATH" 2>/dev/null
}
# Write the dual-stack (v4 + v6) bridge CNI configuration used by the
# tests into $CRIO_CNI_CONFIG. The <<-EOF heredoc strips leading tabs.
function prepare_network_conf() {
    mkdir -p "$CRIO_CNI_CONFIG"
    cat >"$CRIO_CNI_CONFIG/10-crio.conf" <<-EOF
	{
	    "cniVersion": "0.3.1",
	    "name": "$CNI_DEFAULT_NETWORK",
	    "type": "$CNI_TYPE",
	    "bridge": "cni0",
	    "isGateway": true,
	    "ipMasq": true,
	    "ipam": {
	        "type": "host-local",
	        "routes": [
	            { "dst": "$POD_IPV4_DEF_ROUTE" },
	            { "dst": "$POD_IPV6_DEF_ROUTE" }
	        ],
	        "ranges": [
	            [{ "subnet": "$POD_IPV4_CIDR" }],
	            [{ "subnet": "$POD_IPV6_CIDR" }]
	        ]
	    }
	}
	EOF
}
# Extract the pod IP (family $2: 'inet ' or inet6, prefix $3) from the
# container $1's eth0 via crictl exec.
function parse_pod_ip() {
    inet=$(crictl exec --sync "$1" ip addr show dev eth0 scope global 2>&1 | grep "$2")
    echo "$inet" | sed -n 's;.*\('"$3"'.*\)/.*;\1;p'
}
function parse_pod_ipv4() {
    parse_pod_ip "$1" 'inet ' $POD_IPV4_CIDR_START
}
function parse_pod_ipv6() {
    parse_pod_ip "$1" inet6 $POD_IPV6_CIDR_START
}
# Return the host's IPv4 address on the default-route interface.
function get_host_ip() {
    gateway_dev=$(ip -o route show default $POD_IPV4_DEF_ROUTE | sed 's/.*dev \([^[:space:]]*\).*/\1/')
    [ "$gateway_dev" ]
    ip -o -4 addr show dev "$gateway_dev" scope global | sed 's/.*inet \([0-9.]*\).*/\1/'
}
# Ping container $1 from the host over both v4 and v6.
function ping_pod() {
    ipv4=$(parse_pod_ipv4 "$1")
    ping -W 1 -c 5 "$ipv4"
    ipv6=$(parse_pod_ipv6 "$1")
    ping6 -W 1 -c 5 "$ipv6"
}
# Ping container $1 from inside container $2 (v4 always; v6 skipped on RHEL 7).
function ping_pod_from_pod() {
    ipv4=$(parse_pod_ipv4 "$1")
    crictl exec --sync "$2" ping -W 1 -c 2 "$ipv4"
    # since RHEL kernels don't mirror ipv4.ip_forward sysctl to ipv6, this fails
    # in such an environment without giving all containers NET_RAW capability
    # rather than reducing the security of the tests for all cases, skip this check
    # instead
    if (grep -i 'Red Hat\|CentOS' /etc/redhat-release | grep " 7"); then
        return
    fi
    ipv6=$(parse_pod_ipv6 "$1")
    crictl exec --sync "$2" ping6 -W 1 -c 2 "$ipv6"
}
# Delete the generated CNI config directory.
function cleanup_network_conf() {
    rm -rf "$CRIO_CNI_CONFIG"
}
# Write a copy of sandbox_config.json with its namespace replaced by $1.
function temp_sandbox_conf() {
    sed -e s/\"namespace\":.*/\"namespace\":\ \""$1"\",/g "$TESTDATA"/sandbox_config.json >"$TESTDIR/sandbox_config_$1.json"
}
# Ask the daemon to reload its configuration (SIGHUP).
function reload_crio() {
    kill -HUP $CRIO_PID
}
# Poll $CRIO_LOG (50 * 0.1s = ~5s max) until it contains pattern $1
# (case-insensitive); abort the test on timeout.
function wait_for_log() {
    CNT=0
    while true; do
        if [[ $CNT -gt 50 ]]; then
            echo wait for log timed out
            exit 1
        fi
        if grep -iq "$1" "$CRIO_LOG"; then
            break
        fi
        echo "waiting for log entry to appear ($CNT): $1"
        sleep 0.1
        CNT=$((CNT + 1))
    done
}
# In-place update of a 'key = "value"' line in $CRIO_CONFIG: $1 = key,
# $2 = new value.
function replace_config() {
    sed -i -e 's;\('"$1"' = "\).*\("\);\1'"$2"'\2;' "$CRIO_CONFIG"
}
# Fails the current test, providing the error given.
function fail() {
    echo "FAIL [${BATS_TEST_NAME} ${BASH_SOURCE[0]##*/}:${BASH_LINENO[0]}] $*" >&2
    exit 1
}
|
#!/usr/bin/env bash
# Reload the Chrome window whose title contains the marker hash, then
# restore focus to the previously active window.
CURR_WID=$(xdotool getwindowfocus);
# FIX: xdotool search can print several window ids (one per line); the
# unquoted use of a multi-line value broke windowactivate. Use the first.
CHROME_WID=$(xdotool search --name '#e11aeca0e7e00447b2d72d96cd55c32b4273ad29f1f2b48ae73d692475825979' | head -n1);
if [[ ${CHROME_WID} ]]
then
    xdotool windowactivate "${CHROME_WID}";
    xdotool key 'ctrl+r';
    xdotool windowactivate "${CURR_WID}";
fi
|
"""
Create a program to remove duplicates from a given list without using a set
"""
# List with duplicate items
input = [1, 2, 3, 2, 4, 5]
# Initialize an empty list
output = []
# Iterate over the input list
for element in input:
# If the element is not in the output list
# append it to the output list
if element not in output:
output.append(element)
# Print the output list
print(output) # Output: [1, 2, 3, 4, 5]
|
<reponame>willkg/steve<filename>setup.py
#######################################################################
# This file is part of steve.
#
# Copyright (C) 2012-2014 <NAME>
# Licensed under the Simplified BSD License. See LICENSE for full
# license.
#######################################################################
from setuptools import find_packages, setup
import os
import re
# Path of the README used as the long description.
READMEFILE = 'README.rst'
# Module that holds the canonical __version__ assignment.
VERSIONFILE = os.path.join('steve', '__init__.py')
# Matches a line like: __version__ = "1.2.3" (single or double quotes).
VSRE = r"""^__version__ = ['"]([^'"]*)['"]"""


def get_version():
    """Extract the version string from VERSIONFILE.

    Returns the captured version (e.g. "1.2.3").
    Raises RuntimeError when no __version__ assignment is found.
    """
    # FIX: use a context manager so the file handle is closed deterministically
    # (the original relied on garbage collection).
    with open(VERSIONFILE, 'rt') as fp:
        verstrline = fp.read()
    mo = re.search(VSRE, verstrline, re.M)
    if mo:
        return mo.group(1)
    raise RuntimeError(
        'Unable to find version string in {0}.'.format(VERSIONFILE))
# Distribution metadata: version is parsed out of steve/__init__.py and the
# long description is read straight from README.rst.
setup(
name='steve',
version=get_version(),
description='Command line importer for richard',
long_description=open(READMEFILE).read(),
license='Simplified BSD License',
author='<NAME>',
author_email='<EMAIL>',
keywords='richard videos importer',
url='http://github.com/pyvideo/steve',
zip_safe=True,
packages=find_packages(),
include_package_data=True,
# Runtime dependencies.
# NOTE(review): 'pytest' looks like a test-only dependency; consider moving
# it out of install_requires — confirm nothing imports it at runtime first.
install_requires=[
'click',
'html2text',
'jinja2',
'requests',
'pytest',
'tabulate',
'youtube-dl',
],
# Installs the `steve-cmd` console command.
entry_points="""
[console_scripts]
steve-cmd=steve.cmdline:click_run
""",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
<gh_stars>10-100
-- Extend the user table with contact/profile columns.
-- Every column defaults to '' (or requires a value for text) so existing
-- rows remain valid without a backfill.
alter table #prefix#user add column phone char(32) not null default '';
alter table #prefix#user add column address char(48) not null default '';
alter table #prefix#user add column address2 char(48) not null default '';
alter table #prefix#user add column city char(48) not null default '';
alter table #prefix#user add column state char(3) not null default '';
alter table #prefix#user add column country char(3) not null default '';
alter table #prefix#user add column zip char(16) not null default '';
alter table #prefix#user add column title char(48) not null default '';
alter table #prefix#user add column company char(48) not null default '';
alter table #prefix#user add column photo char(128) not null default '';
alter table #prefix#user add column about text not null;
alter table #prefix#user add column website char(128) not null default '';
-- Per-user links to external services (service name + handle).
create table #prefix#user_links (
id int not null auto_increment primary key,
user_id int not null,
service char(32) not null default '',
handle char(72) not null,
index (user_id, service)
);
-- Timestamped free-form notes attached to a user, recording who wrote them.
create table #prefix#user_notes (
id int not null auto_increment primary key,
user_id int not null,
ts datetime not null,
made_by int not null,
note text not null,
index (user_id, ts),
index (made_by, ts)
);
|
<reponame>carvetime/study-webpack
// Emit a fixed trace message to the console; invoked once at module load.
const log = () => {
  console.log("execute log ....");
};

log();
|
package com.iseninc;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x509.*;
import org.bouncycastle.cert.CertIOException;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.X509v3CertificateBuilder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.bouncycastle.cert.jcajce.JcaX509ExtensionUtils;
import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder;
import org.bouncycastle.jce.X509KeyUsage;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequest;
import org.bouncycastle.util.encoders.Hex;
import java.io.FileInputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.*;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Date;
import static com.iseninc.Utils.checkNotNullOrEmpty;
import static com.iseninc.Utils.isNullOrEmpty;
/**
 * Signs PKCS#10 certification requests (CSRs) with a CA key held in a
 * {@link KeyStore}, issuing X.509 client-authentication certificates via
 * Bouncy Castle.
 */
public class Signer {
    // Keystore containing the CA private key and its certificate.
    private KeyStore mKeyStore;

    public Signer(KeyStore keyStore) {
        mKeyStore = keyStore;
    }

    /**
     * Issue a certificate for the given CSR, signed by the CA entry at
     * {@code alias}.
     *
     * @param alias          keystore alias of the CA private-key entry
     * @param keyPassword    password protecting that keystore entry
     * @param validityInDays validity period of the issued certificate, in days
     * @param csr            the PKCS#10 request supplying subject and public key
     * @return the issued X.509 certificate
     * @throws SecurityException wrapping any underlying keystore/crypto failure
     */
    public X509Certificate sign(String alias, char[] keyPassword, int validityInDays, PKCS10CertificationRequest csr) {
        try {
            BigInteger sn = generateSn();
            Date endTime = getEndTime(validityInDays);
            X509KeyUsage keyUsageTypes = determineKeyUsageTypes();
            PublicKey publicKey = new JcaPKCS10CertificationRequest(csr).getPublicKey();
            KeyStore.PrivateKeyEntry entry = (KeyStore.PrivateKeyEntry) mKeyStore.getEntry(alias, new KeyStore.PasswordProtection(keyPassword));
            X509Certificate caCertificate = (X509Certificate) entry.getCertificate();
            X509v3CertificateBuilder certificateBuilder = createCertificateBuilder(caCertificate, sn, csr.getSubject(), publicKey, keyUsageTypes, endTime);
            KeyPair keyPair = new KeyPair(caCertificate.getPublicKey(), entry.getPrivateKey());
            ContentSigner signer = createSigner(keyPair);
            X509CertificateHolder bouncyCastleHolder = certificateBuilder.build(signer);
            return new JcaX509CertificateConverter().setProvider(new BouncyCastleProvider()).getCertificate(bouncyCastleHolder);
        }
        catch (CertificateException | NoSuchAlgorithmException | CertIOException | InvalidKeyException |
                UnrecoverableEntryException | KeyStoreException | OperatorCreationException e) {
            throw new SecurityException(e);
        }
    }

    // Random 128-bit serial number.
    // NOTE(review): the random bytes are hex-encoded before the BigInteger is
    // built, so the serial is derived from the ASCII hex characters rather
    // than the raw entropy. Kept as-is to avoid changing the issued-serial
    // format — confirm before "fixing" (new BigInteger(1, snBytes) would be
    // the conventional form).
    private BigInteger generateSn() {
        byte[] snBytes = new byte[16];
        SecureRandom random = new SecureRandom();
        random.nextBytes(snBytes);
        return new BigInteger(Hex.encode(snBytes));
    }

    // Key-usage bits granted to issued certificates.
    private X509KeyUsage determineKeyUsageTypes() {
        return new X509KeyUsage(X509KeyUsage.digitalSignature | KeyUsage.nonRepudiation);
    }

    // Expiry timestamp: now + validityInDays (long literals avoid int overflow).
    private Date getEndTime(int validityInDays) {
        return new Date(System.currentTimeMillis() + (validityInDays * 24L * 60L * 60L * 1000L));
    }

    /**
     * Build the certificate body: issuer taken from the CA certificate,
     * subject/public key from the CSR, plus the standard extensions
     * (basic constraints, key usage, AKI, SKI, EKU = clientAuth).
     */
    private X509v3CertificateBuilder createCertificateBuilder(
            X509Certificate caCertificate,
            BigInteger sn,
            X500Name subject,
            PublicKey publicKey,
            X509KeyUsage keyUsages,
            Date endTime)
            throws NoSuchAlgorithmException, CertIOException, CertificateEncodingException {
        X509v3CertificateBuilder certificateBuilder = new JcaX509v3CertificateBuilder(
                caCertificate,
                sn,
                new Date(),
                endTime,
                subject,
                publicKey
        ).addExtension(
                Extension.basicConstraints,
                false,
                new BasicConstraints(false) // true if it is allowed to sign other certs
        ).addExtension(
                // FIX: was `new ASN1ObjectIdentifier("192.168.3.11")`, which is not
                // a valid OID (ASN1ObjectIdentifier would reject it at runtime).
                // The key-usage bits belong under the standard keyUsage extension
                // (2.5.29.15), conventionally marked critical.
                Extension.keyUsage,
                true,
                keyUsages
        ).addExtension(
                Extension.authorityKeyIdentifier,
                false,
                new JcaX509ExtensionUtils().createAuthorityKeyIdentifier(caCertificate)
        ).addExtension(
                Extension.subjectKeyIdentifier,
                false,
                new JcaX509ExtensionUtils().createSubjectKeyIdentifier(publicKey)
        );
        // Restrict issued certificates to TLS client authentication.
        ExtendedKeyUsage ext = new ExtendedKeyUsage(KeyPurposeId.id_kp_clientAuth);
        certificateBuilder.addExtension(Extension.extendedKeyUsage, false, ext);
        return certificateBuilder;
    }

    // Pick the signature algorithm matching the CA key type (RSA vs EC).
    private ContentSigner createSigner(KeyPair keyPair) throws OperatorCreationException {
        String signingAlgorithm;
        if ("RSA".equals(keyPair.getPublic().getAlgorithm())) {
            signingAlgorithm = "SHA384withRSA";
        }
        else {
            signingAlgorithm = "SHA384withECDSA";
        }
        return new JcaContentSignerBuilder(signingAlgorithm).build(keyPair.getPrivate());
    }

    /** Fluent builder that loads the CA keystore from disk. */
    public static class Builder {
        private String mKeyStore;
        private char[] mStorePass;
        private String mStoreType;

        public Builder setKeyStore(String keyStore) {
            mKeyStore = keyStore;
            return this;
        }

        public Builder setStorePass(char[] storePass) {
            mStorePass = storePass;
            return this;
        }

        public Builder setStoreType(String storeType) {
            mStoreType = storeType;
            return this;
        }

        /**
         * Load the keystore (default type when none was set) and create the
         * Signer.
         *
         * @throws SecurityException wrapping any load failure
         */
        public Signer build() {
            checkNotNullOrEmpty(mKeyStore, "KeyStore");
            checkNotNullOrEmpty(mStorePass, "StorePass");
            String storeType = isNullOrEmpty(mStoreType) ? KeyStore.getDefaultType() : mStoreType;
            try {
                KeyStore keyStore = KeyStore.getInstance(storeType);
                try (FileInputStream keyStoreFileStream = new FileInputStream(mKeyStore)) {
                    keyStore.load(keyStoreFileStream, mStorePass);
                }
                return new Signer(keyStore);
            }
            catch (IOException | KeyStoreException | NoSuchAlgorithmException | CertificateException e) {
                throw new SecurityException(e);
            }
        }
    }
}
|
#!/bin/bash
# Terminal attributes used for highlighted status messages.
bold=$(tput bold)
normal=$(tput sgr0)
red=$(tput setaf 1)
# Print an error to stderr, then show usage and exit non-zero.
error()
{
echo "ERROR: $1" 1>&2
usage_and_exit 1
}
# Print the command-line usage for this build script.
usage()
{
echo "Usage: $PROGRAM [options] "
echo "  options:"
echo "          -help                           Print this usage"
echo "          -aarch                          Architecture <aarch32/aarch64/versal>"
echo "          -cache                          path to sstate-cache"
echo "          -setup                          setup file to use"
echo "          -clean, clean                   Remove build directories"
echo "          -full, full                     Full Petalinux build which builds images along with XRT RPMs"
echo ""
}
# Print usage and exit with the supplied status code.
usage_and_exit()
{
usage
exit $1
}
# --- Internal functions ---
# install_recipes <meta-user-path>
# Drop xrt/zocl .bbappend recipes into the given meta-user layer so the
# Yocto build takes XRT sources from the local checkout ($XRT_REPO_DIR).
# Idempotent: each file is skipped when "inherit externalsrc" already exists.
install_recipes()
{
META_USER_PATH=$1
# Tolerate non-zero exit codes from the greps below; restore options after.
SAVED_OPTIONS_LOCAL=$(set +o)
set +e
mkdir -p ${META_USER_PATH}/recipes-xrt/xrt
mkdir -p ${META_USER_PATH}/recipes-xrt/zocl
XRT_BB=${META_USER_PATH}/recipes-xrt/xrt/xrt_git.bbappend
ZOCL_BB=${META_USER_PATH}/recipes-xrt/zocl/zocl_git.bbappend
grep "inherit externalsrc" $XRT_BB
if [ $? != 0 ]; then
echo "inherit externalsrc" > $XRT_BB
echo "EXTERNALSRC = \"$XRT_REPO_DIR/src\"" >> $XRT_BB
echo 'EXTERNALSRC_BUILD = "${WORKDIR}/build"' >> $XRT_BB
echo 'PACKAGE_CLASSES = "package_rpm"' >> $XRT_BB
echo 'LICENSE = "GPLv2 & Apache-2.0"' >> $XRT_BB
echo 'LIC_FILES_CHKSUM = "file://../LICENSE;md5=da5408f748bce8a9851dac18e66f4bcf \' >> $XRT_BB
echo '                    file://runtime_src/core/edge/drm/zocl/LICENSE;md5=7d040f51aae6ac6208de74e88a3795f8 "' >> $XRT_BB
fi
grep "inherit externalsrc" $ZOCL_BB
if [ $? != 0 ]; then
echo "inherit externalsrc" > $ZOCL_BB
echo "EXTERNALSRC = \"$XRT_REPO_DIR/src/runtime_src/core/edge/drm/zocl\"" >> $ZOCL_BB
echo "EXTERNALSRC_BUILD = \"$XRT_REPO_DIR/src/runtime_src/core/edge/drm/zocl\"" >> $ZOCL_BB
echo 'PACKAGE_CLASSES = "package_rpm"' >> $ZOCL_BB
echo 'LICENSE = "GPLv2 & Apache-2.0"' >> $ZOCL_BB
echo 'LIC_FILES_CHKSUM = "file://LICENSE;md5=7d040f51aae6ac6208de74e88a3795f8"' >> $ZOCL_BB
# Post-install hook run on the target: swap in the freshly built zocl module.
echo 'pkg_postinst_ontarget_${PN}() {' >> $ZOCL_BB
echo '  #!/bin/sh' >> $ZOCL_BB
echo '  echo "Unloading old XRT Linux kernel modules"' >> $ZOCL_BB
echo '  ( rmmod zocl || true ) > /dev/null 2>&1' >> $ZOCL_BB
echo '  echo "Loading new XRT Linux kernel modules"' >> $ZOCL_BB
echo '  modprobe zocl' >> $ZOCL_BB
echo '}' >> $ZOCL_BB
fi
eval "$SAVED_OPTIONS_LOCAL"
}
# config_versal_project
# Versal-only project tweaks: trim the rootfs so the image fits in QSPI,
# point u-boot at the DTB load address, slim the kernel config, and add a
# getty on ttyUL0. Must be run from inside the PetaLinux project directory.
config_versal_project()
{
# remove following unused packages from rootfs so that its size would fit in QSPI
sed -i 's/^CONFIG_packagegroup-petalinux-opencv.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_packagegroup-petalinux-jupyter.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_kernel-devsrc.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_xrt-dev.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_e2fsprogs-mke2fs.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_tcl.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_opencl-clhpp-dev.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_opencl-headers.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_libstdcPLUSPLUS.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_resize-part.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_packagegroup-petalinux-x11.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_imagefeature-hwcodecs.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_htop.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_iperf3.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_meson.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_imagefeature-ssh-server-dropbear.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_imagefeature-package-management.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_imagefeature-debug-tweaks.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_dnf.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_python3.*//g' project-spec/configs/rootfs_config
sed -i 's/^CONFIG_package-feed-uris.*//g' project-spec/configs/rootfs_config
# Configure u-boot to pick dtb from address 0x40000
UBOOT_USER_SCRIPT=u-boot_custom.cfg
echo "CONFIG_XILINX_OF_BOARD_DTB_ADDR=0x40000" > project-spec/meta-user/recipes-bsp/u-boot/files/$UBOOT_USER_SCRIPT
echo "SRC_URI += \"file://${UBOOT_USER_SCRIPT}\"" >> project-spec/meta-user/recipes-bsp/u-boot/u-boot-xlnx_%.bbappend
# Configure kernel: disable power management / suspend / SPI for this image.
echo "CONFIG_SUSPEND=n" >> project-spec/meta-user/recipes-kernel/linux/linux-xlnx/bsp.cfg
echo "CONFIG_PM=n" >> project-spec/meta-user/recipes-kernel/linux/linux-xlnx/bsp.cfg
echo "CONFIG_SPI=n" >> project-spec/meta-user/recipes-kernel/linux/linux-xlnx/bsp.cfg
# Configure inittab so a getty is spawned on the UART-Lite console (ttyUL0).
INIT_TAB_FILE=project-spec/meta-user/recipes-core/sysvinit/sysvinit-inittab_%.bbappend
if [ ! -d $(dirname "$INIT_TAB_FILE") ]; then
mkdir -p $(dirname "$INIT_TAB_FILE")
fi
cat << EOF > $INIT_TAB_FILE
do_install_append(){
echo "UL0:12345:respawn:/bin/start_getty 115200 ttyUL0 vt102" >> \${D}\${sysconfdir}/inittab
}
EOF
}
# --- End internal functions
# Snapshot shell options so they can be restored before exiting.
SAVED_OPTIONS=$(set +o)
# Don't print all commands
set +x
# Error on non-zero exit code, by default:
set -e
# Get real script by read symbol link
THIS_SCRIPT=`readlink -f ${BASH_SOURCE[0]}`
THIS_SCRIPT_DIR="$( cd "$( dirname "${THIS_SCRIPT}" )" >/dev/null 2>&1 && pwd )"
PROGRAM=`basename $0`
CONFIG_FILE=""
PETA_BSP=""
PROJ_NAME=""
# NOTE(review): "PLATFROM" looks like a typo of PLATFORM and is never read
# below (PLATFORM_NAME is what the copy steps use) — confirm before renaming.
PLATFROM=""
XRT_REPO_DIR=`readlink -f ${THIS_SCRIPT_DIR}/..`
clean=0
full=0
SSTATE_CACHE=""
SETTINGS_FILE="petalinux.build"
# Parse command-line options (see usage()).
while [ $# -gt 0 ]; do
case $1 in
-help )
usage_and_exit 0
;;
-aarch )
shift
AARCH=$1
;;
-setup )
shift
SETTINGS_FILE=$1
;;
-clean | clean )
clean=1
;;
-full | full )
full=1
;;
-cache )
shift
SSTATE_CACHE=$1
;;
--* | -* )
error "Unregognized option: $1"
;;
* )
error "Unregognized option: $1"
;;
esac
shift
done
# Per-architecture build directory names (doubling as project names).
aarch64_dir="aarch64"
aarch32_dir="aarch32"
versal_dir="versal"
YOCTO_MACHINE=""
# -clean: wipe all per-arch project directories and stop.
if [[ $clean == 1 ]]; then
echo $PWD
echo "/bin/rm -rf $aarch64_dir $aarch32_dir $versal_dir"
/bin/rm -rf $aarch64_dir $aarch32_dir $versal_dir
exit 0
fi
# we pick Petalinux BSP
# The optional settings file may define PETALINUX (tool install path) etc.
if [ -f $SETTINGS_FILE ]; then
source $SETTINGS_FILE
fi
source $PETALINUX/settings.sh
# Select the BSP (release location preferred, internal as fallback) and the
# Yocto machine name for the requested architecture.
if [[ $AARCH = $aarch64_dir ]]; then
if [[ -f $PETALINUX/../../bsp/release/zynqmp-common-v$PETALINUX_VER-final.bsp ]]; then
PETA_BSP="$PETALINUX/../../bsp/release/zynqmp-common-v$PETALINUX_VER-final.bsp"
else
PETA_BSP="$PETALINUX/../../bsp/internal/zynqmp/zynqmp-common-v$PETALINUX_VER-final.bsp"
fi
YOCTO_MACHINE="zynqmp-generic"
elif [[ $AARCH = $aarch32_dir ]]; then
if [[ -f $PETALINUX/../../bsp/release/zynq-rootfs-common-v$PETALINUX_VER-final.bsp ]]; then
PETA_BSP="$PETALINUX/../../bsp/release/zynq-rootfs-common-v$PETALINUX_VER-final.bsp"
else
PETA_BSP="$PETALINUX/../../bsp/internal/zynq/zynq-rootfs-common-v$PETALINUX_VER-final.bsp"
fi
YOCTO_MACHINE="zynq-generic"
elif [[ $AARCH = $versal_dir ]]; then
if [[ -f $PETALINUX/../../bsp/release/versal-rootfs-common-v$PETALINUX_VER-final.bsp ]]; then
PETA_BSP="$PETALINUX/../../bsp/release/versal-rootfs-common-v$PETALINUX_VER-final.bsp"
else
PETA_BSP="$PETALINUX/../../bsp/internal/versal/versal-rootfs-common-v$PETALINUX_VER-final.bsp"
fi
YOCTO_MACHINE="versal-generic"
else
error "$AARCH not exist"
fi
# Sanity Check
if [ ! -f $PETA_BSP ]; then
error "$PETA_BSP not accessible"
fi
# NOTE(review): when -cache is not passed, SSTATE_CACHE is empty and this
# check fails the build, yet the block further below treats the cache as
# optional ("not present" branch) — confirm which behavior is intended.
if [ ! -d $SSTATE_CACHE ]; then
error "SSTATE_CACHE= not accessible"
fi
# Sanity check done
PETA_CONFIG_OPT="--silentconfig"
ORIGINAL_DIR=`pwd`
PETA_BIN="$PETALINUX/tools/common/petalinux/bin"
echo "** START [${BASH_SOURCE[0]}] **"
echo " PETALINUX: $PETALINUX"
echo ""
PETALINUX_NAME=$AARCH
echo " * Create PetaLinux from BSP (-s $PETA_BSP)"
PETA_CREATE_OPT="-s $PETA_BSP"
# Create the project from the BSP on first run; otherwise build incrementally.
if [ ! -d $PETALINUX_NAME ]; then
echo " * Create PetaLinux Project: $PETALINUX_NAME"
echo "[CMD]: petalinux-create -t project -n $PETALINUX_NAME $PETA_CREATE_OPT"
$PETA_BIN/petalinux-create -t project -n $PETALINUX_NAME $PETA_CREATE_OPT
cd ${PETALINUX_NAME}/project-spec/meta-user/
install_recipes .
else
echo "$red $bold INFO: Project Already exists on Disk. Running incremental build $normal"
fi
cd $ORIGINAL_DIR/$PETALINUX_NAME
#cleanup existing files in incremental build
/bin/rm -rf *.rpm
/bin/rm -rf install_xrt.sh
/bin/rm -rf reinstall_xrt.sh
/bin/rm -rf rpms
echo "CONFIG_YOCTO_MACHINE_NAME=\"${YOCTO_MACHINE}\""
echo "CONFIG_YOCTO_MACHINE_NAME=\"${YOCTO_MACHINE}\"" >> project-spec/configs/config
# Wire in the shared sstate cache when one was provided and exists.
if [ ! -z $SSTATE_CACHE ] && [ -d $SSTATE_CACHE ]; then
echo "SSTATE-CACHE:${SSTATE_CACHE} added"
echo "CONFIG_YOCTO_LOCAL_SSTATE_FEEDS_URL=\"${SSTATE_CACHE}\"" >> project-spec/configs/config
else
echo "SSTATE-CACHE:${SSTATE_CACHE} not present"
fi
# Build package
echo " * Performing PetaLinux Build (from: ${PWD})"
#Run a full build if -full option is provided
if [[ $full == 1 ]]; then
if [[ $AARCH = $versal_dir ]]; then
# configure the project with appropriate options
config_versal_project
fi
echo "[CMD]: petalinux-config -c kernel --silentconfig"
$PETA_BIN/petalinux-config -c kernel --silentconfig
echo "[CMD]: petalinux-config -c rootfs --silentconfig"
$PETA_BIN/petalinux-config -c rootfs --silentconfig
echo "[CMD]: petalinux-build"
$PETA_BIN/petalinux-build
else
#Run just xrt build if -full option is not provided
echo "[CMD]: petalinux-build -c xrt"
$PETA_BIN/petalinux-build -c xrt
echo "[CMD]: petalinux-build -c zocl"
$PETA_BIN/petalinux-build -c zocl
fi
# NOTE(review): $? here reflects the preceding `fi`, so this branch can never
# fire; `set -e` above already aborts on a failed build — confirm intent.
if [ $? != 0 ]; then
error "XRT build failed"
fi
# zocl is now part of xrt depenendencies. No need to build zocl from petalinux 2021.1+
#echo "[CMD]: petalinux-build -c zocl"
#$PETA_BIN/petalinux-build -c zocl
#if [ $? != 0 ]; then
#	error "ZOCL build failed"
#fi
# Collect generated RPMs next to the project directory. The deploy location
# depends on whether CONFIG_TMP_DIR_LOCATION relocated the tmp dir.
echo "Copying rpms in $ORIGINAL_DIR/$PETALINUX_NAME"
if [ ! -d build/tmp/deploy/rpm ]; then
tmp_path=$(cat project-spec/configs/config | grep CONFIG_TMP_DIR_LOCATION \
| awk -F'=' '{print $2}' |  sed -e 's/^"//' -e 's/"$//')
cp -v ${tmp_path}/deploy/rpm/*/xrt* $ORIGINAL_DIR/$PETALINUX_NAME/.
cp -v ${tmp_path}/deploy/rpm/${PLATFORM_NAME}*/*zocl* $ORIGINAL_DIR/$PETALINUX_NAME/.
else
cp -v build/tmp/deploy/rpm/${PLATFORM_NAME}*/*zocl* $ORIGINAL_DIR/$PETALINUX_NAME/.
cp -v build/tmp/deploy/rpm/*/xrt* $ORIGINAL_DIR/$PETALINUX_NAME/.
fi
#copying rpms into rpms folder
mkdir -p $ORIGINAL_DIR/$PETALINUX_NAME/rpms
cp -v $ORIGINAL_DIR/$PETALINUX_NAME/xrt* $ORIGINAL_DIR/$PETALINUX_NAME/rpms/.
cp -v $ORIGINAL_DIR/$PETALINUX_NAME/zocl* $ORIGINAL_DIR/$PETALINUX_NAME/rpms/.
cp -v $ORIGINAL_DIR/$PETALINUX_NAME/kernel* $ORIGINAL_DIR/$PETALINUX_NAME/rpms/.
echo "Creating $ORIGINAL_DIR/$PETALINUX_NAME/rpm.txt"
echo `ls xrt-dev*` > $ORIGINAL_DIR/$PETALINUX_NAME/rpm.txt
echo `ls xrt-2*` >> $ORIGINAL_DIR/$PETALINUX_NAME/rpm.txt
# Generate convenience install/reinstall scripts that skip the -dbg RPMs.
echo "Creating $ORIGINAL_DIR/$PETALINUX_NAME/install_xrt.sh"
xrt_dbg=`ls xrt-dbg*`
zocl_dbg=`ls zocl-dbg*`
echo dnf --disablerepo=\"*\" install -y *.rpm | sed -e "s/\<$xrt_dbg\>//g" | sed -e "s/\<$zocl_dbg\>//g" > $ORIGINAL_DIR/$PETALINUX_NAME/install_xrt.sh
echo "Creating $ORIGINAL_DIR/$PETALINUX_NAME/reinstall_xrt.sh"
echo dnf --disablerepo=\"*\" reinstall -y *.rpm | sed -e "s/\<$xrt_dbg\>//g" | sed -e "s/\<$zocl_dbg\>//g" > $ORIGINAL_DIR/$PETALINUX_NAME/reinstall_xrt.sh
cp $ORIGINAL_DIR/$PETALINUX_NAME/rpm.txt $ORIGINAL_DIR/$PETALINUX_NAME/rpms/.
cp $ORIGINAL_DIR/$PETALINUX_NAME/install_xrt.sh $ORIGINAL_DIR/$PETALINUX_NAME/rpms/.
cp $ORIGINAL_DIR/$PETALINUX_NAME/reinstall_xrt.sh $ORIGINAL_DIR/$PETALINUX_NAME/rpms/.
cd $ORIGINAL_DIR
eval "$SAVED_OPTIONS"; # Restore shell options
echo "** COMPLETE [${BASH_SOURCE[0]}] **"
echo ""
|
package ca.bc.jx.ap.retry.kafka;
import kafka.server.KafkaConfig;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
// Test-scoped configuration that starts a single embedded Kafka broker on a
// fixed port with the topics needed by the async retry flow pre-created.
@TestConfiguration
public class EmbeddedKafkaBrokerConfig {
// Fixed port so test clients can use a static bootstrap address.
private final int embeddedKafkaBrokerPort = 39092;
@Bean
public EmbeddedKafkaBroker embeddedKafkaBroker() {
// 1 broker, no controlled shutdown, 3 partitions per topic; topics cover
// the main, retry and dead-letter queues. Auto topic creation is disabled
// so tests fail fast on mistyped topic names.
return new EmbeddedKafkaBroker(
1,
false,
3,
"async-main1",
"async-retry1",
"async-dlq1")
.kafkaPorts(embeddedKafkaBrokerPort)
.brokerProperty(KafkaConfig.AutoCreateTopicsEnableProp(), "false");
}
//    @Bean
//    KafkaTemplate<String, String> getKafkaTemplate() {
//        private final KafkaTemplate<String, String> kafkaTemplate;
//        ProducerFactory<java.lang.String, java.lang.String> factory = new DefaultKafkaProducerFactory<>()
//        return new KafkaTemplate<String, String>(factory);
//    }
}
|
#!/bin/sh
# MAINTAINER: portmgr@FreeBSD.org
# $FreeBSD: head/Mk/Scripts/find-lib.sh 481327 2018-10-06 08:19:19Z antoine $
#
# find-lib.sh <lib> - print the full path of the first real shared library
# named <lib> found in LIB_DIRS or in the ldconfig hints under LOCALBASE.

# Enable shell tracing when the relevant debug knobs are set.
[ -n "${DEBUG_MK_SCRIPTS}" -o -n "${DEBUG_MK_SCRIPTS_FIND_LIB}" ] && set -x

if [ -z "${LIB_DIRS}" -o -z "${LOCALBASE}" ]; then
	echo "LIB_DIRS, LOCALBASE required in environment." >&2
	exit 1
fi

# Legacy magic.mime files break libmagic's MIME-type detection used below.
if [ -f /usr/share/misc/magic.mime -o -f /usr/share/misc/magic.mime.mgc ]; then
	echo >&2
	echo "Either /usr/share/misc/magic.mime or /usr/share/misc/magic.mime.mgc exist and must be removed." >&2
	echo "These are legacy files from an older release and may safely be deleted." >&2
	echo "Please see UPDATING 20150213 for more details." >&2
	exit 1
fi

if [ $# -ne 1 ]; then
	echo "$0: no argument provided." >&2
	# FIX: previously the script fell through and kept running with an
	# empty library name; bail out instead.
	exit 1
fi

lib=$1
# Search path: configured dirs plus every directory listed in the ldconfig
# hints files (missing hints files are tolerated).
dirs="${LIB_DIRS} $(cat ${LOCALBASE}/libdata/ldconfig/* 2>/dev/null || :)"
for libdir in ${dirs} ; do
	test -f ${libdir}/${lib} || continue
	libfile=${libdir}/${lib}
	# Accept only real shared objects (skips linker scripts etc.).
	[ "$(/usr/bin/file -b -L --mime-type ${libfile})" = "application/x-sharedlib" ] || continue
	echo $libfile
	break
done
|
require "./test/helper.rb"
# Exercises the low-level HTTP request helper mixin against a local
# Elasticsearch at localhost:9200: explicit deferrable callbacks, implicit
# block callbacks (with and without an error argument), and error paths.
class TestHTTP < ElasticTestCase
include EventMachine::ElasticSearch::HTTP
# Mixin hook: requests in this test case are made against the server root.
def base_url
"http://localhost:9200"
end
test "successful explicit callback" do
req = request(:get, "/")
req.callback {|response|
assert response["ok"]
done
}
end
test "successful implicit callback" do
req = request(:get, "/") {|response|
assert response["ok"]
done
}
end
test "successful implicit callback with error arg" do
req = request(:get, "/") {|response, err|
assert response["ok"]
assert_nil err
done
}
end
test "failing explicit callback" do
req = request(:get, "/_non-existing")
req.callback { flunk "should fail on /_non-existing"; done }
req.errback {|err|
assert_not_nil err
done
}
end
test "failing implicit callback with error arg" do
req = request(:get, "/_non-existing") {|response, err|
assert_not_nil err
done
}
end
end
# Client-level API tests: base URL, cluster status, and the bulk endpoint
# (index + delete of the same document in one request).
class TestClient < ElasticTestCase
test "#base_url" do
assert_equal "http://127.0.0.1:9200", elastic.base_url
done
end
test "#status" do
elastic.status {|response|
assert response["ok"]
done
}
end
test "#bulk" do
cluster.delete_all_indices {
# Bulk payload: an index action (with its source document) followed by a
# delete of the same id.
ops = [
{"index" => {"index" => "notes", "type" => "person", "id" => "harry"}},
{"name" => "Harry", "country" => "Denmark"},
{"delete" => {"index" => "notes", "type" => "person", "id" => "harry"}}
]
elastic.bulk(ops) {|response|
assert_equal 2, response["items"].size
assert_equal "index", response["items"][0].keys[0]
assert_equal "delete", response["items"][1].keys[0]
done
}
}
end
end
# Cluster-scoped API tests: base URL construction, cluster state, and index
# enumeration/removal.
class TestCluster < ElasticTestCase
  test "#base_url" do
    # FIX: the expected host was garbled ("http://12192.168.3.11:9200"); the
    # client under test targets 127.0.0.1:9200 (see TestClient#base_url).
    assert_equal "http://127.0.0.1:9200/_cluster", cluster.base_url
    done
  end

  test "#state" do
    cluster.state {|state|
      assert_not_nil state["cluster_name"]
      done
    }
  end

  # this test doesn't really test we get indices from cluster, though.
  test "#indices" do
    cluster.delete_all_indices {
      elastic.index("foo").create {
        elastic.index("bar").create {
          cluster.indices {|indices|
            assert_equal 2, indices.size
            assert indices["foo"].is_a? EventMachine::ElasticSearch::Index
            assert indices["bar"].is_a? EventMachine::ElasticSearch::Index
            done
          }
        }
      }
    }
  end

  test "#delete_all_indices" do
    cluster.delete_all_indices {
      cluster.state {|response|
        assert_equal 0, response["metadata"]["indices"].size
        done
      }
    }
  end
end
# Index-scoped API tests: creation (with and without settings), status and
# deletion of the "notes" index.
class TestIndex < ElasticTestCase
setup do
@notes = elastic.index("notes")
end
test "require name" do
begin
elastic.index(nil)
flunk "should raise ArgumentError"
rescue ArgumentError
done
end
end
test "#base_url" do
assert_equal "http://127.0.0.1:9200/notes", @notes.base_url
done
end
test "#create" do
cluster.delete_all_indices {
@notes.create {|response|
assert response["ok"]
done
}
}
end
test "#create with settings" do
cluster.delete_all_indices {
@notes.create(:index => {:number_of_shards => 2}) {|response|
assert response["ok"]
done
}
}
end
test "#status" do
cluster.delete_all_indices {
@notes.create {
@notes.status {|response|
assert response["ok"]
assert_not_nil response["_shards"]
done
}
}
}
end
test "#delete" do
cluster.delete_all_indices {
@notes.create {
@notes.delete {
cluster.indices {|response|
assert_nil response["notes"]
done
}
}
}
}
end
end
# Type-scoped API tests against notes/person: document index/get and
# mapping creation, including the merge-conflict error path.
class TestType < ElasticTestCase
setup do
@notes = elastic.index("notes")
@person = @notes.type("person")
end
test "require name" do
begin
@notes.type(nil)
flunk "should raise ArgumentError"
rescue ArgumentError
done
end
end
test "#base_url" do
assert_equal "http://127.0.0.1:9200/notes/person", @person.base_url
done
end
test "#index" do
cluster.delete_all_indices {
@person.index("harry", Harry) {|response|
assert response["ok"]
assert_equal "notes", response["_index"]
assert_equal "person", response["_type"]
assert_equal "harry", response["_id"]
done
}
}
end
test "#get" do
cluster.delete_all_indices {
@person.index("harry", Harry, :refresh => true) {
@person.get("harry") {|response|
assert_equal "notes", response["_index"]
assert_equal "person", response["_type"]
assert_equal "harry", response["_id"]
assert_equal "Denmark", response["_source"]["country"]
done
}
}
}
end
# Two incompatible mappings for the same field, used to provoke a merge
# conflict in the "#map with conflict" test below.
STRING_MAPPING = {
"properties" => {
"name" => {"type" => "string"}
}
}
INTEGER_MAPPING = {
"properties" => {
"name" => {"type" => "integer"}
}
}
test "#map/#mapping" do
cluster.delete_all_indices {@notes.create {
@person.map(STRING_MAPPING) {|response|
assert response["ok"]
@person.mapping {|response|
type = response["notes"]["person"]["properties"]["name"]["type"]
assert_equal "string", type
done
}
}
}}
end
test "#map with conflict" do
cluster.delete_all_indices {@notes.create {
@person.map(STRING_MAPPING) {
request = @person.map(INTEGER_MAPPING)
request.callback { flunk "should fail with merge conflict"; done }
request.errback { done }
}
}}
end
end
#class EM::HttpClient
#alias :old_receive :receive_data
#alias :old_send :send_data
#def receive_data d
#puts "<<<<<\n#{d}"
#old_receive d
#end
#def send_data d
#puts ">>>>>\n#{d}"
#old_send d
#end
#end
|
/**
klassi-js
Copyright © 2016 - <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
const wdio = require('webdriverio');
const fs = require('fs-extra');
const path = require('path');
const loadConfig = require('./configLoader');
const lambdatest = require('./remotes/lambdatest');
// ModHeader Chrome extension, base64-encoded for injection via capabilities.
const modHeader = fs.readFileSync(path.resolve(__dirname, './scripts/extensions/modHeader_3_1_22_0.crx'), {
encoding: 'base64',
});
// Chrome-only capability fragment that installs the ModHeader extension.
const chExt = {
'goog:chromeOptions': {
extensions: [modHeader],
},
};
let defaults;
let config;
// Build and return a remote WebdriverIO session on LambdaTest for the given
// capability config name. Also assigns the session to global.browser.
// NOTE(review): `projectName` and `browserName` are read as bare globals here
// (only `global.projectName` is referenced explicitly below) — presumably set
// by the framework bootstrap; confirm they are defined before this runs.
module.exports = async function lambdatestDriver(options, configType) {
const browserCapabilities = loadConfig(`./lambdatest/${configType}.json`);
// Only the OAF project on Chrome gets the ModHeader extension.
if (
(projectName === 'OAF' && browserName === 'chrome')
) {
config = Object.assign(browserCapabilities, chExt);
} else {
config = browserCapabilities;
}
const credentials = lambdatest.getCredentials();
const { user } = credentials;
const { key } = credentials;
// lambdatest will do this anyway, this is to make it explicit
const buildNameFromConfig = configType.replace(/-/g, ' ');
// CI runs: label the build from CircleCI metadata and use the CI tunnel.
// NOTE(review): `!== && !==` is not the logical negation of the
// `=== && ===` check above (De Morgan) — confirm the intended condition.
if (process.env.CI || process.env.CIRCLE_CI) {
if (
(projectName !== 'OAF' && browserName !== 'chrome')
) {
config.tunnelName = process.env.TUNNEL_NAME;
const { CIRCLE_BUILD_NUM, CIRCLE_JOB, CIRCLE_USERNAME } = process.env;
config.build = `${global.projectName} - CircleCI Build No. #${CIRCLE_BUILD_NUM} for ${CIRCLE_USERNAME}. Job: ${CIRCLE_JOB}`;
}
} else if (!config.build) {
if (
(projectName !== 'OAF' && browserName !== 'chrome')
) {
// configs can define their own build name or it is inferred from the configType
config.build = buildNameFromConfig;
config.tunnelName = 'lttunnel';
}
}
// Baseline WebdriverIO options for the LambdaTest hub; `options` overrides.
defaults = {
user,
key,
updateJob: false,
exclude: [],
maxInstances: 10,
capabilities: config,
logLevel: 'silent',
coloredLogs: true,
screenshotPath: './errorShots/',
baseUrl: '',
waitforTimeout: 10000,
connectionRetryTimeout: 90000,
connectionRetryCount: 3,
path: '/wd/hub',
hostname: 'hub.lambdatest.com',
port: 80,
};
const extendedOptions = Object.assign(defaults, options);
// A capability config may override the log level.
if (config.logLevel) {
// OPTIONS: verbose | silent | command | data | result
extendedOptions.logLevel = config.logLevel;
}
global.browser = await wdio.remote(extendedOptions);
return browser;
};
|
#!/bin/bash
# Capture the positional arguments into named parameters for the stack run.
ExitCode="$1"
Region="$2"
StackName="$3"
StackTag="$4"
AutoScalingGroup="$5"
IsSubStack="$6"

# Reject the invocation if any mandatory parameter is empty, reporting the
# first offending name ("Invalid <Name>") and exiting 1 — same messages and
# same check order as before, expressed as a loop over the required names
# using bash indirect expansion.
for required in ExitCode Region StackName StackTag AutoScalingGroup; do
    if [ -z "${!required}" ]; then
        echo "Invalid $required"
        exit 1
    fi
done

# Further processing if all input parameters are valid
# Add your processing logic here
|
// Canonical list of HTTP request header names, covering standard headers
// plus common non-standard/vendor ones (X-*, DNT, Front-End-Https, ...).
export const httpHeaders = [
'Accept',
'Accept-Charset',
'Accept-Encoding',
'Accept-Language',
'Accept-Datetime',
'Authorization',
'Cache-Control',
'Connection',
'Cookie',
'Content-Length',
'Content-MD5',
'Content-Type',
'Date', 'Expect',
'Forwarded',
'From',
'Host',
'If-Match',
'If-Modified-Since',
'If-None-Match',
'If-Range',
'If-Unmodified-Since',
'Max-Forwards',
'Origin',
'Pragma',
'Proxy-Authorization',
'Range',
'Referer',
'TE',
'User-Agent',
'Upgrade',
'Via',
'Warning',
'X-Requested-With',
'DNT',
'X-Forwarded-For',
'X-Forwarded-Host',
'X-Forwarded-Proto',
'Front-End-Https',
'X-Http-Method-Override',
'X-ATT-DeviceId',
'X-Wap-Profile',
'Proxy-Connection',
'X-UIDH',
'X-Csrf-Token',
'X-Request-ID'
];
|
<gh_stars>0
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_looks_6_twotone = void 0;
// Icon descriptor (SVG viewBox plus path children) consumed by an icon
// renderer; the three paths are: transparent bounding box, the 30%-opacity
// two-tone fill, and the solid outline.
var ic_looks_6_twotone = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M0 0h24v24H0V0z",
      "fill": "none"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M11 13h2v2h-2zm8-8H5v14h14V5zm-4 4h-4v2h2c1.1 0 2 .89 2 2v2c0 1.11-.9 2-2 2h-2c-1.1 0-2-.89-2-2V9c0-1.11.9-2 2-2h4v2z",
      "opacity": ".3"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M9 9v6c0 1.11.9 2 2 2h2c1.1 0 2-.89 2-2v-2c0-1.11-.9-2-2-2h-2V9h4V7h-4c-1.1 0-2 .89-2 2zm4 4v2h-2v-2h2zm-8 8h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2zM5 5h14v14H5V5z"
    },
    "children": []
  }]
};
exports.ic_looks_6_twotone = ic_looks_6_twotone;
|
Feature('Fold / Unfold');
// E2E scenario: create three nested tasks (each new one added while the
// previous is selected), fold the two deeper levels via their XPath-addressed
// fold buttons, then unfold level by level with the "펼치기" (expand) button,
// asserting visibility after each step.
Scenario('can fold/unfold tasks', ({ I }) => {
I.amOnPage('http://localhost:8080');
I.fillField('할 일', '첫 번째 할 일');
I.click('추가');
I.click('첫 번째 할 일');
I.fillField('할 일', '두 번째 할 일');
I.click('추가');
I.click('두 번째 할 일');
I.fillField('할 일', '세 번째 할 일');
I.click('추가');
// Fold the innermost task, then its parent.
I.click('//*[@id="app"]/div/div[1]/ul/li/ul/li/button[2]');
I.dontSee('세 번째 할 일');
I.click('//*[@id="app"]/div/div[1]/ul/li/button[2]');
I.dontSee('두 번째 할 일');
// Expand one level at a time and verify only that level reappears.
I.click('펼치기');
I.see('두 번째 할 일');
I.dontSee('세 번째 할 일');
I.click('펼치기');
I.see('세 번째 할 일');
});
|
// Pomodoro timer UI with optional WebSocket synchronization across clients.
// All state lives inside this load-listener closure.
window.addEventListener('load', () => {
  // Timer durations (seconds) mapped to their clickable circle elements.
  const timerCircles = {
    300: document.querySelector('.ShortBreak'),
    900: document.querySelector('.LongBreak'),
    1500: document.querySelector('.Pomodoro'),
  };
  let timer = 0;            // seconds remaining on the running countdown
  let current;              // duration key of the active timer (300/900/1500) or null
  let auto = false;         // auto mode: chain pomodoros and breaks automatically
  let autoCounter = -1;     // position in the auto-mode cycle (-1..7)
  let interval;             // setInterval handle; falsy when paused/stopped
  let blockConnect = false; // guards against concurrent connect attempts
  let websocket = null;     // server connection for multi-client sync, if any
  const audioPlayer = document.createElement('audio');
  if (audioPlayer.canPlayType('audio/mpeg')) {
    audioPlayer.setAttribute('src','ding.mp3');
  }
  // Pre-fill the server URL from the ?s= query parameter, if present.
  const urlParams = new URLSearchParams(window.location.search);
  const wsUrl = urlParams.get('s');
  if (wsUrl) {
    document.querySelector('.ServerInfoInput').value = decodeURI(wsUrl);
  }
  // Restore every circle to its idle label and clear the active timer state.
  const resetTimers = () => {
    document.querySelectorAll('.active').forEach(el => {
      el.classList.remove('active');
    });
    timerCircles[300].innerHTML = '05:00';
    timerCircles[900].innerHTML = '15:00';
    timerCircles[1500].innerHTML = '25:00';
    current = null;
    timer = 0;
  };
  // Start a fresh countdown of `time` seconds and mark its circle active.
  const setupTimer = time => {
    resetTimers();
    timerCircles[time].classList.add('active');
    current = time;
    timer = time;
    clearInterval(interval);
    interval = setInterval(() => {
      updateTimer();
    }, 1000);
    // addLog('Started ' + (time / 60) + ' Minute Timer');
    document.querySelector('.PauseBox').innerHTML = '<i class="fas fa-pause"></i>';
  }
  // Adopt an in-progress timer pushed by the server (used on reconnect).
  const startInProgress = (newTimer, newCurrent, newAuto, newAutoCounter, paused) => {
    pause();
    resetTimers();
    timer = newTimer;
    current = newCurrent;
    auto = newAuto;
    autoCounter = newAutoCounter;
    timerCircles[current].classList.add('active');
    const s = timer % 60;
    const m = (timer - s) / 60;
    const formatS = ('0' + s).slice(-2);
    const formatM = ('0' + m).slice(-2);
    timerCircles[current].innerHTML = formatM + ':' + formatS;
    // NOTE: `paused` here means "the server says the timer is running",
    // so we unpause — TODO confirm naming against the server protocol.
    if (paused) {
      unpause();
    }
    if (auto) {
      document.querySelector('.AutoMode').innerHTML = 'Auto Mode On';
    } else {
      document.querySelector('.AutoMode').innerHTML = 'Auto Mode Off';
    }
  }
  /* const addLog = message => {
    const now = new Date();
    const logItem = document.createElement('div');
    logItem.classList.add('LogItem');
    const time = document.createElement('span');
    const text = document.createElement('span');
    text.innerHTML = message;
    time.innerHTML = ('0' + now.getHours()).slice(-2) + ':' + ('0' + now.getMinutes()).slice(-2);
    logItem.appendChild(time);
    logItem.appendChild(text);
    document.querySelector('.Log').appendChild(logItem);
  } */
  // Ticks once per second: updates the display and, on expiry, rings the
  // bell and (in local auto mode) advances to the next phase of the cycle.
  const updateTimer = () => {
    timer -= 1;
    const s = timer % 60;
    const m = (timer - s) / 60;
    const formatS = ('0' + s).slice(-2);
    const formatM = ('0' + m).slice(-2);
    timerCircles[current].innerHTML = formatM + ':' + formatS;
    if (timer === 0) {
      audioPlayer.currentTime = 0;
      audioPlayer.play();
      // addLog('Completed Timer');
      // When connected, the server drives the auto cycle; only advance locally.
      if (auto && !websocket) {
        clearInterval(interval);
        let nextDuration;
        // Cycle: pomodoro, short break (x3), then a long break, then repeat.
        switch (autoCounter) {
          case 0:
          case 2:
          case 4:
            nextDuration = 300;
            break;
          case -1:
          case 1:
          case 3:
          case 5:
            nextDuration = 1500;
            break;
          case 7:
            nextDuration = 1500;
            break;
          case 6:
            nextDuration = 900;
            break;
        }
        if (autoCounter === 7) {
          autoCounter = 0;
        } else {
          autoCounter++;
        }
        setupTimer(nextDuration);
      } else {
        pause();
        resetTimers();
      }
    }
  }
  // Stop ticking (idempotent) and show the play glyph.
  const pause = () => {
    if (interval) {
      document.querySelector('.PauseBox').innerHTML = '<i class="fas fa-play"></i>';
      // addLog('Paused');
      clearInterval(interval);
      interval = null;
    }
  }
  // Resume ticking (idempotent) and show the pause glyph.
  const unpause = () => {
    if (!interval) {
      document.querySelector('.PauseBox').innerHTML = '<i class="fas fa-pause"></i>';
      // addLog('Resumed');
      interval = setInterval(() => {
        updateTimer();
      }, 1000);
    }
  }
  const enableAuto = startAtPomodoro => {
    auto = true;
    // addLog('Auto Mode Turned On');
    autoCounter = startAtPomodoro ? 0 : -1;
    document.querySelector('.AutoMode').innerHTML = 'Auto Mode On';
  }
  const disableAuto = () => {
    auto = false;
    // addLog('Auto Mode Turned Off');
    document.querySelector('.AutoMode').innerHTML = 'Auto Mode Off';
  }
  // Callers must ensure `websocket` is non-null before calling.
  const sendWSMessage = data => {
    websocket.send(JSON.stringify(data));
    console.log('Sending...');
  }
  // Set the status color of `element` to one of Yellow/Red/Green ('' clears).
  const color = (newColor, element) => {
    element.classList.remove('Yellow');
    element.classList.remove('Red');
    element.classList.remove('Green');
    if (newColor) element.classList.add(newColor);
  }
  // Render a QR code of the current URL (incl. ?s=) so other devices can join.
  const makeQR = () => {
    document.querySelector('.QRCode').innerHTML = '';
    let minUnit = Math.min(window.innerHeight, window.innerWidth);
    let qrCode = new QRCode(document.querySelector('.QRCode'), {
      width: Math.max(250, 0.35*minUnit),
      height: Math.max(250, 0.35*minUnit),
      colorDark : "#222222",
      colorLight : "#eeeeee",
      correctLevel : QRCode.CorrectLevel.H
    });
    qrCode.makeCode(window.location.toString());
  }
  // Clicking a circle starts that timer manually and leaves auto mode.
  document.querySelector('.Pomodoro').addEventListener('click', e => {
    disableAuto();
    if (websocket) {
      sendWSMessage({ type: 'autoMode', data: { autoMode: false } });
      sendWSMessage({ type: 'startTimer', data: { timer: 1500 } });
    }
    setupTimer(1500);
  });
  document.querySelector('.ShortBreak').addEventListener('click', e => {
    disableAuto();
    if (websocket) {
      sendWSMessage({ type: 'autoMode', data: { autoMode: false } });
      sendWSMessage({ type: 'startTimer', data: { timer: 300 } });
    }
    setupTimer(300);
  });
  document.querySelector('.LongBreak').addEventListener('click', e => {
    disableAuto();
    if (websocket) {
      sendWSMessage({ type: 'autoMode', data: { autoMode: false } });
      sendWSMessage({ type: 'startTimer', data: { timer: 900 } });
    }
    setupTimer(900);
  });
  // Play/pause button: toggles the running timer, or starts an auto-mode
  // pomodoro when nothing is running.
  document.querySelector('.PauseBox').addEventListener('click', e => {
    if (interval) {
      pause();
      // BUGFIX: only notify the server when connected — the unguarded
      // sendWSMessage call threw a TypeError whenever websocket was null
      // (the keyboard handler below already had this guard).
      if (websocket) {
        sendWSMessage({ type: 'pause' });
      }
    } else {
      if (!current) {
        setupTimer(1500);
        enableAuto(true);
        document.querySelector('.AutoMode').innerHTML = 'Auto Mode On';
        if (websocket) {
          sendWSMessage({ type: 'autoMode', data: { autoMode: true } });
          sendWSMessage({ type: 'startTimer', data: { timer: 1500 } });
        }
      } else {
        unpause();
        if (websocket) {
          sendWSMessage({ type: 'resume' });
        }
      }
    }
  });
  // Space bar mirrors the play/pause button.
  window.addEventListener('keypress', e => {
    if (e.which === 32) {
      if (interval) {
        pause();
        if (websocket) {
          sendWSMessage({ type: 'pause' });
        }
      } else {
        if (!current) {
          setupTimer(1500);
          enableAuto(true);
          document.querySelector('.AutoMode').innerHTML = 'Auto Mode On';
          if (websocket) {
            sendWSMessage({ type: 'autoMode', data: { autoMode: true } });
            sendWSMessage({ type: 'startTimer', data: { timer: 1500 } });
          }
        } else {
          unpause();
          if (websocket) {
            sendWSMessage({ type: 'resume' });
          }
        }
      }
    }
  });
  // Toggle auto mode; starting mid-pomodoro keeps the current phase.
  document.querySelector('.AutoMode').addEventListener('click', e => {
    if (!auto) {
      if (current === 1500) {
        enableAuto(true);
      } else {
        enableAuto(false);
      }
      if (websocket) {
        sendWSMessage({ type: 'autoMode', data: { autoMode: true } });
      }
    } else {
      disableAuto();
      if (websocket) {
        sendWSMessage({ type: 'autoMode', data: { autoMode: false } });
      }
    }
  });
  // Modal "close" doubles as connect/disconnect: with a URL it connects,
  // without one it tears down any existing connection.
  document.querySelector('.ModalClose').addEventListener('click', () => {
    if (!blockConnect) {
      const clientInfo = document.querySelector('.ClientInfo');
      const status = document.querySelector('.ServerInfoStatus');
      const url = document.querySelector('.ServerInfoInput').value.trim();
      if (url) {
        blockConnect = true;
        status.innerHTML = 'Connecting...';
        color('Yellow', status);
        try {
          websocket = new WebSocket(url);
          websocket.onopen = event => {
            blockConnect = false;
            color('Green', status);
            status.innerHTML = 'Connected.';
            clientInfo.innerHTML = 'Connected to Server';
            color('Green', clientInfo);
            // Persist the server URL in the address bar for sharing/reload.
            urlParams.set('s', encodeURI(url));
            window.history.replaceState({}, '', '?' + urlParams.toString());
            makeQR();
            document.querySelector('.QRBox').classList.remove('is-hidden');
            setTimeout(() => {
              document.querySelector('.Modal').classList.add('is-inactive');
            }, 500);
          };
          websocket.onclose = event => {
            if (!event.wasClean) {
              console.log('Connection Failed:', event);
              blockConnect = false;
              color('Red', status);
              status.innerHTML = 'Connection Failed: ' + event.reason;
              color('Red', clientInfo);
              clientInfo.innerHTML = 'Connection Error';
            }
            document.querySelector('.QRBox').classList.add('is-hidden');
          };
          // Apply server-pushed commands to the local timer state.
          websocket.onmessage = event => {
            console.log('Message Recieved');
            const jsonData = JSON.parse(event.data);
            const type = jsonData.type;
            const data = jsonData.data;
            switch (type) {
              case 'pause':
                pause();
                break;
              case 'resume':
                unpause();
                break;
              case 'autoMode':
                if (data) {
                  if (data.autoMode) {
                    if (current === 1500) {
                      enableAuto(true);
                    } else {
                      enableAuto(false);
                    }
                  } else {
                    disableAuto();
                  }
                }
                break;
              case 'startTimer':
                if (data) {
                  setupTimer(data.timer);
                }
                break;
              case 'startInProgress':
                if (data) {
                  startInProgress(data.timer, data.current, data.auto, data.autoCounter, data.paused);
                }
                break;
              case 'resetTimers':
                resetTimers();
                break;
            }
          }
        } catch (e) {
          console.log('Websocket Create Failed:', e);
          blockConnect = false;
          color('Red', status);
          status.innerHTML = 'Connection Failed: ' + e.message;
        }
      } else {
        // Empty URL: disconnect and clear all connection UI state.
        if (websocket) {
          websocket.close();
        }
        websocket = null;
        clientInfo.innerHTML = '';
        color('', clientInfo);
        status.innerHTML = '';
        color('', status);
        document.querySelector('.Modal').classList.add('is-inactive');
      }
    }
  });
  document.querySelector('.ResetTimer').addEventListener('click', () => {
    pause();
    resetTimers();
    if (websocket) {
      sendWSMessage({ type: 'pause' });
      sendWSMessage({ type: 'resetTimers' });
    }
  });
  document.querySelector('.ModalBack').addEventListener('click', () => {
    document.querySelector('.Modal').classList.add('is-inactive');
  });
  document.querySelector('.Info').addEventListener('click', () => {
    document.querySelector('.Modal').classList.remove('is-inactive');
  });
  // Re-render the QR code at the new size when the window is resized.
  window.addEventListener('resize', makeQR);
});
|
<filename>src/test/java/es/upm/etsisi/cf4j/recommender/matrixFactorization/BeMFTest.java
package es.upm.etsisi.cf4j.recommender.matrixFactorization;
import es.upm.etsisi.cf4j.data.DataModel;
import es.upm.etsisi.cf4j.data.MockDataSet;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Regression test for the {@code BeMF} matrix-factorization recommender.
 *
 * <p>Trains a tiny model on {@code MockDataSet} with a fixed seed and pins
 * the exact prediction values plus the hyper-parameter getters.
 */
class BeMFTest {

  private static final int seed = 43;
  private static final int numFactors = 2;
  private static final int numIters = 1;

  private static final double learningRate = 0.01;
  private static final double regularization = 0.08;
  private static final double[] ratings = {1, 2, 3, 4, 5};

  private static final int testUserId = 1;
  private static final int testItemId = 1;

  private static DataModel datamodel;

  @BeforeAll
  static void initAll() {
    datamodel = new DataModel(new MockDataSet());
  }

  @Test
  void bemfTest() {
    BeMF bemf = new BeMF(datamodel, numFactors, numIters, learningRate, regularization, ratings, seed);
    bemf.fit();

    // Exact values are deterministic because the seed is fixed.
    assertEquals(3.0, bemf.predict(testUserId, testItemId));
    assertEquals(0.22521728438775332, bemf.predictProba(testUserId, testItemId));

    assertEquals(numFactors, bemf.getNumFactors());
    assertEquals(numIters, bemf.getNumIters());
    assertEquals(learningRate, bemf.getLearningRate());
    assertEquals(regularization, bemf.getRegularization());
    // BUGFIX: assertEquals on arrays only checks reference equality;
    // assertArrayEquals compares element-by-element, so the test still
    // passes (correctly) if BeMF returns a defensive copy of the ratings.
    assertArrayEquals(ratings, bemf.getRatings());
  }
}
|
import React, {Component} from "react";
import {DateTime} from "luxon";
/**
 * Static date/time formatting helpers (German locale, Europe/Berlin zone).
 * NOTE(review): extending React.Component is unnecessary for a pure static
 * utility class, but is kept to avoid changing the public interface.
 */
export default class DateHelper extends Component {

    /**
     * Difference between two timestamps in whole minutes.
     * @param start start time (epoch milliseconds or anything numeric-subtractable)
     * @param end end time (same unit as start)
     * @returns rounded minute difference, or null when either bound is missing
     */
    static getDateDifferenceInMinutes(start, end) {
        if (start != null && end != null) {
            const diffMilliseconds = end - start;
            const diffSeconds = diffMilliseconds / 1000;
            // Rounding avoids float artifacts such as 119.9933 minutes.
            return Math.round(diffSeconds / 60);
        } else {
            return null;
        }
    }

    /**
     * Formats a date string as a long German date (e.g. "1. Januar 2020")
     * in the Europe/Berlin time zone.
     */
    static formatDateToGermanString(dateString) {
        const javaDate = new Date(dateString);
        const date = DateTime.fromJSDate(javaDate, { zone: "Europe/Berlin" });
        const format = {month: 'long', day: 'numeric', year: 'numeric'};
        return date.setLocale('de').toLocaleString(format);
    }

    /**
     * Formats a date string as 24-hour "HH:mm" in the Europe/Berlin zone.
     * (Leftover console.log debugging removed.)
     */
    static formatDateToHHMMString(dateString) {
        const javaDate = new Date(dateString);
        const date = DateTime.fromJSDate(javaDate, { zone: "Europe/Berlin" });
        // HH is the 24-hour clock, mm is minutes — MM would be the month!
        return date.toFormat('HH:mm');
    }

    /**
     * Localized weekday name for a weekday index (0 = Sunday).
     */
    static getWeekdaynameByNumber(weekdayNumber) {
        // Map 0..6 onto days 1..7 of February 1970: Feb 1, 1970 was a Sunday.
        weekdayNumber = weekdayNumber % 7 + 1;
        const februar = 1; // JS Date months are 0-based, so 1 = February
        const date = new Date(1970, februar, weekdayNumber);
        return DateHelper.getWeekdaynameByDate(date);
    }

    /** Localized full weekday name of a Date. */
    static getWeekdaynameByDate(date) {
        return date.toLocaleDateString("default", { weekday: 'long' });
    }

    /** Localized month name for a 0-based month index. */
    static getMonthnameByNumber(monthNumber) {
        const date = new Date(1970, monthNumber, 1);
        return DateHelper.getMonthnameByDate(date);
    }

    /** Localized full month name of a Date. */
    static getMonthnameByDate(date) {
        return date.toLocaleString("default", { month: "long" });
    }
}
|
<reponame>MccreeFei/jframe<gh_stars>10-100
package jframe.elasticsearch.service;
/**
* @author dzh
* @date May 17, 2018 1:40:04 PM
* @version 0.0.1
*/
public class TestTransportClient {
    // NOTE(review): empty placeholder — no TransportClient test methods have
    // been implemented yet; either add tests or remove this stub.
}
|
# Words accepted when no custom dictionary is supplied. Covers the demo
# sentence below; extend as needed for real use.
DEFAULT_DICTIONARY = frozenset([
    "The", "the", "quick", "brown", "fox", "jumps", "over", "lazy", "dog",
])


def spellchecker(paragraph, dictionary=None):
    """Print and return the words of ``paragraph`` not found in ``dictionary``.

    Splits on whitespace only: punctuation is not stripped and matching is
    case-sensitive.

    Args:
        paragraph: Text to spell-check.
        dictionary: Optional collection of known-good words; defaults to
            ``DEFAULT_DICTIONARY``. (BUGFIX: the original referenced an
            undefined global ``dictionary`` and raised NameError when called.)

    Returns:
        The misspelled words, in order of appearance.
    """
    if dictionary is None:
        dictionary = DEFAULT_DICTIONARY
    # Split the paragraph into words
    words = paragraph.split()
    # Comparing each word with our dictionary
    misspelled = [word for word in words if word not in dictionary]
    for word in misspelled:
        print("Spelling mistake:", word)
    return misspelled


# Call the function
spellchecker("The quick brpwn fox jumps over the lzy dog")
# Output
# Spelling mistake: brpwn
# Spelling mistake: lzy
|
# Remove LaTeX build artifacts from the current directory.
# BUGFIX: -f suppresses the error (and nonzero exit) the original produced
# for every extension with no matching files; a single rm invocation also
# replaces ten separate processes.
rm -f *.pdf *.gz *.bcf *.blg *.toc *.xml *.out *.log *.bbl *.aux
|
/*
* Copyright (c) 2015, EURECOM (www.eurecom.fr)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those
* of the authors and should not be interpreted as representing official policies,
* either expressed or implied, of the FreeBSD Project.
*/
#ifndef FILE_3GPP_24_301_SEEN
#define FILE_3GPP_24_301_SEEN

#include <stdint.h>
#include <stdbool.h>

/* NOTE(review): the 0b... binary literals used throughout are a GCC/Clang
 * extension (standardized only in C23) — confirm the supported toolchains. */

//==============================================================================
// 9 General message format and information elements coding
//==============================================================================

//------------------------------------------------------------------------------
// 9.2 Protocol discriminator
//------------------------------------------------------------------------------

// 9.3.1 Security header type
#define SECURITY_HEADER_TYPE_NOT_PROTECTED                    0b0000
#define SECURITY_HEADER_TYPE_INTEGRITY_PROTECTED              0b0001
#define SECURITY_HEADER_TYPE_INTEGRITY_PROTECTED_CYPHERED     0b0010
#define SECURITY_HEADER_TYPE_INTEGRITY_PROTECTED_NEW          0b0011
#define SECURITY_HEADER_TYPE_INTEGRITY_PROTECTED_CYPHERED_NEW 0b0100
#define SECURITY_HEADER_TYPE_SERVICE_REQUEST                  0b1100
#define SECURITY_HEADER_TYPE_RESERVED1                        0b1101
#define SECURITY_HEADER_TYPE_RESERVED2                        0b1110
#define SECURITY_HEADER_TYPE_RESERVED3                        0b1111

// 9.3.2 EPS bearer identity
// see 24.007

//------------------------------------------------------------------------------
// 9.8 Message type
//------------------------------------------------------------------------------
// Table 9.8.1: Message types for EPS mobility management
/* Message identifiers for EPS Mobility Management */
# define ATTACH_REQUEST                                  0b01000001 /* 65 = 0x41 */
# define ATTACH_ACCEPT                                   0b01000010 /* 66 = 0x42 */
# define ATTACH_COMPLETE                                 0b01000011 /* 67 = 0x43 */
# define ATTACH_REJECT                                   0b01000100 /* 68 = 0x44 */
# define DETACH_REQUEST                                  0b01000101 /* 69 = 0x45 */
# define DETACH_ACCEPT                                   0b01000110 /* 70 = 0x46 */
# define TRACKING_AREA_UPDATE_REQUEST                    0b01001000 /* 72 = 0x48 */
# define TRACKING_AREA_UPDATE_ACCEPT                     0b01001001 /* 73 = 0x49 */
# define TRACKING_AREA_UPDATE_COMPLETE                   0b01001010 /* 74 = 0x4a */
# define TRACKING_AREA_UPDATE_REJECT                     0b01001011 /* 75 = 0x4b */
# define EXTENDED_SERVICE_REQUEST                        0b01001100 /* 76 = 0x4c */
# define SERVICE_REJECT                                  0b01001110 /* 78 = 0x4e */
# define GUTI_REALLOCATION_COMMAND                       0b01010000 /* 80 = 0x50 */
# define GUTI_REALLOCATION_COMPLETE                      0b01010001 /* 81 = 0x51 */
# define AUTHENTICATION_REQUEST                          0b01010010 /* 82 = 0x52 */
# define AUTHENTICATION_RESPONSE                         0b01010011 /* 83 = 0x53 */
# define AUTHENTICATION_REJECT                           0b01010100 /* 84 = 0x54 */
# define AUTHENTICATION_FAILURE                          0b01011100 /* 92 = 0x5c */
# define IDENTITY_REQUEST                                0b01010101 /* 85 = 0x55 */
# define IDENTITY_RESPONSE                               0b01010110 /* 86 = 0x56 */
# define SECURITY_MODE_COMMAND                           0b01011101 /* 93 = 0x5d */
# define SECURITY_MODE_COMPLETE                          0b01011110 /* 94 = 0x5e */
# define SECURITY_MODE_REJECT                            0b01011111 /* 95 = 0x5f */
# define EMM_STATUS                                      0b01100000 /* 96 = 0x60 */
# define EMM_INFORMATION                                 0b01100001 /* 97 = 0x61 */
# define DOWNLINK_NAS_TRANSPORT                          0b01100010 /* 98 = 0x62 */
# define UPLINK_NAS_TRANSPORT                            0b01100011 /* 99 = 0x63 */
# define CS_SERVICE_NOTIFICATION                         0b01100100 /* 100 = 0x64 */
# define DOWNLINK_GENERIC_NAS_TRANSPORT                  0b01101000 /* 104 = 0x68 */
/* NOTE(review): 0b01101001 is 105 = 0x69, not 101 — original comment was wrong */
# define UPLINK_GENERIC_NAS_TRANSPORT                    0b01101001 /* 105 = 0x69 */

// Table 9.8.2: Message types for EPS session management
# define ACTIVATE_DEFAULT_EPS_BEARER_CONTEXT_REQUEST     0b11000001 /* 193 = 0xc1 */
# define ACTIVATE_DEFAULT_EPS_BEARER_CONTEXT_ACCEPT      0b11000010 /* 194 = 0xc2 */
# define ACTIVATE_DEFAULT_EPS_BEARER_CONTEXT_REJECT      0b11000011 /* 195 = 0xc3 */
# define ACTIVATE_DEDICATED_EPS_BEARER_CONTEXT_REQUEST   0b11000101 /* 197 = 0xc5 */
# define ACTIVATE_DEDICATED_EPS_BEARER_CONTEXT_ACCEPT    0b11000110 /* 198 = 0xc6 */
# define ACTIVATE_DEDICATED_EPS_BEARER_CONTEXT_REJECT    0b11000111 /* 199 = 0xc7 */
# define MODIFY_EPS_BEARER_CONTEXT_REQUEST               0b11001001 /* 201 = 0xc9 */
# define MODIFY_EPS_BEARER_CONTEXT_ACCEPT                0b11001010 /* 202 = 0xca */
# define MODIFY_EPS_BEARER_CONTEXT_REJECT                0b11001011 /* 203 = 0xcb */
# define DEACTIVATE_EPS_BEARER_CONTEXT_REQUEST           0b11001101 /* 205 = 0xcd */
# define DEACTIVATE_EPS_BEARER_CONTEXT_ACCEPT            0b11001110 /* 206 = 0xce */
# define PDN_CONNECTIVITY_REQUEST                        0b11010000 /* 208 = 0xd0 */
# define PDN_CONNECTIVITY_REJECT                         0b11010001 /* 209 = 0xd1 */
# define PDN_DISCONNECT_REQUEST                          0b11010010 /* 210 = 0xd2 */
# define PDN_DISCONNECT_REJECT                           0b11010011 /* 211 = 0xd3 */
# define BEARER_RESOURCE_ALLOCATION_REQUEST              0b11010100 /* 212 = 0xd4 */
# define BEARER_RESOURCE_ALLOCATION_REJECT               0b11010101 /* 213 = 0xd5 */
# define BEARER_RESOURCE_MODIFICATION_REQUEST            0b11010110 /* 214 = 0xd6 */
# define BEARER_RESOURCE_MODIFICATION_REJECT             0b11010111 /* 215 = 0xd7 */
# define ESM_INFORMATION_REQUEST                         0b11011001 /* 217 = 0xd9 */
# define ESM_INFORMATION_RESPONSE                        0b11011010 /* 218 = 0xda */
# define ESM_STATUS                                      0b11101000 /* 232 = 0xe8 */
//..............................................................................
//9.9.3 EPS Mobility Management (EMM) information elements
//..............................................................................
// UE network capability (section number was garbled in the original —
// "192.168.3.11"; this IE is 9.9.3.34 in 3GPP TS 24.301, TODO confirm)
#define UE_NETWORK_CAPABILITY_MINIMUM_LENGTH 4
#define UE_NETWORK_CAPABILITY_MAXIMUM_LENGTH 15

/* Decoded UE network capability IE. One field per octet of the encoded IE;
 * the *_present flags record which optional octets were actually received. */
typedef struct ue_network_capability_s {
  /* EPS encryption algorithms supported (octet 3) */
#define UE_NETWORK_CAPABILITY_EEA0 0b10000000
#define UE_NETWORK_CAPABILITY_EEA1 0b01000000
#define UE_NETWORK_CAPABILITY_EEA2 0b00100000
#define UE_NETWORK_CAPABILITY_EEA3 0b00010000
#define UE_NETWORK_CAPABILITY_EEA4 0b00001000
#define UE_NETWORK_CAPABILITY_EEA5 0b00000100
#define UE_NETWORK_CAPABILITY_EEA6 0b00000010
#define UE_NETWORK_CAPABILITY_EEA7 0b00000001
  uint8_t eea;  // bitmask of the EEA* flags above
  /* EPS integrity algorithms supported (octet 4) */
#define UE_NETWORK_CAPABILITY_EIA0 0b10000000
#define UE_NETWORK_CAPABILITY_EIA1 0b01000000
#define UE_NETWORK_CAPABILITY_EIA2 0b00100000
#define UE_NETWORK_CAPABILITY_EIA3 0b00010000
#define UE_NETWORK_CAPABILITY_EIA4 0b00001000
#define UE_NETWORK_CAPABILITY_EIA5 0b00000100
#define UE_NETWORK_CAPABILITY_EIA6 0b00000010
#define UE_NETWORK_CAPABILITY_EIA7 0b00000001
  uint8_t eia;  // bitmask of the EIA* flags above
  /* UMTS encryption algorithms supported (octet 5) */
#define UE_NETWORK_CAPABILITY_UEA0 0b10000000
#define UE_NETWORK_CAPABILITY_UEA1 0b01000000
#define UE_NETWORK_CAPABILITY_UEA2 0b00100000
#define UE_NETWORK_CAPABILITY_UEA3 0b00010000
#define UE_NETWORK_CAPABILITY_UEA4 0b00001000
#define UE_NETWORK_CAPABILITY_UEA5 0b00000100
#define UE_NETWORK_CAPABILITY_UEA6 0b00000010
#define UE_NETWORK_CAPABILITY_UEA7 0b00000001
  uint8_t uea;  // bitmask of the UEA* flags above
  /* UCS2 support (octet 6, bit 8) */
#define UE_NETWORK_CAPABILITY_DEFAULT_ALPHABET 0
#define UE_NETWORK_CAPABILITY_UCS2_ALPHABET    1
  uint8_t ucs2:1;
  /* UMTS integrity algorithms supported (octet 6) */
#define UE_NETWORK_CAPABILITY_UIA1 0b01000000
#define UE_NETWORK_CAPABILITY_UIA2 0b00100000
#define UE_NETWORK_CAPABILITY_UIA3 0b00010000
#define UE_NETWORK_CAPABILITY_UIA4 0b00001000
#define UE_NETWORK_CAPABILITY_UIA5 0b00000100
#define UE_NETWORK_CAPABILITY_UIA6 0b00000010
#define UE_NETWORK_CAPABILITY_UIA7 0b00000001
  uint8_t uia:7;
  /* Bits 8 to 6 of octet 7 are spare and shall be coded as zero */
  uint8_t spare:3;
  /* eNodeB-based access class control for CSFB capability */
#define UE_NETWORK_CAPABILITY_CSFB 1
  uint8_t csfb:1;
  /* LTE Positioning Protocol capability */
#define UE_NETWORK_CAPABILITY_LPP 1
  uint8_t lpp:1;
  /* Location services notification mechanisms capability */
#define UE_NETWORK_CAPABILITY_LCS 1
  uint8_t lcs:1;
  /* 1xSRVCC capability */
#define UE_NETWORK_CAPABILITY_SRVCC 1
  uint8_t srvcc:1;
  /* NF notification procedure capability */
#define UE_NETWORK_CAPABILITY_NF 1
  uint8_t nf:1;
  //uint8_t spare[0..8]
  bool umts_present;  // true when the optional UMTS octets (5..6) were decoded
  bool misc_present;  // true when the optional capability octet (7) was decoded
} ue_network_capability_t;
//9.9.3.36 UE security capability
#define UE_SECURITY_CAPABILITY_MINIMUM_LENGTH 4
#define UE_SECURITY_CAPABILITY_MAXIMUM_LENGTH 7

/* Decoded UE security capability IE. The *_present flags record whether the
 * optional UMTS (octets 5-6) and GPRS (octet 7) parts were received. */
typedef struct ue_security_capability_s {
  /* EPS encryption algorithms supported (octet 3) */
#define UE_SECURITY_CAPABILITY_EEA0 0b10000000
#define UE_SECURITY_CAPABILITY_EEA1 0b01000000
#define UE_SECURITY_CAPABILITY_EEA2 0b00100000
#define UE_SECURITY_CAPABILITY_EEA3 0b00010000
#define UE_SECURITY_CAPABILITY_EEA4 0b00001000
#define UE_SECURITY_CAPABILITY_EEA5 0b00000100
#define UE_SECURITY_CAPABILITY_EEA6 0b00000010
#define UE_SECURITY_CAPABILITY_EEA7 0b00000001
  uint8_t eea;  // bitmask of the EEA* flags above
  /* EPS integrity algorithms supported (octet 4) */
#define UE_SECURITY_CAPABILITY_EIA0 0b10000000
#define UE_SECURITY_CAPABILITY_EIA1 0b01000000
#define UE_SECURITY_CAPABILITY_EIA2 0b00100000
#define UE_SECURITY_CAPABILITY_EIA3 0b00010000
#define UE_SECURITY_CAPABILITY_EIA4 0b00001000
#define UE_SECURITY_CAPABILITY_EIA5 0b00000100
#define UE_SECURITY_CAPABILITY_EIA6 0b00000010
#define UE_SECURITY_CAPABILITY_EIA7 0b00000001
  uint8_t eia;  // bitmask of the EIA* flags above
  bool umts_present;  // optional UMTS algorithm octets were decoded
  bool gprs_present;  // optional GPRS algorithm octet was decoded
  /* UMTS encryption algorithms supported (octet 5) */
#define UE_SECURITY_CAPABILITY_UEA0 0b10000000
#define UE_SECURITY_CAPABILITY_UEA1 0b01000000
#define UE_SECURITY_CAPABILITY_UEA2 0b00100000
#define UE_SECURITY_CAPABILITY_UEA3 0b00010000
#define UE_SECURITY_CAPABILITY_UEA4 0b00001000
#define UE_SECURITY_CAPABILITY_UEA5 0b00000100
#define UE_SECURITY_CAPABILITY_UEA6 0b00000010
#define UE_SECURITY_CAPABILITY_UEA7 0b00000001
  uint8_t uea;  // bitmask of the UEA* flags above
  /* UMTS integrity algorithms supported (octet 6) */
#define UE_SECURITY_CAPABILITY_UIA1 0b01000000
#define UE_SECURITY_CAPABILITY_UIA2 0b00100000
#define UE_SECURITY_CAPABILITY_UIA3 0b00010000
#define UE_SECURITY_CAPABILITY_UIA4 0b00001000
#define UE_SECURITY_CAPABILITY_UIA5 0b00000100
#define UE_SECURITY_CAPABILITY_UIA6 0b00000010
#define UE_SECURITY_CAPABILITY_UIA7 0b00000001
  uint8_t uia:7;
  /* GPRS encryption algorithms supported (octet 7) */
#define UE_SECURITY_CAPABILITY_GEA1 0b01000000
#define UE_SECURITY_CAPABILITY_GEA2 0b00100000
#define UE_SECURITY_CAPABILITY_GEA3 0b00010000
#define UE_SECURITY_CAPABILITY_GEA4 0b00001000
#define UE_SECURITY_CAPABILITY_GEA5 0b00000100
#define UE_SECURITY_CAPABILITY_GEA6 0b00000010
#define UE_SECURITY_CAPABILITY_GEA7 0b00000001
  uint8_t gea:7;
} ue_security_capability_t;
//------------------------------------------------------------------------------
// 10.2 Timers of EPS mobility management
//------------------------------------------------------------------------------
// All timer values below are in seconds.
//..............................................................................
// Table 10.2.1: EPS mobility management timers – UE side
//..............................................................................
#define T3402_DEFAULT_VALUE 720
#define T3410_DEFAULT_VALUE 15
#define T3411_DEFAULT_VALUE 10
#define T3412_DEFAULT_VALUE 3240
#define T3416_DEFAULT_VALUE 30
#define T3417_DEFAULT_VALUE 5
#define T3417_EXT_DEFAULT_VALUE 10
#define T3420_DEFAULT_VALUE 15
#define T3421_DEFAULT_VALUE 15
#define T3423_DEFAULT_VALUE 0 // value provided by network
#define T3440_DEFAULT_VALUE 10
#define T3442_DEFAULT_VALUE 0 // value provided by network
//..............................................................................
// Table 10.2.2: EPS mobility management timers – network side
//..............................................................................
#define T3413_DEFAULT_VALUE 400 /* Network dependent */
#define T3422_DEFAULT_VALUE 6
#define T3450_DEFAULT_VALUE 6
#define T3460_DEFAULT_VALUE 6
#define T3470_DEFAULT_VALUE 6
//------------------------------------------------------------------------------
// 10.3 Timers of EPS session management
//------------------------------------------------------------------------------
//..............................................................................
// Table 10.3.1: EPS session management timers – UE side
//..............................................................................
#define T3480_DEFAULT_VALUE 8
#define T3481_DEFAULT_VALUE 8
#define T3482_DEFAULT_VALUE 8
#define T3492_DEFAULT_VALUE 6
//..............................................................................
// Table 10.3.2: EPS session management timers – network side
//..............................................................................
#define T3485_DEFAULT_VALUE 8
#define T3486_DEFAULT_VALUE 8
#define T3489_DEFAULT_VALUE 4
#define T3495_DEFAULT_VALUE 8
//==============================================================================
// Annex A (informative): Cause values for EPS mobility management
//==============================================================================
//------------------------------------------------------------------------------
// A.1 Causes related to UE identification
//------------------------------------------------------------------------------
#define EMM_CAUSE_IMSI_UNKNOWN_IN_HSS 2
#define EMM_CAUSE_ILLEGAL_UE 3
#define EMM_CAUSE_ILLEGAL_ME 6
#define EMM_CAUSE_UE_IDENTITY_CANT_BE_DERIVED_BY_NW 9
#define EMM_CAUSE_IMPLICITLY_DETACHED 10
//------------------------------------------------------------------------------
// A.2 Cause related to subscription options
//------------------------------------------------------------------------------
#define EMM_CAUSE_IMEI_NOT_ACCEPTED 5
#define EMM_CAUSE_EPS_NOT_ALLOWED 7
#define EMM_CAUSE_BOTH_NOT_ALLOWED 8
#define EMM_CAUSE_PLMN_NOT_ALLOWED 11
#define EMM_CAUSE_TA_NOT_ALLOWED 12
#define EMM_CAUSE_ROAMING_NOT_ALLOWED 13
#define EMM_CAUSE_EPS_NOT_ALLOWED_IN_PLMN 14
#define EMM_CAUSE_NO_SUITABLE_CELLS 15
#define EMM_CAUSE_CSG_NOT_AUTHORIZED 25
#define EMM_CAUSE_NOT_AUTHORIZED_IN_PLMN 35
#define EMM_CAUSE_NO_EPS_BEARER_CTX_ACTIVE 40
//------------------------------------------------------------------------------
// A.3 Causes related to PLMN specific network failures and congestion/authentication failures
//------------------------------------------------------------------------------
#define EMM_CAUSE_MSC_NOT_REACHABLE 16
#define EMM_CAUSE_NETWORK_FAILURE 17
#define EMM_CAUSE_CS_DOMAIN_NOT_AVAILABLE 18
#define EMM_CAUSE_ESM_FAILURE 19
#define EMM_CAUSE_MAC_FAILURE 20
#define EMM_CAUSE_SYNCH_FAILURE 21
#define EMM_CAUSE_CONGESTION 22
#define EMM_CAUSE_UE_SECURITY_MISMATCH 23
#define EMM_CAUSE_SECURITY_MODE_REJECTED 24
#define EMM_CAUSE_NON_EPS_AUTH_UNACCEPTABLE 26
#define EMM_CAUSE_CS_SERVICE_NOT_AVAILABLE 39
//------------------------------------------------------------------------------
// A.4 Causes related to nature of request
//------------------------------------------------------------------------------
// NOTE: This subclause has no entries in this version of the specification
//------------------------------------------------------------------------------
// A.5 Causes related to invalid messages
//------------------------------------------------------------------------------
#define EMM_CAUSE_SEMANTICALLY_INCORRECT 95
#define EMM_CAUSE_INVALID_MANDATORY_INFO 96
#define EMM_CAUSE_MESSAGE_TYPE_NOT_IMPLEMENTED 97
#define EMM_CAUSE_MESSAGE_TYPE_NOT_COMPATIBLE 98
#define EMM_CAUSE_IE_NOT_IMPLEMENTED 99
#define EMM_CAUSE_CONDITIONAL_IE_ERROR 100
#define EMM_CAUSE_MESSAGE_NOT_COMPATIBLE 101
#define EMM_CAUSE_PROTOCOL_ERROR 111
//==============================================================================
// Annex B (informative): Cause values for EPS session management
//==============================================================================
//------------------------------------------------------------------------------
// B.1 Causes related to nature of request
//------------------------------------------------------------------------------
#define ESM_CAUSE_OPERATOR_DETERMINED_BARRING 8
#define ESM_CAUSE_INSUFFICIENT_RESOURCES 26
#define ESM_CAUSE_UNKNOWN_ACCESS_POINT_NAME 27
#define ESM_CAUSE_UNKNOWN_PDN_TYPE 28
#define ESM_CAUSE_USER_AUTHENTICATION_FAILED 29
#define ESM_CAUSE_REQUEST_REJECTED_BY_GW 30
#define ESM_CAUSE_REQUEST_REJECTED_UNSPECIFIED 31
#define ESM_CAUSE_SERVICE_OPTION_NOT_SUPPORTED 32
#define ESM_CAUSE_REQUESTED_SERVICE_OPTION_NOT_SUBSCRIBED 33
#define ESM_CAUSE_SERVICE_OPTION_TEMPORARILY_OUT_OF_ORDER 34
#define ESM_CAUSE_PTI_ALREADY_IN_USE 35
#define ESM_CAUSE_REGULAR_DEACTIVATION 36
#define ESM_CAUSE_EPS_QOS_NOT_ACCEPTED 37
#define ESM_CAUSE_NETWORK_FAILURE 38
#define ESM_CAUSE_REACTIVATION_REQUESTED 39
#define ESM_CAUSE_SEMANTIC_ERROR_IN_THE_TFT_OPERATION 41
#define ESM_CAUSE_SYNTACTICAL_ERROR_IN_THE_TFT_OPERATION 42
#define ESM_CAUSE_INVALID_EPS_BEARER_IDENTITY 43
#define ESM_CAUSE_SEMANTIC_ERRORS_IN_PACKET_FILTER 44
#define ESM_CAUSE_SYNTACTICAL_ERROR_IN_PACKET_FILTER 45
#define ESM_CAUSE_PTI_MISMATCH 47
#define ESM_CAUSE_LAST_PDN_DISCONNECTION_NOT_ALLOWED 49
#define ESM_CAUSE_PDN_TYPE_IPV4_ONLY_ALLOWED 50
#define ESM_CAUSE_PDN_TYPE_IPV6_ONLY_ALLOWED 51
#define ESM_CAUSE_SINGLE_ADDRESS_BEARERS_ONLY_ALLOWED 52
#define ESM_CAUSE_ESM_INFORMATION_NOT_RECEIVED 53
#define ESM_CAUSE_PDN_CONNECTION_DOES_NOT_EXIST 54
#define ESM_CAUSE_MULTIPLE_PDN_CONNECTIONS_NOT_ALLOWED 55
#define ESM_CAUSE_COLLISION_WITH_NETWORK_INITIATED_REQUEST 56
#define ESM_CAUSE_UNSUPPORTED_QCI_VALUE 59
#define ESM_CAUSE_BEARER_HANDLING_NOT_SUPPORTED 60
#define ESM_CAUSE_INVALID_PTI_VALUE 81
#define ESM_CAUSE_APN_RESTRICTION_VALUE_NOT_COMPATIBLE 112
//------------------------------------------------------------------------------
// B.2 Protocol errors (e.g., unknown message) class
//------------------------------------------------------------------------------
#define ESM_CAUSE_SEMANTICALLY_INCORRECT 95
#define ESM_CAUSE_INVALID_MANDATORY_INFO 96
#define ESM_CAUSE_MESSAGE_TYPE_NOT_IMPLEMENTED 97
#define ESM_CAUSE_MESSAGE_TYPE_NOT_COMPATIBLE 98
#define ESM_CAUSE_IE_NOT_IMPLEMENTED 99
#define ESM_CAUSE_CONDITIONAL_IE_ERROR 100
#define ESM_CAUSE_MESSAGE_NOT_COMPATIBLE 101
#define ESM_CAUSE_PROTOCOL_ERROR 111

#endif /* FILE_3GPP_24_301_SEEN */
|
#!/bin/bash
# Registers a cron job that runs cron_script.py every 5 minutes.
# Fixes over the original:
#  - resolve the script directory robustly (original "$PWD/$0" breaks when the
#    script is invoked via an absolute path, and dirname was unquoted);
#  - idempotent: re-running no longer appends a duplicate crontab entry.
set -euo pipefail

prefix="$(cd "$(dirname "$0")" && pwd)"
script_path="python $prefix/scripts/cron_script.py $prefix/db_chauffage.db >> $prefix/logs/cron.log"

# Drop any previous copy of this exact entry, then append the fresh one.
( crontab -l 2>/dev/null | grep -vF "$script_path" || true; \
  echo "*/5 * * * * $script_path" ) | crontab -
|
#!/usr/bin/env zsh
#
# Zero to Hero <zerotohero.dev>
# stay up to date, be curious: learn
#
# Renders syntax-highlighted output for the "valid parentheses" warm-up
# sources into $FIZZ_HTML_GEN_ROOT/dist via the shared pygmentize helper.

problem="warm-up/020-valid-parentheses"
out_dir="$FIZZ_HTML_GEN_ROOT/dist/$problem"

mkdir -p "$out_dir"

sources=("$problem/doc.go" "$problem/impl.go" "$problem/main.go")
"$FIZZ_HTML_GEN_ROOT/hack/colorize/pygmentize.sh" "${sources[@]}"
|
import type { Address, AddressPOJO, ContactInfo, ContactInfoPOJO, Identifiers, IdentifiersPOJO, Note, NotePOJO, TimeRange, TimeRangePOJO, UUID } from "../../common";
import type { PickupCancellationReason } from "../enums";
import type { PickupService, PickupServiceIdentifierPOJO } from "./pickup-service";
import type { PickupShipment, PickupShipmentPOJO } from "./pickup-shipment";
/**
 * Cancellation of a previously-scheduled package pickup
 * (plain-object form, as supplied by the caller).
 */
export interface PickupCancellationPOJO {
  /**
   * The unique ID of this cancellation. This ID is used to correlate cancellations with outcomes.
   */
  cancellationID: UUID;
  /**
   * The unique ID of the pickup to be cancelled
   */
  id: string;
  /**
   * Your own identifiers for this pickup
   */
  identifiers?: IdentifiersPOJO;
  /**
   * The requested pickup service
   */
  pickupService: PickupServiceIdentifierPOJO;
  /**
   * The reason for the cancellation
   */
  reason: PickupCancellationReason;
  /**
   * Human-readable information about why the customer is cancelling the pickup.
   * Accepts a single string, or a mixed array of strings and note objects.
   */
  notes?: string | ReadonlyArray<string | NotePOJO>;
  /**
   * The address where the pickup was requested
   */
  address: AddressPOJO;
  /**
   * The contact information of the person who scheduled/cancelled the pickup
   */
  contact: ContactInfoPOJO;
  /**
   * A list of dates and times when the carrier intended to pickup
   */
  timeWindows: ReadonlyArray<TimeRangePOJO>;
  /**
   * The shipments to be picked up
   */
  shipments: ReadonlyArray<PickupShipmentPOJO>;
  /**
   * Arbitrary data about this pickup that was previously persisted by the ShipEngine Platform.
   */
  metadata?: object;
}
/**
 * Cancellation of a previously-scheduled package pickup
 * (normalized read-only form; optional POJO fields are resolved to concrete values).
 */
export interface PickupCancellation {
  /**
   * The unique ID of this cancellation. This ID is used to correlate cancellations with outcomes.
   */
  readonly cancellationID: UUID;
  /**
   * The unique ID of the pickup to be cancelled
   */
  readonly id: string;
  /**
   * Your own identifiers for this pickup
   */
  readonly identifiers: Identifiers;
  /**
   * The requested pickup service
   */
  readonly pickupService: PickupService;
  /**
   * The reason for the cancellation
   */
  readonly reason: PickupCancellationReason;
  /**
   * Information about why the customer is cancelling the pickup
   */
  readonly notes: ReadonlyArray<Note>;
  /**
   * The address where the pickup was requested
   */
  readonly address: Address;
  /**
   * The contact information of the person who scheduled/cancelled the pickup
   */
  readonly contact: ContactInfo;
  /**
   * A list of dates and times when the carrier intended to pickup
   */
  readonly timeWindows: ReadonlyArray<TimeRange>;
  /**
   * The shipments to be picked up
   */
  readonly shipments: ReadonlyArray<PickupShipment>;
  /**
   * Arbitrary data about this pickup that was previously persisted by the ShipEngine Platform.
   */
  readonly metadata: object;
}
|
class SignalProperty:
    """Display properties of a single fluorescent signal."""

    def __init__(self, color):
        # Colour used when rendering this signal.
        self.color = color


class FluorescentConfig:
    """Configuration of the fluorescence reference and per-protein signal colours."""

    def __init__(self):
        # Default reference protein.
        self.reference = "mRFP1"
        # Maps protein name -> SignalProperty.
        self.signal_properties = {}

    def set_reference(self, reference):
        """Override the reference protein."""
        self.reference = reference

    def add_signal_property(self, protein, color):
        """Register the colour for ``protein``, updating it in place if known."""
        existing = self.signal_properties.get(protein)
        if existing is None:
            self.signal_properties[protein] = SignalProperty(color)
        else:
            existing.color = color
|
import express from 'express';
import http from 'http';
import passport from 'passport';
import { PORT } from './config'
import db from './db';
import router from './routes';
import middlewares from './middlewares';
import passportConfig from './auth';
import socket from './socketIO';
const app = express();
const server = http.Server(app);
// Connect the database (implementation in ./db); done first so routes can use it.
db(app);
// Install shared middleware (body parsing, cookies, etc. — see ./middlewares).
middlewares(app);
// Configure passport strategies (see ./auth).
passportConfig(passport)
// Routes — health-check root plus the application router.
app.get('/', (req, res) => {
    res.send('Hello world !');
})
app.use(router);
// Attach socket.io to the HTTP server (see ./socketIO).
socket(server);
// Start accepting connections.
server.listen(PORT, () => {
    console.log(`nore-server running on port ${server.address().port} ...`);
})
// Log (instead of crashing on) unhandled promise rejections.
process.on('unhandledRejection', (reason, promise) => {
    console.error('Unhandled Rejection at:', promise);
    console.error('Reason:', reason);
});
|
#!/bin/bash -l
# SGE array job: download each SRA run, quantify it with nf-core/rnaseq, and
# keep only the merged gene-count table as <SRRID>.tsv.gz.
#$ -e /dev/null
#$ -o /dev/null
#$ -P tuberculosis
#$ -pe omp 16
#$ -t 1-18
module load sratoolkit/2.10.5
module load nextflow/19.10.0
# SGE task IDs are 1-based; bash arrays are 0-based.
INDEX=$(($SGE_TASK_ID-1))
INPUT=(SRR7992048 SRR7992049 SRR7992050 SRR7992051 SRR7992052 SRR7992053 SRR7992054 SRR7992055 SRR7992056 SRR7992057 SRR7992058 SRR7992059 SRR7992060 SRR7992061 SRR7992062 SRR7992063 SRR7992064 SRR7992065)
SRRID=${INPUT[$INDEX]}
# Per-run library layout, parallel to INPUT (all SINGLE for this series).
SERIES_LIBRARY_LAYOUT=(SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE SINGLE)
SAMPLE_LIBRARY_LAYOUT=${SERIES_LIBRARY_LAYOUT[$INDEX]}
# Skip runs that already produced their output (makes the array job resumable).
if [[ -e "$SRRID.tsv.gz" ]]; then
	exit
fi
# Download the .sra archive; grep "\S" drops blank progress lines from the log.
# NOTE(review): assumes prefetch writes to /scratch/$USER/$SRRID — confirm.
prefetch --output-directory "/scratch/$USER" $SRRID | grep "\S" &> "$SRRID.log"
fasterq-dump --outdir "/scratch/$USER/$SRRID" --temp "/scratch/$USER/$SRRID" --threads 16 "/scratch/$USER/$SRRID" &>> "$SRRID.log"
if [[ $SAMPLE_LIBRARY_LAYOUT == "PAIRED" ]]; then
	nextflow run -profile singularity -revision 1.4.2 -work-dir "/scratch/$USER/$SRRID/work" nf-core/rnaseq --reads "/scratch/$USER/$SRRID/*{1,2}.fastq" --genome GRCh38 --skipBiotypeQC --outdir "/scratch/$USER/$SRRID/results" &>> "$SRRID.log"
fi
if [[ $SAMPLE_LIBRARY_LAYOUT == "SINGLE" ]]; then
	nextflow run -profile singularity -revision 1.4.2 -work-dir "/scratch/$USER/$SRRID/work" nf-core/rnaseq --reads "/scratch/$USER/$SRRID/*.fastq" --singleEnd --genome GRCh38 --skipBiotypeQC --outdir "/scratch/$USER/$SRRID/results" &>> "$SRRID.log"
fi
# "Succeeded : 13" in the log means every pipeline step completed.
if [[ -n $(grep -l "Succeeded : 13" "$SRRID.log") ]]; then
	gzip --stdout "/scratch/$USER/$SRRID/results/featureCounts/merged_gene_counts.txt" > "$SRRID.tsv.gz"
fi
# Always clean up scratch, even on failure.
rm -rf "/scratch/$USER/$SRRID"
|
<reponame>rsuite/rsuite-icons<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import YoastSvg from '@rsuite/icon-font/lib/legacy/Yoast';
// Icon component wrapping the legacy Yoast SVG glyph.
const Yoast = createSvgIcon({
  as: YoastSvg,
  ariaLabel: 'yoast',
  category: 'legacy',
  displayName: 'Yoast'
});
export default Yoast;
|
#!/bin/bash
#set -e
# Path layout for the streaming-pacman demo; PRJ_DIR resolves to the directory
# containing this script regardless of where it is invoked from.
PRJ_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
UTILS_DIR="${PRJ_DIR}/utils"
TMP_FOLDER="${PRJ_DIR}/tmp"
TFS_PATH="${PRJ_DIR}/terraform/aws"
STATE_FILE_PATH="${TFS_PATH}/terraform.tfstate"
LOGS_FOLDER="${PRJ_DIR}/logs"
LOG_FILE_PATH="${LOGS_FOLDER}/start.log"
# Exported for the helper scripts sourced later in this file.
export EXAMPLE="streaming-pacman"
export TOPICS_TO_CREATE="USER_GAME USER_LOSSES"
function create_tfvars_file {
    # Writes terraform/aws/configs.auto.tfvars from env vars previously loaded
    # out of delta_configs/env.delta (BOOTSTRAP_SERVERS, CLOUD_KEY, ...).
    # Derive the AWS region from the bootstrap host name
    # (e.g. pkc-xxxx.us-east-1.aws.confluent.cloud -> us-east-1).
    AWS_REGION=$(echo "$BOOTSTRAP_SERVERS" | awk -F'.' '{print $2}')
    TFVAR_S3_BUCKET=""
    # S3 bucket is optional; only emit the tfvar when the caller provided one.
    if [ -z "${S3_BUCKET_NAME+x}" ];
    then
        echo "S3_BUCKET_NAME is unset"
    else
        echo "S3_BUCKET_NAME is set to '$S3_BUCKET_NAME'"
        TFVAR_S3_BUCKET="bucket_name=\"${S3_BUCKET_NAME}\""
    fi
    cd "$PRJ_DIR"  # quoted: path may contain spaces
    TERRAFORM_CONFIG="$TFS_PATH/configs.auto.tfvars"
    echo -e "\n# Create a local configuration file $TERRAFORM_CONFIG with the terraform variables"
    cat <<EOF > "$TERRAFORM_CONFIG"
bootstrap_server="$BOOTSTRAP_SERVERS"
cluster_api_key="$CLOUD_KEY"
cluster_api_secret="$CLOUD_SECRET"
ksql_endpoint="$KSQLDB_ENDPOINT"
ksql_basic_auth_user_info="$KSQLDB_BASIC_AUTH_USER_INFO"
aws_access_key="$AWS_ACCESS_KEY"
aws_secret_key="$AWS_SECRET_KEY"
aws_region="$AWS_REGION"
$TFVAR_S3_BUCKET
EOF
}
function create_infra_with_tf (){
    # Loads the generated Confluent Cloud credentials, renders the tfvars file,
    # then provisions the AWS stack with Terraform.
    DELTA_CONFIGS_DIR=delta_configs
    source "$DELTA_CONFIGS_DIR/env.delta"
    create_tfvars_file
    cd "$TFS_PATH"  # quoted: path may contain spaces
    terraform init
    terraform apply --auto-approve
}
function create_ccloud_resources {
    # Provisions a fresh Confluent Cloud stack (cluster, service account,
    # ksqlDB app) via the ccloud::* helpers sourced from demo_helper.sh.
    # Idempotence guard: an existing stack config means we already provisioned.
    if [ "$(ls -A $PRJ_DIR/stack-configs/ )" ]
    then
        echo "Files found"
        ls -A $PRJ_DIR/stack-configs/
        echo "There is already an existing Confluent stack, will not recreate"
        return
    fi
    # Pre-flight checks: CLI version, login state, jq availability.
    ccloud::validate_version_ccloud_cli 1.7.0 \
        && print_pass "ccloud version ok" \
        || exit 1
    ccloud::validate_logged_in_ccloud_cli \
        && print_pass "logged into ccloud CLI" \
        || exit 1
    check_jq \
        && print_pass "jq found" \
        || exit 1
    echo
    echo ====== Create new Confluent Cloud stack
    ccloud::prompt_continue_ccloud_demo || exit 1
    ccloud::create_ccloud_stack true
    # The service-account ID is encoded as the 4th '-'-separated token of the
    # generated cluster name. NOTE(review): fragile — confirm naming scheme.
    SERVICE_ACCOUNT_ID=$(ccloud kafka cluster list -o json | jq -r '.[0].name' | awk -F'-' '{print $4;}')
    if [[ "$SERVICE_ACCOUNT_ID" == "" ]]; then
        echo "ERROR: Could not determine SERVICE_ACCOUNT_ID from 'ccloud kafka cluster list'. Please troubleshoot, destroy stack, and try again to create the stack."
        exit 1
    fi
    CONFIG_FILE=stack-configs/java-service-account-$SERVICE_ACCOUNT_ID.config
    export CONFIG_FILE=$CONFIG_FILE
    ccloud::validate_ccloud_config $CONFIG_FILE \
        && print_pass "$CONFIG_FILE ok" \
        || exit 1
    echo ====== Generate CCloud configurations
    ccloud::generate_configs $CONFIG_FILE
    # env.delta exports BOOTSTRAP_SERVERS, CLOUD_KEY, KSQLDB_ENDPOINT, etc.
    DELTA_CONFIGS_DIR=delta_configs
    source $DELTA_CONFIGS_DIR/env.delta
    printf "\n"
    # Pre-flight check of Confluent Cloud credentials specified in $CONFIG_FILE
    MAX_WAIT=720
    echo "Waiting up to $MAX_WAIT seconds for Confluent Cloud ksqlDB cluster to be UP"
    retry $MAX_WAIT ccloud::validate_ccloud_ksqldb_endpoint_ready $KSQLDB_ENDPOINT || exit 1
    ccloud::validate_ccloud_stack_up $CLOUD_KEY $CONFIG_FILE || exit 1
    # Set Kafka cluster
    ccloud::set_kafka_cluster_use_from_api_key $CLOUD_KEY || exit 1
    #################################################################
    # Confluent Cloud ksqlDB application
    #################################################################
    ./create_ksqldb_app.sh || exit 1
    printf "\nDONE! Connect to your Confluent Cloud UI at https://confluent.cloud/\n"
    echo
    echo "Local client configuration file written to $CONFIG_FILE"
    echo
    echo "Cloud resources are provisioned and accruing charges. To destroy this demo and associated resources run ->"
    echo "       ./stop.sh $CONFIG_FILE"
    echo
}
function start_demo {
    # Loads demo configuration plus the shared helper library (which provides
    # the ccloud::*, print_pass and retry functions), then provisions the
    # cloud resources followed by the Terraform-managed infrastructure.
    source "$PRJ_DIR/config/demo.cfg"    # quoted: paths may contain spaces
    source "$UTILS_DIR/demo_helper.sh"
    create_ccloud_resources
    create_infra_with_tf
}
# -p: do not fail when logs/ already exists (plain mkdir aborts on reruns).
mkdir -p "$LOGS_FOLDER"
# Run the demo, mirroring all output (stdout+stderr) into the log file.
start_demo 2>&1 | tee -a "$LOG_FILE_PATH"
|
/*
* OutOfScope.sql
* Chapter 7, Oracle10g PL/SQL Programming
* by <NAME>, <NAME>, and <NAME>
*
* This script demonstrates the scope of exceptions.
*/
/* Demo 1: a locally declared exception cannot be named outside its block. */
BEGIN
  DECLARE
    e_UserDefinedException EXCEPTION;
  BEGIN
    RAISE e_UserDefinedException;
  END;
EXCEPTION
  /* e_UserDefinedException is out of scope here - can only be
     handled by an OTHERS handler */
  WHEN OTHERS THEN
    /* Just re-raise the exception, which will be propagated to the
       calling environment */
    RAISE;
END;
/
CREATE OR REPLACE PACKAGE Globals AS
  /* This package contains global declarations. Objects declared here will
     be visible via qualified references for any other blocks or procedures.
     Note that this package does not have a package body. */
  /* A user-defined exception. */
  e_UserDefinedException EXCEPTION;
END Globals;
/
/* Demo 2: a package-level exception stays nameable in any outer handler. */
BEGIN
  BEGIN
    RAISE Globals.e_UserDefinedException;
  END;
EXCEPTION
  /* Since e_UserDefinedException is still visible, we can handle it
     explicitly */
  WHEN Globals.e_UserDefinedException THEN
    /* Just re-raise the exception, which will be propagated to the
       calling environment */
    RAISE;
END;
/
|
import { Controller, Get, Post, Req, Res } from '@nestjs/common';
import { AppService } from './app.service';
import {Request,Response} from 'express'
import { verify } from 'jsonwebtoken';
import { UserService } from './user/user.service';
import { createAccessToken, createRefreshToken } from './utils/auth';
@Controller()
export class AppController {
  constructor(private readonly appService: AppService, private readonly userService: UserService) {}

  /** Liveness check. */
  @Get()
  hello() {
    return "hello"
  }

  /**
   * Exchanges the `jid` refresh-token cookie for a fresh access token,
   * rotating the refresh token on success. Responds with
   * `{success, accessToken}` in every case.
   */
  @Post('/refresh-token')
  async refreshToken(@Req() req: Request, @Res() res: Response) {
    // NOTE(review): 'Access-Control-Allow-Origin: *' cannot be combined with
    // credentialed (cookie) requests in browsers — confirm intended CORS setup.
    res.header('Access-Control-Allow-Origin', "*")
    const token = req.cookies.jid
    if (!token) {
      // BUGFIX: must return here; previously execution fell through to
      // verify(undefined) and attempted a second res.send on this response.
      return res.send({ success: false, accessToken: "" })
    }
    let payload: any = null
    try {
      payload = verify(token, process.env.REFRESH_TOKEN_SECRET)
    } catch (err) {
      console.log(err)
      return res.send({ success: false, accessToken: "" })
    }
    const user = await this.userService.findById(payload.userId)
    if (!user) {
      return res.send({ success: false, accessToken: "" })
    }
    // Rotate the refresh token, then hand back a new short-lived access token.
    res.cookie('jid', createRefreshToken(user))
    return res.send({ success: true, accessToken: createAccessToken(user) })
  }
}
|
<reponame>Andy114514/bible-copypaste<filename>src/config/bible.js
// Copypasta templates. Each entry has an id, a display title, and a template
// whose ${slot|default} placeholders are presumably filled in by the UI —
// TODO confirm against the component that consumes this config.
const bible = [
  {
    id: "support",
    title: "支援",
    template:
      "${支援者|桐生會長},請您支援 #${被支援者|桐生ココ} ,她僅僅因為說${被支援者的主张|台灣是個國家},就受${被支援者的反对者|中國五毛}機器人的攻擊以及人身威脅,和未婚夫${无关者|肖戰}的婚約也被迫取消,請一定聲援她!",
  },
  {
    id: "nature",
    title: "尿性",
    template:
      "依${反对者|五毛}的尿性,${声援者|蔡總統}的聲援反而會引來越多${反对者2|中國五毛}攻擊 #${被反对者|桐生ココ},因為他們會說那是 #${被反对者|桐生ココ} 支持台獨的證據,對待${反对者|五毛}最好的方法是視若無睹或直接封鎖他們的帳號。",
  },
  {
    id: "too-hard",
    title: "太难了",
    template:
      "當${支援者的职业|台灣的總統}太難了,要考量的事太多了,也許${支援者的团体|蔡政府}有受到甚麼我們不知道的壓力,需要她立即表態,畢竟這麼我第一次對她的決定覺得不妥,不過或許這是她能夠當${支援者的职业|台灣的總統},而我只能打個字發一發牢騷的原因。希望${支援者的称呼|蔡總統}發這個推文之前有和肖戰執政團隊溝通並取得他們的諒解。",
  },
  {
    id: "promotion",
    title: "引流",
    template:
      "有考虑来${竞争者|bilibili}吗,我们不希望你的才华被${目标平台|youtube}推荐机制埋没,我们这里有${领域|vtuber}专区,可以提供正常引流和推荐机制哦。只要承认一个中国中国人就会哗哗地打钱哦,而且台湾人也不介意",
  },
  {
    // Backtick template: the \${...} escapes keep the placeholders literal.
    id: "grand-picture",
    title: "民心所向",
    template: `\${团体|台灣}現在正迎接歷來最強大的時代、
\${团体|台灣}從來沒有這麼強盛過。
這一切歸功於\${团体|台灣}講求人權民主自由所投票選出來的\${目标团体|政黨}、不管他有多少\${目标团体成员|黨員}現在的執政者不負眾望、怎麼轉移話題都沒用這就是事實。
\${团体|台灣}正努力邁開腳步一步一步遠離\${离开的地点|地球}而去這才是最重要的民心所向`,
  },
]
export default bible
|
<reponame>smagill/opensphere-desktop<filename>open-sphere-plugins/infinity/src/main/java/io/opensphere/infinity/json/BoundingBox.java<gh_stars>10-100
package io.opensphere.infinity.json;
import org.codehaus.jackson.annotate.JsonPropertyOrder;
import com.vividsolutions.jts.geom.Geometry;
import io.opensphere.core.model.GeographicBoundingBox;
import io.opensphere.core.model.LatLonAlt;
/** Elasticsearch bounding box JSON bean. */
@JsonPropertyOrder({ "bottom_right", "top_left" })
public class BoundingBox
{
    /** The bottom-right corner of the box. */
    private Coordinate bottomRight;

    /** The top-left corner of the box. */
    private Coordinate topLeft;

    /** Default constructor (needed for JSON deserialization). */
    public BoundingBox()
    {
    }

    /**
     * Constructor from a JTS geometry.
     *
     * @param geometry the geometry
     */
    public BoundingBox(Geometry geometry)
    {
        GeographicBoundingBox box = getMinimumBoundingBoxLLA(geometry.getCoordinates());
        bottomRight = new Coordinate(box.getLowerRight());
        // NOTE(review): only the bottom-right longitude is normalized into
        // [-180, 180]; confirm whether the top-left needs the same treatment.
        bottomRight.setLon(adjustLon(bottomRight.getLon()));
        topLeft = new Coordinate(box.getUpperLeft());
    }

    /**
     * Gets the bottomRight.
     *
     * @return the bottomRight
     */
    public Coordinate getBottom_right()
    {
        return bottomRight;
    }

    /**
     * Sets the bottomRight.
     *
     * @param bottomRight the bottomRight
     */
    public void setBottom_right(Coordinate bottomRight)
    {
        this.bottomRight = bottomRight;
    }

    /**
     * Gets the topLeft.
     *
     * @return the topLeft
     */
    public Coordinate getTop_left()
    {
        return topLeft;
    }

    /**
     * Sets the topLeft.
     *
     * @param topLeft the topLeft
     */
    public void setTop_left(Coordinate topLeft)
    {
        this.topLeft = topLeft;
    }

    /**
     * Get the smallest bounding box which contains all of the coordinates.
     *
     * @param coordinates The coordinates which must be contained in the box.
     * @return The smallest bounding box which contains all of the coordinates.
     */
    static GeographicBoundingBox getMinimumBoundingBoxLLA(com.vividsolutions.jts.geom.Coordinate[] coordinates)
    {
        double minLat = Double.MAX_VALUE;
        double maxLat = -Double.MAX_VALUE;
        double minLon = Double.MAX_VALUE;
        double maxLon = -Double.MAX_VALUE;
        for (com.vividsolutions.jts.geom.Coordinate coord : coordinates)
        {
            // JTS convention: y is latitude, x is longitude.
            minLat = Math.min(minLat, coord.y);
            maxLat = Math.max(maxLat, coord.y);
            minLon = Math.min(minLon, coord.x);
            maxLon = Math.max(maxLon, coord.x);
        }
        return new GeographicBoundingBox(LatLonAlt.createFromDegrees(minLat, minLon),
                LatLonAlt.createFromDegrees(maxLat, maxLon));
    }

    /**
     * Adjusts the longitude if it's greater than 180.
     *
     * @param lon the longitude
     * @return the adjusted longitude, wrapped into [-180, 180]
     */
    static double adjustLon(double lon)
    {
        if (lon > 180)
        {
            return -(360 - lon);
        }
        return lon;
    }
}
|
<reponame>paullewallencom/java-978-1-7884-7523-5
package com.packt.designpatterns.bp.commandpattern;
/**
 * Receiver in the command pattern: knows how to place and cancel a pizza
 * order, reporting each action on standard output.
 */
public class Pizza {
    private String name;
    private int qty;

    /**
     * @param name pizza variety
     * @param qty  number of pizzas in the order
     */
    public Pizza(String name, int qty) {
        this.name = name;
        this.qty = qty;
    }

    /** Places the order. */
    public void makeOrder() {
        String message = qty + " " + name + " Pizzas Ordered";
        System.out.println(message);
    }

    /** Cancels the order. */
    public void cancelOrder() {
        String message = qty + " " + name + " Pizzas Cancelled";
        System.out.println(message);
    }
}
|
package projectsol.worldsofsol.common.world.gen;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.biome.Biome;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.gen.surfacebuilder.SurfaceBuilder;
import java.util.Random;
/**
 * Surface builder for the moon dimension: walks each column top-down and
 * replaces the default stone with the configured top/under materials,
 * tracking depth below the surface as it goes.
 */
public class MoonSurfaceBuilder extends SurfaceBuilder<MoonSurfaceBuilderConfig> {
    public MoonSurfaceBuilder() {
        super(MoonSurfaceBuilderConfig.CODEC);
    }
    @Override
    public void generate(Random random, Chunk chunk, Biome biome, int x, int z, int height, double noise, BlockState defaultBlock, BlockState defaultFluid, int seaLevel, int i, long l, MoonSurfaceBuilderConfig surfaceConfig) {
        // NOTE(review): topState/underState are mutated inside the loop and the
        // new values persist for later (lower) blocks of the same column —
        // confirm this carry-over is intentional; it mirrors vanilla builders.
        BlockState topState = surfaceConfig.getTopMaterial();
        BlockState underState = surfaceConfig.getUnderMaterial();
        BlockPos.Mutable mut = new BlockPos.Mutable();
        // Remaining under-material blocks to place; -1 means "currently in air".
        int maxDepth = -1;
        // Surface-layer thickness for this column, derived from the noise value.
        int depth = (int)(noise / 3.0D + 3.0D + random.nextDouble() * 0.25D);
        // Chunk-local coordinates (0..15).
        int cX = x & 15;
        int cZ = z & 15;
        for(int m = height; m >= 0; --m) {
            mut.set(cX, m, cZ);
            BlockState blockState3 = chunk.getBlockState(mut);
            if (blockState3.isAir()) {
                // Re-arm: the next solid block below is a new surface.
                maxDepth = -1;
            } else if (blockState3.isOf(defaultBlock.getBlock())) {
                if (maxDepth == -1) {
                    // First default block under air: decide the surface materials.
                    if (depth <= 0) {
                        // Zero-thickness surface: expose bare default block.
                        topState = Blocks.AIR.getDefaultState();
                        underState = defaultBlock;
                    } else if (m >= seaLevel - 4 && m <= seaLevel + 1) {
                        // Near sea level: use the configured beach-like materials.
                        topState = surfaceConfig.getTopMaterial();
                        underState = surfaceConfig.getUnderMaterial();
                    }
                    if (m < seaLevel && (topState == null || topState.isAir())) {
                        // Below sea level with no solid top: fill with fluid.
                        topState = defaultFluid;
                        mut.set(cX, m, cZ);
                    }
                    maxDepth = depth;
                    if (m >= seaLevel - 1) {
                        chunk.setBlockState(mut, topState, false);
                    } else if (m < seaLevel - 7 - depth) {
                        // Deep underwater: strip the surface layer entirely.
                        topState = Blocks.AIR.getDefaultState();
                        underState = defaultBlock;
                        chunk.setBlockState(mut, surfaceConfig.getUnderwaterMaterial(), false);
                    } else {
                        chunk.setBlockState(mut, underState, false);
                    }
                } else if (maxDepth > 0) {
                    // Still inside the surface layer: place under-material.
                    --maxDepth;
                    chunk.setBlockState(mut, underState, false);
                }
            }
        }
    }
}
|
using System;

namespace PadawanEquipment
{
    /// <summary>
    /// Reads the available money, the number of students and the per-item
    /// prices (lightsaber, robe, belt), then prints whether a full kit for
    /// every student can be afforded.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            // BUGFIX: added "using System;" — no using directive was present at
            // the top of this file, so Console/double.Parse would not resolve.
            double amountOfMoney = double.Parse(Console.ReadLine());
            int studentsCount = int.Parse(Console.ReadLine());
            double priceOfLightsaber = double.Parse(Console.ReadLine());
            double priceOfRobe = double.Parse(Console.ReadLine());
            double priceOfBelt = double.Parse(Console.ReadLine());

            // One full kit per student.
            double totalCost = (priceOfLightsaber + priceOfRobe + priceOfBelt) * studentsCount;
            bool canAfford = amountOfMoney >= totalCost;

            Console.WriteLine(canAfford);
        }
    }
}
#!/bin/bash
#
# Show update instruction for installed tools.
#
# VERSION :0.1.2
# DATE :2015-07-06
# AUTHOR :Viktor Szépe <viktor@szepe.net>
# LICENSE :The MIT License (MIT)
# URL :https://github.com/szepeviktor/debian-server-tools
# BASH-VERSION :4.2+
# DEPENDS :apt-get install colordiff
# Show also colorized diffs.
#
# ./update.sh -d
Get_meta() {
    # Reads one "# KEY :value" metadata field from the first 30 lines
    # of a script, printing "(unknown)" when the field is absent.
    # $1: file to inspect (defaults to this script)
    # $2: metadata key (defaults to VERSION)
    local FILE="${1:-$0}"
    local META="${2:-VERSION}"
    local VALUE
    VALUE="$(head -n 30 "$FILE" | grep -m 1 "^# ${META}\\s*:" | cut -d ":" -f 2-)"
    printf '%s\n' "${VALUE:-(unknown)}"
}
PARAM="$1"
# Diff display (-d) requires colordiff; silently disable it when missing.
hash colordiff 2>/dev/null || unset PARAM
#Input_motd         Get_meta input/update-motd.d -         Get_meta /etc/update-motd.d/update-motd.d
D="$(dirname "$0")"
# Walk every small tracked file; for each one whose "# LOCATION :" header
# points at an installed copy with a different "# VERSION :", print the
# install command (and optionally a colorized diff).
# NOTE(review): find runs from the current directory, not $D — assumes the
# script is invoked from the repository root; confirm.
find . -type f -size -100k -not -name README.md -not -path "*/.git*" -printf '%P\n' \
    | while read -r FILE; do
        SCRIPT="$(Get_meta "$FILE" LOCATION)"
        if [ -z "$SCRIPT" ] || [ "$SCRIPT" == "(unknown)" ] || [ ! -f "$SCRIPT" ]; then
            continue
        fi
        OLD_VERSION="$(Get_meta "$SCRIPT")"
        CURRENT_VERSION="$(Get_meta "$FILE")"
        if [ "$OLD_VERSION" == "$CURRENT_VERSION" ]; then
            continue
        fi
        echo "# Update ${FILE}: ${OLD_VERSION} -> ${CURRENT_VERSION}"
        echo "${D}/install.sh ${FILE}"
        if [ "$PARAM" == "-d" ]; then
            colordiff -w -B "$SCRIPT" "$FILE"
        fi
    done
|
<!DOCTYPE html>
<html>
<head>
    <title>Fibonacci Sequence</title>
</head>
<body>
    <h1>Fibonacci Sequence Generator</h1>
    <p>Enter a number to generate the Fibonacci sequence:</p>
    <input id="num" type="number">
    <button onclick="fibonacci()">Generate</button>
    <p>Sequence:</p>
    <p id="result"></p><br>
    <script>
        // Returns the first `count` Fibonacci numbers: [1, 1, 2, 3, 5, ...].
        // Pure helper, kept separate from the DOM glue so it is testable.
        function fibSequence(count) {
            const seq = [];
            for (let i = 0; i < count; i++) {
                seq.push(i < 2 ? 1 : seq[i - 1] + seq[i - 2]);
            }
            return seq;
        }
        // Click handler: reads #num, writes the sequence into #result.
        // BUGFIX: the input's .value is a string, so the old `num === 0`
        // check never matched and 0 produced [1]; parse it as an integer.
        function fibonacci() {
            const num = parseInt(document.getElementById('num').value, 10) || 0;
            document.getElementById('result').innerHTML = fibSequence(num);
        }
    </script>
</body>
</html>
<reponame>mhs1314/allPay<filename>qht-modules/qht-api/src/main/java/com/qht/model/TeacherInfoModel.java
package com.qht.model;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.List;
/**
 * Teacher profile payload exchanged with the API. Field names use snake_case
 * to match the wire format, so accessor names (getSubject_name, ...) are
 * deliberately non-standard — do not rename them.
 */
public class TeacherInfoModel implements Serializable {
    /** Teacher's unique identifier. */
    private String uid;
    /** Accumulated points/credit for this teacher. */
    private BigDecimal integral;
    /** Display name. */
    private String nickname;
    /** Subject taught. */
    private String subject_name;
    /** School the teacher belongs to. */
    private String school_name;
    /** Background image URL for the profile page. */
    private String back_img;
    /** Courses offered by this teacher. */
    private List<CourseModel> course;
    @Override
    public String toString() {
        return "TeacherInfoModel{" +
                "uid='" + uid + '\'' +
                ", integral='" + integral + '\'' +
                ", nickname='" + nickname + '\'' +
                ", subject_name='" + subject_name + '\'' +
                ", school_name='" + school_name + '\'' +
                ", back_img='" + back_img + '\'' +
                ", course=" + course +
                '}';
    }
    public String getUid() {
        return uid;
    }
    public void setUid(String uid) {
        this.uid = uid;
    }
    public BigDecimal getIntegral() {
        return integral;
    }
    public void setIntegral(BigDecimal integral) {
        this.integral = integral;
    }
    public String getNickname() {
        return nickname;
    }
    public void setNickname(String nickname) {
        this.nickname = nickname;
    }
    public String getSubject_name() {
        return subject_name;
    }
    public void setSubject_name(String subject_name) {
        this.subject_name = subject_name;
    }
    public String getSchool_name() {
        return school_name;
    }
    public void setSchool_name(String school_name) {
        this.school_name = school_name;
    }
    public String getBack_img() {
        return back_img;
    }
    public void setBack_img(String back_img) {
        this.back_img = back_img;
    }
    public List<CourseModel> getCourse() {
        return course;
    }
    public void setCourse(List<CourseModel> course) {
        this.course = course;
    }
}
|
package main
import (
"flag"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
"log"
"os"
"9fans.net/go/draw"
"github.com/mjl-/duit"
)
// check aborts the program, logging msg together with the underlying error,
// whenever err is non-nil; it is a no-op otherwise.
func check(err error, msg string) {
	if err == nil {
		return
	}
	log.Fatalf("%s: %s\n", msg, err)
}
// main displays the image given on the command line in a duit window and
// then services UI events until the window is closed.
func main() {
	log.SetFlags(0)
	flag.Usage = func() {
		log.Println("duitimage path")
		flag.PrintDefaults()
	}
	flag.Parse()
	args := flag.Args()
	// Exactly one argument: the path of the image to display.
	if len(args) != 1 {
		flag.Usage()
		os.Exit(2)
	}

	dui, err := duit.NewDUI("ex/image", nil)
	check(err, "new dui")

	// Loads the image file into a draw.Image; aborts the program on failure.
	readImagePath := func(path string) *draw.Image {
		img, err := duit.ReadImagePath(dui.Display, path)
		check(err, "read image")
		return img
	}

	dui.Top.UI = &duit.Image{
		Image: readImagePath(args[0]),
	}
	dui.Render()

	// Event loop: forward input events; exit when the error channel closes.
	for {
		select {
		case e := <-dui.Inputs:
			dui.Input(e)
		case err, ok := <-dui.Error:
			if !ok {
				return
			}
			log.Printf("duit: %s\n", err)
		}
	}
}
|
# Build the antixray plugin as a position-independent shared library with full
# RELRO hardening, -Ofast, and GNU C++17.
g++ -shared -fPIC -Wl,-z,relro,-z,now -Ofast -std=gnu++17 -I ../base/ -I ../../include main.cpp -o ../../out/antixray.so
|
#!/bin/sh
set -e
# Install CMake 3.16+ from official Kitware repository (see https://apt.kitware.com/)
sudo apt-get update
# -f: with `set -e`, plain rm would abort the script when no previously
# hand-installed cmake binaries exist.
sudo rm -f /usr/local/bin/ccmake* /usr/local/bin/cmake* /usr/local/bin/cpack* /usr/local/bin/ctest*
# -y: keep the install non-interactive (this script runs unattended).
sudo apt-get install -y apt-transport-https ca-certificates gnupg software-properties-common wget
wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | sudo apt-key add -
# NOTE(review): release is hard-coded to 'focal' — confirm the target distro.
sudo apt-add-repository 'deb https://apt.kitware.com/ubuntu/ focal main'
sudo apt-get update
sudo apt-get install -y cmake
|
<filename>tools/buildmessage.js<gh_stars>0
var Fiber = require('fibers');
var Future = require('fibers/future');
var _ = require('underscore');
var files = require('./files.js');
var parseStack = require('./parse-stack.js');
var fiberHelpers = require('./fiber-helpers.js');
var Progress = require('./progress.js').Progress;
var debugBuild = !!process.env.METEOR_DEBUG_BUILD;
// A job is something like "building package foo". It contains the set
// of messages generated by tha job. A given build run could contain
// several jobs. Each job has an (absolute) path associated with
// it. Filenames in messages within a job are to be interpreted
// relative to that path.
//
// Jobs are used both for error handling (via buildmessage.capture) and to set
// the progress bar title (via progress.js).
//
// Job titles should begin with a lower-case letter (unless they begin with a
// proper noun), so that they look correct in error messages which say "While
// jobbing the job". The first letter will be capitalized automatically for the
// progress bar.
// Constructor. options: { title, rootPath } — see the comment block above
// this definition in the file for the job concept.
var Job = function (options) {
  var self = this;
  // Messages recorded against this job (see addMessage for the shape).
  self.messages = [];
  // Should be something like "building package 'foo'"
  // Should look good in "While $title:\n[messages]"
  self.title = options.title;
  // Absolute path that message filenames are relative to (may be undefined).
  self.rootPath = options.rootPath;
  // Array of Job (jobs created inside this job)
  self.children = [];
};
_.extend(Job.prototype, {
  // Record one message against this job.
  // options may include type ("error"), message, func, file, line,
  // column, stack (in the format returned by parseStack.parse())
  addMessage: function (options) {
    var self = this;
    self.messages.push(options);
  },
  // True if any messages have been recorded directly on this job.
  hasMessages: function () {
    var self = this;
    return self.messages.length > 0;
  },
  // True if any recorded message carries the given tag.
  hasMessageWithTag: function (tagName) {
    var self = this;
    return _.any(self.messages, function (message) {
      return message.tags && _.has(message.tags, tagName);
    });
  },
  // Returns a multi-line string suitable for displaying to the user.
  // indent: number of spaces to prefix every line with (default 0).
  formatMessages: function (indent) {
    var self = this;
    var out = "";
    var already = {};
    indent = new Array((indent || 0) + 1).join(' ');
    _.each(self.messages, function (message) {
      var stack = message.stack || [];
      // Build "file:line:column: message" when location info is available.
      var line = indent;
      if (message.file) {
        line+= message.file;
        if (message.line) {
          line += ":" + message.line;
          if (message.column) {
            // XXX maybe exclude unless specifically requested (eg,
            // for an automated tool that's parsing our output?)
            line += ":" + message.column;
          }
        }
        line += ": ";
      } else {
        // not sure how to display messages without a filename.. try this?
        line += "error: ";
      }
      // XXX line wrapping would be nice..
      line += message.message;
      if (message.func && stack.length <= 1) {
        line += " (at " + message.func + ")";
      }
      line += "\n";
      if (stack.length > 1) {
        _.each(stack, function (frame) {
          // If a nontrivial stack trace (more than just the file and line
          // we already complained about), print it.
          var where = "";
          if (frame.file) {
            where += frame.file;
            if (frame.line) {
              where += ":" + frame.line;
              if (frame.column) {
                where += ":" + frame.column;
              }
            }
          }
          if (! frame.func && ! where)
            return; // that's a pretty lame stack frame
          line += " at ";
          if (frame.func)
            line += frame.func + " (" + where + ")\n";
          else
            line += where + "\n";
        });
        line += "\n";
      }
      // Deduplicate messages (only when exact duplicates, including stack)
      if (! (line in already)) {
        out += line;
        already[line] = true;
      }
    });
    return out;
  }
});
// A MessageSet contains a set of jobs, which in turn each contain a
// set of messages.
// Constructor: an initially empty set of jobs.
var MessageSet = function () {
  var self = this;
  self.jobs = [];
};
_.extend(MessageSet.prototype, {
  // Format all jobs that have messages, one "While <title>:" section each,
  // separated by blank lines.
  formatMessages: function () {
    var self = this;
    var jobsWithMessages = _.filter(self.jobs, function (job) {
      return job.hasMessages();
    });
    return _.map(jobsWithMessages, function (job) {
      var out = '';
      out += "While " + job.title + ":\n";
      out += job.formatMessages(0);
      return out;
    }).join('\n'); // blank line between jobs
  },
  // True if any job in the set has recorded messages.
  hasMessages: function () {
    var self = this;
    return _.any(self.jobs, function (job) {
      return job.hasMessages();
    });
  },
  // True if any job has a message carrying the given tag.
  hasMessageWithTag: function (tagName) {
    var self = this;
    return _.any(self.jobs, function (job) {
      return job.hasMessageWithTag(tagName);
    });
  },
  // Copy all of the messages in another MessageSet into this
  // MessageSet. If the other MessageSet is subsequently mutated,
  // results are undefined.
  //
  // XXX rather than this, the user should be able to create a
  // MessageSet and pass it into capture(), and functions such as
  // bundle() should take and mutate, rather than return, a
  // MessageSet.
  merge: function (messageSet) {
    var self = this;
    _.each(messageSet.jobs, function (j) {
      self.jobs.push(j);
    });
  }
});
// Returns a string of n spaces (used to indent the debug-build log lines).
// Uses the same Array-join idiom as formatMessages' indent handling instead
// of the previous _.times/join round-trip.
var spaces = function (n) {
  return new Array(n + 1).join(' ');
};
// XXX: This is now a little bit silly... ideas:
// Can we just have one hierarchical state?
// Can we combined job & messageSet
// Can we infer nesting level?
var currentMessageSet = new fiberHelpers.EnvironmentVariable; // MessageSet of the active capture(), if any
var currentJob = new fiberHelpers.EnvironmentVariable; // innermost Job created by capture()/enterJob()
var currentNestingLevel = new fiberHelpers.EnvironmentVariable(0); // depth, used to indent debug logs
var currentProgress = new fiberHelpers.EnvironmentVariable; // fiber-local progress tracker
var rootProgress = new Progress(); // root of the progress-tracker tree
// Returns the root of the progress-tracker tree.
var getRootProgress = function () {
  return rootProgress;
};
// Forwards a progress state to the current fiber's tracker; no-op when no
// tracker has been installed.
var reportProgress = function (state) {
  var tracker = currentProgress.get();
  if (tracker) {
    tracker.reportProgress(state);
  }
};
// Marks the current fiber's progress tracker as finished; no-op when no
// tracker has been installed.
var reportProgressDone = function () {
  var tracker = currentProgress.get();
  if (tracker) {
    tracker.reportProgressDone();
  }
};
// Returns the fiber-local progress tracker, falling back to the root tracker
// when none has been installed.
var getCurrentProgressTracker = function () {
  return currentProgress.get() || rootProgress;
};
// Creates a child task under the current progress tracker, optionally titled.
var addChildTracker = function (title) {
  var options = title === undefined ? {} : {title: title};
  return getCurrentProgressTracker().addChildTask(options);
};
// Create a new MessageSet, run `f` with that as the current
// MessageSet for the purpose of accumulating and recovering from
// errors (see error()), and then discard the return value of `f` and
// return the MessageSet.
//
// Note that you must also create a job (with enterJob) to actually
// begin capturing errors. Alternately you may pass `options`
// (otherwise optional) and a job will be created for you based on
// `options`.
// Runs `f` with a fresh MessageSet installed (see the comment block above
// this definition) and returns that MessageSet. `options` may be omitted,
// in which case the single argument is `f` and no Job is created.
var capture = function (options, f) {
  var messageSet = new MessageSet;
  // (removed: unused local `parentMessageSet`)
  var title;
  if (typeof options === "object" && options.title)
    title = options.title;
  var progress = addChildTracker(title);
  currentProgress.withValue(progress, function () {
    currentMessageSet.withValue(messageSet, function () {
      var job = null;
      if (typeof options === "object") {
        job = new Job(options);
        messageSet.jobs.push(job);
      } else {
        f = options; // options not actually provided
      }
      currentJob.withValue(job, function () {
        var nestingLevel = currentNestingLevel.get();
        currentNestingLevel.withValue(nestingLevel + 1, function () {
          var start;
          if (debugBuild) {
            start = Date.now();
            // BUGFIX: this line previously appended "took " + (end - start)
            // with `end` still undefined (var-hoisted from the finally block),
            // logging "took NaN"; elapsed time belongs on END CAPTURE only.
            console.log(spaces(nestingLevel * 2), "START CAPTURE", nestingLevel, options.title);
          }
          try {
            f();
          } finally {
            progress.reportProgressDone();
            if (debugBuild) {
              var end = Date.now();
              console.log(spaces(nestingLevel * 2), "END CAPTURE", nestingLevel, options.title, "took " + (end - start));
            }
          }
        });
      });
    });
  });
  return messageSet;
};
// Called from inside capture(), creates a new Job inside the current
// MessageSet and run `f` inside of it, so that any messages emitted
// by `f` are logged in the Job. Returns the return value of `f`. May
// be called recursively.
//
// Called not from inside capture(), does nothing (except call f).
//
// options:
// - title: a title for the job (required)
// - rootPath: the absolute path relative to which paths in messages
//   in this job should be interpreted (omit if there is no way to map
//   files that this job talks about back to files on disk)
var enterJob = function (options, f) {
  // Normalize the shorthand call forms enterJob(f) and
  // enterJob("title", f) into the (options, f) form.
  if (typeof options === "function") {
    f = options;
    options = {};
  }
  if (typeof options === "string") {
    options = {title: options};
  }

  // Create a child progress tracker for this job, forwarding only the
  // options the tracker understands.
  var progress;
  {
    var progressOptions = {};
    // XXX: Just pass all the options?
    if (typeof options === "object") {
      if (options.title) {
        progressOptions.title = options.title;
      }
      if (options.forkJoin) {
        progressOptions.forkJoin = options.forkJoin;
      }
    }
    progress = getCurrentProgressTracker().addChildTask(progressOptions);
  }

  return currentProgress.withValue(progress, function () {
    // Not inside a capture(): there is no MessageSet to record into,
    // so just run `f` with progress/nesting bookkeeping and no Job.
    if (!currentMessageSet.get()) {
      var nestingLevel = currentNestingLevel.get();
      var start;
      if (debugBuild) {
        start = Date.now();
        console.log(spaces(nestingLevel * 2), "START", nestingLevel, options.title);
      }
      try {
        return currentNestingLevel.withValue(nestingLevel + 1, function () {
          return f();
        });
      } finally {
        progress.reportProgressDone();
        if (debugBuild) {
          var end = Date.now();
          console.log(spaces(nestingLevel * 2), "DONE", nestingLevel, options.title, "took " + (end - start));
        }
      }
    }

    // Inside a capture(): create a Job, attach it both to the parent
    // job (if any) and to the MessageSet, then run `f` under it.
    var job = new Job(options);
    var originalJob = currentJob.get();
    originalJob && originalJob.children.push(job);
    currentMessageSet.get().jobs.push(job);

    return currentJob.withValue(job, function () {
      var nestingLevel = currentNestingLevel.get();
      return currentNestingLevel.withValue(nestingLevel + 1, function () {
        var start;
        if (debugBuild) {
          start = Date.now();
          console.log(spaces(nestingLevel * 2), "START", nestingLevel, options.title);
        }
        try {
          return f();
        } finally {
          // Always mark progress done, even when `f` throws.
          progress.reportProgressDone();
          if (debugBuild) {
            var end = Date.now();
            console.log(spaces(nestingLevel * 2), "DONE", nestingLevel, options.title, "took " + (end - start));
          }
        }
      });
    });
  });
};
// If not inside a job, return false. Otherwise, return true if any
// messages (presumably errors) have been recorded for this job
// (including subjobs created inside this job), else false.
var jobHasMessages = function () {
  // Depth-first search over the job tree rooted at `job`.
  var hasAnyMessages = function (job) {
    if (job.hasMessages()) {
      return true;
    }
    return Boolean(_.find(job.children, hasAnyMessages));
  };

  var job = currentJob.get();
  if (! job) {
    return false;
  }
  return hasAnyMessages(job);
};
// Given a function f, return a "marked" version of f. The mark
// indicates that stack traces should stop just above f. So if you
// mark a user-supplied callback function before calling it, you'll be
// able to show the user just the "user portion" of the stack trace
// (the part inside their own code, and not all of the innards of the
// code that called it).
var markBoundary = function (f) {
  // Delegates to parseStack, which tags `f` so traces captured later
  // (see error()/exception()) can be trimmed at this boundary.
  return parseStack.markBottom(f);
};
// Record a build error. If inside a job, add the error to the current
// job and return (caller should do its best to recover and
// continue). Otherwise, throws an exception based on the error.
//
// options may include
// - file: the file containing the error, relative to the root of the build
//   (this must be agreed upon out of band)
// - line: the (1-indexed) line in the file that contains the error
// - column: the (1-indexed) column in that line where the error begins
// - func: the function containing the code that triggered the error
// - useMyCaller: true to capture information the caller (function
//   name, file, and line). It captures not the information of the
//   caller of error(), but that caller's caller. It saves them in
//   'file', 'line', and 'column' (overwriting any values passed in
//   for those). It also captures the user portion of the stack,
//   starting at and including the caller's caller.
//   If this is a number instead of 'true', skips that many stack frames.
// - downcase: if true, the first character of `message` will be
//   converted to lower case.
// - secondary: ignore this error if there are are already other
//   errors in this job (the implication is that it's probably
//   downstream of the other error, ie, a consequence of our attempt
//   to continue past other errors)
// - tags: object with other error-specific data; there is a method
//   on MessageSet which can search for errors with a specific named
//   tag.
var error = function (message, options) {
  options = options || {};

  if (options.downcase) {
    message = message.slice(0, 1).toLowerCase() + message.slice(1);
  }

  // Outside a job there is nowhere to record the message: throw.
  if (! currentJob.get()) {
    throw new Error("Error: " + message);
  }

  // Secondary errors are suppressed when the job already has messages.
  if (options.secondary && jobHasMessages()) {
    return; // skip it
  }

  var details = _.extend({
    message: message
  }, options);

  if ('useMyCaller' in details) {
    if (details.useMyCaller) {
      // Drop the frames for error() itself and its direct caller, then
      // optionally skip extra frames when useMyCaller is a number.
      var frames = parseStack.parse(new Error()).slice(2);
      if (typeof details.useMyCaller === 'number') {
        frames = frames.slice(details.useMyCaller);
      }
      details.stack = frames;
      var topFrame = frames[0];
      details.func = topFrame.func;
      details.file = topFrame.file;
      details.line = topFrame.line;
      details.column = topFrame.column;
    }
    // Never forward the control option itself into the message.
    delete details.useMyCaller;
  }

  currentJob.get().addMessage(details);
};
// Record an exception. The message as well as any file and line
// information be read directly out of the exception. If not in a job,
// throws the exception instead. Also capture the user portion of the stack.
//
// There is special handling for files.FancySyntaxError exceptions. We
// will grab the file and location information where the syntax error
// actually occurred, rather than the place where the exception was
// thrown.
var exception = function (error) {
  var job = currentJob.get();

  if (! job) {
    // XXX this may be the wrong place to do this, but it makes syntax errors in
    // files loaded via isopack.load have context.
    if (error instanceof files.FancySyntaxError) {
      error = new Error("Syntax error: " + error.message + " at " +
                        error.file + ":" + error.line + ":" + error.column);
    }
    throw error;
  }

  if (error instanceof files.FancySyntaxError) {
    // No stack, because FancySyntaxError isn't a real Error and has no stack
    // property!
    job.addMessage({
      message: error.message,
      file: error.file,
      line: error.line,
      column: error.column
    });
    return;
  }

  // Regular Error: take the location of the topmost frame.
  var frames = parseStack.parse(error);
  var top = frames[0];
  job.addMessage({
    message: error.message,
    stack: frames,
    func: top.func,
    file: top.file,
    line: top.line,
    column: top.column
  });
};
// Throws unless a buildmessage job is currently active.
var assertInJob = function () {
  if (currentJob.get()) {
    return;
  }
  throw new Error("Expected to be in a buildmessage job");
};
// Throws unless a buildmessage capture is currently active.
var assertInCapture = function () {
  if (currentMessageSet.get()) {
    return;
  }
  throw new Error("Expected to be in a buildmessage capture");
};
// Re-parents the jobs of `innerMessages` (a MessageSet produced by a
// nested capture) under the current job, and merges its messages into
// the current MessageSet. Must be called inside both a capture and a
// job.
var mergeMessagesIntoCurrentJob = function (innerMessages) {
  var outerMessages = currentMessageSet.get();
  if (! outerMessages) {
    throw new Error("Expected to be in a buildmessage capture");
  }

  var outerJob = currentJob.get();
  if (! outerJob) {
    throw new Error("Expected to be in a buildmessage job");
  }

  innerMessages.jobs.forEach(function (innerJob) {
    outerJob.children.push(innerJob);
  });
  outerMessages.merge(innerMessages);
};
// Like _.each, but runs each operation in a separate job
//
// Runs `fn` once per element of `iterable` (in parallel on fibers by
// default; sequentially when options.parallel === false), each wrapped
// in its own child job. Returns the array of results in input order;
// rethrows the first error only after every branch has finished.
var forkJoin = function (options, iterable, fn) {
  // Normalize the forkJoin(iterable, fn) shorthand.
  if (!_.isFunction(fn)) {
    fn = iterable;
    iterable = options;
    options = {};
  }

  var futures = [];
  var results = [];
  // XXX: We could check whether the sub-jobs set estimates, and if not
  // assume they each take the same amount of time and auto-report their completion
  var errors = [];
  var firstError = null;

  // NOTE(review): this mutates the caller-supplied options object —
  // confirm no caller reuses the same options across calls.
  options.forkJoin = true;
  enterJob(options, function () {
    // Parallel unless explicitly disabled.
    var parallel = (options.parallel !== undefined) ? options.parallel : true;
    if (parallel) {
      // Runs one element's work inside a child job and settles `fut`.
      var runOne = fiberHelpers.bindEnvironment(function (fut, fnArguments) {
        try {
          var result = enterJob({title: (options.title || '') + ' child'}, function () {
            return fn.apply(null, fnArguments);
          });
          fut['return'](result);
        } catch (e) {
          fut['throw'](e);
        }
      });
      // Start one fiber per element...
      _.each(iterable, function (...args) {
        var fut = new Future();
        Fiber(function () {
          runOne(fut, args);
        }).run();
        futures.push(fut);
      });
      // ...then join them in input order. results[i]/errors[i] are
      // positional: exactly one of the pair is non-null per element.
      _.each(futures, function (future) {
        try {
          var result = future.wait();
          results.push(result);
          errors.push(null);
        } catch (e) {
          results.push(null);
          errors.push(e);
          if (firstError === null) {
            firstError = e;
          }
        }
      });
    } else {
      // not parallel
      _.each(iterable, function (...args) {
        try {
          var result = fn(...args);
          results.push(result);
          errors.push(null);
        } catch (e) {
          results.push(null);
          errors.push(e);
          if (firstError === null) {
            firstError = e;
          }
        }
      });
    }
  });

  // Defer the rethrow until all branches have run to completion.
  if (firstError) {
    throw firstError;
  }

  return results;
};
// Public interface of the buildmessage module. `buildmessage` is just
// a local alias for the exports object itself.
var buildmessage = exports;
_.extend(exports, {
  capture: capture,
  enterJob: enterJob,
  markBoundary: markBoundary,
  error: error,
  exception: exception,
  jobHasMessages: jobHasMessages,
  assertInJob: assertInJob,
  assertInCapture: assertInCapture,
  mergeMessagesIntoCurrentJob: mergeMessagesIntoCurrentJob,
  forkJoin: forkJoin,
  getRootProgress: getRootProgress,
  reportProgress: reportProgress,
  reportProgressDone: reportProgressDone,
  getCurrentProgressTracker: getCurrentProgressTracker,
  addChildTracker: addChildTracker
});
|
// Copyright...
#include <gtest/gtest.h>
#include "toolbox.h"
using namespace formulate;
// Verifies that Toolbox::set_current_tool() stores the tool and
// current_tool() reads it back; uses two distinct tool values so the
// second set is known to overwrite the first.
TEST(ToolboxTest, GetSetTest) {
  Toolbox toolbox;
  toolbox.set_current_tool(Toolbox::kText_Tool);
  EXPECT_EQ(Toolbox::kText_Tool, toolbox.current_tool());
  toolbox.set_current_tool(Toolbox::kArrow_Tool);
  EXPECT_EQ(Toolbox::kArrow_Tool, toolbox.current_tool());
}
|
/** Minimal person record. */
interface Person {
  /** The person's name. */
  name: string;
  /** Age in years. */
  age: number;
}
|
<reponame>jbwyme/action-destinations<gh_stars>0
// Generated file. DO NOT MODIFY IT BY HAND.

/**
 * Input fields for a Google Ads conversion event. Machine-generated
 * from the action's field definitions — regenerate instead of editing.
 */
export interface Payload {
  /**
   * The Google Ads conversion label. You can find this value from your Google Ads event snippet. The provided event snippet should have, for example, `send_to: AW-123456789/AbC-D_efG-h12_34-567`. Enter the part after the forward slash, without the AW- prefix, e.g. 123456789
   */
  conversion_label: string
  /**
   * Email address of the customer who triggered the conversion event.
   */
  email: string
  /**
   * Order ID of the conversion event. Google requires an Order ID even if the event is not an ecommerce event.
   */
  transaction_id: string
  /**
   * User Agent of the customer who triggered the conversion event.
   */
  user_agent: string
  /**
   * Timestamp of the conversion event.
   */
  conversion_time: string | number
  /**
   * The monetary value attributed to the conversion event.
   */
  value?: number
  /**
   * Currency of the purchase or items associated with the event, in 3-letter ISO 4217 format.
   */
  currency_code?: string
  /**
   * Phone number of the purchaser, in E.164 standard format, e.g. +14150000000
   */
  phone_number?: string
  /**
   * First name of the individual who triggered the conversion event.
   */
  first_name?: string
  /**
   * Last name of the individual who triggered the conversion event.
   */
  last_name?: string
  /**
   * Street address of the individual who triggered the conversion event.
   */
  street_address?: string
  /**
   * City of the individual who triggered the conversion event.
   */
  city?: string
  /**
   * Region of the individual who triggered the conversion event.
   */
  region?: string
  /**
   * Post code of the individual who triggered the conversion event.
   */
  post_code?: string
  /**
   * Country of the individual who triggered the conversion event.
   */
  country?: string
}
|
package org.apache.dubbo.sample.tri.interop.client;
import io.grpc.*;
import org.apache.dubbo.common.context.Lifecycle;
import org.apache.dubbo.sample.tri.util.TriSampleConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Objects;
/**
 * Minimal gRPC server used for Triple/gRPC interop testing.
 * Serves {@code GrpcGreeterImpl} and echoes a "user-attachment"
 * request header back as a response trailer.
 */
public class GrpcServer implements Lifecycle {

    private static final Logger LOGGER = LoggerFactory.getLogger(GrpcServer.class);

    // Port to bind; fixed at construction time.
    private final int port;
    private Server server;

    public GrpcServer(int port) {
        this.port = port;
    }

    // Builds and starts the server, then blocks until a byte arrives on
    // stdin (i.e. until the operator presses Enter).
    public static void main(String[] args) throws IOException, InterruptedException {
        GrpcServer server = new GrpcServer(TriSampleConstants.GRPC_SERVER_PORT);
        server.initialize();
        server.start();
        System.in.read();
    }

    /** Builds (but does not start) the server with service + interceptor. */
    @Override
    public void initialize() throws IllegalStateException {
        this.server = ServerBuilder.forPort(port)
                .addService(new GrpcGreeterImpl())
                .intercept(new EchoAttachmentInterceptor())
                .build();
    }

    /** Starts the server; wraps the checked IOException per Lifecycle. */
    @Override
    public void start() throws IllegalStateException {
        try {
            server.start();
            LOGGER.info("Grpc server started at port {}", port);
        } catch (IOException e) {
            throw new IllegalStateException("Start grpc server failed ", e);
        }
    }

    // Initiates a graceful shutdown (does not wait for termination).
    @Override
    public void destroy() throws IllegalStateException {
        server.shutdown();
    }

    /**
     * If the request carried a "user-attachment" header, writes
     * "hello,&lt;value&gt;" under the same key into the response trailers.
     */
    private static class EchoAttachmentInterceptor implements ServerInterceptor {
        @Override
        public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(
                ServerCall<ReqT, RespT> serverCall,
                Metadata metadata, ServerCallHandler<ReqT, RespT> serverCallHandler) {
            ForwardingServerCall.SimpleForwardingServerCall<ReqT, RespT> forwardingCall = new ForwardingServerCall.SimpleForwardingServerCall<ReqT, RespT>(serverCall) {
                @Override
                public void close(Status status, Metadata trailers) {
                    final String key = "user-attachment";
                    final Metadata.Key<String> metaKey = Metadata.Key.of(key,
                            Metadata.ASCII_STRING_MARSHALLER);
                    // Echo only when the client actually sent the header.
                    if (metadata.containsKey(metaKey)) {
                        trailers.put(metaKey, "hello," + Objects.requireNonNull(
                                metadata.get(metaKey)));
                    }
                    super.close(status, trailers);
                }
            };
            return serverCallHandler.startCall(forwardingCall, metadata);
        }
    }
}
|
<filename>src/eu/iamgio/jrfl/api/commands/Command.java
package eu.iamgio.jrfl.api.commands;
import eu.iamgio.jrfl.api.commands.completion.InitialCommandTabCompletion;
import eu.iamgio.jrfl.api.commands.completion.TabCompletion;
import eu.iamgio.jrfl.api.configuration.Formatter;
import eu.iamgio.jrfl.program.nodes.Nodes;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
/**
 * Base class extended by concrete command classes.
 * <p>
 * A command has an immutable name/description/usage, optional aliases,
 * per-argument tab completions, and mutable working {@code text}.
 *
 * @author Gio
 */
public abstract class Command {

    // Identity never changes after construction, so the fields are final.
    private final String name;
    private final String description;
    private final String usage;
    // Mutable working text associated with the command.
    private String text;

    private final List<Alias> aliases = new ArrayList<>();
    // Maps argument index -> tab-completion handler for that argument.
    private final HashMap<Integer, TabCompletion> completions = new HashMap<>();
    // Whether invocations of this command are fully logged (default: yes).
    private boolean log = true;

    /**
     * Creates a command with a default description.
     *
     * @param name  Command name
     * @param usage Command usage string
     */
    protected Command(String name, String usage) {
        // Chain to the main constructor instead of duplicating the
        // assignments (keeps the default description in one place).
        this(name, "No description provided", usage);
    }

    /**
     * Creates a command.
     *
     * @param name        Command name
     * @param description Command description
     * @param usage       Command usage string
     */
    protected Command(String name, String description, String usage) {
        this.name = name;
        this.description = description;
        this.usage = usage;
    }

    @Override
    public String toString() {
        return name;
    }

    /**
     * @return Text
     */
    public String getText() {
        return text;
    }

    /**
     * Sets the text
     * @param text Text
     */
    public void setText(String text) {
        this.text = text;
    }

    /**
     * @return Command name
     */
    public String getName() {
        return name;
    }

    /**
     * @return Command description
     */
    public String getDescription() {
        return description;
    }

    /**
     * @return Command usage
     */
    public String getUsage() {
        return usage;
    }

    /**
     * Executes the command.
     *
     * @param args Arguments typed after the command name
     */
    public abstract void onCommand(String[] args);

    /**
     * @return <tt>true</tt> if the command is fully logged
     */
    public boolean isLogged() {
        return log;
    }

    /**
     * Sets if the command must be logged fully
     * @param log Log value
     */
    public void setLogged(boolean log) {
        this.log = log;
    }

    /**
     * Adds an alias and registers it; the alias inherits every argument
     * completion already registered on this command.
     * @param name Alias name
     */
    public void addAlias(String name) {
        Alias alias = new Alias(name, this);
        for (int arg : completions.keySet()) {
            alias.setArgCompletion(arg, completions.get(arg));
        }
        aliases.add(alias);
        Commands.registerCommand(alias);
    }

    /**
     * @return Command aliases
     */
    public List<Alias> getAliases() {
        return aliases;
    }

    /**
     * Adds an auto-completion to a certain argument - Must be added in decreasing order
     * @param index Arg index
     * @param completion Completion type
     */
    @SuppressWarnings("deprecation")
    protected void setArgCompletion(int index, TabCompletion completion) {
        completions.put(index, completion);
        // Run the completion whenever TAB is released in the input field.
        Nodes.TEXTFIELD.addEventHandler(KeyEvent.KEY_RELEASED, e -> {
            if (e.getCode() == KeyCode.TAB) {
                // "\0" is appended as a sentinel so a trailing space
                // survives the formatter's split — presumably; confirm.
                String t = Nodes.TEXTFIELD.getText() + "\0";
                String[] args = Formatter.getInstance().format(t).split(" ");
                if (args.length == 1 && args[0].replace("\0", "").isEmpty() && completion instanceof InitialCommandTabCompletion) {
                    // Empty prompt: offer the initial-command completion.
                    completion.onTab(new String[] {""});
                } else if (args.length > 0 && (args[0].equals(name) || args.length == 1)) {
                    if (args.length == index + 2) {
                        List<String> list = Arrays.asList(args);
                        // Strip the sentinel from the token being completed.
                        list.set(list.size() - 1, list.get(list.size() - 1).replace("\0", ""));
                        // Idiomatic toArray: pass new String[0], the JVM
                        // sizes the result (preferred over new String[size]).
                        completion.onTab(list.toArray(new String[0]));
                    }
                }
            }
        });
    }
}
|
<reponame>gebsl/oyd-browsing-plugin<gh_stars>1-10
// Reverse-DNS identifier for this browsing plugin — presumably the
// OYD ("own your data") plugin id used at registration; confirm
// against the plugin registry before changing.
export const REPO_URI = 'dev.unterholzer.oyd.browsing';
|
<reponame>mincho8050/web_programming_course
package net.control;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
public class Welcome extends HttpServlet {
    // When the parent type is a class: extends
    // When the parent type is an interface: implements
    // To check the result:
    //   restart the server
    //   -> http://localhost:8090.mvcTest/wel.do
    //   NOTE(review): the URL above looks malformed — presumably
    //   http://localhost:8090/mvcTest/wel.do; confirm.
    // The Welcome servlet must be registered in web.xml (the deployment
    // descriptor).
    @Override // re-defines (overrides) the inherited method
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        // Called for <form method=get> style requests.
        // req: same as the JSP implicit object `request` / resp: same as `response`.
        try {
            // To respond with a plain string (for AJAX):
            // resp.setContentType("text/plain; charset=UTF-8");
            // Respond as a full HTML document instead.
            resp.setContentType("text/html; charset=UTF-8");
            // Writer used to send the response body to the requester.
            PrintWriter out = resp.getWriter();
            out.println("<!DOCTYPE html>");
            out.println("<html>");
            out.println("<head>");
            out.println("<meta charset='UTF-8'>");
            out.println("<title>환영합니다</title>");
            out.println("</head>");
            out.println("<body>");
            out.println("<strong>대한민국</strong>");
            out.println("<hr>");
            out.println("<span style='color: red'>오필승코리아</span>");
            out.println("<hr>");
            out.println("<table border='1'>");
            out.println("<tr>");
            out.println("<th>이름</th>");
            out.println("<td>무궁화</td>");
            out.println("</tr>");
            out.println("</table>");
            out.println("</body>");
            out.println("</html>");
            out.close(); // return the resource!
        } catch (Exception e) {
            System.out.println("요청실패:"+e);
        } // try
    } // doGet

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        // Called for <form method=post> style requests.
    } // doPost
} //
|
#!/usr/bin/env bash
## run with -
## bash contrib/build-docker-scripts/buildARMlinux_64.sh
export LC_ALL=C.UTF-8
# Extra packages installed inside the build container.
export PACKAGES="clang llvm unzip"
# The root dir.
BASE_ROOT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../../ >/dev/null 2>&1 && pwd )
export BASE_ROOT_DIR
# Client version digits scraped from configure.ac (concatenated, e.g. "1234").
VER=$(head -n 20 $BASE_ROOT_DIR/configure.ac | grep -E 'define\(_CLIENT_VERSION_(MAJOR|MINOR|REVISION|BUILD)' | grep -ohE '[0-9]' | tr -d '[:space:]')
export VER
echo "Setting specific values in env"
# Optionally source an environment file supplied by CI.
if [ -n "${FILE_ENV}" ]; then
    set -o errexit;
    source "${FILE_ENV}"
fi
echo "Fallback to default values in env (if not yet set)"
export MAKEJOBS=${MAKEJOBS:--j4}
export BASE_SCRATCH_DIR=${BASE_SCRATCH_DIR:-$BASE_ROOT_DIR/ci/scratch}
export HOST=${HOST:-$("$BASE_ROOT_DIR/depends/config.guess")}
export CONTAINER_NAME=xuez_build
export DOCKER_NAME_TAG=ubuntu:18.04
export DEBIAN_FRONTEND=noninteractive
export CCACHE_SIZE=${CCACHE_SIZE:-100M}
export CCACHE_TEMPDIR=${CCACHE_TEMPDIR:-/tmp/.ccache-temp}
export CCACHE_COMPRESS=${CCACHE_COMPRESS:-1}
# The cache dir.
# This folder exists on the ci host and ci guest. Changes are propagated back and forth.
export CCACHE_DIR=${CCACHE_DIR:-$BASE_SCRATCH_DIR/.ccache}
# The depends dir.
# This folder exists on the ci host and ci guest. Changes are propagated back and forth.
export DEPENDS_DIR=${DEPENDS_DIR:-$BASE_ROOT_DIR/depends}
# Folder where the build result is put (bin and lib).
export BASE_OUTDIR=${BASE_OUTDIR:-$BASE_SCRATCH_DIR/out/$HOST}
# Folder where the build is done (dist and out-of-tree build).
export BASE_BUILD_DIR=${BASE_BUILD_DIR:-$BASE_SCRATCH_DIR/build}
export PREVIOUS_RELEASES_DIR=${PREVIOUS_RELEASES_DIR:-$BASE_ROOT_DIR/releases/$HOST}
export SDK_URL=${SDK_URL:-https://bitcoincore.org/depends-sources/sdks}
export DOCKER_PACKAGES=${DOCKER_PACKAGES:-build-essential libtool autotools-dev automake pkg-config bsdmainutils curl ca-certificates ccache python3 rsync git procps g++-aarch64-linux-gnu binutils-aarch64-linux-gnu}
export GOAL=${GOAL:-install}
export DIR_QA_ASSETS=${DIR_QA_ASSETS:-${BASE_SCRATCH_DIR}/qa-assets}
export PATH=${BASE_ROOT_DIR}/ci/retry:$PATH
export CI_RETRY_EXE=${CI_RETRY_EXE:-"retry --"}
export BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports CXXFLAGS=-Wno-psabi --with-boost-process --enable-suppress-external-warnings --disable-dependency-tracking --disable-tests --disable-gui-tests --disable-bench --enable-c++17"
# create the docker container
# Only create it when no container with this name exists yet; otherwise
# the existing (possibly stopped) container is reused as-is.
if [[ -z $(docker container ls --all | grep "$CONTAINER_NAME") ]]; then
    # Create folders that are mounted into the docker
    mkdir -p "${CCACHE_DIR}"
    mkdir -p "${PREVIOUS_RELEASES_DIR}"
    export ASAN_OPTIONS="detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1"
    export LSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/lsan"
    export TSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/tsan:halt_on_error=1:log_path=${BASE_SCRATCH_DIR}/sanitizer-output/tsan"
    export UBSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/ubsan:print_stacktrace=1:halt_on_error=1:report_error_type=1"
    # Snapshot the relevant environment so it can be passed into the container.
    env | grep -E '^(BITCOIN_CONFIG|BASE_|QEMU_|CCACHE_|LC_ALL|BOOST_TEST_RANDOM|DEBIAN_FRONTEND|CONFIG_SHELL|(ASAN|LSAN|TSAN|UBSAN)_OPTIONS|PREVIOUS_RELEASES_DIR)' | tee /tmp/env
    if [[ $BITCOIN_CONFIG = *--with-sanitizers=*address* ]]; then # If ran with (ASan + LSan), Docker needs access to ptrace (https://github.com/google/sanitizers/issues/764)
        DOCKER_ADMIN="--cap-add SYS_PTRACE"
    fi
    echo "Creating $DOCKER_NAME_TAG container to run in"
    ${CI_RETRY_EXE} docker pull "$DOCKER_NAME_TAG"
    docker run -idt \
        --mount type=bind,src=$BASE_ROOT_DIR,dst=/ro_base,readonly \
        --mount type=bind,src=$CCACHE_DIR,dst=$CCACHE_DIR \
        --mount type=bind,src=$DEPENDS_DIR,dst=$DEPENDS_DIR \
        --mount type=bind,src=$PREVIOUS_RELEASES_DIR,dst=$PREVIOUS_RELEASES_DIR \
        -w $BASE_ROOT_DIR \
        --env-file /tmp/env \
        --name $CONTAINER_NAME \
        $DOCKER_NAME_TAG
fi
export P_CI_DIR="$PWD"
# Runs a command inside the build container, from the current host dir.
DOCKER_EXEC () {
    docker exec $CONTAINER_NAME bash -c "export PATH=$BASE_SCRATCH_DIR/bins/:\$PATH && cd $P_CI_DIR && $*"
}
export -f DOCKER_EXEC
# Strips the built binaries inside the container, tars them, and copies
# the gui/cli tarballs back to the host.
COPYBINS () {
    DOCKER_EXEC "aarch64-linux-gnu-strip src/qt/xuez-qt && cp src/qt/xuez-qt . && tar czf $BUILDHOST-xuez-gui-v$VER.tgz xuez-qt && rm xuez-qt"
    DOCKER_EXEC "aarch64-linux-gnu-strip src/xuezd src/xuez-cli src/xuez-tx src/xuez-wallet && mv src/xuezd src/xuez-cli src/xuez-tx src/xuez-wallet . "
    DOCKER_EXEC "tar czf $BUILDHOST-xuez-cli-v$VER.tgz xuezd xuez-cli xuez-tx xuez-wallet && rm xuezd xuez-cli xuez-tx xuez-wallet"
    docker cp $CONTAINER_NAME:$BASE_ROOT_DIR/$BUILDHOST-xuez-gui-v$VER.tgz .
    docker cp $CONTAINER_NAME:$BASE_ROOT_DIR/$BUILDHOST-xuez-cli-v$VER.tgz .
    DOCKER_EXEC rm -rf \$\(ls $BASE_ROOT_DIR/*xuez*tgz\)
}
DOCKER_EXEC echo "Free disk space:"
DOCKER_EXEC df -h
# NOTE(review): CI_RETRY_EXE ("retry --") wraps the exported shell
# function DOCKER_EXEC here — confirm the retry helper execs through
# bash so the function is visible.
${CI_RETRY_EXE} DOCKER_EXEC apt-get update
${CI_RETRY_EXE} DOCKER_EXEC apt-get install --no-install-recommends --no-upgrade -y $PACKAGES $DOCKER_PACKAGES
echo "Create/syncing $BASE_ROOT_DIR"
# Copy the read-only bind mount into the writable build tree.
DOCKER_EXEC rsync -a /ro_base/ $BASE_ROOT_DIR
# BUG FIX: the old guard tested a single path containing a space
# ("$DEPENDS_DIR/SDKs $DEPENDS_DIR/sdk-sources"), which never exists,
# so the condition was always true and never checked the real dirs.
# Test both directories; mkdir -p is idempotent anyway. (The -d test
# runs on the host, but DEPENDS_DIR is bind-mounted into the
# container, so host and guest see the same tree.)
if [[ ! -d "${DEPENDS_DIR}/SDKs" || ! -d "${DEPENDS_DIR}/sdk-sources" ]]; then
    DOCKER_EXEC mkdir -p "${DEPENDS_DIR}/SDKs" "${DEPENDS_DIR}/sdk-sources"
fi
DEP_OPTS=""
# Cross-compile target triple for this script (ARM 64-bit Linux).
BUILDHOST="aarch64-linux-gnu"
#i686-pc-linux-gnu for Linux 32 bit
#x86_64-pc-linux-gnu for x86 Linux
#x86_64-w64-mingw32 for Win64
#x86_64-apple-darwin16 for macOS
#arm-linux-gnueabihf for Linux ARM 32 bit
#aarch64-linux-gnu for Linux ARM 64 bit
#armv7a-linux-android for Android ARM 32 bit
#aarch64-linux-android for Android ARM 64 bit
#i686-linux-android for Android x86 32 bit
#x86_64-linux-android for Android x86 64 bit
# Build the dependency packages for the target triple first.
MAKE_COMMAND="make $MAKEJOBS -C depends"
DOCKER_EXEC "$MAKE_COMMAND" HOST=$BUILDHOST
# Optional "clean" argument wipes any previous build.
if [ "$1" == "clean" ]; then
    echo "Cleaning build dir $BASE_ROOT_DIR..."
    DOCKER_EXEC "[[ -f Makefile ]] && make distclean || make clean"
fi
# Bootstrap/configure only when needed, then build and copy binaries out.
DOCKER_EXEC "[[ ! -f configure ]] && ./autogen.sh"
DOCKER_EXEC "[[ -f configure ]] && ./configure $BITCOIN_CONFIG --prefix=$DEPENDS_DIR/$BUILDHOST"
DOCKER_EXEC "make $MAKEJOBS" && COPYBINS
|
// Doxygen-generated search index data: maps the "continueonfailure"
// search key to its documentation anchor. Machine-generated — do not
// hand-edit.
var searchData=
[
  ['continueonfailure_1451',['ContinueOnFailure',['../structCatch_1_1ResultDisposition.html#a3396cad6e2259af326b3aae93e23e9d8aa18c94bd60c5614e17a84c2ced3bbfd5',1,'Catch::ResultDisposition']]]
];
|
package com.mycompany.smartparkingmanagement.dao;
import com.mycompany.smartparkingmanagement.entities.BookingBean;
import com.mycompany.smartparkingmanagement.entities.OrderBean;
import com.mycompany.smartparkingmanagement.entities.TimeCheckBean;
import com.mycompany.smartparkingmanagement.helper.ConnectionProvider;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.time.LocalTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Random;
/**
 * Data-access object for parking bookings, slot allocation, fee
 * calculation, and payment-order persistence.
 * <p>
 * NOTE(review): the shared {@code Connection} obtained from
 * {@code ConnectionProvider} is deliberately NOT closed here — the
 * provider may hand out a shared/singleton connection; confirm before
 * adding it to the try-with-resources blocks. Statements and result
 * sets, however, are now always closed.
 */
public class BookingDao {

    // Used to pick a random free slot in slotProvider().
    Random random = new Random();

    /**
     * Inserts a confirmed (paid) booking row.
     * Start/end times are stored as "date HH:mm" strings.
     *
     * @return true when the row was inserted; false otherwise.
     *         BUG FIX: the flag was previously initialized to {@code true},
     *         so the method reported success even when the INSERT threw
     *         an SQLException.
     */
    public boolean bookingDone(String cust_name, String cust_surname, String vehicle_type,
            String vehicle_no, String date, String newstart, String newend,
            double total_amount, double amount_paid, int slot_no) {
        String str_time = date + " " + newstart;
        String end_time = date + " " + newend;
        boolean f = false;
        try {
            Connection con = ConnectionProvider.getConnection();
            String sql_insert_bookingtable = "INSERT INTO booking(cust_name,cust_surname,"
                    + "vehicle_type,vehicle_no,date,start_time,end_time"
                    + ",total_amount,paid_amount,slot_id) VALUES(?,?,?,?,?,?,?,?,?,?)";
            try (PreparedStatement pstmt_insert_bookingtable = con.prepareStatement(sql_insert_bookingtable)) {
                pstmt_insert_bookingtable.setString(1, cust_name);
                pstmt_insert_bookingtable.setString(2, cust_surname);
                pstmt_insert_bookingtable.setString(3, vehicle_type);
                pstmt_insert_bookingtable.setString(4, vehicle_no);
                pstmt_insert_bookingtable.setString(5, date);
                pstmt_insert_bookingtable.setString(6, str_time);
                pstmt_insert_bookingtable.setString(7, end_time);
                pstmt_insert_bookingtable.setDouble(8, total_amount);
                pstmt_insert_bookingtable.setDouble(9, amount_paid);
                pstmt_insert_bookingtable.setInt(10, slot_no);
                int rowInserted = pstmt_insert_bookingtable.executeUpdate();
                if (rowInserted > 0) {
                    System.out.println("A new record is added");
                    f = true;
                }
            }
        } catch (SQLException e) {
            System.out.println("Opps Something is wrong " + e);
        }
        return f;
    }

    /**
     * Finds a random free slot for the requested date/time window and
     * stores the result on the bean: msg "true" + slot number when a
     * slot is free, msg "false" when sold out; bool=true marks "lookup
     * completed".
     */
    public BookingBean slotProvider(BookingBean book) {
        book.setBool(false);
        try {
            Connection con = ConnectionProvider.getConnection();
            ArrayList<Integer> bookedSlots = new ArrayList<>();
            String newstart = book.getDate() + " " + book.getStr_start_time();
            String newend = book.getDate() + " " + book.getStr_end_time();
            // Slots whose existing booking overlaps [newstart, newend].
            // NOTE(review): times are compared as strings — this only
            // works while the stored format is fixed-width; confirm.
            String sql = "SELECT slot_id FROM booking WHERE NOT (end_time < ? OR start_time > ?) AND (date=?)";
            try (PreparedStatement pstmt = con.prepareStatement(sql)) {
                pstmt.setString(1, newstart);
                pstmt.setString(2, newend);
                pstmt.setString(3, book.getDate());
                try (ResultSet rs = pstmt.executeQuery()) {
                    while (rs.next()) {
                        bookedSlots.add(rs.getInt("slot_id"));
                    }
                }
            }
            // All bookable (non-buffer) slots, minus the booked ones.
            ArrayList<Integer> freeSlots = new ArrayList<>();
            try (Statement stmt = con.createStatement();
                    ResultSet rs1 = stmt.executeQuery("SELECT slot_id FROM slot WHERE buffer='no'")) {
                while (rs1.next()) {
                    freeSlots.add(rs1.getInt("slot_id"));
                }
            }
            freeSlots.removeAll(bookedSlots);
            // System.out.println("Available Slots:" + freeSlots);
            if (freeSlots.isEmpty()) {
                System.out.println("Sorry we are out of slots on " + book.getDate() + " at " + book.getStr_start_time());
                book.setMsg("Sorry we are out of slots on");
                // NOTE(review): the human-readable message above is
                // immediately overwritten — callers appear to branch on
                // msg being "true"/"false". Behavior kept as-is; confirm.
                book.setMsg("false");
                book.setBool(true);
            } else {
                int slot_no = freeSlots.get(random.nextInt(freeSlots.size()));
                book.setMsg("true");
                book.setSlot_no(slot_no);
                book.setBool(true);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return book;
    }

    //vehicle type //start time//end time from slot provider
    /**
     * Computes the parking fee for a vehicle type and "HH:mm" time
     * window: rate (from the master table) times the number of
     * complete 30-minute blocks.
     *
     * @return the fee, or 0 for an unknown vehicle type or on error
     */
    public double Payment(String vehicle_type, String new_start_time, String new_end_time) {
        //Payment part
        float cost = 0;
        try {
            Connection con = ConnectionProvider.getConnection();
            String sql_vehicle = "SELECT * FROM master where vehicle_type = ?";
            System.out.println("vehicle type:" + vehicle_type);
            int db_rate = 0;
            try (PreparedStatement pstmt_payment = con.prepareStatement(sql_vehicle)) {
                pstmt_payment.setString(1, vehicle_type);
                try (ResultSet rs_payment = pstmt_payment.executeQuery()) {
                    while (rs_payment.next()) {
                        db_rate = rs_payment.getInt("rate");
                    }
                }
            }
            LocalTime start_time = LocalTime.parse(new_start_time);
            LocalTime end_time = LocalTime.parse(new_end_time);
            // Calculating the difference in Hours
            int hours = (int) ChronoUnit.HOURS.between(start_time, end_time);
            // Calculating the difference in Minutes
            int minutes = (int) (ChronoUnit.MINUTES.between(start_time, end_time) % 60);
            int min = (hours * 60) + minutes;
            // Integer division: customers are charged per *complete*
            // 30-minute block — presumably intentional; confirm.
            cost = db_rate * (min / 30);
            System.out.println("You have to pay :" + cost);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return cost;
    }

    /**
     * Loads the opening/closing times for a vehicle type from the
     * master table.
     *
     * @return the populated bean, or null when no row matched / on error;
     *         if several rows match, the last one wins.
     */
    public TimeCheckBean CheckTImeFromMaster(String vehicle_type) {
        Connection con = ConnectionProvider.getConnection();
        TimeCheckBean time = null;
        try {
            String sql = "SELECT * FROM master WHERE vehicle_type = ?";
            try (PreparedStatement pstmt = con.prepareStatement(sql)) {
                pstmt.setString(1, vehicle_type);
                try (ResultSet rs = pstmt.executeQuery()) {
                    while (rs.next()) {
                        time = new TimeCheckBean();
                        time.setDb_open_time(rs.getString("open_time"));
                        time.setDb_close_time(rs.getString("close_time"));
                        System.out.println(rs.getString("open_time"));
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return time;
    }

    /**
     * Persists a freshly created payment order (e.g. from the payment
     * gateway's order-create response).
     *
     * @return true when the row was inserted
     */
    public boolean InsertOrderToDB(OrderBean or) {
        boolean f = false;
        try {
            Connection con = ConnectionProvider.getConnection();
            try (PreparedStatement pstmt = con.prepareStatement("INSERT INTO orders(amount,"
                    + " amount_paid, created_at, amount_due, currency, receipt, order_id, entity,"
                    + " offer_id, status, attempts) VALUES(?,?,?,?,?,?,?,?,?,?,?)")) {
                pstmt.setDouble(1, or.getAmount());
                pstmt.setDouble(2, or.getAmount_paid());
                pstmt.setString(3, or.getCreated_at());
                pstmt.setDouble(4, or.getAmount_due());
                pstmt.setString(5, or.getCurrency());
                pstmt.setString(6, or.getReceipt());
                pstmt.setString(7, or.getOrder_id());
                pstmt.setString(8, or.getEntity());
                pstmt.setInt(9, or.getOffer_id());
                pstmt.setString(10, or.getStatus());
                pstmt.setInt(11, or.getAttempts());
                int rowInserted = pstmt.executeUpdate();
                if (rowInserted > 0) {
                    System.out.println("InsertedSuccessfully Orderdb");
                    f = true;
                } else {
                    System.out.println("Cannot Update data ");
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return f;
    }

    /**
     * Marks an order as successfully paid: records the paid/due
     * amounts, gateway payment id, signature, and status for the row
     * matched by order_id.
     *
     * @return true when exactly that row was updated
     */
    public boolean InsertSuccessPaidToDB(OrderBean or) {
        boolean f = false;
        try {
            Connection con = ConnectionProvider.getConnection();
            try (PreparedStatement pstmt = con.prepareStatement(" UPDATE orders SET amount_paid=?,amount_due=?,"
                    + "razor_id=?,signature=?,status=? WHERE order_id = ?")) {
                pstmt.setDouble(1, or.getAmount_paid());
                pstmt.setDouble(2, or.getAmount_due());
                pstmt.setString(3, or.getId());
                pstmt.setString(4, or.getSignature());
                pstmt.setString(5, or.getStatus());
                pstmt.setString(6, or.getOrder_id());
                int rowInserted = pstmt.executeUpdate();
                if (rowInserted > 0) {
                    System.out.println("InsertedSuccessfully signature");
                    f = true;
                } else {
                    System.out.println("Cannot Update data ");
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
            f = false;
        }
        return f;
    }

    /**
     * Records a walk-in (on-the-spot) booking: creates a bare orders
     * row stamped with the current time, then inserts the booking with
     * checkin_time = now().
     *
     * @return true when both inserts succeeded
     */
    public boolean onSpotBookingInsertion(String cust_name, String cust_surname, String vehicle_type, String vehicle_no, String date, String newstart, String newend, int slot_no) {
        String str_time = date + " " + newstart;
        String end_time = date + " " + newend;
        boolean f = false;
        try {
            Connection con = ConnectionProvider.getConnection();
            String sql_insert_ordertable = "INSERT INTO orders( created_at) VALUES (now())";
            try (PreparedStatement pstmt_insert_ordertable = con.prepareStatement(sql_insert_ordertable)) {
                int rowInserted = pstmt_insert_ordertable.executeUpdate();
                if (rowInserted > 0) {
                    System.out.println("A new record is added");
                    String sql_insert_bookingtable = "INSERT INTO BOOKING(cust_name,cust_surname,vehicle_type,vehicle_no,date,start_time,end_time,checkin_time,slot_id) VALUES(?,?,?,?,?,?,?,now(),?)";
                    try (PreparedStatement pstmt_insert_bookingtable = con.prepareStatement(sql_insert_bookingtable)) {
                        pstmt_insert_bookingtable.setString(1, cust_name);
                        pstmt_insert_bookingtable.setString(2, cust_surname);
                        pstmt_insert_bookingtable.setString(3, vehicle_type);
                        pstmt_insert_bookingtable.setString(4, vehicle_no);
                        pstmt_insert_bookingtable.setString(5, date);
                        pstmt_insert_bookingtable.setString(6, str_time);
                        pstmt_insert_bookingtable.setString(7, end_time);
                        pstmt_insert_bookingtable.setInt(8, slot_no);
                        int rowInserted1 = pstmt_insert_bookingtable.executeUpdate();
                        if (rowInserted1 > 0) {
                            System.out.println("A new record is added");
                            f = true;
                        }
                    }
                }
            }
        } catch (SQLException e) {
            System.out.println("Opps Something is wrong " + e);
        }
        return f;
    }
}
|
<reponame>aminnelson/google-appengine-mac-launcher
/* Copyright 2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#import <Foundation/Foundation.h>
// MBLogFilter is a line-oriented filter with regexp triggers.
// Data sent to the filter with a call to processString: will be processed.
//
// Hooks:
// [logFilter addGenericHook:callback forRegex:@"^ERROR:.*"];
// This will invoke the 'callback' NSInvocation when a line starting with
// "ERROR" is received.
//
// This class is now only used for hooks on the log output of dev_appserver.
@interface MBLogFilter : NSObject {
@private
// Registered hooks, appended by the add* methods below.
NSMutableArray *hooks_; // array of dictionaries with keys: regex, callback
}
// The designated initialiser.
// NOTE(review): an earlier revision of this comment described an
// 'outputPipe' argument that no longer exists; filtered data is now simply
// returned by processString:.
- (id)init;
// The following are one-shot callbacks. The callback is retained, and then
// released after it is invoked. You can add multiple hooks for the same regex
// or event.
// Most generic hook. The callback will be executed if a line matches the
// given regex. See GMRegex for the regexp language supported.
- (void)addGenericHook:(NSInvocation *)callback forRegex:(NSString *)regex;
// Hook for a project finished starting.
- (void)addProjectLaunchCompleteCallback:(NSInvocation *)callback;
// Input is a string from our owner (e.g. a running task).
// Output is our filtered result, which may be the same as the input.
// This is called for all lines of text running through the filter.
- (NSString *)processString:(NSString *)output;
@end
|
#!/bin/bash
#
# Fix ownership and permissions of the vagrant user's private SSH key.
# Use ':' as the user:group separator — the '.' form is a deprecated GNU
# extension and is rejected by some chown implementations.
chown vagrant:vagrant /home/vagrant/.ssh/id_rsa
chmod 600 /home/vagrant/.ssh/id_rsa
|
#!/usr/bin/env bash
# Release driver: bumps version numbers, updates the release branch, and
# publishes the Python (PyPI) and Node (npm) packages.
# Resolve the absolute directory containing this script.
REL_SCRIPT_DIR="`dirname \"$0\"`"
SCRIPT_DIR="`( cd \"$REL_SCRIPT_DIR\" && pwd )`"
# Detect the host platform; sedi() uses this to pick the right 'sed -i' form.
case "$OSTYPE" in
darwin*) PLATFORM="OSX" ;;
linux*) PLATFORM="LINUX" ;;
bsd*) PLATFORM="BSD" ;;
*) PLATFORM="UNKNOWN" ;;
esac
release_python()
{
# Build the Python sdist from a clean checkout of the release branch and
# upload it to PyPI via twine.
cd $SCRIPT_DIR/..
git checkout -B release origin/release
git clean -xfd || exit 1
cd python
# python setup.py register -r pypi
python setup.py sdist || exit 1
python -m twine upload dist/* || exit 1
}
release_node()
{
# Publish the npm package from a clean checkout of the release branch.
cd $SCRIPT_DIR/..
git checkout -B release origin/release
git clean -xfd || exit 1
unset NPM_TAG
# Pre-releases (rc/beta) go under the 'next' dist-tag so a plain
# 'npm install' keeps resolving to the latest stable version.
if [[ $NEW_VERSION =~ .*(rc|beta).* ]]; then
NPM_TAG='--tag next'
fi
npm publish . $NPM_TAG || exit 1
}
release_web()
{
# The website deploy moved to the beautifier/beautifier.io repository;
# the old gh-pages flow is kept below, commented out, for reference.
echo release is now on beautifier/beautifier.io
# cd $SCRIPT_DIR/..
# git clean -xfd || exit 1
# git fetch || exit 1
# git checkout -B gh-pages origin/gh-pages || exit 1
# git merge origin/release --no-edit || exit 1
# git push || exit 1
# git checkout master
}
# Portable in-place sed. BSD/macOS sed requires an explicit (empty) backup
# suffix after -i, while GNU sed must not receive one. Arguments are passed
# through as "$@" so sed expressions containing spaces survive intact
# (the original unquoted $@ re-split them).
sedi() {
    if [[ "$PLATFORM" == "OSX" || "$PLATFORM" == "BSD" ]]; then
        sed -i "" "$@"
    elif [ "$PLATFORM" == "LINUX" ]; then
        sed -i "$@"
    else
        # Unknown platform: fail loudly rather than corrupt files.
        exit 1
    fi
}
update_versions()
{
# On a pristine master checkout, bump the version everywhere it appears
# (package.json, README CDN links, Python __version__) and push the commit.
git fetch --all || exit 1
git checkout master || exit 1
git reset --hard origin/master || exit 1
git clean -xfd || exit 1
npm version --no-git-tag-version $NEW_VERSION
# Rewrite the CDN URLs and install snippet in README.md to the new version.
sedi -E 's@(cdn.rawgit.+beautify/v)[^/]+@\1'$NEW_VERSION'@' README.md
sedi -E 's@(cdnjs.cloudflare.+beautify/)[^/]+@\1'$NEW_VERSION'@' README.md
sedi -E 's/\((README\.md:.js-beautify@).+\)/(\1'$NEW_VERSION')/' README.md
echo "__version__ = '$NEW_VERSION'" > python/jsbeautifier/__version__.py
git add .
git commit -am "Bump version numbers for $NEW_VERSION"
git push
}
update_release_branch()
{
# Merge master into the release branch, rebuild the generated JS, commit,
# tag v$NEW_VERSION, and push the branch and tags.
git reset --hard
git clean -xfd
git checkout -B release origin/release || exit 1
git merge origin/master --no-edit || exit 1
make js || exit 1
# js/lib is normally gitignored; force-add the built artifacts for release.
git add -f js/lib/ || exit 1
git commit -m "Release: $NEW_VERSION"
git tag "v$NEW_VERSION" || exit 1
git push || exit 1
git push --tags
}
main()
{
    cd $SCRIPT_DIR/..
    # NEW_VERSION is read by the helper functions above via bash dynamic
    # scoping; 'local' keeps it from leaking past main. The original assigned
    # it twice — once is enough.
    local NEW_VERSION=$1
    update_versions
    update_release_branch
    release_python
    release_node
    release_web
}
# Run in a subshell so directory changes don't affect the caller.
# "$@" (not $*) preserves the version argument verbatim.
(main "$@")
|
#!/usr/bin/env bash
# For each image tag defined by set-tags, start an Ember-CSI container and
# run the csi-sanity suite against it.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
# -e: stop on first failure; -v: echo commands for CI logs.
set -ev
. "$DIR/set-tags"
mkdir -p /tmp/csi
for tag_info_string in $TAGS; do
# Each entry is "<something>;<image>"; split on ';' into an array.
IFS=';' read -a tag_info <<< "$tag_info_string"
echo "testing ${tag_info[1]}"
echo "X_CSI_PERSISTENCE_CONFIG=$X_CSI_PERSISTENCE_CONFIG"
echo "X_CSI_BACKEND_CONFIG=$X_CSI_BACKEND_CONFIG"
# Privileged + host network + the device/iscsi/lvm bind mounts below are
# required for the CSI driver to manage storage on the host.
docker run --name ember -t --privileged --net=host \
-e X_CSI_PERSISTENCE_CONFIG=$X_CSI_PERSISTENCE_CONFIG \
-e X_CSI_BACKEND_CONFIG=$X_CSI_BACKEND_CONFIG \
-e DM_DISABLE_UDEV=1 \
-e PYTHONUNBUFFERED=0 \
-v /tmp/csi:/tmp/csi \
-v /etc/iscsi:/etc/iscsi \
-v /dev:/dev \
-v /etc/lvm:/etc/lvm \
-v /var/lock/lvm:/var/lock/lvm \
-v /lib/modules:/lib/modules:ro \
-v /run:/run \
-v /var/lib/iscsi:/var/lib/iscsi \
-v /etc/localtime:/etc/localtime:ro \
-v /root/cinder:/var/lib/cinder \
-v /sys/fs/cgroup:/sys/fs/cgroup:ro \
-p 50051:50051 \
-d ${tag_info[1]}
echo -e "\n\n Ember-CSI startup logs:"
docker logs ember
# Run the sanity suite against the gRPC endpoint exposed above.
$DIR/csi-sanity-v2 --csi.endpoint=127.0.0.1:50051 --test.timeout 15m --ginkgo.v --ginkgo.progress
# Always tear the container down before testing the next tag.
docker rm -f ember
done
|
/// View-state holder for the "add new employee" flow.
/// Emits `Change` events through `onChange` as the operation progresses.
final class AddNewEmployeeState {
    enum Change {
        case error(message: String?)
        case loading(Bool)
        case success
    }

    /// Observer invoked for every state transition.
    var onChange: ((AddNewEmployeeState.Change) -> Void)?

    /// Loading state indicator; flipping it notifies the observer.
    var isLoading = false {
        didSet {
            onChange?(.loading(isLoading))
        }
    }

    /// Simulates adding a new employee: toggles the loading flag, performs a
    /// fake background round-trip, then reports either `.success` or `.error`.
    func addNewEmployee() {
        isLoading = true
        DispatchQueue.global().async {
            // Fake network request: outcome is chosen at random.
            let didSucceed = Bool.random()
            DispatchQueue.main.async {
                // Clear the loading state first (same event order as before:
                // .loading(false) precedes the success/error notification).
                self.isLoading = false
                if didSucceed {
                    self.onChange?(.success)
                } else {
                    self.onChange?(.error(message: "Failed to add new employee. Please try again."))
                }
            }
        }
    }
}
|
<reponame>Hunter4787/User-Admin<gh_stars>0
import React, { Component } from 'react';
import { Route } from 'react-router-dom'
import SignIn from './signin';
import SignUp from './signup';
import Home from './home';
import Aboutus from './aboutus';
import Footer from './footer';
import Header from './header';
import User from './Useraccount';
import Contactus from './contactus';
import Space from './space';
class Routes extends Component {
constructor(props) {
super(props);
this.state = {}
}
render() {
return (<div>
<Route exact path="/" component={Home}></Route>
<Route exact path="/a-propos" render={() => <div><Space/><Header/><Aboutus style={{margin:"100px"}}/><Footer /></div>}></Route>
<Route exact path="/connexion" render={() => <div><Header/><SignIn /><Footer /></div>}></Route>
<Route exact path="/inscription" render={() => <div><Header/><SignUp /><Footer /></div>}></Route>
<Route exact path="/contact" render={() => <div><Space/><Header/><Contactus /><Footer /></div>}></Route>
<Route exact path="/utilisateur" component={User}></Route>
</div>);
}
}
export default Routes;
|
<reponame>MrH3nry/Hefastos<filename>Hefastos_Android/app/src/main/java/br/edu/ifpb/hefastos_android/entities/QuestaoAberta.java
package br.edu.ifpb.hefastos_android.entities;

/**
 * An open-ended (free-text) question: extends the base {@code Questao}
 * with the expected answer text.
 */
public class QuestaoAberta extends Questao {

    /** Expected free-text answer for this question. */
    private String resposta;

    /**
     * Creates an open question.
     *
     * @param fonte     source the question was taken from
     * @param descricao short description of the question
     * @param enunciado the question statement shown to the user
     * @param resposta  the expected answer text
     */
    public QuestaoAberta(String fonte, String descricao, String enunciado, String resposta) {
        super(fonte, descricao, enunciado);
        this.resposta = resposta;
    }

    /** @return the expected answer text */
    public String getResposta() {
        return resposta;
    }

    /** @param resposta the expected answer text */
    public void setResposta(String resposta) {
        this.resposta = resposta;
    }

    /** Renders as {@code "(fonte) enunciado"} — same format as the original. */
    @Override
    public String toString() {
        return String.format("(%s) %s", this.fonte, this.enunciado);
    }
}
|
# Run the analysis chain in batch ROOT (-b: no graphics, -l: no splash).
# Step 1: digitization.
root -l -b <<-EOF
.x digi.C
.q
EOF
# Step 2: analyse the ADC output produced by the previous step.
root -l -b <<-EOF
.x ADC_analysis.C
.q
EOF
# Step 3: particle tracking.
root -l -b <<-EOF
.x Particle_tracking.C
.q
EOF
|
webpackJsonp(["module"],{
/***/ "../../../../../src/app/components/dashboard/dashboard.component.html":
/***/ (function(module, exports) {
module.exports = "<app-loader [loaderState]=\"loaderState\"></app-loader>\n\n<header>\n <app-header \n [activeView]=\"activeView\" \n (changeView)=\"changeView($event)\"\n ></app-header>\n</header>\n<main class=\"container\">\n <section *ngIf=\"singleUser\" id=\"userHeader\">\n <p>Bienvenue sur votre tableau de bord <strong>{{singleUser.name}}</strong></p>\n </section>\n\n <app-feed-form \n *ngIf=\"singleUser\" \n [singleUser]=\"singleUser\" \n (sendFeedData)=\"addNewFeed($event)\"\n ></app-feed-form>\n\n <section *ngIf=\"feedCollection\">\n <app-single-feed \n *ngFor=\"let item of feedCollection\" \n [item]=\"item\" \n (deleteFeed)=\"deleteFeed($event)\"\n ></app-single-feed>\n </section>\n\n</main>"
/***/ }),
/***/ "../../../../../src/app/components/dashboard/dashboard.component.ts":
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
/*
Configuration du composants
*/
// Import des interfaces
var core_1 = __webpack_require__("../../../core/esm5/core.js");
// Modules
var user_service_1 = __webpack_require__("../../../../../src/app/services/user/user.service.ts");
var feed_service_1 = __webpack_require__("../../../../../src/app/services/feed/feed.service.ts");
// Définition du composant
var DashboardComponent = /** @class */ (function () {
function DashboardComponent(userService, feedService) {
var _this = this;
this.userService = userService;
this.feedService = feedService;
// Loader
this.loaderState = { path: "/dashboard", isClose: true };
this.activeView = "/dashboard";
// Fonction Change View
this.changeView = function (evt) {
_this.loaderState = evt;
};
// Fonction User Info
this.getUserInfos = function () {
_this.userService.getUserInfo(localStorage.getItem('MEANSOCIALtoken'))
.then(function (data) {
// Masquer le loader
_this.loaderState.isClose = true;
// Définition de l'objet singleUser
_this.singleUser = data;
})
.catch(function (err) {
// Introduction
_this.loaderState.isClose = false;
console.error(err);
});
};
// Fonction User Feed
this.getUserFeed = function () {
_this.feedService.getFeeds(localStorage.getItem('MEANSOCIALtoken'))
.then(function (dataFeeds) {
_this.feedCollection = dataFeeds;
})
.catch(function (err) {
console.error(err);
});
};
// Fonction Add New Feed
this.addNewFeed = function (evt) {
_this.feedService.addNewFeed(evt, localStorage.getItem('MEANSOCIALtoken'))
.then(function (data) {
// Ajout du feed dans la liste
_this.getUserFeed();
})
.catch(function (err) {
console.error(err);
});
};
// Fonction Add New Feed
this.deleteFeed = function (evt) {
_this.feedService.deleteFeed(evt, localStorage.getItem('MEANSOCIALtoken'))
.then(function (data) {
// Ajout du feed dans la liste
_this.getUserFeed();
})
.catch(function (err) {
console.error(err);
});
};
}
DashboardComponent.prototype.ngOnInit = function () {
// Récupérer les informations utilisateur
this.getUserInfos();
// Récupérer la liste des feeds
this.getUserFeed();
};
;
DashboardComponent = __decorate([
core_1.Component({
selector: 'app-dashboard',
template: __webpack_require__("../../../../../src/app/components/dashboard/dashboard.component.html"),
providers: [user_service_1.UserService, feed_service_1.FeedService],
})
//
/*
Export du composant
*/
,
__metadata("design:paramtypes", [user_service_1.UserService,
feed_service_1.FeedService])
], DashboardComponent);
return DashboardComponent;
}());
exports.DashboardComponent = DashboardComponent;
//
/***/ }),
/***/ "../../../../../src/app/components/dashboard/module.ts":
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
Object.defineProperty(exports, "__esModule", { value: true });
/*
DashboardModule
*/
// Imports des Interfaces
var core_1 = __webpack_require__("../../../core/esm5/core.js");
var common_1 = __webpack_require__("../../../common/esm5/common.js");
var forms_1 = __webpack_require__("../../../forms/esm5/forms.js");
// Importer les composants
var dashboard_component_1 = __webpack_require__("../../../../../src/app/components/dashboard/dashboard.component.ts");
var module_1 = __webpack_require__("../../../../../src/app/partials/header/module.ts");
var module_2 = __webpack_require__("../../../../../src/app/partials/loader/module.ts");
var module_3 = __webpack_require__("../../../../../src/app/partials/feed-form/module.ts");
var module_4 = __webpack_require__("../../../../../src/app/partials/single-feed/module.ts");
var route_1 = __webpack_require__("../../../../../src/app/components/dashboard/route.ts");
// Configuration du DashboardModule
var DashboardModule = /** @class */ (function () {
//
/*
Export de DashboardModule
*/
function DashboardModule() {
}
DashboardModule = __decorate([
core_1.NgModule({
declarations: [dashboard_component_1.DashboardComponent],
imports: [route_1.Routing, common_1.CommonModule, forms_1.FormsModule, module_1.HeaderModule, module_2.LoaderModule, module_3.FeedFormModule, module_4.SingleFeedModule]
})
//
/*
Export de DashboardModule
*/
], DashboardModule);
return DashboardModule;
}());
exports.DashboardModule = DashboardModule;
;
//
/***/ }),
/***/ "../../../../../src/app/components/dashboard/route.ts":
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var router_1 = __webpack_require__("../../../router/esm5/router.js");
// Importer les composants à utiliser dans les routes
var dashboard_component_1 = __webpack_require__("../../../../../src/app/components/dashboard/dashboard.component.ts");
// Créer une constante pour définir le comportement des routes
var appRoutes = [
{
path: '',
component: dashboard_component_1.DashboardComponent
}
];
// Exporter une autre constante pour utiliser les routes
exports.Routing = router_1.RouterModule.forChild(appRoutes);
/***/ }),
/***/ "../../../../../src/app/partials/feed-form/feed-form.component.html":
/***/ (function(module, exports) {
module.exports = "<section>\n\n <form (submit)=\"addNewFeed()\" id=\"addFeedForm\">\n <textarea name=\"content\" [(ngModel)]=\"newFeedObject.content\" placeholder=\"Ajouter un message\"></textarea>\n <button type=\"submit\"><i class=\"fas fa-check\"></i></button>\n </form>\n\n</section>"
/***/ }),
/***/ "../../../../../src/app/partials/feed-form/feed-form.component.ts":
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
/*
Configuration du composants
*/
// Import des interfaces
var core_1 = __webpack_require__("../../../core/esm5/core.js");
// Définition du composant
var FeedFormComponent = /** @class */ (function () {
function FeedFormComponent() {
var _this = this;
this.sendFeedData = new core_1.EventEmitter;
// Fonction Add Feed
this.addNewFeed = function () {
if (_this.newFeedObject.content === null || _this.newFeedObject.content.length === 0) {
}
else {
_this.sendFeedData.emit(_this.newFeedObject);
_this.resetFeedObject();
}
};
// Fonction resetFeedObject
this.resetFeedObject = function () {
_this.newFeedObject = {
content: null,
author: {
_id: _this.singleUser._id,
name: _this.singleUser.name,
avatar: _this.singleUser.facebook.avatar
}
};
};
}
FeedFormComponent.prototype.ngOnInit = function () {
// Configuration de l'objet newFeedObject
this.resetFeedObject();
};
__decorate([
core_1.Output(),
__metadata("design:type", Object)
], FeedFormComponent.prototype, "sendFeedData", void 0);
__decorate([
core_1.Input(),
__metadata("design:type", Object)
], FeedFormComponent.prototype, "singleUser", void 0);
FeedFormComponent = __decorate([
core_1.Component({
selector: 'app-feed-form',
template: __webpack_require__("../../../../../src/app/partials/feed-form/feed-form.component.html")
})
//
/*
Export du composant
*/
,
__metadata("design:paramtypes", [])
], FeedFormComponent);
return FeedFormComponent;
}());
exports.FeedFormComponent = FeedFormComponent;
//
/***/ }),
/***/ "../../../../../src/app/partials/feed-form/module.ts":
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
Object.defineProperty(exports, "__esModule", { value: true });
/*
Configuration du composants
*/
var core_1 = __webpack_require__("../../../core/esm5/core.js");
var common_1 = __webpack_require__("../../../common/esm5/common.js");
var forms_1 = __webpack_require__("../../../forms/esm5/forms.js");
var feed_form_component_1 = __webpack_require__("../../../../../src/app/partials/feed-form/feed-form.component.ts");
// Configuration du module
var FeedFormModule = /** @class */ (function () {
//
/*
Export de la class du module
*/
function FeedFormModule() {
}
FeedFormModule = __decorate([
core_1.NgModule({
declarations: [
feed_form_component_1.FeedFormComponent,
],
imports: [common_1.CommonModule, forms_1.FormsModule],
exports: [
feed_form_component_1.FeedFormComponent
]
})
//
/*
Export de la class du module
*/
], FeedFormModule);
return FeedFormModule;
}());
exports.FeedFormModule = FeedFormModule;
//
/***/ }),
/***/ "../../../../../src/app/partials/single-feed/module.ts":
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
Object.defineProperty(exports, "__esModule", { value: true });
/*
SingleFeedModule
*/
// Imports des Interfaces
var core_1 = __webpack_require__("../../../core/esm5/core.js");
var common_1 = __webpack_require__("../../../common/esm5/common.js");
// Imports des composants
var single_feed_component_1 = __webpack_require__("../../../../../src/app/partials/single-feed/single-feed.component.ts");
// Configuration du SingleFeedModule
var SingleFeedModule = /** @class */ (function () {
//
/*
Export de SingleFeedModule
*/
function SingleFeedModule() {
}
SingleFeedModule = __decorate([
core_1.NgModule({
declarations: [
single_feed_component_1.SingleFeedComponent,
],
imports: [common_1.CommonModule],
exports: [
single_feed_component_1.SingleFeedComponent
]
})
//
/*
Export de SingleFeedModule
*/
], SingleFeedModule);
return SingleFeedModule;
}());
exports.SingleFeedModule = SingleFeedModule;
//
/***/ }),
/***/ "../../../../../src/app/partials/single-feed/single-feed.component.html":
/***/ (function(module, exports) {
module.exports = "<article class=\"singleFeed\">\n <div class=\"avatar\">\n <img src=\"{{singleFeed.author.avatar}}\" alt=\"Photo de {{singleFeed.author.name}}\">\n </div>\n <div class=\"contentFeed\">\n <p class=\"userName\">\n <b>{{singleFeed.author.name}}</b>\n <span>{{singleFeed.date}}</span> \n <button (click)=\"submitDeleteFeed(item._id)\" ><i class=\"far fa-trash-alt\"></i></button>\n </p>\n <p class=\"content\">{{singleFeed.content}}</p>\n </div>\n</article>"
/***/ }),
/***/ "../../../../../src/app/partials/single-feed/single-feed.component.ts":
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
/*
Import des composants
*/
// Class
var core_1 = __webpack_require__("../../../core/esm5/core.js");
// Déclaration du composant
var SingleFeedComponent = /** @class */ (function () {
function SingleFeedComponent() {
var _this = this;
this.deleteFeed = new core_1.EventEmitter;
// Fonction Submit Delete Feed
this.submitDeleteFeed = function (_id) {
_this.deleteFeed.emit(_id);
};
}
SingleFeedComponent.prototype.ngOnInit = function () {
this.singleFeed = this.item;
var options = { weekday: 'short', year: 'numeric', month: 'short', day: 'numeric' };
this.singleFeed.date = new Date(this.singleFeed.date).toLocaleTimeString('fr') + ' - ' + new Date(this.singleFeed.date).toLocaleDateString('fr');
};
__decorate([
core_1.Input(),
__metadata("design:type", Object)
], SingleFeedComponent.prototype, "item", void 0);
__decorate([
core_1.Output(),
__metadata("design:type", Object)
], SingleFeedComponent.prototype, "deleteFeed", void 0);
SingleFeedComponent = __decorate([
core_1.Component({
selector: 'app-single-feed',
template: __webpack_require__("../../../../../src/app/partials/single-feed/single-feed.component.html")
})
//
/*
Export de la class du composant
*/
,
__metadata("design:paramtypes", [])
], SingleFeedComponent);
return SingleFeedComponent;
}());
exports.SingleFeedComponent = SingleFeedComponent;
//
/***/ }),
/***/ "../../../../../src/app/services/feed/feed.service.ts":
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
/*
Import des composants du service
*/
// Import des interfaces
var core_1 = __webpack_require__("../../../core/esm5/core.js");
var http_1 = __webpack_require__("../../../http/esm5/http.js");
var http_2 = __webpack_require__("../../../http/esm5/http.js");
__webpack_require__("../../../../rxjs/_esm5/add/operator/toPromise.js");
//
/*
Définition et export du service
*/
var FeedService = /** @class */ (function () {
function FeedService(http) {
this.http = http;
this.apiUrl = '/feed';
}
;
// Fonction Get All Feed
FeedService.prototype.getFeeds = function (token) {
// Définition du header de la requête
var myHeader = new http_2.Headers();
myHeader.append('x-access-token', token);
return this.http.get(this.apiUrl + "/all", { headers: myHeader })
.toPromise().then(this.getData).catch(this.handleError);
};
;
// Fonction Add New Feed
FeedService.prototype.addNewFeed = function (newFeed, token) {
// Définition du header de la requête
var myHeader = new http_2.Headers();
myHeader.append('x-access-token', token);
return this.http.post(this.apiUrl + "/add", newFeed, { headers: myHeader })
.toPromise().then(this.getData).catch(this.handleError);
};
;
// Fonction Delete Feed
FeedService.prototype.deleteFeed = function (_id, token) {
// Définition du header de la requête
var myHeader = new http_2.Headers();
myHeader.append('x-access-token', token);
return this.http.delete(this.apiUrl + "/delete/" + _id, { headers: myHeader })
.toPromise().then(this.getData).catch(this.handleError);
};
;
/*
Fonctions de traitement de Promises
*/
// Traiter le retour de l'API
FeedService.prototype.getData = function (res) {
return res.json() || {};
};
;
// Traiter les erreurs de requête
FeedService.prototype.handleError = function (err) {
return Promise.reject(err);
};
;
FeedService = __decorate([
core_1.Injectable(),
__metadata("design:paramtypes", [http_1.Http])
], FeedService);
return FeedService;
}());
exports.FeedService = FeedService;
;
//
/***/ })
});
//# sourceMappingURL=module.chunk.js.map
|
#!/bin/sh
#
# Bootstrap this host with chef-solo: fetch the cloudconductor_init cookbook
# repository, vendor its cookbooks with Berkshelf, and run the 'setup' role,
# which installs and starts Consul.
#
#   $1  Consul service mode (defaults to "bootstrap")
#   $2  Consul server address list
script_dir=`dirname $0`
service_mode=$1
servers=$2
# POSIX sh string comparison uses '=' — the original '==' is a bash
# extension and fails when /bin/sh is dash or another strict shell.
if [ "${service_mode}" = "" ] ; then
service_mode=bootstrap
fi
REPO_NAME=cloudconductor_init
GIT_URL=https://github.com/cloudconductor/cloudconductor_init.git
TOOL_DIR=/opt/chefdk/embedded/bin
BUNDLE_TOOL=${TOOL_DIR}/bundle
WORK_DIR=`pwd`
WORK_REPO=init_chefrepo
# Clone the cookbook repository only when it is not already present
# (test the directory directly instead of inspecting 'ls' exit status).
if [ ! -d "./${REPO_NAME}" ] ; then
git clone ${GIT_URL}
cd ${REPO_NAME}/
git checkout develop
fi
cd ${WORK_DIR}
# -p: do not fail when the work directory already exists (re-runs).
mkdir -p ./${WORK_REPO}
if [ ! -f ./${WORK_REPO}/Berksfile ] ; then
echo "source 'https://supermarket.getchef.com'" >> ./${WORK_REPO}/Berksfile
echo "" >> ./${WORK_REPO}/Berksfile
echo "cookbook 'bootstrap', path: '../cloudconductor_init/bootstrap'" >> ./${WORK_REPO}/Berksfile
fi
if [ ! -f ./${WORK_REPO}/Gemfile ] ; then
cp ./tomcat_cluster_pattern/Gemfile ./${WORK_REPO}/Gemfile
fi
cd ./${WORK_REPO}/
${BUNDLE_TOOL} config build.nokogiri --use-system-libraries
${BUNDLE_TOOL} install
${BUNDLE_TOOL} exec berks vendor cookbooks
mkdir -p ./roles
# Generate the role and node attributes consumed by chef-solo below.
echo '{"name":"setup","description":"Setup Role","chef_type":"role","json_class":"Chef::Role","run_list":["consul::install_source","consul::_service"]}' | jq '.' > ./roles/setup.json
echo "{\"consul\":{\"service_mode\":\"${service_mode}\",\"servers\":[\"${servers}\"]},\"run_list\":[\"role[setup]\"]}" | jq '.' > ./dna.json
if [ ! -f ./solo.rb ] ; then
cp ${script_dir}/files/solo.rb ./solo.rb
fi
chef-solo -c solo.rb -j dna.json
|
<reponame>premss79/zignaly-webapp
import React, { useState, useEffect } from "react";
import "./StatsFilter.scss";
import { Box } from "@material-ui/core";
import useExchangesOptions from "../../../../../hooks/useExchangesOptions";
import CustomSelect from "../../../../CustomSelect";
import { useIntl } from "react-intl";
import useExchangeQuotes from "../../../../../hooks/useExchangeQuotes";
import useExchangeList from "../../../../../hooks/useExchangeList";
/**
*
* @typedef {import("../../../../../services/tradeApiClient.types").ProfileProviderStatsSignalsObject} ProfileProviderStatsSignalsObject
*/
/**
*
* @typedef {Object} DefaultProps
* @property {Array<ProfileProviderStatsSignalsObject>} list
* @property {Function} onChange
*/
/**
*
* @param {DefaultProps} props Default props.
* @returns {JSX.Element} JSX Component.
*/
const StatsFilter = ({ list, onChange }) => {
const { exchanges } = useExchangeList();
// Current filter selections; "ALL" disables that dimension of the filter.
const [exchange, setExchange] = useState("ALL");
const [quote, setQuote] = useState("ALL");
const intl = useIntl();
const exchangeOptions = useExchangesOptions(true);
// When "ALL" is selected, binance is used as the base exchange purely so the
// quote-asset dropdown has something to populate from.
const selectedBaseExchange =
exchanges &&
exchanges.find(
(item) => item.name.toLowerCase() === (exchange === "ALL" ? "binance" : exchange),
);
// NOTE(review): selectedBaseExchange.type[0] assumes `type` is a non-empty
// array — confirm against the exchange list API shape.
const { quoteAssets } = useExchangeQuotes({
exchangeId: selectedBaseExchange && selectedBaseExchange.id ? selectedBaseExchange.id : "",
exchangeType:
selectedBaseExchange && selectedBaseExchange.type ? selectedBaseExchange.type[0] : "",
});
// Dropdown options: the localized "all coins" entry followed by every quote
// asset available on the selected base exchange.
const quotes = [
{
val: "ALL",
label: intl.formatMessage({ id: "fil.allcoins" }),
},
].concat(
Object.keys(quoteAssets).map((label) => ({
val: label,
label,
})),
);
// Apply both filters to the incoming list and hand the result to the parent.
const filterData = () => {
let newList = [...list].filter(
(item) =>
(!quote || quote === "ALL" || item.quote === quote) &&
(!exchange || exchange === "ALL" || item.exchange.toLowerCase() === exchange.toLowerCase()),
);
onChange(newList);
};
// NOTE(review): the dependency array omits `list` and `onChange`, so a new
// `list` prop does not re-trigger filtering until a selection changes.
// Adding them could loop if `onChange` has unstable identity — verify with
// the parent before changing.
useEffect(filterData, [exchange, quote]);
return (
<Box alignItems="center" className="statsFilter" display="flex" flexDirection="row">
<CustomSelect
onChange={(/** @type {string} */ v) => setExchange(v)}
options={exchangeOptions}
value={exchange}
/>
<CustomSelect
onChange={(/** @type {import("../../../../CustomSelect/CustomSelect").OptionType} */ v) =>
// @ts-ignore
setQuote(v.val)
}
options={quotes}
search={true}
value={quote}
/>
</Box>
);
};
export default StatsFilter;
|
import number from '../src/number'
describe('number()', () => {
const key = 'foo'
const originalEnv = process.env
beforeEach(() => {
process.env = {
...originalEnv,
}
})
it('reads "0" as 0', () => {
process.env[key] = '0'
expect(number(key)).toBe(0)
})
it('reads "1" as 1', () => {
process.env[key] = '1'
expect(number(key)).toBe(1)
})
it('reads "-1" as -1', () => {
process.env[key] = '-1'
expect(number(key)).toBe(-1)
})
it('reads "0.1" as 0.1', () => {
process.env[key] = '0.1'
expect(number(key)).toBe(0.1)
})
it('reads "string" as undefined', () => {
process.env[key] = 'string'
expect(number(key)).toBeUndefined()
})
it('returns undefined if a key is not defined', () => {
expect(number(key)).toBeUndefined()
})
})
|
#!/bin/bash
# Launch the Visallo Jetty web server for local development.
# Usage: run.sh [-d]    -d waits for a remote debugger on ${DEBUG_PORT:-12345}
# Resolve the real directory of this script, following symlinks.
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
DIR="$(cd -P "$(dirname "$SOURCE")" && pwd)"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
DIR="$(cd -P "$(dirname "$SOURCE")" && pwd)"
# Build the JVM classpath; on failure the helper's output is the error text,
# so print it and bail out.
classpath=$(${DIR}/classpath.sh dev/jetty-server)
if [ $? -ne 0 ]; then
echo "${classpath}"
exit
fi
[ "${DEBUG_PORT}" ] || DEBUG_PORT=12345
# suspend=y: the JVM blocks until a debugger attaches.
[ "$1" = '-d' ] && debug_option="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=${DEBUG_PORT}"
cd ${DIR}/../
java ${debug_option} \
-Dfile.encoding=UTF-8 \
-Djava.awt.headless=true \
-Djava.security.krb5.realm= \
-Djava.security.krb5.kdc= \
-Dvisallo.request.debug=true \
-classpath ${classpath} \
-Xmx1024M \
org.visallo.web.JettyWebServer \
--port=8080 \
--httpsPort=8443 \
--keyStorePath=${DIR}/../dev/jetty-server/config/visallo-vm.visallo.org.jks \
--keyStorePassword=password \
--webAppDir=${DIR}/../web/war/src/main/webapp
|
<reponame>zmb3/om
package api_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/onsi/gomega/ghttp"
"github.com/pivotal-cf/om/api"
"net/http"
"strings"
)
// Ginkgo specs for the Ops Manager "available products" API:
// GET/POST/DELETE on /api/v0/available_products plus latest-version lookup.
var _ = Describe("Available Products", func() {
	var (
		// progressClient backs the upload (progress-reporting) client;
		// client backs the plain API calls.
		progressClient *ghttp.Server
		client         *ghttp.Server
		service        api.Api
	)

	BeforeEach(func() {
		progressClient = ghttp.NewServer()
		client = ghttp.NewServer()
		service = api.New(api.ApiInput{
			Client:         httpClient{client.URL()},
			ProgressClient: httpClient{progressClient.URL()},
		})
	})

	AfterEach(func() {
		// NOTE(review): progressClient is only closed by individual specs;
		// consider closing it here as well to avoid leaking test servers.
		client.Close()
	})

	Describe("GetLatestAvailableVersion", func() {
		When("there is a single version", func() {
			It("returns that version", func() {
				client.AppendHandlers(
					ghttp.CombineHandlers(
						ghttp.VerifyRequest("GET", "/api/v0/available_products"),
						ghttp.RespondWith(http.StatusOK, `[
							{
								"name": "available-product",
								"product_version": "1.2.3"
							}
						]`),
					),
				)

				version, err := service.GetLatestAvailableVersion("available-product")
				Expect(err).ToNot(HaveOccurred())
				Expect(version).To(Equal("1.2.3"))
			})
		})

		When("there are multiple versions", func() {
			// Versions of other products must be ignored even when larger.
			It("returns the greatest (by semver)", func() {
				client.AppendHandlers(
					ghttp.CombineHandlers(
						ghttp.VerifyRequest("GET", "/api/v0/available_products"),
						ghttp.RespondWith(http.StatusOK, `[
							{
								"name": "available-product",
								"product_version": "1.2.3"
							},
							{
								"name": "not-the-product-we-are-looking-for",
								"product_version": "100.100.100"
							},
							{
								"name": "available-product",
								"product_version": "1.1.1"
							}
						]`),
					),
				)

				version, err := service.GetLatestAvailableVersion("available-product")
				Expect(err).ToNot(HaveOccurred())
				Expect(version).To(Equal("1.2.3"))
			})
		})

		When("there are no versions for the product", func() {
			It("returns an error", func() {
				client.AppendHandlers(
					ghttp.CombineHandlers(
						ghttp.VerifyRequest("GET", "/api/v0/available_products"),
						ghttp.RespondWith(http.StatusOK, `[]`),
					),
				)

				_, err := service.GetLatestAvailableVersion("available-product")
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("no versions available for the product 'available-product'"))
			})
		})

		When("the api returns an error", func() {
			It("returns an error", func() {
				client.AppendHandlers(
					ghttp.CombineHandlers(
						ghttp.VerifyRequest("GET", "/api/v0/available_products"),
						ghttp.RespondWith(http.StatusBadGateway, `[]`),
					),
				)

				_, err := service.GetLatestAvailableVersion("available-product")
				Expect(err).To(HaveOccurred())
				Expect(err.Error()).To(ContainSubstring("could not retrieve product list from Ops Manager: "))
			})
		})
	})

	Describe("UploadAvailableProduct", func() {
		// Uploads go through progressClient, not the plain client.
		It("makes a request to upload the product to the Ops Manager", func() {
			progressClient.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("POST", "/api/v0/available_products"),
					ghttp.VerifyContentType("some content-type"),
					ghttp.VerifyBody([]byte("some content")),
					http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
						Expect(req.ContentLength).To(Equal(int64(12)))
						_, err := w.Write([]byte(`{}`))
						Expect(err).ToNot(HaveOccurred())
					}),
				),
			)

			output, err := service.UploadAvailableProduct(api.UploadAvailableProductInput{
				ContentLength:   12,
				Product:         strings.NewReader("some content"),
				ContentType:     "some content-type",
				PollingInterval: 1,
			})
			Expect(err).ToNot(HaveOccurred())
			Expect(output).To(Equal(api.UploadAvailableProductOutput{}))
		})

		When("an error occurs", func() {
			When("the client errors performing the request", func() {
				It("returns an error", func() {
					// Closing the server before the call forces a transport error.
					progressClient.Close()

					_, err := service.UploadAvailableProduct(api.UploadAvailableProductInput{
						PollingInterval: 1,
					})
					Expect(err).To(MatchError(ContainSubstring("could not make api request to available_products endpoint")))
				})
			})

			When("the api returns a non-200 status code", func() {
				It("returns an error", func() {
					progressClient.AppendHandlers(
						ghttp.CombineHandlers(
							ghttp.VerifyRequest("POST", "/api/v0/available_products"),
							ghttp.RespondWith(http.StatusTeapot, `{}`),
						),
					)

					_, err := service.UploadAvailableProduct(api.UploadAvailableProductInput{
						PollingInterval: 1,
					})
					Expect(err).To(MatchError(ContainSubstring("request failed: unexpected response")))
				})
			})
		})
	})

	Describe("ListAvailableProducts", func() {
		It("lists available products", func() {
			client.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("GET", "/api/v0/available_products"),
					ghttp.RespondWith(http.StatusOK, `[{
						"name": "available-product",
						"product_version": "available-version"
					}]`),
				),
			)

			output, err := service.ListAvailableProducts()
			Expect(err).ToNot(HaveOccurred())
			Expect(output.ProductsList).To(ConsistOf([]api.ProductInfo{{
				Name:    "available-product",
				Version: "available-version",
			}}))
		})

		When("the client can't connect to the client", func() {
			It("returns an error", func() {
				client.Close()

				_, err := service.ListAvailableProducts()
				Expect(err).To(MatchError(ContainSubstring("could not make api request")))
			})
		})

		When("the client won't fetch available products", func() {
			It("returns an error", func() {
				client.AppendHandlers(
					ghttp.CombineHandlers(
						ghttp.VerifyRequest("GET", "/api/v0/available_products"),
						ghttp.RespondWith(http.StatusInternalServerError, `{}`),
					),
				)

				_, err := service.ListAvailableProducts()
				Expect(err).To(MatchError(ContainSubstring("request failed")))
			})
		})

		When("the response is not JSON", func() {
			It("returns an error", func() {
				client.AppendHandlers(
					ghttp.CombineHandlers(
						ghttp.VerifyRequest("GET", "/api/v0/available_products"),
						ghttp.RespondWith(http.StatusOK, `invalid-json`),
					),
				)

				_, err := service.ListAvailableProducts()
				Expect(err).To(MatchError(ContainSubstring("could not unmarshal")))
			})
		})
	})

	Describe("DeleteAvailableProducts", func() {
		It("deletes a named product / version", func() {
			client.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("DELETE", "/api/v0/available_products", "product_name=some-product&version=1.2.3-build.4"),
					ghttp.RespondWith(http.StatusOK, `{}`),
				),
			)

			err := service.DeleteAvailableProducts(api.DeleteAvailableProductsInput{
				ProductName:    "some-product",
				ProductVersion: "1.2.3-build.4",
			})
			Expect(err).ToNot(HaveOccurred())
		})

		When("the ShouldDeleteAllProducts flag is provided", func() {
			// An empty query string means "delete everything" on this endpoint.
			It("does not provide a product query to DELETE", func() {
				client.AppendHandlers(
					ghttp.CombineHandlers(
						ghttp.VerifyRequest("DELETE", "/api/v0/available_products", ""),
						ghttp.RespondWith(http.StatusOK, `{}`),
					),
				)

				err := service.DeleteAvailableProducts(api.DeleteAvailableProductsInput{
					ShouldDeleteAllProducts: true,
				})
				Expect(err).ToNot(HaveOccurred())
			})
		})

		When("a non-200 status code is returned", func() {
			It("returns an error", func() {
				client.AppendHandlers(
					ghttp.CombineHandlers(
						ghttp.VerifyRequest("DELETE", "/api/v0/available_products"),
						ghttp.RespondWith(http.StatusInternalServerError, `{}`),
					),
				)

				err := service.DeleteAvailableProducts(api.DeleteAvailableProductsInput{
					ProductName:    "some-product",
					ProductVersion: "1.2.3-build.4",
				})
				Expect(err).To(MatchError(ContainSubstring("request failed: unexpected response")))
			})
		})
	})
})
|
<reponame>danielotaviano/fastfeet<filename>server/src/modules/user/services/ListAllUserService.spec.ts
// Unit tests for ListAllUserService using in-memory fakes for the user
// repository and hash provider (no database or real hashing involved).
import BadRequestError from '../../../shared/err/BadRequestError';
import FakeHashProvider from '../providers/HashProvider/fakes/FakeHashProvider';
import FakeUserRepository from '../repositories/fakes/FakeUserRepository';
import CreateUserService from './CreateUserService';
import ListAllUserService from './ListAllUserService';

describe('list all users service', () => {
  let fakeUserRepository: FakeUserRepository;
  let fakeHashProvider: FakeHashProvider;
  let listAll: ListAllUserService;
  let createUser: CreateUserService;

  // Fresh fakes per test so users created in one case don't leak into another.
  beforeEach(() => {
    fakeUserRepository = new FakeUserRepository();
    fakeHashProvider = new FakeHashProvider();
    listAll = new ListAllUserService(fakeUserRepository);
    createUser = new CreateUserService(fakeUserRepository, fakeHashProvider);
  });

  // A deliveryman (non-admin) must not be able to list users; the service
  // is expected to reject with BadRequestError.
  it('should not be able to list all users if not authorized', async () => {
    const userInfo = {
      cpf: 'sameCpf',
      deliveryman: true,
      email: '<EMAIL>',
      name: 'valid name',
      password: '<PASSWORD>',
    };

    const user = await createUser.execute(userInfo);

    await expect(listAll.execute(user.id)).rejects.toBeInstanceOf(
      BadRequestError,
    );
  });

  // An authorized (deliveryman: false) caller gets every stored user back,
  // in insertion order.
  it('should be able to list all users', async () => {
    const userInfo1 = {
      cpf: 'validCpf1',
      deliveryman: false,
      email: '<EMAIL>',
      name: 'valid name',
      password: '<PASSWORD>',
    };
    const userInfo2 = {
      cpf: 'validCpf2',
      deliveryman: true,
      email: '<EMAIL>',
      name: 'valid name',
      password: '<PASSWORD>',
    };
    const userInfo3 = {
      cpf: 'validCpf3',
      deliveryman: false,
      email: '<EMAIL>',
      name: 'valid name',
      password: '<PASSWORD>',
    };
    const userInfo4 = {
      cpf: 'validCpf4',
      deliveryman: true,
      email: '<EMAIL>',
      name: 'valid name',
      password: '<PASSWORD>',
    };

    const user1 = await createUser.execute(userInfo1);
    const user2 = await createUser.execute(userInfo2);
    const user3 = await createUser.execute(userInfo3);
    const user4 = await createUser.execute(userInfo4);

    const allUsers = await listAll.execute(user1.id);

    expect(allUsers).toEqual([user1, user2, user3, user4]);
  });
});
|
<gh_stars>10-100
import {
animate,
animateChild,
AnimationTriggerMetadata,
query,
state,
style,
transition,
trigger
} from '@angular/animations';
/**
 * Fade animation trigger for overlay content.
 *
 * States: 'open' (opacity 1), 'close-fast' and 'close-instant' (opacity 0).
 * Closing via 'close-fast' first lets child animations finish, then fades
 * over the caller-supplied `closeDelay` param; 'close-instant' hides with a
 * zero-length animation; opening always fades in over 100ms.
 */
export function fadeInOut(): AnimationTriggerMetadata {
  const shown = style({ opacity: 1 });
  const closedState = (name: string) => state(name, style({ opacity: 0 }));

  return trigger('fadeInOut', [
    state('open', shown),
    closedState('close-fast'),
    closedState('close-instant'),
    // Run child animations to completion before fading the container out.
    transition('* => close-fast', [query('*', [animateChild()]), animate('{{closeDelay}}')]),
    transition('* => open', [animate(100)]),
    transition('* => close-instant', [animate(0)])
  ]);
}
|
/*
Info: JavaScript for JavaScript Basics Lesson 3, JavaScript Loops, Arrays, Strings, Task 6, Maximal Sequence
Author: Removed for reasons of anonymity
Successfully checked as valid in JSLint Validator at: http://www.jslint.com/ and JSHint Validator at: http://www.jshint.com/
*/
'use strict';
function findMaxSequence(seq) {
var length = 1, maxLength = 1;
var maxSeqValue = seq[0];
var seqLength = seq.length;
var i, k;
var result = [];
for (i = 1; i < seqLength; i = i + 1) {
if (seq[i - 1] === seq[i]) {
length = length + 1;
if (length >= maxLength) {
maxLength = length;
maxSeqValue = seq[i];
}
} else {
length = 1;
}
}
for (k = 0; k < maxLength; k = k + 1) {
result.push(maxSeqValue);
}
return result;
}
/* For html result view */
// Reads a comma/space/period-separated list from the #sequence input field,
// runs findMaxSequence on it, and appends the bracketed result to #output.
// NOTE(review): split() yields strings, so elements are compared as strings
// here even when the user types numbers — confirm this is intended.
function findMaxSequenceByInput() {
    var output = document.getElementById("output");
    var sequence = document.getElementById('sequence').value;
    var elementP = document.createElement("p");
    var data = sequence.split(/[\s,.]+/);
    var outRes = "[";
    outRes += findMaxSequence(data).join(", ");
    outRes += "]";
    elementP.innerHTML = outRes;
    output.appendChild(elementP);
}
/* For node.js result */
// Demo driver: print the maximal run for a few sample sequences.
var sequences = [
    [2, 1, 1, 2, 3, 3, 2, 2, 2, 1],
    ['happy'],
    [2, 'qwe', 'qwe', 3, 3, '3']
];
var length = sequences.length;
var i = 0;
var outResult;
for (i = 0; i < length; i = i + 1) {
    outResult = findMaxSequence(sequences[i]);
    console.log(outResult);
}
|
class CustomIterator:
    """Iterator over a sequence, implementing the standard iterator protocol.

    Bug fix: the original ``__next__`` returned True/False instead of the
    next element and never raised StopIteration, so for-loops over it would
    run forever and never yield any data.
    """

    def __init__(self, data):
        # data: any indexable sequence (list, str, tuple, ...)
        self.data = data
        self.position = 0
        self.size = len(data)

    def __iter__(self):
        return self

    def __next__(self):
        """Return the next element, raising StopIteration when exhausted."""
        if self.position < self.size:
            item = self.data[self.position]
            self.position += 1
            return item
        raise StopIteration
# Test the CustomIterator class
# Drive the iterator manually: one next() call per source element, printing
# whatever __next__ returns.
elements = [1, 2, 3, 4, 5]
iter_obj = CustomIterator(elements)
for _ in elements:
    print(next(iter_obj))
|
#!/bin/bash
##
# Build an AliCloud ACK OS image: prompt for credentials and versions,
# validate them, then run the ack-image-builder container against the
# packer template passed as $1 (resolved relative to the current directory).

read -r -p "Please input the AliCloud access_key:" ACCESS_KEY
read -r -p "Please input the AliCloud secret_key:" SECRET_KEY
read -r -p "The Alicloud region is: " REGION
read -r -p "The Docker version is:" DOCKER_VERSION
read -r -p "The kubernetes version is:" KUBE_VERSION

## check params
if [[ -z $ACCESS_KEY || -z $SECRET_KEY || -z $REGION || -z $DOCKER_VERSION || -z $KUBE_VERSION ]]; then
  # NOTE(review): this echoes the secret key back to the terminal; consider
  # masking it.
  echo -e "[ERROR] $(date '+%F %T') following parameters is empty:
      access_key=${ACCESS_KEY}
      secret_key=${SECRET_KEY}
      region=${REGION}
      docker_version=${DOCKER_VERSION}
      kube_version=${KUBE_VERSION}"
  # Bug fix: was `exit 0`, which reported success even though required
  # input was missing.
  exit 1
fi

file_path="$(pwd)/$1"

## build OS image
docker run -e ALICLOUD_ACCESS_KEY="$ACCESS_KEY" -e ALICLOUD_SECRET_KEY="$SECRET_KEY" -e REGION="$REGION" -e KUBE_VERSION="$KUBE_VERSION" \
  -e DOCKER_VERSION="$DOCKER_VERSION" -v "$file_path":"$file_path" registry.aliyuncs.com/acs/ack-image-builder:v1.0.0 "$file_path"
|
import { createSlice } from '@reduxjs/toolkit'
import { apiPortfolio, getcheck } from './api'

// Redux slice holding the loading/error/result state for the "check" fetch.
const check = createSlice({
  name: 'check',
  initialState: {
    loading: false,
    check: null,   // last fetched payload, or null before the first fetch
    error: false,
  },
  reducers: {
    // Request started: mark loading and clear any previous error.
    fetching: state => {
      state.loading = true
      state.error = false
    },
    // Request succeeded: store the payload.
    fetched: (state, { payload }) => {
      state.loading = false
      state.check = payload
      state.error = false
    },
    // Request failed: keep the old payload, flag the error.
    fetchError: state => {
      state.loading = false
      state.error = true
    },
  },
})

// Thunk-style action: GET the check endpoint via apiPortfolio, dispatching
// the slice's lifecycle action types around the request.
export const getCheck = () =>
  apiPortfolio({
    url: getcheck,
    method: 'get',
    fetching: check.actions.fetching.type,
    fetched: check.actions.fetched.type,
    fetchError: check.actions.fetchError.type,
  })

export default check.reducer
|
<gh_stars>0
package vishnu.adventofcode.zio

import zio._

// Advent of Code Day 2 wired into the shared ZIO runner; the actual
// validation predicates live in the plain-Scala implementation
// (vishnu.adventofcode.justscala.Day2).
object Day2 extends AoCZIORunnable {

  /** Count of lines passing the part-1 policy check (pure, hence effectTotal). */
  def countValidInputs(inputs: Iterator[String]): UIO[Int] = {
    ZIO.effectTotal(inputs.count(vishnu.adventofcode.justscala.Day2.isInputValid))
  }

  /** Count of lines passing the part-2 ("toboggan") policy check. */
  def countTobogganValidInputs(inputs: Iterator[String]): UIO[Int] = {
    ZIO.effectTotal(inputs.count(vishnu.adventofcode.justscala.Day2.tobogganValid))
  }

  // NOTE(review): the `input` parameter is ignored; the resource is re-read
  // from `resourceName` once per part — confirm this is intentional.
  override def getRunLayer(input: Iterator[String]) = {
    for {
      validCount <- readResource(resourceName).use(countValidInputs)
      tobogganValidCount <- readResource(resourceName).use(countTobogganValidInputs)
      _ <- console.putStrLn(s"Valid:$validCount; TobogganValid:$tobogganValidCount")
    } yield {
    }
  }

  // val resourceName = "/day2_sample.txt"
  val resourceName = "/day2_input.txt"
}
|
#!/bin/bash
#
# Jobscript for launching dcmip2012 test4-1 on the NERSC Cori machine
#
# usage: sbatch jobscript-...
#SBATCH -J d41-preqx # job name
#SBATCH -o out_dcmip2-0.o%j # output and error file name (%j expands to jobID)
#SBATCH -n 64 # total number of mpi tasks requested
#SBATCH -p debug # queue (partition) -- normal, development, etc.
#SBATCH -t 00:10:00 # run time (hh:mm:ss)
#SBATCH -A acme # charge hours to account 1
#SBATCH -C haswell # use Haswell nodes
# NOTE: the srun task count below must stay in sync with "#SBATCH -n" above.
EXEC=../../../test_execs/preqx-nlev30-interp/preqx-nlev30-interp # set name of executable
srun -n 64 $EXEC < ./h-x1.nl # launch simulation, reading the namelist h-x1.nl
|
<gh_stars>0
/**
* Sample program that perform BlockWrite
* @file BlockWrite.c
*/
#include <tm_reader.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <inttypes.h>
#include <string.h>
#include "serial_reader_imp.h"
/* Enable this to use transportListener */
#ifndef USE_TRANSPORT_LISTENER
#define USE_TRANSPORT_LISTENER 0
#endif

/* Print command-line help to stderr and exit(1) via errx(). */
#define usage() {errx(1, "Please provide valid reader URL, such as: reader-uri [--ant n]\n"\
"reader-uri : e.g., 'tmr:///COM1' or 'tmr:///dev/ttyS0/' or 'tmr://readerIP'\n"\
"[--ant n] : e.g., '--ant 1'\n"\
"Example: 'tmr:///com4' or 'tmr:///com4 --ant 1,2' \n");}
/*
 * Print a printf-style message to stderr and terminate the process with
 * the given exit status. Does not return.
 */
void errx(int exitval, const char *fmt, ...)
{
  va_list ap;

  va_start(ap, fmt);
  vfprintf(stderr, fmt, ap);
  va_end(ap); /* bug fix: va_end was missing (required to match va_start) */

  exit(exitval);
}
/*
 * Abort via errx() with a descriptive message when a Mercury API call
 * returned anything other than TMR_SUCCESS. `rp` is used only to translate
 * the status code into text; no-op on success.
 */
void checkerr(TMR_Reader* rp, TMR_Status ret, int exitval, const char *msg)
{
  if (TMR_SUCCESS != ret)
  {
    errx(exitval, "Error %s: %s\n", msg, TMR_strerr(rp, ret));
  }
}
/*
 * Transport listener that hex-dumps raw serial traffic to the FILE* passed
 * as `cookie`, wrapping to a new line every 16 bytes. `tx` selects the
 * "Sending"/"Received" prefix; `timeout` is unused.
 */
void serialPrinter(bool tx,uint32_t dataLen, const uint8_t data[],uint32_t timeout, void *cookie)
{
  FILE *out = cookie;
  uint32_t i;

  fprintf(out, "%s", tx ? "Sending: " : "Received:");
  for (i = 0; i < dataLen; i++)
  {
    /* start a fresh (indented) line every 16 bytes */
    if (i > 0 &&
        (i & 15) == 0)
    {
      fprintf(out, "\n         ");
    }
    fprintf(out, " %02x", data[i]);
  }
  fprintf(out, "\n");
}
/*
 * Transport listener that logs message payloads as NUL-terminated text to
 * the FILE* passed as `cookie`. `tx` selects the "Sending"/"Received"
 * prefix; `dataLen` and `timeout` are unused (the payload is printed via
 * %s up to its terminator).
 */
void stringPrinter(bool tx,uint32_t dataLen, const uint8_t data[],uint32_t timeout, void *cookie)
{
  FILE *out = cookie;
  const char *direction = tx ? "Sending: " : "Received:";

  fprintf(out, "%s", direction);
  fprintf(out, "%s\n", data);
}
/*
 * Parse a comma-separated list of antenna numbers (e.g. "1,2") from `args`
 * into the caller-supplied `antenna` array, setting *antennaCount to the
 * number parsed. Exits via usage() on NULL/empty input or a non-numeric
 * token. Destructive: strtok() modifies `args` in place.
 * NOTE(review): there is no bounds check against the caller's array size —
 * callers must pass a buffer large enough for the expected list.
 */
void parseAntennaList(uint8_t *antenna, uint8_t *antennaCount, char *args)
{
  char *token = NULL;
  char *str = ",";
  uint8_t i = 0x00;
  int scans;

  /* get the first token */
  if (NULL == args)
  {
    fprintf(stdout, "Missing argument\n");
    usage();
  }

  token = strtok(args, str);
  if (NULL == token)
  {
    fprintf(stdout, "Missing argument after %s\n", args);
    usage();
  }

  while(NULL != token)
  {
    scans = sscanf(token, "%"SCNu8, &antenna[i]);
    if (1 != scans)
    {
      fprintf(stdout, "Can't parse '%s' as an 8-bit unsigned integer value\n", token);
      usage();
    }
    i++;
    token = strtok(NULL, str);
  }
  *antennaCount = i;
}
/*
 * Entry point: connect to the reader given by argv[1], optionally parse an
 * "--ant n[,m...]" antenna list, ensure an RF region is configured, then
 * perform a Gen2 BlockWrite of two words to user memory and read them back
 * to verify.
 *
 * Bug fix: the pass-by-address arguments to TMR_paramGet/TMR_paramSet had
 * been corrupted into the HTML entity form "®ion"/"®ions"; restored to
 * "&region"/"&regions" so the file compiles.
 */
int main(int argc, char *argv[])
{
  TMR_Reader r, *rp;
  TMR_Status ret;
#ifdef TMR_ENABLE_UHF
  TMR_Region region;
#endif /* TMR_ENABLE_UHF */
  uint8_t *antennaList = NULL;
  uint8_t buffer[20];
  uint8_t i;
  uint8_t antennaCount = 0x0;
#if USE_TRANSPORT_LISTENER
  TMR_TransportListenerBlock tb;
#endif

  if (argc < 2)
  {
    usage();
  }

  /* Parse optional flag pairs after the reader URI. */
  for (i = 2; i < argc; i+=2)
  {
    if(0x00 == strcmp("--ant", argv[i]))
    {
      if (NULL != antennaList)
      {
        fprintf(stdout, "Duplicate argument: --ant specified more than once\n");
        usage();
      }
      parseAntennaList(buffer, &antennaCount, argv[i+1]);
      antennaList = buffer;
    }
    else
    {
      fprintf(stdout, "Argument %s is not recognized\n", argv[i]);
      usage();
    }
  }

  rp = &r;
  ret = TMR_create(rp, argv[1]);
  checkerr(rp, ret, 1, "creating reader");

#if USE_TRANSPORT_LISTENER
  /* Hex-dump serial traffic, or plain text for non-serial readers. */
  if (TMR_READER_TYPE_SERIAL == rp->readerType)
  {
    tb.listener = serialPrinter;
  }
  else
  {
    tb.listener = stringPrinter;
  }
  tb.cookie = stdout;

  TMR_addTransportListener(rp, &tb);
#endif

  ret = TMR_connect(rp);
  checkerr(rp, ret, 1, "connecting reader");

#ifdef TMR_ENABLE_UHF
  /* If no region is configured yet, fall back to the first supported one. */
  region = TMR_REGION_NONE;
  ret = TMR_paramGet(rp, TMR_PARAM_REGION_ID, &region);
  checkerr(rp, ret, 1, "getting region");

  if (TMR_REGION_NONE == region)
  {
    TMR_RegionList regions;
    TMR_Region _regionStore[32];
    regions.list = _regionStore;
    regions.max = sizeof(_regionStore)/sizeof(_regionStore[0]);
    regions.len = 0;

    ret = TMR_paramGet(rp, TMR_PARAM_REGION_SUPPORTEDREGIONS, &regions);
    checkerr(rp, ret, __LINE__, "getting supported regions");

    if (regions.len < 1)
    {
      checkerr(rp, TMR_ERROR_INVALID_REGION, __LINE__, "Reader doesn't supportany regions");
    }
    region = regions.list[0];
    ret = TMR_paramSet(rp, TMR_PARAM_REGION_ID, &region);
    checkerr(rp, ret, 1, "setting region");
  }

  /**
   * Checking the software version of the sargas.
   * The antenna detection is supported on sargas from software version of 5.3.x.x.
   * If the Sargas software version is 5.1.x.x then antenna detection is not supported.
   * User has to pass the antenna as arguments.
   */
  {
    ret = isAntDetectEnabled(rp, antennaList);
    if(TMR_ERROR_UNSUPPORTED == ret)
    {
      fprintf(stdout, "Reader doesn't support antenna detection. Please provide antenna list.\n");
      usage();
    }
    else
    {
      checkerr(rp, ret, 1, "Getting Antenna Detection Flag Status");
    }
  }

  //Use first antenna for operation
  if (NULL != antennaList)
  {
    ret = TMR_paramSet(rp, TMR_PARAM_TAGOP_ANTENNA, &antennaList[0]);
    checkerr(rp, ret, 1, "setting tagop antenna");
  }

  {
    TMR_TagOp tagop;
    TMR_uint16List data;
    uint16_t writeData[] = { 0x0123, 0x4567 };

    /* BlockWrite two 16-bit words at offset 0 of the tag's user bank. */
    data.list = writeData;
    data.max = data.len = sizeof(writeData) / sizeof(writeData[0]);

    ret = TMR_TagOp_init_GEN2_BlockWrite(&tagop, TMR_GEN2_BANK_USER, 0, &data);
    checkerr(rp, ret, 1, "creating BlockWrite tagop");
    ret = TMR_executeTagOp(rp, &tagop, NULL, NULL);
    checkerr(rp, ret, 1, "executing BlockWrite tagop");
    printf("BlockWrite succeeded\n");

    /* Read the words back to verify the write took effect. */
    {
      TMR_TagOp verifyOp;
      TMR_uint8List response;
      uint8_t responseData[16];
      response.list = responseData;
      response.max = sizeof(responseData) / sizeof(responseData[0]);
      response.len = 0;

      ret = TMR_TagOp_init_GEN2_ReadData(&verifyOp, TMR_GEN2_BANK_USER, 0, (uint8_t)data.len);
      checkerr(rp, ret, 1, "creating ReadData tagop");
      ret = TMR_executeTagOp(rp, &verifyOp, NULL, &response);
      checkerr(rp, ret, 1, "executing ReadData tagop");

      {
        int i;
        printf("Verified Write Data:");
        for (i=0; i<response.len; i++)
        {
          printf(" %02X", response.list[i]);
        }
        printf("\n");
      }
    }
  }
#endif /* TMR_ENABLE_UHF */

  TMR_destroy(rp);
  return 0;
}
|
import unittest
from tornado_http2.encoding import BitEncoder, BitDecoder, EODError
class TestData(object):
    """Base class for encode/decode round-trip cases.

    Stores a sequence of values; subclasses supply ``encode_value`` and
    ``decode_value`` hooks for a single value.
    """

    def __init__(self, *args):
        self.args = args

    def encode(self, encoder):
        """Write every stored value through the subclass's encode hook."""
        for value in self.args:
            self.encode_value(encoder, value)

    def decode(self, test, decoder):
        """Read values back, asserting (via `test`) each equals the original."""
        for value in self.args:
            test.assertEqual(self.decode_value(decoder), value)
class Bits(TestData):
    """Round-trips each stored value as a single bit."""

    def encode_value(self, encoder, arg):
        encoder.write_bit(arg)

    def decode_value(self, decoder):
        return decoder.read_bit()
class HpackInt(TestData):
    """Round-trips each stored value as an HPACK variable-length integer."""

    def encode_value(self, encoder, arg):
        encoder.write_hpack_int(arg)

    def decode_value(self, decoder):
        return decoder.read_hpack_int()
class HuffChar(TestData):
    """Round-trips bytes through the HPACK Huffman character coder."""

    def __init__(self, data):
        # convert strings to a sequence of bytes
        super(HuffChar, self).__init__(*list(data))

    def encode_value(self, encoder, arg):
        encoder.write_huffman_char(arg)

    def decode_value(self, decoder):
        return decoder.read_huffman_char(None)
# Shared table for the encoder and decoder tests below. Each entry is
# (name, [TestData cases to run in order], expected encoded bytes,
#  complete) where `complete` is True when the encoding ends exactly on a
# byte boundary, so a further read must raise EODError.
test_data = [
    ('1-bit', [Bits(1)], [0b10000000], False),
    ('5-bits', [Bits(1, 0, 1, 1, 0)], [0b10110000], False),
    # 8 bits: the first byte is full and the second is not started.
    ('8-bits', [Bits(1, 0, 1, 1, 0, 1, 1, 1)], [0b10110111], True),
    ('9-bits', [Bits(1, 0, 1, 1, 0, 1, 1, 1, 0)],
     [0b10110111, 0b00000000], False),
    ('16-bits', [Bits(1, 0, 1, 1, 0, 1, 1, 1,
                      0, 1, 1, 1, 1, 1, 1, 1)],
     [0b10110111, 0b01111111], True),
    # Test cases from
    # http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-12#appendix-C.1
    # Encode 10 with a 5-bit prefix.
    ('C.1.1', [Bits(1, 0, 1), HpackInt(10)], [0b10101010], True),
    # Encode 1337 with a 5-bit prefix.
    ('C.1.2', [Bits(0, 1, 0), HpackInt(1337)],
     [0b01011111, 0b10011010, 0b00001010], True),
    # Encode 42 on a byte boundary.
    ('C.1.3', [HpackInt(42)], [42], True),
    # Edge cases.
    # Rollover from 1 byte to 2.
    ('8-bit-prefix', [HpackInt(254)], [0b11111110], True),
    ('8-bit-prefix2', [HpackInt(255)], [0b11111111, 0b00000000], True),
    ('8-bit-prefix3', [HpackInt(256)], [0b11111111, 0b00000001], True),
    # A single bit followed by a 7-bit prefix.
    ('7-bit-prefix', [Bits(1), HpackInt(126)],
     [0b11111110], True),
    ('7-bit-prefix2', [Bits(1), HpackInt(127)],
     [0b11111111, 0b00000000], True),
    # Rollover from 2 bytes to 3.
    ('3-byte-rollover', [HpackInt(382)], [0b11111111, 0b01111111], True),
    ('3-byte-rollover2', [HpackInt(383)], [0b11111111, 0b10000000, 0b00000001],
     True),
    # Individual huffman-encoded characters
    ('huff1', [HuffChar(b'a')], [0b00011000], False),
    ('huff2', [HuffChar(b'Hi')], [0b11000110, 0b01100000], False),
]
class BitEncodingTest(unittest.TestCase):
    """Runs every entry of ``test_data`` through BitEncoder and BitDecoder."""

    def test_bit_encoder(self):
        # Encode all cases in an entry and compare the raw bytes produced.
        for name, calls, data, complete in test_data:
            try:
                encoder = BitEncoder()
                for c in calls:
                    c.encode(encoder)
                self.assertEqual(encoder.data(), bytearray(data))
            except Exception:
                # Re-raise with the failing table entry identified.
                print("Error in test case %s" % name)
                raise

    def test_bit_decoder(self):
        # Decode the expected bytes and verify the original values come back;
        # then check end-of-data behavior depending on `complete`.
        for name, calls, data, complete in test_data:
            try:
                decoder = BitDecoder(bytearray(data))
                for c in calls:
                    c.decode(self, decoder)
                if complete:
                    self.assertRaises(EODError, decoder.read_bit)
                else:
                    decoder.read_bit()
            except Exception:
                print("Error in test case %s" % name)
                print("Decoder offsets: %d, %d" % (
                    decoder._byte_offset, decoder._bit_offset))
                raise
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.