text stringlengths 1 1.05M |
|---|
# -*- coding: utf-8 -*-
import logging
from django.db.models import Q
import django_filters
from django_filters.constants import EMPTY_VALUES
from django_filters.fields import Lookup
from django_filters.rest_framework import filters
from rest_framework.compat import coreapi
from rest_framework.schemas import ManualSchema
import six
from calculator import models as calc_models
logger = logging.getLogger('laa-calc')
class CalculatorSchema(ManualSchema):
    """Schema that extends the supplied field list with one numeric query
    parameter per price ``Unit`` and per ``ModifierType`` in the database,
    then defers to ``ManualSchema``."""

    def __init__(self, fields, *args, **kwargs):
        # One "quantity" query parameter for every price unit.
        fields.extend(
            coreapi.Field(unit.pk.lower(), **{
                'required': False,
                'location': 'query',
                'type': 'number',
                'description': 'Quantity of the price unit: {}'.format(unit.name),
            })
            for unit in calc_models.Unit.objects.all()
        )
        # One query parameter for every price modifier.
        fields.extend(
            coreapi.Field(modifier.name.lower(), **{
                'required': False,
                'location': 'query',
                'type': 'number',
                'description': 'Price modifier: {}'.format(modifier.description),
            })
            for modifier in calc_models.ModifierType.objects.all()
        )
        super().__init__(fields, *args, **kwargs)
class ModelOrNoneChoiceFilter(django_filters.ModelChoiceFilter):
    """``ModelChoiceFilter`` variant that matches rows whose field equals
    the given value OR is NULL (i.e. rows that apply to "any" choice)."""

    def filter(self, qs, value):
        # Unwrap an explicit Lookup; otherwise use the configured lookup.
        if isinstance(value, Lookup):
            lookup = six.text_type(value.lookup_type)
            value = value.value
        else:
            lookup = self.lookup_expr
        if value in EMPTY_VALUES:
            return qs
        if self.distinct:
            qs = qs.distinct()
        # NULL means "applies to every choice", so include those rows too.
        is_null = Q(**{'%s__isnull' % self.field_name: True})
        matches = Q(**{'%s__%s' % (self.field_name, lookup): value})
        return self.get_method(qs)(is_null | matches)
class FeeTypeFilter(django_filters.FilterSet):
    """FilterSet for FeeType; exposes only the ``is_basic`` flag."""
    is_basic = filters.BooleanFilter()

    class Meta:
        model = calc_models.FeeType
        fields = (
            'is_basic',
        )
class PriceFilter(django_filters.FilterSet):
    """FilterSet for Price queries.

    ``offence_class`` and ``advocate_type`` use ModelOrNoneChoiceFilter,
    so a query returns prices whose field matches the value OR is null.
    """
    fee_type_code = django_filters.CharFilter(
        field_name='fee_type__code',
    )
    offence_class = ModelOrNoneChoiceFilter(
        field_name='offence_class',
        queryset=calc_models.OffenceClass.objects.all(),
        help_text=(
            'Note the query will return prices '
            'with `offence_class` either matching the value or null.'
        )
    )
    advocate_type = ModelOrNoneChoiceFilter(
        field_name='advocate_type',
        queryset=calc_models.AdvocateType.objects.all(),
        help_text=(
            'Note the query will return prices '
            'with `advocate_type` either matching the value or null.'
        )
    )

    class Meta:
        model = calc_models.Price
        # Dict form: field name -> allowed lookup expressions.
        fields = {
            'scenario': ['exact'],
            'unit': ['exact'],
            'limit_from': ['exact', 'gte'],
            'limit_to': ['exact', 'lte'],
            'fixed_fee': ['lte', 'gte'],
            'fee_per_unit': ['lte', 'gte'],
        }
|
#!/bin/bash
# Start two blockbuster celery workers in a detached tmux session
# ("bb-workers") and attach to it.  The second worker sleeps 5 seconds so
# the first can initialise before it starts.
# Fix: the original command was wrapped across several lines without
# continuation backslashes and so was not a valid script.
CELERY=/home/vagrant/env/bin/celery
/usr/bin/tmux new-session -s bb-workers -d \
    "cd /vagrant && export C_FORCE_ROOT=\"true\" && $CELERY -A blockbuster_celery.bb_celery worker -c 1 -l info -n worker10" \; \
  split-window -h -d \
    "cd /vagrant && export C_FORCE_ROOT=\"true\" && sleep 5 && $CELERY -A blockbuster_celery.bb_celery worker -c 1 -l info -n worker11" \; \
  attach
#!/bin/bash
# Convert the docker image sysinner:a1el7v1 into an OCI layout, retag it
# as a2p1el7 inside the index, repack as a tar and load it into pouch.
# Fix: variable expansions are now quoted so the script stays correct if
# TMPDIR is ever changed to a path containing spaces.
set -e

TMPDIR=tmpdir

rm -fr "$TMPDIR"
docker save sysinner:a1el7v1 | ../../../bin/docker2oci "$TMPDIR"

cd "$TMPDIR"
# Retag inside the OCI index before repacking.
sed -i 's/a1el7v1/a2p1el7/g' index.json
tar cf ../a2p1el7.oci.tar .
cd ../

pouch load -i a2p1el7.oci.tar sysinner

rm -fr "$TMPDIR"
|
// Minimal Redux example: a todos reducer plus a visibility reducer.
// Fixes versus the original:
//  * `import('redux')` is asynchronous and returns a Promise, so
//    `import('redux').combineReducers` was undefined — use require().
//  * `createStore` was used but never imported.
//  * `dispatch` was called unbound — it must go through the store.
//  * the initial `model` was defined but never handed to createStore.
const { combineReducers, createStore } = require('redux');

const model = {
  todos: [
    {
      text: 'This is my TODO',
      complete: false
    }
  ],
  visibility: 'VIS_ALL'
};

// Reducer for the todos list.
const todos = (state = [], action) => {
  switch (action.type) {
    case 'ADD':
      return [
        ...state,
        {
          text: action.text,
          complete: false
        }
      ];
    default:
      return state;
  }
};

// Reducer for the visibility filter (stores the last action type).
const visibility = (state = 'VIS_ALL', action) => {
  return action.type;
};

const appReducers = combineReducers({ todos, visibility });
const store = createStore(appReducers, model);

// Action creator + dispatch helper.
const addTodo = (text) => {
  store.dispatch({
    type: 'ADD',
    text
  });
};

store.subscribe(() => console.log(store.getState()));

addTodo('Learn about Redux Actions');
addTodo('Learn about Redux Reducers');
addTodo('Learn React-Redux');
<gh_stars>0
package com.netshell.test.java;
import java.util.UUID;
/**
* Created by ASHEKHA
* on 4/30/2017.
*/
public class FromUUIDTest implements Test {

    /**
     * Generates a random UUID, prints it, then round-trips it through
     * UUID.fromString twice and prints both parsed results.
     */
    @Override
    public void execute() throws Exception {
        final UUID original = UUID.randomUUID();
        System.out.println(original);
        final String text = original.toString();
        System.out.println(UUID.fromString(text));
        System.out.println(UUID.fromString(text));
    }
}
|
<reponame>ericho/sync
package io.metaparticle.sync.examples;
import io.metaparticle.sync.Election;
import java.util.Random;
public class ElectionMain {
    // Repeatedly joins the leader election named "test", holds mastership
    // while elected, resigns after a random interval, then rejoins.
    public static void main(String[] args) throws InterruptedException {
        Random r = new Random();
        while (true) {
            // Monitor object used to park the "elected" callback thread
            // until mastership is lost.
            final Object block = new Object();
            Election e = new Election(
                "test", args[0],
                () -> {
                    // Elected master: block here until notified.
                    System.out.println("I am the master.");
                    synchronized(block) {
                        try {
                            block.wait();
                        } catch (InterruptedException ex) {
                            ex.printStackTrace();
                        }
                    }
                },
                () -> {
                    // Mastership lost: wake the parked callback above.
                    System.out.println("I lost the master.");
                    synchronized(block) {
                        block.notify();
                    }
                });
            new Thread(e).start();
            // Participate for 25-39 seconds, then resign and pause 10s.
            Thread.sleep((r.nextInt(15) + 25) * 1000);
            e.shutdown();
            Thread.sleep(10 * 1000);
        }
    }
}
import { test, expect } from '@playwright/experimental-ct-react'
import { DelayedData } from './DelayedData';
// Mount the DelayedData component and verify that toHaveText retries
// until the asynchronously-rendered text appears.
test('toHaveText works on delayed data', async ({ mount }) => {
  const component = await mount(<DelayedData data='complete' />);
  await expect(component).toHaveText('complete');
});
|
#!/bin/bash
set -ev

# Corrade
# Build and install Corrade (Magnum's base library) into $HOME/deps.
git clone --depth 1 git://github.com/mosra/corrade.git
cd corrade
mkdir build && cd build
cmake .. \
    -DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS" \
    -DCMAKE_INSTALL_PREFIX=$HOME/deps \
    -DCMAKE_INSTALL_RPATH=$HOME/deps/lib \
    -DCMAKE_BUILD_TYPE=Debug \
    -DWITH_INTERCONNECT=OFF \
    -DBUILD_DEPRECATED=$BUILD_DEPRECATED \
    -G Ninja
ninja install
cd ../..

# Magnum
# Only the components needed here are enabled; mesh/primitive tools are
# pulled in only when DART support ($WITH_DART) is on.
git clone --depth 1 git://github.com/mosra/magnum.git
cd magnum
mkdir build && cd build
cmake .. \
    -DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS" \
    -DCMAKE_INSTALL_PREFIX=$HOME/deps \
    -DCMAKE_INSTALL_RPATH=$HOME/deps/lib \
    -DCMAKE_BUILD_TYPE=Debug \
    -DWITH_AUDIO=OFF \
    -DWITH_DEBUGTOOLS=OFF \
    -DWITH_MESHTOOLS=$WITH_DART \
    -DWITH_PRIMITIVES=$WITH_DART \
    -DWITH_SCENEGRAPH=ON \
    -DWITH_SHADERS=ON \
    -DWITH_TEXT=OFF \
    -DWITH_TEXTURETOOLS=OFF \
    -DWITH_OPENGLTESTER=ON \
    -DWITH_ANYIMAGEIMPORTER=ON \
    -DWITH_SDL2APPLICATION=ON \
    -DWITH_WINDOWLESS${PLATFORM_GL_API}APPLICATION=ON \
    -DBUILD_DEPRECATED=$BUILD_DEPRECATED \
    -G Ninja
ninja install
cd ../..

# DartIntegration needs plugins
if [ "$TRAVIS_OS_NAME" == "osx" ]; then
    # Magnum Plugins
    git clone --depth 1 git://github.com/mosra/magnum-plugins.git
    cd magnum-plugins
    mkdir build && cd build
    cmake .. \
        -DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS" \
        -DCMAKE_INSTALL_PREFIX=$HOME/deps \
        -DCMAKE_INSTALL_RPATH=$HOME/deps/lib \
        -DCMAKE_BUILD_TYPE=Debug \
        -DWITH_ASSIMPIMPORTER=$WITH_DART \
        -DWITH_STBIMAGEIMPORTER=$WITH_DART \
        -G Ninja
    ninja install
    cd ../..
fi

# Build the integration project itself (presumably the repository this
# script lives in) against the dependencies installed above.
mkdir build && cd build
cmake .. \
    -DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS" \
    -DCMAKE_INSTALL_PREFIX=$HOME/deps \
    -DCMAKE_PREFIX_PATH=$HOME/deps-dart \
    -DCMAKE_INSTALL_RPATH=$HOME/deps/lib \
    -DIMGUI_DIR=$HOME/imgui \
    -DCMAKE_BUILD_TYPE=Debug \
    -DWITH_BULLET=ON \
    -DWITH_DART=$WITH_DART \
    -DWITH_EIGEN=ON \
    -DWITH_GLM=ON \
    -DWITH_IMGUI=ON \
    -DWITH_OVR=OFF \
    -DBUILD_TESTS=ON \
    -DBUILD_GL_TESTS=ON \
    -G Ninja
# Otherwise the job gets killed (probably because using too much memory)
ninja -j4

# DART leaks somewhere deep in std::string, run these tests separately to avoid
# suppressing too much
ASAN_OPTIONS="color=always" LSAN_OPTIONS="color=always" CORRADE_TEST_COLOR=ON ctest -V -E "GLTest|Dart"
ASAN_OPTIONS="color=always" LSAN_OPTIONS="color=always suppressions=$TRAVIS_BUILD_DIR/package/ci/leaksanitizer.conf" CORRADE_TEST_COLOR=ON ctest -V -R Dart -E GLTest
|
#include <stdio.h>
// Recursive function to find the nth Fibonacci number
// Compute the nth Fibonacci number (F(0)=0, F(1)=1).
// The original naive recursion took O(2^n) time; this iterative version
// returns the same values in O(n) time and O(1) space with the same
// signature.
int Fibonacci(int n)
{
    if (n <= 1)
        return n;
    int prev = 0, curr = 1;
    for (int i = 2; i <= n; i++) {
        int next = prev + curr;  // F(i) = F(i-1) + F(i-2)
        prev = curr;
        curr = next;
    }
    return curr;
}
// Demo: print the 10th Fibonacci number.
int main()
{
    const int n = 10;
    printf("The %dth Fibonacci number is %d\n", n, Fibonacci(n));
    return 0;
}
import test from 'ava';
import iquotes from '.';
const lifeQuotes = require('./quotes/life');
const loveQuotes = require('./quotes/love');
const devQuotes = require('./quotes/dev');
// True when every quote in `data` belongs to `category`.
// The original filtered and compared lengths behind a truthiness guard
// that could never be false (Array.filter always returns an array);
// Array.every expresses the intent directly and is equivalent,
// including for an empty array (both return true).
function testCategory(data, category) {
  return data.every(d => d.category === category);
}
// Every data file must tag each of its quotes with its own category name.
test('all data files must have right category', t => {
  t.true(testCategory(lifeQuotes, 'Life'));
  t.true(testCategory(loveQuotes, 'Love'));
  t.true(testCategory(devQuotes, 'Dev'));
});

// all() returns the quotes of one category, or everything with no arg.
test('get all quotes', t => {
  t.truthy(iquotes.all('life'));
  t.truthy(iquotes.all('love'));
  t.truthy(iquotes.all('dev'));
  t.truthy(iquotes.all());
});

// random() yields some quote for each category and overall.
test('get random', t => {
  t.truthy(iquotes.random('life'));
  t.truthy(iquotes.random('love'));
  t.truthy(iquotes.random('dev'));
  t.truthy(iquotes.random());
});

// count() must agree with the size of the underlying data files.
test('get count', t => {
  t.deepEqual(iquotes.count('life'), lifeQuotes.length);
  t.deepEqual(iquotes.count('love'), loveQuotes.length);
  t.deepEqual(iquotes.count('dev'), devQuotes.length);
  t.deepEqual(iquotes.count(), lifeQuotes.length + loveQuotes.length + devQuotes.length);
});

// countDetail() returns one {category, count} entry per category + total.
test('get count detail', t => {
  t.deepEqual(iquotes.countDetail(), [
    {category: 'life', count: lifeQuotes.length},
    {category: 'love', count: loveQuotes.length},
    {category: 'dev', count: devQuotes.length},
    {category: 'all', count: lifeQuotes.length + loveQuotes.length + devQuotes.length}
  ]);
});

// categories() lists the supported category keys.
test('get types', t => {
  t.deepEqual(iquotes.categories(), ['life', 'love', 'dev', 'all']);
});
|
#include "fstarlib.h"
// Rotate left and right for different sizes of integers.
// NOTE(review): these assume 0 < y < width.  For y == 0 the
// complementary shift (width - y) equals the type width, which is
// undefined behaviour in C for the 32/64-bit variants — confirm callers
// never pass 0.
// NOTE(review): the FStar_UInt63_* variants use 64-bit shift amounts;
// whether that is intended for 63-bit values should be confirmed
// against the F* runtime they mirror.
uint8 FStar_UInt8_op_Less_Less_Less(uint8 x, int y){
    return ((uint8)(x << y)) + (x >> (8 - y));
}
uint8 FStar_UInt8_op_Greater_Greater_Greater(uint8 x, int y){
    return (x >> y) + ((uint8)(x << (8 - y)));
}
uint8 FStar_UInt8_rotate_left(uint8 x, int y){
    return ((uint8)(x << y)) + (x >> (8 - y));
}
uint8 FStar_UInt8_rotate_right(uint8 x, int y){
    return (x >> y) + ((uint8)(x << (8 - y)));
}
inline uint32 FStar_UInt32_op_Less_Less_Less(uint32 x, int y){
    return ((uint32)(x << y)) + (x >> (32 - y));
}
inline uint32 FStar_UInt32_op_Greater_Greater_Greater(uint32 x, int y){
    return (x >> y) + ((uint32)(x << (32 - y)));
}
uint64 FStar_UInt63_op_Less_Less_Less(uint64 x, int y){
    return ((uint64)(x << y)) + (x >> (64 - y));
}
uint64 FStar_UInt63_op_Greater_Greater_Greater(uint64 x, int y){
    return (x >> y) + ((uint64)(x << (64 - y)));
}
uint64 FStar_UInt64_op_Less_Less_Less(uint64 x, int y){
    return ((uint64)(x << y)) + (x >> (64 - y));
}
uint64 FStar_UInt64_op_Greater_Greater_Greater(uint64 x, int y){
    return (x >> y) + ((uint64)(x << (64 - y)));
}
// Converts bytes to and from int32s. Should be integrated to the compiler
// NOTE(review): despite the name, this copies `l` 32-bit words through a
// uint32* view of `res`, so `res` must be suitably aligned and hold at
// least 4*l bytes; the unsigned `i` vs signed `l` comparison assumes
// l >= 0 — confirm at the call sites.
void FStar_SBytes_sbytes_of_uint32s(uint8* res, uint32* b, int l){
    unsigned int i;
    uint32* tmp = (uint32*)res;
    for(i = 0; i < l; i++){
        tmp[i] = b[i];
    }
}
// XOR `l` bytes of a and b into output: output[i] = a[i] ^ b[i].
void FStar_SBytes_xor_bytes(uint8* output, uint8* a, uint8* b, int l){
    unsigned int i;
    for(i=0; i < l; i++){
        output[i] = a[i] ^ b[i];
    }
}
// Constant-time x >= y for bytes: 0xff when x >= y, 0 otherwise.
// ((int16_t)x - y) is negative exactly when x < y; >> 15 replicates the
// sign bit (sign-extending right shift of a negative value is
// implementation-defined but universal on mainstream compilers).
inline uint8 FStar_UInt8_gte(uint8 x, uint8 y){
    return (uint8)~(((int16_t)x) - y >> 15);
}
// Constant-time equality for bytes: 0xff when a == b, 0 otherwise.
// Folds the bits of ~(a ^ b) together with ANDs, then smears bit 7
// across the byte (assumes `char` is signed so the shift sign-extends).
inline uint8 FStar_UInt8_eq(uint8 a, uint8 b){
    a = ~(a ^ b);
    a &= a << 4;
    a &= a << 2;
    a &= a << 1;
    return ((char)a) >> 7;
}
// Constant time comparisons
// Constant-time equality for 32-bit values: all-ones mask when a == b,
// 0 otherwise.  Bit-folds ~(a ^ b), then smears bit 31 via an
// arithmetic shift (implementation-defined but sign-extending on
// mainstream compilers).
uint32 FStar_UInt32_eq(uint32 a, uint32 b) {
    a = ~(a ^ b);
    a &= a << 16;
    a &= a << 8;
    a &= a << 4;
    a &= a << 2;
    a &= a << 1;
    return ((int32_t)a) >> 31;
}
// Constant-time a >= b for 32-bit values: all-ones mask when a >= b,
// 0 otherwise.
// Fix: `tmp = a - b;` evaluated the subtraction in 32-bit *unsigned*
// arithmetic, which wraps instead of going negative, so the sign test
// below always saw a non-negative value when a < b.  Widening both
// operands first makes the difference genuinely signed.
uint32 FStar_UInt32_gte(uint32 a, uint32 b) {
    int64_t tmp = (int64_t)a - (int64_t)b;  // negative iff a < b
    return ~(tmp >> 63);
}
// Constant-time equality for 63-bit values held in uint64: all-ones
// mask when a == b, 0 otherwise (same bit-fold/smear scheme as the
// 32-bit variant, extended to 64 bits).
uint64 FStar_UInt63_eq(uint64 a, uint64 b) {
    a = ~(a ^ b);
    a &= a << 32;
    a &= a << 16;
    a &= a << 8;
    a &= a << 4;
    a &= a << 2;
    a &= a << 1;
    return ((int64_t)a) >> 63;
}
// Constant-time a >= b for 63-bit values held in uint64: returns an
// all-ones mask when a >= b and 0 otherwise, matching the convention of
// the sibling FStar_UInt32_gte / FStar_UInt64_gte helpers.
// Two fixes versus the original:
//  * `a -= - b;` added b instead of subtracting it;
//  * the shift must be arithmetic (sign-extending) so the result is a
//    full mask rather than ~0 / ~1.
uint64 FStar_UInt63_gte(uint64 a, uint64 b) {
    // a and b are never negative and fit in 63 bits, so bit 63 of the
    // 64-bit difference is set exactly when a < b.
    return ~(((int64_t)(a - b)) >> 63);
}
// Constant-time equality for 64-bit values: all-ones mask when a == b,
// 0 otherwise.
uint64 FStar_UInt64_eq(uint64 a, uint64 b) {
    a = ~(a ^ b);
    a &= a << 32;
    a &= a << 16;
    a &= a << 8;
    a &= a << 4;
    a &= a << 2;
    a &= a << 1;
    return ((int64_t)a) >> 63;
}
// Constant-time a >= b for 64-bit values: all-ones mask when a >= b.
// Widens to 128 bits so the difference cannot wrap; __int128 is a
// GCC/Clang extension, not portable C.
uint64 FStar_UInt64_gte(uint64 a, uint64 b) {
    return (uint64)~(((__int128_t)a) - b >> 127);
}
// Print `len` bytes of b as lowercase hex (two digits per byte),
// followed by a newline.  The original emulated zero-padding with a
// branch and "0%x"; the standard "%02x" width/pad specifier produces
// the identical output in a single call.
void print_bytes(uint8* b, int len){
    int i;
    for (i = 0; i < len; i++){
        printf("%02x", 0xff & b[i]);
    }
    printf("\n");
}
|
/** Sums the elements of an int array that are at most 10. */
public class SumUnder10 {
    // Fix: the original declaration read `publicclassSumUnder10`
    // (missing spaces) and did not compile.

    /**
     * Returns the sum of all values in {@code nums} that are {@code <= 10}.
     * (Despite the class name, the boundary is inclusive, as the
     * comparison below shows.)
     */
    public static int getSum(int[] nums) {
        int sum = 0;
        for (int num : nums) {
            if (num <= 10) {
                sum += num;
            }
        }
        return sum;
    }

    public static void main(String[] args) {
        int[] nums = {1, 3, 7, 8, 9};
        System.out.println(getSum(nums)); //Outputs 28
    }
}
/*
* This header file defines various components.
*/
#pragma once
#include "Core/UUID.h"
#include "Renderer/Texture2D.h"
#include "Scene/SceneCamera.h"
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/gtx/quaternion.hpp>
namespace Basil
{
	// Camera attached to an entity; `primary` marks the camera used for
	// rendering, `fixedAspectRatio` keeps its aspect from tracking the
	// viewport.
	struct CameraComponent
	{
		CameraComponent() = default;
		CameraComponent(const CameraComponent&) = default;
		SceneCamera camera;
		bool primary = true;
		bool fixedAspectRatio = false;
	};

	// Unique identifier for an entity.
	struct IDComponent
	{
		UUID id;
		IDComponent() = default;
		IDComponent(const IDComponent&) = default;
	};

	class ScriptableEntity; // Forward declaration

	// Binds a native C++ script to an entity.  bind<T>() stores
	// type-erased create/destroy function pointers so the scene can
	// instantiate and tear down the script without knowing T.
	struct NativeScriptComponent
	{
		ScriptableEntity* instance = nullptr;
		ScriptableEntity*(*instantiateScript)();
		void (*destroyScript)(NativeScriptComponent*);

		template <typename T>
		void bind()
		{
			instantiateScript = []() { return static_cast<ScriptableEntity*>(new T()); };
			destroyScript = [](NativeScriptComponent* nsc) { delete nsc->instance; nsc->instance = nullptr; };
		}
	};

	// Human-readable entity name.
	struct TagComponent
	{
		TagComponent() = default;
		TagComponent(const TagComponent&) = default;
		TagComponent(const std::string& tag) : tag(tag) {}
		std::string tag;
	};

	// Entity transform: translation, rotation (passed to glm::quat, so
	// Euler angles in radians), and scale.
	struct TransformComponent
	{
		TransformComponent() = default;
		TransformComponent(const TransformComponent&) = default;
		TransformComponent(const glm::vec3& translation) : translation(translation) {}

		// Compose the model matrix as T * R * S.
		glm::mat4 getTransform() const
		{
			glm::mat4 localRotation = glm::toMat4(glm::quat(rotation));
			return glm::translate(glm::mat4(1.0f), translation)
				* localRotation
				* glm::scale(glm::mat4(1.0f), scale);
		}

		glm::vec3 translation{ 0.0f, 0.0f, 0.0f };
		glm::vec3 rotation = { 0.0f, 0.0f, 0.0f };
		glm::vec3 scale{ 1.0f, 1.0f, 1.0f };
	};

	// == 2D Rendering ==

	// Textured, tinted quad sprite.
	struct SpriteRendererComponent
	{
		SpriteRendererComponent() = default;
		SpriteRendererComponent(const SpriteRendererComponent&) = default;
		SpriteRendererComponent(const glm::vec4& color) : color(color) {}
		float tilingFactor = 1.0f;
		glm::vec4 color{ 1.0f, 1.0f, 1.0f, 1.0f };
		Shared<Texture2D> texture;
	};

	// Filled circle with ring `thickness` and soft edge `fade`.
	struct CircleRendererComponent
	{
		glm::vec4 color{ 1.0f, 1.0f, 1.0f, 1.0f };
		float thickness = 1.0f;
		float fade = 0.005f;
		CircleRendererComponent() = default;
		CircleRendererComponent(const CircleRendererComponent&) = default;
	};

	// == Physics ==

	// 2D rigid body; `runtimeBody` holds the physics engine's opaque
	// body handle while the scene is running.
	struct Rigidbody2DComponent
	{
		enum class BodyType { Static = 0, Dynamic, Kinematic };
		Rigidbody2DComponent() = default;
		Rigidbody2DComponent(const Rigidbody2DComponent& other) = default;
		BodyType type = BodyType::Static;
		bool fixedRotation = false;
		void* runtimeBody = nullptr;
	};

	// Box collider.
	// NOTE(review): `runtimeBody` on the colliders presumably stores the
	// runtime fixture rather than the body — confirm against the scene
	// runtime code.
	struct BoxCollider2DComponent
	{
		BoxCollider2DComponent() = default;
		BoxCollider2DComponent(const BoxCollider2DComponent& other) = default;
		glm::vec2 offset = { 0.0f, 0.0f };
		glm::vec2 size = { 0.5f, 0.5f };
		float density = 1.0f;
		float friction = 0.5f;
		float restitution = 0.0f;
		float restitutionThreshold = 0.5f;
		void* runtimeBody = nullptr;
	};

	// Circle collider (same material parameters as the box collider).
	struct CircleCollider2DComponent
	{
		CircleCollider2DComponent() = default;
		CircleCollider2DComponent(const CircleCollider2DComponent& other) = default;
		glm::vec2 offset = { 0.0f, 0.0f };
		float radius = 0.5f;
		float density = 1.0f;
		float friction = 0.5f;
		float restitution = 0.0f;
		float restitutionThreshold = 0.5f;
		void* runtimeBody = nullptr;
	};
}
// Return the string that occurs most often in `strings`
// (null for an empty array; ties go to the first key reaching the max).
// Fix: the original had unbalanced braces — the max-scan loop and the
// `return` sat inside the counting loop, so the function did not parse
// (and a naive brace repair would have returned after one iteration).
function findMostFrequentString(strings) {
  const frequencyTable = {};
  let mostFrequentString = null;
  let highestFrequency = 0;

  // Count the frequency of each string.
  for (const s of strings) {
    frequencyTable[s] = (frequencyTable[s] || 0) + 1;
  }

  // Find the string with the highest frequency.
  for (const k in frequencyTable) {
    if (frequencyTable[k] > highestFrequency) {
      mostFrequentString = k;
      highestFrequency = frequencyTable[k];
    }
  }
  return mostFrequentString;
}
<filename>conf/db.js
/**
* @description 数据库连接
* @author lnden
*/
const mongoose = require('mongoose')
const config = require('./index')
const log4js =require('../utils/log4')
// Connect to MongoDB and log the connection outcome.
// Fixes: the option was misspelled `userNewUrlParser` (silently ignored
// by mongoose), and the failure log misspelled 数据库 as 数据裤.
mongoose.connect(config.URL, {
  useNewUrlParser: true,
  useUnifiedTopology: true
})

const db = mongoose.connection

db.on('error', () => {
  log4js.error('***数据库连接失败***')
})

db.on('open', () => {
  log4js.info('*** 数据库连接成功 ***')
})
<reponame>hpbuniat/flightzilla<filename>Gruntfile.js
// Grunt build for flightzilla: refresh dependencies (composer/bower),
// concatenate the jQuery UI modules actually used into one file, and
// copy third-party assets into public/.
module.exports = function (grunt) {
  // Project configuration.
  grunt.initConfig({
    pkg: grunt.file.readJSON('package.json'),
    // Banner template built from package.json metadata.
    meta: {
      banner: '/* <%= pkg.description %>, v<%= pkg.version %> <%= pkg.homepage %>\n' +
        'Copyright (c) <%= grunt.template.today("yyyy") %> <%= pkg.author.name %>, <%= pkg.license.type %> license ' +
        '<%= pkg.license.url %>*/\n'
    },
    // Lint options only; no file targets are defined here.
    jshint: {
      options: {
        curly: true,
        eqeqeq: true,
        forin: true,
        indent: 2,
        latedef: false,
        newcap: true,
        noarg: true,
        noempty: true,
        white: false,
        sub: true,
        undef: true,
        unused: true,
        loopfunc: true,
        expr: true,
        evil: true,
        eqnull: true
      }
    },
    // External dependency managers.
    shell: {
      composer: {
        command: 'php composer.phar update',
        stdout: true,
        stderr: true
      },
      bower: {
        command: 'bower update',
        stdout: true,
        stderr: true
      }
    },
    // Bundle the required jQuery UI widgets into one minified file.
    concat: {
      options: {
        separator: '\n'
      },
      dist: {
        src: [
          './bower_components/jquery-ui/ui/minified/jquery.ui.core.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.widget.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.mouse.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.position.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.draggable.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.droppable.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.selectable.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.autocomplete.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.datepicker.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.menu.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.progressbar.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.slider.min.js',
          './bower_components/jquery-ui/ui/minified/jquery.ui.tooltip.min.js'
        ],
        dest: 'bower_components/jquery-ui/jquery.ui.custom.min.js'
      }
    },
    // Flatten vendor JS/CSS/fonts into the public directory.
    copy: {
      bower: {
        files: [
          {src: [
            './bower_components/bootstrap/dist/js/bootstrap.min.js',
            './bower_components/bootstrap/dist/css/bootstrap.css'
          ], dest: './public/vendor/', filter: 'isFile', expand: true, flatten: true},
          {src: ['./bower_components/d3/d3.min.js'], dest: './public/vendor/', filter: 'isFile', expand: true, flatten: true},
          {src: ['./bower_components/lodash/dist/lodash.min.js'], dest: './public/vendor/', filter: 'isFile', expand: true, flatten: true},
          {src: ['./bower_components/highcharts.com/js/highcharts.src.js'], dest: './public/vendor/', filter: 'isFile', expand: true, flatten: true},
          {src: ['./bower_components/jquery/jquery.min.js'], dest: './public/vendor/', filter: 'isFile', expand: true, flatten: true},
          {src: [
            './bower_components/jquery-ui/themes/smoothness/*.min.css',
            './bower_components/jquery-ui/themes/smoothness/*.theme.css'
          ], dest: './public/vendor/', filter: 'isFile', expand: true, flatten: true},
          {src: ['./bower_components/jquery-ui/jquery.ui.custom.min.js'], dest: './public/vendor/', filter: 'isFile', expand: true, flatten: true},
          {src: ['./bower_components/jquery.tablesorter/js/jquery.tablesorter.min.js'], dest: './public/vendor/', filter: 'isFile', expand: true, flatten: true},
          {src: ['./bower_components/bootstrap/dist/fonts/*'], dest: './public/fonts/', filter: 'isFile', expand: true, flatten: true}
        ]
      }
    }
  });

  // load tasks
  grunt.loadNpmTasks('grunt-contrib-concat');
  grunt.loadNpmTasks('grunt-contrib-copy');
  grunt.loadNpmTasks('grunt-shell');

  // Default task(s).
  grunt.registerTask('default', ['shell', 'concat', 'copy']);
};
|
#!/usr/bin/env bash
# This script is executed inside the builder image
set -e

source ./ci/matrix.sh

# Let the build pick its own toolchain and run headless.
unset CC; unset CXX
unset DISPLAY

export CCACHE_COMPRESS=${CCACHE_COMPRESS:-1}
export CCACHE_SIZE=${CCACHE_SIZE:-400M}

# Lint / documentation checks.
if [ "$PULL_REQUEST" != "false" ]; then contrib/devtools/commit-script-check.sh $COMMIT_RANGE; fi
#if [ "$CHECK_DOC" = 1 ]; then contrib/devtools/check-doc.py; fi TODO reenable after all Bitcoin PRs have been merged and docs fully fixed
if [ "$CHECK_DOC" = 1 ]; then contrib/devtools/check-rpc-mappings.py .; fi
if [ "$CHECK_DOC" = 1 ]; then contrib/devtools/lint-all.sh; fi

ccache --max-size=$CCACHE_SIZE

if [ -n "$USE_SHELL" ]; then
  export CONFIG_SHELL="$USE_SHELL"
fi

BITCOIN_CONFIG_ALL="--disable-dependency-tracking --prefix=$BUILD_DIR/depends/$HOST --bindir=$OUT_DIR/bin --libdir=$OUT_DIR/lib"

test -n "$USE_SHELL" && eval '"$USE_SHELL" -c "./autogen.sh"' || ./autogen.sh

# Configure out-of-tree, build a release tarball via `make distdir`,
# then configure and build again from that tarball so files missing
# from the distribution are caught.
rm -rf build-ci
mkdir build-ci
cd build-ci

../configure --cache-file=config.cache $BITCOIN_CONFIG_ALL $BITCOIN_CONFIG || ( cat config.log && false)
make distdir VERSION=$BUILD_TARGET

cd axecore-$BUILD_TARGET
./configure --cache-file=../config.cache $BITCOIN_CONFIG_ALL $BITCOIN_CONFIG || ( cat config.log && false)

make $MAKEJOBS $GOAL || ( echo "Build failure. Verbose build follows." && make $GOAL V=1 ; false )
|
#!/usr/bin/env bash
# Enable the Helm 3 addon: download the helm binary (once) into
# $SNAP_DATA/bin/helm3.
set -e

source $SNAP/actions/common/utils.sh
CA_CERT=/snap/core18/current/etc/ssl/certs/ca-certificates.crt

echo "Enabling Helm 3"

# Only fetch when the binary is not already installed.
if [ ! -f "${SNAP_DATA}/bin/helm3" ]
then
  SOURCE_URI="https://get.helm.sh"
  HELM_VERSION="v3.5.0"

  echo "Fetching helm version $HELM_VERSION."
  run_with_sudo mkdir -p "${SNAP_DATA}/tmp/helm"
  # Download and unpack inside a subshell so the cd does not leak.
  (cd "${SNAP_DATA}/tmp/helm"
  run_with_sudo "${SNAP}/usr/bin/curl" --cacert $CA_CERT -L $SOURCE_URI/helm-$HELM_VERSION-linux-$(arch).tar.gz -o "$SNAP_DATA/tmp/helm/helm.tar.gz"
  run_with_sudo gzip -f -d "$SNAP_DATA/tmp/helm/helm.tar.gz"
  run_with_sudo tar -xf "$SNAP_DATA/tmp/helm/helm.tar")

  run_with_sudo mkdir -p "$SNAP_DATA/bin/"
  run_with_sudo mv "$SNAP_DATA/tmp/helm/linux-$(arch)/helm" "$SNAP_DATA/bin/helm3"
  # NOTE(review): chmod +x on the directory itself looks unintended
  # (the helm3 binary gets its own chmod just below) — confirm.
  run_with_sudo chmod +x "$SNAP_DATA/bin/"
  run_with_sudo chmod +x "$SNAP_DATA/bin/helm3"

  run_with_sudo rm -rf "$SNAP_DATA/tmp/helm"
fi

echo "Helm 3 is enabled"
|
### Head: main #################################################################
#
# Entry point: create the xfce4 config directories, then copy the
# tracked configuration files into place, with banner output.
xfce4_conf_set () {
    echo
    echo "### Head: xfce4_conf_set #######################################################"
    echo "#"

    xfce4_conf_set_dir_create
    xfce4_conf_set_config

    echo "#"
    echo "### Tail: xfce4_conf_set #######################################################"
    echo
}
#
### Tail: main #################################################################
### Head: xfce4 ################################################################
#
# Create the xfce4 per-channel config and panel directories under
# $HOME/.config (parents included via -p), echoing each command.
xfce4_conf_set_dir_create () {
    #echo "mkdir -p $HOME/.config/xfce4"
    #mkdir -p "$HOME/.config/xfce4"

    echo "mkdir -p $HOME/.config/xfce4/xfconf/xfce-perchannel-xml"
    mkdir -p "$HOME/.config/xfce4/xfconf/xfce-perchannel-xml"

    echo "mkdir -p $HOME/.config/xfce4/panel"
    mkdir -p "$HOME/.config/xfce4/panel"
}
# Copy the tracked xfce4 configuration files into $HOME/.config/xfce4,
# echoing each copy before performing it.  The per-file repetition of
# the original is replaced by a loop over the per-channel XML files;
# the echoed output and the copies performed are unchanged.
xfce4_conf_set_config () {
    local src_xml="$THE_SUB_ON_DIR_PATH/xfce4/conf/set/xfconf/xfce-perchannel-xml"
    local dst_xml="$HOME/.config/xfce4/xfconf/xfce-perchannel-xml"
    local f

    for f in \
        xfce4-keyboard-shortcuts.xml \
        xfwm4.xml \
        xsettings.xml \
        xfce4-panel.xml \
        xfce4-desktop.xml \
        keyboards.xml
    do
        echo "cp $src_xml/$f $dst_xml/$f"
        cp "$src_xml/$f" "$dst_xml/$f"
    done

    # The whiskermenu panel config lives under a different destination.
    echo "cp $THE_SUB_ON_DIR_PATH/xfce4/conf/set/panel/whiskermenu-1.rc $HOME/.config/xfce4/panel/whiskermenu-1.rc"
    cp "$THE_SUB_ON_DIR_PATH/xfce4/conf/set/panel/whiskermenu-1.rc" "$HOME/.config/xfce4/panel/whiskermenu-1.rc"
}
#
### Tail: xfce4 ################################################################
|
#!/bin/sh
# Build a "FROM scratch" docker image based on some
# statically compiled binaries. Usage example:
#
#  $ scripts/build-golang-docker-image.sh \
#       --project "dvol" \
#       --source-files "dvol.go cmd/dvol-docker-plugin/dvol-docker-plugin.go" \
#       --binaries "dvol dvol-docker-plugin" \
#       --tag "golang"
#
# The original read $2/$4/$6/$8 positionally and ignored the option
# names entirely; this parses the named options instead, so the old
# fixed-order invocation still works and any order is now accepted.
set -xe

while [ $# -gt 0 ]; do
    case "$1" in
        --project)      PROJECT=$2; shift 2 ;;
        --source-files) SOURCE_FILES=$2; shift 2 ;;
        --binaries)     BINARIES=$2; shift 2 ;;
        --tag)          TAG=$2; shift 2 ;;
        *) echo "unknown option: $1" >&2; exit 1 ;;
    esac
done

# Statically compile the binaries
for SOURCE_FILE in $SOURCE_FILES; do
    # XXX Need to make sure this is go 1.5 to avoid bug in
    # older versions of docker.
    CGO_ENABLED=0 GOOS=linux godep go build -a -ldflags '-s' ${SOURCE_FILE}
done

mkdir -p ${PROJECT}-build
# Copy them into the build directory
for BINARY in $BINARIES; do
    cp ${BINARY} ${PROJECT}-build/
done
# Copy the dockerfile
cp Dockerfile.${PROJECT} ${PROJECT}-build/Dockerfile
# Build the docker image in a constrained context.
cd ${PROJECT}-build
docker build -t clusterhq/${PROJECT}:${TAG} .
cd ..
# Clean up
rm -rf ${PROJECT}-build/
|
<reponame>mp1otr1/Master-thesis-Data-Science-in-Action<gh_stars>0
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
#from spectral import
import numpy as np
from torch.utils.data import Dataset
from PIL import Image
import PIL.Image
class DatasetImages(Dataset):
    """Dataset that lazily loads ``.jpg`` photos from a directory.

    Parameters
    ----------
    path : pathlib.Path
        Folder containing all the images.
    transform : None or callable
        Optional transform applied to each yielded image.

    Attributes
    ----------
    all_paths : list
        Sorted paths of all ``.jpg`` files found in ``path``.
    """

    def __init__(self, path, transform=None):
        super().__init__()
        jpg_paths = (p for p in path.iterdir() if p.suffix == ".jpg")
        self.all_paths = sorted(jpg_paths)
        self.transform = transform

    def __len__(self):
        """Number of images in the dataset."""
        return len(self.all_paths)

    def __getitem__(self, ix):
        """Load image ``ix``, applying the transform when one is set."""
        img = Image.open(self.all_paths[ix])
        if self.transform is None:
            return img
        return self.transform(img)
class Self_Attn(nn.Module):
    """Self-attention layer (SAGAN style).

    Projects the input feature map to query/key/value with 1x1
    convolutions, computes an N x N attention map over the spatial
    positions (N = W*H), and returns ``gamma * attended + x`` together
    with the attention map.  ``gamma`` starts at zero, so the layer is
    initially an identity on its input.
    """

    def __init__(self, in_dim):
        super().__init__()
        half = in_dim // 2
        self.query_conv = nn.Conv2d(in_channels=in_dim, out_channels=half, kernel_size=1)
        self.key_conv = nn.Conv2d(in_channels=in_dim, out_channels=half, kernel_size=1)
        self.value_conv = nn.Conv2d(in_channels=in_dim, out_channels=in_dim, kernel_size=1)
        # Learnable mixing weight, initialised to zero.
        self.gamma = nn.Parameter(torch.zeros(1))
        self.softmax = nn.Softmax(dim=-1)

    def forward(self, x):
        """Return ``(out, attention)`` for input ``x`` of shape (B, C, W, H);
        ``attention`` has shape (B, N, N) with N = W*H."""
        batch, channels, width, height = x.size()
        n = width * height
        queries = self.query_conv(x).view(batch, -1, n).permute(0, 2, 1)  # B x N x C'
        keys = self.key_conv(x).view(batch, -1, n)                        # B x C' x N
        attention = self.softmax(torch.bmm(queries, keys))                # B x N x N
        values = self.value_conv(x).view(batch, -1, n)                    # B x C x N
        attended = torch.bmm(values, attention.permute(0, 2, 1))
        out = self.gamma * attended.view(batch, channels, width, height) + x
        return out, attention
class Generator(nn.Module):
    """DCGAN-style generator with optional self-attention.

    input:
        z: latent matrix with shape (batch_size, 100)
    output:
        out: generated image with shape (batch_size, 3, 32, 32)

    Bug fixes versus the original:
      * ``self.attn`` was first assigned the boolean flag and then
        overwritten by the Self_Attn module, so ``self.attn == True``
        was always False and the attention layer was silently skipped.
        The flag is now kept separately as ``self.use_attn``.
      * ``Self_Attn.forward`` returns ``(out, attention)``; the tuple is
        now unpacked before the final layer.
      * The docstring claimed a (batch, 1, 28, 28) output, but the
        layers below produce (batch, 3, 32, 32).
    """

    def __init__(self, batch_size=64, attn=True, image_size=32, z_dim=100, conv_dim=64):
        super().__init__()
        self.use_attn = attn
        # Layer 1: z_dim -> 512 channels, spatial size 1 -> 4
        self.l1 = nn.Sequential(
            nn.ConvTranspose2d(in_channels=z_dim, out_channels=conv_dim * 8, kernel_size=4),
            nn.BatchNorm2d(conv_dim * 8),
            nn.ReLU(),
        )
        # Layer 2: 512 -> 256 channels, size 4 -> 8
        self.l2 = nn.Sequential(
            nn.ConvTranspose2d(in_channels=conv_dim * 8, out_channels=conv_dim * 4,
                               kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(conv_dim * 4),
            nn.ReLU(),
        )
        # Layer 3: 256 -> 128 channels, size 8 -> 16
        self.l3 = nn.Sequential(
            nn.ConvTranspose2d(in_channels=conv_dim * 4, out_channels=conv_dim * 2,
                               kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(conv_dim * 2),
            nn.ReLU(),
        )
        # Attention layer (applied only when use_attn is set).
        self.attn = Self_Attn(conv_dim * 2)
        # Final layer: 128 -> 3 channels, size 16 -> 32
        self.last = nn.Sequential(
            nn.ConvTranspose2d(conv_dim * 2, 3, 4, 2, 1),
            nn.Tanh(),
        )

    def forward(self, z):
        # Reshape the latent vector into a (B, z_dim, 1, 1) "image".
        z = z.view(z.size(0), z.size(1), 1, 1)
        out = self.l1(z)
        out = self.l2(out)
        out = self.l3(out)
        if self.use_attn:
            out, _ = self.attn(out)
        return self.last(out)
class Discriminator(nn.Module):
    """DCGAN-style discriminator with optional self-attention.

    input:
        x: one batch of images with shape (batch_size, 3, 32, 32)
    output:
        out.squeeze(): per-sample prediction scores

    Bug fixes versus the original: the boolean ``attn`` flag was stored
    in ``self.attn`` and then overwritten by the Self_Attn module, so
    ``self.attn == True`` was always False and the attention layer was
    silently skipped; ``Self_Attn.forward`` also returns a tuple, which
    is now unpacked.
    """

    def __init__(self, batch_size=64, attn=True):
        super().__init__()
        conv_dim = 64
        self.use_attn = attn
        # 3 -> 64 channels, size 32 -> 16
        self.l1 = nn.Sequential(
            nn.Conv2d(3, conv_dim, 4, 2, 1),
            nn.LeakyReLU(0.1),
        )
        curr_dim = conv_dim
        # 64 -> 128 channels, size 16 -> 8
        self.l2 = nn.Sequential(
            nn.Conv2d(curr_dim, curr_dim * 2, 4, 2, 1),
            nn.LeakyReLU(0.1),
        )
        curr_dim = curr_dim * 2
        # 128 -> 256 channels, size 8 -> 4
        self.l3 = nn.Sequential(
            nn.Conv2d(curr_dim, curr_dim * 2, 4, 2, 1),
            nn.LeakyReLU(0.1),
        )
        curr_dim = curr_dim * 2
        # Attention layer (applied only when use_attn is set).
        self.attn = Self_Attn(curr_dim)
        # Final score map.
        self.last = nn.Sequential(nn.Conv2d(curr_dim, 1, 4, 2, 1))

    def forward(self, x):
        out = self.l1(x)
        out = self.l2(out)
        out = self.l3(out)
        if self.use_attn:
            out, _ = self.attn(out)
        return self.last(out).squeeze()
def init_weights_(module):
    """Initialize weights in place by sampling from a normal distribution.

    Conv / deconv weights ~ N(0, 0.02); BatchNorm2d weights ~ N(1, 0.02)
    with biases zeroed.  Modules of any other type are left untouched.

    Parameters
    ----------
    module : nn.Module
        Module with trainable weights.
    """
    kind = type(module).__name__
    if kind in ("Conv2d", "ConvTranspose2d"):
        nn.init.normal_(module.weight.data, 0.0, 0.02)
    elif kind == "BatchNorm2d":
        nn.init.normal_(module.weight.data, 1.0, 0.02)
        nn.init.constant_(module.bias.data, 0.0)
|
<filename>Helpers/Add/main.go
// Add appends the provided items to the statement
package main
import (
"fmt"
. "github.com/dave/jennifer/jen"
)
// Demonstrates jennifer's Add: splicing a pre-built token (here the "*"
// operator) into a statement, and passing several items in one call.
func main() {
	ptr := Op("*")
	//c := Id("a").Op("=").Add(Op("*")).Id("b")
	c := Id("a").Op("=").Add(ptr).Id("b")
	fmt.Printf("%#v\n", c)

	// Add accepts multiple items at once.
	a := Id("a")
	i := Int()
	c = Var().Add(a, i)
	fmt.Printf("%#v\n", c)
}
const fs = require('fs');
const path = require('path');
const projectDist = path.join(__dirname, 'project-dist');
main();
// Build project-dist: clean environment, copy assets, bundle styles,
// then render the HTML template — strictly in that order.
async function main() {
  const steps = [createEnv, copyAssets, mergeStyles, fillTemplate];
  for (const step of steps) {
    await step();
  }
}
// Remove any previous build output and recreate an empty project-dist dir.
async function createEnv() {
  await fs.promises.rm(projectDist, {recursive: true, force: true});
  return fs.promises.mkdir(projectDist);
}
// Recursively copy the source assets tree into project-dist/assets.
// Bug fix: the original fired the mkdir/readdir/copy promises without
// awaiting them, so copyAssets() resolved before any file was copied and
// later build steps could observe a half-copied tree.
async function copyAssets() {
  const projectAssets = path.join(projectDist, 'assets');
  const srcAssets = path.join(__dirname, 'assets');
  await copyDir('', srcAssets, projectAssets);

  // relPath is the path of the current subtree relative to both roots.
  async function copyDir(relPath, srcPath, dstPath) {
    const fullSrcPath = path.join(srcPath, relPath);
    const fullDstPath = path.join(dstPath, relPath);
    await fs.promises.mkdir(fullDstPath);
    const fileList = await fs.promises.readdir(fullSrcPath, {withFileTypes: true});
    for (const file of fileList) {
      if (file.isFile()) {
        const srcFilePath = path.join(fullSrcPath, file.name);
        const dstFilePath = path.join(fullDstPath, file.name);
        // Keep the original best-effort behavior: log and continue on error.
        await fs.promises.copyFile(srcFilePath, dstFilePath).catch(err => console.log(err));
      } else if (file.isDirectory()) {
        await copyDir(path.join(relPath, file.name), srcPath, dstPath);
      }
    }
  }
}
// Concatenate all .css files from ./styles into project-dist/style.css.
// Bug fix: the original never awaited the readdir/read promise chain, so
// mergeStyles() could resolve before the bundle was written, and the write
// stream was never ended (file not reliably flushed/closed).
async function mergeStyles() {
  const stylesDir = path.join(__dirname, 'styles');
  const bundlePath = path.join(projectDist, 'style.css');
  const entries = await fs.promises.readdir(stylesDir, {withFileTypes: true});
  const cssPaths = entries
    .filter(file => file.isFile() && (path.extname(file.name) === '.css'))
    .map(file => path.join(stylesDir, file.name));
  const chunks = await Promise.all(cssPaths.map(filename => readStyles(filename)));
  const styles = chunks.flat().join('\n');
  const ws = fs.createWriteStream(bundlePath);
  ws.write(styles);
  ws.end(); // close the stream so the data is flushed to disk

  // Read one stylesheet fully, resolving with its data chunks.
  function readStyles(filename) {
    return new Promise(resolver => {
      const rs = fs.createReadStream(filename);
      const styles = [];
      rs.on('data', data => styles.push(data.toString()));
      rs.on('end', () => resolver(styles));
    });
  }
}
// Render template.html by replacing {{component}} placeholders with the
// matching components/<name>.html files, writing project-dist/index.html.
async function fillTemplate() {
  const templatePath = path.join(__dirname, 'template.html');
  const componentsPath = path.join(__dirname, 'components');
  const html = await loadTemplate(templatePath);
  const renderedHtml = await renderTemplate(html);
  await makeIndex(renderedHtml);

  async function loadTemplate(filename){
    return loadFile(filename);
  }

  // Component names in the template may carry surrounding whitespace.
  async function loadComponent(name){
    name = name.trim();
    return loadFile(path.join(componentsPath, `${name}.html`));
  }

  // Read a whole file into a single string via a read stream.
  async function loadFile(filename){
    return new Promise(resolver => {
      const rs = fs.createReadStream(filename);
      const content = [];
      rs.on('data', data => content.push(data.toString()));
      rs.on('end', () => {
        resolver(content.join(''));
      });
    });
  }

  // Replace the first {{name}} placeholder on each line with its component.
  async function renderTemplate(html){
    const result = [];
    html = html.split('\n');
    const pattern = /({{(?<component>[a-zA-Z -]+)}})/;
    for (let line of html){
      if (line.match(pattern)){
        const componentName = line.match(pattern).groups.component;
        const component = await loadComponent(componentName);
        result.push(line.replace(pattern, component));
      }
      else {
        result.push(line);
      }
    }
    return result.join('\n');
  }

  // Bug fix: the original waited for 'finish' but never called ws.end(),
  // so 'finish' never fired and the returned promise hung forever.
  async function makeIndex(html){
    return new Promise(resolve => {
      const indexPath = path.join(projectDist, 'index.html');
      const ws = fs.createWriteStream(indexPath);
      ws.on('finish', () => resolve());
      ws.end(html);
    });
  }
}
/*
* Copyright 2014 Groupon.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arpnetworking.metrics.common.tailer;
/**
 * Based on the Apache {@link TailerListener} but uses a {@link Tailer}
 * interface instead of a class for improved extensibility.
 *
 * @author <NAME> (brandon dot arp at inscopemetrics dot io)
 */
public interface TailerListener {
    /**
     * The {@link Tailer} instance invokes this method during construction
     * giving the listening class a method of stopping the {@link Tailer}.
     *
     * @param tailer the {@link Tailer} instance.
     */
    void initialize(Tailer tailer);

    /**
     * This method is called if the tailed file is not found.
     * <p>
     * <b>Note:</b> this is called from the {@link Tailer} thread.
     */
    void fileNotFound();

    /**
     * Called if a file rotation is detected.
     *
     * This method is called before the file is reopened, and fileNotFound may
     * be called if the new file has not been created yet.
     * <p>
     * <b>Note:</b> this is called from the {@link Tailer} thread.
     */
    void fileRotated();

    /**
     * Called if a file is successfully opened.
     *
     * <p>
     * <b>Note:</b> this is called from the {@link Tailer} thread.
     */
    void fileOpened();

    /**
     * Handles a line from a {@link Tailer}. The line is delivered as raw
     * bytes; any character decoding is left to the implementation.
     * <p>
     * <b>Note:</b> this is called from the {@link Tailer} thread.
     * @param line the raw line.
     */
    void handle(byte[] line);

    /**
     * Handles a {@link Throwable} encountered during tailing.
     * <p>
     * <b>Note:</b> this is called from the {@link Tailer} thread.
     * @param throwable the {@link Throwable}.
     */
    void handle(Throwable throwable);
}
|
from heart_server_helpers import existing_beats
import pytest
@pytest.mark.parametrize("pat_id, expected", [
    (-1, True),
    (-2, False),
])
def test_existing_beats(pat_id, expected):
    """existing_beats reports whether stored heartbeats exist for a patient."""
    result = existing_beats(pat_id)
    assert result == expected
|
package io.opensphere.core.util.swing;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.BadLocationException;
import io.opensphere.core.util.lang.ImpossibleException;
/**
 * Convenience class to make DocumentListeners look cleaner: all three
 * update callbacks are funneled into a single {@link #updateAction}.
 */
public abstract class DocumentListenerAdapter implements DocumentListener
{
    /**
     * Get the full text of the document that produced an event.
     *
     * @param e The event.
     * @return The complete document text.
     */
    public static String getText(DocumentEvent e)
    {
        try
        {
            final int length = e.getDocument().getLength();
            return e.getDocument().getText(0, length);
        }
        catch (BadLocationException ex)
        {
            // Cannot happen: the range [0, length) is always valid.
            throw new ImpossibleException(ex);
        }
    }

    @Override
    public void changedUpdate(DocumentEvent e)
    {
        updateAction(e);
    }

    @Override
    public void insertUpdate(DocumentEvent e)
    {
        updateAction(e);
    }

    @Override
    public void removeUpdate(DocumentEvent e)
    {
        updateAction(e);
    }

    /**
     * A catch-all update action if the user wants all updates for a
     * DocumentListener to do the same thing.
     *
     * @param e the DocumentEvent to act upon.
     */
    protected abstract void updateAction(DocumentEvent e);
}
|
def highest_occurring_character(string):
    """Return the most frequent character in *string*.

    Ties are broken by first occurrence (the first character to reach the
    maximum count wins), matching the original dict-based implementation.
    Returns None for an empty string.
    """
    from collections import Counter

    counts = Counter(string)
    if not counts:
        # Empty input: preserve the original behavior of returning None.
        return None
    # Counter preserves first-occurrence insertion order, and max() returns
    # the first key attaining the maximum, so tie-breaking is unchanged.
    return max(counts, key=counts.get)
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const immutable = require('object-path-immutable');
const yaml = require('js-yaml');
import { DirectoryData } from '../../frontend/src/types';
const Ajv = require('ajv');
const readdir = promisify(fs.readdir);
const readFile = promisify(fs.readFile);
// Parse one YAML file into { <basename>: <parsed content> }.
// On any failure the error is logged and {} is returned instead.
async function yamlFileToJson(filePath: string) {
  try {
    const { name } = path.parse(filePath);
    // Note: the safeLoad function is delightfully robust and will correctly
    // parse JSON, and plain text files into the resulting structure.
    // This may not be desired, if so a possible work around would be to filter
    // by file extension and only process .yml or .yaml files
    const parsed = await yaml.safeLoad(await readFile(filePath));
    return { [name]: parsed };
  } catch (ex) {
    console.error(`Unable to process ${filePath}`, ex);
    return {};
  }
}
// Recursively fold a directory tree into a nested object keyed by directory
// names, with each YAML file parsed into its JSON equivalent.
async function reduceDirectory(directory: string) {
  const dirEntries = await readdir(directory, { withFileTypes: true });
  // The namespace depends only on `directory`, so compute it once instead of
  // once per entry (hoisted loop-invariant; behavior unchanged).
  const namespace = directory.split('/').pop();
  let retVal = {};
  for (const entry of dirEntries) {
    const filePath = `${directory}/${entry.name}`;
    retVal = immutable.merge(
      retVal,
      namespace,
      entry.isDirectory()
        ? await reduceDirectory(filePath)
        : await yamlFileToJson(filePath),
    );
  }
  return retVal;
}
// Validate a JSON string against a JSON schema using Ajv.
// Logs the validator errors and returns false when validation fails.
async function validate(schema: object, json: string): Promise<boolean> {
  // Idiom fix: `const` instead of function-scoped `var`.
  const ajv = new Ajv();
  const obj = JSON.parse(json);
  const valid = ajv.validate(schema, obj);
  if (!valid) {
    console.error(ajv.errors);
    return false;
  }
  return true;
}
// Cross-check that every tool referenced by a module checklist exists in the
// data's `tooling` section; logs all unknown tool keys when the check fails.
function validateToolMapping(json: string): boolean {
  const obj: any = JSON.parse(json);
  const { tooling, modules } = obj.data;
  // Flatten tooling (category -> { toolName: ... }) into a flat list of
  // tool names across all categories.
  const flattenedTools: any = Object.values(tooling).reduce(
    (list: any, current: any) => {
      return list.concat(...Object.keys(current));
    },
    [] as string[],
  );
  const nonMatches: string[] = [];
  const result = Object.values(modules).every(category => {
    return Object.values(category).every(mod => {
      if (!mod.checkLists) {
        return true;
      }
      return Object.values(mod.checkLists).every((checklist: any) => {
        return checklist.every(ch => {
          if (ch.tools) {
            // Map (not every) so that ALL unknown tools are collected for
            // the error report before the overall result is computed.
            const results = ch.tools.map((tool: any) => {
              const result = flattenedTools.includes(tool);
              if (!result) {
                nonMatches.push(tool);
              }
              return result;
            });
            return results.every(val => val);
          }
          // If no tools are listed then just return true
          return true;
        });
      });
    });
  });
  if (!result) {
    console.error(
      `TOOLING ERROR: The following keys could not be found as tools: \n\n${nonMatches.join(
        '\n',
      )}\n\n`,
    );
  }
  return result;
}
export async function combineData(
schemaPath: string,
dataDirectory: string,
): Promise<DirectoryData> {
const data = JSON.stringify(await reduceDirectory(dataDirectory));
const schema = JSON.parse(await readFile(schemaPath));
const validSchema = await validate(schema, data);
if (!validSchema) {
throw new Error('invalid schema');
}
const validToolMapping = validateToolMapping(data);
if (!validToolMapping) {
throw new Error('invalid tooling');
}
return JSON.parse(data);
}
// CLI entry point: when this module is executed directly, combine and
// validate the given data directory against the given schema.
if (process.argv[1] === __filename) {
  (async () => {
    try {
      const [schemaPath, dataDirectory] = process.argv.slice(2);
      if (!schemaPath || !dataDirectory) {
        console.error(
          `Usage: ${process.argv[0]} <schema path> <data directory>`,
        );
        process.exit(1);
      }
      console.log(`Validating ${dataDirectory} against ${schemaPath}`);
      await combineData(schemaPath, dataDirectory);
    } catch (ex) {
      // Any failure (I/O, schema, tooling) exits non-zero for CI use.
      console.error('BAD CODE', ex);
      process.exit(1);
    }
  })();
}
|
// Print every odd number from 1 up to (and including) n.
void printOddNumbers(int n)
{
    // i starts at 1 and advances by 2, so it only ever visits odd numbers;
    // the original's inner `if (i % 2 == 1)` check was therefore redundant
    // and has been removed. Output is unchanged.
    for (int i = 1; i <= n; i += 2)
    {
        Console.WriteLine(i);
    }
}
// Basic user identity shown alongside an activity request.
interface IUser {
  firstname: string;
  lastname: string;
  // Optional URL to the user's profile picture/page.
  profile_url?: string;
}

// The user who submitted the activity request.
interface IRequester {
  id: string;
  user: IUser;
}

// The activity being requested.
interface IActivity {
  id: string;
  name: string;
  experience: number;
  // Optional free-form rules text; exact semantics not visible here.
  dmRules?: string;
}

// An activity request record.
// NOTE(review): dates are plain strings — presumably ISO timestamps; verify
// against the producing API.
export default interface IActivityRequest {
  id: string;
  information: string;
  requestDate: string;
  completionDate: string;
  requester: IRequester;
  activity: IActivity;
}
|
#!/usr/bin/env bash
##############################################################################
# Auto complete for magento console
#
# Install a package bamarni/symfony-console-autocomplete for your user:
# $ composer global require bamarni/symfony-console-autocomplete
# And bash complete:
# $ yum install -y bash-completion
#
# References:
# https://github.com/bamarni/symfony-console-autocomplete
# https://www.cyberciti.biz/faq/fedora-redhat-scientific-linuxenable-bash-completion/
##############################################################################
# Strict mode: fail on pipeline errors, on any command error, and on use of
# unset variables.
set -o pipefail
set -o errexit
set -o nounset
#set -o xtrace

# Set magic variables for current file & dir
__dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
__file="${__dir}/$(basename "${BASH_SOURCE[0]}")"
readonly __dir __file
# Print usage help for this installer.
# Bug fix: was defined as "how_help" (typo) while both call sites invoke
# "show_help", which would abort the script under `set -o errexit`.
show_help () {
    cat << EOF
Install Magento 2 binary file auto-completion.

OPTIONS
    -b FILE, --binary FILE
        Set path to binary file to put it into /usr/local/bin directory
EOF
}
# Parse the CLI action, options and remaining params into globals:
#   action, magento_bin, global_binary_dir, rest_params
read_params () {
    # First non-option word is the action.
    action=''
    if [[ -n "${1:-}" ]] && [[ "${1::1}" != '-' ]]; then
        action="$1"
        shift
    fi

    # Validate and normalize options.
    # Bug fix: the getopt spec previously declared `-o vhn` and
    # `version,help,no-restart`, which did not match the -b/--binary,
    # -d/--global-binary-dir and -h/--help options handled below, so any
    # documented option was rejected by getopt.
    OPTS=$(getopt -o b:d:h -l binary:,global-binary-dir:,help -- "$@")
    eval set -- "${OPTS}"

    restart_on=1
    while true; do
        case "${1}" in
            -b|--binary)
                magento_bin=${2}
                shift 2
                ;;
            -d|--global-binary-dir)
                global_binary_dir=${2}
                shift 2
                ;;
            -h|--help)
                show_help
                exit 0
                ;;
            -\?)
                show_help
                exit 1
                ;;
            --)
                shift
                break
                ;;
            *)
                echo "${0}: unparseable option ${1}."
                exit 3
                ;;
        esac
    done

    rest_params=$@
}
# Echo "sudo" when sudo is available (empty when running as root without it);
# aborts when neither root privileges nor sudo are usable.
sudo_cmd() {
    local docroot sud_cmd
    # use sudo if possible
    sud_cmd=$(sudo -h > /dev/null 2>&1 && echo sudo || true)
    docroot=${DOCROOT_DIR:-/var/www/html}
    # use only root or sudo
    if [ $(whoami) != 'root' ] && [ -z "${sud_cmd}" ]; then
        echo 'error: It cannot be done without root permissions. Seem you have no sudo. Login with root then.' > /dev/stderr
        exit 2
    fi
    # Bug fix: the computed command was never emitted, so callers capturing
    # $(sudo_cmd) always received an empty string and ran without sudo.
    echo "${sud_cmd}"
}
# Report (via exit status) whether the bash-completion package is installed.
# Relies on $package_manager and $sud from the caller's (dynamic) scope.
bash_completion_installed? () {
    case "${package_manager}" in
        yum)     ${sud} ${package_manager} list installed bash-completion ;;
        apt-get) ${sud} dpkg -s bash-completion ;;
    esac
}
# Install the bash-completion package via yum or apt-get when it is missing.
install_bash_completion() {
    local package_manager=yum
    # Fall back to apt-get when yum is unavailable.
    if ! ${package_manager} --help > /dev/null 2>&1; then
        package_manager=apt-get
    fi
    if ! ${package_manager} --help > /dev/null 2>&1; then
        echo 'error: Cannot find suitable package package manager. YUM and APT-GET does not work.' > /dev/stderr
        exit 2
    fi
    if ! bash_completion_installed? > /dev/null 2>&1; then
        ${sud} ${package_manager} install -y bash-completion
    fi
}
# Install bamarni/symfony-console-autocomplete globally and link its binary.
install_symfony_autocomplete() {
    local sources_dir='/usr/share/symfony-autocomplete'
    local binary="${sources_dir}/vendor/bin/symfony-autocomplete" \
        link='/usr/bin/symfony-autocomplete' \
        composer_bin=${COMPOSER_BIN:-composer}

    # Bug fix: test with -e — the link target is a file/symlink, not a
    # directory, so the original -d check could never short-circuit.
    if type symfony-autocomplete > /dev/null 2> /dev/null || [ -e ${link} ]; then
        # binary already declared
        return
    fi

    mkdir -p "${sources_dir}"
    # Bug fix: the original passed the whole command line (starting with the
    # word "composer") as ONE quoted argument to composer, which cannot work.
    ${composer_bin} --working-dir="${sources_dir}" require bamarni/symfony-console-autocomplete
    ln -s ${binary} ${link}

    # set readable and accessible
    chmod +rX ${sources_dir}

    echo 'Installed binary:'
    ls -l ${link} | cut -d' ' -f9-
}
# Symlink the Magento CLI binary (passed via -b/--binary) into a bin dir.
install_magento_bin() {
    if type magento > /dev/null 2> /dev/null || [[ -z "${magento_bin:-}" ]]; then
        # binary already declared
        return
    fi

    if [[ -f "${magento_bin}" ]]; then
        if [[ -d /usr/local/bin ]]; then
            # Bug fix: was '/usr/bin/local/magento', which does not match the
            # '/usr/local/bin' directory just tested for.
            link='/usr/local/bin/magento'
        elif [[ -d /usr/bin ]]; then
            link='/usr/bin/magento'
        elif [[ -d /bin ]]; then
            link='/bin/magento'
        elif [[ -d ~/bin ]]; then
            link='~/bin/magento'
        else
            echo "Cannot define binary directory. There is no path like: /usr/local/bin, /usr/bin, /bin, or ~/bin." > /dev/stderr
            return 2
        fi
    else
        echo "Magento 2 app-binary file ${magento_bin} not found." > /dev/stderr
        return 3
    fi

    if [[ -L ${link} ]]; then
        chmod +x ${link}
        # binary already declared
        return
    fi

    # Bug fix: ${binary} was undefined in this function (and would abort
    # under `set -o nounset`); link the file that was passed via --binary.
    ln -s "${magento_bin}" "${link}"

    echo 'Installed binary:'
    ls -l ${link} | cut -d' ' -f9-
}
# Copy the completion script into /etc/bash_completion.d (idempotent: a
# previously installed file is left untouched).
install_complete_script() {
    if [ -f /etc/bash_completion.d/console ]; then
        # Already installed; nothing to do.
        return
    fi
    # Copy and load file for autocompletion
    ${sud} cp ${__dir}/autocomplete.sh /etc/bash_completion.d/console
    ${sud} chmod +r /etc/bash_completion.d/console
    echo 'Set auto-complete scripts to file: /etc/bash_completion.d/console'
}
# Entry point: install completion support and the magento binary link.
install_autocomplete () {
    local sud package_manager
    # NOTE(review): sudo_cmd as defined above never echoes the computed
    # command, so ${sud} is empty here and commands run without sudo —
    # verify behavior on a non-root host.
    sud="$(sudo_cmd)"
    install_bash_completion
    # not required installation
    # it's need only for generation "autocomplete" script
    #install_symfony_autocomplete
    install_magento_bin
    install_complete_script
}
install_autocomplete
|
<filename>src/app/dashboard/dashboard-calendar-settings/dashboard-calendar-settings.component.ts<gh_stars>0
import {Component, EventEmitter, OnInit, Output} from '@angular/core'
@Component({
    selector: 'lwm-dashboard-calendar-settings',
    templateUrl: './dashboard-calendar-settings.component.html',
    styleUrls: ['./dashboard-calendar-settings.component.scss']
})
export class DashboardCalendarSettingsComponent implements OnInit {

    // Whether only the user's own calendar entries are shown.
    ownEntriesOnly: boolean
    // Label displayed on the toggle control.
    toggleLabel: string

    @Output() ownEntriesOnlyChange = new EventEmitter<boolean>()

    constructor() {
        this.ownEntriesOnly = true // defaults to true
    }

    ngOnInit() {
        // Initialize the label without notifying listeners.
        this.updateToggleLabel(this.ownEntriesOnly, false)
    }

    updateToggleLabel = (checked: boolean, emit: boolean) => {
        this.toggleLabel = checked
            ? 'Nur meine Termine anzeigen'
            : 'Alle Termine anzeigen'
        if (emit) {
            this.ownEntriesOnlyChange.emit(checked)
        }
    }
}
|
<gh_stars>0
"""
Wallet utility functions
"""
import base58
import base64
def b64_to_bytes(val: str, urlsafe=False) -> bytes:
    """Decode a base 64 string to bytes, optionally using the URL-safe alphabet."""
    decoder = base64.urlsafe_b64decode if urlsafe else base64.b64decode
    return decoder(val)
def bytes_to_b64(val: bytes, urlsafe=False) -> str:
    """Encode bytes as a base 64 ASCII string, optionally URL-safe."""
    encoder = base64.urlsafe_b64encode if urlsafe else base64.b64encode
    return encoder(val).decode("ascii")
def b58_to_bytes(val: str) -> bytes:
    """Decode a base 58 string into raw bytes."""
    decoded = base58.b58decode(val)
    return decoded
def bytes_to_b58(val: bytes) -> str:
    """Encode a byte string as base 58 ASCII text."""
    encoded = base58.b58encode(val)
    return encoded.decode("ascii")
|
#!/bin/bash

: "----- install package dependencies for ruby"

# FIXME mysql version
# NOTE(review): ${PRVENV_CMD_PKG_INS} is expected to expand to the platform's
# package-install command (e.g. "apt-get install -y") — defined elsewhere.
${PRVENV_CMD_PKG_INS} libffi-dev zlib1g-dev libssl-dev libreadline-dev mysql-client libmysqld-dev
|
<reponame>dailave/oqs
/*
* $Id$
*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.opoo.oqs.transaction;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Central helper that manages resources and transaction synchronizations per thread.
 * To be used by resource management code but not by typical application code.
 *
 * <p>Supports one resource per key without overwriting, i.e. a resource needs
 * to be removed before a new one can be set for the same key.
 * Supports a list of transaction synchronizations if synchronization is active.
 *
 * <p>Resource management code should check for thread-bound resources, e.g. JDBC
 * Connections or Hibernate Sessions, via <code>getResource</code>. Such code is
 * normally not supposed to bind resources to threads, as this is the responsibility
 * of transaction managers. A further option is to lazily bind on first use if
 * transaction synchronization is active, for performing transactions that span
 * an arbitrary number of resources.
 *
 * @author <NAME>
 * @author <NAME>(<EMAIL>)
 * @version 1.0
 */
public abstract class TransactionSynchronizationManager {
    private static final Log logger = LogFactory.getLog(
            TransactionSynchronizationManager.class);

    // Per-thread Map of resource key -> resource object. Created lazily on
    // the first bindResource call for a given thread.
    private static final ThreadLocal resources = new ThreadLocal();

    public TransactionSynchronizationManager() {
        super();
    }

    //-------------------------------------------------------------------------
    // Management of transaction-associated resource handles
    //-------------------------------------------------------------------------

    /**
     * Return all resources that are bound to the current thread.
     * <p>Mainly for debugging purposes. Resource managers should always invoke
     * hasResource for a specific resource key that they are interested in.
     * <p>Note: the returned Map is an unmodifiable <i>view</i> of the live
     * thread-bound map (not a copy) when one exists.
     * @return Map with resource keys and resource objects,
     * or empty Map if currently none bound
     * @see #hasResource
     */
    public static Map getResourceMap() {
        Map map = (Map) resources.get();
        if (map == null) {
            map = new HashMap();
        }
        return Collections.unmodifiableMap(map);
    }

    /**
     * Check if there is a resource for the given key bound to the current thread.
     * @param key key to check
     * @return if there is a value bound to the current thread
     */
    public static boolean hasResource(Object key) {
        Map map = (Map) resources.get();
        return (map != null && map.containsKey(key));
    }

    /**
     * Retrieve a resource for the given key that is bound to the current thread.
     * @param key key to check
     * @return a value bound to the current thread, or null if none
     */
    public static Object getResource(Object key) {
        Map map = (Map) resources.get();
        if (map == null) {
            return null;
        }
        Object value = map.get(key);
        if (value != null && logger.isDebugEnabled()) {
            logger.debug("Retrieved value [" + value + "] for key [" + key +
                         "] bound to thread [" +
                         Thread.currentThread().getName() + "]");
        }
        return value;
    }

    /**
     * Bind the given resource for the given key to the current thread.
     * @param key key to bind the value to
     * @param value value to bind
     * @throws IllegalStateException if there is already a value bound to the thread
     */
    public static void bindResource(Object key, Object value) throws
            IllegalStateException {
        Map map = (Map) resources.get();
        // set ThreadLocal Map if none found
        if (map == null) {
            map = new HashMap();
            resources.set(map);
        }
        // One resource per key: an existing binding must be removed first.
        if (map.containsKey(key)) {
            throw new IllegalStateException("Already value [" + map.get(key) +
                                            "] for key [" + key +
                                            "] bound to thread [" +
                                            Thread.currentThread().getName() +
                                            "]");
        }
        map.put(key, value);
        if (logger.isDebugEnabled()) {
            logger.debug("Bound value [" + value + "] for key [" + key +
                         "] to thread [" +
                         Thread.currentThread().getName() + "]");
        }
    }

    /**
     * Unbind a resource for the given key from the current thread.
     * @param key key to check
     * @return the previously bound value
     * @throws IllegalStateException if there is no value bound to the thread
     */
    public static Object unbindResource(Object key) throws
            IllegalStateException {
        Map map = (Map) resources.get();
        if (map == null || !map.containsKey(key)) {
            throw new IllegalStateException(
                    "No value for key [" + key + "] bound to thread [" +
                    Thread.currentThread().getName() + "]");
        }
        Object value = map.remove(key);
        // remove entire ThreadLocal if empty
        // NOTE(review): this sets the ThreadLocal value to null rather than
        // calling resources.remove() — the ThreadLocal entry itself remains.
        if (map.isEmpty()) {
            resources.set(null);
        }
        if (logger.isDebugEnabled()) {
            logger.debug("Removed value [" + value + "] for key [" + key +
                         "] from thread [" +
                         Thread.currentThread().getName() + "]");
        }
        return value;
    }
}
|
<html>
<head>
    <title>Chatbot</title>
    <script src="https://code.jquery.com/jquery-3.5.1.min.js"></script>
</head>
<body>
    <div class="conversation-container">
        <div class="conversation-title">Chatbot</div>
        <div id="message-container"></div>
        <input id="user-input" type="text" placeholder="Type your message here...">
    </div>
    <script>
        // Post user message when Enter (key code 13) is pressed
        $('#user-input').keypress(function (e) {
            if (e.which == 13){
                var userInput = $('#user-input').val()
                postMessage(userInput);
                $.ajax({
                    type: 'post',
                    // Bug fix: the URL was written as ‘chatbot.php` (a
                    // typographic quote plus a backtick), which is a
                    // JavaScript syntax error that broke the whole script.
                    url: 'chatbot.php',
                    data: {userMessage: userInput},
                    success: function (response) {
                        postMessage(response);
                    }
                });
            }
        });

        // Append a message bubble to the conversation and clear the input
        function postMessage(message) {
            $('#message-container').append('<div class="message">'+ message +'</div>');
            $('#user-input').val('');
        }
    </script>
</body>
</html>
#!/bin/bash
# The MIT License (MIT)
#
# Copyright (c) 2010 Technische Universitaet Berlin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Author: Tilman Rassy <rassy@math.tu-berlin.de>
# $Id: build.sh,v 1.16 2009/11/10 13:02:44 linges Exp $
# Build script for mmtex4japs
# Set fixed variables (constants):
readonly program_name=build.sh
readonly program_version='$Revision: 1.16 $'
readonly user_config_file=build.conf

# Source user config file (optional; may override the defaults below):
[ -e "$user_config_file" ] && source "$user_config_file"

# Init customizable variables:
prefix=${MM_BUILD_PREFIX:-/usr/local}
checkin_root=${MM_CHECKIN_ROOT:-$HOME/mumie/checkin}
mmtex_cmd=mmtex
mmtex_opts="-w"

# Process command line parameters:
# NOTE(review): the short options are comma-separated ("f,t,D,h,v"), so
# getopt also accepts ',' as an option character — verify this is intended.
params=`getopt \
  --longoptions prefix:,force,targets,ignore-deps,help,version,vars,release:,cvsroot: \
  --options f,t,D,h,v \
  -- \
  "$@"`
if [ $? -ne 0 ] ; then exit 1 ; fi
eval set -- "$params"
while true ; do
  case "$1" in
    --prefix) prefix="$2" ; shift 2 ;;
    --targets|-t) task=show_targets ; shift ;;
    --ignore-deps|-D) ignore_deps=ignore_deps ; shift ;;
    --help|-h) task=show_help ; shift ;;
    --version|-v) task=show_version ; shift ;;
    --vars) task=print_variables ; shift ;;
    --force|-f) force=force ; shift ;;
    --release) release="$2" ; shift 2 ;;
    --cvsroot) cvsroot="$2" ; shift 2 ;;
    --) shift ; break ;;
  esac
done
# Remaining words are the build targets; default to "all".
targets=${*:-'all'}

# Set the variables if not set already:
prefix=${prefix:-${MM_BUILD_PREFIX:-/usr/local}}
inc_lib_dir=$prefix/lib/mmtex/include
dcl_lib_dir=$prefix/lib/mmtex/dcl
etc_dir=$prefix/etc/mmplutil
version_file=VERSION
task=${task:-process_targets}

# Store the current directory:
base_dir=`pwd`

# section names of this content repository
section_names="samples"

# mount path constant (normally: content repository sections are mounted under <checkin root>/content/)
mount_path=

# Mmtex libraries to install/uninstall:
inc_install_files="
japs_core.mtx.pl
japs_metainfo.mtx.pl
japs_content.mtx.pl
japs_problem.mtx.pl
japs_summary.mtx.pl
japs_media.mtx.pl
japs_link.mtx.pl
japs_course.mtx.pl
"

# Mmtex document classes to install/uninstall:
dcl_install_files="
japs/element/algorithm.dcl.pl
japs/element/application.dcl.pl
japs/element/definition.dcl.pl
japs/element/lemma.dcl.pl
japs/element/motivation.dcl.pl
japs/element/theorem.dcl.pl
japs/problem/applet.dcl.pl
japs/problem/mchoice.dcl.pl
japs/problem/traditional.dcl.pl
japs/problem/program.dcl.pl
japs/subelement/deduction.dcl.pl
japs/subelement/example.dcl.pl
japs/subelement/history.dcl.pl
japs/subelement/motivation.dcl.pl
japs/subelement/proof.dcl.pl
japs/subelement/remark.dcl.pl
japs/subelement/table.dcl.pl
japs/subelement/test.dcl.pl
japs/subelement/visualization.dcl.pl
japs/summary.dcl.pl
japs/course.dcl.pl
japs/coursesection.dcl.pl
japs/worksheet/homework.dcl.pl
japs/worksheet/selftest.dcl.pl
japs/worksheet/training.dcl.pl
"
# --------------------------------------------------------------------------------
# Utility functions
# --------------------------------------------------------------------------------
# (Pasted from build_tools/lib/bash/build.inc, revision 1.2)
# Compares a target and a source file and prints "needs_build" to stdout if the
# target file needs to be (re)build; otherwise, prints the empty string.
# A build is needed when $force is set, the target is missing, or the source
# is newer than the target.
# Usage: needs_build SOURCE_FILE TARGET_FILE
function needs_build
{
    local src=$1
    local tgt=$2
    # Inverted (De Morgan) form of the original test: up to date only when
    # force is unset, the target exists, and the source is not newer.
    if [ -z "$force" ] && [ -e "$tgt" ] && \
       [ `stat -c %Y "$src"` -le `stat -c %Y "$tgt"` ]
    then
        echo ""
    else
        echo "needs_build"
    fi
}
# Compares last modification times and returns changed sources. If $force is
# set, all sources are returned regardless if they have changed or not.
# Usage: get_source_files SOURCE_SUFFIX TARGET_SUFFIX TARGET_DIR SOURCE_FILES
function get_source_files
{
    local src_suffix=$1
    local tgt_suffix=$2
    local tgt_dir=$3
    shift 3
    local src
    for src in "$@" ; do
        local tgt=${tgt_dir}${src%$src_suffix}${tgt_suffix}
        if [ "`needs_build $src $tgt`" ] ; then
            echo $src
        fi
    done
}
# Returns all target files for a given list of source files.
# Usage: get_target_files SOURCE_SUFFIX TARGET_SUFFIX TARGET_DIR SOURCE_FILES
function get_target_files
{
    local src_suffix=$1
    local tgt_suffix=$2
    local tgt_dir=$3
    shift 3
    local src
    for src in "$@" ; do
        echo ${tgt_dir}${src%$src_suffix}${tgt_suffix}
    done
}
# Aborts with an error message (message line followed by a blank line).
function error
{
    printf 'ERROR: %s\n\n' "$*"
    exit 1
}
# Checks the exit code of the last command, terminates with an error message
# if the exit code is not 0
function check_exit_code
{
    local code=$?
    if [ "$code" -ne 0 ] ; then
        error "Last command returned with code $code"
    fi
}
# Runs a command, checks the exit code, terminates with an error message if the exit
# code is not 0. Arguments are passed through verbatim ("$@" preserves quoting).
function run_cmd
{
    "$@"
    check_exit_code
}
# Recursively creates .dir files. Usage:
#
#   create_dot_dir_files PATH NAME
#
# PATH is the path relative to the checkin root of the directory where to start. NAME
# is the local name of that directory. It is assumed that the parent of that directory
# is the current working directory when the function is entered.
# Each .dir file contains (without trailing newline) the absolute checkin path
# of its directory. CVS directories and symlinks are skipped.
function create_dot_dir_files
{
    local path=$1
    local name=$2
    run_cmd cd $name
    echo "$program_name: changed into $name"
    echo "$program_name: creating .dir"
    # -n: no trailing newline in the .dir file
    echo -n $checkin_root/$path > .dir
    check_exit_code
    local subdirs=`ls $base_dir/checkin/$path`
    if [ "$subdirs" ] ; then
        local subdir
        for subdir in $subdirs ; do
            local dir=$base_dir/checkin/$path/$subdir
            # Recurse only into real directories, skipping CVS and symlinks.
            [ -d "$dir" ] && [ "$subdir" != 'CVS' ] && ! [ -h "$subdir" ] && \
                create_dot_dir_files $path/$subdir $subdir
        done
    fi
    run_cmd cd ..
    echo "$program_name: left $name"
}
# Copies files to a directory, preserves subdirectory struture
# Usage: cp_to_dir DIR FILE1 FILE2 ...
# Copies FILE1 FILE2 ... to DIR, with their relative paths. Subdirectories are created
# if necessary
function cp_to_dir
{
    local dir=$1
    shift
    local file
    for file in "$@" ; do
        # Strip a leading "./" from the file's directory component.
        local path=`dirname $file | sed s/^\\.\\\\///`
        if [ "$path" != '.' ] ; then
            # File lives in a subdirectory: recreate it under $dir.
            run_cmd mkdir -vp $dir/$path
            run_cmd cp -v $file $dir/$path
        else
            run_cmd mkdir -vp $dir
            run_cmd cp -v $file $dir
        fi
    done
}
# Quotes the character '/' with a backslash. Used in sed input.
function quote
{
    # '#' as the sed delimiter avoids escaping the '/' being replaced.
    echo "$@" | sed 's#/#\\/#g'
}
# Creates a directory in the global checkin tree provided it does not exist already.
# Parent directories are created if necessary. The directory and its parents are
# equipped with .meta.xml files if the latter do not exist already (this is done even
# if the directories itselfs existed before).
function create_checkin_dir
{
    local path=$1
    run_cmd mkdir -pv $checkin_root/$path
    # Walk upward from $path to the checkin root, copying .meta.xml files.
    while [ "$path" ] && [ "$path" != "." ] ; do
        [ -e "$checkin_root/$path/.meta.xml" ] || \
            run_cmd cp -v $base_dir/checkin/$path/.meta.xml $checkin_root/$path
        path=`dirname $path`
    done
}
# Starts mmjvmd if necessary. The old mmjvmd status (running or not running)
# is saved in the gloabl variable old_mmjvmd_status so end_mmjvmd can decide
# whether to stop it again.
function begin_mmjvmd
{
    old_mmjvmd_status=`mmjvmd status`
    run_cmd mmjvmd start
}
# Stops mmjvmd provided its old status is "Jvmd is not running"
# (i.e. only stop the daemon if begin_mmjvmd was the one who started it).
function end_mmjvmd
{
    # Robustness fix: use an explicit `if` so the function exits with status
    # 0 when there is nothing to stop; the original `[ ... ] && ...` form
    # returned status 1 in that case, which callers could misread as failure.
    if [ "$old_mmjvmd_status" = 'Jvmd is not running' ] ; then
        run_cmd mmjvmd stop
    fi
}
# --------------------------------------------------------------------------------
# Functions implementing targets
# --------------------------------------------------------------------------------
#Compiles the Java sources and creates Mumie documents ready for checkin
function apply_mmjava
{
echo
echo "======================================================================"
echo "Compiling java sources"
echo "======================================================================"
echo
local section_name
begin_mmjvmd
for section_name in $section_names ; do
run_cmd cd $checkin_root/$mount_path/$section_name
for source_file in `find -L -name "*.java"` ; do
[ "$force" == 'force' ] && local force_flag="-f "
run_cmd mmjava $force_flag $source_file
done
done
end_mmjvmd
}
# Converts all tex sources
function apply_mmtex
{
echo
echo "======================================================================"
echo "Applying mmtex"
echo "======================================================================"
echo
# Compose mmtex command:
local mmtex_call="$mmtex_cmd -rwF $mmtex_opts"
[ "$force" ] && mmtex_call="$mmtex_call -f"
local section_name
for section_name in $section_names ; do
# Change into top-level local checkin dir:
run_cmd cd $base_dir/checkin/$mount_path/$section_name
echo "$program_name: changed into checkin/$mount_path/$section_name"
# Execute mmtex:
echo "$program_name: calling mmtex:"
run_cmd $mmtex_call
run_cmd cd $base_dir
done
apply_mmtex_done=done
echo "$program_name: applying mmtex done"
}
# "Mounts" the contents into the global checkin tree by symlinking each
# section directory there and creating the .dir helper files.
function mount_checkin
{
echo
echo "======================================================================"
echo "Mounting checkin"
echo "======================================================================"
echo
create_checkin_dir $mount_path
local section_name
for section_name in $section_names ; do
# Symlink the section into the global tree unless something is already there
if ! [ -e $checkin_root/$mount_path/$section_name ] ; then
run_cmd ln -vs $base_dir/checkin/$mount_path/$section_name $checkin_root/$mount_path
fi
run_cmd cd $checkin_root/$mount_path
create_dot_dir_files $mount_path/$section_name $section_name
run_cmd cd $base_dir
done
mount_checkin_done=done
echo "$program_name: mounting checkin done"
}
# "Unmounts" the contents from the global checkin tree by removing the
# per-section symlinks created by mount_checkin.
function unmount_checkin
{
echo
echo "======================================================================"
echo "Unmounting checkin"
echo "======================================================================"
echo
local section_name
for section_name in $section_names ; do
# Only remove real symlinks (-h); never delete an actual directory
if [ -h $checkin_root/$mount_path/$section_name ] ; then
run_cmd rm $checkin_root/$mount_path/$section_name
echo "$program_name: unmounting checkin done"
else
echo "Package ${section_name} not mounted yet"
fi
done
unmount_checkin_done=done
}
# --------------------------------------------------------------------------------
# Functions implementing targets
# --------------------------------------------------------------------------------
# Copies the mmtex document classes to their installation location
function install_dcls
{
echo
echo "======================================================================"
echo "Installing document classes"
echo "======================================================================"
echo
run_cmd cd $base_dir/lib/dcl
echo "$program_name: changed into lib/dcl"
cp_to_dir $dcl_lib_dir $dcl_install_files
run_cmd cd $base_dir
}
# Removes the mmtex document classes from their installation location
function uninstall_dcls
{
echo
echo "======================================================================"
echo "Uninstalling document classes"
echo "======================================================================"
echo
if [ -e $dcl_lib_dir ] ; then
run_cmd cd $dcl_lib_dir
# NOTE(review): "$program_name::" prints a double colon; looks like a typo,
# but it is runtime output, so it is left untouched here.
echo "$program_name:: changed into $dcl_lib_dir"
run_cmd rm -vf $dcl_install_files
else
echo "$program_name: installation dir does not exist"
fi
run_cmd cd $base_dir
}
# Copies the mmtex libraries to their installation location
function install_libs
{
echo
echo "======================================================================"
echo "Installing libraries"
echo "======================================================================"
echo
run_cmd cd $base_dir/lib/include
echo "$program_name: changed into lib/include"
cp_to_dir $inc_lib_dir $inc_install_files
run_cmd cd $base_dir
}
# Removes the mmtex library files from their installation location
function uninstall_libs
{
echo
echo "======================================================================"
echo "Uninstalling libraries"
echo "======================================================================"
echo
if [ -e $inc_lib_dir ] ; then
run_cmd cd $inc_lib_dir
echo "$program_name:: changed into $inc_lib_dir"
run_cmd rm -vf $inc_install_files
else
echo "$program_name: installation dir does not exist"
fi
run_cmd cd $base_dir
}
# Copies the version file to its installation location, replacing any old copy
function install_version_file
{
echo
echo "======================================================================"
echo "Installing version file"
echo "======================================================================"
echo
run_cmd cd $base_dir
if [ -e "$version_file" ] ; then
run_cmd mkdir -pv $etc_dir
run_cmd rm -vf $etc_dir/$version_file
run_cmd cp -v $version_file $etc_dir
else
echo "$program_name: Version file does not exist"
fi
run_cmd cd $base_dir
}
# Removes the version file from its installation location
function uninstall_version_file
{
echo
echo "======================================================================"
echo "Uninstalling version file"
echo "======================================================================"
echo
if [ -e $etc_dir ] ; then
run_cmd rm -vf $etc_dir/$version_file
else
echo "$program_name: installation dir does not exist"
fi
run_cmd cd $base_dir
}
# Creates the distribution tarball dist/mmtex_mumie_<release>.tgz by exporting
# the CVS tag ver-<release> (dots replaced by dashes) and packing it together
# with a freshly written version file. Requires --release to be set.
function create_dist
{
echo
echo "======================================================================"
echo "Creating distribution"
echo "======================================================================"
echo
[ "$release" ] || error "No release specified"
run_cmd cd $base_dir
run_cmd mkdir -pv dist
run_cmd cd dist
echo "$program_name: Changed into dist/"
echo "$program_name: Checking-out release"
local dist_name="mmtex_mumie_${release}"
local archive="${dist_name}.tgz"
# CVS tags cannot contain dots, so release 1.2.3 becomes tag ver-1-2-3
local tag="ver-`echo $release | tr '.' '-'`"
run_cmd rm -rfv $dist_name
run_cmd rm -fv $archive
local cvscmd=cvs
[ "$cvsroot" ] && cvscmd="cvs -d $cvsroot"
run_cmd $cvscmd export -r $tag mmtex_mumie
run_cmd mv -v mmtex_mumie $dist_name
echo "$program_name: Creating version file"
# NOTE(review): the redirection captures run_cmd's whole output, not just
# echo's - this only writes a clean version file if run_cmd itself prints
# nothing to stdout; confirm against run_cmd's definition.
run_cmd echo $release > $dist_name/$version_file
echo "$program_name: Creating tgz"
run_cmd tar czf $archive $dist_name
run_cmd cd $base_dir
}
# Processes the requested targets in order, dispatching each keyword to the
# corresponding function above. Unknown targets abort with exit code 3.
function process_targets
{
for target in $targets ; do
case $target in
conf-xsl)
create_conf_xsl ;;
all)
echo "Nothing to build" ;;
clear)
echo "Nothing to clear" ;;
install-dcls)
install_dcls ;;
install-libs)
install_libs ;;
install-verfile)
install_version_file ;;
install)
install_dcls; install_libs; install_version_file ;;
uninstall-dcls)
uninstall_dcls ;;
uninstall-libs)
uninstall_libs ;;
uninstall-verfile)
uninstall_version_file ;;
uninstall)
uninstall_libs; uninstall_dcls; uninstall_version_file ;;
dist)
create_dist ;;
mount-checkin)
mount_checkin ;;
unmount-checkin)
unmount_checkin ;;
mmjava)
apply_mmjava ;;
mmtex)
apply_mmtex ;;
*)
echo "ERROR: Unknown target: $target"
exit 3 ;;
esac
done
echo
echo "$program_name: Done"
echo
}
# Prints the list of supported targets with a one-line description each
# (shown for the --targets / -t option).
function show_targets
{
cat <<EOF
all Does nothing
clear Does nothing
install-dcls Installs the mmtex document classes
install-libs Installs the mmtex libraries
install-verfile Installs the version file
install Installs all
uninstall-dcls Uninstalls the mmtex document classes
uninstall-libs Uninstalls the mmtex libraries
uninstall-verfile Uninstalls the version file
uninstall Uninstalls all
mount-checkin Adds the contents to the global checkin tree
unmount-checkin Removes the contents from the global checkin tree
mmtex Applies mmtex to all tex sources
mmjava Applies mmjava to all java sources
dist Creates a distribution
EOF
}
# Prints all build variables to stdout (debugging aid for the --vars option)
function print_variables
{
cat <<EOF
cvsroot = $cvsroot
force = $force
ignore_deps = $ignore_deps
prefix = $prefix
release = $release
targets = $targets
task = $task
java_source_path = $java_source_path
section_names = $section_names
mount_path = $mount_path
EOF
}
# Prints the usage/help text for the --help option
function show_help
{
cat <<EOF
Usage:
./build.sh [OPTIONS] [TARGETS]
Description:
Builds and/or installs the mmtex_mumie package, or parts of it. What is
actually done is controlled by TARGETS, which is a list of keywords called
targets. Type ./build.sh -t to get a list of all targets. The default target
is "all"; it is assumed if no targets are specified.
Options:
--prefix=PREFIX
The root of the installation directory. Default is /usr/local.
--targets, -t
List all targets
--force, -f
Create files even if they are up-to-date.
--ignore-deps, -D
Ignore target dependencies. If a target is build with this option,
then targets required by this target are not build automatically.
--release=VERSION_NUMBER
Set the release for the distribution to build. In effect only with
the "dist" target, otherwise ignored.
--cvsroot=CVSROOT
Set the cvs root for retrieving the distribution to build. In effect
only with the "dist" target, otherwise ignored. If not set, the
environment variable \$CVSROOT is used
--vars
Prints the build variables to stdout
--help, -h
Print this help text and exit.
--version, -v
Print version information and exit.
EOF
}
# Prints the program version
function show_version
{
echo $program_version
}
# Dispatch: run whatever task the option parsing selected above
# (process_targets, show_help, show_targets, print_variables, or show_version).
$task
|
<gh_stars>1-10
// Wraps a CPU-side Uint16Array of element indices plus its GL buffer object.
// Constructor forms (mirrored by the argument dispatch at the bottom):
//   IndexBuffer(numIndices)          - sized buffer, VBO usage off
//   IndexBuffer(numIndices, useVbo)  - sized buffer, explicit VBO flag
//   IndexBuffer(useVbo)              - empty buffer, explicit VBO flag
// NOTE(review): relies on a global `gl` being in scope at construction time
// for gl.ELEMENT_ARRAY_BUFFER - confirm against the including page.
function IndexBuffer(a, b)
{
var mIndexBuffer = null;   // CPU-side Uint16Array staging storage
var mIndex = 0;            // next write position for addIndex()
var mNumIndices = 0;       // logical size of the buffer
var mGLBuffer = new GLBuffer(gl.ELEMENT_ARRAY_BUFFER);
var mUseVbo = false;       // prefer a static VBO bind when supported
// Returns the number of indices this buffer holds.
this.size = function ()
{
return mNumIndices;
};
// Resizes the buffer and resets the write cursor.
this.reset = function (numVertices)
{
mNumIndices = numVertices;
this.regenerateBuffer();
};
// Call this when we have to re-create the surface and reloading all OpenGL resources.
this.reload = function ()
{
mGLBuffer.reload();
};
// (Re)allocates the CPU-side array; a zero-sized buffer stays unallocated.
this.regenerateBuffer = function ()
{
if (mNumIndices == 0)
{
return;
}
mIndex = 0;
mIndexBuffer = new Uint16Array(mNumIndices);
};
// Appends one index at the current cursor (no bounds check).
this.addIndex = function (index)
{
mIndexBuffer[mIndex++] = index;
};
// Uploads the indices (static VBO when enabled and supported, dynamic
// otherwise) and issues the drawElements call.
this.draw = function (gl, primitiveType)
{
if (mNumIndices == 0)
{
return;
}
mIndex = 0;
if (mUseVbo && GLBuffer.canUseVBO())
{
mGLBuffer.bind(gl, mIndexBuffer);
gl.drawElements(primitiveType, this.size(), gl.UNSIGNED_SHORT, 0);
}
else
{
mGLBuffer.bindDynamic(gl, mIndexBuffer);
gl.drawElements(primitiveType, this.size(), gl.UNSIGNED_SHORT, 0);
}
};
// Constructor-argument dispatch (see the forms documented above).
if (a != undefined)
{
if (a.constructor == Number)
{
if (b != undefined)
{
mUseVbo = b;
}
this.reset(a);
}
else
{
mUseVbo = a;
}
}
}
|
/// Evaluates a polynomial with ascending-order integer coefficients
/// (`coefficients[0] + coefficients[1]*x + coefficients[2]*x^2 + ...`) at `x`.
/// Traps on an empty coefficient array, exactly like the original's
/// `coefficients[0]` access.
func evaluatePolynomial(coefficients: [Int], at x: Int) -> Int {
    var accumulated = coefficients[0]
    var power = 1
    for coefficient in coefficients.dropFirst() {
        power *= x
        accumulated += coefficient * power
    }
    return accumulated
}
// Demo: evaluate 2 - x + 3x^2 at x = 4 (2 - 4 + 48 = 46).
let coefficients = [2, -1, 3]
let x = 4
let result = evaluatePolynomial(coefficients: coefficients, at: x)
print("The result of evaluating the polynomial at x=\(x) is \(result)")
// Output: The result of evaluating the polynomial at x=4 is 46
// (the original comment claimed 35, which does not match the computation)
<gh_stars>1-10
from matalg.core.atoms import SymbolSequence, Context
from abc import ABCMeta, abstractmethod
class AbstractConfiguration(metaclass=ABCMeta):
    """Abstract base for configurations tied to a ``Context``.

    A configuration starts out non-final; :meth:`make_final` flips that flag
    permanently. Concrete subclasses supply :meth:`is_empty` and
    :meth:`representation`.
    """

    def __init__(self, context: Context):
        self.__context = context  # owning context, exposed read-only below
        self.__final = False      # becomes True once make_final() is called

    def __str__(self) -> str:
        # Delegate to the subclass-supplied textual form.
        return self.representation()

    @property
    def context(self) -> Context:
        """The context this configuration belongs to (read-only)."""
        return self.__context

    @property
    def is_final(self) -> bool:
        """Whether :meth:`make_final` has been called."""
        return self.__final

    def make_final(self):
        """Irreversibly mark this configuration as final."""
        self.__final = True

    @abstractmethod
    def is_empty(self) -> bool:
        """Return ``True`` when the configuration holds no content."""

    @abstractmethod
    def representation(self) -> str:
        """Return the string representation of the current configuration."""
|
#!/usr/bin/env bash
# Bootstraps the repo's build tooling: downloads the dotnet CLI (unless a
# local toolset is supplied via env vars), restores the BuildTools package,
# and initializes it. A "done" marker file keyed on the BuildTools version
# makes the whole script a no-op on subsequent runs.
__scriptpath=$(cd "$(dirname "$0")"; pwd -P)
if [ "$BUILDVARS_DONE" != 1 ]; then
. $__scriptpath/buildscripts/hostvars-setup.sh
fi
__init_tools_log=$__scriptpath/init-tools.log
__PACKAGES_DIR=$__scriptpath/packages
__TOOLRUNTIME_DIR=$__scriptpath/Tools
__DOTNET_PATH=$__TOOLRUNTIME_DIR/dotnetcli
__DOTNET_CMD=$__DOTNET_PATH/dotnet
if [ -z "$__BUILDTOOLS_SOURCE" ]; then __BUILDTOOLS_SOURCE=https://dotnet.myget.org/F/dotnet-buildtools/api/v3/index.json; fi
export __BUILDTOOLS_USE_CSPROJ=true
__BUILD_TOOLS_PACKAGE_VERSION=$(cat $__scriptpath/BuildToolsVersion.txt)
__DOTNET_TOOLS_VERSION=$(cat $__scriptpath/DotnetCLIVersion.txt)
__BUILD_TOOLS_PATH=$__PACKAGES_DIR/microsoft.dotnet.buildtools/$__BUILD_TOOLS_PACKAGE_VERSION/lib
__INIT_TOOLS_RESTORE_PROJECT=$__scriptpath/init-tools.msbuild
__INIT_TOOLS_DONE_MARKER_DIR=$__TOOLRUNTIME_DIR/$__BUILD_TOOLS_PACKAGE_VERSION
__INIT_TOOLS_DONE_MARKER=$__INIT_TOOLS_DONE_MARKER_DIR/done
# Work out the dotnet SDK package name (runtime identifier) for this OS/arch
# unless the caller pinned one via __DOTNET_PKG.
if [ -z "$__DOTNET_PKG" ]; then
OSName=$(uname -s)
case $OSName in
Darwin)
OS=OSX
__PKG_RID=osx
ulimit -n 2048
# Format x.y.z as single integer with three digits for each part
VERSION=`sw_vers -productVersion| sed -e 's/\./ /g' | xargs printf "%03d%03d%03d"`
if [ "$VERSION" -lt 010012000 ]; then
echo error: macOS version `sw_vers -productVersion` is too old. 10.12 is needed as minimum.
exit 1
fi
;;
Linux)
OS=Linux
__PKG_RID=linux
# Alpine needs the musl build; RHEL/CentOS 6 need the rhel.6 build.
if [ -e /etc/os-release ]; then
source /etc/os-release
if [[ $ID == "alpine" ]]; then
__PKG_RID="linux-musl"
fi
elif [ -e /etc/redhat-release ]; then
redhatRelease=$(</etc/redhat-release)
if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]; then
__PKG_RID=rhel.6
fi
fi
;;
*)
echo "Unsupported OS '$OSName' detected. Downloading linux-$__HostArch tools."
OS=Linux
__PKG_RID=linux
;;
esac
__PKG_RID=$__PKG_RID-$__HostArch
__DOTNET_PKG=dotnet-sdk-${__DOTNET_TOOLS_VERSION}-$__PKG_RID
fi
# Dumps the init-tools log to stderr so CI failures show the real cause.
display_error_message()
{
echo "Please check the detailed log that follows." 1>&2
cat "$__init_tools_log" 1>&2
}
if [ ! -e $__INIT_TOOLS_DONE_MARKER ]; then
__PATCH_CLI_NUGET_FRAMEWORKS=0
if [ -e $__TOOLRUNTIME_DIR ]; then rm -rf -- $__TOOLRUNTIME_DIR; fi
echo "Running: $__scriptpath/init-tools.sh" > $__init_tools_log
# Acquire the dotnet CLI: prefer a local toolset dir, then a tool dir,
# otherwise download the SDK tarball (curl first, wget as fallback).
if [ ! -e $__DOTNET_PATH ]; then
mkdir -p "$__DOTNET_PATH"
if [ -n "$DOTNET_TOOLSET_DIR" ] && [ -d "$DOTNET_TOOLSET_DIR/$__DOTNET_TOOLS_VERSION" ]; then
echo "Copying $DOTNET_TOOLSET_DIR/$__DOTNET_TOOLS_VERSION to $__DOTNET_PATH" >> $__init_tools_log
cp -r $DOTNET_TOOLSET_DIR/$__DOTNET_TOOLS_VERSION/* $__DOTNET_PATH
elif [ -n "$DOTNET_TOOL_DIR" ] && [ -d "$DOTNET_TOOL_DIR" ]; then
echo "Copying $DOTNET_TOOL_DIR to $__DOTNET_PATH" >> $__init_tools_log
cp -r $DOTNET_TOOL_DIR/* $__DOTNET_PATH
else
echo "Installing dotnet cli..."
__DOTNET_LOCATION="https://dotnetcli.azureedge.net/dotnet/Sdk/${__DOTNET_TOOLS_VERSION}/${__DOTNET_PKG}.tar.gz"
# curl has HTTPS CA trust-issues less often than wget, so lets try that first.
echo "Installing '${__DOTNET_LOCATION}' to '$__DOTNET_PATH/dotnet.tar'" >> $__init_tools_log
if command -v curl > /dev/null; then
curl --retry 10 -sSL --create-dirs -o $__DOTNET_PATH/dotnet.tar ${__DOTNET_LOCATION}
else
wget -q -O $__DOTNET_PATH/dotnet.tar ${__DOTNET_LOCATION}
fi
cd $__DOTNET_PATH
tar -xf $__DOTNET_PATH/dotnet.tar
# NOTE(review): "$?" here tests tar's status only; a curl/wget failure
# above is not detected until the extraction fails.
if [ "$?" != "0" ]; then
echo "ERROR: Could not download dotnet cli." 1>&2
display_error_message
exit 1
fi
cd $__scriptpath
__PATCH_CLI_NUGET_FRAMEWORKS=1
fi
fi
# Acquire BuildTools: local toolset dir, local tool dir, or NuGet restore.
if [ -n "$BUILD_TOOLS_TOOLSET_DIR" ] && [ -d "$BUILD_TOOLS_TOOLSET_DIR/$__BUILD_TOOLS_PACKAGE_VERSION" ]; then
echo "Copying $BUILD_TOOLS_TOOLSET_DIR/$__BUILD_TOOLS_PACKAGE_VERSION to $__TOOLRUNTIME_DIR" >> $__init_tools_log
cp -r $BUILD_TOOLS_TOOLSET_DIR/$__BUILD_TOOLS_PACKAGE_VERSION/* $__TOOLRUNTIME_DIR
elif [ -n "$BUILD_TOOLS_TOOL_DIR" ] && [ -d "$BUILD_TOOLS_TOOL_DIR" ]; then
echo "Copying $BUILD_TOOLS_TOOL_DIR to $__TOOLRUNTIME_DIR" >> $__init_tools_log
cp -r $BUILD_TOOLS_TOOL_DIR/* $__TOOLRUNTIME_DIR
else
if [ ! -e $__BUILD_TOOLS_PATH ]; then
echo "Restoring BuildTools version $__BUILD_TOOLS_PACKAGE_VERSION..."
echo "Running: $__DOTNET_CMD restore \"$__INIT_TOOLS_RESTORE_PROJECT\" --no-cache --packages $__PACKAGES_DIR --source $__BUILDTOOLS_SOURCE /p:BuildToolsPackageVersion=$__BUILD_TOOLS_PACKAGE_VERSION" >> $__init_tools_log
$__DOTNET_CMD restore "$__INIT_TOOLS_RESTORE_PROJECT" --no-cache --packages $__PACKAGES_DIR --source $__BUILDTOOLS_SOURCE /p:BuildToolsPackageVersion=$__BUILD_TOOLS_PACKAGE_VERSION >> $__init_tools_log
# NOTE(review): a failed restore is reported but does not exit here; the
# subsequent init-tools.sh invocation will then fail. Confirm intended.
if [ ! -e "$__BUILD_TOOLS_PATH/init-tools.sh" ]; then
echo "ERROR: Could not restore build tools correctly." 1>&2
display_error_message
fi
fi
echo "Initializing BuildTools..."
echo "Running: $__BUILD_TOOLS_PATH/init-tools.sh $__scriptpath $__DOTNET_CMD $__TOOLRUNTIME_DIR $__PACKAGES_DIR" >> $__init_tools_log
# Executables restored with .NET Core 2.0 do not have executable permission flags. https://github.com/NuGet/Home/issues/4424
chmod +x $__BUILD_TOOLS_PATH/init-tools.sh
$__BUILD_TOOLS_PATH/init-tools.sh $__scriptpath $__DOTNET_CMD $__TOOLRUNTIME_DIR $__PACKAGES_DIR >> $__init_tools_log
if [ "$?" != "0" ]; then
echo "ERROR: An error occurred when trying to initialize the tools." 1>&2
display_error_message
exit 1
fi
fi
echo "Making all .sh files executable under Tools."
# Executables restored with .NET Core 2.0 do not have executable permission flags. https://github.com/NuGet/Home/issues/4424
ls $__scriptpath/Tools/*.sh | xargs chmod +x
ls $__scriptpath/Tools/scripts/docker/*.sh | xargs chmod +x
Tools/crossgen.sh $__scriptpath/Tools $__PKG_RID
mkdir -p $__INIT_TOOLS_DONE_MARKER_DIR
touch $__INIT_TOOLS_DONE_MARKER
echo "Done initializing tools."
else
echo "Tools are already initialized"
fi
|
// Serializes a DOM node back out as XML text on stdout: CDATA sections,
// processing instructions, and text content are printed; the structural
// events (Start/Empty/End/Eof) are not expected at this point and are
// only reported.
// NOTE(review): from_utf8(...).unwrap() panics on non-UTF-8 content -
// confirm the upstream parser guarantees UTF-8 here.
match dom_node {
DomNode::DomEvent(Event::CData(ref doc_type)) => {
print!("<![CDATA[{}]]>", from_utf8(doc_type.escaped()).unwrap());
},
DomNode::DomEvent(Event::PI(ref doc_type)) => {
print!("<?{}?>", from_utf8(doc_type.escaped()).unwrap());
},
DomNode::DomEvent(Event::Text(ref text)) => {
print!("{}", from_utf8(text.escaped()).unwrap());
},
DomNode::DomEvent(Event::Start) => {
println!("Unexpected Event: Start");
},
DomNode::DomEvent(Event::Empty) => {
println!("Unexpected Event: Empty");
},
DomNode::DomEvent(Event::End) => {
println!("Unexpected Event: End");
},
DomNode::DomEvent(Event::Eof) => {
println!("Unexpected Event: Eof");
},
}
/**
 * Prints all prime numbers below 100 to standard output, separated by spaces.
 *
 * Fix over the original: the loop started at 1 and printed 1, but 1 is not a
 * prime number. Primality testing now lives in a helper that rejects values
 * below 2.
 */
public class PrimeNumberGenerator {
    public static void main(String[] args) {
        int low = 1;
        int high = 100;
        while (low < high) {
            if (isPrime(low))
                System.out.print(low + " ");
            ++low;
        }
    }

    /** Returns true when n is prime (n >= 2 with no divisor in [2, n/2]). */
    static boolean isPrime(int n) {
        if (n < 2)
            return false; // 0 and 1 are not prime
        for (int i = 2; i <= n / 2; ++i) {
            if (n % i == 0)
                return false;
        }
        return true;
    }
}
# Define a short alias that re-sources the PATH setup script, then run it once.
# NOTE(review): alias expansion is disabled in non-interactive shells; this
# presumably lives in a sourced interactive-shell rc file - confirm.
alias path='. ~/.path/_set'
path
|
<reponame>codefoxut/la-casa-de-papel
package main
import (
"fmt"
"strconv"
"strings"
)
// max returns the larger of two ints.
func max(a, b int) int {
	if a >= b {
		return a
	}
	return b
}
// addBinary adds two non-negative binary numbers given as digit strings
// (e.g. "1111" + "1") and returns the sum as a binary string.
func addBinary(binaryA, binaryB string) string {
var soln string
i := len(binaryA)
j := len(binaryB)
a := []rune(binaryA)
b := []rune(binaryB)
// The sum is at most one digit longer than the wider input.
sol := make([]string, max(i, j)+1)
lengthOfSol := len(sol)
var sum, carry int
// Schoolbook addition from the least-significant digit, filling sol from
// the right; any leading cell never written keeps its zero value "".
for i > 0 || j > 0 || carry > 0 {
var x, y int
if i > 0 {
x, _ = strconv.Atoi(string(a[i-1]))
i--
}
if j > 0 {
y, _ = strconv.Atoi(string(b[j-1]))
j--
}
sum = carry + x + y
sol[lengthOfSol-1] = strconv.Itoa(sum % 2)
carry = sum / 2
lengthOfSol--
}
// Drop the leading cell when it holds an explicit "0"; an unwritten ""
// cell contributes nothing to the Join either way.
if sol[0] == "0" {
soln = strings.Join(sol[1:], "")
} else {
soln = strings.Join(sol, "")
}
return soln
}
// Demo entry point: 1111 + 1 in binary is 10000.
func main() {
fmt.Println(addBinary("1111", "1"))
}
|
package com.pharmacySystem.service.implementations;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.pharmacySystem.DTOs.AddExaminerToPharmacyDTO;
import com.pharmacySystem.DTOs.DermatologistBasicDTO;
import com.pharmacySystem.DTOs.DermatologistDTO;
import com.pharmacySystem.DTOs.DermatologistSearchDTO;
import com.pharmacySystem.mappers.DermatologistMapper;
import com.pharmacySystem.model.pharmacy.Pharmacy;
import com.pharmacySystem.model.user.Dermatologist;
import com.pharmacySystem.model.workingHours.TimeInterval;
import com.pharmacySystem.model.workingHours.WorkingHours;
import com.pharmacySystem.repository.AppointmentRepository;
import com.pharmacySystem.repository.DermatologistRepository;
import com.pharmacySystem.repository.PharmacyRepository;
import com.pharmacySystem.repository.WorkingHoursRepository;
import com.pharmacySystem.service.interfaces.IDermatologistService;
import com.pharmacySystem.service.interfaces.ITimeIntervalService;
import com.pharmacySystem.service.interfaces.IWorkingHoursService;
/**
 * Service layer for dermatologist lookup, search, and (un)assignment to
 * pharmacies, including the associated working-hours bookkeeping.
 *
 * Fixes over the original: {@code addDermatologistToPharmacy} dereferenced the
 * looked-up dermatologist (debug println) before its null check, causing an
 * NPE for unknown ids; a duplicated {@code pharmacy.setDermatologists(...)}
 * call was removed; stray {@code System.out.println} debug output was dropped.
 */
@Service
public class DermatologistService implements IDermatologistService{

    @Autowired
    private DermatologistRepository dermatologistRepository;
    @Autowired
    private IWorkingHoursService workingHoursService;
    @Autowired
    private PharmacyRepository pharmacyRepository;
    @Autowired
    private ITimeIntervalService timeIntervalService;
    @Autowired
    private AppointmentRepository appointmentRespository;
    @Autowired
    private WorkingHoursRepository workingHoursRepository;

    /** Returns the dermatologist with the given id, or null when absent. */
    @Override
    public Dermatologist findById(Long id) {
        return dermatologistRepository.findById(id).orElse(null);
    }

    /** Returns DTOs for all dermatologists working at the given pharmacy. */
    @Override
    public Set<DermatologistDTO> findAllByPharmacy(Long id) {
        Set<Dermatologist> dermatologists = dermatologistRepository.findAllByPharmacy(id);
        Pharmacy pharmacy = pharmacyRepository.findById(id).orElse(null);
        Set<DermatologistDTO> dermatologistsDTO = new HashSet<DermatologistDTO>();
        if(pharmacy != null && !dermatologists.isEmpty()) {
            dermatologistsDTO = DermatologistMapper.dermatologistToDermatologistDTO(dermatologists, pharmacy);
        }
        return dermatologistsDTO;
    }

    /**
     * Case-insensitive substring search over "name surname" among the
     * dermatologists of one pharmacy.
     */
    @Override
    public Set<DermatologistSearchDTO> searchDermatologist(String input, Pharmacy pharmacy) {
        Set<Dermatologist> searchResult = new HashSet<Dermatologist>();
        Set<Dermatologist> dermatologists = dermatologistRepository.findAllByPharmacy(pharmacy.getId());
        String nameAndSurname;
        for(Dermatologist dermatologist : dermatologists) {
            nameAndSurname = dermatologist.getName() + " "+ dermatologist.getSurname();
            if(nameAndSurname.toUpperCase().contains(input.toUpperCase()))
                searchResult.add(dermatologist);
        }
        return DermatologistMapper.createDermatologistSearchDTOFromDermatologist(searchResult, pharmacy);
    }

    /**
     * Case-insensitive substring search over all dermatologists, pairing each
     * match with every pharmacy that employs them.
     */
    @Override
    public Set<DermatologistSearchDTO> searchAllDermatologist(String input) {
        Set<DermatologistSearchDTO> searchResult = new HashSet<DermatologistSearchDTO>();
        Set<Dermatologist> dermatologists = dermatologistRepository.findAllDermatologist();
        Set<Pharmacy> allPharmacies = pharmacyRepository.findAllPharmacies();
        String nameAndSurname;
        for(Dermatologist dermatologist : dermatologists) {
            nameAndSurname = dermatologist.getName() + " "+ dermatologist.getSurname();
            if(nameAndSurname.toUpperCase().contains(input.toUpperCase())) {
                Set<Pharmacy> pharmacies = new HashSet<Pharmacy>();
                for(Pharmacy pharmacy : allPharmacies) {
                    for(Dermatologist pharmacyDermatologist : pharmacy.getDermatologists()) {
                        // NOTE(review): if getId() returns boxed Long (not
                        // primitive long), == compares references and can
                        // silently miss matches - confirm the model's getter type.
                        if(dermatologist.getId() == pharmacyDermatologist.getId())
                            pharmacies.add(pharmacy);
                    }
                }
                searchResult.add(DermatologistMapper.createDermatologistSearchDTOFromAllDermatologists(dermatologist, pharmacies));
            }
        }
        return searchResult;
    }

    /**
     * Creates working hours for every requested interval, or returns null as
     * soon as one interval collides with the dermatologist's existing hours.
     */
    private Set<WorkingHours> createWorkingHours(Pharmacy pharmacy, Set<WorkingHours> unavailableWorkingHours, Set<TimeInterval> timeIntervals) {
        Set<WorkingHours> workingHours = new HashSet<WorkingHours>();
        for(TimeInterval timeInterval : timeIntervals) {
            if(workingHoursAvailable(unavailableWorkingHours, timeInterval)) {
                workingHours.add(workingHoursService.create(pharmacy, timeInterval));
            }else {
                return null;
            }
        }
        return workingHours;
    }

    /** Returns false when the interval overlaps any already-taken working hours. */
    private boolean workingHoursAvailable(Set<WorkingHours> unavailableWorkingHours, TimeInterval timeInterval) {
        boolean available = true;
        if(unavailableWorkingHours == null) {
            return available;
        }
        for(WorkingHours workingHour : unavailableWorkingHours) {
            Date workingHourStart = timeIntervalService.formatDate(workingHour.getTimeInterval().getStartTime());
            Date workingHourEnd = timeIntervalService.formatDate(workingHour.getTimeInterval().getEndTime());
            Date timeIntervalStart = timeIntervalService.formatDate(timeInterval.getStartTime());
            Date timeIntervalEnd = timeIntervalService.formatDate(timeInterval.getEndTime());
            if(!timeIntervalService.checkAvailability(workingHourStart, workingHourEnd, timeIntervalStart, timeIntervalEnd)) {
                available = false;
                break;
            }
        }
        return available;
    }

    /**
     * Assigns a dermatologist to a pharmacy with the requested working hours.
     * Returns the DTO on success, a DTO with id -1 when the hours collide,
     * or null when either entity is missing.
     */
    @Override
    public DermatologistDTO addDermatologistToPharmacy(AddExaminerToPharmacyDTO examiner, long pharmacyId) {
        Dermatologist dermatologist = (Dermatologist) dermatologistRepository.findById(examiner.getId()).orElse(null);
        Pharmacy pharmacy = pharmacyRepository.findById(pharmacyId).orElse(null);
        if(dermatologist != null && pharmacy != null) {
            Set<WorkingHours> unavailableWorkingHours = workingHoursRepository.findAllByDermatologistsId(examiner.getId());
            Set<TimeInterval> timeIntervals = timeIntervalService.createTimeIntervals(examiner.getStartDates(), examiner.getEndDates());
            Set<WorkingHours> workingHours = createWorkingHours(pharmacy, unavailableWorkingHours, timeIntervals);
            if(workingHours != null) {
                // Maintain both sides of the many-to-many association.
                Set<Dermatologist> dermatologists = pharmacy.getDermatologists();
                Set<Pharmacy> pharmacies = dermatologist.getPharmacy();
                dermatologists.add(dermatologist);
                pharmacies.add(pharmacy);
                pharmacy.setDermatologists(dermatologists);
                dermatologist.setPharmacy(pharmacies);
                dermatologist.setWorkingHours(workingHours);
                dermatologistRepository.save(dermatologist);
                return DermatologistMapper.dermatologistDTOFromDermatologist(dermatologist, pharmacy);
            }else {
                // id -1 signals a working-hours collision to the caller.
                dermatologist.setId(-1);
                return DermatologistMapper.dermatologistDTOFromDermatologist(dermatologist, pharmacy);
            }
        }
        return null;
    }

    /**
     * Detaches a dermatologist from a pharmacy unless they still have
     * scheduled appointments there (signalled by a DTO with id -1).
     * Returns null when either entity is missing.
     */
    @Override
    public DermatologistBasicDTO removeDermatologist(long id, long pharmacyId) {
        Dermatologist dermatologist = dermatologistRepository.findById(id).orElse(null);
        Pharmacy pharmacy = pharmacyRepository.findById(pharmacyId).orElse(null);
        Set<Dermatologist> pharmacyDermatologists = new HashSet<Dermatologist>();
        Set<Pharmacy> dermatologistPharmacies = new HashSet<Pharmacy>();
        if(dermatologist != null && pharmacy != null) {
            if(!hasSceduledAppointments(dermatologist.getId(), pharmacyId)) {
                // Remove the link from both sides of the association.
                pharmacyDermatologists = pharmacy.getDermatologists();
                dermatologistPharmacies = dermatologist.getPharmacy();
                pharmacyDermatologists.remove(dermatologist);
                dermatologistPharmacies.remove(pharmacy);
                pharmacy.setDermatologists(pharmacyDermatologists);
                dermatologist.setPharmacy(dermatologistPharmacies);
                pharmacyRepository.save(pharmacy);
                dermatologistRepository.save(dermatologist);
                return DermatologistMapper.createDermatologistBasicDTOFromDermatologist(dermatologist);
            }
            dermatologist.setId(-1);
            return DermatologistMapper.createDermatologistBasicDTOFromDermatologist(dermatologist);
        }
        return null;
    }

    /** True when the dermatologist has scheduled appointments at the pharmacy. */
    private boolean hasSceduledAppointments(long id, long pharmacyId) {
        return !appointmentRespository.findScheduledByDermatologistAndPharmacyId(id, pharmacyId).isEmpty();
    }

    /** Returns all dermatologists NOT yet employed by the given pharmacy. */
    @Override
    public Set<DermatologistBasicDTO> dermatologistForAddingToPharmacy(long pharmacyId) {
        Set<Dermatologist> dermatologists = dermatologistRepository.findAllDermatologist();
        Set<Dermatologist> pharmacyDermatologists = dermatologistRepository.findAllByPharmacy(pharmacyId);
        Set<Dermatologist> retVal = new HashSet<Dermatologist>();
        for(Dermatologist deramtologist : dermatologists) {
            // NOTE(review): contains() relies on Dermatologist's equals/hashCode;
            // confirm the entity defines identity-by-id equality.
            if(!pharmacyDermatologists.contains(deramtologist)) {
                retVal.add(deramtologist);
            }
        }
        Set<DermatologistBasicDTO> dermatologistsDTO = DermatologistMapper.createDermatologistBasicDTOsFromDermatologists(retVal);
        return dermatologistsDTO;
    }

    /** Returns all dermatologists, each paired with every pharmacy employing them. */
    @Override
    public Set<DermatologistDTO> findAll() {
        List<Dermatologist> dermatologists = dermatologistRepository.findAll();
        List<Pharmacy> pharmacies = pharmacyRepository.findAll();
        Set<DermatologistDTO> dermatologistsDTO = new HashSet<>();
        for(Dermatologist dermatologist : dermatologists) {
            Set<Pharmacy> dermatologistPharmacies = new HashSet<>();
            for(Pharmacy pharmacy : pharmacies) {
                if(pharmacy.getDermatologists().contains(dermatologist)) {
                    dermatologistPharmacies.add(pharmacy);
                }
            }
            dermatologistsDTO.add(DermatologistMapper.dermatologistDTOFromDermatologistAndPharmacies(dermatologist, dermatologistPharmacies));
        }
        return dermatologistsDTO;
    }
}
|
#!/bin/sh
# Copyright (c) 2002, Intel Corporation. All rights reserved.
# This file is licensed under the GPL license. For the full content
# of this license, see the COPYING file at the top level of this
# source tree.
# POSIX requirement under test:
# The pthread_spin_trylock( ) function shall fail if:
# [EBUSY] A thread currently holds the lock.
# This is tested implicitly via assertion 1, so this stub only reports that
# and exits successfully (exit 0 = PASS in this test suite).
echo "Tested implicitly via assertion 1. See output for status"
exit 0
|
<gh_stars>0
const db = require('./mainDB')
// Data-access helpers for the "crust" table.
// Fix: every query previously interpolated caller input straight into the SQL
// string (SQL injection). All queries now use pg-promise parameterized values
// ($1, $2, ...), and update runs inside a real pg-promise transaction instead
// of a hand-built BEGIN/COMMIT string.
const Crust = {
  // Insert a new crust row.
  add: ( name, price ) => db.none( 'INSERT INTO crust ( name, price ) VALUES ( $1, $2 )', [name, price] ),
  // Return every crust row.
  getAll: () => db.any( 'SELECT * FROM crust' ),
  // Return exactly one row by id (rejects when zero or many match).
  getById: crust_id => db.one( 'SELECT * FROM crust WHERE id = $1', [crust_id] ),
  // Update name and/or price atomically; empty-string args skip that column,
  // matching the original's behavior.
  update: ( id, name, price ) =>
    db.tx( t => {
      const statements = []
      if ( name != '' ) statements.push( t.none( 'UPDATE crust SET name = $1 WHERE id = $2', [name, id] ) )
      if ( price != '' ) statements.push( t.none( 'UPDATE crust SET price = $1 WHERE id = $2', [price, id] ) )
      return t.batch( statements )
    } ),
  // Delete the row with the given id.
  delete: id => db.none( 'DELETE FROM crust WHERE id = $1', [id] )
}
module.exports = { Crust }
|
def threeSum(nums):
    """Return all unique triplets in *nums* that sum to zero.

    Classic sort + two-pointer scan: for each anchor index, walk a left and a
    right pointer toward each other, skipping duplicate values so each triplet
    appears exactly once. Sorts *nums* in place (matching the original's side
    effect). O(n^2) time, O(1) extra space beyond the output.
    """
    triplets = []
    nums.sort()
    n = len(nums)
    for anchor in range(n - 2):
        # Skip anchors equal to the previous one to avoid duplicate triplets.
        if anchor > 0 and nums[anchor] == nums[anchor - 1]:
            continue
        lo, hi = anchor + 1, n - 1
        while lo < hi:
            total = nums[anchor] + nums[lo] + nums[hi]
            if total > 0:
                hi -= 1
            elif total < 0:
                lo += 1
            else:
                triplets.append([nums[anchor], nums[lo], nums[hi]])
                # Step both pointers past any runs of duplicate values.
                while lo < hi and nums[lo] == nums[lo + 1]:
                    lo += 1
                while lo < hi and nums[hi] == nums[hi - 1]:
                    hi -= 1
                lo += 1
                hi -= 1
    return triplets
# Fix: the original called threeSum(nums) with `nums` never defined
# (NameError at import time); give the demo a concrete input.
nums = [-1, 0, 1, 2, -1, -4]
print(threeSum(nums))
# Pairwise sums: every element of list1 added to every element of list2,
# in Cartesian-product order (the list1 element varies slowest).
list1 = [1, 2, 3]
list2 = [4, 5, 6]
result = [a + b for a in list1 for b in list2]
print(result)
<gh_stars>1-10
import matplotlib.pyplot as plt
def gen_plot(nr, nc):
    """Create a figure with an ``nr`` x ``nc`` grid of axes; return (figure, axes)."""
    figure, axes = plt.subplots(nr, nc)
    return figure, axes

def plot_car_image(image, fig, axes):
    """Render *image* on *axes* in grayscale (*fig* is unused, kept for symmetry)."""
    axes.imshow(image, cmap="gray")

def add_borders(border, fig, axes):
    """Overlay a border patch (e.g. a bounding box) on *axes*."""
    axes.add_patch(border)

def show():
    """Display all pending matplotlib figures."""
    plt.show()
|
<filename>chest/gui/helpers/src/main/java/net/community/chest/ui/helpers/SectionsMap.java<gh_stars>1-10
/*
*
*/
package net.community.chest.ui.helpers;
import java.util.Collection;
import java.util.Map;
import org.w3c.dom.Element;
/**
 * <P>Copyright 2008 as per GPLv2</P>
 *
 * <P>A {@link Map} from section name to its XML {@link Element} that also
 * remembers the order in which sections were first inserted - unlike the
 * inherited {@link #entrySet()}, whose iteration order is unspecified.</P>
 *
 * @author <NAME>.
 * @since Jan 19, 2009 1:57:20 PM
 */
public interface SectionsMap extends Map<String,Element> {
/**
 * @return A {@link Collection} of the names of sections in the
 * <U>same</U> order as the way the {@link #put(String, Element)}
 * was called (if the same element is re-mapped, the 1st call's position is kept)
 */
Collection<String> getSectionsNames ();
/**
 * @return A {@link Collection} of "pairs" represented as
 * {@link java.util.Map.Entry}-ies where key=section name, value=section XML
 * {@link Element}. The <U>order</U> of these pairs is according to the
 * one specified in {@link #getSectionsNames()} (which may differ from
 * the {@link #entrySet()})
 */
Collection<Map.Entry<String,Element>> sectionsSet ();
}
|
import React, { useEffect, useState } from "react";
import { makeStyles } from "@material-ui/core/styles";
import KeyboardArrowRightIcon from "@material-ui/icons/KeyboardArrowRight";
import { Grid, Typography, Box, Button } from "@material-ui/core";
import Paper from "@material-ui/core/Paper";
import { connect } from "react-redux";
import { Link } from "react-router-dom";
import { FaIdCardAlt, FaSchool, FaCar } from "react-icons/fa";
import Header from "../../components/header/Header";
// Page heading passed to the <Header> component below.
let title = "Setup your account and files";
// Placeholder intro copy (lorem ipsum) rendered under the heading.
let contentHeader = (
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
tempor
<br /> incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud
<br /> exercitation ullamco labori nisi ut aliquip ex ea commodo consequat
</p>
);
// Styles for the setup step-cards. `paperFinished` recolors a card once its
// step is complete; `root` lays the three cards out in a centered flex row.
const useStyles = makeStyles((theme) => ({
  paperFinished: { backgroundColor: "#36CAB2" },
  // Circular icon badge shown in each card's top-right corner.
  avatar: {
    border: "1px solid gray",
    borderRadius: "50%",
    width: "40px",
    height: "40px",
    textAlign: "center",
    lineHeight: "40px",
    color: "gray",
  },
  root: {
    display: "flex",
    flexWrap: "wrap",
    alignItems: "center",
    justifyContent: "center",
    // Sizing/spacing applied to every direct child card.
    "& > *": {
      margin: theme.spacing(2),
      padding: theme.spacing(1),
      paddingLeft: theme.spacing(2),
      width: theme.spacing(35),
      height: theme.spacing(20),
    },
  },
  // Gray "skip to dashboard" banner under the cards.
  lastPaper: {
    display: "flex",
    flexWrap: "wrap",
    alignItems: "center",
    justifyContent: "center",
    "& > *": {
      backgroundColor: "#e0e0e0",
      margin: theme.spacing(2),
      padding: theme.spacing(2),
      width: theme.spacing(34),
    },
  },
  body1: {
    paddingLeft: theme.spacing(1),
    fontSize: "16px",
  },
  body2: {
    paddingLeft: theme.spacing(1),
    fontSize: "12px",
  },
  rightIcon: { marginTop: "auto", marginBottom: "auto" },
  // Pushes the card's caption down toward the bottom of the card.
  pos: {
    marginTop: theme.spacing(7),
  },
  rightBottom: { textAlign: "right" },
  linkDecoration: { textDecoration: "none" },
}));
function Home(props) {
const [token, setToken] = useState("");
useEffect(() => {
setToken(localStorage.getItem("jwtToken"));
}, []);
const classes = useStyles();
return (
<div className="wrapper-left">
<Header title={title} contentHeader={contentHeader} />
<content>
<div className="container">
<Grid container className={classes.root}>
<Paper
elevation={3}
className={props.companyID && classes.paperFinished}
>
<Link
to={token ? "/addcompany" : "/signin"}
className={classes.linkDecoration}
>
<Grid container>
<Grid item xs={10}>
<Typography variant="h5" color="textSecondary">
Company
</Typography>
</Grid>
<Grid item xs={2}>
<div className={classes.avatar}>
<FaSchool />
</div>
</Grid>
</Grid>
<Typography className={classes.pos} color="textSecondary">
Setup your company profil
</Typography>
<Typography
className={classes.rightBottom}
color="textSecondary"
>
<KeyboardArrowRightIcon fontSize="large" />
</Typography>
</Link>
</Paper>
<Paper
elevation={3}
className={props.vehicleID && classes.paperFinished}
>
<Link
to={
token
? props.companyID
? "/addvehicle"
: "/addcompany"
: "/signin"
}
className={classes.linkDecoration}
>
<Grid container>
<Grid item xs={10}>
<Typography variant="h5" color="textSecondary">
Vehicles
</Typography>
</Grid>
<Grid item xs={2}>
<div className={classes.avatar}>
<FaCar />
</div>
</Grid>
</Grid>
<Typography className={classes.pos} color="textSecondary">
Setup your vehicle(s)
</Typography>
<Typography
className={classes.rightBottom}
color="textSecondary"
>
<KeyboardArrowRightIcon fontSize="large" />
</Typography>
</Link>
</Paper>
<Paper elevation={3}>
{/* <Link to={"/admin/dashboard"} className={classes.linkDecoration}> */}
<Link to={"/adddriver"} className={classes.linkDecoration}>
<Grid container>
<Grid item xs={10}>
<Typography variant="h5" color="textSecondary">
Drivers
</Typography>
</Grid>
<Grid item xs={2}>
<div className={classes.avatar}>
<FaIdCardAlt />
</div>
</Grid>
</Grid>
<Typography className={classes.pos} color="textSecondary">
Setup your Driver(s)
</Typography>
<Typography
className={classes.rightBottom}
color="textSecondary"
>
<KeyboardArrowRightIcon fontSize="large" />
</Typography>
</Link>
</Paper>
</Grid>
<div className={classes.lastPaper}>
<Paper elevation={3}>
<Grid container>
<Grid item xs={11}>
<Typography
variant="body1"
className={classes.body1}
color="textSecondary"
>
skip check out the dashboard
</Typography>
<Typography
variant="body2"
className={classes.body2}
color="textSecondary"
>
you can not be put into circulation, whenyour account will
not be complete
</Typography>
</Grid>
<Grid item xs={1} className={classes.rightIcon}>
<Typography color="textSecondary">
<KeyboardArrowRightIcon fontSize="large" />
</Typography>
</Grid>
</Grid>
</Paper>
</div>
<Box mt={-2} ml={-3}>
<div className="text-right">
<Link className="tabs-nav-item active" to="/admin">
<Button
type="submit"
variant="contained"
className="Button-signin"
>
Next
</Button>
</Link>
</div>
</Box>
</div>
</content>
</div>
);
}
// Expose the company/vehicle setup progress ids from the redux store.
const mapStateToProps = (state) => {
  const { companyID } = state.company;
  const { vehicleID } = state.vehicle;
  return { companyID, vehicleID };
};
export default connect(mapStateToProps)(Home);
|
# CMake flags for an Android aarch64 (API 21) cross-build: Mali GPU + NEON
# enabled, static linking, tests on, training off. Compilers are resolved
# from the NDK toolchain on PATH.
options="-DBUILD_TEST=ON \
    -DUSE_MALI=ON \
    -DUSE_NEON=ON \
    -DUSE_DYNAMIC_LIBRARY=OFF \
    -DUSE_TRAINING=OFF \
    -DCMAKE_C_COMPILER=`which aarch64-linux-android21-clang` \
    -DCMAKE_CXX_COMPILER=`which aarch64-linux-android21-clang++` \
    -DCMAKE_STRIP=`which aarch64-linux-android-strip` "
# Clean out-of-source build directory, configure and build.
rm -rf ./build_gcl_sample
mkdir ./build_gcl_sample
cd ./build_gcl_sample
cmake .. ${options}
make -j33
cd ..
|
#!/bin/bash
# SLEMP panel installer — stage 1: PATH, directory layout, delta-rpm support.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
LANG=en_US.UTF-8
# Directory layout: server software, web roots, logs, and backup targets.
mkdir -p /home/slemp/server
mkdir -p /home/slemp/wwwroot
mkdir -p /home/slemp/wwwlogs
mkdir -p /home/slemp/backup/database
mkdir -p /home/slemp/backup/site
# deltarpm lets yum download binary deltas instead of full packages.
if [ ! -f /usr/bin/applydeltarpm ];then
    yum -y provides '*/applydeltarpm'
    yum -y install deltarpm
fi
# Disable SELinux now and on subsequent boots.
setenforce 0
sed -i 's#SELINUX=enforcing#SELINUX=disabled#g' /etc/selinux/config
yum install -y wget curl vixie-cron lsof
#https need
# Fix: acme.sh installs into /root/.acme.sh which is a DIRECTORY — the
# original tested with -f (regular file), which never matched, so the
# installer was re-run on every invocation. Test with -d instead.
if [ ! -d /root/.acme.sh ];then
    curl https://get.acme.sh | sh
fi
# Open the panel/web/db ports on legacy iptables systems.
if [ -f "/etc/init.d/iptables" ];then
    iptables -I INPUT -p tcp -m state --state NEW -m tcp --dport 22 -j ACCEPT
    iptables -I INPUT -p tcp -m state --state NEW -m tcp --dport 80 -j ACCEPT
    iptables -I INPUT -p tcp -m state --state NEW -m tcp --dport 443 -j ACCEPT
    iptables -I INPUT -p tcp -m state --state NEW -m tcp --dport 888 -j ACCEPT
    iptables -I INPUT -p tcp -m state --state NEW -m tcp --dport 7200 -j ACCEPT
    iptables -I INPUT -p tcp -m state --state NEW -m tcp --dport 3306 -j ACCEPT
    iptables -I INPUT -p tcp -m state --state NEW -m tcp --dport 30000:40000 -j ACCEPT
    service iptables save
    iptables_status=`service iptables status | grep 'not running'`
    if [ "${iptables_status}" == '' ];then
        service iptables restart
    fi
fi
# Keep the firewall stopped during installation.
service iptables stop
# NOTE(review): isVersion is never assigned in this script, so this branch
# always runs — confirm this is intended.
if [ "${isVersion}" == '' ];then
    if [ ! -f "/etc/init.d/iptables" ];then
        yum install firewalld -y
        systemctl enable firewalld
        systemctl start firewalld
        firewall-cmd --permanent --zone=public --add-port=22/tcp
        firewall-cmd --permanent --zone=public --add-port=80/tcp
        firewall-cmd --permanent --zone=public --add-port=443/tcp
        firewall-cmd --permanent --zone=public --add-port=888/tcp
        firewall-cmd --permanent --zone=public --add-port=7200/tcp
        firewall-cmd --permanent --zone=public --add-port=3306/tcp
        firewall-cmd --permanent --zone=public --add-port=30000-40000/tcp
        firewall-cmd --reload
    fi
fi
# Keep the firewall stopped during installation.
systemctl stop firewalld
# Build/runtime dependencies for the panel, PHP image libs and MySQL tooling.
yum install -y libevent libevent-devel mysql-devel libjpeg* libpng* gd* zip unzip libmcrypt libmcrypt-devel
# Fetch the panel sources on first install only.
if [ ! -d /home/slemp/server/panel ];then
    wget -O /tmp/master.zip https://codeload.github.com/basoro/slemp/zip/master
    cd /tmp && unzip /tmp/master.zip
    mv /tmp/slemp-master /home/slemp/server/panel
    rm -rf /tmp/master.zip
    rm -rf /tmp/slemp-master
fi
yum groupinstall -y "Development Tools"
paces="wget python-devel python-imaging libicu-devel zip unzip bzip2-devel gcc libxml2 libxml2-dev libxslt* libjpeg-devel libpng-devel libwebp libwebp-devel lsof pcre pcre-devel vixie-cron crontabs"
yum -y install $paces
# Fix: the original ran "yum -y lsof net-tools.x86_64" with no subcommand,
# which is not a valid yum invocation — "install" was missing.
yum -y install lsof net-tools.x86_64
yum -y install ncurses-devel mysql-dev locate cmake
yum -y install python-devel.x86_64
yum -y install MySQL-python
yum -y install epel-release
yum -y install python36-devel
#if [ ! -f '/usr/bin/pip' ];then
#	wget https://bootstrap.pypa.io/pip/2.7/get-pip.py
#	python get-pip.py
#	pip install --upgrade pip
#	pip install pillow==6.2.2
#fi
# Panel bootstrap: build native libs, install Python deps (inside the
# bundled virtualenv when present), then start/stop once to initialize
# before registering the init script and starting for real.
cd /home/slemp/server/panel/scripts && ./lib.sh
chmod 755 /home/slemp/server/panel/data
if [ -f /home/slemp/server/panel/bin/activate ];then
    cd /home/slemp/server/panel && source /home/slemp/server/panel/bin/activate && pip3 install -r /home/slemp/server/panel/requirements.txt
else
    cd /home/slemp/server/panel && pip3 install -r /home/slemp/server/panel/requirements.txt
fi
cd /home/slemp/server/panel && ./cli.sh start
sleep 5
cd /home/slemp/server/panel && ./cli.sh stop
cd /home/slemp/server/panel && ./scripts/init.d/slemp default
cd /home/slemp/server/panel && ./cli.sh start
|
import {Joker} from './deck'
// A hand contains the joker when removing it actually shrinks the hand.
const hasJoker = (hand) => removeCard(hand, Joker).length < hand.length
const sameValue = (a, b) => {
  // Jokers (value 0) match any value.
  if (a.value === 0 || b.value === 0) return true
  return a.value === b.value
} // UNUSED
const sameSuit = (a, b) => {
  // Jokers (value 0) match any suit.
  if (a.value === 0 || b.value === 0) return true
  return a.suit.short === b.suit.short
}
const cardEquals = (a, b) => {
  // Equal when both value and suit match (jokers wildcard both checks).
  return sameValue(a, b) && sameSuit(a, b)
} // UNUSED
// All cards in the deck sharing the given card's suit (jokers included).
const sameSuitCards = (deck, card) => deck.filter((candidate) => sameSuit(candidate, card))
// New deck with every copy of `card` (exact suit AND value) removed.
const removeCard = (deck, card) =>
  deck.filter((c) => !(c.suit === card.suit && c.value === card.value))
// Sort the hand in place, ascending by card value, and return it.
const sortHand = (hand) => hand.sort((a, b) => a.value - b.value)
// Number of distinct card values present in the hand.
const distinctValuesCount = (hand) => countValues(hand).filter((count) => count > 0).length
// counts[v] = number of cards in `hand` whose value is v (values 0..14).
const countValues = (hand) => {
  const counts = new Array(15).fill(0)
  for (const card of hand) {
    counts[card.value] += 1
  }
  return counts
}
// Product of the per-value counts over the values actually present.
const productOfCardCounts = (hand) => {
  let product = 1
  for (const count of countValues(hand)) {
    if (count > 0) product *= count
  }
  return product
}
// Debug helper: log each card as "<card>(<value>)" under a description.
const logDeck = (deck, description = '') => {
  const labels = deck.map((card) => `${card.toString()}(${card.value})`)
  console.log(description, labels)
}
// Round `number` to `precision` decimal places.
const roundToPrecision = (number, precision = 0) => {
  const factor = 10 ** precision
  return Math.round(number * factor) / factor
}
// Factory for a 5-step bet selector that cycles through fixed stake values.
// Returned interface: increment() advances (wrapping), limit(max) steps the
// index down until the stake fits under `max`, `value`/`index` are live
// getters, values() copies the stake table, toString() yields the stake.
const createBet = () => {
  const bet = {
    index : 0,
    multiplier : 0.2, // NOTE(review): never read after creation — confirm it is needed
    values : [
      0.2,
      0.4,
      0.6,
      0.8,
      1
    ]
  }
  const betInterface = {
    increment : () => {
      bet.index = (bet.index+1) % 5
      // NOTE(review): spreading betInterface copies the getters below as
      // frozen snapshot values — the returned copy does not track later
      // changes; keep using the original interface for live values.
      return {...betInterface}
    },
    limit: (limit = 1) => {
      while (bet.index > 0 && bet.values[bet.index] > limit) {
        bet.index--
      }
      return {...betInterface}
    },
    toString : () => bet.values[bet.index],
    values : () => [...bet.values],
    // Live getter for the current stake.
    get value() {
      return bet.values[bet.index]
    },
    // Live getter for the current step index (0..4).
    get index() {
      return bet.index
    }
  }
  return betInterface
}
// Window-event handler registry. registerHandler returns a 1-based id;
// removeHandler disables and unhooks the handler for that id, returning
// false only when the id was never issued.
const getKeyboardHandler = () => {
  const records = []
  const registerHandler = (eventType, handler) => {
    const record = {
      eventType,
      handler,
      enabled: true,
      wrapper: (event) => {
        // Swallow events once the handler has been removed.
        if (record.enabled) handler(event)
      }
    }
    const id = records.push(record) // push returns the new length => 1-based id
    window.addEventListener(eventType, record.wrapper)
    return id
  }
  const removeHandler = (handlerId) => {
    if (records.length < handlerId) {
      return false
    }
    const record = records[handlerId - 1]
    record.enabled = false
    window.removeEventListener(record.eventType, record.wrapper)
  }
  return {
    registerHandler,
    removeHandler
  }
}
// Public API of the card/hand utility module.
export {
  hasJoker,
  sameValue,
  sameSuit,
  cardEquals,
  sameSuitCards,
  removeCard,
  sortHand,
  distinctValuesCount,
  countValues,
  productOfCardCounts,
  logDeck,
  roundToPrecision,
  createBet,
  getKeyboardHandler,
}
import { ISerializable } from '../interface/Storage';
import { IBoilerplate } from '../interface/Objects';
/** Serializable model for a project boilerplate (template repository). */
export declare class Boilerplate implements ISerializable<IBoilerplate, Boilerplate>, IBoilerplate {
    /** The boilerplate's unique id */
    id: string;
    /** The boilerplate slug (URL-safe identifier) */
    slug: string;
    /** The boilerplate's name */
    name: string;
    /** The boilerplate's git repository url */
    git_url: string;
    /** Build the instance, optionally populating it from a plain object. */
    constructor(object?: IBoilerplate);
    /** Populate this instance from a plain object and return it. */
    fromObject(object: IBoilerplate): Boilerplate;
    /** Serialize this instance back to a plain object. */
    toObject(): IBoilerplate;
}
|
def select_page(items: list, page_number: int, page_size: int) -> list:
    """Return the ``page_number``-th page (1-based) of *items*.

    Pages hold ``page_size`` items each. Returns an empty list for empty
    input, a non-positive page number, or a non-positive page size; a page
    past the end of the list also comes back empty (slicing semantics).
    """
    valid_request = bool(items) and page_number > 0 and page_size > 0
    if not valid_request:
        return []
    offset = page_size * (page_number - 1)
    return items[offset:offset + page_size]
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vertx.java.core.stdio;
import org.vertx.java.core.Handler;
import org.vertx.java.core.Vertx;
/**
* @author <a href="http://tfox.org"><NAME></a>
*/
/**
 * Base for stdio stream wrappers: captures the event-loop context id and the
 * creating thread, and enforces that all operations stay on that thread.
 */
class StreamBase {

  protected final long contextID;
  protected final Thread th;
  protected Handler<Exception> exceptionHandler;

  protected StreamBase() {
    final Long ctx = Vertx.instance.getContextID();
    if (ctx == null) {
      throw new IllegalStateException("Can only be used inside an event loop");
    }
    this.contextID = ctx;
    this.th = Thread.currentThread();
  }

  /** All ops must always be invoked on the creating thread. */
  protected void checkThread() {
    final Thread current = Thread.currentThread();
    if (current != th) {
      throw new IllegalStateException("Invoked with wrong thread, actual: " + current + " expected: " + th);
    }
  }

  /** Install the handler notified of stream errors (same-thread only). */
  public void exceptionHandler(Handler<Exception> handler) {
    checkThread();
    this.exceptionHandler = handler;
  }
}
|
#!/bin/bash -v
# Evaluation driver for EmbraceBERT intent classifiers on noisy (STT-error)
# test sets. Stage 1: derive model/output paths from the chosen variant.
CUDA_ID=0
MODEL_TYPE=embracebertwithkeyvaluequeryconcatatt # Options=[embracebert, embracebertconcatatt, embracebertwithkeyvaluequery, embracebertwithkeyvaluequeryconcatatt]
MODEL_NAME="${MODEL_TYPE}"
DIM_REDUCTION_METHOD=attention # Options = [projection, attention]
P_TYPE="attention_clsquery_weights" # Options = [multinomial, attention_clsquery_weights]
MODEL_NAME="${MODEL_NAME}_${DIM_REDUCTION_METHOD}_p_${P_TYPE}"
LANGUAGE="english" # Options = [english, korean]
# Pick the pretrained checkpoint and results/runs directories per language.
if [[ $LANGUAGE == *"english"* ]]; then
    MODEL_NAME_OR_PATH="bert-base-uncased"
    OUTPUT_DIR="../../results/${MODEL_NAME}/"
    EVAL_DIR="../../results/test_with_complete_results/${MODEL_NAME}/"
    mkdir $EVAL_DIR
    RUN_DIR="../../runs/${MODEL_NAME}/"
    DATA_PATH_NAME="intent_processed"
else
    MODEL_NAME_OR_PATH="bert-base-multilingual-uncased"
    OUTPUT_DIR="../../results_korean/${MODEL_NAME}/"
    EVAL_DIR="../../results_korean/test_with_complete_results/${MODEL_NAME}/"
    mkdir $EVAL_DIR
    RUN_DIR="../../runs_korean/${MODEL_NAME}/"
    DATA_PATH_NAME="korean_intent_processed"
fi
echo $MODEL_NAME_OR_PATH
BS_EVAL=1
# Sweep: train batch size x epochs x dataset x STT noise level x seed x
# TTS engine x STT engine. Each combination evaluates the matching trained
# checkpoint on its "incomplete" (STT-corrupted) test set.
for BS_TRAIN in 8; do
  for EPOCH in 100; do
    for DATASET in chatbot; do
      echo $DATASET
      echo "Evaluating ${DATASET} dataset with incomplete data for ${EPOCH} epochs"
      for NOISE_PERC in 20 40 60 80; do
        for SEED in 1 2 3 4 5 6 7 8 9 10; do
          for TTS in "macsay"; do
            for STT in "sphinx" "witai"; do
              # Mirror the training directory layout for eval outputs/logs.
              RESULT_DIR="${DATASET}/stterror_${NOISE_PERC}/${TTS}_${STT}/${DATASET}_ep${EPOCH}_bs${BS_TRAIN}_seed${SEED}"
              OUT_PATH="${OUTPUT_DIR}/${RESULT_DIR}"
              LOG_DIR_PATH="${RUN_DIR}/${RESULT_DIR}"
              EVAL_PATH_1="${EVAL_DIR}/${DATASET}"
              mkdir $EVAL_PATH_1
              EVAL_PATH="${EVAL_DIR}/${RESULT_DIR}"
              mkdir $EVAL_PATH
              DATA_DIR="../../data/${DATA_PATH_NAME}/nlu_eval/${DATASET}corpus/"
              EVAL_OUTPUT_FILENAME="eval_results"
              # Eval
              CUDA_VISIBLE_DEVICES=$CUDA_ID python ../../run_classifier.py --seed $SEED --p $P_TYPE --dimension_reduction_method $DIM_REDUCTION_METHOD --task_name "${DATASET}_intent" --model_type $MODEL_TYPE --model_name_or_path $MODEL_NAME_OR_PATH --num_train_epochs_frozen_bert 100.0 --logging_steps 1 --do_eval --do_lower_case --data_dir $DATA_DIR --max_seq_length 128 --per_gpu_eval_batch_size=$BS_EVAL --per_gpu_train_batch_size=$BS_TRAIN --learning_rate 2e-5 --num_train_epochs $EPOCH --output_dir $OUT_PATH --overwrite_output_dir --overwrite_cache --save_best --log_dir $LOG_DIR_PATH --eval_type "incomplete_test" --eval_output_dir $EVAL_PATH --eval_output_filename $EVAL_OUTPUT_FILENAME
            done
          done
        done
      done
    done
  done
done
|
# Launch the ms service from its install directory.
# Fix: abort when the directory is missing instead of launching the jar
# from whatever the current working directory happens to be.
cd /opt/ms || exit 1
java -jar ms-1.0.0.jar
<gh_stars>1-10
// Dispatch background-script replies: only 'queryCode_response' is handled.
chrome.runtime.onMessage.addListener(function (e, sender, sendResponse) {
  const { message, data } = e
  if (message === 'queryCode_response') {
    response(data)
  }
})
// Parse an HTML fragment into its first DOM element.
function htmlToElement(html) {
  const template = document.createElement('template');
  // Never return a text node of whitespace as the result
  template.innerHTML = html.trim();
  return template.content.firstChild;
}
// Mark the idx-th grid card as already present in the library: strip its
// link, clear its tag list and show the "already exists" tag instead.
// Fix: box/button/tags/childs were undeclared (implicit globals — a
// ReferenceError in strict mode); they are now properly declared.
function response({ queryResult, idx }) {
  if (queryResult["TotalRecordCount"] >= 1) {
    const box = document.getElementsByClassName("grid-item")[idx];
    const button = htmlToElement('<span class="tag is-error">影片已存在</span>');
    box.getElementsByClassName("box")[0].attributes.removeNamedItem("href");
    const tags = box.getElementsByClassName("tags")[0];
    const childs = tags.childNodes;
    // Remove children back-to-front so the live NodeList indexes stay valid.
    for (let i = childs.length - 1; i >= 0; i--)
      tags.removeChild(childs[i]);
    tags.appendChild(button);
  }
}
// For every card on the page, ask the background script whether its code
// already exists in the library; replies arrive via 'queryCode_response'.
// Fix: `boxes` was undeclared (implicit global) — now declared with const.
(function () {
  const boxes = document.getElementsByClassName("grid-item");
  for (let idx = 0; idx < boxes.length; idx++) {
    const box = boxes[idx];
    const uidNodes = box.getElementsByClassName("uid");
    if (uidNodes.length <= 0)
      continue;
    const code = uidNodes[0].innerHTML;
    chrome.runtime.sendMessage(chrome.runtime.id, {
      message: 'queryCode',
      data: { code, idx }
    });
  }
})();
# Install Docker CE from the official repository (CentOS).
sudo yum install -y yum-utils device-mapper-persistent-data lvm2
sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo
# Fix: -y added so this step runs unattended like the first install above
# instead of stalling on an interactive confirmation prompt.
sudo yum install -y docker-ce
docker -v
|
def calculate_total_margin(theme_spacing: int, table_heading_width: int) -> int:
    """Return the total margin: theme spacing plus the table heading width."""
    total = theme_spacing + table_heading_width
    return total
def select_best_item(collection_items):
    """Return the item with the highest evaluate_item() score.

    Returns None for an empty collection. Ties keep the earliest item.
    """
    best_item = None
    best_value = float('-inf')
    for item in collection_items:
        value = evaluate_item(item)
        if value > best_value:
            best_value = value
            best_item = item
    return best_item


def evaluate_item(item):
    """Score *item*; higher means better.

    Fix: the original function body contained only comments (a SyntaxError).
    This supplies a minimal working default until a real evaluation model is
    plugged in: numeric items score as themselves, everything else scores 0.
    """
    if isinstance(item, (int, float)):
        return item
    return 0
"""
Generate a machine learning algorithm to classify data into three classes
"""
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.neighbors import KNeighborsClassifier
# Define dataset
data = pd.read_csv('dataset.csv')
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
# Split dataset into training and testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 0)
# Standardize data
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
# Train the KNN model
classifier = KNeighborsClassifier(n_neighbors = 5, p = 2, metric = 'minkowski')
classifier.fit(X_train, y_train)
# Make predictions
y_pred = classifier.predict(X_test)
# Evaluate the model
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
cm = confusion_matrix(y_test, y_pred)
accuracy = accuracy_score(y_test, y_pred)
print('Confusion Matrix : \n', cm)
print('Accuracy: ', accuracy) |
<reponame>sophiemarceau/HTJD_Android
package com.huatuo.custom_widget;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import android.content.Context;
import com.huatuo.R;
import com.huatuo.util.Toast_Util;
/** Helpers for XListView pull-to-refresh / load-more state. Singleton. */
public class XlistViewUtil {
    // Fix: double-checked locking is broken without volatile — another
    // thread could observe a partially-constructed instance. The field
    // stays public to preserve the existing interface.
    public static volatile XlistViewUtil instance;

    /** Lazily create and return the shared instance (thread-safe). */
    public static XlistViewUtil getInstance() {
        if (instance == null) {
            synchronized (XlistViewUtil.class) {
                if (instance == null) {
                    instance = new XlistViewUtil();
                }
            }
        }
        return instance;
    }

    /** @return the current time formatted as "MM-dd HH:mm" (China locale). */
    public String getTime() {
        return new SimpleDateFormat("MM-dd HH:mm", Locale.CHINA)
                .format(new Date());
    }

    /**
     * Stop the list view's refresh/load-more animations and decide from the
     * item count whether "load more" stays enabled (translated from the
     * original Chinese javadoc).
     *
     * @param context caller context (only needed by the commented-out toast)
     * @param listView the list view to update; ignored when null
     * @param list the freshly loaded page; fewer than 50 items is treated as
     *             the last page
     */
    public void measureListSize(Context context ,XListView listView, List<?> list) {
        if (null != listView) {
            listView.stopRefresh();
            listView.stopLoadMore();
            if (null == list || list.size() < 50) {
                listView.setPullLoadEnable(false);
                // Toast_Util.showToastAtBootom(context,
                // context.getResources().getString(R.string.xlistview_footer_hint_no_more));
            } else {
                listView.setFooterViewVISIBLE();
                listView.setPullLoadEnable(true);
            }
        }
    }
}
|
#!/usr/bin/env bash
# The strategy for this test is to write to the cluster and roll forward
# with release upgrades
# We track the previous release, current release and the prerelease
#
# We create new topic per round.
# We'll produce and consume from all test topics per round.
# Content is verified via checksum
#
# Usage:
# ./upgrade-test.sh <older stable version> <current stable version> [dev version]
#
# If dev version is set,
# If CI env var is set, we will build fluvio code and upgrade to local develop image
set -exu
#set -e
echo command: $0 $*
# --- Configuration: versions under test and per-round topic names -----------
readonly STABLE_MINUS_ONE=${1:?Please provide a starting cluster version for arg 1}
readonly STABLE=${2:?Please provide a second cluster version for arg 2}
readonly PRERELEASE=${3:-$(cat ../VERSION)-$(git rev-parse HEAD)}
readonly CI_SLEEP=${CI_SLEEP:-10}
readonly CI=${CI:-}
readonly STABLE_MINUS_ONE_TOPIC=${STABLE_MINUS_ONE_TOPIC:-stable-minus-one-cli-topic}
readonly STABLE_TOPIC=${STABLE_TOPIC:-stable-cli-topic}
readonly PRERELEASE_TOPIC=${PRERELEASE_TOPIC:-prerelease-cli-topic}
# Remove generated test data and tear down any running cluster.
function cleanup() {
    echo Clean up test data
    for scratch_glob in './*.txt.tmp' './*.checksum'; do
        rm -f --verbose $scratch_glob
    done
    echo Delete cluster if possible
    fluvio cluster delete || true
}
# If we're in CI, we want to slow down execution
# to give CPU some time to rest, so we don't time out
# If we're in CI, slow execution down so the CPU can rest and we don't
# time out; outside CI this is a no-op.
function ci_check() {
    if [[ -n "$CI" ]]; then
        echo "[CI MODE] Pausing for ${CI_SLEEP} second(s)";
        w | head -1
        sleep ${CI_SLEEP};
    fi
}
# This function is intended to be run first after generating test data
# We install the Stable-1 CLI, and start a cluster
# A topic is created, and we do a produce + consume and validate the checksum of the output
# Round 1: install the Stable-1 CLI, start a fresh cluster, create the
# round's topic, produce data1 and verify the consumed content's checksum.
function validate_cluster_out_of_date_stable() {
    echo "Install (out-of-date) v${STABLE_MINUS_ONE} CLI"
    curl -fsS https://packages.fluvio.io/v1/install.sh | VERSION=${STABLE_MINUS_ONE} bash
    echo "Start v${STABLE_MINUS_ONE} cluster"
    fluvio cluster start
    ci_check;
    fluvio version
    ci_check;
    echo "Create test topic: ${STABLE_MINUS_ONE_TOPIC}"
    fluvio topic create ${STABLE_MINUS_ONE_TOPIC}
    ci_check;
    cat data1.txt.tmp | fluvio produce ${STABLE_MINUS_ONE_TOPIC}
    ci_check;
    echo "Validate test data w/ v${STABLE_MINUS_ONE} CLI matches expected data created BEFORE upgrading cluster + CLI to ${STABLE}"
    # Consume from the beginning (-B) and compare against the precomputed checksum.
    fluvio consume -B -d ${STABLE_MINUS_ONE_TOPIC} | tee output.txt.tmp
    ci_check;
    if cat output.txt.tmp | shasum -c stable-minus-one-cli-stable-minus-one-topic.checksum; then
        echo "${STABLE_MINUS_ONE_TOPIC} topic validated with v${STABLE_MINUS_ONE} CLI"
    else
        echo "Got: $(cat output.txt.tmp | awk '{print $1}')"
        echo "Expected: $(cat stable-minus-one-cli-stable-minus-one-topic.checksum | awk '{print $1}')"
        exit 1
    fi
}
# This function is intended to be run second after the Stable-1 validation
# We install the Stable CLI, and upgrade the existing cluster
# A brand new topic is created, and we do a produce + consume and validate the checksum of the output on that topic
# Then we produce + consume on the Stable-1 topic and validate the checksums on that topic
# Round 2: install the Stable CLI, upgrade the cluster in place, validate a
# brand-new topic, then re-exercise the Stable-1 topic with appended data.
function validate_upgrade_cluster_to_stable() {
    echo "Install (current stable) v${STABLE} CLI"
    curl -fsS https://packages.fluvio.io/v1/install.sh | VERSION=${STABLE} bash
    fluvio cluster upgrade
    ci_check;
    fluvio version
    ci_check;
    echo "Create test topic: ${STABLE_TOPIC}"
    fluvio topic create ${STABLE_TOPIC}
    ci_check;
    cat data2.txt.tmp | fluvio produce ${STABLE_TOPIC}
    ci_check;
    echo "Validate test data w/ v${STABLE} CLI matches expected data created BEFORE upgrading cluster + CLI to v${PRERELEASE}"
    fluvio consume -B -d ${STABLE_TOPIC} | tee output.txt.tmp
    ci_check;
    if cat output.txt.tmp | shasum -c stable-cli-stable-topic.checksum; then
        echo "${STABLE_TOPIC} topic validated with v${STABLE} CLI"
    else
        echo "Got: $(cat output.txt.tmp | awk '{print $1}')"
        echo "Expected: $(cat stable-cli-stable-topic.checksum | awk '{print $1}')"
        exit 1
    fi
    # Exercise older topics
    cat data2.txt.tmp | fluvio produce ${STABLE_MINUS_ONE_TOPIC}
    ci_check;
    echo "Validate v${STABLE_MINUS_ONE_TOPIC} test data w/ ${STABLE} CLI matches expected data AFTER upgrading cluster + CLI to v${STABLE}"
    fluvio consume -B -d ${STABLE_MINUS_ONE_TOPIC} | tee output.txt.tmp
    ci_check;
    if cat output.txt.tmp | shasum -c stable-cli-stable-minus-one-topic.checksum; then
        echo "${STABLE_MINUS_ONE_TOPIC} topic validated with v${STABLE} CLI"
    else
        echo "Got: $(cat output.txt.tmp | awk '{print $1}')"
        # Fix: report the checksum file that was actually compared above
        # (the original echoed stable-cli-stable-topic.checksum here).
        echo "Expected: $(cat stable-cli-stable-minus-one-topic.checksum | awk '{print $1}')"
        exit 1
    fi
}
# This function is intended to be run last after the Stable validation
# We install the Prerelease CLI (either the dev prerelease, or compiled if we're in CI), and upgrade the existing cluster
# Another brand new topic is created, and we do a produce + consume and validate the checksum of the output on that topic
# Then we produce + consume on the Stable + Stable-1 topic and validate the checksums on each of those topics
# Round 3: upgrade to the prerelease (locally built in CI, otherwise the
# latest published dev build), validate a new topic, then re-exercise the
# Stable and Stable-1 topics with appended data.
# Fix: the three produce calls used the bare `fluvio` binary instead of
# $FLUVIO_BIN, so in CI mode they bypassed the locally built CLI.
function validate_upgrade_cluster_to_prerelease() {
    if [[ -n "$CI" ]];
    then
        echo "[CI MODE] Build and test the dev image v${PRERELEASE}"
        pushd ..
        local FLUVIO_BIN="$(pwd)/fluvio"
        $FLUVIO_BIN cluster upgrade --chart-version=${PRERELEASE} --develop
        popd
    else
        echo "Build and test the latest published dev image v${PRERELEASE}"
        echo "Install prerelease v${PRERELEASE} CLI"
        curl -fsS https://packages.fluvio.io/v1/install.sh | VERSION=latest bash
        local FLUVIO_BIN=`which fluvio`
        $FLUVIO_BIN cluster upgrade --chart-version=${PRERELEASE}
    fi
    ci_check;
    $FLUVIO_BIN version
    ci_check;
    echo "Create test topic: ${PRERELEASE_TOPIC}"
    $FLUVIO_BIN topic create ${PRERELEASE_TOPIC}
    ci_check;
    cat data3.txt.tmp | $FLUVIO_BIN produce ${PRERELEASE_TOPIC}
    ci_check;
    echo "Validate test data w/ v${PRERELEASE} CLI matches expected data AFTER upgrading cluster + CLI to v${PRERELEASE}"
    $FLUVIO_BIN consume -B -d ${PRERELEASE_TOPIC} | tee output.txt.tmp
    ci_check;
    if cat output.txt.tmp | shasum -c prerelease-cli-prerelease-topic.checksum; then
        echo "${PRERELEASE_TOPIC} topic validated with v${PRERELEASE} CLI"
    else
        echo "Got: $(cat output.txt.tmp | awk '{print $1}')"
        echo "Expected: $(cat prerelease-cli-prerelease-topic.checksum | awk '{print $1}')"
        exit 1
    fi
    # Exercise older topics
    cat data3.txt.tmp | $FLUVIO_BIN produce ${STABLE_TOPIC}
    ci_check;
    echo "Validate v${STABLE} test data w/ ${PRERELEASE} CLI matches expected data AFTER upgrading cluster + CLI to v${PRERELEASE}"
    $FLUVIO_BIN consume -B -d ${STABLE_TOPIC} | tee output.txt.tmp
    ci_check;
    if cat output.txt.tmp | shasum -c prerelease-cli-stable-topic.checksum; then
        echo "${STABLE_TOPIC} topic validated with v${PRERELEASE} CLI"
    else
        echo "Got: $(cat output.txt.tmp | awk '{print $1}')"
        echo "Expected: $(cat prerelease-cli-stable-topic.checksum | awk '{print $1}')"
        exit 1
    fi
    cat data3.txt.tmp | $FLUVIO_BIN produce ${STABLE_MINUS_ONE_TOPIC}
    ci_check;
    echo "Validate v${STABLE_MINUS_ONE_TOPIC} test data w/ ${PRERELEASE} CLI matches expected data AFTER upgrading cluster + CLI to v${PRERELEASE}"
    $FLUVIO_BIN consume -B -d ${STABLE_MINUS_ONE_TOPIC} | tee output.txt.tmp
    ci_check;
    if cat output.txt.tmp | shasum -c prerelease-cli-stable-minus-one-topic.checksum; then
        echo "${STABLE_MINUS_ONE_TOPIC} topic validated with v${PRERELEASE} CLI"
    else
        echo "Got: $(cat output.txt.tmp | awk '{print $1}')"
        echo "Expected: $(cat prerelease-cli-stable-minus-one-topic.checksum | awk '{print $1}')"
        exit 1
    fi
}
# Create 3 base data files and calculate checksums for the expected states of each of our testing topics
# Build produce/consume with generated data to validate integrity across upgrades
# Generate three random data files plus one precomputed checksum per
# (CLI round x topic) combination. Checksum file naming: <cli>-cli-<topic>-topic,
# covering the cumulative content each topic holds at that round.
function create_test_data() {
    local TEST_DATA_BYTES=${TEST_DATA_BYTES:-100}
    # The baseline files
    for BASE in {1..3}
    do
        echo "Create the baseline file \#${BASE}"
        local RANDOM_DATA=$(tr -cd '[:alnum:]' < /dev/urandom | fold -w${TEST_DATA_BYTES} | head -n1)
        echo ${RANDOM_DATA} | tee -a data${BASE}.txt.tmp
    done
    # Round 1
    # Stable-1
    echo "Create expected topic checksum for Stable-1 cli x Stable-1 topic"
    cat data1.txt.tmp | shasum | tee -a stable-minus-one-cli-stable-minus-one-topic.checksum
    # Round 2
    # Topic 2 expected output
    echo "Create expected topic contents for Stable cli x Stable-1 topic"
    cat data1.txt.tmp data2.txt.tmp | tee -a stable-cli-stable-minus-one-topic.txt.tmp
    echo "Create expected topic checksum for Stable cli x Stable-1 topic"
    cat stable-cli-stable-minus-one-topic.txt.tmp | shasum | tee -a stable-cli-stable-minus-one-topic.checksum
    ## Topic 2 expected output
    cat data2.txt.tmp | shasum | tee -a stable-cli-stable-topic.checksum
    # Round 3
    # Topic 1 expected output
    echo "Create expected topic contents for Prerelease cli x Stable-1 topic"
    cat data1.txt.tmp data2.txt.tmp data3.txt.tmp | tee -a prerelease-cli-stable-minus-one-topic.txt.tmp
    echo "Create expected topic checksum for Prerelease cli x Stable-1 topic"
    cat prerelease-cli-stable-minus-one-topic.txt.tmp | shasum | tee -a prerelease-cli-stable-minus-one-topic.checksum
    # Topic 2 expected output
    echo "Create expected topic contents for Prerelease cli x Stable topic"
    cat data2.txt.tmp data3.txt.tmp | tee -a prerelease-cli-stable-topic.txt.tmp
    echo "Create expected topic checksum for Prerelease cli x Stable topic"
    cat prerelease-cli-stable-topic.txt.tmp | shasum | tee -a prerelease-cli-stable-topic.checksum
    # Topic 3 expected output
    echo "Create expected topic checksum for Prerelease cli x Prerelease topic"
    cat data3.txt.tmp | shasum | tee -a prerelease-cli-prerelease-topic.checksum
}
# Orchestrate the three upgrade rounds: clean state, generate fixtures,
# then validate Stable-1 -> Stable -> Prerelease, cleaning up at the end.
function main() {
    # Change to this script's directory
    pushd "$(dirname "$(readlink -f "$0")")" > /dev/null
    cleanup;
    create_test_data;
    echo "Create cluster @ stable v${STABLE_MINUS_ONE}. Create and validate data."
    validate_cluster_out_of_date_stable;
    echo "Update cluster to stable v${STABLE}. Create and validate data."
    validate_upgrade_cluster_to_stable;
    echo "Update cluster to prerelease v${PRERELEASE}"
    validate_upgrade_cluster_to_prerelease;
    cleanup;
    # Change back to original directory
    popd > /dev/null
}
main;
<filename>Phonebook/src/information/EmailAddress.java
package information;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** Holds an e-mail address and offers format validation. */
public class EmailAddress{

	// Fix: the pattern was recompiled on every validEmail() call; compile it
	// once. Same pattern as before: local part, '@', domain, 2-6 letter TLD.
	private static final Pattern VALID_EMAIL_PATTERN =
			Pattern.compile("^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,6}$");

	private String email;

	public EmailAddress() {}

	public EmailAddress(String email)
	{
		this.email = email;
	}

	public void setEmailAddress(String email)
	{
		this.email = email;
	}

	public String getEmailAddress()
	{
		return this.email;
	}

	/**
	 * @param emailAddress the address to check (independent of the stored one)
	 * @return true when the address matches the expected e-mail format
	 */
	public boolean validEmail(String emailAddress)
	{
		Matcher matcher = VALID_EMAIL_PATTERN.matcher(emailAddress);
		return matcher.find();
	}

	public String toString()
	{
		return this.email;
	}
}
|
import kchandra423.kImages.KImage;
import kchandra423.kImages.KImageBuilder;
import processing.core.PApplet;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
/** Demo sketch: loads every image in ./res and draws them in a 3-wide grid. */
public class DrawingSurface extends PApplet {
    ArrayList<KImage> examples = new ArrayList<>();
    // KImage k;

    public void setup() {
        File resourceDir = new File("res");
        // Fix: File.listFiles() returns null when the directory is missing
        // or unreadable — the original for-each then threw an NPE.
        File[] files = resourceDir.listFiles();
        if (files == null) {
            return;
        }
        for (File img : files) {
            try {
                KImage image = KImageBuilder.getKImage(img.getAbsolutePath());
                image.resize(250, 250);
                examples.add(image);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    public void draw() {
        // k.draw(this);
        background(200);
        // Lay the images out left-to-right, top-to-bottom in 250x250 cells.
        for (int i = 0; i < examples.size(); i++) {
            float x = i % 3 * 250;
            float y = (i / 3) * 250;
            examples.get(i).moveTo(x, y);
            examples.get(i).draw(this);
        }
    }
}
|
<filename>src/com/opengamma/analytics/financial/instrument/annuity/AnnuityCouponIborSpreadDefinition.java<gh_stars>10-100
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.instrument.annuity;
import java.util.ArrayList;
import java.util.List;
import javax.time.calendar.Period;
import javax.time.calendar.ZonedDateTime;
import org.apache.commons.lang.ObjectUtils;
import com.opengamma.analytics.financial.instrument.index.IborIndex;
import com.opengamma.analytics.financial.instrument.payment.CouponIborSpreadDefinition;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.Annuity;
import com.opengamma.analytics.financial.interestrate.payments.derivative.Coupon;
import com.opengamma.analytics.financial.schedule.ScheduleCalculator;
import com.opengamma.financial.convention.businessday.BusinessDayConvention;
import com.opengamma.financial.convention.daycount.DayCount;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.timeseries.DoubleTimeSeries;
/**
* A wrapper class for an AnnuityDefinition containing CouponIborSpreadDefinition.
*/
public class AnnuityCouponIborSpreadDefinition extends AnnuityDefinition<CouponIborSpreadDefinition> {
  /** Empty array for array conversion of list */
  protected static final Coupon[] EMPTY_ARRAY_COUPON = new Coupon[0];
  // Ibor index common to all coupons; taken from the first payment in the constructor.
  private final IborIndex _iborIndex;

  /**
   * Constructor from a list of Ibor-like coupons.
   * @param payments The Ibor coupons.
   */
  public AnnuityCouponIborSpreadDefinition(final CouponIborSpreadDefinition[] payments) {
    super(payments);
    // Assumes at least one payment: payments[0] throws otherwise.
    _iborIndex = payments[0].getIndex();
  }

  /**
   * Annuity builder from the conventions and common characteristics.
   * @param settlementDate The settlement date.
   * @param tenor The tenor.
   * @param notional The notional.
   * @param index The Ibor index.
   * @param spread The common spread.
   * @param isPayer The payer flag.
   * @return The Ibor annuity.
   */
  public static AnnuityCouponIborSpreadDefinition from(final ZonedDateTime settlementDate, final Period tenor, final double notional, final IborIndex index, final double spread,
      final boolean isPayer) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(index, "index");
    ArgumentChecker.notNull(tenor, "tenor");
    // Build the plain (no-spread) annuity first, then wrap each coupon with the common spread.
    final AnnuityCouponIborDefinition iborAnnuity = AnnuityCouponIborDefinition.from(settlementDate, tenor, notional, index, isPayer);
    final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[iborAnnuity.getPayments().length];
    for (int loopcpn = 0; loopcpn < iborAnnuity.getPayments().length; loopcpn++) {
      coupons[loopcpn] = CouponIborSpreadDefinition.from(iborAnnuity.getNthPayment(loopcpn), spread);
    }
    return new AnnuityCouponIborSpreadDefinition(coupons);
  }

  /**
   * Annuity builder from the conventions and common characteristics.
   * @param settlementDate The settlement date.
   * @param maturityDate The annuity maturity date.
   * @param paymentPeriod The payment period.
   * @param notional The notional.
   * @param index The Ibor index.
   * @param isPayer The payer flag.
   * @param businessDayConvention The leg business day convention.
   * @param endOfMonth The leg end-of-month convention.
   * @param dayCount The coupons day count.
   * @param spread The spread rate.
   * @return The Ibor annuity.
   */
  public static AnnuityCouponIborSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime maturityDate, final Period paymentPeriod, final double notional, final IborIndex index,
      final boolean isPayer, final BusinessDayConvention businessDayConvention, final boolean endOfMonth, final DayCount dayCount, final double spread) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(maturityDate, "maturity date");
    ArgumentChecker.notNull(paymentPeriod, "payment period");
    ArgumentChecker.notNull(index, "index");
    ArgumentChecker.notNull(businessDayConvention, "Business day convention");
    ArgumentChecker.notNull(dayCount, "Day count convention");
    ArgumentChecker.isTrue(notional > 0, "notional <= 0");
    // Payer legs carry a negative notional.
    final double sign = isPayer ? -1.0 : 1.0;
    final ZonedDateTime[] paymentDates = ScheduleCalculator.getAdjustedDateSchedule(settlementDate, maturityDate, paymentPeriod, true, false, businessDayConvention, index.getCalendar(), endOfMonth);
    final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[paymentDates.length];
    // First coupon accrues from the settlement date; its fixing date lies the index spot lag before settlement.
    ZonedDateTime fixingDate = ScheduleCalculator.getAdjustedDate(settlementDate, -index.getSpotLag(), index.getCalendar());
    coupons[0] = new CouponIborSpreadDefinition(index.getCurrency(), paymentDates[0], settlementDate, paymentDates[0], dayCount.getDayCountFraction(settlementDate, paymentDates[0]), sign * notional,
        fixingDate, index, spread);
    // Each subsequent coupon accrues from the previous payment date, with its fixing the spot lag before that date.
    for (int loopcpn = 1; loopcpn < paymentDates.length; loopcpn++) {
      fixingDate = ScheduleCalculator.getAdjustedDate(paymentDates[loopcpn - 1], -index.getSpotLag(), index.getCalendar());
      coupons[loopcpn] = new CouponIborSpreadDefinition(index.getCurrency(), paymentDates[loopcpn], paymentDates[loopcpn - 1], paymentDates[loopcpn], dayCount.getDayCountFraction(
          paymentDates[loopcpn - 1], paymentDates[loopcpn]), sign * notional, fixingDate, index, spread);
    }
    return new AnnuityCouponIborSpreadDefinition(coupons);
  }

  /**
   * Annuity builder from the conventions and common characteristics.
   * @param settlementDate The settlement date.
   * @param maturityDate The annuity maturity date.
   * @param notional The notional.
   * @param index The Ibor index.
   * @param isPayer The payer flag.
   * @param spread The common spread.
   * @return The Ibor annuity.
   */
  public static AnnuityCouponIborSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime maturityDate, final double notional, final IborIndex index, final double spread,
      final boolean isPayer) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(index, "index");
    ArgumentChecker.notNull(maturityDate, "maturity date");
    // Build the plain (no-spread) annuity first, then wrap each coupon with the common spread.
    final AnnuityCouponIborDefinition iborAnnuity = AnnuityCouponIborDefinition.from(settlementDate, maturityDate, notional, index, isPayer);
    final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[iborAnnuity.getPayments().length];
    for (int loopcpn = 0; loopcpn < iborAnnuity.getPayments().length; loopcpn++) {
      coupons[loopcpn] = CouponIborSpreadDefinition.from(iborAnnuity.getNthPayment(loopcpn), spread);
    }
    return new AnnuityCouponIborSpreadDefinition(coupons);
  }

  /**
   * Annuity builder from an existing Ibor annuity, wrapping each coupon with a zero spread.
   * @param iborAnnuity The source Ibor annuity.
   * @return The equivalent spread annuity with all spreads set to 0.
   */
  public static AnnuityCouponIborSpreadDefinition from(final AnnuityCouponIborDefinition iborAnnuity) {
    final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[iborAnnuity.getPayments().length];
    for (int loopcpn = 0; loopcpn < iborAnnuity.getPayments().length; loopcpn++) {
      coupons[loopcpn] = CouponIborSpreadDefinition.from(iborAnnuity.getNthPayment(loopcpn), 0);
    }
    return new AnnuityCouponIborSpreadDefinition(coupons);
  }

  /**
   * Returns the underlying ibor index
   * @return The underlying ibor index
   */
  public IborIndex getIborIndex() {
    return _iborIndex;
  }

  @Override
  public Annuity<Coupon> toDerivative(final ZonedDateTime date, final DoubleTimeSeries<ZonedDateTime> indexFixingTS, final String... yieldCurveNames) {
    ArgumentChecker.notNull(date, "date");
    final List<Coupon> resultList = new ArrayList<Coupon>();
    final CouponIborSpreadDefinition[] payments = getPayments();
    // Keep only the coupons whose payment date is on or after the valuation date.
    for (int loopcoupon = 0; loopcoupon < payments.length; loopcoupon++) {
      if (!date.isAfter(payments[loopcoupon].getPaymentDate())) {
        resultList.add(payments[loopcoupon].toDerivative(date, indexFixingTS, yieldCurveNames));
      }
    }
    return new Annuity<Coupon>(resultList.toArray(EMPTY_ARRAY_COUPON));
  }

  @Override
  public Annuity<Coupon> toDerivative(final ZonedDateTime date, final String... yieldCurveNames) {
    ArgumentChecker.notNull(date, "date");
    final List<Coupon> resultList = new ArrayList<Coupon>();
    // Same filtering as above, without a fixing time series (no coupon may have fixed yet).
    for (int loopcoupon = 0; loopcoupon < getPayments().length; loopcoupon++) {
      if (!date.isAfter(getPayments()[loopcoupon].getPaymentDate())) {
        resultList.add(getPayments()[loopcoupon].toDerivative(date, yieldCurveNames));
      }
    }
    return new Annuity<Coupon>(resultList.toArray(EMPTY_ARRAY_COUPON));
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    result = prime * result + _iborIndex.hashCode();
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (!super.equals(obj)) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final AnnuityCouponIborSpreadDefinition other = (AnnuityCouponIborSpreadDefinition) obj;
    if (!ObjectUtils.equals(_iborIndex, other._iborIndex)) {
      return false;
    }
    return true;
  }
}
|
// Doxygen-generated search index fragment: each entry maps a lowercased
// symbol name to its display text and the documentation anchor(s) where
// that symbol appears. Do not edit by hand — regenerated by Doxygen.
var searchData=
[
  ['onebyfftlen',['onebyfftLen',['../structarm__cfft__radix2__instance__f32.html#a1d3d289d47443e597d88a40effd14b8f',1,'arm_cfft_radix2_instance_f32::onebyfftLen()'],['../structarm__cfft__radix4__instance__f32.html#ab9eed39e40b8d7c16381fbccf84467cd',1,'arm_cfft_radix4_instance_f32::onebyfftLen()']]],
  ['outlen',['outLen',['../arm__convolution__example__f32_8c.html#a9c49c44c8bc5c432d220d33a26b4b589',1,'arm_convolution_example_f32.c']]],
  ['outputq31',['outputQ31',['../arm__graphic__equalizer__example__q31_8c.html#a9862488450f2547b07aee8035d6b4d8a',1,'arm_graphic_equalizer_example_q31.c']]]
];
|
# Withdraws an application choice: records the state change inside a
# transaction, then notifies the candidate (when this was their last live
# choice), the relevant provider users, and any matching UCAS record.
#
# Fix: the original mixed direct ivar access (@application_choice) with the
# private reader; everything now goes through the reader for consistency.
class WithdrawApplication
  def initialize(application_choice:)
    @application_choice = application_choice
  end

  def save!
    ActiveRecord::Base.transaction do
      ApplicationStateChange.new(application_choice).withdraw!
      application_choice.update!(withdrawn_at: Time.zone.now)
      SetDeclineByDefault.new(application_form: application_choice.application_form).call
    end

    # Notifications run outside the transaction so a mailer failure cannot
    # roll back the withdrawal itself.
    if application_choice.application_form.ended_without_success?
      CandidateMailer.withdraw_last_application_choice(application_choice.application_form).deliver_later
      StateChangeNotifier.new(:withdrawn, application_choice).application_outcome_notification
    end
    send_email_notification_to_provider_users(application_choice)
    resolve_ucas_match(application_choice)
  end

  private

  attr_reader :application_choice

  # Emails every provider user configured to receive notifications for this
  # choice, including users at the ratifying provider.
  def send_email_notification_to_provider_users(application_choice)
    NotificationsList.for(application_choice, include_ratifying_provider: true).each do |provider_user|
      ProviderMailer.application_withdrawn(provider_user, application_choice).deliver_later
    end
  end

  # Resolves a ready UCAS match once every duplicate application has been
  # withdrawn on the Apply side.
  def resolve_ucas_match(application_choice)
    match = UCASMatches::RetrieveForApplicationChoice.new(application_choice).call
    if match && match.ready_to_resolve? && match.duplicate_applications_withdrawn_from_apply?
      UCASMatches::ResolveOnApply.new(match).call
    end
  end
end
|
<filename>pkg/common/common_test.go
/**
* Copyright 2020 The IcecaneDB Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package common
import (
"fmt"
"testing"
"github.com/dr0pdb/icecanedb/test"
"github.com/stretchr/testify/assert"
)
// TestUint64BytesConversion checks that a uint64 survives a round trip
// through the byte-slice encoding.
func TestUint64BytesConversion(t *testing.T) {
	// NOTE(review): this test performs no file I/O, so the test-directory
	// setup/teardown below looks unnecessary — confirm before removing.
	test.CreateTestDirectory(test.TestDirectory)
	defer test.CleanupTestDirectory(test.TestDirectory)
	n := uint64(121)
	b := U64ToByteSlice(n)
	assert.Equal(t, n, ByteSliceToU64(b), fmt.Sprintf("Unexpected error in uint64-bytes comparison; expected %d actual %d", n, ByteSliceToU64(b)))
}
// TestBoolByteConversion round-trips both boolean values through the
// single-byte encoding and checks each decodes unchanged.
func TestBoolByteConversion(t *testing.T) {
	encodedTrue := BoolToByte(true)
	assert.True(t, ByteToBool(encodedTrue), "unexpected value of decoded b. expected true, found false")

	encodedFalse := BoolToByte(false)
	assert.False(t, ByteToBool(encodedFalse), "unexpected value of decoded b. expected false, found true")
}
func TestFloat64ByteConversion(t *testing.T) {
f := float64(32.01)
b := Float64ToByteSlice(f)
df := ByteSliceToFloat64(b)
assert.Equal(t, f, df, "unexpected value of decoded float64")
}
|
#!/bin/bash
#-------------------------------------------------------------------------------#
#-------------------------- I N F O & L I C E N S E --------------------------#
#-------------------------------------------------------------------------------#
# Energized Protection - ad.porn.malware blocking.
# Packs Building Script
# Git: http://go.energized.pro/github
# WEB: https://energized.pro
# @adroitadorkhan
# License: CC BY-NC-SA 4.0
#-------------------------------------------------------------------------------#
#--------------------------------- C O L O R S ---------------------------------#
#-------------------------------------------------------------------------------#
# ANSI color escape sequences used for console output below.
BL='\033[1;30m' # Black
R='\033[1;31m' # Red
G='\033[1;32m' # Green
Y='\033[1;33m' # Yellow
B='\033[1;34m' # Blue
P='\033[1;35m' # Purple
C='\033[1;36m' # Cyan
W='\033[1;37m' # White
N='\033[0m' # No Color
#-------------------------------------------------------------------------------#
#------------------------------ V A R I A B L E S ------------------------------#
#-------------------------------------------------------------------------------#
# Paths for input filter sources and the generated output formats.
date=$(date +%d.%m.%Y)
fileDir=../assets/sources/filter
oFileDir=./
whitelist=whitelist
whitelistURL="https://raw.githubusercontent.com/AdroitAdorKhan/EnergizedProtection/master/core/filter/whitelist"
formats=formats
hosts=$formats/hosts
hostsTXT=$formats/hosts.txt
hostsV6=$formats/hosts-ipv6.txt
domains=$formats/domains.txt
filter=$formats/filter
dnsMasq=$formats/dnsmasq.conf
dnsMasqIPV6=$formats/dnsmasq-ipv6.conf
unbound=$formats/unbound.conf
rpz=$formats/rpz.txt
oneline=$formats/one-line.txt
file=list
fileTemp=list.temp
urls=urls
footer=$formats/footer
footerAB=$formats/footerAB
footerRPZ=$formats/footerRPZ
footerOL=$formats/footerOL
temp=$formats/temp
atemp=$formats/a.temp
wtemp=$formats/w.temp
# NOTE(review): the "2>/dev/null" below is a no-op on a plain variable
# assignment (assignments emit nothing on stderr) — looks like a leftover
# from an earlier command form.
divider='------------------------------------------------------------' 2>/dev/null
#-------------------------------------------------------------------------------#
#--------------------------- P A C K D E T A I L S ----------------------------#
#-------------------------------------------------------------------------------#
#----------------------------- FILL THE INFO HERE ------------------------------#
#-------------------------------------------------------------------------------#
pack="ultimate"
dividerTiny="--------------------------------------------"
headerLogo="# _____ _________ _____________ _______\n# / __/ |/ / __/ _ \/ ___/ _/_ / / __/ _ \ \n# / _// , / _// , _/ (_ // / / /_/ _// // /\n# /___/_/|_/___/_/|_|\___/___/ /___/___/____/\n#\n# P R O T E C T I O N\n# $dividerTiny\n# ad.porn.malware blocking.\n# ------\n# Merged collection of hosts from\n# reputable sources.\n# $dividerTiny\n# energized.pro\n# github.com/EnergizedProtection/block\n# $dividerTiny\n#\n# Let's make an annoyance free\n# better open internet. Altogether.\n# ------\n#"
headerLogoAB="! _____ _________ _____________ _______\n! / __/ |/ / __/ _ \/ ___/ _/_ / / __/ _ \ \n! / _// , / _// , _/ (_ // / / /_/ _// // /\n! /___/_/|_/___/_/|_|\___/___/ /___/___/____/\n!\n! P R O T E C T I O N\n! $dividerTiny\n! ad.porn.malware blocking.\n! ------\n! Merged collection of hosts from\n! reputable sources.\n! $dividerTiny\n! energized.pro\n! github.com/EnergizedProtection/block\n! $dividerTiny\n!\n! Let's make an annoyance free\n! better open internet. Altogether.\n! ------\n!"
headerLogoRPZ="; _____ _________ _____________ _______\n; / __/ |/ / __/ _ \/ ___/ _/_ / / __/ _ \ \n; / _// , / _// , _/ (_ // / / /_/ _// // /\n; /___/_/|_/___/_/|_|\___/___/ /___/___/____/\n;\n; P R O T E C T I O N\n; $dividerTiny\n; ad.porn.malware blocking.\n; ------\n; Merged collection of hosts from\n; reputable sources.\n; $dividerTiny\n; energized.pro\n; github.com/EnergizedProtection/block\n; $dividerTiny\n;\n; Let's make an annoyance free\n; better open internet. Altogether.\n; ------\n;"
localHost="# $dividerTiny\n# L O C A L H O S T\n# $dividerTiny\n127.0.0.1 localhost\n127.0.0.1 localhost.localdomain\n127.0.0.1 local\n255.255.255.255 broadcasthost\n::1 localhost\n::1 ip6-localhost\n::1 ip6-loopback\nfe80::1%lo0 localhost\nff00::0 ip6-localnet\nff00::0 ip6-mcastprefix\nff02::1 ip6-allnodes\nff02::2 ip6-allrouters\nff02::3 ip6-allhosts\n0.0.0.0 0.0.0.0\n# $dividerTiny"
localHostRPZ="; $dividerTiny\n; R P Z\n; $dividerTiny\n\$TTL 2h\n@ IN SOA localhost. root.localhost. (1 6h 1h 1w 2h)\n IN NS localhost.\n; $dividerTiny"
f1="_hosts based: based on Hosts file."
f2="_any device: compatible with all devices."
f3="_blockings: strictly blocks web annoyances."
f4="_formats: most used formats."
f5="_speed: reduces page loading time."
f6="_privacy: increases privacy."
f7="_saves expense: decreases data consumption."
f8="_clean: no extra abracadabra!"
ador="@AdroitAdorKhan - Head Developer & Maintainer"
avinash="@AvinashReddy3108 - Developer"
badmojr="@badmojr - Maintainer"
packName="Energized Ultimate Protection"
packDesc="Flagship Protection Pack from Energized Protection."
buildVersion=$(date +%y.%m.%j)
releaseVersion=$(date +%j)
packCode="EUL71M473-P"
# NOTE(review): the file header above says "CC BY-NC-SA 4.0" but this field
# says MIT — confirm which license actually applies to the pack output.
license="MIT, https://energized.pro/license"
raw="https://block.energized.pro/$pack/formats"
updateDate=$(date +"%a, %d %b %y %H:%M:%S %Z")
expiry="1 day (update frequency)"
# NOTE(review): bash's builtin echo does not expand \n/\033 escapes unless
# given -e; every echo in this script that embeds \n or a color variable
# appears to rely on escape expansion — confirm whether "echo -e" (or
# printf) was intended before running this under bash.
echo -n "# $dividerTiny\n# E N E R G I Z E D E N D S\n# $dividerTiny\n#\n# - - - - S T A Y E N E R G I Z E D - - - -" >> $footer
echo -n "! $dividerTiny\n! E N E R G I Z E D E N D S\n! $dividerTiny\n!\n! - - - - S T A Y E N E R G I Z E D - - - -" >> $footerAB
echo -n "; $dividerTiny\n; E N E R G I Z E D E N D S\n; $dividerTiny\n;\n; - - - - S T A Y E N E R G I Z E D - - - -" >> $footerRPZ
echo -n "\n# $dividerTiny\n# E N E R G I Z E D E N D S\n# $dividerTiny\n#\n# - - - - S T A Y E N E R G I Z E D - - - -" >> $footerOL
#-------------------------------------------------------------------------------#
#--------------------------- W H I T E L I S T I N G ---------------------------#
#-------------------------------------------------------------------------------#
# Fetch the shared whitelist, then normalise it: strip comments and blank
# lines, drop CR characters and trailing spaces, sort and de-duplicate.
wget -qO $whitelist $whitelistURL
# Remove Headers & Comments
sed '/#/d' -i $whitelist
# Remove Blank/Empty Lines
sed '/^$/d' -i $whitelist
# Removes Whitespace
cat $whitelist | tr -d '\r' >> $temp
# Sort, Remove Duplicate and Write
# ("tee" with no file operand just forwards stdout; the redirection writes the result.)
sed -i 's/ *$//' $temp && sort $temp |uniq |tee > $whitelist
# Clear Cache
rm -f $temp
#-------------------------------------------------------------------------------#
#-------------------------------- S O U R C E S --------------------------------#
#-------------------------------------------------------------------------------#
# Concatenate every upstream blocklist into one working file.
{ wget -qO- https://raw.githubusercontent.com/AdroitAdorKhan/EnergizedProtection/master/core/hosts; \
wget -qO- https://raw.githubusercontent.com/Yhonay/antipopads/master/hosts; \
} > $file
{ cat $oFileDir/domains.txt; \
cat $fileDir/1hosts.cf-pro.txt; \
cat $fileDir/280blocker.txt; \
cat $fileDir/ad-wars.txt; \
cat $fileDir/adaway.org.txt; \
cat $fileDir/ador-energized.txt; \
cat $fileDir/adguard-dns.txt; \
cat $fileDir/adguard-mobile-ads.txt; \
cat $fileDir/adguard-mobile-ads-filter.txt; \
cat $fileDir/adguard-mobile-specific.txt; \
cat $fileDir/adguard-safari.txt; \
cat $fileDir/adguard-tracking-protection.txt; \
cat $fileDir/adguard-tracking-firstparty.txt; \
cat $fileDir/adguard-tracking-servers.txt; \
cat $fileDir/antipopads.txt; \
cat $fileDir/anudeep-adservers.txt; \
cat $fileDir/barbblock.txt; \
cat $fileDir/better-fyi-trackers.txt; \
cat $fileDir/bjornstar-hosts.txt; \
cat $fileDir/blackbook.txt; \
cat $fileDir/cybercrime.txt; \
cat $fileDir/digitalside-threat-intel.txt; \
cat $fileDir/disconnect.me-ad.txt; \
cat $fileDir/disconnect.me-malvertising.txt; \
cat $fileDir/disconnect.me-tracking.txt; \
cat $fileDir/easylist.txt; \
cat $fileDir/easylist-adservers.txt; \
cat $fileDir/easyprivacy.txt; \
cat $fileDir/easyprivacy-specific.txt; \
cat $fileDir/easyprivacy-thirdparty.txt; \
cat $fileDir/easyprivacy-tracking-servers.txt; \
cat $fileDir/eth-phishing-detect.txt; \
cat $fileDir/fademind-add.2o7net.txt; \
cat $fileDir/fademind-add.dead.txt; \
cat $fileDir/fademind-add.risk.txt; \
cat $fileDir/fademind-add.spam.txt; \
cat $fileDir/gnuzilla.txt; \
cat $fileDir/hexxium-creations-threat-list.txt; \
cat $fileDir/hosts-blocklists.txt; \
cat $fileDir/kadhosts.txt; \
cat $fileDir/matomo.org-spammers.txt; \
cat $fileDir/molinero-hblock.txt; \
cat $fileDir/neohosts.txt; \
cat $fileDir/oisd.nl-basic.txt; \
cat $fileDir/openphish.txt; \
cat $fileDir/pgl.yoyo.org.txt; \
cat $fileDir/prigent-ads.txt; \
cat $fileDir/prigent-cryptojacking.txt; \
cat $fileDir/prigent-gambling.txt; \
cat $fileDir/prigent-phishing.txt; \
cat $fileDir/quidsup-notrack.txt; \
cat $fileDir/quidsup-notrack-malware.txt; \
cat $fileDir/someonewhocares.org.txt; \
cat $fileDir/sbc.txt; \
cat $fileDir/stopforumspam.com.txt; \
cat $fileDir/windows-spy-blocker.txt; \
cat $fileDir/winhelp2002.mvps.org.txt; \
cat $fileDir/youslist.txt; \
} >> $file
#-------------------------------------------------------------------------------#
#--------------------------- P R O C E S S P A C K ----------------------------#
#-------------------------------------------------------------------------------#
# Normalise the merged list down to bare domain names, one per line.
# Remove 0.0.0.0
sed 's/0.0.0.0 //' -i $file
# Remove Headers & Comments
sed '/#/d' -i $file
# Remove Blog Domains and Unwanted Craps
sed '/.blogspot./d;/.wixsite./d;/.wordpress./d;/\//d;/:/d;/(/d;/|/d;/\[/d;/\]/d' -i $file
# Remove Blank/Empty Lines
sed '/^$/d' -i $file
# Removes Whitespace
cat $file | tr -d '\r' >> $temp
# Sort, Remove Duplicate and Write
sed -i 's/ *$//' $temp && sort $temp |uniq |tee > $file
# Clear Cache
rm -f $temp
# Remove Whitelisted Domains
# (comm(1) needs both inputs sorted; $file and $whitelist were both sorted above.)
comm -23 $file $whitelist > $temp
mv -f $temp $file
# Remove Blank/Empty Lines
sed '/^$/d' -i $file
# Build Diff Formats
# Derive every distribution format from the cleaned domain list.
awk '$0="0.0.0.0 "$0' $file > $hosts
awk '$0="0.0.0.0 "$0' $file > $hostsTXT
awk '$0=":: "$0' $file > $hostsV6
awk '$0=$0' $file > $domains
awk '$0="||"$0"^"' $file > $filter
awk '$0="address=/"$0"/0.0.0.0/"' $file > $dnsMasq
awk '$0="address=/"$0"/::1/"' $file > $dnsMasqIPV6
awk '$0="local-zone: \""$0"\" static"' $file > $unbound
awk '$0=$0" CNAME ."' $file > $rpz
awk 'FNR!=1{print l}{l=$0};END{ORS="";print l}' ORS=',' $file > $oneline
#-------------------------------------------------------------------------------#
#--------------------------------- E C H O S -----------------------------------#
#-------------------------------------------------------------------------------#
# Read Total Domain Number
# Count entries (non-comment, non-blank lines) and insert thousands
# separators for display.
totaldomains=$(awk '!/^#/ && !/^$/{c++}END{print c}' $file | awk '{ len=length($0); res=""; for (i=0;i<=len;i++) { res=substr($0,len-i+1,1) res; if (i > 0 && i < len && i % 3 == 0) { res = "," res } }; print res }')
# Echo Pack, Domains and Size
echo $Y"! _____ _________ _____________ _______\n! / __/ |/ / __/ _ \/ ___/ _/_ / / __/ _ \ \n! / _// , / _// , _/ (_ // / / /_/ _// // /\n! /___/_/|_/___/_/|_|\___/___/ /___/___/____/\n!\n! P R O T E C T I O N"$N
echo $Y"! $dividerTiny"$N
echo $Y"! B U I L D I N G P A C K S"$N
echo $Y"! $dividerTiny"$N
echo $G"! Pack: "$N$Y"$packName"$N
echo $G"! Domains: "$N$W"$totaldomains"$N
echo $G"! Version: "$N$W"$buildVersion"$N
echo $Y"! $dividerTiny"$N
#-------------------------------------------------------------------------------#
#-------------------------- B U I L D F O R M A T S ---------------------------#
#-------------------------------------------------------------------------------#
# Each section below prepends a format-specific header to one of the files
# generated above and appends the matching footer.
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"hosts"$N$G" Format"$N
# Hosts Header
echo "$headerLogo
# $dividerTiny
# P A C K D E T A I L S
# $dividerTiny
# Title: $packName
# Description: $packDesc
# Format: hosts
# Version: $buildVersion
# Release: $releaseVersion
# Entries: $totaldomains
# Pack Code: $packCode
# License: $license
# Updated: $updateDate
# RAW: $raw/hosts
# $dividerTiny
#
$localHost
#
# $dividerTiny
# F E A T U R E S
# $dividerTiny
# $f1\n# $f2\n# $f3\n# $f4\n# $f5\n# $f6\n# $f7\n# $f8
# $dividerTiny
#
# $dividerTiny
# T E A M B O L T Z - meet the team
# $dividerTiny
# $ador\n# $avinash\n# $badmojr
# $dividerTiny
#
# $dividerTiny
# E N E R G I Z E D B E G I N S
# $dividerTiny" >> $temp
# Build Hosts
cat $temp $hosts $footer > $atemp
mv -f $atemp $hosts
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"hosts text"$N$G" Format"$N
# Hosts.txt Header
echo "$headerLogo
# $dividerTiny
# P A C K D E T A I L S
# $dividerTiny
# Title: $packName
# Description: $packDesc
# Format: hosts
# Version: $buildVersion
# Release: $releaseVersion
# Entries: $totaldomains
# Pack Code: $packCode
# License: $license
# Updated: $updateDate
# RAW: $raw/hosts.txt
# $dividerTiny
#
$localHost
#
# $dividerTiny
# F E A T U R E S
# $dividerTiny
# $f1\n# $f2\n# $f3\n# $f4\n# $f5\n# $f6\n# $f7\n# $f8
# $dividerTiny
#
# $dividerTiny
# T E A M B O L T Z - meet the team
# $dividerTiny
# $ador\n# $avinash\n# $badmojr
# $dividerTiny
#
# $dividerTiny
# E N E R G I Z E D B E G I N S
# $dividerTiny" >> $temp
# Build Hosts
cat $temp $hostsTXT $footer > $atemp
mv -f $atemp $hostsTXT
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"hosts ipv6"$N$G" Format"$N
# Hosts IPV6 Header
echo "$headerLogo
# $dividerTiny
# P A C K D E T A I L S
# $dividerTiny
# Title: $packName
# Description: $packDesc
# Format: hosts IPV6
# Version: $buildVersion
# Release: $releaseVersion
# Entries: $totaldomains
# Pack Code: $packCode
# License: $license
# Updated: $updateDate
# RAW: $raw/hosts-ipv6.txt
# $dividerTiny
#
$localHost
#
# $dividerTiny
# F E A T U R E S
# $dividerTiny
# $f1\n# $f2\n# $f3\n# $f4\n# $f5\n# $f6\n# $f7\n# $f8
# $dividerTiny
#
# $dividerTiny
# T E A M B O L T Z - meet the team
# $dividerTiny
# $ador\n# $avinash\n# $badmojr
# $dividerTiny
#
# $dividerTiny
# E N E R G I Z E D B E G I N S
# $dividerTiny" >> $temp
# Build Hosts
cat $temp $hostsV6 $footer > $atemp
mv -f $atemp $hostsV6
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"domain list"$N$G" Format"$N
# Domain List Header
echo "$headerLogo
# $dividerTiny
# P A C K D E T A I L S
# $dividerTiny
# Title: $packName
# Description: $packDesc
# Format: domain list
# Version: $buildVersion
# Release: $releaseVersion
# Entries: $totaldomains
# Pack Code: $packCode
# License: $license
# Updated: $updateDate
# RAW: $raw/domains.txt
# $dividerTiny
#
# $dividerTiny
# F E A T U R E S
# $dividerTiny
# $f1\n# $f2\n# $f3\n# $f4\n# $f5\n# $f6\n# $f7\n# $f8
# $dividerTiny
#
# $dividerTiny
# T E A M B O L T Z - meet the team
# $dividerTiny
# $ador\n# $avinash\n# $badmojr
# $dividerTiny
#
# $dividerTiny
# E N E R G I Z E D B E G I N S
# $dividerTiny" >> $temp
# Build Domain List
cat $temp $domains $footer > $atemp
mv -f $atemp $domains
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"adblock filter"$N$G" Format"$N
# Adblock Filter Header
echo "$headerLogoAB
! $dividerTiny
! P A C K D E T A I L S
! $dividerTiny
! Title: $packName
! Description: $packDesc
! Format: adblock
! Version: $buildVersion
! Release: $releaseVersion
! Expires: $expiry
! Entries: $totaldomains
! Pack Code: $packCode
! License: $license
! Updated: $updateDate
! RAW: $raw/filter
! $dividerTiny
!
! $dividerTiny
! F E A T U R E S
! $dividerTiny
! $f1\n! $f2\n! $f3\n! $f4\n! $f5\n! $f6\n! $f7\n! $f8
! $dividerTiny
!
! $dividerTiny
! T E A M B O L T Z - meet the team
! $dividerTiny
! $ador\n! $avinash\n! $badmojr
! $dividerTiny
!
! $dividerTiny
! E N E R G I Z E D B E G I N S
! $dividerTiny" >> $temp
# Build Filter
cat $temp $filter $footerAB > $atemp
mv -f $atemp $filter
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"DNSMasq"$N$G" Format"$N
# DNSMasq Header
echo "$headerLogo
# $dividerTiny
# P A C K D E T A I L S
# $dividerTiny
# Title: $packName
# Description: $packDesc
# Format: dnsmasq
# Version: $buildVersion
# Release: $releaseVersion
# Entries: $totaldomains
# Pack Code: $packCode
# License: $license
# Updated: $updateDate
# RAW: $raw/dnsmasq.conf
# $dividerTiny
#
# $dividerTiny
# F E A T U R E S
# $dividerTiny
# $f1\n# $f2\n# $f3\n# $f4\n# $f5\n# $f6\n# $f7\n# $f8
# $dividerTiny
#
# $dividerTiny
# T E A M B O L T Z - meet the team
# $dividerTiny
# $ador\n# $avinash\n# $badmojr
# $dividerTiny
#
# $dividerTiny
# E N E R G I Z E D B E G I N S
# $dividerTiny" >> $temp
# Build DNSMasq
cat $temp $dnsMasq $footer > $atemp
mv -f $atemp $dnsMasq
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"DNSMasq ipv6"$N$G" Format"$N
# DNSMasq IPV6 Header
echo "$headerLogo
# $dividerTiny
# P A C K D E T A I L S
# $dividerTiny
# Title: $packName
# Description: $packDesc
# Format: dnsmasq ipv6
# Version: $buildVersion
# Release: $releaseVersion
# Entries: $totaldomains
# Pack Code: $packCode
# License: $license
# Updated: $updateDate
# RAW: $raw/dnsmasq-ipv6.conf
# $dividerTiny
#
# $dividerTiny
# F E A T U R E S
# $dividerTiny
# $f1\n# $f2\n# $f3\n# $f4\n# $f5\n# $f6\n# $f7\n# $f8
# $dividerTiny
#
# $dividerTiny
# T E A M B O L T Z - meet the team
# $dividerTiny
# $ador\n# $avinash\n# $badmojr
# $dividerTiny
#
# $dividerTiny
# E N E R G I Z E D B E G I N S
# $dividerTiny" >> $temp
# Build DNSMasq IPV6
cat $temp $dnsMasqIPV6 $footer > $atemp
mv -f $atemp $dnsMasqIPV6
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"unbound"$N$G" Format"$N
# Unbound Header
echo "$headerLogo
# $dividerTiny
# P A C K D E T A I L S
# $dividerTiny
# Title: $packName
# Description: $packDesc
# Format: unbound
# Version: $buildVersion
# Release: $releaseVersion
# Entries: $totaldomains
# Pack Code: $packCode
# License: $license
# Updated: $updateDate
# RAW: $raw/unbound.conf
# $dividerTiny
#
# $dividerTiny
# F E A T U R E S
# $dividerTiny
# $f1\n# $f2\n# $f3\n# $f4\n# $f5\n# $f6\n# $f7\n# $f8
# $dividerTiny
#
# $dividerTiny
# T E A M B O L T Z - meet the team
# $dividerTiny
# $ador\n# $avinash\n# $badmojr
# $dividerTiny
#
# $dividerTiny
# E N E R G I Z E D B E G I N S
# $dividerTiny" >> $temp
# Build Unbound
cat $temp $unbound $footer > $atemp
mv -f $atemp $unbound
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"rpz"$N$G" Format"$N
# RPZ Header
echo "$headerLogoRPZ
; $dividerTiny
; P A C K D E T A I L S
; $dividerTiny
; Title: $packName
; Description: $packDesc
; Format: rpz
; Version: $buildVersion
; Release: $releaseVersion
; Entries: $totaldomains
; Pack Code: $packCode
; License: $license
; Updated: $updateDate
; RAW: $raw/rpz.txt
; $dividerTiny
;
$localHostRPZ
;
; $dividerTiny
; F E A T U R E S
; $dividerTiny
; $f1\n; $f2\n; $f3\n; $f4\n; $f5\n; $f6\n; $f7\n; $f8
; $dividerTiny
;
; $dividerTiny
; T E A M B O L T Z - meet the team
; $dividerTiny
; $ador\n; $avinash\n; $badmojr
; $dividerTiny
;
; $dividerTiny
; E N E R G I Z E D B E G I N S
; $dividerTiny" >> $temp
# Build RPZ
cat $temp $rpz $footerRPZ > $atemp
mv -f $atemp $rpz
#-------------------------------------------------------------------------------#
# Clear Cache
rm -f $temp $atemp
echo $G"! Building "$N$Y"one-line"$N$G" Format"$N
# One Line Header
echo "$headerLogo
# $dividerTiny
# P A C K D E T A I L S
# $dividerTiny
# Title: $packName
# Description: $packDesc
# Format: one-line
# Version: $buildVersion
# Release: $releaseVersion
# Entries: $totaldomains
# Pack Code: $packCode
# License: $license
# Updated: $updateDate
# RAW: $raw/one-line.txt
# $dividerTiny
#
# $dividerTiny
# F E A T U R E S
# $dividerTiny
# $f1\n# $f2\n# $f3\n# $f4\n# $f5\n# $f6\n# $f7\n# $f8
# $dividerTiny
#
# $dividerTiny
# T E A M B O L T Z - meet the team
# $dividerTiny
# $ador\n# $avinash\n# $badmojr
# $dividerTiny
#
# $dividerTiny
# E N E R G I Z E D B E G I N S
# $dividerTiny" >> $temp
# Build One-Line
cat $temp $oneline $footerOL > $atemp
mv -f $atemp $oneline
#-------------------------------------------------------------------------------#
# Build GZIP
echo $G"! Building "$N$Y"hosts gzip"$N$G" Format"$N
gzip -9kf $hosts
# Build GZIP Unbound
##gzip -9f $unbound
#-------------------------------------------------------------------------------#
## Build Chromium Ruleset
#echo $G"! Building "$N$Y"chromium ruleset"$N$G" Format"$N
#rulesetFile=../assets/ruleset-converter/ruleset_converter
#chmod +x $rulesetFile
#inFile=./formats/filter
#outFile=./formats/filter.dat
#./$rulesetFile --input_format=filter-list --output_format=unindexed-ruleset --input_files=$inFile --output_file=$outFile
##
# Complete
echo $Y"! $dividerTiny"$N
echo $G"! DONE BUILDING PACK & FORMATS."$N
echo $Y"! $dividerTiny"$N
# Remove Stales
rm -f "$temp" "$atemp" "$file" "$whitelist" "$footer" "$footerAB" "$footerRPZ" "$footerOL"
#-------------------------------------------------------------------------------#
#---------------------------------- D O N E -----------------------------------#
#-------------------------------------------------------------------------------#
|
import { ContextFactory } from 'utils/test/factories';
import { ApiBackend } from 'types/api';
// Unit tests for the "base" LMS API backend.
// NOTE: the module under test reads the frontend context from the global scope
// at import time, so the context MUST be installed on `window` before the
// require('./base') call below — do not reorder these statements.
describe('Base API', () => {
  // Frontend context with a single LMS backend whose course_regexp captures
  // the entire URL as the course_id.
  const context = ContextFactory({
    lms_backends: [
      {
        backend: ApiBackend.BASE,
        course_regexp: '(?<course_id>.*)',
        endpoint: 'https://demo.endpoint/api',
      },
    ],
  }).generate();
  (window as any).__richie_frontend_context__ = { context };
  const { default: API } = require('./base');
  const LMSConf = context.lms_backends[0];
  const BaseAPI = API(LMSConf);

  describe('enrollment', () => {
    // The base backend persists enrollments in sessionStorage; start every
    // test from a clean slate.
    beforeEach(() => {
      sessionStorage.clear();
    });

    describe('get', () => {
      it('returns nothing if the user is not enrolled to the provided course_id', async () => {
        const response = await BaseAPI.enrollment.get(
          'https://demo.endpoint/courses?course_id=af1987efz98:afe78',
          { username: 'johndoe' },
        );
        expect(response).toBeNull();
      });

      it('returns course run information if user is enrolled', async () => {
        // Enroll first, then read the enrollment back.
        await BaseAPI.enrollment.set('https://demo.endpoint/courses?course_id=af1987efz98:afe78', {
          username: 'johndoe',
        });
        const response = await BaseAPI.enrollment.get(
          'https://demo.endpoint/courses?course_id=af1987efz98:afe78',
          { username: 'johndoe' },
        );
        expect(response.user).toEqual('johndoe');
        expect(response.is_active).toBeTruthy();
      });
    });

    describe('isEnrolled', () => {
      it('returns true if user is enrolled', async () => {
        await BaseAPI.enrollment.set('https://demo.endpoint/courses?course_id=af1987efz98:afe78', {
          username: 'johndoe',
        });
        const response = await BaseAPI.enrollment.isEnrolled(
          'https://demo.endpoint/courses?course_id=af1987efz98:afe78',
          { username: 'johndoe' },
        );
        expect(response).toBeTruthy();
      });

      it('returns false if user is not enrolled', async () => {
        const response = await BaseAPI.enrollment.isEnrolled(
          'https://demo.endpoint/courses?course_id=af1987efz98:afe78',
          { username: 'johndoe' },
        );
        expect(response).toBeFalsy();
      });

      it('returns false if user is anonymous', async () => {
        // No user object at all — the check must not throw.
        const response = await BaseAPI.enrollment.isEnrolled(
          'https://demo.endpoint/courses?course_id=af1987efz98:afe78',
        );
        expect(response).toBeFalsy();
      });
    });

    describe('set', () => {
      it('enrolls user', async () => {
        const response = await BaseAPI.enrollment.set(
          'https://demo.endpoint/courses?course_id=af1987efz98:afe78',
          { username: 'johndoe' },
        );
        expect(response).toBeTruthy();
        // Storage key format used by the base backend: `${username}-${url}`.
        expect(
          sessionStorage.getItem(
            `johndoe-https://demo.endpoint/courses?course_id=af1987efz98:afe78`,
          ),
        ).toEqual('true');
      });
    });
  });
});
|
/**
 * A task ("oppgave") record.
 * Only `id`, `oppgavetype`, `prioritet` and `status` are required; the
 * remaining fields are optional metadata.
 */
export interface IOppgave {
  id: string;
  // Identities (person / organisation) the task concerns.
  identer?: IOppgaveIdent[];
  // Unit the task is currently assigned to.
  tildeltEnhetsnr?: string;
  journalpostId?: string;
  saksreferanse?: string;
  aktoerId?: string;
  behandlingstema?: string;
  beskrivelse?: string;
  mappeId?: number;
  // Completion deadline — presumably an ISO-8601 date string; TODO confirm
  // against the producing API.
  fristFerdigstillelse?: string;
  oppgavetype: string;
  opprettetTidspunkt?: string;
  prioritet: OppgavePrioritet;
  // NOTE(review): typed as plain string even though OppgaveStatus exists
  // below — confirm whether the narrower enum type would be safe here.
  status: string;
  tilordnetRessurs?: string;
}
/** An identity attached to a task, typed by the registry it comes from. */
export interface IOppgaveIdent {
  ident: string;
  gruppe: IdentGruppe;
}
/** Registries an identity value may originate from. */
export enum IdentGruppe {
  AKTOERID = 'AKTOERID',
  FOLKEREGISTERIDENT = 'FOLKEREGISTERIDENT',
  NPID = 'NPID',
  ORGNR = 'ORGNR',
  SAMHANDLERNR = 'SAMHANDLERNR',
}
/** Task priority: high (HOY) / normal (NORM) / low (LAV). */
export enum OppgavePrioritet {
  HOY = 'HOY',
  NORM = 'NORM',
  LAV = 'LAV',
}
/** Lifecycle states of a task, from creation to completion/mis-registration. */
export enum OppgaveStatus {
  OPPRETTET = 'OPPRETTET',
  AAPNET = 'AAPNET',
  UNDER_BEHANDLING = 'UNDER_BEHANDLING',
  FERDIGSTILT = 'FERDIGSTILT',
  FEILREGISTRERT = 'FEILREGISTRERT',
}
|
pub mod usb {
    /// A USB device identified by its vendor/product ID pair.
    pub struct USBDevice {
        vendor_id: u16,
        product_id: u16,
        name: String,
        connected: bool,
    }

    impl USBDevice {
        /// Creates a new, initially disconnected device.
        pub fn new(vendor_id: u16, product_id: u16, name: String) -> USBDevice {
            USBDevice {
                vendor_id,
                product_id,
                name,
                connected: false,
            }
        }

        /// Marks the device as connected.
        pub fn connect(&mut self) {
            self.connected = true;
        }

        /// Marks the device as disconnected.
        pub fn disconnect(&mut self) {
            self.connected = false;
        }

        /// Reports the connection state.
        ///
        /// Fix: `connected` is private, so without an accessor the state
        /// toggled by `connect`/`disconnect` was unobservable from outside
        /// this module.
        pub fn is_connected(&self) -> bool {
            self.connected
        }

        /// Returns the human-readable device name.
        pub fn name(&self) -> &str {
            &self.name
        }
    }

    /// Finds the first device matching both the vendor and product ID.
    ///
    /// Takes `&[USBDevice]` instead of `&Vec<USBDevice>` (idiomatic and more
    /// general); existing callers passing `&vec` still compile via deref
    /// coercion.
    pub fn find_device_by_ids(devices: &[USBDevice], vendor_id: u16, product_id: u16) -> Option<&USBDevice> {
        devices.iter().find(|&device| device.vendor_id == vendor_id && device.product_id == product_id)
    }
}
// Game state for the Jeopardy board (populated by code outside this chunk).
let randomCategory
let randomClue
let answer
// Running count of correct answers.
let answersCorrect = 0
let catIndex = 1 // category iteration variable for append loop
// 6x6 grid: the first row holds category placeholders, the remaining rows the
// dollar-value labels for each clue tier.
let board =
    [['cat', 'cat', 'cat', 'cat', 'cat', 'cat'],
    ['$200', '$200', '$200', '$200', '$200', '$200'],
    ['$400', '$400', '$400', '$400', '$400', '$400'],
    ['$600', '$600', '$600', '$600', '$600', '$600'],
    ['$800', '$800', '$800', '$800', '$800', '$800'],
    ['$1000', '$1000', '$1000', '$1000', '$1000', '$1000']]
// IDs of the categories fetched from the trivia API.
let categoryID = []
// Root element the board cells get appended to; inserted at the top of <body>.
let divBoard = document.createElement('div')
divBoard.classList.add('board')
document.body.prepend(divBoard)
#!/bin/bash
# ------------------------------------------------------------------------------
# Usage
# ------------------------------------------------------------------------------
# Print usage information and exit with failure.
# Fix: the synopsis previously read "[hs]" — it omitted the -p option that the
# getopts loop actually accepts, and listed -s twice.
usage() {
    cat <<EOM
$0: [-h] [-s] [-p prefix]
    -h  display some help, you know, this
    -s  shallow clone, useful for faster builds
        (-s doesn't work with git prior to 2.8, e.g. xenial)
    -p  install prefix (default: /usr)
EOM
    exit 1
}
# ------------------------------------------------------------------------------
# Requirements to build...
# ------------------------------------------------------------------------------
# Verify the build tool-chain is available; abort with install advice if not.
# `which` exits non-zero when any of the listed binaries is missing.
check_req() {
    if ! which cmake g++ make; then
        echo "Failed to find required build packages. Please install with: sudo apt-get install cmake make g++"
        exit 1
    fi
}
# ------------------------------------------------------------------------------
# build...
# ------------------------------------------------------------------------------
# Configure, build and package the project inside ./build.
# Fix: ${NPROC} was never defined anywhere in this script, so `make -j${NPROC}`
# expanded to a bare `make -j` (unbounded parallelism); default to the number
# of available cores instead.
main() {
    check_req
    mkdir -p build
    pushd build && \
    cmake ../ \
        -DBUILD_SYMBOLS=ON \
        -DBUILD_TLS=ON \
        -DCMAKE_INSTALL_PREFIX=${install_prefix} && \
    make -j"${NPROC:-$(nproc)}" && \
    umask 0022 && chmod -R a+rX . && \
    make package && \
    popd && \
    exit $?
}
install_prefix="/usr"
shallow=""

# Parse options.
# Fix: the optstring ":hsp:" and the usage text both advertise -s, but the
# case statement had no `s)` arm, so passing -s fell through to `*)` and
# aborted with the usage message.
while getopts ":hsp:" opts; do
    case "${opts}" in
        h)
            usage
            ;;
        s)
            shallow="yes"   # reserved for shallow-clone support
            ;;
        p)
            install_prefix="${OPTARG}"
            ;;
        *)
            usage
            ;;
    esac
done

main
|
def extract_location(text):
    """Return the location phrase at the end of *text*.

    Fix: the previous implementation returned only the final whitespace
    token, which truncated multi-word locations ("... lives in San
    Francisco" -> "Francisco", contradicting the demo's expected output)
    and raised IndexError on empty input.  We instead take everything after
    the last " in " marker, falling back to the last token when no marker
    exists, and "" for empty input.
    """
    # rpartition keeps everything after the *last* " in ", so sentences like
    # "works in A but lives in B" yield "B".
    _, sep, after = text.rpartition(" in ")
    if sep:
        return after
    tokens = text.split()
    return tokens[-1] if tokens else ""
# Demo: extract the trailing location from a sample sentence and print it.
location = extract_location("John Neville lives in San Francisco")
print(location)
# San Francisco
using System;
using System.Collections.Generic;
using System.Linq;
public class IntegerProcessor
{
    /// <summary>
    /// Returns the squares of the distinct values in <paramref name="input"/>,
    /// ordered by the original value (ascending) before squaring.
    /// </summary>
    /// <param name="input">The integers to process; duplicates are dropped.</param>
    /// <returns>A new list of squared, de-duplicated values.</returns>
    public List<int> ProcessIntegers(List<int> input)
    {
        // Single LINQ pipeline: de-duplicate, sort ascending, then square.
        return input
            .Distinct()
            .OrderBy(value => value)
            .Select(value => value * value)
            .ToList();
    }
}
<filename>tests/cmd_app_touch/touch/src/Container.js
/**
* A Container has all of the abilities of {@link Ext.Component Component}, but lets you nest other Components inside
* it. Applications are made up of lots of components, usually nested inside one another. Containers allow you to
* render and arrange child Components inside them. Most apps have a single top-level Container called a Viewport,
* which takes up the entire screen. Inside of this are child components, for example in a mail app the Viewport
* Container's two children might be a message List and an email preview pane.
*
* Containers give the following extra functionality:
*
* - Adding child Components at instantiation and run time
* - Removing child Components
* - Specifying a [Layout](#!/guide/layouts)
*
* Layouts determine how the child Components should be laid out on the screen. In our mail app example we'd use an
* HBox layout so that we can pin the email list to the left hand edge of the screen and allow the preview pane to
* occupy the rest. There are several layouts in Sencha Touch 2, each of which help you achieve your desired
* application structure, further explained in the [Layout guide](#!/guide/layouts).
*
* ## Adding Components to Containers
*
* As we mentioned above, Containers are special Components that can have child Components arranged by a Layout. One of
* the code samples above showed how to create a Panel with 2 child Panels already defined inside it but it's easy to
* do this at run time too:
*
* @example miniphone
* //this is the Panel we'll be adding below
* var aboutPanel = Ext.create('Ext.Panel', {
* html: 'About this app'
* });
*
* //this is the Panel we'll be adding to
* var mainPanel = Ext.create('Ext.Panel', {
* fullscreen: true,
*
* layout: 'hbox',
* defaults: {
* flex: 1
* },
*
* items: {
* html: 'First Panel',
* style: 'background-color: #5E99CC;'
* }
* });
*
* //now we add the first panel inside the second
* mainPanel.add(aboutPanel);
*
* Here we created three Panels in total. First we made the aboutPanel, which we might use to tell the user a little
* about the app. Then we create one called mainPanel, which already contains a third Panel in its
* {@link Ext.Container#cfg-items items} configuration, with some dummy text ("First Panel"). Finally, we add the first
* panel to the second by calling the {@link Ext.Container#method-add add} method on `mainPanel`.
*
* In this case we gave our mainPanel another hbox layout, but we also introduced some
* {@link Ext.Container#defaults defaults}. These are applied to every item in the Panel, so in this case every child
* inside `mainPanel` will be given a `flex: 1` configuration. The effect of this is that when we first render the screen
* only a single child is present inside `mainPanel`, so that child takes up the full width available to it. Once the
* `mainPanel.add` line is called though, the `aboutPanel` is rendered inside of it and also given a `flex` of 1, which will
* cause it and the first panel to both receive half the full width of the `mainPanel`.
*
* Likewise, it's easy to remove items from a Container:
*
* mainPanel.remove(aboutPanel);
*
* After this line is run everything is back to how it was, with the first child panel once again taking up the full
* width inside `mainPanel`.
*
* ## Further Reading
*
* See the [Component & Container Guide](#!/guide/components) for more information, and check out the
* {@link Ext.Container} class docs also.
*
* @aside guide components
* @aside guide layouts
*/
Ext.define('Ext.Container', {
extend: 'Ext.Component',
alternateClassName: 'Ext.lib.Container',
requires: [
'Ext.layout.*',
'Ext.ItemCollection',
'Ext.behavior.Scrollable',
'Ext.Mask'
],
xtype: 'container',
/**
* @event add
* Fires whenever an item is added to the Container.
* @param {Ext.Container} this The Container instance.
* @param {Object} item The item added to the Container.
* @param {Number} index The index of the item within the Container.
*/
/**
* @event remove
* Fires whenever an item is removed from the Container.
* @param {Ext.Container} this The Container instance.
* @param {Object} item The item removed from the Container.
* @param {Number} index The index of the item that was removed.
*/
/**
* @event move
* Fires whenever an item is moved within the Container.
* @param {Ext.Container} this The Container instance.
* @param {Object} item The item moved within the Container.
* @param {Number} toIndex The new index of the item.
* @param {Number} fromIndex The old index of the item.
*/
/**
* @private
* @event renderedchange
* Fires whenever an item is rendered into a container or derendered
* from a Container.
* @param {Ext.Container} this The Container instance.
* @param {Object} item The item in the Container.
* @param {Boolean} rendered The current rendered status of the item.
*/
/**
* @event activate
* Fires whenever item within the Container is activated.
* @param {Object} newActiveItem The new active item within the container.
* @param {Ext.Container} this The Container instance.
* @param {Object} oldActiveItem The old active item within the container.
*/
/**
* @event deactivate
* Fires whenever item within the Container is deactivated.
* @param {Object} oldActiveItem The old active item within the container.
* @param {Ext.Container} this The Container instance.
* @param {Object} newActiveItem The new active item within the container.
*/
eventedConfig: {
/**
* @cfg {Object/String/Number} activeItem The item from the {@link #cfg-items} collection that will be active first. This is
* usually only meaningful in a {@link Ext.layout.Card card layout}, where only one item can be active at a
* time. If a string is passed, it is assumed to be a {@link Ext.ComponentQuery} selector.
* @accessor
* @evented
*/
activeItem: 0,
/**
* @cfg {Boolean/String/Object} scrollable
* Configuration options to make this Container scrollable. Acceptable values are:
*
* - `'horizontal'`, `'vertical'`, `'both'` to enabling scrolling for that direction.
* - `true`/`false` to explicitly enable/disable scrolling.
*
* Alternatively, you can give it an object which is then passed to the scroller instance:
*
* scrollable: {
* direction: 'vertical',
* directionLock: true
* }
*
* Please look at the {@link Ext.scroll.Scroller} documentation for more example on how to use this.
* @return {Ext.scroll.View} The scroll view.
* @accessor
* @evented
*/
scrollable: null
},
config: {
/**
* @cfg {String/Object/Boolean} cardSwitchAnimation
* Animation to be used during transitions of cards.
* @removed 2.0.0 Please use {@link Ext.layout.Card#animation} instead
*/
/**
* @cfg {Object/String} layout Configuration for this Container's layout. Example:
*
* Ext.create('Ext.Container', {
* layout: {
* type: 'hbox',
* align: 'middle'
* },
* items: [
* {
* xtype: 'panel',
* flex: 1,
* style: 'background-color: red;'
* },
* {
* xtype: 'panel',
* flex: 2,
* style: 'background-color: green'
* }
* ]
* });
*
* See the [Layouts Guide](#!/guide/layouts) for more information.
*
* @accessor
*/
layout: null,
/**
* @cfg {Object} control Enables you to easily control Components inside this Container by listening to their
* events and taking some action. For example, if we had a container with a nested Disable button, and we
* wanted to hide the Container when the Disable button is tapped, we could do this:
*
* Ext.create('Ext.Container', {
* control: {
* 'button[text=Disable]': {
* tap: 'hideMe'
* }
* },
*
* hideMe: function () {
* this.hide();
* }
* });
*
* We used a {@link Ext.ComponentQuery} selector to listen to the {@link Ext.Button#tap tap} event on any
* {@link Ext.Button button} anywhere inside the Container that has the {@link Ext.Button#text text} 'Disable'.
* Whenever a Component matching that selector fires the `tap` event our `hideMe` function is called. `hideMe` is
* called with scope: `this` (e.g. `this` is the Container instance).
*
*/
control: {},
/**
* @cfg {Object} defaults A set of default configurations to apply to all child Components in this Container.
* It's often useful to specify defaults when creating more than one items with similar configurations. For
* example here we can specify that each child is a panel and avoid repeating the xtype declaration for each
* one:
*
* Ext.create('Ext.Container', {
* defaults: {
* xtype: 'panel'
* },
* items: [
* {
* html: 'Panel 1'
* },
* {
* html: 'Panel 2'
* }
* ]
* });
*
* @accessor
*/
defaults: null,
/**
* @cfg {Array/Object} items The child items to add to this Container. This is usually an array of Component
* configurations or instances, for example:
*
* Ext.create('Ext.Container', {
* items: [
* {
* xtype: 'panel',
* html: 'This is an item'
* }
* ]
* });
* @accessor
*/
items: null,
/**
* @cfg {Boolean} autoDestroy If `true`, child items will be destroyed as soon as they are {@link #method-remove removed}
* from this container.
* @accessor
*/
autoDestroy: true,
/** @cfg {String} defaultType
* The default {@link Ext.Component xtype} of child Components to create in this Container when a child item
* is specified as a raw configuration object, rather than as an instantiated Component.
* @accessor
*/
defaultType: null,
//@private
useBodyElement: null,
/**
* @cfg {Boolean/Object/Ext.Mask/Ext.LoadMask} masked
* A configuration to allow you to mask this container.
* You can optionally pass an object block with and xtype of `loadmask`, and an optional `message` value to
* display a loading mask. Please refer to the {@link Ext.LoadMask} component to see other configurations.
*
* masked: {
* xtype: 'loadmask',
* message: 'My message'
* }
*
* Alternatively, you can just call the setter at any time with `true`/`false` to show/hide the mask:
*
* setMasked(true); //show the mask
* setMasked(false); //hides the mask
*
* There are also two convenient methods, {@link #method-mask} and {@link #unmask}, to allow you to mask and unmask
* this container at any time.
*
* Remember, the {@link Ext.Viewport} is always a container, so if you want to mask your whole application at any time,
* you can call:
*
* Ext.Viewport.setMasked({
* xtype: 'loadmask',
* message: 'Hello'
* });
*
* @accessor
*/
masked: null,
/**
* @cfg {Boolean} modal `true` to make this Container modal. This will create a mask underneath the Container
* that covers its parent and does not allow the user to interact with any other Components until this
* Container is dismissed.
* @accessor
*/
modal: null,
/**
* @cfg {Boolean} hideOnMaskTap When using a {@link #modal} Component, setting this to `true` will hide the modal
* mask and the Container when the mask is tapped on.
* @accessor
*/
hideOnMaskTap: null
},
isContainer: true,
/**
 * @private
 * Sets up item bookkeeping before the superclass applies the config:
 * `items` is the full ItemCollection, `innerItems` only the non-docked /
 * non-floating children.  The first onItemAdd call is routed through
 * onFirstItemAdd so layout and listeners are initialized lazily.
 */
constructor: function(config) {
    var me = this;

    me._items = me.items = new Ext.ItemCollection();
    me.innerItems = [];

    // Swapped back to the prototype's onItemAdd inside onFirstItemAdd.
    me.onItemAdd = me.onFirstItemAdd;

    me.callParent(arguments);
},
getElementConfig: function() {
return {
reference: 'element',
classList: ['x-container', 'x-unsized'],
children: [{
reference: 'innerElement',
className: 'x-inner'
}]
};
},
/**
 * Changes the {@link #masked} configuration when its setter is called, which will convert the value
 * into a proper object/instance of {@link Ext.Mask}/{@link Ext.LoadMask}. If a mask already exists,
 * it will use that instead.
 * @param {Boolean/Object/Ext.Mask/Ext.LoadMask} masked
 * @return {Object} The mask instance, if one was created or reused.
 */
applyMasked: function(masked) {
    var isVisible = true,
        currentMask;

    // `false` still factories a mask instance, but keeps it hidden so a later
    // setMasked(true) can simply reveal it.
    if (masked === false) {
        masked = true;
        isVisible = false;
    }

    currentMask = Ext.factory(masked, Ext.Mask, this.getMasked());

    if (currentMask) {
        this.add(currentMask);
        currentMask.setHidden(!isVisible);
    }

    return currentMask;
},
/**
* Convenience method which calls {@link #setMasked} with a value of `true` (to show the mask). For additional
* functionality, call the {@link #setMasked} function direction (See the {@link #masked} configuration documentation
* for more information).
*/
mask: function(mask) {
this.setMasked(mask || true);
},
/**
* Convenience method which calls {@link #setMasked} with a value of false (to hide the mask). For additional
* functionality, call the {@link #setMasked} function direction (See the {@link #masked} configuration documentation
* for more information).
*/
unmask: function() {
this.setMasked(false);
},
/**
 * @private
 * In addition to the superclass behavior, re-inserts this container's modal
 * mask (if any) into the new parent, directly underneath this container in
 * both DOM order and z-index.
 */
setParent: function(container) {
    this.callSuper(arguments);

    if (container) {
        var modal = this.getModal();

        if (modal) {
            container.insertBefore(modal, this);
            modal.setZIndex(this.getZIndex() - 1);
        }
    }
},
/**
 * @private
 * Converts the {@link #modal} config value into an {@link Ext.Mask} instance
 * (mirrors {@link #applyMasked}: `false` builds the mask but keeps it
 * invisible so it can be revealed later).
 */
applyModal: function(modal, currentModal) {
    var isVisible = true;

    if (modal === false) {
        modal = true;
        isVisible = false;
    }

    currentModal = Ext.factory(modal, Ext.Mask, currentModal);

    if (currentModal) {
        currentModal.setVisibility(isVisible);
    }

    return currentModal;
},
/**
 * @private
 * Adds or removes the modal mask from the parent container, keeping it
 * immediately below this container in stacking order.
 */
updateModal: function(modal) {
    var container = this.getParent();

    if (container) {
        if (modal) {
            container.insertBefore(modal, this);
            modal.setZIndex(this.getZIndex() - 1);
        }
        else {
            container.remove(modal);
        }
    }
},
/**
 * @private
 * Attaches (or detaches) a tap listener on the modal mask that hides this
 * container, implementing the {@link #hideOnMaskTap} config.
 */
updateHideOnMaskTap : function(hide) {
    var mask = this.getModal();

    if (mask) {
        // 'on' subscribes, 'un' unsubscribes the same tap -> 'hide' handler.
        mask[hide ? 'on' : 'un'].call(mask, 'tap', 'hide', this);
    }
},
/**
 * @private
 * Keeps the modal mask stacked one z-index step below this container
 * whenever the container's own z-index changes.
 */
updateZIndex: function(zIndex) {
    var modal = this.getModal();

    this.callParent(arguments);

    if (modal) {
        modal.setZIndex(zIndex - 1);
    }
},
/**
 * @private
 * Swaps the base CSS class on the container's element and inner element,
 * including the UI-specific variant class tracked in `currentUi`.
 */
updateBaseCls: function(newBaseCls, oldBaseCls) {
    var me = this,
        ui = me.getUi();

    if (oldBaseCls) {
        this.element.removeCls(oldBaseCls);
        // NOTE(review): removes the *new* cls from innerElement while tearing
        // down the old one — looks asymmetric; confirm this is intended.
        this.innerElement.removeCls(newBaseCls, null, 'inner');

        if (ui) {
            this.element.removeCls(this.currentUi);
        }
    }

    if (newBaseCls) {
        this.element.addCls(newBaseCls);
        this.innerElement.addCls(newBaseCls, null, 'inner');

        if (ui) {
            this.element.addCls(newBaseCls, null, ui);
            this.currentUi = newBaseCls + '-' + ui;
        }
    }
},
/**
 * @private
 * When enabled, wraps the inner element in an extra 'x-body' element and
 * links it for automatic destruction with this component.
 */
updateUseBodyElement: function(useBodyElement) {
    if (useBodyElement) {
        this.link('bodyElement', this.innerElement.wrap({
            cls: 'x-body'
        }));
    }
},
/**
 * @private
 * Applier for the {@link #cfg-items} config: replaces any existing items
 * with the new ones and, after first initialization, re-resolves the
 * active item.
 */
applyItems: function(items, collection) {
    if (items) {
        var me = this;

        // Force defaultType/defaults to be initialized before children are
        // factoried below.
        me.getDefaultType();
        me.getDefaults();

        if (me.initialized && collection.length > 0) {
            me.removeAll();
        }

        me.add(items);

        //Don't need to call setActiveItem when Container is first initialized
        if (me.initialized) {
            var activeItem = me.initialConfig.activeItem || me.config.activeItem || 0;

            me.setActiveItem(activeItem);
        }
    }
},
/**
 * @private
 * Translates the {@link #control} config into delegated event listeners:
 * each selector's listener object is tagged with `delegate` so events fired
 * by matching descendant components are routed to the configured handlers.
 */
applyControl: function(selectors) {
    var selector, key, listener, listeners;

    for (selector in selectors) {
        listeners = selectors[selector];

        for (key in listeners) {
            listener = listeners[key];

            // Object-form listeners carry their own options; tag each one.
            if (Ext.isObject(listener)) {
                listener.delegate = selector;
            }
        }

        listeners.delegate = selector;

        this.addListener(listeners);
    }

    return selectors;
},
/**
 * Initialize layout and event listeners the very first time an item is added
 * @private
 */
onFirstItemAdd: function() {
    // Restore the prototype's onItemAdd; this instance-level override only
    // exists until the first child arrives (see constructor).
    delete this.onItemAdd;

    // Any plain html content element becomes obsolete once real children exist.
    if (this.innerHtmlElement && !this.getHtml()) {
        this.innerHtmlElement.destroy();
        delete this.innerHtmlElement;
    }

    this.on('innerstatechange', 'onItemInnerStateChange', this, {
        delegate: '> component'
    });

    return this.onItemAdd.apply(this, arguments);
},
//<debug error>
/**
 * @private
 * Debug-build guard: a container's layout cannot be swapped once one has
 * been instantiated.
 */
updateLayout: function(newLayout, oldLayout) {
    if (oldLayout && oldLayout.isLayout) {
        Ext.Logger.error('Replacing a layout after one has already been initialized is not currently supported.');
    }
},
//</debug>
/**
 * @private
 * Lazily instantiates the layout from the {@link #layout} config (falling
 * back to 'default'), links it for destruction, and binds it to this
 * container.
 */
getLayout: function() {
    var layout = this.layout;
    if (!layout) {
        layout = this.link('_layout', this.link('layout', Ext.factory(this._layout || 'default', Ext.layout.Default, null, 'layout')));
        layout.setContainer(this);
    }

    return layout;
},
/**
 * @private
 * Resolves the {@link #defaultType} xtype to its class once, so the item
 * factories don't repeat the alias lookup per child.
 */
updateDefaultType: function(defaultType) {
    // Cache the direct reference to the default item class here for performance
    this.defaultItemClass = Ext.ClassManager.getByAlias('widget.' + defaultType);

    //<debug error>
    if (!this.defaultItemClass) {
        Ext.Logger.error("Invalid defaultType of: '" + defaultType + "', must be a valid component xtype");
    }
    //</debug>
},
/**
 * @private
 * When {@link #defaults} is configured, swaps the item factory for the
 * variant that merges those defaults into every new child config.
 */
applyDefaults: function(defaults) {
    if (defaults) {
        this.factoryItem = this.factoryItemWithDefaults;
        return defaults;
    }
},
/**
 * @private
 * Turns a raw config object into a component instance (of
 * {@link #defaultType} when no xtype is given); existing instances pass
 * through unchanged.
 */
factoryItem: function(item) {
    //<debug error>
    if (!item) {
        Ext.Logger.error("Invalid item given: " + item + ", must be either the config object to factory a new item, " +
            "or an existing component instance");
    }
    //</debug>

    return Ext.factory(item, this.defaultItemClass);
},
/**
 * @private
 * Like {@link #factoryItem}, but merges the container's {@link #defaults}
 * into the item config first (existing component instances get setConfig'd
 * instead of merged).
 */
factoryItemWithDefaults: function(item) {
    //<debug error>
    if (!item) {
        Ext.Logger.error("Invalid item given: " + item + ", must be either the config object to factory a new item, " +
            "or an existing component instance");
    }
    //</debug>

    var me = this,
        defaults = me.getDefaults(),
        instance;

    if (!defaults) {
        return Ext.factory(item, me.defaultItemClass);
    }

    // Existing instance
    if (item.isComponent) {
        instance = item;

        // Apply defaults only if this is not already an item of this container
        if (defaults && item.isInnerItem() && !me.has(instance)) {
            instance.setConfig(defaults, true);
        }
    }
    // Config object
    else {
        if (defaults && !item.ignoreDefaults) {
            // Note:
            // - defaults is only applied to inner items
            // - we merge the given config together with defaults into a new object so that the original object stays intact
            // NOTE(review): defaults are skipped only when the item declares
            // *all* of the positioning keys below — verify `&&` vs `||` is
            // intended here.
            if (!(
                item.hasOwnProperty('left') &&
                item.hasOwnProperty('right') &&
                item.hasOwnProperty('top') &&
                item.hasOwnProperty('bottom') &&
                item.hasOwnProperty('docked') &&
                item.hasOwnProperty('centered')
            )) {
                item = Ext.mergeIf({}, item, defaults);
            }
        }

        instance = Ext.factory(item, me.defaultItemClass);
    }

    return instance;
},
/**
* Adds one or more Components to this Container. Example:
*
* var myPanel = Ext.create('Ext.Panel', {
* html: 'This will be added to a Container'
* });
*
* myContainer.add([myPanel]);
*
* @param {Object/Object[]/Ext.Component/Ext.Component[]} newItems The new items to add to the Container.
* @return {Ext.Component} The last item added to the Container from the `newItems` array.
*/
add: function(newItems) {
var me = this,
i, ln, item, newActiveItem;
if (Ext.isArray(newItems)) {
for (i = 0, ln = newItems.length; i < ln; i++) {
item = me.factoryItem(newItems[i]);
this.doAdd(item);
if (!newActiveItem && !this.getActiveItem() && this.innerItems.length > 0 && item.isInnerItem()) {
newActiveItem = item;
}
}
} else {
item = me.factoryItem(newItems);
this.doAdd(item);
if (!newActiveItem && !this.getActiveItem() && this.innerItems.length > 0 && item.isInnerItem()) {
newActiveItem = item;
}
}
if (newActiveItem) {
this.setActiveItem(newActiveItem);
}
return item;
},
/**
 * @private
 * Registers `item` with this container's collections and parent pointer,
 * then notifies the layout via onItemAdd.  No-op if the item is already
 * present.
 * @param {Ext.Component} item
 */
doAdd: function(item) {
    var me = this,
        items = me.getItems(),
        index;

    if (!items.has(item)) {
        index = items.length;
        items.add(item);

        if (item.isInnerItem()) {
            me.insertInner(item);
        }

        item.setParent(me);

        me.onItemAdd(item, index);
    }
},
/**
 * Removes an item from this Container, optionally destroying it.
 * @param {Object} item The item to remove.
 * @param {Boolean} [destroy] Calls the Component's {@link Ext.Component#method-destroy destroy}
 * method if `true`.  Defaults to the {@link #autoDestroy} config.
 * @return {Ext.Component} this
 */
remove: function(item, destroy) {
    var me = this,
        index = me.indexOf(item),
        innerItems = me.getInnerItems();

    if (destroy === undefined) {
        destroy = me.getAutoDestroy();
    }

    if (index !== -1) {
        // Removing the currently active item: activate a sibling first and
        // defer the actual removal until after the activeitemchange event,
        // so the card layout can transition cleanly.
        if (!me.removingAll && innerItems.length > 1 && item === me.getActiveItem()) {
            me.on({
                activeitemchange: 'doRemove',
                scope: me,
                single: true,
                order: 'after',
                args: [item, index, destroy]
            });

            me.doResetActiveItem(innerItems.indexOf(item));
        }
        else {
            me.doRemove(item, index, destroy);
            if (innerItems.length === 0) {
                me.setActiveItem(null);
            }
        }
    }

    return me;
},
doResetActiveItem: function(innerIndex) {
if (innerIndex === 0) {
this.setActiveItem(1);
}
else {
this.setActiveItem(0);
}
},
/**
 * @private
 * Unregisters `item` from the collections, notifies the layout, detaches
 * the parent pointer, and (optionally) destroys the component.
 */
doRemove: function(item, index, destroy) {
    var me = this;

    me.items.remove(item);

    if (item.isInnerItem()) {
        me.removeInner(item);
    }

    me.onItemRemove(item, index, destroy);

    item.setParent(null);

    if (destroy) {
        item.destroy();
    }
},
/**
 * Removes all items currently in the Container, optionally destroying them all.
 * @param {Boolean} destroy If `true`, {@link Ext.Component#method-destroy destroys}
 * each removed Component.  Defaults to the {@link #autoDestroy} config.
 * @param {Boolean} everything If `true`, completely remove all items including
 * docked / centered and floating items.
 * @return {Ext.Component} this
 */
removeAll: function(destroy, everything) {
    var items = this.items,
        ln = items.length,
        i = 0,
        item;

    if (typeof destroy != 'boolean') {
        destroy = this.getAutoDestroy();
    }

    everything = Boolean(everything);

    // removingAll flag is used so we don't unnecessarily change activeItem while removing all items.
    this.removingAll = true;

    for (; i < ln; i++) {
        item = items.getAt(i);

        if (item && (everything || item.isInnerItem())) {
            this.doRemove(item, i, destroy);
            // Compensate for the in-place removal shifting the collection.
            i--;
            ln--;
        }
    }

    this.setActiveItem(null);

    this.removingAll = false;

    return this;
},
/**
* Returns the Component for a given index in the Container's {@link #property-items}.
* @param {Number} index The index of the Component to return.
* @return {Ext.Component} The item at the specified `index`, if found.
*/
getAt: function(index) {
return this.items.getAt(index);
},
getInnerAt: function(index) {
return this.innerItems[index];
},
/**
* Removes the Component at the specified index:
*
* myContainer.removeAt(0); // removes the first item
*
* @param {Number} index The index of the Component to remove.
*/
removeAt: function(index) {
var item = this.getAt(index);
if (item) {
this.remove(item);
}
return this;
},
/**
* Removes an inner Component at the specified index:
*
* myContainer.removeInnerAt(0); // removes the first item of the innerItems property
*
* @param {Number} index The index of the Component to remove.
*/
removeInnerAt: function(index) {
var item = this.getInnerItems()[index];
if (item) {
this.remove(item);
}
return this;
},
/**
* @private
*/
has: function(item) {
return this.getItems().indexOf(item) != -1;
},
/**
* @private
*/
hasInnerItem: function(item) {
return this.innerItems.indexOf(item) != -1;
},
/**
* @private
*/
indexOf: function(item) {
return this.getItems().indexOf(item);
},
innerIndexOf: function(item) {
return this.innerItems.indexOf(item);
},
/**
 * @private
 * Positions `item` inside the `innerItems` array so its inner order matches
 * its position in the full items collection.  With no numeric `index` the
 * item is appended.
 * @param {Ext.Component} item
 * @param {Number} [index] Index of the item within the *full* items list.
 */
insertInner: function(item, index) {
    var items = this.getItems().items,
        innerItems = this.innerItems,
        currentInnerIndex = innerItems.indexOf(item),
        newInnerIndex = -1,
        nextSibling;

    if (currentInnerIndex !== -1) {
        innerItems.splice(currentInnerIndex, 1);
    }

    if (typeof index == 'number') {
        // Find the next *inner* sibling after `index`; the item slots in
        // just before it.
        do {
            nextSibling = items[++index];
        } while (nextSibling && !nextSibling.isInnerItem());

        if (nextSibling) {
            newInnerIndex = innerItems.indexOf(nextSibling);
            innerItems.splice(newInnerIndex, 0, item);
        }
    }

    // No inner sibling found (or no index given): append.
    if (newInnerIndex === -1) {
        innerItems.push(item);
        newInnerIndex = innerItems.length - 1;
    }

    if (currentInnerIndex !== -1) {
        this.onInnerItemMove(item, newInnerIndex, currentInnerIndex);
    }

    return this;
},
onInnerItemMove: Ext.emptyFn,
/**
* @private
* @param {Ext.Component} item
*/
removeInner: function(item) {
Ext.Array.remove(this.innerItems, item);
return this;
},
/**
 * Adds a child Component at the given index. For example, here's how we can add a new item, making it the first
 * child Component of this Container:
 *
 *     myContainer.insert(0, {xtype: 'panel', html: 'new item'});
 *
 * @param {Number} index The index to insert the Component at.
 * @param {Object} item The Component to insert.
 * @return {Ext.Component/Ext.Container} The inserted item, or `this` when an array was given.
 */
insert: function(index, item) {
    var me = this,
        i;

    //<debug error>
    if (typeof index != 'number') {
        Ext.Logger.error("Invalid index of '" + index + "', must be a valid number");
    }
    //</debug>

    if (Ext.isArray(item)) {
        // Insert back-to-front so earlier entries keep earlier positions.
        for (i = item.length - 1; i >= 0; i--) {
            me.insert(index, item[i]);
        }

        return me;
    }

    item = this.factoryItem(item);

    this.doInsert(index, item);

    return item;
},
/**
 * @private
 * Places `item` at `index` within the items collection, handling both fresh
 * inserts and moves of an item that is already a child.
 * @param {Number} index
 * @param {Ext.Component} item
 */
doInsert: function(index, item) {
    var me = this,
        items = me.items,
        itemsLength = items.length,
        currentIndex, isInnerItem;

    isInnerItem = item.isInnerItem();

    // Clamp to the end of the collection.
    if (index > itemsLength) {
        index = itemsLength;
    }

    // NOTE(review): bracket access on the collection object here — confirm
    // this matches the intended `items.getAt(index - 1)` "already in place"
    // short-circuit.
    if (items[index - 1] === item) {
        return me;
    }

    currentIndex = me.indexOf(item);

    if (currentIndex !== -1) {
        // Moving forward within the same parent: account for the slot the
        // item vacates.
        if (currentIndex < index) {
            index -= 1;
        }

        items.removeAt(currentIndex);
    }

    items.insert(index, item);

    if (currentIndex === -1) {
        item.setParent(me);
    }

    if (isInnerItem) {
        me.insertInner(item, index);
    }

    if (currentIndex !== -1) {
        me.onItemMove(item, index, currentIndex);
    }
    else {
        me.onItemAdd(item, index);
    }
},
/**
* @private
*/
insertFirst: function(item) {
return this.insert(0, item);
},
/**
* @private
*/
insertLast: function(item) {
return this.insert(this.getItems().length, item);
},
/**
* @private
*/
insertBefore: function(item, relativeToItem) {
var index = this.indexOf(relativeToItem);
if (index !== -1) {
this.insert(index, item);
}
return this;
},
/**
* @private
*/
insertAfter: function(item, relativeToItem) {
var index = this.indexOf(relativeToItem);
if (index !== -1) {
this.insert(index + 1, item);
}
return this;
},
/**
 * @private
 * Routes a newly added item to the layout and, once the container is
 * initialized, announces it via the 'add' event.
 */
onItemAdd: function(item, index) {
    this.doItemLayoutAdd(item, index);

    if (this.initialized) {
        this.fireEvent('add', this, item, index);
    }
},
/**
 * @private
 * Hands the added item to the layout.  When the container is already
 * rendered, the layout callback rides on the 'renderedchange' action so
 * interested listeners may intervene first.
 */
doItemLayoutAdd: function(item, index) {
    var layout = this.getLayout();

    if (this.isRendered() && item.setRendered(true)) {
        item.fireAction('renderedchange', [this, item, true], 'onItemAdd', layout, { args: [item, index] });
    }
    else {
        layout.onItemAdd(item, index);
    }
},
/**
* @private
*/
onItemRemove: function(item, index, destroying) {
    // Unlike onItemAdd, 'remove' is fired unconditionally (no initialized
    // guard); `destroying` is forwarded so listeners know the item's fate.
    this.doItemLayoutRemove(item, index, destroying);
    this.fireEvent('remove', this, item, index);
},
doItemLayoutRemove: function(item, index, destroying) {
    var layout = this.getLayout();
    // Mirror of doItemLayoutAdd: when the child's rendered flag flips to
    // false, fire 'renderedchange' before the layout's onItemRemove.
    if (this.isRendered() && item.setRendered(false)) {
        item.fireAction('renderedchange', [this, item, false], 'onItemRemove', layout, { args: [item, index, destroying] });
    }
    else {
        layout.onItemRemove(item, index, destroying);
    }
},
/**
* @private
*/
onItemMove: function(item, toIndex, fromIndex) {
    // A docked item loses its docking when repositioned -- its slot in the
    // flow is now determined by its index instead.
    if (item.isDocked()) {
        item.setDocked(null);
    }
    this.doItemLayoutMove(item, toIndex, fromIndex);
    this.fireEvent('move', this, item, toIndex, fromIndex);
},
doItemLayoutMove: function(item, toIndex, fromIndex) {
this.getLayout().onItemMove(item, toIndex, fromIndex);
},
onItemInnerStateChange: function(item, isInner) {
    var layout = this.getLayout();
    // Keep the innerItems bookkeeping in sync when a child toggles between
    // inner and docked/floating, then let the layout react with the same
    // arguments this handler received.
    if (isInner) {
        this.insertInner(item, this.items.indexOf(item));
    }
    else {
        this.removeInner(item);
    }
    layout.onItemInnerStateChange.apply(layout, arguments);
},
/**
* Returns all inner {@link #property-items} of this container. `inner` means that the item is not `docked` or
* `floating`.
* @return {Array} The inner items of this container.
*/
getInnerItems: function() {
    // innerItems is maintained incrementally by insertInner()/removeInner()
    // as children toggle between inner and docked/floating states.
    return this.innerItems;
},
/**
* Returns all the {@link Ext.Component#docked} items in this container.
* @return {Array} The docked items of this container.
*/
getDockedItems: function() {
var items = this.getItems().items,
dockedItems = [],
ln = items.length,
item, i;
for (i = 0; i < ln; i++) {
item = items[i];
if (item.isDocked()) {
dockedItems.push(item);
}
}
return dockedItems;
},
/**
* @private
*/
applyActiveItem: function(activeItem, currentActiveItem) {
    var innerItems = this.getInnerItems();
    // Make sure the items are already initialized
    this.getItems();
    // No items left to be active, reset back to 0 on falsy changes
    if (!activeItem && innerItems.length === 0) {
        return 0;
    }
    else if (typeof activeItem == 'number') {
        // Numeric index: clamp into [0, length-1] and resolve to the item.
        activeItem = Math.max(0, Math.min(activeItem, innerItems.length - 1));
        activeItem = innerItems[activeItem];
        if (activeItem) {
            return activeItem;
        }
        else if (currentActiveItem) {
            return null;
        }
    }
    else if (activeItem) {
        var item;
        //ComponentQuery selector?
        if (typeof activeItem == 'string') {
            // Try it as a direct-child selector first; if that misses, the
            // string is treated as an xtype config below.
            item = this.child(activeItem);
            activeItem = {
                xtype : activeItem
            };
        }
        if (!item || !item.isComponent) {
            item = this.factoryItem(activeItem);
        }
        this.pendingActiveItem = item;
        //<debug error>
        if (!item.isInnerItem()) {
            Ext.Logger.error("Setting activeItem to be a non-inner item");
        }
        //</debug>
        // Adopt the item if it is not yet a child of this container.
        if (!this.has(item)) {
            this.add(item);
        }
        return item;
    }
},
/**
* Animates to the supplied `activeItem` with a specified animation. Currently this only works
* with a Card layout. This passed animation will override any default animations on the
* container, for a single card switch. The animation will be destroyed when complete.
* @param {Object/Number} activeItem The item or item index to make active.
* @param {Object/Ext.fx.layout.Card} animation Card animation configuration or instance.
*/
animateActiveItem: function(activeItem, animation) {
    var layout = this.getLayout(),
        defaultAnimation;
    // Tear down any still-pending one-off animation before starting a new one.
    if (this.activeItemAnimation) {
        this.activeItemAnimation.destroy();
    }
    this.activeItemAnimation = animation = new Ext.fx.layout.Card(animation);
    // Only card layouts support switch animations. The layout's own default
    // animation is disabled for the duration of this one-off animation and
    // restored when it ends; the one-off then destroys itself.
    if (animation && layout.isCard) {
        animation.setLayout(layout);
        defaultAnimation = layout.getAnimation();
        if (defaultAnimation) {
            defaultAnimation.disable();
        }
        animation.on('animationend', function() {
            if (defaultAnimation) {
                defaultAnimation.enable();
            }
            animation.destroy();
        }, this);
    }
    return this.setActiveItem(activeItem);
},
/**
* @private
*/
doSetActiveItem: function(newActiveItem, oldActiveItem) {
    // The pending marker is only meaningful while the change is in flight.
    delete this.pendingActiveItem;
    // 'deactivate' fires on the outgoing item before 'activate' fires on
    // the incoming one.
    if (oldActiveItem) {
        oldActiveItem.fireEvent('deactivate', oldActiveItem, this, newActiveItem);
    }
    if (newActiveItem) {
        newActiveItem.fireEvent('activate', newActiveItem, this, oldActiveItem);
    }
},
show:function(){
this.callParent(arguments);
var modal = this.getModal();
if (modal) {
modal.setHidden(false);
}
},
hide:function(){
this.callParent(arguments);
var modal = this.getModal();
if (modal) {
modal.setHidden(true);
}
},
doSetHidden: function(hidden) {
var modal = this.getModal();
if (modal && (modal.getHidden() !== hidden)) {
modal.setHidden(hidden);
}
this.callSuper(arguments);
},
/**
* @private
*/
setRendered: function(rendered) {
if (this.callParent(arguments)) {
var items = this.items.items,
i, ln;
for (i = 0,ln = items.length; i < ln; i++) {
items[i].setRendered(rendered);
}
return true;
}
return false;
},
/**
* @private
*/
getScrollableBehavior: function() {
var behavior = this.scrollableBehavior;
if (!behavior) {
behavior = this.scrollableBehavior = new Ext.behavior.Scrollable(this);
}
return behavior;
},
/**
* @private
*/
applyScrollable: function(config) {
    // Boolean form toggles the behavior's disabled flag; object form is
    // passed through as behavior config; an already-built Observable
    // instance is returned untouched.
    if (typeof config === 'boolean') {
        //<debug warn>
        if (config === false && !(this.getHeight() !== null || this.heightLayoutSized || (this.getTop() !== null && this.getBottom() !== null))) {
            Ext.Logger.warn("This container is set to scrollable: false but has no specified height. " +
                "You may need to set the container to scrollable: null or provide a height.", this);
        }
        //</debug>
        this.getScrollableBehavior().setConfig({disabled: !config});
    } else if (config && !config.isObservable) {
        this.getScrollableBehavior().setConfig(config);
    }
    return config;
},
doSetScrollable: function() {
    // Intentional no-op hook.
    // Used for plugins when they need to reinitialize scroller listeners
},
/**
* Returns an the scrollable instance for this container, which is a {@link Ext.scroll.View} class.
*
* Please checkout the documentation for {@link Ext.scroll.View}, {@link Ext.scroll.View#getScroller}
* and {@link Ext.scroll.Scroller} for more information.
* @return {Ext.scroll.View} The scroll view.
*/
getScrollable: function() {
    // The scroll view lives on the (lazily created) scrollable behavior.
    return this.getScrollableBehavior().getScrollView();
},
// Used by ComponentQuery to retrieve all of the items
// which can potentially be considered a child of this Container.
// This should be overridden by components which have child items
// that are not contained in items. For example `dockedItems`, `menu`, etc
// @private
getRefItems: function(deep) {
var items = this.getItems().items.slice(),
ln = items.length,
i, item;
if (deep) {
for (i = 0; i < ln; i++) {
item = items[i];
if (item.getRefItems) {
items = items.concat(item.getRefItems(true));
}
}
}
return items;
},
/**
* Examines this container's `{@link #property-items}` property
* and gets a direct child component of this container.
* @param {String/Number} component This parameter may be any of the following:
*
* - {String} : representing the `itemId`
* or `{@link Ext.Component#getId id}` of the child component.
* - {Number} : representing the position of the child component
* within the `{@link #property-items}` property.
*
* For additional information see {@link Ext.util.MixedCollection#get}.
* @return {Ext.Component} The component (if found).
*/
getComponent: function(component) {
if (Ext.isObject(component)) {
component = component.getItemId();
}
return this.getItems().get(component);
},
/**
* Finds a docked item of this container using a reference, `id `or an `index` of its location
* in {@link #getDockedItems}.
* @param {String/Number} component The `id` or `index` of the component to find.
* @return {Ext.Component/Boolean} The docked component, if found.
*/
getDockedComponent: function(component) {
if (Ext.isObject(component)) {
component = component.getItemId();
}
var dockedItems = this.getDockedItems(),
ln = dockedItems.length,
item, i;
if (Ext.isNumber(component)) {
return dockedItems[component];
}
for (i = 0; i < ln; i++) {
item = dockedItems[i];
if (item.id == component) {
return item;
}
}
return false;
},
/**
* Retrieves all descendant components which match the passed selector.
* Executes an Ext.ComponentQuery.query using this container as its root.
* @param {String} selector Selector complying to an Ext.ComponentQuery selector.
* @return {Array} Ext.Component's which matched the selector.
*/
query: function(selector) {
    // Delegates to ComponentQuery with this container as the search root.
    return Ext.ComponentQuery.query(selector, this);
},
/**
* Retrieves the first direct child of this container which matches the passed selector.
* The passed in selector must comply with an {@link Ext.ComponentQuery} selector.
* @param {String} selector An {@link Ext.ComponentQuery} selector.
* @return {Ext.Component}
*/
child: function(selector) {
return this.query('> ' + selector)[0] || null;
},
/**
* Retrieves the first descendant of this container which matches the passed selector.
* The passed in selector must comply with an {@link Ext.ComponentQuery} selector.
* @param {String} selector An {@link Ext.ComponentQuery} selector.
* @return {Ext.Component}
*/
down: function(selector) {
return this.query(selector)[0] || null;
},
//<deprecated product=touch since=2.0>
onClassExtended: function(Class, members) {
    // Fail fast at class-definition time if a subclass still implements the
    // pre-2.0 onAdd/onRemove template methods.
    if ('onAdd' in members || 'onRemove' in members) {
        throw new Error("["+Class.$className+"] 'onAdd()' and 'onRemove()' methods " +
            "no longer exist in Ext.Container, please use 'onItemAdd()' " +
            "and 'onItemRemove()' instead }");
    }
},
//</deprecated>
destroy: function() {
var me = this,
modal = me.getModal();
if (modal) {
modal.destroy();
}
me.removeAll(true, true);
me.unlink('_scrollable');
Ext.destroy(me.items);
me.callSuper();
}
}, function() {
this.addMember('defaultItemClass', this);
//<deprecated product=touch since=2.0>
/**
* @method addAll
* Adds an array of Components to this Container.
* @deprecated 2.0.0 Please use {@link #method-add} instead.
* @param {Array} items The array of items to add to this container.
* @return {Array} The array of items after they have been added.
*/
Ext.deprecateClassMethod(this, 'addAll', 'add');
/**
* @method removeDocked
* Removes a docked item from this Container.
* @deprecated 2.0.0 Please use {@link #method-remove} instead.
* @param {Object} item The item to remove.
* @param {Boolean} destroy Calls the Component's {@link Ext.Component#method-destroy destroy}
* method if `true`.
* @return {Ext.Component} this
*/
Ext.deprecateClassMethod(this, 'removeDocked', 'remove');
/**
* @member Ext.Container
* @property items
* @type Ext.util.MixedCollection
* The set of all items in this Container.
* @deprecated 2.0.0 Please use {@link #getItems} method instead.
*/
this.override({
constructor: function(config) {
    config = config || {};
    var dockedItems = config.dockedItems,
        i, ln, item;
    /**
     * @cfg {Boolean/String/Object} scroll
     * @inheritdoc Ext.Container#scrollable
     * @deprecated 2.0.0 Please use the {@link #scrollable} configuration.
     */
    // Map the deprecated 'scroll' config onto 'scrollable'.
    if (config.scroll) {
        //<debug warn>
        Ext.Logger.deprecate("'scroll' config is deprecated, please use 'scrollable' instead.", this);
        //</debug>
        config.scrollable = config.scroll;
        delete config.scroll;
    }
    this.callOverridden(arguments);
    // Deprecated 'dockedItems' config: normalize each entry's legacy
    // 'dock' key to 'docked', then add them as ordinary items.
    if (dockedItems) {
        //<debug warn>
        Ext.Logger.deprecate("'dockedItems' config is deprecated, please add all docked items inside the 'items' config with a 'docked' property indicating the docking position instead, i.e { /*...*/ docked: 'top' /*...*/ }");
        //</debug>
        dockedItems = Ext.Array.from(dockedItems);
        for (i = 0,ln = dockedItems.length; i < ln; i++) {
            item = dockedItems[i];
            if ('dock' in item) {
                //<debug warn>
                Ext.Logger.deprecate("'dock' config for docked items is deprecated, please use 'docked' instead");
                //</debug>
                item.docked = item.dock;
            }
        }
        this.add(dockedItems);
    }
},
add: function() {
    var args = arguments;
    // Back-compat shims: add(index, item) forwards to insert(), and
    // add(a, b, c, ...) is collapsed into a single-array call.
    if (args.length > 1) {
        if (typeof args[0] == 'number') {
            //<debug warn>
            Ext.Logger.deprecate("add(index, item) method signature is deprecated, please use insert(index, item) instead");
            //</debug>
            return this.insert(args[0], args[1]);
        }
        //<debug warn>
        Ext.Logger.deprecate("Passing items as multiple arguments is deprecated, please use one single array of items instead");
        //</debug>
        args = [Array.prototype.slice.call(args)];
    }
    return this.callOverridden(args);
},
doAdd: function(item) {
    var docked = item.getDocked(),
        overlay = item.overlay,
        position;
    // Deprecated 'overlay' on a docked item: undock it and emulate the old
    // look with explicit top/left/right/bottom positioning instead.
    if (overlay && docked) {
        //<debug>
        Ext.Logger.deprecate("'overlay' config is deprecated on docked items, please set the top/left/right/bottom configurations instead.", this);
        //</debug>
        if (docked == "top") {
            position = {
                top: 0,
                bottom: 'auto',
                left: 0,
                right: 0
            };
        } else if (docked == "bottom") {
            position = {
                top: null,
                bottom: 0,
                left: 0,
                right: 0
            };
        }
        // Only 'top' and 'bottom' docking are emulated; other values fall
        // through to the normal add path unchanged.
        if (position) {
            item.setDocked(false);
            item.setTop(position.top);
            item.setBottom(position.bottom);
            item.setLeft(position.left);
            item.setRight(position.right);
        }
    }
    return this.callOverridden(arguments);
},
applyDefaults: function(defaults) {
    // Function-valued 'defaults' still works (see factoryItemWithDefaults)
    // but is deprecated; only the warning is added here.
    if (typeof defaults == 'function') {
        //<debug warn>
        Ext.Logger.deprecate("Passing a function as 'defaults' is deprecated. To add custom logics when " +
            "'defaults' is applied to each item, have your own factoryItem() method in your sub-class instead");
        //</debug>
    }
    return this.callOverridden(arguments);
},
factoryItemWithDefaults: function(item) {
    var defaults = this.getDefaults(),
        customDefaults, ret;
    // Defaults is a function (must return a string, object, or class instance)
    if (typeof defaults == 'function') {
        customDefaults = defaults.call(this, item);
    }
    // String (must be the id of an existent component)
    if (typeof item == 'string') {
        //<debug warn>
        Ext.Logger.deprecate("Passing a string id of item ('"+item+"') is deprecated, please pass a reference to that item instead");
        //</debug>
        item = Ext.getCmp(item);
    }
    // Temporarily swap _defaults to the per-item value around the parent
    // call, then restore the original so later items are unaffected.
    if (customDefaults) {
        this._defaults = customDefaults;
    }
    ret = this.callParent([item]);
    if (customDefaults) {
        this._defaults = defaults;
    }
    return ret;
},
applyMasked: function(masked) {
    // Back-compat: a bare { message: ... } config used to imply a loadmask;
    // keep that behavior but warn that it must now be explicit.
    if (Ext.isObject(masked) && !masked.isInstance && 'message' in masked && !('xtype' in masked) && !('xclass' in masked)) {
        masked.xtype = 'loadmask';
        //<debug warn>
        Ext.Logger.deprecate("Using a 'message' config without specify an 'xtype' or 'xclass' will no longer implicitly set 'xtype' to 'loadmask'. Please set that explicitly.");
        //</debug>
    }
    return this.callOverridden(arguments);
}
});
Ext.deprecateClassMethod(this, 'setMask', 'setMasked');
//</deprecated>
});
|
# Source the given file when it exists; report the outcome either way.
function include {
    if [ ! -f "$1" ]; then
        echo "$1 doesn't exist"
        return
    fi
    source "$1" && echo "Sourced $1"
}
# Succeeds iff "$1" resolves to a command (binary, builtin, alias or function).
function command_exists {
    type "$1" > /dev/null 2>&1
}
function get_first_item_in_list_command {
    # Reads stdin: keeps line 2 (head -2 | tail -1), then its first
    # space-delimited field. Presumably line 1 of the input is a header row
    # -- TODO confirm; otherwise this skips the actual first item.
    head -2 | tail -1 | cut -f1 -d' '
}
|
<filename>raw/REPRISE DELTA/Mouvements.sql
-- Movements extract: one row per 2009 movement on accounts starting with '36',
-- with the amount split into DEBIT/CREDIT columns by the sense flag (D/C).
select mv.mvtw_age, mv.mvtw_cha, mv.mvtw_cli, mv.mvtw_suf, c.cliw_int, mv.mvtw_dco, mv.mvtw_lib, mv.mvtw_sen,
       case when mv.mvtw_sen = 'D' then mv.mvtw_mon end DEBIT,
       case when mv.mvtw_sen = 'C' then mv.mvtw_mon end CREDIT
from mouvement2009 mv
join clients c
  on mv.mvtw_cli = c.cliw_cli
where mv.mvtw_cha like '36%'
order by mv.mvtw_dco
|
public class EntityManagement
{
/// <summary>
/// Marks every entity referenced in <paramref name="entityIDs"/> as
/// indestructible via the script wrapper.
/// </summary>
/// <param name="entityIDs">Raw entity ids; each is wrapped in a
/// MyEntityIdentifier before lookup.</param>
public void ManageEntities(List<uint> entityIDs)
{
foreach (uint entityID in entityIDs)
{
MyEntity entity = MyEntities.GetEntityById(new MyEntityIdentifier(entityID));
MyScriptWrapper.SetEntityDestructible(entity, false);
}
}
/// <summary>
/// Is called whenever mission/Submission gets obsolete. Clean your allocations/handlers here.
/// </summary>
public override void Unload()
{
// Clean up resources here
// Implement resource cleanup logic such as releasing allocated memory, closing open connections, or unregistering event handlers.
// NOTE(review): currently a no-op; the base being overridden is not
// visible in this file -- confirm the base class requires no cleanup.
}
}
<filename>spring-boot/2.4.3/src/main/java/club/yunzhi/framework/springboot/security/OneTimePasswordImpl.java
package club.yunzhi.framework.springboot.security;
import club.yunzhi.framework.springboot.properties.AppProperties;
import com.j256.twofactorauth.TimeBasedOneTimePasswordUtil;
import org.apache.commons.codec.binary.Base32;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import java.security.GeneralSecurityException;
import java.util.Optional;
@Service
public class OneTimePasswordImpl implements OneTimePassword {
    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Last password handed out; used to reject a second fetch of the same
    // time-step's code ("each code may be used only once").
    private String password = "";

    // Base32-encoded shared secret used to derive the TOTP codes
    // (TOTP tooling expects base32, same principle as base64).
    private final String token;

    public OneTimePasswordImpl(AppProperties appProperties) {
        Base32 base32 = new Base32();
        this.token = base32.encodeAsString(appProperties.getToken().getBytes());
    }

    /**
     * Returns the current one-time password, at most once per generated code.
     *
     * @return the freshly generated code, or {@link Optional#empty()} when the
     *         same code was already handed out (two requests within one time
     *         window) or when generation fails.
     */
    @Override
    public Optional<String> getPassword() {
        try {
            String password = TimeBasedOneTimePasswordUtil.generateCurrentNumberString(this.token);
            // Same code as last time => second request inside one time window.
            if (password.equals(this.password)) {
                return Optional.empty();
            } else {
                this.password = password;
            }
        } catch (GeneralSecurityException e) {
            // FIX: log the exception through the logger (instead of
            // printStackTrace) and do NOT fall through to return a stale or
            // empty previous password -- that broke the one-time guarantee.
            this.logger.error("生成一次性密码时发生错误", e);
            return Optional.empty();
        }
        return Optional.of(this.password);
    }
}
|
# VMware Fusion: expose its bundled CLI tools on PATH when it is installed.
vmware_lib="/Applications/VMware Fusion.app/Contents/Library"
if [ -d "$vmware_lib" ]; then
    export PATH="$PATH:$vmware_lib"
fi
|
'use strict';
const uuid = require('uuid');
const AWS = require('aws-sdk');
const dynamoDb = new AWS.DynamoDB.DocumentClient({
// region: 'localhost',
// endpoint: 'http://localhost:8000'
}); // remove when deploying!
module.exports.create = (event, context, callback) => {
    // Persist a new note, keyed by a freshly generated v1 UUID.
    const payload = JSON.parse(event.body);
    const putParams = {
        TableName: process.env.DYNAMODB_TABLE,
        Item: {
            id: uuid.v1(),
            content: payload.content
        }
    };
    dynamoDb.put(putParams, function (error) {
        if (error) {
            console.error(error);
            callback(null, {
                statusCode: error.statusCode || 500,
                headers: { 'Content-Type': 'text/plain' },
                body: 'Could not create the note.'
            });
            return;
        }
        // Echo the stored item back to the client.
        callback(null, {
            statusCode: 200,
            body: JSON.stringify(putParams.Item)
        });
    });
}
module.exports.getOne = (event, context, callback) => {
    // Fetch a single note by the id captured from the request path.
    const getParams = {
        TableName: process.env.DYNAMODB_TABLE,
        Key: {
            id: event.pathParameters.id
        }
    };
    dynamoDb.get(getParams, function (error, result) {
        if (error) {
            console.error(error);
            callback(null, {
                statusCode: error.statusCode || 500,
                headers: { 'Content-Type': 'text/plain' },
                body: 'Could not fetch the note.'
            });
            return;
        }
        callback(null, {
            statusCode: 200,
            body: JSON.stringify(result.Item)
        });
    });
};
module.exports.getAll = (event, context, callback) => {
    // Return every note via a full table scan.
    const scanParams = {
        TableName: process.env.DYNAMODB_TABLE
    };
    dynamoDb.scan(scanParams, function (error, result) {
        if (error) {
            console.error(error);
            callback(null, {
                statusCode: error.statusCode || 500,
                headers: { 'Content-Type': 'text/plain' },
                body: 'Could not fetch the note.'
            });
            return;
        }
        callback(null, {
            statusCode: 200,
            body: JSON.stringify(result.Items)
        });
    });
};
module.exports.update = (event, context, callback) => {
    // Overwrite the note's content; ALL_NEW returns the updated attributes.
    const payload = JSON.parse(event.body);
    const updateParams = {
        TableName: process.env.DYNAMODB_TABLE,
        Key: {
            id: event.pathParameters.id
        },
        ExpressionAttributeValues: {
            ':content': payload.content
        },
        UpdateExpression: 'SET content = :content',
        ReturnValues: 'ALL_NEW'
    };
    dynamoDb.update(updateParams, function (error, result) {
        if (error) {
            console.error(error);
            callback(null, {
                statusCode: error.statusCode || 500,
                headers: { 'Content-Type': 'text/plain' },
                body: 'Could not update the note.'
            });
            return;
        }
        callback(null, {
            statusCode: 200,
            body: JSON.stringify(result.Attributes)
        });
    });
};
module.exports.delete = (event, context, callback) => {
    // Remove the note identified by the path parameter id.
    const deleteParams = {
        TableName: process.env.DYNAMODB_TABLE,
        Key: {
            id: event.pathParameters.id
        }
    };
    dynamoDb.delete(deleteParams, function (error) {
        if (error) {
            console.error(error);
            callback(null, {
                statusCode: error.statusCode || 500,
                headers: { 'Content-Type': 'text/plain' },
                body: 'Could not delete the note.'
            });
            return;
        }
        callback(null, {
            statusCode: 200,
            body: JSON.stringify('Removed the note with id: ' + event.pathParameters.id)
        });
    });
};
#!/bin/sh
# Fixed-string (-F), whole-word (-w) grep for "$1" across all .ml sources
# under src, excluding src/zzzz; -print0 / xargs -0 keeps unusual paths intact.
find src -name '*.ml' -not -path 'src/zzzz/*' -print0 | xargs -0 grep -w -F "$1"
|
<!DOCTYPE html>
<html>
<head>
<title>Input Validation</title>
<script>
function validateForm() {
    // Read the current field values.
    var firstName = document.getElementById("firstName").value;
    var lastName = document.getElementById("lastName").value;
    var email = document.getElementById("email").value;

    // Both name fields are required.
    if (!firstName || !lastName) {
        alert("First and last name must be filled out");
        return false;
    }
    // Minimal email sanity check: must contain an "@".
    if (email.indexOf("@") === -1) {
        alert("Email must be valid");
        return false;
    }
    // Everything checks out: submit programmatically and report success.
    document.forms["form"].submit();
    return true;
}
</script>
</head>
<body>
<form name="form" action="form_action.html" onsubmit="return validateForm()">
First name: <input type="text" name="firstName" id="firstName" /> <br />
Last name: <input type="text" name="lastName" id="lastName" /> <br />
Email: <input type="text" name="email" id="email" /> <br />
<input type="submit" value="Submit" />
</form>
</body>
</html> |
# Coverage must start before any application code is loaded.
require 'coveralls'
Coveralls.wear!

# Make lib/mastermind requirable relative to this helper file.
$LOAD_PATH.unshift File.expand_path('../../lib/mastermind', __FILE__)

require 'mastermind'
require 'messages'
require 'player'
require 'computer'
require 'game_engine'
require 'difficulty'
require 'result'
|
package Servlets;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import Classes.Lesson;
/**
* Servlet implementation class PGive_Grades1
*/
@WebServlet("/PGive_Grades1")
public class PGive_Grades1 extends HttpServlet {
private static final long serialVersionUID = 1L;
/**
* @see HttpServlet#HttpServlet()
*/
public PGive_Grades1() {
super();
// TODO Auto-generated constructor stub
}
/**
* Renders the grade-entry page for one lesson. The lesson id arrives as
* the "id_lesson" request parameter and is echoed into the form, which
* posts up to four (student id, grade) pairs to CheckProffessorPassingGrades.
*
* NOTE(review): the request parameter is written into the HTML without
* escaping -- potential reflected XSS; confirm upstream sanitization.
*
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO Auto-generated method stub
response.setContentType("text/html");
response.setCharacterEncoding("UTF-8");
PrintWriter out = response.getWriter();
// Carry the lesson id through a Lesson bean so the form can echo it back.
Lesson l=new Lesson();
l.setId_Lnumber(request.getParameter("id_lesson") );
// The whole page (styles + markup) is emitted as one string literal.
out.print("<!DOCTYPE html>\r\n" +
"<html>\r\n" +
"<head>\r\n" +
" <meta http-equiv=\"Content-Type\" content=\"text/html\">\r\n" +
" <meta charset=\"UTF-8\">\r\n" +
" <meta http-equiv=\"Content-Type\" content=\"text/html\">\r\n" +
" <meta charset=\"UTF-8\">\r\n" +
" <title>UniversityStyle</title>\r\n" +
" <style>\r\n" +
" .column {\r\n" +
" width: 50%;\r\n" +
" padding: 0 25px;\r\n" +
" -webkti-box-sizing: border-box;\r\n" +
" -moz-box-sizing: border-box;\r\n" +
" box-sizing: border-box;\r\n" +
" float: left;\r\n" +
" }\r\n" +
"\r\n" +
" .field {\r\n" +
" width: 13%;\r\n" +
" padding: auto;\r\n" +
" height: auto;\r\n" +
" background-color: deepskyblue;\r\n" +
" float: left;\r\n" +
" height: 100%;\r\n" +
" }\r\n" +
"\r\n" +
" .field1 {\r\n" +
" width: auto;\r\n" +
" border-radius: 15px;\r\n" +
" padding: auto;\r\n" +
" height: 75%;\r\n" +
" background-color: white;\r\n" +
" float: center;\r\n" +
" position: relative;\r\n" +
" }\r\n" +
"\r\n" +
" body {\r\n" +
" background-color: white;\r\n" +
" }\r\n" +
"\r\n" +
" .container {\r\n" +
" padding: 10px;\r\n" +
" height: 45px;\r\n" +
" float: left;\r\n" +
" }\r\n" +
"\r\n" +
" .title {\r\n" +
" border-radius: 15px 50px 30px 5px;\r\n" +
" background-color: deepskyblue;\r\n" +
" color: white;\r\n" +
" height: 80px;\r\n" +
" }\r\n" +
"\r\n" +
" .userbutton {\r\n" +
" background-color: white;\r\n" +
" border-radius: 15px;\r\n" +
" padding: auto;\r\n" +
" width: 150px;\r\n" +
" height: 100px;\r\n" +
" }\r\n" +
"\r\n" +
" .adminbutton {\r\n" +
" background-color: white;\r\n" +
" border-radius: 15px;\r\n" +
" }\r\n" +
"\r\n" +
" .refresh {\r\n" +
" background-color: deepskyblue;\r\n" +
" border-radius: 15px;\r\n" +
" float: right;\r\n" +
" width: 50px;\r\n" +
" height: 50px;\r\n" +
" color: white;\r\n" +
" }\r\n" +
"\r\n" +
" ul {\r\n" +
" list-style-type: none;\r\n" +
" margin: 0;\r\n" +
" padding: 0;\r\n" +
" width: 200px;\r\n" +
" background-color: deepskyblue;\r\n" +
" }\r\n" +
"\r\n" +
" li .form1 {\r\n" +
" border-radius: 15px;\r\n" +
" display: block;\r\n" +
" background-color: deepskyblue;\r\n" +
" position: relative;\r\n" +
" color: #000;\r\n" +
" padding: 30px 16px;\r\n" +
" width: 75%;\r\n" +
" text-decoration: none;\r\n" +
" }\r\n" +
"\r\n" +
" li .form1:hover {\r\n" +
" border-radius: 15px;\r\n" +
" background-color: #555;\r\n" +
" color: white;\r\n" +
" .dot;\r\n" +
"\r\n" +
" {\r\n" +
" height: 10px;\r\n" +
" width: 10px;\r\n" +
" background-color: black;\r\n" +
" border-radius: 50%;\r\n" +
" display: inline-block;\r\n" +
" }\r\n"
+ ".column {\r\n" +
" float: left;\r\n" +
" width: 50%;\r\n" +
" padding: 10px;\r\n" +
" height: 300px; /* Should be removed. Only for demonstration */\r\n" +
"}" +
".column1 {\r\n" +
" float: right;\r\n" +
" width: 50%;\r\n" +
" padding: 10px;\r\n" +
" height: 300px; /* Should be removed. Only for demonstration */\r\n" +
"}" +
" </style>\r\n" +
"</head>\r\n" +
"<body>\r\n" +
"\r\n" +
" <div class=\"container\">\r\n" +
" <img src=\"university_icon.png\" style=\"width:150px;float:left;\">\r\n" +
" </div>\r\n" +
"\r\n" +
" <div class=\"title\">\r\n" +
" <a href=\"PGive_Grades\">\r\n" +
" <img src=\"leftarrow.png\" style=\"width:50px;float:right;\">\r\n" +
" </a>\r\n" +
" <h2>Ηλεκτρονική Πλατφόρμα Πανεπιστημίου<br></br>Επίσημη-Ιστοσελίδα Γραμματείας </h2>\r\n" +
"\r\n" +
"\r\n" +
"\r\n" +
" </div>\r\n" +
" <p> </p>\r\n" +
" <fieldset class=\"field1\">\r\n" +
"\r\n" +
" <fieldset class=\"field\">\r\n" +
"\r\n" +
"\r\n" +
" <ul>\r\n" +
" <br></br>\r\n" +
" <li><form action=\"Proffessor_menu.html\"><button class=\"form1\" type=\"submit\"> <h2> Aρχική </h2></button></form></li>\r\n" +
" <li><form action=\"PGive_Grades\"> <button class=\"form1\" type=\"submit\"> <h2>Αναθεση Βαθμολογίας</h2></button> </form></li> \r\n" +
" <li><form action=\"index\"><button class=\"form1\" type=\"submit\"> <h2>Αποσύνδεση</h2></button></form></li>\r\n" +
" </ul>" +
"\r\n" +
"\r\n" +
"\r\n" +
"\r\n" +
" </fieldset>\r\n" +
" <h2> Ηλεκτρονική Πλατφόρμα Γραμματείας!</h2>\r\n" +
" <img src=\"secretary.png\" style=\"width:500px;position:relative;float:right;\">\r\n"
+ "<fieldset>" +
" <h2><fieldset style=\"background-color:deepskyblue;\"><u>Δωστε βαθμολογία "+l.getId_Lnumber()+" </u></fieldset></h2>"
+ " id_Μαθηματος:<form action=\"CheckProffessorPassingGrades\"><input type=\"text\" value=\" "+l.getId_Lnumber()+" \" name=\"id_lesson\" required>"
+ "<h3><div class=\"column\"> Id_Φοιτητή </div> Βαθμολογία-Φοιτητή </h3>"
+
" <span class=\"dot\"></span> <div class=\"column\"><fieldset>"
+ " "
+ ""
+ ""
+ " "
+ "Δώστε το id:<input type=\"text\" minlength=\"5\" maxlength=\"5\" size=\"5\" value=\"p0000\" name=\"id_student\" >"
+"<br></br>Δώστε το id:<input type=\"text\" minlength=\"5\" maxlength=\"5\" size=\"5\" value=\"p0000\" name=\"id_student1\" >"
+"<br></br>Δώστε το id:<input type=\"text\" minlength=\"5\" maxlength=\"5\" size=\"5\" value=\"p0000\" name=\"id_student2\" >"
+"<br></br>Δώστε το id:<input type=\"text\" minlength=\"5\" maxlength=\"5\" size=\"5\" value=\"p0000\" name=\"id_student3\" >"
+ " </fieldset></div> <div class=\"column\"> <fieldset>"
+ "Βαθμός:<input type=\"number\" value=\" \" name=\"grade\" min=\"5\" max=\"10\">"
+ "<br></br>Βαθμός:<input type=\"number\" value=\" \" name=\"grade1\" min=\"5\" max=\"10\" >"
+ "<br></br>Βαθμός:<input type=\"number\" value=\" \" name=\"grade2\" min=\"5\" max=\"10\" >"
+ "<br></br>Βαθμός:<input type=\"number\" value=\" \" name=\"grade3\" min=\"5\" max=\"10\">"
+ "</fieldset></div> \r\n" +
" \r\n" +
"<button style=\"width:auto;background:deepskyblue;\" > Pass_Grades</button></form>"+
" <br></br>\r\n" +
"</fieldset>"
+ "</fieldset>"+
"</body>\r\n" +
"</html>");
}
}
|
# Build a 10 fps H.264 timelapse (696x520) from images created in the last
# 12 hours (-cmin -720); -y overwrites the previous latest.mp4.
# NOTE(review): find's output order is unspecified -- frames may not be
# emitted chronologically; confirm this is acceptable or sort the list.
find /home/pi/images/image20*.jpg -type f -cmin -720 -exec cat {} \; | ffmpeg -f image2pipe -framerate 10 -i - -s 696x520 -vcodec libx264 -pix_fmt yuv420p latest.mp4 -y
# Keep a timestamped copy alongside the rolling "latest" file.
cp latest.mp4 /home/pi/videos/$(date +%Y%m%d%H%M).mp4
|
# ***** BEGIN GPL LICENSE BLOCK *****
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENCE BLOCK *****
import bpy
from bpy.props import *
from bpy.types import Operator, AddonPreferences
import os
import shutil
class EX_MainPanel(bpy.types.Panel):
    """Sidebar UI for the Exchanger add-on (View3D > UI region > 'Ex' tab)."""
    bl_label = "Exchanger"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_context = "objectmode"
    bl_category = 'Ex'
    #bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        """Draw export/import buttons plus the scene-level exchange options."""
        layout = self.layout
        scene = context.scene
        b_exchanger = bpy.context.scene.b_exchanger
        # GUI
        row = layout.row()
        col = row.column()
        col.operator("ex_export.exchanger", text="Export")
        #op.world_scale = 1.0
        col.operator("ex_import.exchanger", text="Import")
        #op.world_scale = 1.0
        layout.separator()
        row = layout.row()
        # Houdini variants scale by 0.01 on export / 100 on import --
        # presumably to bridge the unit conventions; confirm against Houdini.
        op = col.operator("ex_export.exchanger", text="ExportHoudini")
        op.world_scale = 0.01
        op = col.operator("ex_import.exchanger", text="ImportHoudini")
        op.world_scale = 100.0
        layout.separator()
        op = col.operator("ex_export.exchanger", text="Export Obj")
        op.export_type = 'OBJ'
        op = col.operator("ex_import.exchanger", text="Import Obj")
        op.import_type = 'OBJ'
        row = layout.row()
        row.prop(b_exchanger, "doApplyModifiers", text="Apply Modifiers")
        row = layout.row()
        row.prop(b_exchanger, "exportMaterials", text="Export Materials")
        row = layout.row()
        row.prop(b_exchanger, "importNormals", text="Import Normals")
class EX_AddonPreferences(AddonPreferences):
    """Add-on preferences: the folder the exchange file is written to/read from."""
    # this must match the addon name, use '__package__'
    # when defining this in a submodule of a python package.
    # bl_idname = __name__
    bl_idname = __package__

    # FIX: declared with '=' instead of ':'. On Blender 2.8+ (which this file
    # targets -- it uses context.preferences) only annotated class members are
    # registered as properties, matching the ':' style used elsewhere here.
    exchangedir: StringProperty(
        name="ExchangeFolder",
        subtype="DIR_PATH",
        default="",
    )

    def draw(self, context):
        """Draw the preferences UI: a hint label plus the folder picker."""
        layout = self.layout
        row = layout.row()
        row.label(text="Please, set Exchanges Folder and save Preferences")
        row = layout.row()
        row.prop(self, "exchangedir")
class EX_ExportScene(bpy.types.Operator):
    """Export the current selection to <ExchangeFolder>/exchange.fbx or .OBJ."""
    bl_idname = "ex_export.exchanger"
    bl_label = "Export your custom property"
    bl_description = "Export your custom property"
    bl_options = {'UNDO'}

    # Uniform scale applied on export (0.01 for the Houdini preset).
    world_scale : FloatProperty( default=1.0 )
    export_type : EnumProperty(
        items=(('FBX', 'FBX', ''),
            ('OBJ', 'OBJ', '')
            ),
        default = 'FBX'
    )

    def invoke(self, context, event):
        # Addon Preferences
        #user_preferences = context.user_preferences
        addon_prefs = context.preferences.addons[__package__].preferences
        b_exchanger = bpy.context.scene.b_exchanger
        scene = context.scene
        # Normalize the configured folder to the platform separator and
        # guarantee a trailing separator for the path concatenation below.
        exchange_dir = addon_prefs.exchangedir.replace("\\", os.sep)
        if exchange_dir.endswith(os.sep) is False:
            exchange_dir += os.sep
        if len(bpy.context.selected_objects) > 0 and os.path.isdir(addon_prefs.exchangedir):
            # change render levl of susurf and multires for good export
            # (exporters read render_levels; mirror the viewport state into it
            # and remember the original values so they can be restored below).
            fix_modifiers = []
            for obj in bpy.context.selected_objects:
                for mod in obj.modifiers:
                    if mod.type in {'SUBSURF', 'MULTIRES'}:
                        fix_modifiers.append((mod, mod.render_levels))
                        if mod.show_viewport is False:
                            mod.render_levels = 0
                        else:
                            mod.render_levels = mod.levels
            # Export setings
            if self.export_type == 'FBX':
                extension = 'fbx'
            else:
                extension = 'OBJ'
            model_path = exchange_dir + "exchange." + extension
            apply_modifiers = b_exchanger.doApplyModifiers
            # Export Model
            if self.export_type == 'FBX':
                bpy.ops.export_scene.fbx(filepath=model_path, check_existing=True, axis_forward='-Z', axis_up='Y', use_selection=True, global_scale=self.world_scale, apply_unit_scale=True, bake_space_transform=True, use_mesh_modifiers=apply_modifiers, use_custom_props=True)
            else:
                bpy.ops.export_scene.obj(filepath=model_path, check_existing=True, use_selection=True, use_mesh_modifiers=apply_modifiers, use_edges=True, use_normals=True, use_uvs=True, use_vertex_groups=False, use_blen_objects=True, keep_vertex_order=True, global_scale=self.world_scale)
            # revert render level of modifiers back
            for mod_stuff in fix_modifiers:
                mod_stuff[0].render_levels = mod_stuff[1]
            fix_modifiers = None # clear array
        else:
            self.report(
                {'INFO'}, "No Selected Objects or Bad Exchange Folder!!!")
        return {'FINISHED'}
class EX_ImportScene(bpy.types.Operator):
    """Import ``exchange.fbx`` / ``exchange.OBJ`` from the exchange folder.

    Counterpart of the export operator: reads the model file that the other
    application wrote into the add-on's configured exchange directory.
    """
    bl_idname = "ex_import.exchanger"
    bl_label = "import your custom property"
    bl_description = "import your custom property"
    bl_options = {'UNDO'}

    # FIX: declared as an annotation (":") instead of a plain assignment
    # ("="). With "=", Blender 2.8+ does not register the property, so
    # self.world_scale would be a deferred-property object rather than a
    # float when passed as global_scale below. The export operator already
    # uses the annotation form.
    world_scale : FloatProperty( default=1.0 )
    import_type : EnumProperty(
        items=(('FBX', 'FBX', ''),
               ('OBJ', 'OBJ', '')
               ),
        default = 'FBX'
    )

    def invoke(self, context, event):
        # Addon Preferences
        #user_preferences = context.user_preferences
        addon_prefs = context.preferences.addons[__package__].preferences
        scene = context.scene
        b_exchanger = bpy.context.scene.b_exchanger
        # Normalize the configured path and ensure a trailing separator.
        exchange_dir = addon_prefs.exchangedir.replace("\\", os.sep)
        if exchange_dir.endswith(os.sep) is False:
            exchange_dir += os.sep
        if os.path.isdir(exchange_dir):
            ## fix for animation removement for Modo
            #scene_objects = []
            #for obj in bpy.context.scene.objects:
            #scene_objects.append(obj.name)
            # Import settings: the extension must match what the export
            # operator writes ('fbx' lowercase, 'OBJ' uppercase).
            if self.import_type == 'FBX':
                extension = 'fbx'
            else:
                extension = 'OBJ'
            model_path = exchange_dir + "exchange." + extension
            importNormals = b_exchanger.importNormals
            # IMPORT
            if self.import_type == 'FBX':
                bpy.ops.import_scene.fbx(filepath=model_path, axis_forward='-Z', axis_up='Y', global_scale=self.world_scale, bake_space_transform=True, use_custom_normals=importNormals, force_connect_children=False, primary_bone_axis='Y', secondary_bone_axis='X', use_prepost_rot=True)
            else:
                bpy.ops.import_scene.obj(filepath=model_path, use_edges=True, use_smooth_groups=True, use_split_objects=False, use_split_groups=False, use_groups_as_vgroups=False, use_image_search=False, split_mode='OFF')
            ## remove animatrins. Fix for Modo
            #for obj in scene.objects:
            #if obj.name not in scene_objects:
            #obj.animation_data.action.use_fake_user = False
            #obj.animation_data.action = None
            #scene_objects = None # clear
        else:
            self.report({'INFO'}, "Bad Exchange Folder!!!")
        return {'FINISHED'}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.genetics;
import java.util.Collections;
import java.util.List;
import org.apache.commons.math3.exception.NotPositiveException;
import org.apache.commons.math3.exception.NullArgumentException;
import org.apache.commons.math3.exception.NumberIsTooLargeException;
import org.apache.commons.math3.exception.OutOfRangeException;
import org.apache.commons.math3.exception.util.LocalizedFormats;
import org.apache.commons.math3.util.FastMath;
/**
* Population of chromosomes which uses elitism (certain percentage of the best
* chromosomes is directly copied to the next generation).
*
* @version $Id: ElitisticListPopulation.java 1416643 2012-12-03 19:37:14Z tn $
* @since 2.0
*/
/**
 * Population of chromosomes which uses elitism: a fixed fraction of the
 * fittest chromosomes is carried over unchanged into the next generation.
 *
 * @version $Id: ElitisticListPopulation.java 1416643 2012-12-03 19:37:14Z tn $
 * @since 2.0
 */
public class ElitisticListPopulation extends ListPopulation {

    /** Fraction (in [0, 1]) of the best chromosomes copied to the next generation. */
    private double elitismRate = 0.9;

    /**
     * Creates a new {@link ElitisticListPopulation} instance.
     *
     * @param chromosomes list of chromosomes in the population
     * @param populationLimit maximal size of the population
     * @param elitismRate fraction of the best chromosomes transferred directly
     *        to the next generation, in [0, 1]
     * @throws NullArgumentException if the list of chromosomes is {@code null}
     * @throws NotPositiveException if the population limit is not a positive number (&lt; 1)
     * @throws NumberIsTooLargeException if the list of chromosomes exceeds the population limit
     * @throws OutOfRangeException if the elitism rate is outside the [0, 1] range
     */
    public ElitisticListPopulation(final List<Chromosome> chromosomes, final int populationLimit,
                                   final double elitismRate)
        throws NullArgumentException, NotPositiveException, NumberIsTooLargeException, OutOfRangeException {
        super(chromosomes, populationLimit);
        // Route through the setter so the rate is range-checked exactly once.
        setElitismRate(elitismRate);
    }

    /**
     * Creates a new {@link ElitisticListPopulation} instance with an empty inner chromosome list.
     *
     * @param populationLimit maximal size of the population
     * @param elitismRate fraction of the best chromosomes transferred directly
     *        to the next generation, in [0, 1]
     * @throws NotPositiveException if the population limit is not a positive number (&lt; 1)
     * @throws OutOfRangeException if the elitism rate is outside the [0, 1] range
     */
    public ElitisticListPopulation(final int populationLimit, final double elitismRate)
        throws NotPositiveException, OutOfRangeException {
        super(populationLimit);
        setElitismRate(elitismRate);
    }

    /**
     * Starts the population for the next generation: the {@link #elitismRate}
     * fraction of the best chromosomes is copied directly into it.
     *
     * @return the beginnings of the next generation
     */
    public Population nextGeneration() {
        // The offspring population starts empty but inherits limit and rate.
        final ElitisticListPopulation offspring =
                new ElitisticListPopulation(getPopulationLimit(), getElitismRate());

        final List<Chromosome> current = getChromosomeList();
        // Sorts the backing list in place (side effect on this population);
        // the fittest chromosomes end up at the tail.
        Collections.sort(current);

        // Chromosomes from cutoff (inclusive) onward are fit enough to survive.
        final int cutoff = (int) FastMath.ceil((1.0 - getElitismRate()) * current.size());
        for (final Chromosome chromosome : current.subList(cutoff, current.size())) {
            offspring.addChromosome(chromosome);
        }
        return offspring;
    }

    /**
     * Sets the elitism rate, i.e. the fraction of the best chromosomes
     * transferred directly to the next generation.
     *
     * @param elitismRate the new elitism rate, in [0, 1]
     * @throws OutOfRangeException if the elitism rate is outside the [0, 1] range
     */
    public void setElitismRate(final double elitismRate) throws OutOfRangeException {
        if (elitismRate > 1 || elitismRate < 0) {
            throw new OutOfRangeException(LocalizedFormats.ELITISM_RATE, elitismRate, 0, 1);
        }
        this.elitismRate = elitismRate;
    }

    /**
     * Access the elitism rate.
     * @return the elitism rate
     */
    public double getElitismRate() {
        return elitismRate;
    }
}
|
class FibonacciIterator:
    """Iterator over the first ``maxiters`` Fibonacci numbers.

    Yields 0, 1, 1, 2, 3, 5, ... and raises :class:`StopIteration` once
    ``maxiters`` values have been produced.
    """

    def __init__(self, maxiters):
        # Total number of values to emit.
        self.maxiters = maxiters
        # Count of values emitted so far.
        self.current_iter = 0
        # Rolling pair of consecutive Fibonacci numbers.
        self.prev, self.curr = 0, 1

    def __iter__(self):
        # An iterator is its own iterable.
        return self

    def __next__(self):
        if self.current_iter >= self.maxiters:
            raise StopIteration
        position = self.current_iter
        self.current_iter = position + 1
        # The first two terms are fixed seeds; guard clauses return them
        # without touching the rolling pair.
        if position == 0:
            return 0
        if position == 1:
            return 1
        self.prev, self.curr = self.curr, self.prev + self.curr
        return self.curr
import { text } from '../../lib/AKCommunications'
/**
 * Open a text message to the given number with no prefilled body.
 * Thin wrapper around the AKCommunications `text` helper.
 *
 * @param {string} phoneNumber - destination number (format expected by
 *   `text`; presumably a plain dial string — confirm against the helper).
 */
export function sendMessage(phoneNumber) {
    text(phoneNumber)
}
/**
 * Open a text message to the given number with a prefilled body.
 * Thin wrapper around the AKCommunications `text` helper.
 *
 * @param {string} phoneNumber - destination number (format expected by
 *   `text`; presumably a plain dial string — confirm against the helper).
 * @param {string} body - message text to prefill.
 */
export function sendMessageWithBody(phoneNumber, body) {
    text(phoneNumber, body)
}
package personalfinance.gui.panel;
import personalfinance.gui.MainFrame;
import personalfinance.gui.table.TransactionTableData;
import personalfinance.settings.Settings;
import personalfinance.settings.Style;
/**
 * Right-hand panel that shows an overview of the most recent transactions.
 * All layout and rendering is inherited from {@link RightPanel}; this class
 * only supplies the overview-specific table data, title key and icon.
 */
public class OverviewPanel extends RightPanel {

    /**
     * Creates the overview panel.
     *
     * @param frame the owning main application frame
     */
    public OverviewPanel(MainFrame frame) {
        // Table limited to Settings.COUNT_OVERVIEW_ROWS rows, shown under the
        // "LAST_TRANSACTIONS" title (presumably a localization key — confirm)
        // with the overview icon.
        super(frame, new TransactionTableData(Settings.COUNT_OVERVIEW_ROWS), "LAST_TRANSACTIONS", Style.ICON_PANEL_OVERVIEW);
    }
}
<filename>src/molecules/base_field/index.ts
// Barrel module: re-exports the BaseField component and its props type so
// consumers can import both from this directory instead of the file itself.
export { default as BaseField } from './BaseField';
export type { BaseFieldProps } from './BaseField';
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.