text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# Polybar helper: render a backlight indicator (icon + rounded percentage)
# into the file given as the first argument.
#
# Usage: brightness.sh <output-file>

OUTPUT=$1

# Fail early instead of silently redirecting into an empty path.
if [ -z "$OUTPUT" ]; then
    echo "usage: $0 <output-file>" >&2
    exit 1
fi

LIGHT="ﯦ"

# xbacklight prints a float (e.g. "59.999982"); printf rounds it to an
# integer natively, which drops the original python dependency.
current_brightness=$(printf '%.0f' "$(xbacklight -get)")

# %{A:...}/%{T...} are polybar action/font formatting tags.
echo -en "%{A:brightness:}%{T3}${LIGHT} %{T1}${current_brightness}%{A}%{T1}" > "$OUTPUT"
|
<gh_stars>10-100
//
// Created by neo on 2019-08-15.
//
// Decompiled (IDA-style) declarations for TWall, a pinball table wall part.
// NOTE(review): this is reverse-engineered pseudo-C++ — `__thiscall` with an
// explicit `this` parameter and the out-of-class `vftable` assignment below
// are decompiler artifacts, not compilable C++.
#include "../../pinball.h"
// NOTE(review): the include above sits outside the include guard; harmless if
// pinball.h guards itself, but conventionally it would go below the #define.
#ifndef PINBALL_TWALL_H
#define PINBALL_TWALL_H
/* 102 */
struct TWall;
// Timer callback that restores the wall after a timed state change.
void TWall::TimerExpired(int, void*); // idb
// Generic message dispatch (message id + float payload).
int __thiscall TWall::Message(TWall* this, int, float); // idb
// Scoring table accessors, indexed by score slot.
int __thiscall TWall::get_scoring(TWall* this, int); // idb
void __thiscall TWall::put_scoring(TWall* this, int, int); // idb
// Ball collision handler: ball, contact point/normal, speed, hit edge.
void __thiscall TWall::Collision(TWall* this, struct TBall*, struct vector_type*, struct vector_type*, float, struct TEdgeSegment*); // idb
// Constructor: table owner plus a group/parameter index.
TWall* __thiscall TWall::TWall(TWall* this, struct TPinballTable* a2, int a3);
// Decompiler's rendering of the vtable symbol — weak linkage in the binary.
void* TWall::vftable = &TWall::Message; // weak
#endif //PINBALL_TWALL_H
|
#!/bin/bash
# Script to install LDAP User Manager with nginx web server on Debian 10
## Version: 1.0
## Date of update: 14/05/2021
## By Thatoo
## OS Configuration : Debian 10 Buster

### Update
apt update && apt dist-upgrade -y

### Install dependencies
apt install -y --no-install-recommends libldb-dev libldap2-dev libfreetype6-dev libjpeg-dev libpng-dev

### Install Nginx
# -y added so the script stays non-interactive (it used to stop and prompt here).
apt install -y nginx nginx-extras

### Install PHP
apt install -y php php-fpm php-gd php-ldap

### Download and Install PHPMailer
wget https://github.com/PHPMailer/PHPMailer/archive/v6.2.0.tar.gz
tar -xzf v6.2.0.tar.gz -C /opt && mv /opt/PHPMailer-6.2.0 /opt/PHPMailer
rm v6.2.0.tar.gz

### Configuration of LDAP User Manager
echo "What are your settings?"
read -p "Hostname/URL of your server without http:// nor https:// (example : example.com) : " YOUR_URL
read -p "If you want LDAP User Manager to be accessible in a subfolder (example : example.com/subfolder) please write it here, if not, just click enter : " YOUR_SUBFOLDER
read -p "The URI of the LDAP server, e.g. ldap://ldap.example.com or ldaps://ldap.example.com : " YOUR_LDAP_URI
read -p "The base DN for your organisation, e.g. dc=example,dc=com : " YOUR_LDAP_BASE_DN
read -p "The DN for the user with permission to modify all records under LDAP_BASE_DN, e.g. cn=admin,dc=example,dc=com : " YOUR_LDAP_ADMIN_BIND_DN
# -s keeps the password off the terminal.
read -s -p "The password for LDAP_ADMIN_BIND_DN : " YOUR_LDAP_ADMIN_BIND_PWD
echo
read -p "The name of the group used to define accounts that can use this tool to manage LDAP accounts. e.g. admins : " YOUR_LDAP_ADMINS_GROUP

### Configuration de Nginx
rm /etc/nginx/sites-available/default
# Detect the installed PHP minor version (e.g. "7.3") so the fastcgi socket
# path below matches the installed php-fpm instead of a hard-coded 7.3.
PHP_version=$(php -v | cut -c5-7 | head -n 1)

# The original test `[ $YOUR_SUBFOLDER == ""]` was broken twice: no space
# before "]" and an unquoted, possibly empty variable. -z is the safe form.
if [ -z "$YOUR_SUBFOLDER" ]
then
echo -e "server {
\tlisten 80 default_server;
\tlisten [::]:80 default_server;
\troot /var/www/html/ldap-user-manager;
\tindex index.html index.php index.htm index.nginx-debian.html;
\tserver_name _;
\tlocation / {
\t\ttry_files \$uri \$uri/ =404;
\t}
\tlocation ~ \.php$ {
\t\tinclude snippets/fastcgi-php.conf;
\t\tfastcgi_pass unix:/var/run/php/php${PHP_version}-fpm.sock;
\t\tfastcgi_param SCRIPT_FILENAME \$request_filename;
\t\tinclude /etc/nginx/lum.nginx.conf;
\t}
\tlocation ~ /\.ht {
\t\tdeny all;
\t}
}
">/etc/nginx/sites-available/default
# Fixed: LDAP_BASE_DN used to be fed \$YOUR_LDAP_ADMIN_BIND_DN by mistake,
# and \$YOUR_LDAP_BASE_DN (asked above) was never used.
echo -e "fastcgi_param LDAP_URI $YOUR_LDAP_URI;
fastcgi_param LDAP_BASE_DN $YOUR_LDAP_BASE_DN;
fastcgi_param LDAP_ADMIN_BIND_DN $YOUR_LDAP_ADMIN_BIND_DN;
fastcgi_param LDAP_ADMIN_BIND_PWD $YOUR_LDAP_ADMIN_BIND_PWD;
fastcgi_param LDAP_ADMINS_GROUP $YOUR_LDAP_ADMINS_GROUP;
">/etc/nginx/lum.nginx.conf
else
echo -e "server {
\tlisten 80 default_server;
\tlisten [::]:80 default_server;
\troot /var/www/html;
\t# Add index.php to the list if you are using PHP
\tindex index.html index.php index.htm index.nginx-debian.html;
\tserver_name _;
\tlocation / {
\t\t# First attempt to serve request as file, then
\t\t# as directory, then fall back to displaying a 404.
\t\ttry_files \$uri \$uri/ =404;
\t}
\tlocation /$YOUR_SUBFOLDER {
\t\talias /var/www/html/$YOUR_SUBFOLDER;
\t\ttry_files \$uri \$uri/ =404;
\t\t# deny access to .htaccess files, if Apache's document root
\t\t# concurs with nginx's one
\t\t#
\t\tlocation ~ /\.ht {
\t\t\tdeny all;
\t\t}
\t\tlocation ~ \.php$ {
\t\t\tinclude snippets/fastcgi-php.conf;
\t\t\tfastcgi_pass unix:/var/run/php/php${PHP_version}-fpm.sock;
\t\t\tfastcgi_param SCRIPT_FILENAME \$request_filename;
\t\t\tinclude /etc/nginx/lum.nginx.conf;
\t\t }
\t}
}
">/etc/nginx/sites-available/default
# Fixed: LDAP_BASE_DN used to be fed \$YOUR_LDAP_ADMIN_BIND_DN by mistake.
echo -e "fastcgi_param HTTP_HOST $YOUR_URL/$YOUR_SUBFOLDER;
fastcgi_param HTTP_SUBFOLDER $YOUR_SUBFOLDER;
fastcgi_param LDAP_URI $YOUR_LDAP_URI;
fastcgi_param LDAP_BASE_DN $YOUR_LDAP_BASE_DN;
fastcgi_param LDAP_ADMIN_BIND_DN $YOUR_LDAP_ADMIN_BIND_DN;
fastcgi_param LDAP_ADMIN_BIND_PWD $YOUR_LDAP_ADMIN_BIND_PWD;
fastcgi_param LDAP_ADMINS_GROUP $YOUR_LDAP_ADMINS_GROUP;
">/etc/nginx/lum.nginx.conf
fi
service nginx reload

### Download and Install LDAP User Manager
wget https://github.com/wheelybird/ldap-user-manager/archive/refs/heads/master.zip
apt install -y unzip
unzip master.zip && rm master.zip
if [ -z "$YOUR_SUBFOLDER" ]
then
cp -R ldap-user-manager-master/www/ /var/www/html/ldap-user-manager/
chown -R www-data:www-data /var/www/html/ldap-user-manager
else
cp -R ldap-user-manager-master/www/ "/var/www/html/$YOUR_SUBFOLDER/"
chown -R www-data:www-data "/var/www/html/$YOUR_SUBFOLDER"
fi
rm -R ldap-user-manager-master
|
<reponame>shihuajie/Self-Tuning
/*******************************************************************************
* patch.h - abstract image patch
*******************************************************************************
* Add license here...
*******************************/
#ifndef PATCH_H
#define PATCH_H
#include <cmath>
#include <cstdlib>
#ifndef M_PI
#define M_PI 3.14159265358979323846
#define M_PI_4 0.785398163397448309616
#endif
#include <rng.h>
#include <texture.h>
#define DEFAULT_PATCH_SIZE 7
namespace pm {
// Patch type
// Identifies the parameterisation a patch uses: integer vs. float
// coordinates, optional affine transform, optional multi-resolution pyramid.
enum PatchType {
Basic, // integer-coordinate, axis-aligned patch
BasicFloat, // float-coordinate, axis-aligned patch
Affine, // patch with an affine transform
BasicMultiRes, // integer patch over a resolution pyramid
BasicFloatMultiRes, // float patch over a resolution pyramid
AffineMultiRes // affine patch over a resolution pyramid
// BilinearAffine
};
/**
 * \brief Row-major iterator over the cell indices of a square patch.
 *
 * Visits (0,0), (1,0), ..., (width-1,0), (0,1), ... and converts to
 * false once the row index reaches `width`.
 */
template <typename Index>
struct LinearPatchIterator {
    typedef LinearPatchIterator<Index> IndexIterator;

    Index i;         // current grid position (x, y)
    const int width; // cells per row; also used as the row count (square patch)

    LinearPatchIterator(int y, int x, int w) : i(x, y), width(w){}

    /// Step to the next cell, wrapping to the start of the next row.
    inline IndexIterator &operator ++(){
        ++i.x;
        if(i.x >= width){
            i.x = 0;
            ++i.y;
        }
        return *this;
    }

    /// Access the current index.
    inline const Index &operator *() const {
        return i;
    }

    /// True while the iterator still points inside the patch.
    inline operator bool() const {
        return i.y < width;
    }
};
/**
 * Patch base type for iterations
 *
 * Provides begin()/end() index iterators that walk a
 * Patch::width() x Patch::width() grid in row-major order.
 * Single-resolution patches have no pyramid depth.
 */
template <typename Patch>
struct BasicGrid {
// Hard error: depth() is only meaningful for multi-resolution patches.
static int depth(int = 0) {
mexErrMsgIdAndTxt("MATLAB:basicgrid:depth", "Called depth on a simple resolution patch!");
return 0;
}
/// The type of the index iterator
typedef Point<int> Index;
typedef LinearPatchIterator<Index> IndexIterator;
// First cell: (x=0, y=0).
IndexIterator begin() const {
return IndexIterator(0, 0, Patch::width());
}
// Past-the-end sentinel: y == width makes the iterator convert to false.
const IndexIterator end() const {
int w = Patch::width();
return IndexIterator(w, 0, w);
}
};
namespace patch {
/**
* \brief Create a random patch
*
* \param rand
* the random number generator
* \param parent
* the image parent of the patch
* \param patch
* the patch to randomly initialize
*/
template <typename Patch>
void randomInit(RNG rand, const Image *parent, Patch &patch);
/**
* \brief Initialize a new random patch taken from an old one
*
* \param rand
* the random number generator
* \param parent
* the image parent of the patches
* \param oldPatch
* the patch we randomly generate from
* \param newPatch
* the new patch we randomly initialize
* \param windowSize
* the random search window size
* \return whether the patch is valid (within the frame)
*/
template <typename Patch>
bool random(RNG rand, const Image *parent,
const Patch &oldPatch, Patch &newPatch, int windowSize);
/**
* \brief Initialize a new random patch taken from an old one using aligned search
*
* \param rand
* the random number generator
* \param parent
* the image parent of the patches
* \param oldPatch
* the patch we randomly generate from
* \param newPatch
* the new patch we randomly initialize
* \param g1
* the first direction
* \param g2
* the second direction
* \return whether the patch is valid (within the frame)
*/
template <typename Patch>
bool aligned(RNG rand, const Image *parent,
const Patch &oldPatch, Patch &newPatch, const Point2f &g1, const Point2f &g2, float jitter);
/**
* \brief Initialize a delta patch
*
* \param patch
* the fixed patch
* \param delta
* the delta patch to initialize
* \param dy
* the y delta
* \param dx
* the x delta
*/
template <typename Patch>
void deltaPatch(const Patch &patch, Patch &delta, int dy, int dx);
/**
 * \brief Check whether a patch lies fully within a frame image
 *
 * \param frame
 * the image acting as the bounding frame
 * \param patch
 * the patch to test
 * \return whether the patch is within the frame (0, 0, maxY, maxX)
 */
template <typename Patch>
bool isWithin(const Image *frame, const Patch &patch);
/**
* \brief Compute the [0-1] coherence between two patches
*
* \param p1
* the first patch
* \param p2
* the second patch
* \param dy
* the expected dy from p1 to p2
* \param dx
* the expected dx from p1 to p2
* \return 0 if not coherent, in (0;1] when coherent, 1 if fully coherent
*/
template <typename Patch>
typename Patch::Coherence coherence(const Patch &p1, const Patch &p2, int dy, int dx);
}
// displacement of location
typedef int PatchDisplacement;

/**
 * \brief Randomly re-draw a patch, rejecting candidates whose L1
 * displacement from (y0, x0) is below minDisp (at most 100 attempts;
 * the last draw is returned regardless).
 */
template <typename Patch>
inline bool random(RNG rand, const Image *parent,
        const Patch &oldPatch, Patch &newPatch,
        int windowSize, PatchDisplacement minDisp, int y0, int x0) {
    using patch::random;
    // No displacement constraint: a single draw suffices.
    if(minDisp <= 0) return random(rand, parent, oldPatch, newPatch, windowSize);
    // TODO rejection sampling is maybe not the best!
    bool valid = false;
    for(int attempt = 0; attempt < 100; ++attempt) {
        valid = random(rand, parent, oldPatch, newPatch, windowSize);
        PatchDisplacement l1 = std::abs(newPatch.x - x0) + std::abs(newPatch.y - y0); // L1 distance
        if(l1 >= minDisp) break;
    }
    return valid;
}
/**
 * \brief Randomly re-draw a patch with aligned search, rejecting candidates
 * whose L1 displacement from (y0, x0) is below minDisp (at most 100
 * attempts; the last draw is returned regardless).
 */
template <typename Patch>
inline bool aligned(RNG rand, const Image *parent,
        const Patch &oldPatch, Patch &newPatch,
        const Point2f &g1, const Point2f &g2, float jitter,
        PatchDisplacement minDisp, int y0, int x0) {
    using patch::aligned;
    // No displacement constraint: a single draw suffices.
    if(minDisp <= 0) return aligned(rand, parent, oldPatch, newPatch, g1, g2, jitter);
    // TODO rejection sampling is maybe not the best!
    bool valid = false;
    for(int attempt = 0; attempt < 100; ++attempt) {
        valid = aligned(rand, parent, oldPatch, newPatch, g1, g2, jitter);
        PatchDisplacement l1 = std::abs(newPatch.x - x0) + std::abs(newPatch.y - y0); // L1 distance
        if(l1 >= minDisp) break;
    }
    return valid;
}
}
#endif |
<gh_stars>0
package main;
import javax.swing.JOptionPane;
/**
 * Prompts the user for a year via a Swing dialog and reports whether it is
 * a Gregorian leap year.
 */
public class LeapYear
{
    public static void main(String[] args)
    {
        String input = JOptionPane.showInputDialog(null,
                "Enter a year: ",
                "Leap Year",
                JOptionPane.QUESTION_MESSAGE);
        int year = Integer.parseInt(input);

        boolean isLeapYear = isLeap(year);

        JOptionPane.showMessageDialog(null,
                year + " is a leap year? " + isLeapYear,
                "Leap Year",
                JOptionPane.INFORMATION_MESSAGE);
    }

    /** Gregorian rule: divisible by 4 and not by 100, unless divisible by 400. */
    private static boolean isLeap(int year)
    {
        if (year % 400 == 0)
        {
            return true;
        }
        return year % 4 == 0 && year % 100 != 0;
    }
}
using System;
using System.Collections.Generic;
// Demonstrates uniform-cost search (Dijkstra-style) on a small hard-coded graph.
public class Program
{
// Graph vertex. Search state (Cost, Parent) is stored on the node itself,
// so a given set of Node instances supports only one search at a time.
public class Node
{
public string Name { get; set; }
// Best known path cost from the start; int.MaxValue means "not reached yet".
public int Cost { get; set; }
// Predecessor on the best known path; used to reconstruct the route.
public Node Parent { get; set; }
// Outgoing edges as (neighbour, edge weight) pairs.
public List<(Node, int)> Adjacents { get; set; }
}
public static void Main()
{
// Build the graph; only the start has cost 0, everything else is "infinite".
Node start = new Node { Name = "S", Cost = 0, Adjacents = new List<(Node, int)>() };
Node a = new Node { Name = "A", Cost = int.MaxValue, Adjacents = new List<(Node, int)>() };
Node b = new Node { Name = "B", Cost = int.MaxValue, Adjacents = new List<(Node, int)>() };
Node c = new Node { Name = "C", Cost = int.MaxValue, Adjacents = new List<(Node, int)>() };
Node d = new Node { Name = "D", Cost = int.MaxValue, Adjacents = new List<(Node, int)>() };
Node goal = new Node { Name = "G", Cost = int.MaxValue, Adjacents = new List<(Node, int)>() };
// Adjacency matrix of the example graph (rows = from, columns = to):
// S A B C D G
//S - 5 - - - -
//A - - 1 - 8 -
//B - - - 4 - 2
//C - - - - - 2
//D - - - - - 4
start.Adjacents.Add((a, 5));
a.Adjacents.AddRange(new List<(Node, int)> { (b, 1), (d, 8) });
b.Adjacents.AddRange(new List<(Node, int)> { (c, 4), (goal, 2) });
c.Adjacents.Add((goal, 2));
d.Adjacents.Add((goal, 4));
Dictionary<string, Node> nodes = new Dictionary<string, Node>()
{
["S"] = start,
["A"] = a,
["B"] = b,
["C"] = c,
["D"] = d,
["G"] = goal
};
// Perform uniform cost search
UniformCostSearch(start, goal, nodes);
Console.WriteLine($"Cost of path from {start.Name} to {goal.Name} is {goal.Cost}");
// Print the path by walking Parent links back from the goal.
string path = goal.Name + "<-";
Node currentNode = goal.Parent;
while (currentNode != null)
{
path += currentNode.Name;
if (currentNode.Name != start.Name)
{
path += "<-";
}
currentNode = currentNode.Parent;
}
Console.WriteLine($"The path is {path}");
}
// Uniform-cost search: repeatedly expands the cheapest open node until the
// goal is dequeued. NOTE(review): the `nodes` dictionary parameter is not
// used by this implementation.
public static void UniformCostSearch(Node start, Node goal, Dictionary<string, Node> nodes)
{
// Add the start to the open list
List<Node> open = new List<Node> { start };
// Add the start to the closed list
List<Node> closed = new List<Node> { };
while (open.Count > 0)
{
// Get node with lowest cost from the open list (linear scan in lieu of a
// priority queue — fine for this tiny graph).
Node currentNode = open[0];
foreach (Node node in open)
{
if (node.Cost < currentNode.Cost)
{
currentNode = node;
}
}
open.Remove(currentNode);
// Check if we have found the goal
if (currentNode.Name.Equals(goal.Name))
{
// We have found the goal node
goal.Parent = currentNode.Parent;
goal.Cost = currentNode.Cost;
break;
}
// Expand node: relax each outgoing edge.
foreach (var (node, cost) in currentNode.Adjacents)
{
int newCost = currentNode.Cost + cost;
if (newCost < node.Cost)
{
node.Cost = newCost;
node.Parent = currentNode;
}
if (!open.Contains(node) && !closed.Contains(node))
{
open.Add(node);
}
}
closed.Add(currentNode);
}
}
}
#!/bin/sh
# Entrypoint: generate a self-signed CA plus server and client certificates
# for the docker daemon (unless they already exist), then hand off to dind.

CONFIGS_DIR=/etc/docker/daemon/config
CERT_DIR=/etc/docker/ssl
CERT_SUBJ="/C=US/ST=California/L=San Francisco/O=CTF/CN=ComeAtMeBro CA"

# Only generate certificates on first run.
if [ ! -f "${CERT_DIR}/cacert.pem" ]; then
mkdir -p "${CERT_DIR}"
# create the root CA
openssl req -x509 \
-config "${CONFIGS_DIR}/openssl-ca.cnf" \
-newkey rsa:4096 -sha256 \
-subj "${CERT_SUBJ}" \
-nodes -out "${CERT_DIR}/cacert.pem" -outform PEM
openssl x509 -noout -text -in "${CERT_DIR}/cacert.pem"
# create the server certificate signing request
openssl req \
-config "${CONFIGS_DIR}/openssl-server.cnf" \
-newkey rsa:2048 -sha256 \
-subj "/CN=localhost" \
-nodes -out "${CERT_DIR}/server.csr" -outform PEM
openssl req -text -noout -verify -in "${CERT_DIR}/server.csr"
touch "${CERT_DIR}/index.txt"
echo 01 > "${CERT_DIR}/serial.txt"
# create the server cert
openssl ca -batch \
-config "${CONFIGS_DIR}/openssl-ca.cnf" \
-policy signing_policy -extensions signing_req \
-out "${CERT_DIR}/server.cert" -infiles "${CERT_DIR}/server.csr"
openssl x509 -noout -text -in "${CERT_DIR}/server.cert"
# create the client certificate signing request
openssl req \
-config "${CONFIGS_DIR}/openssl-client.cnf" \
-newkey rsa:2048 -sha256 \
-subj "/CN=client" \
-nodes -out "${CERT_DIR}/client.csr" -outform PEM
openssl req -text -noout -verify -in "${CERT_DIR}/client.csr"
touch "${CERT_DIR}/index.txt"
echo 02 > "${CERT_DIR}/serial.txt"
# create the client cert
openssl ca -batch \
-config "${CONFIGS_DIR}/openssl-ca.cnf" \
-policy signing_policy -extensions signing_req \
-out "${CERT_DIR}/client.cert" -infiles "${CERT_DIR}/client.csr"
openssl x509 -noout -text -in "${CERT_DIR}/client.cert"
# remove the signing requests
rm -rf "${CERT_DIR}/client.csr" "${CERT_DIR}/server.csr" "${CERT_DIR}/"*.attr "${CERT_DIR}/"*.old
# copy the certs and keys to places where they can be auto picked up by the docker daemon
# NOTE(review): assumes the openssl .cnf files write the private key to
# ${CERT_DIR}/server.key (no -keyout is passed above) — confirm in the configs.
cp "${CERT_DIR}/cacert.pem" "${CERT_DIR}/ca.pem"
cp "${CERT_DIR}/server.cert" "${CERT_DIR}/cert.pem"
cp "${CERT_DIR}/server.key" "${CERT_DIR}/key.pem"
fi

if [ "$1" = 'dockerd' ]; then
# if we're running Docker, let's pipe through dind
# (and we'll run dind explicitly with "sh" since its shebang is /bin/bash)
set -- sh "$(which dind)" "$@"
fi

# Exec the (possibly rewritten) command line. The original ran
# `exec dind "$@"`, which — after the `set --` above — invoked dind with
# "sh /path/to/dind dockerd ..." as its arguments, wrapping dind twice.
exec "$@"
|
import string
import random
def get_random_list():
    """Return a list of 35 lowercase ASCII letters drawn uniformly at random
    (with replacement) via ``random.choices``."""
    alphabet = string.ascii_lowercase
    sample_size = 35
    return random.choices(alphabet, k=sample_size)
#!/bin/bash
# This script verifies that no user-provided links in agarrharr/awesome-cli-apps
# are broken. It does so by making an HTTP request to each website and looking at
# the status code of the response.
#
# If the request responds with 5xx the script terminates with a status code of
# 1, meaning a link is broken. 3xx and 4xx responses are treated as warnings
# and are simply logged, because they do not guarantee that there is something
# wrong with the requested website. The status code 000 is also treated as a
# warning because the status code alone does not specify where the problem
# lies, only that there is a problem, read more here: https://tinyurl.com/superuser-status-code-000
#
### Dependencies
# - ggrep (GNU flavored grep, comes with `brew install coreutils`)
# - curl
# - GNU parallel
#
### Usage
#
# /bin/bash ./verify-links.sh
#
### Improvements
# - Use grep instead of ggrep to avoid potential additional dependency
#
# Author: http://github.com/simeg
# License: MIT
#
readonly SOURCE_FILE_URL="https://raw.githubusercontent.com/agarrharr/awesome-cli-apps/master/readme.md"
readonly JOBS_COUNT=100
# Matches markdown links of the form "(http://example.com) - " and keeps the
# parenthesised URL.
readonly REGEX_URLS='(\((http(s*)\:\/\/.+)\))(\s-\s)'

echo "Fetching source file.."
readonly URL_STRING=$(curl --silent "${SOURCE_FILE_URL}" | ggrep -oP "${REGEX_URLS}")
echo "OK!"

echo "Parsing URLs from file..."
RAW_URLS_FILE=$(mktemp)
for URL in $URL_STRING; do
    if [ "$URL" != "-" ]; then
        # Strip the surrounding parentheses.
        echo "${URL:1:${#URL}-2}" >> "$RAW_URLS_FILE"
    fi
done
echo "OK!"

# cURL a single URL (following redirects, 5s cap) and print "<url>\t<code>".
curl_for_status_code() {
    local url="$1"
    local status_code=
    status_code=$(
        curl "$url" \
            --silent \
            --head \
            --max-time 5 \
            -L \
            --write-out "%{http_code}" \
            --output /dev/null
    )
    printf "%s\\t%d\\n" "$url" "$status_code"
}
# Make function available for parallel
export -f curl_for_status_code

printf "Found [ %s ] URLs, cURLing them...\\n" "$(wc -l < "$RAW_URLS_FILE")"
URLS_WITH_STATUSES_FILE=$(mktemp)
parallel --jobs "$JOBS_COUNT" curl_for_status_code < "$RAW_URLS_FILE" >> "$URLS_WITH_STATUSES_FILE"

# Read the results file directly instead of `cat file | while ...`: the pipe
# ran the loop in a subshell, so `exit 1` only left the subshell and the
# script always exited 0 even when a link was broken.
while read -r RESULT
do
    URL=$(echo "$RESULT" | cut -f1)
    STATUS_CODE=$(echo "$RESULT" | cut -f2)
    FIRST_DIGIT=${STATUS_CODE:0:1}
    if [ "${FIRST_DIGIT}" == "2" ]; then
        echo OK!
    elif [ "${FIRST_DIGIT}" == "3" ]; then
        # 3xx responses are warnings, as promised in the header comment
        # (they were previously unhandled and fell into the "unknown" branch).
        printf "WARNING: URL [ %s ] responded with status code [ %d ], continuing..\\n" "$URL" "$STATUS_CODE"
    elif [ "${FIRST_DIGIT}" == "4" ]; then
        printf "WARNING: URL [ %s ] responded with status code [ %d ], continuing..\\n" "$URL" "$STATUS_CODE"
    elif [ "${FIRST_DIGIT}" == "5" ]; then
        printf "ERROR: URL [ %s ] responded with status code [ %d ], aborting!\\n" "$URL" "$STATUS_CODE"
        exit 1
    elif [ "${STATUS_CODE}" == "000" ]; then
        printf "ERROR: URL [ %s ] responded with status code [ %d ], aborting!\\n" "$URL" "$STATUS_CODE"
        exit 1
    else
        printf "UNKNOWN STATUS CODE: URL [ %s ] responded with status code [ %d ], continuing..\\n" "$URL" "$STATUS_CODE"
    fi
done < "$URLS_WITH_STATUSES_FILE"
|
<gh_stars>0
ig.module(
    'game.entities.defenceblock'
)
.requires(
    'impact.entity'
)
.defines(function(){

// Static defensive block: absorbs shots, blinks on hit, and supports
// entity-local scaling independent of the global ig.system.scale.
EntityDefenceblock = ig.Entity.extend({
    // Weltmeister editor helpers: draw a translucent blue box for this entity.
    _wmDrawBox: true,
    _wmBoxColor: 'rgba(0, 0, 255, 0.7)',
    size: {
        x: 89,
        y: 59
    },
    maxVel:{
        x:50,
        y:50
    },
    vel:{
        x:50,
        y:50
    },
    scale:{
        x:1,
        y:1
    },
    // NOTE(review): the original literal declared `offset` twice with the
    // same value; in a JS object literal the later property wins, so the
    // first (dead) copy has been removed.
    offset:{
        x:0,
        y:0
    },
    _offset:{
        //cached offset prior to scaling
        x:0,
        y:0
    },
    _scale:{
        //scale relative to ig.system.scale
        x:1,
        y:1
    },
    _size:{
        //cached size prior to scaling
        x: 89,
        y: 59
    },
    type: ig.Entity.TYPE.A,
    checkAgainst: ig.Entity.TYPE.BOTH,
    collides: ig.Entity.COLLIDES.NEVER,
    health: 300,
    startX:null,
    startY:null,
    blinkTimer:null,
    canShoot: false,
    scaleTimer: new ig.Timer(0),
    animSheet: new ig.AnimationSheet('media/blocks.png', 89, 59),
    text: null,

    init: function (x, y, settings)
    {
        this.parent(x, y, settings);
        this.addAnim('idle', 1, [0]);
        this.addAnim('hit', 1, [3]);
        this.addAnim('damaged1', 0.2, [0]);
        this.addAnim('damaged2', 0.2, [0]);
        // Cache the unscaled geometry so setScale() can always recompute
        // from the original values.
        this._offset.x = this.offset.x;
        this._offset.y = this.offset.y;
        this._size.x = this.size.x;
        this._size.y = this.size.y;
        this.startX = this.pos.x;
        this.startY = this.pos.y;
        this.setScale( this.scale.x, this.scale.y );
        this.setAnimation();
        this.blinkTimer = new ig.Timer(0);
    },

    update: function(){
        // Once the hit-blink timer expires, return to the idle animation.
        if(this.blinkTimer.delta() > 0){
            //if(this.health<300) this.setScale(1.1,1.1); this.pos.x=this.startX; this.pos.y=this.startY;
            //if(this.health<200) this.setScale(1.2,1.2); this.pos.x=this.startX; this.pos.y=this.startY;
            //if(this.health<100) this.setScale(1.3,1.3); this.pos.x=this.startX; this.pos.y=this.startY;
            //if(this.health<300) this.currentAnim=this.anims.damaged1
            //if(this.health<200) this.currentAnim=this.anims.damaged2
            //if(this.health>=300) this.currentAnim=this.anims.idle
            this.currentAnim = this.anims.idle;
        }
    },

    // Custom draw so the entity-local scale can be applied to the context
    // (the stock Entity.draw only honours the global scale).
    draw: function(){
        var ctx = ig.system.context;
        ctx.save();
        ctx.translate(
            ig.system.getDrawPos( this.pos.x.round() - this.offset.x - ig.game.screen.x ),
            ig.system.getDrawPos( this.pos.y.round() - this.offset.y - ig.game.screen.y )
        );
        ctx.scale( this._scale.x, this._scale.y );
        this.currentAnim.draw( 0, 0 );
        ctx.restore();
    },

    // Set an entity-local scale and derive the scaled offset/size from the
    // cached unscaled values.
    setScale: function( x, y ){
        //cache size prior to scaling
        var oX = this.size.x,
            oY = this.size.y;
        //set scale
        this.scale.x = x || this.scale.x;
        this.scale.y = y || this.scale.y;
        //set scale relative to game scale
        this._scale.x = this.scale.x / ig.system.scale;
        this._scale.y = this.scale.y / ig.system.scale;
        //scale offset
        this.offset.x = this._offset.x * this._scale.x;
        this.offset.y = this._offset.y * this._scale.y;
        //scale size
        this.size.x = this._size.x * this._scale.x;
        this.size.y = this._size.y * this._scale.y;
        //offset entity's position by the change in size
        // NOTE(review): dividing by 0.2 multiplies the size delta by 5;
        // plain re-centering would divide by 2 — confirm this is intended.
        this.pos.x += (oX - this.size.x) / 0.2;
        this.pos.y += (oY - this.size.y) / 0.2;
    },

    receiveDamage: function(amount,from)
    {
        // Flash the hit frame briefly, then let the base class apply damage.
        this.currentAnim = this.anims.hit;
        this.blinkTimer.set(0.15);
        this.parent(amount);
    },

    setAnimation: function ()
    {
        this.currentAnim = this.anims.idle;
    },

    handleMovementTrace: function (res)
    {
        this.parent(res);
    }
});

});
|
package controllers
import akka.actor.{ActorRef, ActorSystem}
import javax.inject._
import play.api.mvc._
import utils.{ActorNames, ApiId}
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, Promise}
// Exposes the service health endpoint; the actual check is delegated to the
// injected health actor, and BaseController shapes the API response.
class HealthController @Inject()(@Named(ActorNames.HEALTH_ACTOR) healthActor: ActorRef, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends BaseController(cc) {
// GET health — asks the health actor and wraps its reply under the
// APPLICATION_HEALTH api id with an empty request payload.
def health() = Action.async { implicit request =>
getResult(ApiId.APPLICATION_HEALTH, healthActor, new org.sunbird.common.dto.Request())
}
}
|
#!/bin/bash
# Start an interactive Kafka console producer on the "test" topic.
# Substitute install directory with your own
KAFKA_DIR="$HOME/kafka"
# Quoted so an install directory containing spaces still works.
"$KAFKA_DIR/bin/kafka-console-producer.sh" --broker-list localhost:9092 --topic test
|
def best_route(cities, distance_fn=None):
    """Return the length of the shortest route visiting every city exactly
    once (shortest Hamiltonian path, free start and end), via the Held-Karp
    bitmask dynamic program.

    Args:
        cities: sequence of city objects; pairwise distances are computed
            between them.
        distance_fn: optional callable ``(a, b) -> distance``. Defaults to
            the module-level ``calculate_distance`` (kept for backward
            compatibility with existing callers).

    Returns:
        The minimal total distance, or 0 for zero or one city.

    Fixes over the original: the DP transition read ``dp[i - (1 << j)]``
    without first checking that bit ``j`` was set in ``i`` (corrupting
    unrelated states), and the final answer took ``max`` over the min-cost
    table instead of ``min``.
    """
    n = len(cities)
    if n == 0:
        return 0
    if distance_fn is None:
        distance_fn = calculate_distance

    # Pairwise distance matrix.
    dist = [[0 for _ in range(n)] for _ in range(n)]
    for i in range(n):
        for j in range(n):
            if i != j:
                dist[i][j] = distance_fn(cities[i], cities[j])

    # dp[mask][j]: cheapest path visiting exactly the cities in `mask`,
    # ending at city j. Unreachable states stay at infinity.
    INF = float('inf')
    dp = [[INF for _ in range(n)] for _ in range(1 << n)]
    for j in range(n):
        dp[1 << j][j] = 0  # a path may start at any city

    for mask in range(1 << n):
        for j in range(n):
            if not (mask & (1 << j)):
                continue  # j must be part of the visited set
            prev = mask ^ (1 << j)
            if prev == 0:
                continue  # single-city base case already initialized
            for k in range(n):
                if k != j and (prev & (1 << k)):
                    cand = dp[prev][k] + dist[k][j]
                    if cand < dp[mask][j]:
                        dp[mask][j] = cand

    # Best route: minimum over all possible final cities.
    full = (1 << n) - 1
    return min(dp[full][j] for j in range(n))
#!/usr/bin/env bash
# Reproduction driver: installs dependencies, runs every estimation case,
# then regenerates all tables and figures. `set -e` stops at the first error.
set -e
## If you are in main directory with run_all.sh
# you will need to go to code to run everything
# Install Packages
pip install -r requirements.txt
pip install git+https://github.com/jeffgortmaker/pyblp/
# make sure you have the latest
# requires git, which may require xcode!
cd code
# Get BLP and Nevo Cases: Comment out when /dict/ is populated
python run_all_cases.py
# Table Generating Block
python tab34_params.py
python tab56_diversion.py
python tab7_wtp.py
# Figure Generating Block
python fig12_decomp.py
python fig34_late.py
|
#!/bin/bash
# Build MNN for Android (armeabi-v7a) with the Vulkan backend enabled.
# Requires $ANDROID_NDK to point at an installed Android NDK.

# Abort on the first failing step instead of configuring against a
# half-generated shader/build tree.
set -e

if [ -z "$ANDROID_NDK" ]; then
    echo "error: ANDROID_NDK is not set" >&2
    exit 1
fi

basepath=$(cd "$(dirname "$0")"; pwd)
cd "$basepath"

# Regenerate the Vulkan shader sources before configuring.
cd ../../source/backend/vulkan/compiler/
python makeshader.py
cd ../../../

# Fresh out-of-source build directory.
rm -rf build_vulkan
mkdir build_vulkan
cd build_vulkan

# for cpp cache
# macOS `brew install ccache` ubuntu `apt-get install ccache` windows `not care`
cmake ../../ \
    -DNDK_CCACHE=ccache \
    -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake" \
    -DANDROID_ABI="armeabi-v7a" \
    -DCMAKE_BUILD_TYPE=Release \
    -DANDROID_STL=c++_static \
    -DANDROID_NATIVE_API_LEVEL=android-21 \
    -DANDROID_TOOLCHAIN=gcc \
    -DMNN_OPENGL=OFF \
    -DMNN_OPENCL=OFF \
    -DMNN_VULKAN=ON \
    -DMNN_BUILD_FOR_ANDROID_COMMAND=true \
    -DNATIVE_LIBRARY_OUTPUT=. -DNATIVE_INCLUDE_OUTPUT=. $1 $2

make -j4
|
<gh_stars>0
package com.esri.ges.processor.serviceAreaCalculator;
import java.io.Serializable;
public class SpatialReference implements com.esri.ges.spatial.SpatialReference, Serializable
{
private static final long serialVersionUID = 1L;
private final int DEFAULT_WKID = 4326;
private int wkid;
public SpatialReference()
{
setWkid(DEFAULT_WKID);
}
public SpatialReference(int wkid)
{
setWkid(wkid);
}
@Override
public int getWkid()
{
return wkid;
}
@Override
public void setWkid(int wkid)
{
try
{
this.wkid = (com.esri.core.geometry.SpatialReference.create(wkid) != null) ? wkid : DEFAULT_WKID;
}
catch (IllegalArgumentException e)
{
this.wkid = DEFAULT_WKID;
}
}
@Override
public boolean equals(com.esri.ges.spatial.SpatialReference sr)
{
return (sr != null && sr.getWkid() == wkid);
}
} |
<reponame>balajikr1994/ngx-scrollbar
import { Component, ChangeDetectionStrategy } from '@angular/core';
/**
 * Demo page: CDK virtual-scroll viewports nested inside custom scrollbars.
 */
@Component({
  selector: 'app-example-nested-virtual-scroll',
  templateUrl: './example-nested-virtual-scroll.component.html',
  styleUrls: ['./example-nested-virtual-scroll.component.scss'],
  changeDetection: ChangeDetectionStrategy.OnPush,
  host: {
    '[class.example-component]': 'true'
  }
})
export class ExampleNestedVirtualScrollComponent {
  // 1..1000 for the vertical list, 1..100 for the horizontal one.
  items = Array.from({ length: 1000 }, (_, index) => index + 1);
  horizontal = Array.from({ length: 100 }, (_, index) => index + 1);
}
|
# Keeps each web widget's pre-chat form in sync with a changed custom
# attribute: the matching pre-chat field's label, placeholder and values are
# updated to the attribute's current display name and values.
class Inboxes::UpdateWidgetPreChatCustomFieldsJob < ApplicationJob
queue_as :default
# account - the account whose web widgets should be updated
# custom_attribute - hash with 'attribute_key', 'attribute_display_name'
#   and 'attribute_values' describing the changed custom attribute
def perform(account, custom_attribute)
attribute_key = custom_attribute['attribute_key']
# find_each loads widgets in batches to keep memory bounded on large accounts.
account.web_widgets.all.find_each do |web_widget|
pre_chat_fields = web_widget.pre_chat_form_options['pre_chat_fields']
pre_chat_fields.each_with_index do |pre_chat_field, index|
# Only the field whose name matches the changed attribute is touched.
next unless pre_chat_field['name'] == attribute_key
web_widget.pre_chat_form_options['pre_chat_fields'][index] =
pre_chat_field.deep_merge({
'label' => custom_attribute['attribute_display_name'],
'placeholder' => custom_attribute['attribute_display_name'],
'values' => custom_attribute['attribute_values']
})
end
web_widget.save!
end
end
end
|
package info.u250.c2d.box2d.model.joint;
import com.badlogic.gdx.math.Vector2;
import info.u250.c2d.box2d.model.b2JointDefModel;
/**
 * Serializable definition model mirroring Box2D's b2RopeJointDef:
 * two body-local anchor points plus the maximum rope length.
 */
public class b2RopeJointDefModel extends b2JointDefModel {
private static final long serialVersionUID = 1L;
/**
* The local anchor point relative to bodyA's origin.
**/
public final Vector2 localAnchorA = new Vector2(-1, 0);
/**
* The local anchor point relative to bodyB's origin.
**/
public final Vector2 localAnchorB = new Vector2(1, 0);
/**
* The maximum length of the rope. Warning: this must be larger than b2_linearSlop or the joint will have no effect.
*/
public float maxLength = 0;
}
|
import { HttpClient } from "@angular/common/http";
import { Injectable } from "@angular/core";
import { of } from "rxjs";
// Thin HTTP wrapper for fetching articles.
// NOTE(review): currently points at the jsonplaceholder /todos demo API.
@Injectable({
providedIn: 'root'
})
export class ArticlesAPIService {
constructor(private http: HttpClient) {
}
// Returns an Observable emitting the article with the given id.
getArticle$(id) {
return this.http.get('https://jsonplaceholder.typicode.com/todos/' + id);
}
}
"use strict";
// Aggregator stub: pulls in the shared dataGrid markup test suite so it
// runs as part of this bundle.
require("../DevExpress.ui.widgets.dataGrid/dataGrid.markup.tests.js");
|
#!/bin/bash
# Copyright 2020 Cortex Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Rewrites the "WARNING: you are on the master branch ..." comments across
# docs/examples. On master it refreshes the version numbers in the warning;
# on a release branch (X.Y) it removes the warnings / pins them to the release.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. >/dev/null && pwd)"
arg1="${1:-""}"
git_branch="${CIRCLE_BRANCH:-""}"
if [ "$git_branch" = "" ]; then
git_branch=$(cd "$ROOT" && git rev-parse --abbrev-ref HEAD)
fi
if [ -z "$git_branch" ]; then
echo "error: unable to determine git branch"
exit 1
fi
if [ "$git_branch" = "master" ]; then
if [ -z "$arg1" ]; then
echo "error: use \`./dev/update_version_comments.sh <minor_version>\` to update all docs and examples warnings"
exit 1
fi
# macOS (BSD) sed requires a separate argument after -i; GNU sed does not.
if [[ "$OSTYPE" == "darwin"* ]]; then
cd "$ROOT" && find . -type f \
! -path "./build/lint.sh" \
! -path "./dev/update_version_comments.sh" \
! -path "./vendor/*" \
! -path "./bin/*" \
! -path "./.git/*" \
! -name ".*" \
-print0 | \
xargs -0 sed -i '' -e "s/WARNING: you are on the master branch; please refer to examples on the branch corresponding to your \`cortex version\` [(]e\.g\. for version [0-9]*\.[0-9]*\.\*, run \`git checkout -b [0-9]*\.[0-9]*\` or switch to the \`[0-9]*\.[0-9]*\` branch on GitHub[)]/WARNING: you are on the master branch; please refer to examples on the branch corresponding to your \`cortex version\` (e.g. for version ${arg1}.*, run \`git checkout -b ${arg1}\` or switch to the \`${arg1}\` branch on GitHub)/"
else
cd "$ROOT" && find . -type f \
! -path "./build/lint.sh" \
! -path "./dev/update_version_comments.sh" \
! -path "./vendor/*" \
! -path "./bin/*" \
! -path "./.git/*" \
! -name ".*" \
-print0 | \
xargs -0 sed -i "s/WARNING: you are on the master branch; please refer to examples on the branch corresponding to your \`cortex version\` [(]e\.g\. for version [0-9]*\.[0-9]*\.\*, run \`git checkout -b [0-9]*\.[0-9]*\` or switch to the \`[0-9]*\.[0-9]*\` branch on GitHub[)]/WARNING: you are on the master branch; please refer to examples on the branch corresponding to your \`cortex version\` (e.g. for version ${arg1}.*, run \`git checkout -b ${arg1}\` or switch to the \`${arg1}\` branch on GitHub)/"
fi
echo "done"
exit 0
fi
# Fixed: the pattern was unquoted and used an unescaped ".", so branch names
# like "1x2" would have matched; quote it and escape the dot.
if ! echo "$git_branch" | grep -Eq '^[0-9]+\.[0-9]+$'; then
echo "error: this is meant to be run on release branches"
exit 1
fi
# Drop the two-line "docs on master" warning (sed multi-line syntax differs
# between BSD and GNU sed, hence the two variants).
if [[ "$OSTYPE" == "darwin"* ]]; then
cd "$ROOT" && find . -type f \
! -path "./build/lint.sh" \
! -path "./dev/update_version_comments.sh" \
! -path "./vendor/*" \
! -path "./bin/*" \
! -path "./.git/*" \
! -name ".*" \
-print0 | \
xargs -0 sed -i '' -e '/.*WARNING: you are on the master branch, please refer to the docs on the branch that matches.*$/N; /.*WARNING: you are on the master branch, please refer to the docs on the branch that matches.*/d'
else
cd "$ROOT" && find . -type f \
! -path "./build/lint.sh" \
! -path "./dev/update_version_comments.sh" \
! -path "./vendor/*" \
! -path "./bin/*" \
! -path "./.git/*" \
! -name ".*" \
-print0 | \
xargs -0 sed -i '/.*WARNING: you are on the master branch, please refer to the docs on the branch that matches.*/,+1 d'
fi
# Replace the master-branch example warning with a release-pinned notice.
if [[ "$OSTYPE" == "darwin"* ]]; then
cd "$ROOT" && find . -type f \
! -path "./build/lint.sh" \
! -path "./dev/update_version_comments.sh" \
! -path "./vendor/*" \
! -path "./bin/*" \
! -path "./.git/*" \
! -name ".*" \
-print0 | \
xargs -0 sed -i '' -e "s/WARNING: you are on the master branch; please refer to examples on the branch corresponding to your \`cortex version\` [(]e\.g\. for version [0-9]*\.[0-9]*\.\*, run \`git checkout -b [0-9]*\.[0-9]*\` or switch to the \`[0-9]*\.[0-9]*\` branch on GitHub[)]/this is an example for cortex release ${git_branch} and may not deploy correctly on other releases of cortex/"
else
cd "$ROOT" && find . -type f \
! -path "./build/lint.sh" \
! -path "./dev/update_version_comments.sh" \
! -path "./vendor/*" \
! -path "./bin/*" \
! -path "./.git/*" \
! -name ".*" \
-print0 | \
xargs -0 sed -i "s/WARNING: you are on the master branch; please refer to examples on the branch corresponding to your \`cortex version\` [(]e\.g\. for version [0-9]*\.[0-9]*\.\*, run \`git checkout -b [0-9]*\.[0-9]*\` or switch to the \`[0-9]*\.[0-9]*\` branch on GitHub[)]/this is an example for cortex release ${git_branch} and may not deploy correctly on other releases of cortex/"
fi
echo "done"
|
<filename>Kindergarten counting game.c
#include<stdio.h>
#include<string.h>
/*
 * Kindergarten counting game: for each input line, print the number of
 * space-separated tokens, computed as (number of spaces) + 1.
 *
 * Fixes over the original:
 *  - the counter was never incremented (the `if` body was empty), so every
 *    line printed 1;
 *  - `char` was used for the length/index of a 100000-char buffer, which
 *    overflows at 128; use int;
 *  - gets() (removed in C11, unbounded) replaced by fgets(); the trailing
 *    newline fgets keeps does not affect the space count.
 */
int main()
{
    static char line[100000];
    int i, len, spaces;

    while (fgets(line, sizeof line, stdin) != NULL)
    {
        len = (int)strlen(line);
        spaces = 0;
        for (i = 0; i < len; i++)
        {
            if (line[i] == ' ')
            {
                spaces++;  /* bug fix: original never counted anything */
            }
        }
        printf("%d\n", spaces + 1);
    }
    return 0;
}
|
"use strict";

// Auto-generated (Babel CommonJS) icon definition.
// `leaf` describes a 20x20 SVG leaf glyph: a viewBox plus a single <path>
// child whose `d` attribute draws the outline. Do not hand-edit the path data.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.leaf = void 0;
var leaf = {
  "viewBox": "0 0 20 20",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M19.025,3.587c-4.356,2.556-4.044,7.806-7.096,10.175c-2.297,1.783-5.538,0.88-7.412,0.113c0,0-1.27,1.603-2.181,3.74\r\n\tc-0.305,0.717-1.644-0.073-1.409-0.68C3.905,9.25,14.037,5.416,14.037,5.416S6.888,5.113,2.11,11.356\r\n\tC1.982,9.93,1.77,6.072,5.47,3.706C10.486,0.495,20.042,2.991,19.025,3.587z"
    }
  }]
};
exports.leaf = leaf;
/*-
* ========================LICENSE_START=================================
* TeamApps
* ---
* Copyright (C) 2014 - 2021 TeamApps.org
* ---
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================LICENSE_END==================================
*/
import {UiWorkSpaceLayoutItemConfig} from "../../generated/UiWorkSpaceLayoutItemConfig";
import {ItemTreeItem} from "./ItemTree";
import {View} from "./View";
import {SplitPaneItem} from "./SplitPaneItem";
import {UiWorkSpaceLayoutSplitItemConfig} from "../../generated/UiWorkSpaceLayoutSplitItemConfig";
import {TabPanelItem} from "./TabPanelItem";
import {UiWorkSpaceLayoutViewGroupItemConfig} from "../../generated/UiWorkSpaceLayoutViewGroupItemConfig";
import {LocalViewContainer} from "./LocalViewContainer";
import * as log from "loglevel";
import {UiWorkSpaceLayoutViewConfig} from "../../generated/UiWorkSpaceLayoutViewConfig";
import {TeamAppsUiContext} from "../TeamAppsUiContext";
import {isSplitPanelDescriptor, isTabPanelDescriptor} from "./UiWorkSpaceLayout";
import {UiViewGroupPanelState} from "../../generated/UiViewGroupPanelState";
import {UiComponent} from "../UiComponent";
/**
 * Applies a (server-sent) layout descriptor tree to the existing client-side
 * item tree: items whose ids match are reused in place, items that moved are
 * detached ("stashed") and re-attached at their new position, and views/items
 * no longer referenced anywhere in the descriptor are removed.
 */
export class LayoutDescriptorApplyer {

	// NOTE(review): logger name "LocalViewContainer" looks copy-pasted from
	// another class — confirm it should not be "LayoutDescriptorApplyer".
	private static logger: log.Logger = log.getLogger("LocalViewContainer");

	// Lookup tables; all of them are rebuilt from scratch on every apply() call.
	private descriptorItemById: { [itemId: string]: UiWorkSpaceLayoutItemConfig } = {}; // every item of the new descriptor, by id
	private descriptorViewNames: string[] = []; // every view name referenced anywhere in the new descriptor
	private clientItemsById: { [itemId: string]: ItemTreeItem } = {}; // current client tree items, by id
	private clientViewsByName: { [viewName: string]: View } = {}; // current client views, by view name
	private clientItemStash: { [itemId: string]: ItemTreeItem } = {}; // items detached from the DOM, kept for re-attachment

	/**
	 * @param $rootItemContainer DOM element the root layout item is (re-)attached to
	 * @param viewGroupFactory creates a TabPanelItem for a view-group descriptor
	 * @param setViewGroupPanelStateFunction applies the panel state to a view group
	 * @param context shared UI context
	 */
	constructor(
		private $rootItemContainer: HTMLElement,
		private viewGroupFactory: (config: UiWorkSpaceLayoutViewGroupItemConfig, parent: SplitPaneItem) => TabPanelItem,
		private setViewGroupPanelStateFunction: (viewGroupItem: TabPanelItem, panelState: UiViewGroupPanelState) => void,
		private context: TeamAppsUiContext
	) {
	}

	/**
	 * Applies the new layout descriptor to the current item tree and returns the
	 * (possibly reused) new root item.
	 *
	 * @param currentRootItem the current client-side root item; may be null when there is no layout yet
	 * @param newLayoutDescriptor root of the descriptor tree to apply
	 * @param newViewConfigs configs for views that do not exist on the client yet
	 */
	public apply(
		currentRootItem: ItemTreeItem,
		newLayoutDescriptor: UiWorkSpaceLayoutItemConfig,
		newViewConfigs: UiWorkSpaceLayoutViewConfig[]
	): ItemTreeItem {
		// Reset all per-apply lookup state.
		this.descriptorItemById = {};
		this.descriptorViewNames = [];
		this.clientItemsById = {};
		this.clientViewsByName = {};
		this.clientItemStash = {};
		this.buildDescriptorDictionaries(newLayoutDescriptor);
		if (currentRootItem != null) {
			this.buildClientItemDictionaries(currentRootItem);
			// Detach/stash/remove everything that is not where the descriptor wants it.
			this.cleanupUnknownClientItems(currentRootItem, newLayoutDescriptor, null);
		}
		// Rebuild (reusing stashed/existing items) along the descriptor structure.
		return this.addNewStructure(newLayoutDescriptor, null, false, newViewConfigs);
	}

	// Recursively indexes the descriptor tree into descriptorItemById and
	// collects all referenced view names into descriptorViewNames.
	private buildDescriptorDictionaries(descriptorItem: UiWorkSpaceLayoutItemConfig) {
		this.descriptorItemById[descriptorItem.id] = descriptorItem;
		if (isTabPanelDescriptor(descriptorItem)) {
			this.descriptorViewNames.push(...descriptorItem.viewNames);
		} else if (isSplitPanelDescriptor(descriptorItem)) {
			if (descriptorItem.firstChild != null) {
				this.buildDescriptorDictionaries(descriptorItem.firstChild);
			}
			if (descriptorItem.lastChild != null) {
				this.buildDescriptorDictionaries(descriptorItem.lastChild);
			}
		}
	}

	// Recursively indexes the current client tree into clientItemsById and
	// clientViewsByName.
	private buildClientItemDictionaries(item: ItemTreeItem) {
		this.clientItemsById[item.id] = item;
		if (item instanceof TabPanelItem) {
			item.tabs.forEach(tab => this.clientViewsByName[tab.viewName] = tab);
		} else if (item instanceof SplitPaneItem) {
			this.buildClientItemDictionaries(item.firstChild);
			this.buildClientItemDictionaries(item.lastChild);
		}
	}

	/**
	 * Walks the client tree and, for each item, either keeps it in place (id
	 * matches the descriptor at the same position), stashes it for reuse at a
	 * different position, or removes it from its parent.
	 */
	public cleanupUnknownClientItems(clientSideItem: ItemTreeItem, descriptorItem: UiWorkSpaceLayoutItemConfig, parent: SplitPaneItem | null) {
		// descriptorItem may be null for recursive executions of this method!
		if (descriptorItem != null && descriptorItem.id === clientSideItem.id) {
			// Item stays at this position; recurse into its children/views.
			if (clientSideItem instanceof SplitPaneItem) {
				this.cleanupUnknownClientItems(clientSideItem.firstChild, (descriptorItem as UiWorkSpaceLayoutSplitItemConfig).firstChild, clientSideItem);
				this.cleanupUnknownClientItems(clientSideItem.lastChild, (descriptorItem as UiWorkSpaceLayoutSplitItemConfig).lastChild, clientSideItem);
			} else if (clientSideItem instanceof TabPanelItem) {
				this.stashUnknownViews(clientSideItem, (descriptorItem as UiWorkSpaceLayoutViewGroupItemConfig).viewNames);
			}
		} else {
			let correspondingDescriptorItem = this.descriptorItemById[clientSideItem.id];
			if (correspondingDescriptorItem != null) {
				// Item exists elsewhere in the new layout: detach it from the DOM
				// and stash it so addNewStructure() can re-attach it there.
				this.clientItemStash[clientSideItem.id] = clientSideItem;
				clientSideItem.component.getMainElement().remove();
				this.cleanupUnknownClientItems(clientSideItem, correspondingDescriptorItem, null);
			} else {
				// not referenced in the descriptor! however, descendants might well be referenced in the descriptor!
				if (clientSideItem instanceof TabPanelItem) {
					this.stashUnknownViews(clientSideItem, []);
				} else if (clientSideItem instanceof SplitPaneItem) {
					this.cleanupUnknownClientItems(clientSideItem.firstChild, null, clientSideItem);
					this.cleanupUnknownClientItems(clientSideItem.lastChild, null, clientSideItem);
				}
			}
			// remove the clientSideItem!
			if (parent != null) {
				if (clientSideItem === parent.firstChild) {
					parent.firstChild = null;
				} else {
					parent.lastChild = null;
				}
			} else {
				// this is the root item!
				clientSideItem.component.getMainElement().remove();
			}
		}
	}

	// Removes all tabs of the view group that the descriptor does not list for it.
	private stashUnknownViews(viewGroup: TabPanelItem, descriptorViewNames: string[]) {
		let viewsNotFoundInDescriptorItem = viewGroup.tabs
			.filter(tab => descriptorViewNames.indexOf(tab.viewName) === -1);
		viewsNotFoundInDescriptorItem.forEach((tab: View) => {
			let viewIsReferencedAnywhereInRootDescriptor = this.descriptorViewNames.indexOf(tab.viewName) !== -1;
			if (viewIsReferencedAnywhereInRootDescriptor) {
				// Detach the DOM element too, so the view can be re-added elsewhere.
				tab.component.getMainElement().remove();
				viewGroup.removeTab(tab);
			} else {
				// will not be used anymore in any way! just remove and destroy
				// NOTE(review): this branch only calls removeTab(); nothing is
				// visibly "destroyed" here — confirm removeTab() handles disposal.
				viewGroup.removeTab(tab);
			}
		});
	}

	/**
	 * Recursively (re)builds the item tree along the descriptor, reusing existing
	 * client items where ids match and creating new items otherwise.
	 */
	private addNewStructure(descriptor: UiWorkSpaceLayoutItemConfig, parent: SplitPaneItem | null, firstChild: boolean, newViewConfigs: UiWorkSpaceLayoutViewConfig[]) {
		if (descriptor == null) {
			return null;
		}
		let clientSideItem = this.clientItemsById[descriptor.id];
		let itemMovedToStash = this.clientItemStash[descriptor.id] != null;
		let item: ItemTreeItem;
		if (clientSideItem != null) {
			// Reuse the existing item and recurse into its children/views.
			item = clientSideItem;
			if (isSplitPanelDescriptor(descriptor)) {
				this.addNewStructure(descriptor.firstChild, clientSideItem as SplitPaneItem, true, newViewConfigs);
				this.addNewStructure(descriptor.lastChild, clientSideItem as SplitPaneItem, false, newViewConfigs);
			} else if (isTabPanelDescriptor(descriptor)) {
				this.addViews(clientSideItem as TabPanelItem, descriptor, newViewConfigs);
			}
		} else { // this is a descriptor for a new item
			item = this.createTreeItemFromLayoutDescriptor(descriptor, parent, newViewConfigs);
		}
		// Items taken from the stash were detached from the DOM in
		// cleanupUnknownClientItems() and need to be wired back in here.
		// NOTE(review): brand-new items are NOT attached in this branch — confirm
		// that creation (factory / SplitPaneItem) attaches them itself.
		if (itemMovedToStash) {
			if (parent != null) {
				if (firstChild) {
					parent.firstChild = item;
				} else {
					parent.lastChild = item;
				}
				item.parent = parent;
			} else {
				this.$rootItemContainer.append(item.component.getMainElement());
			}
		}
		// Finally apply descriptor-level presentation state.
		if (item instanceof SplitPaneItem && isSplitPanelDescriptor(descriptor)) {
			item.component.setSize(descriptor.referenceChildSize, descriptor.sizePolicy);
		} else if (item instanceof TabPanelItem && isTabPanelDescriptor(descriptor)) {
			this.setViewGroupPanelStateFunction(item, descriptor.panelState);
		}
		return item;
	}

	// Creates a fresh TabPanelItem or SplitPaneItem (with children) for a
	// descriptor that has no matching client item.
	private createTreeItemFromLayoutDescriptor(descriptor: UiWorkSpaceLayoutItemConfig, parent: SplitPaneItem, newViewConfigs: UiWorkSpaceLayoutViewConfig[]) {
		if (isTabPanelDescriptor(descriptor)) {
			let tabPanelItem = this.viewGroupFactory(descriptor, parent);
			this.addViews(tabPanelItem, descriptor, newViewConfigs);
			return tabPanelItem;
		} else if (isSplitPanelDescriptor(descriptor)) {
			let splitPaneItem = new SplitPaneItem(descriptor.id, parent, descriptor.splitDirection, descriptor.sizePolicy, descriptor.referenceChildSize, this.context);
			splitPaneItem.firstChild = this.addNewStructure(descriptor.firstChild, splitPaneItem, true, newViewConfigs);
			splitPaneItem.lastChild = this.addNewStructure(descriptor.lastChild, splitPaneItem, false, newViewConfigs);
			return splitPaneItem;
		}
	}

	/**
	 * Synchronizes the tabs of a view group with the descriptor's view list:
	 * existing tabs are moved to their new index, known-but-detached views are
	 * re-added, and unknown views are instantiated from newViewConfigs.
	 */
	private addViews(tabPanelItem: TabPanelItem, viewGroupDescriptor: UiWorkSpaceLayoutViewGroupItemConfig, newViewConfigs: UiWorkSpaceLayoutViewConfig[]) {
		viewGroupDescriptor.viewNames.forEach((viewName, index) => {
			// The first view is selected by default when no explicit selection is given.
			let selected = viewName === viewGroupDescriptor.selectedViewName || viewGroupDescriptor.selectedViewName == null && index === 0;
			let tabAlreadyInGroupItem = tabPanelItem.tabs.filter(tab => tab.viewName === viewName)[0];
			if (tabAlreadyInGroupItem) {
				tabPanelItem.moveTab(viewName, index);
				if (selected) {
					tabPanelItem.selectTab(viewName);
				}
			} else if (this.clientViewsByName[viewName]) {
				let view = this.clientViewsByName[viewName];
				tabPanelItem.addTab(view, selected, index);
			} else {
				let newViewConfig = newViewConfigs.filter(view => view.viewName === viewName)[0];
				if (newViewConfig != null) {
					let view = new View(newViewConfig.viewName, newViewConfig.tabIcon, newViewConfig.tabCaption, newViewConfig.tabCloseable, newViewConfig.lazyLoading, newViewConfig.visible, newViewConfig.component as UiComponent);
					// NOTE(review): unlike the branch above, no index is passed here —
					// confirm newly created views may always be appended at the end.
					tabPanelItem.addTab(view, selected);
				} else {
					LayoutDescriptorApplyer.logger.error("View item references non-existing view: " + viewName);
					return;
				}
			}
		});
	}
}
|
<gh_stars>1-10
import IDowntimeMetricRecord from "@statusify/core/dist/Metric/IDowntimeMetricRecord";
import IMetricRange from "@statusify/core/dist/Metric/IMetricRange";
import ISeverityTick from "../interfaces/ISeverityTick";
import { MetricType } from "@statusify/core/dist/Metric/Metric";
import React from "react";
import Severity from "@statusify/core/dist/Severity/Severity";
import WorstSeverity from "@statusify/core/dist/Util/WorstSeverity";
import dayjs from "../utils/dayjs";
import { useComponent } from "../contexts/ComponentContext";
import { useLaminar } from "../contexts/LaminarContext";
import { useStatusify } from "../contexts/StatusifyContext";
import useIncidents from "./useIncidents";
import { IncidentsQuery } from "@statusify/core/dist/Incident/IProvidesIncidents";
/**
 * React hook that computes one "severity tick" per calendar day of the given
 * metric range, combining the component's incidents and downtime-metric
 * records into the worst severity observed for each day.
 */
export default function useSeverityTicks(range: IMetricRange) {
	const statusify = useStatusify();
	const component = useComponent();
	// Config map: downtime duration threshold (seconds, as string keys) -> severity id.
	const { downtimeSeverities } = useLaminar();
	const [ ticks, setTicks ] = React.useState<ISeverityTick[]>([]);

	// Query for all incidents of this component created within the range.
	const incidentsQuery = React.useMemo((): IncidentsQuery => {
		return {
			component: component.id,
			createdAt: {
				after: range.start,
				before: range.end
			},
		}
	}, [ range, component ]);

	const incidents = useIncidents(incidentsQuery);

	// NOTE(review): an async side effect inside useMemo re-runs on every
	// dependency change but is never cancelled; React.useEffect would be the
	// conventional home for this — confirm before changing.
	React.useMemo(() => {
		new Promise(async (resolve, _reject) => {
			// Find a downtime metric
			const downtimeMetric = component.metrics?.find(m => m.type === MetricType.DOWNTIME);
			const downtimes: IDowntimeMetricRecord[] = (downtimeMetric) ? await downtimeMetric.getPeriod(range) : [];
			const severities = await statusify.getSeverities();

			// Normalize the range to whole days.
			const nStart = dayjs(range.start).startOf('day');
			const nEnd = dayjs(range.end).endOf('day');
			const daysBetween = nEnd.diff(nStart, 'days');

			// Work on each day to find its tick.
			// NOTE(review): days start at nStart + 1 day, so the first calendar
			// day of the range is skipped — confirm this is intentional.
			const dayTicks = [ ...Array(daysBetween) ].map(async (_, i) => {
				const day = dayjs(nStart).add(i + 1, 'days').startOf('day');
				const daySeverities: Severity[] = [];

				// Incidents overlapping this day (inclusive bounds).
				const dayIncidents = incidents
					.filter((incident) => {
						return day.isBetween(incident.createdAt, incident.resolvedAt, 'day', '[]');
					}).map((v) => {
						// Add to the severities
						daySeverities.push(v.severity);
						return v;
					});

				// Downtime records overlapping this day; downtime.value is a duration in milliseconds.
				const dayDowntimes: IDowntimeMetricRecord[] = (downtimes === undefined) ? [] : downtimes.filter((downtime) => {
					const startedAt = dayjs(downtime.time);
					const endedAt = startedAt.add(downtime.value, 'millisecond');
					return day.isBetween(startedAt, endedAt, 'day', '(]')
				}).map((v) => {
					// Pick the largest configured threshold (in seconds) that this
					// downtime's duration reaches.
					const index = Number(Object.keys(downtimeSeverities).reduce((a, b) => {
						if(Number(b) > Number(a) && (v.value / 1000 >= Number(b))) {
							return b;
						}
						return a;
					}))
					// Use the index to get the severity id and then get the actual severity instance
					const foundSeverity = severities.find(s => s.id === downtimeSeverities[index]);
					if(foundSeverity !== undefined) {
						daySeverities.push(foundSeverity);
					}
					return v;
				});

				return {
					date: day.toDate(),
					severity: await WorstSeverity(daySeverities, statusify),
					relatedIncidents: dayIncidents,
					relatedDowntimes: dayDowntimes
				}
			});

			// Resolve all per-day promises, then publish the ticks.
			Promise.all(dayTicks).then(setTicks).then(resolve);
		})
	}, [ component, incidents, downtimeSeverities, range, statusify ]);

	return ticks;
}
<gh_stars>1-10
import { BindGroupLayoutDescriptor, ShaderModuleDescriptor, ShaderStage } from "../webgpu";
type BindGroupLayoutDescriptorMap = Map<number, BindGroupLayoutDescriptor>;
/**
 * Shader program, corresponding to the GPU shader program.
 * @internal
 */
export class ShaderProgram {
  /** Scratch descriptor reused for every shader-module creation. */
  private static _shaderModuleDescriptor: ShaderModuleDescriptor = new ShaderModuleDescriptor();

  private readonly _bindGroupLayoutDescriptorMap: BindGroupLayoutDescriptorMap;
  private readonly _stage: ShaderStage;
  private _shader: GPUShaderModule;
  private _fragmentShader: GPUShaderModule;
  private _device: GPUDevice;

  /** The compiled module when this program was built for the vertex stage, otherwise null. */
  get vertexShader(): GPUShaderModule {
    return this._stage === GPUShaderStage.VERTEX ? this._shader : null;
  }

  /** The compiled module when this program was built for the compute stage, otherwise null. */
  get computeShader(): GPUShaderModule {
    return this._stage === GPUShaderStage.COMPUTE ? this._shader : null;
  }

  /** The optional fragment-stage module (only set when a fragment source was given). */
  get fragmentShader(): GPUShaderModule {
    return this._fragmentShader;
  }

  /** Per-group bind group layout descriptors (a deep-cloned copy of the caller's map). */
  get bindGroupLayoutDescriptorMap(): BindGroupLayoutDescriptorMap {
    return this._bindGroupLayoutDescriptorMap;
  }

  /**
   * @param device - GPU device used to create the shader modules
   * @param source - vertex or compute shader source, selected by `stage`
   * @param stage - shader stage of `source`
   * @param bindGroupLayoutDescriptorMap - optional layout descriptors; cloned so the caller's map stays untouched
   * @param fragmentSource - optional fragment shader source
   */
  constructor(
    device: GPUDevice,
    source: string,
    stage: ShaderStage,
    bindGroupLayoutDescriptorMap: BindGroupLayoutDescriptorMap = null,
    fragmentSource: string = null
  ) {
    if (bindGroupLayoutDescriptorMap) {
      const clonedMap = new Map<number, BindGroupLayoutDescriptor>();
      for (const [group, descriptor] of bindGroupLayoutDescriptorMap) {
        const clonedDescriptor = new BindGroupLayoutDescriptor();
        descriptor.cloneTo(clonedDescriptor);
        clonedMap.set(group, clonedDescriptor);
      }
      this._bindGroupLayoutDescriptorMap = clonedMap;
    }
    this._stage = stage;
    this._device = device;
    this._createProgram(source, fragmentSource);
  }

  /**
   * Compiles the main (and optional fragment) shader module from source.
   */
  private _createProgram(source: string, fragmentSource: string = null) {
    const moduleDescriptor = ShaderProgram._shaderModuleDescriptor;
    moduleDescriptor.code = source;
    this._shader = this._device.createShaderModule(moduleDescriptor);
    if (fragmentSource) {
      moduleDescriptor.code = fragmentSource;
      this._fragmentShader = this._device.createShaderModule(moduleDescriptor);
    }
  }
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DLA-500-1
#
# Security announcement date: 2016-06-01 00:00:00 UTC
# Script generation date: 2017-01-30 21:09:26 UTC
#
# Operating System: Debian 7 (Wheezy)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - imagemagick:8:6.7.7.10-5+deb7u6
#
# Last versions recommended by the security team:
# - imagemagick:8:6.7.7.10-5+deb7u11
#
# CVE List:
# - CVE-2016-5118
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade imagemagick to the version recommended by the Debian LTS security team.
readonly IMAGEMAGICK_TARGET="imagemagick=8:6.7.7.10-5+deb7u11"
sudo apt-get install --only-upgrade "${IMAGEMAGICK_TARGET}" -y
|
#!/bin/bash -e
# this script is run during the image build

# Append the bundled pool configuration to the default php5-fpm pool, then
# drop the now-redundant asset file.
cat /container/service/phpldapadmin/assets/php5-fpm/pool.conf >> /etc/php5/fpm/pool.d/www.conf
rm /container/service/phpldapadmin/assets/php5-fpm/pool.conf

# Writable temp directory owned by the web user.
mkdir -p /var/www/tmp
chown www-data:www-data /var/www/tmp

# remove apache default host
a2dissite 000-default
rm -rf /var/www/html

# delete unnecessary files
rm -rf /var/www/phpldapadmin_bootstrap/doc

# apply php5.5 patch
patch -p1 -d /var/www/phpldapadmin_bootstrap < /container/service/phpldapadmin/assets/php5.5.patch
# PHP >= 5.5 ships its own password_hash(); rename phpLDAPadmin's to avoid the clash.
sed -i "s/password_hash/password_hash_custom/g" /var/www/phpldapadmin_bootstrap/lib/TemplateRender.php

# fix php5-fpm $_SERVER['SCRIPT_NAME'] bad value with cgi.fix_pathinfo=0
sed -i "s/'SCRIPT_NAME'/'PATH_INFO'/g" /var/www/phpldapadmin_bootstrap/lib/common.php

# Install the standalone change-password page.
cp /container/service/phpldapadmin/changepassword.php /var/www/changepassword.php
|
#!/bin/bash

# Build and run clickhouse-operator locally (dev config), then clean the binary.
# Do not forget to update version

# Source configuration
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
LOG_DIR="${CUR_DIR}/log"
source "${CUR_DIR}/go_build_config.sh"

echo -n "Building ${OPERATOR_BIN}, please wait..."
if "${CUR_DIR}/go_build_operator.sh"; then
    echo "successfully built ${OPERATOR_BIN}. Starting"
    mkdir -p "${LOG_DIR}"
    # Drop rotated logs from previous runs before starting fresh.
    rm -f "${LOG_DIR}"/clickhouse-operator.*.log.*
    "${OPERATOR_BIN}" \
        -config="${SRC_ROOT}/config/config-dev.yaml" \
        -alsologtostderr=true \
        -log_dir=log \
        -v=1
    # -logtostderr=true \
    # -stderrthreshold=FATAL \
    # -log_dir=log Log files will be written to this directory instead of the default temporary directory
    # -alsologtostderr=true Logs are written to standard error as well as to files
    # -logtostderr=true Logs are written to standard error instead of to files
    # -stderrthreshold=FATAL Log events at or above this severity are logged to standard error as well as to files

    # And clean binary after run. It'll be rebuilt next time
    "${CUR_DIR}/go_build_operator_clean.sh"

    echo "======================"
    echo "=== Logs available ==="
    echo "======================"
    ls "${LOG_DIR}"/*
else
    echo "unable to build ${OPERATOR_BIN}"
fi
|
require "ar_find_in_batches_with_usefulness/version"
require "active_record"
# Patches ActiveRecord::Relation so that find_in_batches can optionally use a
# server-side PostgreSQL cursor (pass cursor: true), which streams rows in
# batches without repeated OFFSET/keyset queries.
module ArFindInBatchesWithUsefulness
  ActiveRecord::Relation.class_eval do
    # based on https://github.com/afair/postgresql_cursor/blob/e8630d5f04e926a3fa152c78bc629d85c5cc573d/lib/postgresql_cursor/active_record/relation/cursor_iterators.rb
    # Returns sql string like #to_sql, but with bind parameters interpolated.
    # ActiveRecord sets up query as prepared statements with bind variables.
    # Cursors will prepare statements regardless.
    def to_unprepared_sql
      if connection.respond_to?(:unprepared_statement)
        connection.unprepared_statement do
          to_sql
        end
      else
        to_sql
      end
    end

    # Alias of find_in_batches that allows use of a cursor
    #
    # Only supports PostgreSQL adapter
    #
    # @param options include :cursor option to opt in
    #
    # @return enumerator unless block given, otherwise executes relation in batches
    def find_in_batches_with_usefulness(options = {}, &block)
      if connection.adapter_name == "PostgreSQL" && options[:cursor]
        find_in_batches_with_cursor(options, &block)
      else
        # Fall back to stock ActiveRecord batching on other adapters / without opt-in.
        find_in_batches_without_usefulness(options) do |batch|
          yield batch
        end
      end
    end
    # NOTE(review): alias_method_chain was removed in Rails 5 — confirm the
    # ActiveRecord version this gem targets before upgrading.
    alias_method_chain :find_in_batches, :usefulness

    # Streams batches through a DECLARE/FETCH FORWARD cursor inside a single
    # transaction. Supports :batch_size (default 1000) and :start (rows to skip).
    def find_in_batches_with_cursor(options = {})
      batch_size = options[:batch_size] || 1000
      unless block_given?
        # Enumerator size hint: number of batches, not rows.
        return to_enum(:find_in_batches_with_cursor) do
          (size - 1).div(batch_size) + 1
        end
      end
      klass.transaction do
        begin
          sql = to_unprepared_sql
          # Cursor name is derived from the query's hash so it is stable per query.
          cursor = "#{table_name}_in_batches_cursor_#{sql.hash.abs.to_s(36)}"
          connection.execute("DECLARE #{cursor} CURSOR FOR #{sql}")
          move_forward(cursor, options[:start]) if options[:start]
          batch = fetch_forward(batch_size, cursor)
          until batch.empty?
            yield batch
            # A short batch means the cursor is exhausted; avoid one extra FETCH.
            break if batch.size < batch_size
            batch = fetch_forward(batch_size, cursor)
          end
          # not ensure; if the transaction rolls back due to another exception, it will
          # automatically close
          connection.execute("CLOSE #{cursor}")
          batch
        end
      end
    end

    private

    # Skips +start+ rows without materializing them.
    def move_forward(cursor, start)
      connection.execute("MOVE FORWARD #{start} IN #{cursor}")
    end

    # Fetches the next +batch_size+ records, bypassing the query cache.
    def fetch_forward(batch_size, cursor)
      connection.uncached { klass.find_by_sql("FETCH FORWARD #{batch_size} FROM #{cursor}") }
    end
  end
end
|
#!/bin/bash
set -e

# Sets up a Go workspace for AIStore: profile env vars, pinned Go toolchain,
# source checkout, and a `make node` build.
echo "Add GOPATH and GOBIN"
sudo touch /etc/profile.d/aispaths.sh
sudo sh -c 'cat << EOF > /etc/profile.d/aispaths.sh
export GOBIN=$HOME/ais/bin
export GOPATH=$HOME/ais
export PATH=$PATH:/usr/local/go/bin:$GOBIN
export AISSRC=$HOME/ais/src/github.com/NVIDIA/aistore/ais
export AISTORE_SRC=$HOME/ais/src/github.com/NVIDIA/aistore
EOF'
# NOTE(review): 777 leaves the profile script world-writable; 644 would
# suffice for sourcing — confirm nothing needs to rewrite it at runtime.
sudo chmod 777 /etc/profile.d/aispaths.sh
. /etc/profile.d/aispaths.sh

# Recreate the workspace from scratch.
sudo rm -rf ~/ais || true
mkdir -p ~/ais/{bin,pkg,src}

# (Re)install Go only when the installed version differs from the pinned one.
GOLANG_VER_FILE="/usr/local/go/VERSION"
GOLANG_VERSION="go1.16"
CURRENT_GOLANG_VERSION=""
if [[ -f ${GOLANG_VER_FILE} ]]; then
    CURRENT_GOLANG_VERSION=$(cat ${GOLANG_VER_FILE})
fi

if [[ ${CURRENT_GOLANG_VERSION} != "${GOLANG_VERSION}" ]]; then
    echo "Current Golang version does not match with expected, so updating Golang to " ${GOLANG_VERSION}
    sudo rm -rf /usr/local/go /usr/bin/go
    echo "Downloading Go..."
    curl -LO https://storage.googleapis.com/golang/${GOLANG_VERSION}.linux-amd64.tar.gz
    # NOTE(review): the checksum is only printed, never compared against a
    # known value — the download is not actually verified.
    shasum -a 256 go1.*
    sudo tar -C /usr/local -xvzf ${GOLANG_VERSION}.linux-amd64.tar.gz >/dev/null
    sudo ln -s /usr/local/go/bin/go /usr/bin/go
    rm -rf ${GOLANG_VERSION}.linux-amd64.tar.gz
fi

# Clone AIStore into the GOPATH layout and build the node target.
GIT_NVIDIA=${GOPATH}/src/github.com/NVIDIA
mkdir -p "${GIT_NVIDIA}"
cd "${GIT_NVIDIA}"
git clone https://github.com/NVIDIA/aistore.git

echo "Backend provider(s) set to: ${AIS_BACKEND_PROVIDERS}"
cd aistore && make node
|
# Compile dip.cc to WebAssembly with Emscripten:
#   -s WASM=1   emit a .wasm module (with dip.js as JS glue),
#   -O3         full optimization,
#   --post-js   append post-script.js to the generated glue code.
# NOTE(review): sudo is normally unnecessary for emcc — confirm it is required here.
sudo emcc dip.cc -s WASM=1 -O3 --post-js post-script.js -o dip.js
|
package com.abubusoft.kripton.examplea0.recipes.v2.persistence;
import com.abubusoft.kripton.android.annotation.BindDao;
import com.abubusoft.kripton.android.annotation.BindSqlInsert;
import com.abubusoft.kripton.android.annotation.BindSqlSelect;
import com.abubusoft.kripton.examplea0.recipes.v2.model.RecipeV2;
import java.util.List;
/**
 * Kripton DAO for {@link RecipeV2} entities. All CRUD operations are inherited
 * from {@code BaseV2Dao}; no recipe-specific queries are declared here.
 *
 * Created by xcesco on 01/09/2017.
 */
@BindDao(RecipeV2.class)
public interface RecipeV2Dao extends BaseV2Dao<RecipeV2> {
}
|
#ifndef kinect_azure_structs_h
#define kinect_azure_structs_h

#ifdef KINECT_AZURE_ENABLE_BODY_TRACKING
// One tracked joint: camera-space coordinates, orientation quaternion, the
// joint's projection into the color and depth images, and a confidence value.
typedef struct _JSJoint
{
  float cameraX = 0;
  float cameraY = 0;
  float cameraZ = 0;
  //
  float orientationX = 0;
  float orientationY = 0;
  float orientationZ = 0;
  float orientationW = 0;
  //
  float colorX = 0;
  float colorY = 0;
  //
  float depthX = 0;
  float depthY = 0;
  //
  int confidence = 0;
} JSJoint;

// Fixed-size set of joints for one tracked body (K4ABT_JOINT_COUNT entries).
typedef struct _JSSkeleton
{
  JSJoint joints[K4ABT_JOINT_COUNT];
} JSSkeleton;

// One tracked body: its tracker-assigned id plus its skeleton.
typedef struct _JSBody
{
  int id = 0;
  JSSkeleton skeleton;
} JSBody;

// Owns a heap array of bodies for one body-tracking frame.
typedef struct _JSBodyFrame
{
  JSBody* bodies = NULL;
  int numBodies = 0;
  // Releases the bodies array (expected to have been allocated with new[]
  // by the producer) and resets the count.
  void reset() {
    if (bodies != NULL) {
      delete [] bodies;
      bodies = NULL;
    }
    numBodies = 0;
  }
} JSBodyFrame;
#endif // KINECT_AZURE_ENABLE_BODY_TRACKING

// Owns one raw image buffer plus its dimensions/stride.
typedef struct _JSImageFrame
{
  uint8_t* image_data = NULL;
  size_t image_length = 0;
  int stride_bytes = 0;
  int width = 0;
  int height = 0;
  // Releases the pixel buffer (expected new[]-allocated) and zeroes metadata.
  void reset() {
    if (image_data != NULL) {
      delete [] image_data;
      image_data = NULL;
    }
    image_length = 0;
    stride_bytes = 0;
    width = 0;
    height = 0;
  }
} JSImageFrame;

// Aggregates every per-capture output: optional body frame plus the color,
// depth, IR and the two registered (transformed) image frames.
typedef struct _JSFrame
{
#ifdef KINECT_AZURE_ENABLE_BODY_TRACKING
  JSBodyFrame bodyFrame;
#endif // KINECT_AZURE_ENABLE_BODY_TRACKING
  JSImageFrame colorImageFrame;
  JSImageFrame depthImageFrame;
  JSImageFrame irImageFrame;
  JSImageFrame depthToColorImageFrame;
  JSImageFrame colorToDepthImageFrame;
  // Frees all image buffers; body data has its own reset below.
  void reset() {
    colorImageFrame.reset();
    depthImageFrame.reset();
    irImageFrame.reset();
    depthToColorImageFrame.reset();
    colorToDepthImageFrame.reset();
  }
  // Frees the body-tracking data (no-op when body tracking is disabled).
  void resetBodyFrame() {
#ifdef KINECT_AZURE_ENABLE_BODY_TRACKING
    bodyFrame.reset();
#endif // KINECT_AZURE_ENABLE_BODY_TRACKING
  }
} JSFrame;

// Capture options that extend the SDK's device configuration.
typedef struct _CustomDeviceConfig
{
  bool include_depth_to_color = false;
  bool include_color_to_depth = false;
  bool flip_BGRA_to_RGBA = false;
  void reset() {
    include_depth_to_color = false;
    include_color_to_depth = false;
    flip_BGRA_to_RGBA = false;
  }
} CustomDeviceConfig;

#endif
|
# Amazon FPGA Hardware Development Kit
#
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use
# this file except in compliance with the License. A copy of the License is
# located at
#
# http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or
# implied. See the License for the specific language governing permissions and
# limitations under the License.
info_msg "Setting up environment variables"

# Make sure that AWS_FPGA_REPO_DIR is set to the location of this script.
if [[ ":$AWS_FPGA_REPO_DIR" == ':' ]]; then
    debug_msg "AWS_FPGA_REPO_DIR not set so setting to $script_dir"
    export AWS_FPGA_REPO_DIR=$script_dir
elif [[ $AWS_FPGA_REPO_DIR != $script_dir ]]; then
    info_msg "Changing AWS_FPGA_REPO_DIR from $AWS_FPGA_REPO_DIR to $script_dir"
    export AWS_FPGA_REPO_DIR=$script_dir
else
    debug_msg "AWS_FPGA_REPO_DIR=$AWS_FPGA_REPO_DIR"
fi

# HDK
# Clear environment variables so re-sourcing starts from a clean slate.
unset HDK_DIR
unset HDK_COMMON_DIR
unset HDK_SHELL_DIR
unset HDK_SHELL_DESIGN_DIR

# Export the helper functions so subshells can use them too.
export -f allow_non_root
export -f allow_others

if allow_non_root || allow_others ; then
    export AWS_FPGA_SDK_GROUP=${AWS_FPGA_SDK_GROUP:-"fpgauser"}
    export SDK_NON_ROOT_USER=$(whoami)
    info_msg "Allowing group ${AWS_FPGA_SDK_GROUP} access to FPGA management tools and resources"
fi

export HDK_DIR=$AWS_FPGA_REPO_DIR/hdk

# The next variable should not be modified and should always point to the /common directory under HDK_DIR
export HDK_COMMON_DIR=$HDK_DIR/common

# Point to the latest version of AWS shell (resolve the symlink to a real path)
export HDK_SHELL_DIR=$(readlink -f $HDK_COMMON_DIR/shell_stable)

# Set the common shell design dir
export HDK_SHELL_DESIGN_DIR=$HDK_SHELL_DIR/design

# SDK
unset SDK_DIR
export SDK_DIR=$AWS_FPGA_REPO_DIR/sdk

# SDACCEL
# Setup Location of SDACCEL_DIR
export SDACCEL_DIR=$AWS_FPGA_REPO_DIR/SDAccel

# Vitis
# Setup Location of VITIS_DIR
export VITIS_DIR=$AWS_FPGA_REPO_DIR/Vitis

# PYTHONPATH
# Update PYTHONPATH with libraries used for unit testing
python_lib=$AWS_FPGA_REPO_DIR/shared/lib
PYTHONPATH=$python_lib:$PYTHONPATH
export PYTHONPATH

# PATH Changes
# Strip any previously-added .../shared/bin/scripts entries first so that
# re-sourcing this script does not accumulate duplicate PATH components.
export PATH=$(echo $PATH | sed -e 's/\(^\|:\)[^:]\+\/shared\/bin\/scripts\(:\|$\)/:/g; s/^://; s/:$//')
PATH=$AWS_FPGA_REPO_DIR/shared/bin/scripts:$PATH

# Enable xilinx licensing
export XILINX_ENABLE_AWS_WHITELIST=095707098027
|
package support;
import java.util.List;
import java.util.Map;
/**
 * Plain mutable data holder for the test framework configuration.
 * NOTE(review): field names (including the "enviroment" misspelling) appear to
 * be part of the public contract with whatever populates this object — do not
 * rename without checking the loader.
 */
public class Config {
    // --- WebDriver settings ---
    public String browser;
    public boolean isHeadless;
    // Timeouts — NOTE(review): unit (seconds vs. ms) is not visible here; confirm against the loader.
    public int implicitTimeout;
    public int pageLoadTimeout;
    public int explicitTimeout;
    public List<String> supportedOsList;
    // Admin credentials/settings as key-value pairs.
    public Map<String, String>admin;

    // --- Environment 1: QA ---
    public String enviroment1;
    public String dBQAUrl;
    public String dBQAUser;
    public String dBQAPass;
    public String qAEmail;
    public String qAPass;
    public String baseApiQAUrl;

    // --- Environment 2: Stage ---
    public String enviroment2;
    public String dBStageUrl;
    public String dBStageUser;
    public String dBStagePass;
    public String baseApiStageUrl;
    public String stageEmail;
    public String stagePass;
}
|
package org.sterl.testproject.addresses.dao;
import org.sterl.testproject.addresses.model.AddressBE;
/**
 * Data-access interface for {@link AddressBE} entities.
 */
public interface AddressDao {
    /**
     * Persists the given address entity.
     *
     * @param e the entity to save
     * @return the persisted entity (whether it is the same instance is implementation-defined)
     */
    AddressBE save(AddressBE e);
}
|
<gh_stars>1-10
/*
* Copyright (c) 2001-2004 Sendmail, Inc. All Rights Reserved
*/
package com.sendmail.jilter;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Contains the actions available during {@link JilterHandler#eom eom} processing.
 */
public interface JilterEOMActions
{
    /**
     * Add a header to the current message.
     *
     * @param headerf the header name.
     * @param headerv the header value.
     * @throws IOException if an I/O error occurs.
     */
    public void addheader(String headerf, String headerv)
        throws IOException;

    /**
     * Change or delete a message header.
     *
     * @param headerf the header name.
     * @param hdridx header index value (1-based). A hdridx value of 1 will modify
     * the first occurrence of a header named headerf. If hdridx is greater than the number
     * of times headerf appears, a new copy of headerf will be added.
     * @param headerv the new value of the given header. headerv == <code>null</code> indicates
     * that the header should be deleted.
     * @throws IOException if an I/O error occurs.
     */
    public void chgheader(String headerf, int hdridx, String headerv)
        throws IOException;

    /**
     * Add a recipient for the current message.
     *
     * @param rcpt the new recipient's address.
     * @throws IOException if an I/O error occurs.
     */
    public void addrcpt(String rcpt)
        throws IOException;

    /**
     * Removes the named recipient from the current message's envelope.
     *
     * @param rcpt the recipient address to be removed.
     * @throws IOException if an I/O error occurs.
     */
    public void delrcpt(String rcpt)
        throws IOException;

    /**
     * Replaces the body of the current message. If called more than once,
     * subsequent calls result in data being appended to the new body.
     *
     * @param bodyp a buffer containing the new body data. Body data should be in CR/LF form.
     * @throws IOException if an I/O error occurs.
     */
    public void replacebody(ByteBuffer bodyp)
        throws IOException;

    /**
     * Notify the MTA that an operation is still in progress.
     *
     * @throws IOException if an I/O error occurs.
     */
    public void progress()
        throws IOException;

    /**
     * Set the resulting EOM status. Note: Calling the method essentially invalidates this object. The result of any subsequent
     * calls to methods on this object is undefined.
     *
     * @param status the resulting status of EOM processing.
     * @throws IOException if an I/O error occurs.
     */
    public void finish(JilterStatus status)
        throws IOException;
}
|
using System;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Threading.Tasks;
/// <summary>
/// One-shot asynchronous event: the FIRST call to <see cref="Subscribe"/> starts
/// the supplied task; all callers share a single observable that emits
/// <c>true</c> then completes on success, or errors on failure.
/// </summary>
public class AsyncEvent
{
    // The work to run; captured from the first Subscribe() call only.
    private Func<Task> function;
    // Set once the first subscriber has arrived; guarded by lockObject.
    private bool hasSubscribers = false;
    private object lockObject = new object();
    // Broadcasts the single completion (true) or the failure to all observers.
    private ISubject<bool> subject = new Subject<bool>();

    /// <summary>
    /// Invokes the specified <paramref name="function"/> asynchronously upon the first subscription to the returned observable.
    /// </summary>
    /// <remarks>
    /// Only the <paramref name="function"/> passed on the FIRST call is ever
    /// executed; the argument of subsequent calls is ignored and those callers
    /// simply observe the same shared result.
    /// </remarks>
    public IObservable<bool> Subscribe(Func<Task> function)
    {
        lock (lockObject)
        {
            if (!hasSubscribers)
            {
                this.function = function;
                hasSubscribers = true;
                // Fire-and-forget: outcome is delivered exclusively through the subject.
                Task.Run(async () =>
                {
                    try
                    {
                        await this.function();
                        subject.OnNext(true);
                        subject.OnCompleted();
                    }
                    catch (Exception ex)
                    {
                        subject.OnError(ex);
                    }
                });
            }
        }
        return subject.AsObservable();
    }
}
import pandas as pd
# Load the per-employee working-hours dataset; expects an 'Hours' column.
df = pd.read_csv("working_hours_data.csv")
# Define a function to check if an employee is working more than 8 hours
def check_hours(hours):
    """Return True when ``hours`` exceeds the standard 8-hour workday.

    Args:
        hours: Number of hours worked (int or float).

    Returns:
        bool: True if strictly more than 8 hours were worked, else False.
    """
    # The comparison already yields the boolean; no if/else ladder needed.
    return hours > 8
# Flag each employee's overtime status in a new boolean column.
df['More than 8 hours?'] = df['Hours'].apply(check_hours)

# Show the augmented table on stdout.
print(df)
#!/bin/sh
# Build the PHP 7.1 CLI image once, applying both the version tag and the
# "latest" tag in a single build instead of building the same context twice.
docker build -t symbiote/php-cli:7.1 -t symbiote/php-cli:latest .
/**
 * Package containing the Square, FindLoop, Turn, Check, BubbleSort, ArrayChar,
 * Matrix, MatrixCheck, and ArrayDuplicate array exercises and their tests.
 *
 * @author <NAME> ( https://vk.com/id428714363)
 * @version 1.0
 * @since 26.03.2019
 */
package ru.job4j.array;
<gh_stars>1-10
// base
import URender from './u-render'
import ULink from './u-link'
import UButton from './u-button'
import UButtonGroup from './u-button-group'
import UIcon from './u-icon'
import ULayout from './u-layout'
// form
import UInput from './u-input'
import USelect from './u-select'
import USwitch from './u-switch'
import UCheckbox from './u-checkbox/u-checkbox'
import UCheckboxs from './u-checkbox/u-checkboxs'
import URadio from './u-radio/u-radio'
import URadios from './u-radio/u-radios'
import UForm from './u-form/u-form'
import UFormItem from './u-form/u-form-item'
// usage
import UTableColumn from './u-table/u-table-column'
import UTable from './u-table/u-table'
import UPagination from './u-pagination'
import UTab from './u-tab/u-tab'
import UTabs from './u-tab/u-tabs'
import UCrumb from './u-crumb/u-crumb.vue'
import UCrumbItem from './u-crumb/u-crumb-item.vue'
import UStep from './u-step/u-step'
import USteps from './u-step/u-steps'
// popup
import UModal from './u-modal'
import UToast from './u-toast'
import UPopper from './u-popper'
import UPopover from './u-popover'
import UTooltip from './u-tooltip'
import UDivider from './u-divider'
import UBackTop from './u-back-top'
import UTransition from './u-transition.js'
import { version } from '../package.json'
import './assets/css/index.scss'
// Every exported component, keyed by its global registration name.
const components = {
  URender,
  ULink,
  UButton,
  UButtonGroup,
  UInput,
  USelect,
  UCheckbox,
  UCheckboxs,
  USwitch,
  ULayout,
  UTableColumn,
  UTable,
  UPagination,
  UTab,
  UTabs,
  UStep,
  USteps,
  UModal,
  UToast,
  UPopper,
  UPopover,
  UTooltip,
  URadio,
  URadios,
  UIcon,
  UForm,
  UFormItem,
  UCrumb,
  UCrumbItem,
  UDivider,
  UBackTop,
  UTransition
}
// Vue plugin entry point: registers every component globally and installs
// the $toast/$confirm/$alert prototype helpers.
const install = function(Vue, opts = {}) {
  Object.keys(components).forEach(key => Vue.component(key, components[key]))
  Vue.prototype.$toast = UToast.toast
  Vue.prototype.$confirm = UModal.confirm
  Vue.prototype.$alert = UModal.alert
  // Expose the plugin options read-only on every component instance.
  Object.defineProperty(Vue.prototype, '$YIUI', {
    configurable: false,
    enumerable: false,
    writable: false,
    value: opts
  })
}
// auto install when Vue was loaded from a <script> tag
if (typeof window !== 'undefined' && window.Vue) {
  install(window.Vue)
}
export default {
  version,
  install,
  ...components
}
|
#! /bin/sh
#-----------------------------------------------------------------------
# Default script
# Source the shared helper, which presumably defines the Release_CLI
# function used below -- TODO confirm in Release_CLI_GNU.sub.
. ./Release_CLI_GNU.sub
#-----------------------------------------------------------------------
# Launch
# Build the command-line release for macOS on Intel.
Release_CLI Mac Intel
CREATE OR REPLACE FUNCTION generate_weekly_schedule() RETURNS JSON AS $$
-- Builds a JSON object with one key per weekday name; each value is the JSON
-- array of that day's schedule entries (schedule_id, start_hour, end_hour,
-- duration, fee). Days without entries yield an empty array instead of NULL.
BEGIN
    RETURN (
        SELECT json_build_object(
            -- The FILTER clause already restricts each aggregate to a single
            -- weekday, which made the per-row CASE wrapper of the original
            -- redundant; it was removed.
            -- BUG FIX: the original called COALESCE with a single argument,
            -- which is a no-op; '[]'::json is now supplied as the fallback.
            'Monday', COALESCE(json_agg(json_build_object('schedule_id', schedules.scid, 'start_hour', schedules.start_hour, 'end_hour', schedules.end_hour, 'duration', schedules.duration, 'fee', schedules.fee)) FILTER (WHERE schedules.weekday = 1), '[]'::json),
            'Tuesday', COALESCE(json_agg(json_build_object('schedule_id', schedules.scid, 'start_hour', schedules.start_hour, 'end_hour', schedules.end_hour, 'duration', schedules.duration, 'fee', schedules.fee)) FILTER (WHERE schedules.weekday = 2), '[]'::json),
            'Wednesday', COALESCE(json_agg(json_build_object('schedule_id', schedules.scid, 'start_hour', schedules.start_hour, 'end_hour', schedules.end_hour, 'duration', schedules.duration, 'fee', schedules.fee)) FILTER (WHERE schedules.weekday = 3), '[]'::json),
            'Thursday', COALESCE(json_agg(json_build_object('schedule_id', schedules.scid, 'start_hour', schedules.start_hour, 'end_hour', schedules.end_hour, 'duration', schedules.duration, 'fee', schedules.fee)) FILTER (WHERE schedules.weekday = 4), '[]'::json),
            'Friday', COALESCE(json_agg(json_build_object('schedule_id', schedules.scid, 'start_hour', schedules.start_hour, 'end_hour', schedules.end_hour, 'duration', schedules.duration, 'fee', schedules.fee)) FILTER (WHERE schedules.weekday = 5), '[]'::json),
            'Saturday', COALESCE(json_agg(json_build_object('schedule_id', schedules.scid, 'start_hour', schedules.start_hour, 'end_hour', schedules.end_hour, 'duration', schedules.duration, 'fee', schedules.fee)) FILTER (WHERE schedules.weekday = 6), '[]'::json),
            'Sunday', COALESCE(json_agg(json_build_object('schedule_id', schedules.scid, 'start_hour', schedules.start_hour, 'end_hour', schedules.end_hour, 'duration', schedules.duration, 'fee', schedules.fee)) FILTER (WHERE schedules.weekday = 7), '[]'::json)
        )
        FROM schedules
    );
END;
$$ LANGUAGE plpgsql;
import re


def count_keyword_occurrences_in_comments(file_path, keywords):
    """Count keyword occurrences inside C-style comments of a source file.

    Counts case-insensitively within ``//`` line comments and ``/* ... */``
    block comments. Matching is line-granular: on a line that both closes a
    block comment and contains code, keywords in the trailing code are
    counted too (same approximation as the original).

    BUG FIX: the original counted a line twice when it opened a block
    comment (once in the ``'/*'`` branch and again in the ``in_comment``
    branch), and also double-counted ``//`` lines inside block comments.

    Args:
        file_path: Path of the source file to scan.
        keywords: Iterable of keywords to count.

    Returns:
        Dict mapping each keyword to its total number of occurrences.
    """
    keyword_counts = {keyword: 0 for keyword in keywords}
    in_comment = False

    def count_line(text):
        # Fold this line's case-insensitive matches into the totals.
        lowered = text.lower()
        for keyword in keywords:
            keyword_counts[keyword] += lowered.count(keyword.lower())

    with open(file_path, 'r') as file:
        for raw_line in file:
            line = raw_line.strip()
            if in_comment:
                # Inside a block comment: count the line exactly once.
                count_line(line)
                if '*/' in line:
                    in_comment = False
            elif line.startswith('//') or '/*' in line:
                count_line(line)
                # A '/*' without its matching '*/' opens a block comment.
                if '/*' in line and '*/' not in line:
                    in_comment = True
    return keyword_counts
/**
 * This file allows to quickly build the project from source files:
 * it minifies scripts and styles, generates the documentation, and
 * reports an aggregated status (also via the process exit code).
 */
//Dependencies
const fs = require("fs")
const path = require("path")
const spawn = require("child_process").spawnSync
//Paths taken from the package.json "config" section
const src = path.join(__dirname, process.env.npm_package_config_jsdoc_source)
const out = path.join(__dirname, process.env.npm_package_config_jsdoc_output)
const config = path.join(__dirname, process.env.npm_package_config_jsdoc_config)
const readme = path.join(__dirname, process.env.npm_package_config_jsdoc_readme)
//Number of failed build steps
let exit = 0
//Title (clear the screen, then print a framed project name)
console.log("\x1b[2J")
console.log("+-------------------------------+")
console.log(`| ${(process.env.npm_package_config_project_name+" ".repeat(29)).substr(0, 29)} |`)
console.log("+-------------------------------+")
//Scripts
console.log("Scripts minification :")
exit += execute("Babili", "../../../node_modules/.bin/babili", [path.join(__dirname, "static/js/scripts.js"), "-o", path.join(__dirname, "static/js/scripts.min.js")])
//Style
console.log("Styles minification :")
exit += execute("Less pre-processor", "../../../node_modules/.bin/lessc", [path.join(__dirname, "static/css/styles.less"), path.join(__dirname, "static/css/styles.min.css")])
//Generating documentation
console.log("Generating documentation :")
exit += execute("JSDoc 3", "../../../node_modules/.bin/jsdoc", [src, "-c", config, "-d", out, "-R", readme, "-t", path.join(__dirname), "-a", "all"])
/**
 * Runs an external build tool synchronously and reports its status.
 * @param {string} name - Human readable tool name (used in error messages).
 * @param {string} bin - Path of the executable, relative to this file.
 * @param {string[]} args - Command line arguments.
 * @returns {number} 0 on success, 1 on failure.
 */
function execute(name, bin, args) {
    try {
        //Check installation
        bin = bin.split("/"), bin.unshift(__dirname)
        let pckg = path.join.apply(null, bin)
        if (!fs.existsSync(pckg)) { throw new Error(`${name} isn't installed`) }
        //Execute command
        let c = spawn(pckg, args, {shell:true})
        //Output (any stderr output is treated as a failure)
        if (c.stderr.length) { throw new Error(c.stderr) }
        console.log(" \x1b[32m%s\x1b[0m", "Success")
        return 0
    } catch (e) {
        console.log(" \x1b[31m%s\x1b[0m", e)
        return 1
    }
}
//Return
console.log("General status :")
if (exit) { console.log(" \x1b[31m%s\x1b[0m", `${exit} error${exit > 1 ? "s" : ""} occurred :(`) } else { console.log(" \x1b[32m%s\x1b[0m", "Success :)") }
//Propagate failures to the caller (e.g. npm / CI) through the exit code.
process.exitCode = exit ? 1 : 0
|
package main

import (
	"fmt"
	"os/exec"
)

// main pings www.example.com five times and prints the command output.
func main() {
	// BUG FIX: "-c" and "5" must be separate argv entries; as the single
	// string "-c 5" ping receives one malformed argument and fails.
	out, err := exec.Command("ping", "-c", "5", "www.example.com").Output()
	if err != nil {
		fmt.Println(err)
	}
	fmt.Println(string(out))
}
/**
 * Cancels the download-and-launch process without any GUI feedback.
 * GUI-aware callers should use the overload taking a
 * {@code HidableUpdateProgressDialog} instead.
 */
public void cancelDownloadAndLaunch() {
    // Implementation to cancel the download and launch process without GUI representation
    // This method is called when cancelDownloadAndLaunch() is invoked without any parameters
    // NOTE(review): body is currently a stub -- confirm the cancellation logic lives elsewhere.
}
/**
 * Cancels the download and launch process and updates the provided GUI to reflect the cancellation.
 *
 * @param gui The {@link HidableUpdateProgressDialog} that represents the GUI for the download and launch process.
 */
public void cancelDownloadAndLaunch(HidableUpdateProgressDialog gui) {
    // Implementation to cancel the download and launch process and update the provided GUI
    // This method is called when cancelDownloadAndLaunch(gui) is invoked with a parameter of type HidableUpdateProgressDialog
    // NOTE(review): body is currently a stub -- confirm the cancellation logic lives elsewhere.
}
#!/usr/bin/env bash
PROJECT_NAME="template-flask-react_test"
DOCKER_COMPOSE_FILE="docker-compose.test.yml"
# Run the test stack; --exit-code-from makes `up` exit with the status of
# the api container.
# BUG FIX: the original had a trailing backslash after "--exit-code-from api",
# which turned the next line (code=$?) into another argument of
# `docker-compose up`, so the real exit status was never captured.
docker-compose \
    -f "$DOCKER_COMPOSE_FILE" \
    -p "$PROJECT_NAME" \
    up \
    --abort-on-container-exit \
    --exit-code-from api
code=$?
# Tear the stack down regardless of the test outcome.
docker-compose \
    -f "$DOCKER_COMPOSE_FILE" \
    -p "$PROJECT_NAME" \
    down
# Propagate the test result to the caller (e.g. CI).
exit $code
|
import tensorflow as tf
class IdentityLayer(tf.keras.layers.Layer):
    """Keras layer that passes its float32 input through unchanged."""

    def __init__(self, **kwargs):
        super(IdentityLayer, self).__init__(**kwargs)

    def call(self, x):
        """Return ``tf.identity(x)``.

        BUG FIX: Keras layers must override ``call`` (invoked by the base
        class's ``__call__``), not ``__call__`` itself -- overriding
        ``__call__`` bypassed the base layer's build/naming machinery.
        Raises AssertionError for non-float32 input (unchanged behavior).
        """
        assert x.dtype == tf.float32, "Input data type must be tf.float32"
        return tf.identity(x)

    def get_config(self):
        # Include the base-layer config (name, dtype, ...) so the layer can
        # round-trip through serialization; this layer adds no extra options.
        return super(IdentityLayer, self).get_config()
import LaminarThemeOptions from "./LaminarThemeOptions";
// Default Laminar theme configuration.
const theme: LaminarThemeOptions = {
    // Color-mode settings: start in dark mode, follow the system preference.
    config: {
        initialColorMode: 'dark',
        useSystemColorMode: true
    },
    // Responsive SVG viewboxes with the number of history days each shows.
    // NOTE(review): `width` looks like a minimum container width threshold
    // (the final entry with width 0 acting as fallback) -- confirm in consumer.
    viewboxes: [
        {width: 1200, box: "0 0 448 40", days: 90},
        {width: 900, box: "0 0 298 40", days: 60},
        {width: 0, box: "0 0 148 40", days: 30}
    ],
    // Color name used to render each service-status severity.
    severityColors: {
        operational: 'green',
        partial: 'orange',
        minor: 'yellow',
        major: 'red',
    },
    // Downtime thresholds mapped to severities; units presumably seconds
    // (0, 1, 300, 1800) -- TODO confirm against the code consuming this map.
    downtimeSeverities: {
        0: 'operational',
        1: 'partial',
        300: 'minor',
        1800: 'major'
    },
    // Layout size overrides.
    sizes: {
        container: {
            xl: '1140px'
        }
    }
}
export default theme;
'use strict';
// Register the AngularJS `tryNav` module with a dependency on `post`.
angular.module('tryNav', ['post']);
# Fit a simple linear regression of loyalty-program points on amount spent.
lmfit <- lm(Loyalty_program_points ~ Amount_spent, data=mydata)
# Show coefficient estimates, standard errors, R-squared and F-test.
summary(lmfit)
import React, { useState, useEffect } from 'react';
import axios from 'axios';
const Books = () => {
const [books, setBooks] = useState([]);
useEffect(() => {
axios.get('http://myapi.com/books')
.then(response => {
setBooks(response.data.books);
});
}, []);
return (
<ul>
{books.map(book => (
<li key={book.id}>
<img src={book.coverImage} alt={book.title} />
<p>{book.title}</p>
<p>{book.author}</p>
</li>
))}
</ul>
);
};
export default Books; |
/*
*
*/
package net.community.chest.db.sql.convert;
import java.util.NoSuchElementException;
import net.community.chest.db.sql.ConnectionHoldability;
import net.community.chest.dom.AbstractXmlValueStringInstantiator;
/**
* <P>Copyright 2010 as per GPLv2</P>
*
* @author <NAME>.
* @since Feb 28, 2010 12:48:46 PM
*/
/**
 * Converts between {@link ConnectionHoldability} string names and their
 * integer JDBC holdability values.
 */
public class ConnectionHoldabilityValueInstantiator extends
        AbstractXmlValueStringInstantiator<Integer> {
    public ConnectionHoldabilityValueInstantiator ()
    {
        super(Integer.class);
    }
    /*
     * @see net.community.chest.convert.ValueStringInstantiator#convertInstance(java.lang.Object)
     */
    @Override
    public String convertInstance (Integer inst) throws Exception
    {
        if (inst == null)
            return null;

        final ConnectionHoldability holdability=ConnectionHoldability.fromValue(inst.intValue());
        if (holdability == null)
            throw new NoSuchElementException("convertInstance(" + inst + ") unknown connection holdability");

        return holdability.toString();
    }
    /*
     * @see net.community.chest.convert.ValueStringInstantiator#newInstance(java.lang.String)
     */
    @Override
    public Integer newInstance (String s) throws Exception
    {
        if ((s == null) || s.isEmpty())
            return null;

        final ConnectionHoldability holdability=ConnectionHoldability.fromString(s);
        if (holdability == null)
            throw new NoSuchElementException("newInstance(" + s + ") unknown connection holdability");

        return Integer.valueOf(holdability.getHoldability());
    }
    /** Shared, stateless default instance. */
    public static final ConnectionHoldabilityValueInstantiator DEFAULT=
        new ConnectionHoldabilityValueInstantiator();
}
|
import thenChrome from 'then-chrome';
// Swap the toolbar icon to the greyed-out variant and disable the browser
// action for the given tab.
const disableButton = function (tabId) {
  chrome.browserAction.setIcon({
    path: {
      19: '/icons/19_disable.png',
      38: '/icons/19_disable@2x.png',
    },
  });
  chrome.browserAction.disable(tabId);
};
// Restore the normal toolbar icon and enable the browser action for the tab.
const enableButton = function (tabId) {
  chrome.browserAction.setIcon({
    path: {
      19: '/icons/19.png',
      38: '/icons/19@2x.png',
    },
  });
  chrome.browserAction.enable(tabId);
};
// Keep the button state in sync when the user switches tabs: only fully
// loaded http(s) pages get an enabled button.
chrome.tabs.onActivated.addListener(async (activeInfo) => {
  const tab = await thenChrome.tabs.get(activeInfo.tabId);
  if (tab.status === 'loading') {
    return disableButton(tab.id);
  }
  if (tab.url && tab.url.match(/^https?:/)) {
    enableButton(tab.id);
  } else {
    disableButton(tab.id);
  }
});
// When a tab finishes loading an http(s) page, inject the content script and
// stylesheet (at most once per page) and enable the button.
chrome.tabs.onUpdated.addListener(async (tabId, changeInfo) => {
  if (changeInfo.status === 'loading') {
    disableButton(tabId);
  } else if (changeInfo.status === 'complete') {
    const tab = await thenChrome.tabs.get(tabId);
    if (!tab.url) return;
    if (!tab.url.match(/^https?:/)) {
      // NOTE(review): location.href here is the background page's URL, not
      // the tab's -- tab.url was probably intended in this message.
      return console.error(
        'This Extension can run only on https? pages: ' + location.href
      );
    }
    // window.__embededGyazoContentJS is used as an "already injected" marker;
    // presumably set by content.js -- verify there.
    let loaded = [false];
    try {
      loaded = await thenChrome.tabs.executeScript(tabId, {
        code: 'window.__embededGyazoContentJS',
      });
    } catch (e) {
      // no-op: executeScript can fail on restricted pages
    }
    if (loaded[0]) return enableButton(tabId);
    await thenChrome.tabs.executeScript(tabId, {
      file: './content.js',
    });
    await thenChrome.tabs.insertCSS(tabId, {
      file: '/content.css',
    });
    enableButton(tabId);
  }
});
export { enableButton, disableButton };
|
package exptypes
import specs "github.com/opencontainers/image-spec/specs-go/v1"
// Metadata keys published by the image exporter.
const (
	ExporterImageConfigKey = "containerimage.config"
	ExporterInlineCache    = "containerimage.inlinecache"
	ExporterPlatformsKey   = "refs.platforms"
)

// Platforms wraps the list of Platform entries stored under
// ExporterPlatformsKey.
type Platforms struct {
	Platforms []Platform
}

// Platform pairs a ref ID with its OCI platform specification.
type Platform struct {
	ID       string
	Platform specs.Platform
}
|
#!/bin/bash
# Copyright (c) 2018 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Linux Build Script.

# Fail on any error.
set -e
# Display commands being run.
set -x

BUILD_ROOT=$PWD
SRC=$PWD/github/SPIRV-Tools
CONFIG=$1
COMPILER=$2

SKIP_TESTS="False"
BUILD_TYPE="Debug"

CMAKE_C_CXX_COMPILER=""
# Quote expansions so an empty/missing argument fails the comparison instead
# of producing a `[` syntax error.
if [ "$COMPILER" = "clang" ]
then
  PATH=/usr/lib/llvm-3.8/bin:$PATH
  CMAKE_C_CXX_COMPILER="-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++"
fi

# Possible configurations are:
# ASAN, COVERAGE, RELEASE, DEBUG, DEBUG_EXCEPTION, RELEASE_MINGW

if [ "$CONFIG" = "RELEASE" ] || [ "$CONFIG" = "RELEASE_MINGW" ]
then
  BUILD_TYPE="RelWithDebInfo"
fi

ADDITIONAL_CMAKE_FLAGS=""
if [ "$CONFIG" = "ASAN" ]
then
  # BUG FIX: the -D prefix was missing, so CMake treated the assignment as a
  # stray (nonexistent) path argument instead of defining the cache variable.
  ADDITIONAL_CMAKE_FLAGS="-DSPIRV_USE_SANITIZER=address"
  [ "$COMPILER" = "clang" ] || { echo "$CONFIG requires clang"; exit 1; }
elif [ "$CONFIG" = "COVERAGE" ]
then
  ADDITIONAL_CMAKE_FLAGS="-DENABLE_CODE_COVERAGE=ON"
  SKIP_TESTS="True"
elif [ "$CONFIG" = "DEBUG_EXCEPTION" ]
then
  ADDITIONAL_CMAKE_FLAGS="-DDISABLE_EXCEPTIONS=ON -DDISABLE_RTTI=ON"
elif [ "$CONFIG" = "RELEASE_MINGW" ]
then
  ADDITIONAL_CMAKE_FLAGS="-Dgtest_disable_pthreads=ON -DCMAKE_TOOLCHAIN_FILE=$SRC/cmake/linux-mingw-toolchain.cmake"
  SKIP_TESTS="True"
fi

# Get NINJA.
wget -q https://github.com/ninja-build/ninja/releases/download/v1.8.2/ninja-linux.zip
unzip -q ninja-linux.zip
export PATH="$PWD:$PATH"

cd $SRC
git clone --depth=1 https://github.com/KhronosGroup/SPIRV-Headers external/spirv-headers
git clone --depth=1 https://github.com/google/googletest external/googletest
git clone --depth=1 https://github.com/google/effcee external/effcee
git clone --depth=1 https://github.com/google/re2 external/re2
mkdir build && cd $SRC/build

# Invoke the build.
BUILD_SHA=${KOKORO_GITHUB_COMMIT:-$KOKORO_GITHUB_PULL_REQUEST_COMMIT}
echo $(date): Starting build...
cmake -DPYTHON_EXECUTABLE:FILEPATH=/usr/bin/python3 -GNinja -DCMAKE_INSTALL_PREFIX=$KOKORO_ARTIFACTS_DIR/install -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DRE2_BUILD_TESTING=OFF $ADDITIONAL_CMAKE_FLAGS $CMAKE_C_CXX_COMPILER ..

echo $(date): Build everything...
ninja
echo $(date): Build completed.

if [ "$CONFIG" = "COVERAGE" ]
then
  echo $(date): Check coverage...
  ninja report-coverage
  echo $(date): Check coverage completed.
fi

echo $(date): Starting ctest...
if [ "$SKIP_TESTS" = "False" ]
then
  ctest -j4 --output-on-failure --timeout 300
fi
echo $(date): ctest completed.

# Package the build.
ninja install
cd $KOKORO_ARTIFACTS_DIR
tar czf install.tgz install
|
#!/bin/bash
# Install pip and the Apache Beam SDK with GCP extras.
# -y keeps the install non-interactive; sudo matches the pip calls below
# (the original mixed an unprivileged apt-get with sudo pip).
sudo apt-get install -y python-pip
# Quote the requirement so "[gcp]" is not subject to shell globbing.
sudo pip install "apache-beam[gcp]" oauth2client==3.0.0
sudo pip install -U pip
<gh_stars>0
import { PROPERTY, DEFAULT_MANAGE_PREFIX, SYMBOL_FOR_FETCH_API } from '../constants/fetchType';
import { GET } from '../constants/methodType';
import { trimStart } from 'lodash';
const PROPERTY_URI = '/properties';

// Build a fetch-API action that loads properties matching `searchValues`.
// The SYMBOL_FOR_FETCH_API payload is picked up by the fetch middleware.
export const fetchProperty = (searchValues) => ({
    [SYMBOL_FOR_FETCH_API]: {
        // Dispatched action types: request, success, failure.
        types: [
            PROPERTY[DEFAULT_MANAGE_PREFIX],
            PROPERTY[`${DEFAULT_MANAGE_PREFIX}_SUCCESS`],
            PROPERTY[`${DEFAULT_MANAGE_PREFIX}_FAIL`],
        ],
        // Normalize to exactly one leading slash.
        uri: `/${trimStart(PROPERTY_URI, '/')}`,
        method: 'get',
        body: { ...searchValues },
        onSuccess: () => console.log("onSuccess"),
        onFail: () => console.log("onFail"),
    },
});
|
<reponame>Space-ID/ens-app<filename>src/components/LanguageSwitcher/LanguageSwitcher.js
import React, { createRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { motion, AnimatePresence } from 'framer-motion'
import { useOnClickOutside } from 'components/hooks'
import RotatingSmallCaret from '../Icons/RotatingSmallCaret'
import './LanguageSwitcher.scss'
import LanguageEarthIcon from 'components/Icons/LanguageEarthIcon'
// Every language the UI can be switched to; `value` is the i18next language
// code and `label` the human-readable menu entry.
const LANGUAGES = [
  {
    value: 'en',
    label: 'English (EN)'
  },
  {
    value: 'cn',
    label: '简体中文 (CN)'
  },
  {
    value: 'ja',
    label: '日本語 (JA)'
  },
  {
    value: 'de',
    label: 'Deutsch (DE)'
  },
  {
    value: 'es',
    label: 'Español (ES)'
  },
  {
    value: 'fr',
    label: 'Français (FR)'
  },
  {
    value: 'ko',
    label: '한국어 (KO)'
  },
  {
    value: 'it',
    label: 'Italiano (IT)'
  },
  {
    value: 'pl',
    label: 'Polski (PL)'
  },
  {
    value: 'pt-BR',
    label: 'Português (BR)'
  },
  {
    value: 'ru',
    label: 'Pусский (RU)'
  },
  {
    value: 'vi',
    label: 'Tiếng Việt (VI)'
  }
]
// Look up a language entry by its code; undefined when unknown.
function getLang(lang) {
  return LANGUAGES.find(l => l.value === lang)
}
// Persist the chosen language code across sessions.
function saveLanguageToLocalStorage(value) {
  window.localStorage.setItem('language', value)
}
// Read the persisted language code (null when never saved).
function getLanguageFromLocalStorage() {
  return window.localStorage.getItem('language')
}
// Dropdown that lets the user pick the UI language. The selection is applied
// through i18next and persisted to localStorage; falls back to English.
export default function LanguageSwitcher() {
  const dropdownRef = createRef()
  const togglerRef = createRef()
  // Restore the saved language, defaulting to English when none was saved.
  const [languageSelected, setLanguageSelected] = useState(
    getLang(getLanguageFromLocalStorage()) ?? getLang('en')
  )
  const [showDropdown, setShowDropdown] = useState(false)
  const { i18n } = useTranslation()
  // Close the menu when clicking anywhere outside the toggler or the list.
  useOnClickOutside([dropdownRef, togglerRef], () => setShowDropdown(false))
  // Apply a selection: update state, persist it, switch i18next, close menu.
  function changeLanguage(language) {
    setLanguageSelected(language)
    saveLanguageToLocalStorage(language.value)
    i18n.changeLanguage(language.value)
    setShowDropdown(false)
  }
  return (
    <div className="relative">
      <div
        className="text-[#adbbcd] uppercase flex justify-enter h-full py-0 px-[6px] items-center hover:cursor-pointer"
        ref={togglerRef}
        onClick={() => setShowDropdown(show => !show)}
      >
        <span className="mr-0 text-[#30DB9E] font-semibold font-urbanist text-[16px] mr-1">
          {languageSelected.value}
        </span>
        <LanguageEarthIcon />
        {/* <RotatingSmallCaret
          start="top"
          rotated={showDropdown ? 1 : 0}
          highlight={1}
        /> */}
      </div>
      {showDropdown && (
        <AnimatePresence>
          <motion.ul
            className="w-[140px] list-none absolute bg-[#071A2F] bottom-[35px] right-0 mt-[10px] rounded-[12px] shadow-dropdown z-[2] max-h-[120px] overflow-y-auto dropdown-container border border-[#1EEFA4]"
            ref={dropdownRef}
            initial={{ opacity: 0, height: 0 }}
            animate={{ opacity: 1, height: 'auto' }}
            exit={{ opacity: 0, height: 0 }}
          >
            {LANGUAGES.map(language => {
              return (
                <li
                  className="text-[#30DB9E] text-[16px] font-urbanist px-4 py-3"
                  key={language.value}
                  onClick={() => changeLanguage(language)}
                >
                  {language.label}
                </li>
              )
            })}
          </motion.ul>
        </AnimatePresence>
      )}
    </div>
  )
}
|
import persons from '../models/person-model'
// Creates a person document and resolves with the stored record.
// Uses primitive `string`/`number` annotations instead of the original
// `String`/`Number` wrapper types (TypeScript best practice; primitives
// remain assignable, so callers are unaffected).
async function createPerson(
    name: string,
    position: string,
    company: string,
    bubbleLatitude: number,
    bubbleLongtitude: number
) {
    // `create` already returns a promise; the original's
    // `.then(data => data).catch(e => { throw e })` was a no-op.
    return persons.create({
        name,
        position,
        company,
        bubbleLatitude,
        bubbleLongtitude,
    })
}

// Resolves with every stored person.
async function getPersons() {
    return persons.find({})
}

// Resolves with the first person whose name matches, or null when absent.
async function getPerson(name: string) {
    return persons.findOne({ name })
}

export default {
    createPerson,
    getPersons,
    getPerson,
}
|
import argparse


def read_args(argv=None):
    """Parse command-line options for training an MLP on the petfinder dataset.

    Args:
        argv: Optional list of argument strings; ``None`` (the default) parses
            ``sys.argv[1:]``, preserving the original behavior while letting
            tests pass arguments explicitly.

    Returns:
        argparse.Namespace with ``batch_size``, ``epochs``, ``learning_rate``
        and ``hidden_units`` attributes.
    """
    parser = argparse.ArgumentParser(description='Training a MLP on the petfinder dataset')
    parser.add_argument('--batch_size', type=int, default=32, help='Batch size for training the MLP')
    parser.add_argument('--epochs', type=int, default=10, help='Number of epochs for training the MLP')
    parser.add_argument('--learning_rate', type=float, default=0.001, help='Learning rate for the MLP')
    parser.add_argument('--hidden_units', type=str, default='64,32', help='Number of units in each hidden layer, separated by commas')
    return parser.parse_args(argv)
<filename>src/model/test/SpaceInfiltrationFlowCoefficient_GTest.cpp
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#include "ModelFixture.hpp"
#include "../SpaceInfiltrationFlowCoefficient.hpp"
#include "../SpaceInfiltrationFlowCoefficient_Impl.hpp"
#include "../Space.hpp"
#include "../SpaceType.hpp"
#include "../Schedule.hpp"
#include "../ScheduleConstant.hpp"
#include "../ScheduleTypeLimits.hpp"
#include "../DefaultScheduleSet.hpp"
#include <utilities/idd/IddEnums.hxx>
using namespace openstudio;
using namespace openstudio::model;
// Exercises every getter/setter pair of SpaceInfiltrationFlowCoefficient and
// verifies that rejected (negative) values leave the stored value untouched.
TEST_F(ModelFixture, SpaceInfiltrationFlowCoefficient_GettersSetters) {
  Model m;
  SpaceInfiltrationFlowCoefficient spaceInfiltrationFlowCoefficient(m);
  spaceInfiltrationFlowCoefficient.setName("My SpaceInfiltrationFlowCoefficient");
  // Schedule Name: Object
  ScheduleConstant sch(m);
  EXPECT_FALSE(spaceInfiltrationFlowCoefficient.schedule());
  EXPECT_FALSE(sch.scheduleTypeLimits());
  EXPECT_TRUE(spaceInfiltrationFlowCoefficient.setSchedule(sch));
  // Assigning the schedule creates fractional [0, 1] continuous type limits.
  auto schTypeLim_ = sch.scheduleTypeLimits();
  ASSERT_TRUE(schTypeLim_);
  EXPECT_EQ(0.0, schTypeLim_->lowerLimitValue().get());
  EXPECT_EQ(1.0, schTypeLim_->upperLimitValue().get());
  EXPECT_EQ("Continuous", schTypeLim_->numericType().get());
  EXPECT_EQ(sch, spaceInfiltrationFlowCoefficient.schedule().get());
  spaceInfiltrationFlowCoefficient.resetSchedule();
  EXPECT_FALSE(spaceInfiltrationFlowCoefficient.schedule());
  // Flow Coefficient: Required Double
  EXPECT_TRUE(spaceInfiltrationFlowCoefficient.setFlowCoefficient(0.1));
  EXPECT_EQ(0.1, spaceInfiltrationFlowCoefficient.flowCoefficient());
  // Bad Value
  EXPECT_FALSE(spaceInfiltrationFlowCoefficient.setFlowCoefficient(-10.0));
  EXPECT_EQ(0.1, spaceInfiltrationFlowCoefficient.flowCoefficient());
  // Stack Coefficient: Required Double
  EXPECT_TRUE(spaceInfiltrationFlowCoefficient.setStackCoefficient(0.2));
  EXPECT_EQ(0.2, spaceInfiltrationFlowCoefficient.stackCoefficient());
  // Bad Value
  EXPECT_FALSE(spaceInfiltrationFlowCoefficient.setStackCoefficient(-10.0));
  EXPECT_EQ(0.2, spaceInfiltrationFlowCoefficient.stackCoefficient());
  // Pressure Exponent: Required Double
  EXPECT_TRUE(spaceInfiltrationFlowCoefficient.setPressureExponent(0.3));
  EXPECT_EQ(0.3, spaceInfiltrationFlowCoefficient.pressureExponent());
  // Bad Value
  EXPECT_FALSE(spaceInfiltrationFlowCoefficient.setPressureExponent(-10.0));
  EXPECT_EQ(0.3, spaceInfiltrationFlowCoefficient.pressureExponent());
  // Wind Coefficient: Required Double
  EXPECT_TRUE(spaceInfiltrationFlowCoefficient.setWindCoefficient(0.4));
  EXPECT_EQ(0.4, spaceInfiltrationFlowCoefficient.windCoefficient());
  // Bad Value
  EXPECT_FALSE(spaceInfiltrationFlowCoefficient.setWindCoefficient(-10.0));
  EXPECT_EQ(0.4, spaceInfiltrationFlowCoefficient.windCoefficient());
  // Shelter Factor: Required Double
  EXPECT_TRUE(spaceInfiltrationFlowCoefficient.setShelterFactor(0.5));
  EXPECT_EQ(0.5, spaceInfiltrationFlowCoefficient.shelterFactor());
  // Bad Value
  EXPECT_FALSE(spaceInfiltrationFlowCoefficient.setShelterFactor(-10.0));
  EXPECT_EQ(0.5, spaceInfiltrationFlowCoefficient.shelterFactor());
}
// Verifies that cloning creates independent objects and that clones can be
// freely re-assigned between space types.
TEST_F(ModelFixture, SpaceInfiltrationFlowCoefficient_Clone) {
  Model m;
  SpaceInfiltrationFlowCoefficient spaceInfiltrationFlowCoefficient(m);
  EXPECT_EQ(1u, m.numObjects());
  auto clone1 = spaceInfiltrationFlowCoefficient.clone(m).cast<SpaceInfiltrationFlowCoefficient>();
  EXPECT_EQ(2u, m.numObjects());
  auto clone2 = clone1.clone(m).cast<SpaceInfiltrationFlowCoefficient>();
  EXPECT_EQ(3u, m.numObjects());
  auto spaceType1 = SpaceType(m);
  auto success = clone1.setSpaceType(spaceType1);
  ASSERT_TRUE(success);
  auto spaceType2 = SpaceType(m);
  success = clone2.setSpaceType(spaceType2);
  ASSERT_TRUE(success);
  // Re-assigning clone2 to spaceType1 must also succeed.
  success = clone2.setSpaceType(spaceType1);
  ASSERT_TRUE(success);
  EXPECT_EQ(2, m.getObjectsByType(IddObjectType::OS_SpaceType).size());
  EXPECT_EQ(3, m.getObjectsByType(IddObjectType::OS_SpaceInfiltration_FlowCoefficient).size());
}
// Verifies attachment to a SpaceType and to a Space, and that the infiltration
// schedule falls back to the attached object's DefaultScheduleSet until a
// schedule is hard-assigned (and again after resetSchedule()).
TEST_F(ModelFixture, SpaceInfiltrationFlowCoefficient_SpaceSpaceType) {
  Model m;
  // Case 1: attached to a SpaceType.
  {
    SpaceInfiltrationFlowCoefficient i(m);
    EXPECT_FALSE(i.schedule());
    SpaceType spaceType(m);
    DefaultScheduleSet spaceTypeDefaultScheduleSet(m);
    EXPECT_TRUE(spaceType.setDefaultScheduleSet(spaceTypeDefaultScheduleSet));
    EXPECT_FALSE(spaceTypeDefaultScheduleSet.infiltrationSchedule());
    EXPECT_FALSE(spaceTypeDefaultScheduleSet.getDefaultSchedule(DefaultScheduleType::InfiltrationSchedule));
    EXPECT_FALSE(i.space());
    EXPECT_FALSE(i.spaceType());
    EXPECT_EQ(0, spaceType.spaceInfiltrationFlowCoefficients().size());
    EXPECT_TRUE(i.setSpaceType(spaceType));
    EXPECT_FALSE(i.space());
    EXPECT_TRUE(i.spaceType());
    ASSERT_EQ(1, spaceType.spaceInfiltrationFlowCoefficients().size());
    EXPECT_EQ(i, spaceType.spaceInfiltrationFlowCoefficients()[0]);
    EXPECT_FALSE(i.schedule());
    // Setting the default infiltration schedule makes it visible (defaulted).
    ScheduleConstant infilSch(m);
    EXPECT_TRUE(spaceTypeDefaultScheduleSet.setInfiltrationSchedule(infilSch));
    ASSERT_TRUE(i.schedule());
    EXPECT_EQ(infilSch, i.schedule().get());
    EXPECT_TRUE(i.isScheduleDefaulted());
    // A hard-assigned schedule overrides the default...
    ScheduleConstant hardAssignedSch(m);
    EXPECT_TRUE(i.setSchedule(hardAssignedSch));
    ASSERT_TRUE(i.schedule());
    EXPECT_EQ(hardAssignedSch, i.schedule().get());
    EXPECT_FALSE(i.isScheduleDefaulted());
    // ...and resetting falls back to the default again.
    i.resetSchedule();
    ASSERT_TRUE(i.schedule());
    EXPECT_EQ(infilSch, i.schedule().get());
    EXPECT_TRUE(i.isScheduleDefaulted());
  }
  // Case 2: attached to a Space (same fallback behavior).
  {
    SpaceInfiltrationFlowCoefficient i(m);
    EXPECT_FALSE(i.schedule());
    Space space(m);
    DefaultScheduleSet spaceDefaultScheduleSet(m);
    EXPECT_TRUE(space.setDefaultScheduleSet(spaceDefaultScheduleSet));
    EXPECT_FALSE(spaceDefaultScheduleSet.infiltrationSchedule());
    EXPECT_FALSE(spaceDefaultScheduleSet.getDefaultSchedule(DefaultScheduleType::InfiltrationSchedule));
    EXPECT_FALSE(i.space());
    EXPECT_FALSE(i.space());
    EXPECT_EQ(0, space.spaceInfiltrationFlowCoefficients().size());
    EXPECT_TRUE(i.setSpace(space));
    EXPECT_FALSE(i.spaceType());
    EXPECT_TRUE(i.space());
    ASSERT_EQ(1, space.spaceInfiltrationFlowCoefficients().size());
    EXPECT_EQ(i, space.spaceInfiltrationFlowCoefficients()[0]);
    EXPECT_FALSE(i.schedule());
    ScheduleConstant infilSch(m);
    EXPECT_TRUE(spaceDefaultScheduleSet.setInfiltrationSchedule(infilSch));
    ASSERT_TRUE(i.schedule());
    EXPECT_EQ(infilSch, i.schedule().get());
    EXPECT_TRUE(i.isScheduleDefaulted());
    ScheduleConstant hardAssignedSch(m);
    EXPECT_TRUE(i.setSchedule(hardAssignedSch));
    ASSERT_TRUE(i.schedule());
    EXPECT_EQ(hardAssignedSch, i.schedule().get());
    EXPECT_FALSE(i.isScheduleDefaulted());
    i.resetSchedule();
    ASSERT_TRUE(i.schedule());
    EXPECT_EQ(infilSch, i.schedule().get());
    EXPECT_TRUE(i.isScheduleDefaulted());
  }
}
|
<!DOCTYPE html>
<html>
<head>
<title>My Web Page</title>
<!-- Basic two-column layout: floated sidebar (15%) and main content (75%). -->
<style>
body {
  font-family: Arial, Helvetica, sans-serif;
  margin: 0;
}
.header {
  padding: 10px;
  font-size: 20px;
}
.sidebar {
  width: 15%;
  float: left;
  padding: 20px;
}
.main-content {
  padding: 20px;
  width: 75%;
  float: left;
}
</style>
</head>
<body>
<!-- Page banner -->
<div class="header">
<h1>My Web Page</h1>
</div>
<!-- Left navigation column -->
<div class="sidebar">
Sidebar content
</div>
<!-- Primary content column -->
<div class="main-content">
Main content goes here
</div>
</body>
</html>
/// Returns the color that appears most often in `colors`.
/// Ties are broken arbitrarily (dictionary iteration order), matching the
/// original implementation.
/// - Precondition: `colors` must not be empty (the original also trapped on
///   an empty array via `colors[0]`).
func mostFrequentColor(_ colors: [CGColor]) -> CGColor {
    // Tally the number of occurrences of each color.
    var occurrences: [CGColor: Int] = [:]
    for candidate in colors {
        occurrences[candidate, default: 0] += 1
    }
    // Scan the tally for the highest count.
    var winner = colors[0]
    var winningCount = 0
    for (candidate, count) in occurrences where count > winningCount {
        winner = candidate
        winningCount = count
    }
    return winner
}
#!/bin/bash
# This file contains utility functions which can be used in the development tools.
# NOTE: it is meant to be sourced by other dev-tool scripts, not executed directly.
# Do not continue execution if one of the commands fail
set -eo pipefail -o functrace
# The Port on which the Lunes CMS development server should be started
LUNES_CMS_PORT=8080
# Change to dev tools directory
cd "$(dirname "${BASH_SOURCE[0]}")"
# The absolute path to the dev tools directory
# shellcheck disable=SC2034
DEV_TOOL_DIR=$(pwd)
# Change to base directory
cd ..
# The absolute path to the base directory of the repository
BASE_DIR=$(pwd)
# The path to the package, relative to the repository root
PACKAGE_DIR_REL="lunes_cms"
# shellcheck disable=SC2034
PACKAGE_DIR="${BASE_DIR}/${PACKAGE_DIR_REL}"
# This function prints the given input lines in red color
# (bold red on stderr). Reads lines from stdin until EOF.
function print_error {
    while IFS= read -r line; do
        echo -e "\x1b[1;31m$line\x1b[0;39m" >&2
    done
}
# This function prints the given input lines in green color (stdout)
function print_success {
    while IFS= read -r line; do
        echo -e "\x1b[1;32m$line\x1b[0;39m"
    done
}
# This function prints the given input lines in orange color (stdout)
function print_warning {
    while IFS= read -r line; do
        echo -e "\x1b[1;33m$line\x1b[0;39m"
    done
}
# This function prints the given input lines in blue color (stdout)
function print_info {
    while IFS= read -r line; do
        echo -e "\x1b[1;34m$line\x1b[0;39m"
    done
}
# This function prints the given input lines in bold white (stdout)
function print_bold {
    while IFS= read -r line; do
        echo -e "\x1b[1m$line\x1b[0m"
    done
}
# This function prints the given prefix in the given color in front of the stdin lines. If no color is given, white (37) is used.
# This is useful for commands which run in the background to separate its output from other commands.
function print_prefix {
    # $1 = prefix label, $2 = ANSI colour code (defaults to 37, white).
    # Prepends "[label]" (bold, coloured, black background) to each stdin line.
    local label="$1" colour="${2:-37}"
    while IFS= read -r line; do
        echo -e "\x1b[1;${colour};40m[${label}]\x1b[0m $line"
    done
}
# This function prints the given input lines with a nice little border to separate it from the rest of the content.
# Pipe your content to this function.
function print_with_borders {
    # Frame the piped-in content with box-drawing characters; a blank line
    # follows the closing border to separate it from subsequent output.
    printf '%s\n' "┌──────────────────────────────────────"
    while IFS= read -r line; do
        printf '│ %s\n' "$line"
    done
    printf '%s\n\n' "└──────────────────────────────────────"
}
# This function applies different sed replacements to make sure the matched lines from grep are aligned and colored
# This function applies different sed replacements to make sure the matched lines from grep are aligned and colored
function format_grep_output {
    # Input is grep output with line numbers: "N:match" or "N-context".
    # Lines 1-5 pad 1–5 digit line numbers so columns align; the last two
    # expressions colour matched lines (":" separator) red and strip the
    # "-" separator from context lines.
    # The backtick expressions (`# ...`) are command substitutions of pure
    # comments — they expand to nothing and only serve as inline annotations
    # on continued lines.
    while read -r line; do
        echo "$line" | sed --regexp-extended \
            -e "s/^([0-9])([:-])(.*)/\1\2    \3/" `# Pad line numbers with 1 digit` \
            -e "s/^([0-9]{2})([:-])(.*)/\1\2   \3/" `# Pad line numbers with 2 digits` \
            -e "s/^([0-9]{3})([:-])(.*)/\1\2  \3/" `# Pad line numbers with 3 digits` \
            -e "s/^([0-9]{4})([:-])(.*)/\1\2 \3/" `# Pad line numbers with 4 digits` \
            -e "s/^([0-9]{5})([:-])(.*)/\1\2 \3/" `# Pad line numbers with 5 digits` \
            -e "s/^([0-9]+):(.*)/\x1b[1;31m\1\2\x1b[0;39m/" `# Make matched line red` \
            -e "s/^([0-9]+)-(.*)/\1\2/" `# Remove dash of unmatched line`
    done
}
# This function prints the major version of a string in the format XX.YY.ZZ
function major {
    # Everything before the first "." is the major version component.
    printf '%s\n' "$1" | cut -d. -f1
}
# This function prints the minor version of a string in the format XX.YY.ZZ
function minor {
    # The second "."-separated field is the minor version component.
    printf '%s\n' "$1" | cut -d. -f2
}
# This function activates the virtual environment and creates it if it doesn't exist yet
# This function activates the virtual environment and creates it if it doesn't exist yet
function activate_venv {
    # Guard var makes the function idempotent across nested helper calls.
    if [[ -z "$LUNES_VENV_ACTIVATED" ]]; then
        # Create virtual environment if not exists
        if [[ ! -f ".venv/bin/activate" ]]; then
            echo "Creating virtual environment..." | print_info
            python3 -m venv .venv
            echo "✔ Created virtual environment" | print_success
        fi
        # Activate virtual environment (modifies PATH for the current shell)
        # shellcheck disable=SC1091
        source .venv/bin/activate
        echo "✔ Activated virtual environment" | print_success
        LUNES_VENV_ACTIVATED=1
        export LUNES_VENV_ACTIVATED
    fi
}
# This function checks if the Lunes cms is installed
# This function checks if the Lunes cms is installed and configures the
# environment (debug mode / dummy secret / sqlite) for local development.
function require_installed {
    # Guard var makes repeated calls cheap within one shell session.
    if [[ -z "$LUNES_CMS_INSTALLED" ]]; then
        # Activate virtual environment
        activate_venv
        echo "Checking if Lunes CMS is installed..." | print_info
        # Check if lunes-cms-cli is available in virtual environment
        if [[ ! -x "$(command -v lunes-cms-cli)" ]]; then
            echo -e "The Lunes CMS is not installed. Please install it with:\n" | print_error
            echo -e "\t$(dirname "${BASH_SOURCE[0]}")/install.sh\n" | print_bold
            exit 1
        fi
        # Check if script is running in CircleCI context and set DEBUG=True if not
        if [[ -z "$CIRCLECI" ]] && [[ -z "$READTHEDOCS" ]]; then
            # Set debug mode
            LUNES_CMS_DEBUG=1
            export LUNES_CMS_DEBUG
            echo "Enabling debug mode..." | print_info
        else
            # Set dummy secret key (CI builds never serve real traffic)
            LUNES_CMS_SECRET_KEY="dummy"
            export LUNES_CMS_SECRET_KEY
            echo "Setting dummy secret key..." | print_info
        fi
        # Use sqlite as database backend for local development
        LUNES_CMS_DB_BACKEND="sqlite"
        export LUNES_CMS_DB_BACKEND
        echo "Enabling SQLite database for local development..." | print_info
        # Check if lunes-cms-cli can be started
        if ! lunes-cms-cli > /dev/null; then
            # Fix: original message read "The Lunes CMS is could not be started"
            echo -e "The Lunes CMS could not be started due to the above error. Please install it again with:\n" | print_error
            echo -e "\t$(dirname "${BASH_SOURCE[0]}")/install.sh\n" | print_bold
            exit 1
        fi
        echo "✔ Lunes CMS is installed" | print_success
        LUNES_CMS_INSTALLED=1
        export LUNES_CMS_INSTALLED
    fi
}
# This function migrates the database
# This function migrates the database
function migrate_database {
    # Guard var: run at most once per shell session.
    if [[ -z "$LUNES_DATABASE_MIGRATED" ]]; then
        require_installed
        echo "Migrating database..." | print_info
        # Generate migration files
        lunes-cms-cli makemigrations
        # Execute migrations
        lunes-cms-cli migrate
        echo "✔ Finished database migrations" | print_success
        LUNES_DATABASE_MIGRATED=1
        export LUNES_DATABASE_MIGRATED
    fi
}
# This function shows a success message once the Lunes development server is running
function listen_for_devserver {
    # Poll the dev-server port every 100ms until it accepts TCP connections,
    # then print a success banner.
    while ! nc -z localhost "$LUNES_CMS_PORT"; do sleep 0.1; done
    echo "✔ Started Lunes CMS at http://localhost:${LUNES_CMS_PORT}" | print_success
}
|
def bi_r(c, value):
    # Assume the implementation of bi_r is provided elsewhere
    # NOTE(review): presumably a bisect_right-style count of elements in `c`
    # that are <= value — confirm against the real implementation before use.
    pass
def count_combinations(x, y, z, a, b, c, border, k):
    """Return True when the number of qualifying (a[i], b[j]) pairs reaches k.

    For each of the x*y index pairs, ``z - bi_r(c, border - a[i] - b[j])``
    combinations are counted.
    """
    total = sum(
        z - bi_r(c, border - a[i] - b[j])
        for i in range(x)
        for j in range(y)
    )
    return total >= k
<gh_stars>1-10
package com.prisma.messagebus.queue.rabbit
import com.prisma.errors.ErrorReporter
import com.prisma.messagebus.Conversions.{ByteMarshaller, ByteUnmarshaller}
import com.prisma.messagebus.QueueConsumer.ConsumeFn
import com.prisma.messagebus.{ConsumerRef, Queue}
import com.prisma.messagebus.queue.{BackoffStrategy, LinearBackoff}
import com.prisma.messagebus.utils.RabbitUtils
import com.prisma.rabbit.Consumer
import com.prisma.rabbit.Import.{Exchange, Queue => RMQueue}
import scala.concurrent.Future
import scala.concurrent.duration._
/**
 * Rabbit-backed [[Queue]] implementation: declaring an instance eagerly
 * declares the exchange and wires up one publisher and one consumer on it.
 */
case class RabbitQueue[T](
    amqpUri: String,
    exchangeName: String,
    backoff: BackoffStrategy,
    durableExchange: Boolean = false,
    exchangeConcurrency: Int = 1,
    workerConcurrency: Int = 1
)(
    implicit reporter: ErrorReporter,
    marshaller: ByteMarshaller[T],
    unmarshaller: ByteUnmarshaller[T]
) extends Queue[T] {
  // Shared channel/exchange used by both publisher and consumer below.
  val exchange: Exchange                 = RabbitUtils.declareExchange(amqpUri, exchangeName, exchangeConcurrency, durableExchange)
  val publisher: RabbitQueuePublisher[T] = RabbitQueuePublisher[T](exchange)
  val consumer: RabbitQueueConsumer[T]   = RabbitQueueConsumer[T](exchangeName, exchange, backoff, workerConcurrency)

  def publish(msg: T): Unit = publisher.publish(msg)

  // Shutdown order matters: stop consuming, stop publishing, then close the
  // underlying channel shared by both.
  override def shutdown: Unit = {
    consumer.shutdown
    publisher.shutdown
    exchange.channel.close()
  }

  override def withConsumer(fn: ConsumeFn[T]): RabbitConsumerRef = consumer.withConsumer(fn)
}
/** Handle over a set of Rabbit consumers; `stop` unbinds each, logging (not
  * failing) when an individual unsubscribe does not succeed. */
case class RabbitConsumerRef(consumers: Seq[Consumer]) extends ConsumerRef {
  override def stop: Unit = consumers.foreach { consumer =>
    consumer.unsubscribe.getOrElse(println(s"Warn: Unable to unbind consumer $consumer"))
  }
}
case class RabbitQueuesRef(mainQ: RMQueue, errorQ: RMQueue)
/**
* Collection of convenience standalone initializers for Rabbit-based queueing
*/
/**
 * Collection of convenience standalone initializers for Rabbit-based queueing.
 * Each factory declares the exchange itself and registers an onShutdown hook
 * that closes the channel it opened.
 */
object RabbitQueue {

  // Standalone publisher on a freshly declared exchange.
  def publisher[T](
      amqpUri: String,
      exchangeName: String,
      concurrency: Int = 1,
      durable: Boolean = false
  )(implicit reporter: ErrorReporter, marshaller: ByteMarshaller[T]): RabbitQueuePublisher[T] = {
    val exchange = RabbitUtils.declareExchange(amqpUri, exchangeName, concurrency, durable)
    RabbitQueuePublisher[T](exchange, onShutdown = () => {
      Future.fromTry(exchange.channel.close())
    })
  }

  // Standalone consumer with the default linear backoff unless overridden.
  def consumer[T](
      amqpUri: String,
      exchangeName: String,
      exchangeConcurrency: Int = 1,
      workerConcurrency: Int = 1,
      durableExchange: Boolean = false,
      backoff: BackoffStrategy = LinearBackoff(5.seconds)
  )(implicit reporter: ErrorReporter, unmarshaller: ByteUnmarshaller[T]): RabbitQueueConsumer[T] = {
    val exchange = RabbitUtils.declareExchange(amqpUri, exchangeName, exchangeConcurrency, durableExchange)
    RabbitQueueConsumer[T](exchangeName, exchange, backoff, workerConcurrency, onShutdown = () => exchange.channel.close())
  }

  // Consumer bound to an explicitly named queue (no RI-managed naming).
  def plainConsumer[T](
      amqpUri: String,
      queueName: String,
      exchangeName: String,
      exchangeConcurrency: Int = 1,
      workerConcurrency: Int = 1,
      autoDelete: Boolean = true,
      durableExchange: Boolean = false,
      backoff: BackoffStrategy = LinearBackoff(5.seconds)
  )(implicit reporter: ErrorReporter, unmarshaller: ByteUnmarshaller[T]): RabbitPlainQueueConsumer[T] = {
    val exchange = RabbitUtils.declareExchange(amqpUri, exchangeName, exchangeConcurrency, durableExchange)
    RabbitPlainQueueConsumer[T](queueName, exchange, backoff, autoDelete = autoDelete, onShutdown = () => exchange.channel.close())
  }
}
|
package com.blackti.rh.services;
import com.blackti.rh.entities.Trabalhador;
import com.blackti.rh.repositories.TrabalhadorRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Optional;
@Service
@Slf4j
public class TrabalhadorService {

    // Constructor injection (instead of field injection) makes the dependency
    // explicit, lets the field be final, and simplifies unit testing.
    private final TrabalhadorRepository trabalhadorRepository;

    @Autowired
    public TrabalhadorService(TrabalhadorRepository trabalhadorRepository) {
        this.trabalhadorRepository = trabalhadorRepository;
    }

    /**
     * Returns one page of workers.
     *
     * @param pageable page number / size / sort requested by the caller
     */
    public Page<Trabalhador> findAll(Pageable pageable) {
        return trabalhadorRepository.findAll(pageable);
    }

    /**
     * Looks up a single worker by primary key.
     *
     * @param id database id
     * @return the worker, or empty when no row matches
     */
    public Optional<Trabalhador> findById(Long id) {
        return trabalhadorRepository.findById(id);
    }
}
|
<reponame>choryang/cb-spider
// Proof of Concepts of CB-Spider.
// The CB-Spider is a sub-Framework of the Cloud-Barista Multi-Cloud Project.
// The CB-Spider Mission is to connect all the clouds with a single interface.
//
// * Cloud-Barista: https://github.com/cloud-barista
//
// by <EMAIL>, 2021.05.04
package connect
import (
"github.com/sirupsen/logrus"
cblog "github.com/cloud-barista/cb-log"
trs "github.com/cloud-barista/cb-spider/cloud-control-manager/cloud-driver/drivers/tencent/resources"
idrv "github.com/cloud-barista/cb-spider/cloud-control-manager/cloud-driver/interfaces"
irs "github.com/cloud-barista/cb-spider/cloud-control-manager/cloud-driver/interfaces/resources"
//"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common"
//"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/errors"
//"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/profile"
cvm "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/cvm/v20170312"
vpc "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/vpc/v20170312"
"errors"
)
// TencentCloudConnection bundles the per-region Tencent SDK clients that the
// resource handlers below are built from. NOTE(review): several fields share
// the *cvm.Client type; whether they hold distinct client instances is decided
// by whoever constructs this struct — confirm at the construction site.
type TencentCloudConnection struct {
	Region         idrv.RegionInfo
	VNetworkClient *vpc.Client
	VMClient       *cvm.Client
	KeyPairClient  *cvm.Client
	ImageClient    *cvm.Client
	SecurityClient *vpc.Client
	VmSpecClient   *cvm.Client

	//VNicClient     *cvm.Client
	//PublicIPClient *cvm.Client
}
// cblogger is the package-wide logger used by every handler factory below.
var cblogger *logrus.Logger

func init() {
	// cblog is a global variable.
	cblogger = cblog.GetLogger("CB-SPIDER TencentCloudConnection")
}
// CreateKeyPairHandler returns a keypair handler bound to this connection's
// region and CVM keypair client.
func (cloudConn *TencentCloudConnection) CreateKeyPairHandler() (irs.KeyPairHandler, error) {
	cblogger.Info("Start CreateKeyPairHandler()")
	keyPairHandler := trs.TencentKeyPairHandler{cloudConn.Region, cloudConn.KeyPairClient}
	return &keyPairHandler, nil
}
// CreateVMHandler returns a VM handler bound to this connection's region and
// CVM client.
func (cloudConn *TencentCloudConnection) CreateVMHandler() (irs.VMHandler, error) {
	cblogger.Info("Start CreateVMHandler()")
	vmHandler := trs.TencentVMHandler{cloudConn.Region, cloudConn.VMClient}
	return &vmHandler, nil
}
// IsConnected always reports true: the Tencent SDK clients are stateless and
// no liveness probe is performed here.
func (cloudConn *TencentCloudConnection) IsConnected() (bool, error) {
	return true, nil
}
// Close is a no-op: the underlying SDK clients hold no resources that need
// explicit release in this driver.
func (cloudConn *TencentCloudConnection) Close() error {
	return nil
}
// CreateVPCHandler returns a VPC handler bound to this connection's region and
// VPC client.
func (cloudConn *TencentCloudConnection) CreateVPCHandler() (irs.VPCHandler, error) {
	cblogger.Info("Start")
	handler := trs.TencentVPCHandler{cloudConn.Region, cloudConn.VNetworkClient}
	return &handler, nil
}
// CreateImageHandler returns an image handler bound to this connection's
// region and image client.
func (cloudConn *TencentCloudConnection) CreateImageHandler() (irs.ImageHandler, error) {
	cblogger.Info("Start")
	handler := trs.TencentImageHandler{cloudConn.Region, cloudConn.ImageClient}
	return &handler, nil
}
// CreateSecurityHandler returns a security-group handler bound to this
// connection's region and security (VPC) client.
func (cloudConn *TencentCloudConnection) CreateSecurityHandler() (irs.SecurityHandler, error) {
	cblogger.Info("Start")
	handler := trs.TencentSecurityHandler{cloudConn.Region, cloudConn.SecurityClient}
	return &handler, nil
}
// CreateVMSpecHandler returns a VM-spec handler bound to this connection's
// region and VM-spec client.
func (cloudConn *TencentCloudConnection) CreateVMSpecHandler() (irs.VMSpecHandler, error) {
	cblogger.Info("Start")
	handler := trs.TencentVmSpecHandler{cloudConn.Region, cloudConn.VmSpecClient}
	return &handler, nil
}
/*
func (cloudConn *TencentCloudConnection) CreateVNicHandler() (irs.VNicHandler, error) {
cblogger.Info("Start")
handler := trs.TencentVNicHandler{cloudConn.Region, cloudConn.VNicClient}
return &handler, nil
}
func (cloudConn *TencentCloudConnection) CreatePublicIPHandler() (irs.PublicIPHandler, error) {
cblogger.Info("Start")
handler := trs.TencentPublicIPHandler{cloudConn.Region, cloudConn.PublicIPClient}
return &handler, nil
}
*/
// CreateNLBHandler is not implemented for the Tencent driver yet (WIP).
func (cloudConn *TencentCloudConnection) CreateNLBHandler() (irs.NLBHandler, error) {
	return nil, errors.New("Tencent Cloud Driver NLB: WIP")
}
|
#!/bin/bash
# Fetches and install Spark and its dependencies. To be invoked by the Dockerfile
# echo commands to the terminal output
set -ex
# Install JDK
# NOTE(review): --force-yes is deprecated in newer apt versions — confirm the
# base image's apt still accepts it before upgrading the image.
apt-get update -y && \
    apt-get install -y software-properties-common && \
    add-apt-repository ppa:openjdk-r/ppa && \
    apt-get update -y && \
    apt-get install -y --force-yes ca-certificates-java && \
    apt-get install -y --force-yes openjdk-8-jdk && \
    apt-get install -y wget && \
    update-java-alternatives -s java-1.8.0-openjdk-amd64 && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*
mkdir -p /opt/spark
mkdir -p /opt/spark/work-dir
touch /opt/spark/RELEASE
# Fetch Spark Distribution with PySpark K8 support
wget https://archive.apache.org/dist/spark/spark-2.4.3/spark-2.4.3-bin-hadoop2.7.tgz -O spark-dist.tgz
# sha256sum --check fails the script (set -e) on a digest mismatch
echo '80a4c564ceff0d9aff82b7df610b1d34e777b45042e21e2d41f3e497bb1fa5d8 spark-dist.tgz' | sha256sum --check
mkdir -p spark-dist
tar -xvf spark-dist.tgz -C spark-dist --strip-components 1
#Copy over required files
cp -rf spark-dist/jars /opt/spark/jars
cp -rf spark-dist/examples /opt/spark/examples
cp -rf spark-dist/python /opt/spark/python
cp -rf spark-dist/bin /opt/spark/bin
cp -rf spark-dist/sbin /opt/spark/sbin
cp -rf spark-dist/data /opt/spark/data
rm -rf spark-dist.tgz
rm -rf spark-dist
# Fetch Hadoop Distribution with AWS Support
# (plain http download, but integrity is still enforced by the sha256 check below)
wget http://apache.mirrors.tds.net/hadoop/common/hadoop-2.7.7/hadoop-2.7.7.tar.gz -O hadoop-dist.tgz
echo 'd129d08a2c9dafec32855a376cbd2ab90c6a42790898cabbac6be4d29f9c2026 hadoop-dist.tgz' | sha256sum --check
mkdir -p hadoop-dist
tar -xvf hadoop-dist.tgz -C hadoop-dist --strip-components 1
cp -rf hadoop-dist/share/hadoop/tools/lib/hadoop-aws-2.7.7.jar /opt/spark/jars
cp -rf hadoop-dist/share/hadoop/tools/lib/aws-java-sdk-1.7.4.jar /opt/spark/jars
rm -rf hadoop-dist.tgz
rm -rf hadoop-dist
# Patch latest k8sclient for https://issues.apache.org/jira/browse/SPARK-28921. Ref: https://github.com/apache/spark/pull/25640/
rm /opt/spark/jars/kubernetes-client-4.1.2.jar
rm /opt/spark/jars/kubernetes-model-4.1.2.jar
rm /opt/spark/jars/kubernetes-model-common-4.1.2.jar
wget https://repo1.maven.org/maven2/io/fabric8/kubernetes-client/4.4.2/kubernetes-client-4.4.2.jar -P /opt/spark/jars
wget https://repo1.maven.org/maven2/io/fabric8/kubernetes-model/4.4.2/kubernetes-model-4.4.2.jar -P /opt/spark/jars
wget https://repo1.maven.org/maven2/io/fabric8/kubernetes-model-common/4.4.2/kubernetes-model-common-4.4.2.jar -P /opt/spark/jars
|
const withBundleAnalyzer = require('@next/bundle-analyzer')({
enabled: process.env.ANALYZE === 'true',
});
module.exports = withBundleAnalyzer({
experimental: { esmExternals: true },
eslint: {
ignoreDuringBuilds: true,
},
async redirects() {
return [
{
source: '/admin',
destination: 'https://random-studio.admin.datocms.com',
permanent: false,
},
{
source: '/newsletter',
destination: 'http://eepurl.com/gebngv',
permanent: false,
},
{
source: '/chloe',
destination:
'https://random.studio/projects/a-ss21-collection-experience-for-chloe',
permanent: false,
},
{
source: '/nicetomeetyou',
destination: '/',
permanent: false,
},
{
source: '/projects/NikeLab-ACG',
destination: '/projects/nikelab-acg',
permanent: false,
},
{
source: '/projects/NikeLab-Tech-Pack',
destination: '/projects/nikelab-tech-pack',
permanent: false,
},
];
},
});
|
<gh_stars>0
import {Component, OnInit} from '@angular/core';
import {ActivatedRoute, Router} from '@angular/router';
@Component({
    selector: 'yv-search-field',
    template: `
    <div class="input-group">
      <input type="text" class="form-control" placeholder="Search" [(ngModel)]="query" (keydown.enter)="search()">
      <div class="input-group-append">
        <button class="btn btn-danger" type="button" (click)="search()">
          <img width="24px" src="assets/img/search.svg">
        </button>
      </div>
    </div>
  `,
    styles: []
})
// Search box that navigates to /viewer/search/<query> on Enter or button click.
export class SearchFieldComponent implements OnInit {
    // Bound to the input via ngModel; pre-filled from the active route below.
    protected query: string;

    constructor(
        private router: Router,
        private route: ActivatedRoute
    ) {}

    ngOnInit() {
        // Seed the field with the :query param of the sibling primary-outlet
        // route, so the box reflects the search the user is currently viewing.
        // NOTE(review): assumes a primary child route always exists — confirm,
        // otherwise .find() returns undefined and this throws.
        this.query = this.route.parent.snapshot.children
            .find(child => child.outlet === 'primary')
            .params['query'];
    }

    search() {
        // Ignore empty submissions.
        if (!this.query) {
            return;
        }
        this.router.navigate(['viewer', 'search', this.query]);
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.issueClosed = void 0;
var issueClosed = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill-rule": "evenodd",
"d": "M7 10h2v2H7v-2zm2-6H7v5h2V4zm1.5 1.5l-1 1L12 9l4-4.5-1-1L12 7l-1.5-1.5zM8 13.7A5.71 5.71 0 012.3 8c0-3.14 2.56-5.7 5.7-5.7 1.83 0 3.45.88 4.5 2.2l.92-.92A6.947 6.947 0 008 1C4.14 1 1 4.14 1 8s3.14 7 7 7 7-3.14 7-7l-1.52 1.52c-.66 2.41-2.86 4.19-5.48 4.19v-.01z"
},
"children": []
}],
"attribs": {}
};
exports.issueClosed = issueClosed; |
package doodle
package syntax
import doodle.core.Normalized
import org.scalatest._
import org.scalatest.prop.Checkers
// Checks the `.normalized` syntax extension on numeric literals.
// NOTE(review): 60.normalized and 1.normalized both mapping to MaxValue
// suggests values above 1 are clipped to the maximum — confirm against the
// Normalized implementation.
class NormalizedSpec extends FlatSpec with Matchers with Checkers {
  "syntax" should "construct expected normalizeds" in {
    60.normalized should ===(Normalized.MaxValue)
    1.normalized should ===(Normalized.MaxValue)
    0.5.normalized should ===(Normalized.clip(0.5))
  }
}
|
(function() {
'use strict';
angular
.module('sentryApp')
.controller('PermissionDeleteController',PermissionDeleteController);
PermissionDeleteController.$inject = ['$uibModalInstance', 'entity', 'Permission'];
function PermissionDeleteController($uibModalInstance, entity, Permission) {
var vm = this;
vm.permission = entity;
vm.clear = clear;
vm.confirmDelete = confirmDelete;
function clear () {
$uibModalInstance.dismiss('cancel');
}
function confirmDelete (id) {
Permission.delete({id: id},
function () {
$uibModalInstance.close(true);
});
}
}
})();
|
from django.conf.urls import url
from .views import manage_tokens, create_token, manage_token, add_restriction
# URL routes for the token-management views.
# Fix: each regex is now anchored with "$". Without it, e.g. r'^tokens/'
# also matched '/tokens/<anything>', so requests meant for the later,
# more specific patterns could be swallowed by an earlier one.
urlpatterns = [
    url(r'^tokens/$', manage_tokens, name='manage_tokens'),
    url(r'^create-token/$', create_token, name='create_token'),
    url(r'^manage-token/(?P<token_id>[-\w]+)$', manage_token, name='manage_token'),
    url(r'^add-restriction/(?P<token_id>[-\w]+)$', add_restriction, name='add_restriction'),
]
|
import jwt
import bcrypt
class AuthenticationApi:
    """Verifies username/password credentials and issues signed JWTs."""

    def __init__(self, user_repo):
        # user_repo must expose find_by_username(username) -> user-or-None,
        # where user has an `id` and a bcrypt-hashed `password` (bytes).
        self.user_repo = user_repo

    def authenticate(self, username: str, password: str) -> str:
        """Return a JWT for the user, or raise ValueError on bad credentials.

        The same message is raised for "unknown user" and "wrong password"
        so callers cannot probe which usernames exist.
        """
        user = self.user_repo.find_by_username(username)
        if user is None:
            raise ValueError('Invalid username or password')
        if not bcrypt.checkpw(password.encode('utf-8'), user.password):
            raise ValueError('Invalid username or password')
        # SECURITY: the signing key is hard-coded; move it to configuration
        # or an environment variable before production use.
        token = jwt.encode({'sub': user.id}, 'secret', algorithm='HS256')
        # Fix: PyJWT 1.x returns bytes, PyJWT >= 2.0 returns str; the original
        # unconditional .decode('utf-8') raises AttributeError under 2.x.
        return token.decode('utf-8') if isinstance(token, bytes) else token
package com.jvcdp;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import com.jvcdp.controller.HomeController;
public class BlogpostAppTest {

    /**
     * home() must return the fixed home-page greeting.
     */
    @Test
    public void testApp() {
        HomeController hc = new HomeController();
        String result = hc.home();
        // Fix: JUnit's assertEquals takes (expected, actual); the original
        // reversed them, which yields misleading failure messages.
        assertEquals("Blog Post App, home page!", result);
    }
}
|
package com.createchance.imageeditordemo;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.PopupWindow;
/**
* Edit save done window.
*
* @author createchance
* @date 2019/1/5
*/
public class EditSaveDoneWindow extends PopupWindow implements View.OnClickListener {

    private static final String TAG = "EditSaveDoneWindow";

    private Context mContext;
    private View mRootView;
    // Callback for the user's choice; may be null, in which case share/exit
    // taps only dismiss (or do nothing).
    private EditSaveDoneListener mListener;

    /**
     * Builds the bottom popup shown after an edit has been saved.
     *
     * @param context  context used to inflate the layout
     * @param listener callback for the share/exit actions (nullable)
     */
    public EditSaveDoneWindow(Context context, EditSaveDoneListener listener) {
        mContext = context;
        mListener = listener;
        mRootView = LayoutInflater.from(mContext).inflate(R.layout.edit_bottom_save_done, null, false);
        // set click listener.
        mRootView.findViewById(R.id.tv_exit).setOnClickListener(this);
        mRootView.findViewById(R.id.tv_continue).setOnClickListener(this);
        mRootView.findViewById(R.id.tv_share).setOnClickListener(this);

        setContentView(mRootView);
        setWidth(WindowManager.LayoutParams.MATCH_PARENT);
        setHeight(WindowManager.LayoutParams.WRAP_CONTENT);
        setFocusable(false);
        setAnimationStyle(R.style.BottomPopupWindow);
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.tv_share:
                // Share keeps the window open; only exit/continue dismiss it.
                if (mListener != null) {
                    mListener.onShare();
                }
                break;
            case R.id.tv_exit:
                if (mListener != null) {
                    mListener.onExit();
                }
                dismiss();
                break;
            case R.id.tv_continue:
                dismiss();
                break;
            default:
                break;
        }
    }

    /** Callbacks for the user's choice in this window. */
    public interface EditSaveDoneListener {
        void onShare();

        void onExit();
    }
}
|
PKGURL=https://github.com/woo-j/zint.git
PKGHASH=2e5fe31ebfba09e07c934434c3885dc40224a5bf

# Fetch and patch the pinned zint revision, then build libzint.a by hand.
package_download $PKGURL $PKGHASH
package_patch

cd backend
rm *.o 2> /dev/null

# Pull the -DZINT_VERSION define from the Makefile and list all C sources,
# skipping dllversion.c (Windows-only resource code).
libzint_version=$(grep DZINT_VERSION Makefile | cut -f 2- -d "=")
libzint_srcs=$(ls -1 *.c | sed '/dllversion.c/d' | tr '\n' ' ')

veval "$SYS_CC -I$SYS_PREFIX/include $libzint_version -c $libzint_srcs"
asserterror $? "compilation failed"
veval "$SYS_AR ru $SYS_PREFIX/lib/libzint.a *.o"
asserterror $? "ar failed"
veval "$SYS_RANLIB $SYS_PREFIX/lib/libzint.a"
asserterror $? "ranlib failed"

# Install the public header with the ZINT_EXTERN qualifiers stripped.
sed 's/^ZINT_EXTERN//g' zint.h > $SYS_PREFIX/include/zint.h
cd ..

package_cleanup
#eof
|
<gh_stars>1-10
// Copyright 2021, Nitric Technologies Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { status } from '@grpc/grpc-js';
import { fromGrpcError } from '.';
import { AbortedError } from './aborted';
import { AlreadyExistsError } from './already-exists';
import { CancelledError } from './cancelled';
import { DataLossError } from './data-loss';
import { FailedPreconditionError } from './failed-precondition';
import { InternalError } from './internal';
import { InvalidArgumentError } from './invalid-argument';
import { NotFoundError } from './not-found';
import { OutOfRangeError } from './out-of-range';
import { PermissionDeniedError } from './permission-denied';
import { ResourceExhaustedError } from './resource-exhausted';
import { UnauthenticatedError } from './unauthenticated';
import { UnavailableError } from './unavailable';
import { UnimplementedError } from './unimplemented';
import { UnknownError } from './unknown';
const DECODER = new TextDecoder();
describe('fromGrpcError', () => {
const defaultMessage = {
message: "test",
details: "some-detail",
}
describe('When translating a known error code', () => {
describe("status.CANCELLED", () => {
const newError = fromGrpcError({
code: status.CANCELLED,
...defaultMessage,
} as any);
it("should return a cancelled error", () => {
expect(newError instanceof CancelledError).toBeTruthy();
});
});
describe("status.UNKNOWN", () => {
const newError = fromGrpcError({
code: status.UNKNOWN,
...defaultMessage,
} as any);
it("should return an unknown error", () => {
expect(newError instanceof UnknownError).toBeTruthy();
});
});
describe("status.INVALID_ARGUMENT", () => {
const newError = fromGrpcError({
code: status.INVALID_ARGUMENT,
...defaultMessage,
} as any);
it("should return an InvalidArgument error", () => {
expect(newError instanceof InvalidArgumentError).toBeTruthy();
});
});
describe("status.NOT_FOUND", () => {
const newError = fromGrpcError({
code: status.NOT_FOUND,
...defaultMessage,
} as any);
it("should return an NotFound error", () => {
expect(newError instanceof NotFoundError).toBeTruthy();
});
});
describe("status.ALREADY_EXISTS", () => {
const newError = fromGrpcError({
code: status.ALREADY_EXISTS,
...defaultMessage,
} as any);
it("should return an AlreadyExists error", () => {
expect(newError instanceof AlreadyExistsError).toBeTruthy();
});
});
describe("status.PERMISSION_DENIED", () => {
const newError = fromGrpcError({
code: status.PERMISSION_DENIED,
...defaultMessage,
} as any);
it("should return an PermissionDenied error", () => {
expect(newError instanceof PermissionDeniedError).toBeTruthy();
});
});
describe("status.RESOURCE_EXHAUSTED", () => {
const newError = fromGrpcError({
code: status.RESOURCE_EXHAUSTED,
...defaultMessage,
} as any);
it("should return an ResourceExhausted error", () => {
expect(newError instanceof ResourceExhaustedError).toBeTruthy();
});
});
describe("status.FAILED_PRECONDITION", () => {
const newError = fromGrpcError({
code: status.FAILED_PRECONDITION,
...defaultMessage,
} as any);
it("should return an FailedPrecondition error", () => {
expect(newError instanceof FailedPreconditionError).toBeTruthy();
});
});
describe("status.ABORTED", () => {
const newError = fromGrpcError({
code: status.ABORTED,
...defaultMessage,
} as any);
it("should return an Aborted error", () => {
expect(newError instanceof AbortedError).toBeTruthy();
});
});
describe("status.OUT_OF_RANGE", () => {
const newError = fromGrpcError({
code: status.OUT_OF_RANGE,
...defaultMessage,
} as any);
it("should return an OutOfRange error", () => {
expect(newError instanceof OutOfRangeError).toBeTruthy();
});
});
describe("status.UNIMPLEMENTED", () => {
const newError = fromGrpcError({
code: status.UNIMPLEMENTED,
...defaultMessage,
} as any);
it("should return an Unimplemented error", () => {
expect(newError instanceof UnimplementedError).toBeTruthy();
});
});
describe("status.INTERNAL", () => {
const newError = fromGrpcError({
code: status.INTERNAL,
...defaultMessage,
} as any);
it("should return an Internal error", () => {
expect(newError instanceof InternalError).toBeTruthy();
});
});
describe("status.UNAVAILABLE", () => {
const newError = fromGrpcError({
code: status.UNAVAILABLE,
...defaultMessage,
} as any);
it("should return an Unavailable error", () => {
expect(newError instanceof UnavailableError).toBeTruthy();
});
});
describe("status.DATA_LOSS", () => {
const newError = fromGrpcError({
code: status.DATA_LOSS,
...defaultMessage,
} as any);
it("should return an DataLoss error", () => {
expect(newError instanceof DataLossError).toBeTruthy();
});
});
describe("status.UNAUTHENTICATED", () => {
const newError = fromGrpcError({
code: status.UNAUTHENTICATED,
...defaultMessage,
} as any);
it("should return an Unauthenticated error", () => {
expect(newError instanceof UnauthenticatedError).toBeTruthy();
});
});
describe("status.ABORTED", () => {
const newError = fromGrpcError({
code: status.ABORTED,
message: "test",
details: "some-detail",
} as any);
it("should return an aborted error", () => {
expect(newError instanceof AbortedError).toBeTruthy();
});
});
});
describe('When translating a known error code', () => {
describe("status.OK", () => {
const newError = fromGrpcError({
code: status.OK,
message: "test",
details: "some-detail",
} as any);
it("should return an unknown error", () => {
expect(newError instanceof UnknownError).toBeTruthy();
});
});
});
});
|
class CalcListSum:
    """Computes the sum of a sequence of numbers."""

    def __init__(self, numbers):
        # numbers: iterable of numeric values to be summed.
        self.numbers = numbers

    def get_sum(self):
        """Return the sum of all numbers (0 for an empty sequence)."""
        # Use the built-in sum(); the original accumulated into a local
        # variable literally named `sum`, shadowing the built-in.
        return sum(self.numbers)
# Demo: sum a small list and print the result (expected output: 17).
nums = [2, 3, 5, 7]
calc_sum = CalcListSum(nums)
sum_of_nums = calc_sum.get_sum()
print(sum_of_nums)
package temple
import "github.com/spf13/cast"
// ToInt converts an arbitrary value to int via spf13/cast, returning an error
// when the value cannot be converted.
func ToInt(v interface{}) (int, error) {
	return cast.ToIntE(v)
}
// ToFloat64 converts an arbitrary value to float64 via spf13/cast, returning
// an error when the value cannot be converted.
func ToFloat64(v interface{}) (float64, error) {
	return cast.ToFloat64E(v)
}
// ToString converts an arbitrary value to string via spf13/cast, returning an
// error when the value cannot be converted.
func ToString(v interface{}) (string, error) {
	return cast.ToStringE(v)
}
// ToIntSlice converts every element of vals to int; the first conversion
// failure aborts and returns that error with a nil slice.
func ToIntSlice(vals []interface{}) ([]int, error) {
	result := make([]int, len(vals))
	for idx, val := range vals {
		converted, err := ToInt(val)
		if err != nil {
			return nil, err
		}
		result[idx] = converted
	}
	return result, nil
}
// ToFloat64Slice converts every element of vals to float64; the first
// conversion failure aborts and returns that error with a nil slice.
func ToFloat64Slice(vals []interface{}) ([]float64, error) {
	result := make([]float64, len(vals))
	for idx, val := range vals {
		converted, err := ToFloat64(val)
		if err != nil {
			return nil, err
		}
		result[idx] = converted
	}
	return result, nil
}
// ToStringSlice converts every element of vals to string; the first
// conversion failure aborts and returns that error with a nil slice.
func ToStringSlice(vals []interface{}) ([]string, error) {
	result := make([]string, len(vals))
	for idx, val := range vals {
		converted, err := ToString(val)
		if err != nil {
			return nil, err
		}
		result[idx] = converted
	}
	return result, nil
}
|
from typing import List
def findSpecialValue(nums: List[int]) -> int:
    """Return x such that exactly x numbers in nums are >= x, or -1 if none.

    (LeetCode 1608 "Special Array"; x is unique when it exists.)

    Bug fix: the original returned the first index i with nums[i] <= i after a
    descending sort, which is wrong in both directions:
      * [2, 2] -> -1, but the answer is 2 (both numbers are >= 2)
      * [1, 1] -> 1,  but the answer is -1 (two numbers are >= 1, not one)
    """
    # In-place sort kept to preserve the original's side effect on `nums`.
    nums.sort(reverse=True)
    n = len(nums)
    for x in range(1, n + 1):
        # Exactly x numbers are >= x iff the x-th largest is >= x and the
        # (x+1)-th largest (if it exists) is < x.
        if nums[x - 1] >= x and (x == n or nums[x] < x):
            return x
    return -1
#!/bin/bash
# Polaroid-style camera loop: shutter button prints a photo, holding the halt
# button for 2+ seconds shuts the Pi down. Requires the wiringPi `gpio` tool.

# BCM pin numbers for the two buttons (active-low, internal pull-ups) and LED
SHUTTER=16
HALT=21
LED=5
# Initialize GPIO states
gpio -g mode $SHUTTER up
gpio -g mode $HALT up
gpio -g mode $LED out
# Flash LED on startup to indicate ready state
for i in `seq 1 5`;
do
    gpio -g write $LED 1
    sleep 0.2
    gpio -g write $LED 0
    sleep 0.2
done
while :
do
    # Check for shutter button (reads 0 when pressed, pull-up wiring)
    if [ $(gpio -g read $SHUTTER) -eq 0 ]; then
        gpio -g write $LED 1
        # Capture a 512x384 still and pipe it straight to the default printer
        raspistill -n -t 200 -w 512 -h 384 -o - | lp
        sleep 1
        # Wait for user to release button before resuming
        while [ $(gpio -g read $SHUTTER) -eq 0 ]; do continue; done
        gpio -g write $LED 0
    fi
    # Check for halt button
    if [ $(gpio -g read $HALT) -eq 0 ]; then
        # Must be held for 2+ seconds before shutdown is run...
        starttime=$(date +%s)
        while [ $(gpio -g read $HALT) -eq 0 ]; do
            if [ $(($(date +%s)-starttime)) -ge 2 ]; then
                gpio -g write $LED 1
                shutdown -h now
            fi
        done
    fi
done
|
#!/bin/bash
# SLURM job: renders 7 interleaved frames (every 40th, starting at 6) of the
# Blender scene single-threaded; frame output is discarded, only timings matter.
#SBATCH --exclude=hermes[1-4],trillian[1-3],artemis[1-7],qdata[1-8],nibbler[1-4],slurm[1-5]
#SBATCH --output=granger/40_nodes_interleave/script_5_6_6.out
#SBATCH --error=granger/40_nodes_interleave/script_5_6_6.err
#SBATCH --job-name="6-6"
hostname
# Nanosecond timestamps bracket the whole run for wall-clock measurement
date +%s%N
time blender -t 1 -b Star-collapse-ntsc.blend -s 6 -e 6 -a &> /dev/null
time blender -t 1 -b Star-collapse-ntsc.blend -s 46 -e 46 -a &> /dev/null
time blender -t 1 -b Star-collapse-ntsc.blend -s 86 -e 86 -a &> /dev/null
time blender -t 1 -b Star-collapse-ntsc.blend -s 126 -e 126 -a &> /dev/null
time blender -t 1 -b Star-collapse-ntsc.blend -s 166 -e 166 -a &> /dev/null
time blender -t 1 -b Star-collapse-ntsc.blend -s 206 -e 206 -a &> /dev/null
time blender -t 1 -b Star-collapse-ntsc.blend -s 246 -e 246 -a &> /dev/null
date +%s%N
|
<reponame>thekevinscott/ml-classifier<filename>src/index.test.ts
import * as tf from '@tensorflow/tfjs';
import getDefaultDownloadHandler from './getDefaultDownloadHandler';
// Stub the download handler and the entire TensorFlow.js module so the tests
// never touch a real backend, model download, or WebGL context.
jest.mock('./getDefaultDownloadHandler');
jest.genMockFromModule('@tensorflow/tfjs');
jest.mock('@tensorflow/tfjs', () => ({
  train: {
    adam: () => {},
  },
  // model.save echoes its argument back so tests can assert which handler/URL
  // was passed through.
  model: ({
    save: (handlerOrURL) => {
      return handlerOrURL;
    },
  }),
  loadModel: () => ({
    getLayer: () => ({
      output: null,
    }),
    inputs: [],
  }),
}));
import MLClassifier from './index';
describe('ml-classifier', () => {
  // Placeholder test keeping the suite green; the real constructor/save tests
  // below are currently disabled (commented out) — re-enable or delete them.
  test('foo', () => {
    expect('a').toEqual('a');
  });
  // describe('constructor', () => {
  //   test('that it persists params', async () => {
  //     const epochs = 123;
  //     const mlClassifier = new MLClassifier({
  //       epochs,
  //     });
  //     expect(mlClassifier.getParams().epochs).toEqual(epochs);
  //   });
  //   // test('that it calls init on construct', async () => {
  //   //   MLClassifier.prototype.init = jest.fn(() => {});
  //   //   const mlClassifier = new MLClassifier({ });
  //   //   expect(mlClassifier.init).toHaveBeenCalled();
  //   // });
  // });
  // describe('save', () => {
  //   let mlClassifier;
  //   beforeEach(() => {
  //     mlClassifier = new MLClassifier();
  //     mlClassifier.loaded = jest.fn(() => {});
  //   });
  //   test('it waits for pretrained model as the first step', async () => {
  //     mlClassifier.model = tf.model;
  //     await mlClassifier.save();
  //     expect(mlClassifier.loaded).toHaveBeenCalled();
  //   });
  //   test('it throws if no model is set', async () => {
  //     const expectedError = new Error('You must call train prior to calling save');
  //     return mlClassifier.save().catch(err => {
  //       expect(err.message).toBe(expectedError.message);
  //     });
  //   });
  //   test('calls save with a handler if specified', async () => {
  //     const url = 'foobar';
  //     mlClassifier.model = tf.model;
  //     const result = await mlClassifier.save(url);
  //     expect(result).toEqual(url);
  //   });
  //   test('calls save with a default handler if none is specified', async () => {
  //     const def = 'def';
  //     getDefaultDownloadHandler.mockImplementationOnce(() => def);
  //     mlClassifier.model = tf.model;
  //     const result = await mlClassifier.save();
  //     expect(result).toEqual(def);
  //   });
  // });
});
|
#!/bin/sh
# Unit-test driver: runs the test executables found in the given build
# directory. An optional second argument "fast-only" skips the slow
# protocol tests.
BIN="$1"
if test "x$BIN" = "x" ; then
# Fixed typo in the usage message ("Usege" -> "Usage").
echo "Usage: unit_test.sh /path/to/build/directory [fast-only]"
exit 1
fi
FAST="$2"
# Detect Windows-like environments where the unix-domain-socket tests
# further below must be skipped.
WIN32=0
case "$(uname)" in
*CYGWIN*)
WIN32=1 ;;
*MINGW*)
WIN32=1 ;;
*Windows*)
WIN32=1 ;;
esac
# Start a server test executable in the background and drive it with the
# matching python client script; abort the whole run if the client fails.
#   $1 - executable name (also the base name of its .js config and .py client)
#   $2 - extra command-line arguments passed to the server
#   $3 - argument passed to the python client script
run()
{
EXE=$1
CONF=$2
PARAM=$3
echo $BIN/$EXE $CONF -c $EXE.js
echo ./$EXE.py $PARAM
# Launch the server in the background and remember its PID for cleanup.
$BIN/$EXE $CONF -c $EXE.js &
PID=$!
ERROR=0
# Give the server a moment to start listening before the client connects.
sleep 1
if ! ./$EXE.py $PARAM ; then
ERROR=1
fi
# Tear the server down and reap it so the process is fully collected
# before the next test reuses the same port/socket.
kill $PID
wait $PID
if test "$ERROR" = "1" ; then
echo "Failed!"
exit 1
fi
}
# Run a self-contained test executable from the build directory and abort
# the whole script if it exits non-zero.
#   $1 - test executable name (relative to $BIN)
basic_test()
{
EXE=$1
# Bug fix: the original echoed the literal path "/BIN/$EXE" instead of the
# $BIN variable, so the printed command did not match what actually ran.
echo "$BIN/$EXE"
if ! "$BIN/$EXE" ; then
echo "Failed!"
exit 1
fi
}
basic_test atomic_test
basic_test encryptor_test
basic_test storage_test
run form_test "" ""
if test "x$FAST" != "xfast-only" ; then
for ASYNC in true false
do
run proto_test "--test-async=$ASYNC --service-api=http --service-port=8080 --service-ip=127.0.0.1" http
run proto_test "--test-async=$ASYNC --service-api=scgi --service-port=8080 --service-ip=127.0.0.1" scgi_tcp
if test "$WIN32" = 0 ; then
run proto_test "--test-async=$ASYNC --service-api=scgi --service-socket=/tmp/cppcms_test_socket" scgi_unix
fi
done
fi
|
<filename>C2CRIBuildDir/projects/C2C-RI/src/RIGUI/src/test/EmulationConfigFileOutput.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package test;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.fhwa.c2cri.centermodel.EmulationDataFileProcessor;
import org.fhwa.c2cri.centermodel.RIEmulationEntityValueSet;
import org.fhwa.c2cri.testmodel.TestConfiguration;
/**
* The Class ConfigFileOutput.
*
* @author TransCore ITS, LLC Last Updated: 1/8/2014
*/
public class EmulationConfigFileOutput {

    /**
     * Directory holding the default emulation data files that are loaded
     * into each entity value set. Hard-coded developer-machine path; this
     * class is a one-off maintenance utility, not production code.
     */
    private static final String TEST_FILE_PATH = "C:\\C2CRI-Phase2\\C2CRIBuildDir\\projects\\C2C-RI\\src\\TMDDv303\\src\\InfoLayer\\EmulationData\\";

    /**
     * Reads a serialized {@link TestConfiguration}, refreshes every emulation
     * entity value set from the data files under {@link #TEST_FILE_PATH},
     * marks each set as a Default data source, and writes the updated
     * configuration back out as "&lt;configFileName&gt;.updated".
     *
     * Pre-Conditions: N/A Post-Conditions: N/A
     *
     * @param args the arguments (unused)
     */
    public static void main(String[] args) {
        String configFileName = "C:\\C2CRI-Phase2\\C2CRIBuildDir\\projects\\C2C-RI\\src\\RIGUI\\TestConfigurationFiles\\TMDDv303cEntityEmuDefaultsECS.ricfg";
        // try-with-resources: the original never closed the input stream
        // (resource leak) and closed the output stream only on the happy path.
        try (ObjectInputStream input = new ObjectInputStream(new FileInputStream(new File(configFileName)))) {
            TestConfiguration tc = (TestConfiguration) input.readObject();
            // (A large block of commented-out, targeted-test-case experiment
            // code was removed here; recover it from version control if the
            // per-test-case data-file copying approach is ever revisited.)
            for (RIEmulationEntityValueSet thisEntity : tc.getEmulationParameters().getEntityDataMap()) {
                System.out.println("updating " + thisEntity.getValueSetName());
                // Load the raw bytes for this value set from its default file.
                thisEntity.setEntityDataSet(
                        EmulationDataFileProcessor.getByteArray(TEST_FILE_PATH + thisEntity.getValueSetName()));
                // Mark the data as coming from the Default source (not Updated).
                thisEntity.setDataSetSource(RIEmulationEntityValueSet.ENTITYDATASTATE.Default);
            }
            // Write the refreshed configuration next to the original file.
            try (ObjectOutputStream output = new ObjectOutputStream(new FileOutputStream(configFileName + ".updated"))) {
                output.writeObject(tc);
                output.flush();
            }
        } catch (Exception ex) {
            // Maintenance utility: print and exit rather than rethrow.
            ex.printStackTrace();
        }
    }
}
|
#!/usr/bin/env bash
# Rewrites the developer-dashboard links.json so that upstream GitHub template
# URLs point at the local Gogs git server, then publishes the result as a
# ConfigMap and wires it into the dashboard deployment.
set -euo pipefail
SCRIPT_DIR=$(dirname "$0")
SRC_DIR="$(cd "${SCRIPT_DIR}"; pwd -P)"
TOOLKIT_NAMESPACE=${TOOLKIT_NAMESPACE:-tools}
TOOLKIT_DASHBOARD_CONFIG=${TOOLKIT_DASHBOARD_CONFIG:-developer-dashboard-config}
TOOLKIT_DASHBOARD_DEPLOY=${TOOLKIT_DASHBOARD_DEPLOY:-dashboard-developer-dashboard}
GIT_PROTOCOL=${GIT_PROTOCOL:-http}
# Resolve the externally visible host of the in-cluster Gogs route.
GIT_HOST=$(oc get route -n ${TOOLKIT_NAMESPACE} gogs --template='{{.spec.host}}')
GIT_URL="${GIT_PROTOCOL}://${GIT_HOST}"
GIT_USER=${GIT_USER:-toolkit}
# NOTE(review): "ORGINAL" is misspelled, but it is an externally overridable
# environment variable, so renaming it would break callers — left as-is.
ORGINAL_JSON_URL=${ORGINAL_JSON_URL:-https://raw.githubusercontent.com/ibm-garage-cloud/developer-dashboard/master/public/data/links.json}
# Each entry is "upstream-github-url,local-repo-name"; entries are
# whitespace-separated and split on the comma below.
GIT_REPOS="https://github.com/IBM/template-node-react,node-react \
https://github.com/IBM/template-node-angular,node-angular \
https://github.com/IBM/template-graphql-typescript,graphql-typescript \
https://github.com/IBM/template-node-typescript,node-typescript \
https://github.com/IBM/template-java-spring,java-spring \
https://github.com/IBM/template-go-gin,go-gin \
https://github.com/ibm-garage-cloud/inventory-management-svc-solution,inventory-management-svc-solution \
https://github.com/ibm-garage-cloud/inventory-management-bff-solution,inventory-management-bff-solution \
https://github.com/ibm-garage-cloud/inventory-management-ui-solution,inventory-management-ui-solution"
TMP_DIR=$(mktemp -d)
pushd "${TMP_DIR}"
# Download json
curl -sL -o links.json ${ORGINAL_JSON_URL}
for i in ${GIT_REPOS}; do
# Split "url,name" into $1 and $2 by temporarily making "," the field separator.
IFS=","
set $i
echo $1 $2
# BSD sed (macOS) requires an explicit backup suffix with -i; GNU sed does not.
if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i .bak "s~${1}~${GIT_URL}/${GIT_USER}/${2}~" links.json
else
sed -i "s~${1}~${GIT_URL}/${GIT_USER}/${2}~" links.json
fi
unset IFS
done
# Recreate the ConfigMap from the rewritten file (delete is best-effort).
oc delete cm ${TOOLKIT_DASHBOARD_CONFIG} -n ${TOOLKIT_NAMESPACE} 2>/dev/null || true
oc create cm ${TOOLKIT_DASHBOARD_CONFIG} -n ${TOOLKIT_NAMESPACE} --from-file=LINKS_URL_DATA=links.json
# Point the dashboard container's environment at the new ConfigMap; this also
# triggers a rollout of the deployment.
oc set env deployment/${TOOLKIT_DASHBOARD_DEPLOY} -c developer-dashboard -n ${TOOLKIT_NAMESPACE} --from=configmap/${TOOLKIT_DASHBOARD_CONFIG}
popd
|
import os
def update_library_version(src_dir, dst_dir=None, libname="opencv", src_libversion="", dst_libversion=".4.0.0", dry_run=True):
    """Rewrite versioned library references in every ``.py`` file of a directory.

    Each occurrence of ``f"{libname}{src_libversion}"`` in a file's text is
    replaced with ``f"{libname}{dst_libversion}"``.  Results are written to
    ``dst_dir`` (defaults to ``src_dir``, i.e. in-place).  With
    ``dry_run=True`` nothing is written; affected files are only reported.
    Non-``.py`` entries in ``src_dir`` are ignored.
    """
    target_dir = dst_dir if dst_dir else src_dir
    old_token = f"{libname}{src_libversion}"
    new_token = f"{libname}{dst_libversion}"
    for entry in os.listdir(src_dir):
        if not entry.endswith(".py"):
            continue
        with open(os.path.join(src_dir, entry), 'r') as src_file:
            rewritten = src_file.read().replace(old_token, new_token)
        if dry_run:
            print(f"File '{entry}' would be updated")
        else:
            with open(os.path.join(target_dir, entry), 'w') as dst_file:
                dst_file.write(rewritten)
            print(f"File '{entry}' updated successfully")
# Example usage
# NOTE(review): these paths are placeholders; importing/running this module
# as-is will raise FileNotFoundError unless they point at real directories.
update_library_version("/path/to/source/directory", "/path/to/destination/directory", "opencv", "1.58.0", ".4.0.0", False)
<reponame>rwl/exip
/*==================================================================*\
| EXIP - Embeddable EXI Processor in C |
|--------------------------------------------------------------------|
| This work is licensed under BSD 3-Clause License |
| The full license terms and conditions are located in LICENSE.txt |
\===================================================================*/
/**
* @file initSchemaInstance.c
* @brief Implements the initialization functions for the EXIPSchema object
*
* @date Nov 28, 2011
* @author <NAME>
* @version 0.5
* @par[Revision] $Id$
*/
#include "initSchemaInstance.h"
#include "sTables.h"
#include "grammars.h"
#ifndef DEFAULT_GRAMMAR_TABLE
# define DEFAULT_GRAMMAR_TABLE 300
#endif
#ifndef DEFAULT_SIMPLE_GRAMMAR_TABLE
# define DEFAULT_SIMPLE_GRAMMAR_TABLE 75
#endif
#ifndef DEFAULT_ENUM_TABLE
# define DEFAULT_ENUM_TABLE 5
#endif
/**
 * Initializes an EXIPSchema object: allocates its memory list, zeroes the
 * document grammar and all dynamic tables, and creates the string/grammar/
 * simple-type/enum tables required by the given initialization mode.
 *
 * @param schema             caller-allocated schema object to initialize
 * @param initializationType one of the InitSchemaType modes; schema-less mode
 *                           skips the simple-type machinery, full schema mode
 *                           additionally creates the enum table
 * @return EXIP_OK on success; on failure the memList is freed and the error
 *         code of the failing step is returned
 */
errorCode initSchema(EXIPSchema* schema, InitSchemaType initializationType)
{
    errorCode tmp_err_code = EXIP_UNEXPECTED_ERROR;

    TRY(initAllocList(&schema->memList));

    schema->staticGrCount = 0;

    schema->docGrammar.count = 0;
    /* Bug fix: props must be zeroed BEFORE SET_CONTENT_INDEX() touches it.
     * The original applied the macro to the still-uninitialized field and
     * then immediately overwrote the result with 0. */
    schema->docGrammar.props = 0;
    SET_CONTENT_INDEX(schema->docGrammar.props, 0);
    schema->docGrammar.rule = NULL;

    schema->simpleTypeTable.count = 0;
    schema->simpleTypeTable.sType = NULL;
    schema->grammarTable.count = 0;
    schema->grammarTable.grammar = NULL;
    schema->enumTable.count = 0;
    schema->enumTable.enumDef = NULL;

    /* Create and initialize initial string table entries */
    TRY_CATCH(createDynArray(&schema->uriTable.dynArray, sizeof(UriEntry), DEFAULT_URI_ENTRIES_NUMBER), freeAllocList(&schema->memList));
    TRY_CATCH(createUriTableEntries(&schema->uriTable, initializationType != INIT_SCHEMA_SCHEMA_LESS_MODE), freeAllocList(&schema->memList));

    if(initializationType == INIT_SCHEMA_SCHEMA_ENABLED)
    {
        /* Create and initialize enumDef table */
        TRY_CATCH(createDynArray(&schema->enumTable.dynArray, sizeof(EnumDefinition), DEFAULT_ENUM_TABLE), freeAllocList(&schema->memList));
    }

    /* Create the schema grammar table */
    TRY_CATCH(createDynArray(&schema->grammarTable.dynArray, sizeof(EXIGrammar), DEFAULT_GRAMMAR_TABLE), freeAllocList(&schema->memList));

    if(initializationType != INIT_SCHEMA_SCHEMA_LESS_MODE)
    {
        /* Create and initialize simple type table */
        TRY_CATCH(createDynArray(&schema->simpleTypeTable.dynArray, sizeof(SimpleType), DEFAULT_SIMPLE_GRAMMAR_TABLE), freeAllocList(&schema->memList));
        TRY_CATCH(createBuiltInTypesDefinitions(&schema->simpleTypeTable, &schema->memList), freeAllocList(&schema->memList));
        // Must be done after createBuiltInTypesDefinitions()
        TRY_CATCH(generateBuiltInTypesGrammars(schema), freeAllocList(&schema->memList));
        schema->staticGrCount = SIMPLE_TYPE_COUNT;
    }

    /* tmp_err_code holds EXIP_OK here: every TRY/TRY_CATCH above returns
     * early on failure. */
    return tmp_err_code;
}
errorCode generateBuiltInTypesGrammars(EXIPSchema* schema)
{
unsigned int i;
QNameID typeQnameID;
Index typeId;
EXIGrammar grammar;
Index dynArrId;
// URI id 3 -> http://www.w3.org/2001/XMLSchema
typeQnameID.uriId = XML_SCHEMA_NAMESPACE_ID;
grammar.count = 2;
for(i = 0; i < schema->uriTable.uri[XML_SCHEMA_NAMESPACE_ID].lnTable.count; i++)
{
typeQnameID.lnId = i;
typeId = typeQnameID.lnId;
grammar.props = 0;
SET_SCHEMA_GR(grammar.props);
if(HAS_TYPE_FACET(schema->simpleTypeTable.sType[typeId].content, TYPE_FACET_NAMED_SUBTYPE_UNION))
SET_NAMED_SUB_TYPE_OR_UNION(grammar.props);
grammar.rule = (GrammarRule*) memManagedAllocate(&schema->memList, sizeof(GrammarRule)*(grammar.count));
if(grammar.rule == NULL)
return EXIP_MEMORY_ALLOCATION_ERROR;
if(typeId == SIMPLE_TYPE_ANY_TYPE)
{
// <xs:anyType> - The base complex type; complex ur-type
SET_CONTENT_INDEX(grammar.props, 1);
grammar.rule[0].production = memManagedAllocate(&schema->memList, sizeof(Production)*4);
if(grammar.rule[0].production == NULL)
return EXIP_MEMORY_ALLOCATION_ERROR;
SET_PROD_EXI_EVENT(grammar.rule[0].production[3].content, EVENT_AT_ALL);
SET_PROD_NON_TERM(grammar.rule[0].production[3].content, 0);
grammar.rule[0].production[3].typeId = INDEX_MAX;
grammar.rule[0].production[3].qnameId.uriId = URI_MAX;
grammar.rule[0].production[3].qnameId.lnId = LN_MAX;
SET_PROD_EXI_EVENT(grammar.rule[0].production[2].content, EVENT_SE_ALL);
SET_PROD_NON_TERM(grammar.rule[0].production[2].content, 1);
grammar.rule[0].production[2].typeId = INDEX_MAX;
grammar.rule[0].production[2].qnameId.uriId = URI_MAX;
grammar.rule[0].production[2].qnameId.lnId = LN_MAX;
SET_PROD_EXI_EVENT(grammar.rule[0].production[1].content, EVENT_EE);
SET_PROD_NON_TERM(grammar.rule[0].production[1].content, GR_VOID_NON_TERMINAL);
grammar.rule[0].production[1].typeId = INDEX_MAX;
grammar.rule[0].production[1].qnameId.uriId = URI_MAX;
grammar.rule[0].production[1].qnameId.lnId = LN_MAX;
SET_PROD_EXI_EVENT(grammar.rule[0].production[0].content, EVENT_CH);
SET_PROD_NON_TERM(grammar.rule[0].production[0].content, 1);
grammar.rule[0].production[0].typeId = INDEX_MAX;
grammar.rule[0].production[0].qnameId.uriId = URI_MAX;
grammar.rule[0].production[0].qnameId.lnId = LN_MAX;
grammar.rule[0].pCount = 4;
grammar.rule[0].meta = 0;
RULE_SET_CONTAIN_EE(grammar.rule[0].meta);
grammar.rule[1].production = memManagedAllocate(&schema->memList, sizeof(Production)*3);
if(grammar.rule[1].production == NULL)
return EXIP_MEMORY_ALLOCATION_ERROR;
SET_PROD_EXI_EVENT(grammar.rule[1].production[2].content, EVENT_SE_ALL);
SET_PROD_NON_TERM(grammar.rule[1].production[2].content, 1);
grammar.rule[1].production[2].typeId = INDEX_MAX;
grammar.rule[1].production[2].qnameId.uriId = URI_MAX;
grammar.rule[1].production[2].qnameId.lnId = LN_MAX;
SET_PROD_EXI_EVENT(grammar.rule[1].production[1].content, EVENT_EE);
SET_PROD_NON_TERM(grammar.rule[1].production[1].content, GR_VOID_NON_TERMINAL);
grammar.rule[1].production[1].typeId = INDEX_MAX;
grammar.rule[1].production[1].qnameId.uriId = URI_MAX;
grammar.rule[1].production[1].qnameId.lnId = LN_MAX;
SET_PROD_EXI_EVENT(grammar.rule[1].production[0].content, EVENT_CH);
SET_PROD_NON_TERM(grammar.rule[1].production[0].content, 1);
grammar.rule[1].production[0].typeId = INDEX_MAX;
grammar.rule[1].production[0].qnameId.uriId = URI_MAX;
grammar.rule[1].production[0].qnameId.lnId = LN_MAX;
grammar.rule[1].pCount = 3;
grammar.rule[1].meta = 0;
RULE_SET_CONTAIN_EE(grammar.rule[1].meta);
}
else // a regular simple type
{
grammar.rule[0].production = memManagedAllocate(&schema->memList, sizeof(Production));
if(grammar.rule[0].production == NULL)
return EXIP_MEMORY_ALLOCATION_ERROR;
SET_PROD_EXI_EVENT(grammar.rule[0].production[0].content, EVENT_CH);
SET_PROD_NON_TERM(grammar.rule[0].production[0].content, 1);
grammar.rule[0].production[0].typeId = typeId;
grammar.rule[0].production[0].qnameId.uriId = URI_MAX;
grammar.rule[0].production[0].qnameId.lnId = LN_MAX;
grammar.rule[0].pCount = 1;
grammar.rule[0].meta = 0;
grammar.rule[1].production = memManagedAllocate(&schema->memList, sizeof(Production));
if(grammar.rule[1].production == NULL)
return EXIP_MEMORY_ALLOCATION_ERROR;
SET_PROD_EXI_EVENT(grammar.rule[1].production[0].content, EVENT_EE);
SET_PROD_NON_TERM(grammar.rule[1].production[0].content, GR_VOID_NON_TERMINAL);
grammar.rule[1].production[0].typeId = INDEX_MAX;
grammar.rule[1].production[0].qnameId.uriId = URI_MAX;
grammar.rule[1].production[0].qnameId.lnId = LN_MAX;
grammar.rule[1].pCount = 1;
grammar.rule[1].meta = 0;
RULE_SET_CONTAIN_EE(grammar.rule[1].meta);
}
/** Add the grammar to the schema grammar table */
addDynEntry(&schema->grammarTable.dynArray, &grammar, &dynArrId);
schema->uriTable.uri[3].lnTable.ln[i].typeGrammar = dynArrId;
}
return EXIP_OK;
}
/**
 * Appends one SimpleType entry per XML Schema built-in simple type to
 * simpleTypeTable, in a fixed order (ENTITIES, ENTITY, ID, ... unsignedShort).
 *
 * NOTE(review): the insertion order presumably must match the SIMPLE_TYPE_*
 * indices used elsewhere (SIMPLE_TYPE_ENTITY, SIMPLE_TYPE_IDREF and
 * SIMPLE_TYPE_NMTOKEN are stored below as list item types) -- confirm against
 * their definitions before reordering anything.
 *
 * NOTE(review): the memList parameter is not referenced in this body; all
 * entries go through addDynEntry(). Presumably kept for interface symmetry.
 *
 * For each entry: content encodes the EXI value type plus optional facets,
 * max/min carry inclusive bounds when the corresponding facet is set, and
 * length holds the item type for list types (0 otherwise).
 *
 * @return EXIP_OK on success, or the error of a failing addDynEntry()
 */
errorCode createBuiltInTypesDefinitions(SimpleTypeTable* simpleTypeTable, AllocList* memList)
{
errorCode tmp_err_code = EXIP_UNEXPECTED_ERROR;
SimpleType sType;
Index elID;
// entities: list whose item type is ENTITY
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_LIST);
sType.max = 0;
sType.min = 0;
sType.length = SIMPLE_TYPE_ENTITY;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// entity
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// id
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// idref
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// idrefs: list whose item type is IDREF
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_LIST);
sType.max = 0;
sType.min = 0;
sType.length = SIMPLE_TYPE_IDREF;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// ncname
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// nmtoken
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// nmtokens: list whose item type is NMTOKEN
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_LIST);
sType.max = 0;
sType.min = 0;
sType.length = SIMPLE_TYPE_NMTOKEN;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// notation
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// name
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// qname
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// any simple type
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// any type: the only entry with VALUE_TYPE_NONE (complex ur-type)
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_NONE);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// any uri
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// base64 binary
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_BINARY);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// boolean
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_BOOLEAN);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// byte: bounded small integer [-128, 127]
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_SMALL_INTEGER);
SET_TYPE_FACET(sType.content, TYPE_FACET_MAX_INCLUSIVE);
SET_TYPE_FACET(sType.content, TYPE_FACET_MIN_INCLUSIVE);
sType.max = 127;
sType.min = -128;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// date
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_DATE);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// date time
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_DATE_TIME);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// decimal
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_DECIMAL);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// double
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_FLOAT);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// duration: represented as a string value type
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// float
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_FLOAT);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// gDay (gDay/gMonth/gMonthDay all share VALUE_TYPE_MONTH below)
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_MONTH);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// gMonth
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_MONTH);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// gMonthDay
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_MONTH);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// gYear
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_YEAR);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// gYearMonth: note this uses VALUE_TYPE_DATE, not VALUE_TYPE_MONTH
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_DATE);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// hex binary
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_BINARY);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// Int
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_INTEGER);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// integer
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_INTEGER);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// language
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// long
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_INTEGER);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// negativeInteger: only an upper bound (max = -1)
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_INTEGER);
SET_TYPE_FACET(sType.content, TYPE_FACET_MAX_INCLUSIVE);
sType.max = -1;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// NonNegativeInteger: lower bound min = 0
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_NON_NEGATIVE_INT);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
SET_TYPE_FACET(sType.content, TYPE_FACET_MIN_INCLUSIVE);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// NonPositiveInteger: upper bound max = 0
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_INTEGER);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
SET_TYPE_FACET(sType.content, TYPE_FACET_MAX_INCLUSIVE);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// normalizedString
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// Positive Integer: lower bound min = 1
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_NON_NEGATIVE_INT);
SET_TYPE_FACET(sType.content, TYPE_FACET_MIN_INCLUSIVE);
sType.max = 0;
sType.min = 1;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// short: bounded integer [-32768, 32767]
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_INTEGER);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
SET_TYPE_FACET(sType.content, TYPE_FACET_MAX_INCLUSIVE);
SET_TYPE_FACET(sType.content, TYPE_FACET_MIN_INCLUSIVE);
sType.max = 32767;
sType.min = -32768;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// String
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// time
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_TIME);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// token
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_STRING);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// Unsigned byte: bounded small integer [0, 255]
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_SMALL_INTEGER);
SET_TYPE_FACET(sType.content, TYPE_FACET_MAX_INCLUSIVE);
SET_TYPE_FACET(sType.content, TYPE_FACET_MIN_INCLUSIVE);
sType.max = 255;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// Unsigned int: lower bound min = 0
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_NON_NEGATIVE_INT);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
SET_TYPE_FACET(sType.content, TYPE_FACET_MIN_INCLUSIVE);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// Unsigned Long: lower bound min = 0
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_NON_NEGATIVE_INT);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
SET_TYPE_FACET(sType.content, TYPE_FACET_MIN_INCLUSIVE);
sType.max = 0;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
// Unsigned short: bounded [0, 65535]
sType.content = 0;
SET_EXI_TYPE(sType.content, VALUE_TYPE_NON_NEGATIVE_INT);
SET_TYPE_FACET(sType.content, TYPE_FACET_NAMED_SUBTYPE_UNION);
SET_TYPE_FACET(sType.content, TYPE_FACET_MAX_INCLUSIVE);
SET_TYPE_FACET(sType.content, TYPE_FACET_MIN_INCLUSIVE);
sType.max = 65535;
sType.min = 0;
sType.length = 0;
TRY(addDynEntry(&simpleTypeTable->dynArray, &sType, &elID));
return EXIP_OK;
}
|
// repo: tomaszdurka/CM
require(['cm/tests/media/common'], function(common) {
  require(['cm/media/audio'], function(Audio) {
    QUnit.module('cm/media/audio');

    // PhantomJS has no HTMLAudioElement, so the playback test cannot run there.
    if (!('HTMLAudioElement' in window)) {
      common.skip();
      return;
    }

    var audioUrl = 'client-vendor/after-body-source/cm/tests/resources/opus-48khz.weba';
    common.test(Audio, audioUrl);
  });
});
|
# http://qiita.com/ysk_1031/items/8cde9ce8b4d0870a129d
# ZLE widget: fuzzy-pick a ghq-managed repository with fzf (previewing its
# README via bat) and cd into it as if the command had been typed.
# Requires ghq, fzf and bat on PATH. Bound to Ctrl-] below.
function gcd () {
local selected_dir=$(ghq list |fzf --preview "bat --color=always --style=header,grid --line-range :80 $(ghq root)/{}/README.*")
if [ -n "$selected_dir" ]; then
# Place the cd command on the edit buffer and execute it immediately.
BUFFER="cd $(ghq root)/${selected_dir}"
zle accept-line
fi
zle clear-screen
}
# Register the function as a ZLE widget and bind it to Ctrl-].
zle -N gcd
bindkey '^]' gcd
|
# Deletes "ö" from a UTF-8 string and compares against an expected file.
# NOTE(review): tr -d with a multibyte character is implementation-dependent
# (GNU tr operates on single bytes), so the result presumably depends on the
# tr implementation/locale under test -- confirm t1.exp matches the target.
echo üpöp | tr -d ö > t1.out
diff t1.exp t1.out
|
const inputArray = [7, 3, 4, 2, 8, 10, 1];
const sortAndFilterArray = inputArray => {
return inputArray
.filter(num => num % 2 === 0)
.sort((a, b) => b - a);
};
console.log(sortAndFilterArray(inputArray));
// Output: [10, 8, 4, 2] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.