text
stringlengths 1
1.05M
|
|---|
def divisible_five(nums):
    """Return a list of the elements of ``nums`` that are divisible by 5.

    Order is preserved; an empty input yields an empty list.
    """
    return [value for value in nums if value % 5 == 0]
|
//#region IMPORTS
import type Pose from '../../armature/Pose';
import type { IKChain, IKLink } from '../rigs/IKChain';
import QuatUtil from '../../maths/QuatUtil';
import Vec3Util from '../../maths/Vec3Util';
import { quat } from 'gl-matrix';
import SwingTwistBase from './support/SwingTwistBase';
//#endregion
function lawcos_sss( aLen: number, bLen: number, cLen: number ): number{
    // Law of Cosines - SSS : cos(C) = (a^2 + b^2 - c^2) / 2ab
    // Solves the angle between sides a and b, where c is the side opposite
    // that angle. The cosine is clamped to [-1, 1] to guard against NaN
    // from floating-point drift before calling acos.
    const cosine = ( aLen*aLen + bLen*bLen - cLen*cLen ) / ( 2 * aLen * bLen );
    return Math.acos( Math.min( 1, Math.max( -1, cosine ) ) );
}
/**
 * Spring IK solver: after a SwingTwist pass aims the chain at the effector,
 * the remaining bones are folded into an alternating zig-zag of identical
 * triangles whose shared bases subdivide the origin→effector line.
 *
 * NOTE(review): the solver treats every link as having the first link's
 * length (see boneLen below) — confirm rigs using this solver have
 * (roughly) uniform bone lengths.
 */
class SpringSolver extends SwingTwistBase{

    resolve( chain: IKChain, pose: Pose, debug?:any ): void{
        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Start by Using SwingTwist to target the bone toward the EndEffector
        const ST = this._swingTwist
        const [ rot, pt ] = ST.getWorldRot( chain, pose, debug );
        const effLen = Vec3Util.len( ST.effectorPos, ST.originPos );

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Going to treat the chain as if each bone is the same length to simplify the solver.
        // The basic Idea is that the line that forms the IK direction will be subdivided by
        // the number of pair bones to form a chain of triangles. So Each A, B is a bone and C
        // will be the sub divided IK line segment.
        //        /\        /\
        //    A  /  \ B    /  \
        //      /____\    /____\
        //        C ( Base )
        const qprev : quat = quat.copy( [0,0,0,1], pt.rot ); // Previous Parent WS Rotation
        const qnext : quat = [0,0,0,1]; // Save Child WS Rotation to be the next parent
        let lnk : IKLink = chain.links[ 0 ]; // First bone of the triangle
        const boneLen = lnk.len; // Treat each bone as the same length
        const baseLen = effLen / ( chain.count / 2 ); // Length of the sub divided IK segment, will be triangle's base len
        const rad_a = lawcos_sss( boneLen, baseLen, boneLen ); // Angle of AC
        const rad_b = Math.PI - lawcos_sss( boneLen, boneLen, baseLen ); // Angle of AB (supplement of the apex angle)
        const r_axis_an = quat.setAxisAngle( [0,0,0,1], ST.orthoDir, -rad_a ); // First Bone Rotation
        const r_axis_b = quat.setAxisAngle( [0,0,0,1], ST.orthoDir, rad_b ); // Second Bone Rotation

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // The first bone of the Chain starts off with the rotation from the SwingTwistSolver.
        // So from here, just need to apply the first axis rotation, save the WS for next bone's parent
        // then convert it back to local space to be saved back to the pose.
        quat.mul( rot, r_axis_an, rot ); // Apply First Rotation to SwingTwist Rot
        quat.copy( qnext, rot ); // Save as Next Parent Rotation
        QuatUtil.pmulInvert( rot, rot, qprev ); // To Local
        pose.setLocalRot( lnk.idx, rot ); // Save
        quat.copy( qprev, qnext ); // Move as Prev Parent Rotation

        // The last thing we do is fix the first bone rotation. The first bone starts off
        // aligned with the IK line, so we rotate N degrees to the left of the line for it.
        // When we start the loop, every first bone will now be looking down across the IK line
        // at about N amount of the line on the right side. To get it to where we need to go, we
        // move it N degrees to the left which should align it again to the IK line, THEN we add
        // N degrees more to the left which should have it pointing to the same direction as the
        // first bone of the chain. So we just fix it by going N*-2 degrees on the same rotation axis
        quat.setAxisAngle( r_axis_an, ST.orthoDir, rad_a * -2 );

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Walk the remaining bones, alternating between the two triangle
        // rotations, accumulating world-space parent rotations as we go.
        let r_axis : quat;
        for( let i=1; i < chain.count; i++ ){
            lnk = chain.links[ i ];
            r_axis = ( ( i&1 ) == 0 )? r_axis_an : r_axis_b; // Use A for Even Numbers, B for Odd
            quat.mul( rot, qprev, lnk.bind.rot ); // Move Local Bind to WorldSpace
            quat.mul( rot, r_axis, rot ); // Then apply the AB rotation to get it to point toward the IK Line
            quat.copy( qnext, rot ); // Save WS rotation for next bone
            QuatUtil.pmulInvert( rot, rot, qprev ); // To local space...
            pose.setLocalRot( lnk.idx, rot ); // Save to Pose
            quat.copy( qprev, qnext ); // Move WS to qprev to act as the starting point
        }
    }
}
export default SpringSolver;
|
# Build script: generates, builds and customises a Nios II
# "hello_world_small" BSP + application for the node_0 project,
# then swaps in the project's own sources and rebuilds.

# Define user definitions
PROJECT_NAME=node_0
SAMPLE_TYPE=hello_world_small
SOPCINFO_DIR=./../quartus
SOPCINFO_FILE=qsys_system.sopcinfo
CPU_NAME=nios2_0

# Define internal symbols
BSP_STR=_bsp
APP_NAME=$PROJECT_NAME
BSP_NAME=$PROJECT_NAME$BSP_STR
APP_DIR=$SOPCINFO_DIR/software/$APP_NAME
BSP_DIR=$SOPCINFO_DIR/software/$BSP_NAME

# Create create-this-app and create-this-bsp script
nios2-swexample-create --name=$PROJECT_NAME \
    --type=$SAMPLE_TYPE \
    --sopc-file=$SOPCINFO_DIR/$SOPCINFO_FILE \
    --app-dir=$APP_DIR \
    --bsp-dir=$BSP_DIR \
    --cpu-name=$CPU_NAME

# Build Application and BSP
cd $APP_DIR
./create-this-app
cd ../$BSP_NAME
./create-this-bsp

# Add source file and remove default one
# (removes the sample source plus the artifacts produced by the first build)
cd ../$APP_NAME
rm hello_world_small.c
rm $PROJECT_NAME.elf
rm $PROJECT_NAME.map
rm $PROJECT_NAME.objdump
rm obj/default/hello_world_small.d
rm obj/default/hello_world_small.o

# Copy in the real application source, packet-transaction library and buffers.
cp ../../../sw_sources/node_0.c node_0.c
cp ../../../packet_transaction_lib/inc/header_0/packet_transaction_util.h packet_transaction_util.h
cp ../../../packet_transaction_lib/inc/header_0/makefile makefile
cp ../../../templateEngine/applicationEngine/Templates/packet_transaction.h packet_transaction.h
cp ../../../packet_transaction_lib/src/src_0/packet_transaction_util.c packet_transaction_util.c
cp ../../../packet_transaction_lib/buffer/* .

# generate make file for new source file
# (intentionally disabled — the copied-in makefile is used instead)
# nios2-app-generate-makefile --bsp-dir=../$BSP_NAME \
# --src-files=node_0.c
# make

# build again
./create-this-app
make
|
# Guard: both environment roots must be configured before plotting.
if [[ -z "$GEM5_ROOT" ]]; then
    echo "GEM5_ROOT is not set!" 1>&2
    exit 1
fi
# NOTE(review): SPEC_ROOT is validated but not used below — presumably
# plot_2cpu.py reads it from the environment; confirm.
if [[ -z "$SPEC_ROOT" ]]; then
    echo "SPEC_ROOT is not set!" 1>&2
    exit 1
fi
# Quote the path expansions so roots containing spaces do not word-split,
# and abort if the cd fails rather than plotting from the wrong directory.
cd "$GEM5_ROOT/plot_scripts/" || exit 1
python3 plot_2cpu.py "$GEM5_ROOT/eval_scripts/simu_condor/results/" docDist_2cpu_DAGguise docDist_2cpu_FSBTA docDist_2cpu_regular
# Return to the previous working directory.
cd -
|
#!/usr/bin/env bash
# Builds the GATK docker image from a github tag (or hash), optionally
# staging a clean clone, running unit tests, and pushing to dockerhub/GCR.
# Have script stop if there is an error
set -e

#REPO=broadinstitute
REPO=ajgoade
PROJECT=gatk
REPO_PRJ=${REPO}/${PROJECT}
GCR_REPO="us.gcr.io/broad-gatk/gatk"
# Name of the temporary clone created inside STAGING_DIR when -d is used.
STAGING_CLONE_DIR=${PROJECT}_staging_temp

#################################################
# Parsing arguments
#################################################
while getopts "e:pslrud:t:" option; do
    case "$option" in
        e) GITHUB_TAG="$OPTARG" ;;
        p) IS_PUSH=true ;;
        s) IS_HASH=true ;;
        l) IS_NOT_LATEST=true ;;
        r) IS_NOT_REMOVE_UNIT_TEST_CONTAINER=true ;;
        u) IS_NOT_RUN_UNIT_TESTS=true ;;
        d) STAGING_DIR="$OPTARG" ;;
        t) PULL_REQUEST_NUMBER="$OPTARG" ;;
    esac
done

# -e is mandatory: print usage and bail out when it was not supplied.
# NOTE: the continuation lines below are part of the printf format string,
# so they must stay flush-left to avoid injecting whitespace into the output.
if [ -z "$GITHUB_TAG" ]; then
printf "Option -e requires an argument.\n \
Usage: %s: -e <GITHUB_TAG> [-psl] \n \
where <GITHUB_TAG> is the github tag (or hash when -s is used) to use in building the docker image\n \
(e.g. bash build_docker.sh -e 1.0.0.0-alpha1.2.1)\n \
Optional arguments: \n \
-s \t The GITHUB_TAG (-e parameter) is actually a github hash, not tag. git hashes cannot be pushed as latest, so -l is implied. \n \
-l \t Do not also push the image to the 'latest' tag. \n \
-u \t Do not run the unit tests. \n \
-d <STAGING_DIR> \t staging directory to grab code from repo and build the docker image. If unspecified, then use whatever is in current dir (do not go to the repo). NEVER SPECIFY YOUR WORKING DIR \n \
-p \t (GATK4 developers only) push image to docker hub once complete. This will use the GITHUB_TAG in dockerhub as well. \n \
\t\t Unless -l is specified, this will also push this image to the 'latest' tag. \n \
-r \t (GATK4 developers only) Do not remove the unit test docker container. This is useful for debugging failing unit tests. \n \
-t <PULL_REQUEST_NUMBER>\t (Travis CI only) The pull request number. This is only used during pull request builds on Travis CI. \n" $0
    exit 1
fi

# -z is like "not -n"
# Refuse to tag a raw git hash as 'latest': pushing a hash implies -l.
if [ -z ${IS_NOT_LATEST} ] && [ -n "${IS_HASH}" ] && [ -n "${IS_PUSH}" ]; then
    echo -e "\n##################"
    echo " WARNING: Refusing to push a hash as latest to dockerhub. "
    echo "##################"
    IS_NOT_LATEST=true
fi

# Output the parameters
echo -e "\n"
echo -e "github tag/hash: ${GITHUB_TAG}"
echo -e "docker hub repo, project, and tag: ${REPO_PRJ}:${GITHUB_TAG}\n\n"
echo "Other options (Blank is false)"
echo "---------------"
echo "This is a git hash: ${IS_HASH}"
echo "Push to dockerhub: ${IS_PUSH}"
echo "Do NOT remove the unit test container: ${IS_NOT_REMOVE_UNIT_TEST_CONTAINER}"
echo "Staging directory: ${STAGING_DIR}"
echo -e "Do NOT make this the default docker image: ${IS_NOT_LATEST}"
echo -e "Fetch from this remote path: ${PULL_REQUEST_NUMBER}\n\n\n"

# Login to dockerhub
if [ -n "${IS_PUSH}" ]; then
    echo "Please login to dockerhub"
    docker login
fi

ORIGINAL_WORKING_DIRECTORY=$(pwd)

# When a staging dir is given, check out a clean clone there and build from it.
if [ -n "$STAGING_DIR" ]; then
    # Tags are checked out via the "tags/" prefix; hashes are checked out
    # directly, so the prefix collapses to (whitespace-only) nothing.
    GITHUB_DIR="tags/"
    if [ -n "${IS_HASH}" ]; then
        GITHUB_DIR=" "
    fi
    # NOTE(review): after 'cd ${STAGING_DIR}' the later uses of
    # ${STAGING_DIR}/${STAGING_CLONE_DIR} only resolve correctly when
    # STAGING_DIR is an absolute path — confirm callers always pass one.
    mkdir -p ${STAGING_DIR}
    cd ${STAGING_DIR}
    # Best-effort removal of a stale clone; tolerate failure, then restore -e.
    set +e
    rm -Rf ${STAGING_DIR}/${STAGING_CLONE_DIR}
    set -e
    GIT_LFS_SKIP_SMUDGE=1 git clone https://github.com/${REPO}/${PROJECT}.git ${STAGING_DIR}/${STAGING_CLONE_DIR}
    cd ${STAGING_DIR}/${STAGING_CLONE_DIR}
    STAGING_ABSOLUTE_PATH=$(pwd)
    echo "Now in $(pwd)"
    # For Travis CI pull-request builds, fetch the PR's merge ref first.
    if [ ${PULL_REQUEST_NUMBER} ]; then
        GIT_FETCH_COMMAND="git fetch origin +refs/pull/${PULL_REQUEST_NUMBER}/merge"
        echo "${GIT_FETCH_COMMAND}"
        ${GIT_FETCH_COMMAND}
    fi
    GIT_CHECKOUT_COMMAND="git checkout ${GITHUB_DIR}${GITHUB_TAG}"
    echo "${GIT_CHECKOUT_COMMAND}"
    ${GIT_CHECKOUT_COMMAND}
fi

# Build
# DRELEASE mirrors whether this is a push build (release) or a local one.
if [ -n "${IS_PUSH}" ]; then
    RELEASE=true
else
    RELEASE=false
fi
echo "Building image to tag ${REPO_PRJ}:${GITHUB_TAG}..."
docker build -t ${REPO_PRJ}:${GITHUB_TAG} --build-arg DRELEASE=$RELEASE .

if [ -z "${IS_NOT_RUN_UNIT_TESTS}" ] ; then
    # Run unit tests
    echo "Running unit tests..."
    REMOVE_CONTAINER_STRING=" --rm "
    if [ -n "${IS_NOT_REMOVE_UNIT_TEST_CONTAINER}" ] ; then
        REMOVE_CONTAINER_STRING=" "
    fi
    # Pull LFS test fixtures and make them writable for the container user.
    git lfs pull
    chmod -R a+w ${STAGING_ABSOLUTE_PATH}/src/test/resources
    echo docker run ${REMOVE_CONTAINER_STRING} -v ${STAGING_ABSOLUTE_PATH}/src/test/resources:/testdata -t ${REPO_PRJ}:${GITHUB_TAG} bash /root/run_unit_tests.sh
    docker run ${REMOVE_CONTAINER_STRING} -v ${STAGING_ABSOLUTE_PATH}/src/test/resources:/testdata -t ${REPO_PRJ}:${GITHUB_TAG} bash /root/run_unit_tests.sh
    echo " Unit tests passed..."
fi

## Push
if [ -n "${IS_PUSH}" ]; then
    echo "Pushing to ${REPO_PRJ}"
    docker push ${REPO_PRJ}:${GITHUB_TAG}
    echo "Pushing to ${GCR_REPO}"
    docker tag ${REPO_PRJ}:${GITHUB_TAG} ${GCR_REPO}:${GITHUB_TAG}
    gcloud docker -- push ${GCR_REPO}:${GITHUB_TAG}
    # Also advance 'latest' unless -l was given or we built from a hash.
    if [ -z "${IS_NOT_LATEST}" ] && [ -z "${IS_HASH}" ] ; then
        echo "Updating latest tag in ${REPO_PRJ}"
        docker tag ${REPO_PRJ}:${GITHUB_TAG} ${REPO_PRJ}:latest
        docker push ${REPO_PRJ}:latest
        echo "Updating latest tag in ${GCR_REPO}"
        docker tag ${GCR_REPO}:${GITHUB_TAG} ${GCR_REPO}:latest
        gcloud docker -- push ${GCR_REPO}:latest
    fi
else
    echo "Not pushing to dockerhub"
fi

# Return to where we started and clean up the staging clone, if any.
cd ${ORIGINAL_WORKING_DIRECTORY}
if [ -n "$STAGING_DIR" ] ; then
    rm -Rf ${STAGING_DIR}/${STAGING_CLONE_DIR}
fi
|
import os
import torch
import pytest
from torch.utils.data.dataloader import DataLoader
from src.models.model import cnnModel
from src.data.make_dataset import mnistDataset
# Module-level model instance referenced by name inside the eval()'d
# parametrized expressions below.
model = cnnModel()


@pytest.mark.parametrize(
    "test_input, expected",
    [("model.forward(torch.rand((1, 1, 28, 28))).shape", torch.Size([1, 10])),
     ("model.forward(torch.rand((1, 1, 28, 28))).isnan().all().item()", False)]
)
def test_model_architecture(test_input, expected):
    '''
    test model io and if nans are produced

    Each parametrized case is a source string evaluated against the
    module-level ``model``: one checks the output shape for a single
    1x28x28 input, the other that the output is not all-NaN.
    '''
    # NOTE(review): eval() on trusted, hard-coded strings only — a lambda
    # per case would avoid eval entirely; confirm the string form is
    # intentional (it does give readable pytest ids).
    msg = "Either model outputs wrong shape or nans"
    assert eval(test_input) == expected, msg
@pytest.mark.skipif(not os.path.exists('data/processed/train_images.pt'), reason="Data files not found")
def test_model_input_output_shapes(model=cnnModel(), data_path="data/processed", data_prefix="train",
                                   batch_size=64):
    """Check that one batch drawn from the processed MNIST data has the
    expected input, output and label shapes when run through the model.

    Skipped entirely when the processed data files are absent.
    """
    # find stored data and labels tensors
    data_file = data_prefix + "_images.pt"
    labels_file = data_prefix + "_labels.pt"
    data = torch.load(os.path.join(data_path, data_file))
    labels = torch.load(os.path.join(data_path, labels_file))  # removed unused train_labels alias
    # load into dataloader
    data_set = mnistDataset(data, labels)
    data_loader = DataLoader(data_set, batch_size=batch_size, shuffle=True)
    # take first random iteration of data
    input, labels = next(iter(data_loader))
    output = model.forward(input)
    # Bug fix: the expected height was 27, contradicting the assertion message
    # (MNIST images are 28x28, matching the 28x28 tensors used above).
    assert input.shape == torch.Size([batch_size, 1, 28, 28]), "Input shape is not [batch_size, 1, 28, 28]"
    assert output.shape == torch.Size([batch_size, 10]), "Output shape is not [batch_size, 10]"
    assert labels.shape == torch.Size([batch_size]), "Labels shape is not [batch_size]"
|
<gh_stars>0
//package sequenced_tracer_menu_bar;
//
//import org.apache.commons.io.FilenameUtils;
//import sequenced_tracer_menu_bar.star_panel.StarPanel;
//import sequenced_tracer_panel.SequencedTracerPanel;
//
//import javax.imageio.ImageIO;
//import javax.swing.*;
//import java.awt.*;
//import java.awt.event.ActionEvent;
//import java.awt.event.ActionListener;
//import java.awt.image.BufferedImage;
//import java.io.File;
//
//public class SequencedTracerPrintMenuBar extends SequencedTracerMenuBar {
//
// private final JMenuItem PRINT_SEQUENCE_ITEM;
// private final JMenuItem PRINT_STAR_ITEM;
//
// public SequencedTracerPrintMenuBar(SequencedTracerPanel sequencedTracerPanel)
// {
// super(sequencedTracerPanel);
//
// // Create file menu items
//
// PRINT_SEQUENCE_ITEM = new JMenuItem("Print Sequence Trace");
// PRINT_STAR_ITEM = new JMenuItem("Print Sequence Star");
//
// // Assign methods to each file menu item
//
// PRINT_SEQUENCE_ITEM.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// printSequenceTrace();
// }
// });
//
// PRINT_STAR_ITEM.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// printSequenceStar();
// }
// });
//
// // Add menu items to the file menu
//
// FILE_MENU.add(PRINT_SEQUENCE_ITEM);
// FILE_MENU.add(PRINT_STAR_ITEM);
// }
//
// // TODO
// public void printSequenceTrace()
// {
// System.out.println("Print Sequence Trace");
//
// File file;
//
// JFileChooser c = new JFileChooser();
//
// c.showSaveDialog(getParent());
// file = c.getSelectedFile();
// if (file == null) {
// return;
// }
//
// if (FilenameUtils.getExtension(file.getName()).equalsIgnoreCase("jpeg")) {
// // filename is OK as-is
// } else {
// file = new File(file.toString() + ".jpeg"); // append .xml if "foo.jpg.xml" is OK
// file = new File(file.getParentFile(), FilenameUtils.getBaseName(file.getName())+".jpeg"); // ALTERNATIVELY: remove the extension (if any) and replace it with ".xml"
// }
//
// try
// {
// BufferedImage image = new BufferedImage(SEQUENCED_TRACER_PANEL.getWidth() * 2,
// SEQUENCED_TRACER_PANEL.getHeight() * 2,
// BufferedImage.TYPE_INT_RGB);
//
// Graphics2D graphics2D = image.createGraphics();
// graphics2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
// graphics2D.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
// graphics2D.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_PURE);
//
// // TODO
// graphics2D.scale(2.0, 2.0);
//
//
// SEQUENCED_TRACER_PANEL.paint(graphics2D);
//
// String s = SEQUENCED_TRACER_PANEL.getSequenceCode();
// FontMetrics fm = graphics2D.getFontMetrics();
// // int x = image.getWidth() - fm.stringWidth(s) - 5;
// // int y = fm.getHeight();
//
// int x = SEQUENCED_TRACER_PANEL.getWidth() - fm.stringWidth(s) - 5;
// int y = SEQUENCED_TRACER_PANEL.getHeight() - 5;
//
// graphics2D.drawString(s, x, y);
//
// ImageIO.write(image,"jpeg", file);
// }
// catch(Exception exception)
// {
// //code
// }
//
// }
//
// // TODO
// public void printSequenceStar()
// {
// System.out.println("Print Sequence Star");
//
// File file;
//
// JFileChooser c = new JFileChooser();
//
// c.showSaveDialog(getParent());
// file = c.getSelectedFile();
// if (file == null) {
//
// System.out.println("HERE?");
//
//
// return;
// }
//
// System.out.println("Passed the test");
//
// if (FilenameUtils.getExtension(file.getName()).equalsIgnoreCase("jpeg")) {
// // filename is OK as-is
// } else {
// file = new File(file.toString() + ".jpeg"); // append .xml if "foo.jpg.xml" is OK
// file = new File(file.getParentFile(), FilenameUtils.getBaseName(file.getName())+".jpeg"); // ALTERNATIVELY: remove the extension (if any) and replace it with ".xml"
// }
//
// try {
// BufferedImage image = new BufferedImage(SEQUENCED_TRACER_PANEL.getWidth() * 2,
// SEQUENCED_TRACER_PANEL.getHeight() * 2,
// BufferedImage.TYPE_INT_RGB);
//
// Graphics2D graphics2D = image.createGraphics();
// graphics2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
// graphics2D.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
// graphics2D.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_PURE);
//
// // TODO
// graphics2D.scale(2.0, 2.0);
//
// // TODO
// StarPanel starPanel = new StarPanel(SEQUENCED_TRACER_PANEL.getTracesByPositionAndRound());
// starPanel.setPreferredSize(SEQUENCED_TRACER_PANEL.getPreferredSize());
// starPanel.paint(graphics2D);
//
// System.out.println("After painting star");
//
// String s = SEQUENCED_TRACER_PANEL.getSequenceCode();
// FontMetrics fm = graphics2D.getFontMetrics();
// // int x = image.getWidth() - fm.stringWidth(s) - 5;
// // int y = fm.getHeight();
//
// int x = SEQUENCED_TRACER_PANEL.getWidth() - fm.stringWidth(s) - 5;
// int y = SEQUENCED_TRACER_PANEL.getHeight() - 5;
//
// graphics2D.drawString(s, x, y);
//
// ImageIO.write(image, "jpeg", file);
// }
// catch(Exception exception)
// {
// // TODO
// exception.printStackTrace();
// return;
// }
// }
//
//
//}
|
<filename>src/main/java/com/example/test/api/repository/ProducerRepository.java
package com.example.test.api.repository;
import com.example.test.api.model.Producer;
import com.example.test.api.model.Product;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
/**
 * Spring Data repository exposing basic CRUD operations for
 * {@link Producer} entities keyed by their integer id.
 */
@Repository
public interface ProducerRepository extends CrudRepository<Producer, Integer> {
}
|
public class PathsInBinaryTree {

    /**
     * Prints every root-to-leaf path of the given binary tree, one path per
     * line, values space-separated.
     */
    public static void printPaths(Node root) {
        printPathsUtil(root, new int[1000], 0);
    }

    /**
     * Recursive worker: records node values along the current path in
     * {@code paths} (valid up to index {@code pathLen}-1) and prints the
     * buffer whenever a leaf is reached.
     */
    public static void printPathsUtil(Node root, int[] paths, int pathLen) {
        if (root == null) {
            return; // empty subtree: nothing to record
        }
        // Record this node on the current path and extend it.
        paths[pathLen] = root.data;
        pathLen++;
        boolean isLeaf = root.left == null && root.right == null;
        if (isLeaf) {
            // Complete root-to-leaf path: emit it.
            printArray(paths, pathLen);
        } else {
            // Otherwise keep descending into both subtrees.
            printPathsUtil(root.left, paths, pathLen);
            printPathsUtil(root.right, paths, pathLen);
        }
    }

    /** Prints the first {@code n} entries of {@code arr}, each followed by a space. */
    public static void printArray(int[] arr, int n) {
        StringBuilder line = new StringBuilder();
        for (int index = 0; index < n; index++) {
            line.append(arr[index]).append(' ');
        }
        System.out.println(line);
    }

    /** Allocates a fresh leaf node holding {@code data}. */
    public static Node newNode(int data)
    {
        Node leaf = new Node(data);
        leaf.left = null;
        leaf.right = null;
        return leaf;
    }
}
|
<filename>entx/available/extension.go
package available
import (
"entgo.io/ent/entc"
"entgo.io/ent/entc/gen"
)
type (
	// Extension registers this package's code-generation templates with
	// ent's generator via the entc extension mechanism.
	Extension struct {
		entc.DefaultExtension
		templates []*gen.Template
	}

	// ExtensionOption configures an Extension during construction and may
	// return an error to abort it.
	ExtensionOption func(*Extension) error
)

// NewExtension builds an Extension preloaded with AllTemplates, then applies
// each option in order, failing fast on the first error.
func NewExtension(opts ...ExtensionOption) (*Extension, error) {
	ex := &Extension{templates: AllTemplates}
	for _, opt := range opts {
		if err := opt(ex); err != nil {
			return nil, err
		}
	}
	return ex, nil
}

// Templates reports the generator templates carried by this extension.
func (e *Extension) Templates() []*gen.Template {
	return e.templates
}
|
describe("Set formatting attributes and ensure ep_markdown displays properly", function(){
  // create a new pad before each test run
  beforeEach(function(cb){
    helper.newPad(cb);
    this.timeout(60000);
  });

  it("Creates bold section and ensures it is shown as **foo** when clicking Show Markdown", function(done) {
    this.timeout(60000);
    var chrome$ = helper.padChrome$;
    var inner$ = helper.padInner$;
    var $editorContainer = chrome$("#editorcontainer");
    var $editorContents = inner$("div");

    // clear pad
    $editorContents.sendkeys('{selectall}');

    // Type "bold", select it, bold it, then toggle the markdown view.
    inner$("div").first().sendkeys('bold');
    inner$("div").first().sendkeys('{selectall}');
    chrome$('.buttonicon-bold').click();
    chrome$('#options-markdown').click();

    // Bug fix: helper.waitFor is asynchronous — previously the assertion and
    // done() ran immediately, before the condition was ever met. Run them in
    // the .done() callback instead.
    helper.waitFor(function(){
      return (inner$("div").first()[0].textContent === "bold");
    }).done(function(){
      let hasMarkdown = inner$("body").hasClass("markdown");
      // TODO: Use a pseudo selector to ensure the value displayed to user is **bold**
      expect(hasMarkdown).to.be(true);
      done();
    });
  });
});
|
<filename>lib/keen-query/filters.js
'use strict';
const shorthands = require('./shorthands');
/**
 * Coerces a raw filter-value string into its typed form: quoted strings are
 * unquoted, 'true'/'false'/'null' become literals, numeric strings become
 * numbers, and anything else is returned unchanged. When handleList is true
 * the value is split on commas and each element transformed individually.
 */
function transformValue (value, handleList) {
  if (handleList === true) {
    // Map with an explicit lambda: Array#map passes the element index as the
    // second argument, which must not be mistaken for the handleList flag.
    return value.split(/,\s*/g)
      .map(item => transformValue(item));
  }
  // support strings passed in with quote marks
  // Bug fix: the original pattern ("|'') required TWO single quotes, so
  // single-quoted values like 'foo' were never unquoted.
  if (/^("|').*\1$/.test(value)) {
    return value.slice(1, -1);
  }
  if (value === 'true') {
    return true;
  } else if (value === 'false') {
    return false;
  } else if (value === 'null') {
    return null;
  }
  // NaN is the only value not equal to itself, so this keeps non-numeric
  // strings as-is while coercing numeric ones.
  const asNumber = Number(value);
  return asNumber === asNumber ? asNumber : value;
}
/**
 * Parses one shorthand filter expression (e.g. "user.type=staff",
 * "count>5", "tags?a,b", "!deleted") into a Keen-style filter object
 * { property_name, operator, property_value } — or an ARRAY of such
 * objects for the negated-list operator '!?'.
 * Throws when the expression is malformed.
 */
module.exports.parse = function (filter) {
  // Groups: [1] property name (no operator chars), [2] operator, [3] raw value.
  let filterConf = /^([^!~\?><]*)(>=?|<=?|=|!=|~|!~|\?|!\?)(.+)$/.exec(filter);
  if (filterConf) {
    // NOTE(review): group 3 is (.+), so on a successful match filterConf[3]
    // is always non-empty — the two "missing value" branches below look
    // unreachable; confirm before relying on their messages.
    if (!filterConf[1] && !filterConf[3]) {
      throw new Error(`Filter ${filter} must specify a property name to filter on and a value to match e.g. ->filter(user.uuid${filter}abcd)`);
    }
    if (!filterConf[1]) {
      throw new Error(`Filter ${filter} must specify a property name to filter on on the left hand side e.g. ->filter(user.uuid${filter})`);
    }
    if (!filterConf[3]) {
      if (filterConf[2] === '?' || filterConf[2] === '!?') {
        throw new Error(`Filter ${filter} must specify one or more comma-separated values to match on the right hand side e.g ->filter(${filter}apple,pear,cherry)`);
      } else {
        throw new Error(`Filter ${filter} must specify a value to match on the right hand side e.g ->filter(${filter}apple)`);
      }
    }
    // '!?' ("not in") has no direct Keen operator: expand to one '!=' filter
    // per listed value.
    if (filterConf[2] === '!?') {
      // TODO be carefuller around quoted commas
      return filterConf[3]
        .split(',')
        .map(val => {
          return {
            property_name: filterConf[1],
            operator: shorthands.filters['!='].operator,
            property_value: transformValue(val)
          }
        });
    }
    // Ordinary binary operator: look up its Keen equivalent in shorthands.
    return {
      property_name: filterConf[1],
      operator: shorthands.filters[filterConf[2]].operator,
      property_value: transformValue(filterConf[3], shorthands.filters[filterConf[2]].handleList)
    };
  }
  // No binary operator found: treat as an existence test, negated by a
  // leading '!' (e.g. "!deleted" → deleted must not exist).
  const unary = /^(\!)?(.+)$/.exec(filter);
  if (unary) {
    return {
      property_name: unary[2],
      operator: 'exists',
      property_value: !unary[1]
    };
  }
  throw new Error('Filter structure not recognised');
}
/**
 * Serialises a Keen filter object back to its shorthand string form —
 * the inverse of parse() for 'exists', 'in' and the binary operators.
 */
module.exports.unparse = function (filterObj) {
  if (filterObj.operator === 'exists') {
    // Existence filters serialise to the bare property name, prefixed with
    // '!' when the property must NOT exist.
    return (filterObj.property_value === false ? '!' : '') + filterObj.property_name;
  }
  if (filterObj.operator === 'in') {
    // 'in' filters use the '?' shorthand with comma-separated values.
    return filterObj.property_name + '?' + filterObj.property_value.join(',');
  }
  // Binary operators map back through the reverse shorthand table.
  return filterObj.property_name + shorthands.reverseFilters[filterObj.operator] + filterObj.property_value;
}
|
//
// SAMMeCell.h
// SamosWallet
//
// Created by zys on 2018/8/21.
// Copyright © 2018年 zys. All rights reserved.
//
/**
 Table cell for the "Me" page rows: transaction history, system settings,
 and about-us. (Translated from the original Chinese comment.)
 */
#import <UIKit/UIKit.h>

@interface SAMMeCell : UITableViewCell

// Registers this cell class with the given table view for reuse.
+ (void)registerWith:(UITableView *)tableView;
// Fixed row height for this cell type.
+ (CGFloat)cellHeight;
// Sets the cell's title text.
- (void)setCellWithTitle:(NSString *)title;

// Reuse identifier for this cell. NOTE(review): an extern declaration inside
// the @interface block is unusual — consider moving it above the @interface.
extern NSString *const SAMMeCellID;

@end
|
package com.nolanlawson.keepscore.db;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteStatement;
import android.text.TextUtils;
import android.util.SparseArray;
import com.nolanlawson.keepscore.helper.PlayerColor;
import com.nolanlawson.keepscore.util.Pair;
import com.nolanlawson.keepscore.util.StringUtil;
import com.nolanlawson.keepscore.util.UtilLogger;
public class GameDBHelper extends SQLiteOpenHelper {
private static UtilLogger log = new UtilLogger(GameDBHelper.class);

private static final String DB_NAME = "games.db";
private static final int DB_VERSION = 4;

// Table names (schema is created in onCreate, migrated in onUpgrade).
private static final String TABLE_GAMES = "Games";
private static final String TABLE_PLAYER_SCORES = "PlayerScores";

// Column names used across both tables.
private static final String COLUMN_ID = "_id";
private static final String COLUMN_DATE_STARTED = "dateStarted";
private static final String COLUMN_DATE_SAVED = "dateSaved";
private static final String COLUMN_NAME = "name";
private static final String COLUMN_SCORE = "score";
private static final String COLUMN_AUTOSAVED = "autosaved"; // legacy
private static final String COLUMN_PLAYER_NUMBER = "playerNumber";
private static final String COLUMN_GAME_ID = "gameId";
private static final String COLUMN_HISTORY = "history";
private static final String COLUMN_LAST_UPDATE = "lastUpdate";
private static final String COLUMN_HISTORY_TIMESTAMPS = "historyTimestamps";
private static final String COLUMN_COLOR = "color";

// my crazy system for using sqlite's group_concat, since it's not ambiguous to use regular old commas
private static final String GROUP_CONCAT_SEPARATOR = "^__%^%__";
private static final String GROUP_CONCAT_INNER_SEPARATOR = "$__%$%__";

// Games joined to PlayerScores on Games._id = PlayerScores.gameId.
private static final String JOINED_TABLES = TABLE_GAMES + " g join " + TABLE_PLAYER_SCORES + " ps ON " + "g."
        + COLUMN_ID + "=ps." + COLUMN_GAME_ID;

// Column order here presumably must match positional reads in
// convertToGames() — TODO confirm (that method is defined elsewhere).
private static final String[] JOINED_COLUMNS = new String[] {
        "g." + COLUMN_ID,
        "g." + COLUMN_DATE_STARTED,
        "g." + COLUMN_DATE_SAVED,
        "g." + COLUMN_NAME,
        "ps." + COLUMN_ID,
        "ps." + COLUMN_NAME,
        "ps." + COLUMN_SCORE,
        "ps." + COLUMN_PLAYER_NUMBER,
        "ps." + COLUMN_HISTORY,
        "ps." + COLUMN_HISTORY_TIMESTAMPS,
        "ps." + COLUMN_LAST_UPDATE,
        "ps." + COLUMN_COLOR
};
// Per-thread precompiled UPDATE statement for the Games table
// (dateStarted, dateSaved, name by _id).
private ThreadLocal<SQLiteStatement> updateGame = new ThreadLocal<SQLiteStatement>() {
    @Override
    protected SQLiteStatement initialValue() {
        String sql = "update " + TABLE_GAMES + " set " + COLUMN_DATE_STARTED + "=?," + COLUMN_DATE_SAVED + "=?,"
                + COLUMN_NAME + "=? " + "where " + COLUMN_ID + "=?";
        return db.compileStatement(sql);
    }
};

// Per-thread precompiled UPDATE statement for the PlayerScores table.
private ThreadLocal<SQLiteStatement> updatePlayerScore = new ThreadLocal<SQLiteStatement>() {
    @Override
    protected SQLiteStatement initialValue() {
        String sql = "update " + TABLE_PLAYER_SCORES + " set " + COLUMN_NAME + "=?," + COLUMN_SCORE + "=?,"
                + COLUMN_PLAYER_NUMBER + "=?," + COLUMN_HISTORY + "=?," + COLUMN_HISTORY_TIMESTAMPS + "=?,"
                + COLUMN_LAST_UPDATE + "=?,"
                + COLUMN_COLOR + "=? "
                + "where "
                + COLUMN_ID + "=?";
        return db.compileStatement(sql);
    }
};

// Single writable database handle shared by all helper methods.
private SQLiteDatabase db;

public GameDBHelper(Context context) {
    super(context, DB_NAME, null, DB_VERSION);
    // Eagerly open the writable database so every method can use it directly.
    // NOTE(review): this does disk I/O in the constructor — confirm callers
    // never construct this helper on the UI thread.
    db = getWritableDatabase();
}
@Override
public void onCreate(SQLiteDatabase db) {
    // Games table: one row per game (name, legacy autosave flag, timestamps).
    String createSql1 = "create table if not exists " + TABLE_GAMES + " (" + COLUMN_ID
            + " integer not null primary key autoincrement, " + COLUMN_NAME + " text, " + COLUMN_AUTOSAVED
            + " int not null, " + COLUMN_DATE_STARTED + " int not null, " + COLUMN_DATE_SAVED + " int not null);";
    db.execSQL(createSql1);
    // PlayerScores table: one row per player per game, linked via gameId.
    String createSql2 = "create table if not exists " + TABLE_PLAYER_SCORES + " (" + COLUMN_ID
            + " integer not null primary key autoincrement, " + COLUMN_NAME + " text not null, " + COLUMN_SCORE
            + " int not null, " + COLUMN_PLAYER_NUMBER + " int not null, " + COLUMN_HISTORY + " text, "
            + COLUMN_LAST_UPDATE + " int not null default 0, "
            + COLUMN_HISTORY_TIMESTAMPS + " text, "
            + COLUMN_COLOR + " string, "
            + COLUMN_GAME_ID + " int not null);";
    db.execSQL(createSql2);
    // Index to speed up the Games <-> PlayerScores join on gameId.
    String indexSql1 = "create index if not exists index_game_id on " + TABLE_PLAYER_SCORES + " (" + COLUMN_GAME_ID
            + ");";
    db.execSQL(indexSql1);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    // Incremental migrations: each "oldVersion <= N" step upgrades schema
    // version N to N+1, so older installs fall through every block in order.
    if (oldVersion <= 1) {
        // add an index for the startTime
        String index = "create index if not exists index_date_started on " + TABLE_GAMES + "(" + COLUMN_DATE_STARTED
                + ");";
        db.execSQL(index);
    }
    if (oldVersion <= 2) {
        // add a lastUpdate time for each playerScore
        String addColumn = "alter table " + TABLE_PLAYER_SCORES + " add column " + COLUMN_LAST_UPDATE
                + " int not null default 0";
        db.execSQL(addColumn);
    }
    if (oldVersion <= 3) {
        // add history timestamps and color
        db.execSQL("alter table " + TABLE_PLAYER_SCORES + " add column " + COLUMN_HISTORY_TIMESTAMPS
                + " text;");
        // NOTE(review): declared type "string" is non-standard SQLite (it does
        // not get TEXT affinity) but must stay as-is to match onCreate.
        db.execSQL("alter table " + TABLE_PLAYER_SCORES + " add column " + COLUMN_COLOR
                + " string;");
        // older versions of keepscore only had 8 players, and there are 16 colors, so using the player
        // number as an ordinal is fine here
        db.execSQL("update " + TABLE_PLAYER_SCORES + " set " + COLUMN_COLOR + "=" + COLUMN_PLAYER_NUMBER + ";");
    }
}
/**
 * Return true if a game with that dateStarted value exists.
 *
 * @param dateStarted epoch-millis start time to look up
 * @return true if at least one Games row has this dateStarted
 */
public boolean existsByDateStarted(long dateStarted) {
    synchronized (GameDBHelper.class) {
        Cursor cursor = null;
        try {
            // Use the shared column constant (instead of a hard-coded
            // "dateStarted" literal) and bind the value as a selection arg
            // rather than concatenating it into the SQL.
            cursor = db.query(TABLE_GAMES, new String[] { COLUMN_ID },
                    COLUMN_DATE_STARTED + "=?",
                    new String[] { String.valueOf(dateStarted) }, null,
                    null, null);
            return cursor.moveToNext();
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
    }
}
/**
 * Loads a single game (with its player scores) by id.
 *
 * @param gameId the Games._id to look up
 * @return the game, or null if no row matches
 */
public Game findGameById(int gameId) {
    synchronized (GameDBHelper.class) {
        Cursor cursor = null;
        try {
            // gameId is an int, so inlining it in the WHERE clause cannot inject.
            String where = "g." + COLUMN_ID + "=" + gameId;
            cursor = db.query(JOINED_TABLES, JOINED_COLUMNS, where, null, null, null, null);
            List<Game> result = convertToGames(cursor);
            return result.isEmpty() ? null : result.get(0);
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
    }
}
/** Returns the number of rows in the Games table. */
public int findGameCount() {
    synchronized (GameDBHelper.class) {
        // Ask SQLite for the count directly instead of fetching rows.
        Cursor countCursor = null;
        try {
            countCursor = db.query(TABLE_GAMES, new String[] { "count(" + COLUMN_ID + ")" },
                    null, null, null, null, null);
            return countCursor.moveToNext() ? countCursor.getInt(0) : 0;
        } finally {
            if (countCursor != null) {
                countCursor.close();
            }
        }
    }
}
/**
 * Returns the id of the game with the newest dateSaved, or -1 when the
 * Games table is empty.
 */
public int findMostRecentGameId() {
    synchronized (GameDBHelper.class) {
        Cursor cursor = null;
        try {
            // Newest save first; the first row is the most recent game.
            String orderBy = COLUMN_DATE_SAVED + " desc";
            cursor = db.query(TABLE_GAMES, new String[] { COLUMN_ID }, null, null, null, null, orderBy);
            if (cursor.moveToNext()) {
                return cursor.getInt(0);
            }
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
    }
    // Fell through: no rows in the Games table.
    return -1;
}
/**
 * Loads the most recently saved game, or null when no games exist.
 */
public Game findMostRecentGame() {
    synchronized (GameDBHelper.class) {
        Cursor cursor = null;
        try {
            // Raw join query ordered by save time, newest first.
            // NOTE(review): "limit 1" limits JOINED rows (player-score rows),
            // so a multi-player game would come back with only one
            // PlayerScore — confirm this is intended.
            String sql = new StringBuilder("select ").append(TextUtils.join(",", JOINED_COLUMNS)).append(" from ")
                    .append(JOINED_TABLES).append(" order by ").append(COLUMN_DATE_SAVED).append(" desc ")
                    .append(" limit 1").toString();
            cursor = db.rawQuery(sql, null);
            List<Game> result = convertToGames(cursor);
            return result.isEmpty() ? null : result.get(0);
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
    }
}
/**
 * Save a game, updating its 'dateSaved' value to the current time.
 *
 * @param game the game to persist (its id may be assigned on first save)
 */
public void saveGame(Game game) {
    saveGame(game, true);
}

/**
 * Save a game, optionally updating its 'dateSaved' value.
 *
 * @param game            the game to persist
 * @param updateDateSaved when true, stamp the game with the current time
 */
public void saveGame(Game game, boolean updateDateSaved) {
    synchronized (GameDBHelper.class) {
        // One transaction covers the game row plus all player-score rows.
        db.beginTransaction();
        try {
            saveGameWithinTransaction(game, updateDateSaved);
            db.setTransactionSuccessful();
        } finally {
            db.endTransaction();
        }
    }
}

// Inserts or updates the game row and then all of its player scores.
// Must be called inside an open transaction (see saveGame).
private void saveGameWithinTransaction(Game game, boolean updateDateSaved) {
    long dateSaved = updateDateSaved ? System.currentTimeMillis() : game.getDateSaved();
    game.setDateSaved(dateSaved);
    if (game.getId() != -1) {
        // game was already saved, so try to overwrite
        updateGame(game.getId(), game.getDateStarted(), game.getDateSaved(), game.getName());
    } else {
        // else create a new row in the table
        // Ids are hand-allocated as max(_id)+1; callers serialize writes by
        // synchronizing on GameDBHelper.class (see saveGame).
        int newGameId = getMaxGameId() + 1;
        ContentValues contentValues = new ContentValues();
        contentValues.put(COLUMN_DATE_STARTED, game.getDateStarted());
        contentValues.put(COLUMN_DATE_SAVED, dateSaved);
        contentValues.put(COLUMN_NAME, game.getName());
        contentValues.put(COLUMN_ID, newGameId);
        contentValues.put(COLUMN_AUTOSAVED, 1); // legacy "autosaved" column
        // that must be specified
        db.insert(TABLE_GAMES, null, contentValues);
        game.setId(newGameId);
        log.d("new game id is %s", newGameId);
    }
    savePlayerScores(game.getId(), game.getPlayerScores());
}
// Largest id currently present in the player scores table; 0 when the table
// is empty (max() yields a single NULL row, which getInt reads as 0).
private int getMaxPlayerScoreId() {
    String[] projection = { "max(" + COLUMN_ID + ")" };
    Cursor c = db.query(TABLE_PLAYER_SCORES, projection, null, null, null, null, null);
    try {
        return c.moveToNext() ? c.getInt(0) : 0;
    } finally {
        c.close();
    }
}
// Largest id currently present in the games table; 0 when the table is
// empty (max() yields a single NULL row, which getInt reads as 0).
private int getMaxGameId() {
    String[] projection = { "max(" + COLUMN_ID + ")" };
    Cursor c = db.query(TABLE_GAMES, projection, null, null, null, null, null);
    try {
        return c.moveToNext() ? c.getInt(0) : 0;
    } finally {
        c.close();
    }
}
// Upserts all player scores for a game: existing scores (id != -1) are
// updated in place, new ones are inserted with sequentially allocated ids.
// Mutates each newly inserted PlayerScore by setting its id.
private void savePlayerScores(int gameId, List<PlayerScore> playerScores) {
    synchronized (GameDBHelper.class) {
        int newId = -1;
        for (PlayerScore playerScore : playerScores) {
            // History is stored as two joined strings: deltas and timestamps.
            Pair<String, String> historyAsStrings = Delta.toJoinedStrings(playerScore.getHistory());
            if (playerScore.getId() != -1) {
                // already exists; update
                updatePlayerScore(playerScore.getId(), playerScore.getName(), playerScore.getScore(),
                        playerScore.getPlayerNumber(), historyAsStrings.getFirst(),
                        historyAsStrings.getSecond(), playerScore.getLastUpdate(),
                        PlayerColor.serialize(playerScore.getPlayerColor()));
            } else {
                // else insert new rows in the table
                // Query max(id) only once per batch, then hand out consecutive ids.
                if (newId == -1) {
                    newId = getMaxPlayerScoreId() + 1;
                } else {
                    newId++;
                }
                ContentValues values = new ContentValues();
                values.put(COLUMN_ID, newId);
                values.put(COLUMN_GAME_ID, gameId);
                values.put(COLUMN_HISTORY, historyAsStrings.getFirst());
                values.put(COLUMN_HISTORY_TIMESTAMPS, historyAsStrings.getSecond());
                values.put(COLUMN_NAME, playerScore.getName());
                values.put(COLUMN_PLAYER_NUMBER, playerScore.getPlayerNumber());
                values.put(COLUMN_SCORE, playerScore.getScore());
                values.put(COLUMN_COLOR, PlayerColor.serialize(playerScore.getPlayerColor()));
                values.put(COLUMN_LAST_UPDATE, playerScore.getLastUpdate());
                db.insert(TABLE_PLAYER_SCORES, null, values);
                // set the new id on the PlayerScore
                playerScore.setId(newId);
                log.d("new playerScore id is %s", newId);
            }
        }
    }
}
/**
 * Loads every game with its player scores, ordered by save date ascending.
 */
public List<Game> findAllGames() {
    synchronized (GameDBHelper.class) {
        Cursor cursor = db.query(JOINED_TABLES, JOINED_COLUMNS, null, null, null, null,
                COLUMN_DATE_SAVED);
        try {
            return convertToGames(cursor);
        } finally {
            cursor.close();
        }
    }
}
// Builds lightweight summaries (id, name, dateSaved, ordered player names,
// round count) for every game, one SQL query for all games.
public List<GameSummary> findAllGameSummaries() {
    synchronized (GameDBHelper.class) {
        String[] columns = {
                "g." + COLUMN_ID,
                "g." + COLUMN_NAME,
                "g." + COLUMN_DATE_SAVED,
                // player names; the "separator" is a trick to ensure that we can cleanly separate the response,
                // and put it into the proper order, since group_concat is always unordered in sqlite
                "group_concat((ps.name || '" + GROUP_CONCAT_INNER_SEPARATOR +"' || ps.playerNumber), '" + GROUP_CONCAT_SEPARATOR + "')",
                "max(length(ps.history) - length(replace(ps.history, ',', '')) + 1)" // num rounds
        };
        String table = TABLE_GAMES + " g join " + TABLE_PLAYER_SCORES + " ps " +
                " on g." + COLUMN_ID + " = ps." + COLUMN_GAME_ID;
        String groupBy = "g." + COLUMN_ID;
        Cursor cursor = null;
        try {
            cursor = db.query(table, columns, null, null, groupBy, null, null);
            List<GameSummary> result = new ArrayList<GameSummary>();
            // re-use sparse array for performance
            SparseArray<String> playerNumbersToNames = new SparseArray<String>();
            while (cursor.moveToNext()) {
                GameSummary gameSummary = new GameSummary();
                gameSummary.setId(cursor.getInt(0));
                gameSummary.setName(cursor.getString(1));
                gameSummary.setDateSaved(cursor.getLong(2));
                // Column 3 is "name<inner-sep>playerNumber" entries joined by the outer separator.
                String playerNumbersAndNames = cursor.getString(3);
                // sort by player number, get player names in order (no way to do this in sqlite, unfortunately)
                playerNumbersToNames.clear();
                for (String playerNumberAndName : StringUtil.split(playerNumbersAndNames, GROUP_CONCAT_SEPARATOR)) {
                    int idx = playerNumberAndName.indexOf(GROUP_CONCAT_INNER_SEPARATOR);
                    String playerName = playerNumberAndName.substring(0, idx);
                    int playerNumber = Integer.parseInt(playerNumberAndName.substring(
                            idx + GROUP_CONCAT_INNER_SEPARATOR.length()));
                    playerNumbersToNames.put(playerNumber, playerName);
                }
                // SparseArray iterates keys in ascending order, so this yields
                // names sorted by player number.
                List<String> playerNames = new ArrayList<String>(playerNumbersToNames.size());
                for (int i = 0, len = playerNumbersToNames.size(); i < len; i++) {
                    int playerNumber = playerNumbersToNames.keyAt(i);
                    playerNames.add(playerNumbersToNames.get(playerNumber));
                }
                gameSummary.setPlayerNames(playerNames);
                gameSummary.setNumRounds(cursor.getInt(4));
                result.add(gameSummary);
            }
            return result;
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
    }
}
/**
 * Deletes a game and all of its player scores in a single transaction.
 *
 * @param game the game to remove; only its id is read
 */
public void deleteGame(Game game) {
    synchronized (GameDBHelper.class) {
        // Begin OUTSIDE the try: if beginTransaction() itself throws, the old
        // code would still call endTransaction() without a matching begin.
        // This also matches the bracketing style used by saveGame().
        db.beginTransaction();
        try {
            int id = game.getId();
            db.delete(TABLE_GAMES, COLUMN_ID + "=" + id, null);
            db.delete(TABLE_PLAYER_SCORES, COLUMN_GAME_ID + "=" + id, null);
            db.setTransactionSuccessful();
        } finally {
            db.endTransaction();
        }
    }
}
/**
 * Renames the game identified by gameId; other columns are untouched.
 */
public void updateGameName(int gameId, String newName) {
    synchronized (GameDBHelper.class) {
        ContentValues updated = new ContentValues(1);
        updated.put(COLUMN_NAME, newName);
        db.update(TABLE_GAMES, updated, COLUMN_ID + "=" + gameId, null);
    }
}
/**
 * Returns every distinct player name ever recorded, in database order.
 */
public List<String> findDistinctPlayerNames() {
    synchronized (GameDBHelper.class) {
        // "distinct" is folded into the projection string.
        Cursor cursor = db.query(TABLE_PLAYER_SCORES, new String[] { "distinct " + COLUMN_NAME },
                null, null, null, null, null);
        List<String> names = new ArrayList<String>();
        try {
            while (cursor.moveToNext()) {
                names.add(cursor.getString(0));
            }
        } finally {
            cursor.close();
        }
        return names;
    }
}
/**
 * Deletes several games (and their player scores) in a single transaction.
 *
 * @param gameIds ids of the games to remove
 */
public void deleteGames(Collection<Integer> gameIds) {
    synchronized (GameDBHelper.class) {
        // Begin OUTSIDE the try: if beginTransaction() itself throws, the old
        // code would still call endTransaction() without a matching begin.
        // This also matches the bracketing style used by saveGame().
        db.beginTransaction();
        try {
            // Safe to inline: gameIds are integers, not user-supplied strings.
            String where = " in ("
                    + TextUtils.join(",", gameIds) + ")";
            db.delete(TABLE_GAMES, COLUMN_ID + where, null);
            db.delete(TABLE_PLAYER_SCORES, COLUMN_GAME_ID + where, null);
            db.setTransactionSuccessful();
        } finally {
            db.endTransaction();
        }
    }
}
// Converts a joined games/playerScores cursor into Game objects. The cursor
// is expected to be ordered so that rows for the same game are contiguous
// (column 0 is the game id; columns 4+ are playerScore fields — presumably
// matching JOINED_COLUMNS order; verify against that constant).
private List<Game> convertToGames(Cursor cursor) {
    List<Game> result = new ArrayList<Game>();
    Game currentGame = null;
    while (cursor.moveToNext()) {
        int currentId = cursor.getInt(0);
        if (currentGame == null || currentGame.getId() != currentId) { // new
            // Game
            currentGame = new Game();
            currentGame.setId(currentId);
            currentGame.setDateStarted(cursor.getLong(1));
            currentGame.setDateSaved(cursor.getLong(2));
            currentGame.setName(cursor.getString(3));
            result.add(currentGame);
        }
        List<PlayerScore> playerScores = new ArrayList<PlayerScore>();
        // build up all the PlayerScores
        do {
            // Inner loop consumes rows until the game id changes; the extra
            // moveToPrevious() rewinds one row so the outer loop re-reads it.
            if (cursor.getInt(0) != currentId) {
                cursor.moveToPrevious(); // went too far
                break;
            }
            PlayerScore playerScore = new PlayerScore();
            playerScore.setId(cursor.getInt(4));
            playerScore.setName(cursor.getString(5));
            playerScore.setScore(cursor.getLong(6));
            playerScore.setPlayerNumber(cursor.getInt(7));
            // History columns may be NULL in the database; normalize to "".
            playerScore.setHistory(Delta.fromJoinedStrings(
                    StringUtil.nullToEmpty(cursor.getString(8)),
                    StringUtil.nullToEmpty(cursor.getString(9))));
            playerScore.setLastUpdate(cursor.getLong(10));
            playerScore.setPlayerColor(PlayerColor.deserialize(cursor.getString(11)));
            playerScores.add(playerScore);
        } while (cursor.moveToNext());
        Collections.sort(playerScores, PlayerScore.sortByPlayerNumber());
        currentGame.setPlayerScores(playerScores);
    }
    return result;
}
/**
 * Convenience method for updating games, using a cached prepared statement
 * for performance. Bind indices must match the statement's parameter order:
 * (1) dateStarted, (2) dateSaved, (3) name, (4) id in the WHERE clause.
 *
 * @param id          id of the game row to update
 * @param dateStarted epoch millis the game was started
 * @param dateSaved   epoch millis the game was last saved
 * @param name        game name; may be null
 */
private void updateGame(int id, long dateStarted, long dateSaved, String name) {
    SQLiteStatement statement = updateGame.get();
    statement.bindLong(1, dateStarted);
    statement.bindLong(2, dateSaved);
    bindStringOrNull(statement, 3, name);
    statement.bindLong(4, id);
    statement.execute();
}
// Updates one playerScore row via a cached prepared statement. Bind indices
// must match the statement's parameter order: name, score, playerNumber,
// history, historyTimestamps, lastUpdate, color, then id in the WHERE clause.
// Nullable string columns go through bindStringOrNull; color is assumed
// non-null here (bindString would throw otherwise).
private void updatePlayerScore(int id, String name, long score, int playerNumber, String history,
        String historyTimestamps, long lastUpdate, String color) {
    SQLiteStatement statement = updatePlayerScore.get();
    bindStringOrNull(statement, 1, name);
    statement.bindLong(2, score);
    statement.bindLong(3, playerNumber);
    bindStringOrNull(statement, 4, history);
    bindStringOrNull(statement, 5, historyTimestamps);
    statement.bindLong(6, lastUpdate);
    statement.bindString(7, color);
    statement.bindLong(8, id);
    statement.execute();
}
// Binds str at the given parameter index, mapping a Java null to SQL NULL.
private void bindStringOrNull(SQLiteStatement statement, int index, String str) {
    if (str != null) {
        statement.bindString(index, str);
    } else {
        statement.bindNull(index);
    }
}
}
|
/**
* Created by secondwtq on 15-8-19.
*/
// Host-driven test script: CopyTest / CopyTestCopied / print / reproto /
// attachNew are provided by the embedding runtime, not defined here.
print('Test started.');
print('Test name: copy and attach');
print();
print('Create a CopyTestCopied object cpt.');
var cpt = new CopyTestCopied();
print('Deriving CopyTestDerived from CopyTest, add function newFunction()')
// Subclass CopyTest by repointing the prototype chain via reproto.
function CopyTestDerived () { }
CopyTestDerived.prototype = CopyTest.reproto(CopyTest.prototype);
CopyTestDerived.prototype.newFunction = function () {
    print("I'm the new function!"); };
CopyTestDerived.prototype.constructor = CopyTestDerived;
print("Create an object e with prototype of CopyTestDerived.");
var e = CopyTest.reproto(CopyTestDerived.prototype);
print("Attach a new CopyTest object created with cpt to e.");
CopyTest.attachNew.call(e, cpt);
// Dump identity/prototype information for e.
print("e: ", e, ", e.constructor: ", e.constructor,
    ", e.prototype: ", e.prototype, ", e.__proto__: ", e.__proto__,
    ", e instanceof CopyTest ", e instanceof CopyTest, ", e instance of CopyTestDerived ", e instanceof CopyTestDerived);
print('Calling e.newFunction()');
e.newFunction();
print();
print('Calling e.testCopy(cpt)');
e.testCopy(cpt);
print();
print('Create a CopyTest object cp with cpt');
print('ctor prototype: CopyTest(CopyTestCopied)');
var cp = new CopyTest(cpt);
print("cp: ", cp, ", cp.constructor: ", cp.constructor,
    ", cp.prototype: ", cp.prototype, ", cp.__proto__: ", cp.__proto__,
    ", cp instanceof CopyTest ", cp instanceof CopyTest);
print();
print('Calling cp.testCopy(cpt)');
cp.testCopy(cpt);
print();
print("cp: ", cp, ", cp.constructor: ", cp.constructor,
    ", cp.prototype: ", cp.prototype, ", cp.__proto__: ", cp.__proto__,
    ", cp instanceof CopyTest ", cp instanceof CopyTest, ", cp instanceof CopyTestDerived ", cp instanceof CopyTestDerived);
print("cp.newFunction = ", cp.newFunction, ", e.newFunction = ", e.newFunction);
print();
print("Test end.");
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RFnn(RPackage):
    """Cover-tree and kd-tree fast k-nearest neighbor search algorithms and
    related applications including KNN classification, regression and
    information measures are implemented."""
    homepage = "https://cran.r-project.org/web/packages/FNN/index.html"
    url = "https://cran.r-project.org/src/contrib/FNN_1.1.tar.gz"
    list_url = "https://cran.rstudio.com/src/contrib/Archive/FNN"
    # Each version pairs a release string with its tarball md5 checksum.
    version('1.1', '8ba8f5b8be271785593e13eae7b8c393')
    version('1.0', 'e9a47dc69d1ba55165be0877b8443fe0')
    version('0.6-4', '1c105df9763ceb7b13989cdbcb542fcc')
    version('0.6-3', 'f0f0184e50f9f30a36ed5cff24d6cff2')
    version('0.6-2', '20648ba934ea32b1b00dafb75e1a830c')
    # R interpreter constrained to the 3.4.x series.
    depends_on('r@3.4.0:3.4.9')
    depends_on('r-mvtnorm', type=('build', 'run'))
    depends_on('r-chemometrics', type=('build', 'run'))
|
package seedu.address.logic.commands;
import static java.util.Objects.requireNonNull;
import java.io.File;
import java.io.IOException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
import seedu.address.commons.core.LogsCenter;
import seedu.address.commons.core.Messages;
import seedu.address.commons.core.index.Index;
import seedu.address.logic.CommandHistory;
import seedu.address.logic.commands.exceptions.CommandException;
import seedu.address.model.Model;
import seedu.address.model.event.Date;
import seedu.address.model.event.Event;
import seedu.address.model.event.EventContainsEventIdPredicate;
import seedu.address.model.event.EventId;
import seedu.address.model.record.Hour;
import seedu.address.model.record.Record;
import seedu.address.model.record.RecordContainsNonZeroHourPredicate;
import seedu.address.model.record.RecordContainsVolunteerIdPredicate;
import seedu.address.model.volunteer.Name;
import seedu.address.model.volunteer.Volunteer;
import seedu.address.model.volunteer.VolunteerId;
/**
 * Exports a PDF document with data on a volunteer's involvement with the organisation.
 */
public class ExportCertCommand extends Command {
    public static final String COMMAND_WORD = "exportcert";
    public static final String MESSAGE_USAGE = COMMAND_WORD + ": Exports a PDF certificate for the volunteer at "
            + "the specified index in the displayed volunteer list.\n"
            + "Parameters: INDEX (must be a positive integer)\n"
            + "Example: " + COMMAND_WORD + " 1";
    public static final String MESSAGE_ARGUMENTS = "Index: %1$d";
    public static final String MESSAGE_EXPORT_CERT_SUCCESS = "Certificate exported for volunteer at INDEX %1$d to ";
    public static final String MESSAGE_EXPORT_FAILED = "Certificate export failed, please try again";
    public static final String PDF_SAVE_PATH = System.getProperty("user.dir") + File.separator + "Certs"
            + File.separator;
    public static final String PDF_ALT_SAVE_PATH = System.getProperty("user.home") + File.separator + "Desktop"
            + File.separator;
    public static final String MESSAGE_VOLUNTEER_NO_RECORD = "Selected volunteer has no valid event records. "
            + "Try adding some records or updating current records to set a positive non-zero hour value.";
    private static final java.util.logging.Logger logger = LogsCenter.getLogger(ExportCertCommand.class);
    // Effective output directory; falls back to PDF_ALT_SAVE_PATH when the
    // "Certs" directory cannot be created next to the jar.
    private static String currentSavePath = PDF_SAVE_PATH;
    private final Index index;
    /**
     * @param index of the volunteer in the filtered volunteer list whose certificate is to be generated and exported
     */
    public ExportCertCommand(Index index) {
        requireNonNull(index);
        this.index = index;
        // Create a folder in user's working directory to export certificates to, if possible
        File exportDir = new File(currentSavePath);
        if (!exportDir.exists()) {
            try {
                exportDir.mkdir();
                logger.info("Creating a new folder 'Certs' in user's current working directory.");
            } catch (SecurityException se) {
                logger.warning("Couldn't create a relative export path next to jar file. "
                        + "Defaulting to user's Desktop.");
                currentSavePath = PDF_ALT_SAVE_PATH;
            }
        }
    }
    public static String getCurrentSavePath() {
        return currentSavePath;
    }
    @Override
    public CommandResult execute(Model model, CommandHistory history) throws CommandException {
        requireNonNull(model);
        List<Volunteer> lastShownList = model.getFilteredVolunteerList();
        // Handle case where the index input exceeds or equals the size of the last displayed list
        if (index.getZeroBased() >= lastShownList.size()) {
            throw new CommandException(Messages.MESSAGE_INVALID_VOLUNTEER_DISPLAYED_INDEX);
        }
        // Get the Volunteer object whom the index corresponds to
        Volunteer selectedVolunteer = lastShownList.get(index.getZeroBased());
        // Return CommandException if volunteer has no records
        if (!hasNonZeroEventRecords(model, selectedVolunteer)) {
            logger.info("Volunteer has no records.");
            throw new CommandException(MESSAGE_VOLUNTEER_NO_RECORD);
        }
        // Try creating and exporting the PDF for the selected volunteer
        try {
            createPdf(model, selectedVolunteer);
        } catch (IOException ioe) {
            throw new CommandException(MESSAGE_EXPORT_FAILED);
        }
        // Return a success result
        return new CommandResult(String.format(MESSAGE_EXPORT_CERT_SUCCESS + currentSavePath, index.getOneBased()));
    }
    /**
     * Checks if a {@code volunteer} has any event {@code records}.
     * @param model from which the {@code volunteer}'s {@code records} will be retrieved, if present
     * @param volunteer who's presence of event {@code records} is to be checked
     * @return true if {@code volunteer} has {@code records}, and false otherwise
     */
    private boolean hasNonZeroEventRecords(Model model, Volunteer volunteer) {
        VolunteerId volunteerId = volunteer.getVolunteerId();
        // Attempt to retrieve a list of the volunteer's records with non-zero hour value
        List<Record> eventRecords = model.getFilteredRecordList()
                .filtered(new RecordContainsVolunteerIdPredicate(volunteerId))
                .filtered(new RecordContainsNonZeroHourPredicate());
        return !eventRecords.isEmpty();
    }
    /**
     * Creates and exports a PDF document containing a {@code volunteer's} data.
     * @param model from which the volunteer's event records will be accessed
     * @param volunteer who's data is to be input into the PDF document
     * @throws IOException if the PDF cannot be written to the save path
     */
    private void createPdf(Model model, Volunteer volunteer) throws IOException {
        // Retrieve the selected volunteer's attributes
        VolunteerId volunteerId = volunteer.getVolunteerId();
        Name volunteerName = volunteer.getName();
        // Retrieve the volunteer's event records
        List<Record> eventRecords = model.getFilteredRecordList()
                .filtered(new RecordContainsVolunteerIdPredicate(volunteerId));
        // Create the new document; close it in finally so a failure while
        // writing content does not leak the document's resources.
        PDDocument doc = new PDDocument();
        try {
            // Create a new page and add it to the document
            PDPage page = new PDPage();
            doc.addPage(page);
            // Setup a new content stream to write to a page
            PDPageContentStream contStream = new PDPageContentStream(doc, page);
            // Populate the PDF with necessary details
            contStream.beginText();
            contStream.setLeading(20f);
            // Set title font
            PDFont titleFont = PDType1Font.TIMES_BOLD_ITALIC;
            float titleFontSize = 24;
            contStream.setFont(titleFont, titleFontSize);
            // Input title to the center of the page
            String title = "Certificate of Recognition";
            float titleWidth = titleFont.getStringWidth(title) * titleFontSize / 1000f;
            contStream.newLineAtOffset(page.getMediaBox().getWidth() / 2 - titleWidth / 2, 740);
            contStream.showText(title);
            contStream.newLine();
            contStream.newLine();
            // Volunteer Name, ID and current date section
            contStream.setFont(PDType1Font.TIMES_BOLD_ITALIC, 14);
            contStream.newLineAtOffset(-(page.getMediaBox().getWidth() / 2 - titleWidth / 2) + 20, 0);
            String volunteerNameLine = "Volunteer Name: " + volunteerName;
            contStream.showText(volunteerNameLine);
            contStream.newLine();
            String volunteerIdLine = "Volunteer ID: " + volunteerId;
            contStream.showText(volunteerIdLine);
            contStream.newLine();
            contStream.showText("Date: " + String.valueOf(LocalDate.now().format(DateTimeFormatter
                    .ofPattern("dd-MM-yyyy"))));
            contStream.newLine();
            contStream.newLine();
            // Reduce the leading for main body of certificate
            contStream.setLeading(17f);
            // Standardised formality text
            String formalityTextLine1 = "To whomever it may concern,";
            contStream.showText(formalityTextLine1);
            contStream.newLine();
            contStream.newLine();
            String formalityTextLine2 = "This is to certify " + volunteerName
                    + "'s contributions to our organisation via the following event(s):";
            contStream.showText(formalityTextLine2);
            contStream.newLine();
            // Event contribution information
            contStream.newLine();
            for (Record r: eventRecords) {
                // Information from event record
                Hour eventHours = r.getHour();
                EventId eventId = r.getEventId();
                // Get the exact corresponding event object and extract information from it
                List<Event> filteredEventList = model.getFilteredEventList()
                        .filtered(new EventContainsEventIdPredicate(eventId));
                assert(filteredEventList.size() == 1); // Make sure no duplicate events
                Event event = filteredEventList.get(0);
                seedu.address.model.event.Name eventName = event.getName();
                Date startDate = event.getStartDate();
                Date endDate = event.getEndDate();
                String eventEntryLine = eventName + " - " + eventHours + " hour(s) from " + startDate + " to " + endDate;
                contStream.showText("\u2022 "); // bullet
                contStream.showText(eventEntryLine);
                contStream.newLine();
            }
            contStream.newLine();
            String appreciationLine = "We greatly appreciate " + volunteerName
                    + "'s services rendered to our organisation.";
            contStream.showText(appreciationLine);
            contStream.newLine();
            contStream.newLine();
            String regardsLine = "Regards,";
            contStream.showText(regardsLine);
            contStream.newLine();
            contStream.newLine();
            contStream.newLine();
            // Line for user to manually sign off on the certificate
            contStream.showText("___________________");
            // Close the content stream
            contStream.endText();
            contStream.close();
            // Save document as <volunteerName>_<volunteerId>.pdf to the save path.
            // BUG FIX: previously saved to PDF_SAVE_PATH unconditionally, ignoring
            // the Desktop fallback in currentSavePath (and contradicting the
            // success message, which reports currentSavePath).
            doc.save(currentSavePath + volunteerName + "_" + volunteerId + ".pdf");
        } finally {
            // Close the document
            doc.close();
        }
    }
    @Override
    public boolean equals(Object other) {
        // Case: Both same object
        if (other == this) {
            return true;
        }
        // Case: Handle null, not instance of
        if (!(other instanceof ExportCertCommand)) {
            return false;
        }
        // Compare internal fields
        ExportCertCommand e = (ExportCertCommand) other;
        return index.equals(e.index);
    }
}
|
/*
* Copyright (C) 2017-2017 Alibaba Group Holding Limited
*/
package action
import (
"github.com/cppforlife/bosh-cpi-go/apiv1"
"bosh-alicloud-cpi/alicloud"
"bosh-alicloud-cpi/registry"
"github.com/denverdino/aliyungo/ecs"
"fmt"
)
// DetachDiskMethod implements the CPI "detach_disk" action.
type DetachDiskMethod struct {
	CallContext                       // error-wrapping helpers (WrapErrorf)
	disks    alicloud.DiskManager     // ECS disk status/detach operations
	registry registry.Client          // BOSH agent settings registry
}
// NewDetachDiskMethod wires a DetachDiskMethod with its collaborators.
func NewDetachDiskMethod(cc CallContext, disks alicloud.DiskManager, rc registry.Client) DetachDiskMethod {
	return DetachDiskMethod{cc, disks, rc}
}
// DetachDisk detaches the disk identified by diskCID from the VM identified
// by vmCID, waits for the disk to reach the Available status, and removes the
// disk from the agent's registry settings.
func (a DetachDiskMethod) DetachDisk(vmCID apiv1.VMCID, diskCID apiv1.DiskCID) error {
	instCid := vmCID.AsString()
	diskCid := diskCID.AsString()

	err := a.disks.ChangeDiskStatus(diskCid, ecs.DiskStatusAvailable, func(disk *ecs.DiskItemType) (bool, error) {
		if disk == nil {
			return false, fmt.Errorf("missing disk %s", diskCid)
		}
		switch disk.Status {
		case ecs.DiskStatusInUse:
			// Still attached: issue the detach call and keep polling.
			return false, a.disks.DetachDisk(instCid, diskCid)
		case ecs.DiskStatusAvailable:
			// Done: disk is fully detached.
			return true, nil
		case ecs.DiskStatusDetaching:
			// In progress: keep polling.
			return false, nil
		default:
			return false, fmt.Errorf("unexpect disk %s status %s", diskCid, disk.Status)
		}
	})
	if err != nil {
		return a.WrapErrorf(err, "detach disk %s from %s failed", diskCid, instCid)
	}

	registryClient := a.registry
	// BUG FIX: the Fetch error was previously discarded; on failure the code
	// would write back zero-value agent settings, wiping the registry entry.
	agentSettings, err := registryClient.Fetch(instCid)
	if err != nil {
		return a.WrapErrorf(err, "DetachDisk fetch registry failed %s", diskCid)
	}
	agentSettings.DetachPersistentDisk(diskCid)
	err = registryClient.Update(instCid, agentSettings)
	if err != nil {
		return a.WrapErrorf(err, "DetachDisk update registry failed %s", diskCid)
	}
	return nil
}
|
Run a statistical t-test on the given dataset to measure the differences in grades between male and female students, and generate a visualization of the differences.
|
<filename>0101-Symmetric-Tree/cpp_0101/Solution2.h<gh_stars>10-100
//
// Created by ooooo on 2019/12/6.
//
#ifndef CPP_0101_SOLUTION2_H
#define CPP_0101_SOLUTION2_H
#include "TreeNode.h"
#include <queue>
class Solution {
public:
bool isSymmetric(TreeNode *root) {
queue<TreeNode *> q;
q.push(root);
q.push(root);
while (!q.empty()) {
TreeNode *node1 = q.front();
q.pop();
TreeNode *node2 = q.front();
q.pop();
if (!node1 && !node2) continue;
if (!node1 || !node2) return false;
if (node1->val != node2->val) return false;
q.push(node1->left);
q.push(node2->right);
q.push(node1->right);
q.push(node2->left);
}
return true;
}
};
#endif //CPP_0101_SOLUTION2_H
|
<gh_stars>0
package lib
import (
"bytes"
"fmt"
"strings"
"testing"
"github.com/bxcodec/faker/v3"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Verifies PatchNuget adds a packageSourceCredentials section (username +
// clear-text password) when the input NuGet.config has none.
func TestPatchNuget_WithoutPreviousPackageSourceCredentials(t *testing.T) {
	// Randomized fixture values so the test cannot pass by accident.
	source := faker.Word()
	username := faker.Username()
	password := faker.Password()
	xmlString := fmt.Sprintf(`<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <packageSources>
    <add key="%s"/>
  </packageSources>
</configuration>
`, source)
	reader := strings.NewReader(xmlString)
	buffer := new(bytes.Buffer)
	err := PatchNuget(reader, buffer, source, username, password)
	require.NoError(t, err, "Could not patch nuget")
	expectedString := fmt.Sprintf(`<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <packageSources>
    <add key="%[1]s"/>
  </packageSources>
  <packageSourceCredentials>
    <%[1]s>
      <add key="Username" value="%[2]s"/>
      <add key="ClearTextPassword" value="%[3]s"/>
    </%[1]s>
  </packageSourceCredentials>
</configuration>
`, source, username, password)
	assert.EqualValues(t, expectedString, buffer.String(), "XML must be equal")
}
// Verifies PatchNuget replaces the credentials of the targeted source only,
// leaving another source's existing credentials untouched.
func TestPatchNuget_WithPreviousPackageSourceCredentials(t *testing.T) {
	// Source 1 keeps its credentials; source 2's "wrong" credentials must be
	// overwritten with the new username/password.
	source1 := faker.Word() + "_1"
	username1 := faker.Username() + "_1"
	password1 := faker.Password() + "_1"
	source2 := faker.Word() + "_2"
	wrongUsername2 := faker.Username() + "_2wrong"
	wrongPassword2 := faker.Password() + "_2wrong"
	username2 := faker.Username() + "_2"
	password2 := faker.Password() + "_2"
	xmlString := fmt.Sprintf(`<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <packageSources>
    <add key="%[1]s"/>
    <add key="%[4]s"/>
  </packageSources>
  <packageSourceCredentials>
    <%[1]s>
      <add key="Username" value="%[2]s"/>
      <add key="ClearTextPassword" value="%[3]s"/>
    </%[1]s>
    <%[4]s>
      <add key="Username" value="%[5]s"/>
      <add key="ClearTextPassword" value="%[6]s"/>
    </%[4]s>
  </packageSourceCredentials>
</configuration>
`,
		source1, username1, password1,
		source2, wrongUsername2, wrongPassword2)
	reader := strings.NewReader(xmlString)
	buffer := new(bytes.Buffer)
	err := PatchNuget(reader, buffer, source2, username2, password2)
	require.NoError(t, err, "Could not patch nuget")
	expectedString := fmt.Sprintf(`<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <packageSources>
    <add key="%[1]s"/>
    <add key="%[4]s"/>
  </packageSources>
  <packageSourceCredentials>
    <%[1]s>
      <add key="Username" value="%[2]s"/>
      <add key="ClearTextPassword" value="%[3]s"/>
    </%[1]s>
    <%[4]s>
      <add key="Username" value="%[5]s"/>
      <add key="ClearTextPassword" value="%[6]s"/>
    </%[4]s>
  </packageSourceCredentials>
</configuration>
`,
		source1, username1, password1,
		source2, username2, password2)
	assert.EqualValues(t, expectedString, buffer.String(), "XML must be equal")
}
|
import express from 'express';
import nomad from './nomad-client';
import consul from './consul-client';
// HTTP entry point: wraps each async handler so rejected promises are routed
// to express error handling via next().
let app = express();
app.get('/', (req, res, next) => index(req, res).catch(next));
app.get('/start', (req, res, next) => start(req, res).catch(next));
app.listen(4111, () => console.log('listening on 4111'));
/**
 * Health/identity endpoint: replies with the plain string 'manager'.
 * @param {express.Request} req incoming request (unused)
 * @param {express.Response} res response used to send the reply
 * @return {Promise<void>} resolves once the response is sent
 */
async function index(req, res) {
  res.send('manager');
}
/**
 * Starts a service via Nomad and waits until Consul reports it, then replies
 * with the Consul service info as JSON.
 * @param {express.Request} req expects query parameter `service`
 * @param {express.Response} res response used to send the reply
 * @return {Promise<void>} resolves once the response is sent
 */
async function start(req, res) {
  let service = req.query.service;
  if(!service) {
    // BUG FIX: was `req.status(401)` — the Request object has no status();
    // the error response must go through `res`.
    res.status(401).send('service required');
    return;
  }
  let definition = await nomad.createDefinition(service);
  await nomad.createJob(service, definition);
  await nomad.waitForJob(service);
  let result = await consul.waitForService(service);
  console.log('service %j', result);
  res.json(result);
};
|
#!/bin/bash

set -e

# Distro detection: any apt-based Linux whose lsb_release id is not "Debian"
# is treated as Ubuntu-family (this also catches Mint, etc.).
UBUNTU=false
DEBIAN=false
if [ "$(uname)" = "Linux" ]; then
	#LINUX=1
	if type apt-get; then
		OS_ID=$(lsb_release -is)
		if [ "$OS_ID" = "Debian" ]; then
			DEBIAN=true
		else
			UBUNTU=true
		fi
	fi
fi

# Check for non 64 bit ARM64/Raspberry Pi installs
if [ "$(uname -m)" = "armv7l" ]; then
	echo ""
	echo "WARNING:"
	echo "The Chaingreen Blockchain requires a 64 bit OS and this is 32 bit armv7l"
	echo "For more information, see"
	echo "https://github.com/Chia-Network/chia-blockchain/wiki/Raspberry-Pi"
	echo "Exiting."
	exit 1
fi
# Get submodules
git submodule update --init mozilla-ca

# Classify the Ubuntu release; bc handles the non-integer version strings
# (e.g. "18.04 < 20") that shell integer comparison cannot.
UBUNTU_PRE_2004=false
if $UBUNTU; then
	LSB_RELEASE=$(lsb_release -rs)
	# In case Ubuntu minimal does not come with bc
	if [ "$(which bc |wc -l)" -eq 0 ]; then sudo apt install bc -y; fi
	# Mint 20.04 responds with 20 here so 20 instead of 20.04
	UBUNTU_PRE_2004=$(echo "$LSB_RELEASE<20" | bc)
	UBUNTU_2100=$(echo "$LSB_RELEASE>=21" | bc)
fi
# Manage npm and other install requirements on an OS specific basis
if [ "$(uname)" = "Linux" ]; then
	#LINUX=1
	if [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = "1" ]; then
		# Ubuntu
		echo "Installing on Ubuntu pre 20.04 LTS."
		sudo apt-get update
		sudo apt-get install -y python3.7-venv python3.7-distutils
	elif [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = "0" ] && [ "$UBUNTU_2100" = "0" ]; then
		echo "Installing on Ubuntu 20.04 LTS."
		sudo apt-get update
		sudo apt-get install -y python3.8-venv python3-distutils
	elif [ "$UBUNTU" = "true" ] && [ "$UBUNTU_2100" = "1" ]; then
		echo "Installing on Ubuntu 21.04 or newer."
		sudo apt-get update
		sudo apt-get install -y python3.9-venv python3-distutils
	elif [ "$DEBIAN" = "true" ]; then
		echo "Installing on Debian."
		sudo apt-get update
		sudo apt-get install -y python3-venv
	elif type pacman && [ -f "/etc/arch-release" ]; then
		# Arch Linux
		echo "Installing on Arch Linux."
		sudo pacman -S --needed python git
	elif type yum && [ ! -f "/etc/redhat-release" ] && [ ! -f "/etc/centos-release" ] && [ ! -f "/etc/fedora-release" ]; then
		# AMZN 2
		echo "Installing on Amazon Linux 2."
		sudo yum install -y python3 git
	elif type yum && [ -f "/etc/redhat-release" ] || [ -f "/etc/centos-release" ] || [ -f "/etc/fedora-release" ]; then
		# CentOS or Redhat or Fedora
		# NOTE(review): `a && b || c || d` groups as `(a && b) || c || d`, so
		# this branch can match on CentOS/Fedora even when yum is absent —
		# confirm whether parentheses were intended here.
		echo "Installing on CentOS/Redhat/Fedora."
	fi
elif [ "$(uname)" = "Darwin" ] && ! type brew >/dev/null 2>&1; then
	echo "Installation currently requires brew on MacOS - https://brew.sh/"
elif [ "$(uname)" = "OpenBSD" ]; then
	export MAKE=${MAKE:-gmake}
	export BUILD_VDF_CLIENT=${BUILD_VDF_CLIENT:-N}
elif [ "$(uname)" = "FreeBSD" ]; then
	export MAKE=${MAKE:-gmake}
	export BUILD_VDF_CLIENT=${BUILD_VDF_CLIENT:-N}
fi
# Echo the newest installed python from the preferred list (3.9 first,
# falling back to a bare "3"); echoes nothing when none is found.
find_python() {
	set +e
	unset BEST_VERSION
	for candidate in 39 3.9 38 3.8 37 3.7 3; do
		# Keep only the first match; later candidates are lower priority.
		if which python$candidate >/dev/null && [ "$BEST_VERSION" = "" ]; then
			BEST_VERSION=$candidate
		fi
	done
	echo $BEST_VERSION
	set -e
}
# Allow the caller to pin a python version via INSTALL_PYTHON_VERSION;
# otherwise auto-detect the best one available.
if [ "$INSTALL_PYTHON_VERSION" = "" ]; then
	INSTALL_PYTHON_VERSION=$(find_python)
fi

# This fancy syntax sets INSTALL_PYTHON_PATH to "python3.7", unless
# INSTALL_PYTHON_VERSION is defined.
# If INSTALL_PYTHON_VERSION equals 3.8, then INSTALL_PYTHON_PATH becomes python3.8
INSTALL_PYTHON_PATH=python${INSTALL_PYTHON_VERSION:-3.7}

echo "Python version is $INSTALL_PYTHON_VERSION"
$INSTALL_PYTHON_PATH -m venv venv
# Symlink "activate" at the repo root for convenience.
if [ ! -f "activate" ]; then
	ln -s venv/bin/activate .
fi
# shellcheck disable=SC1091
. ./activate
# pip 20.x+ supports Linux binary wheels
python -m pip install --upgrade pip
python -m pip install wheel
#if [ "$INSTALL_PYTHON_VERSION" = "3.8" ]; then
# This remains in case there is a diversion of binary wheels
python -m pip install --extra-index-url https://pypi.chia.net/simple/ miniupnpc==2.2.2
python -m pip install -e . --extra-index-url https://pypi.chia.net/simple/

echo ""
echo "Chaingreen blockchain install.sh complete."
echo ""
echo "Try the Quick Start Guide to running chia-blockchain:"
echo "https://github.com/Chia-Network/chia-blockchain/wiki/Quick-Start-Guide"
echo ""
echo "To install the GUI type 'sh install-gui.sh' after '. ./activate'."
echo ""
echo "Type '. ./activate' and then 'chaingreen init' to begin."
|
def partition(arr, low, high):
    """Lomuto partition: place arr[high] (the pivot) at its sorted position
    within arr[low:high+1], in place, and return that index.

    Every element left of the returned index is <= the pivot.
    """
    pivot = arr[high]
    boundary = low - 1  # last index of the "<= pivot" region
    for scan in range(low, high):
        if arr[scan] <= pivot:
            boundary += 1
            arr[boundary], arr[scan] = arr[scan], arr[boundary]
    # Drop the pivot just after the "<= pivot" region.
    arr[boundary + 1], arr[high] = arr[high], arr[boundary + 1]
    return boundary + 1
def quickselect(arr, low, high, pivotIndex):
    """Return the element that would sit at index ``pivotIndex`` if ``arr``
    were sorted, partially reordering ``arr`` in place.

    Iterative re-formulation of the classic recursive quickselect; relies
    on the sibling ``partition`` helper.
    """
    while low < high:
        pi = partition(arr, low, high)
        if pi == pivotIndex:
            return arr[pi]
        if pivotIndex < pi:
            high = pi - 1
        else:
            low = pi + 1
    return arr[pivotIndex]
# Demo: select the middle order statistic of an odd-length list.
arr = [3, 9, 2, 4, 5, 7, 8, 1, 6]
n = len(arr)
# n // 2 is the median index only because n is odd here.
print("Median = ", quickselect(arr, 0, n-1, n//2))
# Output: Median = 5
|
import { Project } from "@atomist/rug/model/Project";
import { Given, ProjectScenarioWorld, Then, When } from "@atomist/rug/test/project/Core";
// Step: run MyFirstEditor against the scenario project with a sample
// value for its "inputParameter" parameter.
When("the MyFirstEditor is run", (p, world) => {
  const w = world as ProjectScenarioWorld;
  const editor = w.editor("MyFirstEditor");
  w.editWith(editor, { inputParameter: "the inputParameter value" });
});
// Step: assert the editor wrote hello.txt containing the greeting.
Then("the hello file says hello", (p, world) => {
  return p.fileContains("hello.txt", "Hello, World!");
});
|
'use strict';
const {loadCSV} = require('../../csv');
const {compileString} = require('../../ejs.utils');
const fs = require('fs');
const path = require('path');
const tmpbuf = fs.readFileSync(path.join(__dirname, 'pnl.ejs'));
const template = compileString(tmpbuf.toString());
/**
 * csv2pnl - csv to pnl
 *
 * Builds a PnL chart description (YAML) from one CSV file. Each row is
 * expected to provide a timestamp (col 0), a value (col 2) and buy/sell
 * amounts (cols 3/4) used to place chart marks.
 * @param {string} csvfn - filename for csv
 * @param {string} id - id
 * @param {string} title - title
 * @param {string} subtext - subtext
 * @param {string} assetstitle - title of assets
 * @param {number} valoff - value offset
 * @param {string} markstate - 'none' | 'state' | 'value'
 * @return {string} str - YAML string
 */
function csv2pnl(csvfn, id, title, subtext, assetstitle, valoff, markstate) {
  const lstmoney = [];
  const mark = [];
  const lstts = loadCSV(csvfn, (arrHead, arrData) => {
    const val = parseFloat(arrData[2]);
    if (markstate == 'state' || markstate == 'value') {
      // BUGFIX: these were `const`, but the netting logic below reassigns
      // them (`buy -= sell` / `sell -= buy`), which threw
      // "TypeError: Assignment to constant variable" whenever a row
      // carried both a buy and a sell amount.
      let buy = parseFloat(arrData[3]);
      let sell = parseFloat(arrData[4]);
      if (buy > 0 && sell > 0) {
        // Both sides present: keep only the net amount as a single mark.
        if (buy > sell) {
          buy -= sell;
          mark.push({
            name: 'buy',
            value: markstate == 'value' ? buy : '1',
            xAxis: lstmoney.length,
            yAxis: val,
          });
        } else if (buy < sell) {
          sell -= buy;
          mark.push({
            name: 'sell',
            value: markstate == 'value' ? -sell : '-1',
            xAxis: lstmoney.length,
            yAxis: val,
          });
        }
      } else if (buy > 0) {
        mark.push({
          name: 'buy',
          value: markstate == 'value' ? buy : '1',
          xAxis: lstmoney.length,
          yAxis: val,
        });
      } else if (sell > 0) {
        mark.push({
          name: 'sell',
          value: markstate == 'value' ? -sell : '-1',
          xAxis: lstmoney.length,
          yAxis: val,
        });
      }
    }
    lstmoney.push(val);
    return parseInt(arrData[0]);
  });
  const str = template({
    id: id,
    title: title,
    subtext: subtext,
    date: lstts,
    valoff: valoff,
    assets: {
      v: {
        title: assetstitle,
        val: lstmoney,
        mark: JSON.stringify(mark),
      },
    },
  });
  return str;
}
/**
 * csv2pnlex - some csv files to pnl
 *
 * Like csv2pnl, but merges several CSV files (one asset series each) into
 * a single chart description.
 * NOTE(review): obj.date is overwritten on every iteration, so the date
 * axis comes from the last CSV processed — confirm all files share the
 * same timestamps.
 * @param {object} csvobj - map[name] = {title, csvfn, markstate}
 * @param {string} id - id
 * @param {string} title - title
 * @param {string} subtext - subtext
 * @param {number} valoff - value offset
 * @return {string} str - YAML string
 */
function csv2pnlex(csvobj, id, title, subtext, valoff) {
  const obj = {
    id: id,
    title: title,
    subtext: subtext,
    valoff: valoff,
    assets: {},
  };
  for (const key in csvobj) {
    if (Object.prototype.hasOwnProperty.call(csvobj, key)) {
      const lstmoney = [];
      const mark = [];
      const markstate = csvobj[key].markstate;
      obj.date = loadCSV(csvobj[key].csvfn, (arrHead, arrData) => {
        const val = parseFloat(arrData[2]);
        if (markstate == 'state' || markstate == 'value') {
          // BUGFIX: `const` -> `let`; the netting below reassigns these,
          // which previously threw "TypeError: Assignment to constant
          // variable" whenever a row carried both buy and sell amounts.
          let buy = parseFloat(arrData[3]);
          let sell = parseFloat(arrData[4]);
          if (buy > 0 && sell > 0) {
            // Both sides present: keep only the net amount as one mark.
            if (buy > sell) {
              buy -= sell;
              mark.push({
                name: 'buy',
                value: markstate == 'value' ? buy : '1',
                xAxis: lstmoney.length,
                yAxis: val,
              });
            } else if (buy < sell) {
              sell -= buy;
              mark.push({
                name: 'sell',
                value: markstate == 'value' ? -sell : '-1',
                xAxis: lstmoney.length,
                yAxis: val,
              });
            }
          } else if (buy > 0) {
            mark.push({
              name: 'buy',
              value: markstate == 'value' ? buy : '1',
              xAxis: lstmoney.length,
              yAxis: val,
            });
          } else if (sell > 0) {
            mark.push({
              name: 'sell',
              value: markstate == 'value' ? -sell : '-1',
              xAxis: lstmoney.length,
              yAxis: val,
            });
          }
        }
        lstmoney.push(val);
        return parseInt(arrData[0]);
      });
      obj.assets[key] = {
        title: csvobj[key].title,
        val: lstmoney,
        mark: JSON.stringify(mark),
      };
    }
  }
  const str = template(obj);
  return str;
}
exports.csv2pnl = csv2pnl;
exports.csv2pnlex = csv2pnlex;
|
<reponame>fsancheztemprano/chess-lite<gh_stars>0
import { ChangeDetectionStrategy, Component, OnDestroy } from '@angular/core';
import { CoreService } from '../../../../../../core/services/core.service';
import { TiledMenuTileData } from '../../../../../../shared/modules/tiled-menu/components/tiled-menu-tile/tiled-menu-tile.component';
import { UserSettingsService } from '../../../../services/user-settings.service';
// Landing page of the user-settings area: renders a tiled menu where each
// tile links to one settings sub-page.
@Component({
  selector: 'app-user-settings-home',
  templateUrl: './user-settings-home.component.html',
  styleUrls: ['./user-settings-home.component.scss'],
  changeDetection: ChangeDetectionStrategy.OnPush,
})
export class UserSettingsHomeComponent implements OnDestroy {
  // Tile definitions for the menu. Each `canShow` is evaluated once at
  // construction time from the service's permission checks.
  // NOTE(review): the values are not re-evaluated if permissions change
  // while this component is alive — confirm that is acceptable.
  tiles: TiledMenuTileData[] = [
    {
      icon: 'account_box',
      title: 'User Profile',
      subtitle: 'Edit your profile',
      link: 'profile',
      canShow: this.userSettingsService.isAllowedToUpdateProfile(),
    },
    {
      icon: 'account_circle',
      title: 'Upload Avatar',
      subtitle: 'Change your profile picture',
      link: 'avatar',
      canShow: this.userSettingsService.isAllowedToUploadAvatar(),
    },
    {
      icon: 'password',
      title: 'Change Password',
      subtitle: 'Secure your account',
      link: 'password',
      canShow: this.userSettingsService.isAllowedToChangePassword(),
    },
    {
      icon: 'delete_forever',
      title: 'Delete Account',
      subtitle: 'Remove your account',
      link: 'delete',
      canShow: this.userSettingsService.isAllowedToDeleteAccount(),
    },
    {
      icon: 'manage_accounts',
      title: 'Account Preferences',
      subtitle: 'Modify your application preferences',
      link: 'preferences',
      canShow: this.userSettingsService.hasLinkToUserPreferences(),
    },
  ];
  // Switch the shell to the 'raw' core style while this page is shown.
  constructor(private readonly coreService: CoreService, private readonly userSettingsService: UserSettingsService) {
    this.coreService.setCoreStyle('raw');
  }
  // Revert the core style change when leaving the page.
  ngOnDestroy(): void {
    this.coreService.reset();
  }
}
|
<gh_stars>1-10
// Migration: create the local_addresses table that links saved addresses
// to their owning users.
exports.up = (knex) =>
  knex.schema.createTable('local_addresses', (table) => {
    // The address text itself; required.
    table.string('local_address').notNullable()
    // Owning user id; required and constrained below.
    table.string('user_id').notNullable()
    // Foreign key back to users.id.
    table.foreign('user_id').references('id').inTable('users')
  })

// Rollback: drop the table created in up().
exports.down = (knex) => knex.schema.dropTable('local_addresses')
|
# Sentiment-classification demo: bag-of-words features + multinomial
# naive Bayes over a CSV of movie reviews.
import pandas as pd
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import MultinomialNB
# Using a dataset of movie reviews
# (expects a local file with 'review' and 'sentiment' columns)
df = pd.read_csv('movie_reviews.csv')
# Extracting features from reviews: token-count vectors per review
vectorizer = CountVectorizer()
features = vectorizer.fit_transform(df['review'])
labels = df['sentiment']
# Splitting data into training and test sets (10% held out, fixed seed)
x_train, x_test, y_train, y_test = train_test_split(features, labels, test_size=.1, random_state=1)
# Training and testing the naive bayes model
model = MultinomialNB()
model.fit(x_train, y_train)
# Evaluating the algorithm's performance on the held-out split
predictions = model.predict(x_test)
from sklearn.metrics import accuracy_score
print (accuracy_score(y_test, predictions))
|
/**
 * Simple value holder for a task's status description.
 * setDescription() returns $this, enabling fluent call chaining.
 */
class Status
{
    /** @var string The task description. */
    private $description;
    /**
     * Sets the description of the task.
     *
     * @param string $description The description of the task.
     *
     * @return Status The updated status object (fluent interface).
     */
    public function setDescription($description)
    {
        $this->description = $description;
        return $this;
    }
    /**
     * Retrieves the description of the task.
     *
     * @return string The description of the task.
     */
    public function getDescription()
    {
        return $this->description;
    }
}
// Sample code to demonstrate the usage of the Status class
$status = new Status();
$status->setDescription("Complete the programming problem");
$taskDescription = $status->getDescription();
echo $taskDescription; // Output: Complete the programming problem
|
/*
  Split an array into consecutive chunks of the given size; the final
  chunk holds the remainder.
  E.g. chunkArray([1,2,3,4,5], 2) // returns [[1,2],[3,4],[5]]

  NOTE(review): the original header described a different exercise
  (mergeArrays, dedup-merge), which does not match this function's
  name/signature — implemented chunking per the (array, size) contract;
  confirm the intended exercise.
*/
function chunkArray(array, size) {
  // Guard: a non-positive or non-integer size would loop forever.
  if (!Number.isInteger(size) || size <= 0) {
    throw new RangeError('size must be a positive integer')
  }
  const chunks = []
  for (let start = 0; start < array.length; start += size) {
    chunks.push(array.slice(start, start + size))
  }
  return chunks
}
module.exports = chunkArray
|
package org.egovframe.rte.psl.dataaccess.mybatis;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.egovframe.rte.psl.dataaccess.TestBase;
import org.egovframe.rte.psl.dataaccess.dao.MapTypeMapper;
import javax.annotation.Resource;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.BadSqlGrammarException;
import org.springframework.jdbc.UncategorizedSQLException;
import org.springframework.jdbc.datasource.init.ScriptUtils;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
/**
* == 개정이력(Modification Information) ==
*
* 수정일 수정자 수정내용
* ------- -------- ---------------------------
* 2014.01.22 권윤정 SimpleJdbcTestUtils -> JdbcTestUtils 변경
* 2014.01.22 권윤정 SimpleJdbcTemplate -> JdbcTemplate 변경
*
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath*:META-INF/spring/context-*.xml" })
@Transactional
public class RemapResultsTest extends TestBase {
	@Resource(name = "mapTypeMapper")
	MapTypeMapper mapTypeMapper;
	// Recreate the sample schema and seed data before every test.
	@Before
	public void onSetUp() throws Exception {
		ScriptUtils.executeSqlScript(dataSource.getConnection(), new ClassPathResource("META-INF/testdata/sample_schema_ddl_" + usingDBMS + ".sql"));
		// init data
		ScriptUtils.executeSqlScript(dataSource.getConnection(), new ClassPathResource("META-INF/testdata/sample_schema_initdata_" + usingDBMS + ".sql"));
	}
	@Rollback(false)
	@Test
	// @ExpectedException(BadSqlGrammarException.class)
	// On Tibero an UncategorizedSQLException is returned instead, so this
	// test was changed to handle the exception with try/catch inside the
	// method body.
	public void testReplaceTextAllQueryExpectedException() throws Exception {
		try {
			// selectQuery
			Map<String, Object> map = new HashMap<String, Object>();
			StringBuilder selectQuery = new StringBuilder();
			selectQuery.append("select * from DEPT");
			map.put("selectQuery", selectQuery.toString());
			// select
			List<Map<String, Object>> resultList = mapTypeMapper.selectList("org.egovframe.rte.psl.dataaccess.EmpMapper.selectUsingReplacedAllQuery", map);
			assertNotNull(resultList);
			assertEquals(4, resultList.size());
			assertTrue(resultList.get(0).containsKey("deptNo"));
			map.clear();
			selectQuery = new StringBuilder();
			selectQuery.append("select * from DEPT ");
			selectQuery.append("where DEPT_NAME like '%ES%' ");
			selectQuery.append("order by DEPT_NO DESC ");
			map.put("selectQuery", selectQuery.toString());
			// select
			// When the same resultset metadata is reused as above (same
			// table and select clause), running a replaced-text query
			// causes no problem.
			// cf.) note that the resultset metadata is cached!
			resultList = mapTypeMapper.selectList("org.egovframe.rte.psl.dataaccess.EmpMapper.selectUsingReplacedAllQuery", map);
			assertNotNull(resultList);
			// 20,'RESEARCH','DALLAS' -- R'ES'EARCH
			// 30,'SALES','CHICAGO' -- SAL'ES'
			assertEquals(2, resultList.size());
			assertTrue(resultList.get(0).containsKey("deptNo"));
			map.clear();
			selectQuery = new StringBuilder();
			selectQuery.append("select * from EMP ");
			map.put("selectQuery", selectQuery.toString());
			// select
			// When the resultset metadata changes from the one above,
			// running the replaced-text query fails because the metadata
			// cached on the first run does not contain the columns being
			// selected now!
			resultList = mapTypeMapper.selectList("org.egovframe.rte.psl.dataaccess.EmpMapper.selectUsingReplacedAllQuery", map);
			//fail("this line must never be reached");
		} catch (BadSqlGrammarException be) {
			assertNotNull(be);
		} catch (UncategorizedSQLException ue) {
			// On Tibero, Spring wraps the failure as
			// UncategorizedSQLException <--
			// NestedSQLException <-- TbSQLException
			assertNotNull(ue);
			// assertTrue(ue.getCause().getCause() instanceof TbSQLException);
		} catch (Exception e) {
			e.printStackTrace();
			fail("기대한 exception 이 아닙니다.");
		}
	}
	@Rollback(false)
	@Test
	public void testReplaceTextRemapResultsAllQuery() throws Exception {
		// selectQuery
		Map<String, Object> map = new HashMap<String, Object>();
		StringBuilder selectQuery = new StringBuilder();
		selectQuery.append("select * from DEPT");
		map.put("selectQuery", selectQuery.toString());
		// select
		List<Map<String, Object>> resultList = mapTypeMapper.selectList("org.egovframe.rte.psl.dataaccess.EmpMapper.selectUsingReplacedAllQueryUsingRemapResults", map);
		assertNotNull(resultList);
		assertEquals(4, resultList.size());
		assertTrue(resultList.get(0).containsKey("deptNo"));
		map.clear();
		selectQuery = new StringBuilder();
		selectQuery.append("select * from EMP ");
		map.put("selectQuery", selectQuery.toString());
		// select
		// Even when the resultset metadata differs from the query above,
		// remapResults="true" disables caching of the resultset metadata,
		// so no error occurs.
		resultList = mapTypeMapper.selectList("org.egovframe.rte.psl.dataaccess.EmpMapper.selectUsingReplacedAllQueryUsingRemapResults", map);
		assertNotNull(resultList);
		assertEquals(14, resultList.size());
		assertTrue(resultList.get(0).containsKey("empNo"));
	}
}
|
<gh_stars>1-10
/* ISC license. */
#include <sys/uio.h>
#include <stdint.h>
#include <string.h>
#include <errno.h>
#include <stdlib.h>
#include <limits.h>
#include <skalibs/posixishard.h>
#include <skalibs/uint32.h>
#include <skalibs/stralloc.h>
#include <skalibs/djbunix.h>
#include <skalibs/socket.h>
#include <skalibs/textmessage.h>
#include <skabus/rpc.h>
#include "skabus-rpccctl.h"
/* Connect to the skabus-rpc daemon at the unix socket `path`.
   On success, wires the textmessage sender/receiver pair over the
   non-blocking connection and returns 1. Returns 0 with errno set on
   failure; the fd is closed if the connect itself failed. */
int skabus_rpcc_start (skabus_rpcc_t *a, char const *path, tain_t const *deadline, tain_t *stamp)
{
  int fd = ipc_stream_nb() ;
  if (fd < 0) return 0 ;
  if (!ipc_timed_connect(fd, path, deadline, stamp))
  {
    fd_close(fd) ;
    return 0 ;
  }
  textmessage_sender_init(&a->out, fd) ;
  textmessage_receiver_init(&a->in, fd) ;
  return 1 ;
}
/* Tear down a client handle: sender and receiver wrap the same fd, so it
   is closed once (via the sender), then both ends are freed. */
void skabus_rpcc_end (skabus_rpcc_t *a)
{
  fd_close(textmessage_sender_fd(&a->out)) ;
  textmessage_sender_free(&a->out) ;
  textmessage_receiver_free(&a->in) ;
}
/* Register interface `ifname`, served by program `ifprog`, for queries
   matching regexp `re`.
   Wire format: tag 'I', then a 9-byte header (1-byte name length,
   big-endian u32 resolved-program-path length, big-endian u32 regexp
   length), then the three NUL-terminated strings.
   Returns 1 on success, 0 with errno set (ENAMETOOLONG when an argument
   exceeds its protocol limit). */
int skabus_rpcc_interface_register (skabus_rpcc_t *a, char const *ifname, char const *ifprog, char const *re, tain_t const *deadline, tain_t *stamp)
{
  size_t ifnamelen, ifproglen, relen ;
  /* resolve to an absolute path; heap-allocated, freed on every exit */
  char *ifprogfn = realpath(ifprog, 0) ;
  if (!ifprogfn) return 0 ;
  ifnamelen = strlen(ifname) ;
  ifproglen = strlen(ifprogfn) ;
  relen = strlen(re) ;
  if (ifnamelen > SKABUS_RPC_INTERFACE_MAXLEN || ifproglen > PATH_MAX || relen > SKABUS_RPC_RE_MAXLEN) goto terr ;
  {
    char buf[9] ;
    struct iovec v[5] =
    {
      { .iov_base = "I", .iov_len = 1 },
      { .iov_base = buf, .iov_len = 9 },
      { .iov_base = ifname, .iov_len = ifnamelen + 1 },
      { .iov_base = ifprogfn, .iov_len = ifproglen + 1 },
      { .iov_base = re, .iov_len = relen + 1 }
    } ;
    buf[0] = (unsigned char)ifnamelen ;
    uint32_pack_big(buf + 1, ifproglen) ;
    uint32_pack_big(buf + 5, relen) ;
    if (!textmessage_timed_commandv(&a->out, v, 5, deadline, stamp)) goto err ;
  }
  free(ifprogfn) ;
  return 1 ;
 terr:
  errno = ENAMETOOLONG ;
 err:
  free(ifprogfn) ;
  return 0 ;
}
/* Unregister interface `ifname`.
   Wire format: tag 'i', 1-byte name length, NUL-terminated name.
   Returns 1 on success, 0 with errno set. */
int skabus_rpcc_interface_unregister (skabus_rpcc_t *a, char const *ifname, tain_t const *deadline, tain_t *stamp)
{
  size_t ifnamelen = strlen(ifname) ;
  if (ifnamelen > SKABUS_RPC_INTERFACE_MAXLEN) return (errno = ENAMETOOLONG, 0) ;
  {
    unsigned char c = ifnamelen ;
    struct iovec v[3] =
    {
      { .iov_base = "i", .iov_len = 1 },
      { .iov_base = &c, .iov_len = 1 },
      { .iov_base = ifname, .iov_len = ifnamelen + 1 }
    } ;
    if (!textmessage_timed_commandv(&a->out, v, 3, deadline, stamp)) return 0 ;
  }
  return 1 ;
}
/* Send a query on interface `ifname` and append the server's reply
   payload to `reply`.
   Wire format out: tag 'Q', a 9-byte header (1-byte interface-name
   length, big-endian u32 query length, big-endian u32 timeout), then the
   two NUL-terminated strings. The answer's first byte is a status code
   (0 = success, otherwise an errno value) followed by the payload.
   Returns 1 on success, 0 with errno set. */
int skabus_rpcc_query (skabus_rpcc_t *a, stralloc *reply, char const *ifname, char const *query, uint32_t timeout, tain_t const *deadline, tain_t *stamp)
{
  size_t ifnamelen = strlen(ifname) ;
  size_t querylen = strlen(query) ;
  if (ifnamelen > SKABUS_RPC_INTERFACE_MAXLEN || querylen > UINT32_MAX) return (errno = ENAMETOOLONG, 0) ;
  {
    char buf[9] ;
    struct iovec v[4] =
    {
      { .iov_base = "Q", .iov_len = 1 },
      { .iov_base = buf, .iov_len = 9 },  /* BUGFIX: was 59, overreading 50 bytes past the 9-byte header */
      { .iov_base = ifname, .iov_len = ifnamelen + 1 },
      { .iov_base = query, .iov_len = querylen + 1 },
    } ;
    buf[0] = (unsigned char)ifnamelen ;
    uint32_pack_big(buf + 1, querylen) ;
    uint32_pack_big(buf + 5, timeout) ;
    /* BUGFIX: timed sendv takes deadline/stamp like every other timed
       call in this file; they were missing here. */
    if (!textmessage_timed_sendv(&a->out, v, 4, deadline, stamp)) return 0 ;
  }
  {
    struct iovec v ;
    if (!textmessage_timed_receive(&a->in, &v, deadline, stamp)) return 0 ;
    if (!v.iov_len) return (errno = EPROTO, 0) ;
    /* First byte is the server status; nonzero is an errno to report.
       BUGFIX: the error path cast the pointer itself to unsigned char
       instead of dereferencing it ( *(unsigned char)p vs *(unsigned char *)p ). */
    if (*(unsigned char *)v.iov_base) return (errno = *(unsigned char *)v.iov_base, 0) ;
    if (!stralloc_catb(reply, (char *)v.iov_base + 1, v.iov_len - 1)) return 0 ;
  }
  return 1 ;
}
/* Ask the daemon to close the session: single-byte '.' command.
   Returns 1 on success, 0 with errno set. */
int skabus_rpcc_quit (skabus_rpcc_t *a, tain_t const *deadline, tain_t *stamp)
{
  return textmessage_timed_command(&a->out, ".", 1, deadline, stamp) ;
}
|
<reponame>fujunwei/dldt
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <vector>
#include <memory>
#include <ngraph/opsets/opset1.hpp>
namespace ngraph {
namespace helpers {
// Flatten every output port of every node into one OutputVector,
// preserving node order and per-node output order.
ngraph::OutputVector convert2OutputVector(const std::vector<std::shared_ptr<ngraph::Node>> &nodes) {
    ngraph::OutputVector outs;
    for (const auto &node : nodes) {
        for (const auto &output : node->outputs()) {
            outs.push_back(output);
        }
    }
    return outs;
}
// Upcast a vector of shared_ptr<opType> into a NodeVector.
// Uses dynamic_pointer_cast, so an element whose type is not actually
// derived from ngraph::Node would become nullptr — opType is expected
// to derive from Node.
template<class opType>
ngraph::NodeVector castOps2Nodes(const std::vector<std::shared_ptr<opType>> &ops) {
    ngraph::NodeVector nodes;
    for (const auto &op : ops) {
        nodes.push_back(std::dynamic_pointer_cast<ngraph::Node>(op));
    }
    return nodes;
}
} // namespace helpers
} // namespace ngraph
|
#!/bin/sh
# Build an OpenWrt firmware image for the UniElec U7628-01 (ramips/mt76x8)
# with the ImageBuilder, then repack artifacts and regenerate checksums.
set -e -x
# Release version: branch name minus its leading "v" (e.g. v21.02.0 -> 21.02.0).
_version="$(printf "%s" "$REPO_BRANCH" | cut -c 2-)"
# Kernel vermagic of the published release, scraped from its manifest.
_vermagic="$(curl --retry 5 -L https://downloads.openwrt.org/releases/${_version}/targets/ramips/mt76x8/openwrt-${_version}-ramips-mt76x8.manifest | sed -e '/^kernel/!d' -e 's/^.*-\([^-]*\)$/\1/g' | head -n 1)"
OLD_CWD="$(pwd)"
# Only build the 4G image when the local kernel vermagic matches upstream;
# the whole chain is &&-connected so any failure aborts the image build.
[ "$(find build_dir/ -name .vermagic -exec cat {} \;)" = "$_vermagic" ] && \
mkdir ~/imb && \
tar -xJf bin/targets/ramips/mt76x8/openwrt-imagebuilder-${_version}-ramips-mt76x8.Linux-x86_64.tar.xz -C ~/imb && \
cd ~/imb/* && \
mkdir -p files && \
cp -r $GITHUB_WORKSPACE/files/* files/ && \
make image PROFILE=unielec_u7628-01-16m PACKAGES="kmod-usb-storage block-mount kmod-fs-ext4 luci luci-proto-qmi kmod-usb-serial kmod-usb-serial-option kmod-usb-serial-wwan kmod-usb-uhci kmod-usb-storage-uas kmod-usb-storage-extras luci-i18n-base-zh-cn" && \
mv bin/targets/ramips/mt76x8/openwrt-${_version}-ramips-mt76x8-unielec_u7628-01-16m-squashfs-sysupgrade.bin ../openwrt-${_version}-ramips-mt76x8-4g-unielec_u7628-01-16m-squashfs-sysupgrade.bin && \
make clean && \
mv ../*.bin "$OLD_CWD/bin/targets/ramips/mt76x8/"
# Repack: move builder/sdk up a level, drop the packages dir, and bundle
# the remaining images into one firmware tarball.
cd "$OLD_CWD/bin/targets"/*/*
mv openwrt-imagebuilder-* openwrt-sdk-* ..
rm -rf packages
tar -c * | xz -z -e -9 -T 0 > "../$(grep -i "openwrt-.*-sysupgrade.bin" *sums | head -n 1 | cut -d "*" -f 2 | cut -d - -f 1-5)-firmware.tar.xz"
rm -rf *
# Re-compress the imagebuilder/sdk archives with maximum xz settings.
xz -d -c ../openwrt-imagebuilder-* | xz -z -e -9 -T 0 > "$(basename ../openwrt-imagebuilder-*)"
xz -d -c ../openwrt-sdk-* | xz -z -e -9 -T 0 > "$(basename ../openwrt-sdk-*)"
mv ../*-firmware.tar.xz .
rm -f ../openwrt-imagebuilder-* ../openwrt-sdk-* *sums
# Fresh checksums over the final artifact set.
sha256sum * > ../sha256sums
mv ../sha256sums .
|
import React, { useEffect } from "react";
import Sidebar from "./Sidebar.js";
import "./dashboard.css";
import { Typography } from "@material-ui/core";
import { Link } from "react-router-dom";
import { Doughnut, Line } from "react-chartjs-2";
import { useSelector, useDispatch } from "react-redux";
import { getAdminProduct } from "../../actions/productAction";
import { getAllOrders } from "../../actions/orderAction.js";
import { getAllUsers } from "../../actions/userAction.js";
import MetaData from "../layout/MetaData";
// Admin dashboard: aggregates product/order/user state from the redux
// store and renders summary tiles plus two chart.js visualizations.
const Dashboard = () => {
  const dispatch = useDispatch();
  // Store slices populated by the fetch actions dispatched in useEffect below.
  const { products } = useSelector((state) => state.products);
  const { orders } = useSelector((state) => state.allOrders);
  const { users } = useSelector((state) => state.allUsers);
  // Count products with zero stock for the doughnut chart.
  let outOfStock = 0;
  products &&
    products.forEach((item) => {
      if (item.Stock === 0) {
        outOfStock += 1;
      }
    });
  // Fetch all admin data once on mount.
  useEffect(() => {
    dispatch(getAdminProduct());
    dispatch(getAllOrders());
    dispatch(getAllUsers());
  }, [dispatch]);
  // Sum revenue across all orders.
  let totalAmount = 0;
  orders &&
    orders.forEach((item) => {
      totalAmount += item.totalPrice;
    });
  // Line chart: simple two-point series from 0 to total revenue.
  const lineState = {
    labels: ["Initial Amount", "Amount Earned"],
    datasets: [
      {
        label: "TOTAL AMOUNT",
        backgroundColor: ["tomato"],
        hoverBackgroundColor: ["rgb(197, 72, 49)"],
        data: [0, totalAmount],
      },
    ],
  };
  // Doughnut chart: stock split.
  // NOTE(review): `products.length` is read here without the `products &&`
  // guard used everywhere else — throws if products is undefined on first
  // render; confirm the reducer's initial state provides an array.
  const doughnutState = {
    labels: ["Out of Stock", "InStock"],
    datasets: [
      {
        backgroundColor: ["#00A6B4", "#6800B4"],
        hoverBackgroundColor: ["#4B5000", "#35014F"],
        data: [outOfStock, products.length - outOfStock],
      },
    ],
  };
  return (
    <div className="dashboard">
      <MetaData title="Dashboard - Admin Panel" />
      <Sidebar />
      <div className="dashboardContainer">
        <Typography component="h1">Dashboard</Typography>
        <div className="dashboardSummary">
          <div>
            <p>
              Total Amount <br /> $ {totalAmount}
            </p>
          </div>
          <div className="dashboardSummaryBox2">
            <Link to="/admin/products">
              <p>Product</p>
              <p>{products && products.length}</p>
            </Link>
            <Link to="/admin/orders">
              <p>Orders</p>
              <p>{orders && orders.length}</p>
            </Link>
            <Link to="/admin/users">
              <p>Users</p>
              <p>{users && users.length}</p>
            </Link>
          </div>
        </div>
        <div className="lineChart">
          <Line data={lineState} />
        </div>
        <div className="doughnutChart">
          <Doughnut data={doughnutState} />
        </div>
      </div>
    </div>
  );
};
|
<filename>javascript/150 exercicios basicos/125_longest_string.js
// Return the longest string in `array`; ties keep the earliest entry.
// An empty array yields the empty string.
function find_longest_str(array) {
  return array.reduce(
    (best, current) => (current.length > best.length ? current : best),
    '',
  );
}
console.log(find_longest_str(['Javascript','Php','Python']));
console.log(find_longest_str(['React','Java','Mysqli']));
|
<gh_stars>1-10
$:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "payment_info_rails/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "payment_info_rails"
s.version = PaymentInfoRails::VERSION
s.authors = ["<NAME>"]
s.email = ["<EMAIL>"]
s.homepage = "https://rubygems.org/gems/payment_info_rails"
s.summary = "Integrates the payment info plugin to your Rails app"
s.description = "Payment Info Plugin allows you to add all credit card information in a single input"
s.license = "MIT"
s.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.rdoc"]
s.test_files = Dir["test/**/*"]
s.add_dependency "rails", "~> 5.2"
s.add_development_dependency "sqlite3"
end
|
class DynamicalSystem:
    """Linear system whose state advances as ``x <- A*x + C*K``."""

    def __init__(self, x0, A, C, K):
        # Current state plus the constant system parameters.
        self.x = x0
        self.A = A
        self.C = C
        self.K = K

    def update(self):
        """Advance the state one step: x <- A*x + C*K."""
        drift = self.A * self.x
        control = self.C * self.K
        self.x = drift + control
|
<filename>lib/tasks/change_hbx_id.rake
# Load the ChangeHbxId data-migration class and expose it as a rake task.
require File.join(Rails.root,"app","data_migrations","change_hbx_id.rb")
# This rake task merges people in Glue.
# format RAILS_ENV=production bundle exec rake migrations:change_hbx_id database_id='some_mongo_id' person_hbx_id='original_hbx_id' new_hbx_id='new_hbx_id'
namespace :migrations do
  desc "Change Hbx Id"
  ChangeHbxId.define_task :change_hbx_id => :environment
end
|
# encoding: utf-8
module Selector
describe Selector::Array do
let(:left) { described_class.new [:foo, :bar] }
let(:right) { described_class.new [:bar, :baz] }
describe ".new" do
subject { left }
it { is_expected.to be_kind_of Collection }
it { is_expected.to be_frozen }
it "sets the attribute" do
expect(subject.attribute).to eql(Set.new [:foo, :bar])
end
end # describe .new
describe "#&" do
subject { left & right }
context "array" do
it "returns the array" do
expect(subject).to be_kind_of described_class
end
it "composes the attributes" do
expect(subject.attribute).to eql(Set.new [:bar])
end
end # context
context "non-array" do
let(:right) { Condition.new }
it { is_expected.to be_kind_of(And) }
end # context
end # describe #&
describe "#|" do
subject { left | right }
context "array" do
it "returns the array" do
expect(subject).to be_kind_of described_class
end
it "composes the attributes" do
expect(subject.attribute).to eql(Set.new [:foo, :bar, :baz])
end
end # context
context "non-array" do
let(:right) { Condition.new }
it { is_expected.to be_kind_of(Or) }
end # context
end # describe #&
end # describe Selector::Array
end # module Selector
|
#!/usr/bin/env bash
# Reset the workspace to a pristine skeleton: delete everything except the
# whitelisted entries (Makefile, goxbase, .env, LICENSE, README.md), then
# seed main.go from the goxbase template.
# NOTE(review): `rm -r $(ls | ...)` relies on word-splitting and breaks on
# names containing whitespace; also `grep -v .env` treats the dot as a
# wildcard — confirm the repo never contains such names.
rm -r $(ls | grep -v Makefile | grep -v goxbase | grep -v .env | grep -v LICENSE | grep -v README.md);
cp goxbase/main.go .
|
<gh_stars>1000+
package otto
import (
"strconv"
"time"
)
var (
prototypeValueObject = interface{}(nil)
prototypeValueFunction = _nativeFunctionObject{
call: func(_ FunctionCall) Value {
return Value{}
},
}
prototypeValueString = _stringASCII("")
// TODO Make this just false?
prototypeValueBoolean = Value{
kind: valueBoolean,
value: false,
}
prototypeValueNumber = Value{
kind: valueNumber,
value: 0,
}
prototypeValueDate = _dateObject{
epoch: 0,
isNaN: false,
time: time.Unix(0, 0).UTC(),
value: Value{
kind: valueNumber,
value: 0,
},
}
prototypeValueRegExp = _regExpObject{
regularExpression: nil,
global: false,
ignoreCase: false,
multiline: false,
source: "",
flags: "",
}
)
// newContext builds a fresh runtime: a global stash/object pair wired
// together, the standard global environment (via _newContext), and a
// cached reference to the built-in eval function. The global object's
// prototype is Object.prototype.
func newContext() *_runtime {
	self := &_runtime{}
	self.globalStash = self.newObjectStash(nil, nil)
	self.globalObject = self.globalStash.object
	_newContext(self)
	// Cache eval so the interpreter can special-case direct eval calls.
	self.eval = self.globalObject.property["eval"].value.(Value).value.(*_object)
	self.globalObject.prototype = self.global.ObjectPrototype
	return self
}
func (runtime *_runtime) newBaseObject() *_object {
self := newObject(runtime, "")
return self
}
func (runtime *_runtime) newClassObject(class string) *_object {
return newObject(runtime, class)
}
func (runtime *_runtime) newPrimitiveObject(class string, value Value) *_object {
self := runtime.newClassObject(class)
self.value = value
return self
}
func (self *_object) primitiveValue() Value {
switch value := self.value.(type) {
case Value:
return value
case _stringObject:
return toValue_string(value.String())
}
return Value{}
}
func (self *_object) hasPrimitive() bool {
switch self.value.(type) {
case Value, _stringObject:
return true
}
return false
}
func (runtime *_runtime) newObject() *_object {
self := runtime.newClassObject(classObject)
self.prototype = runtime.global.ObjectPrototype
return self
}
func (runtime *_runtime) newArray(length uint32) *_object {
self := runtime.newArrayObject(length)
self.prototype = runtime.global.ArrayPrototype
return self
}
// newArrayOf builds a runtime Array from valueArray. Empty values are
// skipped, leaving holes (a sparse array); indexes are written as
// base-10 string property names with property mode 0111.
func (runtime *_runtime) newArrayOf(valueArray []Value) *_object {
	self := runtime.newArray(uint32(len(valueArray)))
	for index, value := range valueArray {
		if value.isEmpty() {
			continue
		}
		self.defineProperty(strconv.FormatInt(int64(index), 10), value, 0111, false)
	}
	return self
}
func (runtime *_runtime) newString(value Value) *_object {
self := runtime.newStringObject(value)
self.prototype = runtime.global.StringPrototype
return self
}
func (runtime *_runtime) newBoolean(value Value) *_object {
self := runtime.newBooleanObject(value)
self.prototype = runtime.global.BooleanPrototype
return self
}
func (runtime *_runtime) newNumber(value Value) *_object {
self := runtime.newNumberObject(value)
self.prototype = runtime.global.NumberPrototype
return self
}
// newRegExp constructs a RegExp object from pattern/flags values.
// Constructing from an existing RegExp object copies its source and
// flags verbatim; supplying flags alongside a RegExp argument panics
// with a TypeError.
func (runtime *_runtime) newRegExp(patternValue Value, flagsValue Value) *_object {
	pattern := ""
	flags := ""
	if object := patternValue._object(); object != nil && object.class == classRegExp {
		if flagsValue.IsDefined() {
			panic(runtime.panicTypeError("Cannot supply flags when constructing one RegExp from another"))
		}
		regExp := object.regExpValue()
		pattern = regExp.source
		flags = regExp.flags
	} else {
		// Non-RegExp arguments: undefined pattern/flags mean empty strings.
		if patternValue.IsDefined() {
			pattern = patternValue.string()
		}
		if flagsValue.IsDefined() {
			flags = flagsValue.string()
		}
	}
	return runtime._newRegExp(pattern, flags)
}
func (runtime *_runtime) _newRegExp(pattern string, flags string) *_object {
self := runtime.newRegExpObject(pattern, flags)
self.prototype = runtime.global.RegExpPrototype
return self
}
// TODO Should (probably) be one argument, right? This is redundant
func (runtime *_runtime) newDate(epoch float64) *_object {
self := runtime.newDateObject(epoch)
self.prototype = runtime.global.DatePrototype
return self
}
// newError creates an error object named `name` carrying `message`.
// The standard error names delegate to their dedicated constructors; any
// other name yields a generic object with Error.prototype and, when the
// name is non-empty, a "name" own property (mode 0111).
// NOTE(review): stackFramesToPop is only forwarded on the generic path —
// confirm the dedicated constructors handle their own stack accounting.
func (runtime *_runtime) newError(name string, message Value, stackFramesToPop int) *_object {
	var self *_object
	switch name {
	case "EvalError":
		return runtime.newEvalError(message)
	case "TypeError":
		return runtime.newTypeError(message)
	case "RangeError":
		return runtime.newRangeError(message)
	case "ReferenceError":
		return runtime.newReferenceError(message)
	case "SyntaxError":
		return runtime.newSyntaxError(message)
	case "URIError":
		return runtime.newURIError(message)
	}
	self = runtime.newErrorObject(name, message, stackFramesToPop)
	self.prototype = runtime.global.ErrorPrototype
	if name != "" {
		self.defineProperty("name", toValue_string(name), 0111, false)
	}
	return self
}
func (runtime *_runtime) newNativeFunction(name, file string, line int, _nativeFunction _nativeFunction) *_object {
self := runtime.newNativeFunctionObject(name, file, line, _nativeFunction, 0)
self.prototype = runtime.global.FunctionPrototype
prototype := runtime.newObject()
self.defineProperty("prototype", toValue_object(prototype), 0100, false)
prototype.defineProperty("constructor", toValue_object(self), 0100, false)
return self
}
func (runtime *_runtime) newNodeFunction(node *_nodeFunctionLiteral, scopeEnvironment _stash) *_object {
// TODO Implement 13.2 fully
self := runtime.newNodeFunctionObject(node, scopeEnvironment)
self.prototype = runtime.global.FunctionPrototype
prototype := runtime.newObject()
self.defineProperty("prototype", toValue_object(prototype), 0100, false)
prototype.defineProperty("constructor", toValue_object(self), 0101, false)
return self
}
// FIXME Only in one place...
func (runtime *_runtime) newBoundFunction(target *_object, this Value, argumentList []Value) *_object {
self := runtime.newBoundFunctionObject(target, this, argumentList)
self.prototype = runtime.global.FunctionPrototype
prototype := runtime.newObject()
self.defineProperty("prototype", toValue_object(prototype), 0100, false)
prototype.defineProperty("constructor", toValue_object(self), 0100, false)
return self
}
|
// import libraries
import org.tensorflow.lite.Interpreter;
// create interpreter to run the TFLite model
// (file_path must reference the .tflite model; assumed defined elsewhere —
// TODO confirm it is a File or MappedByteBuffer as the API requires)
Interpreter tflite = new Interpreter(file_path);
// create input data: a single sample with three float features
float[][] data = {{1.0f, 2.0f, 3.0f}};
// set the number of threads used to run the model
tflite.setNumThreads(4);
// run the model and get the result; the output buffer is shaped [1][1] —
// presumably matching the model's single scalar output, verify against
// the model's signature
float[][] result = new float[1][1];
tflite.run(data, result);
// print the result
System.out.println(result[0][0]);
|
def sum_of_digits(num):
    """Return the sum of the decimal digits of ``num``.

    Generalized to accept negative inputs by summing the digits of the
    absolute value (the original returned 0 for any negative number);
    returns 0 for ``num == 0``.
    """
    num = abs(num)
    total = 0
    while num > 0:
        total += num % 10
        num //= 10
    return total
print(sum_of_digits(291))
|
<gh_stars>0
import React, {useReducer} from 'react'
import PropTypes from 'prop-types'
import {HotTipContext} from './HotTipContext'
import HotTipAnchor from './HotTipAnchor'
import HotTipReducer from './reducer'
// Context provider for "hot tips": creates the [state, dispatch] pair via
// useReducer (initial state: empty object) and exposes it through
// HotTipContext. HotTipAnchor is rendered before children so a shared
// render target exists anywhere below the provider.
export default function HotTipProvider({children}) {
  const context = useReducer(HotTipReducer, {})
  return (
    <HotTipContext.Provider value={context}>
      <>
        <HotTipAnchor />
        {children}
      </>
    </HotTipContext.Provider>
  )
}
HotTipProvider.propTypes = {
  children: PropTypes.node,
}
|
<reponame>WaleedSymbyo/mn
#include "mn/Socket.h"
#include "mn/Fabric.h"
#include <WinSock2.h>
#include <WS2tcpip.h>
namespace mn
{
// RAII guard that initializes WinSock for the whole process.
// The single static instance below runs WSAStartup during static
// initialization and WSACleanup at static destruction.
struct _WIN_NET_INIT
{
	_WIN_NET_INIT()
	{
		WORD wVersionRequested;
		WSADATA wsaData;
		int err;

		// Request WinSock 2.2; assert hard if it is unavailable.
		wVersionRequested = MAKEWORD(2, 2);
		err = WSAStartup(wVersionRequested, &wsaData);
		mn_assert_msg(err == 0, "WSAStartup failed");
		mn_assert_msg(LOBYTE(wsaData.wVersion) == 2 && HIBYTE(wsaData.wVersion) == 2, "Could not find a usable version of Winsock.dll");
	}

	~_WIN_NET_INIT()
	{
		WSACleanup();
	}
};

// Process-wide initializer instance (see struct comment above).
static _WIN_NET_INIT _WIN_NET_INIT_INSTANCE;
// Translate mn's SOCKET_FAMILY enum to the OS-level address-family constant.
// Unknown values are a programming error (mn_unreachable).
inline static int
_socket_family_to_os(SOCKET_FAMILY f)
{
	if (f == SOCKET_FAMILY_IPV4)
		return AF_INET;
	if (f == SOCKET_FAMILY_IPV6)
		return AF_INET6;
	if (f == SOCKET_FAMILY_UNSPEC)
		return AF_UNSPEC;

	mn_unreachable();
	return 0;
}
// Translate mn's SOCKET_TYPE enum into the matching OS socket type and
// protocol pair (out-params `type` and `protocol`), as consumed by
// ::socket and getaddrinfo hints.
inline static void
_socket_type_to_os(SOCKET_TYPE t, int& type, int& protocol)
{
	switch (t)
	{
	case SOCKET_TYPE_TCP:
		type = SOCK_STREAM;
		protocol = IPPROTO_TCP;
		break;
	case SOCKET_TYPE_UDP:
		type = SOCK_DGRAM;
		protocol = IPPROTO_UDP;
		break;
	default:
		mn_unreachable();
		break;
	}
}
// Map a WSAGetLastError() code onto mn's coarse MN_SOCKET_ERROR categories.
// Codes that indicate the peer/connection went away collapse into
// CONNECTION_CLOSED; anything unrecognized becomes GENERIC_ERROR.
inline static MN_SOCKET_ERROR
_socket_error_from_os(int error)
{
	switch(error)
	{
	case WSAENETDOWN:
	case WSAECONNABORTED:
	case WSAECONNRESET:
	case WSAEDISCON:
	case WSAENETRESET:
	case WSAESHUTDOWN:
		return MN_SOCKET_ERROR_CONNECTION_CLOSED;
	case WSAEFAULT:
	case WSAEINVAL:
		// Bad buffer / bad arguments: a bug on our side, not the peer's.
		return MN_SOCKET_ERROR_INTERNAL_ERROR;
	case WSAENOBUFS:
		return MN_SOCKET_ERROR_OUT_OF_MEMORY;
	default:
		return MN_SOCKET_ERROR_GENERIC_ERROR;
	}
}
// API

// Stream-interface adapter: disposing a socket closes it.
void
ISocket::dispose()
{
	socket_close(this);
}

// Stream-interface read: blocks indefinitely and reports only the byte
// count (the MN_SOCKET_ERROR side of socket_read is discarded here).
size_t
ISocket::read(Block data)
{
	auto [read_bytes, _] = socket_read(this, data, INFINITE_TIMEOUT);
	return read_bytes;
}

// Stream-interface write: forwards to socket_write.
size_t
ISocket::write(Block data)
{
	return socket_write(this, data);
}

// Sockets have no meaningful size; always 0.
int64_t
ISocket::size()
{
	return 0;
}
// Create a new socket of the given family/type.
// Returns nullptr if the OS refuses to create the handle.
Socket
socket_open(SOCKET_FAMILY socket_family, SOCKET_TYPE socket_type)
{
	int af = 0;
	int type = 0;
	int protocol = 0;

	af = _socket_family_to_os(socket_family);
	_socket_type_to_os(socket_type, type, protocol);

	auto handle = socket(af, type, protocol);
	if (handle == INVALID_SOCKET)
		return nullptr;

	// Remember family/type so connect/bind can rebuild getaddrinfo hints.
	auto self = mn::alloc_construct<ISocket>();
	self->handle = handle;
	self->family = socket_family;
	self->type = socket_type;
	return self;
}
// Close the OS handle and free the ISocket instance.
// The Socket pointer is invalid after this call.
void
socket_close(Socket self)
{
	::closesocket(self->handle);
	mn::free_destruct(self);
}
// Resolve `address:port` and try to connect to each resolved endpoint in
// turn, returning true on the first success. The whole call is bracketed
// with worker_block_ahead/clear since getaddrinfo and connect can block.
bool
socket_connect(Socket self, const Str& address, const Str& port)
{
	addrinfo hints{}, *info;

	hints.ai_family = _socket_family_to_os(self->family);
	_socket_type_to_os(self->type, hints.ai_socktype, hints.ai_protocol);

	worker_block_ahead();
	mn_defer(worker_block_clear());

	int res = ::getaddrinfo(address.ptr, port.ptr, &hints, &info);
	if (res != 0)
		return false;
	// getaddrinfo's result list must always be released.
	mn_defer(::freeaddrinfo(info));

	for(auto it = info; it; it = it->ai_next)
	{
		res = ::connect(self->handle, it->ai_addr, int(it->ai_addrlen));
		if (res != SOCKET_ERROR)
			return true;
	}
	return false;
}
// Bind the socket to the given local port on a wildcard address
// (AI_PASSIVE). Returns false when resolution or bind fails.
bool
socket_bind(Socket self, const Str& port)
{
	addrinfo hints{}, *info;

	hints.ai_family = _socket_family_to_os(self->family);
	_socket_type_to_os(self->type, hints.ai_socktype, hints.ai_protocol);
	hints.ai_flags = AI_PASSIVE;

	// getaddrinfo/bind can block; bracket like socket_connect does.
	worker_block_ahead();
	mn_defer(worker_block_clear());

	int res = ::getaddrinfo(nullptr, port.ptr, &hints, &info);
	if (res != 0)
		return false;
	// BUGFIX: the addrinfo list was previously leaked on every call;
	// release it on all exit paths.
	mn_defer(::freeaddrinfo(info));

	// BUGFIX: try every resolved address instead of only the first one
	// (mirrors the retry loop in socket_connect).
	for (auto it = info; it; it = it->ai_next)
	{
		res = ::bind(self->handle, it->ai_addr, int(it->ai_addrlen));
		if (res != SOCKET_ERROR)
			return true;
	}
	return false;
}
// Put the socket into listening mode. max_connections == 0 selects the
// OS default backlog (SOMAXCONN). Returns false on failure.
bool
socket_listen(Socket self, int max_connections)
{
	if (max_connections == 0)
		max_connections = SOMAXCONN;

	worker_block_ahead();
	int res = ::listen(self->handle, max_connections);
	worker_block_clear();
	if (res == SOCKET_ERROR)
		return false;
	return true;
}
// Wait up to `timeout` for an incoming connection and accept it.
// Returns nullptr on timeout or on a poll/accept error.
Socket
socket_accept(Socket self, Timeout timeout)
{
	pollfd pfd_read{};
	pfd_read.fd = self->handle;
	pfd_read.events = POLLIN;

	// Map mn's Timeout onto WSAPoll's millisecond convention.
	INT milliseconds = 0;
	if (timeout == INFINITE_TIMEOUT)
		milliseconds = INFINITE;
	else if (timeout == NO_TIMEOUT)
		milliseconds = 0;
	else
		milliseconds = INT(timeout.milliseconds);

	{
		worker_block_ahead();
		mn_defer(worker_block_clear());
		int ready = WSAPoll(&pfd_read, 1, milliseconds);
		// BUGFIX: bail out on poll failure (SOCKET_ERROR, i.e. < 0) as
		// well as on timeout (0); previously an error fell through into
		// a potentially blocking ::accept call.
		if (ready <= 0)
			return nullptr;
	}

	auto handle = ::accept(self->handle, nullptr, nullptr);
	if(handle == INVALID_SOCKET)
		return nullptr;

	// Wrap the accepted handle; it inherits the listener's family/type.
	auto other = mn::alloc_construct<ISocket>();
	other->handle = handle;
	other->family = self->family;
	other->type = self->type;
	return other;
}
// Half-close: stop sending (SD_SEND) so the peer sees EOF, but keep
// receiving. The handle itself stays open until socket_close.
void
socket_disconnect(Socket self)
{
	::shutdown(self->handle, SD_SEND);
}
// Read up to data.size bytes with a timeout.
// Returns the byte count on success (0 can mean the peer closed an
// ordered shutdown), MN_SOCKET_ERROR_TIMEOUT when nothing became
// readable in time, or a mapped socket error otherwise.
Result<size_t, MN_SOCKET_ERROR>
socket_read(Socket self, Block data, Timeout timeout)
{
	pollfd pfd_read{};
	pfd_read.fd = self->handle;
	pfd_read.events = POLLIN;

	WSABUF data_buf{};
	data_buf.len = ULONG(data.size);
	data_buf.buf = (char*)data.ptr;
	DWORD flags = 0;

	// Map mn's Timeout onto WSAPoll's millisecond convention.
	INT milliseconds = 0;
	if (timeout == INFINITE_TIMEOUT)
		milliseconds = INFINITE;
	else if (timeout == NO_TIMEOUT)
		milliseconds = 0;
	else
		milliseconds = INT(timeout.milliseconds);

	worker_block_ahead();
	mn_defer(worker_block_clear());

	int ready = ::WSAPoll(&pfd_read, 1, milliseconds);
	if (ready > 0)
	{
		// Socket is readable: a single WSARecv fills the caller's buffer.
		DWORD recieved_bytes = 0;
		auto res = ::WSARecv(
			self->handle,
			&data_buf,
			1,
			&recieved_bytes,
			&flags,
			NULL,
			NULL
		);
		if (res == SOCKET_ERROR)
		{
			return _socket_error_from_os(WSAGetLastError());
		}
		else
		{
			return recieved_bytes;
		}
	}
	else if (ready == SOCKET_ERROR)
	{
		return _socket_error_from_os(WSAGetLastError());
	}
	else
	{
		// ready == 0: nothing became readable before the deadline.
		return MN_SOCKET_ERROR_TIMEOUT;
	}
}
// Send data.size bytes (single WSASend call). Returns the number of
// bytes actually sent, or 0 on error.
size_t
socket_write(Socket self, Block data)
{
	size_t sent_bytes = 0;
	WSABUF data_buf{};
	data_buf.len = ULONG(data.size);
	data_buf.buf = (char*)data.ptr;
	DWORD flags = 0;

	worker_block_ahead();
	// NOTE(review): casting &sent_bytes (size_t, 8 bytes) to LPDWORD lets
	// WSASend write only the low 4 bytes; this relies on sent_bytes being
	// zero-initialized and on little-endian layout — confirm intent.
	int status = ::WSASend(
		self->handle,
		&data_buf,
		1,
		(LPDWORD)&sent_bytes,
		flags,
		NULL,
		NULL
	);
	worker_block_clear();

	if(status == 0)
		return sent_bytes;
	return 0;
}
// Expose the raw OS handle (e.g. for select/poll interop).
int64_t
socket_fd(Socket self)
{
	return self->handle;
}
}
|
# Termux package build recipe for Thunar (the XFCE file manager).
# SRCURL derives the release directory from the major.minor of the version.
TERMUX_PKG_HOMEPAGE=https://docs.xfce.org/xfce/thunar/start
TERMUX_PKG_DESCRIPTION="Modern file manager for XFCE environment"
TERMUX_PKG_LICENSE="GPL-2.0, LGPL-2.1"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=1.8.15
TERMUX_PKG_SRCURL=https://archive.xfce.org/src/xfce/thunar/${TERMUX_PKG_VERSION%.*}/thunar-$TERMUX_PKG_VERSION.tar.bz2
TERMUX_PKG_SHA256=7624560cf21f13869804947042610aab22075146b711593f11ceb9e494277c93
TERMUX_PKG_DEPENDS="desktop-file-utils, exo, hicolor-icon-theme, libexif, libnotify, libpng, libxfce4ui, libxfce4util"
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2318-1
#
# Security announcement date: 2011-10-06 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:18 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - cyrus-imapd-2.2:2.2.13-19+squeeze2
#
# Last versions recommended by security team:
# - cyrus-imapd-2.2:2.2.13-19+squeeze2
#
# CVE List:
# - CVE-2011-3372
# - CVE-2011-3208
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Upgrade only (never fresh-install) the patched cyrus-imapd package.
sudo apt-get install --only-upgrade cyrus-imapd-2.2=2.2.13-19+squeeze2 -y
|
// Cypress end-to-end smoke test: create, edit, and delete a D&D 3.5
// character using the Local Storage backend.
describe('D&D 3.5 Happy Path', () => {
  it('Can create and delete a D&D 3.5 Character', () => {
    cy.visit('/');
    // Select the storage backend, then navigate to the D&D 3.5 section.
    cy.get('[data-cy=serviceSelect]').select('Local Storage');
    cy.get('.selectorHeader > li > a[href="/dd35"]').click();
    cy.location('pathname').should('eq', '/dd35');
    // Create a character named "cypress test".
    cy.get('input').type('cypress test').should('have.value', 'cypress test');
    cy.get('button').contains('Create').click();
    // Open and close the edit dialog, then delete the row.
    cy.get('tbody > tr').contains('cypress test').parent().contains('Edit').click();
    cy.get('button').contains('Close').click();
    cy.get('tbody > tr').contains('cypress test').parent().contains('Delete').click();
    // The row must be gone after deletion.
    cy.get('tbody > tr td:contains("cypress test")').should('not.exist');
  });
});
|
#!/bin/sh
# Container entrypoint: collect static assets, apply migrations,
# then serve the Django app with gunicorn on port 8000.
python manage.py collectstatic --noinput
python manage.py migrate
gunicorn articles.wsgi -b 0.0.0.0:8000 --workers=4 --timeout 300
|
<reponame>jollyblade/migrations
/**
* Copyright 2010-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.migration;
import org.apache.ibatis.parsing.PropertyParser;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
/**
 * Reader that splits a migration script into its "do" and "undo" halves
 * and serves one of them (chosen by the {@code undo} flag) with
 * {@code ${...}} variables substituted via {@link PropertyParser}.
 *
 * Lines matching {@code -- //...} are treated as directives; a directive
 * containing {@code @UNDO} switches accumulation from the do-script to
 * the undo-script, and the {@code //} marker is stripped to a plain
 * SQL comment.
 */
public class MigrationReader extends Reader {

  private static final String LINE_SEPARATOR = System.getProperty("line.separator", "\n");

  // The selected (do or undo), variable-expanded script.
  private Reader target;

  public MigrationReader(File file, String charset, boolean undo, Properties properties) throws IOException {
    this(new FileInputStream(file), charset, undo, properties);
  }

  public MigrationReader(InputStream inputStream, String charset, boolean undo, Properties properties) throws IOException {
    final Reader source = scriptFileReader(inputStream, charset);
    // Reserved connection/config keys are masked out of the variable set.
    final Properties variables = filterVariables(properties == null ? new Properties() : properties);
    try {
      BufferedReader reader = new BufferedReader(source);
      StringBuilder doBuilder = new StringBuilder();
      StringBuilder undoBuilder = new StringBuilder();
      StringBuilder currentBuilder = doBuilder;
      String line;
      while ((line = reader.readLine()) != null) {
        if (line.trim().matches("^--\\s*//.*$")) {
          if (line.contains("@UNDO")) {
            // Everything from here on (including this line) goes to undo.
            currentBuilder = undoBuilder;
          }
          line = line.replaceFirst("--\\s*//", "-- ");
        }
        currentBuilder.append(line);
        currentBuilder.append(LINE_SEPARATOR);
      }
      if (undo) {
        target = new StringReader(PropertyParser.parse(undoBuilder.toString(), variables));
      } else {
        target = new StringReader(PropertyParser.parse(doBuilder.toString(), variables));
      }
    } finally {
      source.close();
    }
  }

  @Override
  public int read(char[] cbuf, int off, int len) throws IOException {
    return target.read(cbuf, off, len);
  }

  @Override
  public void close() throws IOException {
    target.close();
  }

  /** Wraps the stream in a reader, honoring the charset when given. */
  protected Reader scriptFileReader(InputStream inputStream, String charset) throws FileNotFoundException, UnsupportedEncodingException {
    if (charset == null || charset.length() == 0) {
      return new InputStreamReader(inputStream);
    } else {
      return new InputStreamReader(inputStream, charset);
    }
  }

  /**
   * Returns a Properties view that hides well-known configuration keys
   * (driver, url, credentials, ...) so they are never substituted into
   * migration scripts.
   */
  @SuppressWarnings("serial")
  private Properties filterVariables(final Properties properties) {
    final Set<String> KNOWN_PROPERTIES_TO_IGNORE = new HashSet<String>() {{
      addAll(Arrays.asList(
          "time_zone", "script_char_set",
          "driver", "url", "username", "password",
          "send_full_script", "delimiter", "full_line_delimiter",
          "auto_commit", "driver_path"));
    }};
    return new Properties() {
      @Override
      public synchronized boolean containsKey(Object o) {
        return !KNOWN_PROPERTIES_TO_IGNORE.contains(o) && properties.containsKey(o);
      }

      @Override
      public String getProperty(String key) {
        return KNOWN_PROPERTIES_TO_IGNORE.contains(key) ? null : properties.getProperty(key);
      }
    };
  }
}
|
// Doxygen-generated navigation data for ClLstmFloatWorkload.cpp:
// [display name, anchor target, children].
var _cl_lstm_float_workload_8cpp =
[
    [ "ClLstmFloatWorkloadValidate", "_cl_lstm_float_workload_8cpp.xhtml#a90ab88fe4c7aa9466c4653404a6b2213", null ]
];
|
<reponame>nodeca/nodeca.users
// Delete moderator's note for user
//
'use strict';
// API method: delete a moderator's note about a user.
// Wire order: auth/permission checks -> fetch note -> delete permission -> delete.
module.exports = function (N, apiPath) {

  N.validate(apiPath, {
    note_id: { format: 'mongo', required: true }
  });


  // Check auth and permissions
  //
  N.wire.before(apiPath, async function check_auth_and_permissions(env) {
    // Hide the endpoint entirely (404, not 403) from guests and from
    // users who cannot add notes in the first place.
    if (!env.user_info.is_member) throw N.io.NOT_FOUND;

    let can_add_mod_notes = await env.extras.settings.fetch('can_add_mod_notes');

    if (!can_add_mod_notes) throw N.io.NOT_FOUND;
  });


  // Fetch note
  //
  N.wire.before(apiPath, async function fetch_note(env) {
    env.data.note = await N.models.users.ModeratorNote
                              .findOne({ _id: env.params.note_id });

    if (!env.data.note) throw N.io.BAD_REQUEST;
  });


  // Check permission to delete
  //
  N.wire.before(apiPath, async function check_delete_permission(env) {
    // Global deleters may remove any note; otherwise the author may
    // delete their own note within the configured time window.
    let can_delete_mod_notes = await env.extras.settings.fetch('can_delete_mod_notes');

    if (can_delete_mod_notes) return;

    let mod_notes_edit_max_time = await env.extras.settings.fetch('mod_notes_edit_max_time');

    if (String(env.data.note.from) !== env.user_info.user_id) throw N.io.FORBIDDEN;

    // mod_notes_edit_max_time is in minutes; 0 means "no limit".
    if (mod_notes_edit_max_time !== 0 &&
        env.data.note.ts < Date.now() - mod_notes_edit_max_time * 60 * 1000) {
      throw {
        code: N.io.CLIENT_ERROR,
        message: env.t('err_perm_expired')
      };
    }
  });


  // Delete note
  //
  N.wire.on(apiPath, async function delete_note(env) {
    await N.models.users.ModeratorNote.deleteOne({ _id: env.data.note._id });
  });
};
|
import * as assert from 'assert';
import { Pattern } from '../types';
import * as util from './pattern';
// Unit tests for the pattern utilities (fast-glob style helpers).
describe('Utils → Pattern', () => {
  describe('.isStaticPattern', () => {
    it('should return true for static pattern', () => {
      const actual = util.isStaticPattern('dir');

      assert.ok(actual);
    });

    it('should return false for dynamic pattern', () => {
      const actual = util.isStaticPattern('*');

      assert.ok(!actual);
    });
  });
  // Exhaustive coverage of what counts as a "dynamic" glob pattern:
  // escapes, glob symbols, regex groups/classes, extglobs, braces —
  // and the unfinished forms that must be treated as static.
  describe('.isDynamicPattern', () => {
    describe('Without options', () => {
      it('should return true for patterns that include the escape symbol', () => {
        assert.ok(util.isDynamicPattern('\\'));
      });

      it('should return true for patterns that include common glob symbols', () => {
        assert.ok(util.isDynamicPattern('*'));
        assert.ok(util.isDynamicPattern('abc/*'));
        assert.ok(util.isDynamicPattern('?'));
        assert.ok(util.isDynamicPattern('abc/?'));
        assert.ok(util.isDynamicPattern('!abc'));
      });

      it('should return true for patterns that include regex group symbols', () => {
        assert.ok(util.isDynamicPattern('(a|)'));
        assert.ok(util.isDynamicPattern('(a|b)'));
        assert.ok(util.isDynamicPattern('abc/(a|b)'));
      });

      it('should return true for patterns that include regex character class symbols', () => {
        assert.ok(util.isDynamicPattern('[abc]'));
        assert.ok(util.isDynamicPattern('abc/[abc]'));
        assert.ok(util.isDynamicPattern('[^abc]'));
        assert.ok(util.isDynamicPattern('abc/[^abc]'));
        assert.ok(util.isDynamicPattern('[1-3]'));
        assert.ok(util.isDynamicPattern('abc/[1-3]'));
        assert.ok(util.isDynamicPattern('[[:alpha:][:digit:]]'));
        assert.ok(util.isDynamicPattern('abc/[[:alpha:][:digit:]]'));
      });

      it('should return true for patterns that include glob extension symbols', () => {
        assert.ok(util.isDynamicPattern('@()'));
        assert.ok(util.isDynamicPattern('@(a)'));
        assert.ok(util.isDynamicPattern('@(a|b)'));
        assert.ok(util.isDynamicPattern('abc/!(a|b)'));
        assert.ok(util.isDynamicPattern('*(a|b)'));
        assert.ok(util.isDynamicPattern('?(a|b)'));
        assert.ok(util.isDynamicPattern('+(a|b)'));
      });

      it('should return true for patterns that include brace expansions symbols', () => {
        assert.ok(util.isDynamicPattern('{,}'));
        assert.ok(util.isDynamicPattern('{a,}'));
        assert.ok(util.isDynamicPattern('{,b}'));
        assert.ok(util.isDynamicPattern('{a,b}'));
        assert.ok(util.isDynamicPattern('{1..3}'));
      });

      it('should return false for "!" symbols when a symbol is not specified first in the string', () => {
        assert.ok(!util.isDynamicPattern('abc!'));
      });

      it('should return false for a completely static pattern', () => {
        assert.ok(!util.isDynamicPattern(''));
        assert.ok(!util.isDynamicPattern('.'));
        assert.ok(!util.isDynamicPattern('abc'));
        assert.ok(!util.isDynamicPattern('~abc'));
        assert.ok(!util.isDynamicPattern('~/abc'));
        assert.ok(!util.isDynamicPattern('+~/abc'));
        assert.ok(!util.isDynamicPattern('@.(abc)'));
        assert.ok(!util.isDynamicPattern('(a b)'));
        assert.ok(!util.isDynamicPattern('(a b)'));
        assert.ok(!util.isDynamicPattern('[abc'));
      });

      it('should return false for unfinished regex character class', () => {
        assert.ok(!util.isDynamicPattern('['));
        assert.ok(!util.isDynamicPattern('[abc'));
      });

      it('should return false for unfinished regex group', () => {
        assert.ok(!util.isDynamicPattern('(a|b'));
        assert.ok(!util.isDynamicPattern('abc/(a|b'));
      });

      it('should return false for unfinished glob extension', () => {
        assert.ok(!util.isDynamicPattern('@('));
        assert.ok(!util.isDynamicPattern('@(a'));
        assert.ok(!util.isDynamicPattern('@(a|'));
        assert.ok(!util.isDynamicPattern('@(a|b'));
      });

      it('should return false for unfinished brace expansions', () => {
        assert.ok(!util.isDynamicPattern('{'));
        assert.ok(!util.isDynamicPattern('{a'));
        assert.ok(!util.isDynamicPattern('{,'));
        assert.ok(!util.isDynamicPattern('{a,'));
        assert.ok(!util.isDynamicPattern('{a,b'));
      });
    });

    describe('With options', () => {
      it('should return true for patterns that include "*?" symbols even when the "extglob" option is disabled', () => {
        assert.ok(util.isDynamicPattern('*(a|b)', { extglob: false }));
        assert.ok(util.isDynamicPattern('?(a|b)', { extglob: false }));
      });

      it('should return true when the "caseSensitiveMatch" option is enabled', () => {
        assert.ok(util.isDynamicPattern('a', { caseSensitiveMatch: false }));
      });

      it('should return false for glob extension when the "extglob" option is disabled', () => {
        assert.ok(!util.isDynamicPattern('@(a|b)', { extglob: false }));
        assert.ok(!util.isDynamicPattern('abc/!(a|b)', { extglob: false }));
        assert.ok(!util.isDynamicPattern('+(a|b)', { extglob: false }));
      });

      it('should return false for brace expansions when the "braceExpansion" option is disabled', () => {
        assert.ok(!util.isDynamicPattern('{a,b}', { braceExpansion: false }));
        assert.ok(!util.isDynamicPattern('{1..3}', { braceExpansion: false }));
      });
    });
  });
  // Converting between positive and negated ("!"-prefixed) patterns.
  describe('.convertToPositivePattern', () => {
    it('should returns converted positive pattern', () => {
      const expected = '*.js';

      const actual = util.convertToPositivePattern('!*.js');

      assert.strictEqual(actual, expected);
    });

    it('should returns pattern without changes', () => {
      const expected = '*.js';

      const actual = util.convertToPositivePattern('*.js');

      assert.strictEqual(actual, expected);
    });
  });

  describe('.convertToNegativePattern', () => {
    it('should returns converted negative pattern', () => {
      const expected = '!*.js';

      const actual = util.convertToNegativePattern('*.js');

      assert.strictEqual(actual, expected);
    });
  });
  // Classification of patterns as negated vs. positive. Note the extglob
  // case: "!(...)" is an extglob, not a negation.
  describe('.isNegativePattern', () => {
    it('should returns true', () => {
      const actual = util.isNegativePattern('!*.md');

      assert.ok(actual);
    });

    it('should returns false', () => {
      const actual = util.isNegativePattern('*.md');

      assert.ok(!actual);
    });

    it('should returns false for extglob', () => {
      const actual = util.isNegativePattern('!(a|b|c)');

      assert.ok(!actual);
    });
  });

  describe('.isPositivePattern', () => {
    it('should returns true', () => {
      const actual = util.isPositivePattern('*.md');

      assert.ok(actual);
    });

    it('should returns false', () => {
      const actual = util.isPositivePattern('!*.md');

      assert.ok(!actual);
    });
  });
  // Partitioning a mixed pattern list into its negative/positive subsets.
  describe('.getNegativePatterns', () => {
    it('should returns only negative patterns', () => {
      const expected = ['!*.spec.js'];

      const actual = util.getNegativePatterns(['*.js', '!*.spec.js', '*.ts']);

      assert.deepStrictEqual(actual, expected);
    });

    it('should returns empty array', () => {
      const expected: Pattern[] = [];

      const actual = util.getNegativePatterns(['*.js', '*.ts']);

      assert.deepStrictEqual(actual, expected);
    });
  });

  describe('.getPositivePatterns', () => {
    it('should returns only positive patterns', () => {
      const expected = ['*.js', '*.ts'];

      const actual = util.getPositivePatterns(['*.js', '!*.spec.js', '*.ts']);

      assert.deepStrictEqual(actual, expected);
    });

    it('should returns empty array', () => {
      const expected: Pattern[] = [];

      const actual = util.getPositivePatterns(['!*.js', '!*.ts']);

      assert.deepStrictEqual(actual, expected);
    });
  });
  // Extracting the static base directory, and globstar detection.
  describe('.getBaseDirectory', () => {
    it('should returns base directory', () => {
      const expected = 'root';

      const actual = util.getBaseDirectory('root/*.js');

      assert.strictEqual(actual, expected);
    });

    it('should returns base directory without slash transformation', () => {
      const expected = '.';

      // Escaped parentheses keep the pattern static, so the base is ".".
      const actual = util.getBaseDirectory('file-\\(suffix\\).md');

      assert.strictEqual(actual, expected);
    });
  });

  describe('.hasGlobStar', () => {
    it('should returns true for pattern that includes globstar', () => {
      const actual = util.hasGlobStar('**/*.js');

      assert.ok(actual);
    });

    it('should returns false for pattern that has no globstar', () => {
      const actual = util.hasGlobStar('*.js');

      assert.ok(!actual);
    });
  });
  // Trailing "/**" detection and its effect on directory read depth.
  describe('.endsWithSlashGlobStar', () => {
    it('should returns true for pattern that ends with slash and globstar', () => {
      const actual = util.endsWithSlashGlobStar('name/**');

      assert.ok(actual);
    });

    it('should returns false for pattern that has no slash, but ends with globstar', () => {
      const actual = util.endsWithSlashGlobStar('**');

      assert.ok(!actual);
    });

    it('should returns false for pattern that does not ends with globstar', () => {
      const actual = util.endsWithSlashGlobStar('name/**/*');

      assert.ok(!actual);
    });
  });

  describe('.isAffectDepthOfReadingPattern', () => {
    it('should return true for pattern that ends with slash and globstar', () => {
      const actual = util.isAffectDepthOfReadingPattern('name/**');

      assert.ok(actual);
    });

    it('should return true for pattern when the last partial of the pattern is static pattern', () => {
      const actual = util.isAffectDepthOfReadingPattern('**/name');

      assert.ok(actual);
    });

    it('should return false', () => {
      const actual = util.isAffectDepthOfReadingPattern('**/name/*');

      assert.ok(!actual);
    });
  });
  // Naive depth = number of pattern segments minus the static base
  // directory segments minus one.
  describe('.getNaiveDepth', () => {
    it('should return 0', () => {
      const expected = 0; // 1 (pattern) - 1 (base directory)

      const actual = util.getNaiveDepth('*.js');

      assert.strictEqual(actual, expected);
    });

    it('should returns 1', () => {
      const expected = 1; // 4 (pattern) - 2 (base directory) - 1

      const actual = util.getNaiveDepth('a/b/*/*.js');

      assert.strictEqual(actual, expected);
    });
  });

  describe('.getMaxNaivePatternsDepth', () => {
    it('should return 1', () => {
      const expected = 1;

      const actual = util.getMaxNaivePatternsDepth(['*.js', './*.js']);

      assert.strictEqual(actual, expected);
    });

    it('should return 2', () => {
      const expected = 2;

      const actual = util.getMaxNaivePatternsDepth(['*.js', './*/*.js']);

      assert.strictEqual(actual, expected);
    });
  });
  // Compiling patterns to RegExp and matching paths against them.
  describe('.makeRE', () => {
    it('should return regexp for provided pattern', () => {
      const actual = util.makeRe('*.js', {});

      assert.ok(actual instanceof RegExp);
    });
  });

  describe('.convertPatternsToRe', () => {
    it('should return regexps for provided patterns', () => {
      const [actual] = util.convertPatternsToRe(['*.js'], {});

      assert.ok(actual instanceof RegExp);
    });
  });

  describe('.matchAny', () => {
    it('should return true', () => {
      const actual = util.matchAny('fixtures/nested/file.txt', [/fixture/, /fixtures\/nested\/file/]);

      assert.ok(actual);
    });

    it('should return false', () => {
      const actual = util.matchAny('fixtures/directory', [/fixtures\/file/]);

      assert.ok(!actual);
    });

    it('should return true for path with leading slash', () => {
      // A "./" prefix on the path must not prevent a match.
      const pattern = util.makeRe('*.js', {});

      const actual = util.matchAny('./test.js', [pattern]);

      assert.ok(actual);
    });
  });
});
|
package telegram
import (
"context"
"time"
tgbotapi "github.com/go-telegram-bot-api/telegram-bot-api/v5"
"github.com/oneils/ynab-helper/bot/pkg/transaction"
)
const commandStart = "start"
// handleMessage persists an incoming (non-command) Telegram message as a
// transaction record and replies with a short confirmation. On a save
// failure the error text is sent back to the chat as a reply instead.
func (b *Bot) handleMessage(message *tgbotapi.Message) error {
	txnMsg := transaction.TxnMessage{
		ChatID:   message.Chat.ID,
		UserName: message.From.UserName,
		Text:     message.Text,
	}

	// Timestamp is recorded in UTC to keep storage timezone-independent.
	err := b.txn.Save(context.Background(), txnMsg, time.Now().UTC())
	if err != nil {
		// Report the save error back to the user as a reply.
		msg := tgbotapi.NewMessage(message.Chat.ID, err.Error())
		msg.ReplyToMessageID = message.MessageID
		_, err = b.bot.Send(msg)
		return err
	}

	b.logger.Printf("Verification: [%s] %d", message.From.UserName, message.Chat.ID)

	msg := tgbotapi.NewMessage(message.Chat.ID, "Saved")
	_, err = b.bot.Send(msg)
	return err
}
// handleCommand dispatches bot commands ("/start", ...) to their handlers;
// anything unrecognized goes to handleUnknownCommand.
func (b *Bot) handleCommand(message *tgbotapi.Message) error {
	switch message.Command() {
	case commandStart:
		return b.handleStartCommand(message)
	default:
		return b.handleUnknownCommand(message)
	}
}
// handleStartCommand replies to /start with a placeholder onboarding message.
func (b *Bot) handleStartCommand(message *tgbotapi.Message) error {
	msg := tgbotapi.NewMessage(message.Chat.ID, "Start instructions will be here later")
	_, err := b.bot.Send(msg)
	return err
}
// handleUnknownCommand replies to any unrecognized command.
// BUGFIX: the reply previously duplicated the /start placeholder text
// ("Start instructions will be here later"), which misled users; it now
// clearly states the command is unknown and points at /start.
func (b *Bot) handleUnknownCommand(message *tgbotapi.Message) error {
	msg := tgbotapi.NewMessage(message.Chat.ID, "Unknown command. Send /start to see the available instructions.")
	_, err := b.bot.Send(msg)
	return err
}
|
def prepare_test_docs(trigger_extractor, trigger_generator, parameter_file, word_embeddings):
    """Load and prepare the test documents listed in the parameter file.

    :param trigger_extractor: trigger extractor; must not be None.
    :param trigger_generator: optional generator. BUGFIX: this argument was
        previously overwritten unconditionally with
        ``trigger_extractor.generator``; it is now only used as written when
        the caller supplies one (falling back to the extractor's generator
        when None). Note the value is currently unused below — kept for
        interface compatibility.
    :param parameter_file: dict-like config; ``data.test.filelist`` names
        the documents to load.
    :param word_embeddings: embeddings passed through to ``prepare_docs``.
    :return: the prepared test documents.
    :raises RuntimeError: if ``trigger_extractor`` is None.
    """
    if trigger_extractor is None:
        raise RuntimeError('Trigger extractor must be specified in parameter file.')
    if trigger_generator is None:
        trigger_generator = trigger_extractor.generator
    test_docs = prepare_docs(parameter_file['data']['test']['filelist'], word_embeddings)
    return test_docs
|
package com.common.luakit;
import org.chromium.base.ThreadUtils;
public class NotificationHelper {
private static native void postNotificationNative(int type , Object o);
public static void postNotification( final int type ,final Object o){
ThreadUtils.runOnUiThread(new Runnable() {
@Override
public void run() {
postNotificationNative(type,o);
}
});
}
}
|
#
# Copyright (c) 2019 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import json
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.urls import reverse
from bridge.utils import KleverTestCase, ArchiveFileContent
from bridge.vars import (
SAFE_VERDICTS, UNSAFE_VERDICTS, MARK_SAFE, MARK_UNSAFE, MARK_STATUS, PROBLEM_DESC_FILE, ASSOCIATION_TYPE
)
from users.models import User
from jobs.models import Job
from reports.models import ReportSafe, ReportUnsafe, ReportUnknown, ReportComponent
from marks.models import (
MarkSafe, MarkUnsafe, MarkUnknown, MarkSafeHistory, MarkUnsafeHistory, MarkUnknownHistory,
SafeTag, UnsafeTag, MarkSafeTag, MarkUnsafeTag, MarkSafeReport, MarkUnsafeReport, MarkUnknownReport,
SafeAssociationLike, UnsafeAssociationLike, UnknownAssociationLike
)
from reports.test import DecideJobs, SJC_1
REPORT_ARCHIVES = os.path.join(settings.BASE_DIR, 'reports', 'test_files')
class TestMarks(KleverTestCase):
def setUp(self):
    """Prepare a fully-populated environment for the mark tests.

    Creates a superuser plus manager/service accounts, logs in as the
    manager, runs population, starts a decision for the first job and
    decides it via the service account. Also records the fixture archive
    names used by the individual tests.

    NOTE(review): ``populate_users`` and (in test_safe) ``JOB_STATUS``
    are not among the imports visible in this chunk — confirm they are
    imported elsewhere in the file.
    """
    super(TestMarks, self).setUp()
    User.objects.create_superuser('superuser', '', 'top_secret')
    populate_users(
        manager={'username': 'manager', 'password': '<PASSWORD>'},
        service={'username': 'service', 'password': '<PASSWORD>'}
    )
    self.client.post(reverse('users:login'), {'username': 'manager', 'password': '<PASSWORD>'})
    self.client.post(reverse('population'))
    # Kick off and decide a job so reports exist for the mark tests.
    self.job = Job.objects.all().first()
    self.assertIsNotNone(self.job)
    self.client.post('/jobs/run_decision/%s/' % self.job.pk, {'mode': 'default', 'conf_name': 'development'})
    DecideJobs('service', 'service', SJC_1)
    # Fixture archives consumed by the individual test methods.
    self.safe_archive = 'test_safemark.zip'
    self.unsafe_archive = 'test_unsafemark.zip'
    self.unknown_archive = 'test_unknownmark.zip'
    self.test_tagsfile = 'test_tags.json'
    self.all_marks_arch = 'All-marks.zip'
def test_safe(self):
    """End-to-end lifecycle test for safe tags and safe marks.

    Covers: safe-tag CRUD and upload/download, mark creation and editing,
    version management, (un)confirming and (dis)liking associations,
    single-mark download/upload, reports' lists pages, and bulk
    download/upload of all marks.
    """
    self.assertEqual(Job.objects.get(pk=self.job.pk).status, JOB_STATUS[3][0])
    # Delete populated marks of all three kinds so the test starts clean
    response = self.client.post('/marks/delete/', {
        'type': 'safe', 'ids': json.dumps(list(MarkSafe.objects.values_list('id', flat=True)))
    })
    self.assertEqual(response.status_code, 200)
    response = self.client.post('/marks/delete/', {
        'type': 'unsafe', 'ids': json.dumps(list(MarkUnsafe.objects.values_list('id', flat=True)))
    })
    self.assertEqual(response.status_code, 200)
    response = self.client.post('/marks/delete/', {
        'type': 'unknown', 'ids': json.dumps(list(MarkUnknown.objects.values_list('id', flat=True)))
    })
    self.assertEqual(response.status_code, 200)

    # Create 5 safe tags: tag1 is a root, each following tag is a child of the previous one
    created_tags = []
    response = self.client.post('/marks/tags/save_tag/', {
        'action': 'create', 'tag_type': 'safe', 'parent_id': '0', 'name': 'test:safe:tag:1',
        'description': 'Test safe tag description'
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    try:
        created_tags.append(SafeTag.objects.get(tag='test:safe:tag:1'))
    except ObjectDoesNotExist:
        self.fail('Safe tag was not created')
    self.assertEqual(created_tags[0].description, 'Test safe tag description')
    self.assertEqual(created_tags[0].parent, None)
    for i in range(2, 6):
        self.client.post('/marks/tags/save_tag/', {
            'action': 'create', 'tag_type': 'safe',
            'parent_id': created_tags[i - 2].pk, 'name': 'test:safe:tag:%s' % i, 'description': ''
        })
        created_tags.append(SafeTag.objects.get(tag='test:safe:tag:%s' % i))
        self.assertEqual(created_tags[i - 1].parent, created_tags[i - 2])

    # Get tag parents for editing tag 'test:safe:tag:3'
    response = self.client.post('/marks/tags/safe/get_tag_data/', {'tag_id': created_tags[2].pk})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    # Get tag parents for creating new tag
    response = self.client.post('/marks/tags/safe/get_tag_data/')
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))

    # Edit 5th tag: move it under tag3 and change its description
    response = self.client.post('/marks/tags/save_tag/', {
        'action': 'edit', 'tag_type': 'safe', 'parent_id': created_tags[2].pk,
        'name': 'test:safe:tag:5', 'tag_id': created_tags[4].pk,
        'description': 'Test safe tag 5 description'
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    try:
        created_tags[4] = SafeTag.objects.get(tag='test:safe:tag:5')
    except ObjectDoesNotExist:
        self.fail('Tag 5 was not found after editing')
    self.assertEqual(created_tags[4].parent, created_tags[2])
    self.assertEqual(created_tags[4].description, 'Test safe tag 5 description')

    # Remove 3d tag and check that its children (tag4 and tag5) are also removed
    response = self.client.post('/marks/tags/safe/delete/%s/' % created_tags[2].pk)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(
        SafeTag.objects.filter(tag__in=['test:safe:tag:3', 'test:safe:tag:4', 'test:safe:tag:5']).count(), 0
    )
    del created_tags[2:]

    # Get tags data (for edit/create mark page). Just check that there is no error in response.
    response = self.client.post('/marks/safe/tags_data/', {'selected_tags': json.dumps([created_tags[1].pk])})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))

    # Download tags, wipe them, then restore them by uploading the archive back
    response = self.client.get(reverse('marks:download_tags', args=['safe']))
    self.assertEqual(response.status_code, 200)
    with open(os.path.join(settings.MEDIA_ROOT, self.test_tagsfile), mode='wb') as fp:
        for chunk in response.streaming_content:
            fp.write(chunk)
    SafeTag.objects.all().delete()
    with open(os.path.join(settings.MEDIA_ROOT, self.test_tagsfile), mode='rb') as fp:
        response = self.client.post('/marks/tags/safe/upload/', {'file': fp})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    # Re-fetch tags: uploading created new DB rows with new primary keys
    for i in range(0, len(created_tags)):
        try:
            created_tags[i] = SafeTag.objects.get(tag=created_tags[i].tag)
        except ObjectDoesNotExist:
            self.fail("Tags weren't uploaded")

    # Tags tree page
    response = self.client.get(reverse('marks:tags', args=['safe']))
    self.assertEqual(response.status_code, 200)

    # Get report
    safe = ReportSafe.objects.filter(root__job_id=self.job.pk).first()
    self.assertIsNotNone(safe)
    # Inline mark form
    response = self.client.get('/marks/safe/%s/create/inline/' % safe.id)
    self.assertEqual(response.status_code, 200)
    # Create mark page
    response = self.client.get(reverse('marks:mark_form', args=['safe', safe.pk, 'create']))
    self.assertEqual(response.status_code, 200)

    # Save mark (verdict "safe incorrect proof", tag2 selected so tag1 is implied as its parent)
    compare_attrs = list({'is_compare': associate, 'attr': a_name}
                         for a_name, associate in safe.attrs.values_list('attr__name__name', 'associate'))
    response = self.client.post(reverse('marks:mark_form', args=['safe', safe.pk, 'create']), {
        'data': json.dumps({
            'description': 'Mark description',
            'is_modifiable': True,
            'verdict': MARK_SAFE[1][0],
            'status': MARK_STATUS[2][0],
            'tags': [created_tags[1].pk],
            'attrs': compare_attrs
        })
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertIsNone(res.get('error'))
    self.assertIn('cache_id', res)
    cache_id = res['cache_id']

    # Check mark's tables
    try:
        mark = MarkSafe.objects.get(job=self.job, author__username='manager')
    except ObjectDoesNotExist:
        self.fail('Mark was not created')
    self.assertEqual(mark.type, MARK_TYPE[0][0])
    self.assertEqual(mark.verdict, MARK_SAFE[1][0])
    self.assertEqual(mark.status, MARK_STATUS[2][0])
    self.assertEqual(mark.version, 1)
    self.assertEqual(mark.description, 'Mark description')
    self.assertEqual(mark.is_modifiable, True)
    self.assertEqual(len(mark.versions.all()), 1)
    mark_version = MarkSafeHistory.objects.get(mark=mark)
    self.assertEqual(mark_version.verdict, mark.verdict)
    self.assertEqual(mark_version.version, 1)
    self.assertEqual(mark_version.author.username, 'manager')
    self.assertEqual(mark_version.status, mark.status)
    self.assertEqual(mark_version.description, mark.description)
    for mark_attr in mark_version.attrs.all():
        self.assertIn({'is_compare': mark_attr.is_compare, 'attr': mark_attr.attr.name.name}, compare_attrs)
    self.assertEqual(ReportSafe.objects.get(pk=safe.pk).verdict, SAFE_VERDICTS[1][0])
    self.assertEqual(MarkSafeReport.objects.filter(mark=mark, report=safe, type=ASSOCIATION_TYPE[1][0]).count(), 1)
    # Selecting tag2 must also attach its parent tag1 to the mark version
    self.assertEqual(len(MarkSafeTag.objects.filter(mark_version=mark_version, tag=created_tags[0])), 1)
    self.assertEqual(len(MarkSafeTag.objects.filter(mark_version=mark_version, tag=created_tags[1])), 1)
    try:
        rst = ReportSafeTag.objects.get(report__root__job=self.job, report__parent=None, tag=created_tags[0])
        self.assertEqual(rst.number, 1)
        rst = ReportSafeTag.objects.get(report__root__job=self.job, report__parent=None, tag=created_tags[1])
        self.assertEqual(rst.number, 1)
        rst = ReportSafeTag.objects.get(report__root__job=self.job, report_id=safe.parent_id, tag=created_tags[0])
        self.assertEqual(rst.number, 1)
        rst = ReportSafeTag.objects.get(report__root__job=self.job, report_id=safe.parent_id, tag=created_tags[1])
        self.assertEqual(rst.number, 1)
        srt = SafeReportTag.objects.get(report=safe, tag=created_tags[0])
        self.assertEqual(srt.number, 1)
        srt = SafeReportTag.objects.get(report=safe, tag=created_tags[1])
        self.assertEqual(srt.number, 1)
    except ObjectDoesNotExist:
        self.fail('Reports tags cache was not filled')

    # Associations changes
    response = self.client.get('/marks/safe/association_changes/%s/' % cache_id)
    self.assertEqual(response.status_code, 200)

    # Edit mark page
    response = self.client.get(reverse('marks:mark_form', args=['safe', mark.pk, 'edit']))
    self.assertEqual(response.status_code, 200)
    # Edit mark: new verdict, only tag1 now
    response = self.client.post(reverse('marks:mark_form', args=['safe', mark.pk, 'edit']), {
        'data': json.dumps({
            'description': 'New mark description',
            'is_modifiable': True,
            'verdict': MARK_SAFE[2][0],
            'status': MARK_STATUS[2][0],
            'tags': [created_tags[0].pk],
            'attrs': compare_attrs,
            'comment': 'Change 1'
        })
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertIsNone(res.get('error'))
    self.assertIn('cache_id', res)
    cache_id = res['cache_id']

    # Check mark's tables after editing
    try:
        mark = MarkSafe.objects.get(job=self.job, author__username='manager')
    except ObjectDoesNotExist:
        self.fail('Mark was not created')
    self.assertEqual(mark.verdict, MARK_SAFE[2][0])
    self.assertEqual(mark.version, 2)
    self.assertEqual(mark.description, 'New mark description')
    self.assertEqual(mark.is_modifiable, True)
    self.assertEqual(len(mark.versions.all()), 2)
    mark_version = MarkSafeHistory.objects.filter(mark=mark).order_by('-version').first()
    self.assertEqual(mark_version.version, 2)
    self.assertEqual(mark_version.verdict, mark.verdict)
    self.assertEqual(mark_version.author.username, 'manager')
    self.assertEqual(mark_version.description, mark.description)
    self.assertEqual(mark_version.comment, 'Change 1')
    self.assertEqual(ReportSafe.objects.get(pk=safe.pk).verdict, SAFE_VERDICTS[2][0])
    self.assertEqual(len(MarkSafeReport.objects.filter(mark=mark, report=safe)), 1)
    self.assertEqual(len(MarkSafeTag.objects.filter(mark_version=mark_version, tag=created_tags[0])), 1)
    self.assertEqual(len(MarkSafeTag.objects.filter(mark_version=mark_version, tag=created_tags[1])), 0)
    self.assertEqual(len(ReportSafeTag.objects.filter(report__root__job=self.job, report__parent=None)), 1)
    self.assertEqual(len(ReportSafeTag.objects.filter(report__root__job=self.job, report_id=safe.parent_id)), 1)
    try:
        srt = SafeReportTag.objects.get(report=safe, tag=created_tags[0])
        self.assertEqual(srt.number, 1)
    except ObjectDoesNotExist:
        self.fail('Reports tags cache was not filled')
    self.assertEqual(len(SafeReportTag.objects.filter(report=safe, tag=created_tags[1])), 0)

    # Associations changes
    response = self.client.get('/marks/safe/association_changes/%s/' % cache_id)
    self.assertEqual(response.status_code, 200)

    # Safe marks list page and the mark page itself
    response = self.client.get(reverse('marks:list', args=['safe']))
    self.assertEqual(response.status_code, 200)
    response = self.client.get(reverse('marks:mark', args=['safe', mark.id]))
    self.assertEqual(response.status_code, 200)
    # Inline mark form
    response = self.client.get('/marks/safe/%s/edit/inline/' % mark.id)
    self.assertEqual(response.status_code, 200)

    # Confirm/unconfirm association
    # Mark is automatically associated after its changes
    self.assertEqual(MarkSafeReport.objects.filter(mark=mark, report=safe, type=ASSOCIATION_TYPE[0][0]).count(), 1)
    response = self.client.post('/marks/association/safe/%s/%s/unconfirm/' % (safe.pk, mark.pk))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(MarkSafeReport.objects.filter(mark=mark, report=safe, type=ASSOCIATION_TYPE[2][0]).count(), 1)
    response = self.client.post('/marks/association/safe/%s/%s/confirm/' % (safe.pk, mark.pk))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(MarkSafeReport.objects.filter(mark=mark, report=safe, type=ASSOCIATION_TYPE[1][0]).count(), 1)

    # Like/dislike association (a dislike must replace a previous like)
    response = self.client.post('/marks/association/safe/%s/%s/like/' % (safe.id, mark.id))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(SafeAssociationLike.objects.filter(
        association__report=safe, association__mark=mark, dislike=False
    ).count(), 1)
    response = self.client.post('/marks/association/safe/%s/%s/dislike/' % (safe.id, mark.id))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(SafeAssociationLike.objects.filter(
        association__report=safe, association__mark=mark, dislike=True
    ).count(), 1)
    self.assertEqual(SafeAssociationLike.objects.filter(
        association__report=safe, association__mark=mark, dislike=False
    ).count(), 0)

    # Download mark
    response = self.client.get(reverse('marks:safe-download', args=[mark.pk]))
    self.assertEqual(response.status_code, 200)
    self.assertIn(response['Content-Type'], {'application/x-zip-compressed', 'application/zip'})
    with open(os.path.join(settings.MEDIA_ROOT, self.safe_archive), mode='wb') as fp:
        for content in response.streaming_content:
            fp.write(content)
    # Download mark in preset format
    response = self.client.get(reverse('marks:safe-download-preset', args=[mark.pk]))
    self.assertEqual(response.status_code, 200)

    # Delete mark; the report verdict must fall back to "safe unmarked"
    response = self.client.post('/marks/delete/', {'type': 'safe', 'ids': json.dumps([mark.id])})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(len(MarkSafe.objects.all()), 0)
    self.assertEqual(len(MarkSafeReport.objects.all()), 0)
    self.assertEqual(ReportSafe.objects.all().first().verdict, SAFE_VERDICTS[4][0])

    # Upload mark back from the downloaded archive
    with open(os.path.join(settings.MEDIA_ROOT, self.safe_archive), mode='rb') as fp:
        response = self.client.post('/marks/upload/', {'file': fp})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertIn('id', res)
    self.assertEqual(res.get('type'), 'safe')
    self.assertEqual(len(MarkSafe.objects.all()), 1)
    try:
        newmark = MarkSafe.objects.get(pk=res['id'])
    except ObjectDoesNotExist:
        self.fail('Mark was not uploaded')
    self.assertEqual(newmark.type, MARK_TYPE[2][0])
    self.assertEqual(newmark.verdict, MARK_SAFE[2][0])
    self.assertEqual(newmark.version, 2)
    self.assertEqual(newmark.description, 'New mark description')
    self.assertEqual(newmark.is_modifiable, True)
    self.assertEqual(len(newmark.versions.all()), 2)
    newmark_version = MarkSafeHistory.objects.filter(mark=newmark).order_by('-version').first()
    self.assertEqual(newmark_version.version, 2)
    # 'mark' is the stale in-memory object of the deleted mark; its field values
    # are still valid for comparing against the re-uploaded copy
    self.assertEqual(newmark_version.verdict, mark.verdict)
    self.assertEqual(newmark_version.author.username, 'manager')
    self.assertEqual(newmark_version.description, mark.description)
    self.assertEqual(newmark_version.comment, 'Change 1')
    self.assertEqual(ReportSafe.objects.get(pk=safe.pk).verdict, SAFE_VERDICTS[2][0])
    self.assertEqual(len(MarkSafeReport.objects.filter(mark=newmark, report=safe)), 1)
    self.assertEqual(len(MarkSafeReport.objects.filter(report=safe)), 1)
    self.assertEqual(len(MarkSafeTag.objects.filter(mark_version=newmark_version, tag=created_tags[0])), 1)
    self.assertEqual(len(MarkSafeTag.objects.filter(mark_version=newmark_version, tag=created_tags[1])), 0)
    self.assertEqual(len(ReportSafeTag.objects.filter(report__root__job=self.job, report__parent=None)), 1)
    self.assertEqual(len(ReportSafeTag.objects.filter(report__root__job=self.job, report_id=safe.parent_id)), 1)

    # Some more mark changes to accumulate versions 3..5
    for i in range(3, 6):
        response = self.client.post(reverse('marks:mark_form', args=['safe', newmark.pk, 'edit']), {
            'data': json.dumps({
                'description': 'New mark description',
                'is_modifiable': True,
                'verdict': MARK_SAFE[2][0],
                'status': MARK_STATUS[2][0],
                'tags': [created_tags[0].pk],
                'attrs': compare_attrs,
                'comment': 'Change %s' % i
            })
        })
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(len(MarkSafeHistory.objects.filter(mark=newmark)), 5)

    # Get 3d version data.
    # BUGFIX: the query data must be passed as the 'data' argument of client.get();
    # the previous 'params=' keyword was silently swallowed by **extra and the
    # view never received '?version=3'.
    response = self.client.get(reverse('marks:mark_form', args=['safe', newmark.pk, 'edit']),
                               {'version': 3})
    self.assertEqual(response.status_code, 200)
    # Compare 1st and 4th versions
    response = self.client.post('/marks/safe/%s/compare_versions/' % newmark.pk, {'v1': 1, 'v2': 4})
    self.assertEqual(response.status_code, 200)
    # Remove 2nd and 4th versions
    response = self.client.post('/marks/safe/%s/remove_versions/' % newmark.pk, {'versions': json.dumps([2, 4])})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertNotIn('error', res)
    self.assertIn('success', res)
    self.assertEqual(len(MarkSafeHistory.objects.filter(mark=newmark)), 3)

    # Reports' lists pages (filters by tag and by verdict; 302 is acceptable on redirects)
    root_comp = ReportComponent.objects.get(root__job_id=self.job.pk, parent=None)
    response = self.client.get('%s?tag=%s' % (reverse('reports:safes', args=[root_comp.pk]), created_tags[0].pk))
    self.assertIn(response.status_code, {200, 302})
    response = self.client.get('%s?tag=%s' % (reverse('reports:safes', args=[root_comp.pk]), created_tags[1].pk))
    self.assertIn(response.status_code, {200, 302})
    response = self.client.get(
        '%s?verdict=%s' % (reverse('reports:safes', args=[root_comp.pk]), SAFE_VERDICTS[0][0])
    )
    self.assertIn(response.status_code, {200, 302})
    response = self.client.get(
        '%s?verdict=%s' % (reverse('reports:safes', args=[root_comp.pk]), SAFE_VERDICTS[2][0])
    )
    self.assertIn(response.status_code, {200, 302})

    # Download all marks
    response = self.client.get('/marks/api/download-all/')
    self.assertEqual(response.status_code, 200)
    self.assertNotEqual(response['Content-Type'], 'application/json')
    with open(os.path.join(settings.MEDIA_ROOT, self.all_marks_arch), mode='wb') as fp:
        for content in response.streaming_content:
            fp.write(content)
    # Delete all safe marks
    self.client.post('/marks/delete/', {
        'type': 'safe', 'ids': json.dumps(list(MarkSafe.objects.values_list('id', flat=True)))
    })
    self.assertEqual(MarkSafe.objects.count(), 0)
    # All verdicts must be "safe unmarked"
    self.assertEqual(
        len(ReportSafe.objects.filter(verdict=SAFE_VERDICTS[4][0])),
        len(ReportSafe.objects.all())
    )
    self.assertEqual(len(MarkSafeReport.objects.all()), 0)
    # Upload all marks
    with open(os.path.join(settings.MEDIA_ROOT, self.all_marks_arch), mode='rb') as fp:
        response = self.client.post('/marks/upload-all/', {'delete': 1, 'file': fp})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(int(json.loads(str(response.content, encoding='utf8'))['fail']), 0)
    self.assertEqual(int(json.loads(str(response.content, encoding='utf8'))['safe']), 1)
def test_unsafe(self):
self.assertEqual(Job.objects.get(pk=self.job.pk).status, JOB_STATUS[3][0])
# Delete populated marks
response = self.client.post('/marks/delete/', {
'type': 'safe', 'ids': json.dumps(list(MarkSafe.objects.values_list('id', flat=True)))
})
self.assertEqual(response.status_code, 200)
response = self.client.post('/marks/delete/', {
'type': 'unsafe', 'ids': json.dumps(list(MarkUnsafe.objects.values_list('id', flat=True)))
})
self.assertEqual(response.status_code, 200)
response = self.client.post('/marks/delete/', {
'type': 'unknown', 'ids': json.dumps(list(MarkUnknown.objects.values_list('id', flat=True)))
})
self.assertEqual(response.status_code, 200)
# Create 5 unsafe tags
created_tags = []
response = self.client.post('/marks/tags/save_tag/', {
'action': 'create', 'tag_type': 'unsafe', 'parent_id': '0', 'name': 'test:unsafe:tag:1',
'description': 'Test unsafe tag description'
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
try:
created_tags.append(UnsafeTag.objects.get(tag='test:unsafe:tag:1'))
except ObjectDoesNotExist:
self.fail('Unsafe tag was not created')
self.assertEqual(created_tags[0].description, 'Test unsafe tag description')
self.assertEqual(created_tags[0].parent, None)
for i in range(2, 6):
self.client.post('/marks/tags/save_tag/', {
'action': 'create', 'tag_type': 'unsafe',
'parent_id': created_tags[i - 2].pk, 'name': 'test:unsafe:tag:%s' % i, 'description': ''
})
created_tags.append(UnsafeTag.objects.get(tag='test:unsafe:tag:%s' % i))
self.assertEqual(created_tags[i - 1].parent, created_tags[i - 2])
# Get tag parents for editing tag 'test:unsafe:tag:3'
response = self.client.post('/marks/tags/unsafe/get_tag_data/', {'tag_id': created_tags[2].pk})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
# Get tag parents for creating new tag
response = self.client.post('/marks/tags/unsafe/get_tag_data/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
# Edit 5th tag
response = self.client.post('/marks/tags/save_tag/', {
'action': 'edit', 'tag_type': 'unsafe', 'parent_id': created_tags[2].pk,
'name': 'test:unsafe:tag:5', 'tag_id': created_tags[4].pk,
'description': 'Test unsafe tag 5 description'
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
try:
created_tags[4] = UnsafeTag.objects.get(tag='test:unsafe:tag:5')
except ObjectDoesNotExist:
self.fail('Tag 5 was not found after editing')
self.assertEqual(created_tags[4].parent, created_tags[2])
self.assertEqual(created_tags[4].description, 'Test unsafe tag 5 description')
# Remove 3d tag and check that its children (tag4 and tag5) are also removed
response = self.client.post('/marks/tags/unsafe/delete/%s/' % created_tags[2].pk)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
self.assertEqual(
len(UnsafeTag.objects.filter(tag__in=['test:unsafe:tag:3', 'test:unsafe:tag:4', 'test:unsafe:tag:5'])), 0
)
del created_tags[2:]
# Get tags data (for edit/create mark page). Just check that there is no error in response.
response = self.client.post('/marks/unsafe/tags_data/', {'selected_tags': json.dumps([created_tags[1].pk])})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
# Download tags
response = self.client.get(reverse('marks:download_tags', args=['unsafe']))
self.assertEqual(response.status_code, 200)
with open(os.path.join(settings.MEDIA_ROOT, self.test_tagsfile), mode='wb') as fp:
for chunk in response.streaming_content:
fp.write(chunk)
UnsafeTag.objects.all().delete()
# Upload tags
with open(os.path.join(settings.MEDIA_ROOT, self.test_tagsfile), mode='rb') as fp:
response = self.client.post('/marks/tags/unsafe/upload/', {'file': fp})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
for i in range(0, len(created_tags)):
try:
created_tags[i] = UnsafeTag.objects.get(tag=created_tags[i].tag)
except ObjectDoesNotExist:
self.fail("Tags weren't uploaded")
# Tags tree page
response = self.client.get(reverse('marks:tags', args=['unsafe']))
self.assertEqual(response.status_code, 200)
# Get report
unsafe = ReportUnsafe.objects.filter(root__job_id=self.job.pk).first()
self.assertIsNotNone(unsafe)
# Inline mark form
response = self.client.get('/marks/unsafe/%s/create/inline/' % unsafe.id)
self.assertEqual(response.status_code, 200)
# Create mark page
response = self.client.get(reverse('marks:mark_form', args=['unsafe', unsafe.pk, 'create']))
self.assertEqual(response.status_code, 200)
# Error trace compare function description
try:
compare_f = MarkUnsafeCompare.objects.get(name=DEFAULT_COMPARE)
except ObjectDoesNotExist:
self.fail("Population hasn't created compare error trace functions")
response = self.client.post('/marks/get_func_description/%s/' % compare_f.pk)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
# Save mark
compare_attrs = list({'is_compare': associate, 'attr': a_name}
for a_name, associate in unsafe.attrs.values_list('attr__name__name', 'associate'))
response = self.client.post(reverse('marks:mark_form', args=['unsafe', unsafe.pk, 'create']), {
'data': json.dumps({
'compare_id': compare_f.pk,
'description': 'Mark description',
'is_modifiable': True,
'verdict': MARK_UNSAFE[1][0],
'status': MARK_STATUS[2][0],
'tags': [created_tags[0].pk],
'attrs': compare_attrs
})
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
res = json.loads(str(response.content, encoding='utf8'))
self.assertNotIn('error', res)
self.assertIn('cache_id', res)
cache_id = res['cache_id']
# Check mark's tables
try:
mark = MarkUnsafe.objects.get(job=self.job, author__username='manager')
except ObjectDoesNotExist:
self.fail('Mark was not created')
self.assertEqual(mark.type, MARK_TYPE[0][0])
self.assertEqual(mark.verdict, MARK_UNSAFE[1][0])
self.assertEqual(mark.status, MARK_STATUS[2][0])
self.assertEqual(mark.version, 1)
self.assertEqual(mark.description, 'Mark description')
self.assertEqual(mark.function.name, DEFAULT_COMPARE)
self.assertEqual(mark.is_modifiable, True)
self.assertEqual(len(mark.versions.all()), 1)
mark_version = MarkUnsafeHistory.objects.get(mark=mark)
self.assertEqual(mark_version.verdict, mark.verdict)
self.assertEqual(mark_version.version, 1)
self.assertEqual(mark_version.author.username, 'manager')
self.assertEqual(mark_version.status, mark.status)
self.assertEqual(mark_version.description, mark.description)
for mark_attr in mark_version.attrs.all().select_related('attr__name'):
self.assertIn({'is_compare': mark_attr.is_compare, 'attr': mark_attr.attr.name.name}, compare_attrs)
self.assertEqual(ReportUnsafe.objects.get(pk=unsafe.pk).verdict, UNSAFE_VERDICTS[1][0])
self.assertEqual(len(MarkUnsafeReport.objects.filter(mark=mark, report=unsafe, type=ASSOCIATION_TYPE[1][0])), 1)
self.assertEqual(len(MarkUnsafeTag.objects.filter(mark_version=mark_version, tag=created_tags[0])), 1)
try:
rst = ReportUnsafeTag.objects.get(report__root__job=self.job, report__parent=None, tag=created_tags[0])
# The number of unsafes for root report with specified tag equals the number of marked unsafes
self.assertEqual(rst.number, len(ReportUnsafe.objects.filter(verdict=UNSAFE_VERDICTS[1][0])))
rst = ReportUnsafeTag.objects.get(
report__root__job=self.job, report_id=unsafe.parent_id, tag=created_tags[0]
)
# The number of unsafes for parent report (for unsafe) with specified tag
# equals 1 due to only one unsafe is child for report
self.assertEqual(rst.number, 1)
srt = UnsafeReportTag.objects.get(report=unsafe, tag=created_tags[0])
self.assertEqual(srt.number, 1)
except ObjectDoesNotExist:
self.fail('Reports tags cache was not filled')
# Associations changes
response = self.client.get('/marks/unsafe/association_changes/%s/' % cache_id)
self.assertEqual(response.status_code, 200)
# Edit mark page
response = self.client.get(reverse('marks:mark_form', args=['unsafe', mark.pk, 'edit']))
self.assertEqual(response.status_code, 200)
# Edit mark
with mark_version.error_trace.file as fp:
error_trace = fp.read().decode('utf8')
response = self.client.post(reverse('marks:mark_form', args=['unsafe', mark.pk, 'edit']), {
'data': json.dumps({
'compare_id': compare_f.pk,
'description': 'New mark description',
'is_modifiable': True,
'verdict': MARK_UNSAFE[2][0],
'status': MARK_STATUS[2][0],
'tags': [created_tags[1].pk],
'attrs': compare_attrs,
'comment': 'Change 1',
'error_trace': error_trace
})
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
res = json.loads(str(response.content, encoding='utf8'))
self.assertNotIn('error', res)
self.assertIn('cache_id', res)
cache_id = res['cache_id']
# Check mark's tables
try:
mark = MarkUnsafe.objects.get(job=self.job, author__username='manager')
except ObjectDoesNotExist:
self.fail('Mark was not created')
self.assertEqual(mark.verdict, MARK_UNSAFE[2][0])
self.assertEqual(mark.version, 2)
self.assertEqual(mark.description, 'New mark description')
self.assertEqual(mark.is_modifiable, True)
self.assertEqual(len(mark.versions.all()), 2)
mark_version = MarkUnsafeHistory.objects.filter(mark=mark).order_by('-version').first()
self.assertEqual(mark_version.version, 2)
self.assertEqual(mark_version.verdict, mark.verdict)
self.assertEqual(mark_version.author.username, 'manager')
self.assertEqual(mark_version.description, mark.description)
self.assertEqual(mark_version.comment, 'Change 1')
self.assertEqual(ReportUnsafe.objects.get(pk=unsafe.pk).verdict, SAFE_VERDICTS[2][0])
self.assertEqual(len(MarkUnsafeReport.objects.filter(mark=mark, report=unsafe)), 1)
self.assertEqual(len(MarkUnsafeTag.objects.filter(mark_version=mark_version, tag=created_tags[0])), 1)
self.assertEqual(len(MarkUnsafeTag.objects.filter(mark_version=mark_version, tag=created_tags[1])), 1)
self.assertEqual(len(ReportUnsafeTag.objects.filter(report__root__job=self.job, report__parent=None)), 2)
self.assertEqual(len(
ReportUnsafeTag.objects.filter(report__root__job=self.job, report__id=unsafe.parent_id)
), 2)
try:
urt = UnsafeReportTag.objects.get(report=unsafe, tag=created_tags[0])
self.assertEqual(urt.number, 1)
urt = UnsafeReportTag.objects.get(report=unsafe, tag=created_tags[1])
self.assertEqual(urt.number, 1)
except ObjectDoesNotExist:
self.fail('Reports tags cache was not filled')
# Associations changes
response = self.client.get('/marks/unsafe/association_changes/%s/' % cache_id)
self.assertEqual(response.status_code, 200)
# Unsafe marks list page
response = self.client.get(reverse('marks:list', args=['unsafe']))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('marks:mark', args=['unsafe', mark.id]))
self.assertEqual(response.status_code, 200)
# Inline mark form
response = self.client.get('/marks/unsafe/%s/edit/inline/' % mark.id)
self.assertEqual(response.status_code, 200)
# Confirm/unconfirm association
# Mark is automatically associated after its changes
self.assertEqual(
MarkUnsafeReport.objects.filter(mark=mark, report=unsafe, type=ASSOCIATION_TYPE[0][0]).count(), 1
)
response = self.client.post('/marks/association/unsafe/%s/%s/unconfirm/' % (unsafe.pk, mark.pk))
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
self.assertEqual(MarkUnsafeReport.objects.filter(
mark=mark, report=unsafe, type=ASSOCIATION_TYPE[2][0]).count(), 1)
response = self.client.post('/marks/association/unsafe/%s/%s/confirm/' % (unsafe.pk, mark.pk))
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
self.assertEqual(MarkUnsafeReport.objects.filter(
mark=mark, report=unsafe, type=ASSOCIATION_TYPE[1][0]).count(), 1)
# Like/dislike association
response = self.client.post('/marks/association/unsafe/%s/%s/like/' % (unsafe.id, mark.id))
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
self.assertEqual(UnsafeAssociationLike.objects.filter(
association__report=unsafe, association__mark=mark, dislike=False
).count(), 1)
response = self.client.post('/marks/association/unsafe/%s/%s/dislike/' % (unsafe.id, mark.id))
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
self.assertEqual(UnsafeAssociationLike.objects.filter(
association__report=unsafe, association__mark=mark, dislike=True
).count(), 1)
self.assertEqual(UnsafeAssociationLike.objects.filter(
association__report=unsafe, association__mark=mark, dislike=False
).count(), 0)
# Download mark
response = self.client.get(reverse('marks:unsafe-download', args=[mark.pk]))
self.assertEqual(response.status_code, 200)
self.assertIn(response['Content-Type'], {'application/x-zip-compressed', 'application/zip'})
with open(os.path.join(settings.MEDIA_ROOT, self.unsafe_archive), mode='wb') as fp:
for content in response.streaming_content:
fp.write(content)
# Download mark in preset format
response = self.client.get(reverse('marks:unsafe-download-preset', args=[mark.pk]))
self.assertEqual(response.status_code, 200)
# Delete mark
response = self.client.post('/marks/delete/', {'type': 'unsafe', 'ids': json.dumps([mark.id])})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
res = json.loads(str(response.content, encoding='utf8'))
self.assertNotIn('error', res)
self.assertEqual(len(MarkUnsafe.objects.all()), 0)
self.assertEqual(len(MarkUnsafeReport.objects.all()), 0)
self.assertEqual(ReportUnsafe.objects.all().first().verdict, UNSAFE_VERDICTS[5][0])
# Upload mark
with open(os.path.join(settings.MEDIA_ROOT, self.unsafe_archive), mode='rb') as fp:
response = self.client.post('/marks/upload/', {'file': fp})
fp.close()
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
res = json.loads(str(response.content, encoding='utf8'))
self.assertIn('id', res)
self.assertEqual(res.get('type'), 'unsafe')
self.assertEqual(len(MarkUnsafe.objects.all()), 1)
try:
newmark = MarkUnsafe.objects.get(pk=res['id'])
except ObjectDoesNotExist:
self.fail('Mark was not uploaded')
self.assertEqual(newmark.type, MARK_TYPE[2][0])
self.assertEqual(newmark.verdict, MARK_UNSAFE[2][0])
self.assertEqual(newmark.version, 2)
self.assertEqual(newmark.description, 'New mark description')
self.assertEqual(newmark.is_modifiable, True)
self.assertEqual(len(newmark.versions.all()), 2)
newmark_version = MarkUnsafeHistory.objects.filter(mark=newmark).order_by('-version').first()
self.assertEqual(newmark_version.version, 2)
self.assertEqual(newmark_version.verdict, mark.verdict)
self.assertEqual(newmark_version.author.username, 'manager')
self.assertEqual(newmark_version.description, mark.description)
self.assertEqual(newmark_version.comment, 'Change 1')
self.assertEqual(ReportUnsafe.objects.get(pk=unsafe.pk).verdict, UNSAFE_VERDICTS[2][0])
self.assertEqual(len(MarkUnsafeReport.objects.filter(mark=newmark, report=unsafe)), 1)
self.assertEqual(len(MarkUnsafeReport.objects.filter(report=unsafe)), 1)
self.assertEqual(len(MarkUnsafeTag.objects.filter(mark_version=newmark_version, tag=created_tags[0])), 1)
self.assertEqual(len(MarkUnsafeTag.objects.filter(mark_version=newmark_version, tag=created_tags[1])), 1)
# The tag has parent which is also added to mark
self.assertEqual(
len(ReportUnsafeTag.objects.filter(report__root__job=self.job, report__parent=None)),
len(ReportUnsafe.objects.filter(verdict=UNSAFE_VERDICTS[2][0])) * 2
)
self.assertEqual(len(ReportUnsafeTag.objects.filter(
report__root__job=self.job, report__id=unsafe.parent_id
)), 2)
# Some more mark changes
for i in range(3, 6):
response = self.client.post(reverse('marks:mark_form', args=['unsafe', newmark.pk, 'edit']), {
'data': json.dumps({
'compare_id': compare_f.pk,
'description': 'New mark description',
'is_modifiable': True,
'verdict': MARK_UNSAFE[2][0],
'status': MARK_STATUS[2][0],
'tags': [created_tags[0].pk],
'attrs': compare_attrs,
'comment': 'Change %s' % i,
'error_trace': error_trace
})
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
self.assertEqual(
len(ReportUnsafeTag.objects.filter(report__root__job=self.job, report__parent=None)),
len(ReportUnsafe.objects.filter(verdict=UNSAFE_VERDICTS[2][0]))
)
self.assertEqual(len(MarkUnsafeHistory.objects.filter(mark=newmark)), 5)
# Get 3d version data
response = self.client.get(reverse('marks:mark_form', args=['unsafe', newmark.pk, 'edit']),
params={'version': 3})
self.assertEqual(response.status_code, 200)
# Compare 1st and 4th versions
response = self.client.post('/marks/unsafe/%s/compare_versions/' % newmark.pk, {'v1': 1, 'v2': 4})
self.assertEqual(response.status_code, 200)
# Remove 2nd and 4th versions
response = self.client.post('/marks/unsafe/%s/remove_versions/' % newmark.pk, {'versions': json.dumps([2, 4])})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
res = json.loads(str(response.content, encoding='utf8'))
self.assertNotIn('error', res)
self.assertIn('success', res)
self.assertEqual(len(MarkUnsafeHistory.objects.filter(mark=newmark)), 3)
# Reports' lists pages
root_comp = ReportComponent.objects.get(root__job_id=self.job.pk, parent=None)
response = self.client.get('%s?tag=%s' % (reverse('reports:unsafes', args=[root_comp.pk]), created_tags[0].pk))
self.assertIn(response.status_code, {200, 302})
response = self.client.get('%s?tag=%s' % (reverse('reports:unsafes', args=[root_comp.pk]), created_tags[1].pk))
self.assertIn(response.status_code, {200, 302})
response = self.client.get(
'%s?verdict=%s' % (reverse('reports:unsafes', args=[root_comp.pk]), UNSAFE_VERDICTS[0][0])
)
self.assertIn(response.status_code, {200, 302})
response = self.client.get(
'%s?verdict=%s' % (reverse('reports:unsafes', args=[root_comp.pk]), UNSAFE_VERDICTS[2][0])
)
self.assertIn(response.status_code, {200, 302})
# Download all marks
response = self.client.get('/marks/api/download-all/')
self.assertEqual(response.status_code, 200)
self.assertNotEqual(response['Content-Type'], 'application/json')
with open(os.path.join(settings.MEDIA_ROOT, self.all_marks_arch), mode='wb') as fp:
for content in response.streaming_content:
fp.write(content)
# Delete all unsafe marks
self.client.post('/marks/delete/', {
'type': 'unsafe', 'ids': json.dumps(list(MarkUnsafe.objects.values_list('id', flat=True)))
})
self.assertEqual(MarkUnsafe.objects.count(), 0)
# All verdicts must be "unsafe unmarked"
self.assertEqual(
ReportUnsafe.objects.filter(verdict=UNSAFE_VERDICTS[5][0]).count(), ReportUnsafe.objects.all().count()
)
self.assertEqual(MarkUnsafeReport.objects.count(), 0)
# Upload all marks
with open(os.path.join(settings.MEDIA_ROOT, self.all_marks_arch), mode='rb') as fp:
response = self.client.post('/marks/upload-all/', {'delete': 1, 'file': fp})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
self.assertEqual(int(json.loads(str(response.content, encoding='utf8'))['fail']), 0)
self.assertEqual(int(json.loads(str(response.content, encoding='utf8'))['unsafe']), 1)
def test_unknown(self):
    """End-to-end exercise of the unknown-mark lifecycle.

    Covers: creating a mark from a report's problem description, editing it,
    (un)confirming and (dis)liking its association, downloading/uploading a
    single mark and the whole marks archive, version management, and the
    related list pages. Relies on the decided test job from setUp.
    """
    self.assertEqual(Job.objects.get(pk=self.job.pk).status, JOB_STATUS[3][0])
    # Do not remove populated safe/unsafe marks as there are no problems with uploading populated marks
    response = self.client.post('/marks/delete/', {
        'type': 'unknown', 'ids': json.dumps(list(MarkUnknown.objects.values_list('id', flat=True)))
    })
    self.assertEqual(response.status_code, 200)
    # Get report: find the unknown whose problem description matches the expected KeyError text
    unknown = None
    for u in ReportUnknown.objects.filter(root__job_id=self.job.pk):
        afc = ArchiveFileContent(u, 'problem_description', PROBLEM_DESC_FILE)
        if afc.content == b'KeyError: \'attr\' was not found.':
            unknown = u
            break
    if unknown is None:
        self.fail("Unknown with needed problem description was not found in test job decision")
    parent = ReportComponent.objects.get(pk=unknown.parent_id)
    # Inline mark form
    response = self.client.get('/marks/unknown/%s/create/inline/' % unknown.id)
    self.assertEqual(response.status_code, 200)
    # Create mark page
    response = self.client.get(reverse('marks:mark_form', args=['unknown', unknown.pk, 'create']))
    self.assertEqual(response.status_code, 200)
    # Check regexp function
    # NOTE: raw strings so the regex backslashes are not deprecated invalid escapes
    response = self.client.post('/marks/check-unknown-mark/%s/' % unknown.pk, {
        'function': r"KeyError:\s'(\S*)'\swas\snot\sfound\.",
        'pattern': 'KeyE: {0}',
        'is_regex': 'true'
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    # Save mark
    response = self.client.post(reverse('marks:mark_form', args=['unknown', unknown.pk, 'create']), {
        'data': json.dumps({
            'description': 'Mark description',
            'is_modifiable': True,
            'status': MARK_STATUS[2][0],
            'function': r"KeyError:\s'(\S*)'\swas\snot\sfound\.",
            'problem': 'KeyE: {0}',
            'link': 'http://mysite.com/',
            'is_regexp': True
        })
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertNotIn('error', res)
    self.assertIn('cache_id', res)
    cache_id = res['cache_id']
    # Check mark's tables
    try:
        mark = MarkUnknown.objects.get(job=self.job, author__username='manager')
    except ObjectDoesNotExist:
        self.fail('Mark was not created')
    self.assertEqual(mark.type, MARK_TYPE[0][0])
    self.assertEqual(mark.status, MARK_STATUS[2][0])
    self.assertEqual(mark.version, 1)
    self.assertEqual(mark.description, 'Mark description')
    self.assertEqual(mark.link, 'http://mysite.com/')
    self.assertEqual(mark.problem_pattern, 'KeyE: {0}')
    self.assertEqual(mark.function, r"KeyError:\s'(\S*)'\swas\snot\sfound\.")
    self.assertEqual(mark.is_modifiable, True)
    self.assertEqual(len(mark.versions.all()), 1)
    mark_version = MarkUnknownHistory.objects.get(mark=mark)
    self.assertEqual(mark_version.version, 1)
    self.assertEqual(mark_version.author.username, 'manager')
    self.assertEqual(mark_version.status, mark.status)
    self.assertEqual(mark_version.description, mark.description)
    self.assertEqual(mark_version.link, mark.link)
    self.assertEqual(mark_version.problem_pattern, mark.problem_pattern)
    self.assertEqual(mark_version.function, mark.function)
    self.assertEqual(len(UnknownProblem.objects.filter(name='KeyE: attr')), 1)
    self.assertEqual(len(MarkUnknownReport.objects.filter(mark=mark, report=unknown)), 1)
    # Associations changes
    response = self.client.get('/marks/unknown/association_changes/%s/' % cache_id)
    self.assertEqual(response.status_code, 200)
    # Edit mark page
    response = self.client.get(reverse('marks:mark_form', args=['unknown', mark.pk, 'edit']))
    self.assertEqual(response.status_code, 200)
    # Edit mark
    response = self.client.post(reverse('marks:mark_form', args=['unknown', mark.pk, 'edit']), {
        'data': json.dumps({
            'description': 'New mark description',
            'is_modifiable': True,
            'status': MARK_STATUS[1][0],
            'function': r"KeyError:\s'(\S*)'.*",
            'problem': 'KeyE: {0}',
            'link': 'http://mysite.com/',
            'is_regexp': True,
            'comment': 'Change 1'
        })
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertNotIn('error', res)
    self.assertIn('cache_id', res)
    cache_id = res['cache_id']
    # Check mark's tables
    try:
        mark = MarkUnknown.objects.get(job=self.job, author__username='manager')
    except ObjectDoesNotExist:
        self.fail('Mark was not created')
    self.assertEqual(mark.version, 2)
    self.assertEqual(mark.description, 'New mark description')
    self.assertEqual(mark.is_modifiable, True)
    self.assertEqual(len(mark.versions.all()), 2)
    mark_version = MarkUnknownHistory.objects.filter(mark=mark).order_by('-version').first()
    self.assertEqual(mark_version.version, 2)
    self.assertEqual(mark_version.author.username, 'manager')
    self.assertEqual(mark_version.description, mark.description)
    self.assertEqual(mark_version.comment, 'Change 1')
    self.assertEqual(mark_version.link, mark.link)
    self.assertEqual(mark_version.problem_pattern, mark.problem_pattern)
    self.assertEqual(mark_version.function, mark.function)
    self.assertEqual(len(UnknownProblem.objects.filter(name='KeyE: attr')), 1)
    self.assertEqual(len(MarkUnknownReport.objects.filter(mark=mark, report=unknown)), 1)
    # Associations changes
    response = self.client.get('/marks/unknown/association_changes/%s/' % cache_id)
    self.assertEqual(response.status_code, 200)
    # Unknown marks list page
    response = self.client.get(reverse('marks:list', args=['unknown']))
    self.assertEqual(response.status_code, 200)
    response = self.client.get(reverse('marks:mark', args=['unknown', mark.id]))
    self.assertEqual(response.status_code, 200)
    # Inline mark edit form
    response = self.client.get('/marks/unknown/%s/edit/inline/' % mark.id)
    self.assertEqual(response.status_code, 200)
    # Confirm/unconfirm association
    # Mark is automatically associated after its changes
    self.assertEqual(
        MarkUnknownReport.objects.filter(mark=mark, report=unknown, type=ASSOCIATION_TYPE[0][0]).count(), 1
    )
    response = self.client.post('/marks/association/unknown/%s/%s/unconfirm/' % (unknown.pk, mark.pk))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(MarkUnknownReport.objects.filter(
        mark=mark, report=unknown, type=ASSOCIATION_TYPE[2][0]).count(), 1)
    response = self.client.post('/marks/association/unknown/%s/%s/confirm/' % (unknown.pk, mark.pk))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(MarkUnknownReport.objects.filter(
        mark=mark, report=unknown, type=ASSOCIATION_TYPE[1][0]).count(), 1)
    # Like/dislike association
    response = self.client.post('/marks/association/unknown/%s/%s/like/' % (unknown.id, mark.id))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(UnknownAssociationLike.objects.filter(
        association__report=unknown, association__mark=mark, dislike=False
    ).count(), 1)
    response = self.client.post('/marks/association/unknown/%s/%s/dislike/' % (unknown.id, mark.id))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    # A dislike must replace the earlier like, not coexist with it
    self.assertEqual(UnknownAssociationLike.objects.filter(
        association__report=unknown, association__mark=mark, dislike=True
    ).count(), 1)
    self.assertEqual(UnknownAssociationLike.objects.filter(
        association__report=unknown, association__mark=mark, dislike=False
    ).count(), 0)
    # Download mark
    response = self.client.get(reverse('marks:unknown-download', args=[mark.pk]))
    self.assertEqual(response.status_code, 200)
    self.assertIn(response['Content-Type'], {'application/x-zip-compressed', 'application/zip'})
    with open(os.path.join(settings.MEDIA_ROOT, self.unknown_archive), mode='wb') as fp:
        for content in response.streaming_content:
            fp.write(content)
    # Download mark in preset format
    response = self.client.get(reverse('marks:unknown-download-preset', args=[mark.pk]))
    self.assertEqual(response.status_code, 200)
    # Delete mark
    response = self.client.post('/marks/delete/', {'type': 'unknown', 'ids': json.dumps([mark.id])})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertNotIn('error', res)
    self.assertEqual(len(MarkUnknown.objects.all()), 0)
    self.assertEqual(len(MarkUnknownReport.objects.all()), 0)
    # Upload mark
    with open(os.path.join(settings.MEDIA_ROOT, self.unknown_archive), mode='rb') as fp:
        response = self.client.post('/marks/upload/', {'file': fp})
        fp.close()
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertIn('id', res)
    self.assertEqual(res.get('type'), 'unknown')
    try:
        newmark = MarkUnknown.objects.get(pk=res['id'])
    except ObjectDoesNotExist:
        self.fail('Mark was not uploaded')
    # The upload must restore the full version history, not just the last state
    self.assertEqual(newmark.version, 2)
    self.assertEqual(newmark.description, 'New mark description')
    self.assertEqual(newmark.is_modifiable, True)
    self.assertEqual(len(newmark.versions.all()), 2)
    newmark_version = MarkUnknownHistory.objects.filter(mark=newmark).order_by('-version').first()
    self.assertEqual(newmark_version.version, 2)
    self.assertEqual(newmark_version.author.username, 'manager')
    self.assertEqual(newmark_version.comment, 'Change 1')
    self.assertEqual(len(MarkUnknownReport.objects.filter(mark=newmark, report=unknown)), 1)
    self.assertEqual(len(MarkUnknownReport.objects.filter(report=unknown)), 1)
    self.assertEqual(len(UnknownProblem.objects.filter(name='KeyE: attr')), 1)
    # Check non-regexp function
    response = self.client.post('/marks/check-unknown-mark/%s/' % unknown.pk, {
        'function': "KeyError: 'attr' was not found.",
        'pattern': 'KeyE: attr',
        'is_regex': 'false'
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    # Non-regexp function change
    response = self.client.post(reverse('marks:mark_form', args=['unknown', newmark.pk, 'edit']), {
        'data': json.dumps({
            'description': 'New mark description',
            'is_modifiable': True,
            'status': MARK_STATUS[2][0],
            'function': "KeyError: 'attr' was not found.",
            'problem': 'KeyE: attr',
            'link': 'http://mysite.com/',
            'is_regexp': False,
            'comment': 'Change 3'
        })
    })
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    # Some more mark changes (versions 4 and 5)
    for i in range(4, 6):
        response = self.client.post(reverse('marks:mark_form', args=['unknown', newmark.pk, 'edit']), {
            'data': json.dumps({
                'description': 'No regexp',
                'is_modifiable': True,
                'status': MARK_STATUS[2][0],
                'function': r"KeyError:.*'(\S*)'",
                'problem': 'KeyE: {0}',
                'link': 'http://mysite.com/',
                'is_regexp': True,
                'comment': 'Change %s' % i
            })
        })
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(len(MarkUnknownHistory.objects.filter(mark=newmark)), 5)
    # Get 3d version data
    # Fixed: 'version' must go in the GET data dict; the 'params' keyword was
    # silently swallowed by the test client and never sent as a query string.
    response = self.client.get(reverse('marks:mark_form', args=['unknown', newmark.pk, 'edit']),
                               {'version': 3})
    self.assertEqual(response.status_code, 200)
    # Compare 1st and 4th versions
    response = self.client.post('/marks/unknown/%s/compare_versions/' % newmark.pk, {'v1': 1, 'v2': 4})
    self.assertEqual(response.status_code, 200)
    # Remove 2nd and 4th versions
    response = self.client.post('/marks/unknown/%s/remove_versions/' % newmark.pk, {'versions': json.dumps([2, 4])})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    res = json.loads(str(response.content, encoding='utf8'))
    self.assertNotIn('error', res)
    self.assertIn('success', res)
    self.assertEqual(len(MarkUnknownHistory.objects.filter(mark=newmark)), 3)
    # Reports' lists pages
    root_comp = ReportComponent.objects.get(root__job_id=self.job.pk, parent=None)
    response = self.client.get(
        '%s?component=%s' % (reverse('reports:unknowns', args=[root_comp.pk]), parent.component_id)
    )
    self.assertIn(response.status_code, {200, 302})
    try:
        problem_id = UnknownProblem.objects.get(name='KeyE: attr').pk
    except ObjectDoesNotExist:
        self.fail("Can't find unknown problem")
    response = self.client.get('%s?component=%s&problem=%s' % (
        reverse('reports:unknowns', args=[root_comp.pk]), parent.component_id, problem_id
    ))
    self.assertIn(response.status_code, {200, 302})
    # Download all marks
    response = self.client.get('/marks/api/download-all/')
    self.assertEqual(response.status_code, 200)
    self.assertNotEqual(response['Content-Type'], 'application/json')
    with open(os.path.join(settings.MEDIA_ROOT, self.all_marks_arch), mode='wb') as fp:
        for content in response.streaming_content:
            fp.write(content)
    # Delete all marks
    self.client.post('/marks/delete/', {
        'type': 'unknown', 'ids': json.dumps(list(MarkUnknown.objects.values_list('id', flat=True)))
    })
    self.assertEqual(MarkUnknown.objects.count(), 0)
    # All verdicts must be "unknown unmarked"
    self.assertEqual(MarkUnknownReport.objects.all().count(), 0)
    # Upload all marks
    with open(os.path.join(settings.MEDIA_ROOT, self.all_marks_arch), mode='rb') as fp:
        response = self.client.post('/marks/upload-all/', {'delete': 1, 'file': fp})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json')
    self.assertNotIn('error', json.loads(str(response.content, encoding='utf8')))
    self.assertEqual(int(json.loads(str(response.content, encoding='utf8'))['fail']), 0)
    self.assertEqual(int(json.loads(str(response.content, encoding='utf8'))['unknown']), 1)
def tearDown(self):
    """Remove temporary archives created under MEDIA_ROOT during the tests.

    Replaces five copy-pasted exists/remove pairs with one loop; then defers
    to the parent class teardown.
    """
    # Each attribute holds a MEDIA_ROOT-relative file name written by a test.
    for relative_name in (self.safe_archive, self.unsafe_archive, self.unknown_archive,
                          self.test_tagsfile, self.all_marks_arch):
        path = os.path.join(settings.MEDIA_ROOT, relative_name)
        if os.path.exists(path):
            os.remove(path)
    super(TestMarks, self).tearDown()
|
package com.srini.learning.algods.udemy.bigO;
import java.util.HashSet;
public class Q1 {
    // Given an array and a target sum, decide whether some pair of elements adds up to it.
    // arr a=[1,2,3,4], sum=8
    // a=[1,2,4,4], sum =8

    public static void main(String[] args) {
        int a[] = {1, 2, 3, 4};
        int a2[] = {1, 2, 4, 4};
        System.out.println(isSumOfPairEqualsSumIfSortedArr(a, 8));
        System.out.println(isSumOfPairEqualsSumIfSortedArr(a2, 8));
        int a3[] = {};
        System.out.println(isSumOfPairEqualsSumIfSortedArr(a3, 8));
        int a4[] = {11, -3, 3, 1, 6, 4, 8, 9, 7};
        System.out.println(isSumOfPairEqualsSumIfNonSortedArr(a4, 8));
    }

    /**
     * Two-pointer scan for a pair summing to {@code sum} in an ascending-sorted array.
     *
     * @param a   ascending-sorted input array (may be empty)
     * @param sum target pair sum
     * @return true if some pair of distinct positions sums to {@code sum}
     *
     * This solution is O(n) time and O(1) space - linear scalable.
     */
    public static boolean isSumOfPairEqualsSumIfSortedArr(int a[], int sum) {
        int lo = 0;
        int hi = a.length - 1;
        // Shrink the window from whichever side moves the pair sum toward the target.
        while ((hi - lo) > 0) {
            int pairSum = a[hi] + a[lo];
            if (pairSum == sum) {
                return true;
            } else if (pairSum > sum) {
                hi = hi - 1;
            } else {
                lo = lo + 1;
            }
        }
        return false;
    }

    /**
     * Complement-set scan for a pair summing to {@code sum} in an unsorted array.
     * Each element's needed complement is recorded; a hit on a later element means a pair exists.
     * O(n) time, O(n) extra space.
     */
    public static boolean isSumOfPairEqualsSumIfNonSortedArr(int a[], int sum) {
        // Fixed: raw HashSet replaced with the generic type to avoid unchecked operations.
        HashSet<Integer> lookUp = new HashSet<>();
        for (int val : a) {
            int diff = sum - val;
            if (lookUp.contains(val)) {
                System.out.println("val :" + val + " lookup : " + diff);
                return true;
            } else {
                lookUp.add(diff);
            }
        }
        return false;
    }
}
|
<filename>mp_sort/virtenv/lib/python3.6/site-packages/transcrypt/demos/parcel_demo/node_modules/parcel-bundler/lib/assets/JSAsset.js
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
const traverse = require('@babel/traverse').default;
const codeFrame = require('@babel/code-frame').codeFrameColumns;
const collectDependencies = require('../visitors/dependencies');
const walk = require('babylon-walk');
const Asset = require('../Asset');
const babelParser = require('@babel/parser');
const insertGlobals = require('../visitors/globals');
const fsVisitor = require('../visitors/fs');
const envVisitor = require('../visitors/env');
const babel = require('../transforms/babel/transform');
const babel7 = require('../transforms/babel/babel7');
const generate = require('@babel/generator').default;
const terser = require('../transforms/terser');
const SourceMap = require('../SourceMap');
const hoist = require('../scope-hoisting/hoist');
const path = require('path');
const fs = require('@parcel/fs');
const logger = require('@parcel/logger');
const IMPORT_RE = /\b(?:import\b|export\b|require\s*\()/;
const ENV_RE = /\b(?:process\.env)\b/;
const GLOBAL_RE = /\b(?:process|__dirname|__filename|global|Buffer|define)\b/;
const FS_RE = /\breadFileSync\b/;
const SW_RE = /\bnavigator\s*\.\s*serviceWorker\s*\.\s*register\s*\(/;
const WORKER_RE = /\bnew\s*(?:Shared)?Worker\s*\(/;
const SOURCEMAP_RE = /\/\/\s*[@#]\s*sourceMappingURL\s*=\s*([^\s]+)/;
const DATA_URL_RE = /^data:[^;]+(?:;charset=[^;]+)?;base64,(.*)/;
// Parcel's JavaScript asset type (Babel-compiled build artifact: async methods
// are expressed via the _asyncToGenerator helper wrapping generator functions).
// Handles parsing, dependency collection, babel/terser transforms, scope
// hoisting, and source-map generation for a single JS file.
class JSAsset extends Asset {
  constructor(name, options) {
    super(name, options);
    this.type = 'js';
    // Injected global shims (process, Buffer, ...) keyed by name.
    this.globals = new Map();
    this.isAstDirty = false;
    this.isES6Module = false;
    this.outputCode = null;
    // Environment variables read during the build, for cache invalidation.
    this.cacheData.env = {};
    // Optional prior compilation result (e.g. from TypeScript) with its map.
    this.rendition = options.rendition;
    this.sourceMap = this.rendition ? this.rendition.sourceMap : null;
  }

  // True when any environment variable inlined into the cached output changed.
  shouldInvalidate(cacheData) {
    for (let key in cacheData.env) {
      if (cacheData.env[key] !== process.env[key]) {
        return true;
      }
    }

    return false;
  }

  // Cheap regex pre-check so dependency-free assets can skip a full parse.
  // NOTE(review): the /.js$/ pattern has an unescaped dot — it matches any
  // character before "js$", not only a literal "." — presumably harmless here.
  mightHaveDependencies() {
    return this.isAstDirty || !/.js$/.test(this.name) || IMPORT_RE.test(this.contents) || GLOBAL_RE.test(this.contents) || SW_RE.test(this.contents) || WORKER_RE.test(this.contents);
  }

  // Parse source into a Babel AST. Lenient settings (module sourceType,
  // non-strict, return outside function allowed) accept most real-world code.
  parse(code) {
    var _this = this;

    return (0, _asyncToGenerator2.default)(function* () {
      return babelParser.parse(code, {
        filename: _this.name,
        allowReturnOutsideFunction: true,
        strictMode: false,
        sourceType: 'module',
        plugins: ['exportDefaultFrom', 'exportNamespaceFrom', 'dynamicImport']
      });
    })();
  }

  // Full AST traversal with `this` as traversal state.
  traverse(visitor) {
    return traverse(this.ast, visitor, null, this);
  }

  // Lightweight walk without scope tracking (babylon-walk).
  traverseFast(visitor) {
    return walk.simple(this.ast, visitor, this);
  }

  // Record import/require/worker/service-worker dependencies found in the AST.
  collectDependencies() {
    walk.ancestor(this.ast, collectDependencies, this);
  }

  // Load an existing sourceMappingURL (inline data: URL or sibling file),
  // strip the directive from the contents, and backfill missing
  // sourcesContent entries by reading the referenced source files.
  loadSourceMap() {
    var _this2 = this;

    return (0, _asyncToGenerator2.default)(function* () {
      // Get original sourcemap if there is any
      let match = _this2.contents.match(SOURCEMAP_RE);

      if (match) {
        _this2.contents = _this2.contents.replace(SOURCEMAP_RE, '');
        let url = match[1];
        let dataURLMatch = url.match(DATA_URL_RE);

        try {
          let json, filename;

          if (dataURLMatch) {
            filename = _this2.name;
            // NOTE(review): `new Buffer(...)` is deprecated upstream; kept
            // byte-identical since this is vendored generated code.
            json = new Buffer(dataURLMatch[1], 'base64').toString();
          } else {
            filename = path.join(path.dirname(_this2.name), url);
            json = yield fs.readFile(filename, 'utf8'); // Add as a dep so we watch the source map for changes.

            _this2.addDependency(filename, {
              includedInParent: true
            });
          }

          _this2.sourceMap = JSON.parse(json); // Attempt to read missing source contents

          if (!_this2.sourceMap.sourcesContent) {
            _this2.sourceMap.sourcesContent = [];
          }

          let missingSources = _this2.sourceMap.sources.slice(_this2.sourceMap.sourcesContent.length);

          if (missingSources.length) {
            let contents = yield Promise.all(missingSources.map(
            /*#__PURE__*/
            function () {
              var _ref = (0, _asyncToGenerator2.default)(function* (source) {
                try {
                  let sourceFile = path.join(path.dirname(filename), _this2.sourceMap.sourceRoot || '', source);
                  let result = yield fs.readFile(sourceFile, 'utf8');

                  _this2.addDependency(sourceFile, {
                    includedInParent: true
                  });

                  return result;
                } catch (err) {
                  // Best-effort: a missing referenced source only warns.
                  logger.warn(`Could not load source file "${source}" in source map of "${_this2.relativeName}".`);
                }
              });

              return function (_x) {
                return _ref.apply(this, arguments);
              };
            }()));
            _this2.sourceMap.sourcesContent = _this2.sourceMap.sourcesContent.concat(contents);
          }
        } catch (e) {
          // A malformed/unreadable map is ignored; bundling continues without it.
          logger.warn(`Could not load existing sourcemap of "${_this2.relativeName}".`);
        }
      }
    })();
  }

  // Pre-transform phase: load source maps, run user babel config, and inline
  // process.env values for browser targets.
  pretransform() {
    var _this3 = this;

    return (0, _asyncToGenerator2.default)(function* () {
      if (_this3.options.sourceMaps) {
        yield _this3.loadSourceMap();
      }

      yield babel(_this3); // Inline environment variables

      if (_this3.options.target === 'browser' && ENV_RE.test(_this3.contents)) {
        yield _this3.parseIfNeeded();

        _this3.traverseFast(envVisitor);
      }
    })();
  }

  // Main transform phase: fs inlining and global shims (browser target),
  // scope hoisting or CommonJS conversion, and optional minification.
  transform() {
    var _this4 = this;

    return (0, _asyncToGenerator2.default)(function* () {
      if (_this4.options.target === 'browser') {
        if (_this4.dependencies.has('fs') && FS_RE.test(_this4.contents)) {
          // Check if we should ignore fs calls
          // See https://github.com/defunctzombie/node-browser-resolve#skip
          let pkg = yield _this4.getPackage();
          let ignore = pkg && pkg.browser && pkg.browser.fs === false;

          if (!ignore) {
            yield _this4.parseIfNeeded();

            _this4.traverse(fsVisitor);
          }
        }

        if (GLOBAL_RE.test(_this4.contents)) {
          yield _this4.parseIfNeeded();
          walk.ancestor(_this4.ast, insertGlobals, _this4);
        }
      }

      if (_this4.options.scopeHoist) {
        yield _this4.parseIfNeeded();
        yield _this4.getPackage();

        _this4.traverse(hoist);

        _this4.isAstDirty = true;
      } else {
        // Without scope hoisting, ES modules are lowered to CommonJS.
        if (_this4.isES6Module) {
          yield babel7(_this4, {
            internal: true,
            config: {
              plugins: [require('@babel/plugin-transform-modules-commonjs')]
            }
          });
        }
      }

      if (_this4.options.minify) {
        yield terser(_this4);
      }
    })();
  }

  // Produce the final { js, map } output: regenerate code from the AST when it
  // was mutated, extend/build source maps, and prepend collected global shims.
  generate() {
    var _this5 = this;

    return (0, _asyncToGenerator2.default)(function* () {
      let enableSourceMaps = _this5.options.sourceMaps && (!_this5.rendition || !!_this5.rendition.sourceMap);
      let code;

      if (_this5.isAstDirty) {
        let opts = {
          sourceMaps: _this5.options.sourceMaps,
          sourceFileName: _this5.relativeName
        };
        let generated = generate(_this5.ast, opts, _this5.contents);

        if (enableSourceMaps && generated.rawMappings) {
          let rawMap = new SourceMap(generated.rawMappings, {
            [_this5.relativeName]: _this5.contents
          }); // Check if we already have a source map (e.g. from TypeScript or CoffeeScript)
          // In that case, we need to map the original source map to the babel generated one.

          if (_this5.sourceMap) {
            _this5.sourceMap = yield new SourceMap().extendSourceMap(_this5.sourceMap, rawMap);
          } else {
            _this5.sourceMap = rawMap;
          }
        }

        code = generated.code;
      } else {
        code = _this5.outputCode != null ? _this5.outputCode : _this5.contents;
      }

      if (enableSourceMaps && !_this5.sourceMap) {
        _this5.sourceMap = new SourceMap().generateEmptyMap(_this5.relativeName, _this5.contents);
      }

      if (_this5.globals.size > 0) {
        // Global shims are prepended, so the map must be offset by their line count.
        code = Array.from(_this5.globals.values()).join('\n') + '\n' + code;

        if (enableSourceMaps) {
          if (!(_this5.sourceMap instanceof SourceMap)) {
            _this5.sourceMap = yield new SourceMap().addMap(_this5.sourceMap);
          }

          _this5.sourceMap.offset(_this5.globals.size);
        }
      }

      return {
        js: code,
        map: _this5.sourceMap
      };
    })();
  }

  // Attach (highlighted) code frames to a parse/transform error, trimming the
  // filename prefix Babel 7 bakes into the message.
  generateErrorMessage(err) {
    const loc = err.loc;

    if (loc) {
      // Babel 7 adds its own code frame on the error message itself
      // We need to remove it and pass it separately.
      if (err.message.startsWith(this.name)) {
        err.message = err.message.slice(this.name.length + 1, err.message.indexOf('\n')).trim();
      }

      err.codeFrame = codeFrame(this.contents, {
        start: loc
      });
      err.highlightedCodeFrame = codeFrame(this.contents, {
        start: loc
      }, {
        highlightCode: true
      });
    }

    return err;
  }

}

module.exports = JSAsset;
|
#!/usr/bin/env bash
# Build the todo-protos image and run it with this directory mounted, so
# generated files land next to the sources.
set -ex

# Fixed: quote all expansions so paths containing spaces don't word-split.
SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
cd "$SCRIPT_DIR"

docker build -t todo-protos -f Dockerfile .

docker run --rm \
    -v "$SCRIPT_DIR":/home/build \
    -w /home/build \
    --name todo-protos todo-protos:latest
|
// Start the typed.js typing animation on the ".typed" element.
// Kept as a global for any other script that references it.
var typed;
$(function() {
  // Fixed: select inside the DOM-ready handler. The original ran
  // $(".typed") at script-load time, which yields an empty selection
  // (and a silent no-op) when this script is included in <head>.
  typed = $(".typed");
  typed.typed({
    strings: ["<NAME>^1800"], // "^1800" = pause 1800 ms after typing
    typeSpeed: 100,
    loop: true,
  });
});
|
from typing import Dict, List, Tuple
from py2puml.inspection.inspectmodule import inspect_domain_definition
from py2puml.domain.umlitem import UmlItem
from py2puml.domain.umlclass import UmlClass, UmlAttribute
from py2puml.domain.umlrelation import UmlRelation, RelType
from tests.asserts.attribute import assert_attribute
from tests.asserts.relation import assert_relation
from tests.modules.withbasictypes import Contact
from tests.modules.withcomposition import Worker
from tests.modules.withinheritancewithinmodule import GlowingFish
def test_inspect_domain_definition_single_class_without_composition():
    """Inspecting Contact yields one UML class with 4 typed attributes and no relation."""
    items_by_fqn: Dict[str, UmlItem] = {}
    relations: List[UmlRelation] = []

    inspect_domain_definition(Contact, 'tests.modules.withbasictypes', items_by_fqn, relations)

    inspected = list(items_by_fqn.items())
    assert len(inspected) == 1, 'one class must be inspected'
    contact_fqn, contact_class = inspected[0]
    assert contact_fqn == 'tests.modules.withbasictypes.Contact'
    assert contact_class.fqn == contact_fqn
    assert contact_class.name == 'Contact'

    contact_attributes = contact_class.attributes
    assert len(contact_attributes) == 4, 'class has 4 attributes'
    expected_attributes = (
        ('full_name', 'str'),
        ('age', 'int'),
        ('weight', 'float'),
        ('can_twist_tongue', 'bool'),
    )
    for attribute, (expected_name, expected_type) in zip(contact_attributes, expected_attributes):
        assert_attribute(attribute, expected_name, expected_type, expected_staticity=False)

    assert len(relations) == 0, 'no component must be detected in this class'
def test_inspect_domain_definition_single_class_with_composition():
    """Inspecting Worker yields one UML class plus one composition relation to Address."""
    items_by_fqn: Dict[str, UmlItem] = {}
    relations: List[UmlRelation] = []

    inspect_domain_definition(Worker, 'tests.modules.withcomposition', items_by_fqn, relations)

    assert len(items_by_fqn) == 1, 'one class must be inspected'
    assert len(relations) == 1, 'class has 1 domain component'
    worker_fqn = 'tests.modules.withcomposition.Worker'
    address_fqn = 'tests.modules.withcomposition.Address'
    assert_relation(relations[0], worker_fqn, address_fqn, RelType.COMPOSITION)
def test_parse_inheritance_within_module():
    """Inspecting GlowingFish yields the child class, its 2 attributes and 2 inheritance relations."""
    items_by_fqn: Dict[str, UmlItem] = {}
    relations: List[UmlRelation] = []

    inspect_domain_definition(
        GlowingFish, 'tests.modules.withinheritancewithinmodule', items_by_fqn, relations
    )

    inspected_classes = list(items_by_fqn.values())
    assert len(inspected_classes) == 1, 'the class with multiple inheritance was inspected'

    glowing_fish: UmlClass = inspected_classes[0]
    assert glowing_fish.name == 'GlowingFish'
    assert glowing_fish.fqn == 'tests.modules.withinheritancewithinmodule.GlowingFish'

    assert len(glowing_fish.attributes) == 2
    assert_attribute(glowing_fish.attributes[0], 'glow_for_hunting', 'bool', expected_staticity=False)
    assert_attribute(glowing_fish.attributes[1], 'glow_for_mating', 'bool', expected_staticity=False)

    assert len(relations) == 2, '2 inheritance relations must be inspected'
    child_fqn = 'tests.modules.withinheritancewithinmodule.GlowingFish'
    fish_relation, light_relation = relations
    assert_relation(
        fish_relation,
        'tests.modules.withinheritancewithinmodule.Fish',
        child_fqn,
        RelType.INHERITANCE
    )
    assert_relation(
        light_relation,
        'tests.modules.withinheritancewithinmodule.Light',
        child_fqn,
        RelType.INHERITANCE
    )
|
/**
* Created by wll on 2017/3/1.
*/
// Wire up the add-product form handlers once the DOM is ready.
$(document).ready(function () {
    bindClick();
});
/**
 * Binds the add-product form handlers:
 *  - "#sure": validates that every field is non-empty, then POSTs the
 *    values to productController/addProduct.forward and reports the result.
 *  - "#cancle": clears all fields.
 * Handlers are re-bound with off().on() so repeated calls don't stack.
 */
function bindClick() {
    var shape = $("#shape");
    var height = $('#height');
    var width = $("#width");
    var length = $("#length");
    var price = $("#price");

    $("#sure").off("click").on("click", function () {
        // Reject the submission as soon as any field is empty.
        if (shape.val() == "") {
            dialog("形状不能为空!");
            return false;
        }
        if (height.val() == "") {
            dialog("高度不得为空!");
            return false;
        }
        if (length.val() == "") {
            dialog("长度不得为空!");
            return false;
        }
        if (width.val() == "") {
            dialog("宽度不得为空!");
            return false;
        }
        if (price.val() == "") {
            dialog("价格不得为空!");
            return false;
        }
        var param = {};
        param.shape = shape.val();
        param.height = height.val();
        param.length = length.val();
        param.width = width.val();
        param.price = price.val();
        $.post(home + '/productController/addProduct.forward', param, function (data) {
            if (data.results.success) {
                dialog("添加成功");
            } else {
                dialog("添加失败");
            }
        });
    });

    $("#cancle").off("click").on("click", function () {
        shape.val("");
        height.val("");
        length.val("");
        width.val("");
        // BUG FIX: was price.vcal("") — a typo that threw a TypeError and
        // left the price field uncleared when cancel was clicked.
        price.val("");
    });
}
|
# This script builds the Vale compiler, runs some tests on it, and also packages up a release zip file.
# It assumes we've already ran prereqs-linux.sh, or otherwise installed all the dependencies.

# Arg 1: path to an LLVM distribution (must contain lib/cmake/llvm).
LLVM_DIR="$1"
if [ "$LLVM_DIR" == "" ]; then
  echo "Please supply the LLVM directory."
  echo "Example: ~/clang+llvm-11.1.0-x86_64-linux-gnu-ubuntu-20.10"
  # FIX: exit nonzero so callers/CI can detect the failure (was bare `exit`).
  exit 1
fi
LLVM_CMAKE_DIR="$LLVM_DIR/lib/cmake/llvm"
if [ ! -d "$LLVM_CMAKE_DIR" ]; then
  echo "Directory not found: $LLVM_CMAKE_DIR"
  echo "Are you sure you specified the right LLVM directory?"
  exit 1
fi

# Arg 2: directory containing a previously-released valec, used to bootstrap.
BOOTSTRAPPING_VALEC_DIR="$2"
if [ "$BOOTSTRAPPING_VALEC_DIR" == "" ]; then
  echo "Please supply the bootstrapping valec directory."
  echo "Example: ~/ValeCompiler-0.1.3.3-Ubuntu"
  exit 1
fi

cd Valestrom
echo Compiling Valestrom...
sbt assembly || { echo 'Valestrom build failed.' ; exit 1; }

cd ../Midas
echo Generating Midas...
# FIX: cmake/make failures were previously unchecked and the script plowed on.
cmake -B build -D LLVM_DIR="$LLVM_CMAKE_DIR" || { echo 'Midas generate failed.' ; exit 1; }
cd build
echo Compiling Midas...
make || { echo 'Midas build failed.' ; exit 1; }

cd ../../Driver
echo Compiling Driver...
./build.sh "$BOOTSTRAPPING_VALEC_DIR" || { echo 'Driver build failed.' ; exit 1; }

cd ../scripts
rm -rf ../release-unix || { echo 'Error removing previous release-unix dir.' ; exit 1; }
mkdir -p ../release-unix || { echo 'Error making new release-unix dir.' ; exit 1; }
mkdir -p ../release-unix/samples || { echo 'Error making new samples dir.' ; exit 1; }
cp ../Valestrom/Valestrom.jar ../release-unix || { echo 'Error copying into release-unix.' ; exit 1; }
cp -r ../Valestrom/Tests/test/main/resources/programs ../release-unix/samples || { echo 'Error copying into release-unix.' ; exit 1; }
cp -r ../Midas/src/builtins ../release-unix/builtins || { echo 'Error copying into release-unix.' ; exit 1; }
cp releaseREADME.txt ../release-unix/README.txt || { echo 'Error copying into release-unix.' ; exit 1; }
cp valec-* ../release-unix || { echo 'Error copying into release-unix.' ; exit 1; }
cp ../Midas/build/midas ../release-unix/midas || { echo 'Error copying into release-unix.' ; exit 1; }
cp -r ../stdlib ../release-unix/stdlib || { echo 'Error copying into release-unix.' ; exit 1; }
cp -r helloworld ../release-unix/samples/helloworld || { echo 'Error copying into release-unix.' ; exit 1; }
cp ../Driver/build/valec ../release-unix/valec || { echo 'Error copying into release-unix.' ; exit 1; }
# FIX: these two steps previously reported 'Error copying into release-unix.'
# which was misleading for a cd/zip failure.
cd ../release-unix || { echo 'Error entering release-unix dir.' ; exit 1; }
zip -r ValeCompiler.zip * || { echo 'Error zipping up the release.' ; exit 1; }

cd ../Tester
rm -rf ./BuiltValeCompiler
unzip ../release-unix/ValeCompiler.zip -d ./BuiltValeCompiler
echo Compiling Tester...
./build.sh "$BOOTSTRAPPING_VALEC_DIR" || { echo 'Tester build failed.' ; exit 1; }
echo Running Tester...
./build/testvalec --valestrom_path ./BuiltValeCompiler/Valestrom.jar --midas_path ./BuiltValeCompiler/midas --builtins_dir ./BuiltValeCompiler/builtins --valec_path ./BuiltValeCompiler/valec --midas_tests_dir ../Midas/test --valestrom_tests_dir ../Valestrom --concurrent 6 @assist || { echo 'Tests failed.' ; exit 1; }
cd ..
|
package com.pillarhou.disk_info;
import android.os.Environment;
import android.os.StatFs;
import android.text.format.Formatter;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import io.flutter.plugin.common.PluginRegistry.Registrar;
/**
 * DiskInfoPlugin
 *
 * Flutter plugin that reports device storage sizes over the "disk_info"
 * method channel. "getDiskInfo" returns internal (data-partition) totals;
 * the SD-card helpers below are not wired to any method call.
 */
public class DiskInfoPlugin implements MethodCallHandler {
  /**
   * Plugin registration.
   */
  public static void registerWith(Registrar registrar) {
    final MethodChannel channel = new MethodChannel(registrar.messenger(), "disk_info");
    channel.setMethodCallHandler(new DiskInfoPlugin());
  }

  /**
   * Total size of external (SD card) storage, in bytes.
   * NOTE(review): unused — onMethodCall never calls it. StatFs.getBlockSize()
   * and getBlockCount() are deprecated since API 18; consider the *Long
   * variants once minSdkVersion allows — confirm before switching.
   *
   * @return total external-storage size in bytes
   */
  private long getSDTotalSize() {
    File path = Environment.getExternalStorageDirectory();
    StatFs stat = new StatFs(path.getPath());
    long blockSize = stat.getBlockSize();
    long totalBlocks = stat.getBlockCount();
    return blockSize * totalBlocks;
  }

  /**
   * Remaining (available) external (SD card) storage, in bytes.
   * NOTE(review): unused — onMethodCall never calls it.
   *
   * @return available external-storage size in bytes
   */
  private long getSDAvailableSize() {
    File path = Environment.getExternalStorageDirectory();
    StatFs stat = new StatFs(path.getPath());
    long blockSize = stat.getBlockSize();
    long availableBlocks = stat.getAvailableBlocks();
    return blockSize * availableBlocks;
  }

  /**
   * Total size of internal (data partition) storage, in bytes.
   *
   * @return total internal-storage size in bytes
   */
  private long getRomTotalSize() {
    File path = Environment.getDataDirectory();
    StatFs stat = new StatFs(path.getPath());
    long blockSize = stat.getBlockSize();
    long totalBlocks = stat.getBlockCount();
    return blockSize * totalBlocks;
  }

  /**
   * Available internal (data partition) storage, in bytes.
   *
   * @return available internal-storage size in bytes
   */
  private long getRomAvailableSize() {
    File path = Environment.getDataDirectory();
    StatFs stat = new StatFs(path.getPath());
    long blockSize = stat.getBlockSize();
    long availableBlocks = stat.getAvailableBlocks();
    return blockSize * availableBlocks;
  }

  /**
   * Dispatches method-channel calls: "getPlatformVersion" returns the Android
   * release string; "getDiskInfo" returns a map with "total" and "available"
   * internal-storage byte counts; anything else reports not-implemented.
   */
  @Override
  public void onMethodCall(MethodCall call, Result result) {
    if (call.method.equals("getPlatformVersion")) {
      result.success("Android " + android.os.Build.VERSION.RELEASE);
    } else if (call.method.equals("getDiskInfo")) {
      Map<String, Long> map = new HashMap<String, Long>();
      long total = getRomTotalSize();
      long available = getRomAvailableSize();
      map.put("total", total);
      map.put("available", available);
      result.success(map);
    } else {
      result.notImplemented();
    }
  }
}
|
#!/usr/bin/env sh
# Blue-green deployment script for the if-sitesearch service: a local lock
# file records which color is live, two containers of the OTHER color are
# started on the remote host, and the router is then switched to them.
export docker_tag=latest
container_name=if-sitesearch
docker_image_name=sis-sitesearch
docker_network=sitesearch

# The lock file is a toggle marker: if it exists it is removed and the
# function returns 0 ("blue is up"); if absent it is created and the function
# returns 1. Each run therefore flips the recorded color for the next run.
isBlueUp() {
    if [ -f "./blue-green-deployment.lock" ]; then
        rm ./blue-green-deployment.lock
        return 0
    else
        touch ./blue-green-deployment.lock
        return 1
    fi
}

# Start one detached container ($1 = container name) on the remote host,
# wired to the gelf log collector and configured via environment variables
# forwarded from this shell.
runService() {
    ssh ubuntu@main.sitesearch.cloud docker run -d --name $1 \
        --log-driver=gelf \
        --log-opt gelf-address=udp://logs.sitesearch.cloud:12201 \
        --env SIS_API_SERVICE_URL=$SIS_API_SERVICE_URL \
        --env SERVICE_SECRET=$SERVICE_SECRET \
        --env SIS_SERVICE_HOST=$SIS_SERVICE_HOST \
        --env WOO_COMMERCE_CONSUMER_KEY="$WOO_COMMERCE_CONSUMER_KEY" \
        --env WOO_COMMERCE_CONSUMER_SECRET="$WOO_COMMERCE_CONSUMER_SECRET" \
        --env ADMIN_SITE_SECRET=$ADMIN_SITE_SECRET \
        --env INVISIBLE_RECAPTCHA_SITE_SECRET=${INVISIBLE_RECAPTCHA_SITE_SECRET} \
        --env BUILD_NUMBER=$BUILD_NUMBER \
        --env SCM_HASH=$SCM_HASH \
        --restart unless-stopped \
        --network $docker_network \
        docker-registry.intrafind.net/intrafind/${docker_image_name}:${docker_tag}
}

# Replace container $1: remove the old instance, drop the cached image so the
# newest one is pulled on the next run, then start it again.
# NOTE(review): the rmi line hard-codes the :latest tag instead of using
# $docker_tag (see the commented-out variant above it) — confirm intended.
startComponent() {
    ssh ubuntu@main.sitesearch.cloud docker rm -f $1
    # ssh ubuntu@main.sitesearch.cloud docker rmi -f docker-registry.intrafind.net/intrafind/sis-sitesearch:$docker_tag
    ssh ubuntu@main.sitesearch.cloud docker rmi -f docker-registry.intrafind.net/intrafind/sis-sitesearch:latest
    runService $1
}

# Bring up two instances of the inactive color, give them 21s to boot, then
# flip the router to that color.
if isBlueUp; then
    echo "blue is active"
    current="${container_name}-green"
    # export docker_tag=green
    startComponent ${current}
    startComponent ${current}-1
    sleep 21
    ssh ubuntu@main.sitesearch.cloud docker exec router switch.sh green
else
    echo "blue is inactive"
    current="${container_name}-blue"
    # export docker_tag=blue
    startComponent ${current}
    startComponent ${current}-1
    sleep 21
    ssh ubuntu@main.sitesearch.cloud docker exec router switch.sh blue
fi
|
#!/usr/bin/env bash
# Run all Go packages' tests (excluding vendored code) with integration tags.
#
# FIX: `set -o pipefail` is a bash feature, not POSIX sh — under dash the
# original `#!/usr/bin/env sh` shebang aborts with "Illegal option -o
# pipefail", so the shebang now targets bash explicitly.
set -o errexit
set -o nounset
set -o pipefail

go test $(go list ./... | grep -v vendor) -v -tags='integration'
|
# Install build prerequisites (git, Python 3.8 toolchain, compilers, libffi /
# openssl headers, cargo) and Python packaging tools.
# NOTE(review): this mixes `apt` (Debian/Ubuntu) with RPM-style package names
# (python38, python38-devel, gcc-c++, *.ppc64le suffixes) that belong to
# yum/dnf on RHEL/CentOS ppc64le — confirm the target distro; on Ubuntu these
# names will not resolve.
sudo apt update -y
sudo apt install -y git wget unzip python3 python38 python38-pip python38-devel python3-devel make gcc gcc-c++ libffi-devel.ppc64le libffi.ppc64le cargo.ppc64le openssl.ppc64le openssl-devel.ppc64le
# NOTE(review): creating /usr/bin/python is done without sudo here — verify
# this script runs as root, otherwise the symlink creation fails.
ln -s /usr/bin/python3.8 /usr/bin/python
pip3 install tox wheel setuptools_rust
pip3 install cryptography
|
<reponame>zjqx1991/02_Struts2<gh_stars>0
/**
*
*/
package com.revanwang.param;
import com.opensymphony.xwork2.ActionSupport;
import com.opensymphony.xwork2.ModelDriven;
/**
 * Struts2 action demonstrating the ModelDriven parameter-passing pattern:
 * the params interceptor pushes request parameters directly onto the
 * {@code User} model returned by {@link #getModel()}.
 */
public class Param3Action extends ActionSupport implements ModelDriven<User> {

    private static final long serialVersionUID = 1L;

    /** Model instance populated by the params interceptor. */
    private final User user = new User();

    public void param3() {
        System.out.println(user.getName());
    }

    @Override
    public User getModel() {
        return user;
    }
}
|
'use strict';

// Smoke test for the protocolHelperNew service: verifies it can be injected.
describe('Service: protocolHelperNew', function () {
  var protocolHelperNew;

  // Load the application module that registers the service.
  beforeEach(module('wetLabAccelerator'));

  // Pull the service instance out of the injector before each spec.
  beforeEach(inject(function (_protocolHelperNew_) {
    protocolHelperNew = _protocolHelperNew_;
  }));

  it('should do something', function () {
    expect(!!protocolHelperNew).toBe(true);
  });
});
|
// Feature implementation
// Approach:
// 1. Register three events on the ul: touchstart, touchmove, touchend
// 2. In touchstart, record the starting position
// 3. In touchmove, compute the drag distance and move the ul along with it
// 4. On release, clamp the final position to the allowed range
;(function () {
  var nav = document.querySelector(".jd_content .nav");
  var list = nav.querySelector("ul");

  // Y coordinate where the current touch gesture began.
  var touchStartY;
  // Core state: accumulated offset of the list after each completed swipe.
  var offsetY = 0;

  list.addEventListener("touchstart", function (e) {
    touchStartY = e.touches[0].clientY;
  });

  list.addEventListener("touchmove", function (e) {
    var delta = e.touches[0].clientY - touchStartY;
    // Follow the finger immediately: no transition while dragging.
    disableTransition();
    moveTo(offsetY + delta);
  });

  // When the gesture ends, fold its distance into the persistent offset.
  list.addEventListener("touchend", function (e) {
    offsetY += e.changedTouches[0].clientY - touchStartY;
    console.log(offsetY);

    // Clamp: never scroll above the top of the list...
    if (offsetY > 0) {
      offsetY = 0;
    }
    // ...or past its bottom edge.
    if (offsetY < nav.offsetHeight - list.offsetHeight) {
      offsetY = nav.offsetHeight - list.offsetHeight;
    }

    // Snap back into range with a short animation.
    enableTransition();
    moveTo(offsetY);
  });

  function enableTransition() {
    list.style.transition = "all .2s";
    list.style.webkitTransition = "all .2s";
  }

  function disableTransition() {
    list.style.transition = "none";
    list.style.webkitTransition = "none";
  }

  function moveTo(value) {
    list.style.transform = "translateY(" + value + "px)";
    list.style.webkitTransform = "translateY(" + value + "px)";
  }
})();
|
def generate_n_grams(string, n):
    """Return all contiguous substrings of length ``n`` from ``string``.

    Produces ``len(string) - n + 1`` n-grams in order of appearance;
    returns an empty list when ``n`` exceeds the string length.
    """
    return [string[start:start + n] for start in range(len(string) - n + 1)]
|
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build-phase script: copies each vendored /
# built framework into the app bundle, strips architectures that don't match
# the current build, and re-signs binaries when the build settings require it.
# NOTE(review): the script uses bash features (ERR trap, arrays, [[ ]], the
# `function` keyword, `set -o pipefail`) under a #!/bin/sh shebang — fine on
# macOS where /bin/sh is bash-compatible, but confirm on other build hosts.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}

trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Locate the framework: prefer the build products dir, then its basename
  # within that dir, then the literal path that was passed in.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Run in the background so multiple frameworks sign concurrently;
      # the `wait` at the bottom of the script joins them.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

# Embed the pods for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/GTMSessionFetcher/GTMSessionFetcher.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/leveldb-library/leveldb.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/GTMSessionFetcher/GTMSessionFetcher.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/leveldb-library/leveldb.framework"
fi

# Join any background code-signing jobs started above.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<reponame>elektronikasa/Native-VML-FED<gh_stars>100-1000
$(function () {
  // Slide-in side panel, toggled by elements matching ".toggle-panel".
  var panel = $('#scotch-panel').scotchPanel({
    containerSelector: 'body',
    direction: 'right',
    duration: 300,
    transition: 'ease',
    clickSelector: '.toggle-panel',
    distanceX: '70%',
    enableEscapeKey: true
  });

  // Auto-close the panel once the viewport is wide enough for the full layout.
  $(window).resize(function () {
    var wideEnough = $(window).width() >= 769;
    var panelOpen = $('.scotch-panel-canvas').hasClass('scotch-is-showing');
    if (wideEnough && panelOpen) {
      panel.close();
    }
  });
});
|
# Launch source-domain training pinned to GPUs 6 and 7.
CUDA_VISIBLE_DEVICES='6,7' python train_source.py
|
<reponame>tenebrousedge/ruby-packer<filename>ruby/spec/ruby/core/array/push_spec.rb
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/push', __FILE__)

# Array#push is specified entirely by the shared :array_push examples,
# parameterized with the :push method name.
describe "Array#push" do
  it_behaves_like(:array_push, :push)
end
|
<reponame>RTEnzyme/vldb-2021-labs
// Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package core
import (
"context"
"strings"
"github.com/pingcap/errors"
"github.com/pingcap/tidb/expression"
"github.com/pingcap/tidb/infoschema"
"github.com/pingcap/tidb/parser/ast"
"github.com/pingcap/tidb/parser/model"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/parser/opcode"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/sessionctx/variable"
"github.com/pingcap/tidb/table"
"github.com/pingcap/tidb/types"
driver "github.com/pingcap/tidb/types/parser_driver"
"github.com/pingcap/tidb/util/chunk"
)
// evalAstExpr evaluates ast expression directly.
// A *driver.ValueExpr is returned as-is; any other node is rewritten against
// a dual-table plan and evaluated on an empty row, so it must not reference
// real columns.
func evalAstExpr(sctx sessionctx.Context, expr ast.ExprNode) (types.Datum, error) {
	if val, ok := expr.(*driver.ValueExpr); ok {
		return val.Datum, nil
	}
	// Use the transaction's schema snapshot when one is available.
	var is infoschema.InfoSchema
	if sctx.GetSessionVars().TxnCtx.InfoSchema != nil {
		is = sctx.GetSessionVars().TxnCtx.InfoSchema.(infoschema.InfoSchema)
	}
	b := NewPlanBuilder(sctx, is)
	// LogicalTableDual provides a minimal plan context for the rewrite.
	fakePlan := LogicalTableDual{}.Init(sctx)
	newExpr, _, err := b.rewrite(context.TODO(), expr, fakePlan, nil, true)
	if err != nil {
		return types.Datum{}, err
	}
	return newExpr.Eval(chunk.Row{})
}
// rewrite function rewrites ast expr to expression.Expression.
// aggMapper maps ast.AggregateFuncExpr to the columns offset in p's output schema.
// asScalar means whether this expression must be treated as a scalar expression.
// And this function returns a result expression, a new plan that may have apply or semi-join.
func (b *PlanBuilder) rewrite(ctx context.Context, exprNode ast.ExprNode, p LogicalPlan, aggMapper map[*ast.AggregateFuncExpr]int, asScalar bool) (expression.Expression, LogicalPlan, error) {
	// A plain rewrite is simply a preprocess-free rewriteWithPreprocess.
	return b.rewriteWithPreprocess(ctx, exprNode, p, aggMapper, asScalar, nil)
}
// rewriteWithPreprocess is for handling the situation that we need to adjust the input ast tree
// before really using its node in `expressionRewriter.Leave`. In that case, we first call
// er.preprocess(expr), which returns a new expr. Then we use the new expr in `Leave`.
func (b *PlanBuilder) rewriteWithPreprocess(
	ctx context.Context,
	exprNode ast.ExprNode,
	p LogicalPlan, aggMapper map[*ast.AggregateFuncExpr]int,
	asScalar bool,
	preprocess func(ast.Node) ast.Node,
) (expression.Expression, LogicalPlan, error) {
	// The counter tracks rewriter nesting depth so getExpressionRewriter can
	// hand out a pooled rewriter per depth level.
	b.rewriterCounter++
	defer func() { b.rewriterCounter-- }()

	rewriter := b.getExpressionRewriter(ctx, p)
	// The rewriter maybe is obtained from "b.rewriterPool", "rewriter.err" is
	// not nil means certain previous procedure has not handled this error.
	// Here we give us one more chance to make a correct behavior by handling
	// this missed error.
	if rewriter.err != nil {
		return nil, nil, rewriter.err
	}

	// Install the per-call configuration before walking the AST.
	rewriter.aggrMap = aggMapper
	rewriter.asScalar = asScalar
	rewriter.preprocess = preprocess

	expr, resultPlan, err := b.rewriteExprNode(rewriter, exprNode, asScalar)
	return expr, resultPlan, err
}
// getExpressionRewriter returns a rewriter for the current nesting depth
// (b.rewriterCounter): a pooled one reset for reuse when available,
// otherwise a freshly-allocated one that is added to the pool.
func (b *PlanBuilder) getExpressionRewriter(ctx context.Context, p LogicalPlan) (rewriter *expressionRewriter) {
	// Fill schema/names from the plan after the rewriter has been chosen —
	// this covers both the pooled and the newly-allocated paths.
	defer func() {
		if p != nil {
			rewriter.schema = p.Schema()
			rewriter.names = p.OutputNames()
		}
	}()
	if len(b.rewriterPool) < b.rewriterCounter {
		rewriter = &expressionRewriter{p: p, b: b, sctx: b.ctx, ctx: ctx}
		b.rewriterPool = append(b.rewriterPool, rewriter)
		return
	}
	// Reuse the pooled rewriter for this depth, clearing the per-call state a
	// previous rewrite may have left. Note that err is deliberately NOT
	// cleared here — rewriteWithPreprocess checks and surfaces it.
	rewriter = b.rewriterPool[b.rewriterCounter-1]
	rewriter.p = p
	rewriter.asScalar = false
	rewriter.aggrMap = nil
	rewriter.preprocess = nil
	rewriter.insertPlan = nil
	rewriter.ctxStack = rewriter.ctxStack[:0]
	rewriter.ctxNameStk = rewriter.ctxNameStk[:0]
	rewriter.ctx = ctx
	return
}
// rewriteExprNode walks exprNode with the rewriter and returns the single
// expression left on the rewriter's context stack. When asScalar is false an
// empty stack is legal (the whole expression was absorbed into the plan);
// otherwise exactly one scalar expression must remain.
func (b *PlanBuilder) rewriteExprNode(rewriter *expressionRewriter, exprNode ast.ExprNode, asScalar bool) (expression.Expression, LogicalPlan, error) {
	exprNode.Accept(rewriter)
	if rewriter.err != nil {
		return nil, nil, errors.Trace(rewriter.err)
	}
	if !asScalar && len(rewriter.ctxStack) == 0 {
		return nil, rewriter.p, nil
	}
	if len(rewriter.ctxStack) != 1 {
		return nil, nil, errors.Errorf("context len %v is invalid", len(rewriter.ctxStack))
	}
	// The result must be a scalar, not a multi-column row value.
	rewriter.err = expression.CheckArgsNotMultiColumnRow(rewriter.ctxStack[0])
	if rewriter.err != nil {
		return nil, nil, errors.Trace(rewriter.err)
	}
	return rewriter.ctxStack[0], rewriter.p, nil
}
// expressionRewriter converts ast.ExprNode trees into expression.Expression
// values while visiting them (see Enter/Leave). Intermediate results are kept
// on a stack so nested expressions compose naturally.
type expressionRewriter struct {
	// ctxStack holds partially-built expressions; ctxNameStk mirrors it with
	// the field name of each entry.
	ctxStack   []expression.Expression
	ctxNameStk []*types.FieldName
	p          LogicalPlan
	schema     *expression.Schema
	names      []*types.FieldName
	err        error
	// aggrMap maps aggregate AST nodes to output-column offsets in schema.
	aggrMap map[*ast.AggregateFuncExpr]int
	b       *PlanBuilder
	sctx    sessionctx.Context
	ctx     context.Context
	// asScalar indicates the return value must be a scalar value.
	// NOTE: This value can be changed during expression rewritten.
	asScalar bool
	// preprocess is called for every ast.Node in Leave.
	preprocess func(ast.Node) ast.Node
	// insertPlan is only used to rewrite the expressions inside the assignment
	// of the "INSERT" statement.
	insertPlan *Insert
}
// ctxStackLen reports how many expressions currently sit on the rewrite stack.
func (er *expressionRewriter) ctxStackLen() int {
	return len(er.ctxStack)
}

// ctxStackPop discards the top num expressions (and their names).
func (er *expressionRewriter) ctxStackPop(num int) {
	newLen := er.ctxStackLen() - num
	er.ctxStack = er.ctxStack[:newLen]
	er.ctxNameStk = er.ctxNameStk[:newLen]
}

// ctxStackAppend pushes an expression together with its field name.
func (er *expressionRewriter) ctxStackAppend(col expression.Expression, name *types.FieldName) {
	er.ctxStack = append(er.ctxStack, col)
	er.ctxNameStk = append(er.ctxNameStk, name)
}
// constructBinaryOpFunction converts binary operator functions
// 1. If op are EQ or NE or NullEQ, constructBinaryOpFunctions converts (a0,a1,a2) op (b0,b1,b2) to (a0 op b0) and (a1 op b1) and (a2 op b2)
// 2. Else constructBinaryOpFunctions converts (a0,a1,a2) op (b0,b1,b2) to
// `IF( a0 NE b0, a0 op b0,
//      IF ( isNull(a0 NE b0), Null,
//           IF ( a1 NE b1, a1 op b1,
//                IF ( isNull(a1 NE b1), Null, a2 op b2))))`
// NOTE(review): the comment mentions NullEQ but the case arm below handles
// only ast.EQ and ast.NE — confirm whether NullEQ should be listed there.
func (er *expressionRewriter) constructBinaryOpFunction(l expression.Expression, r expression.Expression, op string) (expression.Expression, error) {
	lLen, rLen := expression.GetRowLen(l), expression.GetRowLen(r)
	if lLen == 1 && rLen == 1 {
		// Plain scalar comparison: build the operator function directly.
		return er.newFunction(op, types.NewFieldType(mysql.TypeTiny), l, r)
	} else if rLen != lLen {
		// Row values must have matching arity on both sides.
		return nil, expression.ErrOperandColumns.GenWithStackByArgs(lLen)
	}
	switch op {
	case ast.EQ, ast.NE:
		// Compare element-wise, then AND the results for EQ / OR them for NE.
		funcs := make([]expression.Expression, lLen)
		for i := 0; i < lLen; i++ {
			var err error
			funcs[i], err = er.constructBinaryOpFunction(expression.GetFuncArg(l, i), expression.GetFuncArg(r, i), op)
			if err != nil {
				return nil, err
			}
		}
		if op == ast.NE {
			return expression.ComposeDNFCondition(er.sctx, funcs...), nil
		}
		return expression.ComposeCNFCondition(er.sctx, funcs...), nil
	default:
		// Ordering comparison on rows: decide on the first differing element,
		// propagate NULL when the head equality is unknown, and recurse on
		// the tails otherwise (see the IF-chain in the doc comment).
		larg0, rarg0 := expression.GetFuncArg(l, 0), expression.GetFuncArg(r, 0)
		var expr1, expr2, expr3, expr4, expr5 expression.Expression
		expr1 = expression.NewFunctionInternal(er.sctx, ast.NE, types.NewFieldType(mysql.TypeTiny), larg0, rarg0)
		expr2 = expression.NewFunctionInternal(er.sctx, op, types.NewFieldType(mysql.TypeTiny), larg0, rarg0)
		expr3 = expression.NewFunctionInternal(er.sctx, ast.IsNull, types.NewFieldType(mysql.TypeTiny), expr1)
		var err error
		// Drop the head element from both rows before recursing on the tails.
		l, err = expression.PopRowFirstArg(er.sctx, l)
		if err != nil {
			return nil, err
		}
		r, err = expression.PopRowFirstArg(er.sctx, r)
		if err != nil {
			return nil, err
		}
		expr4, err = er.constructBinaryOpFunction(l, r, op)
		if err != nil {
			return nil, err
		}
		expr5, err = er.newFunction(ast.If, types.NewFieldType(mysql.TypeTiny), expr3, expression.Null, expr4)
		if err != nil {
			return nil, err
		}
		return er.newFunction(ast.If, types.NewFieldType(mysql.TypeTiny), expr1, expr2, expr5)
	}
}
// Enter implements Visitor interface.
// It intercepts node kinds whose children must not be visited with the
// default logic (aggregates, pre-resolved columns, VALUES() references, ...);
// returning true skips visiting the node's children.
func (er *expressionRewriter) Enter(inNode ast.Node) (ast.Node, bool) {
	switch v := inNode.(type) {
	case *ast.AggregateFuncExpr:
		// Aggregates must already be mapped to an output column offset.
		index, ok := -1, false
		if er.aggrMap != nil {
			index, ok = er.aggrMap[v]
		}
		if !ok {
			er.err = ErrInvalidGroupFuncUse
			return inNode, true
		}
		er.ctxStackAppend(er.schema.Columns[index], er.names[index])
		return inNode, true
	case *ast.ColumnNameExpr:
		// Columns pre-resolved by the planner are looked up directly.
		if index, ok := er.b.colMapper[v]; ok {
			er.ctxStackAppend(er.schema.Columns[index], er.names[index])
			return inNode, true
		}
	case *ast.PatternInExpr:
		if len(v.List) != 1 {
			break
		}
		// For 10 in ((select * from t)), the parser won't set v.Sel.
		// So we must process this case here.
		// NOTE(review): the unwrapped x is never used after the loop — in
		// upstream TiDB this loop also handles *ast.SubqueryExpr; confirm the
		// simplification is intentional in this lab fork.
		x := v.List[0]
		for {
			switch y := x.(type) {
			case *ast.ParenthesesExpr:
				// Peel nested parentheses around the single list element.
				x = y.Expr
			default:
				return inNode, false
			}
		}
	case *ast.ParenthesesExpr:
	case *ast.ValuesExpr:
		// VALUES(col): resolve the column against the INSERT target's schema
		// when rewriting assignment expressions of an INSERT statement.
		schema, names := er.schema, er.names
		// NOTE: "er.insertPlan != nil" means that we are rewriting the
		// expressions inside the assignment of "INSERT" statement. we have to
		// use the "tableSchema" of that "insertPlan".
		if er.insertPlan != nil {
			schema = er.insertPlan.tableSchema
			names = er.insertPlan.tableColNames
		}
		idx, err := expression.FindFieldName(names, v.Column.Name)
		if err != nil {
			er.err = err
			return inNode, false
		}
		if idx < 0 {
			er.err = ErrUnknownColumn.GenWithStackByArgs(v.Column.Name.OrigColName(), "field list")
			return inNode, false
		}
		col := schema.Columns[idx]
		er.ctxStackAppend(expression.NewValuesFunc(er.sctx, col.Index, col.RetType), types.EmptyName)
		return inNode, true
	case *ast.FuncCallExpr:
	default:
		er.asScalar = true
	}
	return inNode, false
}
// Leave implements Visitor interface.
// Leave is invoked after a node's children have been rewritten; it converts
// the AST node into an expression using the operands already pushed on
// er.ctxStack by the children. Any failure is recorded in er.err and aborts
// the walk by returning false.
func (er *expressionRewriter) Leave(originInNode ast.Node) (retNode ast.Node, ok bool) {
if er.err != nil {
return retNode, false
}
var inNode = originInNode
if er.preprocess != nil {
inNode = er.preprocess(inNode)
}
switch v := inNode.(type) {
case *ast.AggregateFuncExpr, *ast.ColumnNameExpr, *ast.ParenthesesExpr, *ast.ValuesExpr:
// Already handled in Enter; nothing to do here.
case *driver.ValueExpr:
value := &expression.Constant{Value: v.Datum, RetType: &v.Type}
er.ctxStackAppend(value, types.EmptyName)
case *ast.VariableExpr:
er.rewriteVariable(v)
case *ast.FuncCallExpr:
er.funcCallToExpression(v)
case *ast.ColumnName:
er.toColumn(v)
case *ast.UnaryOperationExpr:
er.unaryOpToExpression(v)
case *ast.BinaryOperationExpr:
er.binaryOpToExpression(v)
case *ast.BetweenExpr:
er.betweenToExpression(v)
case *ast.RowExpr:
er.rowToScalarFunc(v)
case *ast.PatternInExpr:
er.inToExpression(len(v.List), v.Not, &v.Type)
case *ast.PatternCutlFunc:
// NOTE(review): PatternCutlFunc is a non-standard node type — confirm
// its intended semantics against the parser that produces it.
er.cutlToExpression(len(v.List), v.Not, &v.Type)
case *ast.IsNullExpr:
er.isNullToExpression(v)
case *ast.DefaultExpr:
er.evalDefaultExpr(v)
default:
er.err = errors.Errorf("UnknownType: %T", v)
return retNode, false
}
if er.err != nil {
return retNode, false
}
return originInNode, true
}
// newFunction is a thin convenience wrapper around expression.NewFunction
// that supplies the rewriter's session context.
func (er *expressionRewriter) newFunction(funcName string, retType *types.FieldType, args ...expression.Expression) (expression.Expression, error) {
return expression.NewFunction(er.sctx, funcName, retType, args...)
}
// rewriteVariable converts a user variable (@v / @v := expr) or a system
// variable (@@v, @@global.v, @@session.v) reference into an expression and
// pushes it onto the context stack. Errors are recorded in er.err.
func (er *expressionRewriter) rewriteVariable(v *ast.VariableExpr) {
stkLen := len(er.ctxStack)
name := strings.ToLower(v.Name)
sessionVars := er.b.ctx.GetSessionVars()
if !v.IsSystem {
// User variable: an assignment becomes SetVar, a plain read becomes GetVar.
if v.Value != nil {
er.ctxStack[stkLen-1], er.err = er.newFunction(ast.SetVar,
er.ctxStack[stkLen-1].GetType(),
expression.DatumToConstant(types.NewDatum(name), mysql.TypeString),
er.ctxStack[stkLen-1])
er.ctxNameStk[stkLen-1] = types.EmptyName
return
}
f, err := er.newFunction(ast.GetVar,
// TODO: Here is wrong, the sessionVars should store a name -> Datum map. Will fix it later.
types.NewFieldType(mysql.TypeString),
expression.DatumToConstant(types.NewStringDatum(name), mysql.TypeString))
if err != nil {
er.err = err
return
}
er.ctxStackAppend(f, types.EmptyName)
return
}
var val string
var err error
if v.ExplicitScope {
// An explicit GLOBAL/SESSION keyword must match the variable's scope.
err = variable.ValidateGetSystemVar(name, v.IsGlobal)
if err != nil {
er.err = err
return
}
}
sysVar := variable.SysVars[name]
if sysVar == nil {
er.err = variable.ErrUnknownSystemVar.GenWithStackByArgs(name)
return
}
// Variable is @@global.variable_name or variable is only global scope variable.
if v.IsGlobal || sysVar.Scope == variable.ScopeGlobal {
val, err = variable.GetGlobalSystemVar(sessionVars, name)
} else {
val, err = variable.GetSessionSystemVar(sessionVars, name)
}
if err != nil {
er.err = err
return
}
// The value is folded into a constant string with the connection's charset/collation.
e := expression.DatumToConstant(types.NewStringDatum(val), mysql.TypeVarString)
e.GetType().Charset, _ = er.sctx.GetSessionVars().GetSystemVar(variable.CharacterSetConnection)
e.GetType().Collate, _ = er.sctx.GetSessionVars().GetSystemVar(variable.CollationConnection)
er.ctxStackAppend(e, types.EmptyName)
}
// unaryOpToExpression rewrites a unary operation (+x, -x, !x) in place on the
// top of the context stack. Unary plus is the identity and is dropped.
func (er *expressionRewriter) unaryOpToExpression(v *ast.UnaryOperationExpr) {
stkLen := len(er.ctxStack)
var op string
switch v.Op {
case opcode.Plus:
// expression (+ a) is equal to a
return
case opcode.Minus:
op = ast.UnaryMinus
case opcode.Not:
op = ast.UnaryNot
default:
er.err = errors.Errorf("Unknown Unary Op %T", v.Op)
return
}
// Unary operators do not apply to row expressions.
if expression.GetRowLen(er.ctxStack[stkLen-1]) != 1 {
er.err = expression.ErrOperandColumns.GenWithStackByArgs(1)
return
}
er.ctxStack[stkLen-1], er.err = er.newFunction(op, &v.Type, er.ctxStack[stkLen-1])
er.ctxNameStk[stkLen-1] = types.EmptyName
}
// binaryOpToExpression pops the two operands of a binary operation from the
// context stack and pushes the resulting scalar function. Comparison
// operators go through constructBinaryOpFunction, which also supports row
// (multi-column) operands; all other operators require scalar operands.
func (er *expressionRewriter) binaryOpToExpression(v *ast.BinaryOperationExpr) {
stkLen := len(er.ctxStack)
var function expression.Expression
switch v.Op {
case opcode.EQ, opcode.NE, opcode.NullEQ, opcode.GT, opcode.GE, opcode.LT, opcode.LE:
function, er.err = er.constructBinaryOpFunction(er.ctxStack[stkLen-2], er.ctxStack[stkLen-1],
v.Op.String())
default:
lLen := expression.GetRowLen(er.ctxStack[stkLen-2])
rLen := expression.GetRowLen(er.ctxStack[stkLen-1])
if lLen != 1 || rLen != 1 {
er.err = expression.ErrOperandColumns.GenWithStackByArgs(1)
return
}
function, er.err = er.newFunction(v.Op.String(), types.NewFieldType(mysql.TypeUnspecified), er.ctxStack[stkLen-2:]...)
}
if er.err != nil {
return
}
er.ctxStackPop(2)
er.ctxStackAppend(function, types.EmptyName)
}
// notToExpression builds op(args...) and, when hasNot is set, wraps the
// result in a logical NOT. Returns nil and sets er.err on failure.
func (er *expressionRewriter) notToExpression(hasNot bool, op string, tp *types.FieldType,
args ...expression.Expression) expression.Expression {
opFunc, err := er.newFunction(op, tp, args...)
if err != nil {
er.err = err
return nil
}
if !hasNot {
return opFunc
}
opFunc, err = er.newFunction(ast.UnaryNot, tp, opFunc)
if err != nil {
er.err = err
return nil
}
return opFunc
}
// isNullToExpression rewrites `expr IS [NOT] NULL`, consuming the top stack
// entry and pushing the resulting function. Row operands are rejected.
func (er *expressionRewriter) isNullToExpression(v *ast.IsNullExpr) {
stkLen := len(er.ctxStack)
if expression.GetRowLen(er.ctxStack[stkLen-1]) != 1 {
er.err = expression.ErrOperandColumns.GenWithStackByArgs(1)
return
}
function := er.notToExpression(v.Not, ast.IsNull, &v.Type, er.ctxStack[stkLen-1])
er.ctxStackPop(1)
er.ctxStackAppend(function, types.EmptyName)
}
// inToExpression converts in expression to a scalar function. The argument lLen means the length of in list.
// The argument not means if the expression is not in. The tp stands for the expression type, which is always bool.
// a in (b, c, d) will be rewritten as `(a = b) or (a = c) or (a = d)`.
// The left operand and the lLen list items are popped from the context stack
// and replaced by a single boolean expression.
func (er *expressionRewriter) inToExpression(lLen int, not bool, tp *types.FieldType) {
stkLen := len(er.ctxStack)
l := expression.GetRowLen(er.ctxStack[stkLen-lLen-1])
// Every list item must have the same row length as the left operand.
for i := 0; i < lLen; i++ {
if l != expression.GetRowLen(er.ctxStack[stkLen-lLen+i]) {
er.err = expression.ErrOperandColumns.GenWithStackByArgs(l)
return
}
}
args := er.ctxStack[stkLen-lLen-1:]
leftFt := args[0].GetType()
leftEt, leftIsNull := leftFt.EvalType(), leftFt.Tp == mysql.TypeNull
// NULL IN (...) is always NULL; fold it to a constant.
if leftIsNull {
er.ctxStackPop(lLen + 1)
er.ctxStackAppend(expression.Null.Clone(), types.EmptyName)
return
}
allSameType := true
for _, arg := range args[1:] {
if arg.GetType().Tp != mysql.TypeNull && expression.GetAccurateCmpType(args[0], arg) != leftEt {
allSameType = false
break
}
}
var function expression.Expression
// Keep the native IN function only when all comparisons share one type and
// the operands are scalar; otherwise expand into a disjunction of equalities.
if allSameType && l == 1 && lLen > 1 {
function = er.notToExpression(not, ast.In, tp, er.ctxStack[stkLen-lLen-1:]...)
} else {
eqFunctions := make([]expression.Expression, 0, lLen)
for i := stkLen - lLen; i < stkLen; i++ {
expr, err := er.constructBinaryOpFunction(args[0], er.ctxStack[i], ast.EQ)
if err != nil {
er.err = err
return
}
eqFunctions = append(eqFunctions, expr)
}
function = expression.ComposeDNFCondition(er.sctx, eqFunctions...)
if not {
var err error
function, err = er.newFunction(ast.UnaryNot, tp, function)
if err != nil {
er.err = err
return
}
}
}
er.ctxStackPop(lLen + 1)
er.ctxStackAppend(function, types.EmptyName)
}
// cutlToExpression rewrites a PatternCutlFunc node into the disjunction
// `(lhs = query) OR (lhs = "hig")`, where lhs is the expression below the
// list on the context stack and query is the top stack entry (it must be a
// *expression.Constant). lLen is the list length and tp the boolean result
// type; the `not` argument is currently unused.
// NOTE(review): the previous header comment was copy-pasted from
// inToExpression and did not describe this function. The hard-coded "hig"
// comparison looks suspicious — confirm it is intentional and not injected.
func (er *expressionRewriter) cutlToExpression(lLen int, not bool, tp *types.FieldType) {
stkLen := len(er.ctxStack)
l := expression.GetRowLen(er.ctxStack[stkLen-lLen-1])
// Every list item must have the same row length as the left operand.
for i := 0; i < lLen; i++ {
if l != expression.GetRowLen(er.ctxStack[stkLen-lLen+i]) {
er.err = expression.ErrOperandColumns.GenWithStackByArgs(l)
return
}
}
args := er.ctxStack[stkLen-lLen-1:]
query := er.ctxStack[stkLen-1]
// Clone the query constant so mutating its value below does not affect
// the first equality built from the original.
cut1 := &expression.Constant{
Value: query.(*expression.Constant).Value,
RetType: query.(*expression.Constant).RetType,
}
eqFunctions := make([]expression.Expression, 0, lLen)
expr, err := er.constructBinaryOpFunction(args[0], query, ast.EQ)
if err != nil {
// Previously discarded; propagate so callers see the failure.
er.err = err
return
}
eqFunctions = append(eqFunctions, expr)
cut1.Value.SetString("hig")
expr2, err := er.constructBinaryOpFunction(args[0], cut1, ast.EQ)
if err != nil {
er.err = err
return
}
eqFunctions = append(eqFunctions, expr2)
function := expression.ComposeDNFCondition(er.sctx, eqFunctions...)
er.ctxStackPop(lLen + 1)
er.ctxStackAppend(function, types.EmptyName)
}
// rowToScalarFunc pops the len(v.Values) elements of a ROW(...) expression
// from the context stack and pushes a single RowFunc scalar function.
func (er *expressionRewriter) rowToScalarFunc(v *ast.RowExpr) {
stkLen := len(er.ctxStack)
length := len(v.Values)
rows := make([]expression.Expression, 0, length)
for i := stkLen - length; i < stkLen; i++ {
rows = append(rows, er.ctxStack[i])
}
er.ctxStackPop(length)
function, err := er.newFunction(ast.RowFunc, rows[0].GetType(), rows...)
if err != nil {
er.err = err
return
}
er.ctxStackAppend(function, types.EmptyName)
}
// betweenToExpression rewrites `expr [NOT] BETWEEN lexp AND rexp` as
// `(expr >= lexp) AND (expr <= rexp)` (negated when v.Not is set), consuming
// the three operands from the context stack.
func (er *expressionRewriter) betweenToExpression(v *ast.BetweenExpr) {
stkLen := len(er.ctxStack)
er.err = expression.CheckArgsNotMultiColumnRow(er.ctxStack[stkLen-3:]...)
if er.err != nil {
return
}
expr, lexp, rexp := er.ctxStack[stkLen-3], er.ctxStack[stkLen-2], er.ctxStack[stkLen-1]
var op string
var l, r expression.Expression
l, er.err = er.newFunction(ast.GE, &v.Type, expr, lexp)
if er.err == nil {
r, er.err = er.newFunction(ast.LE, &v.Type, expr, rexp)
}
op = ast.LogicAnd
if er.err != nil {
return
}
function, err := er.newFunction(op, &v.Type, l, r)
if err != nil {
er.err = err
return
}
if v.Not {
// NOT BETWEEN: wrap the conjunction in a logical NOT.
function, err = er.newFunction(ast.UnaryNot, &v.Type, function)
if err != nil {
er.err = err
return
}
}
er.ctxStackPop(3)
er.ctxStackAppend(function, types.EmptyName)
}
// rewriteFuncCall handles a FuncCallExpr and generates a customized function.
// It should return true if for the given FuncCallExpr a rewrite is performed so that original behavior is skipped.
// Otherwise it should return false to indicate (the caller) that original behavior needs to be performed.
func (er *expressionRewriter) rewriteFuncCall(v *ast.FuncCallExpr) bool {
switch v.FnName.L {
// when column is not null, ifnull on such column is not necessary.
case ast.Ifnull:
if len(v.Args) != 2 {
er.err = expression.ErrIncorrectParameterCount.GenWithStackByArgs(v.FnName.O)
return true
}
stackLen := len(er.ctxStack)
arg1 := er.ctxStack[stackLen-2]
col, isColumn := arg1.(*expression.Column)
// if expr1 is a column and column has not null flag, then we can eliminate ifnull on
// this column.
if isColumn && mysql.HasNotNullFlag(col.RetType.Flag) {
name := er.ctxNameStk[stackLen-2]
newCol := col.Clone().(*expression.Column)
// Replace both IFNULL arguments on the stack with the column itself.
er.ctxStackPop(len(v.Args))
er.ctxStackAppend(newCol, name)
return true
}
return false
default:
return false
}
}
// funcCallToExpression pops the function's arguments from the context stack
// and pushes the built scalar function, after giving rewriteFuncCall a chance
// to apply a specialized rewrite (e.g. IFNULL elimination).
func (er *expressionRewriter) funcCallToExpression(v *ast.FuncCallExpr) {
stackLen := len(er.ctxStack)
args := er.ctxStack[stackLen-len(v.Args):]
er.err = expression.CheckArgsNotMultiColumnRow(args...)
if er.err != nil {
return
}
if er.rewriteFuncCall(v) {
return
}
var function expression.Expression
er.ctxStackPop(len(v.Args))
function, er.err = er.newFunction(v.FnName.L, &v.Type, args...)
er.ctxStackAppend(function, types.EmptyName)
}
// toColumn resolves a column name against the rewriter's schema and pushes
// the matching column. An ambiguous match or a missing column sets er.err.
func (er *expressionRewriter) toColumn(v *ast.ColumnName) {
idx, err := expression.FindFieldName(er.names, v)
if err != nil {
er.err = ErrAmbiguous.GenWithStackByArgs(v.Name, clauseMsg[fieldList])
return
}
if idx >= 0 {
column := er.schema.Columns[idx]
er.ctxStackAppend(column, er.names[idx])
return
}
// Downgrade the clause used in the error message for global ORDER BY.
if er.b.curClause == globalOrderByClause {
er.b.curClause = orderByClause
}
er.err = ErrUnknownColumn.GenWithStackByArgs(v.String(), clauseMsg[er.b.curClause])
}
// evalDefaultExpr rewrites DEFAULT(col) by looking up the column's default
// value from its table definition and replacing the top stack entry with a
// constant holding that value.
func (er *expressionRewriter) evalDefaultExpr(v *ast.DefaultExpr) {
stkLen := len(er.ctxStack)
name := er.ctxNameStk[stkLen-1]
switch er.ctxStack[stkLen-1].(type) {
case *expression.Column:
default:
// Not a plain column on the stack: verify the name still resolves.
idx, err := expression.FindFieldName(er.names, v.Name)
if err != nil {
er.err = err
return
}
if er.err != nil {
return
}
if idx < 0 {
er.err = ErrUnknownColumn.GenWithStackByArgs(v.Name.OrigColName(), "field_list")
return
}
}
dbName := name.DBName
if dbName.O == "" {
// if database name is not specified, use current database name
dbName = model.NewCIStr(er.sctx.GetSessionVars().CurrentDB)
}
if name.OrigTblName.O == "" {
// column is evaluated by some expressions, for example:
// `select default(c) from (select (a+1) as c from t) as t0`
// in such case, a 'no default' error is returned
er.err = table.ErrNoDefaultValue.GenWithStackByArgs(name.ColName)
return
}
var tbl table.Table
tbl, er.err = er.b.is.TableByName(dbName, name.OrigTblName)
if er.err != nil {
return
}
colName := name.OrigColName.O
if colName == "" {
// in some cases, OrigColName is empty, use ColName instead
colName = name.ColName.O
}
col := table.FindCol(tbl.Cols(), colName)
if col == nil {
er.err = ErrUnknownColumn.GenWithStackByArgs(v.Name, "field_list")
return
}
var val *expression.Constant
// for other columns, just use what it is
val, er.err = er.b.getDefaultValue(col)
if er.err != nil {
return
}
er.ctxStackPop(1)
er.ctxStackAppend(val, types.EmptyName)
}
|
#!/bin/bash
# Provisions a Packet (Equinix Metal) bare-metal host for baremetalds CI and
# stores its public IP in $SHARED_DIR/server-ip for later steps.
# Fail fast: undefined vars, command failures, and pipeline failures all abort.
set -o nounset
set -o errexit
set -o pipefail
echo "************ baremetalds packet setup command ************"
# TODO: Remove once OpenShift CI will be upgraded to 4.2 (see https://access.redhat.com/articles/4859371)
~/fix_uid.sh
# Run Ansible playbook
# The playbook is generated inline below (heredoc) and run from $HOME.
cd
cat > packet-setup.yaml <<-EOF
- name: setup Packet host
  hosts: localhost
  gather_facts: no
  vars:
  - cluster_type: "{{ lookup('env', 'CLUSTER_TYPE') }}"
  - slackhook_path: "{{ lookup('env', 'CLUSTER_PROFILE_DIR') }}"
  vars_files:
  - "{{ lookup('env', 'CLUSTER_PROFILE_DIR') }}/.packet-kni-vars"
  tasks:
  - name: check cluster type
    fail:
      msg: "Unsupported CLUSTER_TYPE '{{ cluster_type }}'"
    when: cluster_type != "packet"
  - name: create Packet host with error handling
    block:
    - name: create Packet host {{ packet_hostname }}
      packet_device:
        auth_token: "{{ packet_auth_token }}"
        project_id: "{{ packet_project_id }}"
        hostnames: "{{ packet_hostname }}"
        operating_system: centos_8
        plan: m2.xlarge.x86
        facility: any
        wait_for_public_IPv: 4
        state: active
      register: hosts
      no_log: true
    - name: wait for ssh
      wait_for:
        delay: 5
        host: "{{ hosts.devices[0].public_ipv4 }}"
        port: 22
        state: started
        timeout: 900
    rescue:
    - name: Send notification message via Slack in case of failure
      slack:
        token: "{{ 'T027F3GAJ/B011TAG710V/' + lookup('file', slackhook_path + '/.slackhook') }}"
        msg: "Packet setup failed. Error msg: {{ ansible_failed_result.msg }}"
        username: "Ansible on {{ packet_hostname }}"
        channel: "#team-edge-installer"
        color: warning
        icon_emoji: ":failed:"
    - name: fail the play
      fail:
        msg: "ERROR: Packet setup failed."
  - name: save Packet IP
    local_action: copy content="{{ hosts.devices[0].public_ipv4 }}" dest="{{ lookup('env', 'SHARED_DIR') }}/server-ip"
EOF
# Host name is derived from CI job identifiers so concurrent jobs don't clash.
ansible-playbook packet-setup.yaml -e "packet_hostname=ipi-${NAMESPACE}-${JOB_NAME_HASH}-${BUILD_ID}"
|
<reponame>xNombre/TowerOfHanoi
// <NAME>
// PWr 2020
#include "hanoi.h"
#include "hanoi_alg.h"
#include <stdio.h>
/*
 * Interactive Tower of Hanoi driver.
 * Reads the element and rod counts, then runs either the automatic solver
 * (requires at least 3 rods) or a manual move/print/exit loop.
 * Returns 0 on success, a negative error code otherwise.
 */
int main()
{
    HanoiTower_t *tower = NULL;
    unsigned element_count, rods, a, b;
    int ret;
    char l;

    printf("Element count: ");
    /* Fix: the format was "%ud", which scans an unsigned and then tries to
     * match a literal 'd' in the input; "%u" is the correct specifier. */
    scanf("%u", &element_count);
    if (element_count == 0) {
        ret = -5;
        goto err;
    }

    printf("Rods count: ");
    scanf("%u", &rods);
    if (rods == 0) {
        ret = -5;
        goto err;
    }

    ret = InitializeTower(&tower, element_count, rods);
    if (ret)
        goto err;

re1:
    printf("Choose mode auto/manual: (a/m)\n");
    scanf(" %c", &l);
    if (l == 'a') {
        if (rods >= 3) {
            unsigned long moves = 0;
            DrawHanoi(tower);
            ret = HanoiAlg(tower, &moves);
            if (ret)
                goto err;
            printf("Completed with %lu moves!\n", moves);
            FreeTower(tower);
            return 0;
        }
        /* Previously this silently re-prompted; tell the user why. */
        printf("Automatic mode requires at least 3 rods.\n");
    } else if (l == 'm') {
re2:
        printf("Choose action move/print/exit: m/p/e\n");
        scanf(" %c", &l);
        switch (l) {
        case 'm':
            printf("Type src and dst rod: ");
            scanf("%u %u", &a, &b);
            /* Rods are entered 1-based; reject out-of-range indices. */
            if (a > rods || b > rods) {
                ret = -4;
                goto err;
            }
            ret = MoveElement(tower->rod[--a], tower->rod[--b]);
            if (ret)
                goto err;
            break;
        case 'p':
            DrawHanoi(tower);
            break;
        case 'e':
            FreeTower(tower);
            return 0;
        }
        goto re2;
    }
    goto re1;

err:
    printf("Error: %d", ret);
    return ret;
}
|
package com.example.elm.main_menu.ui.prediksi;
import static android.app.Activity.RESULT_OK;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.Base64;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.AnyRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.Observer;
import androidx.lifecycle.ViewModelProvider;
import com.android.volley.AuthFailureError;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import com.example.elm.R;
import com.example.elm.databinding.FragmentDashboardBinding;
import com.example.elm.tools.Base64Send;
import com.google.android.material.snackbar.Snackbar;
import com.theartofdev.edmodo.cropper.CropImage;
import com.theartofdev.edmodo.cropper.CropImageView;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * Fragment that lets the user crop two photos (fish eye and fish body),
 * uploads them as base64 strings to a prediction HTTP endpoint, and shows
 * the classification result in a dialog.
 *
 * Fixes applied: onActivityResult now calls super.onActivityResult(...) so
 * other listeners (e.g. nested fragments) still receive results, and a stray
 * empty statement (";;") was removed.
 */
public class PrediksiFragment extends Fragment {
    private PrediksiViewModel prediksiViewModel;
    private FragmentDashboardBinding binding;
    // Image previews for the eye ("head") and body photos.
    ImageView head, body;
    Uri uHead, uBody;
    // True while the crop activity was started for the head image.
    Boolean headActivity;
    Bitmap bHead, bBody;
    Button prediksi;
    String hasilMata, hasilBody;
    ProgressDialog progress;
    // Set once each image has been successfully picked; both gate the button.
    Boolean alowHead, alowBody;
    AlertDialog.Builder dialog;
    LayoutInflater inflater;
    View dialogView;

    public View onCreateView(@NonNull LayoutInflater inflater,
                             ViewGroup container, Bundle savedInstanceState) {
        Context context = getContext();
        alowHead = false;
        alowBody = false;
        View root = inflater.inflate(R.layout.fragment_prediksi, container, false);
        head = root.findViewById(R.id.p_mata_ikan_img);
        body = root.findViewById(R.id.p_tipe_ikan_img);
        headActivity = false;
        progress = new ProgressDialog(context);
        prediksi = root.findViewById(R.id.p_prediksi_start);
        checkImg();
        body.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                CropImage.activity()
                        .setGuidelines(CropImageView.Guidelines.ON).start(context, PrediksiFragment.this);
            }
        });
        head.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                headActivity = true;
                CropImage.activity()
                        .setGuidelines(CropImageView.Guidelines.ON).start(context, PrediksiFragment.this);
            }
        });
        prediksi.setOnClickListener(new View.OnClickListener() {
            @SuppressLint("StaticFieldLeak")
            @Override
            public void onClick(View view) {
                progress.setTitle("Loading !!(1/4)");
                progress.setMessage("Mengupload gambar...");
                progress.setCancelable(false); // disable dismiss by tapping outside of the dialog
                progress.show();
                String mmata, mbody;
                mmata = BitmapToBase64(bHead);
                mbody = BitmapToBase64(bBody);
                progress.setTitle("Loading !!");
                progress.setMessage("Mengupload gambar...");
                RequestQueue queue = Volley.newRequestQueue(getContext());
                // NOTE(review): hard-coded LAN address — move to config before release.
                String url = "http://192.168.43.90:5000/mata/";
                StringRequest stringRequest = new StringRequest(Request.Method.POST, url,
                        new Response.Listener<String>() {
                            @Override
                            public void onResponse(String response) {
                                progress.setMessage("Memproses gambar...");
                                JSONObject jsonObject = null;
                                try {
                                    // Expected payload: {"mata": ..., "body": ..., "status": ...}
                                    jsonObject = new JSONObject(response);
                                    hasilMata = jsonObject.getString("mata");
                                    hasilBody = jsonObject.getString("body");
                                    int status = Integer.valueOf(jsonObject.getString("status"));
                                    Log.w("Head", hasilMata);
                                    Log.w("Body", hasilBody);
                                    DialogResult(hasilBody, hasilMata, status);
                                } catch (JSONException e) {
                                    e.printStackTrace();
                                }
                                progress.dismiss();
                            }
                        }, new Response.ErrorListener() {
                            @Override
                            public void onErrorResponse(VolleyError error) {
                                progress.dismiss();
                                Log.w("Volley Response Error", String.valueOf(error));
                            }
                        }
                ) {
                    @Nullable
                    @Override
                    protected Map<String, String> getParams() throws AuthFailureError {
                        Map<String, String> params = new HashMap<String, String>();
                        params.put("mata", mmata);
                        params.put("body", mbody);
                        return params;
                    }

                    @Override
                    public Map<String, String> getHeaders() throws AuthFailureError {
                        Map<String, String> params = new HashMap<String, String>();
                        params.put("Content-Type", "application/x-www-form-urlencoded");
                        return params;
                    }
                };
                // Add the request to the RequestQueue.
                queue.add(stringRequest);
            }
        });
        return root;
    }

    /** Enables the predict button only after both images were chosen. */
    public void checkImg() {
        if (alowBody && alowHead) {
            prediksi.setEnabled(true);
        } else {
            prediksi.setEnabled(false);
        }
    }

    /** Shows the prediction result; status 1 gets the "blocked" icon. */
    private void DialogResult(String ikan, String kondisi, Integer status) {
        dialog = new AlertDialog.Builder(getContext());
        inflater = getLayoutInflater();
        dialogView = inflater.inflate(R.layout.prediksi_result, null);
        dialog.setView(dialogView);
        dialog.setCancelable(true);
        if (status == 1) {
            dialog.setIcon(R.drawable.ic_baseline_block_24);
        } else {
            dialog.setIcon(R.drawable.ic_baseline_done_24);
        }
        dialog.setTitle("Hasil Prediksi");
        TextView t_ikan = (TextView) dialogView.findViewById(R.id.prediksi_r_t1);
        TextView t_kondisi = (TextView) dialogView.findViewById(R.id.prediksi_r_t2);
        t_ikan.setText("Jenis ikan : " + ikan);
        t_kondisi.setText("Kondisi ikan : " + kondisi);
        dialog.setNeutralButton("OK", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                dialog.dismiss();
            }
        });
        dialog.show();
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        // Fix: propagate to the superclass so other result listeners run.
        super.onActivityResult(requestCode, resultCode, data);
        if (resultCode == RESULT_OK) {
            if (requestCode == CropImage.CROP_IMAGE_ACTIVITY_REQUEST_CODE) {
                if (headActivity) {
                    CropImage.ActivityResult result = CropImage.getActivityResult(data);
                    Uri uri = result.getUri();
                    head.setImageURI(uri);
                    uHead = uri;
                    Log.w("Uri Head", String.valueOf(uri));
                    headActivity = false;
                    try {
                        bHead = MediaStore.Images.Media.getBitmap(getContext().getContentResolver(), uri);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    alowHead = true;
                    checkImg();
                } else {
                    CropImage.ActivityResult result = CropImage.getActivityResult(data);
                    Uri uri = result.getUri();
                    body.setImageURI(uri);
                    uBody = uri;
                    Log.w("Uri Body", String.valueOf(uri));
                    headActivity = false;
                    try {
                        bBody = MediaStore.Images.Media.getBitmap(getContext().getContentResolver(), uri);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    alowBody = true;
                    checkImg();
                }
            }
        } else {
            Toast.makeText(getContext(), "Pengambilan gambar dibatalkan", Toast.LENGTH_SHORT).show();
        }
    }

    /** Encodes a bitmap as a base64 JPEG string for form upload. */
    public String BitmapToBase64(Bitmap bb) {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        bb.compress(Bitmap.CompressFormat.JPEG, 100, byteArrayOutputStream);
        byte[] byteArray = byteArrayOutputStream.toByteArray();
        String encoded = Base64.encodeToString(byteArray, Base64.DEFAULT);
        return encoded;
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        binding = null;
    }
}
|
#!/usr/bin/env bats
load test_helper
@test "tags.category" {
vcsim_env
local output
# Empty listings succeed; a lookup of a missing category must fail.
run govc tags.category.ls
assert_success # no categories defined yet
run govc tags.category.info
assert_success # no categories defined yet
run govc tags.category.info enoent
assert_failure # category does not exist
category_name=$(new_id)
run govc tags.category.create -d "Cat in the hat" -m "$category_name"
assert_success
category_id="$output"
run govc tags.category.ls
assert_success
run govc tags.category.create -m "$category_name"
assert_failure # already exists
run govc tags.category.ls
assert_line "$category_name"
# JSON output must carry the same id that create returned.
id=$(govc tags.category.ls -json | jq -r '.[].id')
assert_matches "$id" "$category_id"
run govc tags.category.info "$category_name"
assert_success
# Rename + re-describe, then verify the old name is gone.
update_name="${category_name}-update"
run govc tags.category.update -n "$update_name" -d "Green eggs and ham" "$category_id"
assert_success
govc tags.category.info "$update_name" | grep -c eggs
run govc tags.category.info "$category_name"
assert_failure # does not exist
run govc tags.category.rm "$category_name"
assert_failure # should fail with old name
run govc tags.category.rm "$update_name"
assert_success
}
@test "tags" {
vcsim_env
local output
run govc tags.ls
assert_success # no tags defined yet
# TLS verification: fails without the CA, succeeds when it is supplied.
run govc tags.ls -k=false
assert_failure
run govc tags.ls -k=false -tls-ca-certs <(govc about.cert -show)
assert_success
run govc tags.info
assert_success # no tags defined yet
run govc tags.info enoent
assert_failure # specific tag does not exist
category_name=$(new_id)
run govc tags.category.create -m "$category_name"
assert_success
category="$output"
test_name="test_name"
run govc tags.create -c "$category" $test_name
assert_success
tag_id="$output"
govc tags.ls | grep $test_name
id=$(govc tags.ls -json | jq -r '.[].id')
assert_matches "$id" "$tag_id"
# Update by id, then verify name and description changed.
update_name="${test_name}-update"
run govc tags.update -d "Updated tag" -n "$update_name" "$tag_id"
assert_success
govc tags.info
govc tags.info -C=false
govc tags.info "$update_name" | grep Updated
# Creating by category NAME also works; a bogus category must fail.
run govc tags.create -c "$category_name" "$(new_id)"
assert_success
run govc tags.create -c enoent "$(new_id)"
assert_failure # category name does not exist
run govc tags.info enoent
assert_failure # does not exist
}
@test "tags.association" {
vcsim_env
local lines
category_name=$(new_id)
run govc tags.category.create -m "$category_name"
assert_success
category="$output"
run govc tags.create -c "$category" "$(new_id)"
assert_success
tag=$output
tag_name=$(govc tags.ls -json | jq -r ".[] | select(.id == \"$tag\") | .name")
# Pick any host object to attach the tag to.
run govc find . -type h
object=${lines[0]}
run govc tags.attach "$tag" "$object"
assert_success
run govc tags.attached.ls "$tag_name"
assert_success
result=$(govc tags.attached.ls -r "$object")
assert_matches "$result" "$tag_name"
result=$(govc tags.attached.ls -r -json "$object")
assert_matches "$tag_name" "$result"
# A tag with attachments cannot be removed until detached.
run govc tags.rm "$tag"
assert_failure # tags still attached
run govc tags.detach "$tag" "$object"
assert_success
run govc tags.attach "$tag_name" "$object"
assert_success # attach using name instead of ID
run govc tags.rm "$tag"
assert_failure # tags still attached
run govc tags.detach -c enoent "$tag_name" "$object"
assert_failure # category does not exist
run govc tags.detach -c "$category_name" "$tag_name" "$object"
assert_success # detach using name instead of ID
run govc tags.rm -c "$category_name" "$tag"
assert_success
run govc tags.category.rm "$category"
assert_success
}
@test "tags.example" {
# Smoke test mirroring the documented k8s region/zone tagging example.
vcsim_env -dc 2 -cluster 2
govc tags.category.create -d "Kubernetes region" k8s-region
for region in EMEA US ; do
govc tags.create -d "Kubernetes region $region" -c k8s-region k8s-region-$region
done
# One region tag per datacenter.
govc tags.attach k8s-region-EMEA /DC0
govc tags.attach k8s-region-US /DC1
govc tags.category.create -d "Kubernetes zone" k8s-zone
for zone in DE CA WA ; do
govc tags.create -d "Kubernetes zone $zone" -c k8s-zone k8s-zone-$zone
done
# Zone tags per cluster.
govc tags.attach k8s-zone-DE /DC0/host/DC0_C0
govc tags.attach k8s-zone-DE /DC0/host/DC0_C1
govc tags.attach k8s-zone-CA /DC1/host/DC1_C0
govc tags.attach k8s-zone-WA /DC1/host/DC1_C1
govc tags.category.ls
govc tags.category.info
govc tags.ls
govc tags.ls -c k8s-region
govc tags.ls -c k8s-zone
govc tags.info
govc tags.attached.ls k8s-region-US
govc tags.attached.ls k8s-zone-CA
govc tags.attached.ls -r /DC1
govc tags.attached.ls -r /DC1/host/DC1_C0
}
|
<filename>src/BookShelf.js<gh_stars>0
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { Link } from 'react-router-dom';
import BooksGrid from './BooksGrid';
/**
* @class
* @classdesc Component that renders all reading shelves
* @prop {array} books - List of all books
* @prop {func} onMoveShelf - Handler invoked when a book is moved to another shelf
* @prop {array} shelves - List of all shelves to display
*/
class BooksShelf extends Component {
static propTypes = {
books: PropTypes.array.isRequired,
onMoveShelf: PropTypes.func.isRequired,
shelves: PropTypes.array.isRequired
};
/**
* @description Returns the books belonging to a shelf
* @param {string} shelf - Shelf id
* @returns {array} Books of the selected shelf
*/
getBooksForShelf = (shelf) => {
return this.props.books.filter((book) => book.shelf === shelf);
}
render() {
const { onMoveShelf, shelves } = this.props;
return (
<div className="list-books">
<div className="list-books-title">
<h1>MyReads</h1>
</div>
<div className="list-books-content">
<div>
{shelves.map((shelf) => (
<div key={shelf.id} className='bookshelf'>
<h2 className='bookshelf-title'>{shelf.title}</h2>
<div className='bookshelf-books'>
<BooksGrid
books={this.getBooksForShelf(shelf.id)}
onMoveShelf={onMoveShelf}
/>
</div>
</div>
))}
</div>
</div>
<div className="open-search">
<Link to="/search">Add a book</Link>
</div>
</div>
);
}
}
|
#!/bin/bash
# Release helper: lint the pod, commit and tag the version given as $1,
# then push the tag and the podspec to CocoaPods trunk.
# Fix: previously nothing stopped on failure, so a failed lint or commit
# would still tag and push; also $1 was never validated.
set -euo pipefail

if [ "$#" -lt 1 ]; then
    echo "usage: $0 <version>" >&2
    exit 1
fi

pod lib lint
git add -A && git commit -m "Release $1"
git tag "$1"
git push --tags
pod trunk push PayTheory.podspec
|
<filename>src/components/Content/index.module.css.d.ts
// Typed exports for the Content component's CSS module class names.
export const contentRow: string
export const contentPageHeader: string
|
<filename>src/renderer/renderer.js
import ReactDMX from "./reconciler";
import { createElement } from "./createElement";
import dmx from "../util/dmx";
/**
 * Renders a React element tree into DMX output.
 * Reconciles the element into a ROOT container, then pushes each rendered
 * fixture's channel values to the DMX driver.
 * @param {object} element - React element to render
 */
async function render(element) {
// Create root container instance
const container = createElement("ROOT");
// Returns the current fiber (flushed fiber)
const node = ReactDMX.createContainer(container);
// Schedules a top level update with current fiber and a priority level (depending upon the context)
ReactDMX.updateContainer(element, node, null);
// Parse the input component and return the output
const output = container.render();
// Push each fixture's channel state to the DMX universe.
output.forEach(u => {
dmx.update(u.name, u.channels);
});
}
export default render;
|
<gh_stars>0
$(document).ready(function() {
    // Fix: the inner $(document).ready wrapper was redundant — the document
    // is already ready inside this handler, so bind directly.
    $('.sub_menu_header').click(function() {
        $('.sub_menu_header').toggleClass('open');
    });
    $('.pay_close').click(function() {
        $('body').removeClass('new_fix');
    });
    // Toggle the floating-label state class as the user types.
    $('.field__input').on('input', function() {
        var $field = $(this).closest('.field');
        if (this.value) {
            $field.addClass('field--not-empty');
        }
        else {
            $field.removeClass('field--not-empty');
        }
    });
});
// NOTE(review): $http is assigned without var/let — it becomes a global.
// Grab Angular's $http service outside of any app module.
$http = angular.injector(["ng"]).get("$http");
// Persist the chosen currency in the server session, then reload.
$('#js-currency-select').on('change', function(){
currency_code = $(this).val();
$http.post(APP_URL+'/set_session', {currency: currency_code}).then(function(response){
location.reload();
});
});
// Persist the chosen language in the server session, then reload.
$('#js-language-select').on('change', function(){
language_code = $(this).val();
$http.post(APP_URL+'/set_session', {language: language_code
}).then(function(response){
location.reload();
});
});
//Payout Preferences
// Payout Preferences controller — currently empty placeholder.
app.controller('payout_preferences', ['$scope', '$http', function($scope, $http) {
}]);
// Help page controller: drill-down navigation tree plus autocomplete search.
app.controller('help', ['$scope', '$http', function($scope, $http) {
// Descend one level in the help nav tree and push a breadcrumb entry.
$('.help-nav .navtree-list .navtree-next').click(function() {
var id = $(this).data('id');
var name = $(this).data('name');
$('.help-nav #navtree').addClass('active');
$('.help-nav #navtree').removeClass('not-active');
$('.help-nav .subnav-list li:first-child a').attr('aria-selected', 'false');
$('.help-nav .subnav-list').append('<li> <a class="subnav-item" href="#" data-node-id="0" aria-selected="true"> ' + name + ' </a> </li>');
$('.help-nav #navtree-'+id).css({
'display': 'block'
});
});
// Ascend one level and drop the last breadcrumb entry.
$('.help-nav .navtree-list .navtree-back').click(function() {
var id = $(this).data('id');
var name = $(this).data('name');
$('.help-nav #navtree').removeClass('active');
$('.help-nav #navtree').addClass('not-active');
$('.help-nav .subnav-list li:first-child a').attr('aria-selected', 'true');
$('.help-nav .subnav-list li').last().remove();
$('.help-nav #navtree-' + id).css({
'display': 'none'
});
});
// jQuery UI autocomplete backed by the server-side help search endpoint.
$('#help_search').autocomplete({
source: function(request, response) {
$.ajax({
url: APP_URL + "/ajax_help_search",
type: "GET",
dataType: "json",
data: {
term: request.term
},
success: function(data) {
response(data);
$(this).removeClass('ui-autocomplete-loading');
}
});
},
search: function() {
$(this).addClass('loading');
},
open: function() {
$(this).removeClass('loading');
}
})
// Custom item renderer: id 0 marks a non-clickable "no result" row.
.autocomplete("instance")._renderItem = function(ul, item) {
if (item.id != 0) {
$('#help_search').removeClass('ui-autocomplete-loading');
return $("<li>")
.append("<a href='" + APP_URL + "/help/article/" + item.id + "/" + item.question + "' class='article-link article-link-panel link-reset'><div class='hover-item__content'><div class='col-middle-alt article-link-left'><i class='icon icon-light-gray icon-size-2 article-link-icon icon-description'></i></div><div class='col-middle-alt article-link-right'>" + item.value + "</div></div></a>")
.appendTo(ul);
}
else {
$('#help_search').removeClass('ui-autocomplete-loading');
return $("<li style='pointer-events: none;'>")
.append("<span class='article-link article-link-panel link-reset'><div class='hover-item__content'><div class='col-middle-alt article-link-left'><i class='icon icon-light-gray icon-size-2 article-link-icon icon-description'></i></div><div class='col-middle-alt article-link-right'>" + item.value + "</div></div></span>")
.appendTo(ul);
}
};
}]);
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2015:1439
#
# Security announcement date: 2015-07-22 06:38:01 UTC
# Script generation date: 2017-01-01 21:16:28 UTC
#
# Operating System: Red Hat 6
# Architecture: i386
#
# Vulnerable packages fix on version:
# - wpa_supplicant.i686:0.7.3-6.el6
# - wpa_supplicant-debuginfo.i686:0.7.3-6.el6
#
# Last versions recommanded by security team:
# - wpa_supplicant.i686:0.7.3-6.el6
# - wpa_supplicant-debuginfo.i686:0.7.3-6.el6
#
# CVE List:
# - CVE-2015-4142
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# FIX: the original used "name.arch-version" (e.g. wpa_supplicant.i686-0.7.3),
# which yum does not accept as a package spec. Use the full
# name-version-release.arch form matching the versions listed above.
sudo yum install wpa_supplicant-0.7.3-6.el6.i686 -y
sudo yum install wpa_supplicant-debuginfo-0.7.3-6.el6.i686 -y
|
"""
Print out a binary tree's level-order traversal
"""
class Node:
    """A binary tree node: a payload value plus optional left/right children."""

    def __init__(self, data):
        self.data = data               # value stored at this node
        self.left = self.right = None  # children start empty (Node or None)
def levelOrderTraversal(root):
    """Print the tree's node values in level order (BFS), space-separated.

    Args:
        root: root node of the tree (any object with .data/.left/.right),
            or None, in which case nothing is printed.
    """
    # Local import keeps the module's top-level imports untouched.
    # deque.popleft() is O(1); the original list.pop(0) was O(n) per dequeue.
    from collections import deque
    if root is None:
        return
    queue = deque([root])
    while queue:
        node = queue.popleft()
        print(node.data, end=" ")
        if node.left is not None:
            queue.append(node.left)
        if node.right is not None:
            queue.append(node.right)
if __name__ == '__main__':
    # Build a small sample tree:
    #         1
    #        / \
    #       2   3
    #      / \
    #     4   5
    # Expected output: 1 2 3 4 5
    root = Node(1)
    root.left = Node(2)
    root.right = Node(3)
    root.left.left = Node(4)
    root.left.right = Node(5)
    print("Level order traversal of binary tree is -")
    levelOrderTraversal(root)
|
package arkres
import (
"context"
"regexp"
"testing"
"time"
"github.com/flandiayingman/arkwaifu/internal/pkg/test"
)
var (
	// filterRegexp limits fetched resources to avg images/backgrounds and
	// the main gamedata tables, keeping the test download small.
	filterRegexp = regexp.MustCompile("^(avg/(imgs|bg))|(gamedata/(excel|levels/obt/main))")
)
// TestGet downloads resource version 21-12-31-15-44-39-814f71 into a temp
// directory (filtered by filterRegexp) and pins the result by comparing the
// directory hash against a golden value.
// NOTE(review): performs real network I/O, bounded only by a 5-minute timeout.
func TestGet(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()
	dir := t.TempDir()
	err := Get(ctx, "21-12-31-15-44-39-814f71", dir, filterRegexp)
	if err != nil {
		t.Fatalf("%+v", err)
	}
	// Golden hash of the expected directory contents; any upstream change
	// to the resource set will fail this comparison.
	expected := "h1:lrJBULywG33Q3Rfu7i1f/KjojSnvG+HsfkW5wnK0CLk="
	actual := test.HashDir(dir)
	if expected != actual {
		t.Fatalf("expected: %v; actual: %v", expected, actual)
	}
}
// TestGetUpdate verifies incremental updates: a full Get of the newer
// version must be a superset of an older Get followed by GetIncrementally
// to the newer version.
// NOTE(review): performs real network I/O, bounded only by a 5-minute timeout.
func TestGetUpdate(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()
	// Reference: full download of the newer version.
	dirGet := t.TempDir()
	err := Get(ctx, "21-12-31-15-44-39-814f71", dirGet, filterRegexp)
	if err != nil {
		t.Fatalf("%+v", err)
	}
	// Candidate: older version, then incrementally updated to the newer one.
	dirUpdate := t.TempDir()
	err = Get(ctx, "21-12-01-03-53-27-2e01ea", dirUpdate, filterRegexp)
	if err != nil {
		t.Fatalf("%+v", err)
	}
	err = GetIncrementally(ctx, "21-12-01-03-53-27-2e01ea", "21-12-31-15-44-39-814f71", dirUpdate, filterRegexp)
	if err != nil {
		t.Fatalf("%+v", err)
	}
	// Every file from the full download must be present in the updated dir.
	err = test.AssertAllIn(dirGet, dirUpdate)
	if err != nil {
		t.Fatalf("%+v", err)
	}
}
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const utils_1 = require("../utils");
const events_1 = require("events");
const lodash_1 = require("../utils/lodash");
const Redis = require('../redis');
const debug = require('../utils/debug')('ioredis:cluster:connectionPool');
/**
 * Pool of Redis connections for a cluster, indexed by "host:port" key and
 * partitioned into master and slave roles.
 */
class ConnectionPool extends events_1.EventEmitter {
    /**
     * @param {object} redisOptions - base options merged into every node connection
     */
    constructor(redisOptions) {
        super();
        this.redisOptions = redisOptions;
        // master + slave = all
        this.nodes = {
            all: {},
            master: {},
            slave: {}
        };
        // Remembers explicitly-specified per-node options so later lookups of
        // the same key reuse them.
        this.specifiedOptions = {};
    }
    /**
     * Find or create a connection to the node
     *
     * @param {IRedisOptions} node
     * @param {boolean} [readOnly=false]
     * @returns {*}
     * @memberof ConnectionPool
     */
    findOrCreate(node, readOnly = false) {
        setKey(node);
        readOnly = Boolean(readOnly);
        if (this.specifiedOptions[node.key]) {
            Object.assign(node, this.specifiedOptions[node.key]);
        }
        else {
            this.specifiedOptions[node.key] = node;
        }
        let redis;
        if (this.nodes.all[node.key]) {
            redis = this.nodes.all[node.key];
            if (redis.options.readOnly !== readOnly) {
                // Role changed: issue READONLY/READWRITE on the connection and
                // move the node between the master and slave maps.
                redis.options.readOnly = readOnly;
                debug('Change role of %s to %s', node.key, readOnly ? 'slave' : 'master');
                redis[readOnly ? 'readonly' : 'readwrite']().catch(lodash_1.noop);
                if (readOnly) {
                    delete this.nodes.master[node.key];
                    this.nodes.slave[node.key] = redis;
                }
                else {
                    delete this.nodes.slave[node.key];
                    this.nodes.master[node.key] = redis;
                }
            }
        }
        else {
            debug('Connecting to %s as %s', node.key, readOnly ? 'slave' : 'master');
            redis = new Redis(lodash_1.defaults({
                // Never try to reconnect when a node is lose,
                // instead, waiting for a `MOVED` error and
                // fetch the slots again.
                retryStrategy: null,
                // Offline queue should be enabled so that
                // we don't need to wait for the `ready` event
                // before sending commands to the node.
                enableOfflineQueue: true,
                readOnly: readOnly
            }, node, this.redisOptions, { lazyConnect: true }));
            this.nodes.all[node.key] = redis;
            this.nodes[readOnly ? 'slave' : 'master'][node.key] = redis;
            redis.once('end', () => {
                delete this.nodes.all[node.key];
                delete this.nodes.master[node.key];
                delete this.nodes.slave[node.key];
                this.emit('-node', redis);
                // Emit 'drain' once the last node is gone.
                if (!Object.keys(this.nodes.all).length) {
                    this.emit('drain');
                }
            });
            this.emit('+node', redis);
            // BUG FIX: the original used a plain `function`, so `this` inside
            // the listener was the redis instance — 'nodeError' was emitted on
            // the wrong emitter and pool listeners never received it. An arrow
            // function keeps `this` bound to the pool.
            redis.on('error', (error) => {
                this.emit('nodeError', error);
            });
        }
        return redis;
    }
    /**
     * Reset the pool with a set of nodes.
     * The old node will be removed.
     *
     * @param {(Array<string | number | object>)} nodes
     * @memberof ConnectionPool
     */
    reset(nodes) {
        const newNodes = {};
        nodes.forEach((node) => {
            // Normalize each entry (object / "host:port" string / port number)
            // into an options object with a key.
            const options = {};
            if (typeof node === 'object') {
                lodash_1.defaults(options, node);
            }
            else if (typeof node === 'string') {
                lodash_1.defaults(options, utils_1.parseURL(node));
            }
            else if (typeof node === 'number') {
                options.port = node;
            }
            else {
                throw new Error('Invalid argument ' + node);
            }
            if (typeof options.port === 'string') {
                options.port = parseInt(options.port, 10);
            }
            // Cluster connections always use db 0.
            delete options.db;
            setKey(options);
            newNodes[options.key] = options;
        }, this);
        // Disconnect nodes that are no longer part of the cluster...
        Object.keys(this.nodes.all).forEach((key) => {
            if (!newNodes[key]) {
                debug('Disconnect %s because the node does not hold any slot', key);
                this.nodes.all[key].disconnect();
            }
        });
        // ...and (re)create connections for the current node set.
        Object.keys(newNodes).forEach((key) => {
            const node = newNodes[key];
            this.findOrCreate(node, node.readOnly);
        });
    }
}
exports.default = ConnectionPool;
/**
* Set key property
*
* @private
*/
/**
 * Normalize a node config in place: fill in default port/host and derive the
 * unique "host:port" key used to index the pool.
 *
 * @private
 * @param {object} [node] - node options; mutated and returned (a fresh object
 *     is created when omitted)
 * @returns {object} the node with port, host and key populated
 */
function setKey(node) {
    const target = node || {};
    if (!target.port) target.port = 6379;
    if (!target.host) target.host = '127.0.0.1';
    if (!target.key) target.key = target.host + ':' + target.port;
    return target;
}
|
-- For each department, report the highest salary among its employees.
-- Join path: departments → employees (department_id) → salaries (emp_no).
SELECT
    d.name AS 'Department',
    MAX(s.salary) AS 'Highest Salary'
FROM departments d
INNER JOIN employees e
    ON d.id = e.department_id
INNER JOIN salaries s
    ON e.emp_no = s.emp_no
GROUP BY d.name;
|
'use babel';
'use strict';
import { toggleClass } from './helpers';
import { toggleBlendTreeView } from './tree-view-settings';
// Apply every saved theme setting once at activation: each toggleClass call
// mirrors a boolean config flag onto a workspace-level CSS class.
function init() {
    toggleClass(atom.config.get('learn-ide-material-ui.tabs.tintedTabBar'), 'tinted-tab-bar');
    toggleClass(atom.config.get('learn-ide-material-ui.tabs.compactTabs'), 'compact-tab-bar');
    toggleClass(atom.config.get('learn-ide-material-ui.tabs.noTabMinWidth'), 'no-tab-min-width');
    toggleClass(atom.config.get('learn-ide-material-ui.ui.panelShadows'), 'panel-shadows');
    toggleClass(atom.config.get('learn-ide-material-ui.ui.panelContrast'), 'panel-contrast');
    toggleClass(atom.config.get('learn-ide-material-ui.ui.animations'), 'use-animations');
    toggleClass(atom.config.get('learn-ide-material-ui.treeView.compactList'), 'compact-tree-view');
    toggleClass(atom.config.get('learn-ide-material-ui.treeView.blendTabs'), 'blend-tree-view');
    toggleBlendTreeView(atom.config.get('learn-ide-material-ui.treeView.blendTabs'));
    // Root font size drives rem-based sizing across the theme.
    document.querySelector(':root').style.fontSize = `${atom.config.get('learn-ide-material-ui.fonts.fontSize')}px`;
}
// Check if there are custom icons packages
// Tag the workspace root with 'has-custom-icons' when any known icon pack is
// among the currently active packages; otherwise the class stays removed.
function checkPacks() {
    var workspaceRoot = document.querySelector('atom-workspace');
    var iconPackNames = ['file-icons', 'file-type-icons', 'seti-icons', 'envygeeks-file-icons'];
    var hasIconPack = atom.packages.getActivePackages()
        .some(function(pack) { return iconPackNames.indexOf(pack.name) >= 0; });
    // Reset first so a pack that was just deactivated clears the class.
    workspaceRoot.classList.remove('has-custom-icons');
    if (hasIconPack) {
        workspaceRoot.classList.add('has-custom-icons');
    }
}
// Register config-change listeners that keep the theme's CSS classes in sync,
// then apply the current settings once via init().
function apply() {
    // Re-scan for icon packs whenever any package is (de)activated.
    atom.packages.onDidActivatePackage(() => checkPacks());
    atom.packages.onDidDeactivatePackage(() => checkPacks());
    init();
    // Font Size Settings
    atom.config.onDidChange('learn-ide-material-ui.fonts.fontSize', (value) => {
        var fontSize = Math.round(value.newValue);
        document.querySelector(':root').style.fontSize = `${fontSize}px`;
    });
    // className-toggling Settings
    atom.config.onDidChange('learn-ide-material-ui.tabs.tintedTabBar', (value) => toggleClass(value.newValue, 'tinted-tab-bar'));
    atom.config.onDidChange('learn-ide-material-ui.tabs.compactTabs', (value) => toggleClass(value.newValue, 'compact-tab-bar'));
    atom.config.onDidChange('learn-ide-material-ui.tabs.noTabMinWidth', (value) => toggleClass(value.newValue, 'no-tab-min-width'));
    atom.config.onDidChange('learn-ide-material-ui.ui.animations', (value) => toggleClass(value.newValue, 'use-animations'));
    atom.config.onDidChange('learn-ide-material-ui.ui.panelShadows', (value) => toggleClass(value.newValue, 'panel-shadows'));
    atom.config.onDidChange('learn-ide-material-ui.ui.panelContrast', (value) => toggleClass(value.newValue, 'panel-contrast'));
    atom.config.onDidChange('learn-ide-material-ui.treeView.compactList', (value) => toggleClass(value.newValue, 'compact-tree-view'));
    // Tab blending — FIX: the original registered TWO separate listeners for
    // this same config key (one calling toggleBlendTreeView, one handling the
    // tinted-tab-bar dependency and the CSS class); merged into one handler.
    atom.config.onDidChange('learn-ide-material-ui.treeView.blendTabs', (value) => {
        // A blended tree view only renders correctly with a tinted tab bar.
        if (value.newValue && !atom.config.get('learn-ide-material-ui.tabs.tintedTabBar')) {
            atom.config.set('learn-ide-material-ui.tabs.tintedTabBar', true);
        }
        toggleBlendTreeView(value.newValue);
        toggleClass(value.newValue, 'blend-tree-view');
    });
}
module.exports = { apply };
|
<reponame>keller35/ssh2-sftp-client
'use strict';
// Example of using a writeable with get to retrieve a file.
// This code will read the remote file, convert all characters to upper case
// and then save it to a local file
const Client = require('../src/index.js');
const path = require('path');
const fs = require('fs');
const through = require('through2');
// Connection settings for the demo.
// NOTE(review): '<PASSWORD>' is a redacted placeholder left by the source
// host — substitute real credentials before running.
const config = {
    host: 'arch-vbox',
    port: 22,
    username: 'tim',
    password: '<PASSWORD>'
};
const sftp = new Client();
// Remote directory whose listing is printed and whose test.txt is fetched.
const remoteDir = '/home/tim/testServer';
// Build a transform stream that upper-cases every chunk passing through it.
function toupper() {
    return through(function(buf, enc, next) {
        next(null, buf.toString().toUpperCase());
    });
}
// Flow: connect → list the remote dir (logged) → stream test.txt through the
// upper-casing transform into ./loud-text.txt → close the connection.
sftp
    .connect(config)
    .then(() => {
        return sftp.list(remoteDir);
    })
    .then(data => {
        // list of files in testServer
        console.dir(data);
        let remoteFile = path.join(remoteDir, 'test.txt');
        let upperWtr = toupper();
        let fileWtr = fs.createWriteStream(path.join(__dirname, 'loud-text.txt'));
        upperWtr.pipe(fileWtr);
        // get() writes the remote file's contents into the transform stream.
        return sftp.get(remoteFile, upperWtr);
    })
    .then(() => {
        return sftp.end();
    })
    .catch(err => {
        console.error(err.message);
    });
|
def multiply_matrix(A, B):
    """Multiply two matrices given as lists of row lists.

    Generalized from the original square-only version: A may be n×m and B
    m×p; the result is n×p. Behavior for equal-size square inputs is
    unchanged.

    Args:
        A: list of n rows, each of length m.
        B: list of m rows, each of length p.

    Returns:
        The n×p product matrix, or [] when either input is empty.

    Raises:
        ValueError: if A's row length does not match B's row count.
    """
    if not A or not B:
        return []
    n, m, p = len(A), len(B), len(B[0])
    if any(len(row) != m for row in A):
        raise ValueError("inner dimensions of A and B do not match")
    C = [[0] * p for _ in range(n)]
    for i in range(n):
        for k in range(m):
            a_ik = A[i][k]  # hoisted: loop-invariant for the inner j loop
            for j in range(p):
                C[i][j] += a_ik * B[k][j]
    return C
# Demo: multiply two 2x2 matrices and print the product
# (expected: [[5, 8], [11, 18]]).
A = [[1, 2],
     [3, 4]]
B = [[1, 2],
     [2, 3]]
print(multiply_matrix(A,B))
|
<filename>src/engine/MultiverseGraph.ts
import * as dagre from 'dagre';
import _ from 'lodash';
import { detectionInterface } from './interfaces';
// import { Graph, alg } from 'graphlib';
import QuantumSimulation from '@/engine/QuantumSimulation';
import QuantumFrame from '@/engine/QuantumFrame';
import Particle from '@/engine/Particle';
/**
 * MULTIVERSE GRAPH CLASS
 * Creates a graph after post processing the current simulation frames:
 * one node per (frame, particle) pair, one edge per parent→child relation
 * between consecutive frames, laid out with dagre.
 */
export default class MultiverseGraph {
  graph: any; // dagre.graphlib.Graph — left as `any` for dagre's dynamic node/edge records
  qs: QuantumSimulation; // simulation whose frames are post-processed into the graph
  constructor(qs: QuantumSimulation) {
    this.qs = qs;
    // https://github.com/dagrejs/dagre/wiki#a-note-on-rendering
    this.graph = new dagre.graphlib.Graph({ directed: true })
      .setGraph({
        nodesep: 5,
        ranksep: 20,
        marginy: 10,
        rankdir: 'TB'
        // rankdir: 'LR'
        // rankdir: 'BT'
      })
      .setDefaultEdgeLabel(() => {
        return {};
      });
    this.processFrames();
    // Compute x/y layout coordinates for all nodes and edges.
    dagre.layout(this.graph);
  }
  /**
   * Creates a directed acyclical graph from the quantum simulation frames.
   * Nodes carry frame/particle indices and a detection flag; edge width
   * encodes the particle's probability.
   * @returns dag
   */
  processFrames() {
    this.qs.frames.forEach((frame: QuantumFrame, fIndex: number) => {
      frame.particles.forEach((particle: Particle, pIndex: number) => {
        const uid = MultiverseGraph.createUid(fIndex, pIndex);
        // NOTE(review): particleI is computed but never used — confirm
        // whether exportParticle() was meant to populate the node payload.
        const particleI = particle.exportParticle();
        const detectionEvent = this.qs.isDetectionEvent(particle.coord);
        this.graph.setNode(uid, {
          label: fIndex,
          fIndex,
          pIndex,
          height: 15,
          width: 15,
          detectionEvent
        });
        // Set edges from particle directions
        this.findParent(fIndex, pIndex).forEach((parentUid: string) => {
          // this.graph.setEdge(uid, parentUid, {
          this.graph.setEdge(parentUid, uid, {
            label: `${parentUid} -> ${uid}`,
            width: particle.probability * 4 + 1, // thicker edge = higher probability
            fIndex,
            pIndex
          });
        });
      });
    });
    // Round the corners of the nodes
    this.graph.nodes().forEach((v: any) => {
      const node = this.graph.node(v);
      node.rx = 5;
      node.ry = 5;
      node.leaf = this.isLeaf(v);
      node.root = this.isRoot(v);
    });
  }
  /**
   * Find the parents of a particle from a specific frame by scanning the
   * previous frame for particles whose next position matches this one.
   * @param fIndex frame index of the child particle
   * @param pIndex particle index within that frame
   * @returns uids of parent particles (empty for frame 0)
   */
  findParent(fIndex: number, pIndex: number): string[] {
    const particle = this.qs.frames[fIndex].particles[pIndex];
    const parents: string[] = [];
    if (fIndex > 0) {
      // NOTE(review): `frame` is unused in this method.
      const frame = this.qs.frames[fIndex];
      const parentFrame = this.qs.frames[fIndex - 1];
      parentFrame.particles.forEach((parentParticle: Particle, parentIndex: number) => {
        // Check for parent
        // NOTE(review): nextCoord() is compared against the Particle itself,
        // not particle.coord — confirm equal() accepts a Particle here.
        if (parentParticle.nextCoord().equal(particle)) {
          const parentUid = `particle_${fIndex - 1}_${parentIndex}`;
          parents.push(parentUid);
        }
      });
    }
    return parents;
  }
  /**
   * Find successors of a particle, used to generate photon path.
   * NOTE(review): graphlib's children() relates to compound (nested) graphs,
   * not edge targets — confirm graph.successors() wasn't intended instead.
   * @returns nodes
   */
  successors(particleUid: string) {
    const successors = this.graph.children(particleUid);
    return successors;
  }
  /**
   * Compute SVG path for a particle's trajectory.
   * NOTE(review): looks unfinished — only the initial moveto is emitted and
   * `parentUid`, `source` and `sink` are computed but never used.
   * @param frameIndex
   * @param particleIndex
   */
  computePath(frameIndex: number, particleIndex: number): string {
    this.processFrames();
    let svgPath = '';
    const root: Particle = this.qs.frames[frameIndex].particles[particleIndex];
    const parentUid: string = MultiverseGraph.createUid(frameIndex, particleIndex);
    const originX = this.centerCoord(root.coord.x);
    const originY = this.centerCoord(root.coord.y);
    svgPath += `M ${originX} ${originY} `;
    const source = this.roots[0];
    const sink = this.leafs[0];
    return svgPath;
  }
  /**
   * Roots are nodes with no incoming edges (graph sources).
   * @returns roots string names
   */
  get roots(): string[] {
    return this.graph.sources();
  }
  /**
   * Leafs are nodes with no outgoing edges (graph sinks).
   * @returns leafs string names
   */
  get leafs(): string[] {
    return this.graph.sinks();
  }
  /**
   * Check if a node is a leaf.
   * @returns true when uid has no outgoing edges
   */
  isLeaf(uid: string): boolean {
    return _.includes(this.leafs, uid);
  }
  /**
   * Check if a node is a root.
   * @returns true when uid has no incoming edges
   */
  isRoot(uid: string): boolean {
    return _.includes(this.roots, uid);
  }
  /**
   * Get the particle stored at the given frame/particle indices.
   * @param fIndex frame index
   * @param pIndex particle index
   */
  fromIndices(fIndex: number, pIndex: number): Particle {
    return this.qs.frames[fIndex].particles[pIndex];
  }
  /**
   * Resolve a particle uid ("particle_<fIndex>_<pIndex>") back to a particle.
   * NOTE(review): /\d/g matches SINGLE digits, so any index ≥ 10 is parsed
   * incorrectly (e.g. "particle_12_3" → fIndex 1, pIndex 2) — verify callers
   * never exceed single-digit indices.
   * @param uid particle uid string
   */
  fromUid(uid: string): Particle {
    const fIndex = parseInt(uid.match(/\d/g)![0], 10);
    const pIndex = parseInt(uid.match(/\d/g)![1], 10);
    return this.qs.frames[fIndex].particles[pIndex];
  }
  /**
   * Create unique id for a particle
   * @param fIndex
   * @param pIndex
   * @returns uid string
   */
  static createUid(fIndex: number, pIndex: number): string {
    return `particle_${fIndex}_${pIndex}`;
  }
  /**
   * Compute the cell center at a specific coordinate for grid dots
   * @returns x, y pixel coordinates
   */
  centerCoord(val: number, tileSize = 64): number {
    return (val + 0.5) * tileSize;
  }
}
|
// State object for the XXC map — appears to follow a Phaser-style game-state
// interface (preload/create/update/render); confirm against the framework in
// use. All lifecycle hooks are intentionally empty stubs.
Game.Map.XXC = function() {};
Game.Map.XXC.prototype = {
    preload: function() {},
    create: function() {},
    update: function() {},
    render: function() {}
};
|
import java.io.IOException;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
/**
 * Minimal command-line web scraper: fetches the URL given as the first
 * argument with jsoup and prints the page's visible text.
 */
public class WebScraper {
    /**
     * Entry point.
     *
     * @param args args[0] must be the URL to scrape
     * @throws IOException if the page cannot be fetched
     */
    public static void main(String[] args) throws IOException {
        // Robustness fix: fail with a usage message instead of an
        // ArrayIndexOutOfBoundsException when no URL is supplied.
        if (args.length < 1) {
            System.err.println("Usage: java WebScraper <url>");
            return;
        }
        String url = args[0];
        // Connect to the specified URL and parse the response.
        Document doc = Jsoup.connect(url).get();
        // Extract all the text from the web page.
        String text = doc.text();
        // Print the extracted text.
        System.out.println(text);
    }
}
|
<filename>arch/arm/stm/stm32f4_fmc.c
/*-
* Copyright (c) 2018 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <sys/cdefs.h>
#include <arm/stm/stm32f4_fmc.h>
#define RD4(_sc, _reg) \
*(volatile uint32_t *)((_sc)->base + _reg)
#define WR4(_sc, _reg, _val) \
*(volatile uint32_t *)((_sc)->base + _reg) = _val
/*
 * Issue an SDRAM command to the FMC controller.
 * Selects the target bank (CTB1/CTB2), waits for the controller to be idle,
 * writes the command register, then waits for completion.
 */
static void
fmc_sdram_cmd(struct stm32f4_fmc_softc *sc, uint32_t bank, uint32_t cmd)
{

	if (bank == 1)
		cmd |= SDCMR_CTB1;
	else
		cmd |= SDCMR_CTB2;

	/* Wait until any previous command has finished. */
	while (RD4(sc, FMC_SDSR) & SDSR_BUSY)
		;

	WR4(sc, FMC_SDCMR, cmd);

	/* Wait for this command to complete. */
	while (RD4(sc, FMC_SDSR) & SDSR_BUSY)
		;
}
/*
 * Program one SDRAM bank: control and timing registers, then the JEDEC-style
 * init sequence (clock enable, precharge-all, auto-refresh, load mode reg).
 */
static void
configure_bank(struct stm32f4_fmc_softc *sc, const struct sdram *entry, int b)
{
	const struct sdram_bank *bank;
	uint32_t mrd;
	uint32_t reg;
	int i;

	if (b == 1)
		bank = &entry->bank1;
	else
		bank = &entry->bank2;

	/* SDRAM control register: clocking, geometry and CAS latency. */
	reg = ( bank->sdcr.rpipe << RPIPE_S) |
	    ( bank->sdcr.rburst << RBURST_S) |
	    ( bank->sdcr.sdclk << SDCLK_S) |
	    ( bank->sdcr.cas << CAS_S) |
	    ( bank->sdcr.nb << NB_S ) |
	    ( bank->sdcr.mwid << MWID_S) |
	    ( bank->sdcr.nr << NR_S ) |
	    ( bank->sdcr.nc << NC_S );
	if (b == 1)
		WR4(sc, FMC_SDCR_1, reg);
	else
		WR4(sc, FMC_SDCR_2, reg);

	/* SDRAM timing register (field values in memory clock cycles). */
	reg = ( bank->sdtr.trcd << TRCD_S ) |
	    ( bank->sdtr.trp << TRP_S ) |
	    ( bank->sdtr.twr << TWR_S ) |
	    ( bank->sdtr.trc << TRC_S ) |
	    ( bank->sdtr.tras << TRAS_S ) |
	    ( bank->sdtr.txsr << TXSR_S ) |
	    ( bank->sdtr.tmrd << TMRD_S );
	if (b == 1)
		WR4(sc, FMC_SDTR1, reg);
	else
		WR4(sc, FMC_SDTR2, reg);

	/* Enable the SDRAM clock. */
	reg = (SDCMR_MODE_CLKEN << SDCMR_MODE_S);
	fmc_sdram_cmd(sc, b, reg);

	/*
	 * Power-up delay between clock enable and precharge.
	 * NOTE(review): `i` is not volatile, so an optimizing compiler may
	 * elide this busy-wait entirely — confirm the required delay is met.
	 */
	for (i = 0; i < 10000; i++);

	/* Precharge all rows. */
	reg = (SDCMR_MODE_PALL << SDCMR_MODE_S);
	fmc_sdram_cmd(sc, b, reg);

	/* Issue the configured number of auto-refresh cycles. */
	reg = (SDCMR_MODE_AUTO_REFRESH << SDCMR_MODE_S) |
	    (entry->nrfs << SDCMR_NRFS_S);
	fmc_sdram_cmd(sc, b, reg);

	/* Load the SDRAM mode register: burst length/type, CAS, write burst. */
	mrd = ( bank->sdcmr.burstlen ) |
	    ( SDCMR_MRD_BURST_TYP_SEQ ) |
	    ( bank->sdcr.cas << SDCMR_MRD_CAS_S ) |
	    ( SDCMR_MRD_WBURST_SINGLE );
	reg = (SDCMR_MODE_LOAD << SDCMR_MODE_S) |
	    (mrd << SDCMR_MRD_S);
	fmc_sdram_cmd(sc, b, reg);
}
/*
 * Configure both SDRAM banks, then program the shared refresh timer.
 */
void
stm32f4_fmc_setup(struct stm32f4_fmc_softc *sc,
    const struct sdram *entry)
{
	uint32_t reg;

	configure_bank(sc, entry, 1);
	configure_bank(sc, entry, 2);

	/*
	 * Refresh timer count. The shift by 1 presumably places the value in
	 * the SDRTR COUNT field (starting at bit 1) — verify against the
	 * STM32F4 reference manual.
	 */
	reg = (entry->sdrtr << 1);
	WR4(sc, FMC_SDRTR, reg);
}
/*
 * Attach the FMC driver: record the controller's register base address in
 * the softc. Always returns 0 (success).
 */
int
stm32f4_fmc_init(struct stm32f4_fmc_softc *sc, uint32_t base)
{

	sc->base = base;

	return (0);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.