text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# A library of helper functions and constant for ubuntu os distro
# The code and configuration is for running node instances on Ubuntu images.
# The master is still on Debian. In addition, the configuration is based on
# upstart, which is in Ubuntu upto 14.04 LTS (Trusty). Ubuntu 15.04 and above
# replaced upstart with systemd as the init system. Consequently, the
# configuration cannot work on these images.
# By sourcing debian's helper.sh, we use the same create-master-instance
# functions as debian. But we overwrite the create-node-instance-template
# function to use Ubuntu.
source "${KUBE_ROOT}/cluster/gce/debian/helper.sh"
# $1: template name (required)
# Creates the GCE instance template for Ubuntu Trusty nodes by delegating to
# create-node-template (sourced from debian/helper.sh above) with the
# trusty-specific user-data and configure-sh metadata files.
function create-node-instance-template {
local template_name="$1"
# NOTE(review): scope_flags is expanded as a single word ("${scope_flags[*]}");
# presumably create-node-template re-splits it internally -- confirm at caller.
create-node-template "$template_name" "${scope_flags[*]}" \
"kube-env=${KUBE_TEMP}/node-kube-env.yaml" \
"user-data=${KUBE_ROOT}/cluster/gce/trusty/node.yaml" \
"configure-sh=${KUBE_ROOT}/cluster/gce/trusty/configure.sh"
}
|
def find_max(array):
    """Return the largest element of a non-empty sequence.

    Seeds the running maximum with the first element and scans the rest
    in order. Raises IndexError for an empty sequence (same as original).
    """
    largest = array[0]
    index = 1
    while index < len(array):
        if array[index] > largest:
            largest = array[index]
        index += 1
    return largest


find_max([30, 12, 67, 73, 21])  # 73
<filename>applications/physbam/physbam-lib/External_Libraries/Archives/boost/boost/test/utils/iterator/ifstream_line_iterator.hpp
// (C) Copyright <NAME> 2004-2005.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org/libs/test for the library home page.
//
// File : $RCSfile: ifstream_line_iterator.hpp,v $
//
// Version : $Revision: 1.1 $
//
// Description :
// ***************************************************************************
#ifndef BOOST_IFSTREAM_LINE_ITERATOR_HPP_071894GER
#define BOOST_IFSTREAM_LINE_ITERATOR_HPP_071894GER
// Boost
#include <boost/test/utils/iterator/istream_line_iterator.hpp>
// STL
#include <fstream>
#include <boost/test/detail/suppress_warnings.hpp>
//____________________________________________________________________________//
namespace boost {
namespace unit_test {
namespace ut_detail {
// ************************************************************************** //
// ************** ifstream_holder ************** //
// ************************************************************************** //
// Holds (and opens) the file stream so that, via base-class ordering in
// basic_ifstream_line_iterator below, the stream exists before the
// istream-based iterator base binds a reference to it.
template<typename CharT>
class ifstream_holder {
public:
    // Constructor
    // Opens the named file for reading; an empty name leaves the stream closed.
    // NOTE(review): open() is given file_name.begin(), which must be
    // NUL-terminated -- presumably basic_cstring guarantees that here; confirm.
    explicit ifstream_holder( basic_cstring<CharT const> file_name )
    {
        if( file_name.is_empty() )
            return;

        m_stream.open( file_name.begin(), std::ios::in );
    }

    // True when the underlying file was opened successfully.
    bool is_valid()
    {
        return m_stream.is_open();
    }

protected:
#ifdef BOOST_CLASSIC_IOSTREAMS
    typedef std::ifstream stream_t;
#else
    typedef std::basic_ifstream<CharT,std::char_traits<CharT> > stream_t;
#endif

    // Data members
    stream_t m_stream;
};
} // namespace ut_detail
// ************************************************************************** //
// ************** basic_ifstream_line_iterator ************** //
// ************************************************************************** //
#ifdef BOOST_MSVC
# pragma warning(push)
# pragma warning(disable: 4355) // 'this' : used in base member initializer list
#endif

// Line iterator that owns its underlying file stream. The (private)
// ifstream_holder base is listed first so m_stream is constructed and opened
// before the basic_istream_line_iterator base binds a reference to it --
// hence the deliberate use of 'this' in the base initializer list (C4355).
template<typename CharT>
class basic_ifstream_line_iterator : ut_detail::ifstream_holder<CharT>, public basic_istream_line_iterator<CharT>
{
public:
    // Iterate lines of file_name split by a custom delimiter.
    basic_ifstream_line_iterator( basic_cstring<CharT const> file_name, CharT delimeter )
    : ut_detail::ifstream_holder<CharT>( file_name ), basic_istream_line_iterator<CharT>( this->m_stream, delimeter ) {}

    // Iterate lines of file_name with the default delimiter; a default
    // (empty) file name yields the end-of-range iterator.
    explicit basic_ifstream_line_iterator( basic_cstring<CharT const> file_name = basic_cstring<CharT const>() )
    : ut_detail::ifstream_holder<CharT>( file_name ), basic_istream_line_iterator<CharT>( this->m_stream ) {}
};

#ifdef BOOST_MSVC
// Fix: restore the warning state saved by the matching 'push' above. The
// original 'pragma warning(default: 4355)' forced the compiler default,
// silently discarding any caller-configured setting for C4355.
# pragma warning(pop)
#endif
typedef basic_ifstream_line_iterator<char> ifstream_line_iterator;
typedef basic_ifstream_line_iterator<wchar_t> wifstream_line_iterator;
} // namespace unit_test
} // namespace boost
//____________________________________________________________________________//
#include <boost/test/detail/enable_warnings.hpp>
// ***************************************************************************
// Revision History :
//
// $Log: ifstream_line_iterator.hpp,v $
// Revision 1.1 2007/02/12 18:26:05 irving
// Checking boost 1.33 into External_Libraries so that windows users have easy access to it. Linux users can run 'yum update boost'.
//
// Revision 1.7 2005/06/11 07:21:23 rogeeff
// reverse prev fix
//
// Revision 1.6 2005/06/07 05:08:03 rogeeff
// gcc fix
//
// Revision 1.5 2005/02/20 08:27:09 rogeeff
// This a major update for Boost.Test framework. See release docs for complete list of fixes/updates
//
// Revision 1.4 2005/02/01 06:40:08 rogeeff
// copyright update
// old log entries removed
// minor stilistic changes
// depricated tools removed
//
// Revision 1.3 2005/01/30 01:44:14 rogeeff
// warnings suppressed
//
// Revision 1.2 2005/01/22 19:22:13 rogeeff
// implementation moved into headers section to eliminate dependency of included/minimal component on src directory
//
// Revision 1.1 2005/01/22 18:21:40 rogeeff
// moved sharable staff into utils
//
// ***************************************************************************
#endif // BOOST_IFSTREAM_LINE_ITERATOR_HPP_071894GER
|
<filename>test/counters/LogCounters_test.go
package counters
import (
"testing"
"github.com/stretchr/testify/suite"
"github.com/pip-services/pip-services-runtime-go"
"github.com/pip-services/pip-services-runtime-go/log"
"github.com/pip-services/pip-services-runtime-go/counters"
)
// LogCountersTest is a testify suite exercising the LogCounters component
// through the shared CountersFixture.
type LogCountersTest struct {
	suite.Suite
	counters runtime.ICounters // component under test; opened in SetupTest
	fixture  *CountersFixture  // reusable assertions over the ICounters interface
}
// SetupTest builds a fresh LogCounters before every test: wires a console
// log through the references, initializes and opens the counters, then wraps
// them in the fixture the individual tests delegate to.
func (suite *LogCountersTest) SetupTest() {
	clog := log.NewConsoleLog(nil)
	refs := runtime.NewReferences().WithLog(clog)
	suite.counters = counters.NewLogCounters(nil)
	suite.counters.Init(refs)
	suite.counters.Open()
	suite.fixture = NewCountersFixture(suite.counters)
}
// TearDownTest closes the counters opened by SetupTest after each test.
func (suite *LogCountersTest) TearDownTest() {
	suite.counters.Close()
}
// TestSimpleCounters delegates to the fixture's basic counter checks.
func (suite *LogCountersTest) TestSimpleCounters() {
	suite.fixture.TestSimpleCounters(suite.T())
}
// func (suite *LogCountersTest) TestMeasureElapsedTime() {
// suite.fixture.TestMeasureElapsedTime(suite.T())
// }
// TestLogCountersTestSuite is the 'go test' entry point that runs the suite.
func TestLogCountersTestSuite(t *testing.T) {
	suite.Run(t, new(LogCountersTest))
}
#!/usr/bin/env bash
set -e
source /etc/profile.d/chruby.sh
chruby 2.1.7
# fromEnvironment KEY
# Prints the raw value of the jq expression KEY (e.g. '.network1.vCenterCIDR')
# read from the environment/metadata file.
function fromEnvironment() {
  local key="$1"
  local environment=environment/metadata
  # Fix: let jq read the file directly instead of 'cat file | jq' (useless
  # use of cat), and quote the path so it survives word splitting.
  jq -r "$key" "$environment"
}
# Networking inputs for the director, read from the environment metadata file.
export BOSH_internal_cidr=$(fromEnvironment '.network1.vCenterCIDR')
export BOSH_internal_gw=$(fromEnvironment '.network1.vCenterGateway')
export BOSH_internal_ip=$(fromEnvironment '.network1["staticIP-1"]')
export BOSH_network_name=$(fromEnvironment '.network1.vCenterVLAN')
export BOSH_reserved_range="[$(fromEnvironment '.network1.reservedRange')]"
# Seed the creds file with the internal IP so interpolate can reference it.
cat > director-creds.yml <<EOF
internal_ip: $BOSH_internal_ip
EOF
# Resolve the packaged bosh CLI binary and make it executable.
export bosh_cli=$(realpath bosh-cli/bosh-cli-*)
chmod +x $bosh_cli
# Render the director manifest; BOSH_* variables above are injected via
# --vars-env, generated credentials accumulate in director-creds.yml.
$bosh_cli interpolate bosh-deployment/bosh.yml \
-o bosh-deployment/vsphere/cpi.yml \
--vars-store director-creds.yml \
-v director_name=stemcell-smoke-tests-director \
--vars-env "BOSH" > director.yml
$bosh_cli create-env director.yml -l director-creds.yml
# occasionally we get a race where director process hasn't finished starting
# before nginx is reachable causing "Cannot talk to director..." messages.
sleep 10
# Target the freshly created director using its generated credentials.
export BOSH_ENVIRONMENT=`$bosh_cli int director-creds.yml --path /internal_ip`
export BOSH_CA_CERT=`$bosh_cli int director-creds.yml --path /director_ssl/ca`
export BOSH_CLIENT=admin
export BOSH_CLIENT_SECRET=`$bosh_cli int director-creds.yml --path /admin_password`
$bosh_cli -n update-cloud-config bosh-deployment/vsphere/cloud-config.yml \
--ops-file bosh-linux-stemcell-builder/ci/assets/reserve-ips.yml \
--vars-env "BOSH"
# Persist director state for later pipeline tasks.
mv $HOME/.bosh director-state/
mv director.yml director-creds.yml director-state.json director-state/
#!/bin/bash
## Run on CPU
#BSUB -q hpc
## Job name
#BSUB -J batch_job_this_is_on_purpose_to_utilize_more_kernel
## Output file
#BSUB -o output/batch/batchjobs-%J.out
## Number of cores
#BSUB -n 1
## Whether the cores may be spread across different machines
#BSUB -R "span[hosts=1]"
## RAM per core
#BSUB -R "rusage[mem=10GB]"
## Maximum runtime hh:mm
#BSUB -W 45:00
## Email when the job starts
## and stops
module purge
module load python3
# NOTE(review): positional arguments (linear 10 GP newest 1000 10) are
# interpreted by batch_KRR_learn.py -- confirm their meaning there.
python3 KRR/batch_KRR_learn.py linear 10 GP newest 1000 10
import pygame
import time
class TimerControl:
    """Start/stop elapsed-time display (MM:SS) rendered onto a pygame surface."""

    def __init__(self, position):
        # (x, y) pixel position at which the timer text is blitted.
        self.position = position
        self.start_time = 0
        self.is_running = False
        # Fix: the original created a new pygame.font.Font on every call to
        # update_display (i.e. every frame). Cache it lazily instead; lazy so
        # construction still works before pygame.font is initialised.
        self._font = None

    def start_timer(self):
        """Begin timing from now (restarts the clock if already running)."""
        self.start_time = time.time()
        self.is_running = True

    def stop_timer(self):
        """Stop timing; the display shows 00:00 while stopped."""
        self.is_running = False

    def reset_timer(self):
        """Stop timing and clear the recorded start time."""
        self.start_time = 0
        self.is_running = False

    def update_display(self, screen):
        """Render the elapsed time (00:00 when stopped) onto ``screen``.

        :param screen: pygame surface to draw the white MM:SS text on.
        """
        elapsed_time = 0
        if self.is_running:
            elapsed_time = int(time.time() - self.start_time)
        minutes = str(elapsed_time // 60).zfill(2)
        seconds = str(elapsed_time % 60).zfill(2)
        if self._font is None:
            self._font = pygame.font.Font(None, 36)
        text = self._font.render(f"{minutes}:{seconds}", True, (255, 255, 255))
        screen.blit(text, self.position)
# Example usage: left click starts the timer, right click stops it.
pygame.init()
screen = pygame.display.set_mode((400, 200))
timer_control = TimerControl((100, 50))

# Main loop
running = True
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        elif event.type == pygame.MOUSEBUTTONDOWN:
            if event.button == 1:  # Left mouse button
                timer_control.start_timer()
            elif event.button == 3:  # Right mouse button
                timer_control.stop_timer()

    screen.fill((0, 0, 0))  # Clear the screen
    # Render other GUI elements
    # ...
    timer_control.update_display(screen)
    pygame.display.flip()

pygame.quit()
#!/usr/bin/env bash
# Without $HOME, a message is seen in cloud-init-output.log during autosign:
# couldn't find login name -- expanding `~'
export HOME='/root'

# Install Puppet Server 6 from the official puppet6 yum repository (EL7).
install_puppetserver() {
wget https://yum.puppet.com/puppet6-release-el-7.noarch.rpm
rpm -Uvh puppet6-release-el-7.noarch.rpm
yum-config-manager --enable puppet6
yum -y install puppetserver
}
# Put the puppet toolchain on PATH, shrink the JVM heap so the server fits on
# a small instance, and set up the CA with the names clients will connect by.
configure_puppetserver() {
echo 'export PATH=/opt/puppetlabs/puppet/bin:$PATH' \
>> /etc/profile.d/puppet-agent.sh
. /etc/profile.d/puppet-agent.sh
sed -i '
s/JAVA_ARGS.*/JAVA_ARGS="-Xms512m -Xmx512m"/
' /etc/sysconfig/puppetserver # workaround for t2.micro's 1GB RAM.
# EC2 instance metadata supplies the externally reachable hostname.
local public_hostname=$(curl \
http://169.254.169.254/latest/meta-data/public-hostname)
puppetserver ca setup \
--subject-alt-names "$public_hostname",localhost,puppet
# Let the agent on this host resolve the default "puppet" server name.
echo "127.0.0.1 puppet" >> /etc/hosts
}
# Install and configure the 'autosign' gem so agent CSRs are validated and
# signed automatically by autosign-validator, then restart to apply.
configure_autosign() {
gem install autosign
mkdir -p -m 750 /var/autosign
chown puppet: /var/autosign
touch /var/log/autosign.log
chown puppet: /var/log/autosign.log
autosign config setup
# Point the journal at the directory created above.
sed -i '
s!journalfile:.*!journalfile: "/var/autosign/autosign.journal"!
' /etc/autosign.conf
puppet config set \
--section master autosign /opt/puppetlabs/puppet/bin/autosign-validator
systemctl restart puppetserver
}
# Replace the production code environment with the test control repository.
deploy_code() {
yum -y install git
rm -rf /etc/puppetlabs/code/environments/production
git clone \
https://github.com/alexharv074/terraform-puppet-provisioner-test.git \
/etc/puppetlabs/code/environments/production
}
# Full bootstrap: install, configure, enable autosigning, deploy code.
main() {
install_puppetserver
configure_puppetserver
configure_autosign
deploy_code
}
main
# vim: set ft=sh:
# Get books for this Asset Manager
# Get all book details for one book
# Get current positions for that book
# Create a new trade
# Save it to AMaaS
# Update positions
|
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Branch of flutter/flutter to install (e.g. "stable", "beta", "master").
BRANCH=$1
# Fix: quote expansions so branch names and workspace paths containing
# whitespace or glob characters are passed intact.
git clone https://github.com/flutter/flutter.git --depth 1 -b "$BRANCH" _flutter
echo "$GITHUB_WORKSPACE/_flutter/bin" >> "$GITHUB_PATH"
|
def divide_list(numbers, divisor):
    """Return a new list with each element of ``numbers`` divided by ``divisor``."""
    quotients = []
    for value in numbers:
        quotients.append(value / divisor)
    return quotients
import numpy as np
import h5py
import logging
from nexusutils.readwriteoff import (
write_off_file,
create_off_face_vertex_map,
construct_cylinder_mesh,
)
from nexusutils.detectorplotter import do_transformations
from nexusutils.utils import normalise, calculate_magnitude
logger = logging.getLogger("NeXus_Utils")
def find_geometry_groups(nexus_file):
    """
    Find all kinds of group containing geometry information.

    Geometry groups themselves are often links (to reuse repeated geometry),
    so look for parents of geometry groups instead and return parent/child
    dictionary pairs.

    :param nexus_file: NeXus file input
    :return: list of dicts with "parent_group" and "geometry_group" entries
    """
    geometry_classes = ("NXoff_geometry", "NXcylindrical_geometry")
    found = []

    def _collect(name, node):
        # Only groups can parent geometry groups; skip datasets.
        if not isinstance(node, h5py.Group):
            return
        for child in node.values():
            if (
                isinstance(child, h5py.Group)
                and "NX_class" in child.attrs.keys()
                and str(child.attrs["NX_class"], "utf8") in geometry_classes
            ):
                found.append({"parent_group": node, "geometry_group": child})

    nexus_file.visititems(_collect)
    return found
def get_off_geometry_from_group(group):
    """
    Get geometry information from an NXoff_geometry group.

    :param group: NXoff_geometry and parent group in dictionary
    :return: vertices, faces and winding_order information from the group
    """
    geometry = group["geometry_group"]
    vertices = geometry["vertices"][...]
    faces = geometry["faces"][...]
    winding_order = geometry["winding_order"][...]
    return vertices, faces, winding_order
def get_and_apply_transformations(group, nexus_file, vertices):
    # Applies the chain of transformations that the parent group depends on
    # to the given vertices, returning transformed (n, 3) positions.
    from nexusutils.detectorplotter import get_transformations
    transformations = list()
    try:
        depends_on = group["parent_group"].get("depends_on")
    except Exception:
        # NOTE(review): h5py's Group.get returns None rather than raising when
        # "depends_on" is absent, so this fallback may never trigger and
        # get_transformations can receive None instead of "." -- confirm that
        # get_transformations treats None as "no transformation".
        depends_on = "."
    get_transformations(depends_on, transformations, nexus_file)
    vertices = np.matrix(vertices.T)
    # Add fourth element of 1 to each vertex, indicating these are positions not direction vectors
    vertices = np.matrix(np.vstack((vertices, np.ones(vertices.shape[1]))))
    vertices = do_transformations(transformations, vertices)
    # Now the transformations are done we do not need the 4th element
    return vertices[:3, :].T
def get_cylindrical_geometry_from_group(group):
    """
    Get geometry information from an NXcylindrical_geometry group
    :param group: NXcylindrical_geometry group and its parent group in a dictionary
    :return: vertices, faces and winding_order information from the group
    """
    cylinders = group["geometry_group"]["cylinders"][...]
    group_vertices = group["geometry_group"]["vertices"][...]
    vertices = None
    faces = None
    winding_order = None
    # Each cylinder row holds three vertex indices: A (centre of one face),
    # B (edge of that face, fixing the radius) and C (centre of the other face).
    for cylinder in cylinders:
        vector_a = group_vertices[cylinder[0], :]
        vector_b = group_vertices[cylinder[1], :]
        vector_c = group_vertices[cylinder[2], :]
        axis = vector_a - vector_c
        unit_axis, height = normalise(axis)
        radius = calculate_magnitude(vector_b - vector_a)
        centre = (vector_a + vector_c) * 0.5
        # Approximate the cylinder with a 10-sided prism mesh.
        mesh_vertices, mesh_faces = construct_cylinder_mesh(
            height, radius, unit_axis, centre, 10
        )
        new_winding_order, new_faces = create_off_face_vertex_map(mesh_faces)
        # Merge this cylinder's mesh into the accumulated arrays.
        vertices, faces, winding_order = accumulate_geometry(
            vertices, faces, winding_order, mesh_vertices, new_faces, new_winding_order
        )
    return vertices, faces, winding_order
def get_geometry_from_group(group, nexus_file):
    """
    Get geometry information from the geometry group
    :param group: Geometry group and its parent group in a dictionary
    :param nexus_file: Handle of the NeXus file input
    :return: vertices, faces and winding_order information from the group
    """
    # Dispatch on the group's NX_class attribute (stored as bytes in HDF5,
    # hence the str(..., "utf8") decode).
    if str(group["geometry_group"].attrs["NX_class"], "utf8") == "NXoff_geometry":
        vertices, faces, winding_order = get_off_geometry_from_group(group)
    elif (
        str(group["geometry_group"].attrs["NX_class"], "utf8")
        == "NXcylindrical_geometry"
    ):
        vertices, faces, winding_order = get_cylindrical_geometry_from_group(group)
    else:
        raise Exception(
            "nexustooff.get_geometry_from_group was passed a group which is not a geometry type"
        )
    # NOTE(review): np.matrix is deprecated in modern NumPy; downstream code
    # (column slicing in replicate_if_pixel_geometry) relies on matrix
    # semantics, so replacing it needs a coordinated change.
    vertices = np.matrix(vertices)
    vertices, faces, winding_order = replicate_if_pixel_geometry(
        group, vertices, faces, winding_order
    )
    vertices = get_and_apply_transformations(group, nexus_file, vertices)
    return vertices, faces, winding_order
def nexus_geometry_to_off_file(nexus_filename, off_filename):
    """
    Write all of the geometry information found in a NeXus file to an OFF file.

    :param nexus_filename: Name of the NeXus file input
    :param off_filename: Name of the OFF file output
    """
    # Fix: the original opened the HDF5 file and never closed it; use a
    # context manager so the handle is released even if a group fails to parse.
    with h5py.File(nexus_filename, "r") as nexus_file:
        geometry_groups = find_geometry_groups(nexus_file)
        # Build up vertices, faces and winding order across all groups.
        vertices = None
        faces = None
        winding_order = None
        for group in geometry_groups:
            new_vertices, new_faces, new_winding_order = get_geometry_from_group(
                group, nexus_file
            )
            vertices, faces, winding_order = accumulate_geometry(
                vertices, faces, winding_order, new_vertices, new_faces, new_winding_order
            )
    write_off_file(off_filename, vertices, faces, winding_order)
def replicate_if_pixel_geometry(group, vertices, faces, winding_order):
    """
    If the geometry group describes the shape of a single pixel then replicate the shape at all pixel offsets
    to find the shape of the whole detector panel.
    :param group: Geometry group and its parent group in a dictionary
    :param vertices: Vertices array for the original pixel
    :param faces: Faces array for the original pixel
    :param winding_order: Winding order array for the original pixel
    :return: vertices, faces, winding_order for the geometry comprising all pixels
    """
    # Only groups literally named "pixel_shape" describe a single pixel.
    if group["geometry_group"].name.split("/")[-1] == "pixel_shape":
        x_offsets, y_offsets, z_offsets = get_pixel_offsets(group)
        pixel_vertices = vertices
        pixel_faces = faces
        pixel_winding_order = winding_order
        next_indices = {"vertex": 0, "face": 0, "winding_order": 0}
        number_of_pixels = len(x_offsets)
        total_num_of_vertices = number_of_pixels * pixel_vertices.shape[0]
        # Preallocate arrays
        vertices = np.empty((total_num_of_vertices, 3))
        winding_order = np.empty(
            (len(pixel_winding_order) * number_of_pixels), dtype=int
        )
        faces = np.empty((len(pixel_faces) * number_of_pixels), dtype=int)
        for pixel_number in range(number_of_pixels):
            # Translate the pixel shape to this pixel's offset. The caller
            # (get_geometry_from_group) passes vertices as an np.matrix, so
            # these column slices hstack into an (n, 3) block.
            new_vertices = np.hstack(
                (
                    pixel_vertices[:, 0] + x_offsets[pixel_number],
                    pixel_vertices[:, 1] + y_offsets[pixel_number],
                    pixel_vertices[:, 2] + z_offsets[pixel_number],
                )
            )
            # NOTE(review): the fourth returned value is the updated
            # next_indices dict (misleadingly unpacked as "next_vertex");
            # next_indices is also mutated in place, so the loop still works.
            (
                vertices,
                faces,
                winding_order,
                next_vertex,
            ) = accumulate_geometry_in_prealloc_arrays(
                vertices,
                faces,
                winding_order,
                new_vertices,
                pixel_faces,
                pixel_winding_order,
                next_indices,
            )
    return vertices, faces, winding_order
def get_pixel_offsets(group):
    """Read per-pixel x/y/z offsets from the parent of a geometry group.

    x and y offsets are mandatory; missing z offsets default to zeros (a
    flat panel). Raises Exception when x or y is absent, as the original did.

    :param group: Geometry group and its parent group in a dictionary
    :return: x_offsets, y_offsets, z_offsets arrays
    """
    parent = group["parent_group"]

    def _required_offsets(axis_name):
        # Mandatory dataset: fail loudly, naming the geometry group involved.
        if axis_name not in parent:
            raise Exception(
                "No " + axis_name + " found in parent group of "
                + group["geometry_group"].name
            )
        return parent[axis_name][...]

    x_offsets = _required_offsets("x_pixel_offset")
    y_offsets = _required_offsets("y_pixel_offset")
    if "z_pixel_offset" in parent:
        z_offsets = parent["z_pixel_offset"][...]
    else:
        z_offsets = np.zeros(x_offsets.shape)
    return x_offsets, y_offsets, z_offsets
def accumulate_geometry(
    vertices, faces, winding_order, new_vertices, new_faces, new_winding_order
):
    """
    Accumulate geometry from different groups in the NeXus file, or repeated pixels.

    Appended face indices are shifted by the number of winding-order entries
    already stored, and winding-order entries by the number of vertices
    already stored, so the new geometry keeps referencing its own vertices.

    :param vertices: Vertices array to accumulate in (None on first call)
    :param faces: Faces array to accumulate in (None on first call)
    :param winding_order: Winding order array to accumulate in (None on first call)
    :param new_vertices: (2D) New vertices to append
    :param new_faces: (1D) New face start indices to append
    :param new_winding_order: (1D) New winding order entries to append
    """
    # Order matters: faces uses the pre-update winding_order size, and
    # winding_order uses the pre-update vertex count.
    if faces is None:
        faces = new_faces
    else:
        faces = np.concatenate((faces, new_faces + winding_order.size))
    if winding_order is None:
        winding_order = new_winding_order
    else:
        winding_order = np.concatenate(
            (winding_order, new_winding_order + vertices.shape[0])
        )
    if vertices is None:
        vertices = new_vertices
    else:
        vertices = np.vstack((vertices, new_vertices))
    return vertices, faces, winding_order
def accumulate_geometry_in_prealloc_arrays(
    vertices,
    faces,
    winding_order,
    new_vertices,
    new_faces,
    new_winding_order,
    next_indices,
):
    """
    Accumulate geometry from different groups in the NeXus file, or repeated pixels.
    Arrays are assumed to be preallocated and new data are inserted at the given index instead.
    :param vertices: Vertices array to accumulate in
    :param faces: Faces array to accumulate in
    :param winding_order: Winding order array to accumulate in
    :param new_vertices: (2D) New vertices to append/insert
    :param new_faces: (1D) New faces to insert
    :param new_winding_order: (1D) New winding_order to insert
    :param next_indices: Insert positions per array; mutated in place below
    """
    # Shift face entries by the current winding-order insert position so they
    # index into the accumulated winding_order array.
    faces[next_indices["face"] : (next_indices["face"] + len(new_faces))] = (
        new_faces + next_indices["winding_order"]
    )
    # Likewise shift winding-order entries by the current vertex position.
    winding_order[
        next_indices["winding_order"] : (
            next_indices["winding_order"] + len(new_winding_order)
        )
    ] = (new_winding_order + next_indices["vertex"])
    vertices[
        next_indices["vertex"] : (next_indices["vertex"] + new_vertices.shape[0]), :
    ] = new_vertices
    # Advance the insert positions past the newly written data.
    next_indices["face"] += len(new_faces)
    next_indices["winding_order"] += len(new_winding_order)
    next_indices["vertex"] += new_vertices.shape[0]
    return vertices, faces, winding_order, next_indices
if __name__ == "__main__":
    # Example use
    # SANS2D for example as it uses NXoff_geometry and NXcylindrical_geometry
    # NB, run example_instruments/sans2d/SANS2D_example.py from its own directory first to generate NeXus file
    # Converts the demo NeXus file's geometry into an OFF file for inspection.
    output_off_file = "SANS2D.off"
    nexus_geometry_to_off_file(
        "example_instruments/sans2d/SANS_example_gzip_compress.hdf5", output_off_file
    )
|
/*
* =============================================================================
*
* Copyright (c) 2011-2016, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.engine;
import java.io.IOException;
import java.io.Writer;
import org.junit.Assert;
import org.junit.Test;
import org.thymeleaf.model.IComment;
import org.thymeleaf.model.IModelVisitor;
/**
 * Unit tests for the engine-level {@link Comment} event: markup
 * reconstruction, location metadata, CharSequence behaviour and the computed
 * whitespace / inlineable content flags (both directly and via
 * {@code EngineEventUtils}).
 */
public final class CommentTest {

    @Test
    public void test() {
        // Parsed comment: content plus originating template, line and column.
        Comment c1 = new Comment("hello", "template", 10, 3);
        Assert.assertEquals("<!--hello-->", extractText(c1));
        final String c1all = c1.getComment();
        final String c1content = c1.getContent();
        Assert.assertEquals("<!--hello-->", c1all);
        Assert.assertEquals("hello", c1content);
        // Repeated getter calls must return the same cached String instances.
        Assert.assertSame(c1all, c1.getComment());
        Assert.assertSame(c1content, c1.getContent());
        Assert.assertEquals("template", c1.getTemplateName());
        Assert.assertEquals(10, c1.getLine());
        Assert.assertEquals(3, c1.getCol());
        // Synthetic comment: content only, markup is generated, no location.
        final String c1c0 = " something\nhere ";
        c1 = new Comment(c1c0);
        Assert.assertSame(c1c0, c1.getContent());
        Assert.assertEquals("<!-- something\nhere -->", c1.getComment());
        Assert.assertNull(c1.getTemplateName());
        Assert.assertEquals(-1, c1.getLine());
        Assert.assertEquals(-1, c1.getCol());
    }

    @Test
    public void testSubsection() {
        // subSequence indexes over the whole "<!--...-->" markup, not just
        // the content, and must behave the same with or without location.
        Comment c1 = new Comment("something");
        Assert.assertEquals("!--s", c1.subSequence(1, 5));
        Assert.assertEquals("some", c1.subSequence(4, 8));
        c1 = new Comment("something", "test", 1, 1);
        Assert.assertEquals("!--s", c1.subSequence(1, 5));
        Assert.assertEquals("some", c1.subSequence(4, 8));
    }

    @Test
    public void testContentFlags() {
        // testFlags(content, expectedWhitespace, expectedInlineable).
        // Inlineable means the content contains a complete [[...]] or [(...)]
        // inlining expression; intertwined delimiters do not count.
        testFlags("", false, false);
        testFlags(" ", true, false);
        testFlags("    ", true, false);
        testFlags("\n", true, false);
        testFlags("\n   \t", true, false);
        testFlags("\n [asd]", false, false);
        testFlags("\n asdasdasd 23123 [ [asd ]]", false, false);
        testFlags("\n asdasdasd 23123 [[asd ]]", false, true);
        testFlags("\n asdasdasd 23123 [[asd ]] [[asd]]", false, true);
        testFlags("\n asdasdasd 23123 [ [asd ]] [[asd] ]", false, false);
        testFlags("[[asd]]", false, true);
        testFlags("[[asd]", false, false);
        testFlags("[asd]]", false, false);
        testFlags("]]", false, false);
        testFlags("[[", false, false);
        testFlags("[[asd]]asd", false, true);
        testFlags("asd[[asd]]", false, true);
        testFlags("asd[[asd]]asd", false, true);
        testFlags("\n (asd)", false, false);
        testFlags("\n asdasdasd 23123 [ (asd )]", false, false);
        testFlags("\n asdasdasd 23123 [(asd )]", false, true);
        testFlags("\n asdasdasd 23123 [(asd )] [(asd)]", false, true);
        testFlags("\n asdasdasd 23123 [ (asd )] [(asd) ]", false, false);
        testFlags("[(asd)]", false, true);
        testFlags("[(asd)", false, false);
        testFlags("[asd)]", false, false);
        testFlags(")]", false, false);
        testFlags("[(", false, false);
        testFlags("[(asd)]asd", false, true);
        testFlags("asd[(asd)]", false, true);
        testFlags("asd[(asd)]asd", false, true);
        testFlags("\n (asd)", false, false);
        testFlags("\n asdasdasd 23123 [ (asd ]]", false, false);
        testFlags("\n asdasdasd 23123 [[asd )]", false, false);
        testFlags("\n asdasdasd 23123 [(asd ]] [[asd)]", false, false); // Intertwined inlines are not supported
        testFlags("\n asdasdasd 23123 [ (asd ]] [(asd) ]", false, false);
        testFlags("[(asd]]", false, false);
        testFlags("[(asd]", false, false);
        testFlags("(asd)]", false, false);
        testFlags("[(asd]]asd", false, false);
        testFlags("asd[[asd)]", false, false);
        testFlags("asd[(asd]])asd", false, false);
    }

    // Reads the comment char by char to exercise charAt()/length().
    private static String extractText(final Comment comment) {
        final StringBuilder strBuilder = new StringBuilder();
        for (int i = 0; i < comment.length(); i++) {
            strBuilder.append(comment.charAt(i));
        }
        return strBuilder.toString();
    }

    // Asserts the whitespace/inlineable flags for the given content: on a
    // synthetic comment, on one built from explicit markup parts, and via the
    // EngineEventUtils entry points fed a non-engine IComment.
    private static void testFlags(final String text, final boolean whitespace, final boolean inlineable) {
        Comment t1 = new Comment(text);
        if (whitespace) {
            Assert.assertTrue(t1.isWhitespace());
        } else {
            Assert.assertFalse(t1.isWhitespace());
        }
        if (inlineable) {
            Assert.assertTrue(t1.isInlineable());
        } else {
            Assert.assertFalse(t1.isInlineable());
        }
        // NOTE(review): presumably a (prefix, content, suffix) constructor so
        // the flags are computed on the same content with explicit markup --
        // confirm against Comment's declared constructors.
        t1 = new Comment("<!-- ",text," -->");
        if (whitespace) {
            Assert.assertTrue(t1.isWhitespace());
        } else {
            Assert.assertFalse(t1.isWhitespace());
        }
        if (inlineable) {
            Assert.assertTrue(t1.isInlineable());
        } else {
            Assert.assertFalse(t1.isInlineable());
        }
        t1 = new Comment(text);
        // By using the wrappers we avoid the utils methods calling the engine implementations (which are already tested above)
        boolean bWhitespace1 = EngineEventUtils.isWhitespace(new CommentWrapper(t1));
        boolean bInlineable1 = EngineEventUtils.isInlineable(new CommentWrapper(t1));
        if (whitespace) {
            Assert.assertTrue(bWhitespace1);
        } else {
            Assert.assertFalse(bWhitespace1);
        }
        if (inlineable) {
            Assert.assertTrue(bInlineable1);
        } else {
            Assert.assertFalse(bInlineable1);
        }
    }

    // Pass-through IComment wrapper: hides the engine Comment type so
    // EngineEventUtils cannot shortcut to the engine implementation.
    private static final class CommentWrapper implements IComment {

        private final Comment delegate;

        CommentWrapper(final Comment delegate) {
            super();
            this.delegate = delegate;
        }

        public static Comment asEngineComment(final IComment comment) {
            return Comment.asEngineComment(comment);
        }

        public String getComment() {
            return delegate.getComment();
        }

        public String getContent() {
            return delegate.getContent();
        }

        public int length() {
            return delegate.length();
        }

        public char charAt(final int index) {
            return delegate.charAt(index);
        }

        public CharSequence subSequence(final int start, final int end) {
            return delegate.subSequence(start, end);
        }

        public void accept(final IModelVisitor visitor) {
            delegate.accept(visitor);
        }

        public void write(final Writer writer) throws IOException {
            delegate.write(writer);
        }

        @Override
        public String toString() {
            return delegate.toString();
        }

        public int getCol() {
            return delegate.getCol();
        }

        public int getLine() {
            return delegate.getLine();
        }

        public boolean hasLocation() {
            return delegate.hasLocation();
        }

        public String getTemplateName() {
            return delegate.getTemplateName();
        }

    }

}
|
#!/usr/bin/env bash
CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CURRENT_DIR/helpers.sh"
# Resolved fg colors, filled in by get_fg_color_settings below.
gram_low_fg_color=""
gram_medium_fg_color=""
gram_high_fg_color=""
# Defaults used when the user has not set the corresponding @gram_* option.
gram_low_default_fg_color="#[fg=green]"
gram_medium_default_fg_color="#[fg=yellow]"
gram_high_default_fg_color="#[fg=red]"
# Read user-configured tmux options, falling back to the defaults above.
get_fg_color_settings() {
gram_low_fg_color=$(get_tmux_option "@gram_low_fg_color" "$gram_low_default_fg_color")
gram_medium_fg_color=$(get_tmux_option "@gram_medium_fg_color" "$gram_medium_default_fg_color")
gram_high_fg_color=$(get_tmux_option "@gram_high_fg_color" "$gram_high_default_fg_color")
}
# Print the tmux fg color format string matching the current RAM usage band
# (low/medium/high as classified by load_status from helpers.sh).
print_fg_color() {
  local gram_percentage=$("$CURRENT_DIR/gram_percentage.sh" | sed -e 's/%//')
  local gram_load_status=$(load_status "$gram_percentage")
  # Fix: quote the status variable -- with the original unquoted form the
  # test degenerates to '[ == "low" ]' (a syntax error) whenever load_status
  # prints nothing. Paths/args above are quoted for the same reason.
  if [ "$gram_load_status" == "low" ]; then
    echo "$gram_low_fg_color"
  elif [ "$gram_load_status" == "medium" ]; then
    echo "$gram_medium_fg_color"
  elif [ "$gram_load_status" == "high" ]; then
    echo "$gram_high_fg_color"
  fi
}
# Entry point: load color settings, then print the color for current load.
main() {
get_fg_color_settings
print_fg_color
}
main
|
<filename>src/main/webapp/app/entities/techno/techno.route.ts
import { Injectable } from '@angular/core';
import { Resolve, ActivatedRouteSnapshot, RouterStateSnapshot, Routes } from '@angular/router';
import { UserRouteAccessService } from '../../shared';
import { JhiPaginationUtil } from 'ng-jhipster';
import { TechnoComponent } from './techno.component';
import { TechnoDetailComponent } from './techno-detail.component';
import { TechnoPopupComponent } from './techno-dialog.component';
import { TechnoDeletePopupComponent } from './techno-delete-dialog.component';
// Standard (page) routes for the Techno entity: list view and detail view,
// both gated by UserRouteAccessService against the listed authorities.
export const technoRoute: Routes = [
    {
        path: 'techno',
        component: TechnoComponent,
        data: {
            // NOTE(review): the list view allows ROLE_ADMIN while the detail
            // view below is ROLE_PMANAGER only -- confirm the asymmetry is
            // intended.
            authorities: ['ROLE_ADMIN', 'ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.techno.home.title'
        },
        canActivate: [UserRouteAccessService]
    }, {
        path: 'techno/:id',
        component: TechnoDetailComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.techno.home.title'
        },
        canActivate: [UserRouteAccessService]
    }
];
// Popup (dialog) routes rendered into the named 'popup' outlet:
// create, edit and delete dialogs, all restricted to ROLE_PMANAGER.
export const technoPopupRoute: Routes = [
    {
        path: 'techno-new',
        component: TechnoPopupComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.techno.home.title'
        },
        canActivate: [UserRouteAccessService],
        outlet: 'popup'
    },
    {
        path: 'techno/:id/edit',
        component: TechnoPopupComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.techno.home.title'
        },
        canActivate: [UserRouteAccessService],
        outlet: 'popup'
    },
    {
        path: 'techno/:id/delete',
        component: TechnoDeletePopupComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.techno.home.title'
        },
        canActivate: [UserRouteAccessService],
        outlet: 'popup'
    }
];
|
<reponame>vitrum/radical-input<gh_stars>10-100
const electron = require('electron');
// Module to control application life.
// Module to create native browser window.
const {
  BrowserWindow,
  app
} = electron;
const {CONFIG} = require('./config.js');
const path = require('path');
const url = require('url');
// Fixed window height in px; the width is derived from it below.
const WINDOW_HEIGHT = 302;
const PADDING_PX = 4;
// Physical touchpad extent taken from the calibration in config.js.
const TOUCHPAD_LENGTH_X = CONFIG.touchpad_support.touchpad_coords.max.x - CONFIG.touchpad_support.touchpad_coords.min.x;
const TOUCHPAD_LENGTH_Y = CONFIG.touchpad_support.touchpad_coords.max.y - CONFIG.touchpad_support.touchpad_coords.min.y;
// On-screen heights of the drawing and selection areas (px).
const DRAW_AREA_HEIGHT = 193.99;
const SELECT_AREA_HEIGHT = 40.99;
// Scale the width so the draw+select region keeps the touchpad's aspect
// ratio, then add horizontal padding.
const WINDOW_WIDTH = Math.round((WINDOW_HEIGHT * (TOUCHPAD_LENGTH_X / TOUCHPAD_LENGTH_Y)) * ((DRAW_AREA_HEIGHT + SELECT_AREA_HEIGHT) / WINDOW_HEIGHT)) + PADDING_PX;
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
let mainWindow;
// Creates the always-on-top main window and installs a one-shot request
// rewrite for Google Translate's desktop input module.
function createWindow () {
  // Create the browser window. Height is fixed (min == max); width may grow.
  mainWindow = new BrowserWindow({
    width: WINDOW_WIDTH,
    height: WINDOW_HEIGHT,
    minWidth: WINDOW_WIDTH,
    minHeight: WINDOW_HEIGHT,
    maxHeight: WINDOW_HEIGHT,
    webPreferences: {
      // Enables the blink CSS backdrop-filter feature used by the UI.
      experimentalFeatures: true,
      blinkFeatures: 'CSSBackdropFilter'
    }
  });
  // and load the index.html of the app.
  mainWindow.loadURL(url.format({
    pathname: path.join(__dirname, 'index.html'),
    protocol: 'file:',
    slashes: true
  }));
  mainWindow.setAlwaysOnTop(true);
  // Comment to dev
  mainWindow.setMenu(null);
  // Open the DevTools.
  // mainWindow.webContents.openDevTools();
  // Emitted when the window is closed.
  mainWindow.on('closed', () => {
    // Dereference the window object, usually you would store windows
    // in an array if your app supports multi windows, this is the time
    // when you should delete the corresponding element.
    mainWindow = null;
  });
  // Intercept requests for Google Translate's desktop module script.
  const filter = {
    urls: [
      'https://translate.google.com/translate/releases/*/r/js/desktop_module_main.js'
    ]
  };
  // Only the first matching request is rewritten; later ones pass through.
  let redirected = false;
  electron.session.defaultSession.webRequest.onBeforeRequest(filter, async (details, callback) => {
    if (!redirected && details.url.indexOf('desktop_module_main.js') !== -1) {
      redirected = true;
      return callback({
        // https://drop.wtako.net/file/7419ad300b13731eb29ec06fe9b0b7a26f23a490.js
        // NOTE(review): pins the module to the twsfe_w_20180220_RC00 release --
        // presumably a known-good version; confirm it is still being served.
        redirectURL: 'https://translate.google.com/translate/releases/twsfe_w_20180220_RC00/r/js/desktop_module_main.js'
      });
    }
    return callback({});
  });
}
// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on('ready', createWindow);
// Quit when all windows are closed.
app.on('window-all-closed', () => {
  // On OS X it is common for applications and their menu bar
  // to stay active until the user quits explicitly with Cmd + Q
  if (process.platform !== 'darwin') {
    app.quit();
  }
});
// Re-create the window on macOS dock activation.
app.on('activate', () => {
  // On OS X it's common to re-create a window in the app when the
  // dock icon is clicked and there are no other windows open.
  if (mainWindow === null) {
    createWindow();
  }
});
|
<filename>venue/venue/doctype/venue/venue.js
// Copyright (c) 2021, <NAME> and contributors
// For license information, please see license.txt
// Form script for the Venue doctype: adds a shortcut button that opens a new
// Item pre-linked to the current venue.
frappe.ui.form.on('Venue', {
    refresh: function(frm) {
        // BUG FIX: was `rm.add_custom_button(...)` -- `rm` is undefined and
        // threw a ReferenceError on every refresh; the form object is `frm`.
        frm.add_custom_button('Create Item', () => {
            frappe.new_doc('Items', {
                venue: frm.doc.name
            })
        })
    }
});
|
package vn.tale.counter.ui.component.radio;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
/**
 * Unit tests for {@code RadioGroupController}: verifies that selection
 * changes propagate to the registered {@code RadioItem} mocks and to the
 * selection listener.
 *
 * Created by <NAME> Tiki on 5/3/16.
 */
public class RadioGroupControllerTest {
  @Mock RadioItem item1;
  @Mock RadioItem item2;
  @Mock RadioItem item3;
  @Mock RadioGroupController.OnItemSelectedListener onItemSelectedListener;
  private RadioGroupController radioGroupController;
  @Before
  public void setUp() throws Exception {
    MockitoAnnotations.initMocks(this);
    // Three mocked items registered in index order 0..2.
    radioGroupController = new RadioGroupController();
    radioGroupController.addItem(item1);
    radioGroupController.addItem(item2);
    radioGroupController.addItem(item3);
  }
  @Test
  public void testSetSelection() throws Exception {
    // Selecting index 0 selects item1; moving the selection to index 2
    // must deselect item1 and select item3.
    radioGroupController.setSelection(0);
    Mockito.verify(item1).setSelect(Mockito.eq(true));
    radioGroupController.setSelection(2);
    Mockito.verify(item1).setSelect(Mockito.eq(false));
    Mockito.verify(item3).setSelect(Mockito.eq(true));
  }
  @Test
  public void testListener() throws Exception {
    // The listener must be notified with the selected item instance.
    radioGroupController.setOnItemSelectedListener(onItemSelectedListener);
    radioGroupController.setSelection(0);
    Mockito.verify(onItemSelectedListener).onItemSelected(Mockito.eq(item1));
  }
}
const Sequelize = require('sequelize')
const db = require('../db')
// Sequelize model for a "share" record. The mandatory `readonly` flag marks
// the access level of the share -- NOTE(review): semantics inferred from the
// column name only; confirm against the code that consumes this model.
const Share = db.define('share', {
  readonly: {
    type: Sequelize.BOOLEAN,
    allowNull: false
  }
})
module.exports = Share
|
/* Copyright 2020 Freerware
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package transparent
import (
"net/http"
"github.com/freerware/negotiator/internal/header"
"github.com/freerware/negotiator/representation"
)
// rvsa1 represents the Remote Variant Selection Algorithm 1.0 as
// defined in RFC2296. This algorithm is leveraged in remote variant
// selection within transparent content negotiation.
// The zero-size struct carries no state; every method is a pure
// function of its arguments.
type rvsa1 struct{}

// RVSA1 provides the Remote Variant Selection Algorithm 1.0 as
// defined in RFC2296.
func RVSA1() representation.Chooser {
	return rvsa1{}
}
// Choose determines the 'best' representation from the provided set.
// It parses the request's negotiation headers, scores every representation
// along the media-type/charset/language/feature dimensions, ranks them by
// the product of those scores, and returns the top variant only when its
// score is positive and the match is definite (no wildcard was involved,
// per RFC 2296). Otherwise it returns (nil, nil).
func (c rvsa1) Choose(
	r *http.Request, reps ...representation.Representation) (representation.Representation, error) {
	var (
		a header.Accept
		// TODO(FREER) support encoding extension.
		//ae header.AcceptEncoding
		al header.AcceptLanguage
		ac header.AcceptCharset
		af header.AcceptFeatures
		err error
	)
	// Parse each negotiation header; a malformed header aborts the choice.
	accept := r.Header["Accept"]
	if a, err = header.NewAccept(accept); err != nil {
		return nil, err
	}
	// TODO(FREER) support encoding extension.
	//acceptEncodingEncoding := r.Header["Accept-Encoding"]
	//if ae, err = header.NewAcceptEncoding(acceptEncoding); err != nil {
	//	return nil, err
	//}
	acceptLanguage := r.Header["Accept-Language"]
	if al, err = header.NewAcceptLanguage(acceptLanguage); err != nil {
		return nil, err
	}
	acceptCharset := r.Header["Accept-Charset"]
	if ac, err = header.NewAcceptCharset(acceptCharset); err != nil {
		return nil, err
	}
	acceptFeatures := r.Header["Accept-Features"]
	if af, err = header.NewAcceptFeatures(acceptFeatures); err != nil {
		return nil, err
	}
	// Score each representation. A variant is "definite" only when none of
	// its dimensions matched via a wildcard.
	var variants representation.Set
	for _, rep := range reps {
		qs := rep.SourceQuality()
		qt, twc := c.acceptQuality(rep, a)
		qc, cwc := c.acceptCharsetQuality(rep, ac)
		ql, lwc := c.acceptLanguageQuality(rep, al)
		qf, fwc := c.acceptFeatureQuality(rep, af)
		isDefinite := !twc && !cwc && !lwc && !fwc
		variants = append(variants, representation.RankedRepresentation{
			Representation: rep,
			SourceQualityValue: qs,
			MediaTypeQualityValue: qt.Float(),
			CharsetQualityValue: qc.Float(),
			LanguageQualityValue: ql.Float(),
			FeatureQualityValue: qf.Float(),
			IsDefinite: isDefinite,
		})
	}
	if variants.Empty() {
		return nil, nil
	}
	// Rank by overall quality, descending.
	variants.Sort(func(i, j int) bool {
		v1 := variants[i]
		firstScore := c.overallQuality(v1)
		v2 := variants[j]
		secondScore := c.overallQuality(v2)
		return firstScore > secondScore
	})
	highest := variants.First()
	score := c.overallQuality(highest)
	//https://tools.ietf.org/html/rfc2296#section-3.5 accomplishes #1 and #2
	if score > 0.0 && highest.IsDefinite {
		return highest.Representation, nil
	}
	return nil, nil
}
// acceptQuality scores a representation's media type against the request's
// Accept header. It returns the quality value of the first compatible media
// range and whether that match relied on a wildcard (absent header included).
func (c rvsa1) acceptQuality(
	rep representation.Representation,
	accept header.Accept,
) (header.QualityValue, bool) {
	// A representation without a media type is never penalized.
	if rep.ContentType() == "" {
		return header.QualityValueMaximum, false
	}
	// No Accept header means "anything goes" -- treated as a wildcard match.
	if accept.IsEmpty() {
		return header.QualityValueMaximum, true
	}
	quality, wildcard := header.QualityValueMinimum, false
	for _, mediaRange := range accept.MediaRanges() {
		ok, err := mediaRange.Compatible(rep.ContentType())
		if err != nil || !ok {
			continue
		}
		quality = mediaRange.QualityValue()
		wildcard = mediaRange.Type() == "*" || mediaRange.SubType() == "*"
		break
	}
	return quality, wildcard
}
// acceptLanguageQuality scores a representation's language against the
// request's Accept-Language header, returning the matched quality value and
// whether the match used a wildcard (absent header included).
func (c rvsa1) acceptLanguageQuality(
	rep representation.Representation,
	acceptLanguage header.AcceptLanguage,
) (header.QualityValue, bool) {
	// A representation without a language is never penalized.
	if rep.ContentLanguage() == "" {
		return header.QualityValueMaximum, false
	}
	// No Accept-Language header accepts everything (wildcard match).
	if acceptLanguage.IsEmpty() {
		return header.QualityValueMaximum, true
	}
	quality, wildcard := header.QualityValueMinimum, false
	for _, languageRange := range acceptLanguage {
		if !languageRange.Compatible(rep.ContentLanguage()) {
			continue
		}
		quality = languageRange.QualityValue()
		wildcard = languageRange.IsWildcard()
		break
	}
	return quality, wildcard
}
// acceptCharsetQuality scores a representation's charset against the
// request's Accept-Charset header, returning the matched quality value and
// whether the match used a wildcard (absent header included).
func (c rvsa1) acceptCharsetQuality(
	rep representation.Representation,
	acceptCharset header.AcceptCharset,
) (header.QualityValue, bool) {
	// A representation without a charset is never penalized.
	if rep.ContentCharset() == "" {
		return header.QualityValueMaximum, false
	}
	// No Accept-Charset header accepts everything (wildcard match).
	if acceptCharset.IsEmpty() {
		return header.QualityValueMaximum, true
	}
	// Note: the loop variable is deliberately not named `c`, which would
	// shadow the method receiver.
	quality, wildcard := header.QualityValueMinimum, false
	for _, charsetRange := range acceptCharset.CharsetRanges() {
		if !charsetRange.Compatible(rep.ContentCharset()) {
			continue
		}
		quality = charsetRange.QualityValue()
		wildcard = charsetRange.IsWildcard()
		break
	}
	return quality, wildcard
}
// acceptFeatureQuality scores a representation's features against the
// request's Accept-Features header. Feature matching never reports a
// wildcard match, so the second return value is always false.
func (c rvsa1) acceptFeatureQuality(
	rep representation.Representation,
	acceptFeature header.AcceptFeatures,
) (header.QualityValue, bool) {
	// A representation without features is never penalized.
	if len(rep.ContentFeatures()) == 0 {
		return header.QualityValueMaximum, false
	}
	if acceptFeature.IsEmpty() {
		return header.QualityValueMaximum, true
	}
	featureList, err := header.NewFeatureList(rep.ContentFeatures())
	if err != nil {
		// Preserved from the original implementation: malformed content
		// features abort negotiation. TODO(FREER) return the error instead.
		panic(err)
	}
	degradation := featureList.QualityDegradation(acceptFeature.AsFeatureSets())
	return header.QualityValue(degradation), false //TODO(FREER)
}
// overallQuality combines the per-dimension scores of a ranked variant into
// a single value: the product of source, media-type, charset, language and
// feature qualities, rounded to 5 decimal places (RFC 2296 overall quality).
func (c rvsa1) overallQuality(v representation.RankedRepresentation) float32 {
	product := v.SourceQualityValue *
		v.MediaTypeQualityValue *
		v.CharsetQualityValue *
		v.LanguageQualityValue *
		v.FeatureQualityValue
	return header.QualityValue(product).Round(5).Float()
}
|
<gh_stars>1-10
package com.qmuiteam.qmui.widget;
import android.support.v4.view.PagerAdapter;
import android.util.SparseArray;
import android.view.ViewGroup;
/**
* @author cginechen
* @date 2017-09-13
*/
/**
 * A {@link PagerAdapter} base class that recycles page objects: pages removed
 * by {@link #destroyItem} are kept in a scrap pool keyed by position and
 * reused by the next {@link #instantiateItem} at the same position.
 */
public abstract class QMUIPagerAdapter extends PagerAdapter {

    /** Detached page objects awaiting reuse, keyed by their position. */
    private SparseArray<Object> mScrapItems = new SparseArray<>();

    public QMUIPagerAdapter() {
    }

    /**
     * Hydrating an object is taking an object that exists in memory,
     * that doesn't yet contain any domain data ("real" data),
     * and then populating it with domain data.
     *
     * @param container the pager container
     * @param position  the page position
     * @return a new, not-yet-populated page object
     */
    protected abstract Object hydrate(ViewGroup container, int position);

    /** Fills a new or recycled page object with the data for {@code position}. */
    protected abstract void populate(ViewGroup container, Object item, int position);

    /** Tears down a page object that is being removed from the container. */
    protected abstract void destroy(ViewGroup container, int position, Object object);

    @Override
    public final Object instantiateItem(ViewGroup container, int position) {
        Object recycled = mScrapItems.get(position);
        if (recycled != null) {
            // Reuse a previously scrapped page for this position.
            mScrapItems.remove(position);
            populate(container, recycled, position);
            return recycled;
        }
        Object created = hydrate(container, position);
        populate(container, created, position);
        return created;
    }

    @Override
    public final void destroyItem(ViewGroup container, int position, Object object) {
        destroy(container, position, object);
        // Keep the object so a later instantiateItem at this position can reuse it.
        mScrapItems.put(position, object);
    }
}
|
<reponame>ttungbmt/next-cache
import LRUCache from 'lru-cache';
import { isNil, merge } from 'lodash';
// Returns a shallow copy of `source` without the own enumerable keys listed
// in `excluded`. A null/undefined source yields an empty object.
function _objectWithoutPropertiesLoose(source, excluded) {
  if (source == null) return {};
  const target = {};
  for (const key of Object.keys(source)) {
    if (excluded.indexOf(key) === -1) {
      target[key] = source[key];
    }
  }
  return target;
}
const dev = process.env.NODE_ENV !== 'production';
/**
 * LRU response cache for a Next.js server: serves rendered HTML from memory
 * when available, otherwise renders, optionally stores, and responds.
 */
class NextCache {
  /**
   * @param {object} app Next.js server instance (needs renderToHTML/renderError).
   * @param {object} [options]
   * @param {boolean} [options.enabled] Whether successful responses are cached.
   *   Defaults to true in production and false otherwise.
   * @param {function} [options.getCacheKey] Maps a request to its cache key;
   *   defaults to host + url. Remaining options configure the LRU cache.
   */
  constructor(app, _ref = {}) {
    let {
      enabled,
      getCacheKey
    } = _ref,
        options = _objectWithoutPropertiesLoose(_ref, ["enabled", "getCacheKey"]);
    this.app = app;
    this.cacheKey = null;
    // BUG FIX: previously `enabled` defaulted to true outside production while
    // the render path skipped caching whenever it was true -- i.e. the flag
    // named "enabled" actually meant "disable caching". The default and the
    // check in render() are now aligned so `enabled: true` really means
    // "cache responses"; the default behavior (cache only in production)
    // is unchanged.
    this.enabled = isNil(enabled) ? process.env.NODE_ENV === 'production' : enabled;
    let defaultOptions = {
      max: 100 * 1024 * 1024,
      length: n => n.length,
      maxAge: 1000 * 60 * 60 * 24 * 30 // 1 month
    };
    this.options = merge(defaultOptions, options);
    this.getCacheKey = getCacheKey ? getCacheKey : req => req.headers.host + req.url;
    this.cache = new LRUCache(this.options);
  }
  /**
   * Renders (or serves from cache) the given page and sends the response.
   * Sets an X-LRU-Cache header of HIT, MISS, or SKIP for observability.
   */
  async render(req, res, pagePath, queryParams) {
    const key = this.getCacheKey(req); // If we have a page in the cache, let's serve it
    if (this.cache.has(key)) {
      res.setHeader('X-LRU-Cache', 'HIT');
      res.send(this.cache.get(key));
      return;
    } // No cache present for specific key? let's try to render and cache
    try {
      const html = await this.app.renderToHTML(req, res, pagePath, queryParams);
      // Skip caching when disabled, or when the render did not succeed --
      // send the generated content as is for further inspection.
      if (!this.enabled || res.statusCode !== 200) {
        res.setHeader('X-LRU-Cache', 'SKIP');
        res.send(html);
        return;
      } // Everything seems OK... let's cache
      this.cache.set(key, html);
      res.setHeader('X-LRU-Cache', 'MISS');
      res.send(html);
    } catch (err) {
      // Delegate error rendering to Next.js; nothing is cached on failure.
      this.app.renderError(err, req, res, pagePath, queryParams);
    }
  }
}
export default NextCache;
|
<filename>packages/@aws-cdk/aws-amplify/test/app-asset-deployment.integ.snapshot/asset.c3fdb1653d155f504c9d470873cc7012b6b21b0be8fc9922ae2ef49bd22daecb/index.js
"use strict";
// esbuild-generated CommonJS interop helpers (bundled snapshot; do not edit by hand).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Runs a bundled module body at most once (lazy ESM module initializer).
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// Defines live getter re-exports on `target` for every name in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties from `from` onto `to` as getters, preserving enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a module namespace object as a CommonJS export object.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// packages/@aws-cdk/aws-amplify/lib/asset-deployment-handler/common.ts
var ResourceHandler;
var init_common = __esm({
  "packages/@aws-cdk/aws-amplify/lib/asset-deployment-handler/common.ts"() {
    // Base class for CloudFormation custom-resource handlers: captures the
    // event fields and routes RequestType (Create/Update/Delete) to the
    // matching subclass hook.
    ResourceHandler = class {
      constructor(event) {
        this.requestType = event.RequestType;
        this.requestId = event.RequestId;
        this.logicalResourceId = event.LogicalResourceId;
        this.physicalResourceId = event.PhysicalResourceId;
        this.event = event;
      }
      // Dispatch for the provider framework's onEvent entry point.
      onEvent() {
        switch (this.requestType) {
          case "Create":
            return this.onCreate();
          case "Update":
            return this.onUpdate();
          case "Delete":
            return this.onDelete();
        }
        throw new Error(`Invalid request type ${this.requestType}`);
      }
      // Dispatch for the provider framework's isComplete polling entry point.
      isComplete() {
        switch (this.requestType) {
          case "Create":
            return this.isCreateComplete();
          case "Update":
            return this.isUpdateComplete();
          case "Delete":
            return this.isDeleteComplete();
        }
        throw new Error(`Invalid request type ${this.requestType}`);
      }
      // Pretty-printing helper for structured log output.
      log(x) {
        console.log(JSON.stringify(x, void 0, 2));
      }
    };
  }
});
// packages/@aws-cdk/aws-amplify/lib/asset-deployment-handler/handler.ts
var handler_exports = {};
__export(handler_exports, {
  AmplifyAssetDeploymentHandler: () => AmplifyAssetDeploymentHandler
});
// Identity cast of the raw CloudFormation ResourceProperties.
function parseProps(props) {
  return props;
}
var AmplifyAssetDeploymentHandler;
var init_handler = __esm({
  "packages/@aws-cdk/aws-amplify/lib/asset-deployment-handler/handler.ts"() {
    init_common();
    // Deploys an S3 asset to an Amplify branch by starting an Amplify
    // deployment from a pre-signed S3 URL, then polling the job to completion.
    AmplifyAssetDeploymentHandler = class extends ResourceHandler {
      constructor(amplify2, s32, event) {
        super(event);
        this.props = parseProps(this.event.ResourceProperties);
        this.amplify = amplify2;
        this.s3 = s32;
      }
      async onCreate() {
        console.log("deploying to Amplify with options:", JSON.stringify(this.props, void 0, 2));
        // Refuse to start if the most recent job on the branch is still pending.
        const jobs = await this.amplify.listJobs({
          appId: this.props.AppId,
          branchName: this.props.BranchName,
          maxResults: 1
        }).promise();
        if (jobs.jobSummaries && jobs.jobSummaries.find((summary) => summary.status === "PENDING")) {
          return Promise.reject("Amplify job already running. Aborting deployment.");
        }
        // Hand Amplify a pre-signed URL so it can pull the asset from S3.
        const assetUrl = this.s3.getSignedUrl("getObject", {
          Bucket: this.props.S3BucketName,
          Key: this.props.S3ObjectKey
        });
        const deployment = await this.amplify.startDeployment({
          appId: this.props.AppId,
          branchName: this.props.BranchName,
          sourceUrl: assetUrl
        }).promise();
        // The job id is threaded back through the event for isComplete polling.
        return {
          AmplifyJobId: deployment.jobSummary.jobId
        };
      }
      async isCreateComplete() {
        return this.isActive(this.event.AmplifyJobId);
      }
      // Deletion is a no-op: the deployed content is left in place.
      async onDelete() {
        return;
      }
      async isDeleteComplete() {
        return {
          IsComplete: true
        };
      }
      // Updates redeploy exactly like creates.
      async onUpdate() {
        return this.onCreate();
      }
      async isUpdateComplete() {
        return this.isActive(this.event.AmplifyJobId);
      }
      // Polls the Amplify job: complete on SUCCEED, throws on FAILED/CANCELLED,
      // otherwise reports not-complete so the framework keeps polling.
      async isActive(jobId) {
        if (!jobId) {
          throw new Error("Unable to determine Amplify job status without job id");
        }
        const job = await this.amplify.getJob({
          appId: this.props.AppId,
          branchName: this.props.BranchName,
          jobId
        }).promise();
        if (job.job.summary.status === "SUCCEED") {
          return {
            IsComplete: true,
            Data: {
              JobId: jobId,
              Status: job.job.summary.status
            }
          };
        }
        if (job.job.summary.status === "FAILED" || job.job.summary.status === "CANCELLED") {
          throw new Error(`Amplify job failed with status: ${job.job.summary.status}`);
        } else {
          return {
            IsComplete: false
          };
        }
      }
    };
  }
});
// packages/@aws-cdk/aws-amplify/lib/asset-deployment-handler/index.js
Object.defineProperty(exports, "__esModule", { value: true });
exports.isComplete = exports.onEvent = void 0;
var aws_sdk_1 = require("aws-sdk");
var handler_1 = (init_handler(), __toCommonJS(handler_exports));
var AMPLIFY_ASSET_DEPLOYMENT_RESOURCE_TYPE = "Custom::AmplifyAssetDeployment";
// Route AWS SDK logging through the Lambda's console log.
aws_sdk_1.config.logger = console;
var amplify = new aws_sdk_1.Amplify();
var s3 = new aws_sdk_1.S3({ signatureVersion: "v4" });
// Lambda entry point for the custom-resource provider's onEvent phase.
async function onEvent(event) {
  const provider = createResourceHandler(event);
  return provider.onEvent();
}
exports.onEvent = onEvent;
// Lambda entry point for the provider's isComplete polling phase.
async function isComplete(event) {
  const provider = createResourceHandler(event);
  return provider.isComplete();
}
exports.isComplete = isComplete;
// Maps the custom resource type to its handler implementation.
function createResourceHandler(event) {
  switch (event.ResourceType) {
    case AMPLIFY_ASSET_DEPLOYMENT_RESOURCE_TYPE:
      return new handler_1.AmplifyAssetDeploymentHandler(amplify, s3, event);
    default:
      throw new Error(`Unsupported resource type "${event.ResourceType}"`);
  }
}
|
<filename>packages/common/src/utils/getPath.ts
import type { FormControl } from '../types';
/**
* @category Helper
*/
export function getPath(el: FormControl): string {
const fieldSetName = el.dataset.felteFieldset;
return fieldSetName ? `${fieldSetName}.${el.name}` : el.name;
}
|
<filename>cortana-pixeltracker-core/src/main/java/com/microsoft/azure/server/pixeltracker/package-info.java<gh_stars>1-10
/**
 * Pixel Tracker Package Info -- root package of the pixel tracker core module.
 * Created by dcibo on 5/25/2017.
 */
package com.microsoft.azure.server.pixeltracker;
#!/usr/bin/env bash
# Runs every benchmark binary in build/<platform>/bin/bench against a real
# vector tile fetched from the Mapzen tile service.
set -e
set -o pipefail

if [[ ${PLATFORM} == "osx" || ${PLATFORM} == "linux" ]]; then
    # Run benchmarks (this is the benchmark runner, not the unit-test runner)
    echo "Running Benchmarks"
    # Quote the path: an unquoted ${PLATFORM} would be word-split if it ever
    # contained whitespace.
    pushd "./build/${PLATFORM}/bin"
    # a tile for testing; the URL is quoted so '?' is not glob-expanded.
    curl --compressed -L -o tile.mvt "https://tile.mapzen.com/mapzen/vector/v1/all/10/301/384.mvt?api_key=${MAPZEN_API_KEY}"
    for file in bench/*.out
    do
        echo "Running ${file}"
        # Quote the expansion so benchmark paths with spaces still execute.
        "$file"
    done
    popd
fi
/*
* Copyright 2013 Stanford University.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* - Neither the name of the copyright holders nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Author: <NAME> <<EMAIL>>
*/
#include <unistd.h>
#include <fstream> // NOLINT
#include <sstream>
#include <string>
#include "src/worker/worker.h"
#include "src/worker/worker_manager.h"
#include "src/worker/worker_thread.h"
#include "src/worker/worker_thread_monitor.h"
namespace nimbus {
// Monitoring thread: periodically samples the worker manager's counters and
// appends them to worker_state.log.
WorkerThreadMonitor::WorkerThreadMonitor(WorkerManager* worker_manager)
    : WorkerThread(worker_manager) {
}
WorkerThreadMonitor::~WorkerThreadMonitor() {
}
// Samples every 10ms and never returns: each log line holds the number of
// computation jobs dispatched since the previous sample and the current
// ready-job queue length. NOTE(review): the header claims a third column
// ("working_computation_thread_num") that is never written -- confirm whether
// the header or the data rows are out of date.
void WorkerThreadMonitor::Run() {
  std::ofstream output("worker_state.log");
  int64_t dispatched_computation_job_count_last = 0;
  int64_t dispatched_computation_job_count;
  int64_t ready_job_queue_length;
  output << "dispatched_computation_job_count "
         << "working_computation_thread_num "
         << "ready_job_queue_length "
         << std::endl;
  // int count = 0;
  while (true) {
    // count = (count + 1) % 10000;
    // if (count == 0) {
    //   AppDataManager* app_data_manager =
    //       worker_manager_->worker_->application_->app_data_manager();
    //   if (app_data_manager) {
    //     std::stringstream s;
    //     app_data_manager->PrintProfile(&s);
    //     // TODO(quhang): temporary usage.
    //     // printf("\nApplication data manager profile\n%s\n", s.str().c_str());
    //   }
    // }
    // 10ms sampling interval.
    usleep(10000);
    dispatched_computation_job_count =
        worker_manager_->dispatched_computation_job_count_;
    ready_job_queue_length = worker_manager_->ready_jobs_count_;
    // Log the delta since the last sample, then the queue length.
    output << dispatched_computation_job_count
              - dispatched_computation_job_count_last
           << " "
           << ready_job_queue_length
           << std::endl;
    // Flush each sample so the log is usable while the worker is running.
    output.flush();
    dispatched_computation_job_count_last =
        dispatched_computation_job_count;
  }
}
}  // namespace nimbus
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.2-147
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.26 at 02:04:22 PM MST
//
package net.opengis.gml._311;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* A geodetic datum defines the precise location and orientation in 3-dimensional space of a defined ellipsoid (or sphere) that approximates the shape of the earth, or of a Cartesian coordinate system centered in this ellipsoid (or sphere).
*
* <p>Java class for GeodeticDatumType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="GeodeticDatumType">
* <complexContent>
* <extension base="{http://www.opengis.net/gml}AbstractDatumType">
* <sequence>
* <element ref="{http://www.opengis.net/gml}usesPrimeMeridian"/>
* <element ref="{http://www.opengis.net/gml}usesEllipsoid"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated binding class (see file header): field-access mapping with a
// fixed element order of usesPrimeMeridian followed by usesEllipsoid. Edits
// here are lost when the schema is recompiled.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "GeodeticDatumType", propOrder = {
    "usesPrimeMeridian",
    "usesEllipsoid"
})
public class GeodeticDatumType
    extends AbstractDatumType
{
    @XmlElement(required = true)
    protected PrimeMeridianRefType usesPrimeMeridian;
    @XmlElement(required = true)
    protected EllipsoidRefType usesEllipsoid;
    /**
     * Gets the value of the usesPrimeMeridian property.
     *
     * @return
     *     possible object is
     *     {@link PrimeMeridianRefType }
     *
     */
    public PrimeMeridianRefType getUsesPrimeMeridian() {
        return usesPrimeMeridian;
    }
    /**
     * Sets the value of the usesPrimeMeridian property.
     *
     * @param value
     *     allowed object is
     *     {@link PrimeMeridianRefType }
     *
     */
    public void setUsesPrimeMeridian(PrimeMeridianRefType value) {
        this.usesPrimeMeridian = value;
    }
    /**
     * Gets the value of the usesEllipsoid property.
     *
     * @return
     *     possible object is
     *     {@link EllipsoidRefType }
     *
     */
    public EllipsoidRefType getUsesEllipsoid() {
        return usesEllipsoid;
    }
    /**
     * Sets the value of the usesEllipsoid property.
     *
     * @param value
     *     allowed object is
     *     {@link EllipsoidRefType }
     *
     */
    public void setUsesEllipsoid(EllipsoidRefType value) {
        this.usesEllipsoid = value;
    }
}
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default

# Appends a feed line only when an identical line is not already present, so
# re-running this script (e.g. on a cached workspace) never duplicates feeds.
add_feed() {
    grep -qxF "$1" feeds.conf.default || echo "$1" >>feeds.conf.default
}

# Add a feed source
add_feed 'src-git helloworld https://github.com/fw876/helloworld'
#add_feed 'src-git passwall https://github.com/xiaorouji/openwrt-passwall'
add_feed 'src-git infinityfreedom https://github.com/Chikage0o0/luci-theme-infinityfreedom'
|
//
// ViewController.h
// SqlcipherTool
//
// Created by ZhengXiankai on 16/4/18.
// Copyright © 2016年 bomo. All rights reserved.
//
#import <Cocoa/Cocoa.h>
/// Root view controller of the SqlcipherTool window; exposes no public API
/// beyond NSViewController (behavior lives in the implementation file).
@interface ViewController : NSViewController
@end
|
<reponame>OSADP/C2C-RI<filename>C2CRIBuildDir/projects/C2C-RI/src/NTCIP2306v01_69/src/org/fhwa/c2cri/ntcip2306v109/wsdl/OperationSpecCollection.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.ntcip2306v109.wsdl;
import java.util.ArrayList;
/**
 * The Class OperationSpecCollection provides a collection of related operations.
 *
 * @author TransCore ITS, LLC
 * Last Updated: 1/8/2014
 */
public class OperationSpecCollection {

    /** The backing list of operation specifications. */
    private ArrayList<OperationSpecification> operationCollection = new ArrayList<OperationSpecification>();

    /**
     * Adds an operation specification to the collection.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param os the operation specification to add
     * @return true, if successful
     */
    public boolean add(OperationSpecification os) {
        return operationCollection.add(os);
    }

    /**
     * Checks whether an operation with the given service, port and operation
     * name exists in the collection.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param serviceName the service name
     * @param portName the port name
     * @param operationName the operation name
     * @return true, if such an operation exists
     */
    public boolean contains(String serviceName, String portName, String operationName) {
        // Delegates to get() so the matching rules live in exactly one place.
        return get(serviceName, portName, operationName) != null;
    }

    /**
     * Gets the operation specification matching the given service, port and
     * operation name, or null when no match exists.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param serviceName the service name
     * @param portName the port name
     * @param operationName the operation name
     * @return the matching operation specification, or null
     */
    public OperationSpecification get(String serviceName, String portName, String operationName) {
        for (OperationSpecification thisSpec : operationCollection) {
            if (thisSpec.getOperationName().equals(operationName)
                    && thisSpec.getRelatedToPort().equals(portName)
                    && thisSpec.getRelatedToService().equals(serviceName)) {
                return thisSpec;
            }
        }
        return null;
    }

    /**
     * Gets the operation specification at the given index, or null when the
     * index is out of range (preserves the original lenient behavior).
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param index the index
     * @return the operation specification, or null
     */
    public OperationSpecification get(int index) {
        OperationSpecification result = null;
        try {
            result = operationCollection.get(index);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        return result;
    }

    /**
     * Gets every spec whose SOAPAction matches the provided value.
     *
     * CTCRI-718: some clients send POST messages where the SOAPAction value is
     * enclosed in double quotes, which is correct per the standard, but the
     * C2C RI stores SOAPAction values without quotes. Both the quoted and the
     * unquoted form of the provided value are therefore matched.
     *
     * @param soapAction the soap action (quoted or unquoted); null yields an empty list
     * @return the specs with the given soap action
     */
    public ArrayList<OperationSpecification> getSpecsWithSOAPAction(String soapAction) {
        ArrayList<OperationSpecification> returnList = new ArrayList<OperationSpecification>();
        if (soapAction == null) {
            return returnList;
        }
        // Normalize once, outside the loop (the original recomputed these on
        // every iteration): derive both the quoted and unquoted variants.
        String quoted = soapAction;
        String unquoted = soapAction;
        if (!soapAction.startsWith("\"") && !soapAction.endsWith("\"")) {
            quoted = "\"" + soapAction + "\"";
        } else {
            int textLength = soapAction.length();
            if (textLength >= 2 && soapAction.charAt(0) == '"' && soapAction.charAt(textLength - 1) == '"') {
                unquoted = soapAction.substring(1, textLength - 1);
            }
        }
        for (OperationSpecification thisSpec : operationCollection) {
            if (thisSpec.getSoapAction().equals(unquoted) || thisSpec.getSoapAction().equals(quoted)) {
                returnList.add(thisSpec);
            }
        }
        return returnList;
    }

    /**
     * Gets every spec whose WSDL document location equals the given location.
     *
     * @param location the location; null yields an empty list
     * @return the specs with the given location
     */
    public ArrayList<OperationSpecification> getSpecsWithLocation(String location) {
        // Debug System.out.println tracing removed from this library method.
        ArrayList<OperationSpecification> returnList = new ArrayList<OperationSpecification>();
        if (location != null) {
            for (OperationSpecification thisSpec : operationCollection) {
                if (thisSpec.getDocumentLocation().equals(location)) {
                    returnList.add(thisSpec);
                }
            }
        }
        return returnList;
    }

    /**
     * Gets every spec with the given operation name.
     *
     * @param operationName the operation name; null yields an empty list
     * @return the specs with the given operation name
     */
    public ArrayList<OperationSpecification> getSpecsWithOperationName(String operationName) {
        ArrayList<OperationSpecification> returnList = new ArrayList<OperationSpecification>();
        if (operationName != null) {
            for (OperationSpecification thisSpec : operationCollection) {
                if (thisSpec.getOperationName().equals(operationName)) {
                    returnList.add(thisSpec);
                }
            }
        }
        return returnList;
    }

    /**
     * Gets a shallow copy of the collection as a list.
     *
     * @return a new list holding the same specification references
     */
    public ArrayList<OperationSpecification> getCopyAsList() {
        // Copy constructor replaces the unchecked raw cast of clone().
        return new ArrayList<OperationSpecification>(operationCollection);
    }

    /**
     * Checks if the collection is empty.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @return true, if empty
     */
    public boolean isEmpty() {
        return operationCollection.isEmpty();
    }

    /**
     * Size of the collection.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @return the number of stored specifications
     */
    public int size() {
        return operationCollection.size();
    }
}
|
<reponame>wovo/hwpy<filename>demo/rapi/led_alternate.py
"""
An alternate (left 4 on, right 4 on) LEDs
"""
import sys
sys.path.append( "../.." )
import hwpy
print( __doc__)
leds = hwpy.all([
hwpy.gpo( 17 ),
hwpy.gpo( 27 ),
hwpy.gpo( 22 ),
hwpy.gpo( 10 ),
hwpy.invert( hwpy.gpo( 9 )),
hwpy.invert( hwpy.gpo( 11 )),
hwpy.invert( hwpy.gpo( 0 )),
hwpy.invert( hwpy.gpo( 5 )),
])
hwpy.blink( leds ) |
import { AbstractEntity } from 'src/entities/abstract-entity';
import { Entity, ManyToOne, OneToMany, OneToOne } from 'typeorm';
import { Article } from '../Article/Article.entity';
import { Like } from '../like/like.entity';
import { Photo } from '../Photo/Photo.entity';
import { User } from '../User/User.entity';
@Entity()
export class Comment extends AbstractEntity {
  // Author of the comment.
  @ManyToOne(() => User)
  user: User;

  // Article the comment belongs to.
  @ManyToOne(() => Article)
  article: Article;

  // Likes attached to this comment (inverse side lives on Like.comment).
  @OneToMany(() => Like, (_) => _.comment)
  likes: Like[];

  // Optional photo attached to the comment.
  @OneToOne(() => Photo)
  photo: Photo;
}
|
#pragma once
namespace BF
{
	// File access mode selected when opening a file.
	enum class FileOpenMode
	{
		Read,   // open for reading
		Write   // open for writing
	};
}
package io.quarkus.qson;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation that designates a property to collect any object properties
 * that are not mapped. The annotation must be applied twice if you want both
 * deserialization and serialization.
 *
 * For deserialization, the annotation must be placed on a method that has two arguments,
 * the first being String (the property name), the second being Object (the value).
 *
 * For serialization the method must take no parameters and return a {@link java.util.Map}
 * of the unmapped property names to values.
 */
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface QsonAny {
}
|
require 'rails_helper'

describe Parsers::Edi::Etf::EtfLoop do
  let(:etf) { Parsers::Edi::Etf::EtfLoop.new(raw_etf_loop) }

  describe '#carrier_fein' do
    let(:carrier_fein) { '1234' }
    let(:raw_etf_loop) { { "L1000B" => { "N1" => ['', '', '', '', carrier_fein] } } }

    it 'returns the carrier fein from the Payer loop' do
      expect(etf.carrier_fein).to eq carrier_fein
    end
  end

  describe '#employer_loop' do
    let(:n1) { ['', '', 'DC0'] }
    let(:raw_etf_loop) { { "L1000A" => { "N1" => n1 } } }

    it 'returns the employer loop' do
      expect(etf.employer_loop).to eq n1
    end
  end

  describe '#is_shop?' do
    let(:raw_etf_loop) { { "L1000A" => { "N1" => n1 } } }

    context 'when employer is not DC0' do
      let(:n1) { ['', '', 'NOT_DC0'] }
      it 'returns the employer loop' do
        expect(etf.is_shop?).to eq true
      end
    end

    context 'when employer is DC0' do
      let(:n1) { ['', '', 'DC0'] }
      it 'returns the employer loop' do
        expect(etf.is_shop?).to eq false
      end
    end
  end

  describe '#cancellation_or_termination?' do
    let(:not_subscriber) { { "INS" => ['', '', ''] } }
    let(:subscriber) { { "INS" => ['', '', '18'] } }
    let(:raw_etf_loop) { { "L2000s" => [not_subscriber, subscriber] } }

    # BUGFIX: the two example descriptions were swapped relative to their
    # stubbed values and expectations; the expectations were already correct.
    it 'returns false if no person loop is a cancellation or termination' do
      allow(etf).to receive(:people).and_return([double(:cancellation_or_termination? => false)])
      expect(etf.cancellation_or_termination?).to eq false
    end

    it 'returns true if any person loop is a cancellation or termination' do
      allow(etf).to receive(:people).and_return([double(:cancellation_or_termination? => true)])
      expect(etf.cancellation_or_termination?).to eq true
    end
  end
end
|
#!/usr/bin/env bash
# ROS environment wrapper: sources the ROS Melodic and workspace setup files,
# points ROS networking at the local master, then execs the wrapped command.
source /opt/ros/melodic/setup.bash
source /home/obstec/ros_ws/devel/setup.bash
# Local-network master/IP; the commented alternatives below are for the
# DTU wireless deployment — NOTE(review): confirm which pair is current.
export ROS_MASTER_URI=http://192.168.2.1:11311
export ROS_IP=192.168.2.1
#export ROS_MASTER_URI=http://blue2shore.clients.wireless.dtu.dk:11311
#export ROS_IP=10.16.151.117
#export ROS_IP=192.168.2.3
#export ROS_HOSTNAME=blue2shore.clients.wireless.dtu.dk
# Target the primary X display (needed for GUI tools started through this wrapper).
export DISPLAY=:0
# Replace this shell with the wrapped command so signals propagate directly.
exec "$@"
|
<filename>src/edu/berkeley/nlp/morph/Operation.java
package edu.berkeley.nlp.morph;
import java.util.ArrayList;
import java.util.List;
/**
 * Representation of an edit operation; supports easy conversion to and from
 * a String representation.
 *
 * @author gdurrett
 */
public enum Operation {
  EQUAL, SUBST, INSERT, DELETE;

  /** Returns the single-character code for the given operation. */
  public static String opToString(Operation op) {
    switch (op) {
      case EQUAL: return "=";
      case SUBST: return "S";
      case INSERT: return "I";
      case DELETE: return "D";
      default: throw new RuntimeException("Bad op: " + op);
    }
  }

  /** Encodes a sequence of operations as a compact string, one char per op. */
  public static String opsToString(List<Operation> ops) {
    // StringBuilder avoids O(n^2) string concatenation in the loop.
    StringBuilder opsStr = new StringBuilder(ops.size());
    for (Operation op : ops) {
      opsStr.append(opToString(op));
    }
    return opsStr.toString();
  }

  /** Decodes a single character into its operation; throws on unknown chars. */
  public static Operation charToOp(char opChar) {
    switch (opChar) {
      case '=': return EQUAL;
      case 'S': return SUBST;
      case 'I': return INSERT;
      case 'D': return DELETE;
      default: throw new RuntimeException("Bad op string: " + opChar);
    }
  }

  /** Decodes a string produced by {@link #opsToString} back into a list. */
  public static List<Operation> stringToOps(String opsStr) {
    List<Operation> ops = new ArrayList<Operation>(opsStr.length());
    for (int i = 0; i < opsStr.length(); i++) {
      ops.add(charToOp(opsStr.charAt(i)));
    }
    return ops;
  }
}
|
package model;
/** Immutable pairing of a boss name with the raid it belongs to. */
public class Boss {
  private final String bossName;
  private final String raid;

  public Boss(String name, String raidName) {
    this.bossName = name;
    this.raid = raidName;
  }

  /** The boss's display name. */
  public String getName() {
    return bossName;
  }

  /** The name of the raid this boss appears in. */
  public String getRaid() {
    return raid;
  }
}
|
#!/bin/bash
# Copyright [2009-2017] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

######################################################
## Launch XML export of Rfam data using LSF cluster ##
######################################################

set -e

usage="Usage: lsf_rfam_xml_dumper.sh /path/to/output"

if [ "$#" -ne 1 ]
then
    echo $usage
    exit 1
fi

dir=$1

# check that output directories exist
mkdir -p $dir/families
mkdir -p $dir/motifs
mkdir -p $dir/clans
mkdir -p $dir/genomes
mkdir -p $dir/full_region

# prepare lsf commands
prefix='source env/bin/activate && export PYTHONPATH=`pwd` && export DJANGO_SETTINGS_MODULE="rfam_schemas.rfam_schemas.settings" &&'

echo "$prefix bsub python scripts/export/rfam_xml_dumper.py --type M --out $dir/motifs/"
echo "$prefix bsub python scripts/export/rfam_xml_dumper.py --type C --out $dir/clans/"
# BUGFIX: the inner quotes around rusage[...] closed and reopened the outer
# string, so the emitted commands lost them and the bare brackets were subject
# to shell globbing when the commands were later run; escape them instead.
echo "$prefix bsub -M 16384 -R \"rusage[mem=16384]\" python scripts/export/rfam_xml_dumper.py --type G --out $dir/genomes/"
echo "$prefix bsub -M 16384 -R \"rusage[mem=16384]\" python scripts/export/rfam_xml_dumper.py --type F --out $dir/families/"
# -F (file size) is required to allow creation of large files
echo "$prefix bsub -M 16384 -R \"rusage[mem=16384]\" -F 1000000 python scripts/export/rfam_xml_dumper.py --type R --out $dir/full_region"
//
// Quick load and save from within a level.
//

#include "game.h"

//
// The filename we use as our savegame.
//

#define QLS_FNAME "data\\quicksave.dat"

//
// Deletes any existing quicksave so a stale save cannot be loaded.
//
void QLS_init()
{
	FileDelete(QLS_FNAME);
}

//
// Returns TRUE when a quicksave file exists, FALSE otherwise.
// BUGFIX: was declared 'void' while returning TRUE/FALSE.
//
SLONG QLS_available()
{
	FILE *handle = MF_Fopen(QLS_FNAME, "rb");

	if (handle)
	{
		MF_Fclose(handle);

		return TRUE;
	}
	else
	{
		return FALSE;
	}
}

//
// Writes the current game state (version tag + the_game) to the quicksave.
//
void QLS_save()
{
	//
	// BUGFIX: MF_Fopen was called without a mode argument; open for binary
	// writing, matching the "rb" used when the file is read back.
	//
	FILE *handle = MF_Fopen(QLS_FNAME, "wb");

	if (!handle)
	{
		return;
	}

	//
	// Save out the version number.
	//
	SLONG version = 1;
	if (fwrite(&version, sizeof(SLONG), 1, handle) != 1) goto file_error;

	//
	// Save out the the_game structure.
	//
	if (fwrite(&the_game, sizeof(the_game), 1, handle) != 1) goto file_error;

	MF_Fclose(handle);
	return;

  file_error:;
	MF_Fclose(handle);
	return;
}

//
// Loads the quicksave. NOTE(review): not implemented; return FALSE so callers
// treat the load as failed instead of reading an indeterminate return value.
//
SLONG QLS_load()
{
	return FALSE;
}

#endif // NOTE(review): no matching #if/#ifdef is visible in this chunk -- confirm against the full file.
// Mongo aggregation group/projection for the map view: one document per float
// (grouped by platform_number) carrying location and status flags.
module.exports.MAP_META_AGGREGATE = {_id: '$platform_number',
    'platform_number': 1,
    'date': 1,
    'cycle_number': 1,
    'geoLocation': 1,
    'DATA_MODE': 1,
    'containsBGC': 1,
    'isDeep': 1,
    'DIRECTION': 1
}
module.exports.MONTH_YEAR_AGGREGATE = {_id: 1,
platform_number: 1,
date: 1,
date_added: 1,
date_qc: 1,
containsBGC: { $ifNull: [ "$containsBGC", false ] },
isDeep: { $ifNull: [ "$containsBGC", false ] },
PI_NAME: 1,
cycle_number: 1,
lat: 1,
lon: 1,
position_qc: 1,
PLATFORM_TYPE: 1,
POSITIONING_SYSTEM: 1,
DATA_MODE: 1,
station_parameters: 1,
VERTICAL_SAMPLING_SCHEME: 1,
STATION_PARAMETERS_inMongoDB: 1,
cycle_number: 1,
dac: 1,
pres_max_for_TEMP: { $ifNull: [ "$pres_max_for_TEMP", -999 ] },
pres_min_for_TEMP: { $ifNull: [ "$pres_min_for_TEMP", -999 ] },
pres_max_for_PSAL: { $ifNull: [ "$pres_max_for_PSAL", -999 ] },
pres_min_for_PSAL: { $ifNull: [ "$pres_min_for_PSAL", -999 ] },
BASIN: 1
}
// Keep only profiles that have at least one measurement.
module.exports.COUNT_MATCH = {$match: {count: {$gt: 0}}}
// Projection for profiles including their measurements plus a computed count.
module.exports.COUNT_PROJECT = {$project: { // return profiles with measurements
    _id: 1,
    nc_url: 1,
    position_qc: 1,
    date_qc: 1,
    BASIN: 1,
    cycle_number: 1,
    dac: 1,
    date:1,
    lat: 1,
    lon: 1,
    platform_number: 1,
    geoLocation: 1,
    station_parameters: 1,
    maximum_pressure: 1,
    measurements: 1,
    POSITIONING_SYSTEM: 1,
    DATA_MODE: 1,
    PLATFORM_TYPE: 1,
    count: { $size:'$measurements' }, // number of measurement levels in the profile
}}
// Sort/slice spec for metadata-by-date queries (newest first).
module.exports.META_DATE_SLICE_PARAMS = {date: -1, lat: -1, lon: -1, BASIN: -1};
// Space-separated field list for map queries (Mongoose select string).
module.exports.MAP_PARAMS = 'platform_number date geoLocation cycle_number station_parameters lat lon DATA_MODE containsBGC isDeep DIRECTION'
// Projection for map markers without measurement counts.
module.exports.MAP_PROJ = {
    platform_number: -1,
    date: -1,
    geoLocation: 1,
    cycle_number: -1,
    DATA_MODE: -1,
    containsBGC: 1,
    isDeep: 1,
    DIRECTION: 1,
}
// Same projection plus a computed count of measurement levels.
module.exports.MAP_PROJ_WITH_COUNT = { platform_number: -1,
    date: -1,
    geoLocation: 1,
    cycle_number: -1,
    DATA_MODE: -1,
    containsBGC: 1,
    isDeep: 1,
    count: { $size:'$measurements' },
    DIRECTION: 1,
}
module.exports.PROF_META_PARAMS = //WARNING!!! don't use this to create other constants. exporting is buggy.
{ platform_number: 1,
date: 1,
date_qc: 1,
geo2DLocation: 1,
PI_NAME: 1,
cycle_number: 1,
lat: 1,
lon: 1,
DIRECTION: 1,
position_qc: 1,
station_parameters: 1,
VERTICAL_SAMPLING_SCHEME: 1,
STATION_PARAMETERS_inMongoDB: 1,
WMO_INST_TYPE: 1,
cycle_number: 1,
dac: 1,
basin: 1,
nc_url: 1,
geoLocation: 1,
maximum_pressure: 1,
POSITIONING_SYSTEM: 1,
DATA_MODE: 1,
core_data_mode: 1,
PLATFORM_TYPE: 1,
isDeep: 1,
containsBGC: 1
}
module.exports.PROF_PROJECT_WITH_PRES_RANGE_COUNT = {
platform_number: 1,
date: 1,
date_qc: 1,
geo2DLocation: 1,
PI_NAME: 1,
cycle_number: 1,
lat: 1,
lon: 1,
DIRECTION: 1,
position_qc: 1,
station_parameters: 1,
VERTICAL_SAMPLING_SCHEME: 1,
STATION_PARAMETERS_inMongoDB: 1,
WMO_INST_TYPE: 1,
cycle_number: 1,
measurements: 1,
count: { $size: "$measurements" },
dac: 1,
basin: 1,
nc_url: 1,
geoLocation: 1,
maximum_pressure: 1,
POSITIONING_SYSTEM: 1,
DATA_MODE: 1,
core_data_mode: 1,
PLATFORM_TYPE: 1,
isDeep: 1,
containsBGC: 1
}
module.exports.PROF_BGC_PROJECT_WITH_PRES_RANGE_COUNT = {
platform_number: 1,
date: 1,
date_qc: 1,
geo2DLocation: 1,
PI_NAME: 1,
cycle_number: 1,
lat: 1,
lon: 1,
DIRECTION: 1,
position_qc: 1,
station_parameters: 1,
VERTICAL_SAMPLING_SCHEME: 1,
STATION_PARAMETERS_inMongoDB: 1,
WMO_INST_TYPE: 1,
cycle_number: 1,
bgcMeas: 1,
count: { $size: "$bgcMeas" },
dac: 1,
basin: 1,
nc_url: 1,
geoLocation: 1,
maximum_pressure: 1,
POSITIONING_SYSTEM: 1,
DATA_MODE: 1,
core_data_mode: 1,
PLATFORM_TYPE: 1,
isDeep: 1,
containsBGC: 1
}
|
#! /bin/bash

PRGNAME="xcb-proto"

### xcb-proto (X protocol C-language Binding protocol descriptions)
# The package provides the XML-XCB protocol descriptions that libxcb uses to
# generate most of its code and API.
# Required: python3
# Recommended: no
# Optional: libxml2 (to run the tests)

ROOT="/root/src/lfs"
source "${ROOT}/check_environment.sh" || exit 1
source "${ROOT}/unpack_source_archive.sh" "${PRGNAME}" || exit 1
source "${ROOT}/xorg_config.sh" || exit 1

# Stage the install into a temporary package directory, then copy to /.
TMP_DIR="${BUILD_DIR}/package-${PRGNAME}-${VERSION}"
mkdir -pv "${TMP_DIR}"

# shellcheck disable=SC2086
PYTHON=python3 \
    ./configure \
    ${XORG_CONFIG} || exit 1

# make check
make install DESTDIR="${TMP_DIR}"

source "${ROOT}/stripping.sh" || exit 1
source "${ROOT}/update-info-db.sh" || exit 1
/bin/cp -vpR "${TMP_DIR}"/* /

# Record the package description for the package log database.
cat << EOF > "/var/log/packages/${PRGNAME}-${VERSION}"
# Package: ${PRGNAME} (X protocol C-language Binding protocol descriptions)
#
# xcb-proto provides the XML-XCB protocol descriptions that libxcb uses to
# generate the majority of its code and API. We provide them separately from
# libxcb to allow reuse by other projects, such as additional language
# bindings, protocol dissectors, or documentation generators.
#
# Home page: https://xcb.freedesktop.org/
# Download: https://xorg.freedesktop.org/archive/individual/proto/${PRGNAME}-${VERSION}.tar.xz
#
EOF

source "${ROOT}/write_to_var_log_packages.sh" \
    "${TMP_DIR}" "${PRGNAME}-${VERSION}"
#!/bin/bash

# run-shellcheck
#
# Legacy CIS Debian Hardening
#

#
# 99.5.2.7 Ensure that legacy services rlogin, rlogind and rcp are disabled and not installed
#

set -e # One error, it's over
set -u # One variable unset, it's over

# shellcheck disable=2034
HARDENING_LEVEL=3
# shellcheck disable=2034
DESCRIPTION="Ensure that legacy services rlogin, rlogind and rcp are disabled and not installed"
# shellcheck disable=2034
SERVICES="rlogin rlogind rcp"

# This function will be called if the script status is on enabled / audit mode
audit() {
    for SERVICE in $SERVICES; do
        info "Checking if $SERVICE is enabled and installed"
        is_service_enabled "$SERVICE"
        if [ "$FNRET" != 0 ]; then
            ok "$SERVICE is disabled"
        else
            crit "$SERVICE is enabled"
        fi
        is_pkg_installed "$SERVICE"
        if [ "$FNRET" != 0 ]; then
            ok "$SERVICE is not installed"
        else
            warn "$SERVICE is installed"
        fi
    done
}

# This function will be called if the script status is on enabled mode
apply() {
    :
}

# This function will check config parameters required
check_config() {
    :
}

# Source Root Dir Parameter
if [ -r /etc/default/cis-hardening ]; then
    # shellcheck source=../../debian/default
    . /etc/default/cis-hardening
fi
# BUGFIX: under 'set -u' an unset CIS_ROOT_DIR aborted the script before this
# check could print its message; use a default-empty expansion instead.
if [ -z "${CIS_ROOT_DIR:-}" ]; then
    echo "There is no /etc/default/cis-hardening file nor cis-hardening directory in current environment, cannot source CIS_ROOT_DIR variable, aborting"
    exit 128
fi

# Main function, will call the proper functions given the configuration (audit, enabled, disabled)
if [ -r "$CIS_ROOT_DIR"/lib/main.sh ]; then
    # shellcheck source=../../lib/main.sh
    . "$CIS_ROOT_DIR"/lib/main.sh
else
    echo "Cannot find main.sh, have you correctly defined your root directory? Current value is $CIS_ROOT_DIR in /etc/default/cis-hardening"
    exit 128
fi
<gh_stars>100-1000
// WeChat mini-program search page: loads shared init data from the app,
// then queries articles by keyword and renders the results.
var api = require("../../utils/api.js")
var util = require("../../utils/util.js")
var app = getApp()

Page({
  data: {
  },
  onLoad: function (options) {
    // Page init; `options` carries parameters passed via page navigation.
    this.setData({ "keyWord": "试试" })
    this.init();
  },
  onReady: function () {
    // Page finished rendering.
  },
  onShow: function () {
    // Page shown.
  },
  onHide: function () {
    // Page hidden.
  },
  onUnload: function () {
    // Page closed.
  },
  init: function () {
    let that = this;
    app.getInit(function (result) {
      var tmpFile = result.obj.tmpFile;
      var minisId = result.obj._Minisns.Id;
      var unionid = result.obj._LookUser.unionid;
      var verifyModel = util.primaryLoginArgs(unionid);
      // Store shared user/app data on the page.
      that.setData({ "user": result.obj._LookUser, "minisns": result.obj._Minisns, "tmpFile": tmpFile })
    })
    // NOTE(review): 'keyWrod' typo is pre-existing and consistent with
    // searchHandler's parameter name; kept as-is.
    let keyWrod = that.data.keyWord;
    that.searchHandler(keyWrod);
  },
  /**
   * Search (triggered by the input field).
   */
  search: function (e) {
    let keyWord = e.detail.value.keyWord
    this.searchHandler(keyWord)
  },
  // Issues the keyword query and stores the returned articles on the page.
  searchHandler: function (keyWrod) {
    let that = this
    var tmpFile = that.data.tmpFile;
    var minisId = that.data.minisns.Id;
    var unionid = that.data.user.unionid;
    var verifyModel = util.primaryLoginArgs(unionid);
    let data = {
      "deviceType": verifyModel.deviceType, "timestamp": verifyModel.timestamp,
      "uid": unionid, "versionCode": verifyModel.versionCode, "sign": verifyModel.sign,
      "id": minisId, "keyWord": keyWrod, "pageIndex": 1
    }
    that.setData({ "loading": true })
    api.getartlistbykeyword(data, tmpFile,
      {
        "success": function (result) {
          // Reverse each article's comments so the newest appear first.
          let articles = result.objArray
          for (let i = 0; i < articles.length; i++) {
            if (articles[i].articleComments) {
              articles[i].articleComments = articles[i].articleComments.reverse();
            }
          }
          that.setData({ "articles": articles })
        },
        complete: function () { // hide the loading indicator
          that.setData({ "loading": false })
        }
      })
  },
  /**
   * Show a full-size image preview.
   */
  showBigImg: function (e) { // show full-size image
    var src = e.currentTarget.dataset.src;
    wx.previewImage({
      current: src, // currently displayed image link; defaults to the first of `urls`
      urls: [src],
    })
    return false;
  },
  /**
   * Navigate to a user's profile page.
   */
  navicateToUser: function (e) {
    let uid = e.currentTarget.dataset.id;
    wx.navigateTo({
      url: '/pages/user/user?uid=' + uid,
    })
  },
  /**
   * Play a voice clip.
   */
  playAudio: function (event) {
    let vid = event.currentTarget.dataset.vId;
    let vSrc = event.currentTarget.dataset.vSrc;
    util.playVoice(vid, vSrc)
  },
})
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_rounded_corner = void 0;
var ic_rounded_corner = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M19,19h2v2h-2V19z M19,17h2v-2h-2V17z M3,13h2v-2H3V13z M3,17h2v-2H3V17z M3,9h2V7H3V9z M3,5h2V3H3V5z M7,5h2V3H7V5z M15,21h2v-2h-2V21z M11,21h2v-2h-2V21z M15,21h2v-2h-2V21z M7,21h2v-2H7V21z M3,21h2v-2H3V21z M21,8c0-2.76-2.24-5-5-5h-5v2h5 c1.65,0,3,1.35,3,3v5h2V8z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M19,19h2v2h-2V19z M19,17h2v-2h-2V17z M3,13h2v-2H3V13z M3,17h2v-2H3V17z M3,9h2V7H3V9z M3,5h2V3H3V5z M7,5h2V3H7V5z M15,21h2v-2h-2V21z M11,21h2v-2h-2V21z M15,21h2v-2h-2V21z M7,21h2v-2H7V21z M3,21h2v-2H3V21z M21,8c0-2.76-2.24-5-5-5h-5v2h5 c1.65,0,3,1.35,3,3v5h2V8z"
},
"children": []
}]
}]
}]
}]
}]
}]
}]
};
exports.ic_rounded_corner = ic_rounded_corner; |
from django.http import HttpResponse
from django.shortcuts import render

from .models import BlogPost
def edit_post(request, post_id):
    """Display the edit form for a blog post, or apply a submitted edit.

    POST updates the post's title and content and returns a confirmation;
    any other method renders the edit template with the post in context.
    """
    post = BlogPost.objects.get(id=post_id)
    if request.method == 'POST':
        post.title = request.POST.get('title')
        post.content = request.POST.get('content')
        post.save()
        return HttpResponse('Post updated successfully!')
    context = {'post': post}
    # BUGFIX: `render` was used without being imported (NameError on GET);
    # the module-level import has been added.
    return render(request, 'blog/edit.html', context)
import React from 'react';
import User from './User';
import CollapsibleSection from '../../components/CollapsibleSection';
const Profile = () => (
<div className="container-full-page mt-settings">
<CollapsibleSection name="settings_user" label="User">
<User />
</CollapsibleSection>
</div>
);
export default Profile;
|
<gh_stars>0
#include <stdio.h>
#include "figure.h"
/**
 * Prints one encoded row of the hangman figure.
 *
 * Encoding: 't' prints a tab, 'n' a newline, 'b' a backslash followed by a
 * space; any other character is printed literally. Only the first 5 cells
 * of a row are used by the figures defined in getFigure().
 *
 * @param body - encoded row of the figure
 */
void figurePrint(char body[200]) {
    for (int s = 0; s < 5; s++) {
        if (body[s] == 't') printf("\t");
        else if (body[s] == 'n') printf("\n");
        else if (body[s] == 'b') printf("\\ "); /* clearer than printf("%c ", (char)92) */
        else printf("%c", body[s]);
    }
}
/**
 * Prints the hangman figure corresponding to the remaining guesses.
 *
 * @param guess - remaining wrong guesses: 6 = head only, down to 0 = full
 *                body; -1 prints the full body without a leading newline.
 */
void getFigure(int guess) {
    if (guess != -1) printf("\n");
    printf("Figure:\n");
    /* const char body[][200] = {
    {' ', '|', 't', ' ', 'O', ' ', 'n'},
    // {'t', ' ', '|', ' ', 'n'},
    // {'t', '/', '|', ' ', 'n'}
    {' ', '|', 't', '/', '|', 'b', 'n'},
    {' ', '|', 't', ' ', '|', ' ', 'n'},
    {' ', '|', 't', '/', ' ', 'b', 'n'},
    };
    */
    /* Encoded body rows for figurePrint(): head, torso, arms, legs. */
    char body[10][200] = {
    {'t', ' ', 'O', ' ', 'n'}, // 0: head
    {'t', ' ', '|', ' ', 'n'}, // 1: torso only
    {'t', '/', '|', ' ', 'n'}, // 2: torso + left arm
    {'t', '/', '|', 'b', 'n'}, // 3: torso + both arms
    {'t', ' ', '|', ' ', 'n'}, // 4: lower torso
    {'t', '/', ' ', ' ', ' '}, // 5: left leg
    {'t', '/', ' ', 'b', ' '}, // 6: both legs
    };
    // printf(" ");
    // Draw the gallows top bar, then the body parts revealed so far.
    for (int i = 0; i < 10; i++) printf("_");
    printf("\n");
    printf("\t #\n");
    switch (guess) {
    case 6:
    figurePrint(body[0]);
    break;
    case 5:
    figurePrint(body[0]);
    figurePrint(body[1]);
    break;
    case 4:
    figurePrint(body[0]);
    figurePrint(body[2]);
    break;
    case 3:
    figurePrint(body[0]);
    figurePrint(body[3]);
    break;
    case 2:
    figurePrint(body[0]);
    figurePrint(body[3]);
    figurePrint(body[4]);
    break;
    case 1:
    figurePrint(body[0]);
    figurePrint(body[3]);
    figurePrint(body[4]);
    figurePrint(body[5]);
    break;
    case 0:
    case -1:
    figurePrint(body[0]);
    figurePrint(body[3]);
    figurePrint(body[4]);
    figurePrint(body[6]);
    break;
    default:
    break;
    }
    // printf("----");
}
|
<reponame>OhFinance/oh-app
import { SerializableTransactionReceipt } from "./types";
/** Payload for recording a newly submitted transaction. */
export interface AddTransaction {
  chainId: number;
  hash: string;
  from: string;
  // Present when the transaction is an ERC-20 approval.
  approval?: { tokenAddress: string; spender: string };
  summary?: string;
}

/** Payload for clearing every tracked transaction on a chain. */
export interface ClearAllTransactions {
  chainId: number;
}

/** Payload noting that a transaction was checked at a given block. */
export interface CheckedTransaction {
  chainId: number;
  hash: string;
  blockNumber: number;
}

/** Payload for finalizing a transaction with its mined receipt. */
export interface FinalizeTransaction {
  chainId: number;
  hash: string;
  receipt: SerializableTransactionReceipt;
}
|
#!/bin/bash
# Dev helper for the FitTrack Flask backend: exports the required environment
# variables and either starts the app (-s/--start) or prints usage.

export FLASK_APP=$(pwd)/backend/core/app.py
# NOTE(review): DB credentials are intentionally blank here — fill in locally.
export FITTRACK_DB_USER=""
export FITTRACK_DB_PASS=""
export FITTRACK_DB_HOST=""
export FITTRACK_DB_NAME=""

#################################################################
## db commands #
# python -m backend.core.db.manager db init #
# python -m backend.core.db.manager db migrate -m "users table" #
# python -m backend.core.db.manager db upgrade #
#################################################################

# Print usage/help text and exit.
help() {
    echo -e "usage:"
    echo -e "\t[-s | --start]\tto start flask app"
    echo -e "environments to set:"
    echo -e "\tFLASK_APP"
    echo -e "\tFITTRACK_DB_USER"
    echo -e "\tFITTRACK_DB_PASS"
    echo -e "\tFITTRACK_DB_HOST"
    echo -e "\tFITTRACK_DB_NAME"
    echo -e "db commands"
    echo -e "\tpython -m backend.core.db.manager db init"
    echo -e "\tpython -m backend.core.db.manager db migrate -m 'message'"
    echo -e "\tpython -m backend.core.db.manager db upgrade"
    exit 0
}

case "$1" in
-s | --start)
    python -m flask run --debugger
    ;;
*)
    help
esac
<reponame>TehStoneMan/CashCraft<gh_stars>0
package io.github.tehstoneman.cashcraft.command;
/**
 * Placeholder for the "/pay" command.
 *
 * NOTE(review): the entire ICommand implementation is commented out — the
 * class currently does nothing. The commented bodies below are kept verbatim
 * as a reference for re-enabling the command against a current Minecraft API.
 */
public class CommandPay// implements ICommand
{
	/*
	 * @Override
	 * public String getName()
	 * {
	 * // TODO Auto-generated method stub
	 * return "pay";
	 * }
	 */
	/*
	 * @Override
	 * public String getUsage( ICommandSender p_71518_1_ )
	 * {
	 * // TODO Auto-generated method stub
	 * return "pay";
	 * }
	 */
	/*
	 * @Override
	 * public List<String> getAliases()
	 * {
	 * // TODO Auto-generated method stub
	 * return null;
	 * }
	 */
	/*
	 * @Override
	 * public void execute( MinecraftServer server, ICommandSender sender, String[] args ) throws CommandException
	 * {
	 * final EntityPlayer player = (EntityPlayer)sender;
	 * final int amount = CommandBase.parseInt( args[0] );
	 * CashCraftAPI.economy.getWallet( player ).deposit( amount );
	 * }
	 */
	/*
	 * @Override
	 * public boolean checkPermission( MinecraftServer server, ICommandSender sender )
	 * {
	 * // TODO Auto-generated method stub
	 * return false;
	 * }
	 */
	/*
	 * @Override
	 * public List< String > getTabCompletions( MinecraftServer server, ICommandSender sender, String[] args, BlockPos pos )
	 * {
	 * // TODO Auto-generated method stub
	 * return null;
	 * }
	 */
	/*
	 * @Override
	 * public boolean isUsernameIndex( String[] p_82358_1_, int p_82358_2_ )
	 * {
	 * // TODO Auto-generated method stub
	 * return false;
	 * }
	 */
	/*
	 * @Override
	 * public int compareTo( ICommand o )
	 * {
	 * // TODO Auto-generated method stub
	 * return 0;
	 * }
	 */
}
|
<gh_stars>0
// Passport Twitter OAuth configuration, read entirely from environment
// variables so no secrets are committed to source control.
module.exports = {
    'twitterAuth': {
        'consumerKey': process.env.TWITTER_KEY,
        'consumerSecret': process.env.TWITTER_SECRET,
        'callbackURL': process.env.CALLBACK_URL
    }
};
|
from Cython.Compiler.Visitor import CythonTransform
from Cython.Compiler.StringEncoding import EncodedString
from Cython.Compiler import Options
from Cython.Compiler import PyrexTypes, ExprNodes
class EmbedSignature(CythonTransform):
    """Prepends a rendered call signature to the docstring of functions,
    cpdef methods and public properties when the 'embedsignature'
    directive is active.
    """

    def __init__(self, context):
        super(EmbedSignature, self).__init__(context)
        self.denv = None # XXX
        self.class_name = None
        self.class_node = None

    # Operator precedences used to decide where parentheses are required
    # when rendering default-value expressions back to source text.
    unop_precedence = 11
    binop_precedence = {
        'or': 1,
        'and': 2,
        'not': 3,
        'in': 4, 'not in': 4, 'is': 4, 'is not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
        '|': 5,
        '^': 6,
        '&': 7,
        '<<': 8, '>>': 8,
        '+': 9, '-': 9,
        '*': 10, '/': 10, '//': 10, '%': 10,
        # unary: '+': 11, '-': 11, '~': 11
        '**': 12}

    def _fmt_expr_node(self, node, precedence=0):
        # Render an expression node to source, parenthesizing only when the
        # child binds looser than the enclosing context.
        if isinstance(node, ExprNodes.BinopNode) and not node.inplace:
            new_prec = self.binop_precedence.get(node.operator, 0)
            result = '%s %s %s' % (self._fmt_expr_node(node.operand1, new_prec),
                                   node.operator,
                                   self._fmt_expr_node(node.operand2, new_prec))
            if precedence > new_prec:
                result = '(%s)' % result
        elif isinstance(node, ExprNodes.UnopNode):
            result = '%s%s' % (node.operator,
                               self._fmt_expr_node(node.operand, self.unop_precedence))
            if precedence > self.unop_precedence:
                result = '(%s)' % result
        elif isinstance(node, ExprNodes.AttributeNode):
            result = '%s.%s' % (self._fmt_expr_node(node.obj), node.attribute)
        else:
            result = node.name
        return result

    def _fmt_arg_defv(self, arg):
        """Format an argument's default value, preferring its compile-time
        value and falling back to re-rendering the expression."""
        default_val = arg.default
        if not default_val:
            return None
        try:
            denv = self.denv # XXX
            ctval = default_val.compile_time_value(self.denv)
            repr_val = repr(ctval)
            if isinstance(default_val, ExprNodes.UnicodeNode):
                if repr_val[:1] != 'u':
                    return u'u%s' % repr_val
            elif isinstance(default_val, ExprNodes.BytesNode):
                if repr_val[:1] != 'b':
                    return u'b%s' % repr_val
            elif isinstance(default_val, ExprNodes.StringNode):
                if repr_val[:1] in 'ub':
                    return repr_val[1:]
            return repr_val
        except Exception:
            try:
                return self._fmt_expr_node(default_val)
            except AttributeError:
                # BUGFIX: was the Python-2-only form 'except AttributeError, e:'
                # (a syntax error under Python 3); the bound name was unused.
                return '<???>'

    def _fmt_arg(self, arg):
        if arg.type is PyrexTypes.py_object_type or arg.is_self_arg:
            doc = arg.name
        else:
            doc = arg.type.declaration_code(arg.name, for_display=1)
        if arg.default:
            arg_defv = self._fmt_arg_defv(arg)
            if arg_defv:
                doc = doc + ('=%s' % arg_defv)
        return doc

    def _fmt_arglist(self, args,
                     npargs=0, pargs=None,
                     nkargs=0, kargs=None,
                     hide_self=False):
        arglist = []
        for arg in args:
            if not hide_self or not arg.entry.is_self_arg:
                arg_doc = self._fmt_arg(arg)
                arglist.append(arg_doc)
        if pargs:
            arglist.insert(npargs, '*%s' % pargs.name)
        elif nkargs:
            # Keyword-only args with no *args: mark the boundary with a bare '*'.
            arglist.insert(npargs, '*')
        if kargs:
            arglist.append('**%s' % kargs.name)
        return arglist

    def _fmt_ret_type(self, ret):
        if ret is PyrexTypes.py_object_type:
            return None
        else:
            return ret.declaration_code("", for_display=1)

    def _fmt_signature(self, cls_name, func_name, args,
                       npargs=0, pargs=None,
                       nkargs=0, kargs=None,
                       return_type=None, hide_self=False):
        arglist = self._fmt_arglist(args,
                                    npargs, pargs,
                                    nkargs, kargs,
                                    hide_self=hide_self)
        arglist_doc = ', '.join(arglist)
        func_doc = '%s(%s)' % (func_name, arglist_doc)
        if cls_name:
            func_doc = '%s.%s' % (cls_name, func_doc)
        if return_type:
            ret_doc = self._fmt_ret_type(return_type)
            if ret_doc:
                func_doc = '%s -> %s' % (func_doc, ret_doc)
        return func_doc

    def _embed_signature(self, signature, node_doc):
        if node_doc:
            return "%s\n%s" % (signature, node_doc)
        else:
            return signature

    def __call__(self, node):
        if not Options.docstrings:
            return node
        else:
            return super(EmbedSignature, self).__call__(node)

    def visit_ClassDefNode(self, node):
        # Track the enclosing class while visiting its body.
        oldname = self.class_name
        oldclass = self.class_node
        self.class_node = node
        try:
            # PyClassDefNode
            self.class_name = node.name
        except AttributeError:
            # CClassDefNode
            self.class_name = node.class_name
        self.visitchildren(node)
        self.class_name = oldname
        self.class_node = oldclass
        return node

    def visit_DefNode(self, node):
        if not self.current_directives['embedsignature']:
            return node

        is_constructor = False
        hide_self = False
        if node.entry.is_special:
            # Only __init__ among the special methods gets a signature,
            # rendered as the class itself with 'self' hidden.
            is_constructor = self.class_node and node.name == '__init__'
            if not is_constructor:
                return node
            class_name, func_name = None, self.class_name
            hide_self = True
        else:
            class_name, func_name = self.class_name, node.name

        nkargs = getattr(node, 'num_kwonly_args', 0)
        npargs = len(node.args) - nkargs
        signature = self._fmt_signature(
            class_name, func_name, node.args,
            npargs, node.star_arg,
            nkargs, node.starstar_arg,
            return_type=None, hide_self=hide_self)
        if signature:
            if is_constructor:
                doc_holder = self.class_node.entry.type.scope
            else:
                doc_holder = node.entry

            if doc_holder.doc is not None:
                old_doc = doc_holder.doc
            elif not is_constructor and getattr(node, 'py_func', None) is not None:
                old_doc = node.py_func.entry.doc
            else:
                old_doc = None
            new_doc = self._embed_signature(signature, old_doc)
            doc_holder.doc = EncodedString(new_doc)
            if not is_constructor and getattr(node, 'py_func', None) is not None:
                node.py_func.entry.doc = EncodedString(new_doc)
        return node

    def visit_CFuncDefNode(self, node):
        if not self.current_directives['embedsignature']:
            return node
        if not node.overridable: # not cpdef FOO(...):
            return node
        signature = self._fmt_signature(
            self.class_name, node.declarator.base.name,
            node.declarator.args,
            return_type=node.return_type)
        if signature:
            if node.entry.doc is not None:
                old_doc = node.entry.doc
            elif getattr(node, 'py_func', None) is not None:
                old_doc = node.py_func.entry.doc
            else:
                old_doc = None
            new_doc = self._embed_signature(signature, old_doc)
            node.entry.doc = EncodedString(new_doc)
            if hasattr(node, 'py_func') and node.py_func is not None:
                node.py_func.entry.doc = EncodedString(new_doc)
        return node

    def visit_PropertyNode(self, node):
        if not self.current_directives['embedsignature']:
            return node
        entry = node.entry
        if entry.visibility == 'public':
            # property synthesised from a cdef public attribute
            type_name = entry.type.declaration_code("", for_display=1)
            if not entry.type.is_pyobject:
                type_name = "'%s'" % type_name
            elif entry.type.is_extension_type:
                type_name = entry.type.module_name + '.' + type_name
            signature = '%s: %s' % (entry.name, type_name)
            new_doc = self._embed_signature(signature, entry.doc)
            entry.doc = EncodedString(new_doc)
        return node
|
#!/bin/bash
# Builds and installs the Ceres blockchain GUI (Node/Electron front end).
# Requires the ceres python virtualenv to be active and must not run as root.

set -e
export NODE_OPTIONS="--max-old-space-size=3000"

if [ -z "$VIRTUAL_ENV" ]; then
    echo "This requires the ceres python virtual environment."
    echo "Execute '. ./activate' before running."
    exit 1
fi

if [ "$(id -u)" = 0 ]; then
    echo "The Ceres Blockchain GUI can not be installed or run by the root user."
    exit 1
fi

# Allows overriding the branch or commit to build in ceres-blockchain-gui
SUBMODULE_BRANCH=$1

UBUNTU=false
# Manage npm and other install requirements on an OS specific basis
if [ "$(uname)" = "Linux" ]; then
    #LINUX=1
    if type apt-get; then
        # Debian/Ubuntu
        UBUNTU=true
        sudo apt-get install -y npm nodejs libxss1
    elif type yum && [ ! -f "/etc/redhat-release" ] && [ ! -f "/etc/centos-release" ] && [ ! -f /etc/rocky-release ] && [ ! -f /etc/fedora-release ]; then
        # AMZN 2
        echo "Installing on Amazon Linux 2."
        curl -sL https://rpm.nodesource.com/setup_12.x | sudo bash -
        sudo yum install -y nodejs
    elif type yum && [ ! -f /etc/rocky-release ] && [ ! -f /etc/fedora-release ] && [ -f /etc/redhat-release ] || [ -f /etc/centos-release ]; then
        # CentOS or Redhat
        echo "Installing on CentOS/Redhat."
        curl -sL https://rpm.nodesource.com/setup_12.x | sudo bash -
        sudo yum install -y nodejs
    elif type yum && [ -f /etc/rocky-release ] || [ -f /etc/fedora-release ]; then
        # RockyLinux
        echo "Installing on RockyLinux/Fedora"
        sudo dnf module enable nodejs:12
        sudo dnf install -y nodejs
    fi
elif [ "$(uname)" = "Darwin" ] && type brew && ! npm version >/dev/null 2>&1; then
    # Install npm if not installed
    brew install npm
elif [ "$(uname)" = "OpenBSD" ]; then
    pkg_add node
elif [ "$(uname)" = "FreeBSD" ]; then
    pkg install node
fi

# Ubuntu before 20.04LTS has an ancient node.js
echo ""
UBUNTU_PRE_2004=false
if $UBUNTU; then
    # Compare the release number via python; prints "True"/"False".
    UBUNTU_PRE_2004=$(python -c 'import subprocess; process = subprocess.run(["lsb_release", "-rs"], stdout=subprocess.PIPE); print(float(process.stdout) < float(20.04))')
fi

if [ "$UBUNTU_PRE_2004" = "True" ]; then
    echo "Installing on Ubuntu older than 20.04 LTS: Ugrading node.js to stable."
    UBUNTU_PRE_2004=true # Unfortunately Python returns True when shell expects true
    sudo npm install -g n
    sudo n stable
    export PATH="$PATH"
fi

if [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = "False" ]; then
    echo "Installing on Ubuntu 20.04 LTS or newer: Using installed node.js version."
fi

# For Mac and Windows, we will set up node.js on GitHub Actions and Azure
# Pipelines directly, so skip unless you are completing a source/developer install.
# Ubuntu special cases above.
if [ ! "$CI" ]; then
    echo "Running git submodule update --init --recursive."
    echo ""
    git submodule update --init --recursive
    echo "Running git submodule update."
    echo ""
    git submodule update
    cd ceres-blockchain-gui
    if [ "$SUBMODULE_BRANCH" ];
    then
        git fetch
        git checkout "$SUBMODULE_BRANCH"
        git pull
        echo ""
        echo "Building the GUI with branch $SUBMODULE_BRANCH"
        echo ""
    fi
    npm install
    npm audit fix || true
    npm run build
    python ../installhelper.py
else
    echo "Skipping node.js in install.sh on MacOS ci."
fi

echo ""
echo "Ceres blockchain install-gui.sh completed."
echo ""
echo "Type 'cd ceres-blockchain-gui' and then 'npm run electron &' to start the GUI."
#!/bin/bash
# Demo script: prints a fixed example message to stdout.
printf '%s\n' "command execute example!"
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.stargate.graphql.schema.graphqlfirst.migration;
import io.stargate.auth.AuthenticationSubject;
import io.stargate.auth.AuthorizationService;
import io.stargate.auth.Scope;
import io.stargate.auth.SourceAPI;
import io.stargate.auth.UnauthorizedException;
import io.stargate.auth.entity.ResourceKind;
import io.stargate.db.datastore.DataStore;
import io.stargate.db.query.builder.AbstractBound;
import io.stargate.db.schema.Column;
import io.stargate.db.schema.Table;
/** Migration query that adds a single column to an existing table. */
public class AddTableColumnQuery extends MigrationQuery {

  private final Table table;
  private final Column column;

  public AddTableColumnQuery(Table table, Column column) {
    this.table = table;
    this.column = column;
  }

  @Override
  public AbstractBound<?> build(DataStore dataStore) {
    // Equivalent of: ALTER TABLE <keyspace>.<table> ADD <column>
    return dataStore
        .queryBuilder()
        .alter()
        .table(table.keyspace(), table.name())
        .addColumn(column)
        .build()
        .bind();
  }

  @Override
  public String getDescription() {
    return String.format("Add column %s to table %s", column.name(), table.name());
  }

  @Override
  public void authorize(AuthorizationService authorizationService, AuthenticationSubject subject)
      throws UnauthorizedException {
    // Adding a column is a schema ALTER on the enclosing table.
    authorizationService.authorizeSchemaWrite(
        subject,
        table.keyspace(),
        table.name(),
        Scope.ALTER,
        SourceAPI.GRAPHQL,
        ResourceKind.TABLE);
  }

  @Override
  public boolean mustRunBefore(MigrationQuery that) {
    // Column additions are scheduled as early as possible: they can fail
    // unexpectedly if the column previously existed with a different type,
    // and we cannot check that beforehand, so we want as few queries as
    // possible executed before we find out. Two column additions never order
    // each other (avoids infinite mutual recursion); otherwise this query
    // goes first unless the other one already claims precedence.
    return !(that instanceof AddTableColumnQuery) && !that.mustRunBefore(this);
  }

  @Override
  public boolean addsReferenceTo(String udtName) {
    // The new column's type may reference the given UDT.
    return references(column.type(), udtName);
  }

  @Override
  public boolean dropsReferenceTo(String udtName) {
    // Adding a column never removes a UDT reference.
    return false;
  }
}
|
package information
import (
"github.com/domonda/go-sqldb"
)
// NewDatabase wraps an existing sqldb.Connection in a Database value that
// exposes the information_schema lookup helpers.
func NewDatabase(conn sqldb.Connection) Database {
	return Database{Connection: conn}
}
// Database provides queries against the information_schema of the connected
// database. It embeds sqldb.Connection, so all plain connection methods
// remain directly available on a Database value.
type Database struct {
	sqldb.Connection
}
// GetTable fetches the information_schema.tables row for the table with the
// given name and scans it into a Table struct.
// The name is passed as a bind parameter ($1), so no SQL injection is possible.
// NOTE(review): &table is a **Table; this relies on go-sqldb's ScanStruct
// allocating the target struct — confirm against the library's documentation.
func (db Database) GetTable(name string) (table *Table, err error) {
	err = db.QueryRow("select * from information_schema.tables where table_name = $1", name).ScanStruct(&table)
	if err != nil {
		return nil, err
	}
	return table, nil
}
// GetTables returns all rows of information_schema.tables, scanned into a
// slice of Table structs (one element per row).
func (db Database) GetTables() (tables []*Table, err error) {
	err = db.QueryRows("select * from information_schema.tables").ScanStructSlice(&tables)
	if err != nil {
		return nil, err
	}
	return tables, nil
}
|
package io.cattle.platform.api.service;
import io.cattle.platform.core.addon.InServiceUpgradeStrategy;
import io.cattle.platform.core.addon.ServiceUpgrade;
import io.cattle.platform.core.constants.ServiceConstants;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.core.util.ServiceUtil;
import io.cattle.platform.json.JsonMapper;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.revision.RevisionDiffomatic;
import io.cattle.platform.revision.RevisionManager;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.request.resource.AbstractValidationFilter;
import io.github.ibuildthecloud.gdapi.request.resource.ActionHandler;
import io.github.ibuildthecloud.gdapi.validation.ValidationErrorCodes;
import java.util.Map;
/**
 * Validation filter for the service "upgrade" action: converts the request
 * payload into an in-service upgrade strategy, validates it and turns it into
 * a new service revision before the action handler runs.
 */
public class ServiceUpgradeValidationFilter extends AbstractValidationFilter {

    ObjectManager objectManager;
    JsonMapper jsonMapper;
    RevisionManager revisionManager;

    public ServiceUpgradeValidationFilter(ObjectManager objectManager, JsonMapper jsonMapper, RevisionManager revisionManager) {
        super();
        this.objectManager = objectManager;
        this.jsonMapper = jsonMapper;
        this.revisionManager = revisionManager;
    }

    /**
     * Intercepts only the service-upgrade action; every other action falls
     * straight through to the default behavior.
     */
    @Override
    public Object perform(Object obj, ApiRequest request, ActionHandler next) {
        if (request.getAction().equals(ServiceConstants.ACTION_SERVICE_UPGRADE)) {
            return processInServiceUpgradeStrategy(obj, request, next);
        }
        return super.perform(obj, request, next);
    }

    /**
     * Builds a new revision from the upgrade strategy carried by the request,
     * stores the revision data on the service and in the request object, then
     * delegates to the next handler.
     */
    protected Object processInServiceUpgradeStrategy(Object obj, ApiRequest request, ActionHandler next) {
        Service service = objectManager.loadResource(Service.class, request.getId());
        ServiceUpgrade upgrade = jsonMapper.convertValue(request.getRequestObject(), ServiceUpgrade.class);
        InServiceUpgradeStrategy strategy = finalizeUpgradeStrategy(service, upgrade.getInServiceStrategy());
        Map<String, Object> data = jsonMapper.writeValueAsMap(strategy);
        revisionManager.setFieldsForUpgrade(data);
        RevisionDiffomatic diff = revisionManager.createNewRevision(request.getSchemaFactory(), service, data);
        objectManager.setFields(service, diff.getNewRevisionData());
        request.setRequestObject(diff.getNewRevisionData());
        // NOTE(review): the reassigned 'service' is never read again below; the
        // call appears to be kept for its side effect of assigning the revision.
        service = revisionManager.assignRevision(diff, service);
        return super.perform(obj, request, next);
    }

    /**
     * Validates the strategy: at least one launch config must be present, and
     * load-balancer services additionally require a primary launch config and
     * get their balancer labels/healthcheck injected.
     *
     * @throws RuntimeException via ValidationErrorCodes.throwValidationError
     *         when the strategy is invalid
     */
    @SuppressWarnings("unchecked")
    protected InServiceUpgradeStrategy finalizeUpgradeStrategy(Service service, InServiceUpgradeStrategy strategy) {
        if (strategy.getLaunchConfig() == null && strategy.getSecondaryLaunchConfigs() == null) {
            ValidationErrorCodes.throwValidationError(ValidationErrorCodes.INVALID_OPTION,
                    "LaunchConfig/secondaryLaunchConfigs need to be specified for inService strategy");
        }
        if (service.getKind().equalsIgnoreCase(ServiceConstants.KIND_LOAD_BALANCER_SERVICE)) {
            if (strategy.getLaunchConfig() == null) {
                ValidationErrorCodes.throwValidationError(ValidationErrorCodes.INVALID_OPTION,
                        "LaunchConfig is required for load balancer service");
            }
            ServiceUtil.injectBalancerLabelsAndHealthcheck((Map<Object, Object>) strategy.getLaunchConfig());
        }
        return strategy;
    }
}
|
<reponame>shrey-c/Happy-Tweeting-World
# Twitter API credentials used by the tweeting scripts.
# SECURITY NOTE(review): real secrets must never be committed to source
# control; load them from environment variables or an ignored config file.
consumer_key = 'qhwg88DbtCpCG2hQumqSKj3qp'
consumer_secret = '<KEY>'
access_token = '<KEY>'
access_token_secret = '<KEY>'
#!/bin/bash
# Abort on the first failing command.
set -e
########################################
# Run the cabal 'unit' test suite inside the pinned nix shell, reporting the
# wall-clock time of the whole run.
time nix-shell --run 'cabal new-test unit'
# "$@"
########################################
|
<filename>src/index.js
// Solves a 9x9 Sudoku grid in place (0 marks an empty cell) via backtracking
// and returns the same, now completed, matrix object.
module.exports = function solveSudoku(matrix) {
  backtrack(matrix);
  return matrix;
};
// Recursive backtracking step: find the next empty cell, try each digit that
// does not conflict, and recurse. Returns true once the grid is complete,
// false when no digit fits (caller then undoes its own placement).
function backtrack(matrix) {
  var empty = checkZero(matrix);
  if (!empty) {
    return true; // no empty cell left — solved
  }
  var r = empty.row;
  var c = empty.column;
  for (var candidate = 1; candidate <= 9; candidate++) {
    if (!checkNumber(matrix, r, c, candidate)) {
      continue;
    }
    matrix[r][c] = candidate;
    if (backtrack(matrix)) {
      return true;
    }
    matrix[r][c] = 0; // dead end — undo and try the next digit
  }
  return false;
}
// Scan the grid in reading order for the first empty cell (value 0).
// Returns {row, column} for that cell, or false when the grid is full.
function checkZero(matrix) {
  for (var r = 0; r < 9; r++) {
    for (var c = 0; c < 9; c++) {
      if (matrix[r][c] !== 0) {
        continue;
      }
      return { row: r, column: c };
    }
  }
  return false;
}
// Returns true when placing `number` at (row, col) violates no Sudoku rule:
// the digit must not already appear in the same row, column, or 3x3 box.
// Fixes in this revision: the row and column scans are merged into one loop,
// the duplicate `var i`/`var j` declarations are gone, and the parameters are
// no longer reassigned (the box origin gets its own variables).
function checkNumber(matrix, row, col, number) {
  // Row and column scan in a single pass.
  for (var k = 0; k < 9; k++) {
    if (matrix[row][k] === number || matrix[k][col] === number) {
      return false;
    }
  }
  // Top-left corner of the 3x3 box containing (row, col).
  var boxRow = Math.floor(row / 3) * 3;
  var boxCol = Math.floor(col / 3) * 3;
  for (var i = 0; i < 3; i++) {
    for (var j = 0; j < 3; j++) {
      if (matrix[boxRow + i][boxCol + j] === number) {
        return false;
      }
    }
  }
  return true;
}
<reponame>osak/mikutterd
# -*- coding: utf-8 -*-
require "#{File.dirname(__FILE__)}/extension"
require 'test/unit'
require 'mocha/setup'
require 'webmock/test_unit'
require 'pp'
require 'utils'
miquire :lib, 'delayer', 'test_unit_extensions', 'mikutwitter'
# Minimal stub of mikutter's Plugin entry point so the MikuTwitter code under
# test can invoke Plugin.call without loading the whole plugin system.
class Plugin
  def self.call(*args); end end
# Tests MikuTwitter's API call DSL ((@m/:statuses/:show).json(...)).
# HTTP traffic is stubbed with WebMock, so no real network access happens.
class TC_mikutwitter_api_call_support < Test::Unit::TestCase
  def setup
    # Drain any tasks left over from a previous test before creating a
    # fresh MikuTwitter instance.
    wait_all_tasks
    @m = MikuTwitter.new
  end

  # Drives the Delayer queue and joins all background threads until both are
  # empty, then asserts that nothing is left pending or running.
  def wait_all_tasks
    while !Delayer.empty? or !(Thread.list - [Thread.current]).empty?
      Delayer.run
      (Thread.list - [Thread.current]).each &:join
    end
    assert_equal true, Delayer.empty?
    assert_equal [], (Thread.list - [Thread.current])
  end

  must "get home timeline" do
    # Stub the statuses/show endpoint with a canned JSON fixture file.
    stub_request(:get, "http://api.twitter.com/1.1/statuses/show.json?id=154380989328662530").
      to_return(:status => 200, :body => file_get_contents(File.join(MIKUTWITTER_TEST_DIR, '154380989328662530.json')), :headers => {}) # !> previous definition of messages was herey
    result = exception = nil
    (@m/:statuses/:show).json(id: 154380989328662530).next{ |json| # !> assigned but unused variable - remain
      result = json
    }
    wait_all_tasks
    # The fixture must have been parsed into a Hash carrying the expected ids.
    assert_kind_of(Hash, result)
    assert_equal(154380989328662530, result[:id])
    assert_equal(15926668, result[:user][:id])
  end
end
|
package net.bambooslips.demo.jpa.service.Impl;
import net.bambooslips.demo.exception.CoreTeamNotFoundException;
import net.bambooslips.demo.exception.DebtFinancingNotFoundException;
import net.bambooslips.demo.exception.PostNotFoundException;
import net.bambooslips.demo.jpa.model.CoreTeam;
import net.bambooslips.demo.jpa.model.DebtFinancing;
import net.bambooslips.demo.jpa.model.DemandFinancial;
import net.bambooslips.demo.jpa.repository.CoreTeamRepository;
import net.bambooslips.demo.jpa.repository.DemandFinancialRepository;
import net.bambooslips.demo.jpa.service.CoreTeamService;
import net.bambooslips.demo.jpa.service.DemandFinancialService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
/**
* Created by Administrator on 2017/4/21.
*/
/**
 * JPA-backed implementation of {@link DemandFinancialService}: CRUD operations
 * plus lookups of {@link DemandFinancial} rows by the owning "entire" id.
 *
 * Fixes in this revision: copy-paste log/exception messages that referred to
 * CoreTeam/CompetitionAccount/Post now name DemandFinancial, and the generated
 * id is read from the entity returned by save() rather than the input entity.
 */
@Service
public class DemandFinancialServiceImpl implements DemandFinancialService {

    private static final Logger LOG = LoggerFactory.getLogger(DemandFinancialServiceImpl.class);

    @Resource
    private DemandFinancialRepository demandFinancialRepository;

    /**
     * Persists a new DemandFinancial entity.
     *
     * @param demandFinancial the entity to save
     * @return the generated id, or null if saving produced no entity
     */
    @Transactional
    @Override
    public Long create(DemandFinancial demandFinancial) {
        LOG.debug("Creating a new demandFinancial with information: " + demandFinancial);
        DemandFinancial result = demandFinancialRepository.save(demandFinancial);
        if (result != null) {
            // Use the instance returned by save(): that is the one guaranteed
            // to carry the generated key.
            return result.getDfId();
        }
        return null;
    }

    /**
     * Updates an existing DemandFinancial with the given field values.
     *
     * @param updated entity carrying the id and the new values
     * @return the managed, updated entity
     * @throws DebtFinancingNotFoundException declared by the service interface;
     *         in practice a PostNotFoundException (runtime) is raised when the
     *         id does not exist — kept for interface compatibility
     */
    @Transactional(rollbackFor = DebtFinancingNotFoundException.class)
    @Override
    public DemandFinancial update(DemandFinancial updated) throws DebtFinancingNotFoundException {
        LOG.debug("Updating DemandFinancial with information: " + updated);
        DemandFinancial demandFinancial = demandFinancialRepository.findOne(updated.getDfId());
        if (demandFinancial == null) {
            LOG.debug("No DemandFinancial found with id: " + updated.getDfId());
            throw new PostNotFoundException("DemandFinancial " + updated.getDfId() + " not found.");
        }
        // Copy the new values onto the managed instance; JPA dirty checking
        // flushes the change on commit.
        demandFinancial.update(updated);
        return demandFinancial;
    }

    /**
     * Deletes the DemandFinancial with the given id.
     *
     * @param id primary key of the entity to delete
     * @return the deleted entity
     * @throws DebtFinancingNotFoundException declared by the service interface;
     *         a PostNotFoundException (runtime) is raised when the id is unknown
     */
    @Transactional(rollbackFor = DebtFinancingNotFoundException.class)
    @Override
    public DemandFinancial delete(Long id) throws DebtFinancingNotFoundException {
        LOG.debug("Deleting DemandFinancial with id: " + id);
        DemandFinancial deleted = demandFinancialRepository.findOne(id);
        if (deleted == null) {
            LOG.debug("No DemandFinancial found with id: " + id);
            throw new PostNotFoundException("No DemandFinancial found with id: " + id);
        }
        demandFinancialRepository.delete(deleted);
        return deleted;
    }

    /**
     * Looks up the DemandFinancial belonging to the given entire-id and
     * returns its primary key.
     *
     * @param entireId owning entity id
     * @return the DemandFinancial id, or null when none exists
     */
    @Transactional(readOnly = true)
    @Override
    public Long findByEntireId(Long entireId) {
        LOG.debug("Finding demandFinancial by id: " + entireId);
        DemandFinancial demandFinancial = demandFinancialRepository.findByEntireId(entireId);
        return demandFinancial != null ? demandFinancial.getDfId() : null;
    }

    /**
     * Returns the DemandFinancial belonging to the given entire-id, or null
     * when none exists.
     */
    @Transactional(readOnly = true)
    @Override
    public DemandFinancial findListByEntireId(Long entireId) {
        LOG.debug("Finding demandFinancial by id: " + entireId);
        return demandFinancialRepository.findByEntireId(entireId);
    }
}
|
<reponame>amochin/robotframework-eggplant
from datetime import datetime
import inspect
import xmlrpc.client
import os
import robot.api.logger as log
from robot.libraries.BuiltIn import BuiltIn
# Feature flag: whether rectangles can be drawn onto screenshots.
# Disabled at import time when the optional Pillow dependency is missing.
draw_rects_on_screenshots = True
try:
    from PIL import Image, ImageDraw
except ModuleNotFoundError as e:
    log.warn(f"Pillow not found, drawing rectangles on screenshots is disabled: {e}."
             " Install using: 'pip install Pillow'.")
    draw_rects_on_screenshots = False
from . import utils
class EggplantExecutionException(Exception):
    """
    Raised for errors that occur inside an eggPlant "RunWithNewResults"
    execution (i.e. the parsed Result section reports a non-Success status).
    """
class EggplantLibDynamicCore:
def __init__(self, suite='', host='', port='', scripts_dir=''):
    """
    Each library import is bound to a single *eggPlant test suite* and to an *eggPlant instance running in the eggDrive mode*.
    No actual XML RPC connection is established during the import, so there is no must to start eggPlant in advance.
    The library needs a file access to the ``.suite`` folder in order to build the list of keywords (i.e. eggPlant ``.script`` files)
    and get their arguments and documentation.
    == Import examples ==
    | Library | EggplantLibrary | suite=E:/eggPlantScripts/SuiteOne.suite | host=http://127.0.0.1 | port=5400 |
    | Library | EggplantLibrary | # Import without parameters needs `EggplantLib.config` in the library package dir |
    == Import Parameters ==
    *Each of import parameter is optional and may stay unset during library import.*
    In this case library looks for it's value in the *config file* (`EggplantLib.config`) in the library package dir.
    If no value found in the config file (or no file exists), the default value is used.
    === suite ===
    Path to the eggPlant `.suite` file.
    - The default value is a first `.suite` file in the library folder.
    - You can also select another eggPlant suite for actual execution using `Open Session` and `Close Session` keywords.
    - If eggPlant runs on a remote server, input here a path from the library host, not relative to the server! And it must be reachable.
    === host ===
    Host name or IP address of the eggPlant instance running in the eggDrive mode (i.e. XMLRPC server).
    - The default value is `http://127.0.0.1`.
    - You can also select another host name for actual execution using `Set eggDrive Connection` keyword.
    - *Currently tested on localhost only!* It will be a miracle if it works just like this with a remote eggPlant server.
    === port ===
    Port of the eggPlant instance running in the eggDrive mode (i.e. XMLRPC server).
    - The default value is `5400`.
    - You can also select another port for actual execution using `Set eggDrive Connection` keyword.
    === scripts_dir ===
    Folder inside the eggPlant Suite, where all scripts are located.
    - The default value is `Scripts`.
    - Subfolders are supported.
    """
    # Parameter resolution order: explicit import argument > config file > default.
    # NOTE(review): read_from_config is not visible in this chunk; presumably it
    # returns '' when the key is absent — confirm in the rest of the module.
    params = {'host': 'http://127.0.0.1', 'port': '5400', 'scripts_dir': 'Scripts', 'suite': suite}  # defaults
    for p_key in params:
        if locals()[p_key] == '':  # if parameter value passed to the lib constructor is empty..
            value_from_config = self.read_from_config(p_key)
            if value_from_config != '':
                params[p_key] = value_from_config
        else:
            params[p_key] = locals()[p_key]  # otherwise set the passed argument value
    # The proxy is lazy: no connection is made until the first XML-RPC call.
    uri = params['host'] + ":" + params['port']
    self.eggplant_server = xmlrpc.client.ServerProxy(uri)
    # Now check if the eggPlant suite path is set
    self.eggplant_suite = params['suite']
    this_dir = os.path.abspath(os.path.dirname(__file__))
    # if suite path still not set, use first ".suite" dir in the library folder
    if self.eggplant_suite == '':
        for name in os.listdir(this_dir):
            if name.endswith(".suite"):
                self.eggplant_suite = os.path.abspath(os.path.join(this_dir, name))
                break
    # otherwise suite path is set, but make sure it's absolute
    else:
        if not os.path.isabs(self.eggplant_suite):
            self.eggplant_suite = os.path.abspath(os.path.join(this_dir, self.eggplant_suite))
    # TODO: if the eggPlant runs on a remote server, the test suite dir will be remote as well! How access it?
    # the default directory with keywords (=eggPlant scripts) is 'Scripts' inside the eggPlant test suite
    self.keywords_dir = os.path.join(self.eggplant_suite, params['scripts_dir'])
    # For video recording: path of the movie being recorded, if any.
    self.current_movie_path = None
    # Lazily-set flag so the eggPlant version is only checked once.
    self.eggplant_version_checked = False
# ---------- RobotFramework API implementation ------------
def get_keyword_names(self):
    """
    Build the complete keyword list for the Robot Framework dynamic library API.

    Combines the static keywords of this class (methods carrying the
    '@keyword' decorator, found via reflection) with all eggPlant '.script'
    files collected from the suite's scripts folder and its subfolders.

    :return: list of all collected keyword names
    """
    found = [attr for attr in dir(self) if self.get_static_keyword(attr)]
    # eggPlant scripts are appended in place to the same list
    self.get_scripts_from_folder(self.keywords_dir, found)
    log.debug("Found keywords: {}".format(found))
    return found
def run_keyword(self, name, args):
    """
    Runs the requested keyword with the specified arguments.
    For static keyword just the Python function is called using reflection.
    For eggPlant scripts the eggDrive command is built using 'RunWithNewResults' and sent to the eggPlant server,
    like 'RunWithNewResults "scriptName" arg1, "arg2", arg3,'.
    If a subscript from a subfolder is called (e.g. 'subfolder.script'), the dots (.) are replaced with slashes (/)
    String arguments with spaces inside will be surrounded with quotes (") automatically.
    eggPlant list syntax is supported - like 'script (1, "val2", "3", val4)'
    :return The 'ReturnValue' from the 'Result' value of the XML RPC response for eggPlant scripts
    or the keyword return value in case of static keywords.
    """
    # consider the requested keyword as static first
    _keyword = self.get_static_keyword(name)
    if _keyword:
        return _keyword(*args)
    else:  # otherwise it's an eggPlant script
        command = name
        if "." in command:  # dots in keyword names mark subfolders
            command = command.replace(".", "/")
        try:
            result = self.run_with_new_results(command, *args)
            return result
        # Failure reported in the parsed result (non-Success status)
        except EggplantExecutionException as e:
            # NOTE(review): log_ocr_debug_info / take_screenshot /
            # log_embedded_* are not visible in this chunk — presumably
            # defined further down or in a subclass; confirm.
            search_rect = self.log_ocr_debug_info(str(e))
            screenshot = self.take_screenshot(highlight_rectangle=search_rect, error_if_no_sut=False)
            if self.current_movie_path:
                self.log_embedded_video(self.current_movie_path, screenshot)
            elif screenshot:
                self.log_embedded_image(screenshot)
            # Re-wrap so the failing keyword name appears in the message
            raise Exception(f"{name}: {e}")
        # common eggDrive (transport-level) error
        except xmlrpc.client.Fault as e:
            log.error("{}: XMLRPC execution failure! Fault code:{}. Fault string: {}".format(name, e.faultCode,
                                                                                             e.faultString))
            screenshot = self.take_screenshot(error_if_no_sut=False)
            if self.current_movie_path:
                self.log_embedded_video(self.current_movie_path, screenshot)
            else:
                self.log_embedded_image(screenshot)
            raise e
        except Exception as e:
            log.error("Unknown error occurred! {}".format(e))
            # assuming we don't need a screenshot if it's not an egPlant exception
            # self.screenshot()
            raise e
# def get_keyword_tags(self, name):
# we'd need this function if keyword tags would be fetched otherwise as via last docs line.
# See http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#getting-keyword-tags
def get_keyword_documentation(self, name):
"""
Fetches the keyword documentation.
For static keywords it takes a standard Python doc (comment below method declaration).
For eggPlant scripts the top comment block of a script file is returned (without comment start/end characters).
:param name: keyword name
"""
result = None
static_keyword = self.get_static_keyword(name)
if static_keyword:
result = inspect.getdoc(static_keyword)
else:
if name in ['__init__', '__intro__']: # standard RF library specification, needed e.g. in RED
method = getattr(self, name, False)
if method:
result = inspect.getdoc(method)
else:
result = self.get_top_comments(name)
return result
def get_keyword_arguments(self, name):
    """
    Returns function signature - which arguments it can take.
    Allows RobotFramework to check the arguments accuracy in calling keywords even before trying to run them.
    Also quite useful for code completion in IDE plugins.
    :param name: keyword name to fetch arguments for
    :return: list of arguments
    """
    result_list = []
    # consider the requested keyword as static first
    static_keyword = self.get_static_keyword(name)
    if static_keyword:
        # Unpack the full Python signature: positional args, *varargs,
        # **kwargs, positional defaults, keyword-only args and their defaults.
        fullargs = inspect.getfullargspec(static_keyword)
        args = fullargs[0]
        varargs = fullargs[1]
        kwargs = fullargs[2]
        defaults = fullargs[3]
        kwonlyargs = fullargs[4]
        kwonlydefaults = fullargs[5]
        # add to usual positional args possible defaults; defaults align with
        # the TAIL of the args list, hence the reversed iteration
        if defaults is not None:
            i = len(args) - 1
            for default in reversed(defaults):
                args[i] += "=" + str(default)
                i -= 1
        # remove "self" from the args list manually
        if args[0] == "self":
            del (args[0])
        result_list = args
        # named args if available
        if varargs is not None:
            result_list.append("*" + varargs)
        # add defaults to named only arguments
        # NOTE(review): the append below is grouped under this if, so keyword-only
        # args without defaults are never reported — confirm intended behavior.
        if kwonlydefaults is not None:
            i = len(kwonlyargs) - 1
            for default in reversed(kwonlydefaults):
                kwonlyargs[i] += "=" + str(default)
                i -= 1
            result_list.append(kwonlyargs)
        # free argument assignment if available
        if kwargs is not None:
            result_list.append("**" + kwargs)
    else:  # otherwise it's an eggPlant script
        log.debug("Reading arguments from eggPlant script file: {}".format(name))
        with open(self.get_script_file_path(name), encoding="utf8") as f:
            # look for a line with params, it must be at the file top, but might appear after comments
            params_str_start = "params "
            # we don't want to scan all scripts to the very bottom, if there are no params at all!
            # params can be preceded only by comments and empty lines
            # so we skip all top empty lines and expect params at the first line after the comments - otherwise exit
            comment_lines_length = len(self.get_top_comments(name).splitlines())
            line_counter = 0
            for line in f:
                log.debug("Line: {}".format(line))
                stripped_line = utils.remove_unreadable_characters_at_start(line)
                if stripped_line == "":
                    continue  # skip top empty lines
                line_counter += 1
                if str.lower(stripped_line).startswith(params_str_start):  # look for "params " case insensitive
                    # found — split the declaration into individual arguments
                    args = []
                    split = stripped_line[len(params_str_start):].split(',')
                    for item in split:
                        arg_string = str(item).strip()
                        argument_tuple = (arg_string,)
                        # eggPlant uses "name:default" syntax for defaults
                        default_value_separator = ":"
                        if default_value_separator in arg_string:  # default value available
                            arg_name = arg_string.split(default_value_separator)[0]
                            arg_default_value = arg_string.split(default_value_separator)[1]
                            # try to convert the default value to one of supported data types
                            arg_default_value = utils.convert_to_num_bool_or_string(arg_default_value)
                            # so it's a string - remove possible double quotes
                            if isinstance(arg_default_value, str):
                                arg_default_value = arg_default_value.replace('"', '')
                            argument_tuple = (arg_name, arg_default_value)
                        args.append(argument_tuple)
                    result_list = args
                    break
                if line_counter > comment_lines_length + 1:
                    # +1 because of possible standalone comment closing bracket in the last line
                    break
    return result_list
def get_keyword_source(self, name):
result = None
static_keyword = self.get_static_keyword(name)
if static_keyword:
result_path = os.path.abspath(inspect.getsourcefile(static_keyword))
result_line = inspect.getsourcelines(static_keyword)[1]
result = f"{result_path}:{result_line}"
else:
if name in ['__init__', '__intro__']: # standard RF library specification, needed e.g. in RED
method = getattr(self, name, False)
if method:
result = os.path.abspath(inspect.getsourcefile(method)) + ":1"
else:
result = self.get_script_file_path(name) + ":1"
return result
# ---------- Helper methods ---------------------------------
def run_with_new_results(self, script, *args):
    """
    Builds an eggPlant command using 'RunWithNewResults' from the script and the arguments and executes it.
    :param script: the script or command to be run
    :param args: arguments. String arguments with spaces inside will be surrounded with quotes (") automatically.
    eggPlant list syntax is supported - like 'script (1, "val2", "3", val4)'
    :return: the execution result
    """
    command = "RunWithNewResults \"{}\",".format(script)
    log.debug("Now add parameters to the command string")
    for arg in args:
        log.debug("Processing argument: {}".format(arg))
        arg_f = arg
        if isinstance(arg_f, str):  # if the parameter is a string, it might need quotes..
            # convert all new line and return characters to eggPlant format
            arg_f = arg_f.replace("\n", "\" & return & \"")
            arg_f = arg_f.replace("\r", "\" & return & \"")
            if not (arg_f.startswith("(") and arg_f.endswith(")")):  # lists don't need quotes
                if not (arg_f.startswith("\"") and arg_f.endswith("\"")):  # no quotes for already quoted string
                    arg_f = "\"" + format(arg_f) + "\""
        if isinstance(arg, list):  # eggPlant doesn't understand single quotes (') around list values
            arg_f = utils.single_quote_to_double(arg)
        log.debug("Formatted argument: {}".format(arg_f))
        # arguments are comma-separated and the command keeps a trailing comma
        command = "{} {},".format(command, arg_f)
    result = self.execute(command, parse_result=True)
    return utils.auto_convert(
        result)  # The result is always a string so we should try to convert it first
def execute(self, command, parse_result=False, exception_on_failure=True):
    """
    Sends the requested command to the eggPlant server via XML RPC.
    The XML RPC response is parsed and logged.
    :param command: the eggPlant command in eggDrive format. Quotes have to be escaped.
    Examples: 'myScript arg1, arg2' or 'click \"someImage\"'.
    :param parse_result: if TRUE, the 'Result' value of the XML RPC response is parsed.
    If the Result's child 'Status' doesn't report SUCCESS, an Exception is raised -
    unless it's disabled in the optional parameter.
    :param exception_on_failure: works only if result parsing is enabled. If FALSE, no exception is raised
    in case the Result's child 'Status' doesn't report SUCCESS
    :return: If parsing the result is enabled, the 'ReturnValue' from the 'Result' value of the XML RPC response
    is returned. Otherwise the 'Result' value of the XML RPC response is returned directly,
    although it might be a result of a previous script.
    """
    log.info("Send command to eggPlant server: '{}'".format(command))
    returned_string = self.eggplant_server.execute(command)
    # example: {'Duration': 0.004000067711, 'Output': '28.01.19, 16:32:16\tconnect\t\tWindows_10_1:(null)\n',
    # 'Result': 'E:/screenshot.png', 'ReturnValue': ''}
    log.debug("Returned string: {}".format(returned_string))
    log.info("Execution duration: {}".format(returned_string['Duration']))
    output = returned_string['Output']
    log.info("Command output:")
    log.info(output, html=True)
    # Forward eggPlant 'LogWarning' lines as Robot Framework warnings
    warning_flag = 'LogWarning'
    output_lines = output.split('\n')
    for line in output_lines:
        if warning_flag in line:
            warning_text = line.split(warning_flag)[1].strip()
            log.warn(warning_text)
    result_section = returned_string['Result']
    return_value = result_section
    log.debug("Execution result: {}".format(result_section))
    if parse_result:
        log.debug("Parsing the execution result...")
        # Parse the execution result, if it's not a usual string - which means the RunWithNewResults command sent
        ''' Usually looks like this:
        {'Duration': 0.578999996185,
        'ErrorMessage': 'SomeError', # not available in case of success
        'Errors': 0.0,
        'Exceptions': 0.0,
        'LogFile': 'E:/eggPlantScripts/SuiteOne.suite/Results/getNotepadText/20190125_144557.016/LogFile.txt',
        'ReturnValue': 'JHello World',
        'RunDate': <DateTime '20190125T14:45:57' at 0x3615510>,
        'Status': 'Success',
        'Successes': 1.0,
        'Warnings': 0.0}
        '''
        # Report how long the eggDrive round trip took on top of the script itself
        eggdrive_command_duration = returned_string['Duration']
        eggplant_script_duration = result_section['Duration']
        if eggdrive_command_duration and eggplant_script_duration:
            execution_delay = float(eggdrive_command_duration) - float(eggplant_script_duration)
            log.debug(f"eggdrive execution delay: {execution_delay:.2f} seconds")
            if execution_delay > 30:
                log.warn(f"eggdrive execution delay too high (>30 s): {execution_delay:.2f} seconds")
                log.info("eggdrive execution delay - difference between eggdrive XML-RPC command duration "
                         "and eggPlant script duration")
        return_value = result_section['ReturnValue']
        status = result_section['Status']
        if status != "Success" and exception_on_failure:
            raise EggplantExecutionException(result_section['ErrorMessage'])
    log.info("Return value: {}".format(return_value))
    return return_value
def get_static_keyword(self, name):
"""
Returns the method object if a static keyword with the requested name exists in the library and None otherwise
:param name: the method name to look for
:return: the method object if found or False otherwise
"""
# '@keyword' decorator required
# the name must not start with '_'
method = getattr(self, name, False)
if method:
if inspect.ismethod(method) and hasattr(method, 'robot_name'):
return method
return None
def get_scripts_from_folder(self, folder, result_list=None, prefix=""):
"""
The function goes recursively through all subfolders and adds names of ".script" files to the result list.
The subfolder name is added as prefix following by a dot, e.g. "Subfolder.Myscript".
If there are several sufolders in the structure, all of them are added as prefix, separated by a dot, e.g.
"Subfolder.SubSubfolder.Myscript".
:param folder - the root folder to start the recursive search
:param result_list - optional, the list where new items are to append
:param prefix - the subfolder prefix, is used in the recursion only. No need to set from outside!
:return the list of all found ".script" items in all subfolders
"""
if result_list is None:
result_list = []
for item in os.listdir(folder):
current_prefix = prefix
if current_prefix != "":
current_prefix += "."
if item.endswith(".script"):
if not item.startswith('_'): # don't add technical/internal scripts
script_name = current_prefix + str(item.split('.')[0])
result_list.append(script_name)
else:
item_path = os.path.join(folder, item)
if os.path.isdir(item_path):
new_prefix = current_prefix + item
self.get_scripts_from_folder(item_path, result_list, new_prefix)
return result_list
def get_script_file_path(self, name):
"""
Creates a real eggPlant script file path from the RobotFramewok keyword syntax.
It replaces dots ('.') with slashes ('/') and builts a file path from the name.
:param name: script path in RobotFramework format, without '.script' extension. Example: 'subfolder.Script'
:return: real file path, e.g. '<eggPlantSuite>/Scripts/subfolder/Script.script'
"""
name_with_replaced_dots = name.replace(".", "/")
# the "." is replaced because of scripts in subfolders
filepath = os.path.join(self.keywords_dir, name_with_replaced_dots + ".script")
return filepath
    def get_top_comments(self, script_name):
        """
        Fetches all comments from the script file top.
        EggPlant single line and multi line comments are supported, they can also be combined.
        The comment block is considered close when a first non comment line is found.
        Requires Python 3.9+ (uses str.removeprefix / str.removesuffix).
        :param script_name: name of the eggPlant script in eggPlant format. Without '.script' extension.
        Not file path - it will be built from the script name automatically.
        Examples: 'myScript1', 'folder/subfolder/script'
        :return: the collected comment text with comment markers stripped (one output line per comment line)
        """
        line_comment_start_chars = ['//', '#', '--']
        multiline_comment_start = '(*'
        multiline_comment_end = '*)'
        log.debug("Reading top comments from eggPlant script file: {}".format(script_name))
        result = ""
        with open(self.get_script_file_path(script_name), encoding="utf8") as f:
            inside_multiline_comment = False
            for line in f:
                log.debug("Line: {}".format(line))
                # project helper; presumably strips whitespace/BOM-like junk - TODO confirm
                stripped_line = utils.remove_unreadable_characters_at_start(line)
                if stripped_line == "":  # empty lines at file start are allowed
                    continue
                if stripped_line.startswith(multiline_comment_end):  # in case "*)" stays alone in a last line
                    result += "\n"
                    inside_multiline_comment = False
                    continue  # maybe there are furthermore comments?
                if inside_multiline_comment:
                    # maybe it's the last comment line with '*)' in the end?
                    if stripped_line.endswith(multiline_comment_end):
                        inside_multiline_comment = False
                        stripped_line = stripped_line.removesuffix(multiline_comment_end)
                    result += stripped_line + "\n"
                    continue  # maybe there are furthermore comments?
                # now check for single line comments
                single_comment_found = False
                for comment_starter in line_comment_start_chars:
                    if stripped_line.startswith(comment_starter):
                        single_comment_found = True
                        result += stripped_line.removeprefix(comment_starter) + "\n"
                        break  # exit the inner loop through single line comment start characters
                if single_comment_found:
                    continue
                # try to check the multiline comments otherwise
                if stripped_line.startswith(multiline_comment_start):
                    stripped_line = stripped_line.removeprefix(multiline_comment_start)
                    # '(* ... *)' on a single line closes immediately; otherwise we
                    # stay inside the multiline comment for the following lines
                    if stripped_line.endswith(multiline_comment_end):
                        stripped_line = stripped_line.removesuffix(multiline_comment_end)
                    else:
                        inside_multiline_comment = True
                    result += stripped_line + "\n"
                    continue
                result = result[:result.rfind("\n")]  # remove the last new line character
                break  # no comment chars found - means we don't need to go further
        return result
    def log_ocr_debug_info(self, exception_text):
        """
        Performs OCR (eggPlant 'readText' command) in the restricted search rectangle extracted from the error message.
        The error message should be in standard eggplant format: 'No Text Found On Screen: <TEXT> ...
        Restricted Search Rectangle ((1431,654),(1581,854))'
        If no restricted search rectangle is found in the message (means full screen is searched), no OCR is performed.
        NOTE(review): the code actually triggers OCR whenever 'TEXT:' occurs in the message,
        even when no rectangle was extracted (search_rect stays '') - confirm which behavior is intended.
        :param exception_text: the eggPlant error message to parse
        :return: Restricted search rectangle extracted from the error message or an empty string if no rectangle found
        """
        search_rect_text = 'Restricted Search Rectangle '
        ocr_text = 'TEXT:'
        search_rect = ''
        if search_rect_text in exception_text:
            # everything after the marker is taken as the rectangle spec
            search_rect = exception_text[exception_text.index(search_rect_text) + len(search_rect_text):].strip()
        if ocr_text in exception_text:
            log.info("----> Performing OCR ReadText in the restricted search rectangle: {0}.\n"
                     "For results see the command output further in the log.\n-----\n"
                     .format(search_rect))
            self.execute("log ReadText{}".format(search_rect))
        return search_rect
def read_from_config(self, key, file_path=''):
"""
Returns value of the requested parameter from the config file.
No error thrown if file not found!
"""
if file_path == '':
dir_path = os.path.abspath(os.path.dirname(__file__))
file_path = os.path.join(dir_path, "EggplantLib.config")
if os.path.isfile(file_path):
with open(file_path, encoding="utf8") as f:
for line in f:
if line.startswith(key):
return line.split('=')[1].strip()
return ''
    def take_screenshot(self, rectangle='', file_path='', highlight_rectangle='', error_if_no_sut=True):
        """
        Captures a SUT screen image and saves it into the specified file.
        Returns the file path (relative to the current Robot Framework Output Dir) or None if no SUT available.
        Be default the full screen is captured, otherwise according to the specified rectangle.
        _Parameters:_
        - *rectangle* - optional, a list of 2 values (top left, bottom right) in eggPlant format
        indicating a rectangle to capture.
        Each value might be a list of two coordinates, an image name or an image location.
        Examples: *(67, 33), imagelocation("OtherCorner")* or *RxLocation, RxLocation + (140,100)*.
        See the eggPlan docs for details:
        http://docs.testplant.com/ePF/SenseTalk/stk-results-reporting.htm#capturescreen-command
        - *file_path* - optional, relative to the current Robot Framework Output Dir.
        If not specified, the default name is used.
        - *highlight_rectangle* - optional, coordinates of a rectangle drawn onto the saved image.
        - *error_if_no_sut* - normally an error is reported if no SUT connection available for taking a screenshot.
        However, this may be disabled (then a warning is logged and None is returned).
        """
        # Check for a valid file_path and make sure the directories exist
        target_path = file_path
        if not file_path:
            # timestamped default name under 'Screenshots' (Windows-style separator)
            target_path = "Screenshots\\Screenshot__{0}.png".format(datetime.now().strftime('%Y-%m-%d__%H_%M_%S__%f'))
        if target_path and os.path.isabs(target_path):
            raise RuntimeError("Given file_path='%s' must be relative to Robot output dir" % target_path)
        # image output file path is relative to robot framework output
        full_path = os.path.join(BuiltIn().get_variable_value("${OUTPUT DIR}"), target_path)
        if not os.path.exists(os.path.split(full_path)[0]):
            os.makedirs(os.path.split(full_path)[0])
        rectangle_string = ""
        rectangle_log_msg = "Full screen"
        if rectangle:
            rectangle_log_msg = rectangle
            rectangle_string = "Rectangle: ({})".format(rectangle)
        log.info(f"Screenshot rectangle: {rectangle_log_msg}")
        try:
            # Capture and save the image of the whole SUT screen
            self.eggplant_server.execute("CaptureScreen(Name:\"{0}\", {1})".format(full_path, rectangle_string))
            if highlight_rectangle:
                log.info(highlight_rectangle)
                self.draw_rect_on_image(full_path, highlight_rectangle)
        except xmlrpc.client.Fault as e:
            # Only the "no SUT" fault gets special handling.
            # NOTE(review): any other xmlrpc fault is caught here and silently
            # ignored (the if-branch does nothing for it) - confirm intended.
            expected_error_message = "unable to capture screen: no connection available from which to capture"
            if expected_error_message in e.faultString.lower():
                log.debug(f"Error message: {e}")
                log_msg = "Unable to take screenshot - no SUT connection available"
                if error_if_no_sut:
                    raise EggplantExecutionException(log_msg)
                else:
                    log.warn(log_msg)
                    target_path = None
        return target_path
def draw_rect_on_image(self, image_file, coordinates, color='red'):
log.debug("Draw a {} rectangle with coordinates {} for image {}".format(color, coordinates, image_file))
if not draw_rects_on_screenshots:
log.debug("Drawing rectangles disabled, check if Pillow is installed")
return
coord_str = str(coordinates)
coord_str = coord_str.replace("(", "")
coord_str = coord_str.replace(")", "")
coord_str = coord_str.replace("[", "")
coord_str = coord_str.replace("]", "")
coord_str = coord_str.replace("'", "")
coord_str = coord_str.replace(" ", "")
coords = []
for i in list(coord_str.split(',')):
coords.append(int(i))
log.debug(coord_str)
log.debug(coords)
im = Image.open(image_file)
draw = ImageDraw.Draw(im)
draw.rectangle(coords, outline=color, width=3)
im.save(image_file)
    def log_embedded_image(self, image_path):
        """
        Writes a link to the image file into RF log - so that it appears directly in the HTMl with a small preview.
        The injected <td>/<tr> markup adds a clickable 350px-high preview row to the log table.
        :param image_path: image path as it should appear in the HTML log (so the link resolves there)
        """
        image_name = os.path.basename(image_path)
        log.info(html=True, msg=f'Screenshot: <a href="{image_path}">{image_name}</a>'
                                f'<td></td></tr><tr><td colspan="3"><a href="{image_path}">'
                                f'<img src="{image_path}" height="350px"></a></td></tr>')
    def log_embedded_video(self, video_path, preview_image_path=None):
        """
        Writes a link to the video file into RF log - so that it appears as an embedded video player.
        :param video_path: video path as it should appear in the HTML log
        :param preview_image_path: optional poster image shown before playback; also logged as a link
        """
        preview = ""
        if preview_image_path:
            preview_image_name = os.path.basename(preview_image_path)
            log.info(html=True, msg=f'Screenshot: <a href="{preview_image_path}">{preview_image_name}</a>')
            preview = f'poster="{preview_image_path}"'
        video_name = os.path.basename(video_path)
        log.info(f'Video file path: <a href="{video_path}">{video_name}</a>', html=True)
        log.info(html=True, msg=f'<tr><video {preview} height="350px" controls> <source src="{video_path}"'
                                f' type="video/mp4">Browser does not support video.</video></tr>')
|
#!/bin/bash
# Run the project linter and, only if it succeeds, build the Docker image
# tagged 'rra-analysis' from the current directory.
npm run lint && docker build -t rra-analysis .
/****************************** Module Header ******************************\
* Module Name: ServiceBase.h
* Project: CppWindowsService
* Copyright (c) Microsoft Corporation.
*
* Provides a class for performing logging according to the output types available in
* systemd unit files.
*
* This source is subject to the Microsoft Public License.
* See http://www.microsoft.com/en-us/openness/resources/licenses.aspx#MPL.
* All other rights reserved.
*
* THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
* EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
\***************************************************************************/
#ifndef __SYSTEMD_JOURNAL_H__
#define __SYSTEMD_JOURNAL_H__
#include <ios>
#include <ostream>
#include <fstream>
#include <iostream>
#include "windows.h"
// StandardOut and StandardError are specified as type and maybe name
// most of these will have significant differences in their semantics from unix/linux.
// Mirrors systemd's StandardOutput=/StandardError= unit-file settings; the
// values marked below need an extra argument (a path or a name).
enum OUTPUT_TYPE {
    OUTPUT_TYPE_INVALID,
    OUTPUT_TYPE_INHERIT,
    OUTPUT_TYPE_NULL,
    OUTPUT_TYPE_TTY,
    OUTPUT_TYPE_JOURNAL,
    OUTPUT_TYPE_SYSLOG,
    OUTPUT_TYPE_KMSG,
    OUTPUT_TYPE_JOURNAL_PLUS_CONSOLE,
    OUTPUT_TYPE_SYSLOG_PLUS_CONSOLE,
    OUTPUT_TYPE_KMSG_PLUS_CONSOLE,
    OUTPUT_TYPE_FILE, // requires a path
    OUTPUT_TYPE_SOCKET,
    OUTPUT_TYPE_FD // requires a name
};
// For output type null
template <class cT, class traits = std::char_traits<cT> >
class basic_nullstreambuf : public std::basic_streambuf<cT, traits>
{
public:
basic_nullstreambuf() {}
~basic_nullstreambuf() {}
typename traits::int_type overflow(typename traits::int_type c) override
{
std::cerr << "null stream" << std::endl;
return traits::not_eof(c); // indicate success
}
};
template <class cT, class traits = std::char_traits<cT> >
class basic_onullstream : public std::basic_ostream<cT, traits>
{
basic_nullstreambuf<cT> buf;
public:
basic_onullstream()
: std::ostream(&buf)
, buf()
{
}
};
typedef basic_onullstream<char> onullstream;
typedef basic_onullstream<wchar_t> wonullstream;
// Stream buffer that assembles (narrowed) output into lines and fans each
// completed line out to an always-on std::wcerr echo plus, when enabled, a
// "console" log file and/or a Win32 file handle. The eventlog flag is stored
// but not acted upon in this class.
template <class cT, class traits = std::char_traits<cT> >
class basic_journalstreambuf: public std::basic_streambuf<cT, traits>
{
public:
    basic_journalstreambuf() {
        m_current = m_buffer;
        m_filehandle = INVALID_HANDLE_VALUE;
        m_console_output = false;
        m_file_output = false;
        m_eventlog_output = false;
        // NOTE(review): "console" output goes to a hard-coded log file path,
        // not to an actual console - confirm this is intentional.
        m_console = std::wofstream("c:/var/log/services.log", std::ofstream::app);
    };

    // NOTE(review): declared with const&&, so the source cannot actually be
    // moved from; buffer contents and the handle are copied, which leaves the
    // HANDLE shared between both objects rather than transferred.
    basic_journalstreambuf(const basic_journalstreambuf<cT, traits> &&from):
        std::basic_streambuf<cT, traits>(std::move(from)) {
        size_t offset = from.m_current-from.m_buffer;
        memcpy(this->m_buffer, from.m_buffer, sizeof(from.m_buffer));
        this->m_current = this->m_buffer+offset;
        this->m_filehandle = from.m_filehandle;
        this->m_console_output = from.m_console_output;
        this->m_file_output = from.m_file_output;
        this->m_eventlog_output = from.m_eventlog_output;
        m_console = std::wofstream("c:/var/log/services.log", std::ofstream::app);
    };

    ~basic_journalstreambuf() { };

    // Sink configuration; handle ownership stays with the caller.
    void set_handle(HANDLE handle) { m_filehandle = handle; };
    void set_output_console(boolean output_to_console) { m_console_output = output_to_console; };
    void set_output_logfile(boolean output_to_log) { m_file_output = output_to_log; };
    void set_output_eventlog(boolean output_to_eventlog) { m_eventlog_output = output_to_eventlog; };

    // Accumulate characters (narrowed to char) until '\n', then flush the
    // completed line to the enabled sinks. Characters beyond MAX_BUFFER_SIZE
    // within one line are silently dropped.
    typename traits::int_type overflow(typename traits::int_type wc) override
    {
        char c = (char) wc;
        if (m_current < m_bufferlimit) {
            *m_current++ = c;
        }
        if (c == '\n') {
            *m_current = '\0';
            std::wcerr << m_buffer << std::endl;
            if (m_console_output) {
                m_console << m_buffer;
                m_console.flush();
            }
            if (m_file_output && m_filehandle != INVALID_HANDLE_VALUE) {
                // NOTE(review): lpNumberOfBytesWritten is NULL; per Win32 docs
                // that is only allowed for overlapped handles - confirm.
                DWORD result = WriteFile(m_filehandle, m_buffer, (m_current-m_buffer)*sizeof(m_buffer[0]), NULL, NULL);
                FlushFileBuffers(m_filehandle);
            }
            m_current = m_buffer;
        }
        return traits::not_eof(c); // indicate success
    };

    // Flush any partial (newline-less) line to the sinks.
    virtual int sync() override {
        if (m_current > m_buffer) {
            *m_current = '\0';
            std::wcerr << L"journal stream: sync :" << m_buffer << std::endl;
            if (m_console_output) {
                m_console << m_buffer;
            }
            if (m_file_output && m_filehandle != INVALID_HANDLE_VALUE) {
                DWORD result = WriteFile(m_filehandle, m_buffer, (m_current-m_buffer)*sizeof(m_buffer[0]), NULL, NULL);
            }
            m_current = m_buffer;
        }
        return 0;
    };

protected:
    static const int MAX_BUFFER_SIZE = 2048;
    friend class wojournalstream;
    std::wofstream m_console;    // "console" sink (actually a log file, see ctor)
    HANDLE m_filehandle;         // optional Win32 file sink
    boolean m_console_output;    // sink enable flags
    boolean m_file_output;
    boolean m_eventlog_output;   // stored but never consulted in this class

private:
    char m_buffer[MAX_BUFFER_SIZE+1]; // Added one for final null
    char *m_current;                  // next free slot in m_buffer
    const char *m_bufferlimit = m_buffer + MAX_BUFFER_SIZE;
};
class wojournalstream:
public std::basic_ostream<wchar_t> {
basic_journalstreambuf<wchar_t> buf;
public:
wojournalstream():
std::basic_ostream<wchar_t>(&buf),
buf()
{
};
virtual ~wojournalstream() {
if (m_filehandle != INVALID_HANDLE_VALUE) {
CloseHandle(m_filehandle);
m_filehandle = INVALID_HANDLE_VALUE;
}
}
wojournalstream(std::wstring output_type, std::wstring path );
HANDLE GetHandle() { return m_filehandle; };
virtual boolean is_open() {
return m_filehandle != INVALID_HANDLE_VALUE;
};
virtual void open(const std::wstring output_type, const std::wstring path);
virtual void close() {
if (m_filehandle != INVALID_HANDLE_VALUE) {
CloseHandle(m_filehandle);
m_filehandle = INVALID_HANDLE_VALUE;
}
m_output_type = OUTPUT_TYPE_NULL;
buf.set_handle(INVALID_HANDLE_VALUE);
buf.set_output_console(false);
buf.set_output_logfile(false);
buf.set_output_eventlog(false);
};
private:
HANDLE m_filehandle;
enum OUTPUT_TYPE m_output_type;
};
#endif
|
package components_test
import (
"github.com/mh-cbon/mdl-go-components/components"
"testing"
)
// TestDataTable builds a two-row table with four typed header columns
// (numeric id, non-numeric name, phone-hidden sku, link-icon edit) and
// checks the rendered HTML against the expected regexp fragments via the
// shared validateComponent helper.
func TestDataTable(t *testing.T) {
	var header *components.DataTableHeader
	var row *components.DataTableRow

	input := components.NewDataTable()

	// Header definition: key, label, then per-column presentation flags.
	header = input.SetHeader("id", "id")
	header.SetNumeric(true)
	header = input.SetHeader("name", "name")
	header.SetNumeric(false)
	header = input.SetHeader("sku", "sku")
	header.SetHidePhone(true)
	header = input.SetHeader("edit", "")
	header.SetLinkIcon("edit")

	// First data row; SetValue sets the row's value attribute.
	row = input.AddRow()
	row.SetValue("1")
	row.SetCell("id", "1")
	row.SetCell("name", "name1")
	row.SetCell("sku", "sku1")
	row.SetCell("edit", "http://google.com")

	// Second data row.
	row = input.AddRow()
	row.SetValue("2")
	row.SetCell("id", "2")
	row.SetCell("name", "name2")
	row.SetCell("sku", "sku2")
	row.SetCell("edit", "http://google.com")

	// Regexp fragments expected somewhere in the rendered markup.
	expectations := []string{
		`<table\s+class="mdl-data-table custom-js-data-table custom-data-table mdl-shadow--2dp\s+"\s+>`,
		`<th\s+class=""\s+>\s+id\s+</th>`,
		`<th\s+class="mdl-data-table__cell--non-numeric"\s+>\s+name\s+</th>`,
		`<th\s+class="mdl-cell--hide-phone"\s+>\s+sku\s+</th>`,
		`<tr\s+class=""\s+value="1">`,
		`<td\s+class=""\s+>\s+1\s+</td>`,
		`<td\s+class="mdl-data-table__cell--non-numeric"\s+>\s+name1\s+</td>`,
		`<td\s+class="mdl-cell--hide-phone"\s+>\s+sku1\s+</td>`,
		`<td\s+class=""\s+>\s+<a href="http://google.com" target="_blank">\s+<i class="material-icons">edit</i>\s+</a>\s+</td>`,
	}
	validateComponent(t, input, expectations)
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2019 The Zeo Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# CI environment definition for cross-compiling macOS binaries from Linux.
export LC_ALL=C.UTF-8

export CONTAINER_NAME=ci_macos_cross
export HOST=x86_64-apple-darwin16   # cross-compilation target triple
# Build dependencies installed inside the CI container.
export PACKAGES="cmake imagemagick libcap-dev librsvg2-bin libz-dev libbz2-dev libtiff-tools python3-dev python3-setuptools"
export OSX_SDK=10.14
# Cross-built macOS binaries can't run on the Linux CI host, so tests are skipped.
export RUN_UNIT_TESTS=false
export RUN_FUNCTIONAL_TESTS=false
export GOAL="deploy"
export ZEO_CONFIG="--enable-gui --enable-reduce-exports --enable-werror"
|
/* tslint:disable */
/* eslint-disable */
/**
 * WaniKani
 * WaniKani: The API
 *
 * OpenAPI spec version: 20170710.0
 *
 * NOTE: generated from the OpenAPI spec - edit the spec, not this file.
 */

import { BaseResource } from './base-resource';

/**
 * A spaced-repetition system resource returned by the WaniKani API.
 * @export
 * @interface SpacedRepetitionSystem
 */
export interface SpacedRepetitionSystem extends BaseResource {
    /**
     * Resource payload.
     * NOTE(review): typed `any` although the doc tag says
     * SpacedRepetitionSystemData - presumably the generator could not
     * resolve that schema; confirm before tightening the type.
     * @type {SpacedRepetitionSystemData}
     * @memberof SpacedRepetitionSystem
     */
    data: any;
}
|
#!/usr/bin/env bash
# Download and unpack the JESC (Japanese-English Subtitle Corpus) split data
# into ./db, skipping the download if it is already present.

mkdir -p db
cd db ### Note: the rest of this script is executed from the directory 'db'.

# JESC database (the old comment wrongly said TED-LIUM):
if [ ! -e split ]; then
  # BUG FIX: message said "JSEC"; the corpus is JESC.
  echo "$0: downloading JESC data (it won't re-download if it was already downloaded.)"
  # the following command won't re-get it if it's already there
  # because of the --continue switch.
  wget --continue --no-check-certificate https://nlp.stanford.edu/projects/jesc/data/split.tar.gz || exit 1
  # fail explicitly if the archive is corrupt or incomplete
  tar xf "split.tar.gz" || exit 1
else
  echo "$0: not downloading or un-tarring split.tar.gz because it already exists."
fi
exit 0
|
export function getAvg(score): number {
return score.reduce(function (p, c) {
return p + c;
}) / score.length;
} |
import firebase from "./firebaseApp";
/**
 * Check whether an event session document exists in Firestore.
 * The id is lowercased because session documents are stored under
 * lowercase ids (see registerToEvent below).
 * @param {string} sessionId - event session id (any casing)
 * @returns {Promise<boolean>} true if eventSessionsDetails/<id> exists
 */
export const conferenceExists = async (sessionId) => {
  let docRef = firebase
    .firestore()
    .collection("eventSessionsDetails")
    .doc(sessionId.toLowerCase());
  let docSnapshot = await docRef.get();
  return docSnapshot.exists;
};
/**
 * Purely local (per-browser) check whether this browser already registered
 * for the given session: registerToEvent drops a marker under
 * '/veertly/<sessionId>' in localStorage. The server-side Firestore check
 * is kept below, commented out.
 * @param {string} sessionId - lowercase event session id
 * @returns {boolean} true if a localStorage registration marker exists
 */
export const userRegisteredEvent = (sessionId) => {
  //, userId, email) => {
  let storageKey = "/veertly/" + sessionId;
  if (localStorage && localStorage.getItem(storageKey)) {
    return true;
  }
  return false;

  // let db = firebase.firestore();
  // debugger;
  // if (userId) {
  //   let found = false;
  //   let snapshot = await db
  //     .collection("eventSessionsRegistrations")
  //     .doc(sessionId)
  //     .collection("registrations")
  //     .where("userId", "==", userId)
  //     .get();
  //   debugger;
  //   await snapshot.forEach((doc) => {
  //     debugger;
  //     found = true;
  //   });
  //   if (found) {
  //     debugger;
  //     return true;
  //   }
  //   if (email) {
  //     let found = false;
  //     debugger;
  //     let snapshot = await db
  //       .collection("eventSessionsRegistrations")
  //       .doc(sessionId)
  //       .collection("registrations")
  //       .where("email", "==", email)
  //       .get();
  //     debugger;
  //     await snapshot.forEach((doc) => {
  //       debugger;
  //       found = true;
  //     });
  //     if (found) {
  //       debugger;
  //       return true;
  //     }
  //   }
  //   return false;
};
/**
 * Register the current user/browser for an event session.
 * Validates that the session exists and that this browser has not already
 * registered, writes a registration document (id: "<timestamp>-<userId>"),
 * then marks the session as registered in localStorage.
 * @throws {Error} if the session does not exist or was already registered here
 */
export const registerToEvent = async (eventSession, userId, userDetails) => {
  let sessionId = eventSession.id.toLowerCase();

  // BUG FIX: conferenceExists() is async - without await the returned Promise
  // was always truthy, so this "event doesn't exist" guard could never fire.
  if (!(await conferenceExists(sessionId))) {
    throw new Error("Event doesn't exist...");
  }
  if (userRegisteredEvent(sessionId)) {
    //, userId, userDetails.email)) {
    throw new Error("You have already registered to this event.");
  }

  // if (userId || (userDetails.email && userDetails.email.trim() !== "")) {
  //   if (await userRegisteredEvent(sessionId, userId, userDetails.email)) {
  //     throw new Error("User is already registered");
  //   }
  // }

  let { title, originalSessionId, eventBeginDate } = eventSession;

  let db = firebase.firestore();
  let timestamp = new Date().getTime();
  await db
    .collection("eventSessionsRegistrations")
    .doc(sessionId)
    .collection("registrations")
    .doc("" + timestamp + "-" + userId)
    .set({
      ...userDetails,
      userId,
      registrationDate: firebase.firestore.FieldValue.serverTimestamp(),
      title,
      originalSessionId,
      eventBeginDate: eventBeginDate ? eventBeginDate : null
    });
  localStorage.setItem("/veertly/" + sessionId, true);
};
/**
 * Server-side check (unlike the localStorage-based userRegisteredEvent):
 * does a registration document with id == userId exist for this session?
 * NOTE(review): registerToEvent writes registration docs with the id
 * "<timestamp>-<userId>", so this lookup by plain userId may never match -
 * confirm which id scheme is correct.
 * @returns {Promise<boolean>}
 */
export const isUserRegisteredToEvent = async (sessionId, userId) => {
  let docRef = firebase
    .firestore()
    .collection("eventSessionsRegistrations")
    .doc(sessionId)
    .collection("registrations")
    .doc(userId);
  let docSnapshot = await docRef.get();
  return docSnapshot.exists;
};
|
<gh_stars>0
package com.myprojects.marco.firechat.rx;
import android.support.annotation.NonNull;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.storage.OnProgressListener;
import com.google.firebase.storage.StorageReference;
import com.google.firebase.storage.UploadTask;
import rx.Observable;
import rx.Subscriber;
/**
 * Created by marco on 09/09/16.
 *
 * Bridges a Firebase Storage byte upload into an RxJava 1 Observable:
 * subscribing starts putBytes() on the given StorageReference; the
 * Observable emits {@code returnValue} once on success (then completes)
 * or forwards the upload failure via onError.
 *
 * @param <T> type of the payload handed to the upload (cast to byte[] in call())
 * @param <U> type of the value emitted to subscribers on success
 */
public class UploadTaskOnSubscribe<T,U> implements Observable.OnSubscribe<U> {

    private final T value;                            // payload; must actually be a byte[] (see call())
    private final StorageReference storageReference;  // upload destination in Firebase Storage
    private final U returnValue;                      // value emitted on successful upload

    UploadTaskOnSubscribe(T value, StorageReference storageReference, U returnValue) {
        this.value = value;
        this.storageReference = storageReference;
        this.returnValue = returnValue;
    }

    @Override
    public void call(Subscriber<? super U> subscriber) {
        // NOTE(review): value is unconditionally cast to byte[]; a non-byte[]
        // T throws ClassCastException at subscribe time - confirm callers.
        UploadTask uploadTask = storageReference.putBytes((byte[])value);
        uploadTask.addOnFailureListener(new RxFailureListener<>(subscriber))
                .addOnSuccessListener(new RxSuccessListener<>(subscriber,returnValue));
    }

    /** Forwards a successful upload as onNext(successValue) followed by onCompleted(). */
    private static class RxSuccessListener<T> implements OnSuccessListener {
        private final Subscriber<? super T> subscriber;
        private final T successValue;

        RxSuccessListener(Subscriber<? super T> subscriber, T successValue) {
            this.subscriber = subscriber;
            this.successValue = successValue;
        }

        @Override
        public void onSuccess(Object o) {
            subscriber.onNext(successValue);
            subscriber.onCompleted();
        }
    }

    /** Forwards an upload failure to the subscriber as onError. */
    private static class RxFailureListener<T> implements OnFailureListener {
        private final Subscriber<? super T> subscriber;

        RxFailureListener(Subscriber<? super T> subscriber) {
            this.subscriber = subscriber;
        }

        @Override
        public void onFailure(@NonNull Exception e) {
            subscriber.onError(e);
        }
    }
}
|
class DataProcessor:
    """Holds CSV-style clustering rows and exposes them as an id->label dict."""

    def __init__(self):
        # First element is the CSV header row; data rows follow as
        # 'item_id,label' strings.
        # BUG FIX: the previous version stored the three column names as
        # separate list elements, so get_cluster_labels() crashed with a
        # ValueError when unpacking rows that contain no comma.
        self._cluster_result = ["item_id,label"]

    def get_cluster_labels(self):
        """Return {item_id: label} built from every data row after the header."""
        cluster_labels = {}
        for row in self._cluster_result[1:]:
            # split once so labels containing commas survive intact
            item_id, label = row.split(',', 1)
            cluster_labels[item_id] = label
        return cluster_labels


# Example usage
processor = DataProcessor()
print(processor.get_cluster_labels())
/// Render a byte slice as lowercase two-digit hex values separated by
/// single spaces, e.g. `[10, 255]` -> `"0a ff"`; an empty slice yields `""`.
fn byte_array_to_hex_string(bytes: &[u8]) -> String {
    bytes
        .iter()
        .map(|byte| format!("{:02x}", byte))
        .collect::<Vec<String>>()
        .join(" ")
}
fn main() {
    // Demonstrate the conversion on a few sample bytes.
    let bytes = [10, 25, 200, 255];
    let hex_string = byte_array_to_hex_string(&bytes);
    println!("{}", hex_string); // Output: 0a 19 c8 ff
}
'use strict';

// Meteor package manifest for the Steedos "autoform filesize" form control.
Package.describe({
  name: 'steedos:autoform-filesize',
  summary: 'Steedos Autoform filesize',
  version: '0.0.1',
  git: '',
  documentation: null
});

Package.onUse(function(api) {
  // Minimum Meteor release this package supports.
  api.versionsFrom('1.2.1');
  // Client-side framework dependencies.
  api.use(['ecmascript', 'templating', 'underscore', 'less', 'reactive-var'], 'client');
  api.use('coffeescript@1.11.1_4');
  api.use('aldeed:autoform@5.8.0'); // the autoform framework this control plugs into
  // Template markup and its CoffeeScript logic, client only.
  api.addFiles([
    'autoform-filesize.html',
    'autoform-filesize.coffee'
  ], 'client');
});
|
#!/bin/bash
# Build and install the Apache mod_jk (Tomcat connector) module from source,
# then restart httpd. Any failing command aborts the script (set -e).
set -e

mkdir -p /opt/mod_jk/
cd /opt/mod_jk
wget http://apache.mirrors.spacedump.net/tomcat/tomcat-connectors/jk/tomcat-connectors-1.2.42-src.tar.gz
tar -xzvf tomcat-connectors-1.2.42-src.tar.gz
cd tomcat-connectors-1.2.42-src/native
./configure --with-apxs=/usr/bin/apxs --enable-api-compatibility
make
libtool --finish /usr/lib64/httpd/modules
cp ./apache-2.0/mod_jk.so /usr/lib64/httpd/modules/
make install
mkdir -p /var/run/mod_jk
chown apache:apache /var/run/mod_jk
/etc/init.d/httpd restart
# BUG FIX: the script used to run 'cd /etc/httpd/conf/workers.properties',
# i.e. cd into a regular file, which always fails and - combined with
# 'set -e' - aborted the script with a nonzero exit status. The line
# belonged to the commented-out config-sync block below, so it is kept
# only as a comment.
#cd /etc/httpd/conf/workers.properties
#SOURCE=files/workers.properties
#DESTINATION=/etc/httpd/conf/workers.properties
#if[[-d $DESTINATION ]]; then
#sudo rsync -v\
#$SOURCE $DESTINATION
#touch /etc/httpd/conf.d/mod_jk.conf
#SOURCE="files/mod_jk.conf"
#DESTINATION=/etc/httpd/conf.d/mod_jk.conf
#if[[-d $DESTINATION ]]; then
#sudo rsync -v\
#$SOURCE $DESTINATION
#touch /etc/httpd/conf.d/ssl.conf
#SOURCE="files/ssl.conf"
#DESTINATION=/etc/httpd/conf.d/ssl.conf
#if[[-d $DESTINATION ]]; then
#sudo rsync -v\
#$SOURCE $DESTINATION
#/etc/init.d/httpd restart
|
#!/bin/bash
# Read text (e.g. mail headers) on stdin and print per-year counts of
# 'Date:' header lines: keep 'Date:' lines, extract the trailing 4-digit
# year (grep -Eo keeps one preceding character, usually the space), sort,
# count duplicates with uniq -c, then swap the columns so the output is
# '<year> <count>' per line.
grep '^Date:' | grep -Eo ".[0-9]{4}$" | sort -n | uniq -c | awk '{print $2,$1}'
|
import p5 from "p5";
// A triangle defined by three p5 vectors; isColor2 selects which of the
// sketch's two palette colors it uses.
export type Triangle = {
  isColor2: boolean,
  v1: p5.Vector,
  v2: p5.Vector,
  v3: p5.Vector,
}

// Draw the triangle using the sketch's current fill/stroke settings.
export const drawTriangle = (p: p5, tri: Triangle) => {
  p.triangle(tri.v1.x, tri.v1.y, tri.v2.x, tri.v2.y, tri.v3.x, tri.v3.y);
}

// Draw only two of the three edges (v3->v1 and v1->v2), leaving v2->v3 open.
export const drawPartialOutline = (p: p5, tri: Triangle) => {
  p.line(tri.v3.x, tri.v3.y, tri.v1.x, tri.v1.y);
  p.line(tri.v1.x, tri.v1.y, tri.v2.x, tri.v2.y);
}
|
class TreeNode:
    """A binary-tree node holding a value and optional left/right children."""

    def __init__(self, value=0, left=None, right=None):
        self.value = value  # payload stored in this node
        self.left = left    # left child (TreeNode or None)
        self.right = right  # right child (TreeNode or None)

    def getLeftChild(self):
        """Return the left child, or None if absent."""
        return self.left

    def getRightChild(self):
        """Return the right child, or None if absent."""
        return self.right
class Queue:
    """A simple FIFO queue backed by a list.

    Note: insert(0, item) makes enqueue O(n); collections.deque would be
    O(1), but this class keeps the list-based teaching implementation.
    """

    def __init__(self):
        self.items = []  # front of the queue is the END of the list

    def enqueue(self, item):
        """Add an item at the back of the queue."""
        self.items.insert(0, item)

    def dequeue(self):
        """Remove and return the item at the front of the queue."""
        return self.items.pop()

    def isEmpty(self):
        """Return True when the queue holds no items."""
        return len(self.items) == 0
def isCompleteBinaryTree(root):
    """
    Return True if the binary tree rooted at `root` is complete: every level
    is fully filled except possibly the last, which is filled left-to-right.

    BFS over the tree; once any missing child (a "gap") has been seen, no
    later node in level order may have a child.
    BUG FIX: the previous version only flagged a gap for a missing *right*
    child, so a node with a right child but no left child was wrongly
    accepted; it also crashed on an empty tree (root is None).
    """
    if root is None:
        return True  # an empty tree is trivially complete
    pending = deque([root])
    gap_seen = False
    while pending:
        node = pending.popleft()
        # Inspect children strictly left-to-right (level order).
        for child in (node.getLeftChild(), node.getRightChild()):
            if child is None:
                gap_seen = True
            else:
                if gap_seen:
                    return False  # a child appears after a gap -> not complete
                pending.append(child)
    return True
#!/bin/bash
# Minimal service control script: ./script <service> <start|stop|restart>
# where <service> is 'php' (php-fpm) or 'httpd' (Apache).
# User-facing messages are in Portuguese.

SERVICE=$1   # which service to manage
ACTION=$2    # start | stop | restart

# Control php-fpm according to $ACTION.
f_php()
{
    case $ACTION in
        stop)
            killall php-fpm 2>/dev/null
            echo "PHP parado"
        ;;
        start)
            php-fpm -D 2>/dev/null
            echo "PHP iniciado"
        ;;
        restart)
            killall php-fpm 2>/dev/null
            echo "PHP parado"
            php-fpm -D 2>/dev/null
            echo "PHP iniciado"
        ;;
    esac
}

# Control Apache httpd according to $ACTION.
f_httpd()
{
    case $ACTION in
        stop)
            httpd -k stop 2>/dev/null
            echo "Apache parado"
        ;;
        start)
            httpd -k start 2>/dev/null
            echo "Apache iniciado"
        ;;
        restart)
            httpd -k stop 2>/dev/null
            echo "Apache parado"
            httpd -k start 2>/dev/null
            echo "Apache iniciado"
        ;;
    esac
}

# Dispatch on the requested service name.
case $SERVICE in
    php) f_php ;;
    httpd) f_httpd ;;
    *) echo "Serviço desconhecido" ;;
esac
|
#!/usr/bin/env bash
# Commit and push the local blog repository. The blog directory defaults to
# $HOME/blog and may be overridden by exporting dbg beforehand.

# BUG FIX: dbg was only ever set on the android branch, so on every other
# platform 'cd $dbg' expanded to a bare 'cd' (landing in $HOME) and the
# script pushed from the wrong directory. Default it up front.
dbg=${dbg:-$HOME/blog}

# pre-treatment platform
if [ -n "$(uname -a | grep -i ubuntu)" ]; then
  echo push blog by ubuntu
elif [ -n "$(uname -a | grep -i centos)" ]; then
  echo push blog by centos
elif [ -n "$(uname -a | grep -i darwin)" ]; then
  echo push blog by mac
elif [ -n "$(uname -a | grep -i android)" ]; then
  echo push blog by android
  export dbg=$HOME/blog
else
  echo Unknown system, check $0 shell
  # BUG FIX: don't fall through and push from an unknown environment.
  exit 1
fi

# BUG FIX: quote the path and abort if the directory is missing instead of
# silently running git in whatever directory we happen to be in.
cd "$dbg" || exit 1
git add .
git commit -m "blog"
git push
|
<filename>while 4/4.7.py
# Partition a mixed list into strings, floats, and everything else.
# Note: non-str/non-float values (ints, but also the nested list) land in
# int_list, exactly as in the original classification.
numbers = [1,'red', 2,3,'yellow', 12.4,[7, 11.5]]
int_list = []
float_list = []
str_list = []

for element in numbers:
    if isinstance(element, str):
        str_list.append(element)
    elif isinstance(element, float):
        float_list.append(element)
    else:
        int_list.append(element)

print(f'Obshii spisok{numbers}')
print(f'Spisok soderjasii stroki:{str_list}')
print(f'Spisok soderjasii celye 4isla:{int_list}')
print(f'Spisok soderjasii drobnye 4isla:{float_list}')
class ExpirationIterable:
    """Thin iterable wrapper around a sequence of expiration entries."""

    def __init__(self, expirations):
        # Keep a reference to the underlying sequence; iteration is delegated.
        self.expirations = expirations

    def __iter__(self):
        # Delegate element-by-element to the wrapped sequence.
        yield from self.expirations


# Example usage
expirations = ['2022-12-31', '2023-06-30', '2023-12-31']
iterable = ExpirationIterable(expirations)
for expiration in iterable:
    print(expiration)
<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_help_center_twotone = void 0;
var ic_help_center_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M5,5v14h14V5H5z M12.01,18c-0.7,0-1.26-0.56-1.26-1.26c0-0.71,0.56-1.25,1.26-1.25 c0.71,0,1.25,0.54,1.25,1.25C13.25,17.43,12.72,18,12.01,18z M15.02,10.6c-0.76,1.11-1.48,1.46-1.87,2.17 c-0.16,0.29-0.22,0.48-0.22,1.41h-1.82c0-0.49-0.08-1.29,0.31-1.98c0.49-0.87,1.42-1.39,1.96-2.16c0.57-0.81,0.25-2.33-1.37-2.33 c-1.06,0-1.58,0.8-1.8,1.48L8.56,8.49C9.01,7.15,10.22,6,11.99,6c1.48,0,2.49,0.67,3.01,1.52C15.44,8.24,15.7,9.59,15.02,10.6z",
"opacity": ".3"
},
"children": [{
"name": "path",
"attribs": {
"d": "M5,5v14h14V5H5z M12.01,18c-0.7,0-1.26-0.56-1.26-1.26c0-0.71,0.56-1.25,1.26-1.25 c0.71,0,1.25,0.54,1.25,1.25C13.25,17.43,12.72,18,12.01,18z M15.02,10.6c-0.76,1.11-1.48,1.46-1.87,2.17 c-0.16,0.29-0.22,0.48-0.22,1.41h-1.82c0-0.49-0.08-1.29,0.31-1.98c0.49-0.87,1.42-1.39,1.96-2.16c0.57-0.81,0.25-2.33-1.37-2.33 c-1.06,0-1.58,0.8-1.8,1.48L8.56,8.49C9.01,7.15,10.22,6,11.99,6c1.48,0,2.49,0.67,3.01,1.52C15.44,8.24,15.7,9.59,15.02,10.6z",
"opacity": ".3"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M13.25,16.74c0,0.69-0.53,1.26-1.25,1.26c-0.7,0-1.26-0.56-1.26-1.26c0-0.71,0.56-1.25,1.26-1.25 C12.71,15.49,13.25,16.04,13.25,16.74z M11.99,6c-1.77,0-2.98,1.15-3.43,2.49l1.64,0.69c0.22-0.67,0.74-1.48,1.8-1.48 c1.62,0,1.94,1.52,1.37,2.33c-0.54,0.77-1.47,1.29-1.96,2.16c-0.39,0.69-0.31,1.49-0.31,1.98h1.82c0-0.93,0.07-1.12,0.22-1.41 c0.39-0.72,1.11-1.06,1.87-2.17c0.68-1,0.42-2.36-0.02-3.08C14.48,6.67,13.47,6,11.99,6z M19,5H5v14h14V5 M19,3c1.1,0,2,0.9,2,2v14 c0,1.1-0.9,2-2,2H5c-1.1,0-2-0.9-2-2V5c0-1.1,0.9-2,2-2H19L19,3z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M13.25,16.74c0,0.69-0.53,1.26-1.25,1.26c-0.7,0-1.26-0.56-1.26-1.26c0-0.71,0.56-1.25,1.26-1.25 C12.71,15.49,13.25,16.04,13.25,16.74z M11.99,6c-1.77,0-2.98,1.15-3.43,2.49l1.64,0.69c0.22-0.67,0.74-1.48,1.8-1.48 c1.62,0,1.94,1.52,1.37,2.33c-0.54,0.77-1.47,1.29-1.96,2.16c-0.39,0.69-0.31,1.49-0.31,1.98h1.82c0-0.93,0.07-1.12,0.22-1.41 c0.39-0.72,1.11-1.06,1.87-2.17c0.68-1,0.42-2.36-0.02-3.08C14.48,6.67,13.47,6,11.99,6z M19,5H5v14h14V5 M19,3c1.1,0,2,0.9,2,2v14 c0,1.1-0.9,2-2,2H5c-1.1,0-2-0.9-2-2V5c0-1.1,0.9-2,2-2H19L19,3z"
},
"children": []
}]
}]
}]
};
exports.ic_help_center_twotone = ic_help_center_twotone; |
package org.hexagonal.ddd.controller;
import org.hexagonal.ddd.domain.Article;
import org.hexagonal.ddd.domain.ports.ext.IArticleAPI;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * REST endpoints for CRUD operations on {@link Article}, delegating all
 * work to the {@link IArticleAPI} port (hexagonal architecture).
 */
@RestController
@RequestMapping("/article")
public class ArticleController {

    @Autowired
    private IArticleAPI articleAPI; // port implementation injected by Spring

    /** Create a new article; returns the API's status/result string. */
    @PostMapping("/add")
    public String addArticle(@RequestBody Article article) {
        return articleAPI.addArticle(article);
    }

    /** Update an existing article; returns the API's status/result string. */
    @PutMapping("/update")
    public String updateArticle(@RequestBody Article article) {
        return articleAPI.updateArticle(article);
    }

    /**
     * Return all articles.
     * NOTE(review): the method name says "TopN" but it maps to /all and
     * returns everything - consider renaming (kept for compatibility).
     */
    @GetMapping("/all")
    public List<Article> getTopNArticles() {
        return articleAPI.getArticles();
    }

    /** Fetch a single article by its id. */
    @GetMapping("/{id}")
    public Article getArticleById(@PathVariable String id) {
        return articleAPI.getArticleById(id);
    }

    /** Delete the article with the given id; returns the API's status string. */
    @DeleteMapping("/{id}")
    public String deleteArticle(@PathVariable String id) {
        return articleAPI.deleteArticle(id);
    }
}
|
<reponame>0racl3z/ledger-live-desktop
// @flow
import React from "react";
import styled from "styled-components";
import { Toast } from "./Toast";
import type { ThemedComponent } from "~/renderer/styles/StyleProvider";
import { useToasts } from "@ledgerhq/live-common/lib/notifications/ToastProvider";
// Fixed overlay anchored to the bottom-right corner; the nth-child rule
// hides every toast beyond the first five so the on-screen stack stays bounded.
const Wrapper: ThemedComponent<{}> = styled.div`
  position: absolute;
  bottom: 0;
  right: 0;
  padding: 18px;

  & *:nth-child(n + 6) {
    display: none;
  }
`;

// Renders the active toasts from the ToastProvider context and forwards
// dismissal events back to it.
export function ToastOverlay() {
  const { toasts, dismissToast } = useToasts();

  return (
    <Wrapper>
      {toasts.map(({ id, type, title, text, icon, callback }) => (
        <Toast
          id={id}
          type={type}
          title={title}
          icon={icon}
          text={text}
          callback={callback}
          onDismiss={dismissToast}
          key={id}
        />
      ))}
    </Wrapper>
  );
}
|
/// A minimal accumulator-style calculator: each operation mutates a single
/// running `result` value.
class Calculator {
    /// Running total; starts at 0.0 and is reset by `clear()`.
    private var result: Double = 0.0

    /// Add `number` to the running result.
    func add(_ number: Double) {
        result += number
    }

    /// Subtract `number` from the running result.
    func subtract(_ number: Double) {
        result -= number
    }

    /// Multiply the running result by `number`.
    func multiply(by number: Double) {
        result *= number
    }

    /// Divide the running result by `number`.
    /// Division by zero leaves the result unchanged and only prints an error.
    func divide(by number: Double) {
        if number != 0 {
            result /= number
        } else {
            print("Error: Division by zero")
        }
    }

    /// Reset the running result to 0.0.
    func clear() {
        result = 0.0
    }

    /// Current running result.
    func getResult() -> Double {
        return result
    }
}
package main
import (
"fmt"
"sync"
)
//ProcessFunc is the function responsible for handling task
type ProcessFunc func() error

// task pairs one unit of work with a completion signal: finished is closed
// exactly once when the work ends (normally or via panic), and err carries
// the outcome for every goroutine waiting on the same task.
type task struct {
	finished chan struct{}
	function *ProcessFunc
	err      error
}

// run executes the task's function and closes finished afterwards. A panic
// inside the function is recovered and converted into err, and finished is
// closed from the deferred handler in that case, so waiters are always
// released exactly once.
func (t *task) run() {
	defer func() {
		if r := recover(); r != nil {
			t.err = fmt.Errorf("Task failed: %v", r)
			close(t.finished)
		}
	}()
	t.err = (*t.function)()
	close(t.finished)
}
//TaskManager is responsible for preventing
//thundering herd problem in image conversion process.
//When an image is recently uploaded, and multiple users
//request it with the same filters, we should make sure
//that the image conversion process only happens once.
//TaskManager also acts as a worker pool and prevents from
//running Convert function in thousands of goroutines.
type TaskManager struct {
    tasks map[string]*task
    request chan *task
    sync.Mutex
}
//NewTaskManager builds a TaskManager with an empty task registry and
//a request channel buffered to workersCount, then spawns that many
//background workers to drain it.
func NewTaskManager(workersCount int) *TaskManager {
    manager := &TaskManager{
        tasks:   make(map[string]*task),
        request: make(chan *task, workersCount),
    }
    manager.startWorkers(workersCount)
    return manager
}
// startWorkers launches count goroutines that each keep pulling tasks
// off the request channel and running them until the channel closes.
func (tm *TaskManager) startWorkers(count int) {
    for worker := 0; worker < count; worker++ {
        go func() {
            for pending := range tm.request {
                pending.run()
            }
        }()
    }
}
// clear removes a finished task from the registry so that a later
// request with the same ID starts a fresh run.
func (tm *TaskManager) clear(taskID string) {
    tm.Lock()
    defer tm.Unlock()
    delete(tm.tasks, taskID)
}
//RunTask takes a unique taskID and a processing function
//and runs the function in the background.
//If a task with the same ID is already in flight, the caller blocks
//until that run finishes and shares its error result instead of
//starting a duplicate run (deduplicates identical work).
func (tm *TaskManager) RunTask(taskID string, f ProcessFunc) error {
    tm.Lock()
    t := tm.tasks[taskID]
    if t == nil {
        // similar task does not exist at the moment
        t = &task{finished: make(chan struct{}), function: &f}
        tm.tasks[taskID] = t
        tm.Unlock()
        // Hand the task to a worker; this send may block when all
        // workers are busy (channel buffer equals the worker count).
        tm.request <- t
        <-t.finished
        tm.clear(taskID)
    } else {
        // task is being done by another process
        tm.Unlock()
        <-t.finished
    }
    return t.err
}
|
/**
* @author <NAME> <<EMAIL>>
*
* @section LICENSE
* See LICENSE for more informations.
*
*/
#include <QString>
#include <QtTest>
#include <QCoreApplication>
#include <include/FilterRule.h>
// QtTest suite exercising FilterRule: construction, the filter and
// case-sensitivity accessors, the active flag and JSON round-tripping.
class FilterRuleTest : public QObject
{
    Q_OBJECT
public:
    FilterRuleTest();
private Q_SLOTS:
    void initTestCase();              // per-suite setup (currently empty)
    void cleanupTestCase();           // per-suite teardown (currently empty)
    void testConstructor();           // filter set via constructor
    void testSetGetFilter();          // filter setter/getter round trip
    void testSetGetCaseSensitivity(); // default + setter/getter
    void testActive();                // active-flag default + toggle
    void testToAndFromJson();         // JSON round trip preserves equality
};
FilterRuleTest::FilterRuleTest()
{
}
// Runs once before the first test; nothing to prepare.
void FilterRuleTest::initTestCase()
{
}
// Runs once after the last test; nothing to release.
void FilterRuleTest::cleanupTestCase()
{
}
void FilterRuleTest::testConstructor()
{
QString filter("blablabla");
FilterRule rule(filter);
QVERIFY2(rule.filter() == filter, "Failed set filter with constructor");
}
void FilterRuleTest::testSetGetFilter()
{
QString filter("blablabla");
FilterRule rule;
rule.setFilter(filter);
QVERIFY2(rule.filter() == filter, "Failed set/get filter");
}
// The default sensitivity must be CaseInsensitive, and the setter
// must be reflected by the getter.
void FilterRuleTest::testSetGetCaseSensitivity()
{
    FilterRule rule;
    QVERIFY2(rule.caseSensitivity() == Qt::CaseInsensitive,
             "Wrong default case sensitivity");
    rule.setCaseSensitivity(Qt::CaseSensitive);
    QVERIFY2(rule.caseSensitivity() == Qt::CaseSensitive, "Failed set/get case sensitivity");
}
// A freshly constructed rule is active; setActive(false) deactivates it.
void FilterRuleTest::testActive()
{
    FilterRule rule;
    QVERIFY2(rule.isActive(), "Default rule isn't active");
    rule.setActive(false);
    QVERIFY2(!rule.isActive(), "Failed set/get rule active");
}
// Serialises a rule with non-default state to JSON, restores it into
// a second rule and checks that the two compare equal.
void FilterRuleTest::testToAndFromJson()
{
    FilterRule rule("Rule 1");
    // Flip both properties away from their defaults so the round trip
    // is exercised with non-default values.
    rule.setActive(!rule.isActive());
    rule.setCaseSensitivity(rule.caseSensitivity() == Qt::CaseInsensitive ? Qt::CaseSensitive : Qt::CaseInsensitive);
    QJsonObject ruleJson = rule.toJson();
    QVERIFY2(! ruleJson.isEmpty(), "Empty json returned");
    FilterRule rule2;
    rule2.fromJson(ruleJson);
    QVERIFY2(rule == rule2, "Failed convert to and from json");
}
QTEST_MAIN(FilterRuleTest)
#include "tst_FilterRuleTest.moc"
|
#!/bin/bash
# A more fluid way of moving windows with BSPWM, which is meant to be
# implemented in SXHKD. If there is a window in the given direction,
# swap places with it. Else if there is a receptacle move to it
# ("consume" its place). Otherwise create a receptacle in the given
# direction by splitting the entire viewport (circumvents the tiling
# scheme while respecting the current split ratio configuration). In
# the latter scenario, inputting the direction twice will thus move the
# focused window out of its current layout and into the receptacle.
#
# Part of my dotfiles: https://gitlab.com/protesilaos/dotfiles
#
# Copyright (c) 2019 Protesilaos Stavrou <info@protesilaos.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Exactly one direction argument is required.
[ "$#" -eq 1 ] || { echo "Pass only one argument: north,east,south,west"; exit 1; }
# Check if argument is a valid direction.
case "$1" in
    north|east|south|west)
        dir="$1"
        ;;
    *)
        echo "Not a valid argument."
        echo "Use one of: north,east,south,west"
        exit 1
        ;;
esac
# Thin wrapper around bspc node queries; forwards all arguments to
# `bspc query -N -n` and prints matching node ids (empty if none).
_query_nodes() {
    bspc query -N -n "$@"
}
# Do not operate on floating windows!
[ -z "$(_query_nodes focused.floating)" ] || { echo "Only move tiled windows."; exit 1; }
# Id of an existing receptacle, or the empty string if there is none.
receptacle="$(_query_nodes 'any.leaf.!window')"
# This regulates the behaviour documented in the description.
if [ -n "$(_query_nodes "${dir}.!floating")" ]; then
    # A tiled window exists in that direction: swap places with it.
    bspc node -s "$dir"
elif [ -n "$receptacle" ]; then
    # A receptacle exists: move the focused window into ("consume") it.
    bspc node focused -n "$receptacle" --follow
else
    # No window and no receptacle: split the viewport to create one.
    # NOTE(review): "$receptacle" is empty in this branch (it was
    # captured before the receptacle was created), so the second
    # command presumably does nothing and the actual move happens on
    # the next invocation via the elif branch, as the header comment
    # describes ("inputting the direction twice"). Confirm whether the
    # trailing command was meant to re-query the new receptacle id.
    bspc node @/ -p "$dir" -i && bspc node -n "$receptacle" --follow
fi
|
///***************************************************************************
// * (C) Copyright 2003-2013 - Stendhal *
// ***************************************************************************
// ***************************************************************************
// * *
// * This program is free software; you can redistribute it and/or modify *
// * it under the terms of the GNU General Public License as published by *
// * the Free Software Foundation; either version 2 of the License, or *
// * (at your option) any later version. *
// * *
// ***************************************************************************/
//package games.stendhal.client.gui.spells;
//
//import javax.swing.SwingUtilities;
//
//import games.stendhal.client.gui.SlotWindow;
//import games.stendhal.client.listener.FeatureChangeListener;
///**
// * Container displaying the spells of the player.
// *
// * @author madmetzger
// *
// */
//public class Spells extends SlotWindow implements FeatureChangeListener {
//
// private static final long serialVersionUID = 79889495195014549L;
//
// public Spells() {
// super("spells", 3, 2);
// //panel window, no closing allowed
// setCloseable(false);
// }
//
// @Override
// public void featureDisabled(final String name) {
// if (name.equals("spells")) {
// if(isVisible()) {
// SwingUtilities.invokeLater(new Runnable() {
// @Override
// public void run() {
// setVisible(false);
// }
// });
// }
// }
// }
//
// @Override
// public void featureEnabled(final String name, final String value) {
// if (name.equals("spells")) {
// if(!isVisible()) {
// SwingUtilities.invokeLater(new Runnable() {
// @Override
// public void run() {
// setVisible(true);
// }
// });
// }
// }
// }
//
//}
|
<filename>src/templates/Table/Table.tsx<gh_stars>0
import React from "react";
import "./Table.css";
import { standingPosition } from "../../types";
import { tableConfig } from "../../constants/football";
import Toolbar from "../../components/Toolbar/Toolbar";
import Link from "../../components/Navigation/Link/Link";
export interface TableProps {
standings: standingPosition[];
}
/**
 * League table template: toolbar, a header row and one row per entry
 * in `standings`. Team names are colour-coded by table zone
 * (Champions League, Europa League, relegation) via CSS classes.
 */
const Table: React.FC<TableProps> = ({ standings }) => {
  // CSS classes for a team-name cell, derived from table position.
  // The CL and EL ranges are disjoint, so else-if is equivalent to the
  // original pair of independent range checks.
  const teamNameClassesFor = (index: number): string => {
    const classes = ["teamName"];
    const clCount = tableConfig.championsLeagueTeamsCount;
    const elCount = tableConfig.europaLeagueTeamsCount;
    if (index < clCount) {
      classes.push("championsLeague");
    } else if (index < clCount + elCount) {
      classes.push("europaLeague");
    }
    // Relegation zone: the last serieBTeamsCount positions.
    if (index > standings.length - tableConfig.serieBTeamsCount - 1) {
      classes.push("serieB");
    }
    return classes.join(" ");
  };

  // CSS classes for a whole row; the leader gets an extra highlight.
  const rowClassesFor = (index: number): string =>
    index === 0 ? "standingsTeam firstScorer" : "standingsTeam";

  return (
    <div className="standingsContainer">
      <Toolbar />
      <div className="standingsHeader">
        <span className="rank"></span>
        <span className="imageWrapper"></span>
        <span className="match">G</span>
        <span className="match">V</span>
        <span className="match">P</span>
        <span className="match">N</span>
        <span className="goals">GF</span>
        <span className="goals">GS</span>
        <span className="points">
          <strong>Pt</strong>
        </span>
      </div>
      <div className="standingsTeamsContainer">
        {standings.map((entry, index) => (
          <div className={rowClassesFor(index)} key={entry.id}>
            <span className="rank">{index + 1}</span>
            <span className="imageWrapper">
              <img
                src={entry.team.crestUrl}
                title={entry.team.shortName}
                alt={entry.team.shortName}
                height={30}
                width={30}
              />
              <Link to={entry.slug}>
                <span className={teamNameClassesFor(index)}>
                  {entry.team.shortName.split(" ")[0]}
                </span>
              </Link>
            </span>
            <span className="match">{entry.playedGames}</span>
            <span className="match">{entry.won}</span>
            <span className="match">{entry.lost}</span>
            <span className="match">{entry.draw}</span>
            <span className="goals">{entry.goalsFor}</span>
            <span className="goals">{entry.goalsAgainst}</span>
            <span className="points">
              <strong>{entry.points}</strong>
            </span>
          </div>
        ))}
      </div>
    </div>
  );
};

export default Table;
|
<gh_stars>0
'use strict';

var express = require('express'),
    router = express.Router(),
    app = require('../../app'),
    access = require('../access'),
    auth = require('../auth'),
    // Bug fix: `utils` was used below but never required, so the error
    // path threw a ReferenceError instead of reporting the DB error.
    // NOTE(review): module path assumed to match siblings — confirm.
    utils = require('../utils');

// DELETE /<dbName>/:id/<attachmentName> — removes an attachment from
// the authenticated caller's own document.
router.delete('/' + app.dbName + '/:id/*', auth.isAuthenticated, function(req, res) {
    // Scope the document id to the authenticated owner.
    var id = access.addOwnerId(req.params.id, req.session.user.name);
    app.db.attachment.destroy(id, req.params['0'], req.query, function(err, body) {
        if (err) {
            return utils.sendError(err, res);
        }
        // Strip owner metadata before returning the response body.
        res.send(access.strip(body));
    });
});

module.exports = router;
package com.example.android.quakereport;
import android.content.Context;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.loader.content.AsyncTaskLoader;
import java.util.List;
/**
* Loads a list of earthquakes by using an AsyncTask to perform the
* network request to the given URL.
*/
public class EarthquakeLoader extends AsyncTaskLoader<List<Quake>> {
/** Tag for log messages */
private static final String TAG = EarthquakeLoader.class.getName();
/** Query URL */
private String mUrl;
/**
* Constructs a new {@link EarthquakeLoader}.
*
* @param context of the activity
* @param url to load data from
*/
public EarthquakeLoader(Context context, String url) {
super(context);
mUrl = url;
}
//This method is automatically called after initLoader()
//It triggers the loader to start the background work
@Override
protected void onStartLoading() {
forceLoad();
Log.e(TAG,"OnStartLoading()");
}
/**
* This is on a background thread.
*/
@Override
public List<Quake> loadInBackground() {
Log.e(TAG,"loadInBackground");
if (mUrl == null) {
return null;
}
// Perform the network request, parse the response, and extract a list of earthquakes.
List<Quake> earthquakes = QueryUtils.fetchEarthquakeData(mUrl);
Log.e(TAG,"loadInBackground:fetchEarthquakeData()");
return earthquakes;
//this return value is used as the list<Quake> parameter in the onLoadFinished() method in the main thread by default
}
} |
/**
 * Length of the longest contiguous subarray of `nums` summing to `k`.
 *
 * Single pass over prefix sums: `memo` maps each prefix sum to the
 * FIRST index where it occurs; if (sum - k) was first seen at index j,
 * then nums[j+1..i] sums to k.
 *
 * @param {number[]} nums
 * @param {number} k
 * @returns {number} longest matching subarray length (0 if none)
 */
function maxSubarr(nums, k) {
    var memo = {};
    var maxLen = 0;
    var sum = 0;
    for (let i = 0; i < nums.length; i++) {
        sum += nums[i];
        if (sum === k) {
            // Subarray starting at index 0 is the longest possible here.
            maxLen = i + 1;
        } else if (memo[sum - k] !== undefined) {
            // Bug fix: the old truthiness check `if (memo[sum - k])`
            // missed prefix sums first recorded at index 0.
            maxLen = Math.max(maxLen, i - memo[sum - k]);
        }
        // Store only the FIRST index per prefix sum. Bug fix: the old
        // `!memo[sum]` test overwrote entries whose stored index was 0.
        if (!(sum in memo)) memo[sum] = i;
    }
    return maxLen;
}
module.exports = maxSubarr;
# mnist: sweep -sample_test sizes, 3 repetitions each (for run-to-run variance).
# Paths are quoted because "[-1]" is a glob character class in bash/sh and
# could otherwise expand to a different filename if one happened to match.
for n in 100 200 300 400 500 600 700 800 900 1000 \
         2000 3000 4000 5000 6000 7000 8000 9000 10000; do
  for rep in 1 2 3; do
    python impar.py \
      -train "../raw_data/vgg-16/mnist_train_to_vgg-16_N[-1].npy" \
      -test "../raw_data/vgg-16/mnist_test_to_vgg-16_N[-1].npy" \
      -savefile ../impar/vgg-16/mnist/sample_test/mnist \
      -sample_test "$n"
  done
done
# fashion_mnist: same sweep — each -sample_test size run 3 times.
# Paths are quoted because "[-1]" is a glob character class in bash/sh.
for n in 100 200 300 400 500 600 700 800 900 1000 \
         2000 3000 4000 5000 6000 7000 8000 9000 10000; do
  for rep in 1 2 3; do
    python impar.py \
      -train "../raw_data/vgg-16/fashion_mnist_train_to_vgg-16_N[-1].npy" \
      -test "../raw_data/vgg-16/fashion_mnist_test_to_vgg-16_N[-1].npy" \
      -savefile ../impar/vgg-16/fashion_mnist/sample_test/fashion_mnist \
      -sample_test "$n"
  done
done
# cifar10
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 100;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 100;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 100;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 200;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 200;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 200;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 300;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 300;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 300;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 400;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 400;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 400;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 500;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 500;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 500;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 600;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 600;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 600;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 700;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 700;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 700;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 800;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 800;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 800;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 900;
python impar.py -train ../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy -test ../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy -savefile ../impar/vgg-16/cifar10/sample_test/cifar10 -sample_test 900;
# cifar10 (continuation): one remaining run at sample_test=900, then three
# repeated runs for each size from 1000 to 10000.  The commands issued are
# identical to the fully unrolled original, just expressed as loops.
# NOTE: paths are deliberately left unquoted so the "[-1]" bracket pattern
# expands (or falls through to the literal name) exactly as in the original.
CIFAR10_TRAIN=../raw_data/vgg-16/cifar10_train_to_vgg-16_N[-1].npy
CIFAR10_TEST=../raw_data/vgg-16/cifar10_test_to_vgg-16_N[-1].npy
CIFAR10_SAVE=../impar/vgg-16/cifar10/sample_test/cifar10
python impar.py -train $CIFAR10_TRAIN -test $CIFAR10_TEST -savefile $CIFAR10_SAVE -sample_test 900
for size in 1000 2000 3000 4000 5000 6000 7000 8000 9000 10000; do
  for repetition in 1 2 3; do
    python impar.py -train $CIFAR10_TRAIN -test $CIFAR10_TEST -savefile $CIFAR10_SAVE -sample_test $size
  done
done
# cifar100: three repeated runs for every sample_test size (100..1000 by 100,
# then 2000..10000 by 1000).  Command sequence is identical to the fully
# unrolled original, just expressed as loops.
# NOTE: paths are deliberately left unquoted so the "[-1]" bracket pattern
# expands (or falls through to the literal name) exactly as in the original.
CIFAR100_TRAIN=../raw_data/vgg-16/cifar100_train_to_vgg-16_N[-1].npy
CIFAR100_TEST=../raw_data/vgg-16/cifar100_test_to_vgg-16_N[-1].npy
CIFAR100_SAVE=../impar/vgg-16/cifar100/sample_test/cifar100
for size in 100 200 300 400 500 600 700 800 900 1000 2000 3000 4000 5000 6000 7000 8000 9000 10000; do
  for repetition in 1 2 3; do
    python impar.py -train $CIFAR100_TRAIN -test $CIFAR100_TEST -savefile $CIFAR100_SAVE -sample_test $size
  done
done
|
def print_fibonacci_series(n):
    """Print every Fibonacci number strictly less than ``n``.

    Values are written to stdout separated by single spaces, with a
    trailing space and no final newline (matches the original output).
    """
    prev, curr = 0, 1
    while prev < n:
        print(prev, end=" ")
        prev, curr = curr, prev + curr

print_fibonacci_series(10)
# Output: 0 1 1 2 3 5 8
import { Component, OnInit } from '@angular/core';
// Shape of the data consumed by the table template: a header row plus the
// body rows, all plain strings.
declare interface TableData {
  headerRow: string[];
  dataRows: string[][];
}

@Component({
  selector: 'table-cmp',
  moduleId: module.id,
  templateUrl: 'table.component.html'
})
export class TableComponent implements OnInit {
  // Two appliance-consumption tables rendered by table.component.html.
  public tableData1: TableData;
  public tableData2: TableData;

  ngOnInit() {
    this.tableData1 = this.buildTable([
      ['1', 'Aparelho de Som', '0.002', '0.02', 'R$5,10'],
      ['2', 'Frigobar', '0.007', '0.07', 'R$8,60'],
      ['3', 'Torradeira', '0.005', '0.50', 'R$12,50'],
      ['4', 'Tv', '0.09', '0.090', 'R$20,12'],
      ['5', 'Geladeira', '0.20', '0.0020', 'R$15,50'],
      ['6', 'Microondas', '0.41', '0.0047', 'R$5,23']
    ]);
    this.tableData2 = this.buildTable([
      ['1', 'Freezer', '0.54', '0.0540', 'R$13,33'],
      ['2', 'Cafeteira', '0.30', '0.003', 'R$0,25'],
      ['3', 'Filtro de Ar', '0.12', '0.120', 'R$1,23'],
      ['4', 'Aquecedor', '0.50', '0.560', 'R$4,56'],
      ['5', 'Fritadiera', '0.22', '0.452', 'R$7,89'],
      ['6', 'Ar Condicionado', '0.452', '1.259', 'R$5,67']
    ]);
  }

  // Pairs the shared column layout with a set of body rows.  A fresh header
  // array is created on every call so the two tables never alias state.
  private buildTable(rows: string[][]): TableData {
    return {
      headerRow: ['ID', 'Referência', 'Média/Hora', 'Total/Hora', 'Valor Estimado'],
      dataRows: rows
    };
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.