text stringlengths 1 1.05M |
|---|
/*eslint-env jest*/
import {
WfsFilterUtil,
} from '../../index';
// Tests for WfsFilterUtil's static WFS filter construction helpers.
describe('WfsFilterUtil', () => {
  // Key under which attributes are looked up in both config objects below.
  const featureType = 'featureType';
  // NOTE(review): the search terms were redacted to '<PASSWORD>' placeholders
  // in this copy; originally presumably a text term and a numeric term.
  const stringSearchTerm = '<PASSWORD>';
  const digitSearchTerm = '<PASSWORD>';
  // Per-test mutable fixtures; restored by the afterEach hook below.
  let searchAttributes = {
    featureType: []
  };
  let attributeDetails = {
    featureType: {}
  };
  // Attribute configurations: case-insensitive LIKE, case-sensitive LIKE,
  // and an exact (EqualTo) integer match.
  const stringAttr1 = {
    matchCase: false,
    type: 'string',
    exactSearch: false
  };
  const stringAttr2 = {
    matchCase: true,
    type: 'string',
    exactSearch: false
  };
  const numberAttr = {
    matchCase: true,
    type: 'int',
    exactSearch: true
  };
  describe('Basics', () => {
    it('is defined', () => {
      expect(WfsFilterUtil).not.toBeUndefined();
    });
  });
  describe('Static methods', () => {
    // Reset the shared fixtures after every test. The reset mutates whatever
    // object attributeDetails currently points at — a test may have
    // reassigned it (see the combined-OR test below).
    afterEach(() => {
      searchAttributes = {
        'featureType': []
      };
      Object.keys(attributeDetails[featureType]).forEach(prop => {
        delete attributeDetails[featureType][prop];
      });
    });
    describe('#createWfsFilter', () => {
      it('is defined', () => {
        expect(WfsFilterUtil.createWfsFilter).toBeDefined();
      });
      // No attribute list registered for the requested feature type -> null.
      it ('returns null if no search attributes for the provided feature type are found', () => {
        searchAttributes = {
          'someAnotherFeatureType': []
        };
        attributeDetails['featureType']['stringAttr1'] = stringAttr1;
        const got = WfsFilterUtil.createWfsFilter(featureType, stringSearchTerm, searchAttributes, attributeDetails);
        expect(got).toBeNull();
      });
      it ('returns simple LIKE filter if only one attribute is provided and exactSearch flag is false or not given', () => {
        searchAttributes[featureType].push('stringAttr2');
        attributeDetails['featureType']['stringAttr2'] = stringAttr2;
        const got = WfsFilterUtil.createWfsFilter(featureType, stringSearchTerm, searchAttributes, attributeDetails);
        expect(got.getTagName()).toBe('PropertyIsLike');
        // Non-exact search wraps the term in wildcards on both sides.
        expect(got.pattern).toEqual(`*${stringSearchTerm}*`);
        expect(got.propertyName).toEqual(searchAttributes[featureType][0]);
        expect(got.matchCase).toEqual(stringAttr2.matchCase);
      });
      // Omitting attributeDetails falls back to a case-insensitive LIKE.
      it ('returns simple LIKE filter if only one attribute is provided and attributeDetails argument is omitted', () => {
        searchAttributes[featureType].push('stringAttr1');
        const got = WfsFilterUtil.createWfsFilter(featureType, stringSearchTerm, searchAttributes);
        expect(got.getTagName()).toBe('PropertyIsLike');
        expect(got.pattern).toEqual(`*${stringSearchTerm}*`);
        expect(got.propertyName).toEqual(searchAttributes[featureType][0]);
        expect(got.matchCase).toBeFalsy();
      });
      // exactSearch: true produces an equality filter with the raw term.
      it ('returns simple EQUALTO filter if only one attribute is provided and exactSearch flag is true', () => {
        searchAttributes[featureType].push('numberAttr');
        attributeDetails['featureType']['numberAttr'] = numberAttr;
        const got = WfsFilterUtil.createWfsFilter(featureType, digitSearchTerm, searchAttributes, attributeDetails);
        expect(got.getTagName()).toBe('PropertyIsEqualTo');
        expect(got.expression).toEqual(digitSearchTerm);
        expect(got.propertyName).toEqual(searchAttributes[featureType][0]);
      });
      // Multiple attributes -> one sub-filter per attribute, OR-combined.
      it ('returns combined OR filter if more than one search attributes are provided', () => {
        searchAttributes[featureType].push(...['stringAttr1', 'stringAttr2']);
        attributeDetails = {
          'featureType': {
            'stringAttr1': stringAttr1,
            'stringAttr2': stringAttr2
          }
        };
        const got = WfsFilterUtil.createWfsFilter(featureType, stringSearchTerm, searchAttributes, attributeDetails);
        expect(got.getTagName()).toBe('Or');
        expect(got.conditions.length).toEqual(searchAttributes[featureType].length);
      });
    });
  });
});
|
/**
* DemoApp - Height Map Test Suite
*
* @author <NAME><<EMAIL>>
* @file Src/Test/HeightMapTest.cpp
* @version 1.1.0 09-Jan-15
* @copyright Copyright (c) 2015 by <NAME>. All rights reserved. (http://andysmiles4games.com)
*/
#include <Test/HeightMapTest.h>
#include <HeightMap.h>
#include <SimpleLib/Logger.h>
namespace DemoAppTest
{
// Verifies that a default-constructed (size-less) HeightMap degrades
// gracefully: reads return 0.0f and writes are ignored instead of crashing.
void HeightMapTest::undefinedMapTest(void)
{
    HeightMap map;
    // because there is no size defined there isn't any value at that position
    // but because there's no data at all it has to return 0.0f instead of crashing
    float height = map.getHeight(10, 10);
    CPPUNIT_ASSERT(height == 0.0f);
    // even after setting a value it has to return 0.0f because there is no data map defined yet
    map.setHeight(25.5f, 10, 10);
    height = map.getHeight(10, 10);
    CPPUNIT_ASSERT(height == 0.0f);
    // clean up memory
    map.release();
}
// Verifies that values written with setHeight() are read back correctly.
// The 6x4 map is filled row-major with 1..24, so cell (x, y) holds
// the value y*6 + x + 1; the asserts below spot-check that formula.
void HeightMapTest::accessDataTest(void)
{
    HeightMap map(6, 4);
    unsigned int counter = 0;
    // fill map with demo data (pre-increment: the first cell receives 1)
    for (unsigned int y=0; y < 4; ++y)
    {
        for (unsigned int x=0; x < 6; ++x) {
            map.setHeight(++counter, x, y);
        }
    }
    // check several values for correctness
    float height = map.getHeight(1, 1);
    CPPUNIT_ASSERT(height == 8.0f);
    height = map.getHeight(0, 0);
    CPPUNIT_ASSERT(height == 1.0f);
    height = map.getHeight(5, 2);
    CPPUNIT_ASSERT(height == 18.0f);
    height = map.getHeight(4, 3);
    CPPUNIT_ASSERT(height == 23.0f);
    height = map.getHeight(5, 3);
    CPPUNIT_ASSERT(height == 24.0f);
    // clean up memory
    map.release();
}
// Verifies resize() behaviour: in-bounds access works after a resize,
// out-of-bounds access yields 0.0f, and out-of-bounds writes are ignored.
void HeightMapTest::resizeTest(void)
{
    HeightMap map;
    map.resize(6, 4);
    map.setHeight(1.0f, 5, 3);
    float height = map.getHeight(5, 3);
    CPPUNIT_ASSERT(height == 1.0f);
    // in case the position is out of range it returns 0.0f
    height = map.getHeight(100, 100);
    CPPUNIT_ASSERT_MESSAGE("Position was out of the map boundaries, so the result has to be 0.0f", height == 0.0f);
    // (10, 10) is still outside the new 5x5 bounds, so the write below must
    // be a no-op and the read must return 0.0f
    map.resize(5, 5);
    map.setHeight(4.0f, 10, 10);
    height = map.getHeight(10, 10);
    SimpleLib::Logger::writeDebug("height: %f", height);
    CPPUNIT_ASSERT_MESSAGE("Height has to be 0.0f after resizing the map and trying to access a height value out of boundaries!", height == 0.0f);
    // but within its boundaries it has to work
    map.setHeight(2.0f, 2, 2);
    height = map.getHeight(2, 2);
    CPPUNIT_ASSERT(height == 2.0f);
    // clean up memory
    map.release();
}
// Verifies that a map sized via the constructor is zero-initialized and
// then accepts in-bounds writes.
void HeightMapTest::initializeByConstructorTest(void)
{
    // now test map which map initialization via constructor
    HeightMap map(50, 50);
    // the entire map is set to 0, so the result has to be 0.0f
    float height = map.getHeight(20, 20);
    CPPUNIT_ASSERT(height == 0.0f);
    /**
     * now the setting test again
     */
    map.setHeight(1.0f, 10, 10);
    height = map.getHeight(10, 10);
    CPPUNIT_ASSERT(height == 1.0f);
    // clean up memory
    map.release();
}
} |
def sortList(arr, num):
    """Sort ``arr`` in place by each element scaled by ``num`` and return it.

    A positive ``num`` yields ascending order, a negative ``num`` descending,
    and ``num == 0`` keeps the original (stable) order.  The same list object
    that was passed in is returned.
    """
    arr[:] = sorted(arr, key=lambda value: value * num)
    return arr
# Demo: sort a sample list with a scale factor of 5 (ascending order).
# FIX: removed a stray trailing '|' after the print() call that made this
# script a SyntaxError.
arr = [3, 1, 5, 4, 2]
num = 5
print(sortList(arr, num))
/**
*
*/
package jframe.pay.domain.dao;
/**
*
* @author dzh
* @date Sep 16, 2015 2:09:51 PM
* @since 1.0
*/
/**
 * Base data holder for a payment order.
 *
 * NOTE(review): plain public fields — presumably a DAO/transfer object that
 * is mapped by an ORM or serializer; the per-field meanings below are
 * inferred from the names and should be confirmed against the persistence
 * layer and callers.
 */
public class OrderBase {
    public String payNo;          // payment number (identifier of this payment)
    public String payGroup;       // grouping key for related payments — TODO confirm
    public String transType;      // transaction type code
    // NOTE(review): amount stored as Long — presumably the smallest currency
    // unit (e.g. cents); verify before doing arithmetic or formatting.
    public Long payAmount;
    public String payCurrency;    // ISO currency code — TODO confirm format
    public String payDesc;        // human-readable payment description
    public Long payTimeout;       // payment timeout — units (ms/s) not shown here
    public String backUrl;        // callback/return URL after payment completes
    public String account;        // paying account identifier
    public String orderNo;        // business order number
    public String payStatus;      // payment status code
    public Long orderFinishTime;  // order finish timestamp — units not shown here
    public Long orderCreateTime;  // order creation timestamp — units not shown here
}
|
#!/usr/bin/env bats
load test_helper
# Per-test fixture: each test runs against a freshly created app.
setup() {
  global_setup
  create_app
}

# Per-test teardown: destroy the app so tests stay independent.
teardown() {
  destroy_app
  global_teardown
}

# The bare command and the :help subcommand must print the same usage text.
@test "(docker-options) docker-options:help" {
  run /bin/bash -c "dokku docker-options"
  echo "output: $output"
  echo "status: $status"
  assert_output_contains "Manage docker options for an app"
  help_output="$output"
  run /bin/bash -c "dokku docker-options:help"
  echo "output: $output"
  echo "status: $status"
  assert_output_contains "Manage docker options for an app"
  assert_output "$help_output"
}

# Adding an option to all three phases at once makes it visible in each
# phase-specific report.
@test "(docker-options) docker-options:add (all phases)" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP build,deploy,run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-build"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-deploy"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-run"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
}

# :clear with no phase wipes every phase; :clear with a phase argument wipes
# only that phase.  (assert_output_contains "<text>" 0 asserts zero matches.)
@test "(docker-options) docker-options:clear" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP build,deploy,run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:clear $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-build"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-deploy"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-run"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  # Re-add to all phases, then clear phase by phase and check that only the
  # cleared phases lose the option.
  run /bin/bash -c "dokku docker-options:add $TEST_APP build,deploy,run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:clear $TEST_APP build"
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-build"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-deploy"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-run"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
  run /bin/bash -c "dokku docker-options:clear $TEST_APP deploy"
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-build"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-deploy"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-run"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
  run /bin/bash -c "dokku docker-options:clear $TEST_APP run"
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-build"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-deploy"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-run"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
}

# Adding to a single phase only affects that phase's report.
@test "(docker-options) docker-options:add (build phase)" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP build \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-build"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
}

@test "(docker-options) docker-options:add (deploy phase)" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP deploy \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-deploy"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
}

@test "(docker-options) docker-options:add (run phase)" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP --docker-options-run"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp"
}

# Removing from all phases drops the option everywhere; the built-in deploy
# restart policy must survive the removal.
@test "(docker-options) docker-options:remove (all phases)" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP build,deploy,run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 3
  run /bin/bash -c "dokku docker-options:remove $TEST_APP build,deploy,run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 0
  assert_output_contains "Docker options deploy: --restart=on-failure:10"
}

# Removing from one phase leaves the other two phases intact (2 of 3 remain).
@test "(docker-options) docker-options:remove (build phase)" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP build,deploy,run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 3
  run /bin/bash -c "dokku docker-options:remove $TEST_APP build \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 2
}

@test "(docker-options) docker-options:remove (deploy phase)" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP build,deploy,run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 3
  run /bin/bash -c "dokku docker-options:remove $TEST_APP deploy \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 2
}

@test "(docker-options) docker-options:remove (run phase)" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP build,deploy,run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 3
  run /bin/bash -c "dokku docker-options:remove $TEST_APP run \"-v /tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 2
}

# End-to-end: options added via CLI and appended directly to the
# DOCKER_OPTIONS_DEPLOY file (including a comment line, which must be ignored)
# all end up as container volumes after a deploy.
@test "(docker-options) deploy with options" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP deploy \"-v /var/tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "echo '-v /tmp' >> $DOKKU_ROOT/$TEST_APP/DOCKER_OPTIONS_DEPLOY"
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "echo '# comment' >> $DOKKU_ROOT/$TEST_APP/DOCKER_OPTIONS_DEPLOY"
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 1
  deploy_app
  CID=$(< $DOKKU_ROOT/$TEST_APP/CONTAINER.web.1)
  run /bin/bash -c "docker inspect -f '{{ .Config.Volumes }}' $CID | sed -e 's:map::g' | tr -d '[]' | tr ' ' $'\n' | sort | xargs"
  echo "output: $output"
  echo "status: $status"
  assert_output "/tmp:{} /var/tmp:{}"
}

# The same add command must also work over the SSH command interface.
@test "(docker-options) docker-options:add (all phases over SSH)" {
  run ssh dokku@dokku.me docker-options:add $TEST_APP build,deploy,run "-v /tmp"
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:report $TEST_APP"
  echo "output: $output"
  echo "status: $status"
  assert_success
  assert_output_contains "-v /tmp" 3
}

# A Dockerfile-based deploy must succeed with deploy- and build-phase options
# (including a --link) configured.
@test "(docker-options) dockerfile deploy with link" {
  run /bin/bash -c "dokku docker-options:add $TEST_APP deploy \"-v /var/tmp\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run /bin/bash -c "dokku docker-options:add $TEST_APP build \"--link postgres\""
  echo "output: $output"
  echo "status: $status"
  assert_success
  run deploy_app dockerfile
  echo "output: $output"
  echo "status: $status"
  assert_success
}
|
<filename>src/gtk3/statusbar/MainWindow.py
# -*- coding: utf-8 -*-
"""Gtk.Statusbar()."""
import gi
gi.require_version(namespace='Gtk', version='3.0')
from gi.repository import Gio, Gtk
class MainWindow(Gtk.ApplicationWindow):
    """Demo window showing Gtk.Statusbar push/remove behaviour."""

    # Statusbar context id (from get_context_id) and the id of the last
    # pushed message; both populated lazily after construction.
    context_id = None
    message_id = None

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.set_title(title='Gtk.Statusbar')
        self.set_default_size(width=1366 / 2, height=768 / 2)
        self.set_position(position=Gtk.WindowPosition.CENTER)
        # NOTE(review): relative icon path — assumes the script is launched
        # from its own directory; verify against the launcher.
        self.set_default_icon_from_file(filename='../../assets/icons/icon.png')
        vbox = Gtk.Box.new(orientation=Gtk.Orientation.VERTICAL, spacing=12)
        vbox.set_border_width(border_width=12)
        self.add(widget=vbox)
        # Button labels are Portuguese: 'Exibir menssagem' = "Show message".
        btn_show_msg = Gtk.Button.new_with_label(label='Exibir menssagem')
        btn_show_msg.connect('clicked', self.statusbar_show_msg)
        vbox.pack_start(child=btn_show_msg, expand=False, fill=True, padding=0)
        # 'Remover mensagem' = "Remove message".
        btn_remove_msg = Gtk.Button.new_with_label(label='Remover mensagem')
        btn_remove_msg.connect('clicked', self.statusbar_remove_msg)
        vbox.pack_start(child=btn_remove_msg, expand=False, fill=True, padding=0)
        self.statusbar = Gtk.Statusbar.new()
        self.context_id = self.statusbar.get_context_id(context_description='exemplo')
        vbox.pack_end(child=self.statusbar, expand=False, fill=True, padding=0)
        self.show_all()

    def statusbar_show_msg(self, widget):
        # Push a message and remember its id so it could be removed
        # individually later (see the commented alternative below).
        self.message_id = self.statusbar.push(
            context_id=self.context_id,
            text='Texto que será exibido no statusbar.',
        )

    def statusbar_remove_msg(self, widget):
        # A single message would be removed with statusbar.remove(context_id,
        # message_id) as shown below; this demo clears the whole context.
        # self.statusbar.remove(
        #     context_id=self.context_id,
        #     message_id=self.message_id,
        # )
        self.statusbar.remove_all(context_id=self.context_id)
class Application(Gtk.Application):
    """Gtk.Application wrapper that owns a single MainWindow."""

    def __init__(self):
        super().__init__(application_id='br.natorsc.Exemplo',
                         flags=Gio.ApplicationFlags.FLAGS_NONE)

    def do_startup(self):
        # Chain up; required for Gtk.Application startup.
        Gtk.Application.do_startup(self)

    def do_activate(self):
        # Reuse the existing window if the app is activated again.
        win = self.props.active_window
        if not win:
            win = MainWindow(application=self)
        win.present()

    def do_shutdown(self):
        # Chain up; required for Gtk.Application shutdown.
        Gtk.Application.do_shutdown(self)
# Script entry point: run the GTK application with the CLI arguments.
if __name__ == '__main__':
    import sys
    app = Application()
    app.run(sys.argv)
|
require 'active_support/all'
require 'notification_hub/configuration'
require 'notification_hub/notification'
require 'notification_hub/channels/sqs'
require 'notification_hub/channels/messages/base'
require 'notification_hub/channels/messages/push_notification'
require 'notification_hub/channels/messages/sms'
require 'notification_hub/envelope/base'
require 'notification_hub/envelope/fallback'
require 'notification_hub/envelope/multichannel'
module NotificationHub
  # Module-level configuration accessor.
  # NOTE(review): the reader generated by attr_accessor is overridden by the
  # memoizing self.configuration below; effectively only the writer remains.
  class << self
    attr_accessor :configuration
  end

  # Lazily builds (and memoizes) the global configuration object.
  def self.configuration
    @configuration ||= NotificationHub::Configuration.new
  end

  # Replaces the configuration with a fresh instance (handy in tests).
  def self.reset
    @configuration = NotificationHub::Configuration.new
  end

  # Block-style setup: NotificationHub.configure { |c| c.foo = ... }
  def self.configure
    yield(configuration)
  end
end
|
// Doxygen-generated navigation data for the ArmNN TfLiteParser
// DepthwiseConvolution2d test file; each entry is
// [symbol name, target page/anchor, brief description].
// FIX: removed a stray trailing '|' after the closing bracket that made this
// generated file a syntax error.
var armnn_tf_lite_parser_2test_2_depthwise_convolution2_d_8cpp =
[
    [ "BOOST_FIXTURE_TEST_CASE", "armnn_tf_lite_parser_2test_2_depthwise_convolution2_d_8cpp.xhtml#af53fa33ced6d7398ff45034555f163aa", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "armnn_tf_lite_parser_2test_2_depthwise_convolution2_d_8cpp.xhtml#a6e7cc099f8ebc8db20852ca41f9600e0", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "armnn_tf_lite_parser_2test_2_depthwise_convolution2_d_8cpp.xhtml#afc6740eb06761e5194dda25370e5b85e", null ]
];
<filename>src/app/core/ui/header/header.component.ts
import { Component, OnInit } from '@angular/core';
import { DownloadResumeService } from '../../services/download-resume.service';
@Component({
  selector: 'app-header',
  templateUrl: './header.component.html',
  styleUrls: ['./header.component.less']
})
export class HeaderComponent implements OnInit {

  constructor(private _resumeService: DownloadResumeService) { }

  ngOnInit() {
  }

  /**
   * Fetches the resume as a Blob and triggers a client-side download.
   *
   * IE11 gets the non-standard msSaveOrOpenBlob path (that API is absent
   * from modern browsers); everywhere else an object URL is attached to a
   * hidden anchor, clicked, and then cleaned up.
   */
  downloadResume() {
    this._resumeService.downloadResume().subscribe(data => {
      if (window.navigator.msSaveOrOpenBlob) {
        // IE11
        window.navigator.msSaveOrOpenBlob(
          data,
          'srimaharshimanchem.pdf'
        );
      } else {
        // Google Chrome, Firefox, ...
        const url = window.URL.createObjectURL(data);
        const a = document.createElement('a');
        document.body.appendChild(a);
        a.setAttribute('style', 'display: none');
        a.href = url;
        a.download = 'srimaharshimanchem.pdf';
        a.click();
        // Release the object URL and drop the temporary anchor.
        window.URL.revokeObjectURL(url);
        a.remove(); // remove the element
      }
    });
  }
}
|
#!/bin/bash
# Waits for the operator to press Enter before continuing.
function nextstep {
    echo 'Next step?'
    # -r: do not mangle backslashes in the operator's input
    read -r letssee
}

# Applies a scenario manifest, shows its first line, waits for confirmation,
# then deletes it again.  $1: path to the manifest file.
# FIX: all $1 expansions are now quoted so paths containing spaces or glob
# characters do not get word-split or expanded.
function step {
    echo "##############"
    echo "kubectl apply -f" "$1"
    kubectl apply -f "$1"
    head -1 "$1"
    nextstep
    erasestep "$1"
}

# Deletes the resources created from the given manifest.  $1: manifest path.
function erasestep {
    echo "kubectl delete -f" "$1"
    kubectl delete -f "$1"
}

echo '#######################################################################################################'
echo '# TARGET: Policy. Scenarios in this script are for checking validity of the Policy'
echo '# It might make other configs have validations. Consider them, but they are not the target of this test'
echo '# Validation file: business/checkers/policies/namespace_mtls_checker.go'
echo '#######################################################################################################'
step 'scenario7.yaml'
step 'scenario8.yaml'
step 'scenario9.yaml'
|
/**
 * Checks whether `url` answers a GET request with HTTP 200.
 *
 * NOTE: uses a *synchronous* XMLHttpRequest, which blocks the calling thread
 * and is deprecated in browsers; kept here to preserve the synchronous
 * boolean interface for existing callers.
 *
 * @param {string} url - URL to probe.
 * @returns {boolean} true only for a 200 response; false for any other
 *   status, or when send() throws (network failure, CORS rejection, etc.).
 */
function isValidUrl(url) {
  // Create an http request to validate the given URL
  const http = new XMLHttpRequest();
  http.open('GET', url, false);
  try {
    http.send();
    // The previous if/else returning true/false is exactly this expression.
    return http.status === 200;
  } catch (error) {
    return false;
  }
}
<filename>leetcode/math/incremental-memory-leak.py
"""
## Questions
### 1860. [Incremental Memory Leak](https://leetcode.com/problems/incremental-memory-leak/)
You are given two integers memory1 and memory2 representing the available memory in bits on two memory sticks. There is
currently a faulty program running that consumes an increasing amount of memory every second.
At the ith second (starting from 1), i bits of memory are allocated to the stick with more available memory (or from
the first memory stick if both have the same available memory). If neither stick has at least i bits of available
memory, the program crashes.
Return an array containing [crashTime, memory1crash, memory2crash], where crashTime is the time (in seconds) when the
program crashed and memory1crash and memory2crash are the available bits of memory in the first and second sticks
respectively.
Example 1:
Input: memory1 = 2, memory2 = 2
Output: [3,1,0]
Explanation: The memory is allocated as follows:
- At the 1st second, 1 bit of memory is allocated to stick 1. The first stick now has 1 bit of available memory.
- At the 2nd second, 2 bits of memory are allocated to stick 2. The second stick now has 0 bits of available memory.
- At the 3rd second, the program crashes. The sticks have 1 and 0 bits available respectively.
Example 2:
Input: memory1 = 8, memory2 = 11
Output: [6,0,4]
Explanation: The memory is allocated as follows:
- At the 1st second, 1 bit of memory is allocated to stick 2. The second stick now has 10 bit of available memory.
- At the 2nd second, 2 bits of memory are allocated to stick 2. The second stick now has 8 bits of available memory.
- At the 3rd second, 3 bits of memory are allocated to stick 1. The first stick now has 5 bits of available memory.
- At the 4th second, 4 bits of memory are allocated to stick 2. The second stick now has 4 bits of available memory.
- At the 5th second, 5 bits of memory are allocated to stick 1. The first stick now has 0 bits of available memory.
- At the 6th second, the program crashes. The sticks have 0 and 4 bits available respectively.
Constraints:
0 <= memory1, memory2 <= 231 - 1
"""
# Solutions
class Solution:
    def memLeak(self, memory1: int, memory2: int) -> list[int]:
        """Simulate the leak: at second ``i`` (starting at 1), ``i`` bits are
        taken from the stick with more free memory (stick 1 on ties) until
        neither stick can supply ``i`` bits.

        Returns ``[crash_time, memory1_remaining, memory2_remaining]``.
        """
        # BUGFIX: the return annotation previously used typing.List without
        # importing it, which raised NameError when this class body was
        # evaluated; the builtin ``list[int]`` (PEP 585) needs no import.
        i = 1
        while max(memory1, memory2) >= i:
            # Allocate from the stick with more available memory (tie -> 1).
            if memory1 >= memory2:
                memory1 -= i
            else:
                memory2 -= i
            i += 1
        return [i, memory1, memory2]
# Runtime : 500 ms, faster than 100.00% of Python3 online submissions
# Memory Usage : 14 MB, less than 100.00% of Python3 online submissions
|
#!/bin/bash
# Deregisters (removes) AMI images used to create new instances.
# and docker again.
IMAGE_IDS_FILE="IMAGE_IDS.txt"

# Bail out early when the id file is missing; there is nothing to deregister.
[[ -f ${IMAGE_IDS_FILE} ]] || exit 1
source "${IMAGE_IDS_FILE}"

# Deregisters both regional template AMIs and removes the id file on success;
# any failing step in the && chain falls through to interrupted().
main(){
    [[ -f ${IMAGE_IDS_FILE} ]] &&
    source "${IMAGE_IDS_FILE}" &&
    [[ ! -z ${STOCKHOLM_TEMPLATE_IMAGE} ]] &&
    [[ ! -z ${FRANKFURT_TEMPLATE_IMAGE} ]] &&
    aws ec2 deregister-image --region "eu-north-1" --image-id "${STOCKHOLM_TEMPLATE_IMAGE}" &&
    aws ec2 deregister-image --region "eu-central-1" --image-id "${FRANKFURT_TEMPLATE_IMAGE}" &&
    # proceed only if there are no images on EC2 anymore
    rm -f "${IMAGE_IDS_FILE}" ||
    interrupted
}

# Prints a warning in yellow; exits 0 so a failed cleanup does not break
# callers.  FIX: corrected the "succesfull" typo in the user-facing message.
interrupted(){
    local COLOR_YELLOW=$'\e[1;33m'
    local RESET_COLOR=$'\e[0m'
    echo -e "${COLOR_YELLOW}The removing of AMI images was not successful.${RESET_COLOR}"
    exit 0
}

./checkAWS.sh && main || ./awsCliNa.sh
|
# frozen_string_literal: true
# Copyright (c) 2018 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require 'openssl'
require 'time'
require_relative 'remotes'
require_relative 'type'
# The score.
# Author:: <NAME> (<EMAIL>)
# Copyright:: Copyright (c) 2018 <NAME>
# License:: MIT
module Zold
  # Proof-of-work score: a chain of suffixes whose iterated SHA-256 over the
  # node prefix must end in `strength` zeros.  Immutable Dry::Struct value
  # object.  NOTE(review): Dry::Struct is used but no dry-struct require is
  # visible here — presumably pulled in via the 'type' relative require.
  class Score < Dry::Struct
    # Default strength for the entire system, in production mode.
    STRENGTH = 6
    attribute :time, Types::Strict::Time
    attribute :host, Types::Strict::String.constrained(
      format: /^[a-z0-9\.-]+$/
    )
    # NOTE(review): lt: 65_535 excludes port 65535 itself — confirm whether
    # lteq was intended.
    attribute :port, Types::Strict::Integer.constrained(gteq: 0, lt: 65_535)
    attribute :invoice, Types::Strict::String.constrained(
      format: /^[a-zA-Z0-9]{8,32}@[a-f0-9]{16}$/
    )
    attribute :suffixes, Types::Strict::Array.optional.default([])
    attribute :strength, Types::Strict::Integer.optional.default(STRENGTH)
    # NOTE(review): default(Time.now) is evaluated once at class-definition
    # time, not per instance — verify this is the intended behaviour.
    attribute :created, Types::Strict::Time.optional.default(Time.now)
    # The default no-value score.
    ZERO = Score.new(time: Time.now, host: 'localhost', port: 80, invoice: 'NOPREFIX@ffffffffffffffff')

    # Builds a Score from a parsed-JSON hash, validating each field first.
    # Raises with a descriptive message on any malformed field.
    def self.parse_json(json)
      raise "Time in JSON is broken: #{json}" unless json['time'] =~ /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$/
      raise "Host is wrong: #{json}" unless json['host'] =~ /^[0-9a-z\.\-]+$/
      raise "Port is wrong: #{json}" unless json['port'].is_a?(Integer)
      raise "Invoice is wrong: #{json}" unless json['invoice'] =~ /^[a-zA-Z0-9]{8,32}@[a-f0-9]{16}$/
      raise "Suffixes not array: #{json}" unless json['suffixes'].is_a?(Array)
      Score.new(
        time: Time.parse(json['time']), host: json['host'],
        port: json['port'], invoice: json['invoice'], suffixes: json['suffixes'],
        strength: json['strength']
      )
    end

    # Parses the human-readable form produced by #to_s
    # ("value/strength: time host port invoice suffixes...").
    def self.parse(text)
      re = Regexp.new(
        '^' + [
          '([0-9]+)/(?<strength>[0-9]+):',
          ' (?<time>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)',
          ' (?<host>[0-9a-z\.\-]+)',
          ' (?<port>[0-9]+)',
          ' (?<invoice>[a-zA-Z0-9]{8,32}@[a-f0-9]{16})',
          '(?<suffixes>( [a-zA-Z0-9]+)*)'
        ].join + '$'
      )
      m = re.match(text.strip)
      raise "Invalid score '#{text}', doesn't match: #{re}" if m.nil?
      Score.new(
        time: Time.parse(m[:time]), host: m[:host],
        port: m[:port].to_i, invoice: m[:invoice],
        suffixes: m[:suffixes].split(' '),
        strength: m[:strength].to_i
      )
    end

    # Parses the compact text form produced by #to_text
    # (strength, hex time, host, hex port, invoice parts, suffixes).
    def self.parse_text(text)
      parts = text.split(' ', 7)
      Score.new(
        time: Time.at(parts[1].hex),
        host: parts[2],
        port: parts[3].hex,
        invoice: "#{parts[4]}@#{parts[5]}",
        suffixes: parts[6] ? parts[6].split(' ') : [],
        strength: parts[0].to_i
      )
    end

    # Iterated SHA-256 of the prefix folded with each suffix in order.
    # Raises when there are no suffixes (a zero-value score has no hash).
    def hash
      raise 'Score has zero value, there is no hash' if suffixes.empty?
      suffixes.reduce(prefix) do |pfx, suffix|
        OpenSSL::Digest::SHA256.new("#{pfx} #{suffix}").hexdigest
      end
    end

    # Short mnemonic, e.g. "5:1430" (value : HHMM of the score time).
    def to_mnemo
      "#{value}:#{time.strftime('%H%M')}"
    end

    # Compact space-separated text form (inverse of .parse_text).
    def to_text
      pfx, bnf = invoice.split('@')
      [
        strength,
        time.to_i.to_s(16),
        host,
        port.to_s(16),
        pfx,
        bnf,
        suffixes.join(' ')
      ].join(' ')
    end

    # Human-readable form (inverse of .parse).
    def to_s
      [
        "#{value}/#{strength}:",
        time.utc.iso8601,
        host,
        port,
        invoice,
        suffixes.join(' ')
      ].join(' ').strip
    end

    # Hash representation for JSON rendering; hash is nil for zero scores.
    def to_h
      {
        value: value,
        host: host,
        port: port,
        invoice: invoice,
        time: time.utc.iso8601,
        suffixes: suffixes,
        strength: strength,
        hash: value.zero? ? nil : hash,
        expired: expired?,
        valid: valid?,
        age: (age / 60).round,
        created: created.utc.iso8601
      }
    end

    # Copy of this score keeping at most `max` suffixes (i.e. value <= max).
    def reduced(max = 4)
      Score.new(
        time: time, host: host, port: port, invoice: invoice,
        suffixes: suffixes[0..[max, suffixes.count].min - 1], strength: strength, created: nil
      )
    end

    # Mines the next score: brute-forces a suffix that keeps the score valid.
    # If the score expires while mining, restarts from an empty fresh score.
    def next
      raise 'This score is not valid' unless valid?
      idx = 0
      loop do
        suffix = idx.to_s(16)
        score = Score.new(
          time: time, host: host, port: port, invoice: invoice, suffixes: suffixes + [suffix],
          strength: strength
        )
        return score if score.valid?
        if score.expired?
          return Score.new(
            time: Time.now, host: host, port: port, invoice: invoice,
            suffixes: [], strength: strength
          )
        end
        idx += 1
      end
    end

    # Age of the score in seconds.
    def age
      Time.now - time
    end

    # A score older than `hours` is considered expired.
    def expired?(hours = 24)
      age > hours * 60 * 60
    end

    # Seed string that the suffix hash chain starts from.
    def prefix
      "#{time.utc.iso8601} #{host} #{port} #{invoice}"
    end

    # A score is valid when empty, or when its hash ends in `strength` zeros.
    def valid?
      suffixes.empty? || hash.end_with?('0' * strength)
    end

    # The score's value is simply the number of mined suffixes.
    def value
      suffixes.length
    end

    # NOTE(review): equal? is object identity — this is true only for the
    # ZERO constant itself, not for any equivalent zero-value score.
    def zero?
      equal?(Score::ZERO)
    end
  end
end
|
<filename>server/src/main/java/br/com/guestcontrol/server/checkin/CheckinValueCalculatorService.java
package br.com.guestcontrol.server.checkin;
import java.math.BigDecimal;
/**
* Created by erivelto on 27/05/2021
*/
/**
 * Calculates monetary totals for a check-in.
 */
public interface CheckinValueCalculatorService {
    /** Returns the total value to charge for the given check-in. */
    BigDecimal calculateTotalValue(Checkin checkin);
}
|
class InventoryManager:
    """Tracks item quantities in a name -> quantity mapping."""

    def __init__(self):
        # Backing store: item name -> quantity currently on hand.
        self.inventory = {}

    def add_item(self, item_name, quantity):
        """Store `quantity` for `item_name`, overwriting any existing entry."""
        self.inventory[item_name] = quantity

    def remove_item(self, item_name):
        """Delete an item entirely; raise KeyError if it is not tracked."""
        if item_name not in self.inventory:
            raise KeyError(f"{item_name} does not exist in the inventory")
        del self.inventory[item_name]

    def update_quantity(self, item_name, quantity):
        """Replace the quantity of an existing item; KeyError if unknown."""
        if item_name not in self.inventory:
            raise KeyError(f"{item_name} does not exist in the inventory")
        self.inventory[item_name] = quantity

    def get_quantity(self, item_name):
        """Return the stored quantity, or 0 for items never added."""
        return self.inventory.get(item_name, 0)
# Unit tests using pytest
def test_inventory_manager():
    """Exercises add/remove/update/get on InventoryManager.

    BUGFIX: the original used ``pytest.raises`` without importing pytest,
    which raised NameError at runtime (and the final line carried stray
    junk text).  Rewritten with standard-library-only assertions so the
    test runs under pytest or plain Python alike.
    """
    manager = InventoryManager()
    manager.add_item('apple', 10)
    assert manager.get_quantity('apple') == 10
    # add_item overwrites the stored quantity rather than accumulating.
    manager.add_item('apple', 5)
    assert manager.get_quantity('apple') == 5
    manager.remove_item('apple')
    assert manager.get_quantity('apple') == 0
    # Removing or updating an unknown item must raise KeyError.
    try:
        manager.remove_item('banana')
    except KeyError:
        pass
    else:
        raise AssertionError("remove_item('banana') should raise KeyError")
    try:
        manager.update_quantity('banana', 20)
    except KeyError:
        pass
    else:
        raise AssertionError("update_quantity('banana', 20) should raise KeyError")
<reponame>richardmarston/cim4j
package cim4j;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import cim4j.Equipment;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
import cim4j.DCTerminal;
/*
The parts of the DC power system that are designed to carry current or that are conductively connected through DC terminals.
*/
public class DCConductingEquipment extends Equipment
{
    // Attribute storage; slots are indexed by the ordinals of the two
    // sentinel-terminated enums declared below.
    private BaseClass[] DCConductingEquipment_class_attributes;
    private BaseClass[] DCConductingEquipment_primitive_attributes;

    // RDF identifier of this CIM object instance.
    private java.lang.String rdfid;

    public void setRdfid(java.lang.String id) {
        rdfid = id;
    }

    // Factory used by the primitive-attribute enum to turn a raw string
    // into a BaseClass value.
    private abstract interface PrimitiveBuilder {
        public abstract BaseClass construct(java.lang.String value);
    };

    // This class declares no primitive attributes of its own; LAST_ENUM is
    // only a sentinel so values().length still sizes the storage array.
    private enum DCConductingEquipment_primitive_builder implements PrimitiveBuilder {
        LAST_ENUM() {
            public BaseClass construct (java.lang.String value) {
                return new cim4j.Integer("0");
            }
        };
    }

    // Class-valued (association) attributes; LAST_ENUM is a sentinel.
    private enum DCConductingEquipment_class_attributes_enum {
        DCTerminals,
        LAST_ENUM;
    }

    public DCConductingEquipment() {
        DCConductingEquipment_primitive_attributes = new BaseClass[DCConductingEquipment_primitive_builder.values().length];
        DCConductingEquipment_class_attributes = new BaseClass[DCConductingEquipment_class_attributes_enum.values().length];
    }

    // Store a class-valued attribute in its enum-indexed slot; out-of-range
    // ordinals are reported rather than thrown.
    public void updateAttributeInArray(DCConductingEquipment_class_attributes_enum attrEnum, BaseClass value) {
        try {
            DCConductingEquipment_class_attributes[attrEnum.ordinal()] = value;
        }
        catch (ArrayIndexOutOfBoundsException aoobe) {
            System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
        }
    }

    // Store a primitive attribute in its enum-indexed slot.
    public void updateAttributeInArray(DCConductingEquipment_primitive_builder attrEnum, BaseClass value) {
        try {
            DCConductingEquipment_primitive_attributes[attrEnum.ordinal()] = value;
        }
        catch (ArrayIndexOutOfBoundsException aoobe) {
            System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
        }
    }

    // Set a class-valued attribute by name; names unknown to this class are
    // delegated up the Equipment superclass chain.
    public void setAttribute(java.lang.String attrName, BaseClass value) {
        try {
            DCConductingEquipment_class_attributes_enum attrEnum = DCConductingEquipment_class_attributes_enum.valueOf(attrName);
            updateAttributeInArray(attrEnum, value);
            System.out.println("Updated DCConductingEquipment, setting " + attrName);
        }
        catch (IllegalArgumentException iae)
        {
            super.setAttribute(attrName, value);
        }
    }

    /* If the attribute is a String, it is a primitive and we will make it into a BaseClass */
    public void setAttribute(java.lang.String attrName, java.lang.String value) {
        try {
            DCConductingEquipment_primitive_builder attrEnum = DCConductingEquipment_primitive_builder.valueOf(attrName);
            updateAttributeInArray(attrEnum, attrEnum.construct(value));
            System.out.println("Updated DCConductingEquipment, setting " + attrName + " to: " + value);
        }
        catch (IllegalArgumentException iae)
        {
            super.setAttribute(attrName, value);
        }
    }

    // Render populated attributes. topClass=true dumps all non-null slots
    // plus the superclass output; topClass=false prints a one-line stub.
    public java.lang.String toString(boolean topClass) {
        java.lang.String result = "";
        java.lang.String indent = "";
        if (topClass) {
            for (DCConductingEquipment_primitive_builder attrEnum: DCConductingEquipment_primitive_builder.values()) {
                BaseClass bc = DCConductingEquipment_primitive_attributes[attrEnum.ordinal()];
                if (bc != null) {
                    result += " DCConductingEquipment." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
                }
            }
            for (DCConductingEquipment_class_attributes_enum attrEnum: DCConductingEquipment_class_attributes_enum.values()) {
                BaseClass bc = DCConductingEquipment_class_attributes[attrEnum.ordinal()];
                if (bc != null) {
                    result += " DCConductingEquipment." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
                }
            }
            result += super.toString(true);
        }
        else {
            result += "(DCConductingEquipment) RDFID: " + rdfid;
        }
        return result;
    }

    public final java.lang.String debugName = "DCConductingEquipment";
    public java.lang.String debugString()
    {
        return debugName;
    }

    // Primitive-value sink; this class has no scalar value of its own.
    public void setValue(java.lang.String s) {
        System.out.println(debugString() + " is not sure what to do with " + s);
    }

    public BaseClass construct() {
        return new DCConductingEquipment();
    }
};
|
<reponame>leticiabernardo/SocialNetworkAutomatization
import base64
from random import randint
def clean_password(password):
    """Decode a base64-encoded password and return it as a UTF-8 string."""
    decoded = base64.b64decode(password)
    return decoded.decode('utf-8')
def happy_birthday_message(make_txt):
    """Return a random greeting from the happy-birthday dictionary file."""
    messages = make_txt.read_data("dictionary/happy_birthday")
    index = randint(0, len(messages) - 1)
    return messages[index]
def string_to_array(text_string):
    """Split a '", "'-delimited string into a list of its parts."""
    parts = text_string.split(", ")
    return parts
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-3112-1
#
# Security announcement date: 2016-10-27 00:00:00 UTC
# Script generation date: 2017-01-27 21:06:51 UTC
#
# Operating System: Ubuntu 16.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - thunderbird:1:45.4.0+build1-0ubuntu0.16.04.1
#
# Last versions recommended by security team:
# - thunderbird:1:45.7.0+build1-0ubuntu0.16.04.1
#
# CVE List:
# - CVE-2016-5250
# - CVE-2016-5257
# - CVE-2016-5270
# - CVE-2016-5272
# - CVE-2016-5274
# - CVE-2016-5276
# - CVE-2016-5277
# - CVE-2016-5278
# - CVE-2016-5280
# - CVE-2016-5281
# - CVE-2016-5284
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade thunderbird=1:45.7.0+build1-0ubuntu0.16.04.1 -y
|
import { combineReducers } from 'redux-immutable';
import { connectRouter } from 'connected-react-router/immutable';
import home from './containers/Home/reducer';
/**
 * Build the application's immutable root reducer.
 * @param history - history instance handed to connected-react-router.
 * @returns the combined reducer with router state bound to `history`.
 */
function rootReducer(history) {
  return combineReducers({
    router: connectRouter(history),
    home,
  });
}

export default rootReducer;
|
#!/bin/sh
# Build and install netgen from source at a pinned version.
# Fix: abort on the first failed command (set -e); previously a failed
# apt-get/clone/configure step silently continued into the next one.
set -e
sudo apt-get install -y build-essential tcl-dev tk-dev
mkdir -p deps
cd deps
git clone https://github.com/RTimothyEdwards/netgen.git
cd netgen
# TODO: our tests don't pass with newer versions
git checkout 1.5.192
./configure
make
sudo make install
|
<reponame>micnews/funnel-stream
var tape = require('tape')
var concat = require('concat-stream')
var funnel = require('../')
// One input stream funneled to one output passes data through in order.
// Fix: removed a leftover `.on('data', console.log)` debug listener that
// spammed test output (concat() already consumes the stream via pipe).
tape('simple', function (t) {
  var f = funnel()
  var input = f.createInput()
  input.write('hello\n')
  input.write('world\n')
  f.createOutput()
    .pipe(concat(function (data) {
      t.equal(data, 'hello\nworld\n')
      t.end()
    }))
  f.end()
})
// Two inputs are funneled into a single output in creation order.
// Fix: removed a leftover `.on('data', console.log)` debug listener.
tape('two-streams', function (t) {
  var f = funnel()
  var input1 = f.createInput()
  var input2 = f.createInput()
  input1.write('hello\n')
  input2.write('world\n')
  f.createOutput()
    .pipe(concat(function (data) {
      t.equal(data, 'hello\nworld\n')
      t.end()
    }))
  f.end()
})
|
import discord
from sqlite3 import Connection
async def update_channel_and_notify(guildID: str, channelID: str, ctx: "discord.ext.commands.Context", conn: Connection):
    """Persist a new notification channel for a guild and confirm in chat.

    Args:
        guildID: ID of the guild whose row in ``main`` is updated.
        channelID: ID of the channel to store (string holding an int).
        ctx: command context used to resolve the channel and send the reply.
        conn: open SQLite connection; the UPDATE is committed here.

    Fixes:
      - ``discord.Context`` does not exist (Context lives in
        discord.ext.commands), so the original annotation raised at import
        time; a string annotation avoids the attribute lookup.
      - The cursor is now closed even if the UPDATE fails.
      - An unresolvable channel no longer crashes on ``None.mention``.
    """
    cur = conn.cursor()
    try:
        cur.execute("UPDATE main SET channel_id = ? WHERE guild_id = ?", (channelID, guildID))
        conn.commit()
    finally:
        cur.close()
    cbchannel = ctx.guild.get_channel(int(channelID))
    if cbchannel is None:
        # get_channel returns None for unknown/uncached channels.
        await ctx.send(":warning: Channel updated, but I could not find that channel in this guild.")
        return
    await ctx.send(f":tada: Channel has been updated to {cbchannel.mention}!")
from typing import Tuple


class GameState:
    """Snapshot of the game: player/enemy grid positions and player health."""

    def __init__(self, player_position: Tuple[int, int], enemy_position: Tuple[int, int], health: int):
        self.player_position = player_position
        self.enemy_position = enemy_position
        self.health = health


def reinforcement_learning(current_state: GameState) -> Tuple[float, bool]:
    """Score a state, returning ``(reward, should_reset)``.

    Reward scheme: -1.0 with a reset when health is 0; -0.5 when the player
    is within one cell of the enemy on both axes; +0.1 otherwise.
    """
    # Death: large penalty, and signal the episode to restart.
    if current_state.health == 0:
        return -1.0, True

    px, py = current_state.player_position
    ex, ey = current_state.enemy_position
    near_enemy = abs(px - ex) <= 1 and abs(py - ey) <= 1
    # Small penalty for standing in danger, small reward for keeping distance.
    return (-0.5, False) if near_enemy else (0.1, False)
<reponame>pratixashah/MyPython<gh_stars>0
import os
import csv

# Input: raw election data, one row per ballot cast (column 2 = candidate).
load_file = os.path.join(".", "Resources", "election_data.csv")

# Output: text file that receives the formatted analysis.
output_file = os.path.join(".", "Analysis", "PyPoll_Analysis.txt")

# Total number of ballots counted.
vote_count = 0

# Candidate name -> number of votes received.
dict_candidate_list = {}

# Read the CSV once, tallying the total and the per-candidate counts.
with open(load_file) as data_file:
    csvreader = csv.reader(data_file, delimiter=',')

    # Skip the header row.
    header = next(csvreader)

    for row in csvreader:
        # Every data row is one ballot.
        vote_count += 1

        # First sighting of a candidate starts their tally at 0.
        if(row[2] not in dict_candidate_list):
            dict_candidate_list[row[2]] = 0

        # Count this ballot for the candidate.
        dict_candidate_list[row[2]] += 1

# Winner = candidate with the maximum vote count.
max_key = max(dict_candidate_list, key=dict_candidate_list.get)

# Build the report header.
output = (
    "\nElection Results"+
    "\n----------------------------" +
    "\nTotal Votes: "+ str(vote_count) +
    f"\n----------------------------\n"
)

# Append each candidate's percentage and absolute vote count.
for candidate in dict_candidate_list:
    output += f"{candidate}: {(dict_candidate_list[candidate]/vote_count)*100:.3f}% ({dict_candidate_list[candidate]})\n"

output += "----------------------------\n"

# Append the winner.
output += "Winner: " + str(max_key)
output += "\n----------------------------"

# Echo the analysis to stdout.
print(output)

# Persist the analysis to the output file.
with open(output_file, "w", newline='') as analysis_file:
    analysis_file.write(output)
|
<gh_stars>0
module.exports = function (context) {
const selectorSelector =
'Property[key.type="Identifier"]' + '[key.name="selectors"]';
console.log(context);
return {
[`${selectorSelector} > Literal`]: function (node) {
context.report({
node,
message: "should be array",
fix: function* (fixer) {
yield fixer.insertTextBefore(node, "[");
yield fixer.insertTextAfter(node, "]");
},
});
},
[`${selectorSelector} > ArrayExpression > ArrayExpression`]: function (
node
) {
context.report({
node,
message: "put items in top-level",
fix: function* (fixer) {
const { start, end } = node;
yield fixer.replaceTextRange([start, start + 1], "");
yield fixer.replaceTextRange([end - 1, end], "");
},
});
},
[`${selectorSelector} > ArrayExpression > Literal`]: function (node) {
const { value } = node;
if (value.indexOf(",") < 0) {
return;
}
context.report({
node,
message: "split selector into array",
fix: function* (fixer) {
const result = value.match(/\s*,\s*/);
const [match] = result;
const start = node.start + result.index + 1;
const end = start + match.length;
const quote = context.getSourceCode().getText(node)[0];
yield fixer.replaceTextRange([start, end], `${quote},${quote}`);
},
});
},
};
};
|
import random
# Function to simulate coin toss
def coin_toss(num_tosses):
    """Simulate `num_tosses` fair coin flips, printing each outcome.

    Returns the number of heads; tails are counted locally but not returned.
    """
    heads = 0
    tails = 0
    for _ in range(num_tosses):
        # Same RNG call as before (choice over [1, 0]) so results match
        # the original for any given seed.
        if random.choice([1, 0]) == 1:
            heads += 1
            print("Heads")
        else:
            tails += 1
            print("Tails")
    return heads
# Driver code: read the toss count from stdin, run the simulation, and
# report how many flips came up heads.
num_tosses = int(input("Enter the number of coin tosses: "))
head_count = coin_toss(num_tosses)
print("Number of heads:", head_count)
from typing import List


class InvestmentOpportunity:
    """Investment opportunity described by city and progress status."""

    def __init__(self, ville, etat_d_avancement):
        self.ville = ville                          # city name
        self.etat_d_avancement = etat_d_avancement  # progress status


def filter_investment_opportunities(query_params: dict, queryset: List[InvestmentOpportunity]) -> List[InvestmentOpportunity]:
    """Filter opportunities by ``ville`` and/or ``etat_d_avancement``.

    ``query_params`` may contain ``"ville"``, ``"etat_d_avancement"`` and
    ``"relation"`` ("AND", the default, or "OR"). Absent criteria are
    ignored; with no criteria at all, every opportunity matches.

    Bug fixed: the original applied AND semantics regardless of
    ``relation`` — the "OR" branch returned the already-AND-filtered list,
    and the "AND" branch merely re-checked membership in ``queryset``
    (always true). "OR" now matches opportunities satisfying at least one
    of the provided criteria.
    """
    ville = query_params.get("ville")
    etat_d_avancement = query_params.get("etat_d_avancement")
    relation = query_params.get("relation", "AND")

    # Build one predicate per criterion that was actually supplied.
    predicates = []
    if ville is not None:
        predicates.append(lambda opp: opp.ville == ville)
    if etat_d_avancement is not None:
        predicates.append(lambda opp: opp.etat_d_avancement == etat_d_avancement)

    if not predicates:
        # No criteria: everything matches under both AND and OR.
        return list(queryset)

    combine = any if relation == "OR" else all
    return [opp for opp in queryset if combine(pred(opp) for pred in predicates)]
import net.runelite.mapping.Export;
import net.runelite.mapping.ObfuscatedGetter;
import net.runelite.mapping.ObfuscatedName;
import net.runelite.mapping.ObfuscatedSignature;
@ObfuscatedName("hg")
public class class225 {
@ObfuscatedName("ar")
@ObfuscatedGetter(
intValue = 896523557
)
static int field2777;
@ObfuscatedName("f")
@ObfuscatedSignature(
descriptor = "(II)Lkw;",
garbageValue = "57239353"
)
@Export("SequenceDefinition_get")
public static SequenceDefinition SequenceDefinition_get(int var0) {
SequenceDefinition var1 = (SequenceDefinition)SequenceDefinition.SequenceDefinition_cached.get((long)var0); // L: 35
if (var1 != null) { // L: 36
return var1;
} else {
byte[] var2 = SequenceDefinition.SequenceDefinition_archive.takeFile(12, var0); // L: 37
var1 = new SequenceDefinition(); // L: 38
if (var2 != null) { // L: 39
var1.decode(new Buffer(var2));
}
var1.postDecode(); // L: 40
SequenceDefinition.SequenceDefinition_cached.put(var1, (long)var0); // L: 41
return var1; // L: 42
}
}
@ObfuscatedName("he")
@ObfuscatedSignature(
descriptor = "(III)V",
garbageValue = "2045310244"
)
@Export("updateItemPile")
static final void updateItemPile(int var0, int var1) {
NodeDeque var2 = Client.groundItems[class26.Client_plane][var0][var1]; // L: 7104
if (var2 == null) { // L: 7105
WorldMapArea.scene.removeGroundItemPile(class26.Client_plane, var0, var1); // L: 7106
} else {
long var3 = -99999999L; // L: 7109
TileItem var5 = null; // L: 7110
TileItem var6;
for (var6 = (TileItem)var2.last(); var6 != null; var6 = (TileItem)var2.previous()) { // L: 7111 7112 7120
ItemComposition var7 = class23.ItemDefinition_get(var6.id); // L: 7113
long var11 = (long)var7.price; // L: 7114
if (var7.isStackable == 1) { // L: 7115
var11 *= (long)(var6.quantity + 1);
}
if (var11 > var3) { // L: 7116
var3 = var11; // L: 7117
var5 = var6; // L: 7118
}
}
if (var5 == null) { // L: 7122
WorldMapArea.scene.removeGroundItemPile(class26.Client_plane, var0, var1); // L: 7123
} else {
var2.addLast(var5); // L: 7126
TileItem var13 = null; // L: 7127
TileItem var8 = null; // L: 7128
for (var6 = (TileItem)var2.last(); var6 != null; var6 = (TileItem)var2.previous()) { // L: 7129 7130 7135
if (var6.id != var5.id) { // L: 7131
if (var13 == null) { // L: 7132
var13 = var6;
}
if (var13.id != var6.id && var8 == null) { // L: 7133
var8 = var6;
}
}
}
long var9 = AttackOption.calculateTag(var0, var1, 3, false, 0); // L: 7137
WorldMapArea.scene.newGroundItemPile(class26.Client_plane, var0, var1, UserComparator7.getTileHeight(var0 * 128 + 64, var1 * 128 + 64, class26.Client_plane), var5, var9, var13, var8); // L: 7138
}
}
} // L: 7107 7124 7139
}
|
/*
 * File:   main.h
 * Author: sirva
 *
 * Created on January 1, 2020, 7:04 PM
 *
 * Public interface of the firmware's send/scan helpers.
 * NOTE(review): implementations are not visible in this header; the
 * per-function notes below are inferred from names/signatures — confirm
 * against main.c.
 */

#ifndef MAIN_H
#define MAIN_H

#ifdef __cplusplus
extern "C" {
#endif

/* Hooks invoked before/after a transmission. */
void beforeSend(void);
void afterSend(void);

/* Transmit `versionCount` version words from payload[]. */
void sendVersion(unsigned int versionCount, unsigned int payload[]);

/* Transmit `dipCount` DIP-switch readings from payload[]. */
void sendDips(unsigned int dipCount, unsigned int payload[]);

/* Transmit a matrix scan result for the given strobe/return pair. */
void sendMatrix(unsigned int nibbleCount, unsigned int strobe, unsigned int retrn, bool state, unsigned int payload[]);

/* Drive one strobe line to `state`. */
void setStrobe(unsigned int strobe, bool state);

/* Sample one return line. */
bool getReturn(unsigned int retrn);

/* Pack four bits (d0..d3) into one nibble value. */
unsigned int setNibble(bool d0, bool d1, bool d2, bool d3);

/* Parity of `n`. */
bool getParity(unsigned int n);

#ifdef __cplusplus
}
#endif

#endif /* MAIN_H */
|
exports.up = function(knex) {
return knex.schema.createTable('texto',function(table){
table.increments();
table.string('conteudo').notNullable();
table.string('titulo').notNullable();
table.string('dificuldade').notNullable();
})
};
exports.down = function(knex) {
return knex.schema.dropTable('texto');
};
|
# Run spoon.sh from this script's own directory.
# Fix: quote the dirname expansion so paths containing spaces work, and
# abort if the cd fails — previously spoon.sh would run from the wrong
# working directory.
cd "$(dirname "$0")" || exit 1
./spoon.sh
exit
<reponame>liziqiang9/ArtText<filename>src/renders/artRender/tool/codeBlockTool.ts
/**
 * Build the small toolbar shown with a rendered code block: a literal
 * "```" marker plus an <input> for choosing the block's language.
 *
 * @param root container element, restyled in place (hidden by default and
 *             non-editable so the rich-text editor leaves it alone).
 * @param lang initial language shown in the input (default: empty string).
 */
export default function createCodeBlockTool(root: HTMLElement, lang: string = ''): void {
    root.setAttribute("class", "art-meta art-shield art-codeBlockTool");
    root.setAttribute("style", "visibility:hidden");
    root.setAttribute("contenteditable", "false");

    // Decorative "```" marker shown before the language input.
    let prompt = document.createElement('span');
    prompt.style.letterSpacing = '4px';
    prompt.style.color = '#aaa';
    prompt.style.position = 'relative';
    prompt.style.top = '3px';
    prompt.innerHTML = '```'

    // Language text field; committing a change retags the code element.
    let langInput = document.createElement('input');
    langInput.setAttribute('style',
        'font-size:14px;letter-spacing:1px;font-weight: 600;padding: 1px 2px;border: none;outline: none;color: #1abc9c;display: flex;flex-direction: column;flex: 1 1 0%;')
    langInput.title = '设置代码语言'; // tooltip text ("set code language") — runtime string kept verbatim
    langInput.value = lang;
    langInput.onchange = changCodeLang;

    root.appendChild(prompt);
    root.appendChild(langInput);
}

// change handler: `this` is the language <input>. Applies "lang-<value>" to
// the first child of the element that follows the toolbar.
// NOTE(review): assumes parentNode.nextSibling is the code container and is
// non-null — confirm against the surrounding artRender DOM structure.
function changCodeLang() {
    let preDom = (<HTMLInputElement>this).parentNode.nextSibling;
    (<HTMLElement>preDom.childNodes[0]).className = 'lang-' + this.value;
}
from django.contrib.auth.models import Group
from usuarios.models import Usuario
from django.http import JsonResponse
# Check whether the user is registered (AJAX validation endpoint).
def validate_user(request):
    """Return JSON {'is_user': bool[, 'error_message': str]}.

    NOTE(review): reads the GET parameter 'username' but matches it against
    Usuario.email — presumably usernames here ARE e-mail addresses; confirm.
    """
    user = request.GET.get('username', None)
    data = {
        'is_user': Usuario.objects.filter(email__iexact=user).exists(),
    }
    if not data['is_user']:
        # Portuguese: "This e-mail is not registered!"
        data['error_message'] = 'Este e-mail não está cadastrado!'
    return JsonResponse(data)
def validate_email(request):
    """AJAX check: reject registration if the e-mail is already taken."""
    email = request.GET.get('email', None)
    already_registered = Usuario.objects.filter(email__iexact=email).exists()
    data = {'is_email': already_registered}
    if already_registered:
        # Portuguese: "This e-mail is already registered!"
        data['error_message'] = 'Este e-mail já está cadastrado!'
    return JsonResponse(data)
def validate_email_registered(request):
    """AJAX check used by flows that require an already-existing account."""
    email = request.GET.get('email', None)
    exists = Usuario.objects.filter(email__iexact=email).exists()
    data = {'is_email_registered': exists}
    if not exists:
        # Portuguese: "This e-mail is not registered in the system!"
        data['error_message'] = 'Este e-mail não está cadastrado no sistema!'
    return JsonResponse(data)
def RetornaGrupo(request):
    """Return the Group referenced by the user's idGroup field.

    If idGroup no longer matches one of the user's groups, fall back to the
    user's first group and persist that choice back onto the user.
    """
    objUsuario = request.user
    try:
        objGrupo = objUsuario.groups.get(pk=objUsuario.idGroup)
    except Group.DoesNotExist:
        # Stale/unset idGroup: repair it with the user's first group.
        # NOTE(review): assumes the user belongs to at least one group —
        # an empty group list would raise IndexError here; confirm.
        listGroups = objUsuario.groups.all()
        objGrupo = listGroups[0]
        objUsuario.idGroup = objGrupo.id
        objUsuario.save()
    return objGrupo
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Laplace distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.contrib.distributions.python.ops import distribution
from tensorflow.contrib.framework.python.framework import tensor_util as contrib_tensor_util
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
class Laplace(distribution.Distribution):
  """The Laplace distribution with location and scale > 0 parameters.

  #### Mathematical details

  The PDF of this distribution is:

  ```f(x | mu, b, b > 0) = 0.5 / b exp(-|x - mu| / b)```

  Note that the Laplace distribution can be thought of two exponential
  distributions spliced together "back-to-back."
  """

  def __init__(self,
               loc,
               scale,
               validate_args=True,
               allow_nan_stats=False,
               name="Laplace"):
    """Construct Laplace distribution with parameters `loc` and `scale`.

    The parameters `loc` and `scale` must be shaped in a way that supports
    broadcasting (e.g., `loc / scale` is a valid operation).

    Args:
      loc: Floating point tensor which characterizes the location (center)
        of the distribution.
      scale: Positive floating point tensor which characterizes the spread of
        the distribution.
      validate_args: Whether to validate input with asserts. If `validate_args`
        is `False`, and the inputs are invalid, correct behavior is not
        guaranteed.
      allow_nan_stats: Boolean, default `False`. If `False`, raise an
        exception if a statistic (e.g. mean/mode/etc...) is undefined for any
        batch member. If `True`, batch members with valid parameters leading to
        undefined statistics will return NaN for this statistic.
      name: The name to give Ops created by the initializer.

    Raises:
      TypeError: if `loc` and `scale` are of different dtype.
    """
    self._allow_nan_stats = allow_nan_stats
    self._validate_args = validate_args
    with ops.name_scope(name, values=[loc, scale]):
      loc = ops.convert_to_tensor(loc)
      scale = ops.convert_to_tensor(scale)
      with ops.control_dependencies([check_ops.assert_positive(scale)] if
                                    validate_args else []):
        self._name = name
        self._loc = array_ops.identity(loc, name="loc")
        self._scale = array_ops.identity(scale, name="scale")
        self._batch_shape = common_shapes.broadcast_shape(
            self._loc.get_shape(), self._scale.get_shape())
        self._event_shape = tensor_shape.TensorShape([])
      contrib_tensor_util.assert_same_float_dtype((loc, scale))

  @property
  def allow_nan_stats(self):
    """Boolean describing behavior when a stat is undefined for batch member."""
    return self._allow_nan_stats

  @property
  def validate_args(self):
    """Boolean describing behavior on invalid input."""
    return self._validate_args

  @property
  def name(self):
    return self._name

  @property
  def dtype(self):
    return self._loc.dtype

  def batch_shape(self, name="batch_shape"):
    """Batch dimensions of this instance as a 1-D int32 `Tensor`.

    The product of the dimensions of the `batch_shape` is the number of
    independent distributions of this kind the instance represents.

    Args:
      name: name to give to the op.

    Returns:
      `Tensor` `batch_shape`
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name):
        return array_ops.shape(self._loc + self._scale)

  def get_batch_shape(self):
    """`TensorShape` available at graph construction time.

    Same meaning as `batch_shape`. May be only partially defined.

    Returns:
      batch shape
    """
    return self._batch_shape

  def event_shape(self, name="event_shape"):
    """Shape of a sample from a single distribution as a 1-D int32 `Tensor`.

    Args:
      name: name to give to the op.

    Returns:
      `Tensor` `event_shape`
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name):
        return constant_op.constant([], dtype=dtypes.int32)

  def get_event_shape(self):
    """`TensorShape` available at graph construction time.

    Same meaning as `event_shape`. May be only partially defined.

    Returns:
      event shape
    """
    return self._event_shape

  @property
  def loc(self):
    """Distribution parameter for the location."""
    return self._loc

  @property
  def scale(self):
    """Distribution parameter for scale."""
    return self._scale

  def mean(self, name="mean"):
    """Mean of this distribution."""
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=[self._scale, self._loc]):
        # Broadcast against scale so the result has the full batch shape.
        return self._loc + array_ops.zeros_like(self._scale)

  def median(self, name="median"):
    """Median of this distribution."""
    return self.mean(name="median")

  def mode(self, name="mode"):
    """Mode of this distribution."""
    return self.mean(name="mode")

  def std(self, name="std"):
    """Standard deviation of this distribution."""
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=[self._scale, self._loc]):
        # Laplace stddev is sqrt(2) * b.
        sqrt_2 = constant_op.constant(math.sqrt(2.), dtype=self.dtype)
        return sqrt_2 * self._scale + array_ops.zeros_like(self._loc)

  def variance(self, name="variance"):
    """Variance of this distribution."""
    with ops.name_scope(self.name):
      with ops.name_scope(name):
        return math_ops.square(self.std())

  def prob(self, x, name="pdf"):
    """The prob of observations in `x` under the Laplace distribution(s).

    Args:
      x: tensor of dtype `dtype`, must be broadcastable with `loc` and `scale`.
      name: The name to give this op.

    Returns:
      pdf: tensor of dtype `dtype`, the pdf values of `x`.
    """
    # Consistency fix: wrap the computation in the same name scopes and
    # apply the same convert/dtype validation as log_prob/cdf/log_cdf; the
    # original returned the bare expression with no scoping or checking.
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=[self._loc, self._scale, x]):
        x = ops.convert_to_tensor(x)
        if x.dtype != self.dtype:
          raise TypeError("Input x dtype does not match dtype: %s vs. %s"
                          % (x.dtype, self.dtype))
        return 0.5 / self._scale * math_ops.exp(
            -math_ops.abs(x - self._loc) / self._scale)

  def log_prob(self, x, name="log_prob"):
    """Log prob of observations in `x` under these Laplace distribution(s).

    Args:
      x: tensor of dtype `dtype`, must be broadcastable with `loc` and `scale`.
      name: The name to give this op.

    Returns:
      log_prob: tensor of dtype `dtype`, the log-probability of `x`.
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=[self._loc, self._scale, x]):
        x = ops.convert_to_tensor(x)
        if x.dtype != self.dtype:
          raise TypeError("Input x dtype does not match dtype: %s vs. %s"
                          % (x.dtype, self.dtype))
        log_2 = constant_op.constant(math.log(2.), dtype=self.dtype)
        return (-log_2 - math_ops.log(self._scale) -
                math_ops.abs(x - self._loc) / self._scale)

  def cdf(self, x, name="cdf"):
    """CDF of observations in `x` under the Laplace distribution(s).

    Args:
      x: tensor of dtype `dtype`, must be broadcastable with `loc` and `scale`.
      name: The name to give this op.

    Returns:
      cdf: tensor of dtype `dtype`, the CDFs of `x`.
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=[self._loc, self._scale, x]):
        x = ops.convert_to_tensor(x)
        if x.dtype != self.dtype:
          raise TypeError("Input x dtype does not match dtype: %s vs. %s"
                          % (x.dtype, self.dtype))
        # sign(y) selects the left/right closed-form branch in one expression.
        y = x - self._loc
        return 0.5 + 0.5 * math_ops.sign(y) * (
            1. - math_ops.exp(-math_ops.abs(y) / self._scale))

  def log_cdf(self, x, name="log_cdf"):
    """Log CDF of observations `x` under the Laplace distribution(s).

    Args:
      x: tensor of dtype `dtype`, must be broadcastable with `loc` and `scale`.
      name: The name to give this op.

    Returns:
      log_cdf: tensor of dtype `dtype`, the log-CDFs of `x`.
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=[self._loc, self._scale, x]):
        return math_ops.log(self.cdf(x))

  def entropy(self, name="entropy"):
    """The entropy of Laplace distribution(s).

    Args:
      name: The name to give this op.

    Returns:
      entropy: tensor of dtype `dtype`, the entropy.
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=[self._loc, self._scale]):
        log_2_e = constant_op.constant(math.log(2.) + 1., dtype=self.dtype)
        # Use broadcasting rules to calculate the full broadcast scale.
        scale = self._scale + array_ops.zeros_like(self._loc)
        return log_2_e + math_ops.log(scale)

  def sample_n(self, n, seed=None, name="sample_n"):
    """Sample `n` observations from the Laplace Distributions.

    Args:
      n: `Scalar` `Tensor` of type `int32` or `int64`, the number of
        observations to sample.
      seed: Python integer, the random seed.
      name: The name to give this op.

    Returns:
      samples: `[n, ...]`, a `Tensor` of `n` samples for each
        of the distributions determined by broadcasting the parameters.
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=[self._loc, self._scale, n]):
        n = ops.convert_to_tensor(n, name="n")
        n_val = tensor_util.constant_value(n)
        shape = array_ops.concat(0, ([n], self.batch_shape()))
        # Sample uniformly-at-random from the open-interval (-1, 1).
        uniform_samples = random_ops.random_uniform(
            shape=shape,
            minval=np.nextafter(self.dtype.as_numpy_dtype(-1.),
                                self.dtype.as_numpy_dtype(0.)),
            maxval=self.dtype.as_numpy_dtype(1.),
            dtype=self.dtype,
            seed=seed)

        # Provide some hints to shape inference
        inferred_shape = tensor_shape.vector(n_val).concatenate(
            self.get_batch_shape())
        uniform_samples.set_shape(inferred_shape)

        # Inverse-CDF transform of the uniform samples into Laplace samples.
        return (self._loc - self._scale * math_ops.sign(uniform_samples) *
                math_ops.log(1. - math_ops.abs(uniform_samples)))

  @property
  def is_reparameterized(self):
    return True

  @property
  def is_continuous(self):
    return True
|
<filename>public/jquery.steps/rollup.config.js
// Rollup build configuration for the jquery-steps plugin (UMD bundle).
const babel = require('rollup-plugin-babel');
const pkg = require('./package');
const now = new Date();

module.exports = {
  entry: 'src/Plugin.js',
  targets: [{
    dest: 'dist/jquery-steps.js',
  }],
  // UMD so the bundle works via <script>, AMD, and CommonJS alike.
  format: 'umd',
  moduleName: 'Steps',
  // jQuery is consumed from the host page, not bundled in.
  external: ['jquery'],
  globals: {
    jquery: '$',
  },
  plugins: [
    babel({
      exclude: 'node_modules/**',
      presets: ['es2015-rollup'],
      babelrc: false
    })
  ],
  sourceMap: true,
  // License banner stamped at the top of the bundle, filled from package.json.
  banner: `/*!
 * Steps v${pkg.version}
 * https://github.com/${pkg.repository}
 *
 * Copyright (c) ${now.getFullYear()} ${pkg.author}
 * Released under the ${pkg.license} license
 */
`,
};
<filename>LYCompLayout/Classes/LYCompLayout.h
//
// LYCompLayout.h
// LYCOMPLAYOUT
//
// CREATED BY BLODELY ON 2017-02-14.
// COPYRIGHT (C) 2017 BLODELY. ALL RIGHTS RESERVED.
//
#ifndef LYCompLayout_h
#define LYCompLayout_h
#import <LYCompLayout/LYLayoutModel.h>
#import <LYCompLayout/LYLayoutConf.h>
#import <LYCompLayout/LYLayoutItem.h>
#endif /* LYCompLayout_h */
|
<filename>packages/fx-core/tests/plugins/resource/aad/unit/graph.test.ts
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import "mocha";
import * as chai from "chai";
import * as sinon from "sinon";
import faker from "faker";
import axios from "axios";
import { GraphClient } from "../../../../../src/plugins/resource/aad/graph";
import { GraphClientErrorMessage } from "../../../../../src/plugins/resource/aad/errors";
describe("Graph API Test", () => {
afterEach(() => {
sinon.restore();
});
describe("createAADApp", () => {
it("Happy Path", async () => {
const graphToken = "graphToken";
const objectId = faker.datatype.uuid();
const displayName = "createAADApp";
const fakeAxiosInstance = axios.create();
sinon.stub(axios, "create").returns(fakeAxiosInstance);
sinon.stub(fakeAxiosInstance, "post").resolves({
data: {
id: objectId,
displayName: displayName,
},
});
const createResult = await GraphClient.createAADApp(graphToken, {
displayName: displayName,
});
chai.assert.equal(createResult.id, objectId);
chai.assert.equal(createResult.displayName, displayName);
});
it("Empty Response", async () => {
const graphToken = "graphToken";
const displayName = "createAADApp";
const fakeAxiosInstance = axios.create();
sinon.stub(axios, "create").returns(fakeAxiosInstance);
sinon.stub(fakeAxiosInstance, "post").resolves({});
try {
const createResult = await GraphClient.createAADApp(graphToken, {
displayName: displayName,
});
} catch (error) {
chai.assert.equal(
error.message,
`${GraphClientErrorMessage.CreateFailed}: ${GraphClientErrorMessage.EmptyResponse}.`
);
}
});
});
describe("updateAADApp", () => {
it("Happy Path", async () => {
const graphToken = "graphToken";
const objectId = faker.datatype.uuid();
const displayName = "updateAADApp";
const fakeAxiosInstance = axios.create();
sinon.stub(axios, "create").returns(fakeAxiosInstance);
sinon.stub(fakeAxiosInstance, "patch").resolves({
data: {
id: objectId,
displayName: displayName,
},
});
await GraphClient.updateAADApp(graphToken, objectId, {
displayName: displayName,
});
});
it("Empty Object Id", async () => {
const graphToken = "graphToken";
try {
const updateResult = await GraphClient.updateAADApp(graphToken, "", {});
} catch (error) {
chai.assert.equal(
error.message,
`${GraphClientErrorMessage.UpdateFailed}: ${GraphClientErrorMessage.AppObjectIdIsNull}.`
);
}
});
});
describe("createAadAppSecret", () => {
it("Happy Path", async () => {
const graphToken = "graphToken";
const objectId = faker.datatype.uuid();
const secret = {
data: {
hint: "hint",
keyId: faker.datatype.uuid(),
endDateTime: "endDate",
startDateTime: "startDate",
secretText: "secret",
},
};
const fakeAxiosInstance = axios.create();
sinon.stub(axios, "create").returns(fakeAxiosInstance);
sinon.stub(fakeAxiosInstance, "post").returns(Promise.resolve(secret));
const createSecretResult = await GraphClient.createAadAppSecret(graphToken, objectId);
chai.assert.equal(createSecretResult.value, secret.data.secretText);
chai.assert.equal(createSecretResult.id, secret.data.keyId);
});
it("Empty Response", async () => {
const graphToken = "graphToken";
const objectId = faker.datatype.uuid();
const fakeAxiosInstance = axios.create();
sinon.stub(axios, "create").returns(fakeAxiosInstance);
sinon.stub(fakeAxiosInstance, "post").resolves({});
try {
const createSecretResult = await GraphClient.createAadAppSecret(graphToken, objectId);
} catch (error) {
chai.assert.equal(
error.message,
`${GraphClientErrorMessage.CreateSecretFailed}: ${GraphClientErrorMessage.EmptyResponse}.`
);
}
});
it("Empty ObjectId", async () => {
const graphToken = "graphToken";
try {
const createSecretResult = await GraphClient.createAadAppSecret(graphToken, "");
} catch (error) {
chai.assert.equal(
error.message,
`${GraphClientErrorMessage.CreateSecretFailed}: ${GraphClientErrorMessage.AppObjectIdIsNull}.`
);
}
});
});
describe("getAadApp", () => {
it("Happy Path", async () => {
const graphToken = "graphToken";
const objectId = faker.datatype.uuid();
const displayName = "getAadApp";
const fakeAxiosInstance = axios.create();
sinon.stub(axios, "create").returns(fakeAxiosInstance);
sinon.stub(fakeAxiosInstance, "get").resolves({
data: {
id: objectId,
displayName: displayName,
},
});
const getResult = await GraphClient.getAadApp(graphToken, objectId);
chai.assert.equal(getResult.id, objectId);
chai.assert.equal(getResult.displayName, displayName);
});
it("Empty Response", async () => {
const graphToken = "graphToken";
const objectId = faker.datatype.uuid();
const fakeAxiosInstance = axios.create();
sinon.stub(axios, "create").returns(fakeAxiosInstance);
sinon.stub(fakeAxiosInstance, "get").resolves({});
try {
const getResult = await GraphClient.getAadApp(graphToken, objectId);
} catch (error) {
chai.assert.equal(
error.message,
`${GraphClientErrorMessage.GetFailed}: ${GraphClientErrorMessage.EmptyResponse}.`
);
}
});
it("Empty ObjectId", async () => {
const graphToken = "graphToken";
try {
const getResult = await GraphClient.getAadApp(graphToken, "");
} catch (error) {
chai.assert.equal(
error.message,
`${GraphClientErrorMessage.GetFailed}: ${GraphClientErrorMessage.AppObjectIdIsNull}.`
);
}
});
});
});
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = ['nchw', 'nhwc', 'normalize_to_uint8', 'normalize_to_unit_float', 'to_png']
import io
from typing import Union
import numpy as np
from PIL import Image
from objax.typing import JaxArray
def nchw(x: Union[np.ndarray, JaxArray]) -> Union[np.ndarray, JaxArray]:
    """Converts an array in (N,H,W,C) format to (N,C,H,W) format.

    The last (channel) axis is moved so that it sits just before the final
    two (spatial) axes; designed for batched 4D image tensors.
    """
    axes = list(range(x.ndim))
    channel_axis = axes.pop()
    axes.insert(-2, channel_axis)
    return x.transpose(axes)
def nhwc(x: Union[np.ndarray, JaxArray]) -> Union[np.ndarray, JaxArray]:
    """Converts an array in (N,C,H,W) format to (N,H,W,C) format.

    The channel axis (third from the end) is moved to the last position.
    """
    axes = list(range(x.ndim))
    channel_axis = axes.pop(-3)
    axes.append(channel_axis)
    return x.transpose(axes)
def normalize_to_uint8(x: Union[np.ndarray, JaxArray]) -> Union[np.ndarray, JaxArray]:
    """Map a float image in [1/256-1, 1-1/256] to uint8 {0, 1, ..., 255}."""
    # Same arithmetic as 128 * (x + 255/256), kept in the original operation
    # order so rounding behavior is bit-for-bit identical.
    offset = 1 - 1 / 256
    return (128 * (x + offset)).clip(0, 255).round().astype('uint8')
def normalize_to_unit_float(x: Union[np.ndarray, JaxArray]) -> Union[np.ndarray, JaxArray]:
    """Map an uint8 image in {0, 1, ..., 255} to float interval [1/256-1, 1-1/256]."""
    # Inverse of normalize_to_uint8: scale by 1/128, then shift down.
    scale = 1 / 128
    shift = 1 / 256 - 1
    return x * scale + shift
def to_png(x: np.ndarray) -> bytes:
    """Converts numpy array in (C,H,W) format into PNG format.

    Float inputs are assumed to lie in [-1, 1] (per normalize_to_unit_float);
    they are rescaled to [0, 255], transposed to (H,W,C) and rounded to uint8.
    Returns the encoded PNG as raw bytes.
    """
    if x.dtype in (np.float64, np.float32, np.float16):
        # (x + 1) * 127.5 maps [-1, 1] -> [0, 255]; transpose CHW -> HWC.
        x = np.transpose((x + 1) * 127.5, [1, 2, 0]).clip(0, 255).round().astype('uint8')
    elif x.dtype != np.uint8:
        raise ValueError('Unsupported array type, expecting float or uint8', x.dtype)
    # NOTE(review): uint8 input is used as-is (no transpose), so it appears to
    # be expected already in (H,W,C) layout despite the docstring — confirm.
    if x.shape[2] == 1:
        # Grayscale: replicate the single channel to RGB for PIL.
        x = np.broadcast_to(x, x.shape[:2] + (3,))
    with io.BytesIO() as f:
        Image.fromarray(x).save(f, 'png')
        return f.getvalue()
|
package edu.washington.cse.instrumentation.analysis.rectree;
import edu.washington.cse.instrumentation.analysis.list.PrimeStorage;
/**
 * Leaf node of a recursion tree that carries "priming" information
 * ({@link PrimeStorage}). Joins absorb PARAMETER nodes, delegate to
 * CALLSITE/COMPRESSED_CALLSITE nodes, and merge storage with other
 * IMMEDIATE_PRIME nodes.
 */
public class PrimingNode extends Node {
    // Immutable priming payload for this node.
    public final PrimeStorage ps;

    public PrimingNode(final PrimeStorage ps) {
        this.ps = ps;
    }

    @Override
    public NodeKind getKind() {
        return NodeKind.IMMEDIATE_PRIME;
    }

    /**
     * Joins this node with {@code n}. Returns {@code null} when the kinds
     * are incompatible (neither parameter, callsite, nor prime).
     */
    @Override
    public Node joinWith(final Node n) {
        if(n.getKind() == NodeKind.PARAMETER) {
            // A parameter node is absorbed by the prime node.
            return this;
        } else if(n.getKind() == NodeKind.CALLSITE || n.getKind() == NodeKind.COMPRESSED_CALLSITE) {
            // Callsite nodes know how to join with primes; delegate.
            return n.joinWith(this);
        } else if(n.getKind() != NodeKind.IMMEDIATE_PRIME) {
            return null;
        } else {
            // Prime + prime: merge the two storages.
            return new PrimingNode(ps.join(((PrimingNode)n).ps));
        }
    }

    @Override
    public String label() {
        return "P";
    }

    /** Structural equality: same kind and equal prime storage. */
    @Override
    public boolean equal(final Node root) {
        if(root.getKind() != NodeKind.IMMEDIATE_PRIME) {
            return false;
        }
        return this.ps.equals(((PrimingNode)root).ps);
    }

    @Override
    public void walk(final TreeVisitor v) {
        v.visitPrime(this);
    }

    /** Substitution primes the child with this node's storage. */
    @Override
    public Node subst(final Node child) {
        return child.prime(ps);
    }

    /** Priming a prime combines both storages into a new node. */
    @Override
    public Node prime(final PrimeStorage ps) {
        return new PrimingNode(this.ps.combine(ps));
    }

    @Override
    protected boolean computeContainsAbstraction() {
        return true;
    }

    @Override
    public void toString(final StringBuilder sb) {
        sb.append(label());
        sb.append(ps.getPrimingString());
    }

    @Override
    public void toConcreteSyntax(final StringBuilder sb) {
        sb.append(label());
        sb.append(ps.getPrimingString());
    }

    @Override
    public void visit(final TreeVisitor v) {
        v.visitPrime(this);
    }
}
|
package com.littlejenny.gulimall.product.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.littlejenny.common.utils.PageUtils;
import com.littlejenny.gulimall.product.entity.SpuInfoEntity;
import com.littlejenny.gulimall.product.vo.addproduct.SpuVO;
import java.util.Map;
/**
* spu信息
*
* @author littlejenny
* @email <EMAIL>
* @date 2021-07-16 15:11:54
*/
public interface SpuInfoService extends IService<SpuInfoEntity> {

    /**
     * Generic paged query over SPU records.
     *
     * @param params request parameters (paging, filters)
     * @return paged result wrapper
     */
    PageUtils queryPage(Map<String, Object> params);

    /**
     * Persists a full SPU submission (base info plus related detail records).
     *
     * @param spuvo the SPU view object from the add-product form
     */
    void saveDetail(SpuVO spuvo);

    /**
     * Paged query filtered by category id, brand id, keyword and status.
     *
     * @param params request parameters including catelogId/brandId/key/status
     * @return paged result wrapper
     */
    PageUtils queryByCidBidKeyStatus(Map<String, Object> params);

    /**
     * Publishes ("up") the SPU with the given id.
     *
     * @param spuId the SPU identifier
     */
    void upSpuById(Long spuId);
}
|
var AbstractJoiningTransformer;
if (typeof exports !== 'undefined') {
AbstractJoiningTransformer = require('./AbstractJoiningTransformer');
}
(function () {
// Matches each camelCase "hump": a lower-case letter immediately followed by
// an upper-case letter (used to derive data-* attribute names).
var camelCase = /[a-z][A-Z]/g;

// True when `item` is a DOM element (nodeType 1). Preserves the original
// short-circuit semantics: falsy inputs are returned as-is, not coerced.
function _isElement (item) {
    var isObject = item && typeof item === 'object';
    return isObject && item.nodeType === 1;
}

// Converts a two-character camelCase match such as "aB" into "a-b".
function _makeDatasetAttribute (n0) {
    return n0[0] + '-' + n0[1].toLowerCase();
}
/**
 * Transformer that accumulates its output into a plain string.
 * Callable with or without `new` (the guard below re-invokes as a constructor).
 * @param {String} [s] - Initial string content (defaults to '').
 * @param {Object} cfg - Configuration passed to setConfig().
 */
function StringJoiningTransformer (s, cfg) {
    if (!(this instanceof StringJoiningTransformer)) {
        return new StringJoiningTransformer(s, cfg);
    }
    this.setConfig(cfg); // Include this in any subclass of AbstractJoiningTransformer
    this._str = s || '';
}
// Inherit shared behavior (e.g., setConfig/_requireSameChildren) from the base class.
StringJoiningTransformer.prototype = new AbstractJoiningTransformer();
/**
 * Appends a value to the current accumulation target: the pending object
 * property (after propOnly()), the current array, or the output string.
 * @param {*} s - Value to append.
 * @returns {StringJoiningTransformer} this, for chaining.
 * @throws when in an object state without a pending propOnly() property.
 */
StringJoiningTransformer.prototype.append = function (s) {
    // Todo: Could allow option to disallow elements within arrays, etc. (add states and state checking)
    if (this.propOnlyState) {
        // Fix: store the argument `s`; the previous code referenced an
        // undeclared identifier `val`, throwing a ReferenceError here.
        this._obj[this._objPropTemp] = s;
        this.propOnlyState = false;
        this._objPropTemp = undefined;
    }
    else if (this._arrItemState) {
        this._arr.push(s);
    }
    else if (this._objPropState) {
        throw "Object values must be added via propValue() or after propOnly() when in an object state.";
    }
    else {
        this._str += s;
    }
    return this;
};
/**
 * @returns {String} The accumulated output string.
 */
StringJoiningTransformer.prototype.get = function () {
    var result = this._str;
    return result;
};
/**
 * Sets a property on the object currently being built.
 * @param {String} prop - Property name.
 * @param {*} val - Property value.
 * @returns {StringJoiningTransformer} this, for chaining.
 * @throws when no object state is active.
 */
StringJoiningTransformer.prototype.propValue = function (prop, val) {
    if (this._objPropState) {
        this._obj[prop] = val;
        return this;
    }
    throw "propValue() can only be called after an object state has been set up.";
};
/**
 * Declares that the next append() performed inside `cb` supplies the value
 * for property `prop` of the object currently being built.
 * @param {String} prop - Property name to fill.
 * @param {Function} cb - Callback that must cause exactly one value to be appended.
 * @returns {StringJoiningTransformer} this, for chaining.
 * @throws when not in an object state, when a propOnly() is already pending,
 *   or when the callback fails to set a value.
 */
StringJoiningTransformer.prototype.propOnly = function (prop, cb) {
    if (!this._objPropState) {
        throw "propOnly() can only be called after an object state has been set up.";
    }
    if (this.propOnlyState) {
        throw "propOnly() can only be called again after a value is set";
    }
    this.propOnlyState = true;
    // Save/restore the pending property name so nested calls don't clobber it.
    var oldPropTemp = this._objPropTemp;
    this._objPropTemp = prop;
    cb.call(this);
    this._objPropTemp = oldPropTemp;
    // append() clears propOnlyState when it consumes the pending property;
    // if it is still set, the callback never appended a value.
    if (this.propOnlyState) {
        throw "propOnly() must be followed up with setting a value.";
    }
    return this;
};
/**
 * Serializes (or stages) an object. While nested inside another array/object
 * the built object is staged via append(); otherwise it is serialized onto
 * the output string (as JHTML, via Stringifier, or JSON depending on config).
 * @param {Object|Element} obj - Seed object, or a DOM element converted via JHTML.
 * @param {Function} [cb] - Invoked with object-property state active.
 * @param {String[]} [usePropertySets] - Names of property sets to apply.
 * @param {Object} [propSets] - Extra properties merged into the object.
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.object = function (obj, cb, usePropertySets, propSets) {
    this._requireSameChildren('string', 'object');
    var oldObjPropState = this._objPropState;
    var oldObj = this._obj;
    this._obj = obj || {};
    if (_isElement(obj)) {
        this._obj = JHTML.toJSONObject(this._obj);
    }
    // Todo: Allow in this and subsequent JSON methods ability to create jml-based JHTML
    if (usePropertySets !== undefined) { // Fix: was `undef`, an undeclared identifier (ReferenceError at runtime)
        usePropertySets.reduce(function (o, psName) {
            return this._usePropertySets(o, psName); // Todo: Put in right scope
        }.bind(this), {});
        // NOTE(review): the reduce result is discarded; presumably it should
        // be merged into this._obj — confirm intent before changing.
    }
    if (propSets !== undefined) { // Fix: was `undef`
        Object.assign(this._obj, propSets);
    }
    if (cb) {
        this._objPropState = true;
        cb.call(this);
        this._objPropState = oldObjPropState;
    }
    if (oldObjPropState || this._arrItemState) { // Not ready to serialize yet as still inside another array or object
        this.append(this._obj);
    }
    else if (this._cfg.JHTMLForJSON) {
        this.append(JHTML.toJHTMLString(this._obj));
    }
    else if (this._cfg.mode !== 'JavaScript') {
        // Allow this method to operate on non-finite numbers and functions
        var stringifier = new Stringifier({mode: 'JavaScript'});
        this.append(stringifier.walkJSONObject(this._obj));
    }
    else {
        this.append(JSON.stringify(this._obj));
    }
    this._obj = oldObj;
    return this;
};
/**
 * Serializes (or stages) an array. Mirrors object(): while nested inside
 * another array/object the value is staged via append(); otherwise it is
 * serialized onto the output string.
 * @param {Array|Element} arr - Seed array, or a DOM element converted via JHTML.
 * @param {Function} [cb] - Invoked with array-item state active.
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.array = function (arr, cb) {
    this._requireSameChildren('string', 'array');
    var oldArr = this._arr;
    // Todo: copy array?
    this._arr = arr || [];
    if (_isElement(arr)) {
        this._arr = JHTML.toJSONObject(this._arr);
    }
    var oldArrItemState = this._arrItemState;
    if (cb) {
        var oldObjPropState = this._objPropState;
        this._objPropState = false;
        this._arrItemState = true;
        cb.call(this);
        this._arrItemState = oldArrItemState;
        this._objPropState = oldObjPropState;
    }
    if (oldArrItemState || this._objPropState) { // Not ready to serialize yet as still inside another array or object
        this.append(this._arr);
    }
    else if (this._cfg.JHTMLForJSON) {
        this.append(JHTML.toJHTMLString(this._arr));
    }
    else if (this._cfg.mode !== 'JavaScript') {
        // Allow this method to operate on non-finite numbers and functions
        var stringifier = new Stringifier({mode: 'JavaScript'});
        // Fix: serialize the array being built, not this._obj (copy-paste
        // slip from object()).
        this.append(stringifier.walkJSONObject(this._arr));
    }
    else {
        this.append(JSON.stringify(this._arr));
    }
    this._arr = oldArr;
    return this;
};
/**
 * Appends a string value. With a callback, nested string() calls accumulate
 * into a temporary buffer that is prefixed to `str` on completion.
 * @param {String|Element} str - The string (or DOM element converted via JHTML).
 * @param {Function} [cb] - Callback whose nested output is gathered first.
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.string = function (str, cb) {
    if (_isElement(str)) {
        str = JHTML.toJSONObject(str);
    }
    var tmpStr = '';
    // A defined _strTemp signals we are nested inside an outer string() call.
    var _oldStrTemp = this._strTemp;
    if (cb) {
        this._strTemp = '';
        cb.call(this);
        tmpStr = this._strTemp;
        this._strTemp = _oldStrTemp;
    }
    if (_oldStrTemp !== undefined) {
        // Nested: accumulate into the outer call's buffer instead of output.
        this._strTemp += str;
    }
    /*
    // What was this for?
    else if (this._cfg.mode !== 'JavaScript') {
        // Allow this method to operate on non-finite numbers and functions
        var stringifier = new Stringifier({mode: 'JavaScript'});
        this.append(stringifier.walkJSONObject(this._obj));
    }
    */
    else {
        this.append(tmpStr + str); // argument had been wrapped in JSON.stringify()
    }
    return this;
};
/**
 * Appends the string form of a number.
 * @param {Number|Element} num - The number (or a DOM element converted via JHTML).
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.number = function (num) {
    var value = _isElement(num) ? JHTML.toJSONObject(num) : num;
    this.append(value.toString());
    return this;
};
/**
 * Appends 'true' or 'false' based on the truthiness of the value.
 * @param {Boolean|Element} bool - The flag (or a DOM element converted via JHTML).
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.boolean = function (bool) {
    var value = _isElement(bool) ? JHTML.toJSONObject(bool) : bool;
    this.append(value ? 'true' : 'false');
    return this;
};
/**
 * Appends the literal 'null'.
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype['null'] = function () {
    return this.append('null');
};
/**
 * Appends the literal 'undefined' (JavaScript mode only — not valid JSON).
 * @returns {StringJoiningTransformer} this, for chaining.
 * @throws when not in JavaScript mode.
 */
StringJoiningTransformer.prototype['undefined'] = function () {
    if (this._cfg.mode === 'JavaScript') {
        return this.append('undefined');
    }
    throw 'undefined is not allowed unless added in JavaScript mode';
};
/**
 * Appends a non-finite number such as Infinity or NaN (JavaScript mode only).
 * @param {Number|Element} num - The number (or a DOM element converted via JHTML).
 * @returns {StringJoiningTransformer} this, for chaining.
 * @throws when not in JavaScript mode.
 */
StringJoiningTransformer.prototype.nonfiniteNumber = function (num) {
    if (this._cfg.mode !== 'JavaScript') {
        throw 'Non-finite numbers are not allowed unless added in JavaScript mode';
    }
    var value = _isElement(num) ? JHTML.toJSONObject(num) : num;
    this.append(value.toString());
    return this;
};
/**
 * Appends a function's source text (JavaScript mode only — not valid JSON).
 * @param {Function|Element} func - The function (or a DOM element converted via JHTML).
 * @returns {StringJoiningTransformer} this, for chaining.
 * @throws when not in JavaScript mode.
 */
StringJoiningTransformer.prototype['function'] = function (func) {
    if (this._cfg.mode !== 'JavaScript') {
        throw 'function is not allowed unless added in JavaScript mode';
    }
    var value = _isElement(func) ? JHTML.toJSONObject(func) : func;
    this.append(value.toString());
    return this;
};
/**
 * Appends a markup element to the output. Flexible signature: `atts` and/or
 * `childNodes` may be omitted, and either position may hold the callback.
 * When a callback is given the start tag is emitted piecewise and left open
 * so the callback can add attributes; otherwise Jamilih renders the whole
 * element in one call.
 * @param {String|Element} elName - Tag name, or a DOM element whose name and
 *   attributes are harvested.
 * @param {Object|Array|Function} [atts] - Attribute map (or children/callback).
 * @param {Array|Function} [childNodes] - Child nodes (or the callback).
 * @param {Function} [cb] - Invoked with `this` bound to the transformer.
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.element = function (elName, atts, childNodes, cb) {
    // Normalize optional arguments: each of atts/childNodes may be the callback.
    if (Array.isArray(atts)) {
        cb = childNodes;
        childNodes = atts;
        atts = {};
    }
    else if (typeof atts === 'function') {
        cb = atts;
        childNodes = undefined;
        atts = {};
    }
    if (typeof childNodes === 'function') {
        cb = childNodes;
        childNodes = undefined;
    }
    // Todo: allow for cfg to produce Jamilih string output or hXML string output
    var method = this._cfg.xmlElements ? 'toXML' : 'toHTML';
    if (!cb) {
        // Note that Jamilih currently has an issue with 'selected', 'checked',
        // 'value', 'defaultValue', 'for', 'on*', 'style' (workaround: pass
        // an empty callback as the last argument to element())
        this.append(jml[method].call(jml, elName, atts, childNodes));
        return this;
    }
    if (typeof elName === 'object') {
        // Harvest name/attributes from a DOM element; explicit atts win.
        var objAtts = {};
        Array.from(elName.attributes).forEach(function (att, i) {
            objAtts[att.name] = att.value;
        });
        atts = Object.assign(objAtts, atts);
        elName = elName.nodeName;
    }
    this.append('<' + elName);
    var oldTagState = this._openTagState;
    this._openTagState = true;
    if (atts) {
        Object.keys(atts).forEach(function (att) {
            this.attribute(att, atts[att]);
        }, this);
    }
    if (childNodes && childNodes.length) {
        // NOTE(review): the open tag does not appear to be closed with '>'
        // before the children are appended here — verify against expected output.
        this._openTagState = false;
        this.append(jml[method].call(jml, {'#': childNodes}));
    }
    cb.call(this);
    // Todo: Depending on an this._cfg.xmlElements option, allow for
    // XML self-closing when empty or as per the tag, HTML
    // self-closing tags (or polyglot-friendly self-closing)
    if (this._openTagState) {
        this.append('>');
    }
    this.append('</' + elName + '>');
    this._openTagState = oldTagState;
    return this;
};
/**
 * Appends an attribute to the currently open start tag.
 * @param {String} name - Attribute name ('dataset' and '$a' get special handling in HTML mode).
 * @param {String|Object|Array} val - Value; for 'dataset' an object of camelCase
 *   keys, for '$a' an array of [name, value] pairs.
 * @param {Boolean} [avoidAttEscape] - Skip escaping of '&' and '"'.
 * @returns {StringJoiningTransformer} this, for chaining.
 * @throws when the opening tag has already been closed.
 */
StringJoiningTransformer.prototype.attribute = function (name, val, avoidAttEscape) {
    if (!this._openTagState) {
        throw "An attribute cannot be added after an opening tag has been closed (name: " + name + "; value: " + val + ")";
    }
    if (!this._cfg.xmlElements) {
        if (typeof val === 'object') {
            switch (name) {
            case 'dataset':
                // Fix: pass `this` as forEach's thisArg; the callback
                // previously lost the transformer context (TypeError).
                Object.keys(val).forEach(function (att) {
                    this.attribute('data-' + att.replace(camelCase, _makeDatasetAttribute), val[att]);
                }, this);
                break;
            case '$a': // Ordered attributes
                // Fix: same missing thisArg as above.
                val.forEach(function (attArr) {
                    this.attribute(attArr[0], attArr[1]);
                }, this);
                break;
            }
            return this;
        }
        name = {className: 'class', htmlFor: 'for'}[name] || name;
    }
    // Fix: escape to HTML entities; the replacements had degraded to
    // identity no-ops ('&' -> '&', '"' -> '"').
    val = (this._cfg.preEscapedAttributes || avoidAttEscape) ? val : val.replace(/&/g, '&amp;').replace(/"/g, '&quot;');
    this.append(' ' + name + '="' + val + '"');
    return this;
};
/**
 * Appends escaped text content, closing the open start tag first if needed.
 * @param {String} txt - Raw text to escape and append.
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.text = function (txt) {
    if (this._openTagState) {
        this.append('>');
        this._openTagState = false;
    }
    // Fix: escape markup-significant characters; the replacements had
    // degraded to identity no-ops ('&' -> '&', '<' -> '<').
    this.append(txt.replace(/&/g, '&amp;').replace(/</g, '&lt;')); // Escape gt if inside CDATA
    return this;
};
/**
 * Unlike text(), does not escape for HTML; unlike string(), does not perform JSON stringification;
 * unlike append(), does not do other checks (but still varies in its role across transformers).
 * Writes directly to the output string, bypassing object/array staging.
 * @param {String} str
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.rawAppend = function (str) {
    this._str += str;
    return this;
};
/**
 * Appends text directly to the output string without escaping or state checks.
 * @param {String} str
 * @returns {StringJoiningTransformer} this, for chaining.
 */
StringJoiningTransformer.prototype.plainText = function (str) {
    this._str = this._str + str;
    return this;
};
// Todo: Implement comment(), processingInstruction(), etc.

// Export: attach to window in browsers, otherwise export as a CommonJS module.
if (typeof module === 'undefined') {
    window.StringJoiningTransformer = StringJoiningTransformer;
}
else {
    module.exports = StringJoiningTransformer;
}
}());
|
<filename>server/dev-server.js
var express = require('express');
var cors = require('cors');
/**
 * Minimal development web server: serves the current directory statically
 * with CORS enabled on port 8080, resolving extensionless URLs against the
 * listed extensions.
 */
function Server() {
    var port = 8080;
    var app = express();
    app.use(cors());
    app.use(express.static('.', { etag: true, extensions: ['css', 'js', 'png', 'jpg', 'html', 'json'] }));
    // start the server
    app.listen(port, function () {
        console.log('Web server listening on port ' + port);
    });
}
// Fix: export via module.exports; assigning to `module.express` exports
// nothing, so require()-ing this file never exposed the server instance.
module.exports = new Server();
/**
 * Validates a city-reward input: must be a non-empty string of at most 50 characters.
 *
 * Fix: the previous !empty() check incorrectly rejected the string "0"
 * (empty('0') is true in PHP); compare against '' explicitly instead.
 * Also guards against non-string input, which strlen() no longer accepts.
 *
 * @param mixed $input The raw user-supplied value.
 * @return bool True when the input is a valid city reward string.
 */
function validateCityRewardInput($input): bool {
    $maxLength = 50;
    return is_string($input) && $input !== '' && strlen($input) <= $maxLength;
}
<gh_stars>0
package com.stylefeng.guns.api.film.vo;
import java.io.Serializable;
import lombok.Data;
/**
* Created by xianpeng.xia
* on 2019/9/15 9:38 下午
*/
@Data
public class CatVO implements Serializable {
    // Category identifier.
    private String catId;
    // Display name of the category.
    private String catName;
    // NOTE(review): with Lombok @Data a boolean field named `isActive`
    // generates accessors isActive()/setActive(), which some JSON mappers
    // serialize under the key "active" — confirm the wire format before
    // relying on the field name.
    private boolean isActive;
}
|
# Ask the user for a value on stdin.
data = input('Enter a value: ')
# Echo the value back (output identical to print('You entered:', data)).
print(f'You entered: {data}')
# Chain with && so a failed `cd` (or any earlier step) aborts instead of
# running the build from the wrong directory.
cd ../.. && ./scripts/huffmanCompressorDecompressor.sh && cd testbenches && time iverilog -s huffmanTestbench -o testbench.vvp huffmanTestbench.sv ../../wrappers/huffmanCompressorDecompressorWrapper.v ../../huffmanCompressorDecompressor.v -g2005-sv && time vvp testbench.vvp -fst
|
-- ***************************************************************************
-- File: 10_17.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
-- List every role that has been granted the CREATE SESSION system privilege,
-- spooling the output to 10_17.lis.
SPOOL 10_17.lis
SELECT *
FROM role_sys_privs
WHERE privilege = 'CREATE SESSION';
SPOOL OFF
|
package io.opensphere.core.geometry.renderproperties;
/** A render property that specifies point roundness only. */
/** A render property that specifies point roundness only. */
public interface PointRoundnessRenderProperty extends RenderProperties, Comparable<PointRoundnessRenderProperty>
{
    /**
     * Covariant override of clone: implementations return a copy typed as
     * this interface rather than Object.
     */
    @Override
    PointRoundnessRenderProperty clone();

    /**
     * Get if the point should be round (otherwise it should be square).
     *
     * @return If the point is round.
     */
    boolean isRound();

    /**
     * Set if the point should be round (otherwise it should be square).
     *
     * @param round If the point is round.
     */
    void setRound(boolean round);
}
|
// Re-export oclif's command runner as this package's CLI entry point.
export { run } from '@oclif/command'
|
#!/bin/sh
# CocoaPods-generated script: embeds (and re-signs) vendored frameworks into
# the application bundle during an Xcode build phase.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework source: built-products dir, its basename there, or a literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow symlinks so rsync copies the actual framework contents.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Fall back for non-framework (bare library) layouts.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # STRIP_BINARY_RETVAL is set by strip_invalid_archs above.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Only sign when an identity is set and signing is both required and allowed.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"

    # Background the command when parallel signing is enabled (waited on at script end).
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  # Signal success to callers (install_dsym checks this flag).
  STRIP_BINARY_RETVAL=1
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/ZFClearCacheTool/ZFClearCacheTool.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/ZFClearCacheTool/ZFClearCacheTool.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
<reponame>thebakeryio/meteor-ddp-monitor<gh_stars>100-1000
import {
SET_PACKAGE_LIST,
SET_SECURITY_TAB,
CLEAR_METHOD_SECURITY
} from '../constants'
import {
NEW_TRACE, CLEAR_LOGS
} from '../../ddp/constants/action-types';
import Immutable from 'immutable'
// Redux reducers for the DDP monitor's security/packages panel.
export default {
  // List of packages reported by the inspected app.
  packageList (state = Immutable.List(), action) {
    switch(action.type){
      case SET_PACKAGE_LIST:
        return Immutable.List(action.data);
      default:
        return state;
    }
  },
  // Index of the currently selected security tab.
  securityTabsIndex (state = 0, action) {
    switch(action.type){
      case SET_SECURITY_TAB:
        return action.tab;
      default:
        return state;
    }
  },
  // Map of method name -> params from the latest observed 'method' DDP
  // message (messages whose id starts with '/audit' are ignored).
  methodsSecurity (state = Immutable.Map(), action) {
    switch(action.type){
      case NEW_TRACE:
        if(action.trace.message && action.trace.message.msg === 'method' &&
          !action.trace.message.id.startsWith('/audit')){
          return state.set(action.trace.message.method, action.trace.message.params);
        } else {
          return state;
        }
      case CLEAR_METHOD_SECURITY:
        return Immutable.Map();
      default:
        return state;
    }
  },
  // Accumulated 'result' DDP traces; emptied on CLEAR_LOGS.
  resultTraces (state = Immutable.List(), action) {
    switch(action.type){
      case NEW_TRACE:
        if(action.trace.message && action.trace.message.msg === 'result'){
          return state.push(action.trace);
        } else {
          return state;
        }
      case CLEAR_LOGS:
        return Immutable.List();
      default:
        return state;
    }
  }
};
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
// Name :
// Author : Avi
// Revision : $Revision: #4 $
//
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
// Description :
//
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
#include <stdexcept>
#include <string>

#include "BoostPythonUtil.hpp"
#include "Variable.hpp"
// Appends the elements of a Python list to int_vec.
// boost::python::extract throws if an element is not convertible to int.
void BoostPythonUtil::list_to_int_vec(const boost::python::list& list, std::vector<int>& int_vec)
{
    int the_list_size = len(list);
    int_vec.reserve(the_list_size);
    for (int i = 0; i < the_list_size; ++i) {
        int_vec.push_back(boost::python::extract<int>(list[i]));
    }
}
// Appends the elements of a Python list to vec as strings.
// boost::python::extract throws if an element is not convertible to str.
void BoostPythonUtil::list_to_str_vec(const boost::python::list& list, std::vector<std::string>& vec)
{
    int the_list_size = len(list);
    vec.reserve(the_list_size);
    for (int i = 0; i < the_list_size; ++i) {
        vec.push_back(boost::python::extract<std::string>(list[i]));
    }
}
// Appends the elements of a Python list to vec as Variable objects.
// boost::python::extract throws if an element is not convertible to Variable.
void BoostPythonUtil::list_to_str_vec(const boost::python::list& list, std::vector<Variable>& vec )
{
    int the_list_size = len(list);
    vec.reserve(the_list_size);
    for (int i = 0; i < the_list_size; ++i) {
        vec.push_back(boost::python::extract<Variable>(list[i]));
    }
}
// Converts a Python dict into (key, value) string pairs. Keys must be
// strings; values may be strings or ints (ints are rendered in decimal).
// Throws std::runtime_error for any other value type.
void BoostPythonUtil::dict_to_str_vec(const boost::python::dict& dict, std::vector<std::pair<std::string,std::string> >& str_pair_vec)
{
    boost::python::list keys = dict.keys();
    const int no_of_keys = len(keys);
    str_pair_vec.reserve(no_of_keys);

    for(int i = 0; i < no_of_keys; ++i) {
        std::string second;
        std::string first = boost::python::extract<std::string>(keys[i]);
        if (boost::python::extract<std::string>(dict[keys[i]]).check()) {
            second = boost::python::extract<std::string>(dict[keys[i]]);
        }
        else if (boost::python::extract<int>(dict[keys[i]]).check()) {
            int the_int = boost::python::extract<int>(dict[keys[i]]);
            // std::to_string replaces boost::lexical_cast, removing the
            // reliance on a header this file never visibly includes.
            second = std::to_string(the_int);
        }
        else throw std::runtime_error("BoostPythonUtil::dict_to_str_vec: type not convertible to string or integer");
        str_pair_vec.emplace_back(first,second);
    }
}
// Converts a Python dict into Variable(name, value) entries. Keys must be
// strings; values may be strings or ints (ints are rendered in decimal).
// Throws std::runtime_error for any other value type.
void BoostPythonUtil::dict_to_str_vec(const boost::python::dict& dict, std::vector<Variable>& vec)
{
    boost::python::list keys = dict.keys();
    const int no_of_keys = len(keys);
    vec.reserve(no_of_keys);

    for(int i = 0; i < no_of_keys; ++i) {
        std::string second;
        std::string first = boost::python::extract<std::string>(keys[i]);
        if (boost::python::extract<std::string>(dict[keys[i]]).check()) {
            second = boost::python::extract<std::string>(dict[keys[i]]);
        }
        else if (boost::python::extract<int>(dict[keys[i]]).check()) {
            int the_int = boost::python::extract<int>(dict[keys[i]]);
            // std::to_string replaces boost::lexical_cast, removing the
            // reliance on a header this file never visibly includes.
            second = std::to_string(the_int);
        }
        else throw std::runtime_error("BoostPythonUtil::dict_to_str_vec: type not convertible to string or integer");
        vec.emplace_back(first,second);
    }
}
|
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Loading variables
echo ""
# Fix: typo "Loanding" in the user-facing message.
echo "Loading .env file"
source ./variables.env
export i=6

# Install required packages
echo ""
echo "Installing required packages"
apt-get -qq update > /dev/null
apt-get -qq install -y jq > /dev/null

# Configure VXLAN
echo ""
echo "Getting existing nodes IP address"
declare -a VMs=("$ABM_WS" "$ABM_CP1" "$ABM_CP2" "$ABM_CP3" "$ABM_WN1" "$ABM_WN2")
declare -a IPs=()
for vm in "${VMs[@]}"
do
  # Internal IP of each cluster VM, resolved via gcloud.
  IP=$(gcloud compute instances describe "$vm" --zone "${ZONE}" \
    --format='get(networkInterfaces[0].networkIP)')
  IPs+=("$IP")
done

echo ""
echo "Configuring VXLAN"
ip link add vxlan0 type vxlan id 47 dev ens4 dstport 0
current_ip=$(ip --json a show dev ens4 | jq '.[0].addr_info[0].local' -r)
echo "VM IP address is: $current_ip"
# Add an FDB entry for every peer (quote expansions to avoid word splitting).
for ip in "${IPs[@]}"; do
  if [ "$ip" != "$current_ip" ]; then
    bridge fdb append to 00:00:00:00:00:00 dst "$ip" dev vxlan0
  fi
done
ip addr add "10.200.7.$i/24" dev vxlan0

echo ""
echo "Enabling VXLAN"
ip link set up dev vxlan0

echo ""
echo "Disabling AppArmor Service"
systemctl stop apparmor.service
systemctl disable apparmor.service
package string_handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Deque;
import java.util.LinkedList;
/**
*
* @author minchoba
* 백준 11068번: 회문인 수
*
* @see https://www.acmicpc.net/problem/11068/
*
*/
/**
 * BOJ 11068: Palindromic numbers.
 * For each input number, prints 1 if its representation is a palindrome in
 * base 10 or in any base from 2 to 64, otherwise 0.
 *
 * @see https://www.acmicpc.net/problem/11068/
 */
public class Boj11068 {
    private static final int TRUE = 1;
    private static final int FALSE = 0;
    private static final char NEW_LINE = '\n';

    public static void main(String[] args) throws Exception {
        // Read all input through a buffered reader.
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringBuilder sb = new StringBuilder();
        int T = Integer.parseInt(br.readLine());

        while (T-- > 0) {
            String N = br.readLine();
            char[] num = N.toCharArray();
            boolean isPalindrome = true;

            // Is the decimal representation already a palindrome?
            for (int i = 0; i < num.length / 2; i++) {
                if (num[i] != num[num.length - 1 - i]) {
                    isPalindrome = false;
                    break;
                }
            }

            if (isPalindrome) { // Decimal palindrome: record 1.
                sb.append(TRUE).append(NEW_LINE);
            } else { // Otherwise try every other base from 2 to 64.
                if (makePalindrome(N)) sb.append(TRUE).append(NEW_LINE);
                else sb.append(FALSE).append(NEW_LINE);
            }
        }
        System.out.println(sb.toString()); // Emit all results at once.
    }

    /**
     * Checks whether the number is a palindrome in some base 2..64.
     *
     * @param n decimal string form of the number
     * @return true if any base yields a palindromic digit sequence
     */
    private static boolean makePalindrome(String n) {
        int num = Integer.parseInt(n);
        Deque<Integer> deq = new LinkedList<>();

        for (int i = 2; i <= 64; i++) {
            int tmp = num;
            while (tmp > 0) {
                deq.offerFirst(tmp % i); // most-significant digit ends up first
                tmp /= i;
            }
            while (deq.size() > 1) {
                // Fix: compare digit values with equals(); `!=` compares
                // Integer object identity, which only worked by accident for
                // values inside the Integer cache (-128..127).
                if (!deq.peekFirst().equals(deq.peekLast())) break;
                // Ends match: drop one digit from each side.
                deq.pollFirst();
                deq.pollLast();
            }
            if (deq.size() < 2) return true; // 0 or 1 digits left: palindrome.
            deq.clear(); // Mismatch: reset and try the next base.
        }
        return false; // No base in [2, 64] produced a palindrome.
    }
}
|
<filename>resources/js/app.js
/**
 * First we will load all of this project's JavaScript dependencies which
 * includes Vue and other libraries. It is a great starting point when
 * building robust, powerful web applications using Vue and Laravel.
 */
require('./bootstrap');
/**
 * We will add in our own custom scripts made for the purpose of making
 * custom JS apps work as well as scripts for plugins.
 */
require('./custom_scripts');
// Expose Vue globally so inline Blade scripts and the registrations
// below can reference `Vue` without importing it.
window.Vue = require('vue');
import "vuetify/dist/vuetify.min.css";
import Vuetify from 'vuetify'
// Register Vuetify with the brand's primary color.
Vue.use(Vuetify, {
    theme: {
        primary: '#fdd00c'
    }
});
import VeeValidate from 'vee-validate'
Vue.use(VeeValidate)
// Shared event bus; presumably used by components for cross-component
// events — verify against component imports.
export const eventBus = new Vue();
/**
 * The following block of code may be used to automatically register your
 * Vue components. It will recursively scan this directory for the Vue
 * components and automatically register them with their "basename".
 *
 * Eg. ./components/ExampleComponent.vue -> <example-component></example-component>
 */
/* Vue.component('example-component', require('./components/ExampleComponent.vue')); */
// Layout components
Vue.component('dashboard', require('./components/layout/Dashboard.vue').default)
//Vue.component('text-field', require('./components/TextInput.vue').default);
//Vue.component('bag-form', require('./components/BagForm.vue').default);
//Vue.component('admin-nav', require('./components/AdminNav.vue').default);
// Report views
Vue.component('batch-statistics', require('./components/views/reports/BatchStatistics.vue').default);
Vue.component('disparity-report', require('./components/views/reports/DisparityReport.vue').default);
Vue.component('stuffed-table', require('./components/views/reports/StuffedBatchTable.vue').default)
Vue.component('batch-times', require('./components/layout/BatchTimes.vue').default);
Vue.component('stopwatch', require('./components/layout/Stopwatch.vue').default);
Vue.component('vue-footer', require('./components/layout/Footer.vue').default)
//Vue.component('home-nav', require('./components/HomeNav.vue').default)
// Page views
Vue.component('vue-import', require('./components/views/pages/VueImport.vue').default)
Vue.component('batch-submission', require('./components/views/pages/BatchSubmission.vue').default)
// User management
Vue.component('user-table', require('./components/views/user/UserTable.vue').default)
Vue.component('add-user', require('./components/views/user/AddUser.vue').default)
Vue.component('imported-data', require('./components/views/reports/ImportedData.vue').default)
Vue.component('vue-login', require('./components/views/pages/LoginForm.vue').default)
Vue.component('home-index', require('./components/views/pages/HomeIndex.vue').default)
Vue.component('register-form', require('./components/views/pages/RegisterForm.vue').default)
// const files = require.context('./', true, /\.vue$/i)
// files.keys().map(key => {
//     return Vue.component(_.last(key.split('/')).split('.')[0], files(key))
// })
/**
 * Next, we will create a fresh Vue application instance and attach it to
 * the page. Then, you may begin adding components to this application
 * or customize the JavaScript scaffolding to fit your unique needs.
 */
/* const app = new Vue({
    el: '#app'
}); */
/**
 * Vue instance for admin page + scaffolding.
 *
 * Drives the admin layout mounted at #admin_index: the navigation drawer
 * state, the sidebar menu definitions, and the logout action.
 */
const admin = new Vue({
    el: "#admin_index",
    data() {
        return {
            // v-model for the navigation drawer (null lets Vuetify decide).
            drawer: null,
            // Flat sidebar links.
            singleItems: [
                {
                    title: 'Dashboard',
                    icon: 'dashboard',
                    href: '/admin'
                },
                {
                    title: 'Submit Batch',
                    icon: 'whatshot',
                    href: '/submit'
                },
                {
                    title: 'Import CSV',
                    icon: 'publish',
                    href: '/import_csv'
                }
            ],
            // Collapsible sidebar sections; `active` tracks expanded state.
            dropDownItems: [
                {
                    active: false,
                    title: 'Reports',
                    icon: 'pie_chart',
                    items: [
                        {
                            title: 'Submitted Bags',
                            href: '/reports/bags_submitted'
                        },
                        {
                            title: 'Disparity Report',
                            href: '/reports/disparity_report'
                        },
                        {
                            title: 'Imported Data',
                            href: '/reports/imported_data'
                        }
                    ],
                },
                {
                    active: false,
                    title: 'User Management',
                    icon: 'supervisor_account',
                    items: [
                        {
                            title: 'Users',
                            href: '/admin/users'
                        },
                        {
                            title: 'Add User',
                            href: '/admin/users/create'
                        }
                    ],
                }
            ]
        }
    },
    methods: {
        // Submits the hidden logout form rendered by the server-side layout.
        submitForm() {
            const logoutForm = document.getElementById("logout-form");
            logoutForm.submit();
        }
    }
    // NOTE(review): ~160 lines of commented-out batch-submission scaffolding
    // (old stepper data/computed/methods using localStorage) were removed
    // here as dead code; recover from version control if ever needed.
});
|
#!/bin/bash
# Container entry point: starts Xvfb + window manager, clipboard/file
# helpers, PulseAudio, optional VNC, and the webdriver.

# Virtual screen geometry (WxHxDEPTH); overridable via the environment.
SCREEN_RESOLUTION=${SCREEN_RESOLUTION:-"1920x1080x24"}
DISPLAY_NUM=99
export DISPLAY=":$DISPLAY_NUM"
# Any non-empty VERBOSE turns on the driver's --verbose flag.
VERBOSE=${VERBOSE:-""}
DRIVER_ARGS=""
if [ -n "$VERBOSE" ]; then
    DRIVER_ARGS="--verbose"
fi
# Terminate every background helper whose PID was recorded at startup.
# Unset/empty PID variables are skipped.
clean() {
    local pid
    for pid in "$FILESERVER_PID" "$XSELD_PID" "$XVFB_PID" "$DRIVER_PID" "$X11VNC_PID" "$PULSE_PID"; do
        if [ -n "$pid" ]; then
            kill -TERM "$pid"
        fi
    done
}
# Kill all helpers when the container is asked to stop.
trap clean SIGINT SIGTERM
# Import any ROOT_CA_* env vars (base64-encoded PEM) into the NSS database
# so the browser trusts those CAs.
if env | grep -q ROOT_CA_; then
    mkdir -p $HOME/.pki/nssdb
    certutil -N --empty-password -d sql:$HOME/.pki/nssdb
    for e in $(env | grep ROOT_CA_ | sed -e 's/=.*$//'); do
        certname=$(echo -n $e | sed -e 's/ROOT_CA_//')
        echo ${!e} | base64 -d >/tmp/cert.pem
        certutil -A -n ${certname} -t "TCu,Cu,Tu" -i /tmp/cert.pem -d sql:$HOME/.pki/nssdb
        rm /tmp/cert.pem
    done
fi
/usr/bin/fileserver &
FILESERVER_PID=$!
DISPLAY="$DISPLAY" /usr/bin/xseld &
XSELD_PID=$!
# PulseAudio with a fixed auth cookie so clients can connect over TCP.
mkdir -p ~/pulse/.config/pulse
echo -n 'gIvST5iz2S0J1+JlXC1lD3HWvg61vDTV1xbmiGxZnjB6E3psXsjWUVQS4SRrch6rygQgtpw7qmghDFTaekt8qWiCjGvB0LNzQbvhfs1SFYDMakmIXuoqYoWFqTJ+GOXYByxpgCMylMKwpOoANEDePUCj36nwGaJNTNSjL8WBv+Bf3rJXqWnJ/43a0hUhmBBt28Dhiz6Yqowa83Y4iDRNJbxih6rB1vRNDKqRr/J9XJV+dOlM0dI+K6Vf5Ag+2LGZ3rc5sPVqgHgKK0mcNcsn+yCmO+XLQHD1K+QgL8RITs7nNeF1ikYPVgEYnc0CGzHTMvFR7JLgwL2gTXulCdwPbg=='| base64 -d>~/pulse/.config/pulse/cookie
HOME=$HOME/pulse pulseaudio --start --exit-idle-time=-1
HOME=$HOME/pulse pactl load-module module-native-protocol-tcp
PULSE_PID=$(ps --no-headers -C pulseaudio -o pid | sed -r 's/( )+//g')
# Virtual X server + fluxbox window manager.
/usr/bin/xvfb-run -l -n "$DISPLAY_NUM" -s "-ac -screen 0 $SCREEN_RESOLUTION -noreset -listen tcp" /usr/bin/fluxbox -display "$DISPLAY" -log /dev/null 2>/dev/null &
XVFB_PID=$!
# Poll with wmctrl until the X server/WM accept connections.
retcode=1
until [ $retcode -eq 0 ]; do
    DISPLAY="$DISPLAY" wmctrl -m >/dev/null 2>&1
    retcode=$?
    if [ $retcode -ne 0 ]; then
        echo Waiting X server...
        sleep 0.1
    fi
done
# Optional VNC access for debugging sessions.
if [ "$ENABLE_VNC" == "true" ]; then
    x11vnc -display "$DISPLAY" -passwd selenoid -shared -forever -loop500 -rfbport 5900 -rfbportv6 5900 -logfile /dev/null &
    X11VNC_PID=$!
fi
DISPLAY="$DISPLAY" /usr/bin/yandexdriver --port=4444 --whitelisted-ips='' ${DRIVER_ARGS} &
DRIVER_PID=$!
# Block until any child exits (or a signal triggers clean()).
wait
|
/**
 * Prints every element of a fixed sample array that is divisible by three.
 */
public class DivisibleBy3 {
    public static void main(String[] args) {
        final int[] numbers = {2, 3, 5, 6, 9, 10, 11};
        for (int index = 0; index < numbers.length; index++) {
            final int candidate = numbers[index];
            if (candidate % 3 == 0) {
                System.out.println(candidate);
            }
        }
    }
}
echo "=============================="
echo "= apt update 3rd party repos ="
echo "=============================="
export DEBIAN_FRONTEND=noninteractive

# chrome: install Google's apt source list and signing key.
cat << CHROME_END > /etc/apt/sources.list.d/google-chrome.list
### THIS FILE IS AUTOMATICALLY CONFIGURED ###
# You may comment out this entry, but any other modifications may be lost.
deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main
CHROME_END
# BUG FIX: 'apt-key add' requires a file argument; '-' reads the key from
# stdin (without it the piped key was never imported).
curl -sL "http://dl.google.com/linux/linux_signing_key.pub" | apt-key add -

# docker: drop distro packages, then add Docker's official repository.
apt remove -q -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" docker docker-engine docker.io
curl -sL "https://download.docker.com/linux/ubuntu/gpg" | apt-key add -
# 'lsb_release -cs' prints the bare codename, replacing the fragile
# 'lsb_release -a | grep | cut | xargs' pipeline.
add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"

# oracle-java
# add-apt-repository ppa:webupd8team/java --yes --update
apt clean && apt update -q
/*
* Copyright (c) 2017 The Regents of the University of California.
* All rights reserved.
*
* '$Author: crawl $'
* '$Date: 2017-09-04 12:58:25 -0700 (Mon, 04 Sep 2017) $'
* '$Revision: 1406 $'
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the above
* copyright notice and the following two paragraphs appear in all copies
* of this software.
*
* IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY
* FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF
* THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE
* PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE UNIVERSITY OF
* CALIFORNIA HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
* ENHANCEMENTS, OR MODIFICATIONS.
*
*/
package org.kepler.webview.server.app;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.auth.User;
/** Interface for WebView server apps.
*
* @author <NAME>
* @version $Id: App.java 1406 2017-09-04 19:58:25Z crawl $
*/
public interface App extends Cloneable {
    /** Returns a copy of this app instance. */
    public Object clone() throws CloneNotSupportedException;

    /** Closes this app. Implementations release any held resources here. */
    public void close();

    /**
     * Executes the app for the given user with the supplied JSON inputs and
     * reports the resulting JSON array (or failure) to the handler.
     */
    public void exec(User user, JsonObject inputs, Handler<AsyncResult<JsonArray>> handler)
        throws Exception;
}
|
<reponame>rkamudhan/multicloud-k8s
//=======================================================================
// Copyright (c) 2017-2020 Aarna Networks, Inc.
// All rights reserved.
// ======================================================================
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
import React from "react";
import PropTypes from "prop-types";
import AppBar from "@material-ui/core/AppBar";
import Grid from "@material-ui/core/Grid";
import Hidden from "@material-ui/core/Hidden";
import IconButton from "@material-ui/core/IconButton";
import MenuIcon from "@material-ui/icons/Menu";
import Toolbar from "@material-ui/core/Toolbar";
import Typography from "@material-ui/core/Typography";
import { withStyles } from "@material-ui/core/styles";
import { withRouter } from "react-router-dom";
// Semi-transparent white used for secondary links/borders in the app bar.
const lightColor = "rgba(255, 255, 255, 0.7)";
// JSS style rules injected into Header via withStyles().
const styles = (theme) => ({
  root: {
    boxShadow:
      "0 3px 4px 0 rgba(0,0,0,.2), 0 3px 3px -2px rgba(0,0,0,.14), 0 1px 8px 0 rgba(0,0,0,.12)",
  },
  // Keep the secondary bar stacked beneath the primary app bar.
  secondaryBar: {
    zIndex: 0,
  },
  // Pull the hamburger button flush with the toolbar edge.
  menuButton: {
    marginLeft: -theme.spacing(1),
  },
  iconButtonAvatar: {
    padding: 4,
  },
  link: {
    textDecoration: "none",
    color: lightColor,
    "&:hover": {
      color: theme.palette.common.white,
    },
  },
  button: {
    borderColor: lightColor,
  },
});
function Header(props) {
const { classes, onDrawerToggle, location } = props;
let headerName = "";
let getHeaderName = () => {
if (location.pathname === `${props.match.url}/dashboard`) {
headerName = "Dashboard";
} else if (location.pathname === `${props.match.url}/services`) {
headerName = "Services";
} else if (
location.pathname === `${props.match.url}/deployment-intent-group`
) {
headerName = "Deployment Intent Groups";
} else if (location.pathname.includes("services")) {
headerName =
"services / " +
location.pathname.slice(location.pathname.indexOf("services")).slice(9);
} else if (location.pathname === `${props.match.url}/projects`) {
headerName = "Projects";
} else if (location.pathname === `${props.match.url}/clusters`) {
headerName = "Clusters";
} else if (location.pathname === `${props.match.url}/controllers`) {
headerName = "Controllers";
}
};
getHeaderName();
return (
<React.Fragment>
<AppBar
className={classes.root}
color="primary"
position="sticky"
elevation={0}
>
<Toolbar>
<Grid container spacing={1} alignItems="center">
<Hidden smUp implementation="js">
<Grid item>
<IconButton
color="inherit"
onClick={onDrawerToggle}
className={classes.menuButton}
>
<MenuIcon />
</IconButton>
</Grid>
</Hidden>
<Typography>{headerName}</Typography>
<Grid item xs />
</Grid>
</Toolbar>
</AppBar>
</React.Fragment>
);
}
Header.propTypes = {
  classes: PropTypes.object.isRequired,
  onDrawerToggle: PropTypes.func.isRequired,
};
// Inject JSS classes and router props (location/match) into Header.
export default withStyles(styles)(withRouter(Header));
|
<filename>fame-server/src/main/java/com/designre/blog/model/entity/Category.java
package com.designre.blog.model.entity;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * Blog category entity. Lombok generates getters/setters/equals/hashCode;
 * equals/hashCode include the {@code BaseEntity} fields (callSuper = true).
 */
@EqualsAndHashCode(callSuper = true)
@Data
public class Category extends BaseEntity {
    // Id of the parent category; semantics of the root value (null vs. 0)
    // are not visible here — confirm against the persistence layer.
    private Integer parentId;
    private String name;
}
|
#!/usr/bin/env bash
#/bin/bash
# Build the tf_approxmatch custom TensorFlow op (CUDA kernel + C++ wrapper)
# into tf_approxmatch_so.so, resolving include/lib paths from the installed
# TensorFlow package.
TF_INC=$(python -c 'import tensorflow as tf; print(tf.sysconfig.get_include())')
TF_LIB=$(python -c 'import tensorflow as tf; print(tf.sysconfig.get_lib())')
# TF1.4
# Compile the CUDA kernel to an object file.
/usr/local/cuda-8.0/bin/nvcc tf_approxmatch_g.cu -o tf_approxmatch_g.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC
#g++ -std=c++11 tf_approxmatch.cpp tf_approxmatch_g.cu.o -o tf_approxmatch_so.so -shared -fPIC -I /usr/local/lib/python2.7/dist-packages/tensorflow/include -I /usr/local/cuda-8.0/include -I /usr/local/lib/python2.7/dist-packages/tensorflow/include/external/nsync/public -lcudart -L /usr/local/cuda-8.0/lib64/ -L/usr/local/lib/python2.7/dist-packages/tensorflow -ltensorflow_framework -O2 -D_GLIBCXX_USE_CXX11_ABI=0
# Link the op against CUDA and the TensorFlow framework library.
g++ -std=c++11 tf_approxmatch.cpp tf_approxmatch_g.cu.o -o tf_approxmatch_so.so -shared -fPIC -I$TF_INC \
    -I /usr/local/cuda-8.0/include -I$TF_INC/external/nsync/public -lcudart -L /usr/local/cuda-8.0/lib64/ \
    -L$TF_LIB -ltensorflow_framework -O2 -D_GLIBCXX_USE_CXX11_ABI=1
|
#!/usr/bin/env bash
# Copyright 2011 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# This script creates a complete godoc app in $APPDIR.
# It copies the cmd/godoc and src/pkg/go/... sources from GOROOT,
# synthesizes an app.yaml file, and creates the .zip, index, and
# configuration files.
#
# If an argument is provided it is assumed to be the app-engine godoc directory.
# Without an argument, $APPDIR is used instead. If GOROOT is not set, "go env"
# is consulted to find the $GOROOT.
#
# The script creates a .zip file representing the $GOROOT file system
# and computes the correspondig search index files. These files are then
# copied to $APPDIR. A corresponding godoc configuration file is created
# in $APPDIR/appconfig.go.
# Names of the artifacts generated into $APPDIR.
ZIPFILE=godoc.zip
INDEXFILE=godoc.index
SPLITFILES=index.split.
CONFIGFILE=godoc/appconfig.go

# Print an error message and abort the whole script.
error() {
    echo "error: $1"
    exit 2
}
# Determine GOROOT (env or `go env`) and APPDIR (env or first argument),
# then sanity-check both before any destructive work.
# BUG FIX: all test operands are now quoted — the unquoted forms broke on
# paths containing spaces and on empty values; `==` replaced with the
# numeric `-eq` for the argument-count check.
getArgs() {
    if [ -z "$GOROOT" ]; then
        GOROOT=$(go env GOROOT)
        echo "GOROOT not set explicitly, using $GOROOT instead"
    fi
    if [ -z "$APPDIR" ]; then
        if [ $# -eq 0 ]; then
            error "APPDIR not set, and no argument provided"
        fi
        APPDIR=$1
        echo "APPDIR not set, using argument instead"
    fi

    # safety checks
    if [ ! -d "$GOROOT" ]; then
        error "$GOROOT is not a directory"
    fi
    if [ ! -x "$GOROOT/bin/godoc" ]; then
        error "$GOROOT/bin/godoc does not exist or is not executable"
    fi
    if [ -e "$APPDIR" ]; then
        error "$APPDIR exists; check and remove it before trying again"
    fi

    # reporting
    echo "GOROOT = $GOROOT"
    echo "APPDIR = $APPDIR"
}
# Copy the godoc command sources into the app directory.
copyGodoc() {
    echo "*** copy $GOROOT/src/cmd/godoc to $APPDIR/godoc"
    cp -r $GOROOT/src/cmd/godoc $APPDIR/godoc
}

# Copy the go/... packages to newgo/, drop testdata directories, and
# rewrite "go/..." import paths to "newgo/..." in all copied sources.
copyGoPackages() {
    echo "*** copy $GOROOT/src/pkg/go to $APPDIR/newgo and rewrite imports"
    cp -r $GOROOT/src/pkg/go $APPDIR/newgo
    find $APPDIR/newgo -type d -name testdata | xargs rm -r
    gofiles=$(find $APPDIR -name '*.go')
    # In-place BSD sed (-i '') over both single-import and import-block forms.
    sed -i '' 's_^\(."\)\(go/[a-z]*\)"$_\1new\2"_' $gofiles
    sed -i '' 's_^\(import "\)\(go/[a-z]*\)"$_\1new\2"_' $gofiles
}
# Write the App Engine app.yaml routing every URL to the Go app.
makeAppYaml() {
    echo "*** make $APPDIR/app.yaml"
    cat > $APPDIR/app.yaml <<EOF
application: godoc
version: 1
runtime: go
api_version: go1
handlers:
- url: /.*
  script: _go_app
EOF
}

# Zip the relevant $GOROOT files (sources, docs, assets) for serving.
makeZipfile() {
    echo "*** make $APPDIR/$ZIPFILE"
    zip -q -r $APPDIR/$ZIPFILE $GOROOT -i \*.go -i \*.html -i \*.xml -i \*.css -i \*.js -i \*.txt -i \*.c -i \*.h -i \*.s -i \*.png -i \*.jpg -i \*.sh -i \*.ico
}

# Build the godoc search index from the zip archive.
makeIndexfile() {
    echo "*** make $APPDIR/$INDEXFILE"
    OUT=/tmp/godoc.out
    $GOROOT/bin/godoc -write_index -index_files=$APPDIR/$INDEXFILE -zip=$APPDIR/$ZIPFILE 2> $OUT
    if [ $? != 0 ]; then
        error "$GOROOT/bin/godoc failed - see $OUT for details"
    fi
}

# Split the index into 8 MB chunks (App Engine file-size limits).
splitIndexfile() {
    echo "*** split $APPDIR/$INDEXFILE"
    split -b8m $APPDIR/$INDEXFILE $APPDIR/$SPLITFILES
}

# Generate the Go configuration file pointing godoc at the artifacts above.
makeConfigfile() {
    echo "*** make $APPDIR/$CONFIGFILE"
    cat > $APPDIR/$CONFIGFILE <<EOF
package main

// GENERATED FILE - DO NOT MODIFY BY HAND.
// (generated by $GOROOT/src/cmd/godoc/setup-godoc-app.bash)

const (
	// .zip filename
	zipFilename = "$ZIPFILE"

	// goroot directory in .zip file
	zipGoroot = "$GOROOT"

	// glob pattern describing search index files
	// (if empty, the index is built at run-time)
	indexFilenames = "$SPLITFILES*"
)
EOF
}
# Validate inputs first; afterwards abort immediately on any failure.
getArgs "$@"
set -e
mkdir $APPDIR
copyGodoc
copyGoPackages
makeAppYaml
makeZipfile
makeIndexfile
splitIndexfile
makeConfigfile
echo "*** setup complete"
|
<reponame>reiinakano/iot<filename>iot_test.go<gh_stars>10-100
// Copyright 2018, <NAME>
// License: MIT
package iot_test
import (
"bytes"
"context"
"fmt"
"strings"
"testing"
"github.com/vaelen/iot"
)
// Paths to the checked-in key material used by the credential tests.
const RSACertificatePath = "test_keys/rsa_cert.pem"
const RSAPrivateKeyPath = "test_keys/rsa_private.pem"
const ECCertificatePath = "test_keys/ec_cert.pem"
const ECPrivateKeyPath = "test_keys/ec_private.pem"

// TestID identifies the fake device used throughout these tests.
var TestID = &iot.ID{
	ProjectID: "test-project",
	Location:  "test-location",
	Registry:  "test-registry",
	DeviceID:  "test-device",
}

// ClientID is the MQTT client ID expected for TestID.
var ClientID = "projects/test-project/locations/test-location/registries/test-registry/devices/test-device"

// Per-device MQTT topics derived from TestID's device ID.
var ConfigTopic = "/devices/test-device/config"
var StateTopic = "/devices/test-device/state"
var EventsTopic = "/devices/test-device/events"

// mockClient is the shared mock installed by initMockClient.
var mockClient *iot.MockMQTTClient
// TestLoadRSACredentials checks that the RSA test certificate and key load
// and produce a Credentials value with a populated private key.
func TestLoadRSACredentials(t *testing.T) {
	credentials, err := iot.LoadRSACredentials(RSACertificatePath, RSAPrivateKeyPath)
	if err != nil {
		t.Fatalf("Couldn't load credentials: %v", err)
	}
	if credentials == nil {
		t.Fatal("Credentials not loaded.")
	}
	if credentials.PrivateKey == nil {
		t.Fatal("Private key not loaded.")
	}
}

// TestLoadECCredentials is the elliptic-curve counterpart of
// TestLoadRSACredentials.
func TestLoadECCredentials(t *testing.T) {
	credentials, err := iot.LoadECCredentials(ECCertificatePath, ECPrivateKeyPath)
	if err != nil {
		t.Fatalf("Couldn't load credentials: %v", err)
	}
	if credentials == nil {
		t.Fatal("Credentials not loaded.")
	}
	if credentials.PrivateKey == nil {
		t.Fatal("Private key not loaded.")
	}
}
// TestDefaultOptions verifies DefaultOptions: the ID and credentials are
// stored as given, events/state use QoS 1, config uses QoS 2, and the auth
// token expiration equals the library default.
func TestDefaultOptions(t *testing.T) {
	credentials, err := iot.LoadRSACredentials(RSACertificatePath, RSAPrivateKeyPath)
	if err != nil {
		t.Fatalf("Couldn't load credentials: %v", err)
	}
	options := iot.DefaultOptions(TestID, credentials)
	if options == nil {
		t.Fatal("Options structure wasn't returned")
	}
	if options.ID != TestID {
		t.Fatal("Incorrect ID")
	}
	if options.Credentials != credentials {
		t.Fatal("Incorrect credentials")
	}
	if options.EventQOS != 1 {
		t.Fatalf("Incorrect event QoS: %v", options.EventQOS)
	}
	if options.StateQOS != 1 {
		t.Fatalf("Incorrect state QoS: %v", options.StateQOS)
	}
	if options.ConfigQOS != 2 {
		t.Fatalf("Incorrect config QoS: %v", options.ConfigQOS)
	}
	if options.AuthTokenExpiration != iot.DefaultAuthTokenExpiration {
		t.Fatalf("Incorrect auth token expiration: %v", options.AuthTokenExpiration)
	}
}
func TestThingWithBadOptions(t *testing.T) {
ctx := context.Background()
var mockClient *iot.MockMQTTClient
iot.NewClient = func(t iot.Thing, o *iot.ThingOptions) iot.MQTTClient {
mockClient = iot.NewMockClient(t, o)
return mockClient
}
options := &iot.ThingOptions{}
thing := iot.New(options)
if thing == nil {
t.Fatal("Thing was not returned from New() with bad options")
}
err := thing.Connect(ctx, "bad options")
if err != iot.ErrConfigurationError {
t.Fatalf("Wrong error returned from Connect() with invalid options: %v", err)
}
}
// TestRSAThingFull runs the full happy path against the mock client with RSA
// credentials: connect, reconnect no-op, client wiring, config delivery,
// event publishing, and disconnect.
func TestRSAThingFull(t *testing.T) {
	initMockClient()
	credentials := getCredentials(t, iot.CredentialTypeRSA)
	options, configReceived := getOptions(t, credentials)
	thing := getThing(t, options)
	serverAddress := "ssl://mqtt.example.com:443"
	doConnectionTest(t, thing, serverAddress)
	doAlreadyConnectedTest(t, thing, serverAddress)
	checkClientValues(t, options)
	doConfigTest(t, configReceived)
	doEventTest(t, thing)
	doDisconnectTest(t, thing)
}

// TestECThingConnectOnly covers only connect/disconnect with EC credentials.
func TestECThingConnectOnly(t *testing.T) {
	initMockClient()
	credentials := getCredentials(t, iot.CredentialTypeEC)
	options, _ := getOptions(t, credentials)
	thing := getThing(t, options)
	serverAddress := "ssl://mqtt.example.com:443"
	doConnectionTest(t, thing, serverAddress)
	checkClientValues(t, options)
	doDisconnectTest(t, thing)
}

// initMockClient replaces the client factory so each new Thing gets a fresh
// MockMQTTClient, which is also stored in the package-level mockClient for
// inspection by the helpers below.
func initMockClient() {
	iot.NewClient = func(t iot.Thing, o *iot.ThingOptions) iot.MQTTClient {
		mockClient = iot.NewMockClient(t, o)
		return mockClient
	}
}
// getThing builds a Thing from options and verifies it starts disconnected.
func getThing(t *testing.T, options *iot.ThingOptions) iot.Thing {
	thing := iot.New(options)
	if thing == nil {
		t.Fatal("Thing wasn't returned from New()")
	}
	if thing.IsConnected() {
		t.Fatal("Thing thinks it is connected when it really is not")
	}
	return thing
}
// getCredentials loads test credentials of the requested type, failing the
// test if the key material cannot be read. Any type other than EC
// (including RSA) falls back to the RSA test key pair.
func getCredentials(t *testing.T, credentialType iot.CredentialType) *iot.Credentials {
	load := func() (*iot.Credentials, error) {
		if credentialType == iot.CredentialTypeEC {
			return iot.LoadECCredentials(ECCertificatePath, ECPrivateKeyPath)
		}
		return iot.LoadRSACredentials(RSACertificatePath, RSAPrivateKeyPath)
	}
	creds, err := load()
	if err != nil {
		t.Fatalf("Couldn't load credentials: %v", err)
	}
	return creds
}
// getOptions builds DefaultOptions for TestID, wires buffer-backed loggers,
// disables auth-token expiry, and installs a ConfigHandler that records the
// received config and replies by publishing the state "ok". Returns the
// options and the buffer that receives the config payload.
func getOptions(t *testing.T, credentials *iot.Credentials) (*iot.ThingOptions, *bytes.Buffer) {
	options := iot.DefaultOptions(TestID, credentials)
	if options == nil {
		t.Fatal("Options structure wasn't returned")
	}
	debugWriter := &bytes.Buffer{}
	infoWriter := &bytes.Buffer{}
	errorWriter := &bytes.Buffer{}
	configReceived := &bytes.Buffer{}
	options.AuthTokenExpiration = 0
	options.DebugLogger = func(a ...interface{}) { fmt.Fprint(debugWriter, a...) }
	options.InfoLogger = func(a ...interface{}) { fmt.Fprint(infoWriter, a...) }
	options.ErrorLogger = func(a ...interface{}) { fmt.Fprint(errorWriter, a...) }
	options.LogMQTT = true
	options.ConfigHandler = func(thing iot.Thing, config []byte) {
		ctx := context.Background()
		// Keep only the most recent config payload.
		configReceived.Truncate(0)
		configReceived.Write(config)
		state := []byte("ok")
		thing.PublishState(ctx, state)
	}
	return options, configReceived
}
// doConnectionTest connects the Thing and verifies both the Thing and the
// mock client agree on the connection state and the server address.
func doConnectionTest(t *testing.T, thing iot.Thing, serverAddress string) {
	ctx := context.Background()
	err := thing.Connect(ctx, serverAddress)
	if err != nil {
		t.Fatalf("Couldn't connect. Error: %v", err)
	}
	if !mockClient.Connected {
		t.Fatalf("Client not connected")
	}
	if len(mockClient.ConnectedTo) < 1 || mockClient.ConnectedTo[0] != serverAddress {
		t.Fatalf("Client connected to wrong server: %v", mockClient.ConnectedTo)
	}
	if !thing.IsConnected() {
		t.Fatal("Thing thinks it is not connected when it really is")
	}
}

// doAlreadyConnectedTest calls Connect() a second time and verifies it is a
// no-op: no error, no reconnect to a different address, and a credentials
// provider already installed on the mock client.
func doAlreadyConnectedTest(t *testing.T, thing iot.Thing, serverAddress string) {
	ctx := context.Background()
	err := thing.Connect(ctx, "already connected")
	if err != nil {
		t.Fatalf("Calling Connect() while already connected returned an error: %v", err)
	}
	if len(mockClient.ConnectedTo) < 1 || mockClient.ConnectedTo[0] != serverAddress {
		t.Fatalf("Calling Connect() while already connected caused client to reconnect: %v", mockClient.ConnectedTo)
	}
	if mockClient.CredentialsProvider == nil {
		t.Fatal("Credentials provider not set")
	}
}
// checkClientValues inspects the mock client after a connect: credentials
// provider output, loggers, client ID, subscription count, and the
// on-connect handler.
func checkClientValues(t *testing.T, options *iot.ThingOptions) {
	// NOTE(review): this mutates the caller's options and duplicates the
	// AuthTokenExpiration = 0 already set in getOptions — confirm intentional.
	options.AuthTokenExpiration = 0
	username, password := mockClient.CredentialsProvider()
	if username == "" || password == "" {
		t.Fatalf("Bad username and/or password returned. Username: %v, Password: %v", username, password)
	}
	if mockClient.DebugLogger == nil {
		t.Fatal("Debug logger not set")
	}
	if mockClient.InfoLogger == nil {
		t.Fatal("Info logger not set")
	}
	if mockClient.ErrorLogger == nil {
		t.Fatal("Error logger not set")
	}
	if mockClient.ClientID != ClientID {
		t.Fatalf("Client ID not set properly: %v", mockClient.ClientID)
	}
	if len(mockClient.Subscriptions) != 1 {
		t.Fatalf("Wrong number of subscriptions: %v", len(mockClient.Subscriptions))
	}
	if mockClient.OnConnectHandler == nil {
		t.Fatalf("OnConnectHandler not set")
	}
}

// doConfigTest delivers a config message through the mock client and checks
// that the ConfigHandler recorded it and answered with the state "ok".
func doConfigTest(t *testing.T, configReceived *bytes.Buffer) {
	mockClient.Receive(ConfigTopic, []byte("test config"))
	if configReceived.String() != "test config" {
		t.Fatalf("Wrong configuration received: %v", configReceived.String())
	}
	l, ok := mockClient.Messages[StateTopic]
	if !ok || l == nil || len(l) == 0 {
		t.Fatalf("State not published")
	}
	if string(l[0].([]byte)) != "ok" {
		t.Fatalf("Wrong state published: %v", string(l[0].([]byte)))
	}
}
// doEventTest publishes one top-level event plus events on the sub-topics
// "a" and "a/b", then verifies exactly one message with the right payload
// landed on each corresponding MQTT topic.
func doEventTest(t *testing.T, thing iot.Thing) {
	ctx := context.Background()
	topLevelMessage := "Top"
	// Maps "sub/topic" (slash-joined segments) -> expected payload.
	events := make(map[string]string)
	events["a"] = "A"
	events["a/b"] = "B"
	err := thing.PublishEvent(ctx, []byte(topLevelMessage))
	if err != nil {
		t.Fatalf("Couldn't publish. Error: %v", err)
	}
	for k, v := range events {
		err = thing.PublishEvent(ctx, []byte(v), strings.Split(k, "/")...)
		if err != nil {
			t.Fatalf("Couldn't publish. Error: %v", err)
		}
	}
	l, ok := mockClient.Messages[EventsTopic]
	if !ok || l == nil || len(l) == 0 {
		t.Fatalf("Message not published. Topic: %v", EventsTopic)
	}
	if len(l) > 1 {
		t.Fatalf("Too many messages published. Topic: %v, Count; %v", EventsTopic, len(l))
	}
	if string(l[0].([]byte)) != topLevelMessage {
		t.Fatalf("Wrong message published. Topic: %v, Message: %v", EventsTopic, string(l[0].([]byte)))
	}
	for k, v := range events {
		topic := EventsTopic + "/" + k
		l, ok = mockClient.Messages[topic]
		if !ok || l == nil || len(l) == 0 {
			t.Fatalf("Message not published. Topic: %v", topic)
		}
		if len(l) > 1 {
			t.Fatalf("Too many messages published. Topic: %v, Count; %v", topic, len(l))
		}
		if string(l[0].([]byte)) != v {
			t.Fatalf("Wrong message published. Topic: %v, Message: %v", topic, string(l[0].([]byte)))
		}
	}
}

// doDisconnectTest disconnects the Thing and checks the mock client noticed.
func doDisconnectTest(t *testing.T, thing iot.Thing) {
	thing.Disconnect(context.Background())
	if mockClient.Connected {
		t.Fatal("Didn't disconnect")
	}
}
|
<gh_stars>10-100
import BigNumber from 'bignumber.js';
import { normalize } from '../../bignumber';
import { ReserveMock } from '../../mocks';
import { formatReserve, formatReserveUSD } from './index';
describe('formatReserve', () => {
  it('should accrue over time', () => {
    // 100 units of variable debt at the mock's rates.
    const reserve = new ReserveMock().addVariableDebt(100);
    const first = formatReserve({
      reserve: reserve.reserve,
      currentTimestamp: reserve.reserve.lastUpdateTimestamp,
    });
    // Same reserve ~350 days later: accrued interest must grow totalDebt.
    const second = formatReserve({
      reserve: reserve.reserve,
      currentTimestamp:
        reserve.reserve.lastUpdateTimestamp + 60 * 60 * 24 * 350,
    });
    expect(new BigNumber(second.totalDebt).gt(first.totalDebt)).toBe(true);
  });
  it('should properly calculate utilization', () => {
    const reserve = new ReserveMock();
    // no liquidity
    const zeroLiquidity = formatReserve({
      reserve: reserve.reserve,
      currentTimestamp: reserve.reserve.lastUpdateTimestamp,
    });
    expect(zeroLiquidity.borrowUsageRatio).toEqual('0');
    expect(zeroLiquidity.supplyUsageRatio).toEqual('0');
    // no borrows
    reserve.addLiquidity(100);
    const onlyLiquidity = formatReserve({
      reserve: reserve.reserve,
      currentTimestamp: reserve.reserve.lastUpdateTimestamp,
    });
    expect(onlyLiquidity.borrowUsageRatio).toEqual('0');
    expect(onlyLiquidity.supplyUsageRatio).toEqual('0');
    // borrows: 100 debt vs 100 liquidity => 50% usage on both ratios
    reserve.addVariableDebt(100);
    const fiftyPercent = formatReserve({
      reserve: reserve.reserve,
      currentTimestamp: reserve.reserve.lastUpdateTimestamp,
    });
    expect(fiftyPercent.borrowUsageRatio).toEqual('0.5');
    expect(fiftyPercent.supplyUsageRatio).toEqual('0.5');
    // add unbacked supplies: dilutes supply usage but not borrow usage
    reserve.addUnbacked(200);
    const unbacked = formatReserve({
      reserve: reserve.reserve,
      currentTimestamp: reserve.reserve.lastUpdateTimestamp,
    });
    expect(unbacked.borrowUsageRatio).toEqual('0.5');
    expect(unbacked.supplyUsageRatio).toEqual('0.25');
  });
  it('should calculate usd values', () => {
    // 100 liquidity with a marketPrice of 2, should be 200
    const reserve = new ReserveMock().addLiquidity(100);
    const formattedReserve = formatReserveUSD({
      reserve: {
        ...reserve.reserve,
        priceInMarketReferenceCurrency: normalize(2, -8), // 2
      },
      currentTimestamp: reserve.reserve.lastUpdateTimestamp,
      marketReferencePriceInUsd: normalize(1, -8), // 1
      marketReferenceCurrencyDecimals: 8,
    });
    expect(formattedReserve.availableLiquidityUSD).toBe('200');
    expect(formattedReserve.priceInUSD).toBe('2');
  });
});
|
import type { AST_NODE_TYPES } from '../../ast-node-types';
import type {
ClassPropertyComputedNameBase,
ClassPropertyNonComputedNameBase,
} from '../../base/ClassPropertyBase';
/**
 * An abstract class property whose key is a computed expression
 * (e.g. `abstract [key]: string`).
 */
export interface TSAbstractClassPropertyComputedName
  extends ClassPropertyComputedNameBase {
  type: AST_NODE_TYPES.TSAbstractClassProperty;
}
/**
 * An abstract class property with a statically known (non-computed) key.
 */
export interface TSAbstractClassPropertyNonComputedName
  extends ClassPropertyNonComputedNameBase {
  type: AST_NODE_TYPES.TSAbstractClassProperty;
}
/** Union of the computed- and non-computed-name variants. */
export type TSAbstractClassProperty =
  | TSAbstractClassPropertyComputedName
  | TSAbstractClassPropertyNonComputedName;
|
#!/bin/bash
shopt -s extglob

# Benchmark driver: runs the Goblint "all-fast" SV-COMP suite under
# BenchExec and post-processes the results into an HTML table.
MYBENCHDIR=/mnt/goblint-svcomp/benchexec/my-bench-sv-comp
RESULTSDIR=/mnt/goblint-svcomp/benchexec/results/new-results13-all-fast-systems-base-cast-on-read-before
# Number of parallel benchexec worker threads.
GOBLINTPARALLEL=15

mkdir $RESULTSDIR

# Run verification
cd /mnt/goblint-svcomp/sv-comp/goblint
# read-only and overlay dirs for Value too large for defined data type workaround
benchexec --read-only-dir / --overlay-dir . --hidden-dir /home --outputpath $RESULTSDIR --numOfThreads $GOBLINTPARALLEL $MYBENCHDIR/goblint-all-fast.xml

# Extract witness directory
cd $RESULTSDIR
# NOTE(review): assumes the glob matches exactly one *.files directory
# produced by benchexec — confirm if multiple runs share RESULTSDIR.
LOGDIR=`echo goblint*.files`
echo $LOGDIR

# Generate table with merged results and witness validation results
sed -e "s/LOGDIR/$LOGDIR/" $MYBENCHDIR/table-generator-all-fast.xml > table-generator.xml
table-generator -x table-generator.xml

# Decompress all tool outputs for table HTML links
unzip -o goblint*.logfiles.zip
<!-- Minimal static page: an unordered list of animal names. -->
<html>
<head>
<title>Animals</title>
</head>
<body>
<ul>
<li>lion</li>
<li>tiger</li>
<li>bear</li>
<li>wolf</li>
<li>elephant</li>
</ul>
</body>
</html>
package com.codepath.apps.simpletweet.models;
import org.json.JSONException;
import org.json.JSONObject;
import org.parceler.Parcel;
/**
 * Data model for a Twitter user, populated from the REST API's JSON
 * representation via {@link #fromJson}. The {@code @Parcel} annotation lets
 * Parceler generate the Parcelable implementation, which is why the public
 * no-arg constructor and public fields are required.
 */
@Parcel
public class User {
    public String name;            // JSON field "name"
    public String screenName;      // JSON field "screen_name"
    public String publicImageUrl;  // JSON field "profile_image_url_https"
    public int following;          // JSON field "friends_count"
    public int followers;          // JSON field "followers_count"
    public String description;     // JSON field "description"

    public User() {} //Needed by Parceler

    /**
     * Builds a User from a single API user JSON object.
     *
     * @throws JSONException if any expected field is missing or has the wrong type
     */
    public static User fromJson(JSONObject jsonObject) throws JSONException {
        User user = new User();
        user.name = jsonObject.getString("name");
        user.screenName = jsonObject.getString("screen_name");
        user.publicImageUrl = jsonObject.getString("profile_image_url_https");
        user.following = jsonObject.getInt("friends_count");
        user.followers = jsonObject.getInt("followers_count");
        user.description = jsonObject.getString("description");
        return user;
    }

    public String getName() {
        return name;
    }

    public String getScreenName() {
        return screenName;
    }

    public String getPublicImageUrl() {
        return publicImageUrl;
    }

    public int getFollowing() {
        return following;
    }

    public int getFollowers() {
        return followers;
    }

    public String getDescription() {
        return description;
    }
}
|
<filename>src/audio/web-audio/reuse-audio-context/config.ts
// Builds one AudioContext up front so the same instance can be handed to
// Phaser's sound system (see the config below). Falls back to the
// WebKit-prefixed constructor on older browsers. On failure it logs and
// returns undefined — deliberately best-effort, so the game still starts
// without audio.
const getAudioContext = () => {
try
{
// @ts-ignore
return new (window.AudioContext || window.webkitAudioContext)()
}
catch (e)
{
console.error(e)
}
}
// Phaser game configuration. The key detail is audio.context: the
// pre-built AudioContext from getAudioContext() is injected so Phaser
// reuses that single context instead of creating its own.
const config: Phaser.Types.Core.GameConfig = {
type: Phaser.AUTO,
width: 800,
height: 600,
backgroundColor: '#2d2d2d',
parent: 'phaser-example',
scale: {
mode: Phaser.Scale.ScaleModes.FIT
},
render: {
pixelArt: true
},
// Reuse the externally created AudioContext.
audio: {
context: getAudioContext()
}
}

export default config
|
<reponame>Zefiros-Software/Args
/**
* @cond ___LICENSE___
*
* Copyright (c) 2016-2018 Zefiros Software.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @endcond
*/
#pragma once
#ifndef __ARGS_H__
#define __ARGS_H__
#include "args/option.h"
#define CXXOPTS_HAS_OPTIONAL
#include "cxxopts.hpp"
#include <unordered_map>
#include <string_view>
#include <optional>
#include <iosfwd>
namespace cxxopts
{
class Options;
}
// Thin wrapper around cxxopts: groups option registration, command-line
// parsing and value retrieval behind one interface, hiding the cxxopts
// types from callers (only forward-declared above).
class Args
{
public:

    Args(std::string_view programName, std::string_view description = "");

    virtual ~Args();

    // Register a batch of options under a named help group.
    bool AddOptions(std::string_view group, std::vector<Option> &&options);

    // Register a batch of options in the default (unnamed) group.
    bool AddOptions(std::vector<Option> &&options);

    // Register a single option, optionally under a named group.
    bool AddOption(Option &option, std::string_view group = "");

    // Declare which options are filled from positional arguments.
    void SetPositional(std::vector<std::string> &&positional, std::optional<std::string_view> positionalHelp = std::nullopt);

    // Parse the command line; exitOnHelp controls whether --help terminates
    // the process after printing usage.
    void Parse(int argc, const char **argv, bool exitOnHelp = true);

    void Parse(int argc, char **argv, bool exitOnHelp = true);

    // Print the usage/help text.
    void Help() const;

    // Look up a parsed option value by name.
    OptionValue GetOption(std::string_view argument) const;

    // All parsed (name, value) pairs.
    std::vector<std::pair<std::string_view, OptionValue >> GetArguments() const;

private:
    // NOTE(review): raw owning pointers to the cxxopts state — presumably
    // freed in ~Args(); confirm in the implementation file.
    cxxopts::Options *mParser;
    cxxopts::ParseResult *mResults;
};
#endif |
<gh_stars>0
package test;
import java.util.Scanner;
import java.util.Random;
import src.enigmaoop.utilities.*;
import src.enigmaoop.core.EngineSystem;
public class EncryptDecryptTest {
public static void main(String[] args) {
System.out.println("Testing 1000 random configurations");
int counter = 0;
for (int i=0; i<1000; i++) {
if (makeRandomTest("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam eget ligula eu lectus lobortis condimentum. Aliquam nonummy auctor massa. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Nulla at risus. Quisque purus magna, auctor et, sagittis ac, posuere eu, lectus. Nam mattis, felis ut adipiscing")) {
counter++;
}
}
System.out.printf("%d Tests passed \n %d tests failed", counter, 1000-counter);
}
private static boolean makeRandomTest(String originalText) {
int[] rotors = new int[3];
int[] positions = new int[26];
rotors = createThreeRandomNumbers(1, 6);
positions = createThreeRandomNumbers(0, 26);
String text = Converter.clearText(originalText);
EngineSystem machine = new EngineSystem(rotors, positions);
String encrypted = machine.encryptText(text);
machine = new EngineSystem(rotors, positions);
String decrypted = machine.encryptText(encrypted);
if (text.equals(decrypted)){
return true;
} else {
return false;
}
}
private static int[] createThreeRandomNumbers(int init, int end) {
Random generator = new Random();
int range = end - init;
int numbers[] = new int[3];
for(int i = 0; i<3; i++) {
numbers[i] = generator.nextInt(range) + init;
}
return numbers;
}
private static void showLog(String logName, int[] valors){
System.out.println(logName + ":");
for (int i=0; i<valors.length; i++){
System.out.print(valors[i] + " | ");
}
System.out.println("");
}
} |
/**
 * Loads named definitions from a registry and runs each one through a
 * processor before returning it.
 */
class PHPDefinitionLoader
{
    /** @var object Transforms a raw definition into its processed form. */
    protected $processor;

    /** @var object Named-definition lookup (hasDefinition/getDefinition). */
    protected $registry;

    public function __construct($processor, $registry)
    {
        $this->processor = $processor;
        $this->registry = $registry;
    }

    /**
     * Processes every known definition among $definitionNames.
     *
     * Names the registry does not know are silently skipped, so the result
     * may contain fewer entries than were requested.
     *
     * @param string[] $definitionNames
     * @return array processed definitions keyed by name
     */
    public function loadDefinitions(array $definitionNames)
    {
        $processedDefinitions = [];
        foreach ($definitionNames as $name) {
            if ($this->registry->hasDefinition($name)) {
                $definition = $this->registry->getDefinition($name);
                $processedDefinition = $this->processor->process($definition);
                $processedDefinitions[$name] = $processedDefinition;
            }
        }
        return $processedDefinitions;
    }
}
package core.checker.linearizability;
import lombok.extern.slf4j.Slf4j;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
/**
 * Base class for linearizability search strategies.
 *
 * NOTE(review): reporter() and run(Map) appear unfinished — reporter() only
 * sets an unused local, and run(Map) builds an abort callable that is never
 * scheduled or invoked. Confirm intent before relying on either.
 */
@Slf4j
public abstract class Search {

    // Stop the search, recording the given cause.
    public abstract void abort(Object cause);

    public abstract void report();

    public abstract void results();

    public abstract void results(double timeout, double timeVal);

    // Convenience overload: run with no options.
    public void run(){
        run(new HashMap<>());
    }

    // Presumably intended to report progress every `interval`; currently a
    // stub — the flag below is never read.
    public void reporter(double interval){
        boolean running=true;
        // Future<Object>
    }

    public void run(Map<String,Object> opts){
        // Out-of-memory abort handler; defined here but never registered —
        // see NOTE(review) on the class.
        Callable<Object> abort=()->{
            log.warn("Out of memory; aborting search");
            this.abort("out-of-memory");
            return null;
        };
    }
}
|
import React, { useEffect, useState } from 'react'
import { ExampleComponent } from 'react-surfaces'
import 'react-surfaces/dist/index.css'
const App = () => {
const [color1, setColor1] = useState('red')
const [color2, setColor2] = useState('green')
useEffect(() => {
setTimeout(() => {
setColor1('green')
setColor2('red')
}, 2000);
}, [])
return <>
<h2>1</h2>
<ExampleComponent variant={1} color1={color1} color2={color2} />
<h2>2</h2>
<ExampleComponent variant={2} color1={color1} color2={color2} />
<h2>3</h2>
<ExampleComponent variant={3} color1={color1} color2={color2} />
<h2>4</h2>
<ExampleComponent variant={4} color1={color1} color2={color2} />
<h2>5</h2>
<ExampleComponent variant={5} color1={color1} color2={color2} />
<h2>6</h2>
<ExampleComponent variant={6} color1={color1} color2={color2} />
</>
}
export default App
|
import type { BrowserActionDefinition } from '../../../lib/browser-destinations'
import type { Settings } from '../generated-types'
import type { Payload } from './generated-types'
import * as FullStory from '@fullstory/browser'
// Browser destination action: forwards Segment "track" calls to
// FullStory's FS.event() API.
const action: BrowserActionDefinition<Settings, typeof FullStory, Payload> = {
  title: 'Event',
  description: 'Track events',
  platform: 'web',
  // Subscribe to all track calls by default.
  defaultSubscription: 'type = "track"',
  fields: {
    name: {
      description: 'The name of the event to be tracked',
      label: 'name',
      required: true,
      type: 'string'
    },
    properties: {
      // Fixed typo in the user-facing description: "propeties" -> "properties".
      description: 'A properties object containing a payload',
      label: 'properties',
      required: false,
      type: 'object'
    }
  },
  // Default properties to an empty object so FS.event never receives
  // null/undefined.
  perform: (client, event) => {
    client.event(event.payload.name, event.payload.properties ?? {})
  }
}

export default action
|
package collins.kent.tutor.arithmetic;
import java.util.Random;
import collins.kent.tutor.Problem;
/**
 * A Problem that randomly delegates to either a double-arithmetic or an
 * integer-arithmetic problem, chosen anew on every generate() call.
 */
public class SingleTypeArithmeticProblem implements Problem {

    // The randomly chosen delegate; set by generate(), read by the getters.
    Problem actualProblem;

    @Override
    public Problem generate(Random rng) {
        // Coin flip between a floating-point and an integer problem.
        actualProblem = rng.nextBoolean()
                ? new RandomDoubleArithmeticProblem().generate(rng)
                : new RandomIntegerArithmeticProblem().generate(rng);
        return actualProblem;
    }

    @Override
    public String getStatement() {
        // Delegates to the generated problem; throws NPE if generate()
        // was never called.
        return actualProblem.getStatement();
    }

    @Override
    public String getAnswer() {
        return actualProblem.getAnswer();
    }
}
|
# Evaluate the 512+512+512 N-VB-ADJ-ADV model (checkpoint 13) on the raw
# WikiText-103 validation set; per the flag names, the augmentation keeps
# only nouns/verbs in the first third and scoring uses the penultimate sixth.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-VB-ADJ-ADV/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-VB-ADJ-ADV/13-512+512+512-N-VB-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_first_third_sixth --eval_function penultimate_sixth_eval
#include <iostream>
using namespace std;

// Read two integers from standard input and print the larger one.
// On a tie the second value is reported (same as the original ternary).
int main() {
    int first = 0;
    int second = 0;
    cin >> first >> second;

    int larger = second;
    if (first > second) {
        larger = first;
    }
    cout << "The largest number is " << larger;
    return 0;
}
#!/bin/bash

###########################################################
# Create a training session in CyTrONE
###########################################################

###########################################################
# Usage information
# $ ./create_training.sh [training_choice]
#
# NOTE: If a training choice is provided as argument, the value will
#       be used to identify the type of session to be created;
#       otherwise a menu with available choice will be displayed, and
#       user input will be requested.
###########################################################

# Load configuration (CROND_PREFIX defaults to /home/cyuser via the
# ':=' expansion on the no-op ':' builtin).
: CROND_PREFIX=${CROND_PREFIX:=/home/cyuser}
CYTRONE_SCRIPTS_CONFIG=$CROND_PREFIX/cytrone/scripts/CONFIG
if [ -f $CYTRONE_SCRIPTS_CONFIG ]; then
    . $CYTRONE_SCRIPTS_CONFIG
else
    echo "create_training: ERROR: Configuration file not found: ${CYTRONE_SCRIPTS_CONFIG}"
    exit 1
fi

###########################################################
# Prepare session information

# Set default number of cyber range instances to be created
COUNT=2

# Set choice to argument, or display selection menu
if [ $# -ge 1 ];
then
    CHOICE=$1
else
    DONE=false
    until ${DONE} ;
    do
        echo "# Please select the training type."
        echo "  1) NIST Level 1 (English)"
        echo "  2) NIST Level 2 (English)"
        echo "  3) NIST Level 1 (Japanese)"
        echo "  4) NIST Level 2 (Japanese)"
        echo "  5) User defined"
        read -p "Enter the number of your choice: " CHOICE
        if [ ${CHOICE} -ge 1 -a ${CHOICE} -le 5 ];
        then
            DONE=true
        else
            echo "ERROR: Unrecognized choice, try again."
        fi
    done
fi

# Configure parameters depending on choice
case "${CHOICE}" in
1)  LANGUAGE="en"
    TYPE="Scenario-Based Training"
    SCENARIO="Information Security Testing and Assessment"
    LEVEL="Level 1 (Easy)"
    ;;
2)  LANGUAGE="en"
    TYPE="Scenario-Based Training"
    SCENARIO="Information Security Testing and Assessment"
    LEVEL="Level 2 (Medium)"
    ;;
3)  LANGUAGE="ja"
    TYPE="シナリオに基づいた演習"
    SCENARIO="情報セキュリティテスト&評価"
    LEVEL="レベル 1 (イージー)"
    ;;
4)  LANGUAGE="ja"
    TYPE="シナリオに基づいた演習"
    SCENARIO="情報セキュリティテスト&評価"
    LEVEL="レベル 2 (ミディアム)"
    ;;
5)  LANGUAGE="en"
    TYPE="Scenario-Based Training"
    SCENARIO="Information Security Testing and Assessment"
    LEVEL="Demo Level"
    ;;
*)  echo "Unrecognized choice, try again."
    # Fix: previously the script fell through here (only reachable with an
    # invalid command-line argument, since the menu loop validates input)
    # and went on to call trngcli.py with empty TYPE/SCENARIO/LEVEL.
    exit 1
    ;;
esac

###########################################################
# Display training settings
echo -e "# Create training using CyTrONE."
echo -e "* Training settings:"
echo -e "  - USER:\t${USER}"
echo -e "  - PASSWORD:\t******"
echo -e "  - TYPE:\t${TYPE}"
echo -e "  - SCENARIO:\t${SCENARIO}"
echo -e "  - LEVEL:\t${LEVEL}"
echo -e "  - COUNT:\t${COUNT}"
echo -e "  - LANGUAGE:\t${LANGUAGE}"

###########################################################
# Execute action via CyTrONE
ACTION="create_training"
../code/trngcli.py ${TRAINING_HOST}:${TRAINING_PORT} "user=${USER}&password=${PASSWORD}&action=${ACTION}&count=${COUNT}&lang=${LANGUAGE}&type=${TYPE}&scenario=${SCENARIO}&level=${LEVEL}"
exit $?
|
#!/bin/bash -x
# Deploys a new homebrew formula file to a github homebrew formula repo: $HBREPO
# Requires SSH credentials in ssh-agent to work.
# Run by Travis-CI when a new release is created on GitHub.
# Do not edit this file.

# Fix: abort on the first failing command. Without this, a failed clone or
# pushd would let the later git add/commit/push run against whatever
# directory we happen to be in.
set -e

source .metadata.sh
make ${BINARY}.rb
git config --global user.email "${BINARY}@auto.releaser"
git config --global user.name "${BINARY}-auto-releaser"
rm -rf homebrew_release_repo
git clone git@github.com:${HBREPO}.git homebrew_release_repo
cp ${BINARY}.rb homebrew_release_repo/Formula
pushd homebrew_release_repo
git add Formula/${BINARY}.rb
git commit -m "Update ${BINARY} on Release: v${VERSION}-${ITERATION}"
git push
popd
|
<gh_stars>1-10
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.csp.sentinel.demo.pigeon;
import com.alibaba.csp.sentinel.init.InitExecutor;
import com.alibaba.csp.sentinel.slots.block.RuleConstant;
import com.alibaba.csp.sentinel.slots.block.flow.FlowRule;
import com.alibaba.csp.sentinel.slots.block.flow.FlowRuleManager;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import java.util.Collections;
/**
* Provider demo for Pigeon 2.9.7. Please add the following VM arguments:
* <pre>
* -Djava.net.preferIPv4Stack=true
* -Dcsp.sentinel.api.port=8720
* -Dproject.name=pigeon-provider-demo
* </pre>
*
* @author <NAME>
*/
/**
 * Bootstraps the Pigeon service provider: eagerly initializes Sentinel,
 * registers a QPS flow rule (limit 10) for the sayHello method resource,
 * then starts the Spring context that exposes the service.
 */
public class FooProviderBootstrap {

    // Resource keys: interface-level, and method-level in the form
    // "interface:method(signature)".
    private static final String INTERFACE_RES_KEY = FooService.class.getName();
    private static final String RES_KEY = INTERFACE_RES_KEY + ":sayHello(java.lang.String)";

    public static void main(String[] args) {
        // Users don't need to manually call this method.
        // Only for eager initialization.
        InitExecutor.doInit();
        initFlowRule();
        ClassPathXmlApplicationContext providerContext = new ClassPathXmlApplicationContext("spring/provider.xml");
        providerContext.start();
        System.out.println("Service provider is ready");
    }

    // Installs a single QPS-based flow rule (threshold 10) for the method resource.
    private static void initFlowRule() {
        FlowRule flowRule = new FlowRule(RES_KEY)
            .setCount(10)
            .setGrade(RuleConstant.FLOW_GRADE_QPS);
        FlowRuleManager.loadRules(Collections.singletonList(flowRule));
    }
}
|
/*
Copyright (c) 2012 TU Dresden - Database Technology Group
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Author: <NAME> <<EMAIL>>
Current version: 1.0 (released February 21, 2012)
Version history:
- 1.0 Initial release (February 21, 2012)
*/
#ifndef _BDBIMPL_INDEX_H_
#define _BDBIMPL_INDEX_H_
#include <map>
#include <set>
#include <string>
#include <contest_interface.h>
#include <common/macros.h>
#include "mutex.h"
class Db;
class Dbt;
class Dbc;
class IndexSchema;
class DbTxn;
class DbEnv;
// Class representing an index handle.
//
// Wraps a Berkeley DB database handle plus the shared IndexSchema that
// describes the key layout. Iterators register themselves with the handle
// so open cursors can be tracked.
class Index{
  public:
    // Destructor
    ~Index();

    // Opens an index
    static ErrorCode Open(const char* name, Index** index);

    // Close this index
    void Close();

    // Create a cursor to access the data inside this index
    // NOTE(review): ownership of the returned Dbc is not visible here —
    // presumably the caller must close it; confirm in the implementation.
    Dbc* Cursor(Transaction* tx);

    // Converts the given Dbt to a key of this index
    Key GetKey(const Dbt *bdb_key);

    // Converts the given Key of this index into a Dbt object
    // (max selects the largest representable key, presumably for
    // open-ended range scans — confirm at call sites)
    Dbt* GetBDBKey(Key key, bool max = false);

    // Insert the given record into the index
    ErrorCode Insert(Transaction *tx, Record *record);

    // Update the given record with the given payload
    ErrorCode Update(Transaction *tx, Record *record, Block *payload, uint8_t flags);

    // Delete the given record
    ErrorCode Delete(Transaction *tx, Record *record, uint8_t flags);

    // Checks whether the given record is compatible with this index
    bool Compatible(Record *record);

    // Checks whether the given key is compatible with this index
    bool Compatible(Key &key);

    // Register a new iterator handle
    bool RegisterIterator(Iterator* iterator);

    // Unregister an iterator
    void UnregisterIterator(Iterator* iterator);

    // Return the name of this index
    const char* name() const { return name_; };

    // Return whether the index has been closed
    bool closed () const { return closed_; };

    // Get the schema of the referenced index
    IndexSchema* schema() { return schema_; };

  private:
    // Constructor
    Index(const char* name);

    // The Berkeley DB database handle
    Db *db_;

    // The Berkeley DB environment to use
    DbEnv *env_;

    // The name of this index
    const char* name_;

    // The structure of this index
    IndexSchema* schema_;

    // Whether the index has been closed
    bool closed_;

    // A set of all open iterators that use this index handle
    std::set<Iterator*> iterators_;

    // A mutex for protecting the insert and read operations on the iterator set
    Mutex mutex_;

    DISALLOW_COPY_AND_ASSIGN(Index);
};
// Represents a single or multicolumn index.
//
// One IndexSchema is shared by all open Index handles of the same index;
// it tracks those handles and the in-flight modifying transactions.
class IndexSchema{
  public:
    // Constructor
    IndexSchema(uint8_t attribute_count, KeyType type);

    // Destructor
    ~IndexSchema();

    // Create a new index schema
    static void Create(const char* name, uint8_t column_count, KeyType types);

    // Convert the given Dbt to a key of this index
    Key GetKey(const Dbt *bdb_key);

    // Convert the given Key of this index into a Dbt object
    Dbt *GetBDBKey(Key key, bool max = false);

    // Checks whether the given key is compatible with this schema
    bool Compatible(Key &key);

    // Register a new index handle
    void RegisterHandle(Index* handle);

    // Unregister an index handle
    void UnregisterHandle(Index* handle);

    // Close all registered handles
    void CloseHandles();

    // Start a new modifying transaction on this index
    bool BeginTransaction(DbTxn *tx);

    // End a modifying transaction on this index
    void EndTransaction(DbTxn *tx);

    // Try to make this index read-only
    // NOTE(review): return value presumably reports whether the switch
    // succeeded (e.g. no open modifying transactions) — confirm.
    bool MakeReadOnly();

    uint8_t attribute_count() const { return attribute_count_; };

    AttributeType* type(){ return type_; };

    size_t size(){return size_;};

  private:
    // The number of attributes that form a key of this index
    uint8_t attribute_count_;

    // An array of attribute types
    AttributeType* type_;

    // The size of a key of this index in byte
    size_t size_;

    // Whether the index is readonly
    bool read_only_;

    // A set of all open handles of this index structure
    std::set<Index*> handles_;

    // A set of open transactions that have modified this index
    std::set<DbTxn*> transactions_;

    // A mutex for protecting the insert and read operations on the handle set
    Mutex mutex_;

    // A mutex for protecting the insert and read operations on the transaction set
    Mutex transaction_mutex_;

    DISALLOW_COPY_AND_ASSIGN(IndexSchema);
};
// Defines a simple index manager.
//
// It is used to manage the schemas of the created indices.
//
// IndexManager implements the Singleton Pattern: Initialize()/Destroy()
// control the instance lifetime, pthread_once guards initialization.
class IndexManager{
  public:
    // Return the singleton instance of IndexManager
    static IndexManager& getInstance();

    // Returns the index schema with the given name
    IndexSchema *Find(std::string name);

    // Insert a index schema (may overwrite existing entry)
    void Insert(std::string name, IndexSchema* structure);

    // Search and delete the index structure with the given name
    ErrorCode Remove(std::string name);

    // Initialize the singleton instance
    static void Initialize();

    // Destroy the singleton instance
    static void Destroy();

  private:
    // Private constructor (don't allow instanciation from outside)
    IndexManager(){};

    // Destructor
    ~IndexManager(){};

    // A map holding the structures of all indices
    std::map<std::string,IndexSchema*> indices_;

    // A mutex for protecting the insert and read operations
    // Note: It would be better to use some type of S/X locks
    // as multiple threads may read the map concurrently without
    // affecting each other (this map however is not performance-critical)
    Mutex mutex_;

    // The singleton instance of IndexManager
    static IndexManager* instance_;

    // A pthread once handle to guarantee that the singleton instance is
    // only initialized once
    static pthread_once_t once_;

    DISALLOW_COPY_AND_ASSIGN(IndexManager);
};
#endif // _BDBIMPL_INDEX_H_
|
#!/usr/bin/env bash
download-android
echo y | android update sdk --no-ui --filter platform-tool
echo y | android update sdk --no-ui --filter android-21
echo y | android update sdk --no-ui --all --filter build-tools-21.1.2 --force
echo y | android update sdk --no-ui --filter extra
|
#
# TEST PURPOSE/DESCRIPTION:
# ------------------------
#
# This test checks the capability of the workflow to have the user
# specify a new grid (as opposed to one of the predefined ones in the
# workflow) of ESGgrid type.

# Community-mode run using FV3GFS data for both initial and lateral
# boundary conditions, single 6-hour forecast cycle.
RUN_ENVIR="community"
PREEXISTING_DIR_METHOD="rename"

CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp_regional"

EXTRN_MDL_NAME_ICS="FV3GFS"
EXTRN_MDL_NAME_LBCS="FV3GFS"
USE_USER_STAGED_EXTRN_FILES="TRUE"

DATE_FIRST_CYCL="20190701"
DATE_LAST_CYCL="20190701"
CYCL_HRS=( "00" )

FCST_LEN_HRS="6"
LBC_SPEC_INTVL_HRS="3"

#
# Define custom grid.
#
GRID_GEN_METHOD="ESGgrid"

# Grid center, cell size (m) and dimensions for the custom ESG grid.
ESGgrid_LON_CTR="-97.5"
ESGgrid_LAT_CTR="41.25"
ESGgrid_DELX="25000.0"
ESGgrid_DELY="25000.0"
ESGgrid_NX="216"
ESGgrid_NY="156"
ESGgrid_PAZI="0.0"
ESGgrid_WIDE_HALO_WIDTH="6"

DT_ATMOS="40"

LAYOUT_X="8"
LAYOUT_Y="12"
BLOCKSIZE="13"

POST_OUTPUT_DOMAIN_NAME="custom_ESGgrid"

# Write-component (quilting) settings: output on a Lambert conformal grid
# centered on the same point as the compute grid.
QUILTING="TRUE"
if [ "$QUILTING" = "TRUE" ]; then
  WRTCMP_write_groups="1"
  WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y ))
  WRTCMP_output_grid="lambert_conformal"
  WRTCMP_cen_lon="${ESGgrid_LON_CTR}"
  WRTCMP_cen_lat="${ESGgrid_LAT_CTR}"
  WRTCMP_stdlat1="${ESGgrid_LAT_CTR}"
  WRTCMP_stdlat2="${ESGgrid_LAT_CTR}"
  WRTCMP_nx="200"
  WRTCMP_ny="150"
  WRTCMP_lon_lwr_left="-122.21414225"
  WRTCMP_lat_lwr_left="22.41403305"
  WRTCMP_dx="${ESGgrid_DELX}"
  WRTCMP_dy="${ESGgrid_DELY}"
fi
|
import deDE from './snippet/de-DE';
import enGB from './snippet/en-GB';
// Registers the "product-feed" administration module with Shopware,
// wiring up translations, the CRUD/log routes and the catalogue menu entry.
Shopware.Module.register('product-feed', {
    color: '#57D9A3',
    icon: 'default-shopping-paper-bag-product',
    title: 'product-feed.general.title',
    description: 'product-feed.general.description',
    // Consistency fix: use single quotes like the rest of this literal
    // (values are unchanged).
    name: 'product-feed-plugin',
    type: 'plugin',

    snippets: {
        'de-DE': deDE,
        'en-GB': enGB
    },

    routes: {
        list: {
            component: 'product-feed-list',
            path: 'list'
        },
        detail: {
            component: 'product-feed-detail',
            path: 'detail/:id',
            meta: {
                parentPath: 'product.feed.list'
            }
        },
        create: {
            component: 'product-feed-create',
            path: 'create'
        },
        logs: {
            component: 'product-feed-logs',
            path: 'logs'
        },
        logDetails: {
            component: 'product-feed-logsDetail',
            path: 'logsDetails/:id',
            meta: {
                parentPath: 'product.feed.logs'
            }
        }
    },

    // Menu entry under the catalogue section.
    navigation: [{
        label: 'product-feed.general.title',
        color: '#57D9A3',
        path: 'product.feed.list',
        position: 100,
        parent: 'sw-catalogue'
    }],
});
|
<filename>src/test/java/de/lmu/cis/ocrd/cli/test/TrainCommandTest.java
package de.lmu.cis.ocrd.cli.test;
import com.google.gson.Gson;
import de.lmu.cis.ocrd.cli.CommandLineArguments;
import de.lmu.cis.ocrd.cli.EvaluateCommand;
import de.lmu.cis.ocrd.cli.PostCorrectionCommand;
import de.lmu.cis.ocrd.cli.TrainCommand;
import de.lmu.cis.ocrd.config.Parameters;
import de.lmu.cis.ocrd.ml.DMProtocol;
import de.lmu.cis.ocrd.ml.LEProtocol;
import de.lmu.cis.ocrd.ml.Model;
import de.lmu.cis.ocrd.ml.Protocol;
import de.lmu.cis.ocrd.pagexml.METS;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.*;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
public class TrainCommandTest {
private final Path workspace = Paths.get("src/test/resources/workspace");
private Path tmp;
private Parameters parameters;
private final String mets = "src/test/resources/workspace/mets.xml";
private final String inputFileGroupEval = "OCR-D-EVAL";
private final String outputFileGroup = "OCR-D-POST-CORRECTED";
private final String logLevel = "INFO";
// private final String logLevel = "DEBUG"; // use this to enable debugging
@Before
public void init() throws IOException {
tmp = Files.createTempDirectory("OCR-D-CIS-JAVA");
try (Reader r = new FileReader(Paths.get("src/test/resources/workspace/config.json").toFile())) {
parameters = new Gson().fromJson(r, Parameters.class);
}
parameters.setDir(tmp.toString());
try {
Files.createDirectory(tmp);
} catch (FileAlreadyExistsException e) {
// ignore
}
Files.copy(Paths.get(mets), Paths.get(tmp.toString(), "mets.xml"), REPLACE_EXISTING);
}
@After
public void deinit() {
try {
Files.copy(Paths.get(tmp.toString(), "mets.xml"), Paths.get(mets), REPLACE_EXISTING);
} catch (Exception e) {
// ignore
}
if ("debug".equalsIgnoreCase(logLevel)) { // do not remove tmp dir if debugging
return;
}
try {
FileUtils.deleteDirectory(Paths.get(workspace.toString(), outputFileGroup).toFile());
} catch (Exception e) {
// ignore
}
try {
FileUtils.deleteDirectory(tmp.toFile());
} catch (Exception e) {
// ignore
}
}
@Test
public void test() throws Exception {
train();
postCorrect();
eval();
}
private void train() throws Exception {
String inputFileGroupTrain = "OCR-D-PROFILED";
String[] args = {
"-c", "train",
"--mets", mets,
"--parameter", new Gson().toJson(parameters),
"-I", inputFileGroupTrain,
"--log-level", logLevel,
};
CommandLineArguments cla = CommandLineArguments.fromCommandLine(args);
TrainCommand cmd = new TrainCommand();
cmd.execute(cla);
// 3 runs (dle, rr, dm), 2 files for each run with 2 OCRs
for (int i = 0; i < 2; i++) {
assertThat(cmd.getParameters().getLETraining().getModel(i+1).toFile().exists(), is(true));
assertThat(cmd.getParameters().getLETraining().getTraining(i+1).toFile().exists(), is(true));
assertThat(cmd.getParameters().getRRTraining().getModel(i+1).toFile().exists(), is(true));
assertThat(cmd.getParameters().getRRTraining().getTraining(i+1).toFile().exists(), is(true));
assertThat(cmd.getParameters().getDMTraining().getModel(i+1).toFile().exists(), is(true));
assertThat(cmd.getParameters().getDMTraining().getTraining(i+1).toFile().exists(), is(true));
}
// check model
assertThat(cmd.getParameters().getModel().toFile().exists(), is(true));
Model model = Model.open(cmd.getParameters().getModel());
assertThat(model.getLEFeatureSet(), notNullValue());
assertThat(model.getLEFeatureSet().size(), is(cmd.getParameters().getLETraining().getFeatures().size()));
assertThat(model.getRRFeatureSet(), notNullValue());
assertThat(model.getRRFeatureSet().size(), is(cmd.getParameters().getRRTraining().getFeatures().size()));
assertThat(model.getDMFeatureSet(), notNullValue());
assertThat(model.getDMFeatureSet().size(), is(cmd.getParameters().getDMTraining().getFeatures().size()));
assertThat(checkInputStream(model.openLanguageModel()), is(true));
for (int i = 0; i < 2; i++) {
assertThat(checkInputStream(model.openLEModel(i)), is(true));
assertThat(checkInputStream(model.openRRModel(i)), is(true));
assertThat(checkInputStream(model.openDMModel(i)), is(true));
}
}
private static boolean checkInputStream(InputStream is) throws IOException {
is.close();
return is != null;
}
private void postCorrect() throws Exception {
String[] args = {
"-c", "post-correct",
"--mets", mets,
"--parameter", new Gson().toJson(parameters),
"-I", inputFileGroupEval,
"-O", outputFileGroup,
"--log-level", logLevel,
};
assertThat(parameters.getModel().toFile().exists(), is(true));
for (int i = 0; i < 2; i++) {
final String ofg = outputFileGroup + "-" + (i+1);
args[9] = ofg;
parameters.setNOCR(i+1);
args[5] = new Gson().toJson(parameters); // set parameter as inline json string
CommandLineArguments cla = CommandLineArguments.fromCommandLine(args);
PostCorrectionCommand cmd = new PostCorrectionCommand();
cmd.execute(cla);
assertThat(cmd.getParameters().getNOCR(), is(parameters.getNOCR()));
// check corrected files in output file group
final Path dir = Paths.get(workspace.toString(), ofg);
assertThat(dir.toFile().exists(), is(true));
assertThat(dir.toFile().isDirectory(), is(true));
assertThat(numberOfFiles(dir), is(1));
assertThat(METS.open(Paths.get(mets)).findFileGrpFiles(ofg).size(), is(1));
assertThat(Paths.get(METS.open(Paths.get(mets)).findFileGrpFiles(ofg).get(0).getFLocat()).toFile().exists(), is(true));
FileUtils.deleteDirectory(dir.toFile());
assertThat(dir.toFile().exists(), is(false));
assertThat(Paths.get(METS.open(Paths.get(mets)).findFileGrpFiles(ofg).get(0).getFLocat()).toFile().exists(), is(false));
// check protocols
assertThat(cmd.getParameters().getLETraining().getProtocol(i+1, parameters.isRunLE()).toFile().exists(), is(true));
checkReadProtocol(new LEProtocol(), cmd.getParameters().getLETraining().getProtocol(i+1, parameters.isRunLE()));
assertThat(cmd.getParameters().getDMTraining().getProtocol(i+1, parameters.isRunLE()).toFile().exists(), is(true));
checkReadProtocol(new DMProtocol(), cmd.getParameters().getDMTraining().getProtocol(i+1, parameters.isRunLE()));
}
}
private void eval() throws Exception {
String[] args = {
"-c", "eval",
"--mets", mets,
"--parameter", new Gson().toJson(parameters),
"-I", inputFileGroupEval,
"--log-level", logLevel,
};
for (int i = 0; i < 2; i++) {
parameters.setNOCR(i+1);
args[5] = new Gson().toJson(parameters); // set parameter as inline json string
CommandLineArguments cla = CommandLineArguments.fromCommandLine(args);
EvaluateCommand cmd = new EvaluateCommand();
cmd.execute(cla);
assertThat(cmd.getParameters().getDMTraining().getEvaluation(i+1, parameters.isRunLE()).toFile().exists(), is(true));
}
}
/**
 * Asserts that the protocol file at {@code path} can be opened and parsed
 * by the given {@code protocol} reader. The stream is closed automatically
 * by try-with-resources.
 */
private void checkReadProtocol(Protocol protocol, Path path) throws Exception {
    try (final InputStream in = new FileInputStream(path.toFile())) {
        protocol.read(in);
    }
}
/**
 * Counts the direct entries of the given directory.
 * Returns 0 when the path cannot be listed (File.listFiles() returns null
 * for non-directories and on I/O errors), mirroring the original contract.
 */
private static int numberOfFiles(Path dir) {
    final File[] entries = dir.toFile().listFiles();
    if (entries == null) {
        return 0;
    }
    return entries.length;
}
}
|
import tensorflow as tf

# Build an example CNN: Conv2D/MaxPool stages that progressively reduce
# spatial resolution, with BatchNormalization in the deeper stages and a
# final Flatten so the features can feed a dense head.
#
# Fix: in tf.keras only the FIRST layer's `input_shape` is honoured; the
# `input_shape` arguments previously passed to the later Conv2D layers were
# silently ignored, so they are removed here to avoid misleading readers.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Conv2D(32, (3, 3), input_shape=(300, 300, 3)))
model.add(tf.keras.layers.MaxPool2D(2, 2))
model.add(tf.keras.layers.Conv2D(64, (3, 3)))
model.add(tf.keras.layers.MaxPool2D(2, 2))
# Deeper stages: ReLU activations plus batch normalization.
model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='relu'))
model.add(tf.keras.layers.MaxPool2D(2, 2))
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='relu'))
model.add(tf.keras.layers.MaxPool2D(2, 2))
model.add(tf.keras.layers.BatchNormalization())
model.add(tf.keras.layers.Flatten())
import React from 'react';
function KeyboardInputForm() {
return (
<form>
<header>
<h3>Keyboard Input Form</h3>
</header>
<label>
Input:
<input type="text" name="keyboardInput" />
</label>
</form>
);
}
export default KeyboardInputForm; |
<filename>1685-Sum of Absolute Differences in a Sorted Array/cpp_1685/Solution1.h
/**
* @author ooooo
* @date 2020/12/20 13:07
*/
#ifndef CPP_1685__SOLUTION1_H_
#define CPP_1685__SOLUTION1_H_
#include <iostream>
#include <vector>
#include <stack>
#include <queue>
#include <set>
#include <unordered_map>
#include <unordered_set>
#include <numeric>
#include <math.h>
using namespace std;
class Solution {
 public:
    /**
     * For each index i of the non-decreasingly sorted array `nums`,
     * computes sum over j of |nums[i] - nums[j]| using a prefix-sum array:
     *   left part  (j < i): i * nums[i] - prefix[i-1]
     *   right part (j > i): (prefix[n-1] - prefix[i]) - (n-1-i) * nums[i]
     * Runs in O(n) time with O(n) extra space.
     *
     * Fixes vs. the original: the `if (i < n)` guard inside the loop was
     * always true and has been removed; an empty input no longer reads
     * nums[0] out of bounds.
     */
    std::vector<int> getSumAbsoluteDifferences(std::vector<int> &nums) {
        const int n = static_cast<int>(nums.size());
        if (n == 0) {
            return {};
        }
        std::vector<int> prefix(n);
        prefix[0] = nums[0];
        for (int i = 1; i < n; ++i) {
            prefix[i] = prefix[i - 1] + nums[i];
        }
        std::vector<int> ans(n, 0);
        for (int i = 0; i < n; ++i) {
            if (i > 0) {
                // contribution of all elements strictly left of i
                ans[i] += i * nums[i] - prefix[i - 1];
            }
            // contribution of all elements strictly right of i
            ans[i] += (prefix[n - 1] - prefix[i]) - (n - 1 - i) * nums[i];
        }
        return ans;
    }
};
#endif //CPP_1685__SOLUTION1_H_
|
import { QuaggaJSConfigObject } from '@ericblade/quagga2';
// Default Quagga2 barcode-scanner configuration.
// Callers typically override `inputStream.target` with the DOM element that
// should host the camera preview before starting the scanner.
export const DEFAULT_CONFIG: QuaggaJSConfigObject = {
  inputStream: {
    name: 'Live',
    type: 'LiveStream',
    target: null, // viewport DOM node; null until supplied by the caller
    constraints: {
      width: { min: 640 },
      height: { min: 480 },
      aspectRatio: { min: 1, max: 2 },
      facingMode: 'environment', // or user
    },
    singleChannel: false // true: only the red color-channel is read
  },
  locator: {
    patchSize: 'medium',
    halfSample: true
  },
  locate: true,
  numOfWorkers: 4,
  decoder: {
    // only Code 128 barcodes are decoded by default
    readers: ['code_128_reader']
  }
};
|
#!/bin/bash
# Build the base Hadoop Docker image from the Dockerfile in this directory.
printf '\n'
printf '\nbuild docker hadoop image\n\n'
sudo docker build -t hadoop-base .
printf '\n'
#!/bin/bash
#
# Since: May, 2017
# Author: manish.vaishnani@oracle.com
# Description: script to build a Docker image for Oracle Data Integrator. The install mode is "standalone"
#
#
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
#
# Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Universal Permissive License v 1.0 as shown at http://oss.oracle.com/licenses/upl.
#
# Print usage information and exit successfully.
# The list of selectable versions shown in the heredoc is derived at runtime
# from the sub-directories of the current working directory.
usage() {
cat << EOF
Usage: buildDockerImage.sh -v [version] [-s]
Builds a Docker Image for Oracle Data Integrator.
Parameters:
-v: Release version to build. Required.
Choose one of: $(for i in $(ls -d */); do echo -n "${i%%/} "; done)
-s: skips the MD5 check of packages
Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
Licensed under the Universal Permissive License v 1.0 as shown at http://oss.oracle.com/licenses/upl.
EOF
# usage is informational, not an error, so exit with success
exit 0
}
# Validate packages
# Verify the MD5 checksums of all required install packages against the
# accompanying *.download manifest files; abort the build on mismatch.
# Fix: the original ran `exit $?` after two `echo` commands, so `$?` held
# the echo's status (0) and the script always exited successfully even
# when the checksum failed.
checksumPackages() {
  echo "Checking if required packages are present and valid..."
  if ! md5sum -c *.download; then
    echo "MD5 for required packages to build this image did not match!"
    echo "Make sure to download missing files in folder dockerfiles. See *.download files for more information"
    exit 1
  fi
}
#Parameters
VERSION="12.2.1.3.0"   # default ODI release to build
SKIPMD5=0              # 1 = skip package MD5 verification

# Parse options. The optstring retains the historical (unused) d/g/i flags
# for backward compatibility with existing callers.
while getopts "hsdgiv:" optname; do
  case "$optname" in
    "h")
      usage
      ;;
    "s")
      SKIPMD5=1
      ;;
    "v")
      VERSION="$OPTARG"
      ;;
    *)
      # Should not occur
      echo "Unknown error while processing options inside buildDockerImage.sh"
      ;;
  esac
done

# ODI Image Name
IMAGE_NAME="oracle/odi:$VERSION"

# Go into version folder.
# Fix: abort if the folder does not exist instead of silently continuing
# and building from the wrong directory; quote to survive odd names.
cd "$VERSION" || { echo "Version folder '$VERSION' not found."; exit 1; }

if [ ! "$SKIPMD5" -eq 1 ]; then
  checksumPackages
else
  echo "Skipped MD5 checksum."
fi

# Proxy settings: forward any proxy environment variables as build args.
PROXY_SETTINGS=""
if [ "${http_proxy}" != "" ]; then
  PROXY_SETTINGS="$PROXY_SETTINGS --build-arg http_proxy=${http_proxy}"
fi
if [ "${https_proxy}" != "" ]; then
  PROXY_SETTINGS="$PROXY_SETTINGS --build-arg https_proxy=${https_proxy}"
fi
if [ "${ftp_proxy}" != "" ]; then
  PROXY_SETTINGS="$PROXY_SETTINGS --build-arg ftp_proxy=${ftp_proxy}"
fi
if [ "${no_proxy}" != "" ]; then
  PROXY_SETTINGS="$PROXY_SETTINGS --build-arg no_proxy=${no_proxy}"
fi
if [ "$PROXY_SETTINGS" != "" ]; then
  echo "Proxy settings were found and will be used during build."
fi

# ################## #
# BUILDING THE IMAGE #
# ################## #
echo "Building image '$IMAGE_NAME' ..."
echo "Proxy Settings '$PROXY_SETTINGS'"

# BUILD THE IMAGE (replace all environment variables)
# PROXY_SETTINGS is intentionally unquoted: it holds multiple words that
# must undergo word splitting into separate --build-arg options.
BUILD_START=$(date '+%s')
docker build --force-rm=true --no-cache=true $PROXY_SETTINGS -t "$IMAGE_NAME" -f Dockerfile . || {
  echo "There was an error building the image."
  exit 1
}
BUILD_END=$(date '+%s')
# Fix: arithmetic expansion instead of the legacy backtick `expr` form.
BUILD_ELAPSED=$(( BUILD_END - BUILD_START ))
echo ""

# Reaching this point means the build succeeded: a failure exits above.
# Fix: the original tested `$?` here, but that status came from the
# preceding `echo ""`, so the failure branch was unreachable dead code.
cat << EOF
Oracle Data Integrator Docker Image for version: $VERSION is ready to be extended.
--> $IMAGE_NAME
Build completed in $BUILD_ELAPSED seconds.
EOF
|
#!/bin/bash
set -eu

# FIND LOSS INCREASES SH
# Does val_loss and val_loss delta analysis across the run
# Input: Provide an experiment directory
# Output: Information printed to screen (pipe this into less)

# Resolve this script's directory and the supervisor root.
# Fix: quote $0 and the command substitutions so paths containing
# whitespace survive word splitting.
THIS=$( readlink --canonicalize "$( dirname "$0" )" )
SUPERVISOR=$( readlink --canonicalize "$THIS/../../.." )

export PYTHONPATH+=:$SUPERVISOR/workflows/common/python

set -x
# Fix: "$@" preserves argument boundaries; the original unquoted $*
# broke any argument that contained whitespace.
python3 -u "$THIS/find-loss-increases.py" "$@"
import React, { HTMLProps } from "react";
// CSS class applied to the main layout region
// (NOTE(review): "hg" presumably abbreviates "holy grail" layout — confirm
// against the stylesheet).
const classes = "layout_hg__main";
/**
 * Layout region for the page's main content.
 * Forwards all remaining props to the underlying <main> element and
 * applies the module's layout class.
 */
export function Main(props: HTMLProps<HTMLElement>) {
  const { children, ...mainProps } = props;
  return (
    <main {...mainProps} className={classes}>
      {children}
    </main>
  );
}
|
<gh_stars>0
# The Book of Ruby - http://www.sapphiresteel.com

# parentheses avoid ambiguity...
#
# This example deliberately defines methods with the same names as local
# variables to show how Ruby resolves the ambiguity: a bare identifier that
# matches a known local variable refers to the variable, while an explicit
# argument list (empty parentheses) forces the method call.
greet = "Hello"
name = "Fred"

def greet
  return "Good morning"
end

def name
  return "Mary"
end

def sayHi( aName )
  return "Hi, #{aName}"
end

# Both forms print "Hello": the local variable shadows the greet method.
puts( greet )
puts greet
# Prints "Hi, Fred": bare `name` resolves to the local variable.
puts( sayHi( name ) )
# Prints "Hi, Mary": the empty parentheses force a call to the name method.
puts( sayHi( name() ) )
|
#!/bin/bash
# Stop the HTTP server by killing every running 'start.py' process.
# Fixes: corrected the typo "hhttp" in the exit message; replaced legacy
# backticks with $( ); quoted the PID passed to kill.
#
# NOTE(review): SIGKILL (-9) gives the server no chance to clean up;
# consider sending SIGTERM first. Kept as-is to preserve behavior.
PROCESS=$(ps -ef | grep 'start.py' | grep -v grep | grep -v PPID | awk '{ print $2 }')
for pid in $PROCESS
do
  kill -9 "$pid"
done
echo 'http server exit'
<reponame>epeinado/api-language-resources
# -*- coding: utf-8 -*-
import requests
import json
import logging
from collections import defaultdict
class SolrService:
    """Thin client that aggregates sentiment/emotion fields from a Solr core.

    Python 2 code: uses print statements and the old `except E, e` syntax.
    """

    def __init__(self):
        # Solr search endpoint queried by both public methods.
        # NOTE(review): hard-coded host/core — confirm this is intentional
        # rather than configuration that should be injected.
        self.solr_url = "http://172.16.31.10:8983/solr/core0/search"

    def get_sentiment_emotions(self, term, domain):
        """Sum positive/negative scores and per-emotion counters over every
        document in `domain` whose title matches all words of `term`.

        Pages through the result set 100 documents at a time and returns
        {"positive": float, "negative": float, "emotions": defaultdict(int)}.
        """
        # Require every word of the search term to appear in the title.
        # (The comprehension variable intentionally reuses the name `term`.)
        term = ' AND '.join(["title:%s" % term for term in term.split()])
        result = {"positive":0.0, "negative":0.0, "emotions":defaultdict(int)}
        # First request fetches only the total hit count (rows=0).
        num_docs_response = requests.get(self.solr_url, params={'q': '*:*', 'rows':0, 'fq':['+(domain:%s)' % (domain), '+(%s)' % term]})
        response = json.loads(num_docs_response.text)
        num_docs = response.get('response', {}).get('numFound', 0)
        print num_docs
        current_doc = 0
        # Page through all matches, accumulating scores per page of 100.
        while current_doc < num_docs:
            logging.warn("Querying 100 documents, starting on %s/%s" % (current_doc, num_docs))
            params = {'q': '*:*', 'fq':['+(domain:%s)' % (domain), '+(%s)' % term], 'start':current_doc, 'rows':100, 'fl':'positive,negative,*_emo'}
            docs_response = requests.get(self.solr_url, params=params).text
            for document in json.loads(docs_response).get('response', {}).get('docs', []):
                result['positive'] += document['positive']
                result['negative'] += document['negative']
                # Any field ending in _emo is an emotion counter, e.g. joy_emo.
                for key in document.keys():
                    if key.endswith('_emo'):
                        emotion = key.replace('_emo', '')
                        result['emotions'][emotion] += document[key]
            current_doc += 100
        print result
        return result

    def get_normalized_sentiment_emotion(self, term, domain):
        """Return {"sentiment": float, "emotion": str|None} for term/domain.

        Sentiment is centred on 5.0 and shifted by the normalized
        positive-negative balance; emotion is the dominant emotion key,
        or None when no emotion counters were seen.
        """
        results = self.get_sentiment_emotions(term, domain)
        try:
            normalized_sentiment = 5.0 + (results['positive'] - results['negative']) / (results['positive'] + results['negative'])/2
        except ZeroDivisionError, e:
            # No scored documents at all: fall back to a neutral 0.0.
            normalized_sentiment = 0.0
        print normalized_sentiment
        emotions = results['emotions']
        if emotions:
            max_emotion = max(emotions, key=emotions.get)
        else:
            max_emotion = None
        return {"sentiment":normalized_sentiment, "emotion":max_emotion}
if __name__ == '__main__':
    # Manual smoke test: query a known term/domain pair and print the result.
    service = SolrService()
    print service.get_normalized_sentiment_emotion('hesperia nh', 'booking.com')
|
<reponame>Tribunal51/Hawiya<gh_stars>0
import Vue from 'vue';
import VueI18n from 'vue-i18n';
// Register the i18n plugin before constructing the VueI18n instance.
Vue.use(VueI18n);

// Shared i18n instance with English and Arabic message catalogues.
// English is both the default and the fallback locale.
export const i18n = new VueI18n({
  locale: 'en', // set locale
  fallbackLocale: 'en',
  messages: {
    en: {
      "loading": "Loading...",
      "All": "All",
      "Logo Design": "Logo Design",
      "Branding": "Branding",
      "Stationery": "Stationery",
      "Packaging": "Packaging",
      "Social Media": "Social Media",
      "Website": "Website",
      "Promotional": "Promotional"
    },
    ar: {
      "loading": "جار التحميل",
      "All": "الكل",
      "Logo Design": "تصميم شعار",
      "Branding": "العلامات التجارية",
      "Stationery": "ادوات مكتبيه",
      "Packaging": "التعبئة والتغليف",
      "Social Media": "وسائل التواصل الاجتماعي",
      "Website": "موقع الكتروني",
      "Promotional": "الترويجية"
    }
  }
});
// Map of Western Arabic (ASCII) digits to Eastern Arabic-Indic digit glyphs,
// used to localise numbers when the active locale is Arabic.
// Fix: declared with `const` instead of `let` — the table is never reassigned.
const dict = {
  0: '٠',
  1: '١',
  2: '٢',
  3: '٣',
  4: '٤',
  5: '٥',
  6: '٦',
  7: '٧',
  8: '٨',
  9: '٩'
};
// Localise a number for display: when the active locale is Arabic, convert
// every ASCII digit to its Eastern Arabic-Indic glyph; otherwise return the
// number unchanged.
// Fixes:
//  - removed a leftover debug console.log
//  - non-digit characters (e.g. '.', '-') previously mapped to undefined and
//    rendered as the string "undefined"; they now pass through unchanged
export const number = (number, self) =>
  self.$root.$i18n.locale === 'ar'
    ? number.toString().split('').map(ch => dict[ch] || ch).join('')
    : number;
// const convertNumber = number => {
// let dict = {
// 1: "١",
// 2: "٢",
// 3: "٣",
// 4: "٤",
// 5: "٥",
// 6: "٦",
// 7: "٧",
// 8: "٨",
// 9: "٩"
// };
// const newNumber = number
// .toString()
// .split("")
// .map(digit => {
// return dict[digit];
// }).join('');
// return newNumber;
// };
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.