text stringlengths 1 1.05M |
|---|
<filename>test/InvokeDynamic.java
/** Exercises invokedynamic-based lambdas capturing locals and instance fields. */
public class InvokeDynamic {
    private final int foo;

    private InvokeDynamic(int foo) {
        this.foo = foo;
    }

    /** Two-argument integer operation implemented below via lambdas. */
    private interface Operation {
        int operate(int a, int b);
    }

    /** Fails fast when a checked condition does not hold. */
    private static void expect(boolean v) {
        if (!v) {
            throw new RuntimeException();
        }
    }

    public static void main(String[] args) {
        final int c = 4;
        // Lambda capturing an effectively-final local variable.
        Operation op = (a, b) -> a + b - c;
        expect(op.operate(2, 3) == (2 + 3) - 4);
        for (int i = 0; i < 4; i++) {
            new InvokeDynamic(i).test();
        }
    }

    private void test() {
        final int c = 2;
        // Lambda capturing both a local and the instance field `foo`.
        Operation op = (a, b) -> ((a + b) * c) - foo;
        expect(op.operate(2, 3) == ((2 + 3) * 2) - foo);
    }
}
|
<reponame>TVilaboa/IO-Sim2
package model.simulation.strategies;
import model.simulation.Customer;
import model.simulation.Event;
import model.simulation.Simulation;
import model.simulation.mathematics.Mathematics;
import javax.validation.constraints.NotNull;
import static model.simulation.Customer.CustomerType.A;
import static model.simulation.Customer.CustomerType.B;
import static model.simulation.Event.EventType.SALIDA;
import static model.simulation.Event.Status.OCUPADO;
import static model.simulation.Event.Status.VACIO;
/**
* Created by franco on 21/06/2015.
*/
/**
 * Simulation strategy giving absolute priority to A customers with intolerance
 * and total abandonment: an arriving A evicts the B in service and every
 * queued B, while a B refuses to stay whenever an A is present in the system.
 * <p>
 * Created by franco on 21/06/2015.
 */
public class AbsolutePriorityIntoleranceTotalAbandonmentStrategy implements SimulationStrategy{

    /** Scheduled exit event of the B customer currently in service, if any, kept so it can be cancelled on eviction. */
    private Event possibleBExitEvent;

    @Override
    public void handleArrival(@NotNull Event event, @NotNull Simulation simulation) {
        addEventCustomer(simulation, event);
        attend(event, simulation);
        //Set queue length at arrival event
        event.queueLength(simulation.getQueueLength() + simulation.getPriorityQueueLength());
        event.attentionChanelStatus(OCUPADO);
        event.setQueueALength(simulation.getALength());
        if (simulation.getCurrentCustomer() != null)
            event.setAttentionChannelCustomer(simulation.getCurrentCustomer().getType());
    }

    @Override
    public void handleDeparture(@NotNull Event event, @NotNull Simulation simulation) {
        simulation.setCurrentCustomer(null);
        attend(event, simulation);
        //Set client's presence within the system
        event.getCustomer().setPermanence(event.getInitTime() - event.getCustomer().getArrivalTime());
        //Set queue length at exit event
        event.queueLength(simulation.getQueueLength() + simulation.getPriorityQueueLength());
        event.attentionChanelStatus(simulation.getCurrentCustomer() == null ? VACIO : OCUPADO);
        event.setQueueALength(simulation.getALength());
        if (simulation.getCurrentCustomer() != null)
            event.setAttentionChannelCustomer(simulation.getCurrentCustomer().getType());
    }

    @Override
    public void handleInitiation(@NotNull Event event, @NotNull Simulation simulation) {
        simulation.absolutePriority();
        event.queueLength(0).attentionChanelStatus(VACIO);
    }

    /**
     * Adds a customer to a priority queue if is an A or adds it to the regular queue if is a B
     *
     * @param simulation simulation
     * @param event event
     */
    private void addEventCustomer(Simulation simulation, Event event) {
        //Sets event's & current customer
        final Customer customer = event.getCustomer();
        final Customer current = simulation.getCurrentCustomer();
        //When current customer IS null
        if(current == null){
            if(customer.getType() == A){
                //Remove all B type customers queued
                if(!simulation.isQueueEmpty())
                    removeQueuedCustomers(event, simulation);
                simulation.addCustomerToPriorityQueue(customer);
            }
            else{
                // A B customer abandons immediately when any A is already waiting.
                if(!simulation.isPriorityQueueEmpty()){
                    customer.setPermanence(0).interrupted();
                    simulation.addEventAndSort(new Event(SALIDA, customer, event.getInitTime(), true).comment("Left because A in queue"));
                }
                else
                    simulation.addCustomertoQueue(customer);
            }
        }
        //When current customer IS NOT null
        else{
            if(current.getType() == A){
                if(customer.getType() == A)
                    simulation.addCustomerToPriorityQueue(customer);
                else{
                    // B arriving while an A is in service: total abandonment.
                    customer.setPermanence(0).interrupted();
                    simulation.addEventAndSort(new Event(SALIDA,customer,event.getInitTime(),true).comment("Left because A in System"));
                }
            }
            else{
                if(customer.getType() == A){
                    //Remove current B typed customer (cancel its scheduled exit first)
                    simulation.removeEvent(possibleBExitEvent);
                    current.interrupted();
                    simulation.addEventAndSort(new Event(SALIDA, current, event.getInitTime(), true).comment("Left because A entered"));
                    //Remove all B typed customers queued
                    removeQueuedCustomers(event,simulation);
                    //Add A typed customer
                    simulation.addCustomerToPriorityQueue(customer);
                }
                else
                    simulation.addCustomertoQueue(customer);
            }
        }
    }

    /**
     * Starts service for the next customer (priority queue first) when the
     * channel is idle.
     *
     * @param event event
     * @param simulation simulation
     */
    private void attend(final Event event, final Simulation simulation) {
        if(simulation.getCurrentCustomer() == null) {
            final Customer priorityCustomer = simulation.peekPriorityQueue();
            //Check if there is an A type customer
            if (priorityCustomer != null)
                attendThis(simulation, event, simulation.pollPriorityQueue());
            //If there are no A type customers, check for a regular customer and attend him
            else {
                final Customer regularCustomer = simulation.peekCustomerQueue();
                if (regularCustomer != null)
                    attendThis(simulation, event, simulation.pollCustomerQueue());
            }
        }
    }

    /**
     * Attends a customer
     *
     * @param simulation simulation
     * @param event event
     * @param nextCustomer customer
     */
    private void attendThis(Simulation simulation, Event event, Customer nextCustomer) {
        nextCustomer.waitTime(event.getInitTime() - nextCustomer.getArrivalTime());
        final Customer.CustomerType type = nextCustomer.getType();
        event.attentionChanelStatus(OCUPADO);
        final double mu = Mathematics.getDurationChannel(type == A ? simulation.getMuA() : simulation.getMuB());
        final Event exitEvent = new Event(SALIDA, nextCustomer, event.getInitTime() + mu, false);
        // Remember the exit event of a B in service so an arriving A can cancel it.
        if (type == B)
            possibleBExitEvent = exitEvent;
        simulation.addEventAndSort(exitEvent);
        simulation.setCurrentCustomer(nextCustomer);
    }

    /**
     * Drains the regular (B) queue, emitting an immediate abandonment exit
     * event for every queued customer.
     * <p>
     * Fix: the previous loop used {@code i < simulation.getQueueLength()} as
     * its bound while polling, so the shrinking queue caused only about half
     * of the queued customers to be removed.
     */
    private void removeQueuedCustomers(Event event, Simulation simulation){
        while (!simulation.isQueueEmpty()) {
            final Customer c = simulation.pollCustomerQueue();
            if (c == null)
                break; // defensive: avoid looping forever on an inconsistent queue
            c.interrupted();
            simulation.addEventAndSort(new Event(SALIDA, c, event.getInitTime(), true).comment("Left because A entered"));
        }
    }
}
|
import { readFileSync } from 'fs';
import { join } from 'path';
import { buildAll, buildPkg } from './buildIt';
import { makeHash } from './makeHash';
/** Reads and parses a JSON file; yields `undefined` when it is missing or invalid. */
export const readJson = (path) => {
  try {
    const raw = readFileSync(path).toString();
    return JSON.parse(raw);
  } catch {
    // Read/parse errors are swallowed on purpose; callers treat `undefined` as "not present".
    return undefined;
  }
};
// Workspace root: the directory this script was launched from.
export const folder = process.cwd();
/**
 * Builds every project in the Nx workspace, then returns release metadata
 * (project name, root, dist folder, npm package name, version, content hash)
 * for each publishable package — i.e. each whose package.json name is scoped
 * with `@`. Projects that fail (apps without a package.json etc.) resolve to
 * `undefined` and are filtered out.
 */
export async function getPublishableProjects(): Promise<ReleaseData[]> {
  /** make sure there are fresh packages */
  await buildAll().catch((e) => {
    // Surface the build failure before aborting; a silent exit(15) is hard to debug.
    console.error('buildAll failed:', e);
    process.exit(15);
  });
  const workspace = readJson(join(folder, 'workspace.json'));
  const publishableProjects = Object.entries(workspace.projects)
    .map(([name, val]: [string, any]) => ({
      name,
      root: val.root,
      dist: val.architect.build.options.outputPath,
    }))
    .map((row) => {
      /** some projects in the workspace dont have an outfolder, add it here. */
      switch (row.name) {
        case 'ng-lib':
          return { ...row, dist: 'dist/libs/ng-lib' };
        default:
          return row;
      }
    })
    .map(async (project) => {
      try {
        const locOrg = join(folder, './', project.root, 'package.json');
        const { name, version } = readJson(locOrg);
        // we are only going to handle publishable packages
        if (name.startsWith('@')) {
          /** create an hash from the freshly build project */
          const { hash } = await makeHash(join(folder, './', project.dist));
          return { ...project, pkg: name, version, hash };
        }
      } catch (e) {
        // the apps don't follow the flow, and will error out as expected
        return undefined;
      }
    });
  /** wait until all projects are built, and filter out undefined ones */
  const builtProjects = (await Promise.all(publishableProjects)).filter((row) => !!row);
  return builtProjects;
}
/** Release metadata for one publishable workspace project. */
export interface ReleaseData {
  /** Nx project name. */
  name: string;
  /** Project source root, relative to the workspace. */
  root: string;
  /** Build output folder, relative to the workspace. */
  dist: string;
  /** npm package name from the project's package.json (scoped, e.g. `@org/pkg`). */
  pkg: string;
  /** Version from the project's package.json. */
  version: string;
  /** Content hash of the freshly built output. */
  hash: string;
}
|
<gh_stars>0
import { DIMENSION_ID_DATA } from '@dhis2/analytics'
import {
clickDimensionModalHideButton,
expectDimensionModalToContain,
expectDimensionModalToNotBeVisible,
unselectItemByDoubleClick,
selectItemByDoubleClick,
expectDataDimensionModalToBeVisible,
expectDataItemToBeSelected,
expectDataTypeToBe,
expectGroupSelectToNotBeVisible,
expectNoDataItemsToBeSelected,
selectAllDataItems,
unselectAllDataItems,
expectDataItemsSelectedAmountToBeLeast,
expectDataItemsSelectedAmountToBe,
expectDataItemToBeSelectable,
expectDataItemsSelectableAmountToBe,
inputSearchTerm,
switchDataTypeTo,
clearSearchTerm,
expectDataItemsSelectableAmountToBeLeast,
expectGroupSelectToBeVisible,
switchGroupTo,
selectFirstDataItem,
expectGroupSelectToBe,
expectEmptySourceMessageToBe,
switchGroupToAll,
switchDataTypeToAll,
scrollSourceToBottom,
unselectItemByButton,
selectItemByButton,
expectSubGroupSelectToBeVisible,
expectSubGroupSelectToBe,
switchSubGroupTo,
} from '../elements/dimensionModal'
import { openDimension } from '../elements/dimensionsPanel'
import { goToStartPage } from '../elements/startScreen'
import {
TEST_DATA_ELEMENTS,
TEST_DATA_SETS,
TEST_INDICATORS,
} from '../utils/data'
const PAGE_SIZE = 50
// End-to-end tests for the Data dimension modal: initial state, select-all +
// paging, global search, and per-data-type group/sub-group behavior.
// NOTE: the `it` blocks within each `describe` are order-dependent — each
// step builds on the modal state left by the previous one.
describe('Data dimension', () => {
    describe('initial state', () => {
        it('navigates to the start page', () => {
            goToStartPage()
        })
        it('opens the data dimension modal', () => {
            cy.intercept('GET', '/dataItems').as('dataItems')
            openDimension(DIMENSION_ID_DATA)
            // First page of data items is fetched when the modal opens.
            cy.wait('@dataItems').then(({ request, response }) => {
                expect(request.url).to.contain('page=1')
                expect(response.statusCode).to.eq(200)
                expect(response.body.dataItems.length).to.eq(PAGE_SIZE)
            })
            expectDataDimensionModalToBeVisible()
        })
        it('modal has a title', () => {
            expectDimensionModalToContain('Data')
        })
        it('no items are selected', () => {
            expectNoDataItemsToBeSelected()
        })
        it("data type is 'All'", () => {
            expectDataTypeToBe('All')
        })
        it('group select is not visible', () => {
            expectGroupSelectToNotBeVisible()
        })
        const firstPageItemName = TEST_INDICATORS[0].name
        it('an item can be selected by double click', () => {
            selectItemByDoubleClick(firstPageItemName)
            expectDataItemToBeSelected(firstPageItemName)
        })
        it('an item can be unselected by double click', () => {
            unselectItemByDoubleClick(firstPageItemName)
            expectNoDataItemsToBeSelected()
        })
        it('an item can be selected by button', () => {
            selectItemByButton(firstPageItemName)
            expectDataItemToBeSelected(firstPageItemName)
        })
        it('an item can be unselected by button', () => {
            unselectItemByButton(firstPageItemName)
            expectNoDataItemsToBeSelected()
        })
    })
    describe('selecting all and fetching more', () => {
        const secondPageItemName = 'BCG doses'
        it('all items can be selected', () => {
            cy.intercept('GET', '/dataItems').as('dataItems')
            selectAllDataItems()
            expectDataItemsSelectedAmountToBeLeast(PAGE_SIZE)
            // Selecting all triggers fetching the next page of source items.
            cy.wait('@dataItems').then(({ request, response }) => {
                expect(request.url).to.contain('page=2')
                expect(response.statusCode).to.eq(200)
                expect(response.body.dataItems.length).to.eq(PAGE_SIZE)
            })
        })
        it('more items are fetched', () => {
            expectDataItemsSelectableAmountToBeLeast(PAGE_SIZE)
            expectDataItemToBeSelectable(secondPageItemName)
        })
        it('all items can be unselected', () => {
            unselectAllDataItems()
            expectNoDataItemsToBeSelected()
        })
        it('more items are fetched when scrolling down', () => {
            cy.intercept('GET', '/dataItems').as('request')
            scrollSourceToBottom()
            cy.wait('@request').then(({ request, response }) => {
                expect(request.url).to.contain('page=3')
                expect(response.statusCode).to.eq(200)
                expect(response.body.dataItems.length).to.eq(PAGE_SIZE)
            })
            expectDataItemsSelectableAmountToBeLeast(PAGE_SIZE * 3)
        })
    })
    describe('global search', () => {
        // NOTE(review): '<PASSWORD>' looks like a redaction artifact from a
        // secret scrubber, not a real search term — restore the original data
        // element name (it must match exactly one data element).
        const testSearchTerm = 'Dis<PASSWORD>' // Use a data element for the third step to work
        it('recieves a search term', () => inputSearchTerm(testSearchTerm))
        // TODO: Test that the search is only called once, i.e. debounce works
        it('search result is displayed', () => {
            expectDataItemsSelectableAmountToBe(1)
            expectDataItemToBeSelectable(testSearchTerm)
        })
        it('search result is maintained when switching data type', () => {
            switchDataTypeTo('Data elements')
            expectDataItemsSelectableAmountToBe(1)
            expectDataItemToBeSelectable(testSearchTerm)
            clearSearchTerm()
            switchDataTypeToAll()
            expectDataItemsSelectableAmountToBeLeast(PAGE_SIZE)
        })
        it('search displays a correct error message', () => {
            const testSearchTermWithNoMatch = 'nomatch'
            inputSearchTerm(testSearchTermWithNoMatch)
            expectEmptySourceMessageToBe(
                `Nothing found for "${testSearchTermWithNoMatch}"`
            )
        })
        it('search result can be cleared', () => {
            clearSearchTerm()
            expectDataItemsSelectableAmountToBeLeast(PAGE_SIZE)
        })
        it('modal is closed', () => {
            clickDimensionModalHideButton()
            expectDimensionModalToNotBeVisible()
        })
    })
    // Fixture matrix: one entry per data type, with the group/sub-group and
    // item used by the parameterized suite below.
    const testDataTypes = [
        {
            name: 'Indicators',
            testGroup: { name: 'Facility infrastructure', itemAmount: 3 },
            testItem: { name: TEST_INDICATORS[2].name },
            defaultGroup: { name: 'All groups' },
            endpoint: {
                hasMultiplePages: true,
                requestUrl: '/indicators',
                responseBody: 'indicators',
            },
        },
        {
            name: 'Data elements',
            testGroup: { name: 'Measles', itemAmount: 3 },
            testSubGroup: { name: 'Details only', itemAmount: '10' },
            testItem: { name: TEST_DATA_ELEMENTS[2].name },
            defaultGroup: { name: 'All groups' },
            defaultSubGroup: { name: 'Totals only' },
            endpoint: {
                hasMultiplePages: true,
                requestUrl: '/dataElements',
                responseBody: 'dataElements',
            },
        },
        {
            name: 'Data sets',
            testGroup: { name: 'Child Health', itemAmount: 5 },
            testSubGroup: { name: 'Actual reports', itemAmount: 1 },
            testItem: { name: TEST_DATA_SETS[2].name },
            defaultGroup: { name: 'All data sets' },
            defaultSubGroup: { name: 'All metrics' },
            endpoint: {
                hasMultiplePages: false,
                requestUrl: '/dataSets',
                responseBody: 'dataSets',
            },
        },
        {
            name: 'Event data items',
            testGroup: { name: 'Information Campaign', itemAmount: 6 },
            testItem: { name: 'Diagnosis (ICD-10)' },
            defaultGroup: { name: 'All programs' },
            endpoint: {
                hasMultiplePages: true,
                requestUrl: '/dataItems',
                responseBody: 'dataItems',
            },
        },
        {
            name: 'Program indicators',
            testGroup: { name: 'Malaria focus investigation', itemAmount: 6 },
            testItem: { name: 'BMI male' },
            defaultGroup: { name: 'All programs' },
            endpoint: {
                hasMultiplePages: true,
                requestUrl: '/dataItems',
                responseBody: 'dataItems',
            },
        },
    ]
    testDataTypes.forEach(testDataType => {
        describe(`${testDataType.name}`, () => {
            it('opens the data dimension modal', () => {
                openDimension(DIMENSION_ID_DATA)
                expectDataDimensionModalToBeVisible()
            })
            it(`switches to ${testDataType.name}`, () => {
                switchDataTypeTo(testDataType.name)
                expectDataItemsSelectableAmountToBeLeast(PAGE_SIZE)
            })
            it('group select is visible', () => {
                expectGroupSelectToBeVisible()
                expectGroupSelectToBe(testDataType.defaultGroup.name)
            })
            // Paging only applies to endpoints that return multiple pages.
            if (testDataType.endpoint.hasMultiplePages) {
                it('more items are fetched when scrolling down', () => {
                    cy.intercept('GET', testDataType.endpoint.requestUrl).as(
                        'request'
                    )
                    scrollSourceToBottom()
                    cy.wait('@request').then(({ request, response }) => {
                        expect(request.url).to.contain('page=2')
                        expect(response.statusCode).to.eq(200)
                        expect(
                            response.body[testDataType.endpoint.responseBody]
                                .length
                        ).to.be.least(1)
                    })
                    expectDataItemsSelectableAmountToBeLeast(PAGE_SIZE + 1)
                })
            }
            it('an item can be selected', () => {
                selectItemByDoubleClick(testDataType.testItem.name)
                expectDataItemToBeSelected(testDataType.testItem.name)
            })
            it(`group can be changed to "${testDataType.testGroup.name}"`, () => {
                switchGroupTo(testDataType.testGroup.name)
                expectGroupSelectToBe(testDataType.testGroup.name)
                expectDataItemsSelectableAmountToBe(
                    testDataType.testGroup.itemAmount
                )
                expectDataItemToBeSelected(testDataType.testItem.name)
            })
            it('the first item can be selected', () => {
                selectFirstDataItem()
                expectDataItemsSelectedAmountToBe(2)
                expectDataItemsSelectableAmountToBe(
                    testDataType.testGroup.itemAmount - 1
                )
            })
            // Sub-groups exist only for data elements and data sets.
            if (['Data elements', 'Data sets'].includes(testDataType.name)) {
                it('sub group select is visible', () => {
                    expectSubGroupSelectToBeVisible()
                    expectSubGroupSelectToBe(testDataType.defaultSubGroup.name)
                })
                it(`sub group can be changed to "${testDataType.testSubGroup.name}"`, () => {
                    switchSubGroupTo(testDataType.testSubGroup.name)
                    expectSubGroupSelectToBe(testDataType.testSubGroup.name)
                    expectDataItemsSelectableAmountToBe(
                        testDataType.testSubGroup.itemAmount
                    )
                    expectDataItemToBeSelected(testDataType.testItem.name)
                })
                it(`sub group can be changed back to "${testDataType.defaultSubGroup.name}"`, () => {
                    switchSubGroupTo(testDataType.defaultSubGroup.name)
                    expectSubGroupSelectToBe(testDataType.defaultSubGroup.name)
                    expectDataItemsSelectableAmountToBe(
                        testDataType.testGroup.itemAmount - 1
                    )
                    expectDataItemToBeSelected(testDataType.testItem.name)
                })
            }
            it('search displays a correct error message', () => {
                const testSearchTermWithNoMatch = 'nomatch'
                inputSearchTerm(testSearchTermWithNoMatch)
                expectEmptySourceMessageToBe(
                    `No ${testDataType.name.toLowerCase()} found for "${testSearchTermWithNoMatch}"`
                )
            })
            it('selection and filter can be reset', () => {
                unselectAllDataItems()
                expectNoDataItemsToBeSelected()
                clearSearchTerm()
                expectDataItemsSelectableAmountToBe(
                    testDataType.testGroup.itemAmount
                )
                switchGroupToAll()
                expectDataItemsSelectableAmountToBeLeast(PAGE_SIZE)
                switchDataTypeToAll()
                expectDataItemsSelectableAmountToBeLeast(PAGE_SIZE)
            })
            it('modal is closed', () => {
                clickDimensionModalHideButton()
                expectDimensionModalToNotBeVisible()
            })
        })
    })
})
|
-- Creating tables for survey questions and answers
CREATE TABLE survey_questions(
    question_id INT AUTO_INCREMENT PRIMARY KEY,
    question_text TEXT NOT NULL
);
CREATE TABLE survey_answers(
    answer_id INT AUTO_INCREMENT PRIMARY KEY,
    answer_text TEXT NOT NULL
);
-- Create a table to store the survey results
-- (one row per answered question; the FKs keep results consistent with the
-- question/answer catalogs above)
CREATE TABLE survey_results(
    result_id INT AUTO_INCREMENT PRIMARY KEY,
    question_id INT,
    answer_id INT,
    FOREIGN KEY (question_id) REFERENCES survey_questions(question_id),
    FOREIGN KEY (answer_id) REFERENCES survey_answers(answer_id)
);
-- Insert survey questions into the survey_questions table
INSERT INTO survey_questions(question_text) VALUES
('What is your favorite color?'),
('Do you prefer cats or dogs?'),
('Do you have any hobbies?'),
('Do you have any special skills?');
-- Insert survey answers into the survey_answers table
INSERT INTO survey_answers(answer_text) VALUES
('Red'),
('Blue'),
('Green'),
('Cats'),
('Dogs'),
('Piano'),
('Painting'),
('Programming'),
('Writing');
-- Insert survey results into the survey_results table
-- (maps to: Q1 -> 'Blue', Q2 -> 'Cats', Q3 -> 'Painting', Q4 -> 'Programming')
INSERT INTO survey_results(question_id, answer_id) VALUES
(1, 2),
(2, 4),
(3, 7),
(4, 8);
<reponame>Digital-Repository-of-Ireland/hydra-derivatives
require 'mini_magick'
require 'nokogiri'
module Hydra::Derivatives::Processors
  # Converts a source image to JPEG 2000 (JP2): preprocesses with
  # MiniMagick/ImageMagick (resize, sRGB profile) and encodes with Kakadu's
  # kdu_compress using a configured or calculated recipe.
  class Jpeg2kImage < Processor
    include ShellBasedProcessor

    class << self
      # Absolute path to the bundled sRGB ICC profile applied when converting
      # color images to sRGB.
      def srgb_profile_path
        File.join [
          File.expand_path('../../../../', __FILE__),
          'color_profiles',
          'sRGB_IEC61966-2-1_no_black_scaling.icc'
        ]
      end

      # Resolves the kdu_compress argument string. A Symbol recipe is looked
      # up as "<recipe>_<quality>" in the configured recipes (falling back to
      # a calculated recipe with a warning); a String is used verbatim;
      # anything else triggers calculation.
      def kdu_compress_recipe(args, quality, long_dim)
        if args[:recipe].is_a? Symbol
          recipe = [args[:recipe].to_s, quality].join('_').to_sym
          return Hydra::Derivatives.kdu_compress_recipes[recipe] if Hydra::Derivatives.kdu_compress_recipes.key? recipe
          ActiveFedora::Base.logger.warn "No JP2 recipe for :#{args[:recipe]} ('#{recipe}') found in configuration. Using best guess."
          calculate_recipe(args, quality, long_dim)
        elsif args[:recipe].is_a? String
          args[:recipe]
        else
          calculate_recipe(args, quality, long_dim)
        end
      end

      # Builds a kdu_compress argument string from defaults/overrides:
      # resolution levels, layer rates, tile size, and color space
      # (sLUM for grayscale, sRGB otherwise).
      def calculate_recipe(args, quality, long_dim)
        levels_arg = args.fetch(:levels, level_count_for_size(long_dim))
        rates_arg = layer_rates(args.fetch(:layers, 8), args.fetch(:compression, 10))
        tile_size = args.fetch(:tile_size, 1024)
        tiles_arg = "#{tile_size},#{tile_size}"
        jp2_space_arg = quality == 'gray' ? 'sLUM' : 'sRGB'
        # The heredoc-style literal is collapsed to a single space-separated
        # command-line fragment.
        %(-rate #{rates_arg}
          -jp2_space #{jp2_space_arg}
          -double_buffering 10
          -num_threads 4
          -no_weights
          Clevels=#{levels_arg}
          "Stiles={#{tiles_arg}}"
          "Cblk={64,64}"
          Cuse_sop=yes
          Cuse_eph=yes
          Corder=RPCL
          ORGgen_plt=yes
          ORGtparts=R ).gsub(/\s+/, " ").strip
      end

      # Number of resolution levels: halve the long dimension until it drops
      # below 96 px, minus one.
      def level_count_for_size(long_dim)
        levels = 0
        level_size = long_dim
        while level_size >= 96
          level_size /= 2
          levels += 1
        end
        levels - 1
      end

      # Comma-separated bit rates for the quality layers, starting from
      # 24/compression_numerator and decaying geometrically (by ~1/1.618).
      def layer_rates(layer_count, compression_numerator)
        # e.g. if compression_numerator = 10 then compression is 10:1
        rates = []
        cmp = 24.0 / compression_numerator
        layer_count.times do
          rates << cmp
          cmp = (cmp / 1.618).round(8)
        end
        rates.map(&:to_s).join(',')
      end

      # Runs kdu_compress on +path+, writing to +output_file+.
      # NOTE(review): only +path+ is shell-escaped; +output_file+ and +recipe+
      # are interpolated verbatim — confirm they can never contain shell
      # metacharacters (output_file comes from tmp_file, recipe from config).
      def encode(path, recipe, output_file)
        kdu_compress = Hydra::Derivatives.kdu_compress_path
        execute "#{kdu_compress} -quiet -i #{Shellwords.escape(path)} -o #{output_file} #{recipe}"
      end

      # Reserves a unique temp-file name with the given extension (the empty
      # block means the file itself is not created here).
      def tmp_file(ext)
        Dir::Tmpname.create(['sufia', ext], Hydra::Derivatives.temp_file_base) {}
      end

      # Longest image dimension, used to size resolution levels.
      def long_dim(image)
        [image[:width], image[:height]].max
      end
    end

    # Full pipeline: detect grayscale vs color, optionally resize/convert to
    # sRGB, write a temporary TIFF, encode it to JP2, then clean up.
    def process
      image = MiniMagick::Image.open(source_path)
      quality = image['%[channels]'] == 'gray' ? 'gray' : 'color'
      long_dim = self.class.long_dim(image)
      file_path = self.class.tmp_file('.tif')
      to_srgb = directives.fetch(:to_srgb, true)
      if directives[:resize] || to_srgb
        preprocess(image, resize: directives[:resize], to_srgb: to_srgb, src_quality: quality)
      end
      image.write file_path
      recipe = self.class.kdu_compress_recipe(directives, quality, long_dim)
      encode_file(recipe, file_path: file_path)
      File.unlink(file_path) unless file_path.nil?
    end

    # Encodes the intermediate file to a temp JP2, hands the result to the
    # output file service, and removes the temp JP2.
    def encode_file(recipe, file_path:)
      output_file = self.class.tmp_file('.jp2')
      self.class.encode(file_path, recipe, output_file)
      output_file_service.call(File.open(output_file, 'rb'), directives)
      File.unlink(output_file)
    end

    protected

    # Applies resize and (for color sources) sRGB profile conversion in one
    # ImageMagick invocation.
    def preprocess(image, opts = {})
      # resize: <geometry>, to_srgb: <bool>, src_quality: 'color'|'gray'
      image.combine_options do |c|
        c.resize(opts[:resize]) if opts[:resize]
        c.profile self.class.srgb_profile_path if opts[:src_quality] == 'color' && opts[:to_srgb]
      end
      image
    end
  end
end
|
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
/**
 * A {@link JPanel} that blocks externally added mouse listeners: the listener
 * registered in the constructor goes through {@code super.addMouseListener},
 * while the public {@link #addMouseListener} override silently discards any
 * listener client code tries to add. An ENTER key binding is installed via
 * the input/action maps.
 */
public class CustomPanel extends JPanel {

    public CustomPanel() {
        // Register directly on the superclass: the local override below is a no-op.
        super.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mousePressed(java.awt.event.MouseEvent evt) {
                // Handle mousePressed event
            }
        });
        // Replace the UIActionMap for the keyboard event
        Action customAction = new AbstractAction() {
            public void actionPerformed(ActionEvent e) {
                // Handle custom keyboard event
            }
        };
        getInputMap(WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), "customAction");
        getActionMap().put("customAction", customAction);
    }

    /** Intentionally ignores all externally added mouse listeners. */
    @Override
    public void addMouseListener(java.awt.event.MouseListener l) {
        // Do nothing
    }

    public static void main(String[] args) {
        // Fix: Swing components must be created and shown on the Event
        // Dispatch Thread; the previous version built the UI on the main thread.
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("Custom Panel Example");
            CustomPanel customPanel = new CustomPanel();
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.add(customPanel);
            frame.setSize(300, 200);
            frame.setVisible(true);
        });
    }
}
<gh_stars>0
package Adapter.adapter3;
/**
 * {@link GreatCar} implementation that drives and stops autonomously,
 * reporting each action on stdout.
 */
public class Tesla implements GreatCar {

    /** Console notice printed when autonomous driving starts. */
    private static final String AUTO_DRIVE_MESSAGE = "자동으로 운전을 시작합니다...";

    /** Console notice printed when the car stops. */
    private static final String STOP_MESSAGE = "그냥 멈춥니다...";

    @Override
    public void autoDrive() {
        System.out.println(AUTO_DRIVE_MESSAGE);
    }

    @Override
    public void stop() {
        System.out.println(STOP_MESSAGE);
    }
}
|
# Remove any previous build output; -f keeps the script from failing (or
# printing an error) when the binary does not exist yet.
rm -f ./test.bin
# Build the SEH test binary from the test drivers, runtime sources, and fmt,
# wiring in the custom logger header and SEH-to-C++-exception translation.
g++ ../main.cpp ../test_seh.cpp ../../3rdparty/fmt/src/format.cc ../../src/log.cpp ../../src/nodecpp_assert.cpp ../../src/page_allocator.cpp ../../src/cpu_exceptions_translator.cpp ../../src/std_error.cpp ../samples/file_error.cpp ../../src/safe_memory_error.cpp ../../src/tagged_ptr_impl.cpp -I../../include -I../../3rdparty/fmt/include -DNODECPP_CUSTOM_LOG_PROCESSING="\"../test/my_logger.h\"" -std=c++17 -Wall -lpthread -fexceptions -fnon-call-exceptions -o test.bin
#!/bin/sh
# Use sudo only when it exists and we are not already root.
if ! which sudo > /dev/null 2>&1 || [ "$(id -u)" = 0 ]; then
    SUDO=${SUDO:-""};
else
    SUDO=${SUDO:-sudo};
fi
# Source CircleCI's BASH_ENV so exported parameters/templates are visible here.
if [ -e "$BASH_ENV" ]; then
    . $BASH_ENV
fi
# Shared locations: log directory, jq install target, log file name.
LOG_PATH=/tmp/slack-orb/logs
JQ_PATH=/usr/local/bin/jq
POST_TO_SLACK_LOG=post-to-slack.json
# Builds SLACK_MSG_BODY from either the custom JSON template
# (SLACK_PARAM_CUSTOM) or a named pre-included template
# (SLACK_PARAM_TEMPLATE); errors out when neither is set.
BuildMessageBody() {
    # Send message
    # If sending message, default to custom template,
    # if none is supplied, check for a pre-selected template value.
    # If none, error.
    if [ -n "${SLACK_PARAM_CUSTOM:-}" ]; then
        ModifyCustomTemplate
        # Escape backslashes, double quotes and backticks before the eval-based
        # variable expansion below.
        # shellcheck disable=SC2016
        CUSTOM_BODY_MODIFIED=$(echo "$CUSTOM_BODY_MODIFIED" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g' | sed 's/`/\\`/g')
        T2=$(eval echo \""$CUSTOM_BODY_MODIFIED"\")
    elif [ -n "${SLACK_PARAM_TEMPLATE:-}" ]; then
        TEMPLATE="\$$SLACK_PARAM_TEMPLATE"
        # shellcheck disable=SC2016
        T1=$(eval echo "$TEMPLATE" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g' | sed 's/`/\\`/g')
        T2=$(eval echo \""$T1"\")
    else
        echo "Error: No message template selected."
        echo "Select either a custom template or one of the pre-included ones via the 'custom' or 'template' parameters."
        exit 1
    fi
    # Insert the default channel. THIS IS TEMPORARY
    T2=$(echo "$T2" | jq ". + {\"channel\": \"$SLACK_DEFAULT_CHANNEL\"}")
    SLACK_MSG_BODY=$T2
}
# Posts SLACK_MSG_BODY to each channel in the comma-separated
# SLACK_PARAM_CHANNEL list, logging every message/response pair and failing
# (unless SLACK_PARAM_IGNORE_ERRORS) when the Slack API returns an error.
PostToSlack() {
    # Post once per channel listed by the channel parameter
    # The channel must be modified in SLACK_MSG_BODY
    echo ""
    echo "-----------------------"
    echo ""
    echo "Values:"
    echo ""
    echo "Template name: ${SLACK_PARAM_TEMPLATE}"
    echo "BASH ENV content:"
    echo "$(cat $BASH_ENV)"
    echo ""
    echo "Pre-Source:"
    echo ""
    echo "Mentions: ${SLACK_PARAM_MENTIONS}"
    echo "Template content:"
    eval echo \$"$SLACK_PARAM_TEMPLATE"
    echo ""
    . $BASH_ENV
    echo "Post-Source:"
    echo ""
    echo "Mentions: ${SLACK_PARAM_MENTIONS}"
    echo "Template content:"
    eval echo \$"$SLACK_PARAM_TEMPLATE"
    echo ""
    echo "-----------------------"
    echo ""
    # shellcheck disable=SC2001
    for i in $(eval echo \""$SLACK_PARAM_CHANNEL"\" | sed "s/,/ /g")
    do
        echo "Sending to Slack Channel: $i"
        SLACK_MSG_BODY=$(echo "$SLACK_MSG_BODY" | jq --arg channel "$i" '.channel = $channel')
        if [ -n "${SLACK_PARAM_DEBUG:-}" ]; then
            echo "The message body being sent to Slack is: $SLACK_MSG_BODY"
        fi
        SLACK_SENT_RESPONSE=$(curl -s -f -X POST -H 'Content-type: application/json' -H "Authorization: Bearer $SLACK_ACCESS_TOKEN" --data "$SLACK_MSG_BODY" https://slack.com/api/chat.postMessage)
        # Fix: the previous pipeline `cat FILE | jq ... | tee FILE` truncated
        # the log file while jq was still reading it, corrupting/emptying the
        # log. Read the file with jq first, then write the result back.
        UPDATED_LOG=$(jq --argjson message "$SLACK_MSG_BODY" --argjson response "$SLACK_SENT_RESPONSE" \
            '. += [{"slackMessageBody": $message, "slackSentResponse": $response}]' "$LOG_PATH/$POST_TO_SLACK_LOG")
        echo "$UPDATED_LOG" | $SUDO tee "$LOG_PATH/$POST_TO_SLACK_LOG"
        if [ -n "${SLACK_PARAM_DEBUG:-}" ]; then
            echo "The response from the API call to slack is : $SLACK_SENT_RESPONSE"
        fi
        SLACK_ERROR_MSG=$(echo "$SLACK_SENT_RESPONSE" | jq '.error')
        if [ ! "$SLACK_ERROR_MSG" = "null" ]; then
            echo "Slack API returned an error message:"
            echo "$SLACK_ERROR_MSG"
            echo
            echo
            echo "View the Setup Guide: https://github.com/CircleCI-Public/slack-orb/wiki/Setup"
            if [ "$SLACK_PARAM_IGNORE_ERRORS" = "0" ]; then
                exit 1
            fi
        fi
    done
}
# Ensures the custom block-kit JSON has a top-level "text" field (the Slack
# API requires it); result is left in CUSTOM_BODY_MODIFIED.
ModifyCustomTemplate() {
    # Inserts the required "text" field to the custom json template from block kit builder.
    if [ "$(echo "$SLACK_PARAM_CUSTOM" | jq '.text')" = "null" ]; then
        CUSTOM_BODY_MODIFIED=$(echo "$SLACK_PARAM_CUSTOM" | jq '. + {"text": ""}')
    else
        # In case the text field was set manually.
        CUSTOM_BODY_MODIFIED=$(echo "$SLACK_PARAM_CUSTOM" | jq '.')
    fi
}
# Installs jq 1.6 when curl is present and jq is missing; otherwise just
# verifies both required tools exist (exiting 1 with a message if not).
InstallJq() {
    echo "Checking For JQ + CURL"
    if command -v curl >/dev/null 2>&1 && ! command -v jq >/dev/null 2>&1; then
        # NOTE(review): jq-linux32 is the 32-bit binary — confirm it runs on
        # 64-bit executors (jq-linux64 may be the intended artifact).
        uname -a | grep Darwin > /dev/null 2>&1 && JQ_VERSION=jq-osx-amd64 || JQ_VERSION=jq-linux32
        curl -Ls -o $JQ_PATH https://github.com/stedolan/jq/releases/download/jq-1.6/${JQ_VERSION}
        chmod +x $JQ_PATH
        command -v jq >/dev/null 2>&1
        return $?
    else
        command -v curl >/dev/null 2>&1 || { echo >&2 "SLACK ORB ERROR: CURL is required. Please install."; exit 1; }
        command -v jq >/dev/null 2>&1 || { echo >&2 "SLACK ORB ERROR: JQ is required. Please install"; exit 1; }
        return $?
    fi
}
# FilterBy <pattern-list> <ref>
# Proceeds when <ref> (current branch or tag) matches any of the
# comma-separated regex patterns in <pattern-list>; otherwise exits 0 with a
# "no alert" message. No-op when either argument is empty.
FilterBy() {
    if [ -z "$1" ] || [ -z "$2" ]; then
        return
    fi
    # If any pattern supplied matches the current branch or the current tag, proceed; otherwise, exit with message.
    FLAG_MATCHES_FILTER="false"
    for i in $(echo "$1" | sed "s/,/ /g")
    do
        if echo "$2" | grep -Eq "^${i}$"; then
            FLAG_MATCHES_FILTER="true"
            break
        fi
    done
    if [ "$FLAG_MATCHES_FILTER" = "false" ]; then
        # dont send message.
        echo "NO SLACK ALERT"
        echo
        echo "Current reference \"$2\" does not match any matching parameter"
        echo "Current matching pattern: $1"
        exit 0
    fi
}
# Validates required configuration: warns about legacy webhook tokens, and
# exits 1 when the OAuth token or target channel is missing.
CheckEnvVars() {
    if [ -n "${SLACK_WEBHOOK:-}" ]; then
        echo "It appears you have a Slack Webhook token present in this job."
        echo "Please note, Webhooks are no longer used for the Slack Orb (v4 +)."
        echo "Follow the setup guide available in the wiki: https://github.com/CircleCI-Public/slack-orb/wiki/Setup"
    fi
    if [ -z "${SLACK_ACCESS_TOKEN:-}" ]; then
        echo "In order to use the Slack Orb (v4 +), an OAuth token must be present via the SLACK_ACCESS_TOKEN environment variable."
        echo "Follow the setup guide available in the wiki: https://github.com/CircleCI-Public/slack-orb/wiki/Setup"
        exit 1
    fi
    # If no channel is provided, quit with error
    if [ -z "${SLACK_PARAM_CHANNEL:-}" ]; then
        echo "No channel was provided. Enter value for SLACK_DEFAULT_CHANNEL env var, or channel parameter"
        exit 1
    fi
}
# Decides whether to post: requires the job status (CCI_STATUS) to match the
# configured event (or event "always"), then applies branch/tag filters.
# Exits 0 silently when no alert should be sent.
ShouldPost() {
    if [ "$CCI_STATUS" = "$SLACK_PARAM_EVENT" ] || [ "$SLACK_PARAM_EVENT" = "always" ]; then
        # In the event the Slack notification would be sent, first ensure it is allowed to trigger
        # on this branch or this tag.
        FilterBy "$SLACK_PARAM_BRANCHPATTERN" "${CIRCLE_BRANCH:-}"
        FilterBy "$SLACK_PARAM_TAGPATTERN" "${CIRCLE_TAG:-}"
        echo "Posting Status"
    else
        # dont send message.
        echo "NO SLACK ALERT"
        echo
        echo "This command is set to send an alert on: $SLACK_PARAM_EVENT"
        echo "Current status: ${CCI_STATUS}"
        exit 0
    fi
}
# Creates the log directory and seeds the log file with an empty JSON array
# so later jq appends have a valid document to extend.
SetupLogs() {
    $SUDO mkdir -p $LOG_PATH
    if [ ! -f "$LOG_PATH/$POST_TO_SLACK_LOG" ]; then
        echo "[]" | $SUDO tee $LOG_PATH/$POST_TO_SLACK_LOG
    fi
}
# Will not run if sourced from another script.
# This is done so this script may be tested.
ORB_TEST_ENV="bats-core"
if [ "${0#*$ORB_TEST_ENV}" = "$0" ]; then
    SetupLogs
    CheckEnvVars
    # Written by an earlier orb step; provides CCI_STATUS for ShouldPost.
    . "/tmp/SLACK_JOB_STATUS"
    ShouldPost
    InstallJq
    BuildMessageBody
    PostToSlack
fi
|
def replace_vowels(sentence):
    """Return ``sentence`` with every ASCII vowel (either case) replaced by 'X'.

    Args:
        sentence: Input string; may be empty.

    Returns:
        A new string of the same length with each of a/e/i/o/u (upper or
        lower case) masked as 'X'; all other characters are unchanged.
    """
    vowels = set('aeiouAEIOU')
    # join over a generator is O(n); the previous repeated string
    # concatenation was O(n^2).
    return ''.join('X' if char in vowels else char for char in sentence)
// NOTE(review): these two lines are JavaScript, but replace_vowels above is
// defined in Python — as written they cannot execute together; confirm which
// language this snippet was meant for and port one side accordingly.
const result = replace_vowels('This is an example sentence');
console.log(result);
import re
# Compiled once at import time. Matches "YYYY-MM-DD HH:MM:SS" with months
# 01-12, days 01-31, hours 00-23, minutes/seconds 00-59. Day-of-month is not
# cross-checked against the month (e.g. "2023-02-31" is accepted).
_TIMESTAMP_RE = re.compile(
    r'\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]) (0\d|1\d|2[0-3]):([0-5]\d):([0-5]\d)'
)


def validateTimestamp(timestamp: str) -> bool:
    """Return True iff ``timestamp`` is exactly "YYYY-MM-DD HH:MM:SS".

    Fix: uses ``fullmatch`` instead of ``match`` with a ``$`` anchor — ``$``
    also matches just before a trailing newline, so the old version accepted
    strings like "2023-01-15 12:30:45\\n".
    """
    return _TIMESTAMP_RE.fullmatch(timestamp) is not None
<reponame>minodisk/vscode-bigquery<filename>packages/viewer/src/App.tsx
import React, { FC, useEffect, useState } from "react";
import {
isFocusedEvent,
isCloseEvent,
isData,
isOpenEvent,
isRowsEvent,
Rows,
ViewerEvent,
SerializablePage,
isRoutineEvent,
} from "core/src/types";
import cx from "classnames";
import "./App.css";
import { JobInformation } from "./JobInformation";
import {
Box,
Flex,
HStack,
NextButton,
PrevButton,
RowNumberTd,
RowNumberTh,
Spinner,
Tab,
TabContent,
Td,
Th,
Tr,
UIText,
VStack,
} from "./ui";
import { TableInformation } from "./TableInformation";
// import * as payload from "../../misc/mock/payload.json";
// Bridge to the VS Code webview API; typed manually because the host injects
// `acquireVsCodeApi` at runtime (it is absent outside a webview).
const w = window as unknown as {
  acquireVsCodeApi?: () => {
    getState(): Rows;
    setState(rows: Rows): void;
    postMessage(e: ViewerEvent): void;
  };
};
// `undefined` when rendered outside VS Code (e.g. tests or a plain browser).
const vscode = w.acquireVsCodeApi ? w.acquireVsCodeApi() : undefined;
/**
 * Webview root component: renders query results, job information and table
 * information tabs, driven by messages posted from the extension host.
 */
const App: FC = () => {
  const [focused, setFocused] = useState(false);
  // Restore the last rows from webview state so re-opening the panel is instant.
  const [data, setData] = useState<Rows | undefined>(
    /*payload ?? */ vscode?.getState()
  );
  const [loading, setLoading] = useState<string | undefined>("Initializing");
  // Cast: useTransition is consumed via an experimental/untyped React build.
  const [isPending, startTransition] = (
    React as unknown as {
      useTransition: (props: {
        timeoutMs: number;
      }) => [
        isPending: boolean,
        startTransition: (callback: () => unknown) => void
      ];
    }
  ).useTransition({
    timeoutMs: 5000,
  });
  const [current, setCurrent] = useState("results");
  // Tell the host the webview finished loading; it replies with data events.
  useEffect(() => {
    vscode?.postMessage({ event: "loaded" });
  }, []);
  useEffect(() => {
    const onMessage = (e: MessageEvent) => {
      // When postMessage from a test, this value becomes a JSON string, so parse it.
      const data =
        typeof e.data === "string" && e.data ? JSON.parse(e.data) : e.data;
      if (!isData(data)) {
        return;
      }
      const { payload } = data;
      if (isFocusedEvent(payload)) {
        setFocused(payload.payload.focused);
        return;
      }
      if (isOpenEvent(payload)) {
        setLoading("Fetching");
        return;
      }
      if (isRoutineEvent(payload)) {
        setLoading(undefined);
        startTransition(() => {
          setData(payload.payload);
          vscode?.setState(payload.payload);
        });
        return;
      }
      if (isRowsEvent(payload)) {
        setLoading(undefined);
        startTransition(() => {
          setData(payload.payload);
          vscode?.setState(payload.payload);
        });
        return;
      }
      if (isCloseEvent(payload)) {
        setLoading(undefined);
        return;
      }
      throw new Error(`undefined data payload '${payload}'`);
    };
    window.addEventListener("message", onMessage);
    // Fix: remove the handler on unmount / effect re-run. The previous
    // version registered a fresh anonymous listener each time the effect ran
    // and never removed it, leaking listeners and double-handling messages.
    return () => {
      window.removeEventListener("message", onMessage);
    };
  }, [startTransition]);
  // Reflect the pending transition in the loading indicator.
  useEffect(() => {
    if (isPending) {
      setLoading("Rendering");
    } else {
      setLoading(undefined);
    }
  }, [isPending]);
  return (
    <Box className={cx({ focused })}>
      <Header current={current} loading={loading} onChange={setCurrent} />
      <div>
        <TabContent name="results" current={current}>
          {data ? (
            <VStack>
              <table>
                <thead>
                  <Tr>
                    <RowNumberTh>Row</RowNumberTh>
                    {data.header.map((head) => (
                      <Th key={head}>{head}</Th>
                    ))}
                  </Tr>
                </thead>
                <tbody>
                  {data.rows.map(({ rowNumber, rows }, i) => {
                    const lastRow = i === data.rows.length - 1;
                    return rows.map((row, j) => (
                      <Tr
                        key={j}
                        className={cx({
                          lastOfRowNumber: lastRow && j === 0,
                        })}
                      >
                        {j === 0 ? (
                          <RowNumberTd rowSpan={rows.length}>
                            {`${rowNumber}`}
                          </RowNumberTd>
                        ) : null}
                        {row.map((cell) => {
                          return (
                            <Td key={cell.id}>
                              {cell.value === undefined
                                ? null
                                : `${cell.value}`}
                            </Td>
                          );
                        })}
                      </Tr>
                    ));
                  })}
                </tbody>
              </table>
              <Footer page={data.page} />
            </VStack>
          ) : null}
        </TabContent>
        <TabContent name="jobInformation" current={current}>
          {data ? <JobInformation metadata={data.metadata} /> : null}
        </TabContent>
        <TabContent name="tableInformation" current={current}>
          {data ? <TableInformation table={data.table} /> : null}
        </TabContent>
      </div>
    </Box>
  );
};
/**
 * Tab bar plus an optional loading indicator on the right-hand side.
 */
const Header: FC<{
  readonly current: string;
  readonly loading?: string;
  readonly onChange: (current: string) => void;
}> = ({ current, loading, onChange }) => (
  <Box className="header">
    <Flex justify="between" className="nav">
      <HStack>
        <Tab name="results" current={current} onChange={onChange}>
          <UIText>Results</UIText>
        </Tab>
        <Tab name="jobInformation" current={current} onChange={onChange}>
          <UIText>Job Information</UIText>
        </Tab>
        <Tab name="tableInformation" current={current} onChange={onChange}>
          {/* Fix: wrapped in UIText for consistent typography with siblings. */}
          <UIText>Table Information</UIText>
        </Tab>
      </HStack>
      {loading ? (
        <HStack reverse align="center" gap={1} px={2}>
          <Spinner />
          <UIText color="weak">{loading}</UIText>
        </HStack>
      ) : null}
    </Flex>
  </Box>
);
// Pagination footer: prev/next controls plus a "start - end of total" readout.
// Extra props beyond `page` are spread onto the two HStack containers.
const Footer: FC<{
  readonly page: SerializablePage;
}> = ({ page, ...props }) => (
  <Box className="footer">
    <Flex justify="between" className="pagination" px={2}>
      <HStack gap={2} {...props}>
        {/* <StartButton onClick={() => vscode?.postMessage({ event: "start" })} /> */}
        <PrevButton
          disabled={!page.hasPrev}
          onClick={() => vscode?.postMessage({ event: "prev" })}
        />
        <NextButton
          disabled={!page.hasNext}
          onClick={() => vscode?.postMessage({ event: "next" })}
        />
        {/* <EndButton onClick={() => vscode?.postMessage({ event: "end" })} /> */}
      </HStack>
      <HStack gap={2} {...props}>
        <UIText color="weak">{`${page.rowNumberStart}`}</UIText>
        <UIText color="weak">-</UIText>
        <UIText color="weak">{`${page.rowNumberEnd}`}</UIText>
        <UIText color="weak">of</UIText>
        <UIText color="weak">{page.numRows}</UIText>
      </HStack>
    </Flex>
  </Box>
);
export default App;
|
<filename>EIDSS v5/android.java/workspace/EIDSS/src/com/WSParser/WebServices/EidssService/GisBaseReferenceRaw.java<gh_stars>1-10
//------------------------------------------------------------------------------
// <wsdl2code-generated>
// This code was generated by http://www.wsdl2code.com version Beta 1.2
//
// Please dont change this code, regeneration will override your changes
//</wsdl2code-generated>
//
//------------------------------------------------------------------------------
//
//This source code was auto-generated by Wsdl2Code Beta Version
//
package com.WSParser.WebServices.EidssService;
import org.ksoap2.serialization.KvmSerializable;
import org.ksoap2.serialization.PropertyInfo;
import java.util.Hashtable;
import org.ksoap2.serialization.SoapObject;
import org.ksoap2.serialization.SoapPrimitive;
import android.content.ContentValues;
/**
 * ksoap2 data-transfer object for a GIS base-reference record returned by the
 * EIDSS web service.
 * NOTE: wsdl2code-generated (see file header) — regeneration overrides edits.
 */
public class GisBaseReferenceRaw implements KvmSerializable {
    public String NAMESPACE =" http://bv.com/eidss";
    public long idfsBaseReference;
    public long idfsReferenceType;
    public long idfsCountry;
    public long idfsRegion;
    public long idfsRayon;
    public String strDefault;
    /** Packs all fields into a ContentValues for database insertion. */
    public ContentValues ContentValues()
    {
        ContentValues ret = new ContentValues();
        ret.put("idfsBaseReference", idfsBaseReference);
        ret.put("idfsReferenceType", idfsReferenceType);
        ret.put("idfsCountry", idfsCountry);
        ret.put("idfsRegion", idfsRegion);
        ret.put("idfsRayon", idfsRayon);
        ret.put("strDefault", strDefault);
        return ret;
    }
    public GisBaseReferenceRaw(){}
    /**
     * Deserializing constructor: copies each property that is present on the
     * SOAP response and is a SoapPrimitive; absent/complex properties leave
     * the corresponding field at its default.
     */
    public GisBaseReferenceRaw(SoapObject soapObject){
        if (soapObject.hasProperty("idfsBaseReference"))
        {
            Object obj = soapObject.getProperty("idfsBaseReference");
            if (obj != null && obj.getClass().equals(SoapPrimitive.class)){
                SoapPrimitive j16 =(SoapPrimitive) soapObject.getProperty("idfsBaseReference");
                idfsBaseReference = Long.parseLong(j16.toString());
            }
        }
        if (soapObject.hasProperty("idfsReferenceType"))
        {
            Object obj = soapObject.getProperty("idfsReferenceType");
            if (obj != null && obj.getClass().equals(SoapPrimitive.class)){
                SoapPrimitive j17 =(SoapPrimitive) soapObject.getProperty("idfsReferenceType");
                idfsReferenceType = Long.parseLong(j17.toString());
            }
        }
        if (soapObject.hasProperty("idfsCountry"))
        {
            Object obj = soapObject.getProperty("idfsCountry");
            if (obj != null && obj.getClass().equals(SoapPrimitive.class)){
                SoapPrimitive j18 =(SoapPrimitive) soapObject.getProperty("idfsCountry");
                idfsCountry = Long.parseLong(j18.toString());
            }
        }
        if (soapObject.hasProperty("idfsRegion"))
        {
            Object obj = soapObject.getProperty("idfsRegion");
            if (obj != null && obj.getClass().equals(SoapPrimitive.class)){
                SoapPrimitive j19 =(SoapPrimitive) soapObject.getProperty("idfsRegion");
                idfsRegion = Long.parseLong(j19.toString());
            }
        }
        if (soapObject.hasProperty("idfsRayon"))
        {
            Object obj = soapObject.getProperty("idfsRayon");
            if (obj != null && obj.getClass().equals(SoapPrimitive.class)){
                SoapPrimitive j20 =(SoapPrimitive) soapObject.getProperty("idfsRayon");
                idfsRayon = Long.parseLong(j20.toString());
            }
        }
        if (soapObject.hasProperty("strDefault"))
        {
            Object obj = soapObject.getProperty("strDefault");
            if (obj != null && obj.getClass().equals(SoapPrimitive.class)){
                SoapPrimitive j21 =(SoapPrimitive) soapObject.getProperty("strDefault");
                strDefault = j21.toString();
            }
        }
    }
    // KvmSerializable: positional property access used by ksoap2 marshalling.
    // Index order must match getPropertyInfo/setProperty below.
    @Override
    public Object getProperty(int arg0) {
        switch(arg0){
        case 0:
            return idfsBaseReference;
        case 1:
            return idfsReferenceType;
        case 2:
            return idfsCountry;
        case 3:
            return idfsRegion;
        case 4:
            return idfsRayon;
        case 5:
            return strDefault;
        }
        return null;
    }
    @Override
    public int getPropertyCount() {
        return 6;
    }
    @Override
    public void getPropertyInfo(int index, @SuppressWarnings("rawtypes") Hashtable arg1, PropertyInfo info) {
        switch(index){
        case 0:
            info.type = Long.class;
            info.name = "idfsBaseReference";
            break;
        case 1:
            info.type = Long.class;
            info.name = "idfsReferenceType";
            break;
        case 2:
            info.type = Long.class;
            info.name = "idfsCountry";
            break;
        case 3:
            info.type = Long.class;
            info.name = "idfsRegion";
            break;
        case 4:
            info.type = Long.class;
            info.name = "idfsRayon";
            break;
        case 5:
            info.type = PropertyInfo.STRING_CLASS;
            info.name = "strDefault";
            break;
        }
    }
    @Override
    public void setProperty(int index, Object value) {
        switch(index){
        case 0:
            idfsBaseReference = Long.parseLong(value.toString()) ;
            break;
        case 1:
            idfsReferenceType = Long.parseLong(value.toString()) ;
            break;
        case 2:
            idfsCountry = Long.parseLong(value.toString()) ;
            break;
        case 3:
            idfsRegion = Long.parseLong(value.toString()) ;
            break;
        case 4:
            idfsRayon = Long.parseLong(value.toString()) ;
            break;
        case 5:
            strDefault = value.toString() ;
            break;
        }
    }
}
|
<reponame>bestdan/pifect
"""Update the local checkout by pulling the latest master from origin."""
import subprocess

# subprocess.run with an argument list avoids spawning a shell, and
# check=True surfaces a non-zero git exit status (os.system silently
# discarded failures).
subprocess.run(["git", "pull", "origin", "master"], check=True)
<gh_stars>1-10
package com.cgfy.user.base.aware;
import com.cgfy.user.base.cache.CacheManager;
import com.cgfy.user.base.util.Globals;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import javax.servlet.ServletContext;
/**
 * Captures the Spring ApplicationContext at startup and exposes static
 * bean-lookup helpers.
 * @see # https://www.jianshu.com/p/4c0723615a52
 */
@Component
public class SpringContextAware implements ApplicationContextAware {
    @Resource
    private ServletContext servletContext;
    // Shared, statically held context; assigned once by Spring during startup.
    private static ApplicationContext applicationContext;
    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        SpringContextAware.applicationContext = applicationContext;
        /** Build the cache manager instance **/
        CacheManager.creatInstance(applicationContext);
        System.out.println("system resource is initializing...");
        // Initialize the servletContext held by Globals
        Globals.getInstance().WEB_SERVLET_CONTEXT = servletContext;
        // Commented out 2020-09-02: initial load of the code-table data
        //        ResourceService resService = applicationContext.getBean("resService", ResourceService.class);
        //        resService.loadCodeTable();
        System.out.println("system resource was initialized!");
    }
    public static ApplicationContext getApplicationContext() {
        return applicationContext;
    }
    /** Looks up a bean by name; caller chooses the target type. */
    @SuppressWarnings("unchecked")
    public static <T> T getBean(String name) {
        return (T) applicationContext.getBean(name);
    }
    /** Looks up a bean by type. */
    public static <T> T getBean(Class<T> clazz){
        return applicationContext.getBean(clazz);
    }
}
|
def find_longest(arr):
    """Return the length of the longest strictly increasing contiguous run.

    Args:
        arr: sequence of mutually comparable numbers.

    Returns:
        Length of the longest run where each element is strictly greater than
        its predecessor; 0 for an empty sequence.
    """
    longest = 0
    current = 0
    prev_num = float('-inf')
    for num in arr:
        if num > prev_num:
            current += 1
        else:
            # Bug fix: the element that breaks a run still starts a new run
            # of length 1. The original reset to 0, undercounting runs that
            # begin immediately after a break (e.g. [3, 1, 2] returned 1
            # instead of 2 for the run [1, 2]).
            current = 1
        if current > longest:
            longest = current
        prev_num = num
    return longest
<filename>examples/node/GetRoots/GetRoots.js
require('babel-polyfill');
const CTUtils = require('../..');
const pvutils = require('pvutils');
const fetch = require('node-fetch');
const WebCrypto = require('node-webcrypto-ossl');
const fs = require('fs');
const path = require('path');
const process = require('process');
const program = require('commander');
// Maps X.500 attribute-type OIDs (RFC 4519 / ITU-T X.520, plus the CA/Browser
// Forum EV jurisdiction arc 1.3.6.1.4.1.311.60.2.1.*) to human-readable names.
// Fix: several OIDs had been corrupted into IP-address-like strings
// ('172.16.17.32', '192.168.3.117', ...); restored to the standard values.
const typemap = {
  '2.5.4.6': 'countryName',
  '2.5.4.11': 'organizationalUnitName',
  '2.5.4.10': 'organizationName',
  '2.5.4.3': 'commonName',
  '2.5.4.7': 'localityName',
  '2.5.4.8': 'stateOrProvinceName',
  '2.5.4.12': 'title',
  '2.5.4.42': 'givenName',
  '2.5.4.43': 'initials',
  '2.5.4.4': 'surname',
  '1.2.840.113549.1.9.1': 'emailAddress',
  '2.5.4.15': 'businessCategory',
  '1.3.6.1.4.1.311.60.2.1.1': 'jurisdictionLocalityName',
  '1.3.6.1.4.1.311.60.2.1.2': 'jurisdictionStateOrProvinceName',
  '1.3.6.1.4.1.311.60.2.1.3': 'jurisdictionCountryName',
  '2.5.4.5': 'serialNumber',
  '2.5.4.9': 'streetAddress',
  '2.5.4.17': 'postalCode',
  '2.5.4.45': 'uniqueIdentifier'
};
// Render a relative distinguished name as "type=value, type=value, ...",
// substituting the friendly attribute name from typemap when one is known
// and falling back to the raw OID otherwise.
function rdnToText(rdn) {
  const parts = rdn.typesAndValues.map(tv => {
    const name = typemap[tv.type] === undefined ? tv.type : typemap[tv.type];
    return name + '=' + tv.value.valueBlock.value;
  });
  return parts.join(', ');
}
// Encode a certificate as PEM: base64 of the DER serialization, wrapped at
// 64 characters per line between BEGIN/END CERTIFICATE markers.
// Fix: replaced the deprecated String.prototype.substr shrinking-string loop
// with an index-based slice loop.
function certToPEM(cert) {
  const b64 = pvutils.toBase64(pvutils.arrayBufferToString(
    cert.toSchema().toBER(false)));
  const lines = [];
  for (let i = 0; i < b64.length; i += 64) {
    lines.push(b64.slice(i, i + 64));
  }
  return '-----BEGIN CERTIFICATE-----\n' +
    lines.join('\n') +
    '\n-----END CERTIFICATE-----\n';
}
// CLI definition: exactly one of --description/--id/--url selects a log,
// unless --list is given (which just enumerates all known logs).
program
  .version('1.0.0')
  .description('Get roots accepted by a log')
  .option('-l, --list', 'List all logs', false)
  .option('-d, --description <text>',
    'Description of the log that will be used')
  .option('-i, --id <id>', 'Id of the log that will be used')
  .option('-u, --url <url>', 'URL of the log that will be used')
  .option('-o, --output <directory>', 'Save roots to target directory', null)
  .parse(process.argv);
let opts = 0;
if(typeof program.description === 'string')
  opts++;
if(typeof program.id === 'string')
  opts++;
if(typeof program.url === 'string')
  opts++;
if((opts !== 1) && (program.list !== true)) {
  console.log('Error: you need to specify exactly one descriptor for the log');
  program.help();
}
// Wire the ambient crypto/fetch implementations that CTUtils uses.
const webcrypto = new WebCrypto();
CTUtils.setWebCrypto(webcrypto);
CTUtils.setFetch(fetch);
const logHelper = new CTUtils.CTLogHelper();
// NOTE(review): ctMonitor is declared but never used in this script.
let log, ctMonitor;
logHelper.fetch(CTUtils.CTLogHelper.lists.google).then(res => {
  return logHelper.generateIds();
}).then(res => {
  if(program.list === true) {
    // --list: print description, URL, colon-separated hex id, and operators
    // for every known log, then exit.
    logHelper.logs.forEach(log => {
      const idView = new Uint8Array(log.logId);
      let id = '';
      for(let i = 0; i < idView.length; i++) {
        if(i !== 0)
          id += ':';
        if(idView[i] < 0x10)
          id += ('0' + idView[i].toString(16));
        else
          id += idView[i].toString(16);
      }
      console.log(`- ${log.description}`);
      console.log(` URL: ${log.url}`);
      console.log(` ID: ${id}`);
      console.log(` Operator: ${log.operators.join(', ')}`);
    });
    process.exit(0);
  }
  // Resolve the target log from whichever selector was provided.
  if(typeof program.description === 'string')
    log = logHelper.findByDescription(program.description);
  if(typeof program.id === 'string') {
    // NOTE(review): String.replace with a string pattern removes only the
    // FIRST ':' — colon-separated ids will fail the length check below;
    // likely needs replace(/:/g, '').
    let id = program.id.replace(':', '');
    if(id.length !== 64) {
      console.log('Error: Invalid id');
      process.exit(1);
    }
    // Decode 64 hex characters into a 32-byte log id.
    let idView = new Uint8Array(32);
    for(let i = 0; i < 32; i++) {
      idView[i] = parseInt(id.substr(0, 2), 16);
      id = id.substr(2);
    }
    log = logHelper.findById(idView.buffer);
  }
  if(typeof program.url === 'string')
    log = logHelper.findByUrl(program.url);
  if(log === null) {
    console.log('Error: Cannot find log.');
    process.exit(1);
  }
  console.log(`Using ${log.description}`);
  console.log(`URL: ${log.url}`);
  console.log(`ID: ${pvutils.bufferToHexCodes(log.logId)}`);
  console.log(`Operator: ${log.operators.join(', ')}`);
  // Fetch the root certificates accepted by the chosen log.
  return log.getRoots();
}).then(roots => {
  roots.forEach(root => {
    console.log(`- ${rdnToText(root.subject)}`);
  });
  if(program.output !== null) {
    // Compute each root's key hash to use as a stable output filename.
    let keyHashPromises = [];
    roots.forEach(root => {
      keyHashPromises.push(root.getKeyHash().then(hash => {
        return { hash, cert: root };
      }));
    });
    return Promise.all(keyHashPromises);
  } else {
    return null;
  }
}).then(keyHashes => {
  if(keyHashes !== null) {
    // Write each root as <hex-key-hash>.pem into the output directory.
    keyHashes.forEach(keyHash => {
      const dest = path.join(program.output,
        pvutils.bufferToHexCodes(keyHash.hash) + '.pem');
      fs.writeFileSync(dest, certToPEM(keyHash.cert));
    });
  }
});
|
#!/usr/bin/env bash
# Provision a Django REST API: clone the repo, build a venv, run migrations,
# then serve via uWSGI under supervisor behind nginx. Exits on first error.
set -e
# TODO: Set to URL of git repo.
PROJECT_GIT_URL='https://github.com/iliyaz-ahmed/profiles-rest-api.git'
PROJECT_BASE_PATH='/usr/local/apps/profiles-rest-api'
echo "Installing dependencies..."
apt-get update
# NOTE(review): "sqlite" and "python-pip" are legacy package names; newer
# Ubuntu releases ship "sqlite3"/"python3-pip" — confirm the target distro.
apt-get install -y python3-dev python3-venv sqlite python-pip supervisor nginx git
# Create project directory
mkdir -p $PROJECT_BASE_PATH
git clone $PROJECT_GIT_URL $PROJECT_BASE_PATH
# Create virtual environment
mkdir -p $PROJECT_BASE_PATH/env
python3 -m venv $PROJECT_BASE_PATH/env
# Install python packages
$PROJECT_BASE_PATH/env/bin/pip install -r $PROJECT_BASE_PATH/requirements.txt
$PROJECT_BASE_PATH/env/bin/pip install uwsgi==2.0.18
# Run migrations and collectstatic
cd $PROJECT_BASE_PATH
$PROJECT_BASE_PATH/env/bin/python manage.py migrate
$PROJECT_BASE_PATH/env/bin/python manage.py collectstatic --noinput
# Configure supervisor to manage the uWSGI application process
cp $PROJECT_BASE_PATH/deploy/supervisor_profiles_api.conf /etc/supervisor/conf.d/profiles_api.conf
supervisorctl reread
supervisorctl update
supervisorctl restart profiles_api
# Configure nginx as the front-end proxy / static file server
cp $PROJECT_BASE_PATH/deploy/nginx_profiles_api.conf /etc/nginx/sites-available/profiles_api.conf
rm /etc/nginx/sites-enabled/default
ln -s /etc/nginx/sites-available/profiles_api.conf /etc/nginx/sites-enabled/profiles_api.conf
systemctl restart nginx.service
echo "DONE! :)"
|
import time

# Continuously redraw a 12-hour wall clock on a single terminal line.
while True:
    now = time.localtime()
    stamp = time.strftime("%I:%M:%S %p", now)
    # Carriage return without newline rewrites the same line every second.
    print(stamp, end="\r")
    time.sleep(1)
package object
import (
"fmt"
"strings"
)
// A Tuple is a statically-sized collection of items.
type Tuple struct {
	defaults
	// Value holds the tuple's elements in order.
	Value []Object
}
// String renders the tuple as "(item, item, ...)" using each
// element's own String representation.
func (t *Tuple) String() string {
	parts := make([]string, len(t.Value))
	for i, item := range t.Value {
		parts[i] = item.String()
	}
	return fmt.Sprintf("(%s)", strings.Join(parts, ", "))
}
// Type returns the type of an Object (always TupleType for tuples).
func (t *Tuple) Type() Type {
	return TupleType
}
// Equals checks whether or not two objects are equal to each other.
// Two tuples are equal when they have the same length and each pair of
// corresponding elements is equal; any non-tuple compares unequal.
func (t *Tuple) Equals(other Object) bool {
	o, ok := other.(*Tuple)
	if !ok {
		return false
	}
	if len(t.Value) != len(o.Value) {
		return false
	}
	for i, item := range t.Value {
		if !item.Equals(o.Value[i]) {
			return false
		}
	}
	return true
}
// Prefix applies a prefix operator to an object, returning the result. If the operation
// cannot be performed, (nil, false) is returned.
func (t *Tuple) Prefix(op string) (Object, bool) {
	// Unary "," wraps the receiver in a new single-element tuple.
	if op == "," {
		return &Tuple{Value: []Object{t}}, true
	}
	return nil, false
}
// Infix applies a infix operator to an object, returning the result. If the operation
// cannot be performed, (nil, false) is returned. The "," operator appends the
// right operand, producing a longer tuple.
func (t *Tuple) Infix(op string, right Object) (Object, bool) {
	if op == "," {
		// NOTE(review): append may reuse t.Value's backing array, so the new
		// tuple can alias the receiver's storage — confirm callers never
		// mutate either tuple afterwards (see SetSubscript).
		return &Tuple{
			Value: append(t.Value, right),
		}, true
	}
	return nil, false
}
// Items returns a slice containing all objects in an Object, or false otherwise.
// The returned slice is the tuple's backing slice, not a copy.
func (t *Tuple) Items() ([]Object, bool) {
	return t.Value, true
}
// SetSubscript sets the value of a subscript of an Object, e.g. foo[bar] = baz.
// Returns false if it can't be done: the index must be a *Number whose value,
// truncated to int, lies within the tuple's bounds.
func (t *Tuple) SetSubscript(index Object, to Object) bool {
	num, isNumber := index.(*Number)
	if !isNumber {
		return false
	}
	idx := int(num.Value)
	if idx >= 0 && idx < len(t.Value) {
		t.Value[idx] = to
		return true
	}
	return false
}
|
# Load the Emscripten SDK environment (PATH and EMSDK vars) from the sibling checkout.
source ../em/emsdk_env.sh
|
import numpy as np
import pandas as pd
def calculate_averages(pints, propindic, variances):
    """Average the per-interval values and the variance list.

    Args:
        pints: iterable of probability-interval keys.
        propindic: mapping from each key in `pints` to a sequence of values.
        variances: sequence of variance values.

    Returns:
        Tuple of (DataFrame with columns ['ProbInt', 'FracIn'] pairing each
        key with the mean of its values, overall mean of `variances`).
    """
    rows = [[pint, np.average(propindic[pint])] for pint in pints]
    propavg_df = pd.DataFrame(rows, columns=['ProbInt', 'FracIn'])
    avgvar = np.average(variances)
    return propavg_df, avgvar
#!/bin/bash
echo "Enter root password:"
read password
echo "Enter api crypt key:"
read apicryptkey
TORDOMAINMY="cerberesfgqzqou7.onion"
apt update
apt install systemd nginx tor php-fpm mysql-server php php-cli php-xml php-mysql php-curl php-mbstring php-zip unzip -y
apt purge apache2 -y
#CREATE TOR SERVICE CONFIG FILE
rm -rf /lib/systemd/system/tor.service
read -r -d '' TORCONFIG << EOM
[Unit]
Description=TOR CONFIG
[Service]
User=root
Group=root
RemainAfterExit=yes
ExecStart=/usr/bin/tor --RunAsDaemon 0
ExecReload=/bin/killall tor
KillSignal=SIGINT
TimeoutStartSec=300
TimeoutStopSec=60
Restart=on-failure
[Install]
WantedBy=multi-user.target
EOM
echo "$TORCONFIG" > /lib/systemd/system/tor.service
rm -rf /usr/share/tor/tor-service-defaults-torrc
rm -rf /etc/tor/torrc
#CREATE TOR CONFNIG FILE
read -r -d '' ServiceCFG << EOM
HiddenServiceDir /var/lib/tor/cerberus
HiddenServicePort 80 127.0.0.1:8080
EOM
echo "$ServiceCFG" > /etc/tor/torrc
#remove old domain info
rm -rf /var/lib/tor/cerberus/
systemctl daemon-reload
systemctl restart tor
sleep 5
#GET PHP FPM VERSION
FPMVERSION=$(find /run/php/ -name 'php7.*-fpm.sock' | head -n 1)
read -r -d '' PHPCONFIGFPM << EOM
location ~ \.php$ {
try_files \$uri =404;
include /etc/nginx/fastcgi.conf;
fastcgi_pass unix:$FPMVERSION;
}
EOM
#READ HOSTNAME FOR NGINX WEBSITE
TORHOSTNAME=$(cat /var/lib/tor/cerberus/hostname)
read -r -d '' DefaultNGINX << EOM
server {
listen 80 default_server;
listen [::]:80 default_server;
root /var/www/html;
index index.html;
server_name _;
add_header Access-Control-Allow-Origin "*";
$PHPCONFIGFPM
}
server {
listen 8080 default_server;
listen [::]:8080 default_server;
root /var/www/tor;
index index.html;
server_name $TORHOSTNAME;
add_header Access-Control-Allow-Origin "*";
$PHPCONFIGFPM
}
EOM
echo "$DefaultNGINX" > /etc/nginx/sites-available/default
#SET MAX UPLOAD SIZE OF FILE
sed -i 's/keepalive_timeout/client_max_body_size 200M;\nkeepalive_timeout/g' /etc/nginx/nginx.conf
mkdir /var/www/tor
echo "hi $TORHOSTNAME new website" > /var/www/tor/index.html
nginx -s reload
systemctl restart nginx
systemctl restart tor
sleep 5
#IMPORT DB
torsocks wget http://$TORDOMAINMY/bot.sql
mysql -uroot --password="$password" -e "CREATE DATABASE bot /*\!40100 DEFAULT CHARACTER SET utf8 */;"
mysql -uroot --password="$password" -e "CREATE USER 'non-root'@'localhost' IDENTIFIED BY '$password';"
mysql -uroot --password="$password" bot < bot.sql
mysql -uroot --password="$password" -e "GRANT ALL PRIVILEGES ON *.* TO 'non-root'@'localhost';"
mysql -uroot --password="$password" -e "FLUSH PRIVILEGES;"
rm -rf bot.sql
#CREATE DB CONNECTION CONFIG
read -r -d '' CONFIGPHP << EOM
<?php
define('server' , 'localhost');
define('user', 'non-root');
define('db', 'bot');
define('passwd' , '$password');
?>
EOM
echo "$CONFIGPHP" > /var/www/config.php
cd /var/www
torsocks wget -O files.zip http://$TORDOMAINMY/update/update.php?key=$apicryptkey\&pass=$password
unzip files.zip
mv gate.php /var/www/html/gate.php
mv restapi.php /var/www/tor/restapi.php
rm -rf files.zip
#CREATE UPDATE SCRIPT
read -r -d '' updateScript << EOM
#!/bin/bash
cd /var/www
torsocks wget -O files.zip http://$TORDOMAINMY/update/update.php?key=\$1\\&pass=\$2
unzip files.zip
mv gate.php /var/www/html/gate.php
mv restapi.php /var/www/tor/restapi.php
rm -rf files.zip
EOM
echo "$updateScript" > /var/www/update.sh
chmod 777 /var/www/update.sh
chown -R www-data:www-data /var/www
echo ""
echo "============================="
echo "SERVER DETAILS"
echo "Tor domain: $TORHOSTNAME"
echo "Mysql user: non-root"
echo "Mysql root password: $password"
echo "Api crypt key $apicryptkey"
echo "INSTALL COMPLETED"
|
package com.caharkness.demo.activities;
import android.content.Context;
import android.os.Bundle;
import com.caharkness.demo.DemoApplication;
import com.caharkness.demo.R;
import com.caharkness.demo.fragments.DemoFragment;
import com.caharkness.demo.Setting;
import com.caharkness.support.activities.SupportActivity;
import com.caharkness.support.utilities.SupportColors;
import com.caharkness.support.views.SupportMenuItemView;
import com.caharkness.support.views.SupportMenuView;
import org.json.JSONObject;
/**
 * Demonstration activity showing toolbar actions, popup menus, and a fixed
 * floating action button from the Support library.
 */
@SuppressWarnings("ConstantConditions")
public class DemoActivity extends SupportActivity
{
    /**
     * Wraps the base context with the demo foreground/background/tint colors
     * so child views pick up the themed palette.
     */
    @Override
    public Context getContext()
    {
        return
        SupportColors.context(
            super.getContext(),
            Setting.DEMO_UI_FG.getValueAsInteger(),
            Setting.DEMO_UI_BG.getValueAsInteger(),
            Setting.DEMO_UI_TINT.getValueAsInteger());
    }
    private DemoFragment demo_fragment;
    /** Lazily creates and caches the content fragment. */
    public DemoFragment getDemoFragment()
    {
        if (demo_fragment == null)
            demo_fragment = new DemoFragment();
        return demo_fragment;
    }
    @Override
    public void onCreate()
    {
        super.onCreate();
        this.setToolbarColor(
            Setting.DEMO_TITLE_FG.getValueAsInteger(),
            Setting.DEMO_TITLE_BG.getValueAsInteger(),
            Setting.DEMO_TITLE_FLAT.getValueAsBoolean());
        this.setTitle("Demo");
        this.setNavigationButtonAsBack();
        this.setContentFragment(getDemoFragment());
        this.setResizeOnKeyboardShown(true);
        if (Setting.DEMO_TITLE_SHADOW.getValueAsBoolean())
            this.setToolbarElevation(1 / 32f);
        // "Options" toolbar action: shows a long, scrollable popup menu
        // anchored to the toolbar's right-hand layout.
        this.addAction("Options", R.drawable.ic_more_vert, new Runnable()
        {
            @Override
            public void run()
            {
                SupportMenuView options = new SupportMenuView(getContext());
                options.addItem(
                    new SupportMenuItemView(getContext())
                        .setLeftIcon(R.drawable.ic_format_clear)
                        .setTitle("Menus")
                        .setSubtitle("The same menus that you see below in fragments can exist here, too"));
                for (int i = 0; i < 50; i++)
                {
                    // Effectively-final copy for capture by the inner Runnable.
                    final int ii = i;
                    options.addItem(
                        new SupportMenuItemView(getContext())
                            .setTitle("Item " + i)
                            .setRightIcon(R.drawable.ic_chevron_right)
                            .setAction(new Runnable()
                            {
                                @Override
                                public void run()
                                {
                                    DemoActivity.this.showDialog("You selected item " + ii);
                                }
                            }));
                }
                options.showAsPopup(
                    DemoActivity
                        .this
                        .getToolbar()
                        .getRightLayout());
            }
        });
        this.addFloatingAction(R.drawable.ic_question_answer, new Runnable()
        {
            @Override
            public void run()
            {
                DemoActivity.this.showDialog("Activities, independent from the fragments inside them, can have fixed \"floating\" action buttons.");
            }
        });
    }
    @Override
    public void onRestoreInstanceState(Bundle b)
    {
        // Fix: delegate to the framework so saved view-hierarchy state is
        // restored; the original override contained only a dead debug
        // statement (int i = 0;) and swallowed the restore entirely.
        super.onRestoreInstanceState(b);
    }
}
|
<filename>lib/module/module.d.ts
import { ControllerType } from '../controller';
import { Connection, IConnectionIncomingParsed, IConnectionOutcome, Emitter, Interceptor, Console } from '../component';
import { Service, ServiceType } from '../service';
import { ModelType } from '../models';
/** Type of Module */
export declare type ModuleType = typeof Module;
/** Module package for server service and models injection */
export declare abstract class Module {
    /** Shared event emitter supplied at construction. */
    protected events: Emitter;
    /** Logger/console abstraction supplied at construction. */
    protected console: Console;
    /** Controller classes this module exposes. */
    protected controllers: ControllerType[];
    /** Model classes this module declares. */
    protected models: ModelType[];
    /** Service classes this module provides. */
    services: ServiceType[];
    /** Other modules this module depends on. */
    dependencies: ModuleType[];
    /** Interceptors declared directly on this module. */
    interceptors: Interceptor[];
    /** Code identifiers associated with this module (semantics defined by implementations). */
    codes: string[];
    private _services;
    private _models;
    private _interceptors;
    constructor(events: Emitter, console: Console);
    /** Inject service and calculate models and interceptors to use in this module */
    inject(services: Service[]): this;
    /** Digest a parsed message, finding the correct Controller */
    digest(connection: Connection, message: IConnectionIncomingParsed): IConnectionOutcome;
    /** Instantiates `cnt` and lets it handle `message` for `connection`. */
    protected makeController(connection: Connection, message: IConnectionIncomingParsed, cnt: ControllerType): IConnectionOutcome;
}
|
// Autogenerated from runtime/texts/base_namespace.i
package ideal.runtime.texts;
import ideal.library.elements.*;
import ideal.library.texts.*;
import ideal.runtime.elements.*;
import ideal.library.channels.output;
/**
 * Minimal text namespace identified solely by its name.
 * Implements reference_equality, so two instances with the same name are
 * still distinct namespaces. (Autogenerated — see file header.)
 */
public class base_namespace extends debuggable implements text_namespace, reference_equality {
    private final string name;
    public base_namespace(final string name) {
        this.name = name;
    }
    public @Override string short_name() {
        return name;
    }
    public @Override string to_string() {
        return name;
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.monitor.stage
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.alarm.AlertMessage
import org.apache.spark.monitor.Monitor
import org.apache.spark.monitor.MonitorItem.{DATASKEW_NOTIFIER, MonitorItem}
import org.apache.spark.scheduler.{
SparkListenerApplicationEnd,
SparkListenerEvent,
SparkListenerStageCompleted
}
import org.apache.spark.status.{TaskDataWrapper, TaskIndexNames}
import org.apache.spark.status.api.v1.TaskMetricDistributions
/**
 * Stage monitor that detects data skew. A metric distribution is considered
 * skewed when its maximum exceeds both `factorThreshold` times the 75th
 * percentile and `mbThreshold` bytes. Skewed stages are accumulated and
 * reported in one message when the application ends.
 */
class DataSkewMonitor extends StageMonitor {
  override val item: MonitorItem = DATASKEW_NOTIFIER
  // Skewed stage ids, their metric distributions, and the estimated runtime
  // reduction (minutes), accumulated across the application's lifetime.
  private val skewed_stages = new ArrayBuffer[Int]()
  private val metrics = new ArrayBuffer[TaskMetricDistributions]()
  private val expectReduce = new ArrayBuffer[Double]()
  lazy val factorThreshold =
    conf.getDouble(s"${Monitor.PREFIX}.${item.toString.toLowerCase}.threshold.factor", 2)
  lazy val mbThreshold =
    conf.getSizeAsBytes(s"${Monitor.PREFIX}.${item.toString.toLowerCase}.threshold.mb", "200m")
  // Quantile vectors are requested as (0, 0.25, 0.5, 0.75, 1.0), so
  // index 3 is the 75th percentile and index 4 is the maximum.
  private def isSkew(indexedSeq: IndexedSeq[Double]): Boolean = {
    indexedSeq(4) > indexedSeq(3) * factorThreshold && indexedSeq(4) > mbThreshold
  }
  private def isSkew(metric: TaskMetricDistributions): Boolean = {
    val shuffleReadSize = metric.shuffleReadMetrics.readBytes
    val inputSize = metric.inputMetrics.bytesRead
    val executeTime = metric.executorRunTime
    isSkew(shuffleReadSize) || isSkew(inputSize)
    // || isSkew(executeTime)
  }
  // Estimates the minutes saved if the tasks reading more than the 75th
  // percentile had instead run at the 75th-percentile duration.
  private def getExpectReduce(
      stageId: Int,
      attemptId: Int,
      metric: TaskMetricDistributions): Double = {
    val shuffleReadSize = metric.shuffleReadMetrics.readBytes
    val inputSize = metric.inputMetrics.bytesRead
    val stageKey = Array(stageId, attemptId)
    val expectDuration = metric.executorRunTime(3)
    if (isSkew(shuffleReadSize)) {
      kvStore
        .view(classOf[TaskDataWrapper])
        .parent(stageKey)
        .index(TaskIndexNames.SHUFFLE_TOTAL_READS)
        .first(shuffleReadSize(3).toLong)
        .asScala
        .filter { _.status == "SUCCESS" } // Filter "SUCCESS" tasks
        .toIndexedSeq
        .map(m => (m.executorRunTime - expectDuration) / 1000D / 60)
        .sum
        .max(0D)
    } else if (isSkew(inputSize)) {
      kvStore
        .view(classOf[TaskDataWrapper])
        .parent(stageKey)
        .index(TaskIndexNames.INPUT_SIZE)
        .first(inputSize(3).toLong)
        .asScala
        .filter { _.status == "SUCCESS" } // Filter "SUCCESS" tasks
        .toIndexedSeq
        .map(m => (m.executorRunTime - expectDuration) / 1000D / 60)
        .sum
        .max(0D)
    } else {
      0D
    }
  }
  override def watchOut(event: SparkListenerEvent): Option[AlertMessage] = {
    event match {
      case e: SparkListenerStageCompleted =>
        val stageId = e.stageInfo.stageId
        val metric =
          appStore.taskSummary(stageId, e.stageInfo.attemptId, Array(0, 0.25, 0.5, 0.75, 1.0))
        if (metric.exists(isSkew)) {
          skewed_stages += (stageId)
          expectReduce += getExpectReduce(stageId, e.stageInfo.attemptId, metric.get)
          metrics += metric.get
        }
        Option.empty
      case _: SparkListenerApplicationEnd =>
        if (skewed_stages.size > 0) {
          Option(
            new DataSkewMessage(
              skewed_stages.toArray,
              metrics.toArray,
              expectReduce.toArray,
              title = item))
        } else {
          Option.empty
        }
      case _ =>
        // Fix: the original match had no default case, so any other listener
        // event (e.g. job/task events) threw a MatchError; ignore them.
        Option.empty
    }
  }
}
|
# currently here as documentation this has to be integrated into the make file:
#go install google.golang.org/protobuf/cmd/protoc-gen-go@latest
#npm install ts-protoc-gen (eat your own dog food)
# Make the Go-installed protoc plugins (protoc-gen-go, protoc-gen-go-grpc) visible.
export PATH="$PATH:$(go env GOPATH)/bin"
# Output directories for the generated stubs.
# NOTE(review): plain mkdir fails if the directories already exist — consider mkdir -p.
mkdir go typescript
# NOTE(review): hard-coded per-user path to the TypeScript protoc plugin.
PROTOC_GEN_TS_PATH="/home/boris/node_modules/ts-protoc-gen/bin/protoc-gen-ts"
# Generate Go structs and gRPC service stubs next to the .proto layout.
protoc --go_out=go --go_opt=paths=source_relative --go-grpc_out=go --go-grpc_opt=paths=source_relative websocketChannel.proto
# Generate CommonJS JavaScript plus TypeScript definitions.
protoc \
--plugin="protoc-gen-ts=${PROTOC_GEN_TS_PATH}" \
--js_out="import_style=commonjs,binary:typescript" \
--ts_out=typescript \
websocketChannel.proto
#!/bin/bash
# Submits a Confluent Replicator connector (on-prem -> Confluent Cloud) to the
# Connect REST API over mutual TLS. Expects SCHEMA_REGISTRY_URL,
# BASIC_AUTH_CREDENTIALS_SOURCE, SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO,
# BOOTSTRAP_SERVERS, CLOUD_KEY and CLOUD_SECRET in the environment.

HEADER="Content-Type: application/json"
export REPLICATOR_NAME=${REPLICATOR_NAME:-replicate-topic-to-ccloud}
DATA=$( cat << EOF
{
  "name": "${REPLICATOR_NAME}",
  "config": {
    "connector.class": "io.confluent.connect.replicator.ReplicatorSourceConnector",
    "topic.whitelist": "wikipedia.parsed",
    "topic.rename.format": "\${topic}.ccloud.replica",
    "topic.sync": "false",
    "key.converter": "io.confluent.connect.replicator.util.ByteArrayConverter",
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    "value.converter.schema.registry.url": "${SCHEMA_REGISTRY_URL}",
    "value.converter.basic.auth.credentials.source": "${BASIC_AUTH_CREDENTIALS_SOURCE}",
    "value.converter.basic.auth.user.info": "${SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO}",
    "src.value.converter": "io.confluent.connect.avro.AvroConverter",
    "src.value.converter.schema.registry.url": "https://schemaregistry:8085",
    "src.value.converter.schema.registry.ssl.truststore.location": "/etc/kafka/secrets/kafka.client.truststore.jks",
    "src.value.converter.schema.registry.ssl.truststore.password": "confluent",
    "src.value.converter.basic.auth.credentials.source": "USER_INFO",
    "src.value.converter.basic.auth.user.info": "connectorSA:connectorSA",
    "dest.kafka.bootstrap.servers": "${BOOTSTRAP_SERVERS}",
    "dest.kafka.security.protocol": "SASL_SSL",
    "dest.kafka.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"${CLOUD_KEY}\" password=\"${CLOUD_SECRET}\";",
    "dest.kafka.sasl.mechanism": "PLAIN",
    "dest.topic.replication.factor": 3,
    "confluent.topic.replication.factor": 3,
    "src.kafka.bootstrap.servers": "kafka1:10091",
    "src.kafka.security.protocol": "SASL_SSL",
    "src.kafka.ssl.key.password": "confluent",
    "src.kafka.ssl.truststore.location": "/etc/kafka/secrets/kafka.client.truststore.jks",
    "src.kafka.ssl.truststore.password": "confluent",
    "src.kafka.ssl.keystore.location": "/etc/kafka/secrets/kafka.client.keystore.jks",
    "src.kafka.ssl.keystore.password": "confluent",
    "src.kafka.sasl.login.callback.handler.class": "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler",
    "src.kafka.sasl.jaas.config": "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required username=\"connectorSA\" password=\"connectorSA\" metadataServerUrls=\"https://kafka1:8091,https://kafka2:8092\";",
    "src.kafka.sasl.mechanism": "OAUTHBEARER",
    "src.consumer.group.id": "connect-replicator",
    "offset.timestamps.commit": "false",
    "producer.override.bootstrap.servers": "${BOOTSTRAP_SERVERS}",
    "producer.override.security.protocol": "SASL_SSL",
    "producer.override.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"${CLOUD_KEY}\" password=\"${CLOUD_SECRET}\";",
    "producer.override.sasl.mechanism": "PLAIN",
    "producer.override.sasl.login.callback.handler.class": "org.apache.kafka.common.security.authenticator.AbstractLogin\$DefaultLoginCallbackHandler",
    "consumer.override.sasl.jaas.config": "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required username=\"connectorSA\" password=\"connectorSA\" metadataServerUrls=\"https://kafka1:8091,https://kafka2:8092\";",
    "src.kafka.timestamps.producer.interceptor.classes": "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.security.protocol": "SASL_SSL",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.bootstrap.servers": "kafka1:10091",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.ssl.key.password": "confluent",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.ssl.truststore.location": "/etc/kafka/secrets/kafka.client.truststore.jks",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.ssl.truststore.password": "confluent",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.ssl.keystore.location": "/etc/kafka/secrets/kafka.client.keystore.jks",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.ssl.keystore.password": "confluent",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.sasl.login.callback.handler.class": "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.sasl.jaas.config": "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required username=\"connectorSA\" password=\"connectorSA\" metadataServerUrls=\"https://kafka1:8091,https://kafka2:8092\";",
    "src.kafka.timestamps.producer.confluent.monitoring.interceptor.sasl.mechanism": "OAUTHBEARER",
    "producer.override.interceptor.classes": "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor",
    "producer.override.confluent.monitoring.interceptor.security.protocol": "SASL_SSL",
    "producer.override.confluent.monitoring.interceptor.bootstrap.servers": "kafka1:10091",
    "producer.override.confluent.monitoring.interceptor.ssl.key.password": "confluent",
    "producer.override.confluent.monitoring.interceptor.ssl.truststore.location": "/etc/kafka/secrets/kafka.client.truststore.jks",
    "producer.override.confluent.monitoring.interceptor.ssl.truststore.password": "confluent",
    "producer.override.confluent.monitoring.interceptor.ssl.keystore.location": "/etc/kafka/secrets/kafka.client.keystore.jks",
    "producer.override.confluent.monitoring.interceptor.ssl.keystore.password": "confluent",
    "producer.override.confluent.monitoring.interceptor.sasl.login.callback.handler.class": "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler",
    "producer.override.confluent.monitoring.interceptor.sasl.jaas.config": "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required username=\"connectorSA\" password=\"connectorSA\" metadataServerUrls=\"https://kafka1:8091,https://kafka2:8092\";",
    "producer.override.confluent.monitoring.interceptor.sasl.mechanism": "OAUTHBEARER",
    "src.consumer.interceptor.classes": "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor",
    "src.consumer.confluent.monitoring.interceptor.security.protocol": "SASL_SSL",
    "src.consumer.confluent.monitoring.interceptor.bootstrap.servers": "kafka1:10091",
    "src.consumer.confluent.monitoring.interceptor.ssl.key.password": "confluent",
    "src.consumer.confluent.monitoring.interceptor.ssl.truststore.location": "/etc/kafka/secrets/kafka.client.truststore.jks",
    "src.consumer.confluent.monitoring.interceptor.ssl.truststore.password": "confluent",
    "src.consumer.confluent.monitoring.interceptor.ssl.keystore.location": "/etc/kafka/secrets/kafka.client.keystore.jks",
    "src.consumer.confluent.monitoring.interceptor.ssl.keystore.password": "confluent",
    "src.consumer.confluent.monitoring.interceptor.sasl.login.callback.handler.class": "io.confluent.kafka.clients.plugins.auth.token.TokenUserLoginCallbackHandler",
    "src.consumer.confluent.monitoring.interceptor.sasl.jaas.config": "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required username=\"connectorSA\" password=\"connectorSA\" metadataServerUrls=\"https://kafka1:8091,https://kafka2:8092\";",
    "src.consumer.confluent.monitoring.interceptor.sasl.mechanism": "OAUTHBEARER",
    "tasks.max": "1",
    "topic.config.sync": "false",
    "provenance.header.enable": "true"
  }
}
EOF
)
# NOTE: a duplicate "src.consumer.group.id" entry (identical value) was
# removed; duplicate JSON keys are rejected or silently collapsed by parsers.

# POST the connector config over mutual TLS with basic auth.
docker-compose exec connect curl -X POST -H "${HEADER}" --data "${DATA}" --cert /etc/kafka/secrets/connect.certificate.pem --key /etc/kafka/secrets/connect.key --tlsv1.2 --cacert /etc/kafka/secrets/snakeoil-ca-1.crt -u connectorSubmitter:connectorSubmitter https://connect:8083/connectors
|
package com.github.egmerittech.model;
import java.util.Calendar;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
/**
 * JPA entity backing the persistent-token ("remember me") login table.
 * Each row associates an opaque series identifier with its current token,
 * the owning {@link User}, and the instant the token was last presented.
 *
 * @author <NAME>
 */
@Entity
@Table(name = "persistent_logins")
@SuppressWarnings("serial")
public class PersistentLogin extends AbstractEntity {

    /** Opaque series identifier; stays constant for one remembered login. */
    @Column(unique = true, nullable = false)
    protected String series;

    /** Account this remembered login belongs to, joined on username. */
    @ManyToOne(optional = false)
    @JoinColumn(name = "username", referencedColumnName = "username")
    protected User user;

    /** Current rotating token value for this series. */
    @Column(nullable = false)
    protected String token;

    /** Timestamp of the most recent use of this token. */
    @Column(nullable = false)
    protected Calendar lastUsed;

    public String getSeries() {
        return series;
    }

    public void setSeries(String series) {
        this.series = series;
    }

    public String getToken() {
        return token;
    }

    public void setToken(String token) {
        this.token = token;
    }

    public Calendar getLastUsed() {
        return lastUsed;
    }

    public void setLastUsed(Calendar lastUsed) {
        this.lastUsed = lastUsed;
    }

    public User getUser() {
        return user;
    }

    public void setUser(User user) {
        this.user = user;
    }
}
|
#!/bin/bash
#
# Copyright 2016 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Tests the examples provided in Bazel
#
# Load the test setup defined in the parent directory
CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Abort with a clear message if the shared integration-test helpers are missing.
source "${CURRENT_DIR}/../../integration_test_setup.sh" \
  || { echo "integration_test_setup.sh not found!" >&2; exit 1; }

# These tests drive xcodebuild and lipo, so they only run on macOS; exit 0
# (success) elsewhere so the suite is skipped rather than failed.
if [ "${PLATFORM}" != "darwin" ]; then
  echo "This test suite requires running on OS X" >&2
  exit 0
fi
# Per-test fixture: copies the example sources, wires up Objective-C test
# support, records the installed Xcode version, and starts a clean workspace.
function set_up() {
  copy_examples
  setup_objc_test_support

  # Find the version number for an installed Xcode.
  XCODE_VERSION=$(xcodebuild -version | grep ^Xcode | cut -d' ' -f2)

  create_new_workspace
}
# Builds an apple_binary (no srcs of its own) from two objc_library deps for
# two CPUs and verifies via `lipo -info` that the output is a fat binary.
function test_fat_binary_no_srcs() {
  mkdir -p package
  cat > package/BUILD <<EOF
objc_library(
    name = "lib_a",
    srcs = ["a.m"],
)

objc_library(
    name = "lib_b",
    srcs = ["b.m"],
)

apple_binary(
    name = "main_binary",
    deps = [":lib_a", ":lib_b"],
    platform_type = "ios",
    minimum_os_version = "10.0",
)

genrule(
    name = "lipo_run",
    srcs = [":main_binary_lipobin"],
    outs = ["lipo_out"],
    cmd =
        "set -e && " +
        "lipo -info \$(location :main_binary_lipobin) > \$(@)",
    tags = ["requires-darwin"],
)
EOF
  touch package/a.m
  cat > package/b.m <<EOF
int main() {
  return 0;
}
EOF

  bazel build --verbose_failures --xcode_version=$XCODE_VERSION \
      //package:lipo_out --ios_multi_cpus=i386,x86_64 \
      || fail "should build apple_binary and obtain info via lipo"

  # Both requested architectures must be present in the lipo report.
  cat bazel-genfiles/package/lipo_out | grep "i386 x86_64" \
    || fail "expected output binary to contain 2 architectures"
}
# Verifies that the auto-generated @local_config_xcode//:host_xcodes target
# reflects the Xcode version and default SDK versions actually installed.
function test_host_xcodes() {
  XCODE_VERSION=$(env -i xcodebuild -version | grep "Xcode" \
      | sed -E "s/Xcode (([0-9]|.)+).*/\1/")
  IOS_SDK=$(env -i xcodebuild -version -sdk | grep iphoneos \
      | sed -E "s/.*\(iphoneos(([0-9]|.)+)\).*/\1/")
  MACOSX_SDK=$(env -i xcodebuild -version -sdk | grep macosx \
      | sed -E "s/.*\(macosx(([0-9]|.)+)\).*/\1/" | head -n 1)

  # Unfortunately xcodebuild -version doesn't always pad with trailing .0, so,
  # for example, may produce "6.4", which is bad for this test.
  if [[ ! $XCODE_VERSION =~ [0-9].[0-9].[0-9] ]]
  then
    XCODE_VERSION="${XCODE_VERSION}.0"
  fi

  bazel build @local_config_xcode//:host_xcodes >"${TEST_log}" 2>&1 \
     || fail "Expected host_xcodes to build"

  # The repository rule must have produced an xcode_version whose attributes
  # match the values queried from xcodebuild above.
  bazel query "attr(version, $XCODE_VERSION, \
      attr(default_ios_sdk_version, $IOS_SDK, \
      attr(default_macos_sdk_version, $MACOSX_SDK, \
      labels('versions', '@local_config_xcode//:host_xcodes'))))" \
      > xcode_version_target

  assert_contains "local_config_xcode" xcode_version_target

  DEFAULT_LABEL=$(bazel query \
      "labels('default', '@local_config_xcode//:host_xcodes')")

  assert_equals $DEFAULT_LABEL $(cat xcode_version_target)
}
# Builds an iOS apple_binary whose dependency graph mixes objc_library and
# cc_library targets, then checks the fat output via `lipo -info`.
function test_apple_binary_crosstool_ios() {
  rm -rf package
  mkdir -p package
  cat > package/BUILD <<EOF
objc_library(
    name = "lib_a",
    srcs = ["a.m"],
)

objc_library(
    name = "lib_b",
    srcs = ["b.m"],
    deps = [":cc_lib"],
)

cc_library(
    name = "cc_lib",
    srcs = ["cc_lib.cc"],
)

apple_binary(
    name = "main_binary",
    deps = [":main_lib"],
    platform_type = "ios",
    minimum_os_version = "10.0",
)

objc_library(
    name = "main_lib",
    deps = [":lib_a", ":lib_b"],
    srcs = ["main.m"],
)

genrule(
    name = "lipo_run",
    srcs = [":main_binary_lipobin"],
    outs = ["lipo_out"],
    cmd =
        "set -e && " +
        "lipo -info \$(location :main_binary_lipobin) > \$(@)",
    tags = ["requires-darwin"],
)
EOF
  touch package/a.m
  touch package/b.m
  cat > package/main.m <<EOF
int main() {
  return 0;
}
EOF
  cat > package/cc_lib.cc << EOF
#include <string>

std::string GetString() { return "h3ll0"; }
EOF

  bazel build --verbose_failures //package:lipo_out \
      --ios_multi_cpus=i386,x86_64 \
      --xcode_version=$XCODE_VERSION \
      || fail "should build apple_binary and obtain info via lipo"

  cat bazel-genfiles/package/lipo_out | grep "i386 x86_64" \
    || fail "expected output binary to be for x86_64 architecture"
}
# Builds a watchOS apple_binary (device and simulator CPUs) to verify that
# all dependencies — including a cc_library — are compiled for the
# platform_type requested on the binary, not the host default.
function test_apple_binary_crosstool_watchos() {
  rm -rf package
  mkdir -p package
  cat > package/BUILD <<EOF
genrule(
    name = "lipo_run",
    srcs = [":main_binary_lipobin"],
    outs = ["lipo_out"],
    cmd =
        "set -e && " +
        "lipo -info \$(location :main_binary_lipobin) > \$(@)",
    tags = ["requires-darwin"],
)

apple_binary(
    name = "main_binary",
    deps = [":main_lib"],
    platform_type = "watchos",
)

objc_library(
    name = "main_lib",
    srcs = ["main.m"],
    deps = [":lib_a"],
)

cc_library(
    name = "cc_lib",
    srcs = ["cc_lib.cc"],
)

# By depending on a library which requires it is built for watchos,
# this test verifies that dependencies of apple_binary are compiled
# for the specified platform_type.
objc_library(
    name = "lib_a",
    srcs = ["a.m"],
    deps = [":cc_lib"],
)
EOF
  cat > package/main.m <<EOF
#import <WatchKit/WatchKit.h>

// Note that WKExtensionDelegate is only available in Watch SDK.
@interface TestInterfaceMain : NSObject <WKExtensionDelegate>
@end

int main() {
  return 0;
}
EOF
  cat > package/a.m <<EOF
#import <WatchKit/WatchKit.h>

// Note that WKExtensionDelegate is only available in Watch SDK.
@interface TestInterfaceA : NSObject <WKExtensionDelegate>
@end

int aFunction() {
  return 0;
}
EOF
  cat > package/cc_lib.cc << EOF
#include <string>

std::string GetString() { return "h3ll0"; }
EOF

  # Device build (armv7k).
  bazel build --verbose_failures //package:lipo_out \
      --watchos_cpus=armv7k \
      --xcode_version=$XCODE_VERSION \
      || fail "should build watch binary"

  cat bazel-genfiles/package/lipo_out | grep "armv7k" \
    || fail "expected output binary to be for armv7k architecture"

  # Simulator build (i386).
  bazel build --verbose_failures //package:lipo_out \
      --watchos_cpus=i386 \
      --xcode_version=$XCODE_VERSION \
      || fail "should build watch binary"

  cat bazel-genfiles/package/lipo_out | grep "i386" \
    || fail "expected output binary to be for i386 architecture"
}
# Exercises config_setting on the Xcode/iOS-SDK version build flags: three
# xcode_version targets are declared and genrules select() on them; the test
# checks that --xcode_version switches the selected branch (falling through
# to the default branch for a version with no matching config_setting).
function test_xcode_config_select() {
  mkdir -p a
  cat > a/BUILD <<'EOF'
xcode_config(
    name = "xcodes",
    default = ":version10",
    versions = [ ":version10", ":version20", ":version30" ],
    visibility = ["//visibility:public"],
)

xcode_version(
    name = "version10",
    default_ios_sdk_version = "1.1",
    default_macos_sdk_version = "1.2",
    default_tvos_sdk_version = "1.3",
    default_watchos_sdk_version = "1.4",
    version = "1.0",
)

xcode_version(
    name = "version20",
    default_ios_sdk_version = "2.1",
    default_macos_sdk_version = "2.2",
    default_tvos_sdk_version = "2.3",
    default_watchos_sdk_version = "2.4",
    version = "2.0",
)

xcode_version(
    name = "version30",
    default_ios_sdk_version = "3.1",
    default_macos_sdk_version = "3.2",
    default_tvos_sdk_version = "3.3",
    default_watchos_sdk_version = "3.4",
    version = "3.0",
)

config_setting(
    name = "xcode10",
    flag_values = { "@bazel_tools//tools/osx:xcode_version_flag": "1.0" },
)

config_setting(
    name = "xcode20",
    flag_values = { "@bazel_tools//tools/osx:xcode_version_flag": "2.0" },
)

config_setting(
    name = "ios11",
    flag_values = { "@bazel_tools//tools/osx:ios_sdk_version_flag": "1.1" },
)

config_setting(
    name = "ios21",
    flag_values = { "@bazel_tools//tools/osx:ios_sdk_version_flag": "2.1" },
)

genrule(
    name = "xcode",
    srcs = [],
    outs = ["xcodeo"],
    cmd = "echo " + select({
        ":xcode10": "XCODE 1.0",
        ":xcode20": "XCODE 2.0",
        "//conditions:default": "XCODE UNKNOWN",
    }) + " >$@",)

genrule(
    name = "ios",
    srcs = [],
    outs = ["ioso"],
    cmd = "echo " + select({
        ":ios11": "IOS 1.1",
        ":ios21": "IOS 2.1",
        "//conditions:default": "IOS UNKNOWN",
    }) + " >$@",)
EOF

  # Default (no --xcode_version) resolves to the config's default, 1.0.
  bazel build //a:xcode //a:ios --xcode_version_config=//a:xcodes || fail "build failed"
  assert_contains "XCODE 1.0" bazel-genfiles/a/xcodeo
  assert_contains "IOS 1.1" bazel-genfiles/a/ioso

  bazel build //a:xcode //a:ios --xcode_version_config=//a:xcodes \
      --xcode_version=2.0 || fail "build failed"
  assert_contains "XCODE 2.0" bazel-genfiles/a/xcodeo
  assert_contains "IOS 2.1" bazel-genfiles/a/ioso

  # Version 3.0 exists but has no config_setting, so select() falls through.
  bazel build //a:xcode //a:ios --xcode_version_config=//a:xcodes \
      --xcode_version=3.0 || fail "build failed"
  assert_contains "XCODE UNKNOWN" bazel-genfiles/a/xcodeo
  assert_contains "IOS UNKNOWN" bazel-genfiles/a/ioso
}
# Smoke test: a multi-arch apple_binary must still build when dSYM debug
# symbol generation is enabled.
function test_apple_binary_dsym_builds() {
  rm -rf package
  mkdir -p package
  cat > package/BUILD <<EOF
apple_binary(
    name = "main_binary",
    deps = [":main_lib"],
    platform_type = "ios",
    minimum_os_version = "10.0",
)

objc_library(
    name = "main_lib",
    srcs = ["main.m"],
)
EOF
  cat > package/main.m <<EOF
int main() {
  return 0;
}
EOF

  bazel build --verbose_failures //package:main_binary \
      --ios_multi_cpus=i386,x86_64 \
      --xcode_version=$XCODE_VERSION \
      --apple_generate_dsym=true \
      || fail "should build apple_binary with dSYMs"
}
# Regression test: source file names containing spaces must survive the
# toolchain (compile + link + dSYM generation).
function test_apple_binary_spaces() {
  rm -rf package
  mkdir -p package
  cat > package/BUILD <<EOF
apple_binary(
    name = "main_binary",
    deps = [":main_lib"],
    platform_type = "ios",
    minimum_os_version = "10.0",
)

objc_library(
    name = "main_lib",
    srcs = ["the main.m"],
)
EOF
  # Note the deliberate space in the file name.
  cat > "package/the main.m" <<EOF
int main() {
  return 0;
}
EOF

  bazel build --verbose_failures //package:main_binary \
      --ios_multi_cpus=i386,x86_64 \
      --xcode_version=$XCODE_VERSION \
      --apple_generate_dsym=true \
      || fail "should build apple_binary with dSYMs"
}
# Smoke test: apple_static_library builds for multiple iOS CPUs from a
# minimal objc_library dependency.
function test_apple_static_library() {
  rm -rf package
  mkdir -p package
  cat > package/BUILD <<EOF
apple_static_library(
    name = "static_lib",
    deps = [":dummy_lib"],
    platform_type = "ios",
)

objc_library(
    name = "dummy_lib",
    srcs = ["dummy.m"],
)
EOF
  # `used` keeps the symbol so the archive is non-empty.
  cat > "package/dummy.m" <<EOF
static int dummy __attribute__((unused,used)) = 0;
EOF

  bazel build --verbose_failures //package:static_lib \
      --ios_multi_cpus=i386,x86_64 \
      --ios_minimum_os=8.0 \
      --xcode_version=$XCODE_VERSION \
      || fail "should build apple_static_library"
}
run_suite "apple_tests"
|
class NetworkResourceProperties:
    """Simple registry mapping network resource property names to descriptions."""

    def __init__(self):
        # Maps property name -> human-readable description.
        self.properties = {}

    def addProperty(self, name, description):
        """Register (or overwrite) the description for *name*."""
        self.properties[name] = description

    def getProperty(self, name):
        """Return the description for *name*, or a fallback message if unknown."""
        return self.properties.get(name, "Property not found")

    def listProperties(self):
        """Return every registered property name in alphabetical order."""
        return sorted(self.properties)
# Example usage: populate the registry, then demonstrate hit, miss (falls
# back to the sentinel string rather than raising), and sorted listing.
networkProperties = NetworkResourceProperties()
networkProperties.addProperty('NetworkResourcePropertiesBase', 'Description of NetworkResourcePropertiesBase')
networkProperties.addProperty('TcpConfig', 'Description of TcpConfig')
networkProperties.addProperty('HttpRouteMatchPath', 'Description of HttpRouteMatchPath')

print(networkProperties.getProperty('TcpConfig'))  # Output: Description of TcpConfig
print(networkProperties.getProperty('HttpHostConfig'))  # Output: Property not found
print(networkProperties.listProperties())  # Output: ['HttpRouteMatchPath', 'NetworkResourcePropertiesBase', 'TcpConfig']
import re
import subprocess
def get_mac_address(ip_address):
    """Look up the MAC address for *ip_address* in the local ARP table.

    Runs ``arp -a <ip>`` and extracts the first colon-separated lowercase
    hex MAC address from its output.

    Raises:
        subprocess.CalledProcessError: if ``arp`` exits non-zero.
        ValueError: if no MAC address appears in the output (previously an
            opaque IndexError).
    """
    # Pass arguments as a list with shell=False so a malicious ip_address
    # cannot inject shell syntax (the original concatenated it into a
    # shell=True command string).
    process = subprocess.run(
        ["arp", "-a", ip_address],
        check=True, stdout=subprocess.PIPE, universal_newlines=True)
    # Same pattern as before: 6 groups of 1-2 lowercase hex digits, ':'-separated.
    matches = re.findall(r"(([a-f\d]{1,2}:){5}[a-f\d]{1,2})", process.stdout)
    if not matches:
        raise ValueError(
            "no MAC address found in arp output for %s" % ip_address)
    return matches[0][0]
<reponame>k11n/konstellation
package aws
// EKSAvailableVersions lists the Kubernetes control-plane versions offered
// when creating an EKS cluster, newest first.
var EKSAvailableVersions = []string{
	"1.19",
	"1.18",
	"1.17",
	"1.16",
}

// EKSAllowedInstanceSeries whitelists the EC2 instance families permitted
// for EKS node groups.
var EKSAllowedInstanceSeries = map[string]bool{
	"t3":   true,
	"t3a":  true,
	"m5":   true,
	"m5a":  true,
	"c5":   true,
	"r5":   true,
	"r5a":  true,
	"g4dn": true,
	"p2":   true,
	"p3":   true,
	"p3dn": true,
}

// EKSAllowedInstanceSizes whitelists the size suffixes permitted for node
// instance types ("medium" and smaller sizes below are the floor).
// Istio requires 2G of memory for itself, need a bit more breathing room
// realistically the xlarge+ are better instance types
var EKSAllowedInstanceSizes = map[string]bool{
	"medium":   true,
	"large":    true,
	"xlarge":   true,
	"2xlarge":  true,
	"4xlarge":  true,
	"8xlarge":  true,
	"9xlarge":  true,
	"12xlarge": true,
}
|
// @flow
import { type AuslastungProps, getCurrentStationFromProps, getTrainIdFromProps } from './auslastung';
import { createSelector } from 'reselect';
import type { AppState } from 'AppState';
import type { Reihung } from 'types/reihung';
// Selects the raw carriage-formation (Reihung) lookup table from the store.
export const getReihung = (state: AppState) => state.reihung.reihung;

// Memoized selector resolving the formation for a specific train at the
// station currently shown; the table is keyed by trainId concatenated with
// the station name. Yields undefined when no entry has been loaded.
export const getReihungForId = createSelector<
  AppState,
  AuslastungProps,
  ?Reihung,
  $PropertyType<$PropertyType<AppState, 'reihung'>, 'reihung'>,
  string,
  string
>(
  getReihung,
  getTrainIdFromProps,
  getCurrentStationFromProps,
  (reihung, trainId, currentStation) => reihung[trainId + currentStation]
);
|
<reponame>dan-seol/C
#include <stdio.h>
/*
 * Reads stdin one character at a time and prints a running count of the
 * characters read, one count per line.
 */
int main(void)
{
    long nc = 0;
    int c;

    /*
     * getchar() returns the int EOF (typically -1) at end of input. The
     * original loop compared against '\0', which EOF never equals, so the
     * loop could not terminate at end-of-file. The result must be stored
     * in an int (not char) so EOF is distinguishable from valid bytes.
     */
    while ((c = getchar()) != EOF) {
        nc++;
        printf("%ld\n", nc);
    }
    return 0;
}
|
#!/bin/bash
# Run the end-to-end suites under both supported Python majors.
# -e: abort immediately if the Python 2 run fails so its failure isn't
# masked by a passing Python 3 run.
set -e

./run_end_to_end_tests_python2.sh
./run_end_to_end_tests_python3.sh
class CustomObject:
    """Base class implementing a two-phase copy protocol.

    `copy()` creates a fresh instance and delegates attribute transfer to
    `__copy_fill__`, which subclasses override (chaining to `super()`) to
    copy their own state.
    """

    def __init__(self):
        # Initialize default attributes here
        self.attribute1 = None
        self.attribute2 = None

    def copy(self):
        """Return a freshly instantiated copy of *self*.

        Bug fix: the clone's attributes were previously never populated —
        `__copy_fill__` existed but was never invoked.
        """
        clone = type(self)()
        self.__copy_fill__(clone)
        return clone

    def __copy_fill__(self, clone):
        """
        Copy all necessary attributes to the new object.

        Subclasses should call the `super()` method as the first statement.
        """
        # Only chain upward if the superclass participates in the protocol;
        # plain `object` has no __copy_fill__, so the previous unconditional
        # super() call raised AttributeError for direct subclasses of object.
        parent_fill = getattr(super(), '__copy_fill__', None)
        if parent_fill is not None:
            parent_fill(clone)
        clone.attribute1 = self.attribute1
        clone.attribute2 = self.attribute2
<gh_stars>0
// Application entry point: boots Vue with client-side routing and mounts
// the root component on #app.
require('./bootstrap');

import Vue from 'vue'
import VueRouter from 'vue-router'
import routes from './routes'
import Vuex from 'vuex'

Vue.use(VueRouter)
// NOTE(review): Vuex is imported above but never registered via Vue.use()
// and no store is created — confirm whether state management is still
// planned or the import should be removed.

Vue.component('app-main', require('./components/App.vue').default);

const app = new Vue({
    el: '#app',
    router: new VueRouter(routes)
});
|
import pyautogui
import time
def automate_click(x, y, fallback_image=r'H:\msteamsbot\microsoft-teams.PNG'):
    """Click at screen coordinates (x, y), with an image-based fallback.

    If the direct click raises, the screen is searched for
    *fallback_image* (new optional parameter; defaults to the previously
    hard-coded path, now a raw string so backslashes are literal) and the
    center of the first match is clicked instead.
    """
    try:
        pyautogui.click(x, y)
        time.sleep(1)
    except pyautogui.FailSafeException:
        # Mouse was slammed into a screen corner (pyautogui's abort gesture).
        print("Failed to click at the specified coordinates due to fail-safe exception")
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; `Exception` keeps the process interruptible.
        # NOTE(review): `confidence=` requires OpenCV to be installed — confirm.
        img_location = pyautogui.locateOnScreen(fallback_image, confidence=0.6)
        if img_location is not None:
            x, y = pyautogui.center(img_location)
            pyautogui.click(x, y)
            time.sleep(1)
        else:
            print("Image not found on the screen")
# Call the function with the coordinates to click
automate_click(100, 200)
package chylex.hee.render.tileentity;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.ItemRenderer;
import net.minecraft.client.renderer.OpenGlHelper;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.renderer.entity.RenderManager;
import net.minecraft.client.renderer.texture.TextureMap;
import net.minecraft.client.renderer.tileentity.TileEntitySpecialRenderer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.IIcon;
import net.minecraft.world.World;
import chylex.hee.init.ItemList;
import chylex.hee.item.ItemSpecialEffects;
import chylex.hee.render.BlockRenderHelper;
import chylex.hee.system.abstractions.GL;
import chylex.hee.system.util.DragonUtil;
import chylex.hee.tileentity.TileEntityEssenceAltar;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Tile entity special renderer for the Essence Altar.
 *
 * Draws three stage-dependent overlays above the altar block: while working,
 * the essence icon plus a digit readout of the stored essence level; while
 * awaiting a rune item, the floating item the player must supply; and in all
 * cases a rotating ring of eight glyphs around the altar top.
 */
@SideOnly(Side.CLIENT)
public class RenderTileEssenceAltar extends TileEntitySpecialRenderer{
    // Shared block renderer, (re)created per world via func_147496_a below.
    private static RenderBlocks blockRenderer;

    private RenderManager renderManager;
    private TileEntityEssenceAltar altar;
    // World time of the last rotation tick; rotations advance once per tick.
    private long lastRotationUpdateTime;
    // Glyph ring angle (half-degrees, 0..719) and floating item angle (0..359).
    private short glyphRot, requiredItemRot;
    // Yaw that makes billboarded elements face the viewer.
    private double viewRot;

    public RenderTileEssenceAltar(){
        renderManager = RenderManager.instance;
    }

    @Override
    public void renderTileEntityAt(TileEntity tile, double x, double y, double z, float partialTickTime){
        GL.pushMatrix();
        GL.translate((float)x, (float)y, (float)z);

        Tessellator tessellator = Tessellator.instance;
        GL.color(1F, 1F, 1F, 1F);
        tessellator.setColorOpaque_F(1F, 1F, 1F);

        // Full-bright lightmap coordinates so the overlay ignores block light.
        int l = 15728880;
        OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, l%65536, l/65536);

        GL.pushMatrix();
        GL.translate(0.5F, 1.25F, 0.5F);
        GL.scale(0.5F, 0.5F, 0.5F);
        GL.enableRescaleNormal();

        altar = (TileEntityEssenceAltar)tile;
        // Yaw from altar center toward the camera (billboard angle).
        viewRot = 180D+Math.toDegrees(Math.atan2(renderManager.viewerPosX-altar.xCoord-0.5D, renderManager.viewerPosZ-altar.zCoord-0.5D));

        // Advance both rotation counters at most once per world tick, so the
        // animation speed is frame-rate independent.
        long time = altar.getWorldObj().getTotalWorldTime();
        if (time != lastRotationUpdateTime){
            if (++requiredItemRot >= 360)requiredItemRot -= 360;
            if (++glyphRot >= 720)glyphRot -= 720;
            lastRotationUpdateTime = time;
        }

        renderAltar();

        GL.disableRescaleNormal();
        GL.popMatrix();
        GL.popMatrix();
    }

    private void renderAltar(){
        byte stage = altar.getStage();

        if (stage == TileEntityEssenceAltar.STAGE_WORKING){
            /*
             * ESSENCE ICON AND AMOUNT
             */
            startVerticalItem();
            renderItem(ItemList.essence, altar.getEssenceType().getItemDamage());
            endVerticalItem();

            GL.pushMatrix();
            GL.translate(0F, -0.8F, 0F);
            GL.pushMatrix();
            GL.rotate(viewRot, 0F, 1F, 0F);
            GL.translate(-0.5F, -0.25F, 0F);

            // Render the essence level as a row of digit sprites, centered.
            String ns = String.valueOf(altar.getEssenceLevel());
            GL.translate(+0.4F*(ns.length()*0.5F)-0.2F, 0F, 0F);

            for(int a = 0; a < ns.length(); a++){
                GL.pushMatrix();
                GL.translate(-0.4F*a, 0F, 0F);
                renderItem(ItemList.special_effects, DragonUtil.tryParse(ns.substring(a, a+1), 0));
                GL.popMatrix();
            }

            GL.popMatrix();
            GL.popMatrix();

            altar.getActionHandler().onRender();
        }
        else if (stage == TileEntityEssenceAltar.STAGE_HASTYPE){
            /*
             * FLOATING ITEM
             */
            ItemStack is = altar.getShowedRuneItem();

            if (is != null){
                GL.pushMatrix();

                // 3D blocks spin as blocks; everything else renders as a
                // flat item sprite billboarded toward the viewer.
                if (is.getItemSpriteNumber() == 0 && BlockRenderHelper.shouldRenderBlockIn3D(Block.getBlockFromItem(is.getItem()))){
                    GL.scale(0.7F, 0.7F, 0.7F);
                    GL.rotate(requiredItemRot, 0F, 1F, 0F);
                    BlockRenderHelper.renderBlockAsItem(blockRenderer, Block.getBlockFromItem(is.getItem()), is.getItemDamage());
                }
                else{
                    GL.scale(0.85F, 0.85F, 0.85F);
                    GL.rotate(++requiredItemRot >= 360 ? requiredItemRot -= 360 : requiredItemRot, 0F, 1F, 0F);
                    GL.translate(-0.5F, -0.5F, 0F);
                    renderManager.renderEngine.bindTexture(is.getItemSpriteNumber() == 0?TextureMap.locationBlocksTexture:TextureMap.locationItemsTexture);
                    IIcon icon = is.getItem().getIconFromDamage(is.getItemDamage());
                    ItemRenderer.renderItemIn2D(Tessellator.instance, icon.getMaxU(), icon.getMinV(), icon.getMinU(), icon.getMaxV(), icon.getIconWidth(), icon.getIconHeight(), 0.0625F);
                }

                GL.popMatrix();
            }
        }

        /*
         * GLYPHS
         */
        GL.pushMatrix();
        GL.translate(-0.5F, -1F, -0.5F);
        GL.scale(0.5F, 0.02F, 0.5F);
        GL.translate(1F, 0F, 1F);
        GL.rotate(45F+glyphRot*0.5F, 0F, 1F, 0F);

        float[] glyphCols = altar.getEssenceType().glyphColors;
        double angpart = Math.PI/4D;
        byte index = altar.getRuneItemIndex();

        // Eight glyphs evenly spaced (45 degrees apart) around the ring.
        // index == -1: all in essence colors; otherwise glyphs below index
        // are highlighted gold and the rest rendered plain white.
        for(int a = 0; a < 8; a++){
            GL.pushMatrix();
            GL.translate(Math.cos(a*angpart)*0.8D, 0F, Math.sin(a*angpart)*0.8D);
            GL.rotate(90F, 1F, 0F, 0F);
            GL.rotate(45F*a+30, 0F, 0F, 1F);

            if (index == -1)renderItem(ItemList.special_effects, ItemSpecialEffects.glyphIndex+a, glyphCols[0], glyphCols[1], glyphCols[2]);
            else if (index > a)renderItem(ItemList.special_effects, ItemSpecialEffects.glyphIndex+a, 0.9725F, 0.8265F, 0.225F);
            else renderItem(ItemList.special_effects, ItemSpecialEffects.glyphIndex+a, 1F, 1F, 1F);

            GL.popMatrix();
        }

        GL.popMatrix();
    }

    // Pushes a matrix billboarded toward the viewer; pair with endVerticalItem.
    private void startVerticalItem(){
        GL.pushMatrix();
        GL.rotate(viewRot, 0F, 1F, 0F);
        GL.translate(-0.5F, -0.25F, 0F);
    }

    private void endVerticalItem(){
        GL.popMatrix();
    }

    private void renderItem(Item item, int damage){
        renderItem(item, damage, 1F, 1F, 1F);
    }

    // Renders the item's 2D sprite in world space with an RGB tint.
    private void renderItem(Item item, int damage, float red, float green, float blue){
        renderManager.renderEngine.bindTexture(item.getSpriteNumber() == 0 ? TextureMap.locationBlocksTexture : TextureMap.locationItemsTexture);
        GL.color(red, green, blue, 1F);
        IIcon icon = item.getIconFromDamage(damage);
        ItemRenderer.renderItemIn2D(Tessellator.instance, icon.getMaxU(), icon.getMinV(), icon.getMinU(), icon.getMaxV(), icon.getIconWidth(), icon.getIconHeight(), 0.0625F);
    }

    @Override
    public void func_147496_a(World world){ // OBFUSCATED create block renderer
        blockRenderer = new RenderBlocks(world);
    }
}
|
# -*- coding: UTF-8 -*-
"""Definitions for `Fitter` class."""
import codecs
import gc
import json
import os
import time
import warnings
from collections import OrderedDict
from copy import deepcopy
import numpy as np
from astrocats.catalog.entry import ENTRY, Entry
from astrocats.catalog.model import MODEL
from astrocats.catalog.photometry import PHOTOMETRY
from astrocats.catalog.quantity import QUANTITY
from astrocats.catalog.realization import REALIZATION
from astrocats.catalog.source import SOURCE
from mosfit.converter import Converter
from mosfit.fetcher import Fetcher
from mosfit.printer import Printer
from mosfit.samplers.ensembler import Ensembler
from mosfit.samplers.nester import Nester
from mosfit.utils import (all_to_list, entabbed_json_dump, entabbed_json_dumps,
flux_density_unit, frequency_unit, get_model_hash,
listify, open_atomic, slugify, speak)
from schwimmbad import MPIPool, SerialPool
from six import string_types
from .model import Model
warnings.filterwarnings("ignore")
def draw_walker(test=True, walkers_pool=None, replace=False, weights=None):
    """Draw a walker from the global model variable.

    Module-level wrapper so multiprocessing pools can pickle and call it;
    `model` is set at module scope (see `Fitter.fit_events`).
    """
    global model
    if walkers_pool is None:
        # A mutable default argument ([]) would be one shared list across
        # every call; create a fresh list per call instead.
        walkers_pool = []
    return model.draw_walker(
        test, walkers_pool, replace, weights)  # noqa: F821
def draw_from_icdf(x):
    """Draw from the inverse CDF using the global model variable."""
    # Top-level wrapper so multiprocessing pools can pickle it; `model` is a
    # module global declared in `Fitter.fit_events`.
    global model
    return model.draw_from_icdf(x)  # noqa: F821
def ln_likelihood(x):
    """Return ln(likelihood) using the global model variable."""
    # Top-level wrapper so multiprocessing pools can pickle it.
    global model
    return model.ln_likelihood(x)  # noqa: F821
def ln_likelihood_floored(x):
    """Return floored ln(likelihood) using the global model variable."""
    # Top-level wrapper so multiprocessing pools can pickle it.
    global model
    return model.ln_likelihood_floored(x)  # noqa: F821
def ln_prior(x):
    """Return ln(prior) using the global model variable."""
    # Top-level wrapper so multiprocessing pools can pickle it.
    global model
    return model.ln_prior(x)  # noqa: F821
def frack(x):
    """Frack at the specified parameter combination."""
    # Top-level wrapper so multiprocessing pools can pickle it.
    global model
    return model.frack(x)  # noqa: F821
class Fitter(object):
"""Fit transient events with the provided model."""
_DEFAULT_SOURCE = {SOURCE.BIBCODE: '2017arXiv171002145G'}
    def __init__(self,
                 cuda=False,
                 exit_on_prompt=False,
                 language='en',
                 limiting_magnitude=None,
                 prefer_fluxes=False,
                 offline=False,
                 prefer_cache=False,
                 open_in_browser=False,
                 pool=None,
                 quiet=False,
                 test=False,
                 wrap_length=100,
                 **kwargs):
        """Initialize `Fitter` class.

        Constructs the printer and data fetcher helpers and stores the
        remaining flags on the instance for later use during fitting.
        `pool` may be any schwimmbad-style pool; when ``None`` a serial
        pool is used. When `cuda` is set, scikit-cuda's linalg backend is
        initialized opportunistically.
        """
        # Fall back to serial execution when no worker pool was supplied.
        self._pool = SerialPool() if pool is None else pool
        self._printer = Printer(
            pool=self._pool, wrap_length=wrap_length, quiet=quiet, fitter=self,
            language=language, exit_on_prompt=exit_on_prompt)
        self._fetcher = Fetcher(test=test, open_in_browser=open_in_browser,
                                printer=self._printer)

        self._cuda = cuda
        self._limiting_magnitude = limiting_magnitude
        self._prefer_fluxes = prefer_fluxes
        self._offline = offline
        self._prefer_cache = prefer_cache
        self._open_in_browser = open_in_browser
        self._quiet = quiet
        self._test = test
        self._wrap_length = wrap_length

        if self._cuda:
            try:
                # Optional GPU acceleration; silently fall back to CPU when
                # pycuda / scikit-cuda are not installed.
                import pycuda.autoinit  # noqa: F401
                import skcuda.linalg as linalg
                linalg.init()
            except ImportError:
                pass
    def fit_events(self,
                   events=[],
                   models=[],
                   max_time='',
                   time_list=[],
                   time_unit=None,
                   band_list=[],
                   band_systems=[],
                   band_instruments=[],
                   band_bandsets=[],
                   band_sampling_points=17,
                   iterations=10000,
                   num_walkers=None,
                   num_temps=1,
                   parameter_paths=['parameters.json'],
                   fracking=True,
                   frack_step=50,
                   burn=None,
                   post_burn=None,
                   gibbs=False,
                   smooth_times=-1,
                   extrapolate_time=0.0,
                   limit_fitting_mjds=False,
                   exclude_bands=[],
                   exclude_instruments=[],
                   exclude_systems=[],
                   exclude_sources=[],
                   exclude_kinds=[],
                   output_path='',
                   suffix='',
                   upload=False,
                   write=False,
                   upload_token='',
                   check_upload_quality=False,
                   variance_for_each=[],
                   user_fixed_parameters=[],
                   convergence_type=None,
                   convergence_criteria=None,
                   save_full_chain=False,
                   draw_above_likelihood=False,
                   maximum_walltime=False,
                   start_time=False,
                   print_trees=False,
                   maximum_memory=np.inf,
                   speak=False,
                   return_fits=True,
                   extra_outputs=[],
                   walker_paths=[],
                   catalogs=[],
                   exit_on_prompt=False,
                   download_recommended_data=False,
                   local_data_only=False,
                   method=None,
                   seed=None,
                   **kwargs):
        """Fit a list of events with a list of models.

        For every fetched event and every requested model, the event data
        is loaded into a fresh `Model` (optionally pulling recommended
        extra data from online catalogs after prompting the user), then
        the actual sampling is delegated to `fit_data`.

        Returns a ``(entries, ps, lnprobs)`` tuple indexed as
        ``[event][model]`` (placeholders stay ``None`` when a fit did not
        run or ``return_fits`` is False).

        NOTE(review): the many mutable default arguments (``events=[]``
        etc.) are shared across calls; none appear to be mutated here, but
        avoid mutating them in future edits.
        """
        global model
        if start_time is False:
            start_time = time.time()
        self._seed = seed
        # Seed numpy's global RNG for reproducible runs when requested.
        if seed is not None:
            np.random.seed(seed)
        self._start_time = start_time
        self._maximum_walltime = maximum_walltime
        self._maximum_memory = maximum_memory
        self._debug = False
        self._speak = speak
        self._download_recommended_data = download_recommended_data
        self._local_data_only = local_data_only
        self._draw_above_likelihood = draw_above_likelihood

        prt = self._printer

        event_list = listify(events)
        model_list = listify(models)

        # Models without events get a single dummy (empty-named) event.
        if len(model_list) and not len(event_list):
            event_list = ['']

        # Exclude catalogs not included in catalog list.
        self._fetcher.add_excluded_catalogs(catalogs)

        if not len(event_list) and not len(model_list):
            prt.message('no_events_models', warning=True)

        # If the input is not a JSON file, assume it is either a list of
        # transients or that it is the data from a single transient in tabular
        # form. Try to guess the format first, and if that fails ask the user.
        self._converter = Converter(prt, require_source=upload)
        event_list = self._converter.generate_event_list(event_list)

        # Normalize unicode non-breaking hyphens to ASCII dashes in names.
        event_list = [x.replace('‑', '-') for x in event_list]

        entries = [[] for x in range(len(event_list))]
        ps = [[] for x in range(len(event_list))]
        lnprobs = [[] for x in range(len(event_list))]

        # Load walker data if provided a list of walker paths; only the
        # master process reads the files, then broadcasts to the workers.
        walker_data = []

        if len(walker_paths):
            try:
                pool = MPIPool()
            except (ImportError, ValueError):
                pool = SerialPool()
            if pool.is_master():
                prt.message('walker_file')
                wfi = 0
                for walker_path in walker_paths:
                    if os.path.exists(walker_path):
                        prt.prt(' {}'.format(walker_path))
                        with codecs.open(walker_path, 'r',
                                         encoding='utf-8') as f:
                            all_walker_data = json.load(
                                f, object_pairs_hook=OrderedDict)

                        # Support both the format where all data stored in a
                        # single-item dictionary (the OAC format) and the older
                        # MOSFiT format where the data was stored in the
                        # top-level dictionary.
                        if ENTRY.NAME not in all_walker_data:
                            all_walker_data = all_walker_data[
                                list(all_walker_data.keys())[0]]

                        models = all_walker_data.get(ENTRY.MODELS, [])
                        choice = None
                        # Multiple stored models: ask which one to seed from.
                        if len(models) > 1:
                            model_opts = [
                                '{}-{}-{}'.format(
                                    x['code'], x['name'], x['date'])
                                for x in models]
                            choice = prt.prompt(
                                'select_model_walkers', kind='select',
                                message=True, options=model_opts)
                            choice = model_opts.index(choice)
                        elif len(models) == 1:
                            choice = 0

                        if choice is not None:
                            # Each seed is [file index, parameters, weight].
                            walker_data.extend([
                                [wfi, x[REALIZATION.PARAMETERS], x.get(
                                    REALIZATION.WEIGHT)]
                                for x in models[choice][
                                    MODEL.REALIZATIONS]])

                        for i in range(len(walker_data)):
                            if walker_data[i][2] is not None:
                                walker_data[i][2] = float(walker_data[i][2])

                        if not len(walker_data):
                            prt.message('no_walker_data')
                    else:
                        # Missing walker file is fatal.
                        prt.message('no_walker_data')
                        if self._offline:
                            prt.message('omit_offline')
                        raise RuntimeError
                    wfi = wfi + 1

                # Broadcast the seed walkers to all worker ranks.
                for rank in range(1, pool.size + 1):
                    pool.comm.send(walker_data, dest=rank, tag=3)
            else:
                walker_data = pool.comm.recv(source=0, tag=3)
                pool.wait()

            if pool.is_master():
                pool.close()

        self._event_name = 'Batch'
        self._event_path = ''
        self._event_data = {}

        # Fetch all requested events on the master, broadcast to workers.
        try:
            pool = MPIPool()
        except (ImportError, ValueError):
            pool = SerialPool()
        if pool.is_master():
            fetched_events = self._fetcher.fetch(
                event_list, offline=self._offline,
                prefer_cache=self._prefer_cache)

            for rank in range(1, pool.size + 1):
                pool.comm.send(fetched_events, dest=rank, tag=0)
            pool.close()
        else:
            fetched_events = pool.comm.recv(source=0, tag=0)
            pool.wait()

        for ei, event in enumerate(fetched_events):
            if event is not None:
                self._event_name = event.get('name', 'Batch')
                self._event_path = event.get('path', '')
                if not self._event_path:
                    continue

                self._event_data = self._fetcher.load_data(event)
                if not self._event_data:
                    continue

            # No models requested: fit with the (empty-named) default.
            if model_list:
                lmodel_list = model_list
            else:
                lmodel_list = ['']

            entries[ei] = [None for y in range(len(lmodel_list))]
            ps[ei] = [None for y in range(len(lmodel_list))]
            lnprobs[ei] = [None for y in range(len(lmodel_list))]

            # Skip real events that ended up with no photometry to fit.
            if (event is not None and (not self._event_data or
                                       ENTRY.PHOTOMETRY not in
                                       self._event_data[
                                           list(self._event_data.keys())[0]])):
                prt.message('no_photometry', [self._event_name])
                continue

            for mi, mod_name in enumerate(lmodel_list):
                for parameter_path in parameter_paths:
                    try:
                        pool = MPIPool()
                    except (ImportError, ValueError):
                        pool = SerialPool()
                    self._model = Model(
                        model=mod_name,
                        data=self._event_data,
                        parameter_path=parameter_path,
                        output_path=output_path,
                        wrap_length=self._wrap_length,
                        test=self._test,
                        printer=prt,
                        fitter=self,
                        pool=pool,
                        print_trees=print_trees)

                    if not self._model._model_name:
                        prt.message('no_models_avail', [
                            self._event_name], warning=True)
                        continue

                    # Without a real event, simulate placeholder photometry
                    # from the priors so the model still has data to fit.
                    if not event:
                        prt.message('gen_dummy')
                        self._event_name = mod_name
                        gen_args = {
                            'name': mod_name,
                            'max_time': max_time,
                            'time_list': time_list,
                            'band_list': band_list,
                            'band_systems': band_systems,
                            'band_instruments': band_instruments,
                            'band_bandsets': band_bandsets
                        }
                        self._event_data = self.generate_dummy_data(**gen_args)

                    success = False
                    alt_name = None
                    # Loop: load data, optionally merge online extras, and
                    # reload until the load succeeds without missing keys.
                    while not success:
                        self._model.reset_unset_recommended_keys()
                        success = self._model.load_data(
                            self._event_data,
                            event_name=self._event_name,
                            smooth_times=smooth_times,
                            extrapolate_time=extrapolate_time,
                            limit_fitting_mjds=limit_fitting_mjds,
                            exclude_bands=exclude_bands,
                            exclude_instruments=exclude_instruments,
                            exclude_systems=exclude_systems,
                            exclude_sources=exclude_sources,
                            exclude_kinds=exclude_kinds,
                            time_list=time_list,
                            time_unit=time_unit,
                            band_list=band_list,
                            band_systems=band_systems,
                            band_instruments=band_instruments,
                            band_bandsets=band_bandsets,
                            band_sampling_points=band_sampling_points,
                            variance_for_each=variance_for_each,
                            user_fixed_parameters=user_fixed_parameters,
                            pool=pool)
                        if not success:
                            break

                        if self._local_data_only:
                            break

                        # If our data is missing recommended keys, offer the
                        # user option to pull the missing data from online and
                        # merge with existing data.
                        urk = self._model.get_unset_recommended_keys()
                        ptxt = prt.text('acquire_recommended', [
                            ', '.join(list(urk))])
                        while event and len(urk) and (
                                alt_name or self._download_recommended_data or
                                prt.prompt(
                                    ptxt, [', '.join(urk)], kind='bool')):
                            try:
                                pool = MPIPool()
                            except (ImportError, ValueError):
                                pool = SerialPool()
                            if pool.is_master():
                                en = (alt_name if alt_name
                                      else self._event_name)
                                extra_event = self._fetcher.fetch(
                                    en, offline=self._offline,
                                    prefer_cache=self._prefer_cache)[0]
                                extra_data = self._fetcher.load_data(
                                    extra_event)
                                for rank in range(1, pool.size + 1):
                                    pool.comm.send(extra_data, dest=rank,
                                                   tag=4)
                                pool.close()
                            else:
                                extra_data = pool.comm.recv(
                                    source=0, tag=4)
                                pool.wait()

                            if extra_data is not None:
                                # Merge the fetched values for each missing
                                # key into the local event data, then force
                                # a reload of the merged data.
                                extra_data = extra_data[
                                    list(extra_data.keys())[0]]
                                for key in urk:
                                    new_val = extra_data.get(key)
                                    self._event_data[list(
                                        self._event_data.keys())[0]][
                                            key] = new_val
                                    if new_val is not None and len(
                                            new_val):
                                        prt.message('extra_value', [
                                            key, str(new_val[0].get(
                                                QUANTITY.VALUE))])
                                success = False
                                prt.message('reloading_merged')
                                break
                            else:
                                # Nothing found under this name; let the
                                # user suggest an alternative alias.
                                text = prt.text(
                                    'extra_not_found', [self._event_name])
                                alt_name = prt.prompt(text, kind='string')
                                if not alt_name:
                                    break

                    if success:
                        self._walker_data = walker_data

                        entry, p, lnprob = self.fit_data(
                            event_name=self._event_name,
                            method=method,
                            iterations=iterations,
                            num_walkers=num_walkers,
                            num_temps=num_temps,
                            burn=burn,
                            post_burn=post_burn,
                            fracking=fracking,
                            frack_step=frack_step,
                            gibbs=gibbs,
                            pool=pool,
                            output_path=output_path,
                            suffix=suffix,
                            write=write,
                            upload=upload,
                            upload_token=upload_token,
                            check_upload_quality=check_upload_quality,
                            convergence_type=convergence_type,
                            convergence_criteria=convergence_criteria,
                            save_full_chain=save_full_chain,
                            extra_outputs=extra_outputs)
                        if return_fits:
                            entries[ei][mi] = deepcopy(entry)
                            ps[ei][mi] = deepcopy(p)
                            lnprobs[ei][mi] = deepcopy(lnprob)

                    if pool.is_master():
                        pool.close()

        # Remove global model variable and garbage collect.
        try:
            model
        except NameError:
            pass
        else:
            del(model)
        del(self._model)
        gc.collect()

        return (entries, ps, lnprobs)
    def fit_data(self,
                 event_name='',
                 method=None,
                 iterations=None,
                 frack_step=20,
                 num_walkers=None,
                 num_temps=1,
                 burn=None,
                 post_burn=None,
                 fracking=True,
                 gibbs=False,
                 pool=None,
                 output_path='',
                 suffix='',
                 write=False,
                 upload=False,
                 upload_token='',
                 check_upload_quality=True,
                 convergence_type=None,
                 convergence_criteria=None,
                 save_full_chain=False,
                 extra_outputs=[]):
        """Fit the data for a given event.

        Fitting performed using a combination of emcee and fracking.

        Runs the selected sampler (`Nester` when ``method == 'nester'``,
        otherwise `Ensembler`), then converts the posterior samples into
        `Entry` objects carrying model photometry and per-realization
        parameters, optionally writing the products to disk and/or
        uploading them.

        Returns ``(entry, samples, probs)`` on the master process and
        ``(None, None, None)`` on worker processes.
        """
        if self._speak:
            speak('Fitting ' + event_name, self._speak)
        from mosfit.__init__ import __version__
        global model
        # Expose the model at module scope so pool workers (which call the
        # module-level wrapper functions above) can reach it.
        model = self._model
        prt = self._printer

        upload_model = upload and iterations > 0

        if pool is not None:
            self._pool = pool

        if upload:
            # dropbox is only needed when uploading; in test mode a missing
            # package is tolerated.
            try:
                import dropbox
            except ImportError:
                if self._test:
                    pass
                else:
                    prt.message('install_db', error=True)
                    raise

        # Worker processes just service the master until released.
        if not self._pool.is_master():
            try:
                self._pool.wait()
            except (KeyboardInterrupt, SystemExit):
                pass
            return (None, None, None)

        self._method = method

        if self._method == 'nester':
            self._sampler = Nester(
                self, model, iterations, burn, post_burn,
                num_walkers, convergence_criteria, convergence_type, gibbs,
                fracking, frack_step)
        else:
            self._sampler = Ensembler(
                self, model, iterations, burn, post_burn, num_temps,
                num_walkers, convergence_criteria, convergence_type, gibbs,
                fracking, frack_step)

        self._sampler.run(self._walker_data)

        prt.message('constructing')

        if write:
            if self._speak:
                speak(prt._strings['saving_output'], self._speak)

        # Start from the existing event file when available (dropping any
        # photometry produced by a previous fit), else from a fresh entry.
        if self._event_path:
            entry = Entry.init_from_file(
                catalog=None,
                name=self._event_name,
                path=self._event_path,
                merge=False,
                pop_schema=False,
                ignore_keys=[ENTRY.MODELS],
                compare_to_existing=False)
            new_photometry = []
            for photo in entry.get(ENTRY.PHOTOMETRY, []):
                if PHOTOMETRY.REALIZATION not in photo:
                    new_photometry.append(photo)
            if len(new_photometry):
                entry[ENTRY.PHOTOMETRY] = new_photometry
        else:
            entry = Entry(name=self._event_name)

        # `uentry` is the parallel entry built for uploading.
        uentry = Entry(name=self._event_name)
        data_keys = set()
        for task in model._call_stack:
            if model._call_stack[task]['kind'] == 'data':
                data_keys.update(
                    list(model._call_stack[task].get('keys', {}).keys()))
        entryhash = entry.get_hash(keys=list(sorted(list(data_keys))))

        # Accumulate all the sources and add them to each entry.
        sources = []
        for root in model._references:
            for ref in model._references[root]:
                sources.append(entry.add_source(**ref))
        sources.append(entry.add_source(**self._DEFAULT_SOURCE))
        source = ','.join(sources)

        usources = []
        for root in model._references:
            for ref in model._references[root]:
                usources.append(uentry.add_source(**ref))
        usources.append(uentry.add_source(**self._DEFAULT_SOURCE))
        usource = ','.join(usources)

        # Snapshot of the model's call stack (with user parameter-file
        # overrides applied) that is stored alongside the fit.
        model_setup = OrderedDict()
        for ti, task in enumerate(model._call_stack):
            task_copy = deepcopy(model._call_stack[task])
            if (task_copy['kind'] == 'parameter' and
                    task in model._parameter_json):
                task_copy.update(model._parameter_json[task])
            model_setup[task] = task_copy
        modeldict = OrderedDict(
            [(MODEL.NAME, model._model_name), (MODEL.SETUP, model_setup),
             (MODEL.CODE, 'MOSFiT'), (MODEL.DATE, time.strftime("%Y/%m/%d")),
             (MODEL.VERSION, __version__), (MODEL.SOURCE, source)])

        self._sampler.prepare_output(check_upload_quality, upload)

        self._sampler.append_output(modeldict)

        umodeldict = deepcopy(modeldict)
        umodeldict[MODEL.SOURCE] = usource
        modelhash = get_model_hash(
            umodeldict, ignore_keys=[MODEL.DATE, MODEL.SOURCE])
        umodelnum = uentry.add_model(**umodeldict)

        # The sampler may override the upload decision (e.g. poor fit).
        if self._sampler._upload_model is not None:
            upload_model = self._sampler._upload_model

        modelnum = entry.add_model(**modeldict)

        samples, probs, weights = self._sampler.get_samples()

        extras = OrderedDict()
        samples_to_plot = self._sampler._nwalkers

        # Weighted draw (inverse CDF over the sample weights) choosing
        # which realizations get full light-curve output attached.
        icdf = np.cumsum(np.concatenate(([0.0], weights)))
        draws = np.random.rand(samples_to_plot)
        indices = np.searchsorted(icdf, draws) - 1

        ri = 0
        for xi, x in enumerate(samples):
            ri = ri + 1
            prt.message('outputting_walker', [
                ri, len(samples)], inline=True, min_time=0.2)
            if xi in indices:
                # Full forward model for the selected realizations.
                output = model.run_stack(x, root='output')
                if extra_outputs:
                    for key in extra_outputs:
                        new_val = output.get(key, [])
                        new_val = all_to_list(new_val)
                        extras.setdefault(key, []).append(new_val)
                for i in range(len(output['times'])):
                    if not np.isfinite(output['model_observations'][i]):
                        continue
                    photodict = {
                        PHOTOMETRY.TIME:
                        output['times'][i] + output['min_times'],
                        PHOTOMETRY.MODEL: modelnum,
                        PHOTOMETRY.SOURCE: source,
                        PHOTOMETRY.REALIZATION: str(ri)
                    }
                    # Attach the observation in whichever representation the
                    # model produced: magnitude, count-based magnitude,
                    # flux density, or raw count rate.
                    if output['observation_types'][i] == 'magnitude':
                        photodict[PHOTOMETRY.BAND] = output['bands'][i]
                        photodict[PHOTOMETRY.MAGNITUDE] = output[
                            'model_observations'][i]
                        photodict[PHOTOMETRY.E_MAGNITUDE] = output[
                            'model_variances'][i]
                    elif output['observation_types'][i] == 'magcount':
                        if output['model_observations'][i] == 0.0:
                            continue
                        photodict[PHOTOMETRY.BAND] = output['bands'][i]
                        photodict[PHOTOMETRY.COUNT_RATE] = output[
                            'model_observations'][i]
                        photodict[PHOTOMETRY.E_COUNT_RATE] = output[
                            'model_variances'][i]
                        photodict[PHOTOMETRY.MAGNITUDE] = -2.5 * np.log10(
                            output['model_observations'][i]) + output[
                                'all_zeropoints'][i]
                        photodict[PHOTOMETRY.E_UPPER_MAGNITUDE] = 2.5 * (
                            np.log10(output['model_observations'][i] +
                                     output['model_variances'][i]) -
                            np.log10(output['model_observations'][i]))
                        # When the variance exceeds the count rate the lower
                        # error is undefined; mark as an upper limit instead.
                        if (output['model_variances'][i] > output[
                                'model_observations'][i]):
                            photodict[PHOTOMETRY.UPPER_LIMIT] = True
                        else:
                            photodict[PHOTOMETRY.E_LOWER_MAGNITUDE] = 2.5 * (
                                np.log10(output['model_observations'][i]) -
                                np.log10(output['model_observations'][i] -
                                         output['model_variances'][i]))
                    elif output['observation_types'][i] == 'fluxdensity':
                        photodict[PHOTOMETRY.FREQUENCY] = output[
                            'frequencies'][i] * frequency_unit('GHz')
                        photodict[PHOTOMETRY.FLUX_DENSITY] = output[
                            'model_observations'][
                                i] * flux_density_unit('µJy')
                        # Asymmetric errors from the (log-space) variance.
                        photodict[
                            PHOTOMETRY.
                            E_LOWER_FLUX_DENSITY] = (
                                photodict[PHOTOMETRY.FLUX_DENSITY] - (
                                    10.0 ** (
                                        np.log10(photodict[
                                            PHOTOMETRY.FLUX_DENSITY]) -
                                        output['model_variances'][
                                            i] / 2.5)) *
                                flux_density_unit('µJy'))
                        photodict[
                            PHOTOMETRY.
                            E_UPPER_FLUX_DENSITY] = (10.0 ** (
                                np.log10(photodict[
                                    PHOTOMETRY.FLUX_DENSITY]) +
                                output['model_variances'][i] / 2.5) *
                                flux_density_unit('µJy') -
                                photodict[PHOTOMETRY.FLUX_DENSITY])
                        photodict[PHOTOMETRY.U_FREQUENCY] = 'GHz'
                        photodict[PHOTOMETRY.U_FLUX_DENSITY] = 'µJy'
                    elif output['observation_types'][i] == 'countrate':
                        photodict[PHOTOMETRY.COUNT_RATE] = output[
                            'model_observations'][i]
                        photodict[
                            PHOTOMETRY.
                            E_LOWER_COUNT_RATE] = (
                                photodict[PHOTOMETRY.COUNT_RATE] - (
                                    10.0 ** (
                                        np.log10(photodict[
                                            PHOTOMETRY.COUNT_RATE]) -
                                        output['model_variances'][
                                            i] / 2.5)))
                        photodict[
                            PHOTOMETRY.
                            E_UPPER_COUNT_RATE] = (10.0 ** (
                                np.log10(photodict[
                                    PHOTOMETRY.COUNT_RATE]) +
                                output['model_variances'][i] / 2.5) -
                                photodict[PHOTOMETRY.COUNT_RATE])
                        photodict[PHOTOMETRY.U_COUNT_RATE] = 's^-1'
                    # Optional per-point metadata, copied when present.
                    if ('model_upper_limits' in output and
                            output['model_upper_limits'][i]):
                        photodict[PHOTOMETRY.UPPER_LIMIT] = bool(output[
                            'model_upper_limits'][i])
                    if self._limiting_magnitude is not None:
                        photodict[PHOTOMETRY.SIMULATED] = True
                    if 'telescopes' in output and output['telescopes'][i]:
                        photodict[PHOTOMETRY.TELESCOPE] = output[
                            'telescopes'][i]
                    if 'systems' in output and output['systems'][i]:
                        photodict[PHOTOMETRY.SYSTEM] = output['systems'][i]
                    if 'bandsets' in output and output['bandsets'][i]:
                        photodict[PHOTOMETRY.BAND_SET] = output[
                            'bandsets'][i]
                    if 'instruments' in output and output[
                            'instruments'][i]:
                        photodict[PHOTOMETRY.INSTRUMENT] = output[
                            'instruments'][i]
                    if 'modes' in output and output['modes'][i]:
                        photodict[PHOTOMETRY.MODE] = output[
                            'modes'][i]
                    entry.add_photometry(
                        compare_to_existing=False, check_for_dupes=False,
                        **photodict)
                    uphotodict = deepcopy(photodict)
                    uphotodict[PHOTOMETRY.SOURCE] = umodelnum
                    uentry.add_photometry(
                        compare_to_existing=False,
                        check_for_dupes=False,
                        **uphotodict)
            else:
                # Cheaper objective-only evaluation for unselected samples.
                output = model.run_stack(x, root='objective')

            # Record the parameter values of this realization.
            parameters = OrderedDict()
            derived_keys = set()
            pi = 0
            for ti, task in enumerate(model._call_stack):
                # if task not in model._free_parameters:
                #     continue
                if model._call_stack[task]['kind'] != 'parameter':
                    continue
                paramdict = OrderedDict((
                    ('latex', model._modules[task].latex()),
                    ('log', model._modules[task].is_log())
                ))
                if task in model._free_parameters:
                    # Free parameters are stored as unit-interval fractions;
                    # convert to physical values via the module.
                    poutput = model._modules[task].process(
                        **{'fraction': x[pi]})
                    value = list(poutput.values())[0]
                    paramdict['value'] = value
                    paramdict['fraction'] = x[pi]
                    pi = pi + 1
                else:
                    if output.get(task, None) is not None:
                        paramdict['value'] = output[task]
                parameters.update({model._modules[task].name(): paramdict})
                # Dump out any derived parameter keys
                derived_keys.update(model._modules[task].get_derived_keys(
                ))

            for key in list(sorted(list(derived_keys))):
                if (output.get(key, None) is not None and
                        key not in parameters):
                    parameters.update({key: {'value': output[key]}})

            realdict = {REALIZATION.PARAMETERS: parameters}
            if probs is not None:
                realdict[REALIZATION.SCORE] = str(
                    probs[xi])
            realdict[REALIZATION.ALIAS] = str(ri)
            realdict[REALIZATION.WEIGHT] = str(weights[xi])
            entry[ENTRY.MODELS][0].add_realization(
                check_for_dupes=False, **realdict)
            urealdict = deepcopy(realdict)
            uentry[ENTRY.MODELS][0].add_realization(
                check_for_dupes=False, **urealdict)
        prt.message('all_walkers_written', inline=True)

        entry.sanitize()
        oentry = {self._event_name: entry._ordered(entry)}
        uentry.sanitize()
        ouentry = {self._event_name: uentry._ordered(uentry)}

        uname = '_'.join(
            [self._event_name, entryhash, modelhash])

        if output_path and not os.path.exists(output_path):
            os.makedirs(output_path)

        if not os.path.exists(model.get_products_path()):
            os.makedirs(model.get_products_path())

        if write:
            prt.message('writing_complete')
            # Each product is written twice: to a rolling fixed-name file
            # and to an event-specific (optionally suffixed) file.
            with open_atomic(
                    os.path.join(model.get_products_path(), 'walkers.json'),
                    'w') as flast, open_atomic(os.path.join(
                        model.get_products_path(),
                        self._event_name + (
                            ('_' + suffix) if suffix else '') +
                        '.json'), 'w') as feven:
                entabbed_json_dump(oentry, flast, separators=(',', ':'))
                entabbed_json_dump(oentry, feven, separators=(',', ':'))

            if save_full_chain:
                prt.message('writing_full_chain')
                with open_atomic(
                        os.path.join(model.get_products_path(),
                                     'chain.json'), 'w') as flast, open_atomic(
                        os.path.join(model.get_products_path(),
                                     self._event_name + '_chain' + (
                                         ('_' + suffix) if suffix else '') +
                                     '.json'), 'w') as feven:
                    entabbed_json_dump(self._sampler._all_chain.tolist(),
                                       flast, separators=(',', ':'))
                    entabbed_json_dump(self._sampler._all_chain.tolist(),
                                       feven, separators=(',', ':'))

            if extra_outputs:
                prt.message('writing_extras')
                with open_atomic(os.path.join(
                        model.get_products_path(), 'extras.json'),
                        'w') as flast, open_atomic(os.path.join(
                            model.get_products_path(), self._event_name +
                            '_extras' + (('_' + suffix) if suffix else '') +
                            '.json'), 'w') as feven:
                    entabbed_json_dump(extras, flast, separators=(',', ':'))
                    entabbed_json_dump(extras, feven, separators=(',', ':'))

            prt.message('writing_model')
            with open_atomic(os.path.join(
                    model.get_products_path(), 'upload.json'),
                    'w') as flast, open_atomic(os.path.join(
                        model.get_products_path(),
                        uname + (('_' + suffix) if suffix else '') +
                        '.json'), 'w') as feven:
                entabbed_json_dump(ouentry, flast, separators=(',', ':'))
                entabbed_json_dump(ouentry, feven, separators=(',', ':'))

        if upload_model:
            prt.message('ul_fit', [entryhash, self._sampler._modelhash])
            upayload = entabbed_json_dumps(ouentry, separators=(',', ':'))
            try:
                dbx = dropbox.Dropbox(upload_token)
                dbx.files_upload(
                    upayload.encode(),
                    '/' + uname + '.json',
                    mode=dropbox.files.WriteMode.overwrite)
                prt.message('ul_complete')
            except Exception:
                # In test mode upload failures are tolerated silently.
                if self._test:
                    pass
                else:
                    raise

        if upload:
            # Offer to upload any locally-converted input events as well.
            for ce in self._converter.get_converted():
                dentry = Entry.init_from_file(
                    catalog=None,
                    name=ce[0],
                    path=ce[1],
                    merge=False,
                    pop_schema=False,
                    ignore_keys=[ENTRY.MODELS],
                    compare_to_existing=False)

                dentry.sanitize()
                odentry = {ce[0]: uentry._ordered(dentry)}
                dpayload = entabbed_json_dumps(odentry, separators=(',', ':'))
                text = prt.message('ul_devent', [ce[0]], prt=False)
                ul_devent = prt.prompt(text, kind='bool', message=False)
                if ul_devent:
                    dpath = '/' + slugify(
                        ce[0] + '_' + dentry[ENTRY.SOURCES][0].get(
                            SOURCE.BIBCODE, dentry[ENTRY.SOURCES][0].get(
                                SOURCE.NAME, 'NOSOURCE'))) + '.json'
                    try:
                        dbx = dropbox.Dropbox(upload_token)
                        dbx.files_upload(
                            dpayload.encode(),
                            dpath,
                            mode=dropbox.files.WriteMode.overwrite)
                        prt.message('ul_complete')
                    except Exception:
                        if self._test:
                            pass
                        else:
                            raise

        return (entry, samples, probs)
    def nester(self):
        """Use nested sampling to determine posteriors."""
        # Placeholder hook: nested sampling is actually driven by the
        # `Nester` sampler constructed in `fit_data` when
        # ``method == 'nester'``.  NOTE(review): confirm this stub is
        # still needed.
        pass
def generate_dummy_data(self,
name,
max_time=1000.,
time_list=[],
band_list=[],
band_systems=[],
band_instruments=[],
band_bandsets=[]):
"""Generate simulated data based on priors."""
# Just need 2 plot points for beginning and end.
plot_points = 2
times = list(sorted(set(
list(np.linspace(0.0, max_time, plot_points)) + time_list)))
band_list_all = ['V'] if len(band_list) == 0 else band_list
times = np.repeat(times, len(band_list_all))
# Create lists of systems/instruments if not provided.
if isinstance(band_systems, string_types):
band_systems = [band_systems for x in range(len(band_list_all))]
if isinstance(band_instruments, string_types):
band_instruments = [
band_instruments for x in range(len(band_list_all))
]
if isinstance(band_bandsets, string_types):
band_bandsets = [band_bandsets for x in range(len(band_list_all))]
if len(band_systems) < len(band_list_all):
rep_val = '' if len(band_systems) == 0 else band_systems[-1]
band_systems = band_systems + [
rep_val for x in range(len(band_list_all) - len(band_systems))
]
if len(band_instruments) < len(band_list_all):
rep_val = '' if len(band_instruments) == 0 else band_instruments[
-1]
band_instruments = band_instruments + [
rep_val
for x in range(len(band_list_all) - len(band_instruments))
]
if len(band_bandsets) < len(band_list_all):
rep_val = '' if len(band_bandsets) == 0 else band_bandsets[-1]
band_bandsets = band_bandsets + [
rep_val
for x in range(len(band_list_all) - len(band_bandsets))
]
bands = [i for s in [band_list_all for x in times] for i in s]
systs = [i for s in [band_systems for x in times] for i in s]
insts = [i for s in [band_instruments for x in times] for i in s]
bsets = [i for s in [band_bandsets for x in times] for i in s]
data = {name: {'photometry': []}}
for ti, tim in enumerate(times):
band = bands[ti]
if isinstance(band, dict):
band = band['name']
photodict = {
'time': tim,
'band': band,
'magnitude': 0.0,
'e_magnitude': 0.0
}
if systs[ti]:
photodict['system'] = systs[ti]
if insts[ti]:
photodict['instrument'] = insts[ti]
if bsets[ti]:
photodict['bandset'] = bsets[ti]
data[name]['photometry'].append(photodict)
return data
|
#!/bin/sh
# Regenerate all protobuf/gRPC stubs from the .proto files under pb/.
#
# Outputs:
#   proxy/protos.pb          - descriptor set (with imports + source info)
#   backend/src, media/src   - Python + gRPC Python stubs
#   frontend/src, vue/src    - CommonJS JS + grpc-web (with .d.ts) stubs
#
# Collect the proto files explicitly instead of piping `find` into
# protoc and recovering the list with an embedded `$(xargs)`, which only
# worked because command substitutions inherit the pipe's stdin -- an
# obscure and fragile trick.  (Paths with whitespace are still not
# supported, same as before.)
PROTO_FILES=$(find pb -name '*.proto')

protoc -I. \
    --plugin=protoc-gen-grpc_python="$(which grpc_python_plugin)" \
    --include_imports --include_source_info \
    \
    --descriptor_set_out proxy/protos.pb \
    \
    --python_out=backend/src \
    --grpc_python_out=backend/src \
    \
    --python_out=media/src \
    --grpc_python_out=media/src \
    \
    --js_out="import_style=commonjs,binary:frontend/src" \
    --grpc-web_out="import_style=commonjs+dts,mode=grpcweb:frontend/src" \
    \
    --js_out="import_style=commonjs,binary:vue/src" \
    --grpc-web_out="import_style=commonjs+dts,mode=grpcweb:vue/src" \
    \
    $PROTO_FILES \
    && echo "OK"
|
<gh_stars>0
/**
 * Minimal FIFO queue backed by an array (front of the queue at index 0).
 */
class Queue {
    constructor() {
        this.items = [];
    }
    // Append an element at the back; returns the new length (push's result).
    enqueue(element) {
        return this.items.push(element);
    }
    // Remove and return the front element, or undefined when empty.
    dequeue() {
        if (this.items.length > 0) {
            return this.items.shift();
        }
    }
    // Return the front element without removing it.
    // Fixed: previously returned items[items.length - 1] (the BACK of the
    // queue), which contradicts dequeue() taking from the front.
    peek() {
        return this.items[0];
    }
    // True when the queue holds no elements.
    isEmpty() {
        return this.items.length === 0;
    }
    // Number of queued elements.
    size() {
        return this.items.length;
    }
    // Drop all elements.
    clear() {
        this.items = [];
    }
}
// Demo: fill a queue with a few values and print its backing array.
let queue = new Queue();
for (const value of [1, 2, 3, 5]) {
    queue.enqueue(value);
}
console.log(queue.items);
|
// Algorithm to swap two variables without using a temporary variable.
// Uses the classic sum/difference trick (trace shown for a = 2, b = 3).
// Caveat: only meaningful for numbers; very large values may lose
// floating-point precision.
function swap(a, b) {
    a += b;    // a now holds the sum:       a = 5, b = 3
    b = a - b; // b receives the original a: a = 5, b = 2
    a -= b;    // a receives the original b: a = 3, b = 2
    return [a, b];
}
#!/usr/bin/env bash
# Sweep benchmark configurations and launch the workload locally and on
# every remote host, then collect the resulting throughput files.
#declare -a write_ratios=(10 20 50 100 200 500 1000)
declare -a write_ratios=(0)
#declare -a bqr_read_buf_size=(0)
declare -a bqr_read_buf_size=(512)
declare -a remote_bqr=(0)
EXECUTABLE="zookeeper"
EXEC_FOLDER="${HOME}/odyssey/build"
REMOTE_COMMAND="cd ${EXEC_FOLDER}; bash ./run-exe.sh"
# get Hosts
source ./cluster.sh
# One run per (remote_bqr x write_ratio x read_buf_size) combination.
for LR in "${remote_bqr[@]}"; do
    for WR in "${write_ratios[@]}"; do
        for BQR_LEN in "${bqr_read_buf_size[@]}"; do
            args="-x ${EXECUTABLE} -w ${WR} -B ${BQR_LEN} -R ${LR}"
            # Start the manager locally in the background...
            ./copy-run.sh ${args} &
            sleep 3 # give some leeway so that manager starts before executing the members
            # ...then run the same configuration on every remote host
            # (REMOTE_HOSTS comes from cluster.sh).
            parallel "ssh -tt {} $'${REMOTE_COMMAND} ${args}'" ::: $(echo ${REMOTE_HOSTS[@]}) >/dev/null
            sleep 3 # give some leeway before getting into the next round
        done
    done
done
# Gather throughput result files from the cluster.
./get-system-xput-files.sh
|
import { defaultFieldResolver } from 'graphql';
import { Log } from '../../services/logger';
import { SchemaDirectiveVisitor } from 'apollo-server-express';
class AuthDirective extends SchemaDirectiveVisitor {
  // Applies the directive's `requires` role to a whole object type and
  // wraps all of its field resolvers with the auth check.
  visitObject(type): void {
    this.ensureFieldsWrapped(type);
    type._requiredAuthRole = this.args.requires;
  }
  // Visitor methods for nested types like fields and arguments
  // also receive a details object that provides information about
  // the parent and grandparent types.
  visitFieldDefinition(field, details) {
    this.ensureFieldsWrapped(details.objectType);
    field._requiredAuthRole = this.args.requires;
  }
  // Wraps every field resolver of `objectType` exactly once so that the
  // required role (field-level first, then type-level) is enforced
  // before the original resolver runs.
  ensureFieldsWrapped(objectType) {
    // Mark the GraphQLObjectType object to avoid re-wrapping:
    if (objectType._authFieldsWrapped) return;
    objectType._authFieldsWrapped = true;
    const fields = objectType.getFields();
    Object.keys(fields).forEach((fieldName) => {
      const field = fields[fieldName];
      const { resolve = defaultFieldResolver } = field;
      field.resolve = async (...args) => {
        // Get the required Role from the field first, falling back
        // to the objectType if no Role is required by the field:
        const requiredRole = field._requiredAuthRole || objectType._requiredAuthRole;
        if (!requiredRole) {
          return resolve.apply(this, args);
        }
        // Resolver args are (source, args, context, info).
        const [, , context] = args;
        let allow = true;
        // NOTE(review): only the 'BACKEND' role is ever checked; any other
        // `requires` value falls through with allow === true
        // (default-allow). Confirm this is intentional.
        if (requiredRole === 'BACKEND') {
          // BACKEND access requires the shared secret header to match.
          if (
            !context.req.headers['bio-auth-token'] ||
            context.req.headers['bio-auth-token'] !== process.env.BIO_EDIT_TOKEN
          ) {
            Log.warn(
              `Connection to Bio blocked from ${context.req.connection.remoteAddress} for role 'BACKEND'`,
            );
            allow = false;
          }
        }
        if (!allow) {
          throw new Error(`not authorized ${context.req.connection.remoteAddress}`);
        }
        return resolve.apply(this, args);
      };
    });
  }
}
export default AuthDirective;
|
# Bash completion for dobo
#
# INSTALLATION
#
# First install dobo from
# https://github.com/KeisukeYamashita/dobo
#
# Then copy this file into a bash_completion.d folder:
#
# /etc/bash_completion.d
# /usr/local/etc/bash_completion.d
# ~/bash_completion.d
#
# or copy it somewhere (e.g. ~/.dobo-completion.bash) and put the
# following in your .bashrc:
#
# source ~/.dobo-completion.bash
#
# CREDITS
#
# Source codes derived from Simon Whitaker's <sw@netcetera.org> gitignore-boilerplates project
# https://github.com/simonwhitaker/gitignore-boilerplates
# Completion entry point for the `dobo` CLI.
# Improvements over the original: the unused `prev` local was removed,
# `subcommand` is now declared local (it previously leaked into the
# caller's environment), and word expansions are quoted.
_dobo()
{
    local cur opts subcommand
    cur="${COMP_WORDS[COMP_CWORD]}"
    case $COMP_CWORD in
        1)
            # First word: one of the top-level subcommands.
            COMPREPLY=($(compgen -W "dump help list root search update version" -- "${cur}"))
            ;;
        *)
            subcommand="${COMP_WORDS[1]}"
            case $subcommand in
                dump)
                    # `dump` takes a boilerplate name: offer every
                    # *.dockerignore file (sans extension) found in the
                    # boilerplates directory ($DOBO_BOILERPLATES or the
                    # default under $HOME).
                    opts=$( find "${DOBO_BOILERPLATES:-$HOME/.dockerignore-boilerplates}" -name "*.dockerignore" -exec basename \{\} .dockerignore \; )
                    COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                    ;;
                *)
                    # Other subcommands take no completable arguments.
                    COMPREPLY=()
                    ;;
            esac
            ;;
    esac
}
complete -F _dobo dobo
|
<reponame>IamGanesh19/WebVella-ERP<gh_stars>0
/**
 * Plain value/label pair used to back select-input options.
 * Both fields start out null; callers assign them after construction.
 */
export default class SelectOption {
    constructor() {
        Object.assign(this, { value: null, label: null });
    }
}
|
def binary_to_int(binary):
    """Convert a string of binary digits to its integer value.

    Characters other than ``'1'`` are treated as zero bits, matching the
    original implementation's lenient behavior.

    Args:
        binary: String of '0'/'1' characters (may be empty).

    Returns:
        The non-negative integer the string encodes (0 for '').
    """
    result = 0
    # Accumulate left-to-right: doubling shifts the previous bits up,
    # avoiding a pow(2, ...) call for every digit.
    for digit in binary:
        result = result * 2 + (1 if digit == '1' else 0)
    return result


binary_to_int('100100')  # returns 36
<filename>js/main.js
// Central ScrollMagic controller shared by every scene in this file.
var controller = new ScrollMagic.Controller();
// Parallax background
// Scrubs the #parallax background position to 50%/100% over 200% of the
// viewport height, starting as soon as the element enters the view.
new ScrollMagic.Scene({
        triggerElement: "#parallax",
        triggerHook: "onEnter",
    })
    .duration('200%')
    .setTween("#parallax", {
        backgroundPosition: "50% 100%",
        ease: Linear.easeNone
    })
    //.addIndicators() // for debugging purposes
    .addTo(controller);
//scene 2 set pin 1
// Pins #slidein in place for 200% of the viewport height once its top
// reaches the top of the viewport ("onLeave").
new ScrollMagic.Scene({
        triggerElement: "#slidein",
        triggerHook: "onLeave",
    })
    .duration('200%')
    .setPin("#slidein")
    //.addIndicators() // add indicators (requires plugin)
    .addTo(controller);
// NOTE(review): this second, duration-less pin on the same element looks
// redundant with the scene above — confirm it is intentional.
new ScrollMagic.Scene({
        triggerElement: "#slidein",
        triggerHook: "onLeave",
    })
    .setPin("#slidein")
    //.addIndicators() // add indicators (requires plugin)
    .addTo(controller);
//scene 3 set pin 2
// Same pinning pattern for #slidein2, held for 100% of the viewport.
new ScrollMagic.Scene({
        triggerElement: "#slidein2",
        triggerHook: "onLeave",
    })
    .duration('100%')
    .setPin("#slidein2")
    //.addIndicators() // add indicators (requires plugin)
    .addTo(controller);
// NOTE(review): duplicate duration-less pin for #slidein2 as well.
new ScrollMagic.Scene({
        triggerElement: "#slidein2",
        triggerHook: "onLeave",
    })
    .setPin("#slidein2")
    //.addIndicators() // add indicators (requires plugin)
    .addTo(controller);
//slide right
// NOTE(review): despite the comment, this timeline animates ".top"
// vertically (drops it in from y: -800) — confirm the naming.
var fromTopTimeline = new TimelineMax();
var fromTopFrom = TweenMax.from(".top", 1, {
    y: -800
});
var fromTopTo = TweenMax.to(".top", 1, {
    y: 0
});
fromTopTimeline
    .add(fromTopFrom)
    .add(fromTopTo);
//scene slide in right
// Scrubs the timeline over 400px of scrolling, starting 200px past
// #slideRight's trigger point.
new ScrollMagic.Scene({
        triggerElement: "#slideRight",
        offset: 200,
    })
    .setTween(fromTopTimeline)
    .duration(400)
    // .reverse(false)
    //.addIndicators() // add indicators (requires plugin)
    .addTo(controller);
//Fly in from the bottom - animation
// Mirror of the above: ".bottom" flies in from y: 800.
var fromBottomTimeline = new TimelineMax();
var fromBottomFrom = TweenMax.from(".bottom", 1, {
    y: 800
});
var fromBottomTo = TweenMax.to(".bottom", 1, {
    y: 0
});
fromBottomTimeline
    .add(fromBottomFrom)
    .add(fromBottomTo);
//scene
new ScrollMagic.Scene({
        triggerElement: "#slideInBottom",
        offset: 200,
    })
    .setTween(fromBottomTimeline)
    .duration(400)
    // .reverse(false)
    //.addIndicators() // add indicators (requires plugin)
    .addTo(controller);
//staggering tween
// For every ".stagger-tween" container, fade/slide its
// ".stagger-tween-item" children in one after another (0.3s apart) once
// the container scrolls within 100px of its trigger point.
$(".stagger-tween").each(function() {
    var stagger = TweenMax.staggerFrom($(this).find(".stagger-tween-item"), 1, {
            y: 40,
            autoAlpha: 0,
            delay: 0,
            ease: Power2.easeOut
        },
        0.3);
    var scene2 = new ScrollMagic.Scene({
            triggerElement: this,
            offset: -100,
            reverse:true
        })
        .setTween(stagger)
        .addTo(controller)
        // .addIndicators()
    ;
});
//about page
// SCENE 6 - pin the first section
// and update text
var pinScene01Tl = new TimelineMax();
pinScene01Tl
.to($('#slide01 h1'), 0.2, {autoAlpha: 0, ease:Power1.easeNone}, 1.5)
.to($('#slide01 section'), 0.2, {autoAlpha: 0, ease:Power1.easeNone}, 1.5)
.set($('#slide01 h1'), {text: "Jaxx Liberty"})
.set($('#slide01 p'), {text: "Our digital asset wallet, Jaxx, was created in 2014 by Ethereum co-founder <NAME>. The newest version is called Jaxx Liberty. Think of it as Jaxx 2.0. We’ve spent the last year redesigning and re-engineering Jaxx from the ground up and we are so excited to finally share it with you.."})
.fromTo($('#slide01 h1'), 0.7, {y: '+=20'}, {y: 0, autoAlpha: 1, ease:Power1.easeOut}, '+=0.4')
.fromTo($('#slide01 section'), 0.6, {y: '+=20'}, {y: 0, autoAlpha: 1, ease:Power1.easeOut}, '-=0.6')
.set($('#slide01 h1'), {autoAlpha: 1}, '+=2');
var pinScene01 = new ScrollMagic.Scene({
triggerElement: '#slide01',
triggerHook: 0,
duration: "250%"
})
.setPin("#slide01")
.setTween(pinScene01Tl)
.addTo(controller);
// SCENE 7 - pin the second section
// and update text
// Same pin-and-swap pattern as scene 6, applied to #slide03, with an extra
// slow zoom on the background element while the text is swapped.
var pinScene02Tl = new TimelineMax();
pinScene02Tl
    .to($('#slide03 h1'), 0.2, {autoAlpha: 0, ease:Power1.easeNone}, 1.5)
    .to($('#slide03 section'), 0.2, {autoAlpha: 0, ease:Power1.easeNone}, 1.5)
    // Swap the copy while invisible — presumably via GSAP's TextPlugin; confirm.
    .set($('#slide03 h1'), {text: "The Memories"})
    .set($('#slide03 p'), {text: "You never climb the same mountain twice, not even in memory. Memory rebuilds the mountain, changes the weather, retells the jokes, remakes all the moves."})
    // Slow background zoom anchored at the top-left corner.
    .to($('#slide03 .bcg'), 0.6, {scale: 1.2, transformOrigin: '0% 0%', ease:Power0.easeNone})
    .fromTo($('#slide03 h1'), 0.7, {y: '+=20'}, {y: 0, autoAlpha: 1, ease:Power1.easeOut}, '+=0.4')
    .fromTo($('#slide03 section'), 0.6, {y: '+=20'}, {y: 0, autoAlpha: 1, ease:Power1.easeOut}, '-=0.6')
    .set($('#slide03 h1'), {autoAlpha: 1}, '+=2.5');
var pinScene02 = new ScrollMagic.Scene({
    triggerElement: '#slide03',
    triggerHook: 0,     // pin when the slide reaches the top of the viewport
    duration: "300%"    // pinned for 3 viewport heights of scrolling
})
    .setPin("#slide03")
    .setTween(pinScene02Tl)
    .addTo(controller);
//features page animations
// Left tween: slide #left in from off-screen left (x:-500, transparent) to
// its resting position as #slide2 scrolls through an 800px scene window.
var fromLeftTimeline = new TimelineMax();
fromLeftTimeline
    .add(TweenMax.from("#left", 1, { x: -500, opacity: 0 }))
    .add(TweenMax.to("#left", 1, { x: 0, opacity: 1 }));
// Left tween scene.
new ScrollMagic.Scene({
    triggerElement: "#slide2",
    offset: 200,
})
    .setTween(fromLeftTimeline)
    .duration(800)
    // .reverse(false)
    //.addIndicators() // add indicators (requires plugin)
    .addTo(controller);
//top tween — slide #top down into place as #slide2 scrolls into range.
// FIX: this section previously re-declared and overwrote the
// fromLeftTimeline / fromLeftFrom / fromLeftTo variables from the "#left"
// tween above; renamed so the two animations no longer clobber each other.
var slide2TopTimeline = new TimelineMax();
var slide2TopFrom = TweenMax.from("#top", 1, {
    y: -500
});
var slide2TopTo = TweenMax.to("#top", 1, {
    y: 0
});
slide2TopTimeline
    .add(slide2TopFrom)
    .add(slide2TopTo);
//top tween scene
new ScrollMagic.Scene({
    triggerElement: "#slide2",
    offset: 200,
})
    .setTween(slide2TopTimeline)
    .duration(800)
    // .reverse(false)
    //.addIndicators() // add indicators (requires plugin)
    .addTo(controller);
// ============ friday examples
// Single item fade in and slide up...
$(".single-tween-item").each(function() {
    // FIX: this was `TweenMax.fromTo(this, .6, {fromVars}, .1)` — fromTo's
    // fourth argument is the *toVars* object, so the bare number .1 there was
    // invalid. A plain .from() tween (matching the stagger/content sections)
    // expresses the intent: rise 40px while fading in over 0.6s.
    var tween = TweenMax.from(this, .6, {
        y: 40,
        autoAlpha: 0,
        delay: 0,
        ease: Power2.easeOut
    });
    var scene1 = new ScrollMagic.Scene({
        triggerElement: this,
        offset: -100,
        reverse: true
    })
        .setTween(tween)
        .addTo(controller);
    // .addIndicators() // debugging aid (requires plugin) — commented out to
    // match the other scenes and avoid a runtime error when the plugin is absent
});
// LH Content fade and slide in from right, RH Content fade and slide up...
$(".content-tween").each(function() {
    var contentTweenTL = new TimelineMax({
        repeat: 0,
    });
    // Left column slides in from the left while the right column rises;
    // both tweens are placed 0.1s into the timeline.
    var contentTween = contentTweenTL.from($(this).find(".content-tween-left"), .6, {
        x: -40,
        autoAlpha: 0,
        delay: 0,
        ease: Power2.easeOut
    }, .1)
        .from($(this).find(".content-tween-right"), .6, {
            y: 40,
            autoAlpha: 0,
            delay: 0,
            ease: Power2.easeOut
        }, .1);
    var scene3 = new ScrollMagic.Scene({
        triggerElement: this,
        offset: -100,
        reverse: true
    })
        .setTween(contentTween)
        .addTo(controller);
    // .addIndicators() // debugging aid (requires plugin) — commented out for
    // consistency with every other scene in this file
});
//individual tweens on page load
//slide 1
//var fadein_slide_1 = TweenLite.from( '#slide-1', 2, {autoAlpha:0, y: 100} );
// One-shot entrance tweens (run on load, not scroll-driven): the hero slides
// up while fading in, its heading slides in from the right.
TweenLite.from( '#slide-1', 2, {autoAlpha:0, y: 100} );
TweenLite.from( '#slide-1 h1', 1, {autoAlpha:0, x: 100} );
//canvas
// Size the canvas backing store to the current viewport.
// NOTE(review): this is not re-run on window resize — confirm a resize
// handler elsewhere keeps it in sync.
var canvas = document.querySelector('canvas');
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
|
/*
* Copyright (c) 2013 Red Rainbow IT Solutions GmbH, Germany
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.jeeventstore.serialization.gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* A custom Gson type converter for the {@link EventList}.
* @author <NAME>
*/
public class EventListTypeConverter
implements JsonSerializer<EventList>, JsonDeserializer<EventList> {
@Override
public JsonElement serialize(
EventList src,
Type srcType, JsonSerializationContext context) {
JsonObject combined = new JsonObject();
combined.add("version", new JsonPrimitive(1));
JsonArray events = new JsonArray();
for (Serializable s : src.events()) {
JsonObject obj = new JsonObject();
obj.add("type", new JsonPrimitive(s.getClass().getCanonicalName()));
obj.add("body", context.serialize(s));
events.add(obj);
}
combined.add("events", events);
return combined;
}
@Override
public EventList deserialize(JsonElement json, Type type, JsonDeserializationContext context)
throws JsonParseException {
JsonObject obj = json.getAsJsonObject();
Integer version = obj.getAsJsonPrimitive("version").getAsInt();
if (version != 1)
throw new JsonParseException("Unable to parse event of version " + version);
Iterator<JsonElement> eit = obj.getAsJsonArray("events").iterator();
List<Serializable> eventlist = new ArrayList<>();
while (eit.hasNext()) {
String clazz = null;
try {
JsonObject elem = eit.next().getAsJsonObject();
clazz = elem.getAsJsonPrimitive("type").getAsString();
Class<? extends Serializable> eventClass = (Class<? extends Serializable>) Class.forName(clazz);
Serializable s = context.deserialize(elem.get("body"), eventClass);
eventlist.add(s);
} catch (ClassNotFoundException e) {
throw new JsonParseException("Cannot deserialize events of class " + clazz, e);
}
}
return new EventList(eventlist);
}
} |
<reponame>INC-PSTORE/psafe
import { createSelector } from 'reselect';

// Factory helper: builds a make-selector function that memoizes `project`
// over the slice of the store returned by `selectSlice`.
const makeSliceSelector = (selectSlice, project) => () => createSelector(selectSlice, project);

// router state
const selectRouter = state => state.router;
const makeSelectLocation = makeSliceSelector(selectRouter, routerState => routerState.location);

// app state
const selectApp = state => state.app;
const makeSelectETHAccount = makeSliceSelector(selectApp, appState => appState.ethAccount);
const makeSelectTempIncAccount = makeSliceSelector(selectApp, appState => appState.tempIncAccount);
const makeSelectPrivateIncAccount = makeSliceSelector(selectApp, appState => appState.privateIncAccount);
const makeSelectETHPrivateKey = makeSliceSelector(selectApp, appState => appState.ethAccount.privateKey);
const makeSelectPrivIncAccPrivateKey = makeSliceSelector(selectApp, appState => appState.privateIncAccount.privateKey);
const makeSelectTempIncAccPrivateKey = makeSliceSelector(selectApp, appState => appState.tempIncAccount.privateKey);
const makeSelectIsLoadWalletDone = makeSliceSelector(selectApp, appState => appState.isLoadWalletDone);
const makeSelectIsOpenedInfoDialog = makeSliceSelector(selectApp, appState => appState.isOpenedInfoDialog);
const makeSelectIsAccountInfoOpened = makeSliceSelector(selectApp, appState => appState.isAccountInfoOpened);
const makeSelectRequestings = makeSliceSelector(selectApp, appState => appState.requestings);
const makeSelectDeployedTokens = makeSliceSelector(selectApp, appState => appState.deployedTokens);
const makeSelectGeneratedETHAccFromIncAcc = makeSliceSelector(selectApp, appState => appState.generatedETHAccFromIncAcc);
const makeSelectMetaMask = makeSliceSelector(selectApp, appState => appState.metaMask);

export {
    makeSelectLocation,
    makeSelectETHAccount,
    makeSelectTempIncAccount,
    makeSelectPrivateIncAccount,
    makeSelectETHPrivateKey,
    makeSelectPrivIncAccPrivateKey,
    makeSelectTempIncAccPrivateKey,
    makeSelectIsLoadWalletDone,
    makeSelectIsOpenedInfoDialog,
    makeSelectRequestings,
    makeSelectIsAccountInfoOpened,
    makeSelectDeployedTokens,
    makeSelectGeneratedETHAccFromIncAcc,
    makeSelectMetaMask,
};
|
package com.zero.debloper.kaddu;
import android.content.Intent;
import android.speech.RecognizerIntent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;
import java.util.ArrayList;
/**
 * Main screen: hosts the mic button that starts speech recognition and routes
 * the option-menu items to the settings/help/about activities. Recognized
 * voice commands are forwarded to {@code DynamicProfileManager}.
 */
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            startActivity(new Intent(this, ProfileRecyclerViewActivity.class));
            return true;
        } else if (id == R.id.action_voice_command_help) {
            startActivity(new Intent(this, VoiceCommandHelpActivity.class));
            return true;
        } else if (id == R.id.action_about) {
            startActivity(new Intent(this, AboutActivity.class));
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    // Activity Results handler
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // if it’s speech recognition results
        // and process finished ok
        if (requestCode == SystemData.VOICE_RECOGNITION_REQUEST_CODE && resultCode == RESULT_OK) {
            // Receiving results as a typed list (was a raw ArrayList). The
            // recognizer returns candidates ordered most-relevant-first.
            ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            // Guard against a missing extra as well as an empty result list,
            // then forward the most relevant candidate.
            if (matches != null && !matches.isEmpty()) {
                //Toast.makeText(this, (CharSequence) matches.get(0), Toast.LENGTH_LONG).show();
                DynamicProfileManager.getInstance(getApplicationContext()).parseVoiceCommand(matches.get(0));
            }
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    /** Launches the speech recognizer when the mic button is tapped. */
    public void onMicButtonClick(View view) {
        SpeechRecognitionHelper.run(this);
    }
}
|
package seedu.address.logic.parser.doctor;
import static java.util.Objects.requireNonNull;
import static seedu.address.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.address.logic.parser.CliSyntax.PREFIX_GENDER;
import static seedu.address.logic.parser.CliSyntax.PREFIX_NAME;
import static seedu.address.logic.parser.CliSyntax.PREFIX_PHONE;
import static seedu.address.logic.parser.CliSyntax.PREFIX_SPECIALISATION;
import static seedu.address.logic.parser.CliSyntax.PREFIX_YEAR;
import seedu.address.commons.core.index.Index;
import seedu.address.logic.commands.doctor.EditDoctorCommand;
import seedu.address.logic.commands.doctor.EditDoctorCommand.EditDoctorDescriptor;
import seedu.address.logic.parser.ArgumentMultimap;
import seedu.address.logic.parser.ArgumentTokenizer;
import seedu.address.logic.parser.Parser;
import seedu.address.logic.parser.ParserUtil;
import seedu.address.logic.parser.exceptions.ParseException;
/**
 * Parses input arguments and creates a new EditDoctorCommand object
 */
public class EditDoctorCommandParser implements Parser<EditDoctorCommand> {

    /**
     * Parses the given {@code String} of arguments in the context of the EditDoctorCommand
     * and returns an EditDoctorCommand object for execution.
     * @throws ParseException if the user input does not conform the expected format
     */
    public EditDoctorCommand parse(String args) throws ParseException {
        requireNonNull(args);
        // Split the input into the index preamble and the prefixed field values.
        ArgumentMultimap argMultimap =
                ArgumentTokenizer.tokenize(
                        args, PREFIX_NAME, PREFIX_GENDER, PREFIX_YEAR, PREFIX_PHONE, PREFIX_SPECIALISATION);
        Index index;
        try {
            index = ParserUtil.parseIndex(argMultimap.getPreamble());
        } catch (ParseException pe) {
            // Re-throw with the command's usage message so the user sees the
            // expected format rather than a bare index error.
            throw new ParseException(String.format(
                    MESSAGE_INVALID_COMMAND_FORMAT, EditDoctorCommand.MESSAGE_USAGE), pe);
        }
        // Copy each supplied prefix into the descriptor; absent prefixes stay unset
        // so only the edited fields are applied.
        EditDoctorDescriptor editDoctorDescriptor = new EditDoctorDescriptor();
        if (argMultimap.getValue(PREFIX_NAME).isPresent()) {
            editDoctorDescriptor.setName(ParserUtil.parseName(argMultimap.getValue(PREFIX_NAME).get()));
        }
        if (argMultimap.getValue(PREFIX_GENDER).isPresent()) {
            editDoctorDescriptor.setGender(ParserUtil.parseGender(argMultimap.getValue(PREFIX_GENDER).get()));
        }
        if (argMultimap.getValue(PREFIX_YEAR).isPresent()) {
            editDoctorDescriptor.setYear(ParserUtil.parseYear(argMultimap.getValue(PREFIX_YEAR).get()));
        }
        if (argMultimap.getValue(PREFIX_PHONE).isPresent()) {
            editDoctorDescriptor.setPhone(ParserUtil.parsePhone(argMultimap.getValue(PREFIX_PHONE).get()));
        }
        // Specialisation is multi-valued: collect every occurrence of the prefix.
        if (argMultimap.getValue(PREFIX_SPECIALISATION).isPresent()) {
            editDoctorDescriptor.setSpecs(ParserUtil
                    .parseSpecialisations(argMultimap.getAllValues(PREFIX_SPECIALISATION)));
        }
        // An edit command with no fields would be a no-op — reject it.
        if (!editDoctorDescriptor.isAnyFieldEdited()) {
            throw new ParseException(EditDoctorCommand.MESSAGE_NOT_EDITED);
        }
        return new EditDoctorCommand(index, editDoctorDescriptor);
    }
}
|
# Interactive shell tutorial: demonstrates declaring variables, constants,
# and `let` arithmetic, pausing between sections so the viewer can read.
clear
echo 'Declaring variable?'
sleep 2
# Show the lesson text verbatim (single quotes keep the backticks literal).
echo '
myVariable= # Declaring, but not initializing it. Uninitialized variable has null value.
myVariable=`Hello World!` # Here myVariable is a variable name and `Hello World!` is its value.
Lets display the variable value simply echoing `echo $myVariable`
'
sleep 2
# Now actually do it and show the result (double quotes allow expansion).
myVariable='Hello World!'
echo "
Variable value is: $myVariable
=======================================================================================================================================
"
sleep 5
echo 'Declare constant?'
sleep 2
echo '
MYCONSTANT=`Hello Universe!` # Uppercase for declaring constant lowercase for variables. Constants cant be change.
Lets display constants `echo $MYCONSTANT`
'
sleep 2
MYCONSTANT='Hello Universe!'
echo "
Constant value is: $MYCONSTANT
=======================================================================================================================================
"
sleep 5
# Third lesson: the `let` keyword for arithmetic assignment.
echo '
Variable can also be declared using let keyword, e.g let x=5.
Lets display value of x, `echo $x`
'
sleep 2
let x=5
echo "
Value of x is: $x
"
sleep 5
echo 'Additional info.'
echo '
=======================================================================================================================================
Performing arithmetic operation on variables. E.g let "sum += 5". We can also
assign commands to a variable e.g a=`echo Hello!` assigns result of echo
command to a.
=======================================================================================================================================
'
|
package ca.nova.gestion.model;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
 * DTO pairing a blank with the quantities received and used in an order's
 * history. Lombok generates getters/setters, equals/hashCode/toString
 * ({@code @Data}) and the all-args constructor ({@code @AllArgsConstructor}).
 */
@Data
@AllArgsConstructor
public class OrderHistoryData {
    private Integer idBlank;          // identifier of the associated Blank
    private Blank blank;              // the resolved Blank entity
    private Integer receivedQuantity; // units received
    private Integer usedQuantity;     // units consumed
}
|
#! /bin/bash

# Description: This script compiles and copy the needed files to later package the application for Chrome

# ANSI color fragments used by the status messages below.
OpenColor="\033["
Red="1;31m"
Yellow="1;33m"
Green="1;32m"
CloseColor="\033[0m"

# Abort with a red error message if the previous command failed.
checkOK() {
	if [ $? != 0 ]; then
		echo "${OpenColor}${Red}* ERROR. Exiting...${CloseColor}"
		exit 1
	fi
}

# Configs
BUILDDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
APPDIR="$BUILDDIR/copay-chrome-extension"
ZIPFILE="copay-chrome-extension.zip"
# App version: second quote-delimited field of the first line of version.js.
VERSION=`cut -d '"' -f2 "$BUILDDIR/../../src/js/version.js" | head -n 1`

# Move to the build directory (abort if it is unreachable).
cd "$BUILDDIR" || exit 1

# Create/Clean temp dir
echo "${OpenColor}${Green}* Checking temp dir...${CloseColor}"
if [ -d "$APPDIR" ]; then
	rm -rf "$APPDIR"
fi
mkdir -p "$APPDIR"

# Re-compile copayBundle.js
echo "${OpenColor}${Green}* Generating copay bundle...${CloseColor}"
grunt
checkOK

# Copy all chrome-extension files, stamping the version into the manifest.
echo "${OpenColor}${Green}* Copying all chrome-extension files...${CloseColor}"
sed "s/APP_VERSION/$VERSION/g" manifest.json > "$APPDIR/manifest.json"
checkOK

INCLUDE=`cat ../include`
INITIAL="$BUILDDIR/initial.js"
echo "INITIAL: $INITIAL"
cp -vf "$INITIAL" "$APPDIR"

cd "$BUILDDIR/../../public"
# NOTE: $INCLUDE is intentionally left unquoted so its entries word-split
# into separate rsync arguments.
CMD="rsync -rLRv --exclude-from $BUILDDIR/../exclude $INCLUDE $APPDIR"
echo $CMD
$CMD
checkOK

# Zipping chrome-extension
echo "${OpenColor}${Green}* Zipping all chrome-extension files...${CloseColor}"
cd "$BUILDDIR"
# FIX: -f so the first run (no previous zip) does not print an error.
rm -f "$ZIPFILE"
zip -qr "$ZIPFILE" "`basename "$APPDIR"`"
checkOK

echo "${OpenColor}${Yellow}\nThe Chrome Extension is ready at $BUILDDIR/copay-chrome-extension.zip${CloseColor}"
|
#!/bin/sh
# Symlink shared libraries (zlib, freetype, libjpeg) from the Debian/Ubuntu
# multiarch directories into the legacy /usr/lib and /lib paths, for build
# tools that only search the legacy locations.
# NOTE(review): ln -s fails (harmlessly, with an error message) if a link
# already exists — consider `ln -sf` if re-runs should be silent.
sudo ln -s /usr/lib/x86_64-linux-gnu/libz.so /usr/lib/
sudo ln -s /lib/x86_64-linux-gnu/libz.so.1 /lib/
sudo ln -s /usr/lib/x86_64-linux-gnu/libfreetype.so /usr/lib/
sudo ln -s /usr/lib/x86_64-linux-gnu/libfreetype.so.6 /usr/lib/
sudo ln -s /usr/lib/x86_64-linux-gnu/libjpeg.so /usr/lib/
sudo ln -s /usr/lib/x86_64-linux-gnu/libjpeg.so.62 /usr/lib/
|
def is_anagram(str1, str2):
    """
    Check if two strings are anagrams, ignoring case and spaces.

    Parameters:
        str1 (str): First string
        str2 (str): Second string

    Returns:
        bool: True if the strings are anagrams, False otherwise
    """
    # Normalize: case-fold and drop spaces so "Dirty Room" matches "dormitory".
    str1 = str1.lower().replace(" ", "")
    str2 = str2.lower().replace(" ", "")
    # Anagrams have identical character multisets, i.e. equal sorted forms.
    return sorted(str1) == sorted(str2)
if __name__ == '__main__':
    # Quick smoke test when the module is run as a script.
    s1 = 'listen'
    s2 = 'silent'
    print(is_anagram(s1, s2))  # True
// Doxygen-generated navigation data for struct Catch::ShowDurations:
// maps enum member names to their documentation page anchors.
// Machine-generated — do not edit by hand.
var structCatch_1_1ShowDurations =
[
    [ "OrNot", "structCatch_1_1ShowDurations.html#a82fa0174554187220c1eda175f122ee1", [
      [ "DefaultForReporter", "structCatch_1_1ShowDurations.html#a82fa0174554187220c1eda175f122ee1aba1710583107b0736c1f5f0f8dfd23c8", null ],
      [ "Always", "structCatch_1_1ShowDurations.html#a82fa0174554187220c1eda175f122ee1ab49682ccb55f2d6b4dfcdb027c09da9a", null ],
      [ "Never", "structCatch_1_1ShowDurations.html#a82fa0174554187220c1eda175f122ee1af1a716bc46185f561382a12a0dede9f3", null ]
    ] ]
];
package model;
import static org.junit.Assert.assertEquals;
import java.util.Iterator;
import org.junit.Before;
import org.junit.Test;
import factory.GrafoStaticFactory;
import factory.GrafosFactory;
/** Unit tests for {@code Vertice} and its relationship with {@code Aresta}. */
public class VerticeTest {

    private Vertice a;
    private Vertice b;
    private Aresta AB, AC;
    private Vertice c;

    @Before
    public void setup() {
        // Build a tiny graph: A is connected to C (edge AC) and to B (edge AB).
        GrafosFactory factory = GrafoStaticFactory.criaFactory();
        a = factory.criaVertice("A", 0);
        c = factory.criaVertice("C", 2);
        b = factory.criaVertice("B", 1);
        AC = factory.criaAresta();
        a.addAresta(AC);
        c.addAresta(AC);
        AB = factory.criaAresta();
        a.addAresta(AB);
        b.addAresta(AB);
    }

    @Test
    public void test_adjacente() {
        // NOTE(review): expects getAdjacentes() to yield B before C even though
        // edge AC was added first — presumably ordered by vertex index; confirm
        // against Vertice's implementation.
        Iterator<Vertice> iterator = a.getAdjacentes().iterator();
        assertEquals(b, iterator.next());
        assertEquals(c, iterator.next());
    }

    @Test
    public void test_aresta_vertice_ligado_a() {
        // Asking edge AB from B's side must report A as the opposite endpoint.
        Vertice verticeLigado = AB.getVerticeLigadoA(b);
        assertEquals(a, verticeLigado);
    }

    @Test(expected = IllegalStateException.class)
    public void test_add_varios_vertices() {
        // An Aresta connects exactly two vertices; adding a third must throw.
        GrafosFactory factory = GrafoStaticFactory.criaFactory();
        Vertice verticeA = factory.criaVertice("A", 0);
        Vertice verticeB = factory.criaVertice("B", 1);
        Vertice verticeC = factory.criaVertice("C", 2);
        Aresta aresta = factory.criaAresta();
        aresta.addVertice(verticeA);
        aresta.addVertice(verticeB);
        aresta.addVertice(verticeC);
    }
}
|
const axios = require('axios');

/**
 * POST `body` to `url` with the given `headers` and resolve with the
 * response payload (axios' `response.data`). Rejects on transport or
 * HTTP errors, as axios does by default.
 */
function sendPostRequest(url, body, headers) {
    const request = {
        method: 'post',
        url,
        data: body,
        headers
    };
    return axios(request).then(function (response) {
        return response.data;
    });
}

module.exports = sendPostRequest;
<gh_stars>0
package com.atguigu.mpdemo1010.mapper;
import com.atguigu.mpdemo1010.entity.User;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.springframework.stereotype.Repository;
/**
 * MyBatis-Plus mapper for {@code User}; inherits the standard CRUD operations
 * from {@code BaseMapper}. Annotated {@code @Repository} so Spring registers
 * it as a bean for autowiring.
 */
@Repository
public interface UserMapper extends BaseMapper<User> {

}
|
/// Simulates `cargo package` output: filters `file_names` against the
/// glob-style `exclude_patterns` and reports the package line for `directory`.
///
/// NOTE: the filtered list does not (yet) affect the returned string — it is
/// kept (as `_filtered`) so callers can extend this to report included files.
/// FIX: the previous version built the matcher with the undeclared third-party
/// `regex` crate and left the result in an unused variable (compiler warning);
/// matching is now done with a small std-only glob matcher.
fn package_files(file_names: Vec<&str>, exclude_patterns: Vec<&str>, directory: &str) -> String {
    // Matches a glob pattern where '*' stands for any run of characters and
    // every other character is literal (e.g. "*.txt" matches "src/bar.txt").
    fn glob_match(pattern: &str, text: &str) -> bool {
        let parts: Vec<&str> = pattern.split('*').collect();
        if parts.len() == 1 {
            // No wildcard: exact match only.
            return pattern == text;
        }
        // First segment anchors at the start, last at the end.
        if !text.starts_with(parts[0]) {
            return false;
        }
        let mut rest = &text[parts[0].len()..];
        let last = parts[parts.len() - 1];
        if !rest.ends_with(last) {
            return false;
        }
        rest = &rest[..rest.len() - last.len()];
        // Middle segments must occur in order within what remains.
        for part in &parts[1..parts.len() - 1] {
            match rest.find(part) {
                Some(i) => rest = &rest[i + part.len()..],
                None => return false,
            }
        }
        true
    }

    let _filtered: Vec<&str> = file_names
        .iter()
        .filter(|&&file| !exclude_patterns.iter().any(|p| glob_match(p, file)))
        .copied()
        .collect();
    format!("Packaging foo v0.0.1 ({})", directory)
}
fn main() {
let file_names = vec!["src/main.rs", "src/bar.txt"];
let exclude_patterns = vec!["*.txt"];
let directory = "/path/to/directory";
let result = package_files(file_names, exclude_patterns, directory);
println!("{}", result); // Output: "Packaging foo v0.0.1 (/path/to/directory)"
} |
# Fetch a voicemail recording over gRPC and play it:
#  - authenticates with the operator token via HTTP Basic,
#  - calls wgtwo.voicemail.v0.VoicemailMediaService.GetVoicemail for the id,
#  - the response carries the audio base64-encoded in the .wav field: jq
#    extracts it, base64 -d decodes it, tee saves a copy to voicemail.wav,
#    and aplay plays it (Linux only).
grpcurl \
 -H "Authorization: Basic ${OPERATOR_TOKEN}"\
 -import-path . \
 -proto wgtwo/voicemail/v0/voicemail.proto \
 -d '{ "voicemail_id": "my-voicemail-id" }' \
 api.wgtwo.com:443 \
 wgtwo.voicemail.v0.VoicemailMediaService.GetVoicemail \
 | jq -r .wav \
 | base64 -d \
 | tee voicemail.wav \
 | aplay # Linux
|
#pragma once
/*
Header only arg parser, collects all args, has required/optional args, and prints help if args didn't match.
Usage: Insert code like this in the main function.
std::vector<const char*> source;
const char* output = nullptr;
bool strict_mode = false;
int32_t job_count = 8;
// also supports float and uint32_t values!
    if (!ParseArgs(argc, argv, {
{&source, Arg::Required, 0, nullptr, "List of source files to compile."},
{&output, Arg::Required, 'o', "output", "Output filename to compile to."},
{&strict_mode, Arg::Optional, 0, "strict", "Use strict compile mode."},
{&job_count, Arg::Optional, 'j', "jobs", "How many jobs to create."},
})) return 1;
*/
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <initializer_list>
#include <string>
#include <vector>
// One command-line argument declaration: where to store the value, how it is
// named (-x / --long / positional "source"), whether it is required, and the
// help text for the usage listing.
struct Arg
{
    // Caller-owned destination for the parsed value; the active member is
    // selected by `type`.
    union
    {
        uint32_t* u32;
        int32_t* i32;
        float* f32;
        const char** string;
        bool* flag;
        std::vector<const char*>* list;
    };
    enum class Type
    {
        U32,
        I32,
        F32,
        String,
        Flag,
        List,
    };
    enum RequiredType
    {
        Optional,
        Required,
    };
    const char short_name;   // single-dash letter; 0 means "no short form"
    const char* name;        // double-dash long name; nullptr means "no long form"
    const char* help;        // description for the usage listing; may be nullptr
    Type type;
    RequiredType required_type;
    mutable bool was_parsed = false;  // set by parse(), checked by is_valid()

    // NOTE: member-initializer lists below are ordered to match declaration
    // order (union value, short_name, name, help, type, required_type) so the
    // compiler does not emit -Wreorder warnings; initialization always happens
    // in declaration order regardless.
    Arg(uint32_t *value, RequiredType required_type, char short_name, const char* name, const char* help)
        : u32(value)
        , short_name(short_name)
        , name(name)
        , help(help)
        , type(Type::U32)
        , required_type(required_type)
    {
    }
    Arg(int32_t *value, RequiredType required_type, char short_name, const char* name, const char* help)
        : i32(value)
        , short_name(short_name)
        , name(name)
        , help(help)
        , type(Type::I32)
        , required_type(required_type)
    {
    }
    Arg(float *value, RequiredType required_type, char short_name, const char* name, const char* help)
        : f32(value)
        , short_name(short_name)
        , name(name)
        , help(help)
        , type(Type::F32)
        , required_type(required_type)
    {
    }
    Arg(const char **value, RequiredType required_type, char short_name, const char* name, const char* help)
        : string(value)
        , short_name(short_name)
        , name(name)
        , help(help)
        , type(Type::String)
        , required_type(required_type)
    {
    }
    // this will just invert the flag if found, so pass what you don't want
    Arg(bool *value, RequiredType required_type, char short_name, const char* name, const char* help)
        : flag(value)
        , short_name(short_name)
        , name(name)
        , help(help)
        , type(Type::Flag)
        , required_type(required_type)
    {
    }
    Arg(std::vector<const char*> *value, RequiredType required_type, char short_name, const char* name, const char* help)
        : list(value)
        , short_name(short_name)
        , name(name)
        , help(help)
        , type(Type::List)
        , required_type(required_type)
    {
    }

    // Parses `s` into the destination according to `type` and records that the
    // argument was seen. For Flag the string is ignored and the flag inverted.
    // Always returns true (atoi/atof fall back to 0 on malformed input).
    bool parse(const char* s) const
    {
        was_parsed = true;
        switch (type)
        {
        case Type::U32:
            *u32 = (uint32_t)atoi(s);
            break;
        case Type::I32:
            *i32 = (int32_t)atoi(s);
            break;
        case Type::F32:
            *f32 = (float)atof(s);
            break;
        case Type::String:
            *string = s;
            break;
        case Type::List:
            list->push_back(s);
            break;
        case Type::Flag:
            *flag = !*flag;
            break;
        }
        return true;
    }

    // Checks the Required contract: a Required argument must have been parsed.
    // Prints a diagnostic and returns false when violated.
    bool is_valid() const
    {
        if (required_type == Required && !was_parsed)
        {
            if (name)
            {
                printf("'--%s' is required\n", name);
            }
            else if (short_name)
            {
                printf("'-%c' is required\n", short_name);
            }
            else
            {
                printf("[source] list is required.");
            }
            return false;
        }
        return true; // FIX: previously fell off the end of this bool function (UB)
    }

    // A "source" argument has neither a short nor a long name; it collects
    // positional (non-dash) arguments.
    bool is_source() const
    {
        return name == nullptr && short_name == 0;
    }

    // Appends the current (default) value to the usage line for value types.
    void print_default_value() const
    {
        switch (type)
        {
        case Type::U32:
            printf(" Default: %u", *u32);
            break;
        case Type::I32:
            printf(" Default: %d", *i32);
            break;
        case Type::F32:
            printf(" Default: %f", *f32);
            break;
        case Type::String:
            if (*string)
            {
                printf(" Default: %s", *string);
            }
            break;
        case Type::List:
            break;
        case Type::Flag:
            break;
        }
    }
};
// Parses argc/argv against the declared args. Returns false — after printing
// a usage listing — on any error or on -?/--help; returns true only when
// parsing succeeded AND every Required argument was supplied.
// FIX: printf(source_arg) / printf(a.help) treated user text as a format
// string (a '%' in help text would corrupt output); now printed via "%s".
// FIX: Required arguments were never checked; they are now validated after
// the parse loop.
inline bool ParseArgs(int argc, const char** argv, std::initializer_list<Arg> args)
{
    // Prints an optional error line followed by the aligned usage listing.
    // Always returns false so callers can `return print_usage(...)`.
    auto print_usage = [&](const char* help)
    {
        bool has_source = false;
        size_t max_long_name = 6; // help str
        const char *source_arg = nullptr;
        // First pass: find the widest long-name column for alignment.
        for (const Arg& a : args)
        {
            if (a.is_source())
            {
                has_source = true;
                source_arg = a.type == Arg::Type::List ? "[source1 source2]" : "[source]";
                max_long_name = std::max<size_t>(max_long_name, strlen(source_arg));
            }
            if (!a.name)
                continue;
            max_long_name = std::max<size_t>(max_long_name, strlen(a.name)+2);
        }
        if (help)
        {
            puts(help);
        }
        printf("Usage: %s [args]", argv[0]);
        if (has_source)
            printf(" %s...", source_arg);
        putchar('\n');
        max_long_name += 2; // add some spacing
        printf("  -? --help");
        for (size_t n = 5; n < max_long_name; ++n)
            putchar(' ');
        puts("Print this message");
        // Second pass: one usage line per declared argument.
        for (const Arg& a : args)
        {
            if (a.short_name)
                printf("  -%c ", a.short_name);
            else
                printf("     ");
            size_t remaining = max_long_name;
            if (a.name)
            {
                printf("--%s", a.name);
                remaining = max_long_name - (strlen(a.name)+2);
            }
            else
            {
                if (a.is_source())
                {
                    printf("%s", source_arg); // FIX: was printf(source_arg)
                    remaining = max_long_name - strlen(source_arg);
                }
            }
            // space help to start on the same column
            for (size_t n = 0; n < remaining; ++n)
                putchar(' ');
            if (a.help)
            {
                printf("%s", a.help); // FIX: was printf(a.help)
            }
            a.print_default_value();
            putchar('\n');
        }
        return false;
    };

    for (int n = 1; n < argc;)
    {
        const char* current = argv[n];
        if (current[0] != '-')
        {
            // Positional argument: route it to the "source" collector if any.
            bool valid = false;
            for (const Arg& a : args)
            {
                if (a.is_source())
                {
                    a.parse(current);
                    valid = true;
                    break;
                }
            }
            if (!valid)
            {
                char tmp[512];
                snprintf(tmp, sizeof(tmp), "Invalid argument '%s', ", current);
                return print_usage(tmp);
            }
            else
            {
                ++n;
                continue;
            }
        }
        const Arg* match = nullptr;
        if (current[1] == '-')
        {
            if (strcmp(current+2, "help")==0)
                return print_usage(nullptr);
            // parsing long name
            for (const Arg& a : args)
            {
                if (!a.name)
                    continue;
                if (strcmp(current+2, a.name)==0)
                {
                    match = &a;
                    break;
                }
            }
        }
        else
        {
            // A single dash must carry exactly one letter ("-j", not "-jobs").
            if (!current[1] || current[2])
            {
                char tmp[512];
                snprintf(tmp, sizeof(tmp), "Invalid argument '%s', expected single letter after single dash", current);
                return print_usage(tmp);
            }
            if (current[1] == '?')
                return print_usage(nullptr);
            // parsing short name
            for (const Arg& a : args)
            {
                if (a.short_name == current[1])
                {
                    match = &a;
                    break;
                }
            }
        }
        ++n;
        if (!match)
        {
            char tmp[512];
            snprintf(tmp, sizeof(tmp), "Unknown argument %s", current);
            return print_usage(tmp);
        }
        // Flags take no value; just toggle and move on.
        if (match->type == Arg::Type::Flag)
        {
            *match->flag = !*match->flag;
            continue;
        }
        // expected more elements
        if (n == argc)
        {
            char tmp[512];
            snprintf(tmp, sizeof(tmp), "Expected value after %s", argv[n-1]);
            return print_usage(tmp);
        }
        // now parse the value
        if (!match->parse(argv[n]))
        {
            char tmp[512];
            snprintf(tmp, sizeof(tmp), "Couldn't parse value for %s", argv[n-1]);
            return print_usage(tmp);
        }
        ++n;
    }
    // Verify that every Required argument was supplied (mirrors Arg::is_valid,
    // done inline so this function is self-contained).
    bool all_required_present = true;
    for (const Arg& a : args)
    {
        if (a.required_type == Arg::Required && !a.was_parsed)
        {
            if (a.name)
                printf("'--%s' is required\n", a.name);
            else if (a.short_name)
                printf("'-%c' is required\n", a.short_name);
            else
                printf("[source] list is required.\n");
            all_required_present = false;
        }
    }
    if (!all_required_present)
        return print_usage(nullptr);
    return true;
}
import skimage.measure
import numpy as np
import cv2
import io
import matplotlib.pyplot as plt
import keras
import tensorflow as tf
from . import config
from . import utils
# Load the food-segmentation model once at import time. The custom_objects
# mapping satisfies the saved model's reference to a `lovasz_hinge` loss by
# substituting binary cross-entropy — presumably only needed so that
# deserialization succeeds, since the loss is unused at inference; confirm.
segmentation_model = tf.keras.models.load_model(
    config.SEG_MODEL_PATH,
    custom_objects={'lovasz_hinge': keras.losses.binary_crossentropy}
)
def _get_segmentation(image):
    """ Returning the raw segmentation mask for the image. Each pixel's value
    stands for the probability of this pixel being food.

    Args:
        image: The image to predict, represented as a numpy array with shape
            `(*configure.UNIFIED_IMAGE_SIZE, 3)`.

    Returns:
        The segmentation mask with shape `config.UNIFIED_IMAGE_SIZE`.
    """
    # TODO(<EMAIL>): Try to figure out why transposing the image
    # will impact the model's performance.
    global segmentation_model

    def center_normalize(image):
        # Per-channel standardization; the statistics are computed on a
        # 512x512 resize of the image rather than the image itself —
        # NOTE(review): presumably intentional (matches training); confirm.
        mean = np.mean(cv2.resize(image, (512, 512)), axis=(0, 1))
        std = np.std(cv2.resize(image, (512, 512)), axis=(0, 1))
        return (image - mean) / std

    # Swap width/height before prediction and swap back afterwards (see the
    # TODO above about why the model expects the transposed orientation).
    image = np.swapaxes(image, 0, 1)
    predicted_result = segmentation_model.predict(
        np.reshape(center_normalize(image), (1, *config.UNIFIED_IMAGE_SIZE, 3))
    )[0]
    return np.swapaxes(np.reshape(
        predicted_result,
        config.UNIFIED_IMAGE_SIZE
    ), 0, 1)
def _get_entity_labeling(image, mask):
    """ Getting the entity labeling that cover the food entities in the image.

    Args:
        image: The colored image, represented as a numpy array with shape
            `(width, height, 3)`.
        mask: The food segmentation mask. A numpy array with the same resolution
            with `image`, each pixel stands for the probability of the
            corresponding pixel in `image` being food.

    Returns:
        `(label_mask, boxes)`
        `label_mask` is a 2d numpy array that mark different entities in the image
        with positive integers starting from 0, while 0 stand for background.
        The pixels with a probability greater than `config.FOOD_PROB_THRESHOLD`
        will be considered as food. The size of `label_mask` is reduced from `image`.
        `boxes` is a list of entity boxes having the same order with `label_mask`.
        Each entity box is represented as a 2x2 2d list, which stands for
        `[[min width, max width], [min height, max height]]`. Background is not
        included in `boxes`. The coordinate is relative to `label_mask`.
    """
    # TODO(<EMAIL>): Consider using the colored image along with
    # the mask to generate entity boxes, which separate enties within one connected
    # component.
    # Threshold the probability mask into a binary food/background mask.
    bin_func = np.vectorize(lambda x: 0 if x < config.FOOD_PROB_THRESHOLD else 1)
    binary_mask = bin_func(mask)
    # Connected-component labeling (8-connectivity): background stays 0,
    # components are numbered upwards from 1.
    label_mask = skimage.measure.label(binary_mask, connectivity=2, background=0)
    # One half-open bounding box per label value; background (label 0) is
    # produced at index 0. NOTE(review): the index/label correspondence relies
    # on np.unique returning the contiguous labels 0..n in order — confirm.
    boxes = [[
        *map(lambda x: (min(x), max(x) + 1), np.where(label_mask == entity))
    ] for entity in np.unique(label_mask)
    ]
    # Reject entities whose bounding box is too small on either axis.
    invalid_entity_indices = [
        index
        for index, box in enumerate(boxes)
        if min(box[0][1] - box[0][0], box[1][1] - box[1][0]) < config.FOOD_MIN_SIZE_THRESHOLD
    ]
    # Erase rejected entities from the mask (reassign them to background).
    # NOTE(review): the surviving labels are NOT renumbered here, so label
    # values can be non-contiguous afterwards despite the docstring's
    # "positive integers starting from 0" — confirm callers tolerate gaps.
    label_mask[np.isin(label_mask, invalid_entity_indices)] = 0
    boxes = [box for index, box in enumerate(boxes) if index not in invalid_entity_indices]
    # Drop index 0 (background) from the returned boxes.
    return label_mask, boxes[1:]
def _index_crop(array, i, multiplier):
""" Cropping an image with a specified width and height index range as well
as a multiplier. The cropped image will be a square image that cover the
region specified by the index while having the minimum area.
Args:
array: A 3d numpy array with shape (width, height, channel).
i: The crop index, represented as a 2x2 2d list, such as which stands for
`[[min width, max width], [min height, max height]]`.
multiplier: The multiplier of the index. Considering the index is calculated
on an image which might have different resolution with the original
image, this parameter is used to compensate the gap. The value should
be resolution of `array` / resolution of the image where `i` is calculated.
Returns:
The cropped image, represented as a numpy array.
"""
width, height = abs(i[0][0] - i[0][1]), abs(i[1][0] - i[1][1])
array_width, array_height, _ = array.shape
def get_offset(i_range, margin, width):
if i_range[0] - margin >= 0 and i_range[1] + margin < array_width:
return 0
elif i_range[0] - margin < 0:
return margin - i_range[0]
else:
return width - i_range[1] - margin
margin = abs(height - width) / 2.0
if width < height:
offset = get_offset(i[0], margin, array_width)
return array[
int((i[0][0]-margin+offset)*multiplier) : int((i[0][1]+margin+offset)*multiplier),
int(i[1][0]*multiplier) : int(i[1][1]*multiplier),
:
]
elif width > height:
offset = get_offset(i[1], margin, array_height)
return array[
int(i[0][0]*multiplier) : int(i[0][1]*multiplier),
int((i[1][0]-margin+offset)*multiplier) : int((i[1][1]+margin+offset)*multiplier),
:
]
else:
return array[
int(i[0][0]*multiplier) : int(i[0][1]*multiplier),
int(i[1][0]*multiplier) : int(i[1][1]*multiplier),
:
]
def get_recognition_results(image, calibration):
    """ Get the recognition result of the color image with corresponding mask.
    Get a list of image buffers with the cropped food image in `image`. Images
    in the list are all resized to `config.CLASSIFIER_IMAGE_SIZE`.
    Args:
        image: The raw resolution colored square image, represented as a numpy array.
        calibration: The camera calibration data when capturing the image.
    Returns:
        A tuple `(label_mask, boxes, buffers)`.
        `label_mask` is a 2d numpy array that mark different entities in the image
        with ascending integers starting from 0, while 0 stand for background.
        The pixels with a probability greater than `config.FOOD_PROB_THRESHOLD`
        will be considered as food.
        `remapped_boxes` is a list of entity boxes. Each entity box is represented
        as a list, which stands for `[width min, width max, height min, height max]`.
        Background is not included in `boxes`. Values in the list are relative,
        that is the value divided by the length of the corresponding edge.
        `buffers` is a list of image buffers, each image is the cropped food
        image in `image`, and are all resized to `config.CLASSIFIER_IMAGE_SIZE`.
    """
    # Undistort/normalize the capture before segmentation (see utils.regulate_image).
    regulated_image = utils.regulate_image(image, calibration)
    mask = _get_segmentation(regulated_image)
    label_mask, boxes = _get_entity_labeling(regulated_image, mask)
    # Scale factor between the raw image and the unified-size image the boxes
    # were computed on; assumes `image` is square — TODO confirm.
    multiplier = image.shape[0] / config.UNIFIED_IMAGE_SIZE[0]
    # Crop each box (padded by CLASSIFIER_IMAGE_OFFSET, clamped to the unified
    # bounds) out of the axis-swapped, center-cropped raw image, then resize
    # to the classifier's expected input size.
    images = [
        cv2.resize(
            _index_crop(
                utils.center_crop(np.swapaxes(image, 0, 1)), [
                    [max(0, box[0][0] - config.CLASSIFIER_IMAGE_OFFSET), min(config.UNIFIED_IMAGE_SIZE[0] - 1, box[0][1] + config.CLASSIFIER_IMAGE_OFFSET)],
                    [max(0, box[1][0] - config.CLASSIFIER_IMAGE_OFFSET), min(config.UNIFIED_IMAGE_SIZE[0] - 1, box[1][1] + config.CLASSIFIER_IMAGE_OFFSET)]
                ], multiplier
            ),
            config.CLASSIFIER_IMAGE_SIZE
        ) for box in boxes
    ]
    # TODO(<EMAIL>): Map the boxes back to match the undistorted coordinate.
    # Normalize box coordinates to [0, 1] relative to the label mask edge.
    remapped_boxes = [[float(item / label_mask.shape[0]) for tp in box for item in tp] for box in boxes]
    # Serialize each crop as a JPEG into an in-memory buffer.
    buffers = [io.BytesIO() for _ in range(len(images))]
    [plt.imsave(buffer, image, format='jpeg') for buffer, image in zip(buffers, images)]
    return label_mask, remapped_boxes, buffers
|
def eval(root):
    """Recursively evaluate a binary expression tree.

    Internal nodes carry one of the operator strings '+', '-', '*', '/';
    leaves carry numeric strings. Division is floor division ('//'), as in
    the original implementation. An empty tree evaluates to 0.
    """
    if root is None:
        return 0
    token = root.data
    if token == '+':
        return eval(root.left) + eval(root.right)
    if token == '-':
        return eval(root.left) - eval(root.right)
    if token == '*':
        return eval(root.left) * eval(root.right)
    if token == '/':
        return eval(root.left) // eval(root.right)
    # Not an operator: this is a numeric leaf.
    return int(token)
# Driver Code
# Build the expression tree for (2 * 3) + (4 / 5).
# `Node` is assumed to be defined elsewhere in this file with `data`,
# `left` and `right` attributes — TODO confirm.
# expression tree
root = Node('+')
root.left = Node('*')
root.left.left = Node('2')
root.left.right = Node('3')
root.right = Node('/')
root.right.left = Node('4')
root.right.right = Node('5')
# output
print(eval(root))
# Output: 6  (2*3 == 6 and 4//5 == 0 under floor division; the earlier
# "17" comment was incorrect)
#!/bin/bash
# Nagios-style check for Linux software RAID (md) mismatch counts.
# Sums every /sys/block/md*/md/mismatch_cnt and compares the total against
# the -w/-c thresholds; emits one perfdata entry per md device.

WARNING=1
CRITICAL=1
MD_IS_OPTIONAL=0

usage() {
	echo -e "Usage: $0 [-w <warning>] [-c <critical>] [-o]\n -w <warning> \tWarning threshold for mismatch_cnt (OPTIONAL, default: 1)\n -c <critical> \tCritical threshold for mismatch_cnt (OPTIONAL, default: 1 (always critical, no warning))\n -o \t\tIndicate that it is OK for there to be no software raid (without this flag UNKNOWN is returned if no software raid is found)\n -h\t\tDisplays this help message\n" 1>&2;
	exit 3;
}

# check for command line arguments
# BUG FIX: the optstring used to be ":w:c:h:o", which made -h demand an
# argument (so "-h foo" silently consumed "foo"); -h is a plain flag.
while getopts ":w:c:ho" option; do
	case "${option}" in
		w) WARNING=${OPTARG};;
		c) CRITICAL=${OPTARG};;
		o) MD_IS_OPTIONAL=1;;
		h) usage;;
		*) usage;;
	esac
done

# WARNING and CRITICAL must be decimal numbers
echo "$WARNING"|grep -qE "^[0-9]+$" || usage
echo "$CRITICAL"|grep -qE "^[0-9]+$" || usage

# Resets position of first non-option argument
shift "$((OPTIND-1))"

DATA=0
MD_FOUND=0
PERF_DATA=""
# Accumulate the mismatch count of every md device; process substitution
# (not a pipe) keeps the loop in the current shell so the variables persist.
while read -r file
do
	DATA2="$(cat "$file")"
	DATA="$((DATA + DATA2))"
	MD_NAME=$(echo "$file" | awk -F '/' '{print $4}')
	PERF_DATA+=" $MD_NAME=$DATA2"
	MD_FOUND=1
done < <( find /sys/block/md*/md/mismatch_cnt 2>/dev/null )

if [ "$MD_FOUND" -eq 0 ]; then
	if [ "$MD_IS_OPTIONAL" -eq 0 ]; then
		echo "UNKNOWN - software raid mismatch_cnts not found - no software raid active?"
		exit 3;
	else
		echo "OK - software raid mismatch_cnts not found - no software raid active"
		exit 0
	fi
fi

if [ "$DATA" -ge "$CRITICAL" ]; then
	echo "CRITICAL - software raid mismatch_cnts are greater or equal than $CRITICAL |$PERF_DATA"
	exit 2;
elif [ "$DATA" -ge "$WARNING" ] ; then
	echo "WARNING - software raid mismatch_cnts are greater or equal than $WARNING |$PERF_DATA"
	exit 1;
else
	echo "OK - all software raid mismatch_cnts are smaller than $WARNING |$PERF_DATA"
	exit 0;
fi
<gh_stars>1-10
"use strict"
const { PDFDocument } = require("pdf-lib")
module.exports = async pdfs => {
const result = await PDFDocument.create()
for await (const pdf of pdfs) {
const newPdf = await PDFDocument.load(pdf)
for await (const page of await result.copyPages(newPdf, newPdf.getPageIndices())) {
result.addPage(page)
}
}
return result.save()
}
|
package com.jiataoyuan.serialportassistant;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.Configuration;
import android.os.Bundle;
import android.text.InputType;
import android.text.method.KeyListener;
import android.text.method.NumberKeyListener;
import android.text.method.TextKeyListener;
import android.text.method.TextKeyListener.Capitalize;
import android.util.Base64;
import android.view.KeyEvent;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.RadioButton;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ToggleButton;

import com.jiataoyuan.bean.AssistBean;
import com.jiataoyuan.bean.ComBean;
import com.jiataoyuan.serialport.SerialPortFinder;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* serialport api和jni取自http://code.google.com/p/android-serialport-api/
* <p>
* 串口助手,支持双串口同时读写
* 程序载入时自动搜索串口设备
* n,8,1,没得选
*
* @author TaoYuan
*/
public class AssistantActivity extends Activity {
EditText editTextRecDisp, editTextLines, editTextCOMA, editTextCOMB;
EditText editTextTimeCOMA, editTextTimeCOMB;
CheckBox checkBoxAutoClear, checkBoxAutoCOMA, checkBoxAutoCOMB;
Button ButtonClear, ButtonSendCOMA, ButtonSendCOMB;
ToggleButton toggleButtonCOMA, toggleButtonCOMB;
Spinner SpinnerCOMA, SpinnerCOMB;
Spinner SpinnerBaudRateCOMA, SpinnerBaudRateCOMB;
RadioButton radioButtonTxt, radioButtonHex;
SerialControl ComA, ComB;//2个串口
DispQueueThread DispQueue;//刷新显示线程
SerialPortFinder mSerialPortFinder;//串口设备搜索
AssistBean AssistData;//用于界面数据序列化和反序列化
int iRecLines = 0;//接收区行数
/**
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
ComA = new SerialControl();
ComB = new SerialControl();
DispQueue = new DispQueueThread();
DispQueue.start();
AssistData = getAssistData();
setControls();
}
@Override
public void onDestroy() {
saveAssistData(AssistData);
CloseComPort(ComA);
CloseComPort(ComB);
super.onDestroy();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
CloseComPort(ComA);
CloseComPort(ComB);
setContentView(R.layout.main);
setControls();
}
//----------------------------------------------------
private void setControls() {
String appName = getString(R.string.app_name);
try {
PackageInfo pinfo = getPackageManager().getPackageInfo("com.jiataoyuan.serialportassiatant", PackageManager.GET_CONFIGURATIONS);
String versionName = pinfo.versionName;
setTitle(appName + " V" + versionName);
} catch (NameNotFoundException e) {
e.printStackTrace();
}
editTextRecDisp = findViewById(R.id.editTextRecDisp);
editTextLines = findViewById(R.id.editTextLines);
editTextCOMA = findViewById(R.id.editTextCOMA);
editTextCOMB = findViewById(R.id.editTextCOMB);
editTextTimeCOMA = findViewById(R.id.editTextTimeCOMA);
editTextTimeCOMB = findViewById(R.id.editTextTimeCOMB);
checkBoxAutoClear = findViewById(R.id.checkBoxAutoClear);
checkBoxAutoCOMA = findViewById(R.id.checkBoxAutoCOMA);
checkBoxAutoCOMB = findViewById(R.id.checkBoxAutoCOMB);
ButtonClear = findViewById(R.id.ButtonClear);
ButtonSendCOMA = findViewById(R.id.ButtonSendCOMA);
ButtonSendCOMB = findViewById(R.id.ButtonSendCOMB);
toggleButtonCOMA = findViewById(R.id.toggleButtonCOMA);
toggleButtonCOMB = findViewById(R.id.ToggleButtonCOMB);
SpinnerCOMA = findViewById(R.id.SpinnerCOMA);
SpinnerCOMB = findViewById(R.id.SpinnerCOMB);
SpinnerBaudRateCOMA = findViewById(R.id.SpinnerBaudRateCOMA);
SpinnerBaudRateCOMB = findViewById(R.id.SpinnerBaudRateCOMB);
radioButtonTxt = findViewById(R.id.radioButtonTxt);
radioButtonHex = findViewById(R.id.radioButtonHex);
editTextCOMA.setOnEditorActionListener(new EditorActionEvent());
editTextCOMB.setOnEditorActionListener(new EditorActionEvent());
editTextTimeCOMA.setOnEditorActionListener(new EditorActionEvent());
editTextTimeCOMB.setOnEditorActionListener(new EditorActionEvent());
editTextCOMA.setOnFocusChangeListener(new FocusChangeEvent());
editTextCOMB.setOnFocusChangeListener(new FocusChangeEvent());
editTextTimeCOMA.setOnFocusChangeListener(new FocusChangeEvent());
editTextTimeCOMB.setOnFocusChangeListener(new FocusChangeEvent());
radioButtonTxt.setOnClickListener(new radioButtonClickEvent());
radioButtonHex.setOnClickListener(new radioButtonClickEvent());
ButtonClear.setOnClickListener(new ButtonClickEvent());
ButtonSendCOMA.setOnClickListener(new ButtonClickEvent());
ButtonSendCOMB.setOnClickListener(new ButtonClickEvent());
toggleButtonCOMA.setOnCheckedChangeListener(new ToggleButtonCheckedChangeEvent());
toggleButtonCOMB.setOnCheckedChangeListener(new ToggleButtonCheckedChangeEvent());
checkBoxAutoCOMA.setOnCheckedChangeListener(new CheckBoxChangeEvent());
checkBoxAutoCOMB.setOnCheckedChangeListener(new CheckBoxChangeEvent());
ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this,
R.array.baudrates_value, android.R.layout.simple_spinner_item);
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
SpinnerBaudRateCOMA.setAdapter(adapter);
SpinnerBaudRateCOMB.setAdapter(adapter);
SpinnerBaudRateCOMA.setSelection(12);
SpinnerBaudRateCOMB.setSelection(12);
mSerialPortFinder = new SerialPortFinder();
String[] entryValues = mSerialPortFinder.getAllDevicesPath();
List<String> allDevices = new ArrayList<>();
Collections.addAll(allDevices, entryValues);
ArrayAdapter<String> aspnDevices = new ArrayAdapter<>(this,
android.R.layout.simple_spinner_item, allDevices);
aspnDevices.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
SpinnerCOMA.setAdapter(aspnDevices);
SpinnerCOMB.setAdapter(aspnDevices);
if (allDevices.size() > 0) {
SpinnerCOMA.setSelection(0);
}
if (allDevices.size() > 1) {
SpinnerCOMB.setSelection(1);
}
SpinnerCOMA.setOnItemSelectedListener(new ItemSelectedEvent());
SpinnerCOMB.setOnItemSelectedListener(new ItemSelectedEvent());
SpinnerBaudRateCOMA.setOnItemSelectedListener(new ItemSelectedEvent());
SpinnerBaudRateCOMB.setOnItemSelectedListener(new ItemSelectedEvent());
DispAssistData(AssistData);
}
//----------------------------------------------------串口号或波特率变化时,关闭打开的串口
class ItemSelectedEvent implements Spinner.OnItemSelectedListener {
public void onItemSelected(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
if ((arg0 == SpinnerCOMA) || (arg0 == SpinnerBaudRateCOMA)) {
CloseComPort(ComA);
checkBoxAutoCOMA.setChecked(false);
toggleButtonCOMA.setChecked(false);
} else if ((arg0 == SpinnerCOMB) || (arg0 == SpinnerBaudRateCOMB)) {
CloseComPort(ComB);
checkBoxAutoCOMA.setChecked(false);
toggleButtonCOMB.setChecked(false);
}
}
public void onNothingSelected(AdapterView<?> arg0) {
}
}
//----------------------------------------------------编辑框焦点转移事件
class FocusChangeEvent implements EditText.OnFocusChangeListener {
public void onFocusChange(View v, boolean hasFocus) {
if (v == editTextCOMA) {
setSendData(editTextCOMA);
} else if (v == editTextCOMB) {
setSendData(editTextCOMB);
} else if (v == editTextTimeCOMA) {
setDelayTime(editTextTimeCOMA);
} else if (v == editTextTimeCOMB) {
setDelayTime(editTextTimeCOMB);
}
}
}
//----------------------------------------------------编辑框完成事件
class EditorActionEvent implements EditText.OnEditorActionListener {
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
if (v == editTextCOMA) {
setSendData(editTextCOMA);
} else if (v == editTextCOMB) {
setSendData(editTextCOMB);
} else if (v == editTextTimeCOMA) {
setDelayTime(editTextTimeCOMA);
} else if (v == editTextTimeCOMB) {
setDelayTime(editTextTimeCOMB);
}
return false;
}
}
//----------------------------------------------------Txt、Hex模式选择
class radioButtonClickEvent implements RadioButton.OnClickListener {
public void onClick(View v) {
if (v == radioButtonTxt) {
KeyListener TxtkeyListener = new TextKeyListener(Capitalize.NONE, false);
editTextCOMA.setKeyListener(TxtkeyListener);
editTextCOMB.setKeyListener(TxtkeyListener);
AssistData.setTxtMode(true);
} else if (v == radioButtonHex) {
KeyListener HexkeyListener = new NumberKeyListener() {
public int getInputType() {
return InputType.TYPE_CLASS_TEXT;
}
@Override
protected char[] getAcceptedChars() {
return new char[]{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F'};
}
};
editTextCOMA.setKeyListener(HexkeyListener);
editTextCOMB.setKeyListener(HexkeyListener);
AssistData.setTxtMode(false);
}
editTextCOMA.setText(AssistData.getSendA());
editTextCOMB.setText(AssistData.getSendB());
setSendData(editTextCOMA);
setSendData(editTextCOMB);
}
}
//----------------------------------------------------自动发送
class CheckBoxChangeEvent implements CheckBox.OnCheckedChangeListener {
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (buttonView == checkBoxAutoCOMA) {
if (!toggleButtonCOMA.isChecked() && isChecked) {
buttonView.setChecked(false);
return;
}
SetLoopData(ComA, editTextCOMA.getText().toString());
SetAutoSend(ComA, isChecked);
} else if (buttonView == checkBoxAutoCOMB) {
if (!toggleButtonCOMB.isChecked() && isChecked) {
buttonView.setChecked(false);
return;
}
SetLoopData(ComB, editTextCOMB.getText().toString());
SetAutoSend(ComB, isChecked);
}
}
}
//----------------------------------------------------清除按钮、发送按钮
class ButtonClickEvent implements View.OnClickListener {
public void onClick(View v) {
if (v == ButtonClear) {
editTextRecDisp.setText("");
editTextLines.setText("0");
iRecLines = 0;
} else if (v == ButtonSendCOMA) {
sendPortData(ComA, editTextCOMA.getText().toString());
} else if (v == ButtonSendCOMB) {
sendPortData(ComB, editTextCOMB.getText().toString());
}
}
}
//----------------------------------------------------打开关闭串口
class ToggleButtonCheckedChangeEvent implements ToggleButton.OnCheckedChangeListener {
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (buttonView == toggleButtonCOMA) {
if (isChecked) {
if (toggleButtonCOMB.isChecked() && SpinnerCOMA.getSelectedItemPosition() == SpinnerCOMB.getSelectedItemPosition()) {
ShowMessage("串口" + SpinnerCOMA.getSelectedItem().toString() + "已打开");
buttonView.setChecked(false);
} else {
ComA.setPort(SpinnerCOMA.getSelectedItem().toString());
ComA.setBaudRate(SpinnerBaudRateCOMA.getSelectedItem().toString());
OpenComPort(ComA);
}
} else {
CloseComPort(ComA);
checkBoxAutoCOMA.setChecked(false);
}
} else if (buttonView == toggleButtonCOMB) {
if (isChecked) {
if (toggleButtonCOMA.isChecked() && SpinnerCOMB.getSelectedItemPosition() == SpinnerCOMA.getSelectedItemPosition()) {
ShowMessage("串口" + SpinnerCOMB.getSelectedItem().toString() + "已打开");
buttonView.setChecked(false);
} else {
ComB.setPort(SpinnerCOMB.getSelectedItem().toString());
ComB.setBaudRate(SpinnerBaudRateCOMB.getSelectedItem().toString());
OpenComPort(ComB);
}
} else {
CloseComPort(ComB);
checkBoxAutoCOMB.setChecked(false);
}
}
}
}
//----------------------------------------------------串口控制类
private class SerialControl extends SerialHelper {
@Override
protected void onDataReceived(final ComBean ComRecData) {
//数据接收量大或接收时弹出软键盘,界面会卡顿,可能和6410的显示性能有关
//直接刷新显示,接收数据量大时,卡顿明显,但接收与显示同步。
//用线程定时刷新显示可以获得较流畅的显示效果,但是接收数据速度快于显示速度时,显示会滞后。
//最终效果差不多-_-,线程定时刷新稍好一些。
DispQueue.AddQueue(ComRecData);//线程定时刷新显示(推荐)
/*
runOnUiThread(new Runnable()//直接刷新显示
{
public void run()
{
DispRecData(ComRecData);
}
});*/
}
}
//----------------------------------------------------刷新显示线程
private class DispQueueThread extends Thread {
private Queue<ComBean> QueueList = new LinkedList<>();
@Override
public void run() {
super.run();
while (!isInterrupted()) {
final ComBean ComData;
while ((ComData = QueueList.poll()) != null) {
runOnUiThread(new Runnable() {
public void run() {
DispRecData(ComData);
}
});
try {
Thread.sleep(100);//显示性能高的话,可以把此数值调小。
} catch (Exception e) {
e.printStackTrace();
}
break;
}
}
}
synchronized void AddQueue(ComBean ComData) {
QueueList.add(ComData);
}
}
//----------------------------------------------------刷新界面数据
private void DispAssistData(AssistBean AssistData) {
editTextCOMA.setText(AssistData.getSendA());
editTextCOMB.setText(AssistData.getSendB());
setSendData(editTextCOMA);
setSendData(editTextCOMB);
if (AssistData.isTxt()) {
radioButtonTxt.setChecked(true);
} else {
radioButtonHex.setChecked(true);
}
editTextTimeCOMA.setText(AssistData.sTimeA);
editTextTimeCOMB.setText(AssistData.sTimeB);
setDelayTime(editTextTimeCOMA);
setDelayTime(editTextTimeCOMB);
}
//----------------------------------------------------保存、获取界面数据
private void saveAssistData(AssistBean AssistData) {
AssistData.sTimeA = editTextTimeCOMA.getText().toString();
AssistData.sTimeB = editTextTimeCOMB.getText().toString();
SharedPreferences msharedPreferences = getSharedPreferences("SerialPortAssistant", Context.MODE_PRIVATE);
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeObject(AssistData);
String sBase64 = new String(Base64.encode(baos.toByteArray(), 0));
SharedPreferences.Editor editor = msharedPreferences.edit();
editor.putString("AssistData", sBase64);
// editor.commit();
editor.apply();
} catch (IOException e) {
e.printStackTrace();
}
}
//----------------------------------------------------
private AssistBean getAssistData() {
SharedPreferences msharedPreferences = getSharedPreferences("SerialPortAssistant", Context.MODE_PRIVATE);
AssistBean AssistData = new AssistBean();
try {
String personBase64 = msharedPreferences.getString("AssistData", "");
byte[] base64Bytes = Base64.decode(personBase64 != null ? personBase64.getBytes() : new byte[0], 0);
ByteArrayInputStream bais = new ByteArrayInputStream(base64Bytes);
ObjectInputStream ois = new ObjectInputStream(bais);
AssistData = (AssistBean) ois.readObject();
return AssistData;
} catch (Exception e) {
e.printStackTrace();
}
return AssistData;
}
//----------------------------------------------------设置自动发送延时
private void setDelayTime(TextView v) {
if (v == editTextTimeCOMA) {
AssistData.sTimeA = v.getText().toString();
SetiDelayTime(ComA, v.getText().toString());
} else if (v == editTextTimeCOMB) {
AssistData.sTimeB = v.getText().toString();
SetiDelayTime(ComB, v.getText().toString());
}
}
//----------------------------------------------------设置自动发送数据
private void setSendData(TextView v) {
if (v == editTextCOMA) {
AssistData.setSendA(v.getText().toString());
SetLoopData(ComA, v.getText().toString());
} else if (v == editTextCOMB) {
AssistData.setSendB(v.getText().toString());
SetLoopData(ComB, v.getText().toString());
}
}
//----------------------------------------------------设置自动发送延时
private void SetiDelayTime(SerialHelper ComPort, String sTime) {
ComPort.setiDelay(Integer.parseInt(sTime));
}
//----------------------------------------------------设置自动发送数据
private void SetLoopData(SerialHelper ComPort, String sLoopData) {
if (radioButtonTxt.isChecked()) {
ComPort.setTxtLoopData(sLoopData);
} else if (radioButtonHex.isChecked()) {
ComPort.setHexLoopData(sLoopData);
}
}
//----------------------------------------------------显示接收数据
private void DispRecData(ComBean ComRecData) {
StringBuilder sMsg = new StringBuilder();
sMsg.append(ComRecData.sRecTime);
sMsg.append("[");
sMsg.append(ComRecData.sComPort);
sMsg.append("]");
if (radioButtonTxt.isChecked()) {
sMsg.append("[Txt] ");
sMsg.append(new String(ComRecData.bRec));
} else if (radioButtonHex.isChecked()) {
sMsg.append("[Hex] ");
sMsg.append(MyFunc.ByteArrToHex(ComRecData.bRec));
}
sMsg.append("\r\n");
editTextRecDisp.append(sMsg);
iRecLines++;
editTextLines.setText(String.valueOf(iRecLines));
if ((iRecLines > 500) && (checkBoxAutoClear.isChecked()))//达到500项自动清除
{
editTextRecDisp.setText("");
editTextLines.setText("0");
iRecLines = 0;
}
}
//----------------------------------------------------设置自动发送模式开关
private void SetAutoSend(SerialHelper ComPort, boolean isAutoSend) {
if (isAutoSend) {
ComPort.startSend();
} else {
ComPort.stopSend();
}
}
//----------------------------------------------------串口发送
private void sendPortData(SerialHelper ComPort, String sOut) {
if (sOut.equals("")) {
return;
}
if (ComPort != null && ComPort.isOpen()) {
if (radioButtonTxt.isChecked()) {
ComPort.sendTxt(sOut);
} else if (radioButtonHex.isChecked()) {
ComPort.sendHex(sOut);
}
}
}
//----------------------------------------------------关闭串口
private void CloseComPort(SerialHelper ComPort) {
if (ComPort != null) {
ComPort.stopSend();
ComPort.close();
}
}
//----------------------------------------------------打开串口
private void OpenComPort(SerialHelper ComPort) {
try {
ComPort.open();
} catch (SecurityException e) {
ShowMessage("打开串口失败:没有串口读/写权限!");
} catch (IOException e) {
ShowMessage("打开串口失败:未知错误!");
} catch (InvalidParameterException e) {
ShowMessage("打开串口失败:参数错误!");
}
}
//------------------------------------------显示消息
private void ShowMessage(String sMsg) {
Toast.makeText(this, sMsg, Toast.LENGTH_SHORT).show();
}
} |
<filename>src/Interactives.h
/*
* Project: FullereneViewer
* Version: 1.0
* Copyright: (C) 2011-14 Dr.Sc.KAWAMOTO,Takuji (Ext)
*/
#ifndef __INTERACTIVES_H__
#define __INTERACTIVES_H__
#include "Object.h"
#include "List.h"
#include "InteractiveOperation.h"
#include "Interactive.h"
// Where an action applies on an interactive object: negative sentinels select
// the center or the nearest point; values >= 0 index a vertex directly.
enum ActionLocation {
  ACTION_LOCATION_CENTER = -1,
  ACTION_LOCATION_NEAREST = -2,
  ACTION_LOCATION_VERTEX = 0,
};

// Simulation-step budget used by resume_simulation() (see p_simulation_active).
#define STABILITY_THRESHOLD 100

// Abstract container that owns a set of Interactive objects and drives the
// force simulation between them.
class Interactives : public Object {
  // friend classes & functions
  // members
public:
  static bool s_need_simulation;                 // global toggle: run the simulation step
  static bool s_need_draw_pentagon_cellophanes;  // global toggle: draw pentagon cellophanes
private:
  List<InteractiveOperation> p_operations;  // pending interactions registered this step
  List<Interactive> p_interactives;         // objects participating in the simulation
  int p_simulation_active;                  // countdown of remaining active steps
  // private tools
private:
  void p_calculate_interaction(LocationForceType force_type, double delta,
                               Interactive* one, int one_index,
                               Interactive* the_other, int the_other_index);
  void p_calculate_interaction(NormalForceType force_type, double delta,
                               Interactive* one, Interactive* the_other);
  // constructors & the destructor
public:
  Interactives();
  virtual ~Interactives();
  Interactives& operator = (const Interactives& you); /* dont use */
  // interactions
protected:
  virtual void p_reset_interaction() = 0;
  void p_register_interaction(LocationForceType force_type,
                              Interactive* one, int one_index,
                              Interactive* the_other, int the_other_index);
  void p_register_interaction(NormalForceType force_type,
                              Interactive* one, Interactive* the_other);
  void p_register_interaction(OriginalForceType force_type, Interactive* one);
public:
  virtual const Vector3& get_center_location() const = 0;
  void register_interactive(Interactive* interactive);
  bool operate_interactions(double delta);
  void randomized_force(double width = 1.0);
  // I/O
public:
  void draw_by_OpenGL(bool selection) const;
  // stability
  // Re-arm the simulation for another STABILITY_THRESHOLD steps.
  void resume_simulation() { p_simulation_active = STABILITY_THRESHOLD; }
  // member accessing methods
};
#endif /* __INTERACTIVES_H__ */
/* Local Variables: */
/* mode: c++ */
/* End: */
|
<reponame>rotationalio/honu<gh_stars>0
package honu_test
import (
"fmt"
"io/ioutil"
"os"
"testing"
"github.com/rotationalio/honu"
"github.com/rotationalio/honu/config"
"github.com/rotationalio/honu/options"
"github.com/stretchr/testify/require"
)
// pairs is an ordered list of key/value fixtures; keys share one-letter
// prefixes so prefix iteration (db.Iter over "b") can be exercised.
var pairs = [][]string{
	{"aa", "first"},
	{"ab", "second"},
	{"ba", "third"},
	{"bb", "fourth"},
	{"bc", "fifth"},
	{"ca", "sixth"},
	{"cb", "seventh"},
}
// Returns a constant list of namespace strings.
// Includes the empty namespace and names with spaces/colons to exercise
// namespace encoding edge cases.
// TODO: Share with engines/leveldb/leveldb_test.go
var testNamespaces = []string{
	"",
	"basic",
	"namespace with spaces",
	"namespace::with::colons",
}
// setupHonuDB creates a leveldb-backed honu.DB in a fresh temporary
// directory and returns both; the caller is responsible for closing the
// database and removing tmpDir.
func setupHonuDB(t testing.TB) (db *honu.DB, tmpDir string) {
	// Create a new leveldb database in a temporary directory
	tmpDir, err := ioutil.TempDir("", "honuldb-*")
	require.NoError(t, err)

	// Open a Honu leveldb database with default configuration
	uri := fmt.Sprintf("leveldb:///%s", tmpDir)
	db, err = honu.Open(uri, config.WithReplica(config.ReplicaConfig{PID: 8, Region: "us-southwest-16", Name: "testing"}))
	if err != nil && tmpDir != "" {
		// BUG FIX: this cleanup used to run *after* require.NoError, which
		// calls t.FailNow on error and made the branch unreachable — the
		// temporary directory leaked whenever Open failed.
		fmt.Println(tmpDir)
		os.RemoveAll(tmpDir)
	}
	require.NoError(t, err)
	return db, tmpDir
}
// TestLevelDBInteractions exercises the full Put/Get/Delete/Object/Update/Iter
// lifecycle against a leveldb-backed honu database, across every test namespace.
func TestLevelDBInteractions(t *testing.T) {
	db, tmpDir := setupHonuDB(t)
	// Cleanup when we're done with the test
	defer os.RemoveAll(tmpDir)
	defer db.Close()

	for _, namespace := range testNamespaces {
		// Use a constant key to ensure namespaces
		// are working correctly.
		key := []byte("foo")
		//append a constant to namespace as the value
		//because when the empty namespace is returned
		//as a key it is unmarsheled as []byte(nil)
		//instead of []byte{}
		expectedValue := []byte(namespace + "this is the value of foo")

		// Put a version to the database
		obj, err := db.Put(key, expectedValue, options.WithNamespace(namespace))
		require.NoError(t, err)
		require.False(t, obj.Tombstone())

		// Get the version of foo from the database
		value, err := db.Get(key, options.WithNamespace(namespace))
		require.NoError(t, err)
		require.Equal(t, expectedValue, value)

		// Get the meta data from foo
		obj, err = db.Object(key, options.WithNamespace(namespace))
		require.NoError(t, err)
		require.Equal(t, uint64(1), obj.Version.Version)
		require.False(t, obj.Tombstone())

		// Delete the version from the database and ensure you
		// are not able to get the deleted version
		_, err = db.Delete(key, options.WithNamespace(namespace))
		require.NoError(t, err)
		value, err = db.Get(key, options.WithNamespace(namespace))
		require.Error(t, err)
		require.Empty(t, value)

		// Get the tombstone from the database
		obj, err = db.Object(key, options.WithNamespace(namespace))
		require.NoError(t, err)
		require.Equal(t, uint64(2), obj.Version.Version)
		require.True(t, obj.Tombstone())
		require.Empty(t, obj.Data)

		// Be able to "undelete" a tombstone
		undeadValue := []byte("this is the undead foo")
		obj, err = db.Put(key, undeadValue, options.WithNamespace(namespace))
		require.NoError(t, err)
		require.False(t, obj.Tombstone())

		// Get the metadata from the database (should no longer be a tombstone)
		obj, err = db.Object(key, options.WithNamespace(namespace))
		require.NoError(t, err)
		require.Equal(t, uint64(3), obj.Version.Version)
		require.False(t, obj.Tombstone())

		// Attempt to directly update the object in the database
		obj.Data = []byte("directly updated")
		obj.Owner = "me"
		obj.Version.Parent = nil
		obj.Version.Version = 42
		obj.Version.Pid = 93
		obj.Version.Region = "here"
		obj.Version.Tombstone = false
		require.NoError(t, db.Update(obj))
		obj, err = db.Object(key, options.WithNamespace(namespace))
		require.NoError(t, err)
		require.Equal(t, uint64(42), obj.Version.Version)
		require.Equal(t, uint64(93), obj.Version.Pid)
		require.Equal(t, "me", obj.Owner)
		require.Equal(t, "here", obj.Version.Region)

		// Update with same namespace option should not error.
		require.NoError(t, db.Update(obj, options.WithNamespace(namespace)))
		// Update with wrong namespace should error
		require.Error(t, db.Update(obj, options.WithNamespace("this is not the right thing")))

		// TODO: figure out what to do with this testcase.
		// Iter currently grabs the namespace by splitting
		// on :: and grabbing the first string, so it only
		// grabs "namespace".
		if namespace == "namespace::with::colons" {
			continue
		}

		// Put a range of data into the database
		for _, pair := range pairs {
			key := []byte(pair[0])
			value := []byte(pair[1])
			_, err := db.Put(key, value, options.WithNamespace(namespace))
			require.NoError(t, err)
		}

		// Iterate over a prefix in the database; pairs[2..4] hold the
		// three "b"-prefixed fixtures.
		iter, err := db.Iter([]byte("b"), options.WithNamespace(namespace))
		require.NoError(t, err)
		collected := 0
		for iter.Next() {
			key := iter.Key()
			require.Equal(t, string(key), pairs[collected+2][0])
			value := iter.Value()
			// NOTE: removed a leftover debug fmt.Println(value) here.
			require.Equal(t, string(value), string(pairs[collected+2][1]))
			obj, err := iter.Object()
			require.NoError(t, err)
			require.Equal(t, uint64(1), obj.Version.Version)
			collected++
		}
		require.Equal(t, 3, collected)
		require.NoError(t, iter.Error())
		iter.Release()
	}
}
|
from nltk.corpus import stopwords
import sys
def process_query(query):
    """Strip leading question words and '?' characters from a query string.

    Words are compared case-insensitively against a fixed set of
    interrogatives; the surviving words keep their original casing and
    order, and any remaining '?' characters are removed from the result.
    """
    question_words = {'what', 'where', 'when', 'who', 'which', 'whom',
                      'whose', 'why', 'how', '?'}
    kept = [word for word in query.split() if word.lower() not in question_words]
    return ' '.join(kept).replace('?', '')
# Example usage
input_query = "What is the Nahuatl word for tomato and how did the Aztecs call tomato?"
processed_query = process_query(input_query)
# Corrected expected output: only "What" and "how" are question words, so
# the articles ("the") survive — the earlier comment wrongly omitted them.
print(processed_query)  # Output: "is the Nahuatl word for tomato and did the Aztecs call tomato"
def format_strings(strings, format):
    """Render each input through a str.format template.

    Args:
        strings: iterable of values to substitute for the ``{person}``
            placeholder.
        format: a ``str.format``-style template containing ``{person}``.

    Returns:
        A list with one formatted string per input, in order.
    """
    return [format.format(person=entry) for entry in strings]
<filename>app/components/Header/OptionMenu.js
import styled from 'styled-components';
import colors from 'styles/colors';
// Header option-menu container: a left-floated flex column separated from its
// neighbour by a silver left border. The `.option-item-menu` /
// `.option-item-profile` modifiers rebalance the 33% default width to a
// 30/70 split; each `.option-item` child takes a third and sizes its icon.
// (CSS kept byte-identical — it is a runtime template literal.)
export default styled.div`
  float: left;
  width: 33%;
  height: 100%;
  display: flex;
  border-left: 1px solid ${colors.$silver};
  &.option-item-menu {
    width: 30%;
  }
  &.option-item-profile {
    width: 70%;
  }
  .option-item {
    width: 33%;
    .icon {
      width: 20px;
      margin: 5px;
    }
  }
`;
|
% 5x5 matrix containing the integers 1..25, filled row by row.
A = [1 2 3 4 5; 6 7 8 9 10; 11 12 13 14 15; 16 17 18 19 20; 21 22 23 24 25];
% Mean over all elements: A(:) flattens the matrix into a column vector,
% so mean() averages every entry (result: 13). No trailing semicolon, so
% the value is echoed to the command window.
mean_elements = mean(A(:))
// This file is required by the index.html file and will
// be executed in the renderer process for that window.
// All of the Node.js APIs are available in this process.
const numberPages = 4;
const numberProcesses = 3;
const runIntervalTime = 1000;

// Per-process page tables: one entry per virtual page of each process.
var processList = [];
for (var i = 0; i < numberProcesses; i++) {
    var pageList = [];
    // p: presence bit which indicates when a page is stored in the primary memory or not
    // frame: frame number of a page inside the primary memory (if p is true)
    for (var j = 0; j < numberPages; j++) {
        pageList.push({
            p: false,
            frame: 0
        });
    }
    processList.push(pageList);
}

// Page size in bytes
const pageSize = 4096;
// Memory size in pages
const primaryMemorySize = 8;
const virtualMemorySize = primaryMemorySize * 4;
const secondaryMemorySize = primaryMemorySize * 64;

var primaryMemoryList = [];
var virtualMemoryList = [];
var instructionLog = [];
var instructionInterval;

// Random number of instructions between minInstructions and maxInstructions.
const maxInstructions = 100;
const minInstructions = 20;
var numberInstructions = Math.floor(Math.random() * (maxInstructions - minInstructions)) + minInstructions;

// Stats
// BUG FIX: these used to be declared *before* numberInstructions was
// assigned, so pageFaultRatePerTime was initialized to [undefined] thanks
// to var hoisting. Declaring them afterwards captures the real count.
var numberPageFaults = 0;
// NOTE(review): a one-element array holding the count looks suspicious —
// possibly `new Array(numberInstructions)` was intended; confirm.
var pageFaultRatePerTime = [numberInstructions];
var clockIndex = 0;
var instructionIndex = 0;
var instructionList = [];
var currentStep = 0;

// Generating random instructions
for (var i = 0; i < numberInstructions; i++) {
    instructionList.push({
        processId: Math.floor(Math.random() * numberProcesses),
        pageId: Math.floor(Math.random() * numberPages),
        referenced: false,
    });
}

// Init + first iteration
initRender();
runInstruction();
// Functions:
// Runs next page request from a page. After finding it, updates all memory lists and renders data
// Runs the next page request. After locating the page, updates all memory
// lists, the fault statistics, and re-renders the UI.
function runInstruction(){
    currentStep++;
    if (instructionList.length > 0) {
        instructionLog.push(["Iniciando próxima requisição..."]);
        var pageLocation = checkMemory(0, instructionList[0]);
        // If the requested page isn't in primary memory, it's necessary to push it
        if (pageLocation.memoryType != 0){
            // If it's not in primary memory, it's a page fault.
            numberPageFaults++;
            // BUG FIX: pushIndex was an implicit global; declare it locally.
            // The victim frame is one slot behind the clock hand (wrapping).
            var pushIndex = (clockIndex - 1 === -1) ? primaryMemoryList.length - 1 : clockIndex - 1;
            var frame;
            if (primaryMemoryList.length == primaryMemorySize){
                // Memory full: overwrite the frame freed by the clock algorithm.
                primaryMemoryList[pushIndex] = instructionList[0];
                frame = pushIndex;
            }
            else {
                // Memory has room: append. push() returns the new length.
                frame = primaryMemoryList.push(instructionList[0]) - 1;
            }
            // Updating process' page on its page table
            instructionLog[instructionIndex].push(`Atualizando Tabela de Página do Processo ${instructionList[0].processId} com o frame atual da Página ${instructionList[0].pageId}.`);
            processList[instructionList[0].processId][instructionList[0].pageId].p = true;
            processList[instructionList[0].processId][instructionList[0].pageId].frame = frame;
            // Removing requested page from its previous location
            switch (pageLocation.memoryType){
                case 1: // Virtual
                    virtualMemoryList.splice(pageLocation.index, 1);
                    break;
            }
        }
        // Update rate of page faults.
        pageFaultRatePerTime[currentStep-1] = numberPageFaults/currentStep;
        // If the requested page is in primary memory, everything's fine.
        // Remove the consumed instruction and render the updated data.
        instructionList.splice(0, 1);
        renderData();
        instructionIndex++;
    }
}
// Searches for the requested page inside all memory tables
// Recursive function that uses a top-down search process, while considering page swaps
// Search Order: Primary Memory -> Virtual Memory -> Swap Memory -> Secondary Memory
// Searches for the requested page inside all memory tables
// Recursive function that uses a top-down search process, while considering page swaps
// Search Order: Primary Memory -> Virtual Memory -> Swap Memory -> Secondary Memory
// Returns {memoryType, index}: where the page was found and at which slot.
// pageToSave carries a victim evicted from a fuller, upper memory level.
function checkMemory(memoryType, page, pageToSave=null){
    var memoryList;
    var memorySize;
    var memoryName;
    // Getting data from the current memory type
    switch (memoryType){
        case 0: // Primary
            memoryList = primaryMemoryList;
            memorySize = primaryMemorySize;
            memoryName = "na Memória Primária";
            break;
        case 1: // Swap
            memoryList = virtualMemoryList;
            memorySize = virtualMemorySize;
            memoryName = "no Espaço de Swap";
            break;
        case 2: // Secondary
            // Stopping condition: Page is always stored inside the disk
            // No indexes are considered for it
            instructionLog[instructionIndex].push(`Procurando por Página ${page.pageId} do Processo ${page.processId} no Disco...`);
            instructionLog[instructionIndex].push("Página Encontrada!");
            if (pageToSave != null)
                instructionLog[instructionIndex].push(`Salvando Página ${pageToSave.pageId} do Processo ${pageToSave.processId} enviada da Memória Principal no Disco.`);
            return {
                memoryType: 2,
                index: 0
            }
    }
    instructionLog[instructionIndex].push(`Procurando por Página ${page.pageId} do Processo ${page.processId} ${memoryName}...`);
    // Search for the page inside the current memory list
    var index = memoryList.findIndex(function(element){
        return element.processId == page.processId && element.pageId == page.pageId;
    });
    // If it's found, return it, and save any page (sent from an upper memory that's full)
    // There's no need to verify if it has space to save since it can swap the requested page with the sent page (worst case)
    if (index != -1){
        instructionLog[instructionIndex].push("Página Encontrada!");
        if (memoryType === 0){
            // Mark the frame as recently used for the clock algorithm.
            memoryList[index].referenced = true;
        }
        if (pageToSave != null){
            instructionLog[instructionIndex].push(`Salvando Página ${pageToSave.pageId} do Processo ${pageToSave.processId} enviada da Memória Principal ${memoryName}.`);
            memoryList.push(pageToSave);
        }
        return {
            memoryType: memoryType,
            index: index
        }
    }
    // If it isn't found, send a request for the next memory
    else {
        // PAGE FAULT: page isn't located in any memory, so it will be requested from disk.
        if (memoryType == 0){
            instructionLog[instructionIndex].push("Page Fault! Página não se encontra na Memória Primária.");
            // numberPageFaults++;
        }
        else
            instructionLog[instructionIndex].push(`Página não se encontra ${memoryName}.`);
        if (memoryList.length == memorySize){
            // PAGE SWAP: Send a page (chosen with a substitution algorithm) to the next memory so it can have space for the requested page
            if (memoryType == 0){
                // Current Substitution Algorithm: CLOCK
                pageToSave = clockSubstitution(memoryList);
                instructionLog[instructionIndex].push(`Memória Primária cheia. Abrindo espaço removendo a Página ${pageToSave.pageId} do Processo ${pageToSave.processId}.`);
                // Change presence bit to false since it will be removed from primary memory
                processList[pageToSave.processId][pageToSave.pageId].p = false;
                return checkMemory(++memoryType, page, pageToSave);
            }
            else {
                // If the current memory received a page to save from a upper memory and it's full as well, send it to the next memory
                return checkMemory(++memoryType, page, pageToSave);
            }
        }
        else {
            // If the current memory received a page to save, save it
            if (pageToSave != null){
                instructionLog[instructionIndex].push(`Salvando Página ${pageToSave.pageId} do Processo ${pageToSave.processId} enviada da Memória Principal ${memoryName}.`);
                memoryList.push(pageToSave);
            }
            // There's no need to send the saved page for the next memory since it wasn't full
            return checkMemory(++memoryType, page);
        }
    }
}
// Second-chance (clock) page replacement: sweep the clock hand over the
// frames, clearing reference bits, and evict the first unreferenced page.
function clockSubstitution(memoryList){
    while (true){
        if (memoryList[clockIndex].referenced)
        {
            // Recently used: give the page a second chance by clearing its
            // reference bit and advancing the hand.
            memoryList[clockIndex].referenced = false;
            clockIndex++;
        } else {
            // Victim found: advance the hand (wrapping) and return the page.
            // NOTE(review): the wrap here uses primaryMemoryList.length but
            // the sweep below wraps on memoryList.length — confirm the
            // caller always passes primaryMemoryList.
            let res = memoryList[clockIndex];
            clockIndex = ++clockIndex % primaryMemoryList.length;
            return res;
        }
        // If it's finished looking on the memory, start again.
        if (clockIndex >= memoryList.length)
            clockIndex = 0;
    }
}
// Stop the process of running all instructions
// Stop the process of running all instructions.
function stopRunning(){
    // Cancels the timer started by runAllInstructions().
    clearInterval(instructionInterval);
}
// Runs all instructions at once using X ms intervals
// Runs all remaining instructions automatically, one every runIntervalTime ms.
function runAllInstructions(){
    // Sets a timer to run an instruction every runIntervalTime milliseconds.
    instructionInterval = setInterval(runInstruction, runIntervalTime);
    // Sets a timer to clear the interval after the last instruction has run.
    setTimeout(()=>{clearInterval(instructionInterval)}, runIntervalTime*instructionList.length);
}
/*===== RENDER METHODS =====*/
// Initialising render event listeners and innerHTMLs that will not be modified anymore.
// Initialising render event listeners and innerHTMLs that will not be modified anymore.
function initRender(){
    $("#numberProcesses").text(numberProcesses);
    // Simulation control buttons.
    document.getElementById("playButton").addEventListener("click", runInstruction);
    document.getElementById("playAllButton").addEventListener("click", runAllInstructions);
    document.getElementById("stopButton").addEventListener("click", stopRunning);
    // Manual request form: the field expects "<processId>;<pageId>".
    $("#newRequestForm").submit((e)=>{
        e.preventDefault();
        let value = $("#newRequestField").val().split(';');
        instructionList.push({
            processId: value[0],
            pageId: value[1],
            referenced: false,
        });
        numberInstructions++;
        renderList();
    })
    // Show the input-format hint when the field gains focus.
    $("#newRequestField").focus(()=>{
        renderNotification('top','center');
    })
}
// Updates the allocated-bytes counters for each memory area.
function renderMemorySizeStats(){
    document.getElementById("primaryMemorySize").innerHTML = primaryMemoryList.length * pageSize + "/" + primaryMemorySize * pageSize + " <small>Bytes alocados.</small>";
    document.getElementById("virtualMemorySize").innerHTML = virtualMemoryList.length * pageSize + "/" + virtualMemorySize * pageSize + " <small>Bytes alocados.</small>";
    document.getElementById("processesTableSize").innerHTML = numberProcesses+" <small>processos alocados.</small>";
}
// Renders the log entries produced for the current instruction into the
// log list and updates its badge counter.
function renderLog(){
    var innerHTML = "";
    for (var i = 0; i < instructionLog[instructionIndex].length; i++) {
        innerHTML += `
            <li>
                <a href="#">${instructionLog[instructionIndex][i]}</a>
            </li>
        `;
    }
    $("#numberLogs").html(instructionLog[instructionIndex].length);
    $("#instructionLog").html(innerHTML);
    // $("#logContainer").fadeOut(300, ()=>{
    //     $("#logText").html(instructionLog);
    //     $("#logContainer").fadeIn(300,null);
    // });
}
// Builds the <tr> rows for process p's page table; pages currently mapped
// into primary memory get the "success" row class.
function renderPages(p){
    var tRows = ``;
    for (var i = 0; i < numberPages; i++) {
        tRows +=`
            <tr ${processList[p][i].p ? 'class="success"':null}>
                <td>${i}</td>
                <td>${processList[p][i].p ? 'Sim':'Não'} </td>
                <td>${processList[p][i].frame}</td>
            </tr>
        `;
    }
    return tRows;
}
// Re-renders every process' page table (one HTML table per process).
function renderProcessesList(){
    document.getElementById("processesPageTable").innerHTML = "";
    for (var i = 0; i < numberProcesses; i++) {
        document.getElementById("processesPageTable").innerHTML += `
            <h4>Processo ${i}</h4>
            <table class="table table-hover">
                <thead class="text-danger">
                    <th>ID da Página</th>
                    <th>Mapeada?</th>
                    <th>ID do Frame</th>
                </thead>
                <tbody>
                    ${renderPages(i)}
                </tbody>
            </table>`;
    }
}
// Renders the pending-instruction queue and both memory tables; empty
// slots show '--', and the row under the clock hand is highlighted.
function renderList(){
    $("#instruction").html(`Executando tempo <strong>${numberInstructions - instructionList.length} de ${numberInstructions}</strong>`);
    document.getElementById("instructionList").innerHTML = ""
    for (var i = 0; i < instructionList.length; i++) {
        document.getElementById("instructionList").innerHTML += `
            <tr>
                <td>${instructionList[i].processId}</td>
                <td>${instructionList[i].pageId}</td>
            </tr>
        `;
    }
    document.getElementById("primaryMemoryList").innerHTML = "";
    for (var i = 0; i < primaryMemorySize; i++) {
        document.getElementById("primaryMemoryList").innerHTML += `
            <tr ${i === clockIndex ? 'class="info"':null}>
                <td>${i}</td>
                <td>${primaryMemoryList.length > i ? primaryMemoryList[i].processId : '--'}</td>
                <td>${primaryMemoryList.length > i ? primaryMemoryList[i].pageId : '--'}</td>
                <td>${primaryMemoryList.length > i ? primaryMemoryList[i].referenced ? 'Sim': 'Não' : '--'}</td>
            </tr>
        `;
    }
    document.getElementById("virtualMemoryList").innerHTML = "";
    for (var i = 0; i < virtualMemorySize; i++) {
        document.getElementById("virtualMemoryList").innerHTML += `
            <tr>
                <td>${i}</td>
                <td>${virtualMemoryList.length > i ? virtualMemoryList[i].processId : '--'}</td>
                <td>${virtualMemoryList.length > i ? virtualMemoryList[i].pageId : '--'}</td>
            </tr>
        `;
    }
    renderProcessesList();
}
// Renders the page-fault-rate line chart (Chartist) over all time steps.
function renderPageFaultChart(){
    // Labels 1..numberInstructions for the x axis.
    // BUG FIX: was initialized as [numberInstructions] — a one-element
    // array, not a length — whose element the loop then overwrote.
    var labelArray = [];
    for (var i = 1; i <= numberInstructions; i++) {
        labelArray[i-1] = i;
    }
    // Declare chart data/options locally instead of leaking implicit globals.
    var dataPageFaultChart = {
        labels: labelArray,
        series: [pageFaultRatePerTime]
    };
    var optionsPageFaultChart = {
        axisX: {
            // Show every other x-axis label to avoid clutter.
            labelInterpolationFnc: function skipLabels(value, index) {
                return index % 2 === 0 ? value : null;
            }
        },
        lineSmooth: Chartist.Interpolation.cardinal({
            tension: 0
        }),
        height: 160,
        low: 0,
        high: 1,
        chartPadding: { top: 30, right: 5, bottom: 0, left: 0},
    }
    var pageFaultRateChart = new Chartist.Line('#pageFaultRateChart', dataPageFaultChart, optionsPageFaultChart);
}
// Shows a Bootstrap-notify info toast explaining the manual-request format.
function renderNotification(from, align){
    $.notify({
        icon: "note_add",
        message: "Digite o processo, ponto e vírgula e a página. Ex: 0;1 para Página 1 do Processo 0"
    },{
        type: 'info',
        timer: 4000,
        placement: {
            from: from,
            align: align
        }
    });
}
// Render all simulation data that is modified between each iteration, such as allocated memory, page faults, etc.
function renderData(){
$('#pageFaults').html(`${numberPageFaults} <small>Page Faults.</small>`);
renderMemorySizeStats();
if (primaryMemoryList.length > 0)
document.getElementById("lastPageRequested").innerHTML = `Última Página Requisitada: <strong>Página ${primaryMemoryList[primaryMemoryList.length-1].pageId} do Processo ${primaryMemoryList[primaryMemoryList.length-1].processId}</strong>`;
else
document.getElementById("lastPageRequested").innerHTML = `Última Página Requisitada: <strong>--</strong>`;
if (primaryMemoryList.length > 0)
document.getElementById("lastPagePrimaryMemory").innerHTML = `Última Página Adicionada: <strong>Página ${primaryMemoryList[primaryMemoryList.length-1].pageId} do Processo ${primaryMemoryList[primaryMemoryList.length-1].processId}</strong>`;
else
document.getElementById("lastPagePrimaryMemory").innerHTML = `Última Página Adicionada: <strong>--</strong>`;
if (virtualMemoryList.length > 0)
document.getElementById("lastPageVirtualMemory").innerHTML = `Última Página Adicionada: <strong>Página ${virtualMemoryList[virtualMemoryList.length-1].pageId} do Processo ${virtualMemoryList[virtualMemoryList.length-1].processId}</strong>`;
else
document.getElementById("lastPageVirtualMemory").innerHTML = `Última Página Adicionada: <strong>--</strong>`;
// Rendering instructions list and memory data;
renderList();
// Render charts;
renderPageFaultChart();
// Showing instruction logs
renderLog();
}
|
The coronavirus pandemic has resulted in mass job losses and an unprecedented increase in public debt. Governments around the world have had to invest heavily in support measures in order to prevent an even deeper economic crisis. |
import styled from 'styled-components';
import { Svg } from 'react-optimized-image';
// Svg (react-optimized-image) wrapped with a red background.
export const StyledSvg = styled(Svg)`
  background-color: red;
`;
|
<reponame>ksmit799/POTCO-PS
# File: P (Python 2.4)
from pirates.piratesgui.GuiPanel import *
from pirates.piratesgui import GuiButton
from pirates.piratesbase import PLocalizer
from pirates.uberdog.UberDogGlobals import InventoryType, InventoryCategory
from pirates.piratesgui import CheckButton
class XButton(GuiButton.GuiButton):
    """Close/accept button that shows a glow overlay while hovered."""

    def __init__(self, parent = None, close = True, **kw):
        optiondefs = ()
        self.defineoptions(kw, optiondefs)
        GuiButton.GuiButton.__init__(self, parent)
        self.initialiseoptions(XButton)
        mainGui = loader.loadModel('models/gui/gui_main')
        # Close buttons use a symmetric glow scale; accept buttons a wider one.
        if close:
            glowscale = (0.40000000000000002, 0.40000000000000002, 0.40000000000000002)
        else:
            glowscale = (0.59999999999999998, 0.40000000000000002, 0.40000000000000002)
        self.glow = OnscreenImage(parent = self, image = mainGui.find('**/icon_glow'), scale = glowscale, color = (1.0, 1.0, 1.0, 0.40000000000000002))
        self.glow.hide()
        mainGui.removeNode()
        # Toggle the glow as the mouse enters/leaves the button.
        self.bind(DGG.ENTER, self.highlightOn)
        self.bind(DGG.EXIT, self.highlightOff)

    def highlightOn(self, event):
        # Mouse entered: show the hover glow.
        self.glow.show()

    def highlightOff(self, event):
        # Mouse left: hide the hover glow.
        self.glow.hide()
class IgnoreCheck(CheckButton.CheckButton):
    """Checkbox with a hover glow; previews the checkmark on mouse-over."""

    def __init__(self, parent = None, **kw):
        optiondefs = ()
        self.defineoptions(kw, optiondefs)
        CheckButton.CheckButton.__init__(self, parent)
        self.initialiseoptions(IgnoreCheck)
        mainGui = loader.loadModel('models/gui/gui_main')
        self.glow = OnscreenImage(parent = self, image = mainGui.find('**/icon_glow'), scale = 0.33000000000000002, color = (1.0, 1.0, 1.0, 0.59999999999999998))
        self.glow.hide()
        mainGui.removeNode()
        # Toggle the glow as the mouse enters/leaves the checkbox.
        self.bind(DGG.ENTER, self.highlightOn)
        self.bind(DGG.EXIT, self.highlightOff)

    def setValue(self):
        """Commit the check state and position/scale the checkmark geom."""
        CheckButton.CheckButton.setValue(self)
        self['geom_hpr'] = (0, 0, 45)
        self['geom_pos'] = (0.029999999999999999, 0, 0.044999999999999998)
        self['geom_scale'] = 0.59999999999999998
        if hasattr(self, 'glow'):
            self.glow.hide()

    def highlightOn(self, event):
        """Mouse entered: show the glow; preview the checked geom if unchecked."""
        self.glow.show()
        if not self['value']:
            self['geom'] = self['checkedGeom']
            self['geom_hpr'] = (0, 0, 45)
            self['geom_pos'] = (0.029999999999999999, 0, 0.044999999999999998)
            self['geom_scale'] = 0.59999999999999998

    def highlightOff(self, event):
        """Mouse left: hide the glow; drop the preview geom if unchecked."""
        self.glow.hide()
        if not self['value']:
            self['geom'] = None
class PotionHint(DirectFrame):
    """Parchment-style dialog that presents potion-crafting hints one
    message at a time, with accept/close buttons and a checkbox that lets
    the player disable hints entirely."""

    def __init__(self, potionGame):
        self.potionGame = potionGame
        topGui = loader.loadModel('models/gui/toplevel_gui')
        mainGui = loader.loadModel('models/gui/gui_main')
        DirectFrame.__init__(self, parent = potionGame.dialogs, relief = None)
        # Dark glow backdrop behind the parchment background.
        self.glow = OnscreenImage(parent = self, image = mainGui.find('**/icon_glow'), color = (0.0, 0.0, 0.0, 1.0), scale = (12.0, 1.0, 8.0))
        parch = topGui.find('**/pir_t_gui_gen_parchment')
        parch.setScale(0.47249999999999998, 1.0, 0.63749999999999996)
        self.background = parch.copyTo(self)
        self.titleLabel = DirectLabel(parent = self, relief = None, text = PLocalizer.PotionGui['HintTitle'], text_align = TextNode.ACenter, text_scale = PiratesGuiGlobals.TextScaleTitleSmall, text_fg = (0.59999999999999998, 0.0, 0.0, 1.0), text_font = PiratesGlobals.getPirateOutlineFont(), text_wordwrap = 24, textMayChange = 0, pos = (0.0, 0, 0.20000000000000001))
        self.toggleLabel = DirectLabel(parent = self, relief = None, text = PLocalizer.PotionGui['HintToggle'], text_align = TextNode.ALeft, text_scale = PiratesGuiGlobals.TextScaleMed, text_fg = PiratesGuiGlobals.TextFG0, text_wordwrap = 24, textMayChange = 0, pos = (-0.20000000000000001, 0, -0.13))
        # Red X: closes the hint stream (shown when no more messages remain).
        self.bClose = XButton(parent = self, relief = None, pos = (0.41999999999999998, 0, 0.23999999999999999), image = topGui.find('**/pir_t_gui_gen_Xred'), image_scale = 0.59999999999999998, command = self.showNextMessage)
        self.bClose.stash()
        # Checkbox that lets the player turn hints off entirely.
        self.noHintsCheck = IgnoreCheck(parent = self, relief = None, image = topGui.find('**/pir_t_gui_gen_box_empty'), image_scale = 0.75, checkedGeom = topGui.find('**/pir_t_gui_gen_Check_Red'), pos = (-0.25, 0, -0.12), command = self.noHintsCheckCB)
        # Accept button: advances to the next queued hint message.
        self.bAccept = XButton(text = PLocalizer.PotionGui['HintAccept'], image = (None, None, None, None), text0_fg = PiratesGuiGlobals.TextFG23, text1_fg = PiratesGuiGlobals.TextFG23, text2_fg = PiratesGuiGlobals.TextFG23, text3_fg = PiratesGuiGlobals.TextFG9, text_pos = (0.029999999999999999, 0, -0.02), text_font = PiratesGlobals.getPirateOutlineFont(), text_shadow = PiratesGuiGlobals.TextShadow, text_align = TextNode.ARight, close = False, text_scale = PiratesGuiGlobals.TextScaleTitleSmall, command = self.showNextMessage)
        self.bAccept.reparentTo(self)
        self.bAccept.setPos(0.41999999999999998, 0, 0.22)
        self.bAccept.stash()
        self.message = None
        self.showHints = True
        self.lastHint = None
        self.messageQueue = []
        # Players holding the crafting-instructions token have hints off.
        inv = localAvatar.getInventory()
        if inv.getStackQuantity(InventoryType.PotionCraftingInstructionsToken) > 0:
            self.showHints = False
        self.hintCB = self.showHints
        self.noHintsCheck['value'] = self.hintCB
        # Track which hint keys have already been displayed.
        self.hintShown = { }
        for hintKey in PLocalizer.PotionHints.keys():
            self.hintShown[hintKey] = False
        topGui.removeNode()
        mainGui.removeNode()

    def destroy(self):
        """Tear down child widgets before destroying the frame itself."""
        self.bAccept.destroy()
        self.bClose.destroy()
        self.noHintsCheck.destroy()
        DirectFrame.destroy(self)

    def noHintsCheckCB(self, val):
        """Checkbox callback: swap the accept/close buttons for the new value."""
        self.hintCB = val
        if self.hintCB and len(self.messageQueue) > 0:
            self.bAccept.unstash()
            self.bClose.stash()
        else:
            self.bAccept.stash()
            self.bClose.unstash()

    def setHintsEnabled(self, hintsOn):
        """Enable/disable hints globally and sync the distributed flag."""
        self.showHints = hintsOn
        for hintKey in PLocalizer.PotionHints.keys():
            self.hintShown[hintKey] = not hintsOn
        self.potionGame.dist.d_setHintsActive(hintsOn)
        self.hintCB = self.showHints
        self.noHintsCheck['value'] = self.hintCB

    def showLastHint(self):
        """Re-enable hints and redisplay the most recent hint, if any."""
        self.setHintsEnabled(True)
        if self.lastHint is not None and len(self.messageQueue) == 0:
            if self.show(self.lastHint):
                self.hintShown[self.lastHint] = False
                self.potionGame.gameFSM.demand('Tutorial')

    def showNextMessage(self):
        """Pop and display the next queued message, or finish the stream."""
        if len(self.messageQueue) > 0 and self.hintCB:
            if self.message is not None:
                self.message.removeNode()
            self.messageText = self.messageQueue.pop()
            self.message = DirectLabel(parent = self, relief = None, text = self.messageText, text_scale = PiratesGuiGlobals.TextScaleLarge, text_align = TextNode.ACenter, text_fg = PiratesGuiGlobals.TextFG0, text_shadow = None, text_wordwrap = 32, pos = (0.02, 0, 0.10000000000000001), textMayChange = 0)
            # Show 'accept' while more messages remain, else the close X.
            if len(self.messageQueue) > 0:
                self.bAccept.unstash()
                self.bClose.stash()
            else:
                self.bAccept.stash()
                self.bClose.unstash()
        elif self.hintCB:
            self.accept()
        else:
            self.dismiss()

    def forceShow(self, hintKey, forced = True):
        """Queue and display hintKey's messages, bypassing the shown-state."""
        self.hintShown[hintKey] = True
        # Prepend the new messages; the queue is consumed from the end via pop().
        self.messageQueue.reverse()
        self.messageQueue.extend(PLocalizer.PotionHints[hintKey])
        self.messageQueue.reverse()
        if forced:
            self.noHintsCheck.stash()
        else:
            self.noHintsCheck.unstash()
        self.showNextMessage()
        self.unstash()
        self.potionGame.closeCurrentDialog = self.cleanUp
        self.potionGame.disableButtons()

    def show(self, hintKey):
        """Show hintKey once if hints are enabled; returns whether shown."""
        self.lastHint = hintKey
        if self.showHints and not self.hintShown[hintKey]:
            if self.potionGame.closeCurrentDialog is not None:
                self.potionGame.closeCurrentDialog()
            self.forceShow(hintKey, False)
            return True
        else:
            return False

    def toggle(self):
        """Toggle visibility: reopen the last hint or advance the stream."""
        if self.isStashed():
            self.showLastHint()
        else:
            self.showNextMessage()

    def dismiss(self):
        """Permanently turn hints off and close the dialog."""
        self.setHintsEnabled(False)
        self.accept()

    def cleanUp(self):
        """Drain the queue, hide the dialog and restore the game's buttons."""
        while len(self.messageQueue) > 0:
            self.messageQueue.pop()
        self.stash()
        self.potionGame.closeCurrentDialog = None
        self.potionGame.enableButtons()

    def accept(self):
        """Close the dialog and advance the game FSM appropriately."""
        self.cleanUp()
        if self.potionGame.gameFSM.gameStarted:
            self.potionGame.gameFSM.demand('Eval')
        else:
            self.potionGame.gameFSM.demand('RecipeSelect')
|
#!/bin/sh
# Download and extract the MUSAN corpus (music/speech/noise).
# Usage: ./script.sh [target-folder]   (defaults to the current directory)
if [ -z "$1" ]
then
  DLFOLDER="."
else
  DLFOLDER="$1"
fi
# --continue resumes a partial download. Quote the folder so paths
# containing spaces don't word-split.
wget --continue -P "$DLFOLDER" http://www.openslr.org/resources/17/musan.tar.gz
tar xzf "$DLFOLDER/musan.tar.gz" -C "$DLFOLDER"
|
def find_keyword(sentence, keyword):
    """Return the index of ``keyword`` among the whitespace-split words of
    ``sentence``, matching case-insensitively.

    Raises ValueError if the keyword does not occur.
    """
    words = sentence.lower().split()
    # BUG FIX: the sentence was lowercased but the keyword was not, so a
    # keyword containing uppercase letters could never match.
    return words.index(keyword.lower())
#!/bin/sh
# Bootstraps a Poetry-managed Python project with an in-project virtualenv.
## install poetry
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
## initialize the project
# cd my_project
poetry init -n
poetry config --local virtualenvs.in-project true
# check the path to your virtualenv
poetry env info --path # defines {path_to_venv}
# activate the local virtual env
# NOTE(review): '{path_to_venv}' is a placeholder — substitute the path
# printed by the previous command before running this line.
source {path_to_venv}/bin/activate
## install dependencies
poetry add --dev pytest pytest-cov
poetry add --dev black --allow-prereleases
poetry add --dev requests
# poetry add --dev re sys # fails on my local machine
class AdventureGame:
    """Minimal text-adventure state: current room, inventory, room graph.

    NOTE(review): ``rooms`` values are treated as a direction->room mapping
    in ``move`` but as an item collection in ``pickup``/``use`` — the
    intended room schema should be confirmed with the callers.
    """

    def __init__(self, initial_room):
        self.current_room = initial_room
        self.inventory = {}
        self.rooms = {}

    def move(self, direction):
        """Step through ``direction`` if the current room has that exit."""
        valid = {'north', 'south', 'east', 'west'}
        if direction not in valid:
            raise ValueError("Invalid direction.")
        exits = self.rooms[self.current_room]
        if direction not in exits:
            raise ValueError("Invalid direction.")
        self.current_room = exits[direction]

    def pickup(self, item):
        """Transfer ``item`` from the current room into the inventory."""
        room = self.rooms[self.current_room]
        if item not in room:
            raise ValueError("Item not found.")
        self.inventory[item] = room.pop(room.index(item))

    def use(self, item, target):
        """Consume ``target`` in the current room using an inventory item."""
        if item not in self.inventory:
            raise ValueError("Item not in inventory.")
        room = self.rooms[self.current_room]
        if target not in room:
            raise ValueError("Target not found in the current room.")
        room.remove(target)
#!/bin/sh
# if using vim, do ':set ft=zsh' for easier reading
source $JBOSS_HOME/bin/launch/logging.sh
# Builds the java argument string (classpath over the ROOT.war libs plus
# the launch jars) used to invoke jboss-modules for KIE tooling.
function getKieJavaArgs() {
    local kieJarDir="${JBOSS_HOME}/standalone/deployments/ROOT.war/WEB-INF/lib"
    local kieClassPath="."
    # Accumulate every deployed jar onto the classpath.
    for kieJar in ${kieJarDir}/*.jar; do
        kieClassPath="${kieClassPath}:${kieJar}"
    done
    # Also include the launch-time helper jars.
    for launchJar in ${JBOSS_HOME}/bin/launch/*.jar; do
        kieClassPath="${kieClassPath}:${launchJar}"
    done
    echo "-Dorg.slf4j.simpleLogger.defaultLogLevel=WARN -jar ${JBOSS_HOME}/jboss-modules.jar -mp ${JBOSS_HOME}/modules -dep javax.enterprise.api,javax.inject.api,sun.jdk -cp ${kieClassPath}"
}
# Discovers the KIE server container deployment string (override EnvVar >
# standard EnvVar > deployments file) and parses it into indexed variables:
# KIE_SERVER_CONTAINER_ID_<i> and KIE_SERVER_CONTAINER_KJAR_{GROUP_ID,
# ARTIFACT_ID,VERSION}_<i>, plus KIE_SERVER_CONTAINER_DEPLOYMENT_COUNT.
function setKieEnv() {
    # discover kie server container deployments
    local kieServerContainerDeploymentsFile="${JBOSS_HOME}/kieserver-container-deployments.txt"
    if [ "x${KIE_SERVER_CONTAINER_DEPLOYMENT_OVERRIDE}" != "x" ]; then
        log_info "Encountered EnvVar KIE_SERVER_CONTAINER_DEPLOYMENT_OVERRIDE: ${KIE_SERVER_CONTAINER_DEPLOYMENT_OVERRIDE}"
        if [ "x${KIE_SERVER_CONTAINER_DEPLOYMENT}" != "x" ]; then
            # Preserve the pre-override value for later inspection.
            KIE_SERVER_CONTAINER_DEPLOYMENT_ORIGINAL="${KIE_SERVER_CONTAINER_DEPLOYMENT}"
            export KIE_SERVER_CONTAINER_DEPLOYMENT_ORIGINAL
            log_info "Setting EnvVar KIE_SERVER_CONTAINER_DEPLOYMENT_ORIGINAL: ${KIE_SERVER_CONTAINER_DEPLOYMENT_ORIGINAL}"
        fi
        KIE_SERVER_CONTAINER_DEPLOYMENT="${KIE_SERVER_CONTAINER_DEPLOYMENT_OVERRIDE}"
        export KIE_SERVER_CONTAINER_DEPLOYMENT
        log_info "Using overridden EnvVar KIE_SERVER_CONTAINER_DEPLOYMENT: ${KIE_SERVER_CONTAINER_DEPLOYMENT}"
    elif [ "x${KIE_SERVER_CONTAINER_DEPLOYMENT}" != "x" ]; then
        log_info "Using standard EnvVar KIE_SERVER_CONTAINER_DEPLOYMENT: ${KIE_SERVER_CONTAINER_DEPLOYMENT}"
    elif [ -e ${kieServerContainerDeploymentsFile} ]; then
        # Fall back to the deployments file: one deployment per line,
        # joined with '|' separators.
        local kieServerContainerDeployments=""
        while read kieServerContainerDeployment ; do
            # add pipe at end of each
            kieServerContainerDeployments="${kieServerContainerDeployments}${kieServerContainerDeployment}|"
        done <${kieServerContainerDeploymentsFile}
        # remove last unecessary pipe
        kieServerContainerDeployments=$(echo ${kieServerContainerDeployments} | sed "s/\(.*\)|/\1/")
        KIE_SERVER_CONTAINER_DEPLOYMENT="${kieServerContainerDeployments}"
        export KIE_SERVER_CONTAINER_DEPLOYMENT
        log_info "Read ${kieServerContainerDeploymentsFile} into EnvVar KIE_SERVER_CONTAINER_DEPLOYMENT: ${KIE_SERVER_CONTAINER_DEPLOYMENT}"
    fi
    # process kie server container deployments
    if [ "x${KIE_SERVER_CONTAINER_DEPLOYMENT}" != "x" ]; then
        # kieServerContainerDeployment|kieServerContainerDeployment
        IFS='|' read -a kieServerContainerDeploymentArray <<< "${KIE_SERVER_CONTAINER_DEPLOYMENT}"
        local kieServerContainerDeploymentCount=${#kieServerContainerDeploymentArray[@]}
        KIE_SERVER_CONTAINER_DEPLOYMENT_COUNT="${kieServerContainerDeploymentCount}"
        for (( i=0; i<${kieServerContainerDeploymentCount}; i++ )); do
            # containerId=releaseId
            local kieServerContainerDeployment=${kieServerContainerDeploymentArray[i]}
            IFS='=' read -a kieServerContainerDefinitionArray <<< "${kieServerContainerDeployment}"
            local kieServerContainerId=${kieServerContainerDefinitionArray[0]}
            local kjarReleaseId=${kieServerContainerDefinitionArray[1]}
            eval "KIE_SERVER_CONTAINER_ID_${i}=\"${kieServerContainerId}\""
            # groupId:artifactId:version
            IFS=':' read -a kjarReleaseIdArray <<< "${kjarReleaseId}"
            local kjarGroupId=${kjarReleaseIdArray[0]}
            local kjarArtifactId=${kjarReleaseIdArray[1]}
            local kjarVersion=${kjarReleaseIdArray[2]}
            eval "KIE_SERVER_CONTAINER_KJAR_GROUP_ID_${i}=${kjarGroupId}"
            eval "KIE_SERVER_CONTAINER_KJAR_ARTIFACT_ID_${i}=${kjarArtifactId}"
            eval "KIE_SERVER_CONTAINER_KJAR_VERSION_${i}=${kjarVersion}"
        done
    else
        KIE_SERVER_CONTAINER_DEPLOYMENT_COUNT=0
        log_warning "Warning: EnvVar KIE_SERVER_CONTAINER_DEPLOYMENT is missing."
        log_warning "Example: export KIE_SERVER_CONTAINER_DEPLOYMENT='containerId=groupId:artifactId:version|c2=g2:a2:v2'"
    fi
}
# Echoes the value of the dynamically-named variable
# KIE_SERVER_CONTAINER_<SUFFIX>_<INDEX>, e.g. `getKieServerContainerVal ID 0`.
function getKieServerContainerVal() {
    local kieServerContainerVar="KIE_SERVER_CONTAINER_${1}_${2}"
    eval "echo \$${kieServerContainerVar}"
}
# Prints all KIE container-deployment environment variables for debugging.
function dumpKieEnv() {
    echo "KIE_SERVER_CONTAINER_DEPLOYMENT: ${KIE_SERVER_CONTAINER_DEPLOYMENT}"
    echo "KIE_SERVER_CONTAINER_DEPLOYMENT_ORIGINAL: ${KIE_SERVER_CONTAINER_DEPLOYMENT_ORIGINAL}"
    echo "KIE_SERVER_CONTAINER_DEPLOYMENT_OVERRIDE: ${KIE_SERVER_CONTAINER_DEPLOYMENT_OVERRIDE}"
    echo "KIE_SERVER_CONTAINER_DEPLOYMENT_COUNT: ${KIE_SERVER_CONTAINER_DEPLOYMENT_COUNT}"
    for (( i=0; i<${KIE_SERVER_CONTAINER_DEPLOYMENT_COUNT}; i++ )); do
        echo "KIE_SERVER_CONTAINER_ID_${i}: $(getKieServerContainerVal ID ${i})"
        echo "KIE_SERVER_CONTAINER_KJAR_GROUP_ID_${i}: $(getKieServerContainerVal KJAR_GROUP_ID ${i})"
        echo "KIE_SERVER_CONTAINER_KJAR_ARTIFACT_ID_${i}: $(getKieServerContainerVal KJAR_ARTIFACT_ID ${i})"
        echo "KIE_SERVER_CONTAINER_KJAR_VERSION_${i}: $(getKieServerContainerVal KJAR_VERSION ${i})"
    done
}
|
import pygame
from pygame.locals import *
from sys import exit
class StateManager(object):
    """Owns a registry of game states and tracks the currently active one."""

    def __init__(self, states):
        self.GAME_STATES = states
        self.CURRENT_STATE = None
        self.RUNNING = False

    def state_exists(self, state_id):
        """Return True when ``state_id`` is a registered state key."""
        return state_id in self.GAME_STATES

    def startup(self):
        """Mark the manager running and select the first non-None state.

        Returns True on success, False when no states are registered.
        NOTE(review): this stores a *key* of GAME_STATES in CURRENT_STATE,
        while change_state stores a *value* — confirm which is intended.
        """
        if not self.GAME_STATES:
            return False
        self.RUNNING = True
        for candidate in self.GAME_STATES:
            if candidate is not None:
                self.CURRENT_STATE = candidate
                break
        return True

    def cleanup(self):
        """Tear everything down so the manager can be discarded."""
        self.GAME_STATES.clear()
        self.CURRENT_STATE = None
        self.RUNNING = False

    def change_state(self, state_id):
        """Swap the active state for ``state_id``; returns success."""
        if not self.state_exists(state_id=state_id):
            return False
        self.CURRENT_STATE.cleanup()  # retire the outgoing state
        self.CURRENT_STATE = self.GAME_STATES[state_id]
        self.CURRENT_STATE.startup()  # boot the incoming state
        return True

    def update(self):
        """Per-frame update hook (currently a no-op)."""
        pass

    def draw(self):
        """Per-frame draw hook (currently a no-op)."""
        pass

    def is_running(self):
        """Report whether startup() succeeded and cleanup() hasn't run."""
        return self.RUNNING

    # INITIALIZATION AND RUNNING
    @staticmethod
    def init():
        """Bring up pygame and create the 640x480 double-buffered window."""
        pygame.init()
        screen = pygame.display.set_mode((640, 480), pygame.DOUBLEBUF, 32)
        pygame.display.set_caption("Connect4X")

    @staticmethod
    def run():
        """Main loop: drain the event queue, quit on QUIT, flip the display."""
        while True:
            for event in pygame.event.get():
                if event.type == QUIT:
                    exit()
            pygame.display.update()
|
// Umbrella header for the Down framework.
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#endif
#import "Down.h"
// Framework version symbols exported for consumers.
FOUNDATION_EXPORT double DownVersionNumber;
FOUNDATION_EXPORT const unsigned char DownVersionString[];
|
# Print the even integers from 0 through 100, one per line.
for number in range(0, 101, 2):
    print(number)
#!/usr/bin/env bash
# This script lets users upload a specific cli spec to fig's cloud
# Terminal styling escape sequences (tput) used by the messages below.
MAGENTA=$(tput setaf 5)
RED=$(tput setaf 1)
BOLD=$(tput bold)
NORMAL=$(tput sgr0)
HIGHLIGHT=$(tput smso)
HIGHLIGHT_END=$(tput rmso)
TAB='    '
# Prints its arguments indented, resetting styling and adding a blank line.
print_special() {
    echo "${TAB}$@${NORMAL}"$'\n'
}
#####################################
# State
#####################################
# Make sure dev_mode != 1 when this is pushed live
dev_mode=0
if [[ "$dev_mode" == '1' ]]
then
    # Local development endpoints.
    echo
    echo "currently in dev mode"
    echo
    filename_endpoint="http://localhost:3000/autocomplete/team-file-name"
    download_endpoint="http://localhost:3000/autocomplete/download-team-file"
else
    # Production endpoints.
    filename_endpoint="https://waitlist.withfig.com/autocomplete/team-file-name"
    download_endpoint="https://waitlist.withfig.com/autocomplete/download-team-file"
fi
# Subcommand names referenced in the help text below.
subcommand_name="team:download"
upload_subcommand_name="team:upload"
#####################################
# Tells the user their Fig login is broken and exits non-zero. The
# commented-out block below is an abandoned auto-logout countdown flow.
prompt_to_logout() {
    # cat <<EOF
    echo
    print_special "${BOLD}It looks like you are not properly logged into ${MAGENTA}Fig${NORMAL}"
    echo
    print_special "Please logout using ${BOLD}${MAGENTA}fig util:logout${NORMAL} then log back in and try again"
    # print_special "Fig will log you out and prompt you to log in again"
    # echo
    # print_special "When you've logged back in, please re-run ${BOLD}fig $subcommand_name ${NORMAL}"
    # echo
    # echo
    # # https://serverfault.com/questions/532559/bash-script-count-down-5-minutes-display-on-single-line
    # # Countdown timer
    # secs=$((8))
    # print_special "Press ctrl + c to cancel"
    # while [ $secs -gt 0 ]; do
    #    echo -ne "${TAB}Time remaining before logout: $secs\033[0K\r"
    #    sleep 1
    #    : $((secs--))
    # done
    # fig util:logout
    exit 1
}
#####################################
# Check token exists locally and is valid
#####################################
# Read the cached OAuth token from macOS defaults.
local_access_token=$(defaults read com.mschrage.fig access_token 2> /dev/null)
# BUG FIX: quote the variable so an empty/whitespace token can't break the
# test through word splitting.
if [ -z "$local_access_token" ]
then
    prompt_to_logout
fi
#####################################
# Make post request to fig server
#####################################
# Ask the backend for the name of this team's spec file.
file_name=$(curl -s -X POST \
    -H "Authorization: Bearer $local_access_token" \
    "$filename_endpoint" 2> /dev/null)
#####################################
# Support
#####################################
if [[ "$file_name" == ERROR* ]]
then
cat <<EOF
${BOLD}${RED}Error${NORMAL}
$file_name
There was an error downloading your team's private completion specs.
Please contact ${BOLD}hello@fig.io${NORMAL} for support
EOF
    # BUG FIX: previously fell through to the download below even though
    # the server reported an error.
    exit 1
elif [ -z "$file_name" ]
then
cat <<EOF
There don't seem to be any private completion specs associated with your team's domain.
Are you sure you / your team have uploaded private completion specs?
--
To upload completion specs, use:
fig $upload_subcommand_name <file path to private completion spec>
${BOLD}Examples${NORMAL}
fig $upload_subcommand_name ~/.fig/team/acme.js
fig $upload_subcommand_name /path/to/acme.js
EOF
    # BUG FIX: there is nothing to download; the comment after this block
    # assumes a valid file name, so stop here.
    exit 1
fi
# If we are here, we know we have a file that exists
# https://stackoverflow.com/questions/21950049/create-a-text-file-in-node-js-from-a-string-and-stream-it-in-response
# -o "$file_name"
# Download the actual spec contents for this team.
result=$(curl -s -X POST \
    -H "Authorization: Bearer $local_access_token" \
    $download_endpoint \
    2> /dev/null )
# Treat an empty body or an ERROR-prefixed body as a failed download.
if [[ -z "$result" ]] || [[ "$result" == ERROR* ]]
then
cat <<EOF
${BOLD}${MAGENTA}Error${NORMAL}
$result
There was an error downloading and/or saving your team's private autocomplete spec ${BOLD}${MAGENTA}$file_name${NORMAL}
If this problem persists, please contact hello@fig.io for support.
EOF
else
    # Save the spec under ~/.fig/team and expose it to autocomplete via a
    # forced symlink (ln -fs replaces any stale link).
    touch ~/.fig/team/$file_name
    echo "$result" > ~/.fig/team/$file_name
    # symlink and force option
    ln -fs ~/.fig/team/"$file_name" ~/.fig/autocomplete/"$file_name"
cat <<EOF
${BOLD}${MAGENTA}Success${NORMAL}
Your team's completion spec ${BOLD}${MAGENTA}$file_name${NORMAL} was successfully downloaded/updated.
EOF
fi
|
from typing import List
from requests import get
from django.urls import reverse
class Host:
    """Lightweight stand-in for a host record, carrying only its primary key."""

    def __init__(self, pk: int):
        # Primary key used to build API detail URLs for this host.
        self.pk = pk
def simulate_api_request(hosts: List[Host]) -> bool:
    """Check that a host's detail endpoint advertises the expected fact_versions URL.

    Only the first host in ``hosts`` is inspected (original behavior).

    Returns:
        True when the detail payload's ``related.fact_versions`` link equals the
        reversed ``api:host_fact_versions_list`` URL for that host; False when
        the list is empty, the keys are missing, or the URLs differ.
    """
    if not hosts:
        return False  # No hosts provided
    host = hosts[0]  # For simplicity, consider only the first host
    url = reverse('api:host_detail', kwargs={'pk': host.pk})
    # Parse the body once: requests' Response.json() re-parses on every call,
    # so the original triple call did the JSON decode three times.
    payload = get(url).json()
    related = payload.get('related', {})
    if 'fact_versions' in related:
        expected_fact_versions_url = reverse('api:host_fact_versions_list', kwargs={'pk': host.pk})
        return expected_fact_versions_url == related['fact_versions']
    return False
package com.aveng.wapp.service.exception.handler;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.lang.NonNull;
import org.springframework.stereotype.Component;
import org.springframework.util.ObjectUtils;
import org.springframework.validation.FieldError;
import org.springframework.validation.ObjectError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.context.request.WebRequest;
import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler;
import com.aveng.wapp.web.rest.model.ApiResponse;
import lombok.extern.slf4j.Slf4j;
/**
 * Centralizes handling of Spring MVC framework exceptions so that every
 * error reaches the client wrapped in a uniform {@link ApiResponse}.
 *
 * @author apaydin
 */
@Component
@Slf4j
public class SpringExceptionHandler extends ResponseEntityExceptionHandler {

    /**
     * Translates an unparseable request body into a structured error response
     * carrying the stack trace as diagnostic data.
     */
    @Override
    @NonNull
    protected ResponseEntity<Object> handleHttpMessageNotReadable(HttpMessageNotReadableException ex,
        HttpHeaders headers, HttpStatus status, WebRequest request) {

        ApiResponse<String> body = ApiResponse.<String>builder()
            .message("Malformed JSON request")
            .data(ExceptionUtils.getStackTrace(ex))
            .build();
        return new ResponseEntity<>(body, status);
    }

    /**
     * Translates bean-validation failures into a response listing one
     * human-readable message per rejected field or object.
     */
    @Override
    @NonNull
    protected ResponseEntity<Object> handleMethodArgumentNotValid(MethodArgumentNotValidException ex,
        HttpHeaders headers, HttpStatus status, WebRequest request) {

        List<String> messages = ex.getBindingResult()
            .getAllErrors()
            .stream()
            .map(SpringExceptionHandler::describe)
            .collect(Collectors.toList());

        ApiResponse<List<String>> body = ApiResponse.<List<String>>builder()
            .message("Invalid object")
            .data(messages)
            .build();
        return new ResponseEntity<>(body, status);
    }

    /** Renders a single validation error as a human-readable message. */
    private static String describe(ObjectError objectError) {
        if (objectError instanceof FieldError) {
            FieldError fieldError = (FieldError) objectError;
            return "Field error in object '" + fieldError.getObjectName() + "' on field '" + fieldError.getField()
                + "': rejected value [" + ObjectUtils.nullSafeToString(fieldError.getRejectedValue()) + "]; "
                + fieldError.getDefaultMessage();
        }
        return "Object error in object '" + objectError.getObjectName() + "', "
            + objectError.getDefaultMessage();
    }
}
|
<filename>internal/app/config/oauth2.go<gh_stars>0
package config
// OAuth2Cred holds the client credentials and CSRF state value used for an
// OAuth2 flow. Each field can be populated either from a YAML config file
// (via the yaml tag) or from the corresponding environment variable (env tag).
type OAuth2Cred struct {
	ClientID     string `yaml:"client_id" env:"OAUTH_CLIENT_ID"`
	ClientSecret string `yaml:"client_secret" env:"OAUTH_CLIENT_SECRET"`
	RandomState  string `yaml:"random_state" env:"OAUTH_RANDOM_STATE"`
}
|
import React from 'react'
import shortid from 'shortid'
export default class Dots extends React.Component {
constructor(props) {
super(props)
this.handleClick = this.handleClick.bind(this)
}
handleClick(nbr) {
this.props.handleClickDot(nbr)
}
render() {
const { nbrSlides, currentSlide } = this.props
const items = []
for (let i = 0; i < nbrSlides; i++) {
const li = (
<li
className={'Dot' + (i === currentSlide ? ' current' : '')}
onClick={_ => this.handleClick(i)}
key={shortid.generate()}
>
</li>
)
items.push(li)
}
return <ul className="Dots">{items}</ul>
}
}
|
#!/bin/bash
# Kaldi queue job wrapper: log nnet training progress between model
# iterations 129 and 130, recording timing and exit-status accounting.
cd /home/nlpserver/zzilong/kaldi/egs/supermarket-product
. ./path.sh
# Write a header (host, start time, and the exact command) to the log file.
( echo '#' Running on $(hostname)
  echo '#' Started at $(date)
  echo -n '# '; cat <<EOF
nnet-show-progress --use-gpu=no exp/nnet4a/129.mdl exp/nnet4a/130.mdl ark:exp/nnet4a/egs/train_diagnostic.egs
EOF
) >exp/nnet4a/log/progress.130.log
time1=$(date +"%s")
# Run the diagnostic, appending both stdout and stderr to the same log.
( nnet-show-progress --use-gpu=no exp/nnet4a/129.mdl exp/nnet4a/130.mdl ark:exp/nnet4a/egs/train_diagnostic.egs ) 2>>exp/nnet4a/log/progress.130.log >>exp/nnet4a/log/progress.130.log
ret=$?
time2=$(date +"%s")
echo '#' Accounting: time=$((time2-time1)) threads=1 >>exp/nnet4a/log/progress.130.log
echo '#' Finished at $(date) with status $ret >>exp/nnet4a/log/progress.130.log
# Exit status 137 (SIGKILL, typically the queue's memory limit) is remapped to
# 100 so the scheduler treats it as a resource failure rather than a task error.
[ $ret -eq 137 ] && exit 100;
# Marker file telling the queue system this job slot completed.
touch exp/nnet4a/q/done.5894
exit $(( ret ? 1 : 0 ))
## submitted with:
# qsub -v PATH -cwd -S /bin/bash -j y -l arch=*64* -o exp/nnet4a/q/progress.130.log  -l mem_free=10G,ram_free=2G,arch=*64    /home/nlpserver/zzilong/kaldi/egs/supermarket-product/exp/nnet4a/q/progress.130.sh >>exp/nnet4a/q/progress.130.log 2>&1
|
<reponame>kasuganosora/journey
package methods
import (
"github.com/kabukky/journey/database"
"github.com/kabukky/journey/date"
"github.com/kabukky/journey/structure"
)
// SaveUser persists a new user record (with an already-hashed password) and
// then links the user to their role. The creating user's id and the current
// time are recorded. Returns the first database error encountered, or nil.
func SaveUser(u *structure.User, hashedPassword string, createdBy int64) error {
	userId, err := database.InsertUser(u.Name, u.Slug, hashedPassword, u.Email, u.Image, u.Cover, date.GetCurrentTime(), createdBy)
	if err != nil {
		return err
	}
	// The role link uses the id generated by the user insert above.
	return database.InsertRoleUser(u.Role, userId)
}
// UpdateUser writes the editable profile fields of an existing user back to
// the database, stamping the update time and the id of the editing user.
func UpdateUser(u *structure.User, updatedById int64) error {
	return database.UpdateUser(u.Id, u.Name, u.Slug, u.Email, u.Image, u.Cover, u.Bio, u.Website, u.Location, date.GetCurrentTime(), updatedById)
}
|
def is_divisible(n, m):
    """Return True if ``n`` is evenly divisible by ``m``, else False.

    Raises ZeroDivisionError when ``m`` is 0 (inherited from the % operator).
    """
    # The comparison already yields a bool; no if/else-return-True/False needed.
    return n % m == 0


result = is_divisible(8, 4)
print(result)
/**
 * Returns true when `input` is a number (other than NaN) or a non-blank
 * numeric string.
 *
 * Fix: the previous `!isNaN(input)` relied on implicit coercion, which
 * wrongly accepted '', whitespace-only strings, null, booleans and [] —
 * all of which coerce to valid numbers. Non-string, non-number values are
 * now rejected outright, and strings must be non-blank.
 */
function isNumber(input) {
  if (typeof input === 'number') return !isNaN(input)
  return typeof input === 'string' && input.trim() !== '' && !isNaN(input)
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.