text
stringlengths 1
1.05M
|
|---|
<reponame>ajayaradhya/portfolio-app
import React from 'react'
import Icon1 from '../../images/svg-2.svg'
import Icon2 from '../../images/svg-3.svg'
import Icon3 from '../../images/svg-4.svg'
import { ServicesCard, ServicesContainer, ServicesIcon, ServicesP, ServicesWrapper, ServicesH1, ServicesH2} from './ServiceElements'
const Services = () => {
return (
<ServicesContainer id='services'>
<ServicesH1>Our Services</ServicesH1>
<ServicesWrapper>
<ServicesCard>
<ServicesIcon src={Icon1} />
<ServicesH2>Reduce Expenses</ServicesH2>
<ServicesP>We help in your fuss and increase revenue</ServicesP>
</ServicesCard>
<ServicesCard>
<ServicesIcon src={Icon2} />
<ServicesH2>Virtual Offices</ServicesH2>
<ServicesP>Some random description goes here</ServicesP>
</ServicesCard>
<ServicesCard>
<ServicesIcon src={Icon3} />
<ServicesH2>Premiun Benefits</ServicesH2>
<ServicesP>One more random quote goes here</ServicesP>
</ServicesCard>
</ServicesWrapper>
</ServicesContainer>
)
}
export default Services
|
<reponame>zrwusa/expo-bunny
export * from './FollowUpSearchBar';
|
#!/bin/bash
# FIX: shebang was /bin/sh, but the C-style for-loop below is a bash/ksh
# feature; on dash (Debian/Ubuntu's /bin/sh) this script would fail.

# loop with counter
for count in $(seq 10)
do
    echo "rida $count"
done
echo "-------------------"

# foreach filename in the current directory
for file in *
do
    # echo "found file: $file"
    # Quote the variable so names with spaces survive word splitting.
    file "$file"
done
echo "-------------------"

# Iterate files starting with "p". Using the glob directly instead of
# $(ls -a p*) avoids breaking on filenames containing whitespace.
for file in p*
do
    echo "file: $file"
done
echo "-------------------"

# foreach command line argument (a bare `for var` iterates over "$@")
set yks kaks kolm neli
for var #in $@
do
    echo parameeter $var
done

# C-style arithmetic loop (bash/ksh only)
for ((a=1; a<10; a=a+1))
do
    echo number $a
done
|
<gh_stars>1-10
// Build pipeline: compile CoffeeScript sources into a JSUFON font bundle.
const gulp = require('gulp');
const karma = require('karma').server;
const operation = require('./operationalyzer');
const jsufon = require('./jsufonify');

// Lazy-load every gulp-* plugin onto `$` (e.g. $.coffee, $.concat, $.util).
const $ = require('gulp-load-plugins')({ pattern: ['gulp-*'] });

// coffee: compile src -> operationalyzer -> concat into font.json -> jsufonify -> dist/
gulp.task('coffee', () => gulp
  .src('src/**/*.coffee')
  .pipe($.coffee({ bare: true }).on('error', $.util.log))
  .pipe(operation())
  .pipe($.concat('font.json'))
  .pipe(jsufon())
  .pipe(gulp.dest('dist/')));

// lint: report source size, then coffeelint; fail the task on warnings.
gulp.task('lint', () => gulp
  .src(['src/**/*.coffee'])
  .pipe($.size())
  .pipe($.coffeelint())
  .pipe($.coffeelint.reporter('default'))
  .pipe($.coffeelint.reporter('failOnWarning')));

// build: run lint and coffee (gulp 3 dependency-array syntax).
gulp.task('build', ['lint', 'coffee'], (done) => done());

// watch: recompile whenever anything under src/ changes.
gulp.task('watch', () => {
  gulp.watch(['src/**/*'], ['coffee']);
});
|
/**
 * Find the most frequent word (case-insensitive) in a space-separated string.
 *
 * Uses a Map rather than a plain object so that words which collide with
 * Object.prototype keys ("constructor", "toString", ...) are counted
 * correctly — with a plain object, `frequencies[word]++` on "constructor"
 * would produce NaN.
 *
 * Ties keep the first word encountered (same as the original scan order).
 *
 * @param {string} text - input text; words are split on single spaces
 * @returns {{word: string, frequency: number}} the winner and its count
 */
function findMostFrequentWord(text) {
  const frequencies = new Map();
  for (const raw of text.split(' ')) {
    const word = raw.toLowerCase();
    frequencies.set(word, (frequencies.get(word) ?? 0) + 1);
  }

  let mostFrequentWord = '';
  let mostFrequentFrequency = 0;
  for (const [word, frequency] of frequencies) {
    if (frequency > mostFrequentFrequency) {
      mostFrequentWord = word;
      mostFrequentFrequency = frequency;
    }
  }
  return { word: mostFrequentWord, frequency: mostFrequentFrequency };
}

// Preserve the original script behavior (print the winner), but guard the
// free variable `input` so the helper is reusable where no `input` exists —
// the original threw a ReferenceError in that case.
if (typeof input !== 'undefined') {
  const { word, frequency } = findMostFrequentWord(input);
  console.log(`${word}: ${frequency}`);
}
|
#!/usr/bin/env bash
# Start a local ganache-cli Ethereum node for development:
#   -d            deterministic account generation
#   -p 7545       listen on port 7545
#   -h 0.0.0.0    bind all interfaces (reachable from other hosts/containers)
#   -i 577        network id 577
#   -e 1000       fund each generated account with 1000 ETH
# The fixed mnemonic pins the generated accounts across restarts.
ganache-cli -d -p 7545 -h 0.0.0.0 -i 577 -e 1000 \
--mnemonic="guess tonight return rude vast goat shadow grant comfort december uniform bronze"
|
let urlPattern = new RegExp('^(https?:\\/\\/)[\\w.-]+(\\.[\\w\\.-]+)+[\\w\\-\\._~:/?#[\\]@!\\$&\'\\(\\)\\*\\+,;=.]+$');
|
<reponame>Hannah-Abi/python-pro-21
import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load_module, reload_module, get_stdout, check_source
from functools import reduce
from random import randint
exercise = 'src.change_value_of_item'
def f(d):
    """Render an iterable of strings as one newline-separated block."""
    separator = "\n"
    return separator.join(d)
def getcor(l):
    """Build the expected output rows for the exercise.

    Starting from the list [1, 2, 3, 4, 5], consume (index, value) pairs
    from `l` until the -1 sentinel: each pair overwrites one slot, and the
    list's repr is recorded after every update.
    """
    current = list(range(1, 6))
    snapshots = []
    pos = 0
    while l[pos] != -1:
        index, value = l[pos], l[pos + 1]
        current[index] = value
        snapshots.append(str(current))
        pos += 2
    return snapshots
@points('4.change_value_of_item')
class ChangeValueOfItemTest(unittest.TestCase):
    """TMC checks for the 'change value of item' exercise.

    The student's module is executed with patched ``input()``; the rows it
    prints are compared against the expected list states from ``getcor``.
    """

    @classmethod
    def setUpClass(cls):
        # Load the student's module once, feeding only the "-1" terminator so
        # the import itself does not block on input().
        with patch('builtins.input', side_effect =["-1"]):
            cls.module = load_module(exercise, 'en')

    def test_inputs_1(self):
        # Single (index, value) update: set slot 0 to 100, then terminate.
        values = (0,100,-1)
        with patch('builtins.input', side_effect = [str(x) for x in values]):
            reload_module(self.module)
        output = get_stdout()
        output_list = output.split("\n")
        cor = getcor(values)
        mssage = """\nNote, that, in this exercise, any code SHALL NOT BE PLACED inside
if __name__ == "__main__":
block
"""
        self.assertTrue(len(output)>0, f"Your program does not print out anything when the input is {values}\n{mssage}")
        self.assertEqual(len(output_list), len(cor), f"Your program should print out {len(cor)} rows, now it prints out {len(output_list)} rows, when the input is: {values}")
        # Compare row by row; r tracks the 1-based row number for messages.
        r = 1
        for l1,l2 in zip(cor, output_list):
            self.assertEqual(l1.strip(), l2.strip(),
                f"The print out in row {r}: is incorrect, program should print out\n{l1}\nbut it prints out\n{l2}\nwhen the input is {values}")
            r += 1

    def test_inputs_2(self):
        # Three successive updates followed by the -1 terminator.
        values = (1,25,3,333,2,-543,-1)
        with patch('builtins.input', side_effect = [str(x) for x in values]):
            reload_module(self.module)
        output = get_stdout()
        output_list = output.split("\n")
        cor = getcor(values)
        self.assertEqual(len(output_list), len(cor), f"Your program should print out {len(cor)} rows, now it prints out {len(output_list)} rows, when the input is: {values}")
        r = 1
        for l1,l2 in zip(cor, output_list):
            self.assertEqual(l1.strip(), l2.strip(),
                f"The print out in row {r}: is incorrect, program should print out\n{l1}\nbut it prints out\n{l2}\nwhen the input is {values}")
            r += 1
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
package com.java.study.algorithm.zuo.fsenior.class01;
/**
 * Placeholder for the "Distinct Subsequences" algorithm exercise —
 * implementation not yet written.
 */
public class Code04_DistinctSubseq{
}
|
<filename>src/resources/simulators/valueObjects/simulatorIdentifier.test.ts<gh_stars>0
import { expect } from 'chai';
import fc from 'fast-check';
import { ErrorCode } from '../../../core/AppError';
import isURL from 'validator/lib/isURL';
import { SimulatorIdentifier } from './simulatorIdentifier';
describe('SimulatorIdentifier ValueObject', function () {
  describe('#create', function () {
    // Fixed typo in the test description: "lenght" -> "length".
    it(`when creating without input, expect SimulatorIdentifier to have a new identifier with length of 6.`, function () {
      //Act
      const [simulatorIdentifier] = SimulatorIdentifier.create();
      //Assert
      expect(simulatorIdentifier?.value).to.be.a('string').with.lengthOf(6);
    });

    it(`when creating with a string of length 6, expect SimulatorIdentifier to have the same value.`, function () {
      fc.assert(
        //Arrange: any 6-char string whose trimmed length is still 6
        fc.property(
          fc
            .string({ minLength: 6, maxLength: 6 })
            .filter(s => s.trim().length === 6),
          identifier => {
            //Act
            const [simulatorIdentifier] = SimulatorIdentifier.create(identifier);
            //Assert
            expect(simulatorIdentifier?.value).to.be.equal(identifier);
          }
        )
      );
    });

    it(`when creating with a string of length not equal 6, expect validation error.`, function () {
      fc.assert(
        //Arrange: non-empty strings whose trimmed length differs from 6
        fc.property(
          fc.string({ minLength: 1 }).filter(s => s.trim().length !== 6),
          identifier => {
            //Act
            const [, error] = SimulatorIdentifier.create(identifier);
            //Assert
            expect(error).to.have.property('code').equal(ErrorCode.VALIDATION_ERROR);
          }
        )
      );
    });
  });

  describe('#toDto', function () {
    it(`when called, expect an object with only 'simulatorIdentifier' key.`, function () {
      fc.assert(
        //Arrange
        fc.property(
          fc
            .string({ minLength: 6, maxLength: 6 })
            .filter(s => s.trim().length === 6),
          identifier => {
            const [simulatorIdentifier] = SimulatorIdentifier.create(identifier);
            //Act
            const dto = simulatorIdentifier?.toDto();
            //Assert: exactly one key, and it is 'identifier'
            expect(dto).to.have.property('identifier');
            expect(Object.keys(dto)).to.have.lengthOf(1);
          }
        )
      );
    });
  });
});
|
<reponame>bitbrain/braingdx
package de.bitbrain.braingdx.input.keyboard;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.InputAdapter;
import de.bitbrain.braingdx.ui.Navigateable;
/**
* Provides keyboard support for a given {@link Navigateable}
*/
public class NavigateableKeyboardInput extends InputAdapter {
private final Navigateable navigateable;
public NavigateableKeyboardInput(Navigateable navigateable) {
this.navigateable = navigateable;
}
@Override
public boolean keyDown(int keycode) {
if (keycode == Input.Keys.S || keycode == Input.Keys.DOWN || keycode == Input.Keys.A || keycode == Input.Keys.LEFT) {
navigateable.next();
return true;
}
if (keycode == Input.Keys.W || keycode == Input.Keys.UP || keycode == Input.Keys.D || keycode == Input.Keys.RIGHT) {
navigateable.previous();
return true;
}
if (keycode == Input.Keys.ENTER) {
navigateable.enter();
return true;
}
return false;
}
}
|
<reponame>cliffclick/h2osql<gh_stars>0
package org.cliffc.sql;
import org.joda.time.DateTime;
import water.*;
import water.fvec.*;
import water.rapids.Merge;
import water.nbhm.NonBlockingHashMapLong;
import water.util.SB;
import java.util.Arrays;
/**
def q11 = count[person1, person2, person3:
person_knows_person(person1, person2)
and person_knows_person(person2, person3)
and person_knows_person(person1, person3)
]
Answer H2O 20CPU DOVE0
SF0.1: 200280 0.000 sec 5.350 sec
SF1 : 3107478 0.015 sec
SF10 : 37853736 0.283 sec
SF100: 487437702 4.365 sec
3.930 sec using 32bit person ids
*/
public class TSMB11 implements TSMB.TSMBI {
    @Override public String name() { return "TSMB11"; }

    // Set true to print per-phase wall-clock timings to stdout.
    static final boolean PRINT_TIMING = false;

    // -----------------------------------------------------------------
    // Do triangles the "obvious" H2O way; parallelize edges; at one node, walk
    // all outgoing edges and look for a hit.  Requires all edges in a vector
    // form, and also a sparse adjacency matrix (hash of sparse edges) which fits
    // on one node.
    @Override public long run() {
        long t0 = System.currentTimeMillis(), t;
        // Count on dense numbers
        // ForAll P1s...  dp1/dp2 are the two endpoint columns of the
        // person_knows_person edge frame.
        Vec p1s = TSMB.PERSON_KNOWS_PERSON.vec("dp1");
        Vec p2s = TSMB.PERSON_KNOWS_PERSON.vec("dp2");
        long cnt = new CountI().doAll(p1s,p2s)._cnt;
        if( PRINT_TIMING ) { t=System.currentTimeMillis(); System.out.println("ForAll P1,P2,P3 "+(t-t0)+" msec"); t0=t; }
        return cnt;
    }

    // MRTask mapped over edge chunks: for each edge (p1,p2) count the common
    // neighbours p3 via intersecting the two adjacency bitsets; partial counts
    // are summed in reduce().
    private static class CountI extends MRTask<CountI> {
        long _cnt;  // per-chunk count, accumulated across chunks in reduce()
        @Override public void map( Chunk p1s, Chunk p2s ) {
            long cnt=0;
            for( int i=0; i<p1s._len; i++ ) {
                int p1 = (int)p1s.at8(i), p2 = (int)p2s.at8(i);
                SparseBitSetInt p1ks = TSMB.P_KNOWS_P.get(p1);
                SparseBitSetInt p2ks = TSMB.P_KNOWS_P.get(p2);
                // Walk the smaller, test against larger. Worth about 1.67x speedup at
                // smaller scales, 1.25x at SF100.
                if( p1ks.fast_cardinality() < p2ks.fast_cardinality() )
                    { SparseBitSetInt tmp = p1ks; p1ks = p2ks; p2ks = tmp; }
                for( int p3 : p2ks.rawKeySet() )
                    if( p3!=0 && p1ks.tst(p3) ) // p1 knowns p3 also
                        cnt+=2; // twice, because triangulation
            }
            _cnt=cnt;
        }
        @Override public void reduce( CountI C ) { _cnt += C._cnt; }
    }
}
|
<filename>IGRP-Template/src/main/java/nosi/webapps/catalogo_igrp/pages/group_components/Group_componentsController.java
package nosi.webapps.catalogo_igrp.pages.group_components;
import nosi.core.webapp.Controller;
import nosi.core.webapp.databse.helpers.ResultSet;
import nosi.core.webapp.databse.helpers.QueryInterface;
import java.io.IOException;
import nosi.core.webapp.Core;
import nosi.core.webapp.Response;
/*----#start-code(packages_import)----*/
/*----#end-code----*/
/**
 * IGRP-generated controller for the "Group_components" page.
 *
 * Every action loads the page model and then redirects to its target page;
 * custom logic belongs strictly between the matching
 * "start-code"/"end-code" marker comments — everything outside those
 * markers may be overwritten by the IGRP code generator, so do not edit it.
 */
public class Group_componentsController extends Controller {

    /** Default action: load the model and render the page view. */
    public Response actionIndex() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        Group_componentsView view = new Group_componentsView();
        /*----#start-code(index)----*/
        /*----#end-code----*/
        view.setModel(model);
        return this.renderView(view);
    }

    public Response actionPrimero1() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Home","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(primero1)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Home","index", this.queryString());
    }

    public Response actionSegundo() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Group_components","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(segundo)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Group_components","index", this.queryString());
    }

    public Response actionTerceiro() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Group_components","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(terceiro)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Group_components","index", this.queryString());
    }

    public Response actionQuarto_boton() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Group_components","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(quarto_boton)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Group_components","index", this.queryString());
    }

    public Response actionPrimeiro() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Components_that_need_fields","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(primeiro)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Components_that_need_fields","index", this.queryString());
    }

    public Response actionSegundovertical() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Group_components","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(segundovertical)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Group_components","index", this.queryString());
    }

    public Response actionVerticalmenu_1_button_1() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Group_components","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(verticalmenu_1_button_1)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Group_components","index", this.queryString());
    }

    public Response actionToolbar_button_1() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Group_components","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(toolbar_button_1)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Group_components","index", this.queryString());
    }

    public Response actionTools_button_2() throws IOException, IllegalArgumentException, IllegalAccessException{
        Group_components model = new Group_components();
        model.load();
        /*----#gen-example
        EXAMPLES COPY/PASTE:
        INFO: Core.query(null,... change 'null' to your db connection name, added in Application Builder.
        this.addQueryString("p_id","12"); //to send a query string in the URL
        return this.forward("catalogo_igrp","Group_components","index", model, this.queryString()); //if submit, loads the values ----#gen-example */
        /*----#start-code(tools_button_2)----*/
        /*----#end-code----*/
        return this.redirect("catalogo_igrp","Group_components","index", this.queryString());
    }

    /*----#start-code(custom_actions)----*/
    /*----#end-code----*/
}
|
<reponame>ciaranm/glasgow-constraint-solver
/* vim: set sw=4 sts=4 et foldmethod=syntax : */
#include <gcs/constraints/all_different.hh>
#include <gcs/constraints/comparison.hh>
#include <gcs/constraints/linear_equality.hh>
#include <gcs/problem.hh>
#include <gcs/solve.hh>

#include <cstdlib>
#include <fstream>
#include <iostream>
#include <utility>
#include <vector>

#include <boost/program_options.hpp>
using namespace gcs;
using std::cerr;
using std::cout;
using std::endl;
using std::ifstream;
using std::pair;
using std::stoi;
using std::vector;
namespace po = boost::program_options;
// Magic Square benchmark: build an n x n grid of all-different values whose
// rows, columns and diagonals all sum to the magic constant, then count
// solutions (up to 10000).
auto main(int argc, char * argv[]) -> int
{
    // Options shown in --help output.
    po::options_description display_options{ "Program options" };
    display_options.add_options()
        ("help", "Display help information")
        ("prove", "Create a proof")
        ("all-different", "Use AllDifferent rather than inequalities");

    // All options, including the hidden positional "size".
    po::options_description all_options{ "All options" };
    all_options.add_options()
        ("size", po::value<int>()->default_value(5), "Size of the problem to solve")
        ;
    all_options.add(display_options);

    po::positional_options_description positional_options;
    positional_options
        .add("size", -1);

    po::variables_map options_vars;

    try {
        po::store(po::command_line_parser(argc, argv)
                      .options(all_options)
                      .positional(positional_options)
                      .run(), options_vars);
        po::notify(options_vars);
    }
    catch (const po::error & e) {
        cerr << "Error: " << e.what() << endl;
        cerr << "Try " << argv[0] << " --help" << endl;
        return EXIT_FAILURE;
    }

    if (options_vars.count("help")) {
        cout << "Usage: " << argv[0] << " [options] [size]" << endl;
        cout << endl;
        cout << display_options << endl;
        return EXIT_SUCCESS;
    }

    cout << "Replicating the MiniCP Magic Square benchmark." << endl;
    cout << "See <NAME>, <NAME>, <NAME>:" << endl;
    cout << "\"MiniCP: a lightweight solver for constraint programming.\"" << endl;
    cout << "Math. Program. Comput. 13(1): 133-184 (2021)." << endl;
    cout << "This should take 6042079 recursions with default options." << endl;
    cout << endl;

    int size = options_vars["size"].as<int>();

    // With --prove, emit OPB/VeriPB proof logging files.
    Problem p = options_vars.count("prove") ? Problem{ Proof{ "magic_square.opb", "magic_square.veripb" } } : Problem{ };

    // Magic constant: every line must sum to m = n(n^2 + 1) / 2.
    Integer m{ size * (size * size + 1) / 2 };

    // grid[x][y] holds the cell variables; grid_flat is the same variables
    // in row-major order for the all-different constraint and branching.
    vector<vector<IntegerVariableID> > grid;
    vector<IntegerVariableID> grid_flat;
    for (int x = 0 ; x < size ; ++x) {
        grid.emplace_back();
        for (int y = 0 ; y < size ; ++y) {
            auto var = p.create_integer_variable(1_i, Integer{ size * size });
            grid[x].push_back(var);
            grid_flat.push_back(var);
        }
    }

    // As far as I can tell, the statistics reported in the paper only make
    // sense for non-GAC all-different.
    if (options_vars.count("all-different")) {
        p.post(AllDifferent{ grid_flat });
    }
    else {
        // Pairwise decomposition of all-different.
        for (unsigned x = 0 ; x < grid_flat.size() ; ++x)
            for (unsigned y = x + 1 ; y < grid_flat.size() ; ++y)
                p.post(NotEquals{ grid_flat[x], grid_flat[y] });
    }

    // Row sums.  (FIX: qualified std::move — the unqualified call relied on
    // accidental name leakage; ADL does not find std::move for gcs types.)
    for (int x = 0 ; x < size ; ++x) {
        Linear coeff_vars;
        for (int y = 0 ; y < size ; ++y)
            coeff_vars.emplace_back(1_i, grid[x][y]);
        p.post(LinearEquality{ std::move(coeff_vars), m });
    }

    // Column sums.
    for (int y = 0 ; y < size ; ++y) {
        Linear coeff_vars;
        for (int x = 0 ; x < size ; ++x)
            coeff_vars.emplace_back(1_i, grid[x][y]);
        p.post(LinearEquality{ std::move(coeff_vars), m });
    }

    // Both diagonals.
    Linear coeff_vars1, coeff_vars2;
    for (int xy = 0 ; xy < size ; ++xy) {
        coeff_vars1.emplace_back(1_i, grid[xy][xy]);
        coeff_vars2.emplace_back(1_i, grid[size - xy - 1][xy]);
    }
    p.post(LinearEquality{ std::move(coeff_vars1), m });
    p.post(LinearEquality{ std::move(coeff_vars2), m });

    // Symmetry breaking on the corners.
    p.post(LessThan{ grid[0][size - 1], grid[size - 1][0] });
    p.post(LessThan{ grid[0][0], grid[size - 1][size - 1] });
    p.post(LessThan{ grid[0][0], grid[size - 1][0] });

    p.branch_on(grid_flat);

    unsigned long long n_solutions = 0;
    auto stats = solve_with(p, SolveCallbacks{
        // Keep searching until 10000 solutions have been found.
        .solution = [&] (const State &) -> bool {
            return ++n_solutions < 10000;
        },
        // Binary branching: try var == lower bound, then var != lower bound.
        .guess = [&] (const State & state, IntegerVariableID var) -> vector<Literal> {
            return vector<Literal>{ var == state.lower_bound(var), var != state.lower_bound(var) };
        }
    } );

    cout << stats;

    return EXIT_SUCCESS;
}
|
'use strict';
const _ = require('lodash');
const config = require('../../../config/config');
const {Payment} = require('../../../common/classes/payment.class');
const Factory = require('../../../common/classes/factory');
module.exports = async (ctx, next) => {
try {
const user = Factory.User(ctx, _.get(ctx, config.userStatePath));
const payment = new Payment(ctx);
await payment.RemoveCard(user);
ctx.status = 201;
} catch (err) {
return ctx.throw(err.status || 500, err.message)
}
next();
};
|
<filename>ZQUIKit/ZQHoverViewController/ZQHoverViewController.h
//
//  ZQHoverViewController.h
//  ZQFoundation
//
//  Created by 张泉(Macro) on 2019/10/30.
//
#import "BaseViewController.h"
#import "ZQHoverScrollView.h"

NS_ASSUME_NONNULL_BEGIN

/// View controller hosting a hover (nested-scroll) layout backed by a
/// ZQHoverScrollView.
@interface ZQHoverViewController : BaseViewController

/// Whether the outer container may currently scroll — presumably used to
/// hand scrolling off between outer and inner scroll views (confirm in .m).
@property (nonatomic, assign) BOOL isCanScroll;

/// The scroll view implementing the hover behavior.
@property (nonatomic, strong) ZQHoverScrollView * hoverScrollView;

@end

NS_ASSUME_NONNULL_END
|
package clientAPI.impl;
import java.nio.charset.StandardCharsets;
import javax.smartcardio.Card;
import javax.smartcardio.CardException;
import javax.smartcardio.ResponseAPDU;
import clientAPI.PersonalData;
import clientAPI.impl.OncardAPI.PersonalDataOncard;
/**
* Implementierung von {@code clientAPI.PersonalData}
*
*/
/**
 * Smartcard connector for the personal-data applet: getters read fields as
 * UTF-8 strings, setters write them back.
 *
 * FIX: all setters now encode with {@link StandardCharsets#UTF_8} explicitly.
 * Previously they used {@code String.getBytes()} (platform default charset)
 * while the getters decoded UTF-8, corrupting non-ASCII data on platforms
 * whose default charset is not UTF-8.
 */
public class PersonalDataConnector extends GenericConnector implements PersonalData {

    public PersonalDataConnector(Card card) {
        super(PersonalDataOncard.AID, card);
    }

    /** Fails unless the response status word is 0x9000 (ISO 7816 success). */
    @Override
    protected void checkForError(ResponseAPDU response) throws CardException {
        if (response.getSW() != 0x9000)
            throw new CardException("Error: " + Integer.toHexString(response.getSW() & 0xffff));
    }

    @Override
    public String getFirstName() throws CardException {
        ResponseAPDU response = genericCommand(PersonalDataOncard.GET_FNAME, null);
        return new String(response.getData(), StandardCharsets.UTF_8);
    }

    @Override
    public String getSurname() throws CardException {
        ResponseAPDU response = genericCommand(PersonalDataOncard.GET_SURNAME, null);
        return new String(response.getData(), StandardCharsets.UTF_8);
    }

    @Override
    public String getBirthday() throws CardException {
        ResponseAPDU response = genericCommand(PersonalDataOncard.GET_BDAY, null);
        return new String(response.getData(), StandardCharsets.UTF_8);
    }

    @Override
    public String getLocation() throws CardException {
        ResponseAPDU response = genericCommand(PersonalDataOncard.GET_LOCATION, null);
        return new String(response.getData(), StandardCharsets.UTF_8);
    }

    @Override
    public String getStreet() throws CardException {
        ResponseAPDU response = genericCommand(PersonalDataOncard.GET_STREET, null);
        return new String(response.getData(), StandardCharsets.UTF_8);
    }

    @Override
    public String getPhoneNumber() throws CardException {
        ResponseAPDU response = genericCommand(PersonalDataOncard.GET_PHONENR, null);
        return new String(response.getData(), StandardCharsets.UTF_8);
    }

    /** Returns the raw photo bytes (no text decoding). */
    @Override
    public byte[] getPhoto() throws CardException {
        ResponseAPDU response = genericCommand(PersonalDataOncard.GET_PHOTO, null);
        return response.getData();
    }

    @Override
    public void setFirstName(String newFirstName) throws CardException {
        genericCommand(PersonalDataOncard.SET_FNAME, newFirstName.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public void setSurname(String newSurname) throws CardException {
        genericCommand(PersonalDataOncard.SET_SURNAME, newSurname.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public void setBirthday(String newBirthday) throws CardException {
        genericCommand(PersonalDataOncard.SET_BDAY, newBirthday.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public void setLocation(String newLocation) throws CardException {
        genericCommand(PersonalDataOncard.SET_LOCATION, newLocation.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public void setStreet(String newStreet) throws CardException {
        genericCommand(PersonalDataOncard.SET_STREET, newStreet.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public void setPhoneNumber(String newPhoneNumber) throws CardException {
        genericCommand(PersonalDataOncard.SET_PHONENR, newPhoneNumber.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public void setPhoto(byte[] newPhoto) throws CardException {
        genericCommand(PersonalDataOncard.SET_PHOTO, newPhoto);
    }
}
|
# Data-science workstation provisioning: system packages, Anaconda env,
# ML/NLP/optimization libraries, and a TLS-enabled Jupyter server.
sudo apt update --fix-missing && sudo apt upgrade -y
# force the package manager to find any missing dependencies or broken packages and install them
sudo apt-get install -f

export CONDA_ENV=snowflakes

# --- base build tooling ---
sudo apt-get update
sudo apt-get -y install git-all
sudo apt-get -y install build-essential
sudo apt-get -y install make
sudo apt-get -y install libtool m4 automake
sudo apt-get -y install autoconf
sudo apt-get -y install tmux
sudo apt-get -y install texlive-*

# --- Anaconda ---
export CONDA_BIN=Anaconda3-2021.05-Linux-x86_64.sh
wget https://repo.anaconda.com/archive/$CONDA_BIN
bash $CONDA_BIN
echo "ready"
#LOGOUT AND BACK IN

conda upgrade -y setuptools
conda remove -y --name $CONDA_ENV --all
conda create -y -n $CONDA_ENV python=3.8
conda activate $CONDA_ENV
conda install -y pip Flake8 black sphinx IPython Cython
conda install -y pandas pandas-profiling pandasql
conda install -y numpy scipy scikit-learn
conda install -y matplotlib seaborn
conda install -y pyviz
pip install modin
pip install rich
pip install numba
pip install mlflow kedro-mlflow kedro
pip install tensorflow-gpu keras
pip install tensorflow-probability tensorflow_decision_forests
pip install lightgbm catboost category_encoders
pip install --ignore-installed great-expectations
pip install 'ray[default]' xgboost_ray
#wurlitzer boto3 ipywidgets
#conda install -y -c conda-forge google-cloud-storage
#The modin.pandas DataFrame is an extremely light-weight parallel DataFrame. Modin transparently distributes
#the data and computation so that all you need to do is continue using the pandas API as you were before installing Modin.
#conda install -y -c quantopian pandas-datareader #The Pandas datareader is a sub package that allows one to create a dataframe from various internet datasources, currently including:Yahoo! FinanceGoogle FinanceSt.Louis FED (FRED)Kenneth French's data libraryWorld BankGoogle Analytics
#datashader altair
#conda install -y scikit-learn-intelex # Intel Data Science Library
conda install -y dill #dill extends python's pickle module for serializing and de-serializing python objects to the majority of the built-in python types. Serialization is the process of converting an object to a byte stream, and the inverse of which is converting a byte stream back to a python object hierarchy.
#conda install -n $CONDA_ENV -c rapidsai -c nvidia -c conda-forge blazingsql=0.19 cudf=0.19 python=3.8 cudatoolkit=10.1
pip install umap-learn
pip install Optuna
pip install MIDASpy # Multiple imputation with autoencoders
pip install missingno # simple vis for missingness
pip install tsfresh prophet orbit-ml
#pip install --upgrade jax jaxlib==0.1.67+cuda111 -f https://storage.googleapis.com/jax-releases/jax_releases.html

# L1 and Convex Optimization
pip install osqp
#https://pymanopt.github.io/
#Riemannian Optimisation with Pymanopt for Inference in MoG models
#https://pymanopt.github.io/MoG.html
# FIX: pip has no -y flag; the original "pip install -y --user pymanopt" errored out.
pip install --user pymanopt
conda install -y -c conda-forge autograd

#PyEMD is a Python wrapper for Ofir Pele and Michael Werman's implementation of the Earth Mover's Distance that allows it to be used with NumPy
conda install -y pyemd
conda install -y -c conda-forge pot
pip install dit
conda install -y pillow #Imaging - from PIL import Image
conda install -y pystan

# NLP
conda install -y gensim
conda install -y nltk
conda install -y spacy
# FIX: spaCy 3 removed the bare "en" shortcut; download the small English model instead.
python -m spacy download en_core_web_sm
pip3 install torch
pip install pyLDAvis
pip install transformers #State-of-the-art Natural Language Processing for TensorFlow 2.0 and PyTorch

#spark
# cd ~/opt/
# wget https://www.apache.org/dyn/closer.lua/spark/spark-X.Y.Z/spark-X.Y.Z-bin-hadoop3.2.tgz
# SPARK_BIN=spark-X.Y.Z-bin-hadoop3.2.tgz
# tar -xf $SPARK_BIN
# cd $SPARK_BIN
# cd sbin
# ./start-master.sh
# MASTER_NODE=hostname
# ./start-worker.sh spark://$MASTER_NODE:7077 -c 2
conda install -y pyspark

#time-freq
git clone https://github.com/scikit-signal/pytftb
cd pytftb
pip install -r requirements.txt
python setup.py install
cd ~

git clone https://github.com/chokkan/liblbfgs.git
cd liblbfgs/
./autogen.sh
./configure
make
sudo make install
cd ~

git clone https://rtaylor@bitbucket.org/rtaylor/pylbfgs.git
# FIX: was "cd pytftb/", which rebuilt the wrong project instead of pylbfgs.
cd pylbfgs/
python setup.py install

conda activate $CONDA_ENV
# FIX: append (>>) rather than clobbering ~/.bashrc, and expand the variable
# now. The original single-quoted heredoc wrote literal backticks, which the
# shell would have executed as a command on every login.
echo "conda activate $CONDA_ENV" >> ~/.bashrc

#Jupyter
cd $HOME
mkdir certs
cd certs
conda install -y jupyter nbconvert jupyterthemes ipykernel
pip install jupyterlab
jupyter labextension install @jupyter-widgets/jupyterlab-manager
pip install jupyter_contrib_nbextensions && jupyter contrib nbextension install --user
conda install -y -c conda-forge jupyterthemes
conda install -y -c conda-forge jupyter_contrib_nbextensions
conda install -y -c conda-forge jupyter_nbextensions_configurator
pip3 install jupyter-tabnine
# Note that Kite is in transition 10-2021
bash -c "$(wget -q -O - https://linux.kite.com/dls/linux/current)"
pip install qgrid
jupyter nbextension enable --py widgetsnbextension
jupyter nbextension enable --py --sys-prefix qgrid
jupyter nbextension install --py jupyter_tabnine
jupyter nbextension enable --py jupyter_tabnine
jupyter serverextension enable --py jupyter_tabnine

# Self-signed certificate for TLS, then generate and edit the Jupyter config.
openssl req -x509 -nodes -days 365 -newkey rsa:1024 -subj "/C=US/ST=IL/L=Chicago" -keyout mycert.pem -out mycert.pem
jupyter notebook --generate-config
JUPYTER_CONFIG='/home/bruce/.jupyter/jupyter_notebook_config.py'
echo "c = get_config()">>$JUPYTER_CONFIG
echo "# Notebook config this is where you saved your pem cert">>$JUPYTER_CONFIG
echo "c.NotebookApp.certfile = u'/home/bruce/certs/mycert.pem'">>$JUPYTER_CONFIG
echo "# on all IP addresses of your instance">>$JUPYTER_CONFIG
echo "c.NotebookApp.ip = '*'">>$JUPYTER_CONFIG
echo "# Don't open browser by default">>$JUPYTER_CONFIG
echo "c.NotebookApp.open_browser = False">>$JUPYTER_CONFIG
echo "# Fix port to 8080">>$JUPYTER_CONFIG
echo "c.NotebookApp.port = 8080">>$JUPYTER_CONFIG
echo "c.NotebookApp.allow_remote_access = True">>$JUPYTER_CONFIG

cd $HOME
tmux
jupyter notebook &
|
// Sum the numeric elements of an array; an empty array yields 0.
const arraySum = (arr) => {
  let total = 0;
  for (const value of arr) {
    total += value;
  }
  return total;
};

// example
arraySum([1, 2, 3, 4, 5]); // returns 15
|
import { useParams } from "react-router-dom";
import { useEffect, useState } from "react";
import { RecipeModel } from "../../common/models/recipe.form";
import { LayoutPage } from "../../common/layout/layout-page";
import Box from "@mui/material/Box";
import Grid from "@mui/material/Grid";
import Typography from "@mui/material/Typography";
import Divider from "@mui/material/Divider";
import { recipeEndpointsService } from "../../services/endpoints/recipe.enpoints.service";
import IconButton from "@mui/material/IconButton";
import { GoogleIconsInheritance, Icons } from "../../common/app/google.icon";
import { RoutingServices } from "../../services/routing.service";
import { useNavigate } from "react-router-dom";
import { APP_ROUTES } from "../../utils/routing/app-routes";
import { RecipeStepsView } from "../../components/recipe.steps/recipe.steps.views";
import { IngredientView } from "../../components/recipe.ingredients/ingredients.view";
// Read-only page for a single recipe: header with name and an edit button,
// the description, then ingredients and preparation steps.
export default function RecipeView(){
    // Drives the LayoutPage loading indicator until the first fetch resolves.
    const [initialLoading, setInitialLoading] = useState(true);
    // Recipe being displayed. Starts as an empty object cast to RecipeModel,
    // so fields such as `name` render as undefined until data arrives.
    const [recipe, setRecipeState] = useState({} as RecipeModel);
    // Recipe id taken from the route parameters.
    const { recipeId } = useParams();
    const routingService = RoutingServices;
    const navigate = useNavigate();
    // Fetch the recipe once on mount; a null/undefined response keeps the
    // previous (empty) state. NOTE(review): no error handler — a rejected
    // promise leaves the page in the loading state. Confirm intended.
    useEffect(()=>{
        recipeEndpointsService.getRecipeByIdAsync(recipeId??'',true)
        .then(response => {
            setRecipeState(response??recipe);
            setInitialLoading(false);
        });
    },[])
    // Navigate to the update form for the current recipe.
    const editRecipe = () =>{
        let route = routingService.generateRoute(APP_ROUTES.RECIPES_UPDATE, {recipeId});
        navigate(route);
    }
    // NOTE(review): the edit button's aria-label uses Icons.check while the
    // rendered icon is Icons.edit — confirm which is intended.
    return <LayoutPage params={{recipeId}} loadingPage={initialLoading} >
        <Box sx={{ width: '100%', bgcolor: 'background.paper' }}>
            <Box sx={{ my: 3, mx: 2 }}>
                <Grid container alignItems="center">
                    <Grid item xs>
                        <Typography gutterBottom variant="h4" component="div">
                            {recipe.name}
                        </Typography>
                    </Grid>
                    <Grid item>
                        <IconButton color="primary" sx={{ p: '10px' }} aria-label={Icons.check} onClick={editRecipe} >
                            <GoogleIconsInheritance iconName={Icons.edit} />
                        </IconButton>
                    </Grid>
                </Grid>
                <Typography color="text.secondary" variant="body2">
                    {recipe.description}
                </Typography>
            </Box>
            <Divider variant="middle" />
            <IngredientView recipeProducts={recipe.recipeProducts} />
            <Divider variant="middle" />
            <RecipeStepsView steps={recipe.steps} />
        </Box>
    </LayoutPage>
}
|
import { Cartesian3, Cartographic, Math, ImageryLayer } from 'cesium'
import CoordinateTransform from '@/libs/utils/CoordinateTransform'
// Geodetic datum of an imagery layer's tiles.
export enum CoordinateType {
  // Standard GPS datum; no correction needed.
  Wgs84,
  // Chinese "Mars coordinates" (GCJ-02), used by most Chinese map providers.
  Gcj02,
  // Baidu's datum (BD-09), a further offset applied on top of GCJ-02.
  Bd09,
}
// Wraps a Cesium ImageryLayer whose tiles are published in GCJ-02 or BD-09 so
// they line up on a WGS-84 globe. When `correctOffset` is enabled, the
// layer's tiling-scheme projection is monkey-patched: project() first shifts
// WGS-84 coordinates into the layer's datum, and unproject() shifts them back.
class ImageryLayerCoordinateTransform {
  protected layer: ImageryLayer
  // WGS-84 -> layer datum (degrees in, degrees out).
  private projectionTransform: (x: number, y: number) => [number, number]
  // Layer datum -> WGS-84 (degrees in, degrees out).
  private unprojectionTransform: (x: number, y: number) => [number, number]
  // Property names under which the original project/unproject functions are
  // stashed on the projection object so they can be restored later.
  private static OLD_PROJECT_PROPERTY_NAME = 'old_project'
  private static OLD_UNPROJECT_PROPERTY_NAME = 'old_unproject'
  private _coordinateType: CoordinateType
  // Datum of the layer's tiles; fixed at construction time.
  public get coordinateType(): CoordinateType {
    return this._coordinateType
  }
  private _correctOffset: boolean = false
  // Whether the datum correction is currently applied.
  public get correctOffset(): boolean {
    return this._correctOffset
  }
  // Enables/disables the correction by patching or restoring the projection's
  // project/unproject methods. Setting the same value twice is a no-op.
  public set correctOffset(val: boolean) {
    if (this._correctOffset === val) {
      return
    }
    this._correctOffset = val
    const { layer } = this
    const provider = layer.imageryProvider
    if (val) {
      const webMercatorTilingScheme = provider.tilingScheme
      const projection = (webMercatorTilingScheme as any).projection
      const { projectionTransform, unprojectionTransform } = this
      // Save the original project() then replace it with a version that
      // converts the cartographic position into the layer's datum first.
      ;(projection as any)[
        ImageryLayerCoordinateTransform.OLD_PROJECT_PROPERTY_NAME
      ] = projection.project
      projection.project = function (cartographic: Cartographic) {
        const point = projectionTransform(
          Math.toDegrees(cartographic.longitude),
          Math.toDegrees(cartographic.latitude)
        )
        return (projection as any)[
          ImageryLayerCoordinateTransform.OLD_PROJECT_PROPERTY_NAME
        ](new Cartographic(Math.toRadians(point[0]), Math.toRadians(point[1])))
      }
      // Save the original unproject() then replace it with a version that
      // converts the result back from the layer's datum to WGS-84.
      ;(projection as any)[
        ImageryLayerCoordinateTransform.OLD_UNPROJECT_PROPERTY_NAME
      ] = projection.unproject
      projection.unproject = function (cartesian: Cartesian3) {
        const cartographic = (projection as any)[
          ImageryLayerCoordinateTransform.OLD_UNPROJECT_PROPERTY_NAME
        ](cartesian)
        const point = unprojectionTransform(
          Math.toDegrees(cartographic.longitude),
          Math.toDegrees(cartographic.latitude)
        )
        return new Cartographic(
          Math.toRadians(point[0]),
          Math.toRadians(point[1])
        )
      }
    } else {
      // Restore the original methods if they were previously patched.
      const webMercatorTilingScheme = provider.tilingScheme
      const projection = webMercatorTilingScheme.projection
      const oldProject = (projection as any)[
        ImageryLayerCoordinateTransform.OLD_PROJECT_PROPERTY_NAME
      ]
      if (oldProject) {
        projection.project = oldProject
      }
      const oldUnproject = (projection as any)[
        ImageryLayerCoordinateTransform.OLD_UNPROJECT_PROPERTY_NAME
      ]
      if (oldUnproject) {
        projection.unproject = oldUnproject
      }
    }
  }
  /**
   * @param layer the imagery layer whose tiles need datum correction
   * @param coordinateType datum of the layer's tiles
   * @param defaultCorrentOffset apply the correction immediately when true
   */
  constructor(
    layer: ImageryLayer,
    coordinateType: CoordinateType,
    defaultCorrentOffset: boolean = false
  ) {
    this.layer = layer
    this._coordinateType = coordinateType
    // Pick the transform pair for the layer's datum; BD-09 is reached by
    // chaining through GCJ-02. WGS-84 uses identity transforms.
    if (coordinateType === CoordinateType.Gcj02) {
      this.projectionTransform = CoordinateTransform.wgs84togcj02
      this.unprojectionTransform = CoordinateTransform.gcj02towgs84
    } else if (coordinateType === CoordinateType.Bd09) {
      this.projectionTransform = (x, y) => {
        const [xx, yy] = CoordinateTransform.wgs84togcj02(x, y)
        return CoordinateTransform.gcj02tobd09(xx, yy)
      }
      this.unprojectionTransform = (x, y) => {
        const [xx, yy] = CoordinateTransform.bd09togcj02(x, y)
        return CoordinateTransform.gcj02towgs84(xx, yy)
      }
    } else {
      this.projectionTransform = (x, y) => [x, y]
      this.unprojectionTransform = (x, y) => [x, y]
    }
    if (defaultCorrentOffset) {
      this.correctOffset = defaultCorrentOffset
    }
  }
}
export default ImageryLayerCoordinateTransform
|
class GuessingGame:
    """Number-guessing helper that grades guesses against a fixed target.

    Besides grading each guess, it tracks the tightest bounds implied by
    the guesses seen so far (smallest over-guess, largest under-guess).
    """

    def __init__(self, target: int):
        # The secret number players are trying to find.
        self._target = target
        # Smallest guess known to be above the target (upper bound).
        self._high = float('inf')
        # Largest guess known to be below the target (lower bound).
        self._low = float('-inf')

    def guess(self, number: int) -> str:
        """Grade ``number``: 'BINGO' on a hit, else 'HIGH' or 'LOW'.

        Over- and under-guesses tighten the stored bounds as a side effect.
        """
        if number == self._target:
            return "BINGO"
        if number > self._target:
            self._high = min(self._high, number)
            return "HIGH"
        self._low = max(self._low, number)
        return "LOW"
|
#!/bin/bash
# Applies the idsrv schema scripts either to a local PostgreSQL instance
# (no arguments) or to a postgres container running inside a podman pod
# (pod name + env file passed as $1 and $2).
DBNAME="idsrv";
PODNAME=$1
ENVFILE=$2
# Runs one SQL file against $DBNAME. Locally it uses plain psql; with a pod
# it mounts the current directory read-only into a throwaway postgres
# container and runs psql there.
function run_psql_script() {
	if [ "${PODNAME}" == "" ]; then
		psql -d "${DBNAME}" -q -f "$1";
	else
		podman run -it --rm --pod "${PODNAME}" --env-file "${ENVFILE}" -v "$(pwd)":/tmp/context:ro --security-opt label=disable postgres:12.2 psql -h 127.0.0.1 -U postgres -d "${DBNAME}" -f "/tmp/context/${1}"
	fi
}
echo 'creating table...';
run_psql_script "tables/idsrv.signing_key.sql";
echo 'creating / updating functions...';
run_psql_script "funcs/idsrv.delete_signing_key.sql";
run_psql_script "funcs/idsrv.get_signing_keys.sql";
run_psql_script "funcs/idsrv.save_signing_key.sql";
echo "...${DBNAME} updated.";
# for podman - run as:
# ./update_idsrv.sh <pod_name> <path_to_pgbackup_env_file>
|
#!/usr/bin/env bash
# Runs one hyper-parameter tuning job (part 205) for the MMP model on the
# Amazon digital-music dataset, inside the project's virtualenv.
source ~/ENV/bin/activate
cd ~/MultiModesPreferenceEstimation
python tune_parameters.py --data-dir data/amazon/digital_music/ --save-path amazon/digital_music/tuning_general/mmp-part205.csv --parameters config/amazon/digital_music/mmp-part205.yml
|
#!/bin/bash
# ========== Experiment Seq. Idx. 3107 / 60.5.5.0 / N. 0 - _S=60.5.5.0 D1_N=38 a=1 b=-1 c=1 d=1 e=-1 f=-1 D3_N=6 g=1 h=1 i=-1 D4_N=4 j=4 D5_N=0 ==========
# Generated experiment runner: checks preconditions (clean git tree, inputs
# present, not already running/finished), takes a lock, computes metrics for
# this configuration and appends them to the shared ANOVA results file.
# Exit codes understood by the scheduler:
#   160 success, 161 generic error, 162 dirty git tree, 163 already finished,
#   164 missing inputs, 165 lock held elsewhere.
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 3107 / 60.5.5.0 / N. 0 - _S=60.5.5.0 D1_N=38 a=1 b=-1 c=1 d=1 e=-1 f=-1 D3_N=6 g=1 h=1 i=-1 D4_N=4 j=4 D5_N=0 ==========\n\n'
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
RESULTS_DIR="$JBHI_DIR/results"
if [[ "Yes" == "Yes" ]]; then
    SVM_SUFFIX="svm"
    PREDICTIONS_FORMAT="isbi"
else
    SVM_SUFFIX="nosvm"
    PREDICTIONS_FORMAT="titans"
fi
RESULTS_PREFIX="$RESULTS_DIR/deep.38.layer.6.test.4.index.3107.$SVM_SUFFIX"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$RESULTS_PREFIX.finish.txt"
# ...this experiment is a little different --- only one master procedure should run, so there's only a master lock file
METRICS_TEMP_PATH="$RESULTS_DIR/this_results.anova.txt"
METRICS_PATH="$RESULTS_DIR/all_results.anova.txt"
START_PATH="$METRICS_PATH.start.txt"
FINISH_PATH="-"
LOCK_PATH="$METRICS_PATH.running.lock"
LAST_OUTPUT="$METRICS_PATH"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
    echo 'FATAL: there are uncommitted changes in your git sources file' >&2
    echo '       for reproducibility, experiments only run on committed changes' >&2
    echo >&2
    echo '       Git status returned:'>&2
    echo "$GIT_STATUS" >&2
    exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
    if [[ -e "$FINISH_PATH" ]]; then
        echo 'INFO: this experiment has already finished' >&2
        exit 163
    fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
    IFS=':' tokens_of_input=( $LIST_OF_INPUTS )
    input_missing=No
    for input_to_check in ${tokens_of_input[*]}; do
        if [[ ! -e "$input_to_check" ]]; then
            echo "ERROR: input $input_to_check missing for this experiment" >&2
            input_missing=Yes
        fi
    done
    if [[ "$input_missing" != No ]]; then
        exit 164
    fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# EXIT trap: releases the lock (if we took it) and translates the final
# status into the scheduler exit-code convention above; on failure the
# finish marker is removed so the experiment can be retried.
function finish_trap {
    if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
        rmdir "$LOCK_PATH" &> /dev/null
    fi
    if [[ "$FINISH_STATUS" == "165" ]]; then
        echo 'WARNING: experiment discontinued because other process holds its lock' >&2
    else
        if [[ "$FINISH_STATUS" == "160" ]]; then
            echo 'INFO: experiment finished successfully' >&2
        else
            [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
            echo 'ERROR: an error occurred while executing the experiment' >&2
        fi
    fi
    exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so it doubles as a mutex across processes)
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
    LOCK_SUCCESS=Yes
else
    echo 'WARNING: this experiment is already being executed elsewhere' >&2
    FINISH_STATUS="165"
    exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
    if [[ -e "$START_PATH" ]]; then
        echo 'WARNING: this experiment is being restarted' >&2
        STARTED_BEFORE=Yes
    fi
    #...marks start
    date -u >> "$START_PATH"
    echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
if [[ "$STARTED_BEFORE" == "Yes" ]]; then
    # If the experiment was started before, do any cleanup necessary
    echo -n
else
    # First run: write the CSV header of the shared metrics file
    echo "D1_N;D3_N;D4_N;a;b;c;d;e;f;g;h;i;j;m_ap;m_auc;m_tn;m_fp;m_fn;m_tp;m_tpr;m_fpr;k_ap;k_auc;k_tn;k_fp;k_fn;k_tp;k_tpr;k_fpr;isbi_auc" > "$METRICS_PATH"
fi
# Compute the metrics for this configuration's predictions, then append one
# CSV row (configuration factors + metrics) to the shared results file.
python \
    "$SOURCES_GIT_DIR/etc/compute_metrics.py" \
    --metadata_file "$SOURCES_GIT_DIR/data/all-metadata.csv" \
    --predictions_format "$PREDICTIONS_FORMAT" \
    --metrics_file "$METRICS_TEMP_PATH" \
    --predictions_file "$RESULTS_PATH"
EXPERIMENT_STATUS="$?"
echo -n "38;6;4;" >> "$METRICS_PATH"
echo -n "1;-1;1;1;-1;-1;1;1;-1;4;" >> "$METRICS_PATH"
tail "$METRICS_TEMP_PATH" -n 1 >> "$METRICS_PATH"
#
#...starts training
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
    if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
        if [[ "$FINISH_PATH" != "-" ]]; then
            date -u >> "$FINISH_PATH"
            echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
        fi
        FINISH_STATUS="160"
    fi
fi
|
# Load after the other completions to understand what needs to be completed
cite about-plugin
about-plugin 'Automatic completion of aliases'
# References:
# http://superuser.com/a/437508/119764
# http://stackoverflow.com/a/1793178/1228454
# This needs to be a plugin so it gets executed after the completions and the aliases have been defined.
# Bash-it loads its components in the order
# 1) Aliases
# 2) Completions
# 3) Plugins
# 4) Custom scripts
# Automatically add completion for all aliases to commands having completion functions
# Strategy: parse `complete -p` and `alias -p` output with regexes, then for
# each alias whose target command has a completion function, register that
# function (or a generated wrapper that re-inserts the alias's arguments)
# for the alias itself.
function alias_completion {
    local namespace="alias_completion"
    # parse function based completion definitions, where capture group 2 => function and 3 => trigger
    local compl_regex='complete( +[^ ]+)* -F ([^ ]+) ("[^"]+"|[^ ]+)'
    # parse alias definitions, where capture group 1 => trigger, 2 => command, 3 => command arguments
    local alias_regex="alias( -- | )([^=]+)='(\"[^\"]+\"|[^ ]+)(( +[^ ]+)*)'"
    # create array of function completion triggers, keeping multi-word triggers together
    eval "local completions=($(complete -p | sed -Ene "/$compl_regex/s//'\3'/p"))"
    (( ${#completions[@]} == 0 )) && return 0
    # create temporary file for wrapper functions and completions
    local tmp_file; tmp_file="$(mktemp -t "${namespace}-${RANDOM}XXXXXX")" || return 1
    # completion_loader is bash's default "-D" handler (usually _completion_loader)
    local completion_loader; completion_loader="$(complete -p -D 2>/dev/null | sed -Ene 's/.* -F ([^ ]*).*/\1/p')"
    # read in "<alias> '<aliased command>' '<command args>'" lines from defined aliases
    local line; while read line; do
        eval "local alias_tokens; alias_tokens=($line)" 2>/dev/null || continue # some alias arg patterns cause an eval parse error
        local alias_name="${alias_tokens[0]}" alias_cmd="${alias_tokens[1]}" alias_args="${alias_tokens[2]# }"
        # skip aliases to pipes, boolean control structures and other command lists
        # (leveraging that eval errs out if $alias_args contains unquoted shell metacharacters)
        eval "local alias_arg_words; alias_arg_words=($alias_args)" 2>/dev/null || continue
        # avoid expanding wildcards
        read -a alias_arg_words <<< "$alias_args"
        # skip alias if there is no completion function triggered by the aliased command
        if [[ ! " ${completions[*]} " =~ " $alias_cmd " ]]; then
            if [[ -n "$completion_loader" ]]; then
                # force loading of completions for the aliased command
                eval "$completion_loader $alias_cmd"
                # 124 means completion loader was successful
                [[ $? -eq 124 ]] || continue
                completions+=($alias_cmd)
            else
                continue
            fi
        fi
        local new_completion="$(complete -p "$alias_cmd" 2>/dev/null)"
        # create a wrapper inserting the alias arguments if any
        if [[ -n $alias_args ]]; then
            local compl_func="${new_completion/#* -F /}"; compl_func="${compl_func%% *}"
            # avoid recursive call loops by ignoring our own functions
            if [[ "${compl_func#_$namespace::}" == $compl_func ]]; then
                local compl_wrapper="_${namespace}::${alias_name}"
                # the wrapper rewrites COMP_WORDS/COMP_LINE/COMP_POINT as if the
                # user had typed the expanded alias, then delegates to the real
                # completion function
                echo "function $compl_wrapper {
                    (( COMP_CWORD += ${#alias_arg_words[@]} ))
                    COMP_WORDS=($alias_cmd $alias_args \${COMP_WORDS[@]:1})
                    (( COMP_POINT -= \${#COMP_LINE} ))
                    COMP_LINE=\${COMP_LINE/$alias_name/$alias_cmd $alias_args}
                    (( COMP_POINT += \${#COMP_LINE} ))
                    $compl_func
                }" >> "$tmp_file"
                new_completion="${new_completion/ -F $compl_func / -F $compl_wrapper }"
            fi
        fi
        # replace completion trigger by alias
        if [[ -n $new_completion ]]; then
            new_completion="${new_completion% *} $alias_name"
            echo "$new_completion" >> "$tmp_file"
        fi
    done < <(alias -p | sed -Ene "s/$alias_regex/\2 '\3' '\4'/p")
    source "$tmp_file" && rm -f "$tmp_file"
}; alias_completion
|
<gh_stars>10-100
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dbis.piglet.op
/**
 * An exception indicating that the name of the pipe isn't a valid identifier (e.g. it still contains a leading '$').
 *
 * @param msg a message describing the exception; it is appended to the fixed "invalid pipe name: " prefix.
 */
case class InvalidPipeNameException(private val msg: String) extends Exception("invalid pipe name: " + msg)
/**
 * A pipe connects some Pig operator and associates a name to this channel.
 *
 * @param name the name of the pipe
 * @param producer the operator producing the data
 * @param consumer the list of operators reading this data
 */
class Pipe (var name: String, var producer: PigOperator = null, var consumer: List[PigOperator] = List()) extends Serializable {
  override def toString = s"Pipe($name)"
  def canEqual(a: Any) = a.isInstanceOf[Pipe]
  // Equality (and hashCode below) is based on the pipe name only; the
  // producer and consumer lists are deliberately ignored.
  override def equals(that: Any): Boolean =
    that match {
      case that: Pipe => that.canEqual(this) && this.name == that.name
      case _ => false
    }
  override def hashCode = name.hashCode
  // Schema flowing through this pipe, taken from the producer (None if no
  // producer is attached).
  def inputSchema = if (producer != null) producer.schema else None
  def removeConsumer(op:PigOperator): Unit = this.consumer = this.consumer.filterNot(_ == op)
  // Appends `op` as a consumer; filtering it out first keeps the list
  // duplicate-free (and moves an existing `op` to the end).
  def addConsumer(op: PigOperator): Unit = this.consumer = this.consumer.filterNot(_ == op) :+ op
}
object Pipe {
  // Factory mirroring the class constructor and its defaults.
  def apply(n: String, producer: PigOperator = null, consumers: List[PigOperator] = List()): Pipe =
    new Pipe(n, producer, consumers)
  // Extractor yielding (name, producer, consumers) for pattern matching.
  def unapply(p: Pipe): Option[(String, PigOperator, List[PigOperator])] = Some((p.name, p.producer, p.consumer))
  // Placeholder pipe — presumably used where a real pipe is not yet wired up;
  // confirm against callers.
  def dummy = Pipe("dummy")
}
|
#!/bin/bash
source /usr/lib/hustler/bin/qubole-bash-lib.sh
source /usr/lib/qubole/bootstrap-functions/hive/hiveserver2.sh
##
# Installs Hive Glue Catalog Sync Agent
# param1 - Region for AWS Athena. Defaults to us-east-1
# Requires Hive 2.x
#
function install_glue_sync() {
  aws_region=${1:-us-east-1}
  is_master=$(nodeinfo is_master)
  hive_version=$(nodeinfo hive_version)
  # Only run on the cluster master with Hive 2.x.
  # NOTE(review): the version check is for 2.x but all paths below point at
  # /usr/lib/hive1.2 — confirm that is the intended install layout.
  if [[ ${is_master} == "1" && ${hive_version} == 2* ]]; then
    # Athena query results are staged under the account's default S3 location.
    glue_staging_dir=$(nodeinfo s3_default_location)
    glue_staging_dir="${glue_staging_dir}/query_result"
    /usr/lib/hadoop2/bin/hadoop dfs -get s3://paid-qubole/aws_glue_sync/HiveGlueCatalogSyncAgent-1.0-SNAPSHOT-jar-with-dependencies.jar /usr/lib/hive1.2/lib/
    # Add glue sync configurations to hive-site.xml
    # (Refer : https://github.com/awslabs/aws-glue-catalog-sync-agent-for-hive)
    cp /usr/lib/hive1.2/conf/hive-site.xml /usr/lib/hive1.2/conf/hive-site.xml.bak
    xmlstarlet ed --inplace --omit-decl -s '//configuration' -t elem -n "property" -v "" \
               -s '//configuration/property[last()]' -t elem -n "name" -v "hive.metastore.event.listeners" \
               -s '//configuration/property[last()]' -t elem -n "value" -v "com.amazonaws.services.glue.catalog.HiveGlueCatalogSyncAgent" /usr/lib/hive1.2/conf/hive-site.xml
    xmlstarlet ed --inplace --omit-decl -s '//configuration' -t elem -n "property" -v "" \
               -s '//configuration/property[last()]' -t elem -n "name" -v "glue.catalog.athena.jdbc.url" \
               -s '//configuration/property[last()]' -t elem -n "value" -v "jdbc:awsathena://athena.${aws_region}.amazonaws.com:443" /usr/lib/hive1.2/conf/hive-site.xml
    xmlstarlet ed --inplace --omit-decl -s '//configuration' -t elem -n "property" -v "" \
               -s '//configuration/property[last()]' -t elem -n "name" -v "glue.catalog.athena.s3.staging.dir" \
               -s '//configuration/property[last()]' -t elem -n "value" -v "${glue_staging_dir}" /usr/lib/hive1.2/conf/hive-site.xml
    # Restart metastore
    # (unmonitor first so monit does not fight the manual restart)
    monit unmonitor metastore1_2
    export OVERRIDE_HADOOP_JAVA_HOME=/usr/lib/jvm/java-1.8.0
    /usr/lib/hive1.2/bin/thrift-metastore server stop && sleep 5 && /usr/lib/hive1.2/bin/thrift-metastore server start
    monit monitor metastore1_2
    # Restart HiveServer2 as well if it is configured on this node.
    if is_hs2_configured; then
      /usr/lib/hive1.2/bin/hiveserver2-admin stop && sleep 5 && /bin/bash /usr/lib/hive1.2/usr-bin/startHS2.sh
    fi
    rm -f /tmp/jdbc.log
  fi
}
|
<reponame>igorivaniuk/md-to-quill-delta
export { markdownToQuillDelta } from './parser'
|
#! /bin/sh
# Convert every LilyPond score under O90/<name>/<name>.ly to MusicXML.
# Iterate with a glob instead of parsing `ls` output, which breaks on
# filenames containing whitespace or glob characters; quote all expansions.
for dir in O90/*/; do
	i=$(basename "$dir")
	ly musicxml "O90/$i/$i.ly" -o "$i.xml"
done
|
rm ms.synctex.gz ms.log ms.out ms.aux ms.blg
|
<reponame>mohamedkhairy/dhis2-android-sdk
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.testapp.program;
import org.hisp.dhis.android.core.arch.repositories.scope.RepositoryScope;
import org.hisp.dhis.android.core.common.ValueTypeRenderingType;
import org.hisp.dhis.android.core.program.ProgramTrackedEntityAttribute;
import org.hisp.dhis.android.core.utils.integration.mock.BaseMockIntegrationTestFullDispatcher;
import org.hisp.dhis.android.core.utils.runner.D2JunitRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.List;
import static com.google.common.truth.Truth.assertThat;
/**
 * Integration tests for the ProgramTrackedEntityAttribute collection
 * repository, run against the mock full-dispatcher database (which contains
 * exactly two programTrackedEntityAttributes: "YhqgQ6Iy4c4" with sortOrder 1
 * and "QhqgQ6Iy4c4" with sortOrder 2). Each test exercises one filter,
 * nested-field or ordering capability of the repository.
 */
@RunWith(D2JunitRunner.class)
public class ProgramTrackedEntityAttributeCollectionRepositoryMockIntegrationShould
        extends BaseMockIntegrationTestFullDispatcher {
    @Test
    public void find_all() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.size()).isEqualTo(2);
    }
    @Test
    public void filter_by_mandatory() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .byMandatory()
                        .isFalse()
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.size()).isEqualTo(1);
    }
    @Test
    public void filter_by_tracked_entity_attribute() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .byTrackedEntityAttribute()
                        .eq("cejWyOfXge6")
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.size()).isEqualTo(1);
    }
    @Test
    public void filter_by_allow_future_date() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .byAllowFutureDate()
                        .isTrue()
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.size()).isEqualTo(1);
    }
    @Test
    public void filter_by_display_in_list() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .byDisplayInList()
                        .isTrue()
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.size()).isEqualTo(1);
    }
    @Test
    public void filter_by_program() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .byProgram()
                        .eq("lxAQ7Zs9VYR")
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.size()).isEqualTo(2);
    }
    @Test
    public void filter_by_sort_order() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .bySortOrder()
                        .biggerThan(1)
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.size()).isEqualTo(1);
    }
    @Test
    public void filter_by_searchable() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .bySearchable()
                        .isTrue()
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.size()).isEqualTo(1);
    }
    // Verifies that withRenderType() hydrates the nested renderType child
    // object (per-platform value-type rendering settings).
    @Test
    public void include_render_type_as_children() {
        ProgramTrackedEntityAttribute programTrackedEntityAttribute =
                d2.programModule().programTrackedEntityAttributes()
                        .byUid().eq("YhqgQ6Iy4c4")
                        .withRenderType()
                        .one().blockingGet();
        assertThat(programTrackedEntityAttribute.renderType().mobile().type())
                .isEqualTo(ValueTypeRenderingType.SHARED_HEADER_RADIOBUTTONS);
        assertThat(programTrackedEntityAttribute.renderType().desktop().type())
                .isEqualTo(ValueTypeRenderingType.VERTICAL_RADIOBUTTONS);
    }
    @Test
    public void order_by_sort_order() {
        List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
                d2.programModule().programTrackedEntityAttributes()
                        .orderBySortOrder(RepositoryScope.OrderByDirection.DESC)
                        .blockingGet();
        assertThat(programTrackedEntityAttributes.get(0).uid()).isEqualTo("QhqgQ6Iy4c4");
        assertThat(programTrackedEntityAttributes.get(0).sortOrder()).isEqualTo(2);
        assertThat(programTrackedEntityAttributes.get(1).uid()).isEqualTo("YhqgQ6Iy4c4");
        assertThat(programTrackedEntityAttributes.get(1).sortOrder()).isEqualTo(1);
    }
}
|
def is_palindrome(s):
    """Return True if `s` is a palindrome.

    The comparison is case-insensitive and ignores every non-alphanumeric
    character (spaces, punctuation), so phrases such as
    "A man, a plan, a canal: Panama" are recognized. This generalizes the
    old behavior, which only stripped spaces; inputs without punctuation
    behave exactly as before. An empty string is considered a palindrome.
    """
    # Normalize: keep only letters/digits, lower-cased.
    normalized = ''.join(ch for ch in s.lower() if ch.isalnum())
    return normalized == normalized[::-1]
def main():
    """Prompt for a string and report whether it is a palindrome."""
    text = input("Enter a string: ")
    verdict = (
        "yes, it is a palindrome"
        if is_palindrome(text)
        else "no, it is not a palindrome"
    )
    print(verdict)


if __name__ == "__main__":
    main()
|
#include <iostream>
using namespace std;
// Return the largest value in arr[0..n-1].
// Precondition: n >= 1 (arr[0] is read unconditionally).
int findMax(int arr[], int n)
{
    // Start from the first element and keep the running maximum.
    int largest = arr[0];
    for (int i = 1; i < n; ++i)
    {
        if (arr[i] > largest)
        {
            largest = arr[i];
        }
    }
    return largest;
}
// Main function
// Demonstrates findMax on a small fixed array; prints "Maximum element = 8".
int main()
{
    // array of integers
    int arr[] = {3, 6, 4, 8, 1};
    // size of array (element count, computed from the total byte size)
    int n = sizeof(arr) / sizeof(arr[0]);
    // Function calling
    cout << "Maximum element = " << findMax(arr, n);
    return 0;
}
|
import Phaser from 'phaser'
// Demo scene: a chain of 128 balls that trails behind the mouse pointer.
// Each tick, Phaser.Actions.ShiftPosition moves every ball to the previous
// ball's position and places the head at the latest pointer coordinates.
export default class ShiftPosition extends Phaser.Scene
{
    // The group of trailing ball sprites.
    private group!: Phaser.GameObjects.Group
    // Latest pointer position (target for the head of the chain).
    private x = 0
    private y = 0
    // Milliseconds accumulated since the last shift.
    private move = 0
    preload()
    {
        this.load.image('sky','/assets/skies/deepblue.png')
        this.load.image('ball','/assets/demoscene/ball-tlb.png')
    }
    create()
    {
        this.add.image(0, 0, 'sky').setOrigin(0)
        this.group = this.add.group()
        // 128 balls, all initially at the origin.
        this.group.createMultiple({ key: 'ball', frameQuantity: 128 })
        // Track the pointer; the chain follows on the next shift.
        this.input.on(Phaser.Input.Events.POINTER_MOVE, (pointer) => {
            this.x = pointer.x
            this.y = pointer.y
        })
    }
    update(time: number, delta: number)
    {
        // Throttle the shift to roughly once every 6 ms of game time.
        this.move += delta;
        if (this.move > 6)
        {
            Phaser.Actions.ShiftPosition(this.group.getChildren(), this.x, this.y)
            this.move = 0
        }
    }
}
|
<gh_stars>0
import { Injectable } from '@angular/core';
import { Actions, createEffect, ofType } from '@ngrx/effects';
import { GoalsSummaryActions as goalsSummaryActions } from '../actions';
import { catchError, map, switchMap } from 'rxjs/operators';
import { of } from 'rxjs';
import { GoalsSummaryService } from 'src/app/services/goals-summary.service';
@Injectable()
export class GoalsSummaryEffects {
  // On LOAD_FOR_SUBJECT_GOALS_SUMMARY: fetch the goals summary for the
  // action's subject/care plan and emit a success action with the data.
  // Errors are caught per request (inside switchMap) so a failed call emits
  // a failure action without terminating the effect stream.
  loadGoalsSummaryByPatientId$: any = createEffect((): any => this.actions$.pipe(
    ofType(goalsSummaryActions.LOAD_FOR_SUBJECT_GOALS_SUMMARY),
    switchMap(action => {
      // NOTE(review): the action type does not expose subjectId/carePlanId,
      // hence the ts-ignore — consider typing the action's props instead.
      return this.service
        // @ts-ignore
        .getGoalsSummaryByPatientId(action.subjectId, action.carePlanId)
        .pipe(
          map(goalsSummary => goalsSummaryActions.loadGoalsSummaryForSubjectSuccessAction({ data: goalsSummary })
          ),
          catchError(error =>
            of(goalsSummaryActions.loadGoalsSummaryForSubjectFailureAction({ error }))
          )
        );
    })
  ));
  constructor(
    private actions$: Actions,
    private service: GoalsSummaryService
  ) { }
}
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import {NgbModule} from '@ng-bootstrap/ng-bootstrap';
import {MatMenuModule} from '@angular/material/menu';
import {MatToolbarModule} from '@angular/material/toolbar';
import {MatBadgeModule} from '@angular/material/badge';
import {MatButtonModule} from '@angular/material/button';
import {MatIconModule} from '@angular/material/icon';
import {MatRadioModule} from '@angular/material/radio';
import {MatSliderModule} from '@angular/material/slider';
import {MatInputModule} from '@angular/material/input';
import {MatFormFieldModule} from '@angular/material/form-field';
import {MatDialogModule} from '@angular/material/dialog';
import {MatCardModule} from '@angular/material/card';
import {MatRippleModule} from '@angular/material/core';
import {MatSelectModule} from '@angular/material/select';
import { AppComponent } from './app.component';
import { BlogComponent } from './blog/blog.component';
import { HeaderComponent } from './header/header.component';
import { FooterComponent } from './footer/footer.component';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { ProductComponent } from './product/product.component';
import { OrderComponent } from './order/order.component';
import { routing } from './app.routing';
import { SingleProductComponent } from './product/singleProduct.component';
import { ProductService } from './product/product.service';
import { LoginService } from './auth/login.service';
import { HttpClientModule, HTTP_INTERCEPTORS } from '@angular/common/http';
import { LoginComponent } from './login/login.component';
import { RegisterComponent } from './login/register.component';
import { BasicAuthInterceptor } from './auth/auth.interceptor';
import { ErrorInterceptor } from './auth/error.interceptor';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { UserPageComponent } from './login/userPage.component';
import { UserEditComponent } from './login/userEdit.component';
import { OrderService } from './order/order.service';
import { CheckoutComponent } from './order/checkout.component';
import { ConfirmationComponent } from './order/confirmation.component';
import { SingleBlogComponent } from './blog/singleBlog.component'
import { MatTabsModule } from '@angular/material/tabs';
import { HomeComponent } from './home/home.component';
// Root module: declares all application components and wires up Bootstrap,
// Angular Material, forms, HTTP (with auth/error interceptors) and routing.
@NgModule({
  declarations: [
    AppComponent,
    BlogComponent,
    HeaderComponent,
    FooterComponent,
    ProductComponent,
    SingleProductComponent,
    SingleBlogComponent,
    OrderComponent,
    LoginComponent,
    RegisterComponent,
    UserPageComponent,
    CheckoutComponent,
    ConfirmationComponent,
    HomeComponent,
    UserEditComponent,
  ],
  imports: [
    NgbModule,
    BrowserModule,
    FormsModule,
    HttpClientModule,
    BrowserAnimationsModule,
    MatMenuModule,
    MatToolbarModule,
    MatBadgeModule,
    MatButtonModule,
    MatIconModule,
    MatRadioModule,
    MatSliderModule,
    // Fix: `routing` was previously listed twice in this array; a single
    // registration is sufficient.
    routing,
    MatTabsModule,
    MatInputModule,
    MatFormFieldModule,
    MatDialogModule,
    MatCardModule,
    MatRippleModule,
    MatSelectModule,
    ReactiveFormsModule
  ],
  // Interceptors are registered with multi: true so both run in order.
  providers: [ProductService, LoginService,OrderService,
    { provide: HTTP_INTERCEPTORS, useClass: BasicAuthInterceptor, multi: true },
    { provide: HTTP_INTERCEPTORS, useClass: ErrorInterceptor, multi: true }
  ],
  bootstrap: [AppComponent, ProductComponent],
  exports: [ProductComponent]
})
export class AppModule { }
|
# Tear down the DynamoDB tables used by this project (irreversible).
aws dynamodb delete-table --table-name users
aws dynamodb delete-table --table-name politicians
|
#!/bin/sh
# enable location services
# Stop locationd, flip its per-host "LocationServicesEnabled" flag for this
# machine's hardware UUID (both backed-up and not-backed-up plists), then
# restore ownership and restart the daemon.
/bin/launchctl unload /System/Library/LaunchDaemons/com.apple.locationd.plist
# Hardware UUID is embedded in the locationd ByHost plist filenames.
uuid=$(/usr/sbin/system_profiler SPHardwareDataType | grep "Hardware UUID" | cut -c22-57)
/usr/bin/defaults write /var/db/locationd/Library/Preferences/ByHost/com.apple.locationd."$uuid" LocationServicesEnabled -int 1
/usr/bin/defaults write /var/db/locationd/Library/Preferences/ByHost/com.apple.locationd.notbackedup."$uuid" LocationServicesEnabled -int 1
# defaults(1) ran as root; hand the files back to the _locationd user.
/usr/sbin/chown -R _locationd:_locationd /var/db/locationd
/bin/launchctl load /System/Library/LaunchDaemons/com.apple.locationd.plist
sleep 5
# enable network time
# Toggle NTP off/on to force a resync, then pin the server and timezone.
/usr/sbin/systemsetup -setusingnetworktime off
sleep 3
/usr/sbin/systemsetup -setusingnetworktime on
# set the time server
/usr/sbin/systemsetup -setnetworktimeserver time.asia.apple.com
/usr/sbin/systemsetup -settimezone Australia/Sydney
#Set The Keyboard Layout - Function
# Globals consumed by update_kdb_layout below.
PLBUDDY=/usr/libexec/PlistBuddy
NAME="Australian"
LAYOUT="15"
# update_kdb_layout PLIST
#   Rewrites the HIToolbox plist at $1 so the keyboard layout becomes
#   ${NAME}/${LAYOUT} (globals). For each key, the "Delete then Add only if
#   the Delete succeeded" dance means existing keys are replaced but keys
#   absent from the plist are left absent.
#   NOTE(review): call sites pass NAME/LAYOUT as $2/$3 but this function
#   only reads the globals — extra arguments are ignored.
update_kdb_layout() {
${PLBUDDY} -c "Delete :AppleCurrentKeyboardLayoutInputSourceID" "${1}" &>/dev/null
if [ ${?} -eq 0 ]
then
${PLBUDDY} -c "Add :AppleCurrentKeyboardLayoutInputSourceID string com.apple.keylayout.${NAME}" "${1}"
fi
# Reset every input-source list to a single entry describing our layout.
for SOURCE in AppleDefaultAsciiInputSource AppleCurrentAsciiInputSource AppleCurrentInputSource AppleEnabledInputSources AppleSelectedInputSources
do
${PLBUDDY} -c "Delete :${SOURCE}" "${1}" &>/dev/null
if [ ${?} -eq 0 ]
then
${PLBUDDY} -c "Add :${SOURCE} array" "${1}"
${PLBUDDY} -c "Add :${SOURCE}:0 dict" "${1}"
${PLBUDDY} -c "Add :${SOURCE}:0:InputSourceKind string 'Keyboard Layout'" "${1}"
${PLBUDDY} -c "Add :${SOURCE}:0:'KeyboardLayout ID' integer ${LAYOUT}" "${1}"
${PLBUDDY} -c "Add :${SOURCE}:0:'KeyboardLayout Name' string '${NAME}'" "${1}"
fi
done
}
#Set The Keyboard Layout - Task
# Apply the layout system-wide, then to every per-user HIToolbox plist.
update_kdb_layout "/Library/Preferences/com.apple.HIToolbox.plist" "${NAME}" "${LAYOUT}"
# NOTE(review): the loop variable shadows the HOME environment variable for
# the remainder of the script — presumably intentional here; verify nothing
# below relies on the real $HOME.
for HOME in /Users/*
do
if [ -d "${HOME}"/Library/Preferences ]
then
cd "${HOME}"/Library/Preferences
HITOOLBOX_FILES=`find . -name "com.apple.HIToolbox.*plist"`
for HITOOLBOX_FILE in ${HITOOLBOX_FILES}
do
update_kdb_layout "${HITOOLBOX_FILE}" "${NAME}" "${LAYOUT}"
done
fi
done
#Setting the OS language
LANG="en"
# update_language PLIST
#   Replaces AppleLanguages in $1 with a one-element array [${LANG}],
#   but only if the key already existed (same Delete-then-Add pattern).
update_language() {
${PLBUDDY} -c "Delete :AppleLanguages" "${1}" &>/dev/null
if [ ${?} -eq 0 ]
then
${PLBUDDY} -c "Add :AppleLanguages array" "${1}"
${PLBUDDY} -c "Add :AppleLanguages:0 string '${LANG}'" "${1}"
fi
}
#Setting the OS language - Task
# System-wide first, then each user's .GlobalPreferences plist.
update_language "/Library/Preferences/.GlobalPreferences.plist" "${LANG}"
for HOME in /Users/*
do
if [ -d "${HOME}"/Library/Preferences ]
then
cd "${HOME}"/Library/Preferences
GLOBALPREFERENCES_FILES=`find . -name ".GlobalPreferences.*plist"`
for GLOBALPREFERENCES_FILE in ${GLOBALPREFERENCES_FILES}
do
update_language "${GLOBALPREFERENCES_FILE}" "${LANG}"
done
fi
done
#Setting the region
REGION="en_AU"
# update_region PLIST
#   Forces AppleLocale to ${REGION} and Country to the two-letter suffix
#   (characters 4-5 of "en_AU" -> "AU"). Errors are ignored on purpose.
update_region() {
${PLBUDDY} -c "Delete :AppleLocale" "${1}" &>/dev/null
${PLBUDDY} -c "Add :AppleLocale string ${REGION}" "${1}" &>/dev/null
${PLBUDDY} -c "Delete :Country" "${1}" &>/dev/null
${PLBUDDY} -c "Add :Country string ${REGION:3:2}" "${1}" &>/dev/null
}
#Setting the region - Task
update_region "/Library/Preferences/.GlobalPreferences.plist" "${REGION}"
for HOME in /Users/*
do
if [ -d "${HOME}"/Library/Preferences ]
then
cd "${HOME}"/Library/Preferences
GLOBALPREFERENCES_FILES=`find . -name ".GlobalPreferences.*plist"`
for GLOBALPREFERENCES_FILE in ${GLOBALPREFERENCES_FILES}
do
update_region "${GLOBALPREFERENCES_FILE}" "${REGION}"
done
fi
done
exit 0
|
<gh_stars>0
const express = require('express');
const jwt = require('jsonwebtoken');
const mySql = require('mysql');
const router = express.Router();
const checkAuth = require("../middleware/check-auth");
const { route } = require('./user');
//create connection Pool
// MySQL connection pool; all settings come from the environment so no
// credentials live in source control.
// Fix: the committed line held a redacted "<PASSWORD>" placeholder, which is
// not valid JavaScript; restored to the env-var pattern used by host/user.
const pool = mySql.createPool({
    host     : process.env.SQL_HOST,
    user     : process.env.SQL_USER,
    password : process.env.SQL_PASSWORD,
    database : process.env.SQL_DATABASE
});
/**
 * POST /enter — record a quiz mark for a participant.
 * Body: { quiz_id, p_id, mark }. A (p_id, quiz_id) pair may only be
 * entered once; duplicates get a 400.
 *
 * Fixes vs. original:
 *  - `!req.body.mark` rejected a legitimate mark of 0; now only absent
 *    (null/undefined) marks are rejected.
 *  - the duplicate-attempt branch returned without releasing the pooled
 *    connection (connection leak); it now releases first.
 */
router.post("/enter", checkAuth, (req, res) => {
    if (!req.body.quiz_id) {
        return res.status(400).json({
            Message: "Required Data to be Sent Missing Please Refer Documentation"
        });
    }
    if (!req.body.p_id) {
        return res.status(400).json({
            Message: "Required Data to be Sent Missing Please Refer Documentation"
        });
    }
    // `== null` matches both null and undefined but accepts 0.
    if (req.body.mark == null) {
        return res.status(400).json({
            Message: "Required Data to be Sent Missing Please Refer Documentation"
        });
    }
    pool.getConnection((err, con) => {
        if (err) {
            return res.status(500).json({
                error: err,
                message: "Db Connection Error"
            });
        }
        // 1) Refuse a second attempt for this participant/quiz pair.
        con.query("SELECT * FROM quiz_mark WHERE (p_id = ? AND quiz_id = ?)", [req.body.p_id, req.body.quiz_id], (err, rows1, fields) => {
            if (err) {
                con.release();
                return res.status(500).json({
                    error: err
                });
            }
            if (rows1.length >= 1) {
                con.release();
                return res.status(400).json({
                    message: "Marks for Quiz for this person already entered. Multiple attempts not allowed"
                });
            }
            // 2) Store the raw mark.
            con.query("INSERT INTO mark_details (p_id, mark) VALUES (?,?)", [req.body.p_id, req.body.mark], (err, rows, fields) => {
                if (err) {
                    con.release();
                    return res.status(500).json({
                        error: err
                    });
                }
                // 3) Read back the row's mark_id.
                // NOTE(review): this SELECT can match an older row with the
                // same (mark, p_id); the INSERT result's insertId would be
                // the unambiguous choice — confirm before changing.
                con.query("SELECT mark_id FROM mark_details WHERE (mark = ? AND p_id = ?)", [req.body.mark, req.body.p_id], (err, rows3, fields) => {
                    if (err) {
                        con.release();
                        return res.status(500).json({
                            error: err
                        });
                    }
                    // 4) Link participant, quiz and mark.
                    con.query("INSERT INTO quiz_mark (p_id, quiz_id, mark_id) VALUES (?,?,?)", [req.body.p_id, req.body.quiz_id, rows3[0].mark_id], (err, rows, fields) => {
                        if (err) {
                            con.release();
                            return res.status(500).json({
                                error: err
                            });
                        }
                        con.release();
                        res.status(200).json({
                            message: "Mark Entered Successfully"
                        });
                    });
                });
            });
        });
    });
});
/**
 * GET /getStudentMarks — all marks of the authenticated student,
 * returned as { all_marks: { [quiz_id]: mark } }.
 */
router.get("/getStudentMarks", checkAuth, (req, res) => {
    pool.getConnection((err, con) => {
        if (err) {
            return res.status(500).json({
                error: err,
                message: "Db Connection Error"
            });
        }
        const sql = "SELECT quiz_id, mark FROM quiz_mark JOIN mark_details ON quiz_mark.mark_id = mark_details.mark_id WHERE quiz_mark.p_id = ?";
        con.query(sql, [req.userData.p_id], (err, rows, fields) => {
            con.release();
            if (err) {
                return res.status(500).json({
                    error: err
                });
            }
            // Fold the result rows into a quiz_id -> mark lookup object.
            const data = {};
            for (const row of rows) {
                data[row.quiz_id] = row.mark;
            }
            return res.status(200).json({
                all_marks: data
            });
        });
    });
});
/**
 * POST /getStudentMarksByPID — all marks of the student given by
 * body.p_id, returned as { all_marks: { [quiz_id]: mark } }.
 */
router.post("/getStudentMarksByPID", checkAuth, (req, res) => {
    if (!req.body.p_id) {
        return res.status(400).json({
            Message: "Required Data to be Sent Missing Please Refer Documentation"
        });
    }
    pool.getConnection((err, con) => {
        if (err) {
            return res.status(500).json({
                error: err,
                message: "Db Connection Error"
            });
        }
        const sql = "SELECT quiz_id, mark FROM quiz_mark JOIN mark_details ON quiz_mark.mark_id = mark_details.mark_id WHERE quiz_mark.p_id = ?";
        con.query(sql, [req.body.p_id], (err, rows, fields) => {
            con.release();
            if (err) {
                return res.status(500).json({
                    error: err
                });
            }
            // Fold the result rows into a quiz_id -> mark lookup object.
            const data = {};
            for (const row of rows) {
                data[row.quiz_id] = row.mark;
            }
            return res.status(200).json({
                all_marks: data
            });
        });
    });
});
module.exports = router;
|
import React from 'react';
import { AppRegistry, SafeAreaView } from 'react-native';
import { Provider } from 'react-redux';
import { isIphoneX } from 'react-native-iphone-x-helper';
import numeral from 'numeral';
import moment from 'moment';
import { resetTo, forwardTo } from './src/store/actions/common';
import i18n from './src/utils/i18n';
import App from './src/App';
import localesResource from './src/assets/locales';
import { CHANGE_LANGUAGE } from './src/constants/types';
import Preload from './src/container/Preload';
import { configStore } from './src/store';
import material from './src/theme/variables/material';
const SafeView = ({ children }) => {
const isiphoneX = isIphoneX();
if (isiphoneX) {
return <SafeAreaView style={styles.safeArea}>{children}</SafeAreaView>;
}
return children;
};
// Application bootstrapper: shows <Preload/> while the redux store is being
// (re)hydrated, then mounts the real <App/> under the Provider. Rendering is
// frozen via shouldComponentUpdate() and driven manually with forceUpdate().
class Root extends React.Component {
  state = {
    isLoading: true
  };
  componentDidMount() {
    // configStore invokes this callback once the persisted store is ready.
    configStore(store => {
      const { router, auth } = store.getState();
      const state = store.getState();
      // store.dispatch(resetTo('home'));
      // NOTE(review): leftover debug log — consider removing.
      console.log(auth.loggedIn);
      const firstRoute = auth.loggedIn ? 'home' : 'maps';
      // In dev builds, restore the last visited route for logged-in users;
      // otherwise reset navigation to the first route.
      if (
        auth.loggedIn &&
        router.current &&
        router.current.routeName &&
        __DEV__
      ) {
        store.dispatch(
          forwardTo(router.current.routeName, { ...router.current.params })
        );
      } else {
        store.dispatch(resetTo(firstRoute));
        // store.dispatch(forwardTo(router.current.routeName, { ...router.current.params }));
      }
      // init i18n
      // Language comes from the persisted settings slice; bundled resources.
      i18n.init({
        fallbackLng: 'en',
        lng: state.setting.language,
        ns: ['translations'],
        defaultNS: 'translations',
        resources: localesResource,
        debug: true,
        // interpolation: {
        //   escapeValue: false, // not needed for react!!
        //   format(value, format, lng) {
        //     switch (format) {
        //       case 'uppercase':
        //         return value.toUpperCase();
        //       default:
        //         if (typeof value === 'number')
        //           return numeral(value).format(format);
        //         if (value instanceof Date) return moment(value).format(format);
        //         return value;
        //     }
        //   }
        // },
        react: {
          wait: true
        }
      });
      // Keep the redux settings slice in sync with i18n language switches.
      i18n.on('languageChanged', lng => {
        const currentLanguage = state.setting.language;
        if (currentLanguage !== lng) {
          store.dispatch({
            type: CHANGE_LANGUAGE,
            payload: lng
          });
        }
      });
      this.store = store;
      // shouldComponentUpdate() always returns false, so force one re-render
      // now that the store exists.
      this.setState({ isLoading: false }, () => this.forceUpdate());
    });
  }
  shouldComponentUpdate() {
    // Rendering is driven exclusively by the forceUpdate() call above.
    return false;
  }
  // Populated asynchronously in componentDidMount.
  store = null;
  render() {
    if (!this.store || this.state.isLoading) {
      return (
        <SafeView>
          <Preload />
        </SafeView>
      );
    }
    return (
      <Provider store={this.store}>
        <SafeView>
          <App />
        </SafeView>
      </Provider>
    );
  }
}
// Shared style for the SafeAreaView wrapper above.
const styles = {
  safeArea: {
    flex: 1,
    backgroundColor: material.safeAreaBackground
  }
};
// Silence the in-app warning banner.
// NOTE(review): console.disableYellowBox is deprecated in recent
// react-native versions (LogBox replaces it) — verify the RN version.
console.disableYellowBox = true;
AppRegistry.registerComponent('GoGoApp', () => Root);
|
from app import app
from gevent.pywsgi import WSGIServer
import setting
if __name__ == "__main__":
    # Serve the Flask app through gevent's WSGI server on all interfaces;
    # serve_forever() blocks until the process is terminated.
    app.debug = setting.DEBUG
    WSGIServer(('0.0.0.0',setting.FlaskSettings.PORT), app).serve_forever()
|
import java.util.Scanner;
public class Test {
    /**
     * Reads one integer from standard input and reports whether it is
     * positive. Zero and negative numbers produce no output (unchanged
     * behaviour).
     *
     * Fix: the Scanner was never closed; try-with-resources now releases it.
     */
    public static void main(String[] args) {
        try (Scanner in = new Scanner(System.in)) {
            int num = in.nextInt();
            if (num > 0) {
                System.out.println("The number is positive.");
            }
        }
    }
}
|
from .map import Labyrinth, MapBlock
import numpy as np
class MapGenerator:
    """Random labyrinth generator.

    A randomized DFS carves corridors on a coarse grid, the result is
    expanded onto a (2*rows+1) x (2*columns+1) ``Labyrinth`` and finally
    scaled.

    Fix: ``np.bool`` and ``np.object`` are deprecated aliases that were
    removed in NumPy 1.24; replaced with the builtin ``bool``/``object``.
    """

    # Offsets for the four axis-aligned neighbours (up, down, left, right).
    MOVES = np.array([[-1, 0], [1, 0], [0, -1], [0, 1]], dtype=np.int32)

    def __init__(self):
        # Working state; (re)initialised by _generate().
        self.is_visited = None
        self.rand_directions = None
        self.rows = None
        self.columns = None
        self.prob = None

    def _on_map(self, i: int, j: int) -> bool:
        """Return True when (i, j) lies inside the coarse grid."""
        return 0 <= i < self.rows and 0 <= j < self.columns

    @staticmethod
    def _remove_isolated_cells(labyrinth: Labyrinth) -> Labyrinth:
        """Turn wall cells completely surrounded by floor into floor."""
        for i in range(1, labyrinth.rows - 1):
            for j in range(1, labyrinth.columns - 1):
                is_isolated = labyrinth.wall[i, j]
                for di, dj in MapGenerator.MOVES:
                    is_isolated &= labyrinth.floor[i + di, j + dj]
                if is_isolated:
                    labyrinth[i, j] = MapBlock.FLOOR
        return labyrinth

    def _dfs(self, i: int, j: int):
        """Randomized DFS from (i, j): record carve directions per cell.

        NOTE(review): recursion depth grows with map size — very large maps
        may hit Python's recursion limit; confirm expected map sizes.
        """
        inside_visited = True
        correct_moves = np.zeros(4, dtype=bool)
        for k, (di, dj) in enumerate(MapGenerator.MOVES):
            correct_moves[k] = self._on_map(i + di, j + dj)
            if correct_moves[k]:
                inside_visited &= self.is_visited[i + di, j + dj]
        if inside_visited:
            # Every in-bounds neighbour already visited: dead end.
            return
        self.is_visited[i, j] = True
        self.rand_directions[i, j] = []
        correct_moves = np.arange(MapGenerator.MOVES.shape[0])[correct_moves]
        # Always carve at least one corridor out of this cell.
        direction = MapGenerator.MOVES[np.random.choice(correct_moves)]
        self.rand_directions[i, j].append(direction)
        if not self.is_visited[i + direction[0], j + direction[1]]:
            self._dfs(i + direction[0], j + direction[1])
        # Optionally carve extra corridors with probability `prob` each.
        np.random.shuffle(correct_moves)
        for move in MapGenerator.MOVES[correct_moves]:
            di, dj = move
            if np.random.random() < self.prob:
                self.rand_directions[i, j].append(move)
                if not self.is_visited[i + di, j + dj]:
                    self._dfs(i + di, j + dj)

    def _generate(self, rows: int, columns: int, prob: float) -> Labyrinth:
        """Run one DFS carve pass and expand it onto the fine grid."""
        self.is_visited = np.zeros((rows, columns), dtype=np.int32)
        self.rand_directions = np.zeros((rows, columns), dtype=object)
        self.rand_directions.fill(None)
        self.rows, self.columns, self.prob = rows, columns, prob
        self._dfs(rows // 2, columns // 2)
        labyrinth = Labyrinth(2 * rows + 1, 2 * columns + 1, MapBlock.WALL)
        for i in range(self.rand_directions.shape[0]):
            for j in range(self.rand_directions.shape[1]):
                directions = self.rand_directions[i, j]
                if directions is None:
                    continue
                # Coarse cell (i, j) maps to fine cell (2i+1, 2j+1).
                r, c = 2 * i + 1, 2 * j + 1
                for dr, dc in directions:
                    for k in range(3):
                        if 0 <= r + dr * k < 2 * rows + 1:
                            labyrinth[r + dr * k, c] = MapBlock.FLOOR
                        if 0 <= c + dc * k < 2 * columns + 1:
                            labyrinth[r, c + dc * k] = MapBlock.FLOOR
        return MapGenerator._remove_isolated_cells(labyrinth)

    @staticmethod
    def _scale(labyrinth: Labyrinth, scale_rows: float, scale_columns: float) -> Labyrinth:
        """Nearest-neighbour rescale of the labyrinth by the given factors."""
        new_rows = int(labyrinth.rows * scale_rows)
        new_columns = int(labyrinth.columns * scale_columns)
        new_labyrinth = Labyrinth(new_rows, new_columns, MapBlock.WALL)
        for i in range(new_labyrinth.rows):
            for j in range(new_labyrinth.columns):
                new_labyrinth[i, j] = labyrinth[int(i / scale_rows), int(j / scale_columns)]
        return new_labyrinth

    def generate(self, rows: int, columns: int, free_cells_ratio: float = 0.5, prob: float = 0.25,
                 scale_rows: float = 1.0, scale_columns: float = 2.0) -> Labyrinth:
        """Generate labyrinths until at least `free_cells_ratio` of the fine
        grid is floor, then return the scaled result."""
        labyrinth = Labyrinth(rows, columns, MapBlock.WALL)
        while labyrinth.floor.sum() < free_cells_ratio * labyrinth.rows * labyrinth.columns:
            labyrinth = self._generate(rows, columns, prob)
        return self._scale(labyrinth, scale_rows, scale_columns)
|
#!/bin/bash
# Resolve this script's directory so it works from any CWD.
SCRIPTDIR=$(cd $(dirname "$0") && pwd)
HOMEDIR="$SCRIPTDIR/../../../"
cd $HOMEDIR
# shallow clone OpenWhisk repo.
git clone --depth 1 https://github.com/apache/incubator-openwhisk.git openwhisk
cd openwhisk
# Run OpenWhisk's own CI bootstrap.
./tools/travis/setup.sh
|
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
/**
 * "Spaceship Crafting" console exercise: liquids (a queue) are combined with
 * physical items (a stack); sums matching a known value yield an advanced
 * material, otherwise the item is put back with +3. The ship is built when
 * at least one of each advanced material exists.
 *
 * Fixes vs. original:
 *  - {@code main} was declared {@code private}; the JVM launcher requires a
 *    {@code public static void main}, so the program could not start.
 *  - the reader is now closed via try-with-resources.
 */
public class SpaceshipCrafting {
    private static final String ALUMINIUM = "Aluminium";
    private static final String GLASS = "Glass";
    private static final String LITHIUM = "Lithium";
    private static final String CARBON = "Carbon fiber";
    private static final String SPACESHIP_CREATED = "Wohoo! You succeeded in building the spaceship!";
    private static final String SPACESHIP_NOT_CREATED = "Ugh, what a pity! You didn't have enough materials to build the spaceship.";
    private static final String PHYSICAL_ITEMS_LEFT = "Physical items left: ";
    private static final String NONE = "none";
    private static final String LIQUIDS_LEFT = "Liquids left: ";
    private static final int GLASS_VALUE = 25;
    private static final int ALUMINIUM_VALUE = 50;
    private static final int LITHIUM_VALUE = 75;
    private static final int CARBON_VALUE = 100;

    public static void main(String[] args) {
        ArrayDeque<Integer> liquids = new ArrayDeque<>();   // used as a queue
        ArrayDeque<Integer> physicals = new ArrayDeque<>(); // used as a stack
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(System.in))) {
            // Line 1: liquids (queue order); line 2: physical items (stack order).
            Arrays.stream(reader.readLine().split("\\s+"))
                    .map(Integer::parseInt)
                    .forEach(liquids::offer);
            Arrays.stream(reader.readLine().split("\\s+"))
                    .map(Integer::parseInt)
                    .forEach(physicals::push);
            Map<String, Integer> advancedMaterials = new TreeMap<>();
            advancedMaterials.put(ALUMINIUM, 0);
            advancedMaterials.put(GLASS, 0);
            advancedMaterials.put(LITHIUM, 0);
            advancedMaterials.put(CARBON, 0);
            while (!(liquids.isEmpty() || physicals.isEmpty())) {
                int currentLiquid = liquids.poll();
                int currentItem = physicals.pop();
                int advanceMaterialSum = currentItem + currentLiquid;
                String advanceMaterial = getAdvancedMaterial(advanceMaterialSum);
                if (advanceMaterial == null) {
                    // No recipe matched: item goes back on the stack, +3.
                    physicals.push(currentItem + 3);
                } else {
                    int newValue = advancedMaterials.get(advanceMaterial) + 1;
                    advancedMaterials.put(advanceMaterial, newValue);
                }
            }
            if (advancedMaterials.entrySet().stream().allMatch(e -> e.getValue() > 0)) {
                System.out.println(SPACESHIP_CREATED);
            } else {
                System.out.println(SPACESHIP_NOT_CREATED);
            }
            printLiquids(liquids);
            printPhysicalItems(physicals);
            // TreeMap iterates alphabetically by material name.
            for (Map.Entry<String, Integer> entry : advancedMaterials.entrySet()) {
                System.out.println(entry.getKey() + ": " + entry.getValue());
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /** Prints remaining physical items (top of stack first) or "none". */
    private static void printPhysicalItems(ArrayDeque<Integer> physicals) {
        String itemsLeft = PHYSICAL_ITEMS_LEFT;
        if (physicals.isEmpty()) {
            itemsLeft += NONE;
        } else {
            itemsLeft += joinStack(physicals);
        }
        System.out.println(itemsLeft);
    }

    /** Prints remaining liquids (front of queue first) or "none". */
    private static void printLiquids(ArrayDeque<Integer> liquids) {
        String liquidsLeft = LIQUIDS_LEFT;
        if (liquids.isEmpty()) {
            liquidsLeft += NONE;
        } else {
            liquidsLeft += joinQueue(liquids);
        }
        System.out.println(liquidsLeft);
    }

    /** Drains the stack into a ", "-separated string (destructive). */
    private static String joinStack(ArrayDeque<Integer> physicals) {
        StringBuilder sb = new StringBuilder();
        while (!physicals.isEmpty()) {
            sb.append(physicals.pop());
            if (physicals.isEmpty()) {
                break;
            }
            sb.append(", ");
        }
        return sb.toString().trim();
    }

    /** Drains the queue into a ", "-separated string (destructive). */
    private static String joinQueue(ArrayDeque<Integer> liquids) {
        StringBuilder sb = new StringBuilder();
        while (!liquids.isEmpty()) {
            sb.append(liquids.poll());
            if (liquids.isEmpty()) {
                break;
            }
            sb.append(", ");
        }
        return sb.toString().trim();
    }

    /** Maps a combined value to its advanced material, or null for no match. */
    private static String getAdvancedMaterial(int advanceMaterialSum) {
        String advancedMaterial = null;
        switch (advanceMaterialSum) {
            case GLASS_VALUE:
                advancedMaterial = GLASS;
                break;
            case ALUMINIUM_VALUE:
                advancedMaterial = ALUMINIUM;
                break;
            case LITHIUM_VALUE:
                advancedMaterial = LITHIUM;
                break;
            case CARBON_VALUE:
                advancedMaterial = CARBON;
                break;
        }
        return advancedMaterial;
    }
}
|
<reponame>sergiorpleon/react-redux-shopping
import React, { Component } from 'react';
import { connect } from 'react-redux';
// Form for editing an existing category; dispatches UPDATE_CATEGORY with the
// new title/description on submit. Expects a `category` prop with
// { id, title, message }.
class EditCategoryComponent extends Component {
    handleEdit = (e) => {
        e.preventDefault();
        // Values are read from callback refs rather than controlled inputs.
        const newTitle = this.getTitle.value;
        const newDescription = this.getDescription.value;
        const data = {
            newTitle,
            newDescription
        }
        this.props.dispatch({ type: 'UPDATE_CATEGORY', id: this.props.category.id, data: data })
    }
    render() {
        return (
            <div>
                <form onSubmit={this.handleEdit}>
                    <input required type="text" ref={(input) => this.getTitle = input}
                        defaultValue={this.props.category.title} placeholder="Enter Category Title" /><br /><br />
                    {/* NOTE(review): description is pre-filled from category.message
                        while the title uses category.title — confirm the field name. */}
                    <textarea required rows="5" ref={(input) => this.getDescription = input}
                        defaultValue={this.props.category.message} placeholder="Enter Description" /><br /><br />
                    <button>Update</button>
                </form>
            </div>
        );
    }
}
// connect() with no selector only injects `dispatch`.
export default connect()(EditCategoryComponent);
|
#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Install vendor jars (not available in public Maven repos) into the local
# Maven repository so the build can resolve them as ordinary dependencies.
mvn install:install-file -Dfile=cloud-iControl.jar -DgroupId=com.cloud.com.f5 -DartifactId=icontrol -Dversion=1.0 -Dpackaging=jar
mvn install:install-file -Dfile=cloud-netscaler.jar -DgroupId=com.cloud.com.citrix -DartifactId=netscaler -Dversion=1.0 -Dpackaging=jar
mvn install:install-file -Dfile=cloud-netscaler-sdx.jar -DgroupId=com.cloud.com.citrix -DartifactId=netscaler-sdx -Dversion=1.0 -Dpackaging=jar
#
# From http://support.netapp.com/ (not available online, contact your support representative)
# Version: 4.0
mvn install:install-file -Dfile=manageontap.jar -DgroupId=com.cloud.com.netapp -DartifactId=manageontap -Dversion=4.0 -Dpackaging=jar
#
# From https://my.vmware.com/group/vmware/get-download?downloadGroup=VSDK41
# Version: 4.1, Release-date: 2010-07-13, Build: 257238
mvn install:install-file -Dfile=vim25.jar -DgroupId=com.cloud.com.vmware -DartifactId=vmware-vim25 -Dversion=4.1 -Dpackaging=jar
mvn install:install-file -Dfile=apputils.jar -DgroupId=com.cloud.com.vmware -DartifactId=vmware-apputils -Dversion=4.1 -Dpackaging=jar
mvn install:install-file -Dfile=vim.jar -DgroupId=com.cloud.com.vmware -DartifactId=vmware-vim -Dversion=4.1 -Dpackaging=jar
|
import React, { useContext } from "react"
import SEO from "../../../../components/layout/seo"
import {
How,
HowLong,
HowMuch,
Introduction,
SectionContent,
Visit,
WhatTimeOfYear,
When,
Where,
WhereToHave,
WhereToStay,
} from "../../../../components/core/section"
import { Conclusion } from "../../../../components/core/conclusion"
import {
GroupOfImages,
ImageAsLandscape,
ImageAsLandscapeOnTheLeft,
ImageAsLandscapeOnTheRight,
ImageAsPortrait,
ImageAsPortraitOnTheLeft,
ImageAsPortraitOnTheRight,
TwoImagesSameSize,
TwoImagesSameSizeOrToGroup,
} from "../../../../components/images/layout"
import { JapanBlogLayout, JapanHeadline, JapanTitle } from "../../../../components/core/japan/japan"
import { PageProps } from "gatsby"
import { SharedCardJapanImages } from "../../../../components/images/asia/japan/shared-card-japan-images"
import i18n from "i18next"
import translationFr from "../../../../locales/fr/asia/japan/himeji/himeji-castle.json"
import translationEn from "../../../../locales/en/asia/japan/himeji/himeji-castle.json"
import HomeImgUrl from "../../../../images/asia/japan/carousel-japan-2.jpg"
import { useCustomTranslation } from "../../../../i18n-hook"
import { Quote } from "../../../../components/core/quote"
import { Divider } from "../../../../components/core/divider"
import { getLink } from "../../../../components/core/links/links.utils"
import { ApplicationContext } from "../../../../components/application"
import { Comments } from "../../../../components/core/comments"
import { HimejiCastleImages } from "../../../../components/images/asia/japan/himeji/himeji-castle-images"
import HimejiCastleMap from "../../../../images/asia/japan/himeji/castle/himeji-castle-map.jpg"
import { BookingGygCardContainer, MapContainer } from "../../../../components/layout/layout"
import { ExternalLinkNotUnderlined } from "../../../../components/core/links/link"
import { buildPixabayUrl } from "../../../../utils"
import { css } from "@emotion/react"
import { mediumStart } from "../../../../components/core/variables"
import { BookingCard, BookingWarning } from "../../../../components/core/booking"
import richmondHotel from "../../../../images/asia/japan/himeji/castle/richmond-hotel.jpg"
import montereyHotel from "../../../../images/asia/japan/himeji/castle/monterey-hotel.jpg"
import daiwaRoynetHotel from "../../../../images/asia/japan/himeji/castle/daiwa-roynet-hotel.jpg"
// i18n namespace / anchor id for this article page; register both language
// bundles before the component renders.
const namespace = "asia/japan/himeji/himeji-castle"
const id = "himeji-castle"
i18n.addResourceBundle("fr", namespace, translationFr)
i18n.addResourceBundle("en", namespace, translationEn)
const IndexPage: React.FunctionComponent<PageProps> = ({ location }) => {
const { development } = useContext(ApplicationContext)
const { t, i18n } = useCustomTranslation([namespace, "common"])
const title = t(`common:country.japan.card.${id}`)
const transportLinkPublished = development || getLink("transports-in-japan").published
const foodLinkPublished = development || getLink("food-in-japan").published
return (
<>
<SEO
title={title}
fullTitle={t("full-title")}
socialNetworkDescription={t("social-network-description")}
googleDescription={t("google-description")}
image={HomeImgUrl}
location={location}
/>
<JapanBlogLayout page={id} location={location}>
<JapanTitle title={title} linkId={id} />
<ImageAsLandscape>
<SharedCardJapanImages image="himejiCastle" />
</ImageAsLandscape>
<Quote>{t("quote")}</Quote>
<Divider />
<Introduction>{t("introduction")}</Introduction>
<Divider />
<Where title={t("where.title")}>
<p>{t("where.part1")}</p>
</Where>
<When title={t("when.title")}>
<p>{t("when.part1")}</p>
<p>{t("when.part2")}</p>
<p>{t("when.part3")}</p>
</When>
<How title={t("how.title")}>
<p>{t("how.part1")}</p>
<ul>
<li>{t("how.part2")}</li>
<li>{t("how.part3")}</li>
<li>{t("how.part4")}</li>
</ul>
{transportLinkPublished && <p>{t("how.part5")}</p>}
<p>{t("how.part6")}</p>
<p>{t("how.part7")}</p>
</How>
<HowLong title={t("how-long.title")}>
<p>{t("how-long.part1")}</p>
<p>{t("how-long.part2")}</p>
<p>{t("how-long.part3")}</p>
</HowLong>
<WhatTimeOfYear title={t("what-time-of-year.title")}>
<p>{t("what-time-of-year.part1")}</p>
<p>{t("what-time-of-year.part2")}</p>
<p>{t("what-time-of-year.part3")}</p>
</WhatTimeOfYear>
<HowMuch title={t("how-much.title")}>
<p>{t("how-much.part1")}</p>
<ul>
<li>{t("how-much.part2")}</li>
<li>{t("how-much.part3")}</li>
<li>{t("how-much.part4")}</li>
<li>{t("how-much.part5")}</li>
</ul>
</HowMuch>
<WhereToStay title={t("where-to-stay.title")}>
<p>{t("where-to-stay.part1")}</p>
<p>{t("where-to-stay.part2")}</p>
<BookingGygCardContainer>
<BookingCard
hotel="jp/daiwa-roynet-himeji"
title="Daiwa Roynet Hotel Himeji"
image={daiwaRoynetHotel}
note="9,0"
price={59}
people={2}
kind="hotel"
/>
<BookingCard
hotel="jp/richmond-hotel-himeji"
title="Richmond Hotel Himeji"
image={richmondHotel}
note="8,8"
price={64}
people={2}
kind="hotel"
/>
<BookingCard
hotel="jp/hoterumontoreji-lu"
title="Hotel Monterey Himeji"
image={montereyHotel}
note="8,9"
price={115}
people={2}
kind="hotel"
/>
</BookingGygCardContainer>
<BookingWarning>{t("where-to-stay.part3")}</BookingWarning>
<p>{t("where-to-stay.part4")}</p>
<p>{t("where-to-stay.part5")}</p>
</WhereToStay>
<WhereToHave title={t("where-to-have.title")}>
<p>{t("where-to-have.part1")}</p>
<p>{t("where-to-have.part2")}</p>
<ImageAsPortrait
css={css`
max-width: 50%;
@media (min-width: ${mediumStart}) {
max-width: 40%;
}
`}
className="no-reset-image"
>
<HimejiCastleImages image="gyuKaku" />
</ImageAsPortrait>
{foodLinkPublished && <p>{t("where-to-have.part3")}</p>}
</WhereToHave>
<Visit title={t("visit.title")}>
<section>
<SectionContent>
<p>{t("visit.part1")}</p>
<p>{t("visit.part2")}</p>
<p>{t("visit.part3")}</p>
<ImageAsLandscape>
<HimejiCastleImages image="visit" />
</ImageAsLandscape>
</SectionContent>
</section>
<Divider />
<section>
<JapanHeadline>{t("visit1.title")}</JapanHeadline>
<Divider />
<SectionContent>
<p>{t("visit1.part1")}</p>
<p>{t("visit1.part2")}</p>
<p>{t("visit1.part3")}</p>
<p>{t("visit1.part4")}</p>
<p>{t("visit1.part5")}</p>
<p>{t("visit1.part6")}</p>
<ImageAsLandscape>
<HimejiCastleImages image="history" />
</ImageAsLandscape>
</SectionContent>
</section>
<Divider />
<section>
<JapanHeadline>{t("visit2.title")}</JapanHeadline>
<Divider />
<SectionContent>
<GroupOfImages>
<ImageAsPortrait>
<HimejiCastleImages image="garden" />
</ImageAsPortrait>
<ImageAsLandscape>
<HimejiCastleImages image="garden2" />
</ImageAsLandscape>
</GroupOfImages>
<p>{t("visit2.part1")}</p>
<p>{t("visit2.part2")}</p>
<p>{t("visit2.part3")}</p>
<p>{t("visit2.part4")}</p>
<p>{t("visit2.part5")}</p>
<ImageAsLandscape>
<HimejiCastleImages image="garden3" />
</ImageAsLandscape>
<p>{t("visit2.part6")}</p>
<ImageAsLandscape>
<HimejiCastleImages image="garden4" />
</ImageAsLandscape>
<p>{t("visit2.part7")}</p>
<p>{t("visit2.part8")}</p>
<p>{t("visit2.part9")}</p>
<GroupOfImages>
<ImageAsLandscape>
<HimejiCastleImages image="garden5" />
</ImageAsLandscape>
<ImageAsLandscapeOnTheLeft>
<HimejiCastleImages image="garden6" />
</ImageAsLandscapeOnTheLeft>
<ImageAsLandscapeOnTheRight>
<HimejiCastleImages image="garden7" />
</ImageAsLandscapeOnTheRight>
</GroupOfImages>
</SectionContent>
</section>
<Divider />
<section>
<JapanHeadline>{t("visit3.title")}</JapanHeadline>
<Divider />
<SectionContent>
<MapContainer>
<img src={HimejiCastleMap} alt="Himeji Castle Map" />
</MapContainer>
<p>{t("visit3.part1")}</p>
<p>{t("visit3.part2")}</p>
<p>{t("visit3.part3")}</p>
<GroupOfImages>
<TwoImagesSameSizeOrToGroup>
<HimejiCastleImages image="maze" />
<HimejiCastleImages image="maze2" />
</TwoImagesSameSizeOrToGroup>
<ImageAsLandscape>
<HimejiCastleImages image="maze3" />
</ImageAsLandscape>
</GroupOfImages>
<p>{t("visit3.part4")}</p>
<p>{t("visit3.part5")}</p>
<p>{t("visit3.part6")}</p>
<ImageAsLandscape>
<HimejiCastleImages image="maze4" />
</ImageAsLandscape>
<p>{t("visit3.part7")}</p>
<p>{t("visit3.part8")}</p>
<GroupOfImages>
<ImageAsLandscape>
<HimejiCastleImages image="maze5" />
</ImageAsLandscape>
<ImageAsPortrait>
<HimejiCastleImages image="maze6" />
</ImageAsPortrait>
</GroupOfImages>
<p>{t("visit3.part9")}</p>
<p>{t("visit3.part10")}</p>
<p>{t("visit3.part11")}</p>
<GroupOfImages>
<ImageAsLandscape>
<HimejiCastleImages image="maze7" />
</ImageAsLandscape>
<ImageAsPortrait>
<HimejiCastleImages image="maze8" />
</ImageAsPortrait>
<ImageAsLandscape>
<HimejiCastleImages image="maze9" />
</ImageAsLandscape>
<ImageAsPortraitOnTheLeft>
<HimejiCastleImages image="maze10" />
</ImageAsPortraitOnTheLeft>
<ImageAsPortraitOnTheRight>
<HimejiCastleImages image="maze11" />
</ImageAsPortraitOnTheRight>
<ImageAsLandscape>
<HimejiCastleImages image="maze12" />
</ImageAsLandscape>
</GroupOfImages>
</SectionContent>
</section>
<Divider />
<section>
<JapanHeadline>{t("visit4.title")}</JapanHeadline>
<Divider />
<SectionContent>
<p>{t("visit4.part1")}</p>
<p>{t("visit4.part2")}</p>
<p>{t("visit4.part3")}</p>
<GroupOfImages>
<ImageAsPortrait>
<HimejiCastleImages image="inside" />
</ImageAsPortrait>
<ImageAsLandscapeOnTheLeft>
<HimejiCastleImages image="inside2" />
</ImageAsLandscapeOnTheLeft>
<ImageAsLandscapeOnTheRight>
<HimejiCastleImages image="inside3" />
</ImageAsLandscapeOnTheRight>
</GroupOfImages>
<p>{t("visit4.part4")}</p>
<p>{t("visit4.part5")}</p>
<GroupOfImages>
<ImageAsLandscape
credit={
<ExternalLinkNotUnderlined href="https://fr.wikipedia.org/wiki/Ch%C3%A2teau_de_Himeji#/media/Fichier:Inside_the_Main_Tower_(2856041800).jpg">
wikipedia
</ExternalLinkNotUnderlined>
}
>
<HimejiCastleImages image="inside4" />
</ImageAsLandscape>
<ImageAsPortrait
credit={
<ExternalLinkNotUnderlined href="https://en.wikipedia.org/wiki/File:Himeji_Castle_No09_044.jpg">
wikipedia
</ExternalLinkNotUnderlined>
}
>
<HimejiCastleImages image="inside5" />
</ImageAsPortrait>
</GroupOfImages>
<p>{t("visit4.part6")}</p>
<GroupOfImages>
<ImageAsLandscape>
<HimejiCastleImages image="inside6" />
</ImageAsLandscape>
<ImageAsLandscape>
<HimejiCastleImages image="inside7" />
</ImageAsLandscape>
<ImageAsLandscape>
<HimejiCastleImages image="inside8" />
</ImageAsLandscape>
</GroupOfImages>
<p>{t("visit4.part7")}</p>
<GroupOfImages>
<ImageAsLandscape>
<HimejiCastleImages image="inside9" />
</ImageAsLandscape>
<ImageAsLandscape>
<HimejiCastleImages image="inside10" />
</ImageAsLandscape>
<ImageAsPortrait>
<HimejiCastleImages image="inside11" />
</ImageAsPortrait>
</GroupOfImages>
</SectionContent>
</section>
<Divider />
<section>
<JapanHeadline>{t("visit5.title")}</JapanHeadline>
<Divider />
<SectionContent>
<p>{t("visit5.part1")}</p>
<p>{t("visit5.part2")}</p>
<p>{t("visit5.part3")}</p>
<p>{t("visit5.part4")}</p>
<p>{t("visit5.part5")}</p>
<ImageAsLandscape
credit={
<ExternalLinkNotUnderlined href={buildPixabayUrl(i18n.languageCode)("users/jackmac34-483877")}>
jackmac34
</ExternalLinkNotUnderlined>
}
>
<HimejiCastleImages image="kokoEn" />
</ImageAsLandscape>
</SectionContent>
</section>
<Divider />
<section>
<JapanHeadline>{t("visit6.title")}</JapanHeadline>
<Divider />
<SectionContent>
<p>{t("visit6.part1")}</p>
<p>{t("visit6.part2")}</p>
<p>{t("visit6.part3")}</p>
<GroupOfImages>
<TwoImagesSameSize>
<HimejiCastleImages image="bonus" />
<HimejiCastleImages image="bonus2" />
</TwoImagesSameSize>
<TwoImagesSameSize
css={css`
@media (min-width: ${mediumStart}) {
max-width: 80%;
}
`}
>
<HimejiCastleImages image="bonus3" />
<HimejiCastleImages image="bonus4" />
</TwoImagesSameSize>
</GroupOfImages>
</SectionContent>
</section>
</Visit>
<Divider />
<Conclusion>
<p>{t("conclusion")}</p>
<ul>
<li>{t("question1")}</li>
<li>{t("question2")}</li>
</ul>
<p>{t("love")}</p>
</Conclusion>
<Divider />
<Comments
collectionName={namespace}
location={location}
facebookQuote={`${t("facebook.part1")}\n${t("facebook.part2")}`}
pinterest={{
description: t("pinterest"),
nodes:
i18n.languageCode === "fr"
? [
<HimejiCastleImages image="cardFr1" key="cardFr1" />,
<HimejiCastleImages image="cardFr2" key="cardFr1" />,
]
: [
<HimejiCastleImages image="cardEn1" key="cardEn1" />,
<HimejiCastleImages image="cardEn2" key="cardEn1" />,
],
}}
/>
</JapanBlogLayout>
</>
)
}
export default IndexPage
|
#!/bin/bash
# PBS job script: queue "kayvon", job name "interpolate".
#PBS -q kayvon
#PBS -N interpolate
# Run the "interpolate" stage of the stochastic pipeline driver.
stochastic.sh interpolate
|
/// <summary>
/// Counts the tip timer down by frame time and triggers a new tip the
/// moment the countdown crosses zero. Does nothing while no countdown
/// is running (tipTime already at or below zero).
/// </summary>
private void TipTiming()
{
    if (!(tipTime > 0))
    {
        return; // no active countdown
    }

    tipTime -= Time.deltaTime;
    if (tipTime <= 0)
    {
        GenerateTip();
    }
}
/// <summary>
/// Produces the next tip. Invoked by TipTiming() when the countdown
/// expires. Currently a stub awaiting implementation.
/// </summary>
private void GenerateTip()
{
    // Implement the logic to generate a tip here
    // This method will be called when tipTime becomes less than or equal to 0
}
|
<gh_stars>100-1000
/* eslint @typescript-eslint/no-explicit-any: 0 */
/* eslint @typescript-eslint/explicit-module-boundary-types: 0 */
/**
 * Type guard: true when `error` is a non-null object (or function) with a
 * `message` property, i.e. it can be treated as an Error.
 *
 * Fix: the previous check applied the `in` operator directly to the value,
 * which throws a TypeError for primitives (e.g. a thrown string); primitives
 * now simply return false.
 */
export function isStackError(error: any): error is Error {
  return (typeof error === 'object' || typeof error === 'function') && error !== null && 'message' in error;
}
/**
 * Type guard: true when `error` is a non-null object (or function) with a
 * `statusText` property, i.e. it looks like a fetch Response.
 *
 * Fix: guard against primitives before using `in`, which throws a TypeError
 * on non-objects; primitives now return false instead of crashing.
 */
export function isFetchError(error: any): error is Response {
  return (typeof error === 'object' || typeof error === 'function') && error !== null && 'statusText' in error;
}
/**
 * Type guard: true when `error` is a non-null object (or function) with an
 * `output` property, i.e. it looks like a TQueryResponse.
 *
 * Fix: guard against primitives before using `in`, which throws a TypeError
 * on non-objects; primitives now return false instead of crashing.
 */
export function isLGError(error: any): error is TQueryResponse {
  return (typeof error === 'object' || typeof error === 'function') && error !== null && 'output' in error;
}
/**
 * Returns true if the response is an LG output/error payload, false if not:
 * any non-nullish value whose `level` is not 'success'.
 */
export function isLGOutputOrError(data: any): data is TQueryResponse {
  // `!= null` rejects both undefined and null, so the property access is safe.
  return data != null && data.level !== 'success';
}
|
class AscendingOrder:
    """Fixed-capacity collection that keeps inserted values in ascending order.

    Only the first ``size`` slots of the pre-allocated backing list hold real
    data; the remaining slots are zero padding.
    """

    def __init__(self, capacity):
        # Pre-allocate the backing store; slots beyond `size` are padding.
        self.arr = [0 for i in range(capacity)]
        self.size = 0

    def insert(self, x):
        """Insert ``x`` and keep the occupied prefix sorted.

        Raises IndexError when the container is already at capacity.
        """
        if self.size >= len(self.arr):
            raise IndexError("AscendingOrder is at capacity")
        self.arr[self.size] = x
        self.size += 1
        # Bug fix: sort only the occupied prefix. Sorting the whole backing
        # list mixed the zero padding in with the real values, so print()
        # could show padding zeros instead of the inserted elements.
        self.arr[:self.size] = sorted(self.arr[:self.size])

    def print(self):
        """Print the stored values in ascending order on one line."""
        for i in range(self.size):
            print(self.arr[i], end=" ")
|
#!/bin/sh
# Fetch several Android NDK releases (Linux x86_64) from Google's repository.
for version in r17 r16b r15c r14b
do
    wget "https://dl.google.com/android/repository/android-ndk-${version}-linux-x86_64.zip"
done
|
// Generic descriptor for one web-API attribute:
//   T – literal attribute name, U – runtime value type,
//   V – shape of the optional formatted representation of the value.
type WebAttribute<T, U, V> = {
  Attribute: T;
  Type: U;
  Formatted?: V;
};
// Selectable attributes of a WebResource entity, keyed by attribute name.
// `*_guid` entries are lookup references whose formatted shape carries the
// display name; plain attributes with `{}` have no formatted variant.
type WebResource_Select = {
  createdby_guid: WebAttribute<"createdby_guid", string | null, { createdby_formatted?: string }>;
  createdon: WebAttribute<"createdon", Date | null, { createdon_formatted?: string }>;
  createdonbehalfby_guid: WebAttribute<"createdonbehalfby_guid", string | null, { createdonbehalfby_formatted?: string }>;
  dependencyxml: WebAttribute<"dependencyxml", string | null, {}>;
  description: WebAttribute<"description", string | null, {}>;
  displayname: WebAttribute<"displayname", string | null, {}>;
};
|
# Round counter for the repeated test runs below.
i=1
# Build the Java classpath and native-library path from the Eclipse
# .classpath file via the project's helper tool.
classpath="$(java -cp build/main \
edu.washington.escience.myria.tool.EclipseClasspathReader \
.classpath)"
libpath="$(java -cp build/main \
edu.washington.escience.myria.tool.EclipseClasspathReader \
.classpath lib)"
# Seed $? with success, then rerun the test suite until it fails.
# NOTE: `while [[ $? -eq 0 ]]` reads the exit status of the last command in
# the loop body (the java/&& chain) — do not insert commands between `true`
# and the loop, or inside the loop after the java invocation.
true
while [[ $? -eq 0 ]]
do
echo starting number $i round
# Run the iterative system tests with assertions on and a JDWP debug port
# open on 21001; bump the round counter only on success.
java -ea -Xdebug \
-Xrunjdwp:transport=dt_socket,address=21001,server=y,suspend=n \
-cp "$classpath" -Djava.library.path="$libpath" org.junit.runner.JUnitCore \
edu.washington.escience.myria.testsuites.IterativeSystemTests \
&& i=$((i+1))
done
|
#!/bin/sh
# Seed payload: two new "id" nodes and no edges; crawl depth 4, no TTL,
# with the HelloWorld and PlusBang adapters enabled.
JOB=' {"seed": { "nodes": [ {"status":"new","type":"id","value":"somevaluehere1"},{"status":"new","type":"id","value":"somevaluehere2"} ], "edges": []}, "job_config":{"depth": 4, "ttl":0, "description":"job descripion", "adapters": { "HelloWorld":{}, "PlusBang": {} }}}'
# Submit the job to the local API server.
curl -H "Content-Type: application/json" -X POST -d "$JOB" http://localhost:9999/api/job
|
<filename>keggtools/resolver.py<gh_stars>1-10
""" Resolve requests to KEGG data Api """
import logging
from .utils import parse_tsv, request
from .storage import KEGGDataStorage
from .models import KEGGPathway
class KEGGPathwayResolver:
    """
    KEGGPathwayResolver

    Request interface for the KEGG REST API endpoint, with transparent
    file-based caching through KEGGDataStorage.
    """

    def __init__(self, org: str):
        """
        :param org: KEGG 3-letter organism code (e.g. "mmu", "hsa")
        :raises TypeError: if org is not a str
        """
        if not isinstance(org, str):
            logging.error("Expect type str. Got type %s", type(org).__name__)
            raise TypeError("Expect type str for organism.")
        self.organism: str = org

    @staticmethod
    def request(url: str):
        """
        GET request to the given url.
        :param url: str
        :return: raw response payload as returned by utils.request
        """
        return request(url=url)

    def get_pathway_list(self):
        """
        Request the list of pathways linked to the organism; cached on disk.
        :return: dict mapping pathway code -> pathway name
        """
        # Response format, one pathway per line:
        # path:mmu00010\tGlycolysis / Gluconeogenesis - Mus musculus (mouse)
        # i.e. path:<org><code>\t<name> - <org>
        store = KEGGDataStorage()
        if store.pathway_list_exist(org=self.organism):
            # Cache hit: return the pickled pathway dict as-is.
            logging.debug("Found pathway list for organism %s in cache.", self.organism)
            data = store.load_dump(filename=f"pathway_{self.organism}.dump")
            return data
        # Cache miss: request the pathway list from the REST API.
        logging.debug("Not found pathway list for organism %s in cache." \
            " Requesting from API", self.organism)
        data = parse_tsv(
            KEGGPathwayResolver.request(
                url=f"http://rest.kegg.jp/list/pathway/{self.organism}"
            )
        )
        pathways = {}
        for line in data:
            if len(line) == 2 and line[0] != "":
                # NOTE(review): str.strip(self.organism) strips a *character
                # set*, not a prefix; it works for ids like "mmu00010" only
                # because the numeric code contains no organism letters —
                # confirm before reusing with other id schemes.
                pathways[line[0].split(":")[1].strip(self.organism)] = line[1].split(" - ")[0]
        logging.debug("Loading list of %d pathways for organism %s",
                      len(pathways.keys()),
                      self.organism)
        # Persist for the next call.
        store.save_dump(filename=f"pathway_{self.organism}.dump", data=pathways)
        # return pathway list
        return pathways

    @staticmethod
    def build_url(org: str, code: str):
        """
        Build the KGML download URL for one pathway at the KEGG API endpoint.
        :param org: organism code
        :param code: pathway number
        :return: str
        """
        return f"http://rest.kegg.jp/get/{org}{code}/kgml"

    def get_pathway(self, code: str):
        """
        Request one pathway by code, using the on-disk KGML cache.
        :param code: pathway number
        :return: KEGGPathway
        """
        store = KEGGDataStorage()
        if store.pathway_file_exist(org=self.organism, code=code):
            # Cache hit: load the KGML from disk.
            data = store.load(filename=f"{self.organism}_path{code}.kgml")
            logging.debug("Load pathway path:%s%s from file", self.organism, code)
        else:
            # Cache miss: request the pathway from the API and store it.
            data = KEGGPathwayResolver.request(
                KEGGPathwayResolver.build_url(org=self.organism, code=code))
            store.save(filename=f"{self.organism}_path{code}.kgml", data=data)
            logging.debug("Download pathway path:%s%s from rest.kegg.jp", self.organism, code)
        return KEGGPathway.parse(data)

    def link_pathways(self, geneid: str):
        """
        Return all pathway ids linked to a gene id (not cached).
        :param geneid: str
        :return: list of pathway ids
        """
        data = parse_tsv(
            KEGGPathwayResolver.request(
                f"http://rest.kegg.jp/link/pathway/{self.organism}:{geneid}"
            )
        )
        result = []
        for item in data:
            if len(item) == 2 and item[0] != "":
                result.append(item[1])
        return result

    def download_pathways(self, pathways: list):
        """
        Download (and cache) the KGML file for every pathway id in the list
        that is not already on disk.
        :param pathways: list of pathway numbers
        :return: NoneType
        """
        # NOTE(review): KEGGDataStorage methods are called on the class here
        # but on instances elsewhere in this class — verify the storage API
        # supports both call styles.
        downloads = 0
        for code in pathways:
            if not KEGGDataStorage.pathway_file_exist(org=self.organism, code=code):
                url = KEGGPathwayResolver.build_url(org=self.organism, code=code)
                logging.debug("Requesting path:%s%s %s...", self.organism, code, url)
                KEGGDataStorage.save(filename=f"{self.organism}_path{code}.kgml",
                                     data=KEGGPathwayResolver.request(url))
                downloads += 1
        logging.debug("Download %d pathway KGML files from KEGG", downloads)

    @staticmethod
    def get_components():
        """
        Get the dict of KEGG compounds ({compound id: name}); requested and
        cached on first use. NOTE(review): the name says "components" but the
        endpoint lists *compounds* — confirm which term callers expect.
        :return: dict
        """
        filename = "compound.dump"
        if not KEGGDataStorage.exist(filename=filename):
            url = "http://rest.kegg.jp/list/compound/"
            logging.debug("Requesting components %s...", url)
            result = {}
            for items in parse_tsv(KEGGPathwayResolver.request(url=url)):
                if len(items) >= 2 and items[0] != "":
                    result[items[0].split(":")[1]] = items[1].split(";")[0]
            KEGGDataStorage.save_dump(filename=filename, data=result)
            return result
        return KEGGDataStorage.load_dump(filename=filename)
# Ad-hoc smoke test: dump the KEGG compound list when run as a script.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    logging.info(KEGGPathwayResolver.get_components())
|
#!/bin/bash
# Archived program command-line for experiment
# Copyright 2016 Xiang Zhang
#
# Usage: bash {this_file} [additional_options]

# Echo each command and abort on the first failure.
set -x;
set -e;
# Run the qlua training driver with the archived model/data options;
# any extra CLI flags are passed through via "$@".
qlua main.lua -driver_location models/amazonbinary/temporal8length486feature256 -driver_variation small -train_data_file data/amazon/binary_train_code.t7b -test_data_file data/amazon/binary_test_code.t7b "$@";
|
<reponame>krishna8421/OfflinePayApp
// Base URL of the deployed Offline Pay web app.
export const URL = 'https://offline-pay.vercel.app';
|
class Product:
    """A single inventory item: a name, a unit price and a stock quantity."""

    def __init__(self, name, price, quantity):
        self.name = name
        self.price = price
        self.quantity = quantity

    def update_price(self, new_price):
        """Replace the unit price."""
        self.price = new_price

    def update_quantity(self, new_quantity):
        """Replace the stock quantity."""
        self.quantity = new_quantity

    def remove_product(self):
        """No-op kept for backward compatibility.

        Bug fix note: the previous body was ``del self``, which only unbinds
        the local name and never destroys the object. Removal must be done by
        the owning collection (see Inventory.remove_product).
        """
        pass

    def display_product(self):
        """Print a one-line human-readable summary of this product."""
        print(f"Product: {self.name}, Price: ${self.price}, Quantity: {self.quantity}")
class Inventory:
    """An in-memory collection of Product-like objects."""

    def __init__(self):
        self.products = []

    def add_product(self, product):
        """Append a product to the inventory."""
        self.products.append(product)

    def update_product(self, product_name, new_price=None, new_quantity=None):
        """Update price and/or quantity of every product named product_name.

        Bug fix: compare against None instead of relying on truthiness, so
        legitimate zero values (price 0.0, quantity 0) are applied instead of
        being silently ignored.
        """
        for product in self.products:
            if product.name == product_name:
                if new_price is not None:
                    product.update_price(new_price)
                if new_quantity is not None:
                    product.update_quantity(new_quantity)

    def remove_product(self, product_name):
        """Remove every product named product_name.

        Bug fix: the old code removed elements from the list while iterating
        over it, which skips the element following each removal; rebuilding
        the list removes all matches reliably.
        """
        self.products = [p for p in self.products if p.name != product_name]

    def display_inventory(self):
        """Print one line per product currently in the inventory."""
        for product in self.products:
            product.display_product()
def main():
    """Interactive console menu for managing a single Inventory.

    Loops until the user picks "5" (Exit). NOTE(review): float()/int() on
    raw input will raise ValueError on malformed numbers — confirm whether
    crashing on bad input is acceptable here.
    """
    inventory = Inventory()
    while True:
        print("\n1. Add Product\n2. Update Product\n3. Remove Product\n4. Display Inventory\n5. Exit")
        choice = input("Enter your choice: ")
        if choice == "1":
            # Create a new product from user input and add it.
            name = input("Enter product name: ")
            price = float(input("Enter product price: "))
            quantity = int(input("Enter product quantity: "))
            new_product = Product(name, price, quantity)
            inventory.add_product(new_product)
        elif choice == "2":
            # Update either the price or the quantity of an existing product.
            product_name = input("Enter product name: ")
            update_choice = input("Update price (P) or quantity (Q)? ")
            if update_choice.upper() == "P":
                new_price = float(input("Enter new price: "))
                inventory.update_product(product_name, new_price=new_price)
            elif update_choice.upper() == "Q":
                new_quantity = int(input("Enter new quantity: "))
                inventory.update_product(product_name, new_quantity=new_quantity)
        elif choice == "3":
            product_name = input("Enter product name to remove: ")
            inventory.remove_product(product_name)
        elif choice == "4":
            inventory.display_inventory()
        elif choice == "5":
            break
        else:
            print("Invalid choice. Please try again.")
# Run the interactive menu only when executed as a script.
if __name__ == "__main__":
    main()
|
// security.js

/**
 * Report whether `username` is one of the statically authorized accounts.
 * @param {string} username - Account name to check.
 * @returns {boolean} true when the user is in the allow list.
 */
function checkAccess(username) {
  const authorizedUsers = new Set(['admin', 'user1', 'user2']); // static allow list
  return authorizedUsers.has(username);
}
|
// Importações.
const Usuario = require('../api/models/Usuario');
const ContaLocal = require('../api/models/ContaLocal');
const ContaFacebook = require('../api/models/ContaFacebook');
const ContaGoogle = require('../api/models/ContaGoogle');
const EnderecoUsuario = require('../api/models/EnderecoUsuario');
const Animal = require('../api/models/Animal');
const AlbumAnimal = require('../api/models/AlbumAnimal');
const FotoAnimal = require('../api/models/FotoAnimal');
const Anuncio = require('../api/models/Anuncio');
const Momento = require('../api/models/Momento');
const Postagem = require('../api/models/Postagem');
const FotoPostagem = require('../api/models/FotoPostagem');
const AvaliacaoPostagem = require('../api/models/AvaliacaoPostagem');
const AvaliacaoAnuncio = require('../api/models/AvaliacaoAnuncio');
const AnuncioFavorito = require('../api/models/AnuncioFavorito');
const Candidatura = require('../api/models/Candidatura');
const Seguida = require('../api/models/Seguida');
const Conversa = require('../api/models/Conversa');
const Resposta = require('../api/models/Resposta');
const AnexoResposta = require('../api/models/AnexoResposta');
const Denuncia = require('../api/models/Denuncia');
const Bloqueio = require('../api/models/Bloqueio');
const Cliente = require('../api/models/Cliente');
const Notificacao = require('../api/models/Notificacao');
// Exportação da função de verificação de cada um dos Models.
module.exports = () => {
Usuario.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/PerfilUsuario] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/PerfilUsuario] Erro: ', error);
});
ContaLocal.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/AcessoLocal] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/AcessoLocal] Erro: ', error);
});
ContaFacebook.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/AcessoFacebook] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/AcessoFacebook] Erro: ', error);
});
ContaGoogle.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/AcessoGoogle] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/AcessoGoogle] Erro: ', error);
});
EnderecoUsuario.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/EnderecoUsuario] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/EnderecoUsuario] Erro: ', error);
});
Animal.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Animal] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Animal] Erro: ', error);
});
AlbumAnimal.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/AlbumAnimal] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/AlbumAnimal] Erro: ', error);
});
FotoAnimal.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/FotoAnimal] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/FotoAnimal] Erro: ', error);
});
Anuncio.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Anuncio] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Anuncio] Erro: ', error);
});
Momento.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Momento] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Momento] Erro: ', error);
});
Postagem.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Postagem] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Postagem] Erro: ', error);
});
FotoPostagem.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/FotoPostagem] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/FotoPostagem] Erro: ', error);
});
AvaliacaoPostagem.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/AvaliacaoPostagem] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/AvaliacaoPostagem] Erro: ', error);
});
AvaliacaoAnuncio.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/AvaliacaoAnuncio] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/AvaliacaoAnuncio] Erro: ', error);
});
AnuncioFavorito.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/AnuncioFavorito] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/AnuncioFavorito] Erro: ', error);
});
Candidatura.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Candidatura] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Candidatura] Erro: ', error);
});
Seguida.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Seguida] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Seguida] Erro: ', error);
});
Conversa.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Conversa] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Conversa] Erro: ', error);
});
Resposta.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Resposta] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Resposta] Erro: ', error);
});
AnexoResposta.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/AnexoResposta] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/AnexoResposta] Erro: ', error);
});
Denuncia.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Denuncia] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Denuncia] Erro: ', error);
});
Bloqueio.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Bloqueio] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Bloqueio] Erro: ', error);
});
Cliente.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Cliente] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Cliente] Erro: ', error);
});
Notificacao.findAll({ raw: true, limit: 1 }).then((result) => {
console.log('[ORM/Notificacao] Resultado: ', result);
}).catch((error) => {
console.log('[ORM/Notificacao] Erro: ', error);
});
}
|
<filename>src/main/java/br/com/alinesolutions/anotaai/metadata/io/ResponseEntity.java
package br.com.alinesolutions.anotaai.metadata.io;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import br.com.alinesolutions.anotaai.infra.Constant;
import br.com.alinesolutions.anotaai.metadata.model.AnotaaiMessage;
import br.com.alinesolutions.anotaai.metadata.model.Login;
import br.com.alinesolutions.anotaai.metadata.model.domain.TipoMensagem;
import br.com.alinesolutions.anotaai.model.BaseEntity;
@JsonFilter("entity")
@JsonInclude(Include.NON_NULL)
/**
 * Generic REST response envelope: carries either a single entity, a list,
 * login data and/or a plain-text payload, together with validation state and
 * user-facing messages.
 *
 * @param <T> concrete entity type transported by this response
 */
public class ResponseEntity <T extends BaseEntity<?, ?>> implements Serializable {

	private static final long serialVersionUID = 1L;

	/** Single-entity payload. */
	private T entity;

	/** List/paged payload. */
	private ResponseList<T> list;

	/** Session/login information attached to the response. */
	private Login login;

	/** Free-form textual payload. */
	private String responseText;

	/** User-facing messages (validation errors, notices, ...). */
	private List<AnotaaiMessage> messages;

	/**
	 * Process id. For each request of a sensitive flow (e.g. a sale), a new
	 * process identifier (PID) is generated and must be sent back with the
	 * next request.
	 */
	//TODO ANOTAI - implement security for sensitive flows.
	private String pid;

	/**
	 * Determines whether a business error occurred; if one occurred, the
	 * message list will be populated.
	 */
	private Boolean isValid;

	public ResponseEntity() {
		super();
		messages = new ArrayList<>();
	}

	public ResponseEntity(T entity) {
		this();
		this.entity = entity;
	}

	public ResponseEntity(Boolean isValid) {
		this();
		this.isValid = isValid;
	}

	public ResponseEntity(AnotaaiMessage mensagem) {
		// Bug fix: this constructor did not call this(), leaving `messages`
		// null, so addMessage() threw a NullPointerException.
		this();
		addMessage(mensagem);
	}

	public T getEntity() {
		return entity;
	}

	public void setEntity(T entity) {
		this.entity = entity;
	}

	public Boolean getIsValid() {
		return isValid;
	}

	public void setIsValid(Boolean isValid) {
		this.isValid = isValid;
	}

	public List<AnotaaiMessage> getMessages() {
		return messages;
	}

	public void setMessages(List<AnotaaiMessage> messages) {
		this.messages = messages;
	}

	public String getResponseText() {
		return responseText;
	}

	public void setResponseText(String responseText) {
		this.responseText = responseText;
	}

	public Login getLogin() {
		return login;
	}

	public void setLogin(Login login) {
		this.login = login;
	}

	public ResponseList<T> getList() {
		return list;
	}

	public void setList(ResponseList<T> list) {
		this.list = list;
	}

	public String getPid() {
		return pid;
	}

	public void setPid(String pid) {
		this.pid = pid;
	}

	/** Appends one message to the response. */
	public void addMessage(AnotaaiMessage message) {
		messages.add(message);
	}

	/** Builds and appends a message with an explicit display time. */
	public void addMessage(String key, TipoMensagem type, Long time, String... params) {
		addMessage(new AnotaaiMessage(key, type, time, params));
	}

	/** Builds and appends a message with the default display time. */
	public void addMessage(String key, TipoMensagem type, String... params) {
		addMessage(key, type, Constant.App.DEFAULT_TIME_VIEW, params);
	}
}
|
<reponame>gautiselvaraj/nag-me
import React from 'react';
import styled from 'styled-components';
// White page heading (1.25rem) with collapsed vertical margins; color comes
// from the styled-components theme.
const Heading = styled.h1`
  color: ${props => props.theme.white};
  font-size: 1.25rem;
  margin-top: 0;
  margin-bottom: 0;
`;

// Thin wrapper so callers can pass extra props (className, id, ...) straight
// through to the styled heading.
export default ({ children, ...otherProps }) => (
  <Heading {...otherProps}>{children}</Heading>
);
|
import React from 'react'
import { Link as GatsbyLink, graphql, useStaticQuery } from 'gatsby'
import { MenuDataQuery } from 'autogenerated/graphql-types'
// Props accepted by a single navigation link.
export type LinkType = {
  label: string
  url: string
  className: string
}

// One Gatsby client-side link with uppercase, semibold label text.
const Link: React.FC<LinkType> = ({ url, label, className }) => {
  return (
    <GatsbyLink className={className} to={url} key={url}>
      <span className="is-uppercase has-text-weight-semibold is-full-width">
        {label}
      </span>
    </GatsbyLink>
  )
}
// Static GraphQL query: label and URL of every WordPress menu item.
const query = graphql`
  query MenuData {
    allWpMenuItem {
      nodes {
        label
        url
      }
    }
  }
`

// Props for the Links list; the class name is forwarded to each Link.
export type Props = {
  linkClassName?: string
}
// Renders the WordPress menu as one column per Link.
const Links: React.FC<Props> = ({ linkClassName = '' }) => {
  const data = useStaticQuery<MenuDataQuery>(query)
  const { nodes } = data.allWpMenuItem
  return (
    <>
      {nodes?.map((item, index) => (
        <div className="column" key={index}>
          <Link
            label={item?.label ?? ''}
            url={item?.url ?? ''}
            className={linkClassName}
          />
        </div>
      ))}
    </>
  )
}

export default Links
|
package io.smallrye.mutiny.operators;
import org.testng.annotations.Test;
import io.smallrye.mutiny.Uni;
/**
 * Verifies that {@code Uni.createFrom().nothing()} never emits an item:
 * the subscriber only ever observes its subscription.
 */
public class UniNeverTest {

    @Test
    public void testTheBehaviorOfNever() {
        UniAssertSubscriber<Void> subscriber = UniAssertSubscriber.create();
        Uni.createFrom().<Void> nothing()
                .subscribe().withSubscriber(subscriber);
        // Fix: the chain previously called assertNoResult() twice; the
        // duplicate call was redundant and has been removed.
        subscriber.assertNoResult().assertSubscribed();
    }
}
|
<gh_stars>0
package actors
import akka.actor._
import play.api._
import play.api.Play.current
import play.api.libs.iteratee._
import play.api.libs.iteratee.Concurrent.Broadcaster
import play.api.libs.json._
import play.api.libs.oauth._
import play.api.libs.ws.WS
import play.extras.iteratees._
import play.api.libs.concurrent.Execution.Implicits._
import scala.collection.mutable.ArrayBuffer
// WebSocket-facing actor: one instance per connected client; `out` is the
// actor that delivers messages back to that client's socket.
class TwitterStreamer2(out: ActorRef) extends Actor {
  def receive = {
    case "subscribe" =>
      Logger.info("Received subscription from a client")
      TwitterStreamer2.subscribe(out)
  }

  // Invoked by Akka when the client disconnects and this actor stops.
  override def postStop() {
    Logger.info("Client unsubscribing from stream")
    TwitterStreamer2.unsubscribe(out)
  }
}
/**
 * Companion object holding the shared Twitter stream: a single broadcast
 * enumerator is fed from the Twitter statuses/filter endpoint (or a master
 * node) and fanned out to every subscribed client actor.
 */
object TwitterStreamer2 {
  private var broadcastEnumerator: Option[Enumerator[JsObject]] = None
  private var broadcaster: Option[Broadcaster] = None
  private val subscribers = new ArrayBuffer[ActorRef]()

  def props(out: ActorRef) = Props(new TwitterStreamer2(out))

  /** Attach a client actor to the broadcast stream, starting it on first use. */
  def subscribe(out: ActorRef): Unit = {
    if (broadcastEnumerator.isEmpty) {
      init()
    }
    // Iteratee that forwards each tweet to the client until it unsubscribes.
    def twitterClient: Iteratee[JsObject, Unit] = Cont {
      case in@Input.EOF => Done(None)
      case in@Input.El(o) =>
        if (subscribers.contains(out)) {
          out ! o
          twitterClient
        } else {
          Done(None)
        }
      case in@Input.Empty =>
        twitterClient
    }
    broadcastEnumerator.foreach { enumerator =>
      enumerator run twitterClient
    }
    subscribers += out
  }

  /** Detach a client actor from the stream. */
  def unsubscribe(subscriber: ActorRef): Unit = {
    val index = subscribers.indexWhere(_ == subscriber)
    // Bug fix: indexWhere returns 0 for the first subscriber and -1 when the
    // subscriber is absent; the previous `index > 0` check never removed the
    // first client that subscribed.
    if (index >= 0) {
      subscribers.remove(index)
      Logger.info("Unsubscribed client from stream")
    }
  }

  /** Enumerator for downstream nodes that mirror this node's stream. */
  def subscribeNode: Enumerator[JsObject] = {
    if (broadcastEnumerator.isEmpty) {
      TwitterStreamer2.init()
    }
    broadcastEnumerator.getOrElse {
      Enumerator.empty[JsObject]
    }
  }

  /** Open the upstream connection and set up the shared broadcast pipeline. */
  def init(): Unit = {
    credentials.map { case (consumerKey, requestToken) =>
      val (iteratee, enumerator) = Concurrent.joined[Array[Byte]]
      // Decode the raw byte stream into a stream of JSON objects.
      val jsonStream: Enumerator[JsObject] = enumerator &>
        Encoding.decode() &>
        Enumeratee.grouped(JsonIteratees.jsSimpleObject)
      val (e, b) = Concurrent.broadcast(jsonStream)
      broadcastEnumerator = Some(e)
      broadcaster = Some(b)
      // Either mirror a master node or talk to Twitter directly.
      val maybeMasterNodeUrl = Option(System.getProperty("masterNodeUrl"))
      val url = maybeMasterNodeUrl.getOrElse {
        "https://stream.twitter.com/1.1/statuses/filter.json"
      }
      WS
        .url(url)
        .sign(OAuthCalculator(consumerKey, requestToken))
        .withQueryString("track" -> "cat")
        .get { response =>
          Logger.info("Status: " + response.status)
          iteratee
        }.map { _ =>
          Logger.info("Twitter stream closed")
        }
    } getOrElse {
      Logger.error("Twitter credentials are not configured")
    }
  }

  // OAuth credentials from configuration; None when any key is missing.
  private def credentials = for {
    apiKey <- Play.configuration.getString("twitter.apiKey")
    apiSecret <- Play.configuration.getString("twitter.apiSecret")
    token <- Play.configuration.getString("twitter.token")
    tokenSecret <- Play.configuration.getString("twitter.tokenSecret")
  } yield (ConsumerKey(apiKey, apiSecret), RequestToken(token, tokenSecret))
}
|
<filename>app/ErrorHandler.scala
import com.cognism.common.utils.ApplicationException
import play.api.http.HttpErrorHandler
import play.api.mvc._
import play.api.mvc.Results._
import scala.concurrent._
import javax.inject.Singleton
@Singleton
// Central Play error handler: maps client errors to their status code and
// server errors either to a domain-specific response or a 500.
class ErrorHandler extends HttpErrorHandler {

  // 4xx-style errors: echo the status code and message back to the client.
  def onClientError(request: RequestHeader, statusCode: Int, message: String) = {
    Future.successful(Status(statusCode)(s"HTTP [$statusCode] Client Error {$message}"))
  }

  def onServerError(request: RequestHeader, exception: Throwable) = {
    exception.getCause match {
      // Domain failures carry their own pre-built response; msg is unused here.
      case ApplicationException(msg, response) =>
        Future.successful(response)
      case _ =>
        Future.successful(InternalServerError("A server error occurred: " + exception.getMessage))
    }
  }
}
|
<gh_stars>1-10
import Component from 'vue-class-component';
import { Prop } from 'vue-property-decorator';
import HourRange from '../../../../../shared/modules/DataRender/vos/HourRange';
import VueComponentBase from '../../VueComponentBase';
import '../RangesComponent.scss';
// Displays a list of HourRange values using the shared ranges template/styles.
@Component({
    template: require('./HourRangesComponent.pug'),
    components: {}
})
export default class HourRangesComponent extends VueComponentBase {

    // Hour ranges to render; null until provided by the parent.
    @Prop({ default: null })
    private ranges: HourRange[];

    // Maximum number of ranges shown.
    @Prop({ default: 10 })
    private limit: number;

    // NOTE(review): presumably lets the template render past `limit` when
    // toggled — confirm against HourRangesComponent.pug.
    private force_override_limit: boolean = false;
}
|
#!/usr/bin/env bash
# SPDX-License-Identifier: BSD-2-Clause
# shellcheck disable=SC1091
#set -x
DIR="$(dirname "$0")"
ROOT="${DIR}/.."
source "${ROOT}/common.sh"
check_ima_support
# Stage the busybox container with the helper scripts and signing keys it needs.
setup_busybox_container \
    "${ROOT}/ns-common.sh" \
    "${ROOT}/check.sh" \
    "${DIR}/reappraise-after-host-file-signing.sh" \
    "${ROOT}/keys/rsakey.pem" \
    "${ROOT}/keys/rsa.crt"
if ! check_ns_appraise_support; then
    echo " Error: IMA-ns does not support IMA-appraise"
    exit "${SKIP:-3}"
fi
check_root_or_sudo
# Copy the tools (with their shared libraries) into the container rootfs.
copy_elf_busybox_container "$(type -P keyctl)"
copy_elf_busybox_container "$(type -P evmctl)"
copy_elf_busybox_container "$(type -P getfattr)"
copy_elf_busybox_container "$(type -P setfattr)"
# Test re-appraisal after we sign a file appraised by the namespace from the host
# with a key unknown to the namespace.
# Synchronization of this script and namespace is via shared files
echo "INFO: Testing re-appraisal of file inside container after file signed with unknown key by host"
rootfs="$(get_busybox_container_root)"
SYNCFILE=syncfile
syncfile="${rootfs}/${SYNCFILE}"
TESTEXE=/bin/busybox2
testexe="${rootfs}/${TESTEXE}"
# Run the in-namespace half of the test in the background; it signals us
# through the sync file when the host should re-sign the test executable.
TESTEXE="${TESTEXE}" SYNCFILE="${SYNCFILE}" \
    run_busybox_container_key_session ./reappraise-after-host-file-signing.sh &
pid=$!
# Wait until namespace wants us to modify the file
if ! wait_for_file "${syncfile}" 50; then
    echo " Error: Syncfile did not appear!"
else
    # Re-sign with rsakey2, a key the namespace does not know about.
    if ! sudo evmctl ima_sign --imasig --key "${ROOT}/keys/rsakey2.pem" -a sha256 "${testexe}" >/dev/null 2>&1; then
        echo " Error: Could not sign file on the host"
        exit "${SKIP:-3}"
    fi
    # tell namespace to proceed
    rm -f "${syncfile}"
fi
wait "${pid}"
rc=$?
if [ $rc -ne 0 ]; then
    echo " Error: Test failed in IMA namespace."
    exit "$rc"
fi
echo "INFO: Pass test 1"
exit "${SUCCESS:-0}"
|
#!/bin/bash
# Configure an MCUboot Zephyr build for the board given as $1 (serial recovery
# enabled, UART console disabled), then build it with Ninja.
cmake -GNinja -DBOARD=$1 ${ZEPHYR_BASE}/../bootloader/mcuboot/boot/zephyr -DCONFIG_MCUBOOT_SERIAL=y -DCONFIG_UART_CONSOLE=n -DCONFIG_BOOT_SERIAL_DETECT_PIN_VAL=1 -DCONFIG_HW_STACK_PROTECTION=y -DCONFIG_CONSOLE_HANDLER=n
ninja
|
#!/bin/bash
# Create a resource group
az group create --name myResourceGroup --location eastus

# Create a scale set
# Network resources such as an Azure load balancer are automatically created
# Two data disks are created and attached - a 64Gb disk and a 128Gb disk
az vmss create \
    --resource-group myResourceGroup \
    --name myScaleSet \
    --image UbuntuLTS \
    --upgrade-policy-mode automatic \
    --admin-username azureuser \
    --generate-ssh-keys \
    --data-disk-sizes-gb 64 128

# Attach an additional 128Gb data disk
# (no --disk given, so a new empty managed disk is created and attached)
az vmss disk attach \
    --resource-group myResourceGroup \
    --name myScaleSet \
    --size-gb 128

# Install the Azure Custom Script Extension to run a script that prepares the data disks
az vmss extension set \
    --publisher Microsoft.Azure.Extensions \
    --version 2.0 \
    --name CustomScript \
    --resource-group myResourceGroup \
    --vmss-name myScaleSet \
    --settings '{"fileUris":["https://raw.githubusercontent.com/Azure-Samples/compute-automation-configurations/master/prepare_vm_disks.sh"],"commandToExecute":"./prepare_vm_disks.sh"}'
|
#!/bin/bash
FRUIT_VERSION=3.5.0

# To authenticate:
# conan user -p <BINTRAY_API_KEY_HERE> -r fruit-bintray polettimarco

# Build every combination of build type x shared/static x boost on/off
# (8 package variants in total).
for build_type in Release Debug
do
    for is_shared in True False
    do
        for use_boost in True False
        do
            conan create . google/stable -o fruit:shared=$is_shared -o fruit:use_boost=$use_boost -s build_type=$build_type
        done
    done
done

# Point the remote at Bintray and upload all built variants.
conan remote update fruit-bintray https://api.bintray.com/conan/google/fruit
conan upload fruit/${FRUIT_VERSION}@google/stable --all -r fruit-bintray
|
#!/usr/bin/env bash
# Integration/regression test driver:
# 1. Parse command line arguments
# 2. cd to the test directory
# 3. run tests
# 4. Print summary of successes and failures, exit with 0 if
#    all tests pass, else exit with 1

# Uncomment the line below if you want more debugging information
# about this script.
#set -x

# The name of this test script
this_program_name="biodemo-test.sh"
# The program under test (either a full path to an executable, or the name
# of an executable found in $PATH); set by parse_args -p.
test_program=""
# Directory containing the test data files and expected outputs; set by
# parse_args -d.
test_data_dir=""
# Number of failed test cases
num_errors=0
# Total number of tests run
num_tests=0
# Print the usage/help message to stdout.
# NOTE: the heredoc body below is emitted verbatim — do not add comment
# lines inside it.
function show_help {
cat << UsageMessage
${this_program_name}: run integration/regression tests for biodemo
Usage:
${this_program_name} [-h] [-v] -p program -d test_data_dir
Example:
${this_program_name} -p bin/biodemo -d data/tests
-h shows this help message
-v verbose output
UsageMessage
}
# Print an error message ($1) and exit with status $2.
function exit_with_error {
    # Fix: use a fixed printf format string so '%' or backslash sequences in
    # the message are printed literally instead of being interpreted.
    printf '%s: ERROR: %s\n' "${this_program_name}" "$1"
    exit "$2"
}
# If -v was specified on the command line, print a more verbose progress
# message to stdout; otherwise stay quiet.
function verbose_message {
    if [ "${verbose}" = true ]; then
        echo "${this_program_name} $1"
    fi
}
# Parse the command line arguments and set the global variables test_program
# and test_data_dir; exits 0 after -h, exits 2 when a required option is
# missing.
function parse_args {
    local OPTIND opt
    while getopts "hp:d:v" opt; do
        case "${opt}" in
            h)
                show_help
                exit 0
                ;;
            p) test_program="${OPTARG}"
                ;;
            d) test_data_dir="${OPTARG}"
                ;;
            v) verbose=true
                ;;
        esac
    done
    shift $((OPTIND-1))
    [ "$1" = "--" ] && shift
    if [[ -z ${test_program} ]]; then
        exit_with_error "missing command line argument: -p program, use -h for help" 2
    fi
    if [[ -z ${test_data_dir} ]]; then
        exit_with_error "missing command line argument: -d test_data_dir, use -h for help" 2
    fi
}
# Run a command and check that its stdout is exactly equal to the contents of
# a specified file, and that its exit status matches the expected value.
# ARG1: command we want to test as a string
# ARG2: a file path containing the expected output
# ARG3: expected exit status
function test_stdout_exit {
    let num_tests+=1
    output=$(eval $1)
    exit_status=$?
    expected_output_file=$2
    expected_exit_status=$3
    verbose_message "Testing stdout and exit status: $1"
    # Fix: quote the expected-output file path so paths containing spaces or
    # glob characters do not break the comparison.
    difference=$(diff <(echo "$output") "$expected_output_file")
    if [ -n "$difference" ]; then
        let num_errors+=1
        echo "Test output failed: $1"
        echo "Actual output:"
        echo "$output"
        expected_output=$(cat "$expected_output_file")
        echo "Expected output:"
        echo "$expected_output"
        echo "Difference:"
        echo "$difference"
    elif [ "$exit_status" -ne "$expected_exit_status" ]; then
        let num_errors+=1
        echo "Test exit status failed: $1"
        echo "Actual exit status: $exit_status"
        echo "Expected exit status: $expected_exit_status"
    fi
}
# Run a command and check that its exit status equals an expected value.
# ARG1: command we want to test as a string
# ARG2: expected exit status
# NB: this is mostly for checking erroneous conditions, where the
# exact output message is not crucial, but the exit status is
# important
function test_exit_status {
    let num_tests+=1
    output=$(eval $1)
    exit_status=$?
    expected_exit_status=$2
    verbose_message "Testing exit status: $1"
    if [ "$exit_status" -ne "$expected_exit_status" ]; then
        let num_errors+=1
        echo "Test exit status failed: $1"
        echo "Actual exit status: $exit_status"
        echo "Expected exit status: $expected_exit_status"
    fi
}
# 1. Parse command line arguments.
# "$@" (quoted) preserves arguments that contain whitespace.
parse_args "$@"

# 2. Change to the test data directory; abort early if it does not exist
# (previously a failed cd let every test run in the wrong directory).
cd "$test_data_dir" || exit_with_error "cannot change to directory: $test_data_dir" 1

# 3. Run tests
test_stdout_exit "$test_program one_sequence.fasta" one_sequence.fasta.expected 0
test_stdout_exit "$test_program two_sequence.fasta" two_sequence.fasta.expected 0
test_stdout_exit "$test_program --minlen 200 two_sequence.fasta" two_sequence.fasta.minlen_200.expected 0
test_stdout_exit "$test_program --maxlen 200 < two_sequence.fasta" two_sequence.fasta.maxlen_200.stdin.expected 0
test_stdout_exit "$test_program --minlen 200 < two_sequence.fasta" two_sequence.fasta.minlen_200.stdin.expected 0
test_stdout_exit "$test_program empty_file" empty_file.expected 0
# Test when --minlen filters out ALL sequences (empty result)
test_stdout_exit "$test_program --minlen 1000 two_sequence.fasta" two_sequence.fasta.minlen_1000.expected 0
# Test exit status for a bad command line invocation
test_exit_status "$test_program --this_is_not_a_valid_argument > /dev/null 2>&1" 2
# Test exit status for a non existent input FASTA file
test_exit_status "$test_program this_file_does_not_exist.fasta > /dev/null 2>&1" 1

# 4. End of testing - check if any errors occurred
if [ "$num_errors" -gt 0 ]; then
    echo "$test_program failed $num_errors out of $num_tests tests"
    exit 1
else
    echo "$test_program passed all $num_tests successfully"
    exit 0
fi
|
/**
 * Count the bits needed to represent `number` in its 32-bit two's-complement
 * form (0 needs 0 bits; any negative needs all 32).
 *
 * The original used the sign-propagating shift `>>=`, which never reaches 0
 * for negative inputs and therefore looped forever; the unsigned shift
 * `>>>=` always terminates.
 *
 * @param {number} number - integer input (truncated to 32 bits by the shift)
 * @returns {number} bit count in the range 0..32
 */
function minBits(number) {
  let count = 0;
  while (number) {
    count++;
    number >>>= 1; // unsigned shift: zero-fills, so negatives also reach 0
  }
  return count;
}

let numBits = minBits(15);
console.log(numBits); // Output: 4
|
package starter.search;
import org.openqa.selenium.By;
/**
 * Page-object locators for the search results list.
 * NOTE(review): the "#links .result__title" selector looks like DuckDuckGo's
 * results markup — confirm against the page under test.
 */
class SearchResultList {
    /** First anchor of each result title inside the "#links" container. */
    static By RESULT_TITLES = By.cssSelector("#links .result__title a:nth-child(1)");
}
|
<gh_stars>0
import { GetTextByPathPipe } from '@openchannel/angular-common-components/src/lib/common-components/pipe/get-text-by-path.pipe';
// Unit tests for GetTextByPathPipe: resolving (possibly dotted) paths on an
// object, stringifying numbers, stripping HTML tags, and falling back to a
// default value when the resolved value is empty, null, or not text.
describe('GetTextByPathPipe', () => {
    let pipe: GetTextByPathPipe;
    // Fixture covering every branch the pipe handles.
    let value: any;

    beforeEach(() => {
        pipe = new GetTextByPathPipe();
        value = {
            textField: 'text-value',
            textFieldWithHtmlTags: '<a>text-value-with-tags</a>',
            emptyTextField: '',
            numberField: 0,
            objectField: {
                textField: 'text-value-from-object-field',
            },
            emptyObjectField: null,
        };
    });

    it('Get text value by path', () => {
        expect(pipe.transform(value, 'textField')).toBe('text-value');
    });

    it('Get text value by multi field path.', () => {
        expect(pipe.transform(value, 'objectField.textField', 'default-value')).toBe('text-value-from-object-field');
    });

    it('Get number value by path. Convert to string.', () => {
        expect(pipe.transform(value, 'numberField')).toBe('0');
    });

    it('Replace HTML tags.', () => {
        expect(pipe.transform(value, 'textFieldWithHtmlTags')).toBe('text-value-with-tags');
    });

    it('Without HTML tag replacing.', () => {
        // Fourth argument disables tag stripping.
        expect(pipe.transform(value, 'textFieldWithHtmlTags', '', false)).toBe('<a>text-value-with-tags</a>');
    });

    it('Use default value, when value by path is empty or null.', () => {
        expect(pipe.transform(value, 'emptyTextField', 'default-value')).toBe('default-value');
    });

    it('Use default value, when value by path is not text.', () => {
        expect(pipe.transform(value, 'objectField', 'default-value')).toBe('default-value');
    });
});
|
//
// AppsViewController.h
// Connect SDK Sampler App
//
// Created by <NAME> on 9/17/13.
// Connect SDK Sample App by LG Electronics
//
// To the extent possible under law, the person who associated CC0 with
// this sample app has waived all copyright and related or neighboring rights
// to the sample app.
//
// You should have received a copy of the CC0 legalcode along with this
// work. If not, see http://creativecommons.org/publicdomain/zero/1.0/.
//
#import "BaseViewController.h"
/// Sample-app screen exercising app launching: one button per common target
/// app plus a table of launchable apps (this controller acts as the table's
/// data source and delegate).
/// NOTE(review): behavior inferred from outlet/action names — confirm against
/// the implementation file.
@interface AppsViewController : BaseViewController<UITableViewDataSource, UITableViewDelegate>

/// Launch buttons, wired up in Interface Builder.
@property (weak, nonatomic) IBOutlet UIButton *browserButton;
@property (weak, nonatomic) IBOutlet UIButton *toastButton;
@property (weak, nonatomic) IBOutlet UIButton *netflixButton;
@property (weak, nonatomic) IBOutlet UIButton *appStoreButton;
@property (weak, nonatomic) IBOutlet UIButton *youtubeButton;
@property (weak, nonatomic) IBOutlet UIButton *myAppButton;

/// Table listing the available apps.
@property (weak, nonatomic) IBOutlet UITableView *apps;

/// One action handler per launch button.
- (IBAction)browserPressed:(id)sender;
- (IBAction)toastPressed:(id)sender;
- (IBAction)netflixPressed:(id)sender;
- (IBAction)appStorePressed:(id)sender;
- (IBAction)youtubePressed:(id)sender;
- (IBAction)myAppPressed:(id)sender;

@end
|
#!/bin/bash
# Run the MUTAG dataset with the default parameters
cd ..
source activate graph

# Best parameter in isolated default search
batch_size=32
num_layers=5
lr=0.01
num_mlp_layers=2
hidden_dim=32
final_dropout=0
epochs=20

for fold in 0
do
    echo Processing MUTAG at fold $fold ...
    # The redirection must be part of the python command line: previously the
    # missing trailing backslash made "> ./logs/..." a stand-alone command that
    # merely truncated the log while the training output went to stdout.  The
    # log name also said COLLAB even though the dataset here is MUTAG.
    python main.py --dataset MUTAG \
        --epochs $epochs \
        --batch_size $batch_size \
        --num_layers $num_layers \
        --lr $lr \
        --num_mlp_layers $num_mlp_layers \
        --hidden_dim $hidden_dim \
        --fold_idx $fold \
        --filename MUTAG_0.8_result_fold_$fold \
        --degree_as_tag \
        --corrupt_label \
        --N "0.8" \
        --denoise estimate \
        --correction forward \
        > ./logs/MUTAG_0.8_forward.log
    echo Done.
done
|
<reponame>louiethe17th/data-structures-and-algorithms<filename>src/day9/LinkedListTest.java<gh_stars>0
package day9;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Tests for {@code LinkedList#hasLoop()}: looped chains (to the middle and to
 * the head), a straight chain, and an empty list.
 */
class LinkedListTest {

    /** Builds the 1 -> 7 -> 2 -> 3 -> 5 chain used by every test. */
    private ListNode[] chain() {
        ListNode[] nodes = {
                new ListNode(1),
                new ListNode(7),
                new ListNode(2),
                new ListNode(3),
                new ListNode(5),
        };
        for (int i = 0; i + 1 < nodes.length; i++) {
            nodes[i].next = nodes[i + 1];
        }
        return nodes;
    }

    /** Wraps a root node (possibly null) in a LinkedList instance. */
    private LinkedList asList(ListNode root) {
        LinkedList list = new LinkedList();
        list.root = root; // attach the root manually
        return list;
    }

    @Test
    void hasLoop() {
        // Tail points back into the middle of the chain.
        ListNode[] nodes = chain();
        nodes[4].next = nodes[1];
        assertEquals(true, asList(nodes[0]).hasLoop());
    }

    @Test
    void hasNoLoop() {
        assertEquals(false, asList(chain()[0]).hasLoop());
    }

    @Test
    void hasLoopAtStart() {
        // Tail points back at the head.
        ListNode[] nodes = chain();
        nodes[4].next = nodes[0];
        assertEquals(true, asList(nodes[0]).hasLoop());
    }

    @Test
    void listIsEmpty() {
        assertEquals(false, asList(null).hasLoop());
    }
}
|
<reponame>UsulPro/netherlands-weather
import React from 'react';
import { Query } from 'react-apollo';
import styled from '@emotion/styled';
import { createIcon } from '../common/weather-icon';
import DateInput from './DateInput';
import query from './city.gql';
// Left-hand column of the page header: holds the title row, the date picker
// and (on a city page) the weather summary.
const Container = styled.header`
  width: 50%;
  display: flex;
  flex-direction: column;
  margin: 8px 24px;
`;

// Title row: page name plus an inline "back" button when not on the home page.
const Title = styled.h1`
  color: white;
  font-size: 40px;
  font-family: sans-serif;
  margin: 0px;
  margin-bottom: 8px;
  display: flex;
  align-items: center;
  & button {
    background-color: #6492bf;
    color: white;
    border: none;
    border-radius: 2px;
    margin-left: 10px;
    padding: 2px 6px;
    &:hover {
      background-color: #72a6da;
    }
  }
`;

// Secondary line shown on the home page ("Select City").
const Comment = styled.div`
  color: white;
  opacity: 0.8;
  font-size: 24px;
  font-family: sans-serif;
  margin-bottom: 8px;
`;
// Row holding the (scaled-up) weather icon and the mean-temperature label.
const IconHolder = styled.div`
  width: 200px;
  display: flex;
  justify-content: flex-start;
  align-items: center;
  margin-top: 20px;
  & .icon-position {
    transform: scale(2);
    transform-origin: center;
    /* The ';' after "10px" was missing, which made the next line part of the
       height value — CSS then dropped BOTH height and padding-top. */
    height: 10px;
    padding-top: 5px;
  }
  & .info-position {
    margin-left: 20px;
    color: white;
    font-family: sans-serif;
    font-weight: 800;
    font-size: calc(16px + 2vw);
    position: relative;
    top: calc(-20px + 1vw);
  }
`;
// "AMSTERDAM" -> "Amsterdam": first character upper-cased, rest lower-cased.
const toPascalCase = (name) => {
  const [first, ...rest] = name;
  return `${first.toUpperCase()}${rest.join('').toLowerCase()}`;
};
// Page header: city (or country) title, a "back" button when not on the home
// page, the date picker, and — on a city page — that day's weather icon and
// rounded mean temperature fetched via the GraphQL `city` query.
const Header = ({ city, home, navigate, today, changeDate }) => {
  const title = city ? toPascalCase(city) : 'Netherlands';
  return (
    <Container>
      <Title>
        {title}
        {!home && <button onClick={() => navigate('../../')}>back</button>}
      </Title>
      <DateInput date={today} onChange={changeDate}/>
      {home ? (
        <Comment>Select City</Comment>
      ) : (
        <Query query={query} variables={{ today, city }}>
          {({ data: { city }, loading }) => {
            if (loading) return 'loading...';
            // Fall back to an empty record when the day has no weather entry.
            // (temperatureMax is absent from the fallback, but `middle` below
            // is only computed when temperatureMin is truthy, so it is never
            // read in that case.)
            const weather = city[0].weather[0] || {
              temperatureMin: null,
              precipitationMm: null,
            };
            const Icon = createIcon(weather).Icon;
            // Rounded mean of min/max with a degree sign, or empty string.
            const middle = weather.temperatureMin
              ? `${Math.round(
                  (weather.temperatureMin + weather.temperatureMax) / 2
                )}\u00B0`
              : '';
            return (
              <IconHolder>
                <div className="icon-position">
                  <Icon />
                </div>
                <div className="info-position">{middle}</div>
              </IconHolder>
            );
          }}
        </Query>
      )}
    </Container>
  );
};
export default Header;
|
import java.util.Scanner;
/**
 * For each input N, prints the largest palindrome strictly below N that is a
 * product of two 3-digit numbers (Project-Euler-4 style; 0 when none exists).
 */
public class LargestPalindromeProduct
{
    /** Reads a test count, then one N per line, and prints one answer per N. */
    public static void main(String[] args)
    {
        Scanner scanner = new Scanner(System.in);
        int tests = Integer.parseInt(scanner.nextLine());
        for (int i = 0; i < tests; i++)
        {
            int number = Integer.parseInt(scanner.nextLine());
            System.out.println(palindromeNumber(number));
        }
        scanner.close();
    }

    /**
     * Largest palindromic product j*k (100 <= j,k <= 999) strictly below
     * {@code number}; 0 when no such palindrome exists.  Early breaks exploit
     * that products shrink monotonically as j or k decreases, so the result
     * is identical to the exhaustive scan but much faster.
     */
    private static int palindromeNumber(int number)
    {
        int max = 0;
        for (int j = 999; j >= 100; j--)
        {
            // j * 999 is the largest product this j (or any smaller j) can
            // still contribute; once it cannot beat the current best, stop.
            if (j * 999 <= max)
            {
                break;
            }
            for (int k = 999; k >= 100; k--)
            {
                int product = j * k;
                // Products only shrink as k decreases.
                if (product <= max)
                {
                    break;
                }
                if (product < number && isPalindrome(String.valueOf(product)))
                {
                    max = product;
                }
            }
        }
        return max;
    }

    /** True when {@code text} reads the same forwards and backwards. */
    private static boolean isPalindrome(String text)
    {
        for (int i = 0; i < text.length() / 2; i++)
        {
            if (text.charAt(i) != text.charAt(text.length() - 1 - i))
            {
                return false;
            }
        }
        return true;
    }
}
|
class Vehicle:
    """Plain record describing a vehicle, exposed through Java-style getters."""

    def __init__(self, make, model, year, color, mileage):
        """Store each constructor argument as an attribute of the same name."""
        self.__dict__.update(
            make=make, model=model, year=year, color=color, mileage=mileage
        )

    def get_make(self):
        """Return the manufacturer."""
        return self.make

    def get_model(self):
        """Return the model name."""
        return self.model

    def get_year(self):
        """Return the model year."""
        return self.year

    def get_color(self):
        """Return the color."""
        return self.color

    def get_mileage(self):
        """Return the mileage."""
        return self.mileage
|
package weixin.weicar.entity;
import java.math.BigDecimal;
import java.util.Date;
import java.lang.String;
import java.lang.Double;
import java.lang.Integer;
import java.math.BigDecimal;
import javax.xml.soap.Text;
import java.sql.Blob;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.SequenceGenerator;
import org.jeecgframework.poi.excel.annotation.Excel;
/**
 * @Title: Entity
 * @Description: Vehicle model picture
 * @author onlineGenerator
 * @date 2015-05-24 15:38:42
 * @version V1.0
 *
 */
@Entity
@Table(name = "car_vehicle_type_pic", schema = "")
@SuppressWarnings("serial")
public class CarVehicleTypePicEntity implements java.io.Serializable {
    /** Primary key (UUID string, 36 chars). */
    private java.lang.String id;
    /** Name of the user who created the record. */
    private java.lang.String createName;
    /** Creation date. */
    private java.util.Date createDate;
    /** Owning vehicle model (lazy many-to-one). */
    //private java.lang.String carVehicleTypeId;
    private CarVehicleTypeEntity carVehicleTypeEntity;
    /** Picture reference (string column, up to 200 chars). */
    private java.lang.String picture;
    /** WeChat main-table ID. */
    @Excel(exportName="微信主表ID")
    private java.lang.String accountid;

    /**
     * Getter for the primary key.
     * @return java.lang.String the primary key
     */
    @Id
    @GeneratedValue(generator = "paymentableGenerator")
    @GenericGenerator(name = "paymentableGenerator", strategy = "uuid")
    @Column(name ="ID",nullable=false,length=36)
    public java.lang.String getId(){
        return this.id;
    }

    /**
     * Setter for the primary key.
     * @param id java.lang.String the primary key
     */
    public void setId(java.lang.String id){
        this.id = id;
    }

    /**
     * Getter for the creator's name.
     * @return java.lang.String the creator's name
     */
    @Column(name ="CREATE_NAME",nullable=true,length=50)
    public java.lang.String getCreateName(){
        return this.createName;
    }

    /**
     * Setter for the creator's name.
     * @param createName java.lang.String the creator's name
     */
    public void setCreateName(java.lang.String createName){
        this.createName = createName;
    }

    /**
     * Getter for the creation date.
     * @return java.util.Date the creation date
     */
    @Column(name ="CREATE_DATE",nullable=true,length=20)
    public java.util.Date getCreateDate(){
        return this.createDate;
    }

    /**
     * Setter for the creation date.
     * @param createDate java.util.Date the creation date
     */
    public void setCreateDate(java.util.Date createDate){
        this.createDate = createDate;
    }

    /** Getter for the owning vehicle model (lazy-loaded). */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "CAR_VEHICLE_TYPE_ID", nullable = false)
    public CarVehicleTypeEntity getCarVehicleTypeEntity() {
        return carVehicleTypeEntity;
    }

    /** Setter for the owning vehicle model. */
    public void setCarVehicleTypeEntity(CarVehicleTypeEntity carVehicleTypeEntity) {
        this.carVehicleTypeEntity = carVehicleTypeEntity;
    }

    /**
     * Getter for the picture reference.
     * @return java.lang.String the picture reference
     */
    @Column(name ="PICTURE",nullable=true,length=200)
    public java.lang.String getPicture(){
        return this.picture;
    }

    /**
     * Setter for the picture reference.
     * @param picture java.lang.String the picture reference
     */
    public void setPicture(java.lang.String picture){
        this.picture = picture;
    }

    /**
     * Getter for the WeChat main-table ID.
     * @return java.lang.String the WeChat main-table ID
     */
    @Column(name ="ACCOUNTID",nullable=true,length=32)
    public java.lang.String getAccountid(){
        return this.accountid;
    }

    /**
     * Setter for the WeChat main-table ID.
     * @param accountid java.lang.String the WeChat main-table ID
     */
    public void setAccountid(java.lang.String accountid){
        this.accountid = accountid;
    }
}
|
<reponame>AliFrank608-TMW/RacingReact
/**
* @module configureStore
*/
import configureStore from 'store/ConfigureStore'
/**
* Testing utilities
*/
import { expect } from 'chai'
// Smoke tests for the store factory: it must exist, be callable, and return
// a store object when invoked.
describe('Store - configuration', () => {
  it('it should exist', () => {
    expect(configureStore).to.exist
  })

  it('it should be an function', () => {
    expect(configureStore).to.be.an('function')
  })

  it('it should output an object', () => {
    expect(configureStore()).to.be.an('object')
  })
})
|
#!/bin/bash
# Move the enp2s0 interface into an isolated "fpga_lb" network namespace,
# bring it up, and assign it an address.
sudo ip netns add fpga_lb
sudo ip link set enp2s0 netns fpga_lb
sudo ip netns exec fpga_lb ip link set up enp2s0
# NOTE(review): no prefix length is given, so the kernel assigns a /32 —
# confirm whether something like 10.0.100.1/24 was intended.
sudo ip netns exec fpga_lb ip addr add 10.0.100.1 dev enp2s0
|
#!/usr/bin/env bash
# Point the VS Code extension test runner at the compiled tests and the
# fixture workspace, then launch it.
export CODE_TESTS_PATH="./client/out/test"
export CODE_TESTS_WORKSPACE="./client/testFixture"
# NOTE(review): "node.exe" only resolves on Windows-flavored shells (e.g.
# Git Bash / WSL interop) — confirm whether plain "node" was intended.
node.exe "./client/out/test/runTest"
|
package io.opensphere.mantle.util;
import java.awt.Component;
import javax.swing.JCheckBox;
import io.opensphere.core.util.swing.OptionDialog;
/**
* Activation dialog.
*/
/**
 * Option dialog with an extra "Active" checkbox in its button panel, letting
 * the caller query or preset an activation flag.
 */
public class ActivationDialog extends OptionDialog
{
    /** The serialVersionUID. */
    private static final long serialVersionUID = 1L;

    /** Checkbox added to the dialog's button panel; checked by default. */
    private JCheckBox myActiveCheckBox;

    /**
     * Constructor.
     *
     * @param owner the owner
     * @param title the title
     * @param component the component
     * @param errorStrategy the error strategy
     */
    public ActivationDialog(Component owner, String title, Component component, ErrorStrategy errorStrategy)
    {
        super(owner, component, title);
        setErrorStrategy(errorStrategy);
        myActiveCheckBox = new JCheckBox("Active", true);
        getContentButtonPanel().add(myActiveCheckBox);
    }

    /**
     * Whether the "Active" checkbox is currently selected.
     *
     * @return true if activated
     */
    public boolean isActivated()
    {
        return myActiveCheckBox.isSelected();
    }

    /**
     * Selects or deselects the "Active" checkbox.
     *
     * @param isActivated the new activated state
     */
    public void setActivated(boolean isActivated)
    {
        myActiveCheckBox.setSelected(isActivated);
    }
}
|
# Wipe the local Gradle cache.
alias gradleclean="rm -rf $HOME/.gradle/caches/"
# Connect adb over Wi-Fi: switch to TCP mode, extract the device's IP (field 9
# of "ip route") and connect to it.  The $ must be escaped: inside double
# quotes an unescaped $9 is expanded (to empty) when the alias is DEFINED,
# turning the awk program into '{print }', which prints the whole line.
alias adbota="adb kill-server && adb tcpip 5555 && sleep 5 && adb shell ip route | awk '{print \$9}' | xargs adb connect"
|
const Discord = require('discord.js');
const client = new Discord.Client();

// Log once the gateway connection is established.
client.on('ready', () => {
  console.log('Chatbot is ready!');
});

client.on('message', message => {
  // Check if the message was sent by a bot (including ourselves); never reply
  // to bots to avoid reply loops.
  if (message.author.bot) return;

  // Match keywords case-insensitively so "Hello", "HELLO", etc. also trigger
  // (the original compared against message.content verbatim).
  const content = message.content.toLowerCase();

  // Step 1: the chatbot should detect and respond to greetings
  if (content.includes('hello')) {
    message.reply('Hi there! How can I help?');
  }
  // Step 2: the chatbot should detect user messages and reply accordingly
  if (content.includes('what do you do')) {
    message.reply('I\'m a chatbot! I can answer simple questions or carry on conversations.');
  }
  // Step 3: the chatbot can interact with users further
  if (content.includes('where are you from')) {
    message.reply('I\'m from the internet! Where are you from?');
  }
});

// Log the chatbot into the server (replace the placeholder with a real token).
client.login('your-token-here');
|
<reponame>rmlmcfadden/triumfpp
#include <triumf/nmr/dipole_dipole.hpp>
#include <boost/math/constants/constants.hpp>
#include <cmath>
// Dipole-dipole SLR rate normalized to its zero-field value.
// x[0] is the applied field B0; par = {B_d, nu_c, gamma_probe, gamma_host}
// (the parameter meanings follow how plot_dipole_dipole() fills them in).
double dipole_dipole(const double *x, const double *par) {
  const double b_dip = par[0];
  const double nu_corr = par[1];
  const double gamma_probe = par[2];
  const double gamma_host = par[3];
  // Zero-field rate scale used for normalization.
  const double omega_d = std::abs(gamma_probe * gamma_host * b_dip * b_dip);
  const double normalization = (nu_corr * (3.0 / 10.0)) / omega_d;
  return normalization * triumf::nmr::dipole_dipole::slr_rate<double>(
                             x[0], b_dip, nu_corr, gamma_probe, gamma_host);
}
// Generic BPP-style model: spectral density j(omega, nu_c) scaled by nu_c.
// x[0] is the applied field B0; par = {B_d, nu_c, gamma_I}.
// (par[0] is accepted for signature compatibility with the dipole model but
// is not used by this expression; the original also computed an unused
// omega_d from it, which has been removed.)
double generic(const double *x, const double *par) {
  const double nu_c = par[1];
  const double gamma_I = par[2];
  // Larmor angular frequency at this field.
  const double omega = gamma_I * x[0];
  return triumf::nmr::dipole_dipole::j<double>(omega, nu_c) * nu_c;
}
// Draws both SLR-rate models over the field range [B_min, B_max] on log-log
// axes and writes the comparison plot to dipole-dipole.pdf.
void plot_dipole_dipole() {
  // Field range (tesla) and sampling density for both curves.
  const double B_min = 2e-5;
  const double B_max = 2e-1;
  const auto n_points = 200;
  // Shared model parameters: dipolar field, correlation rate, gyromagnetic
  // ratios of the probe (8Li) and host (93Nb) nuclei.
  const double B_d = 1e-5;
  const double nu_c = 1.0 / 23.8e-6;
  const double gamma_8Li = boost::math::constants::two_pi<double>() * 6.30221e6;
  const double gamma_93Nb =
      boost::math::constants::two_pi<double>() * 10.30221e6;

  TCanvas *canvas = new TCanvas();

  // Mehring-style dipole-dipole model (4 parameters).
  TF1 *f_dipole = new TF1("f_dipole", dipole_dipole, B_min, B_max, 4);
  f_dipole->SetTitle("");
  f_dipole->SetNpx(n_points);
  f_dipole->SetLineColor(kRed);
  f_dipole->SetParameter(0, B_d);
  f_dipole->SetParameter(1, nu_c);
  f_dipole->SetParameter(2, gamma_8Li);
  f_dipole->SetParameter(3, gamma_93Nb);
  f_dipole->GetHistogram()->GetXaxis()->SetTitle("B_{0} (T)");
  f_dipole->GetHistogram()->GetYaxis()->SetTitle(
      "[1/T_{1}(B_{0})] / [1/T_{1}(0)]");
  f_dipole->Draw();

  // Generic BPP expression (3 parameters), overlaid on the same axes.
  TF1 *f_generic = new TF1("f_generic", generic, B_min, B_max, 3);
  f_generic->SetTitle("");
  f_generic->SetLineColor(kBlue);
  f_generic->SetNpx(n_points);
  f_generic->SetParameter(0, B_d);
  f_generic->SetParameter(1, nu_c);
  f_generic->SetParameter(2, gamma_8Li);
  f_generic->GetHistogram()->GetXaxis()->SetTitle("B_{0} (T)");
  f_generic->GetHistogram()->GetYaxis()->SetTitle(
      "[1/T_{1}(B_{0})] / [1/T_{1}(0)]");
  f_generic->Draw("same");

  auto legend = new TLegend(0.15, 0.15, 0.60, 0.45);
  // option "C" allows to center the header
  legend->SetHeader("Models for dipole-dipole SLR 1/T_{1}", "C");
  legend->AddEntry(f_dipole, "Eq. (8.21) in Mehring (1983)", "l");
  legend->AddEntry(f_generic, "Generic BPP expression", "l");
  legend->Draw();

  // logarithmic scale
  gPad->SetLogx();
  gPad->SetLogy();
  // tick marks on all sides of the plot
  gPad->SetTickx();
  gPad->SetTicky();
  // grid lines
  gPad->SetGridx();
  gPad->SetGridy();

  canvas->Print("dipole-dipole.pdf", "EmbedFonts");
}
|
#!/bin/bash
# conda-build recipe build script: compiles the sources with CMake and
# installs them (plus data, plugins and activation scripts) into ${PREFIX}.
set -e

# copy source and geometry files to install dir
cp -r src "${PREFIX}"
cp -rv data "${PREFIX}"

# set graphics library to link
if [ "$(uname)" == "Linux" ]; then
    export CXXFLAGS="${CXXFLAGS}"
else
    # macOS: pin the SDK / deployment target and prefer plain libraries over
    # frameworks during find_package.
    CMAKE_ARGS+=" -DCMAKE_OSX_SYSROOT=${CONDA_BUILD_SYSROOT} -DCMAKE_FIND_FRAMEWORK=LAST -DCMAKE_OSX_DEPLOYMENT_TARGET=10.15"
    # Remove -std=c++14 from build ${CXXFLAGS} and use cmake to set std flags
    CXXFLAGS=$(echo "${CXXFLAGS}" | sed -E 's@-std=c\+\+[^ ]+@@g')
    export CXXFLAGS
fi

# Out-of-source build directory.
export BLDDIR=${PWD}/build-dir
mkdir -p ${BLDDIR}
cd ${BLDDIR}
# NOTE(review): --trace makes CMake extremely verbose — confirm it is still
# wanted in production builds.
cmake ${CMAKE_ARGS} \
    -DCMAKE_INSTALL_PREFIX="${PREFIX}" \
    -DCMAKE_CXX_STANDARD=17 \
    -DCMAKE_BUILD_TYPE=RelWithDebInfo \
    -DCMakeTools_DIR="../cmaketools" \
    -DCMSMD5ROOT="${PREFIX}" \
    -DTBB_ROOT_DIR="${PREFIX}" \
    -DTBB_INCLUDE_DIR="${PREFIX}/include" \
    -DPYBIND11_INCLUDE_DIR="${PREFIX}/include" \
    -DEIGEN_INCLUDE_DIR="${PREFIX}/include" \
    -DSIGCPP_LIB_INCLUDE_DIR="${PREFIX}/lib/sigc++-2.0/include" \
    -DSIGCPP_INCLUDE_DIR="${PREFIX}/include/sigc++-2.0" \
    -DGSL_INCLUDE_DIR="${PREFIX}/include" \
    -DPython_ROOT_DIR="${PREFIX}" \
    -DPython_FIND_STRATEGY=LOCATION \
    -DPython_FIND_VIRTUALENV=STANDARD \
    --trace \
    ../src
make -j${CPU_COUNT}
make install

if [ "$(uname)" == "Darwin" ]; then
    # Rootmap files record ".so" library names; rewrite them to use the
    # platform's shared-library extension.
    cd ${BLDDIR}/lib
    perl -i -pe "s|\.so|${SHLIB_EXT}|;" *.rootmap
fi

# Refresh the plugin cache for the freshly installed plugin libraries.
cd ${PREFIX}/lib
../bin/edmPluginRefresh plugin*${SHLIB_EXT}

# Derive e.g. CMSSW_11_2_0 from the conda package version 11.2.0.
export CMSSW_VERSION="CMSSW_${PKG_VERSION//./_}"
# Create version.txt expected by cmsShow.exe
echo "${CMSSW_VERSION}" > "${PREFIX}/src/Fireworks/Core/data/version.txt"

# Add the post activate/deactivate scripts
mkdir -p "${PREFIX}/etc/conda/activate.d"
sed "s/CMSSW_XX_YY_ZZ/${CMSSW_VERSION}/g" "${RECIPE_DIR}/activate.sh" > "${PREFIX}/etc/conda/activate.d/activate-${PKG_NAME}.sh"
sed "s/CMSSW_XX_YY_ZZ/${CMSSW_VERSION}/g" "${RECIPE_DIR}/activate.csh" > "${PREFIX}/etc/conda/activate.d/activate-${PKG_NAME}.csh"
sed "s/CMSSW_XX_YY_ZZ/${CMSSW_VERSION}/g" "${RECIPE_DIR}/activate.fish" > "${PREFIX}/etc/conda/activate.d/activate-${PKG_NAME}.fish"
mkdir -p "${PREFIX}/etc/conda/deactivate.d"
cp "${RECIPE_DIR}/deactivate.sh" "${PREFIX}/etc/conda/deactivate.d/deactivate-${PKG_NAME}.sh"
cp "${RECIPE_DIR}/deactivate.csh" "${PREFIX}/etc/conda/deactivate.d/deactivate-${PKG_NAME}.csh"
cp "${RECIPE_DIR}/deactivate.fish" "${PREFIX}/etc/conda/deactivate.d/deactivate-${PKG_NAME}.fish"
|
<gh_stars>1-10
/* Helpers for validator layer */
const yaml = require('js-yaml');
const fs = require('fs');
const h = require('./helpers.js');
// NOTE(review): `env` is never referenced in this file — confirm before
// removing.
const env = process.env;

// Event definitions are loaded once, synchronously, at module load time.
exports.events = yaml.load(fs.readFileSync('./events.yml', 'utf-8'));
exports.createSnsParams = createSnsParams;
exports.handleValidatorResults = handleValidatorResults;
exports.handleVariables = handleVariables;
exports.interpolateAndParseEvent = interpolateAndParseEvent;
exports.parseValidatorFunction = parseValidatorFunction;
exports.publishToTopic = publishToTopic;
/**
 * Build the parameter object for sns.publish() from a validated event.
 * Returns an empty object when the event yields no notifications.
 */
function createSnsParams(event) {
  const notifications = _createNotificationParams(event);
  if (h.isEmpty(notifications)) {
    return {};
  }
  return {
    Message: JSON.stringify(notifications),
    MessageAttributes: _formatMessageAttributes(notifications),
    MessageStructure: 'json',
  };
}
/**
 * Publish validator output to SNS.  Empty params mean the event failed
 * validation; that case resolves with a short notice instead of publishing
 * (pre-existing contract: it is NOT treated as an error).
 *
 * The original wrapped publishToTopic's promise in `new Promise(...)`
 * (explicit-construction anti-pattern); returning the existing promise
 * directly is equivalent — resolution and rejection values are unchanged.
 */
function handleValidatorResults(sns, params, topicName) {
  if (h.isEmpty(params)) {
    return Promise.resolve('Invalid event.');
  }
  return publishToTopic(sns, params, topicName);
}
/**
 * Resolve the variables named by `event.variables` from the task data.
 * Each mapping falls back from the new task to the old task to ''.
 */
function handleVariables(event, data = {}) {
  // Coerce a possibly-null/undefined task into a safe empty object
  // (|| rather than ?? is deliberate: it also catches explicit nulls).
  const safe = (task) => task || {};
  const mappings = {
    emailTo({ newTask, oldTask }) {
      return [safe(newTask).assignee_email || safe(oldTask).assignee_email || ''];
    },
    smsTo({ newTask, oldTask }) {
      return [safe(newTask).assignee_phone || safe(oldTask).assignee_phone || ''];
    },
    taskTitle({ newTask, oldTask }) {
      return safe(newTask).title || safe(oldTask).title || '';
    },
    newTaskTitle({ newTask }) {
      return newTask.title || '';
    },
    oldTaskTitle({ oldTask }) {
      return oldTask.title || '';
    },
  };
  return _mapVars(event.variables, mappings, data);
}
// Substitute `variables` into every string of `event` (via JSON round-trip)
// and return the parsed result with the variables attached.
// NOTE(review): relies on a custom String.prototype.interpolate extension —
// presumably installed by helpers.js; confirm it is loaded first.
function interpolateAndParseEvent(event, variables = {}) {
  const eventStr = JSON.stringify(event).interpolate(variables);
  return { ...JSON.parse(eventStr), variables };
}
/**
 * Build the validator function name for an event, e.g. "TASK" or "task"
 * -> "validateTask".
 *
 * The original used `charAt(0)` without upper-casing it, so lowercase event
 * names produced "validatetask..."; behavior for already-uppercase names is
 * unchanged.  Also replaces the deprecated `substr` with `slice`.
 */
function parseValidatorFunction(eventName) {
  const head = eventName.charAt(0).toUpperCase();
  const tail = eventName.slice(1).toLowerCase();
  return `validate${head}${tail}`;
}
/**
 * Resolve the ARN for `topicName` and publish `params` to it.
 * Resolves with a success message string; rejects with a descriptive string
 * (pre-existing contract: rejection values are strings, not Errors — kept
 * for caller compatibility).
 *
 * Rewritten with async/await; the original wrapped already-existing promises
 * in `new Promise(...)` (explicit-construction anti-pattern).  Failures from
 * _getTopicArn propagate unchanged, as before.
 */
async function publishToTopic(sns, params, topicName) {
  const topicArn = await _getTopicArn(sns, topicName);
  let data;
  try {
    data = await sns.publish({ ...params, TopicArn: topicArn }).promise();
  } catch (err) {
    throw `Could not publish to events topic.
      params: ${JSON.stringify(params)}
      err: ${err}`;
  }
  return `Successfully published to events topic: ${JSON.stringify(data)}`;
}
/* Helpers of helpers */
/**
 * Build the SNS "default" notification payload from an event.
 * { emailTo: '<EMAIL>' } => { email: { to: '<EMAIL>', ... } }
 * Returns {} when either notifications or variables are missing.
 * NOTE: mutates event.notifications in place (pre-existing behavior, kept).
 */
function _createNotificationParams(event) {
  const notifications = event.notifications;
  const variables = event.variables;
  if (h.isEmpty(notifications) || h.isEmpty(variables)) return {};
  // forEach, not map: only the side effect of attaching "to" is wanted.
  Object.keys(notifications).forEach(k => {
    notifications[k]['to'] = variables[k + 'To'];
  });
  return {
    default: { notifications },
  };
}
/**
 * Evaluate the mapping function for each requested key against `data`;
 * keys without a mapping are silently skipped.
 */
function _mapVars(keys, mappings, data) {
  const resolved = {};
  for (const key of keys) {
    if (mappings[key]) {
      resolved[key] = mappings[key](data);
    }
  }
  return resolved;
}
/**
 * List every SNS topic and resolve with the ARN whose final segment matches
 * `topicName`; rejects with a descriptive string when no topic matches.
 * Errors from listTopics propagate unchanged.
 *
 * The original wrapped the existing promise chain in `new Promise(...)`
 * (anti-pattern) and shadowed the outer `topic` const with the filter
 * callback's parameter; both are fixed without changing behavior.
 */
function _getTopicArn(sns, topicName) {
  return sns
    .listTopics()
    .promise()
    .then(data => {
      const matching = data.Topics.filter(
        candidate => _getTopicNameFromARN(candidate.TopicArn) === topicName
      );
      if (matching.length) {
        return matching[0].TopicArn;
      }
      // Pre-existing contract: rejection value is a string, not an Error.
      throw `No SNS topic ${topicName} found.`;
    });
}
/**
 * Extract the topic name — the last ':'-separated segment — from an ARN,
 * e.g. 'arn:aws:sns:us-east-1:123456789012:events' -> 'events'.
 * A string without ':' is returned unchanged.
 */
function _getTopicNameFromARN(arn) {
  const segments = arn.split(':');
  return segments[segments.length - 1];
}
/**
 * Wrap each attribute value as an SNS String message attribute:
 * { k: v } -> { k: { DataType: 'String', StringValue: JSON.stringify(v) } }.
 */
function _formatMessageAttributes(messageAttributes) {
  const formatted = {};
  for (const [key, value] of Object.entries(messageAttributes)) {
    formatted[key] = {
      DataType: 'String',
      StringValue: JSON.stringify(value),
    };
  }
  return formatted;
}
|
def formatReaction(voter, userRating=None, candidate=None, candidateRating=None):
    """Format "<voter> (rating) reacted to a message from <candidate> (rating)".

    Either rating is omitted when falsy.  Two fixes versus the original:
    * ``candidate`` had no default after the defaulted ``userRating``, which
      is a SyntaxError; it now defaults to None (callers always pass it
      positionally, so the call sites are unaffected).
    * The string was built with embedded trailing spaces, so omitting the
      candidate rating left a dangling space; joining the parts avoids that
      and matches the documented expected outputs below.
    """
    parts = [voter]
    if userRating:
        parts.append(f"({userRating})")
    parts.append("reacted to a message from")
    parts.append(candidate)
    if candidateRating:
        parts.append(f"({candidateRating})")
    return " ".join(parts)

# Test cases
print(formatReaction('Alice', 5, 'Bob', 4))  # Output: Alice (5) reacted to a message from Bob (4)
print(formatReaction('Eve', None, 'Charlie', 3))  # Output: Eve reacted to a message from Charlie (3)
print(formatReaction('Mallory', 2, 'Oscar'))  # Output: Mallory (2) reacted to a message from Oscar
|
# Lazy-loading registry for the storage.k8s.io/v1beta1 DSL classes; each
# constant's file is only required on first reference.
module KubeDSL::DSL::Storage::V1beta1
  autoload :CSIDriver, 'kube-dsl/dsl/storage/v1beta1/csi_driver'
  autoload :CSIDriverList, 'kube-dsl/dsl/storage/v1beta1/csi_driver_list'
  autoload :CSIDriverSpec, 'kube-dsl/dsl/storage/v1beta1/csi_driver_spec'
  autoload :CSINode, 'kube-dsl/dsl/storage/v1beta1/csi_node'
  autoload :CSINodeDriver, 'kube-dsl/dsl/storage/v1beta1/csi_node_driver'
  autoload :CSINodeList, 'kube-dsl/dsl/storage/v1beta1/csi_node_list'
  autoload :CSINodeSpec, 'kube-dsl/dsl/storage/v1beta1/csi_node_spec'
  autoload :StorageClass, 'kube-dsl/dsl/storage/v1beta1/storage_class'
  autoload :StorageClassList, 'kube-dsl/dsl/storage/v1beta1/storage_class_list'
  autoload :VolumeAttachment, 'kube-dsl/dsl/storage/v1beta1/volume_attachment'
  autoload :VolumeAttachmentList, 'kube-dsl/dsl/storage/v1beta1/volume_attachment_list'
  autoload :VolumeAttachmentSource, 'kube-dsl/dsl/storage/v1beta1/volume_attachment_source'
  autoload :VolumeAttachmentSpec, 'kube-dsl/dsl/storage/v1beta1/volume_attachment_spec'
  autoload :VolumeAttachmentStatus, 'kube-dsl/dsl/storage/v1beta1/volume_attachment_status'
  autoload :VolumeError, 'kube-dsl/dsl/storage/v1beta1/volume_error'
  autoload :VolumeNodeResources, 'kube-dsl/dsl/storage/v1beta1/volume_node_resources'
end
|
/**
*
* @project iterlife-xspring
* @file com.iterlife.xspring.servlet.XServletOutputStream.java
* @version 1.0.0
* Copyright 2019 - 2019 for <NAME>
* https://www.iterlife.com
*
**/
package com.iterlife.zeus.spring.servlet;
import java.io.CharConversionException;
import java.io.IOException;
import java.io.OutputStream;
import java.text.MessageFormat;
import java.util.Locale;
import java.util.ResourceBundle;
/**
*
* @desc
* @author <NAME>
* @date 2019 2019年2月6日 下午12:12:38
* @tags
*/
public abstract class XServletOutputStream extends OutputStream {
private static final String LSTRING_FILE = "iterlife.xservlet.LocalStrings";
private static ResourceBundle lStrings = ResourceBundle
.getBundle(LSTRING_FILE);
/**
*
* Does nothing, because this is an abstract class.
*
*/
protected XServletOutputStream() {
}
/**
* Writes a <code>String</code> to the client, without a carriage
* return-line feed (CRLF) character at the end.
*
*
* @param s
* the <code>String</code> to send to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void print(String s) throws IOException {
if (s == null)
s = "null";
int len = s.length();
for (int i = 0; i < len; i++) {
char c = s.charAt(i);
//
// XXX NOTE: This is clearly incorrect for many strings,
// but is the only consistent approach within the current
// servlet framework. It must suffice until servlet output
// streams properly encode their output.
//
if ((c & 0xff00) != 0) { // high order byte must be zero
String errMsg = lStrings.getString("err.not_iso8859_1");
Object[] errArgs = new Object[1];
errArgs[0] = new Character(c);
errMsg = MessageFormat.format(errMsg, errArgs);
throw new CharConversionException(errMsg);
}
write(c);
}
}
/**
* Writes a <code>boolean</code> value to the client, with no carriage
* return-line feed (CRLF) character at the end.
*
* @param b
* the <code>boolean</code> value to send to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void print(boolean b) throws IOException {
String msg;
if (b) {
msg = lStrings.getString("value.true");
} else {
msg = lStrings.getString("value.false");
}
print(msg);
}
/**
* Writes a character to the client, with no carriage return-line feed
* (CRLF) at the end.
*
* @param c
* the character to send to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void print(char c) throws IOException {
print(String.valueOf(c));
}
/**
*
* Writes an int to the client, with no carriage return-line feed (CRLF) at
* the end.
*
* @param i
* the int to send to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void print(int i) throws IOException {
print(String.valueOf(i));
}
/**
*
* Writes a <code>long</code> value to the client, with no carriage
* return-line feed (CRLF) at the end.
*
* @param l
* the <code>long</code> value to send to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void print(long l) throws IOException {
print(String.valueOf(l));
}
/**
*
* Writes a <code>float</code> value to the client, with no carriage
* return-line feed (CRLF) at the end.
*
* @param f
* the <code>float</code> value to send to the client
*
* @exception IOException
* if an input or output exception occurred
*
*
*/
public void print(float f) throws IOException {
print(String.valueOf(f));
}
/**
*
* Writes a <code>double</code> value to the client, with no carriage
* return-line feed (CRLF) at the end.
*
* @param d
* the <code>double</code> value to send to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void print(double d) throws IOException {
print(String.valueOf(d));
}
/**
* Writes a carriage return-line feed (CRLF) to the client.
*
*
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void println() throws IOException {
print("\r\n");
}
/**
* Writes a <code>String</code> to the client, followed by a carriage
* return-line feed (CRLF).
*
*
* @param s
* the <code>String</code> to write to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void println(String s) throws IOException {
print(s);
println();
}
/**
*
* Writes a <code>boolean</code> value to the client, followed by a carriage
* return-line feed (CRLF).
*
*
* @param b
* the <code>boolean</code> value to write to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void println(boolean b) throws IOException {
print(b);
println();
}
/**
*
* Writes a character to the client, followed by a carriage return-line feed
* (CRLF).
*
* @param c
* the character to write to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void println(char c) throws IOException {
print(c);
println();
}
/**
*
* Writes an int to the client, followed by a carriage return-line feed
* (CRLF) character.
*
*
* @param i
* the int to write to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void println(int i) throws IOException {
print(i);
println();
}
/**
*
* Writes a <code>long</code> value to the client, followed by a carriage
* return-line feed (CRLF).
*
*
* @param l
* the <code>long</code> value to write to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void println(long l) throws IOException {
print(l);
println();
}
/**
*
* Writes a <code>float</code> value to the client, followed by a carriage
* return-line feed (CRLF).
*
* @param f
* the <code>float</code> value to write to the client
*
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void println(float f) throws IOException {
print(f);
println();
}
/**
*
* Writes a <code>double</code> value to the client, followed by a carriage
* return-line feed (CRLF).
*
*
* @param d
* the <code>double</code> value to write to the client
*
* @exception IOException
* if an input or output exception occurred
*
*/
public void println(double d) throws IOException {
print(d);
println();
}
    /**
     * Ad-hoc debugging entry point: prints the default locale and the
     * {@code lStrings} resource bundle (declared elsewhere in this file).
     *
     * NOTE(review): this looks like leftover debug code in a servlet
     * support class — consider removing it; confirm nothing invokes it.
     *
     * @param args
     *            ignored
     */
    public static void main(String[] args) {
        System.out.println(Locale.getDefault());
        System.out.println(lStrings);
    }
}
|
package org.liveontologies.protege.explanation.proof.editing;
/*-
* #%L
* This file is part of the OWL API.
* The contents of this file are subject to the LGPL License, Version 3.0.
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2014 The University of Manchester
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.AND;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.ANNOTATIONS;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.ANNOTATION_PROPERTY;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.ANTI_SYMMETRIC;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.ASYMMETRIC;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.CHAIN_CONNECT;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.CHARACTERISTICS;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.CLASS;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.CLOSE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.CLOSEBRACE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.CLOSEBRACKET;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.COMMA;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DASH;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DATATYPE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DATA_PROPERTY;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DIFFERENT_FROM;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DIFFERENT_INDIVIDUALS;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DISJOINT_CLASSES;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DISJOINT_PROPERTIES;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DISJOINT_UNION_OF;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DISJOINT_WITH;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.DOMAIN;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.EQUIVALENT_CLASSES;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.EQUIVALENT_PROPERTIES;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.EQUIVALENT_TO;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.EXACTLY;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.FACTS;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.FUNCTIONAL;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.HAS_KEY;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.IMPORT;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.INDIVIDUAL;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.INDIVIDUALS;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.INV;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.INVERSE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.INVERSE_FUNCTIONAL;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.INVERSE_OF;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.IRREFLEXIVE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.LITERAL_DOUBLE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.LITERAL_FALSE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.LITERAL_FLOAT;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.LITERAL_INTEGER;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.LITERAL_LITERAL;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.LITERAL_LIT_DATATYPE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.LITERAL_LIT_LANG;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.LITERAL_TRUE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.MAX;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.MAX_EXCLUSIVE_FACET;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.MAX_INCLUSIVE_FACET;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.MIN;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.MIN_EXCLUSIVE_FACET;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.MIN_INCLUSIVE_FACET;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.NOT;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.OBJECT_PROPERTY;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.ONLY;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.ONLYSOME;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.ONTOLOGY;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.OPEN;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.OPENBRACE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.OPENBRACKET;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.OR;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.PREFIX;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.RANGE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.REFLEXIVE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.RULE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SAME_AS;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SAME_INDIVIDUAL;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SELF;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SOME;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SUBCLASS_OF;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SUB_PROPERTY_CHAIN;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SUB_PROPERTY_OF;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SUPERCLASS_OF;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SUPER_PROPERTY_OF;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.SYMMETRIC;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.THAT;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.TRANSITIVE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.TYPE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.TYPES;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.VALUE;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.VALUE_PARTITION;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.parse;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax.values;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntaxTokenizer.EOF;
import static org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntaxTokenizer.eof;
import static org.semanticweb.owlapi.util.OWLAPIPreconditions.verifyNotNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.inject.Provider;
import org.semanticweb.owlapi.expression.OWLEntityChecker;
import org.semanticweb.owlapi.expression.OWLOntologyChecker;
import org.semanticweb.owlapi.formats.ManchesterSyntaxDocumentFormat;
import org.semanticweb.owlapi.io.XMLUtils;
import org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntax;
import org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntaxOntologyHeader;
import org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntaxTokenizer;
import org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntaxTokenizer.Token;
import org.semanticweb.owlapi.manchestersyntax.renderer.ManchesterOWLSyntaxRenderer;
import org.semanticweb.owlapi.manchestersyntax.renderer.ParserException;
import org.semanticweb.owlapi.model.AddAxiom;
import org.semanticweb.owlapi.model.AddImport;
import org.semanticweb.owlapi.model.AddOntologyAnnotation;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAnnotationProperty;
import org.semanticweb.owlapi.model.OWLAnnotationSubject;
import org.semanticweb.owlapi.model.OWLAnnotationValue;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAxiom;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLDataProperty;
import org.semanticweb.owlapi.model.OWLDataPropertyCharacteristicAxiom;
import org.semanticweb.owlapi.model.OWLDataPropertyExpression;
import org.semanticweb.owlapi.model.OWLDataRange;
import org.semanticweb.owlapi.model.OWLDatatype;
import org.semanticweb.owlapi.model.OWLDeclarationAxiom;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLEntityVisitor;
import org.semanticweb.owlapi.model.OWLFacetRestriction;
import org.semanticweb.owlapi.model.OWLImportsDeclaration;
import org.semanticweb.owlapi.model.OWLIndividual;
import org.semanticweb.owlapi.model.OWLLiteral;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLObjectPropertyCharacteristicAxiom;
import org.semanticweb.owlapi.model.OWLObjectPropertyExpression;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyChange;
import org.semanticweb.owlapi.model.OWLOntologyID;
import org.semanticweb.owlapi.model.OWLOntologyLoaderConfiguration;
import org.semanticweb.owlapi.model.OWLPropertyAssertionAxiom;
import org.semanticweb.owlapi.model.OWLPropertyExpression;
import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
import org.semanticweb.owlapi.model.SWRLAtom;
import org.semanticweb.owlapi.model.SWRLBuiltInAtom;
import org.semanticweb.owlapi.model.SWRLDArgument;
import org.semanticweb.owlapi.model.SWRLDifferentIndividualsAtom;
import org.semanticweb.owlapi.model.SWRLIArgument;
import org.semanticweb.owlapi.model.SWRLIndividualArgument;
import org.semanticweb.owlapi.model.SWRLLiteralArgument;
import org.semanticweb.owlapi.model.SWRLRule;
import org.semanticweb.owlapi.model.SWRLSameIndividualAtom;
import org.semanticweb.owlapi.model.SWRLVariable;
import org.semanticweb.owlapi.model.SetOntologyID;
import org.semanticweb.owlapi.util.CollectionFactory;
import org.semanticweb.owlapi.util.DefaultPrefixManager;
import org.semanticweb.owlapi.util.NamespaceUtil;
import org.semanticweb.owlapi.util.OntologyAxiomPair;
import org.semanticweb.owlapi.util.RemappingIndividualProvider;
import org.semanticweb.owlapi.util.mansyntax.ManchesterOWLSyntaxParser;
import org.semanticweb.owlapi.vocab.DublinCoreVocabulary;
import org.semanticweb.owlapi.vocab.Namespaces;
import org.semanticweb.owlapi.vocab.OWL2Datatype;
import org.semanticweb.owlapi.vocab.OWLFacet;
import org.semanticweb.owlapi.vocab.OWLRDFVocabulary;
import org.semanticweb.owlapi.vocab.SWRLBuiltInsVocabulary;
import org.semanticweb.owlapi.vocab.XSDVocabulary;
import com.google.common.base.Optional;
/**
* A parser for the Manchester OWL Syntax. All properties must be defined before
* they are used. For example, consider the restriction hasPart some Leg. The
* parser must know in advance whether or not hasPart is an object property or a
* data property so that Leg gets parsed correctly. In a tool, such as an
 * editor, it is expected that hasPart will already exist as either a data
* property or an object property. If a complete ontology is being parsed, it is
* expected that hasPart will have been defined at the top of the file before it
* is used in any class expressions or property assertions (e.g. ObjectProperty:
* hasPart)
*
* Patched to parse the output of {@link ManchesterOWLSyntaxRenderer}: the
* {@link ManchesterOWLSyntax} vocabulary for which rendering is different from
* the keyword is now matched using
* {@link #matches(ManchesterOWLSyntax, String)} instead of
* {@link ManchesterOWLSyntax#matches(String)} or
* {@link ManchesterOWLSyntax#matchesEitherForm(String)}
*
* @author <NAME>, The University Of Manchester, Bio-Health
* Informatics Group
*
* @author <NAME>
* @since 2.2.0
*/
public class ManchesterOWLSyntaxParserPatched
implements ManchesterOWLSyntaxParser {
// This parser was built by hand! After struggling with terrible
	// error messages produced by ANTLR (or JavaCC) I decided to construct
// this parser by hand. The error messages that this parser generates
// are specific to the Manchester OWL Syntax and are such that it should
// be easy to use this parser in tools such as editors.
    // Supplies the loader configuration lazily; may be swapped at runtime.
    @Nonnull
    private Provider<OWLOntologyLoaderConfiguration> configProvider;
    // Cached configuration; resolved from configProvider on first access.
    @Nonnull
    private Optional<OWLOntologyLoaderConfiguration> config = Optional.absent();
    protected OWLDataFactory dataFactory;
    // Token stream produced by the tokenizer, and a cursor into it.
    private List<Token> tokens;
    private int tokenIndex;
    // Pluggable resolver for entity names; may be replaced via setter.
    private OWLEntityChecker owlEntityChecker;
    // Default ontology checker resolves no ontologies; callers may replace it.
    private OWLOntologyChecker owlOntologyChecker = new OWLOntologyChecker() {
        @Nullable
        @Override
        public OWLOntology getOntology(String name) {
            return null;
        }
    };
    // Names registered while parsing; consulted in addition to the entity
    // checker when classifying tokens.
    @Nonnull
    protected final Set<String> classNames = new HashSet<>();
    @Nonnull
    protected final Set<String> objectPropertyNames = new HashSet<>();
    @Nonnull
    protected final Set<String> dataPropertyNames = new HashSet<>();
    @Nonnull
    protected final Set<String> individualNames = new HashSet<>();
    @Nonnull
    protected final Set<String> dataTypeNames = new HashSet<>();
    @Nonnull
    protected final Set<String> annotationPropertyNames = new HashSet<>();
    // Maps SWRL built-in short forms and quoted IRIs to vocabulary entries.
    @Nonnull
    private final Map<String, SWRLBuiltInsVocabulary> ruleBuiltIns = new HashMap<>();
    @Nonnull
    protected DefaultPrefixManager pm = new DefaultPrefixManager();
    // Keywords that would be legal at the current parse position; used to
    // build helpful error messages.
    @Nonnull
    protected final Set<ManchesterOWLSyntax> potentialKeywords = new HashSet<>();
    private OWLOntology defaultOntology;
    private final boolean allowEmptyFrameSections = false;
    // Section parsers for DataProperty: frames (populated in
    // initialiseDataPropertyFrameSections below).
    private final Map<ManchesterOWLSyntax, AnnotatedListItemParser<OWLDataProperty, ?>> dataPropertyFrameSections = new EnumMap<>(
            ManchesterOWLSyntax.class);
    // Provides stable anonymous individuals for "_:"-prefixed names.
    protected RemappingIndividualProvider anonProvider;
    /**
     * Creates a parser with the well-known prefixes (rdf, rdfs, owl, dc)
     * registered, all frame-section parsers initialised, and built-in
     * datatype, annotation-property and SWRL built-in names pre-registered.
     *
     * @param configurationProvider
     *            configuration provider
     * @param dataFactory
     *            dataFactory
     */
    @Inject
    public ManchesterOWLSyntaxParserPatched(
            @Nonnull Provider<OWLOntologyLoaderConfiguration> configurationProvider,
            @Nonnull OWLDataFactory dataFactory) {
        configProvider = configurationProvider;
        this.dataFactory = dataFactory;
        anonProvider = new RemappingIndividualProvider(this.dataFactory);
        // Prefixes usable without an explicit Prefix: section in the input.
        pm.setPrefix("rdf:", Namespaces.RDF.toString());
        pm.setPrefix("rdfs:", Namespaces.RDFS.toString());
        pm.setPrefix("owl:", Namespaces.OWL.toString());
        pm.setPrefix("dc:", DublinCoreVocabulary.NAME_SPACE);
        NamespaceUtil u = new NamespaceUtil();
        initialiseClassFrameSections();
        initialiseObjectPropertyFrameSections();
        initialiseDataPropertyFrameSections();
        initialiseAnnotationPropertyFrameSections();
        initialiseIndividualFrameSections();
        // Register each XSD datatype under full IRI, quoted IRI and
        // prefixed spellings so any form is recognised.
        for (XSDVocabulary v : XSDVocabulary.values()) {
            dataTypeNames.add(v.getIRI().toString());
            dataTypeNames.add(v.getIRI().toQuotedString());
            dataTypeNames.add(v.getPrefixedName());
        }
        dataTypeNames.add(OWLRDFVocabulary.RDFS_LITERAL.getPrefixedName());
        dataTypeNames.add(OWLRDFVocabulary.RDF_XML_LITERAL.getShortForm());
        dataTypeNames.add(OWLRDFVocabulary.RDF_XML_LITERAL.getPrefixedName());
        // Built-in annotation properties are registered under their
        // prefix:fragment rendering.
        for (IRI iri : OWLRDFVocabulary.BUILT_IN_ANNOTATION_PROPERTY_IRIS) {
            String string = iri.toString();
            String ns = XMLUtils.getNCNamePrefix(string);
            String fragment = XMLUtils.getNCNameSuffix(string);
            annotationPropertyNames.add(
                    u.getPrefix(ns) + ':' + (fragment != null ? fragment : ""));
        }
        owlEntityChecker = new DefaultEntityChecker();
        // SWRL built-ins match both by short form and by quoted IRI.
        for (SWRLBuiltInsVocabulary v : SWRLBuiltInsVocabulary.values()) {
            ruleBuiltIns.put(v.getShortForm(), v);
            ruleBuiltIns.put(v.getIRI().toQuotedString(), v);
        }
    }
@Override
@Nonnull
public OWLOntologyLoaderConfiguration getOntologyLoaderConfiguration() {
if (config.isPresent()) {
return config.get();
}
config = Optional.of(configProvider.get());
return config.get();
}
    @Override
    public void setOntologyLoaderConfigurationProvider(
            Provider<OWLOntologyLoaderConfiguration> provider) {
        // Replaces the provider; an already-cached configuration is kept.
        configProvider = provider;
    }
    @Override
    public void setOntologyLoaderConfiguration(
            OWLOntologyLoaderConfiguration config) {
        // fromNullable: passing null clears the cache, so the provider is
        // consulted again on next access.
        this.config = Optional.fromNullable(config);
    }
    @Override
    public void setStringToParse(String s) {
        // Tokenize the entire input up front and reset the cursor.
        tokens = new ArrayList<>();
        tokens.addAll(getTokenizer(s).tokenize());
        tokenIndex = 0;
    }
    /** @return a tokenizer over the given input string */
    protected static ManchesterOWLSyntaxTokenizer getTokenizer(String s) {
        return new ManchesterOWLSyntaxTokenizer(s);
    }
    // Section parsers for Class: frames, keyed by the section keyword.
    private final Map<ManchesterOWLSyntax, AnnotatedListItemParser<OWLClass, ?>> classFrameSections = new EnumMap<>(
            ManchesterOWLSyntax.class);
    // Registers a parser for each section a Class: frame may contain.
    private void initialiseClassFrameSections() {
        initialiseSection(new EntityAnnotationsListItemParser<OWLClass>(),
                classFrameSections);
        initialiseSection(new ClassSubClassOfListItemParser(),
                classFrameSections);
        initialiseSection(new ClassEquivalentToListItemParser(),
                classFrameSections);
        initialiseSection(new ClassDisjointWithListItemParser(),
                classFrameSections);
        initialiseSection(new ClassHasKeyListItemParser(), classFrameSections);
        initialiseSection(new ClassDisjointUnionOfListItemParser(),
                classFrameSections);
        // Extensions
        initialiseSection(new ClassSuperClassOfListItemParser(),
                classFrameSections);
        initialiseSection(new ClassDisjointClassesListItemParser(),
                classFrameSections);
        initialiseSection(new ClassIndividualsListItemParser(),
                classFrameSections);
    }
    // Section parsers for ObjectProperty: frames.
    private final Map<ManchesterOWLSyntax, AnnotatedListItemParser<OWLObjectProperty, ?>> objectPropertyFrameSections = new EnumMap<>(
            ManchesterOWLSyntax.class);
    // Registers a parser for each section an ObjectProperty: frame may
    // contain.
    private void initialiseObjectPropertyFrameSections() {
        initialiseSection(
                new EntityAnnotationsListItemParser<OWLObjectProperty>(),
                objectPropertyFrameSections);
        initialiseSection(new ObjectPropertySubPropertyOfListItemParser(),
                objectPropertyFrameSections);
        initialiseSection(new ObjectPropertyEquivalentToListItemParser(),
                objectPropertyFrameSections);
        initialiseSection(new ObjectPropertyDisjointWithListItemParser(),
                objectPropertyFrameSections);
        initialiseSection(new ObjectPropertyDomainListItemParser(),
                objectPropertyFrameSections);
        initialiseSection(new ObjectPropertyRangeListItemParser(),
                objectPropertyFrameSections);
        initialiseSection(new ObjectPropertyInverseOfListItemParser(),
                objectPropertyFrameSections);
        initialiseSection(new ObjectPropertyCharacteristicsItemParser(),
                objectPropertyFrameSections);
        initialiseSection(new ObjectPropertySubPropertyChainListItemParser(),
                objectPropertyFrameSections);
        // Extensions
        initialiseSection(new ObjectPropertySuperPropertyOfListItemParser(),
                objectPropertyFrameSections);
    }
    // Registers a parser for each section a DataProperty: frame may contain
    // (the backing map is declared with the other fields above).
    private void initialiseDataPropertyFrameSections() {
        initialiseSection(new DataPropertySubPropertyOfListItemParser(),
                dataPropertyFrameSections);
        initialiseSection(new DataPropertyEquivalentToListItemParser(),
                dataPropertyFrameSections);
        initialiseSection(new DataPropertyDisjointWithListItemParser(),
                dataPropertyFrameSections);
        initialiseSection(new DataPropertyDomainListItemParser(),
                dataPropertyFrameSections);
        initialiseSection(new DataPropertyRangeListItemParser(),
                dataPropertyFrameSections);
        initialiseSection(new DataPropertyCharacteristicsItemParser(),
                dataPropertyFrameSections);
        initialiseSection(
                new EntityAnnotationsListItemParser<OWLDataProperty>(),
                dataPropertyFrameSections);
    }
    // Section parsers for AnnotationProperty: frames.
    private final Map<ManchesterOWLSyntax, AnnotatedListItemParser<OWLAnnotationProperty, ?>> annotationPropertyFrameSections = new EnumMap<>(
            ManchesterOWLSyntax.class);
    // Registers a parser for each section an AnnotationProperty: frame may
    // contain.
    private void initialiseAnnotationPropertyFrameSections() {
        initialiseSection(new AnnotationPropertySubPropertyOfListItemParser(),
                annotationPropertyFrameSections);
        initialiseSection(new AnnotationPropertyDomainListItemParser(),
                annotationPropertyFrameSections);
        initialiseSection(new AnnotationPropertyRangeListItemParser(),
                annotationPropertyFrameSections);
        initialiseSection(
                new EntityAnnotationsListItemParser<OWLAnnotationProperty>(),
                annotationPropertyFrameSections);
    }
    // Section parsers for Individual: frames.
    private final Map<ManchesterOWLSyntax, AnnotatedListItemParser<OWLIndividual, ?>> individualFrameSections = new EnumMap<>(
            ManchesterOWLSyntax.class);
    // Registers a parser for each section an Individual: frame may contain.
    private void initialiseIndividualFrameSections() {
        initialiseSection(new IndividualAnnotationItemParser(),
                individualFrameSections);
        initialiseSection(new IndividualTypesItemParser(),
                individualFrameSections);
        initialiseSection(new IndividualFactsItemParser(),
                individualFrameSections);
        initialiseSection(new IndividualSameAsItemParser(),
                individualFrameSections);
        initialiseSection(new IndividualDifferentFromItemParser(),
                individualFrameSections);
        // Extensions
        initialiseSection(new IndividualDifferentIndividualsItemParser(),
                individualFrameSections);
    }
    @Override
    public void setOWLEntityChecker(OWLEntityChecker owlEntityChecker) {
        this.owlEntityChecker = owlEntityChecker;
    }
    // The is*Name predicates below consult the locally registered names
    // first, then fall back to the pluggable entity checker (if set).
    private boolean isOntologyName(@Nonnull String name) {
        return owlOntologyChecker.getOntology(name) != null;
    }
    private boolean isClassName(@Nonnull String name) {
        return classNames.contains(name) || owlEntityChecker != null
                && owlEntityChecker.getOWLClass(name) != null;
    }
    // Resolves an ontology by name via the ontology checker; may be null.
    private OWLOntology getOntology(String name) {
        return owlOntologyChecker.getOntology(name);
    }
    @Override
    public void setOWLOntologyChecker(
            @Nonnull OWLOntologyChecker owlOntologyChecker) {
        this.owlOntologyChecker = owlOntologyChecker;
    }
    private boolean isObjectPropertyName(@Nonnull String name) {
        return objectPropertyNames.contains(name) || owlEntityChecker != null
                && owlEntityChecker.getOWLObjectProperty(name) != null;
    }
    private boolean isAnnotationPropertyName(@Nonnull String name) {
        return annotationPropertyNames.contains(name)
                || owlEntityChecker != null && owlEntityChecker
                        .getOWLAnnotationProperty(name) != null;
    }
    private boolean isDataPropertyName(@Nonnull String name) {
        return dataPropertyNames.contains(name) || owlEntityChecker != null
                && owlEntityChecker.getOWLDataProperty(name) != null;
    }
    private boolean isIndividualName(@Nonnull String name) {
        return individualNames.contains(name) || owlEntityChecker != null
                && owlEntityChecker.getOWLIndividual(name) != null;
    }
    private boolean isDatatypeName(@Nonnull String name) {
        return dataTypeNames.contains(name) || owlEntityChecker != null
                && owlEntityChecker.getOWLDatatype(name) != null;
    }
    // True when the token names a SWRL built-in (short form or quoted IRI).
    private boolean isSWRLBuiltin(@Nonnull String name) {
        return ruleBuiltIns.containsKey(name);
    }
    // The getOWL* resolvers below ask the entity checker first, then fall
    // back to entities registered by name during parsing; if neither knows
    // the name, a parse error describing the expected category is thrown.
    @Nonnull
    private OWLClass getOWLClass(@Nonnull String name) {
        OWLClass cls = owlEntityChecker.getOWLClass(name);
        if (cls == null && classNames.contains(name)) {
            cls = dataFactory.getOWLClass(getIRI(name));
        }
        if (cls == null) {
            throw new ExceptionBuilder().withKeyword(potentialKeywords)
                    .withClass().build();
        }
        return cls;
    }
    @Nonnull
    private OWLObjectProperty getOWLObjectProperty(@Nonnull String name) {
        OWLObjectProperty prop = owlEntityChecker.getOWLObjectProperty(name);
        if (prop == null && objectPropertyNames.contains(name)) {
            prop = dataFactory.getOWLObjectProperty(getIRI(name));
        }
        if (prop == null) {
            throw new ExceptionBuilder().withObject().build();
        }
        return prop;
    }
    @Nonnull
    private OWLIndividual getOWLIndividual(@Nonnull String name) {
        // "_:"-prefixed names denote anonymous individuals.
        if (name.startsWith("_:")) {
            return anonProvider.getOWLAnonymousIndividual(name);
        }
        return getOWLNamedIndividual(name);
    }
    @Nonnull
    private OWLNamedIndividual getOWLNamedIndividual(@Nonnull String name) {
        OWLNamedIndividual ind = owlEntityChecker.getOWLIndividual(name);
        if (ind == null && individualNames.contains(name)) {
            ind = dataFactory.getOWLNamedIndividual(getIRI(name));
        }
        if (ind == null) {
            throw new ExceptionBuilder().withInd().build();
        }
        return ind;
    }
    @Nonnull
    private OWLDataProperty getOWLDataProperty(@Nonnull String name) {
        OWLDataProperty prop = owlEntityChecker.getOWLDataProperty(name);
        if (prop == null && dataPropertyNames.contains(name)) {
            prop = dataFactory.getOWLDataProperty(getIRI(name));
        }
        if (prop == null) {
            throw new ExceptionBuilder().withData().build();
        }
        return prop;
    }
    @Nonnull
    private OWLDatatype getOWLDatatype(@Nonnull String name) {
        OWLDatatype dt = owlEntityChecker.getOWLDatatype(name);
        if (dt == null && dataTypeNames.contains(name)) {
            dt = dataFactory.getOWLDatatype(getIRI(name));
        }
        if (dt == null) {
            throw new ExceptionBuilder().withDt().build();
        }
        return dt;
    }
    @Nonnull
    private OWLAnnotationProperty getOWLAnnotationProperty(
            @Nonnull String name) {
        OWLAnnotationProperty prop = owlEntityChecker
                .getOWLAnnotationProperty(name);
        if (prop == null && annotationPropertyNames.contains(name)) {
            prop = dataFactory.getOWLAnnotationProperty(getIRI(name));
        }
        if (prop == null) {
            throw new ExceptionBuilder().withAnn().build();
        }
        return prop;
    }
    // Returns the most recently consumed token, or the first token when
    // nothing has been consumed yet; used for error-reporting positions.
    protected Token getLastToken() {
        if (tokenIndex - 1 > -1) {
            return tokens.get(tokenIndex - 1);
        } else {
            return tokens.get(0);
        }
    }
    // Returns the current token's text without advancing the cursor.
    @Nonnull
    private String peekToken() {
        return getToken().getToken();
    }
    // Returns the current token's text and advances the cursor; the cursor
    // never moves past the end of the token list.
    @Nonnull
    private String consumeToken() {
        String token = getToken().getToken();
        if (tokenIndex < tokens.size()) {
            tokenIndex++;
        }
        return token;
    }
    // Consumes one token; fails unless it equals the expected text exactly.
    private void consumeToken(String expected) {
        String tok = consumeToken();
        if (!tok.equals(expected)) {
            throw new ExceptionBuilder().withKeyword(expected).build();
        }
    }
    // Consumes one token; fails unless it matches the expected keyword
    // (using the patched, renderer-tolerant matching below).
    private void consumeToken(ManchesterOWLSyntax expected) {
        String tok = consumeToken();
        if (!matches(expected, tok)) {
            throw new ExceptionBuilder().withKeyword(expected).build();
        }
    }
    // Current token, or the last token when the cursor is at the end.
    private Token getToken() {
        return tokens
                .get(tokenIndex < tokens.size() ? tokenIndex : tokenIndex - 1);
    }
    /**
     * Patched keyword matching: a token matches a keyword when it equals
     * the keyword's rendering, or the rendering followed by ':',
     * case-insensitively. This allows the parser to accept the output of
     * {@link ManchesterOWLSyntaxRenderer}, whose rendering may differ from
     * the raw keyword form.
     *
     * @param keyword
     *            the vocabulary keyword to test against
     * @param string
     *            the token text
     * @return true if the token matches the keyword
     */
    public boolean matches(ManchesterOWLSyntax keyword, String string) {
        String rendering = keyword.toString();
        return rendering.equalsIgnoreCase(string)
                || (rendering + ":").equalsIgnoreCase(string);
    }
    /* Parser */
    /**
     * Parses the previously supplied string as a single class expression;
     * fails unless the whole input is consumed.
     *
     * @return the parsed class expression
     */
    @Nonnull
    @Override
    public OWLClassExpression parseClassExpression() {
        OWLClassExpression desc = parseUnion();
        // Anything left over after the expression is a syntax error.
        if (!eof(consumeToken())) {
            throw new ExceptionBuilder().withKeyword(EOF).build();
        }
        return desc;
    }
    @Override
    public OWLClassExpression parseClassExpression(String s) {
        setStringToParse(s);
        return parseClassExpression();
    }
    // Parses "A and B and ..." (also accepting "that" as a conjunction
    // connective). A single operand is returned as-is rather than wrapped
    // in an ObjectIntersectionOf.
    protected OWLClassExpression parseIntersection() {
        Set<OWLClassExpression> ops = new HashSet<>();
        // Seed with the AND keyword so the loop body runs at least once.
        String kw = AND.keyword();
        while (matches(AND, kw)) {
            potentialKeywords.remove(AND);
            ops.add(parseNonNaryClassExpression());
            potentialKeywords.add(AND);
            kw = peekToken();
            if (matches(AND, kw)) {
                kw = consumeToken();
            } else if (matches(THAT, kw)) {
                // "that" is syntactic sugar for "and".
                consumeToken();
                kw = AND.keyword();
            }
        }
        if (ops.size() == 1) {
            return ops.iterator().next();
        } else {
            return dataFactory.getOWLObjectIntersectionOf(ops);
        }
    }
    // Parses "A or B or ...". A single operand is returned as-is rather
    // than wrapped in an ObjectUnionOf.
    @Nonnull
    protected OWLClassExpression parseUnion() {
        Set<OWLClassExpression> ops = new HashSet<>();
        // Seed with the OR keyword so the loop body runs at least once.
        String kw = OR.keyword();
        while (matches(OR, kw)) {
            potentialKeywords.remove(OR);
            ops.add(parseIntersection());
            potentialKeywords.add(OR);
            kw = peekToken();
            if (matches(OR, kw)) {
                kw = consumeToken();
            }
        }
        if (ops.size() == 1) {
            return ops.iterator().next();
        } else {
            return dataFactory.getOWLObjectUnionOf(ops);
        }
    }
    /**
     * Parses an object property expression: either a named object property
     * or an inverse expression "inverse p" / "inverse (p)".
     *
     * @param allowUndeclared
     *            if true, a name not known to be an object property is
     *            still accepted (used where the frame declares it)
     * @return the parsed object property expression
     */
    @Nonnull
    protected OWLObjectPropertyExpression parseObjectPropertyExpression(
            boolean allowUndeclared) {
        String tok = consumeToken();
        if (matches(INVERSE, tok)) {
            // The operand of "inverse" may optionally be parenthesised.
            String open = peekToken();
            boolean brackets = false;
            if (matches(OPEN, open)) {
                consumeToken();
                brackets = true;
            }
            OWLObjectPropertyExpression prop = parseObjectPropertyExpression(
                    false);
            if (brackets) {
                String close = consumeToken();
                if (!matches(CLOSE, close)) {
                    throw new ExceptionBuilder().withKeyword(CLOSE).build();
                }
            }
            return dataFactory.getOWLObjectInverseOf(prop);
        } else {
            if (!allowUndeclared && !isObjectPropertyName(tok)) {
                throw new ExceptionBuilder().withObject().build();
            }
            return getOWLObjectProperty(tok);
        }
    }
private OWLPropertyExpression parsePropertyExpression() {
String tok = peekToken();
if (isObjectPropertyName(tok)) {
return parseObjectPropertyExpression(false);
} else if (matches(INVERSE, tok)) {
return parseObjectPropertyExpression(false);
} else if (isDataPropertyName(tok)) {
return parseDataProperty();
} else {
consumeToken();
throw new ExceptionBuilder().withObject().withData().build();
}
}
    /**
     * Parses all class expressions except ObjectIntersectionOf and
     * ObjectUnionOf.
     *
     * @return the class expression which was parsed
     * @throws ParserException
     *             if a non-nary class expression could not be parsed
     */
    private OWLClassExpression parseNonNaryClassExpression() {
        String tok = peekToken();
        if (matches(NOT, tok)) {
            // Complement: "not C".
            consumeToken();
            OWLClassExpression complemented = parseNestedClassExpression(false);
            return dataFactory.getOWLObjectComplementOf(complemented);
        } else if (isObjectPropertyName(tok) || matches(INVERSE, tok)) {
            return parseObjectRestriction();
        } else if (isDataPropertyName(tok)) {
            // Data restriction
            return parseDataRestriction();
        } else if (matches(OPENBRACE, tok)) {
            // Enumeration: "{a, b, ...}".
            return parseObjectOneOf();
        } else if (matches(OPEN, tok)) {
            // Parenthesised sub-expression.
            return parseNestedClassExpression(false);
        } else if (isClassName(tok)) {
            consumeToken();
            return getOWLClass(tok);
        }
        // Add option for strict class name checking
        else {
            consumeToken();
            throw new ExceptionBuilder().withClass().withObject().withData()
                    .withKeyword(OPEN, OPENBRACE, NOT, INVERSE).build();
        }
    }
    /**
     * Parses an object property restriction: {@code some}, {@code only},
     * {@code value}, cardinality restrictions, {@code Self} and the
     * non-standard {@code onlysome} macro.
     *
     * @return the parsed class expression
     */
    private OWLClassExpression parseObjectRestriction() {
        OWLObjectPropertyExpression prop = parseObjectPropertyExpression(false);
        String kw = consumeToken();
        if (matches(SOME, kw)) {
            String possSelfToken = peekToken();
            if (matches(SELF, possSelfToken)) {
                // "p some Self" is accepted as a spelling of HasSelf.
                consumeToken();
                return dataFactory.getOWLObjectHasSelf(prop);
            } else {
                OWLClassExpression filler = null;
                try {
                    filler = parseNestedClassExpression(false);
                } catch (ParserException e) {
                    // Enrich the error: "Self" would also have been legal.
                    e.getExpectedKeywords().add(SELF.keyword());
                    throw e;
                }
                return dataFactory.getOWLObjectSomeValuesFrom(prop, filler);
            }
        } else if (matches(ONLY, kw)) {
            OWLClassExpression filler = parseNestedClassExpression(false);
            return dataFactory.getOWLObjectAllValuesFrom(prop, filler);
        } else if (matches(VALUE, kw)) {
            String indName = consumeToken();
            if (!isIndividualName(indName)) {
                throw new ExceptionBuilder().withInd().build();
            }
            return dataFactory.getOWLObjectHasValue(prop,
                    getOWLIndividual(indName));
        } else if (matches(MIN, kw)) {
            // For cardinalities the filler is optional (lookahead check);
            // a missing filler defaults to owl:Thing downstream.
            int card = parseInteger();
            OWLClassExpression filler = parseNestedClassExpression(true);
            return dataFactory.getOWLObjectMinCardinality(card, prop, filler);
        } else if (matches(MAX, kw)) {
            int card = parseInteger();
            OWLClassExpression filler = parseNestedClassExpression(true);
            return dataFactory.getOWLObjectMaxCardinality(card, prop, filler);
        } else if (matches(EXACTLY, kw)) {
            int card = parseInteger();
            OWLClassExpression filler = parseNestedClassExpression(true);
            return dataFactory.getOWLObjectExactCardinality(card, prop, filler);
        } else if (matches(ONLYSOME, kw)) {
            // "p onlysome [C1, C2]" expands to
            // (p some C1) and (p some C2) and (p only (C1 or C2)).
            String tok = peekToken();
            Set<OWLClassExpression> descs = new HashSet<>();
            if (!matches(OPENBRACKET, tok)) {
                descs.add(parseUnion());
            } else {
                descs.addAll(
                        parseClassExpressionList(OPENBRACKET, CLOSEBRACKET));
            }
            Set<OWLClassExpression> ops = new HashSet<>();
            for (OWLClassExpression desc : descs) {
                assert desc != null;
                ops.add(dataFactory.getOWLObjectSomeValuesFrom(prop, desc));
            }
            OWLClassExpression filler;
            if (descs.size() == 1) {
                filler = descs.iterator().next();
            } else {
                filler = dataFactory.getOWLObjectUnionOf(descs);
            }
            assert filler != null;
            ops.add(dataFactory.getOWLObjectAllValuesFrom(prop, filler));
            return dataFactory.getOWLObjectIntersectionOf(ops);
        } else if (matches(SELF, kw)) {
            return dataFactory.getOWLObjectHasSelf(prop);
        } else {
            // Error!
            throw new ExceptionBuilder()
                    .withKeyword(SOME, ONLY, VALUE, MIN, MAX, EXACTLY, SELF)
                    .build();
        }
    }
    /**
     * Parses a data property restriction: {@code some}, {@code only},
     * {@code value} and the cardinality restrictions.
     *
     * @return the parsed class expression
     */
    private OWLClassExpression parseDataRestriction() {
        OWLDataPropertyExpression prop = parseDataProperty();
        String kw = consumeToken();
        if (matches(SOME, kw)) {
            OWLDataRange rng = parseDataRange();
            return dataFactory.getOWLDataSomeValuesFrom(prop, rng);
        } else if (matches(ONLY, kw)) {
            OWLDataRange rng = parseDataRange();
            return dataFactory.getOWLDataAllValuesFrom(prop, rng);
        } else if (matches(VALUE, kw)) {
            // No datatype known from context, so infer it from the literal.
            OWLLiteral con = parseLiteral(null);
            return dataFactory.getOWLDataHasValue(prop, con);
        } else if (matches(MIN, kw)) {
            int card = parseInteger();
            OWLDataRange rng = parseDataRange();
            return dataFactory.getOWLDataMinCardinality(card, prop, rng);
        } else if (matches(EXACTLY, kw)) {
            int card = parseInteger();
            OWLDataRange rng = parseDataRange();
            return dataFactory.getOWLDataExactCardinality(card, prop, rng);
        } else if (matches(MAX, kw)) {
            int card = parseInteger();
            OWLDataRange rng = parseDataRange();
            return dataFactory.getOWLDataMaxCardinality(card, prop, rng);
        }
        throw new ExceptionBuilder()
                .withKeyword(SOME, ONLY, VALUE, MIN, EXACTLY, MAX).build();
    }
    /**
     * Parses a facet name. Inclusive bounds are written as two tokens
     * (e.g. "&lt;" followed by "="), so a second token is consumed for them;
     * exclusive bounds and symbolic names are a single token.
     *
     * @return the parsed facet, or null if the token does not name a facet
     *         (callers must handle the null case)
     */
    private OWLFacet parseFacet() {
        String facet = consumeToken();
        if (MIN_INCLUSIVE_FACET.matches(facet, peekToken())) {
            // Two-token facet: consume the second half as well.
            consumeToken();
            return OWLFacet.MIN_INCLUSIVE;
        }
        if (MAX_INCLUSIVE_FACET.matches(facet, peekToken())) {
            consumeToken();
            return OWLFacet.MAX_INCLUSIVE;
        }
        if (matches(MIN_EXCLUSIVE_FACET, facet)) {
            return OWLFacet.MIN_EXCLUSIVE;
        }
        if (matches(MAX_EXCLUSIVE_FACET, facet)) {
            return OWLFacet.MAX_EXCLUSIVE;
        }
        // Fall back to a symbolic facet name, e.g. "length"; may be null.
        return OWLFacet.getFacetBySymbolicName(facet);
    }
@Nonnull
private OWLDatatype parseDatatype() {
String name = consumeToken();
return getOWLDatatype(name);
}
    /**
     * Parses a data range. Intersection is the lowest-precedence data range
     * operator, so parsing starts there.
     *
     * @return the parsed data range
     */
    @Override
    public OWLDataRange parseDataRange() {
        return parseDataIntersectionOf();
    }
    /**
     * Parses one or more data ranges separated by "and". A single operand is
     * returned as-is; multiple operands yield a DataIntersectionOf.
     *
     * @return the parsed data range
     */
    @Nonnull
    private OWLDataRange parseDataIntersectionOf() {
        // Seed the separator so the loop body runs at least once.
        String sep = AND.keyword();
        Set<OWLDataRange> ranges = new HashSet<>();
        while (matches(AND, sep)) {
            ranges.add(parseDataUnionOf());
            sep = peekToken();
            if (matches(AND, sep)) {
                consumeToken();
            }
        }
        if (ranges.isEmpty()) {
            // Defensive: should not happen given the seeded loop above.
            return dataFactory.getTopDatatype();
        }
        if (ranges.size() == 1) {
            return ranges.iterator().next();
        }
        return dataFactory.getOWLDataIntersectionOf(ranges);
    }
    /**
     * Parses one or more data range primaries separated by "or". A single
     * operand is returned as-is; multiple operands yield a DataUnionOf.
     *
     * @return the parsed data range
     */
    private OWLDataRange parseDataUnionOf() {
        // Seed the separator so the loop body runs at least once.
        String sep = OR.keyword();
        Set<OWLDataRange> ranges = new HashSet<>();
        while (matches(OR, sep)) {
            ranges.add(parseDataRangePrimary());
            sep = peekToken();
            if (matches(OR, sep)) {
                consumeToken();
            }
        }
        if (ranges.size() == 1) {
            return ranges.iterator().next();
        } else {
            return dataFactory.getOWLDataUnionOf(ranges);
        }
    }
    /**
     * Parses an atomic data range: a (possibly facet-restricted) datatype,
     * a complement ("not"), an enumeration ("{...}") or a parenthesised data
     * range.
     *
     * @return the parsed data range
     */
    @Nonnull
    private OWLDataRange parseDataRangePrimary() {
        String tok = peekToken();
        if (isDatatypeName(tok)) {
            consumeToken();
            OWLDatatype datatype = getOWLDatatype(tok);
            String next = peekToken();
            if (matches(OPENBRACKET, next)) {
                // Restricted data range, e.g. "integer[> 0, < 10]".
                consumeToken();
                String sep = COMMA.keyword();
                Set<OWLFacetRestriction> facetRestrictions = new HashSet<>();
                while (matches(COMMA, sep)) {
                    OWLFacet fv = parseFacet();
                    if (fv == null) {
                        throw new ExceptionBuilder()
                                .withKeyword(OWLFacet.getFacets()).build();
                    }
                    // The restricting literal is typed with the datatype
                    // being restricted.
                    OWLLiteral con = parseLiteral(datatype);
                    facetRestrictions
                            .add(dataFactory.getOWLFacetRestriction(fv, con));
                    sep = consumeToken();
                }
                if (!matches(CLOSEBRACKET, sep)) {
                    throw new ExceptionBuilder().withKeyword(CLOSEBRACKET)
                            .build();
                }
                return dataFactory.getOWLDatatypeRestriction(datatype,
                        facetRestrictions);
            } else {
                return datatype;
            }
        } else if (matches(NOT, tok)) {
            return parseDataComplementOf();
        } else if (matches(OPENBRACE, tok)) {
            return parseDataOneOf();
        } else if (matches(OPEN, tok)) {
            consumeToken();
            OWLDataRange rng = parseDataRange();
            consumeToken(CLOSE.keyword());
            return rng;
        } else {
            consumeToken();
            throw new ExceptionBuilder().withDt().withKeyword(OPENBRACE, NOT)
                    .build();
        }
    }
    /**
     * Parses a comma-separated list of data ranges.
     *
     * @return the parsed data ranges (may be empty only if the stream is
     *         malformed; the loop runs at least once)
     */
    @Override
    @Nonnull
    public Set<OWLDataRange> parseDataRangeList() {
        // Seed the separator so the loop body runs at least once.
        String sep = COMMA.keyword();
        Set<OWLDataRange> ranges = new HashSet<>();
        while (matches(COMMA, sep)) {
            // COMMA must not be offered as a completion inside an element.
            potentialKeywords.remove(COMMA);
            OWLDataRange rng = parseDataRange();
            ranges.add(rng);
            potentialKeywords.add(COMMA);
            sep = peekToken();
            if (matches(COMMA, sep)) {
                consumeToken();
            }
        }
        return ranges;
    }
    /**
     * Parses a literal enumeration "{lit1, lit2, ...}" into a DataOneOf.
     * The opening brace has already been peeked by the caller and is consumed
     * here.
     *
     * @return the parsed DataOneOf data range
     */
    @Nonnull
    private OWLDataRange parseDataOneOf() {
        consumeToken();
        Set<OWLLiteral> cons = new HashSet<>();
        String sep = COMMA.keyword();
        while (matches(COMMA, sep)) {
            OWLLiteral con = parseLiteral(null);
            cons.add(con);
            // Next token must be a comma (continue) or the closing brace.
            sep = consumeToken();
        }
        if (!matches(CLOSEBRACE, sep)) {
            throw new ExceptionBuilder().withKeyword(COMMA, CLOSEBRACE).build();
        }
        return dataFactory.getOWLDataOneOf(cons);
    }
@Nonnull
private OWLDataRange parseDataComplementOf() {
String not = consumeToken();
if (!matches(NOT, not)) {
throw new ExceptionBuilder().withKeyword(NOT).build();
}
OWLDataRange complementedDataRange = parseDataRangePrimary();
return dataFactory.getOWLDataComplementOf(complementedDataRange);
}
    /**
     * Parses a literal. Quoted tokens may carry a "^^datatype" suffix or an
     * "@lang" tag; unquoted tokens are tried, in order, as the contextual
     * datatype, integer, float (with f/F suffix), decimal, then boolean.
     *
     * @param datatype
     *        the datatype expected from context, or null if unknown
     * @return the parsed literal
     */
    @Nonnull
    @Override
    public OWLLiteral parseLiteral(OWLDatatype datatype) {
        String tok = consumeToken();
        if (tok.startsWith("\"")) {
            @Nonnull
            String lit = unquoteLiteral(tok);
            if (peekToken().equals("^")) {
                // "^^" arrives as two separate "^" tokens.
                consumeToken();
                if (!peekToken().equals("^")) {
                    throw new ExceptionBuilder().withKeyword("^").build();
                }
                consumeToken();
                return dataFactory.getOWLLiteral(lit, parseDatatype());
            } else if (peekToken().startsWith("@")) {
                // Plain literal with a language tag
                String lang = consumeToken().substring(1);
                return dataFactory.getOWLLiteral(lit, lang);
            } else {
                // Plain literal without a language tag
                return dataFactory.getOWLLiteral(lit, "");
            }
        } else {
            if (datatype != null) {
                // datatype is known from context
                return dataFactory.getOWLLiteral(tok, datatype);
            }
            try {
                int i = Integer.parseInt(tok);
                return dataFactory.getOWLLiteral(i);
            } catch (@SuppressWarnings("unused") NumberFormatException e) {
                // Ignore - not interested
            }
            if (tok.endsWith("f") || tok.endsWith("F")) {
                try {
                    // XXX this extra F might qualify as Float a Double INF/-INF
                    // Manchester syntax writes infinity as INF/-INF; Java's
                    // parser expects "Infinity".
                    float f = Float.parseFloat(tok.replace("INF", "Infinity")
                            .replace("inf", "Infinity"));
                    return dataFactory.getOWLLiteral(asFloat(f),
                            OWL2Datatype.XSD_FLOAT);
                } catch (@SuppressWarnings("unused") NumberFormatException e) {
                    // Ignore - not interested
                }
            }
            try {
                // ensure it's a valid double, or skip; the lexical form is
                // kept verbatim as an xsd:decimal
                Double.parseDouble(tok);
                return dataFactory.getOWLLiteral(tok, OWL2Datatype.XSD_DECIMAL);
            } catch (@SuppressWarnings("unused") NumberFormatException e) {
                // Ignore - not interested
            }
            if (matches(LITERAL_TRUE, tok)) {
                return dataFactory.getOWLLiteral(true);
            } else if (matches(LITERAL_FALSE, tok)) {
                return dataFactory.getOWLLiteral(false);
            }
        }
        throw new ExceptionBuilder().withKeyword(LITERAL_TRUE, LITERAL_FALSE,
                LITERAL_INTEGER, LITERAL_FLOAT, LITERAL_DOUBLE, LITERAL_LITERAL,
                LITERAL_LIT_DATATYPE, LITERAL_LIT_LANG).build();
    }
    /**
     * Strips the surrounding double quotes from a quoted literal token.
     * Callers guarantee the token starts with a quote.
     *
     * @param tok
     *        the quoted token, e.g. {@code "abc"}
     * @return the content between the quotes; empty for {@code ""}
     */
    @Nonnull
    private String unquoteLiteral(String tok) {
        String lit = "";
        if (!tok.endsWith("\"")) {
            consumeToken();
            throw new ExceptionBuilder().withKeyword("\"").build();
        }
        // NOTE(review): a lone-quote token (length 1) would pass the
        // endsWith check and yield "" — presumably the tokenizer never
        // produces such a token; verify against the tokenizer.
        if (tok.length() > 2) {
            lit = tok.substring(1, tok.length() - 1);
        }
        return verifyNotNull(lit);
    }
@Nonnull
private static String asFloat(float f) {
return Float.toString(f).replace("Infinity", "INF");
}
private int parseInteger() {
String i = consumeToken();
try {
return Integer.parseInt(i);
} catch (@SuppressWarnings("unused") NumberFormatException e) {
throw new ExceptionBuilder().withInt().build();
}
}
    /**
     * Parses a filler class expression: a parenthesised union, an
     * enumeration, or a named class. When the expression is absent it
     * defaults to owl:Thing (e.g. for "p min 2" without a filler).
     *
     * @param lookaheadCheck
     *        if true, an unrecognised non-EOF token is tolerated and
     *        owl:Thing is returned instead of throwing
     * @return the parsed class expression, or owl:Thing when missing
     */
    @Nonnull
    private OWLClassExpression parseNestedClassExpression(
            boolean lookaheadCheck) {
        String tok = peekToken();
        if (matches(OPEN, tok)) {
            consumeToken();
            OWLClassExpression desc = parseUnion();
            String closeBracket = consumeToken();
            if (!matches(CLOSE, closeBracket)) {
                // Error!
                throw new ExceptionBuilder().withKeyword(CLOSE).build();
            }
            return desc;
        } else if (matches(OPENBRACE, tok)) {
            return parseObjectOneOf();
        } else if (isClassName(tok)) {
            String name = consumeToken();
            return getOWLClass(name);
        }
        // XXX problem: if the class expression is missing, we should return
        // owl:Thing. But there are many ways in which it could be missing. Hard
        // to tell what sort of lookahead is needed.
        // The next two checks should cover most cases.
        for (ManchesterOWLSyntax x : values()) {
            // A keyword here means the filler was omitted.
            if (matches(x, tok)) {
                return dataFactory.getOWLThing();
            }
        }
        if (eof(tok)) {
            return dataFactory.getOWLThing();
        }
        if (!eof(tok) || !lookaheadCheck) {
            consumeToken();
            throw new ExceptionBuilder().withKeyword(OPEN, OPENBRACE)
                    .withClass().build();
        }
        return dataFactory.getOWLThing();
    }
    /**
     * Parses an individual enumeration "{ind1, ind2, ...}" into an
     * ObjectOneOf.
     *
     * @return the parsed ObjectOneOf class expression
     */
    @Nonnull
    private OWLClassExpression parseObjectOneOf() {
        String open = consumeToken();
        if (!matches(OPENBRACE, open)) {
            throw new ExceptionBuilder().withKeyword(OPENBRACE).build();
        }
        // Seed the separator so the loop body runs at least once.
        String sep = COMMA.keyword();
        Set<OWLIndividual> inds = new HashSet<>();
        while (matches(COMMA, sep)) {
            inds.add(parseIndividual());
            sep = peekToken();
            if (matches(COMMA, sep)) {
                consumeToken();
            }
        }
        String close = consumeToken();
        if (!matches(CLOSEBRACE, close)) {
            throw new ExceptionBuilder().withKeyword(CLOSEBRACE, COMMA).build();
        }
        return dataFactory.getOWLObjectOneOf(inds);
    }
private static <F> void initialiseSection(
AnnotatedListItemParser<F, ?> parser,
Map<ManchesterOWLSyntax, AnnotatedListItemParser<F, ?>> map,
ManchesterOWLSyntax... synonyms) {
map.put(parser.getFrameSectionKeyword(), parser);
for (ManchesterOWLSyntax syn : synonyms) {
map.put(syn, parser);
}
}
    /**
     * Top-level parse loop: repeatedly dispatches on the frame keyword
     * (Class, ObjectProperty, ...) until EOF, collecting all axioms.
     * The {@code possible} set tracks which keywords are legal next, for
     * error reporting.
     *
     * @return all ontology/axiom pairs parsed from the frames
     */
    @Override
    @Nonnull
    public Set<OntologyAxiomPair> parseFrames() {
        Set<OntologyAxiomPair> axioms = new HashSet<>();
        Set<ManchesterOWLSyntax> possible = new HashSet<>();
        resetPossible(possible);
        while (true) {
            String tok = peekToken();
            if (matches(CLASS, tok)) {
                potentialKeywords.clear();
                resetPossible(possible);
                axioms.addAll(parseClassFrame());
                // After a frame, its section keywords are also legal next.
                possible.addAll(classFrameSections.keySet());
            } else if (matches(OBJECT_PROPERTY, tok)) {
                potentialKeywords.clear();
                resetPossible(possible);
                axioms.addAll(parseObjectPropertyFrame());
                possible.addAll(objectPropertyFrameSections.keySet());
            } else if (matches(DATA_PROPERTY, tok)) {
                potentialKeywords.clear();
                resetPossible(possible);
                axioms.addAll(parseDataPropertyFrame());
                possible.addAll(dataPropertyFrameSections.keySet());
            } else if (matches(ANNOTATION_PROPERTY, tok)) {
                potentialKeywords.clear();
                resetPossible(possible);
                axioms.addAll(parseAnnotationPropertyFrame());
                possible.addAll(Arrays.asList(SUB_PROPERTY_OF, DOMAIN, RANGE));
            } else if (matches(INDIVIDUAL, tok)) {
                potentialKeywords.clear();
                resetPossible(possible);
                axioms.addAll(parseIndividualFrame());
                possible.addAll(
                        Arrays.asList(TYPES, FACTS, DIFFERENT_FROM, SAME_AS));
            } else if (matches(DATATYPE, tok)) {
                potentialKeywords.clear();
                resetPossible(possible);
                axioms.addAll(parseDatatypeFrame());
                possible.add(EQUIVALENT_TO);
            } else if (matches(VALUE_PARTITION, tok)) {
                potentialKeywords.clear();
                resetPossible(possible);
                // Note: ValuePartition axioms are not collected here.
                parseValuePartitionFrame();
            } else if (matches(RULE, tok)) {
                potentialKeywords.clear();
                resetPossible(possible);
                axioms.addAll(parseRuleFrame());
            } else {
                if (eof(tok)) {
                    break;
                } else {
                    consumeToken();
                    throw new ExceptionBuilder().withKeyword(possible).build();
                }
            }
        }
        return axioms;
    }
    /**
     * Parses a "Datatype:" frame: a datatype declaration followed by optional
     * "EquivalentTo" (datatype definitions) and "Annotations" sections.
     *
     * @return the axioms produced by the frame
     */
    @Override
    public Set<OntologyAxiomPair> parseDatatypeFrame() {
        String tok = consumeToken();
        Set<OntologyAxiomPair> axioms = new HashSet<>();
        if (!matches(DATATYPE, tok)) {
            throw new ExceptionBuilder().withKeyword(DATATYPE).build();
        }
        String subj = consumeToken();
        OWLDatatype datatype = getOWLDatatype(subj);
        axioms.add(new OntologyAxiomPair(defaultOntology,
                dataFactory.getOWLDeclarationAxiom(datatype)));
        while (true) {
            String sect = peekToken();
            if (matches(EQUIVALENT_TO, sect)) {
                potentialKeywords.clear();
                consumeToken();
                Set<OWLOntology> onts = getOntologies();
                Set<OWLDataRange> drs = parseDataRangeList();
                for (OWLOntology ont : onts) {
                    assert ont != null;
                    for (OWLDataRange dr : drs) {
                        assert dr != null;
                        axioms.add(new OntologyAxiomPair(ont, dataFactory
                                .getOWLDatatypeDefinitionAxiom(datatype, dr)));
                    }
                }
            } else if (matches(ANNOTATIONS, sect)) {
                potentialKeywords.clear();
                axioms.addAll(parseAnnotations(datatype.getIRI()));
            } else {
                // Any other token ends the frame.
                break;
            }
        }
        return axioms;
    }
private static void resetPossible(Set<ManchesterOWLSyntax> possible) {
possible.clear();
possible.add(ANNOTATIONS);
possible.add(ANNOTATION_PROPERTY);
possible.add(CLASS);
possible.add(OBJECT_PROPERTY);
possible.add(DATATYPE);
possible.add(DATA_PROPERTY);
possible.add(INDIVIDUAL);
possible.add(VALUE_PARTITION);
possible.add(RULE);
}
    /**
     * Parses an "EquivalentClasses:" axiom over a list of class expressions,
     * with optional annotations and an optional ontology list.
     *
     * @return one pair per target ontology carrying the equivalence axiom
     */
    private Set<OntologyAxiomPair> parseNaryEquivalentClasses() {
        String tok = consumeToken();
        if (!matches(EQUIVALENT_CLASSES, tok)) {
            throw new ExceptionBuilder().withKeyword(EQUIVALENT_CLASSES)
                    .build();
        }
        Set<OWLOntology> ontologies = getOntologies();
        Set<OWLAnnotation> annotations = parseAnnotations();
        Set<OWLClassExpression> classExpressions = parseClassExpressionList();
        Set<OntologyAxiomPair> pairs = new HashSet<>();
        for (OWLOntology ont : ontologies) {
            assert ont != null;
            pairs.add(new OntologyAxiomPair(ont,
                    dataFactory.getOWLEquivalentClassesAxiom(classExpressions,
                            annotations)));
        }
        return pairs;
    }
    /**
     * Parses an "EquivalentProperties:" axiom over a property list. The kind
     * of the first property decides whether an object- or data-property
     * equivalence axiom is built; the list is assumed homogeneous.
     *
     * @return one pair per target ontology carrying the equivalence axiom
     */
    private Set<OntologyAxiomPair> parseNaryEquivalentProperties() {
        String tok = consumeToken();
        if (!matches(EQUIVALENT_PROPERTIES, tok)) {
            throw new ExceptionBuilder().withKeyword(EQUIVALENT_PROPERTIES)
                    .build();
        }
        Set<OWLOntology> ontologies = getOntologies();
        Set<OWLAnnotation> annotations = parseAnnotations();
        Set<OWLPropertyExpression> properties = parsePropertyList();
        OWLAxiom propertyAxiom;
        // Dispatch on the first property; mixed lists are not supported.
        if (properties.iterator().next().isObjectPropertyExpression()) {
            Set<OWLObjectPropertyExpression> ope = new HashSet<>();
            for (OWLPropertyExpression pe : properties) {
                ope.add((OWLObjectPropertyExpression) pe);
            }
            propertyAxiom = dataFactory
                    .getOWLEquivalentObjectPropertiesAxiom(ope, annotations);
        } else {
            Set<OWLDataPropertyExpression> dpe = new HashSet<>();
            for (OWLPropertyExpression pe : properties) {
                dpe.add((OWLDataPropertyExpression) pe);
            }
            propertyAxiom = dataFactory.getOWLEquivalentDataPropertiesAxiom(dpe,
                    annotations);
        }
        Set<OntologyAxiomPair> pairs = new HashSet<>();
        for (OWLOntology ont : ontologies) {
            assert ont != null;
            pairs.add(new OntologyAxiomPair(ont, propertyAxiom));
        }
        return pairs;
    }
@Nonnull
private Set<OWLAnnotation> parseAnnotations() {
String next = peekToken();
@Nonnull
Set<OWLAnnotation> annotations = CollectionFactory.emptySet();
if (matches(ANNOTATIONS, next)) {
consumeToken();
annotations = parseAnnotationList();
}
return annotations;
}
    /**
     * Parses a mandatory "Annotations:" section and turns each annotation
     * into an AnnotationAssertion on the given subject, for each target
     * ontology. Pairs are only produced when the loader configuration allows
     * annotation axioms to be loaded.
     *
     * @param s
     *        the subject the assertions are made about
     * @return the annotation assertion pairs (possibly empty)
     */
    private Set<OntologyAxiomPair> parseAnnotations(
            @Nonnull OWLAnnotationSubject s) {
        String header = consumeToken();
        if (!matches(ANNOTATIONS, header)) {
            throw new ExceptionBuilder().withKeyword(ANNOTATIONS).build();
        }
        Set<OWLOntology> onts = getOntologies();
        Set<OntologyAxiomPair> pairs = new HashSet<>();
        Set<OWLAnnotation> annos = parseAnnotationList();
        for (OWLOntology ont : onts) {
            assert ont != null;
            for (OWLAnnotation anno : annos) {
                assert anno != null;
                // Annotation assertions are skipped entirely when the
                // configuration says not to load them.
                if (getOntologyLoaderConfiguration().isLoadAnnotationAxioms()) {
                    pairs.add(new OntologyAxiomPair(ont, dataFactory
                            .getOWLAnnotationAssertionAxiom(s, anno)));
                }
            }
        }
        return pairs;
    }
    /**
     * Parses a comma-separated list of annotations. Each annotation may
     * itself be preceded by nested "Annotations:" that annotate it.
     *
     * @return the parsed annotations
     */
    @Nonnull
    private Set<OWLAnnotation> parseAnnotationList() {
        // Seed the separator so the loop body runs at least once.
        String sep = COMMA.keyword();
        Set<OWLAnnotation> annos = new HashSet<>();
        while (matches(COMMA, sep)) {
            potentialKeywords.clear();
            // Nested annotations annotate the annotation that follows them.
            Set<OWLAnnotation> annotations = parseAnnotations();
            OWLAnnotation anno = parseAnnotation();
            anno = anno.getAnnotatedAnnotation(annotations);
            annos.add(anno);
            sep = peekToken();
            if (matches(COMMA, sep)) {
                consumeToken();
            }
        }
        return annos;
    }
    /**
     * Parses a single annotation: a property followed by a value that is an
     * entity/individual name (becomes an IRI or anonymous individual), a
     * full IRI in angle brackets, or a literal.
     *
     * @return the parsed annotation
     */
    @Nonnull
    protected OWLAnnotation parseAnnotation() {
        OWLAnnotationProperty annoProp = parseAnnotationProperty();
        String obj = peekToken();
        OWLAnnotation anno = null;
        if (isIndividualName(obj) || isClassName(obj)
                || isObjectPropertyName(obj) || isDataPropertyName(obj)) {
            consumeToken();
            OWLAnnotationValue value;
            if (obj.startsWith("_:")) {
                // "_:" prefix marks an anonymous individual.
                value = anonProvider.getOWLAnonymousIndividual(obj);
            } else {
                value = getIRI(obj);
            }
            anno = dataFactory.getOWLAnnotation(annoProp, value);
        } else if (obj.startsWith("<")) {
            IRI value = parseIRI();
            anno = dataFactory.getOWLAnnotation(annoProp, value);
        } else {
            OWLLiteral con = parseLiteral(null);
            anno = dataFactory.getOWLAnnotation(annoProp, con);
        }
        return anno;
    }
    /**
     * Parses a "Class:" frame, requiring well-formed sections (no EOF
     * tolerance).
     *
     * @return the axioms produced by the frame
     */
    @Override
    public Set<OntologyAxiomPair> parseClassFrame() {
        return parseClassFrame(false);
    }
    /**
     * Parses a "Class:" frame in EOF mode: unrecognised section keywords
     * raise an error listing all expected sections.
     *
     * @return the axioms produced by the frame
     */
    @Override
    public Set<OntologyAxiomPair> parseClassFrameEOF() {
        return parseClassFrame(true);
    }
    /**
     * Parses a "Class:" frame: the class declaration followed by its
     * sections (SubClassOf, EquivalentTo, ...).
     *
     * @param eof
     *        if true, an unrecognised non-EOF section keyword is an error
     * @return the axioms produced by the frame
     */
    @Nonnull
    private Set<OntologyAxiomPair> parseClassFrame(boolean eof) {
        String tok = consumeToken();
        Set<OntologyAxiomPair> axioms = new HashSet<>();
        if (!matches(CLASS, tok)) {
            throw new ExceptionBuilder().withKeyword(CLASS).build();
        }
        String subj = consumeToken();
        OWLClass cls = getOWLClass(subj);
        // The frame always declares its subject in the default ontology.
        axioms.add(new OntologyAxiomPair(defaultOntology,
                dataFactory.getOWLDeclarationAxiom(cls)));
        parseFrameSections(eof, axioms, cls, classFrameSections);
        return axioms;
    }
    /**
     * Parses an "[in ont1, ont2, ...]" ontology qualifier list. Unknown
     * ontologies resolve to null and are silently skipped.
     *
     * @return the resolved ontologies
     */
    @Nonnull
    private Set<OWLOntology> parseOntologyList() {
        potentialKeywords.clear();
        consumeToken(OPENBRACKET.keyword());
        consumeToken("in");
        // Seed the separator so the loop body runs at least once.
        String sep = COMMA.keyword();
        Set<OWLOntology> onts = new HashSet<>();
        while (matches(COMMA, sep)) {
            String tok = consumeToken();
            if (isOntologyName(tok)) {
                OWLOntology ont = getOntology(tok);
                if (ont != null) {
                    onts.add(ont);
                }
            } else {
                throw new ExceptionBuilder().withOnto().build();
            }
            sep = consumeToken();
            if (sep.equals(CLOSEBRACKET.keyword())) {
                break;
            } else if (!matches(COMMA, sep)) {
                throw new ExceptionBuilder().withKeyword(COMMA, CLOSEBRACKET)
                        .build();
            }
        }
        return onts;
    }
@Nonnull
private Set<OWLOntology> getOntologies() {
if (peekToken().equals(OPENBRACKET.keyword())) {
return parseOntologyList();
} else {
return CollectionFactory.createSet(defaultOntology);
}
}
    /**
     * Sets the ontology that receives axioms when no explicit "[in ...]"
     * qualifier is given.
     *
     * @param defaultOntology
     *        the ontology to use as the default target
     */
    @Override
    public void setDefaultOntology(@Nonnull OWLOntology defaultOntology) {
        this.defaultOntology = defaultOntology;
    }
    /**
     * Checks whether the current frame section has no content: the next
     * token already starts another known section or is EOF. Only relevant
     * when empty frame sections are allowed by configuration.
     *
     * @param parsers
     *        the section parsers of the enclosing frame
     * @return true if the section should be treated as empty
     */
    private boolean isEmptyFrameSection(Map<ManchesterOWLSyntax, ?> parsers) {
        if (!allowEmptyFrameSections) {
            return false;
        }
        String next = peekToken();
        // "Annotations" never ends a section; any other section keyword or
        // EOF means the current section is empty.
        return !matches(ANNOTATIONS, next)
                && (parsers.containsKey(parse(next)) || eof(next));
    }
private <F> void parseFrameSections(boolean eof,
Set<OntologyAxiomPair> axioms, @Nonnull F frameSubject,
Map<ManchesterOWLSyntax, AnnotatedListItemParser<F, ?>> sectionParsers) {
while (true) {
String sect = peekToken();
AnnotatedListItemParser<F, ?> parser = sectionParsers
.get(parse(sect));
if (parser != null) {
consumeToken();
Set<OWLOntology> onts = getOntologies();
if (!isEmptyFrameSection(sectionParsers)) {
axioms.addAll(parseAnnotatedListItems(frameSubject, parser,
onts));
}
} else if (eof && !eof(sect)) {
List<ManchesterOWLSyntax> expected = new ArrayList<>();
expected.addAll(sectionParsers.keySet());
if (frameSubject instanceof OWLAnnotationSubject
|| frameSubject instanceof OWLEntity) {
expected.add(ANNOTATIONS);
}
throw new ExceptionBuilder().withKeyword(expected).build();
} else {
break;
}
}
}
    /**
     * Parses an "ObjectProperty:" frame, requiring well-formed sections (no
     * EOF tolerance).
     *
     * @return the axioms produced by the frame
     */
    @Override
    public Set<OntologyAxiomPair> parseObjectPropertyFrame() {
        return parseObjectPropertyFrame(false);
    }
    /**
     * Parses an "ObjectProperty:" frame: the property declaration followed
     * by its sections (Domain, Range, Characteristics, ...).
     *
     * @param eof
     *        if true, an unrecognised non-EOF section keyword is an error
     * @return the axioms produced by the frame
     */
    @Nonnull
    private Set<OntologyAxiomPair> parseObjectPropertyFrame(boolean eof) {
        Set<OntologyAxiomPair> axioms = new HashSet<>();
        consumeToken(OBJECT_PROPERTY);
        String token = consumeToken();
        OWLObjectProperty prop = getOWLObjectProperty(token);
        // NOTE(review): an OWLObjectProperty is presumably never anonymous,
        // so the guard likely always holds — confirm against the API.
        if (!prop.isAnonymous()) {
            axioms.add(new OntologyAxiomPair(defaultOntology, dataFactory
                    .getOWLDeclarationAxiom(prop.asOWLObjectProperty())));
        }
        parseFrameSections(eof, axioms, prop, objectPropertyFrameSections);
        return axioms;
    }
    /**
     * Parses a "DataProperty:" frame: the property declaration followed by
     * its sections.
     *
     * @return the axioms produced by the frame
     */
    @Override
    public Set<OntologyAxiomPair> parseDataPropertyFrame() {
        Set<OntologyAxiomPair> axioms = new HashSet<>();
        String tok = consumeToken();
        if (!matches(DATA_PROPERTY, tok)) {
            throw new ExceptionBuilder().withKeyword(DATA_PROPERTY).build();
        }
        String subj = consumeToken();
        OWLDataProperty prop = getOWLDataProperty(subj);
        axioms.add(new OntologyAxiomPair(defaultOntology,
                dataFactory.getOWLDeclarationAxiom(prop)));
        parseFrameSections(false, axioms, prop, dataPropertyFrameSections);
        return axioms;
    }
    /**
     * Parses an "AnnotationProperty:" frame: the property declaration
     * (in each target ontology) followed by its sections.
     *
     * @return the axioms produced by the frame
     */
    @Override
    public Set<OntologyAxiomPair> parseAnnotationPropertyFrame() {
        Set<OntologyAxiomPair> axioms = new HashSet<>();
        String tok = consumeToken();
        if (!matches(ANNOTATION_PROPERTY, tok)) {
            throw new ExceptionBuilder().withKeyword(ANNOTATION_PROPERTY)
                    .build();
        }
        String subj = consumeToken();
        OWLAnnotationProperty prop = getOWLAnnotationProperty(subj);
        // Unlike other frames, the declaration goes into every target
        // ontology rather than only the default one.
        for (OWLOntology ont : getOntologies()) {
            axioms.add(new OntologyAxiomPair(ont,
                    dataFactory.getOWLDeclarationAxiom(prop)));
        }
        parseFrameSections(false, axioms, prop,
                annotationPropertyFrameSections);
        return axioms;
    }
    /**
     * Parses an "Individual:" frame: a declaration (only for named
     * individuals) followed by its sections (Types, Facts, ...).
     *
     * @return the axioms produced by the frame
     */
    @Override
    public Set<OntologyAxiomPair> parseIndividualFrame() {
        String tok = consumeToken();
        Set<OntologyAxiomPair> axioms = new HashSet<>();
        if (!matches(INDIVIDUAL, tok)) {
            throw new ExceptionBuilder().withKeyword(INDIVIDUAL).build();
        }
        String subj = consumeToken();
        OWLIndividual ind = getOWLIndividual(subj);
        // Anonymous individuals cannot be declared.
        if (!ind.isAnonymous()) {
            axioms.add(new OntologyAxiomPair(getOntology(null), dataFactory
                    .getOWLDeclarationAxiom(ind.asOWLNamedIndividual())));
        }
        parseFrameSections(false, axioms, ind, individualFrameSections);
        return axioms;
    }
    /**
     * Parses one entry of a "Facts:" section: an optionally negated property
     * assertion about the given individual.
     *
     * @param ind
     *        the individual the fact is asserted about
     * @return the (possibly negative) property assertion axiom
     */
    @Nonnull
    protected OWLPropertyAssertionAxiom<?, ?> parseFact(
            @Nonnull OWLIndividual ind) {
        boolean negative = false;
        // A leading "not" turns the assertion into its negative form.
        if (NOT.matches(peekToken())) {
            consumeToken();
            negative = true;
        }
        String prop = peekToken();
        if (isDataPropertyName(prop)) {
            OWLDataProperty p = parseDataProperty();
            OWLLiteral con = parseLiteral(null);
            if (!negative) {
                return dataFactory.getOWLDataPropertyAssertionAxiom(p, ind,
                        con);
            } else {
                return dataFactory.getOWLNegativeDataPropertyAssertionAxiom(p,
                        ind, con);
            }
        } else if (isObjectPropertyName(prop) || matches(INVERSE, prop)) {
            OWLObjectPropertyExpression p = parseObjectPropertyExpression(
                    false);
            if (!negative) {
                return dataFactory.getOWLObjectPropertyAssertionAxiom(p, ind,
                        parseIndividual());
            } else {
                return dataFactory.getOWLNegativeObjectPropertyAssertionAxiom(p,
                        ind, parseIndividual());
            }
        } else {
            consumeToken();
            throw new ExceptionBuilder().withObject().withData().build();
        }
    }
@Override
public Set<OntologyAxiomPair> parseValuePartitionFrame() {
String section = consumeToken();
if (!matches(VALUE_PARTITION, section)) {
throw new ExceptionBuilder().withKeyword(VALUE_PARTITION).build();
}
Set<OWLOntology> onts = getOntologies();
OWLObjectPropertyExpression prop = parseObjectPropertyExpression(false);
String clsName = consumeToken();
if (eof(clsName)) {
throw new ExceptionBuilder().withObject().build();
}
OWLClass cls = getOWLClass(clsName);
Set<OntologyAxiomPair> axioms = new HashSet<>();
axioms.addAll(parseValuePartitionValues(onts, cls));
for (OWLOntology ont : onts) {
assert ont != null;
axioms.add(new OntologyAxiomPair(ont,
dataFactory.getOWLFunctionalObjectPropertyAxiom(prop)));
axioms.add(new OntologyAxiomPair(ont,
dataFactory.getOWLObjectPropertyRangeAxiom(prop, cls)));
}
return axioms;
}
    /**
     * Parses a bracketed list of value-partition values, recursively
     * handling nested partitions. Each value becomes a subclass of the given
     * superclass, and all siblings at one level are made pairwise disjoint.
     *
     * @param onts
     *        the target ontologies
     * @param superclass
     *        the class the parsed values are subclasses of
     * @return the subclass and disjointness axiom pairs
     */
    @Nonnull
    private Set<OntologyAxiomPair> parseValuePartitionValues(
            @Nonnull Set<OWLOntology> onts, @Nonnull OWLClass superclass) {
        Set<OntologyAxiomPair> axioms = new HashSet<>();
        Set<OWLClass> siblings = new HashSet<>();
        consumeToken(OPENBRACKET.keyword());
        // Seed the separator so the loop body runs at least once.
        String sep = COMMA.keyword();
        while (matches(COMMA, sep)) {
            String clsName = consumeToken();
            OWLClass cls = getOWLClass(clsName);
            siblings.add(cls);
            OWLSubClassOfAxiom ax = dataFactory.getOWLSubClassOfAxiom(cls,
                    superclass);
            for (OWLOntology ont : onts) {
                assert ont != null;
                axioms.add(new OntologyAxiomPair(ont, ax));
            }
            // A nested "[" starts a sub-partition under this value.
            if (peekToken().equals(OPENBRACKET.keyword())) {
                axioms.addAll(parseValuePartitionValues(onts, cls));
            }
            sep = peekToken();
            if (matches(COMMA, sep)) {
                consumeToken();
            }
        }
        consumeToken(CLOSEBRACKET.keyword());
        OWLAxiom ax = dataFactory.getOWLDisjointClassesAxiom(siblings);
        for (OWLOntology ont : onts) {
            assert ont != null;
            axioms.add(new OntologyAxiomPair(ont, ax));
        }
        return axioms;
    }
    /**
     * Parses a "Rule:" frame: a SWRL rule of the form
     * "body atoms -&gt; head atoms".
     *
     * @return one pair per target ontology carrying the SWRL rule
     */
    @Override
    public List<OntologyAxiomPair> parseRuleFrame() {
        String section = consumeToken();
        if (!matches(RULE, section)) {
            throw new ExceptionBuilder().withKeyword(RULE).build();
        }
        Set<OWLOntology> ontologies = getOntologies();
        List<SWRLAtom> body = parseRuleAtoms();
        // The "->" arrow arrives as two tokens: "-" then ">".
        String tok = consumeToken();
        if (!matches(DASH, tok)) {
            throw new ExceptionBuilder().withKeyword(DASH, COMMA).build();
        }
        consumeToken(">");
        List<SWRLAtom> head = parseRuleAtoms();
        // LinkedHashSet preserves the atoms' textual order.
        SWRLRule rule = dataFactory.getSWRLRule(new LinkedHashSet<>(body),
                new LinkedHashSet<>(head));
        List<OntologyAxiomPair> pairs = new ArrayList<>();
        for (OWLOntology ont : ontologies) {
            assert ont != null;
            pairs.add(new OntologyAxiomPair(ont, rule));
        }
        return pairs;
    }
    /**
     * Parses a comma-separated list of SWRL atoms, preserving their order.
     *
     * @return the parsed atoms in textual order
     */
    private List<SWRLAtom> parseRuleAtoms() {
        // Seed the separator so the loop body runs at least once.
        String sep = COMMA.keyword();
        List<SWRLAtom> atoms = new ArrayList<>();
        while (matches(COMMA, sep)) {
            // COMMA must not be offered as a completion inside an atom.
            potentialKeywords.remove(COMMA);
            SWRLAtom atom = parseRuleAtom();
            atoms.add(atom);
            sep = peekToken();
            if (matches(COMMA, sep)) {
                consumeToken();
            }
            potentialKeywords.add(COMMA);
        }
        return atoms;
    }
    /**
     * Parses a single SWRL atom, dispatching on the predicate token: class,
     * property, data range, differentFrom/sameAs, or a built-in.
     *
     * @return the parsed atom
     */
    private SWRLAtom parseRuleAtom() {
        String predicate = peekToken();
        if (isClassName(predicate)) {
            return parseClassAtom();
        } else if (matches(OPEN, predicate)) {
            // A parenthesised class expression used as an atom predicate.
            return parseClassAtom();
        } else if (isObjectPropertyName(predicate)) {
            return parseObjectPropertyAtom();
        } else if (isDataPropertyName(predicate)) {
            return parseDataPropertyAtom();
        } else if (isDatatypeName(predicate)) {
            return parseDataRangeAtom();
        } else if (matches(DIFFERENT_FROM, predicate)) {
            return parseDifferentFromAtom();
        } else if (matches(SAME_AS, predicate)) {
            return parseSameAsAtom();
        } else if (isSWRLBuiltin(predicate) || predicate.startsWith("<")) {
            // Known built-in name, or a full IRI naming a built-in.
            return parseBuiltInAtom();
        } else {
            consumeToken();
            // Offer all built-in names plus the special atoms, sorted.
            List<String> kw = new ArrayList<>(ruleBuiltIns.keySet());
            kw.add(DIFFERENT_FROM.toString());
            kw.add(SAME_AS.toString());
            Collections.sort(kw);
            throw new ExceptionBuilder().withKeyword(kw).withClass()
                    .withObject().withData().build();
        }
    }
private SWRLAtom parseDataPropertyAtom() {
String predicate = consumeToken();
if (!isDataPropertyName(predicate)) {
throw new ExceptionBuilder().withData().build();
}
consumeToken(OPEN.keyword());
SWRLIArgument obj1 = parseIObject();
consumeToken(COMMA.keyword());
SWRLDArgument obj2 = parseDObject();
consumeToken(CLOSE.keyword());
return dataFactory.getSWRLDataPropertyAtom(
getOWLDataProperty(predicate), obj1, obj2);
}
private SWRLAtom parseDataRangeAtom() {
OWLDataRange range = parseDataRange();
consumeToken(OPEN.keyword());
SWRLVariable obj1 = parseDVariable();
consumeToken(CLOSE.keyword());
return dataFactory.getSWRLDataRangeAtom(range, obj1);
}
private SWRLAtom parseObjectPropertyAtom() {
String predicate = consumeToken();
if (!isObjectPropertyName(predicate)) {
throw new ExceptionBuilder().withObject().build();
}
assert predicate != null;
consumeToken(OPEN.keyword());
SWRLIArgument obj1 = parseIObject();
consumeToken(COMMA.keyword());
SWRLIArgument obj2 = parseIObject();
consumeToken(CLOSE.keyword());
return dataFactory.getSWRLObjectPropertyAtom(
getOWLObjectProperty(predicate), obj1, obj2);
}
private SWRLAtom parseClassAtom() {
OWLClassExpression predicate = parseUnion();
consumeToken(OPEN.keyword());
SWRLIArgument obj = parseIObject();
consumeToken(CLOSE.keyword());
return dataFactory.getSWRLClassAtom(predicate, obj);
}
private SWRLDifferentIndividualsAtom parseDifferentFromAtom() {
consumeToken(ManchesterOWLSyntax.DIFFERENT_FROM.toString());
consumeToken(OPEN.keyword());
SWRLIArgument obj1 = parseIObject();
consumeToken(COMMA.keyword());
SWRLIArgument obj2 = parseIObject();
consumeToken(CLOSE.keyword());
return dataFactory.getSWRLDifferentIndividualsAtom(obj1, obj2);
}
private SWRLSameIndividualAtom parseSameAsAtom() {
consumeToken(ManchesterOWLSyntax.SAME_AS.toString());
consumeToken(OPEN.keyword());
SWRLIArgument obj1 = parseIObject();
consumeToken(COMMA.keyword());
SWRLIArgument obj2 = parseIObject();
consumeToken(CLOSE.keyword());
return dataFactory.getSWRLSameIndividualAtom(obj1, obj2);
}
@Nonnull
private SWRLIArgument parseIObject() {
String s = peekToken();
if (isIndividualName(s)) {
return parseIIndividualObject();
} else if (s.equals("?")) {
return parseIVariable();
} else {
consumeToken();
throw new ExceptionBuilder().withInd().withKeyword("?$var$")
.build();
}
}
@Nonnull
private SWRLVariable parseIVariable() {
return dataFactory.getSWRLVariable(parseVariable());
}
@Nonnull
private SWRLIndividualArgument parseIIndividualObject() {
return dataFactory.getSWRLIndividualArgument(parseIndividual());
}
    /**
     * Parses a SWRL variable after its leading "?". A full IRI in angle
     * brackets is kept as-is; a bare fragment is placed in the urn:swrl
     * namespace.
     *
     * @return the IRI identifying the variable
     */
    @Override
    public IRI parseVariable() {
        consumeToken("?");
        String fragment = peekToken();
        if (fragment.startsWith("<")) {
            // then the variable was saved with a full IRI
            // preserve the namespace
            return parseIRI();
        } else {
            consumeToken();
        }
        return IRI.create("urn:swrl#", fragment);
    }
@Nonnull
private SWRLDArgument parseDObject() {
String s = peekToken();
if (s.equals("?")) {
return parseDVariable();
} else {
try {
return parseLiteralObject();
} catch (ParserException e) {
e.getExpectedKeywords().add("?");
throw e;
}
}
}
@Nonnull
private SWRLVariable parseDVariable() {
IRI var = parseVariable();
return dataFactory.getSWRLVariable(var);
}
@Nonnull
private SWRLLiteralArgument parseLiteralObject() {
OWLLiteral lit = parseLiteral(null);
return dataFactory.getSWRLLiteralArgument(lit);
}
    /**
     * Parses a SWRL built-in atom {@code builtin(arg1, ..., argN)}. Known
     * built-ins are parsed according to their declared min/max arity;
     * unknown IRIs accept any number of comma-separated arguments.
     *
     * @return the parsed built-in atom
     */
    private SWRLBuiltInAtom parseBuiltInAtom() {
        String predicate = consumeToken();
        consumeToken(OPEN.keyword());
        SWRLBuiltInsVocabulary v = null;
        IRI iri = null;
        if (!ruleBuiltIns.containsKey(predicate)) {
            // Unknown built-in: use the predicate token as an IRI.
            iri = getIRI(predicate);
        } else {
            v = ruleBuiltIns.get(predicate);
            iri = v.getIRI();
        }
        List<SWRLDArgument> args = new ArrayList<>();
        if (v != null && v.getMaxArity() >= 0) {
            // We know the arity!
            for (int i = 0; i < v.getMaxArity(); i++) {
                SWRLDArgument obj = parseDObject();
                args.add(obj);
                // parse at least the minumum arity
                if (i < v.getMinArity() - 1) {
                    // Below the minimum arity a comma is mandatory.
                    consumeToken(COMMA.keyword());
                } else if (i < v.getMaxArity() - 1) {
                    // Between min and max arity further arguments are
                    // optional: stop at the first missing comma.
                    if (peekToken().equals(COMMA.keyword())) {
                        consumeToken();
                    } else {
                        break;
                    }
                }
            }
        } else {
            // Unknown arity so just parse as many arguments as we can
            String sep = COMMA.keyword();
            while (matches(COMMA, sep)) {
                SWRLDArgument arg = parseDObject();
                args.add(arg);
                sep = peekToken();
                if (matches(COMMA, sep)) {
                    consumeToken();
                }
            }
        }
        consumeToken(CLOSE.keyword());
        return dataFactory.getSWRLBuiltInAtom(iri, args);
    }
/**
 * Parses a {@code DisjointClasses:} frame: optional annotations followed by
 * a comma-separated list of class expressions. One axiom pair is created
 * per target ontology.
 *
 * @return pairs of (ontology, disjoint-classes axiom)
 */
private Set<OntologyAxiomPair> parseDisjointClasses() {
    String section = consumeToken();
    if (!matches(DISJOINT_CLASSES, section)) {
        throw new ExceptionBuilder().withKeyword(DISJOINT_CLASSES).build();
    }
    // Order matters: annotations precede the class list in the frame.
    Set<OWLOntology> ontologies = getOntologies();
    Set<OWLAnnotation> annotations = parseAnnotations();
    Set<OWLClassExpression> classExpressions = parseClassExpressionList();
    Set<OntologyAxiomPair> pairs = new HashSet<>();
    for (OWLOntology ont : ontologies) {
        assert ont != null;
        pairs.add(new OntologyAxiomPair(ont,
                dataFactory.getOWLDisjointClassesAxiom(classExpressions,
                        annotations)));
    }
    return pairs;
}
/**
 * Parses a {@code SameIndividual:} frame: optional annotations followed by
 * a comma-separated list of individuals. One axiom pair is created per
 * target ontology.
 *
 * <p>Fix: annotations are now parsed BEFORE the individual list, matching
 * the frame grammar and the sibling parsers ({@code parseDisjointClasses},
 * {@code parseDifferentIndividuals}). Previously the individual list was
 * parsed first, so a leading {@code Annotations:} section was consumed as
 * an individual name and triggered a spurious parse error. Frames without
 * annotations are unaffected.
 *
 * @return pairs of (ontology, same-individual axiom)
 */
private Set<OntologyAxiomPair> parseSameIndividual() {
    String section = consumeToken();
    if (!matches(SAME_INDIVIDUAL, section)) {
        throw new ExceptionBuilder().withKeyword(SAME_INDIVIDUAL).build();
    }
    Set<OWLOntology> ontologies = getOntologies();
    Set<OWLAnnotation> annotations = parseAnnotations();
    Set<OWLIndividual> individuals = parseIndividualList();
    Set<OntologyAxiomPair> pairs = new HashSet<>();
    for (OWLOntology ont : ontologies) {
        assert ont != null;
        pairs.add(new OntologyAxiomPair(ont, dataFactory
                .getOWLSameIndividualAxiom(individuals, annotations)));
    }
    return pairs;
}
/**
 * Parses a {@code DisjointProperties:} frame. The kind of the first parsed
 * property (object vs. data) decides whether an object- or data-property
 * disjointness axiom is built; the whole list is then cast accordingly.
 *
 * @return pairs of (ontology, disjoint-properties axiom)
 */
private Set<OntologyAxiomPair> parseDisjointProperties() {
    String section = consumeToken();
    if (!matches(DISJOINT_PROPERTIES, section)) {
        throw new ExceptionBuilder().withKeyword(DISJOINT_PROPERTIES)
                .build();
    }
    Set<OWLOntology> ontologies = getOntologies();
    Set<OWLAnnotation> annotations = parseAnnotations();
    // parsePropertyList() parses at least one property or throws,
    // so iterator().next() below is safe.
    Set<OWLPropertyExpression> props = parsePropertyList();
    Set<OntologyAxiomPair> pairs = new HashSet<>();
    OWLAxiom propertiesAxiom;
    if (props.iterator().next().isObjectPropertyExpression()) {
        Set<OWLObjectPropertyExpression> ope = new HashSet<>();
        for (OWLPropertyExpression pe : props) {
            // NOTE(review): assumes the list is homogeneous; a mixed
            // object/data list would fail this cast — confirm upstream.
            ope.add((OWLObjectPropertyExpression) pe);
        }
        propertiesAxiom = dataFactory
                .getOWLDisjointObjectPropertiesAxiom(ope, annotations);
    } else {
        Set<OWLDataPropertyExpression> dpe = new HashSet<>();
        for (OWLPropertyExpression pe : props) {
            dpe.add((OWLDataPropertyExpression) pe);
        }
        propertiesAxiom = dataFactory.getOWLDisjointDataPropertiesAxiom(dpe,
                annotations);
    }
    for (OWLOntology ont : ontologies) {
        assert ont != null;
        pairs.add(new OntologyAxiomPair(ont, propertiesAxiom));
    }
    return pairs;
}
/**
 * Parses a {@code DifferentIndividuals:} frame: optional annotations then a
 * comma-separated individual list. One axiom pair per target ontology.
 *
 * @return pairs of (ontology, different-individuals axiom)
 */
private Set<OntologyAxiomPair> parseDifferentIndividuals() {
    String section = consumeToken();
    if (!matches(DIFFERENT_INDIVIDUALS, section)) {
        throw new ExceptionBuilder().withKeyword(DIFFERENT_INDIVIDUALS)
                .build();
    }
    Set<OWLOntology> ontologies = getOntologies();
    Set<OWLAnnotation> annotations = parseAnnotations();
    Set<OWLIndividual> individuals = parseIndividualList();
    Set<OntologyAxiomPair> pairs = new HashSet<>();
    for (OWLOntology ontology : ontologies) {
        assert ontology != null;
        pairs.add(new OntologyAxiomPair(ontology,
                dataFactory.getOWLDifferentIndividualsAxiom(individuals,
                        annotations)));
    }
    return pairs;
}
@Nonnull
/**
 * Parses a single object-property characteristic keyword (Functional,
 * InverseFunctional, Symmetric, Asymmetric/AntiSymmetric, Transitive,
 * Reflexive, Irreflexive) and builds the corresponding axiom for
 * {@code prop}. {@code AntiSymmetric} is accepted as a legacy synonym of
 * {@code Asymmetric}.
 *
 * <p>Fix: the expected-keyword list in the error raised for an unknown
 * characteristic now includes ASYMMETRIC, which the success path accepts
 * but the old message omitted.
 *
 * @param prop the property the characteristic applies to
 * @return the characteristic axiom
 */
protected OWLObjectPropertyCharacteristicAxiom parseObjectPropertyCharacteristic(
        @Nonnull OWLObjectPropertyExpression prop) {
    String characteristic = consumeToken();
    if (matches(FUNCTIONAL, characteristic)) {
        return dataFactory.getOWLFunctionalObjectPropertyAxiom(prop);
    } else if (matches(INVERSE_FUNCTIONAL, characteristic)) {
        return dataFactory.getOWLInverseFunctionalObjectPropertyAxiom(prop);
    } else if (matches(SYMMETRIC, characteristic)) {
        return dataFactory.getOWLSymmetricObjectPropertyAxiom(prop);
    } else if (matches(ANTI_SYMMETRIC, characteristic)
            || matches(ASYMMETRIC, characteristic)) {
        return dataFactory.getOWLAsymmetricObjectPropertyAxiom(prop);
    } else if (matches(TRANSITIVE, characteristic)) {
        return dataFactory.getOWLTransitiveObjectPropertyAxiom(prop);
    } else if (matches(REFLEXIVE, characteristic)) {
        return dataFactory.getOWLReflexiveObjectPropertyAxiom(prop);
    } else if (matches(IRREFLEXIVE, characteristic)) {
        return dataFactory.getOWLIrreflexiveObjectPropertyAxiom(prop);
    } else {
        throw new ExceptionBuilder()
                .withKeyword(FUNCTIONAL, INVERSE_FUNCTIONAL, SYMMETRIC,
                        ANTI_SYMMETRIC, ASYMMETRIC, TRANSITIVE, REFLEXIVE,
                        IRREFLEXIVE)
                .build();
    }
}
@Nonnull
/**
 * Parses a data-property characteristic keyword. Only {@code Functional}
 * is legal for data properties.
 *
 * @param prop the property the characteristic applies to
 * @return the functional-data-property axiom
 */
protected OWLDataPropertyCharacteristicAxiom parseDataPropertyCharacteristic(
        @Nonnull OWLDataPropertyExpression prop) {
    String token = consumeToken();
    if (!matches(FUNCTIONAL, token)) {
        throw new ExceptionBuilder().withKeyword(FUNCTIONAL).build();
    }
    return dataFactory.getOWLFunctionalDataPropertyAxiom(prop);
}
@Override
/**
 * Parses a comma-separated list of class expressions (each a union).
 * COMMA is temporarily removed from the shared {@code potentialKeywords}
 * set while each expression is parsed so that the comma is treated as a
 * list separator rather than a candidate keyword inside the expression.
 *
 * @return the parsed class expressions (at least one, or an exception)
 */
public Set<OWLClassExpression> parseClassExpressionList() {
    Set<OWLClassExpression> descs = new HashSet<>();
    String sep = COMMA.keyword();
    while (matches(COMMA, sep)) {
        potentialKeywords.remove(COMMA);
        descs.add(parseUnion());
        potentialKeywords.add(COMMA);
        sep = peekToken();
        if (matches(COMMA, sep)) {
            sep = consumeToken();
        }
    }
    return descs;
}
/**
 * Parses a bracketed, comma-separated list of class expressions, e.g.
 * {@code [ C, D, E ]}, where the opening and closing delimiters are given
 * by the caller.
 *
 * @param expectedOpen  required opening delimiter keyword
 * @param expectedClose required closing delimiter keyword
 * @return the parsed class expressions
 */
private Set<OWLClassExpression> parseClassExpressionList(
        ManchesterOWLSyntax expectedOpen,
        ManchesterOWLSyntax expectedClose) {
    String open = consumeToken();
    Set<OWLClassExpression> descs = new HashSet<>();
    if (!matches(expectedOpen, open)) {
        throw new ExceptionBuilder().withKeyword(expectedOpen).build();
    }
    String sep = COMMA.keyword();
    while (matches(COMMA, sep)) {
        // Suppress COMMA as a keyword while inside an expression; it only
        // acts as the list separator at this level.
        potentialKeywords.remove(COMMA);
        OWLClassExpression desc = parseUnion();
        potentialKeywords.add(COMMA);
        descs.add(desc);
        sep = peekToken();
        if (matches(COMMA, sep)) {
            sep = consumeToken();
        }
    }
    String close = consumeToken();
    if (!matches(expectedClose, close)) {
        throw new ExceptionBuilder().withKeyword(expectedClose).build();
    }
    return descs;
}
@Override
/**
 * Parses a comma-separated list of property expressions (object or data).
 *
 * @return the parsed properties (at least one, or a parser exception)
 */
public Set<OWLPropertyExpression> parsePropertyList() {
    Set<OWLPropertyExpression> result = new HashSet<>();
    String delimiter;
    do {
        result.add(parsePropertyExpression());
        delimiter = peekToken();
        if (matches(COMMA, delimiter)) {
            consumeToken();
        }
    } while (matches(COMMA, delimiter));
    return result;
}
@Override
/**
 * Parses a comma-separated list of object property expressions.
 *
 * @return the parsed object properties (at least one, or an exception)
 */
public Set<OWLObjectPropertyExpression> parseObjectPropertyList() {
    Set<OWLObjectPropertyExpression> result = new HashSet<>();
    String delimiter;
    do {
        result.add(parseObjectPropertyExpression(false));
        delimiter = peekToken();
        if (matches(COMMA, delimiter)) {
            consumeToken();
        }
    } while (matches(COMMA, delimiter));
    return result;
}
@Override
/**
 * Parses a comma-separated list of data properties.
 *
 * @return the parsed data properties (at least one, or an exception)
 */
public Set<OWLDataProperty> parseDataPropertyList() {
    Set<OWLDataProperty> result = new HashSet<>();
    String delimiter;
    do {
        result.add(parseDataProperty());
        delimiter = peekToken();
        if (matches(COMMA, delimiter)) {
            consumeToken();
        }
    } while (matches(COMMA, delimiter));
    return result;
}
@Override
/**
 * Parses a comma-separated list of annotation properties.
 *
 * @return the parsed annotation properties (at least one, or an exception)
 */
public Set<OWLAnnotationProperty> parseAnnotationPropertyList() {
    Set<OWLAnnotationProperty> result = new HashSet<>();
    String delimiter;
    do {
        result.add(parseAnnotationProperty());
        delimiter = peekToken();
        if (matches(COMMA, delimiter)) {
            consumeToken();
        }
    } while (matches(COMMA, delimiter));
    return result;
}
@Override
@Nonnull
/**
 * Parses a comma-separated list of individuals (named or anonymous).
 *
 * @return the parsed individuals (at least one, or an exception)
 */
public Set<OWLIndividual> parseIndividualList() {
    Set<OWLIndividual> result = new HashSet<>();
    String delimiter;
    do {
        result.add(parseIndividual());
        delimiter = peekToken();
        if (matches(COMMA, delimiter)) {
            consumeToken();
        }
    } while (matches(COMMA, delimiter));
    return result;
}
@Override
/**
 * Parses an object property chain of the form {@code p o q o r}, where the
 * lowercase letter "o" is the composition connective.
 *
 * @return the chain's properties in source order (at least one)
 */
public List<OWLObjectPropertyExpression> parseObjectPropertyChain() {
    List<OWLObjectPropertyExpression> chain = new ArrayList<>();
    String connective;
    do {
        chain.add(parseObjectPropertyExpression(false));
        connective = peekToken();
        if (connective.equals("o")) {
            consumeToken();
        }
    } while (connective.equals("o"));
    return chain;
}
@Nonnull
/**
 * Parses a single individual: a declared individual name or an anonymous
 * individual written with the blank-node prefix {@code _:}.
 *
 * @return the parsed individual
 */
protected OWLIndividual parseIndividual() {
    String token = consumeToken();
    boolean anonymous = token.startsWith("_:");
    if (isIndividualName(token) || anonymous) {
        return getOWLIndividual(token);
    }
    throw new ExceptionBuilder().withInd().build();
}
@Nonnull
/**
 * Parses a single declared data property name.
 *
 * @return the parsed data property
 */
protected OWLDataProperty parseDataProperty() {
    String token = consumeToken();
    if (isDataPropertyName(token)) {
        return getOWLDataProperty(token);
    }
    throw new ExceptionBuilder().withData().build();
}
@Nonnull
/**
 * Parses a single declared annotation property name.
 *
 * @return the parsed annotation property
 */
protected OWLAnnotationProperty parseAnnotationProperty() {
    String token = consumeToken();
    if (isAnnotationPropertyName(token)) {
        return getOWLAnnotationProperty(token);
    }
    throw new ExceptionBuilder().withAnn().build();
}
/**
 * Parses a {@code Prefix:} declaration, returning a single-entry map from
 * the prefix name to its namespace IRI. A legacy {@code =} between the
 * prefix and the IRI is tolerated and skipped.
 *
 * @return map of prefix name to IRI (exactly one entry)
 */
private Map<String, IRI> parsePrefixDeclaration() {
    consumeToken(PREFIX);
    Map<String, IRI> map = new HashMap<>(2);
    String prefixName = consumeToken();
    // Handle legacy = character if necessary
    if (peekToken().equals("=")) {
        // Legacy
        consumeToken();
    }
    IRI iri = parseIRI();
    map.put(prefixName, iri);
    return map;
}
@Nonnull
/** Parses an {@code Import:} line followed by a full IRI. */
private OWLImportsDeclaration parseImportsDeclaration() {
    consumeToken(IMPORT);
    IRI importedIRI = parseIRI();
    return dataFactory.getOWLImportsDeclaration(importedIRI);
}
@Nonnull
/**
 * Parses a full IRI written in angle brackets, e.g. {@code <http://...>}.
 *
 * @return the IRI with the surrounding angle brackets stripped
 */
protected IRI parseIRI() {
    String token = consumeToken();
    boolean bracketed = token.startsWith("<") && token.endsWith(">");
    if (!bracketed) {
        throw new ExceptionBuilder().withKeyword("<$IRI$>").build();
    }
    return IRI.create(token.substring(1, token.length() - 1));
}
/**
 * Pre-scans the whole token stream and registers every entity name that
 * follows a frame keyword ({@code Class:}, {@code ObjectProperty:}, ...)
 * in the corresponding name set, so forward references resolve during the
 * main parse. {@code ValuePartition:} declares both an object property
 * (the token after the keyword) and a class (the token after that).
 */
private void processDeclaredEntities() {
    for (int i = 0; i < tokens.size(); i++) {
        String token = tokens.get(i).getToken();
        // Look ahead one token for the declared name, if any remains.
        String name = null;
        if (i + 1 < tokens.size()) {
            name = tokens.get(i + 1).getToken();
        }
        if (matches(CLASS, token)) {
            if (name != null) {
                classNames.add(name);
            }
        } else if (matches(OBJECT_PROPERTY, token)) {
            if (name != null) {
                objectPropertyNames.add(name);
            }
        } else if (matches(DATA_PROPERTY, token)) {
            if (name != null) {
                dataPropertyNames.add(name);
            }
        } else if (matches(INDIVIDUAL, token)) {
            if (name != null) {
                individualNames.add(name);
            }
        } else if (matches(DATATYPE, token)) {
            if (name != null) {
                dataTypeNames.add(name);
            }
        } else if (matches(ANNOTATION_PROPERTY, token)) {
            if (name != null) {
                annotationPropertyNames.add(name);
            }
        } else if (matches(VALUE_PARTITION, token)) {
            if (name != null) {
                objectPropertyNames.add(name);
            }
            if (i + 2 < tokens.size()) {
                classNames.add(tokens.get(i + 2).getToken());
            }
        }
    }
}
/**
 * Registers the entity declared by {@code ax} (typically coming from an
 * imported ontology) in the appropriate name set, using the short form
 * produced by the parser's prefix manager.
 *
 * @param ax declaration whose entity should become resolvable by name
 */
private void processDeclaredEntities(OWLDeclarationAxiom ax) {
    // Double-dispatch on the entity kind via the visitor.
    ax.getEntity().accept(new OWLEntityVisitor() {
        @Override
        public void visit(OWLAnnotationProperty property) {
            annotationPropertyNames.add(pm.getShortForm(property.getIRI()));
        }
        @Override
        public void visit(OWLDatatype datatype) {
            dataTypeNames.add(pm.getShortForm(datatype.getIRI()));
        }
        @Override
        public void visit(OWLNamedIndividual individual) {
            individualNames.add(pm.getShortForm(individual.getIRI()));
        }
        @Override
        public void visit(OWLDataProperty property) {
            dataPropertyNames.add(pm.getShortForm(property.getIRI()));
        }
        @Override
        public void visit(OWLObjectProperty property) {
            objectPropertyNames.add(pm.getShortForm(property.getIRI()));
        }
        @Override
        public void visit(OWLClass cls) {
            classNames.add(pm.getShortForm(cls.getIRI()));
        }
    });
}
@Override
/**
 * Top-level parse loop: reads frames from the token stream until EOF,
 * accumulating axioms, imports declarations, ontology annotations and the
 * ontology ID, then applies everything to {@code ont} in a single batch of
 * changes. Imported ontologies are loaded eagerly so their declared
 * entities become resolvable during the rest of the parse.
 *
 * @param ont the ontology to populate
 * @return the document format carrying the parsed prefix mappings
 */
public ManchesterSyntaxDocumentFormat parseOntology(
        @Nonnull OWLOntology ont) {
    Set<OntologyAxiomPair> axioms = new HashSet<>();
    OWLOntologyID ontologyID = new OWLOntologyID();
    Set<AddImport> imports = new HashSet<>();
    Set<AddOntologyAnnotation> ontologyAnnotations = new HashSet<>();
    defaultOntology = ont;
    // Pre-scan so forward references to frame-declared names resolve.
    processDeclaredEntities();
    while (true) {
        String section = peekToken();
        if (matches(ONTOLOGY, section)) {
            ManchesterOWLSyntaxOntologyHeader header = parseOntologyHeader(
                    false);
            for (OWLImportsDeclaration decl : header
                    .getImportsDeclarations()) {
                assert decl != null;
                imports.add(new AddImport(ont, decl));
                // Load the import now so its declarations can be
                // registered before the importing frames are parsed.
                ont.getOWLOntologyManager().makeLoadImportRequest(decl,
                        getOntologyLoaderConfiguration());
                OWLOntology imported = ont.getOWLOntologyManager()
                        .getImportedOntology(decl);
                assert imported != null;
                for (OWLDeclarationAxiom declaration : imported
                        .getAxioms(AxiomType.DECLARATION)) {
                    processDeclaredEntities(declaration);
                }
            }
            for (OWLAnnotation anno : header.getAnnotations()) {
                assert anno != null;
                ontologyAnnotations
                        .add(new AddOntologyAnnotation(ont, anno));
            }
            ontologyID = header.getOntologyID();
        } else if (matches(DISJOINT_CLASSES, section)) {
            axioms.addAll(parseDisjointClasses());
        } else if (matches(EQUIVALENT_CLASSES, section)) {
            axioms.addAll(parseNaryEquivalentClasses());
        } else if (matches(EQUIVALENT_PROPERTIES, section)) {
            axioms.addAll(parseNaryEquivalentProperties());
        } else if (matches(DISJOINT_PROPERTIES, section)) {
            axioms.addAll(parseDisjointProperties());
        } else if (matches(DIFFERENT_INDIVIDUALS, section)) {
            axioms.addAll(parseDifferentIndividuals());
        } else if (matches(SAME_INDIVIDUAL, section)) {
            axioms.addAll(parseSameIndividual());
        } else if (matches(CLASS, section)) {
            axioms.addAll(parseClassFrame());
        } else if (matches(OBJECT_PROPERTY, section)) {
            axioms.addAll(parseObjectPropertyFrame());
        } else if (matches(DATA_PROPERTY, section)) {
            axioms.addAll(parseDataPropertyFrame());
        } else if (matches(INDIVIDUAL, section)) {
            axioms.addAll(parseIndividualFrame());
        } else if (matches(DATATYPE, section)) {
            axioms.addAll(parseDatatypeFrame());
        } else if (matches(ANNOTATION_PROPERTY, section)) {
            axioms.addAll(parseAnnotationPropertyFrame());
        } else if (matches(VALUE_PARTITION, section)) {
            axioms.addAll(parseValuePartitionFrame());
        } else if (matches(IMPORT, section)) {
            OWLImportsDeclaration decl = parseImportsDeclaration();
            ont.getOWLOntologyManager().makeLoadImportRequest(decl,
                    getOntologyLoaderConfiguration());
            imports.add(new AddImport(ont, decl));
            OWLOntology imported = ont.getOWLOntologyManager()
                    .getImportedOntology(decl);
            assert imported != null;
            for (OWLDeclarationAxiom declaration : imported
                    .getAxioms(AxiomType.DECLARATION)) {
                processDeclaredEntities(declaration);
            }
        } else if (matches(PREFIX, section)) {
            Map<String, IRI> nsMap = parsePrefixDeclaration();
            for (String ns : nsMap.keySet()) {
                assert ns != null;
                pm.setPrefix(ns, nsMap.get(ns).toString());
            }
        } else if (matches(RULE, section)) {
            axioms.addAll(parseRuleFrame());
        } else if (eof(section)) {
            break;
        } else {
            consumeToken();
            throw new ExceptionBuilder().withKeyword(CLASS, OBJECT_PROPERTY,
                    DATA_PROPERTY, INDIVIDUAL, DATATYPE,
                    ANNOTATION_PROPERTY, IMPORT, VALUE_PARTITION, PREFIX,
                    EQUIVALENT_CLASSES, DISJOINT_CLASSES,
                    DISJOINT_PROPERTIES, DIFFERENT_INDIVIDUALS,
                    SAME_INDIVIDUAL).build();
        }
    }
    // Apply all collected changes in one batch: imports and annotations
    // first, then axioms, finally the ontology ID.
    List<OWLOntologyChange> changes = new ArrayList<>(axioms.size());
    changes.addAll(imports);
    changes.addAll(ontologyAnnotations);
    for (OntologyAxiomPair pair : axioms) {
        changes.add(new AddAxiom(ont, pair.getAxiom()));
    }
    changes.add(new SetOntologyID(ont, ontologyID));
    ont.getOWLOntologyManager().applyChanges(changes);
    ManchesterSyntaxDocumentFormat format = new ManchesterSyntaxDocumentFormat();
    format.copyPrefixesFrom(pm);
    return format;
}
/**
 * Parses an {@code Ontology:} header: optional ontology and version IRIs
 * followed by any mix of {@code Import:} and {@code Annotations:} sections.
 * When {@code toEOF} is true, any token other than those sections (or EOF)
 * is an error; otherwise parsing stops at the first foreign token so the
 * caller can continue with frames.
 *
 * <p>Fix: the expected-keyword placeholder in the bad-import error message
 * read {@code <$ONTOLOGYYURI$>} (doubled Y); corrected to
 * {@code <$ONTOLOGYURI$>}.
 *
 * @param toEOF whether the header must extend to end of input
 * @return the parsed header (IRIs, annotations, imports)
 */
private ManchesterOWLSyntaxOntologyHeader parseOntologyHeader(
        boolean toEOF) {
    String tok = consumeToken();
    if (!matches(ONTOLOGY, tok)) {
        throw new ExceptionBuilder().withKeyword(ONTOLOGY).build();
    }
    IRI ontologyIRI = null;
    IRI versionIRI = null;
    if (peekToken().startsWith("<")) {
        ontologyIRI = parseIRI();
        // A second bracketed IRI, if present, is the version IRI.
        if (peekToken().startsWith("<")) {
            versionIRI = parseIRI();
        }
    }
    Set<OWLAnnotation> annotations = new HashSet<>();
    Set<OWLImportsDeclaration> imports = new HashSet<>();
    while (true) {
        String section = peekToken();
        if (matches(IMPORT, section)) {
            consumeToken();
            tok = peekToken();
            Optional<IRI> importedIRI = Optional.absent();
            if (tok.startsWith("<")) {
                importedIRI = Optional.of(parseIRI());
            } else if (isOntologyName(tok)) {
                consumeToken();
                OWLOntology ont = getOntology(tok);
                if (ont != null) {
                    importedIRI = ont.getOntologyID().getOntologyIRI();
                }
            } else {
                consumeToken();
                throw new ExceptionBuilder().withOnto()
                        .withKeyword("<$ONTOLOGYURI$>").build();
            }
            if (!importedIRI.isPresent()) {
                throw new ExceptionBuilder().withOnto()
                        .withKeyword("Imported IRI is null").build();
            }
            IRI importedOntologyIRI = importedIRI.get();
            assert importedOntologyIRI != null;
            imports.add(dataFactory
                    .getOWLImportsDeclaration(importedOntologyIRI));
        } else if (matches(ANNOTATIONS, section)) {
            consumeToken();
            annotations.addAll(parseAnnotationList());
        } else if (eof(section)) {
            break;
        } else if (toEOF) {
            throw new ExceptionBuilder().withKeyword(IMPORT, ANNOTATIONS)
                    .build();
        } else {
            // Not header material; hand control back to the frame loop.
            break;
        }
    }
    return new ManchesterOWLSyntaxOntologyHeader(ontologyIRI, versionIRI,
            annotations, imports);
}
/**
 * Fluent builder for {@link ParserException}s. A fresh builder seeds its
 * expected keywords from the parser's current {@code potentialKeywords};
 * the copy constructor transfers all state from an existing exception so
 * it can be enriched and rethrown.
 *
 * <p>Fix: the copy constructor assigned {@code e.isDatatypeNameExpected()}
 * to {@code dataPropertyNameExpected} (copy-paste slip), which both
 * clobbered the data-property flag and left {@code datatypeNameExpected}
 * permanently false. It now assigns to {@code datatypeNameExpected}.
 */
protected class ExceptionBuilder {

    boolean ontologyNameExpected = false;
    boolean classNameExpected = false;
    boolean objectPropertyNameExpected = false;
    boolean dataPropertyNameExpected = false;
    boolean individualNameExpected = false;
    boolean datatypeNameExpected = false;
    boolean annotationPropertyNameExpected = false;
    boolean integerExpected = false;
    Set<String> keywords = new HashSet<>();
    // Location info; -1 / null means "take it from the current token".
    List<String> tokenSequence;
    int start = -1;
    int line = -1;
    int column = -1;

    ExceptionBuilder() {
        withKeyword(potentialKeywords);
    }

    /** Copies all expectation flags and location data from {@code e}. */
    ExceptionBuilder(ParserException e) {
        ontologyNameExpected = e.isOntologyNameExpected();
        classNameExpected = e.isClassNameExpected();
        objectPropertyNameExpected = e.isObjectPropertyNameExpected();
        dataPropertyNameExpected = e.isDataPropertyNameExpected();
        individualNameExpected = e.isIndividualNameExpected();
        datatypeNameExpected = e.isDatatypeNameExpected();
        annotationPropertyNameExpected = e
                .isAnnotationPropertyNameExpected();
        integerExpected = e.isIntegerExpected();
        withKeyword(e.getExpectedKeywords());
        tokenSequence = e.getTokenSequence();
        start = e.getStartPos();
        line = e.getLineNumber();
        column = e.getColumnNumber();
    }

    public ExceptionBuilder withOnto() {
        ontologyNameExpected = true;
        return this;
    }

    public ExceptionBuilder withInt() {
        integerExpected = true;
        return this;
    }

    public ExceptionBuilder withClass() {
        classNameExpected = true;
        return this;
    }

    public ExceptionBuilder withObject() {
        objectPropertyNameExpected = true;
        // Inverse expressions are always legal where object properties are.
        withKeyword(INVERSE);
        return this;
    }

    public ExceptionBuilder withData() {
        dataPropertyNameExpected = true;
        return this;
    }

    public ExceptionBuilder withInd() {
        individualNameExpected = true;
        return this;
    }

    public ExceptionBuilder withDt() {
        datatypeNameExpected = true;
        return this;
    }

    public ExceptionBuilder withAnn() {
        annotationPropertyNameExpected = true;
        return this;
    }

    public ExceptionBuilder withKeyword(String s) {
        keywords.add(s);
        return this;
    }

    public ExceptionBuilder withKeyword(ManchesterOWLSyntax s) {
        keywords.add(s.keyword());
        return this;
    }

    public ExceptionBuilder withKeyword(String... strings) {
        for (String s : strings) {
            keywords.add(s);
        }
        return this;
    }

    public ExceptionBuilder withKeyword(ManchesterOWLSyntax... keys) {
        for (ManchesterOWLSyntax s : keys) {
            keywords.add(s.keyword());
        }
        return this;
    }

    /** Accepts a mixed collection of String and ManchesterOWLSyntax keys. */
    public <T> ExceptionBuilder withKeyword(Collection<T> keys) {
        for (T s : keys) {
            if (s instanceof String) {
                withKeyword((String) s);
            }
            if (s instanceof ManchesterOWLSyntax) {
                withKeyword((ManchesterOWLSyntax) s);
            }
        }
        return this;
    }

    /**
     * Builds the exception, defaulting location information to the
     * parser's last-seen token when none was copied in.
     */
    public ParserException build() {
        if (tokenSequence == null) {
            Token lastToken = getLastToken();
            tokenSequence = getTokenSequence();
            start = lastToken.getPos();
            line = lastToken.getRow();
            column = lastToken.getCol();
        }
        return new ParserException(tokenSequence, start, line, column,
                ontologyNameExpected, classNameExpected,
                objectPropertyNameExpected, dataPropertyNameExpected,
                individualNameExpected, datatypeNameExpected,
                annotationPropertyNameExpected, integerExpected, keywords);
    }
}
/**
 * Returns up to four tokens around the current position for error
 * reporting, starting one token back so the offending token is included.
 * Collection stops early at EOF; an empty result is replaced by a
 * singleton EOF list.
 *
 * @return a short, never-empty token window
 */
protected List<String> getTokenSequence() {
    List<String> window = new ArrayList<>();
    int cursor = Math.max(0, tokenIndex - 1);
    while (cursor < tokens.size() && window.size() < 4
            && !window.contains(EOF)) {
        window.add(tokens.get(cursor).getToken());
        cursor++;
    }
    if (window.isEmpty()) {
        window.add(EOF);
    }
    return window;
}
/**
 * Entity checker backed by the parser's declared-name sets. Each lookup
 * returns the entity when the name is known (or, for classes, one of the
 * built-in owl:Thing / owl:Nothing spellings) and {@code null} otherwise.
 */
class DefaultEntityChecker implements OWLEntityChecker {

    @Override
    public OWLClass getOWLClass(String name) {
        switch (name) {
            case "Thing":
            case "owl:Thing":
                return dataFactory.getOWLThing();
            case "Nothing":
            case "owl:Nothing":
                return dataFactory.getOWLNothing();
            default:
                return classNames.contains(name)
                        ? dataFactory.getOWLClass(getIRI(name))
                        : null;
        }
    }

    @Override
    public OWLObjectProperty getOWLObjectProperty(String name) {
        return objectPropertyNames.contains(name)
                ? dataFactory.getOWLObjectProperty(getIRI(name))
                : null;
    }

    @Override
    public OWLDataProperty getOWLDataProperty(String name) {
        return dataPropertyNames.contains(name)
                ? dataFactory.getOWLDataProperty(getIRI(name))
                : null;
    }

    @Override
    public OWLNamedIndividual getOWLIndividual(String name) {
        return individualNames.contains(name)
                ? dataFactory.getOWLNamedIndividual(getIRI(name))
                : null;
    }

    @Override
    public OWLDatatype getOWLDatatype(String name) {
        return dataTypeNames.contains(name)
                ? dataFactory.getOWLDatatype(getIRI(name))
                : null;
    }

    @Override
    public OWLAnnotationProperty getOWLAnnotationProperty(String name) {
        return annotationPropertyNames.contains(name)
                ? dataFactory.getOWLAnnotationProperty(getIRI(name))
                : null;
    }
}
// Cache of already-resolved names to IRIs, keyed by either the short name
// or the full IRI string; avoids repeated prefix expansion in getIRI().
private final Map<String, IRI> nameIRIMap = new HashMap<>();
@Nonnull
/**
 * Resolves a name to an IRI, caching results in {@code nameIRIMap}.
 * If the current token was "&lt;", the next token holds a full IRI and the
 * closing "&gt;" is consumed as well. Otherwise the name is treated as a
 * (possibly prefixed) short form; names without a colon get the default
 * (empty) prefix prepended before resolution via the prefix manager.
 *
 * @param inputName the name token (or "&lt;" introducing a full IRI)
 * @return the resolved IRI
 */
protected IRI getIRI(@Nonnull String inputName) {
    String name = inputName;
    boolean fullIRI = name.equals("<");
    if (fullIRI) {
        // Consume the IRI body and the closing ">" token.
        name = consumeToken();
        consumeToken();
    }
    IRI uri = nameIRIMap.get(name);
    if (uri != null) {
        return uri;
    }
    if (fullIRI) {
        uri = IRI.create(name);
    } else {
        int colonIndex = name.indexOf(':');
        if (colonIndex == -1) {
            // No prefix given: use the default prefix.
            name = ":" + name;
        }
        uri = pm.getIRI(name);
    }
    nameIRIMap.put(name, uri);
    return uri;
}
@Override
/**
 * Parses a single stand-alone axiom (as opposed to a frame), dispatching
 * on the first token: an entity name, an opening bracket/brace starting a
 * class expression, {@code inverse}, or a property-characteristic keyword.
 *
 * @return the parsed axiom
 */
public OWLAxiom parseAxiom() {
    String token = peekToken();
    if (isClassName(token)) {
        return parseAxiomWithClassExpressionStart();
    } else if (isObjectPropertyName(token)) {
        return parseAxiomWithObjectPropertyStart();
    } else if (isDataPropertyName(token)) {
        return parseAxiomWithDataPropertyStart();
    } else if (isIndividualName(token)) {
        return parseAxiomWithIndividualStart();
    } else if (matches(INV, token)) {
        // "inverse p ..." still starts an object-property axiom.
        return parseAxiomWithObjectPropertyStart();
    } else if (matches(OPEN, token)) {
        return parseAxiomWithClassExpressionStart();
    } else if (matches(OPENBRACE, token)) {
        return parseAxiomWithClassExpressionStart();
    } else if (matches(FUNCTIONAL, token)) {
        return parseFunctionPropertyAxiom();
    } else if (matches(INVERSE_FUNCTIONAL, token)) {
        return parseInverseFunctionalPropertyAxiom();
    } else if (matches(SYMMETRIC, token)) {
        return parseSymmetricPropertyAxiom();
    } else if (matches(ASYMMETRIC, token)) {
        return parseAsymmetricPropertyAxiom();
    } else if (matches(TRANSITIVE, token)) {
        return parseTransitivePropertyAxiom();
    } else if (matches(REFLEXIVE, token)) {
        return parseReflexivePropertyAxiom();
    } else if (matches(IRREFLEXIVE, token)) {
        return parseIrreflexivePropertyAxiom();
    }
    throw new ExceptionBuilder().withClass().withObject().withData()
            .withKeyword(OPEN, OPENBRACE, INV, FUNCTIONAL,
                    INVERSE_FUNCTIONAL, SYMMETRIC, ASYMMETRIC, TRANSITIVE,
                    REFLEXIVE, IRREFLEXIVE)
            .build();
}
@Override
/**
 * Parses an axiom and narrows it to a class axiom; throws
 * ClassCastException if the parsed axiom is not one (same as before).
 */
public OWLClassAxiom parseClassAxiom() {
    OWLAxiom axiom = parseAxiom();
    return (OWLClassAxiom) axiom;
}
@Nonnull
/**
 * Parses an axiom that starts with an individual; the only supported form
 * is a class assertion: {@code ind Type: C}.
 *
 * @return the class assertion axiom
 */
private OWLAxiom parseAxiomWithIndividualStart() {
    OWLIndividual individual = parseIndividual();
    String keyword = consumeToken();
    if (!matches(TYPE, keyword)) {
        throw new ExceptionBuilder().withKeyword(TYPE).build();
    }
    OWLClassExpression type = parseClassExpression();
    return dataFactory.getOWLClassAssertionAxiom(type, individual);
}
@Nonnull
/**
 * Parses an axiom whose first token is a data property. Restriction
 * keywords (some/only/min/max/exactly) build a class expression and hand
 * off to {@code parseClassAxiomRemainder}; the remaining keywords build a
 * property axiom directly.
 *
 * @return the parsed axiom
 */
private OWLAxiom parseAxiomWithDataPropertyStart() {
    OWLDataPropertyExpression prop = parseDataProperty();
    String kw = consumeToken();
    if (matches(SOME, kw)) {
        OWLDataRange dataRange = parseDataIntersectionOf();
        return parseClassAxiomRemainder(
                dataFactory.getOWLDataSomeValuesFrom(prop, dataRange));
    } else if (matches(ONLY, kw)) {
        OWLDataRange dataRange = parseDataIntersectionOf();
        return parseClassAxiomRemainder(
                dataFactory.getOWLDataAllValuesFrom(prop, dataRange));
    } else if (matches(MIN, kw)) {
        // Cardinality restrictions: integer first, then the range.
        int cardi = parseInteger();
        OWLDataRange dataRange = parseDataIntersectionOf();
        return parseClassAxiomRemainder(dataFactory
                .getOWLDataMinCardinality(cardi, prop, dataRange));
    } else if (matches(MAX, kw)) {
        int cardi = parseInteger();
        OWLDataRange dataRange = parseDataIntersectionOf();
        return parseClassAxiomRemainder(dataFactory
                .getOWLDataMaxCardinality(cardi, prop, dataRange));
    } else if (matches(EXACTLY, kw)) {
        int cardi = parseInteger();
        OWLDataRange dataRange = parseDataIntersectionOf();
        return parseClassAxiomRemainder(dataFactory
                .getOWLDataExactCardinality(cardi, prop, dataRange));
    } else if (matches(SUB_PROPERTY_OF, kw)) {
        OWLDataPropertyExpression superProperty = parseDataPropertyExpression();
        return dataFactory.getOWLSubDataPropertyOfAxiom(prop,
                superProperty);
    } else if (matches(EQUIVALENT_TO, kw)) {
        OWLDataPropertyExpression equivProp = parseDataPropertyExpression();
        return dataFactory.getOWLEquivalentDataPropertiesAxiom(prop,
                equivProp);
    } else if (matches(DISJOINT_WITH, kw)) {
        OWLDataPropertyExpression disjProp = parseDataPropertyExpression();
        return dataFactory.getOWLDisjointDataPropertiesAxiom(prop,
                disjProp);
    } else if (matches(DOMAIN, kw)) {
        OWLClassExpression domain = parseClassExpression();
        return dataFactory.getOWLDataPropertyDomainAxiom(prop, domain);
    } else if (matches(RANGE, kw)) {
        OWLDataRange range = parseDataRange();
        return dataFactory.getOWLDataPropertyRangeAxiom(prop, range);
    } else {
        throw new ExceptionBuilder()
                .withKeyword(SOME, ONLY, MIN, MAX, EXACTLY, SUB_PROPERTY_OF,
                        EQUIVALENT_TO, DISJOINT_WITH, DOMAIN, RANGE)
                .build();
    }
}
@Nonnull
/**
 * Parses a single data property expression (a declared data property name).
 *
 * @return the parsed data property expression
 */
private OWLDataPropertyExpression parseDataPropertyExpression() {
    String name = consumeToken();
    if (isDataPropertyName(name)) {
        return getOWLDataProperty(name);
    }
    throw new ExceptionBuilder().withData().build();
}
@Nonnull
/** Parses an axiom that starts with a class expression (a union). */
private OWLAxiom parseAxiomWithClassExpressionStart() {
    OWLClassExpression start = parseUnion();
    return parseClassAxiomRemainder(start);
}
@Nonnull
/**
 * Given an already-parsed left-hand class expression, parses the rest of a
 * class axiom. SubClassOf / DisjointWith / EquivalentTo terminate the
 * axiom; "and" / "or" absorb the next operand into an intersection/union
 * and recurse, letting arbitrarily long boolean prefixes accumulate.
 *
 * @param startExpression left-hand side parsed so far
 * @return the completed class axiom
 */
private OWLAxiom parseClassAxiomRemainder(
        @Nonnull OWLClassExpression startExpression) {
    String kw = consumeToken();
    if (matches(SUBCLASS_OF, kw)) {
        OWLClassExpression superClass = parseClassExpression();
        return dataFactory.getOWLSubClassOfAxiom(startExpression,
                superClass);
    } else if (matches(DISJOINT_WITH, kw)) {
        OWLClassExpression disjointClass = parseClassExpression();
        return dataFactory.getOWLDisjointClassesAxiom(startExpression,
                disjointClass);
    } else if (matches(EQUIVALENT_TO, kw)) {
        OWLClassExpression equivClass = parseClassExpression();
        return dataFactory.getOWLEquivalentClassesAxiom(startExpression,
                equivClass);
    } else if (matches(AND, kw)) {
        // Flatten: merge the new conjunct's set with what we have so far.
        OWLClassExpression conjunct = parseIntersection();
        Set<OWLClassExpression> conjuncts = conjunct.asConjunctSet();
        conjuncts.add(startExpression);
        OWLClassExpression ce = dataFactory
                .getOWLObjectIntersectionOf(conjuncts);
        return parseClassAxiomRemainder(ce);
    } else if (matches(OR, kw)) {
        OWLClassExpression disjunct = parseUnion();
        Set<OWLClassExpression> disjuncts = disjunct.asDisjunctSet();
        disjuncts.add(startExpression);
        OWLClassExpression ce = dataFactory.getOWLObjectUnionOf(disjuncts);
        return parseClassAxiomRemainder(ce);
    } else {
        throw new ExceptionBuilder().withKeyword(SUBCLASS_OF, DISJOINT_WITH,
                EQUIVALENT_TO, AND, OR).build();
    }
}
@Nonnull
/**
 * Parses an axiom whose first token is an object property (possibly
 * inverse). Restriction keywords build a class expression and continue via
 * {@code parseClassAxiomRemainder}; property keywords build a property
 * axiom; the chain connective "o" parses a property chain followed by
 * SubPropertyOf.
 *
 * @return the parsed axiom
 */
private OWLAxiom parseAxiomWithObjectPropertyStart() {
    OWLObjectPropertyExpression prop = parseObjectPropertyExpression(false);
    String kw = consumeToken();
    if (matches(SOME, kw)) {
        OWLClassExpression filler = parseUnion();
        return parseClassAxiomRemainder(
                dataFactory.getOWLObjectSomeValuesFrom(prop, filler));
    } else if (matches(ONLY, kw)) {
        OWLClassExpression filler = parseUnion();
        return parseClassAxiomRemainder(
                dataFactory.getOWLObjectAllValuesFrom(prop, filler));
    } else if (matches(MIN, kw)) {
        // Cardinality restrictions: integer first, then the filler.
        int cardi = parseInteger();
        OWLClassExpression filler = parseUnion();
        return parseClassAxiomRemainder(dataFactory
                .getOWLObjectMinCardinality(cardi, prop, filler));
    } else if (matches(MAX, kw)) {
        int cardi = parseInteger();
        OWLClassExpression filler = parseUnion();
        return parseClassAxiomRemainder(dataFactory
                .getOWLObjectMaxCardinality(cardi, prop, filler));
    } else if (matches(EXACTLY, kw)) {
        int cardi = parseInteger();
        OWLClassExpression filler = parseUnion();
        return parseClassAxiomRemainder(dataFactory
                .getOWLObjectExactCardinality(cardi, prop, filler));
    } else if (matches(SUB_PROPERTY_OF, kw)) {
        OWLObjectPropertyExpression superProperty = parseObjectPropertyExpression(
                false);
        return dataFactory.getOWLSubObjectPropertyOfAxiom(prop,
                superProperty);
    } else if (matches(EQUIVALENT_TO, kw)) {
        OWLObjectPropertyExpression equivProp = parseObjectPropertyExpression(
                false);
        return dataFactory.getOWLEquivalentObjectPropertiesAxiom(prop,
                equivProp);
    } else if (matches(INVERSE_OF, kw)) {
        OWLObjectPropertyExpression invProp = parseObjectPropertyExpression(
                false);
        return dataFactory.getOWLInverseObjectPropertiesAxiom(prop,
                invProp);
    } else if (matches(DISJOINT_WITH, kw)) {
        OWLObjectPropertyExpression disjProp = parseObjectPropertyExpression(
                false);
        return dataFactory.getOWLDisjointObjectPropertiesAxiom(prop,
                disjProp);
    } else if (matches(DOMAIN, kw)) {
        OWLClassExpression domain = parseClassExpression();
        return dataFactory.getOWLObjectPropertyDomainAxiom(prop, domain);
    } else if (matches(RANGE, kw)) {
        OWLClassExpression range = parseClassExpression();
        return dataFactory.getOWLObjectPropertyRangeAxiom(prop, range);
    } else if (matches(CHAIN_CONNECT, kw)) {
        // Property chain: p o q o ... SubPropertyOf: s
        String sep = kw;
        List<OWLObjectPropertyExpression> chain = new ArrayList<>();
        chain.add(prop);
        while (sep.equals("o")) {
            OWLObjectPropertyExpression chainProp = parseObjectPropertyExpression(
                    false);
            chain.add(chainProp);
            sep = consumeToken();
        }
        if (!matches(SUB_PROPERTY_OF, sep)) {
            throw new ExceptionBuilder().withKeyword(SUB_PROPERTY_OF)
                    .build();
        }
        OWLObjectPropertyExpression superProp = parseObjectPropertyExpression(
                false);
        return dataFactory.getOWLSubPropertyChainOfAxiom(chain, superProp);
    } else {
        throw new ExceptionBuilder().withKeyword(SOME, ONLY, MIN, MAX,
                EXACTLY, SUB_PROPERTY_OF, EQUIVALENT_TO, INVERSE_OF,
                DISJOINT_WITH, DOMAIN, RANGE, CHAIN_CONNECT).build();
    }
}
@Nonnull
/** Parses {@code InverseFunctional: p} into the corresponding axiom. */
private OWLAxiom parseInverseFunctionalPropertyAxiom() {
    String keyword = consumeToken();
    if (matches(INVERSE_FUNCTIONAL, keyword)) {
        OWLObjectPropertyExpression property = parseObjectPropertyExpression(false);
        return dataFactory.getOWLInverseFunctionalObjectPropertyAxiom(property);
    }
    throw new ExceptionBuilder().withKeyword(INVERSE_FUNCTIONAL)
            .build();
}
@Nonnull
/** Parses {@code Symmetric: p} into the corresponding axiom. */
private OWLAxiom parseSymmetricPropertyAxiom() {
    String keyword = consumeToken();
    if (matches(SYMMETRIC, keyword)) {
        OWLObjectPropertyExpression property = parseObjectPropertyExpression(false);
        return dataFactory.getOWLSymmetricObjectPropertyAxiom(property);
    }
    throw new ExceptionBuilder().withKeyword(SYMMETRIC).build();
}
@Nonnull
/** Parses {@code Asymmetric: p} into the corresponding axiom. */
private OWLAxiom parseAsymmetricPropertyAxiom() {
    String keyword = consumeToken();
    if (matches(ASYMMETRIC, keyword)) {
        OWLObjectPropertyExpression property = parseObjectPropertyExpression(false);
        return dataFactory.getOWLAsymmetricObjectPropertyAxiom(property);
    }
    throw new ExceptionBuilder().withKeyword(ASYMMETRIC).build();
}
@Nonnull
/** Parses {@code Transitive: p} into the corresponding axiom. */
private OWLAxiom parseTransitivePropertyAxiom() {
    String keyword = consumeToken();
    if (matches(TRANSITIVE, keyword)) {
        OWLObjectPropertyExpression property = parseObjectPropertyExpression(false);
        return dataFactory.getOWLTransitiveObjectPropertyAxiom(property);
    }
    throw new ExceptionBuilder().withKeyword(TRANSITIVE).build();
}
@Nonnull
/** Parses {@code Reflexive: p} into the corresponding axiom. */
private OWLAxiom parseReflexivePropertyAxiom() {
    String keyword = consumeToken();
    if (matches(REFLEXIVE, keyword)) {
        OWLObjectPropertyExpression property = parseObjectPropertyExpression(false);
        return dataFactory.getOWLReflexiveObjectPropertyAxiom(property);
    }
    throw new ExceptionBuilder().withKeyword(REFLEXIVE).build();
}
@Nonnull
/** Parses {@code Irreflexive: p} into the corresponding axiom. */
private OWLAxiom parseIrreflexivePropertyAxiom() {
    String keyword = consumeToken();
    if (matches(IRREFLEXIVE, keyword)) {
        OWLObjectPropertyExpression property = parseObjectPropertyExpression(false);
        return dataFactory.getOWLIrreflexiveObjectPropertyAxiom(property);
    }
    throw new ExceptionBuilder().withKeyword(IRREFLEXIVE).build();
}
/**
 * Parses a {@code Functional:} characteristic, which may apply to either an
 * object property or a data property; the decision is made by peeking at
 * the token that follows the keyword.
 */
@Nonnull
private OWLAxiom parseFunctionPropertyAxiom() {
    String token = consumeToken();
    if (!matches(FUNCTIONAL, token)) {
        throw new ExceptionBuilder().withKeyword(FUNCTIONAL).build();
    }
    String next = peekToken();
    if (isObjectPropertyName(next)) {
        return dataFactory.getOWLFunctionalObjectPropertyAxiom(
                parseObjectPropertyExpression(false));
    }
    if (isDataPropertyName(next)) {
        return dataFactory.getOWLFunctionalDataPropertyAxiom(
                parseDataProperty());
    }
    // Neither kind of property name: consume the offending token and report
    // that an object or data property was expected.
    consumeToken();
    throw new ExceptionBuilder().withObject().withData().build();
}
/**
 * Parses a comma-separated frame-section list whose entries may each carry
 * their own annotations, converting every entry into one axiom per target
 * ontology.
 *
 * @param s          the frame subject the section belongs to
 * @param itemParser strategy that parses one entry and builds its axiom
 * @param ontologies ontologies each resulting axiom should be paired with
 * @return one {@code OntologyAxiomPair} per (ontology, entry) combination
 */
@Nonnull
private <F, O> Set<OntologyAxiomPair> parseAnnotatedListItems(@Nonnull F s,
        @Nonnull AnnotatedListItemParser<F, O> itemParser,
        @Nonnull Set<OWLOntology> ontologies) {
    Set<OntologyAxiomPair> pairs = new HashSet<>();
    boolean moreItems = true;
    // At least one entry is always parsed; a trailing comma continues the list.
    while (moreItems) {
        Set<OWLAnnotation> annotations = parseAnnotations();
        O item = itemParser.parseItem(s);
        OWLAxiom axiom = itemParser.createAxiom(s, item, annotations);
        for (OWLOntology ontology : ontologies) {
            pairs.add(new OntologyAxiomPair(ontology, axiom));
        }
        if (matches(COMMA, peekToken())) {
            consumeToken();
        } else {
            moreItems = false;
        }
    }
    return pairs;
}
/**
 * Strategy for parsing one item of an annotated, comma-separated frame
 * section list and converting it into an axiom about the frame subject.
 *
 * @param <F> type of the frame subject the section belongs to
 * @param <O> type of value parsed for each list entry
 */
interface AnnotatedListItemParser<F, O> {
    /** Parses a single list entry for the given frame subject. */
    @Nonnull
    O parseItem(@Nonnull F s);
    /**
     * Builds the axiom relating frame subject {@code s} to parsed item
     * {@code o}, attaching the entry's annotations {@code anns}.
     */
    @Nonnull
    OWLAxiom createAxiom(@Nonnull F s, @Nonnull O o,
            @Nonnull Set<OWLAnnotation> anns);
    /** Returns the Manchester syntax keyword that introduces this section. */
    ManchesterOWLSyntax getFrameSectionKeyword();
}
/** Base for sections whose entries are single class expressions. */
abstract class AnnotatedClassExpressionListItemParser<F>
        implements AnnotatedListItemParser<F, OWLClassExpression> {
    /** Each entry is parsed as a (possibly union) class expression. */
    @Override
    public OWLClassExpression parseItem(F s) {
        return parseUnion();
    }
}
/** Base for sections whose entries are lists of class expressions. */
abstract class AnnotatedClassExpressionSetListItemParser<F>
        implements AnnotatedListItemParser<F, Set<OWLClassExpression>> {
    /** Each entry is parsed as a whole class-expression list. */
    @Override
    public Set<OWLClassExpression> parseItem(F s) {
        return parseClassExpressionList();
    }
}
/** Base for sections whose entries are lists of property expressions. */
abstract class AnnotatedPropertyListListItemParser<F>
        implements AnnotatedListItemParser<F, Set<OWLPropertyExpression>> {
    /** Each entry is parsed as a whole property list. */
    @Override
    public Set<OWLPropertyExpression> parseItem(F s) {
        return parsePropertyList();
    }
}
/** Base for sections whose entries are single individuals. */
abstract class AnnotatedIndividualsListItemParser<F>
        implements AnnotatedListItemParser<F, OWLIndividual> {
    /** Each entry is parsed as one (named or anonymous) individual. */
    @Override
    public OWLIndividual parseItem(F s) {
        return parseIndividual();
    }
}
/** Base for sections whose entries are single annotations. */
abstract class AnnotationListItemParser<F>
        implements AnnotatedListItemParser<F, OWLAnnotation> {
    /** Each entry is parsed as one annotation. */
    @Override
    public OWLAnnotation parseItem(F s) {
        return parseAnnotation();
    }
}
/**
 * {@code SubClassOf:} section of a class frame: each parsed expression is a
 * superclass of the frame subject.
 */
class ClassSubClassOfListItemParser
        extends AnnotatedClassExpressionListItemParser<OWLClass> {
    @Override
    public OWLAxiom createAxiom(OWLClass s, OWLClassExpression o,
            Set<OWLAnnotation> anns) {
        // s is the subclass, the parsed expression o the superclass.
        return dataFactory.getOWLSubClassOfAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return SUBCLASS_OF;
    }
}
/**
 * {@code EquivalentTo:} section of a class frame: each parsed expression is
 * asserted equivalent to the frame subject.
 */
class ClassEquivalentToListItemParser
        extends AnnotatedClassExpressionListItemParser<OWLClass> {
    @Override
    public OWLAxiom createAxiom(OWLClass s, OWLClassExpression o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLEquivalentClassesAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return EQUIVALENT_TO;
    }
}
/**
 * {@code DisjointWith:} section of a class frame: each parsed expression is
 * asserted pairwise disjoint with the frame subject.
 */
class ClassDisjointWithListItemParser
        extends AnnotatedClassExpressionListItemParser<OWLClass> {

    @Override
    public OWLAxiom createAxiom(OWLClass s, OWLClassExpression o,
            Set<OWLAnnotation> anns) {
        // The axiom takes a set of operands; build the two-element pair.
        Set<OWLClassExpression> operands = new HashSet<>();
        operands.add(o);
        operands.add(s);
        return dataFactory.getOWLDisjointClassesAxiom(operands, anns);
    }

    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DISJOINT_WITH;
    }
}
/**
 * {@code DisjointClasses:} section: each entry is a list of class
 * expressions asserted mutually disjoint.
 */
class ClassDisjointClassesListItemParser
        extends AnnotatedClassExpressionSetListItemParser<OWLClass> {
    @Override
    public OWLAxiom createAxiom(OWLClass s, Set<OWLClassExpression> o,
            Set<OWLAnnotation> anns) {
        // NOTE(review): the frame subject is deliberately NOT added to the
        // operands (see the disabled line below); the axiom covers only the
        // listed expressions. Confirm this matches the intended frame
        // semantics before changing it.
        // o.add(s);
        return dataFactory.getOWLDisjointClassesAxiom(o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DISJOINT_CLASSES;
    }
}
/**
 * {@code DisjointUnionOf:} section: the frame subject is asserted to be the
 * disjoint union of the listed class expressions.
 */
class ClassDisjointUnionOfListItemParser
        extends AnnotatedClassExpressionSetListItemParser<OWLClass> {
    @Override
    public OWLAxiom createAxiom(OWLClass s, Set<OWLClassExpression> o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLDisjointUnionAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DISJOINT_UNION_OF;
    }
}
/**
 * {@code HasKey:} section: the listed properties form a key for the frame
 * subject class.
 */
class ClassHasKeyListItemParser
        extends AnnotatedPropertyListListItemParser<OWLClass> {
    @Override
    public OWLAxiom createAxiom(OWLClass s, Set<OWLPropertyExpression> o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLHasKeyAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return HAS_KEY;
    }
}
/**
 * {@code SuperClassOf:} section: like SubClassOf but with the arguments
 * reversed — each parsed expression becomes a SUBclass of the frame subject.
 */
class ClassSuperClassOfListItemParser
        extends AnnotatedClassExpressionListItemParser<OWLClass> {
    @Override
    public OWLAxiom createAxiom(OWLClass s, OWLClassExpression o,
            Set<OWLAnnotation> anns) {
        // Note reversed argument order: o is the subclass, s the superclass.
        return dataFactory.getOWLSubClassOfAxiom(o, s, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return SUPERCLASS_OF;
    }
}
/**
 * {@code Individuals:} section: each listed individual is asserted to be an
 * instance of the frame subject class.
 */
class ClassIndividualsListItemParser
        extends AnnotatedIndividualsListItemParser<OWLClass> {
    @Override
    public OWLAxiom createAxiom(OWLClass s, OWLIndividual o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLClassAssertionAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return INDIVIDUALS;
    }
}
/**
 * {@code Annotations:} section of an entity frame: each parsed annotation
 * is asserted on the entity's IRI.
 */
class EntityAnnotationsListItemParser<E extends OWLEntity>
        extends AnnotationListItemParser<E> {
    @Override
    public OWLAxiom createAxiom(E s, OWLAnnotation o,
            Set<OWLAnnotation> anns) {
        // The annotation subject is the entity's IRI, not the entity itself.
        return dataFactory.getOWLAnnotationAssertionAxiom(s.getIRI(), o,
                anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return ANNOTATIONS;
    }
}
/** Base for sections whose entries are object property expressions. */
abstract class ObjectPropertyExpressionListItemParser<F>
        implements AnnotatedListItemParser<F, OWLObjectPropertyExpression> {
    /** Each entry is parsed as one object property expression. */
    @Override
    public OWLObjectPropertyExpression parseItem(F s) {
        return parseObjectPropertyExpression(false);
    }
}
/**
 * {@code SubPropertyOf:} section of an object property frame: the frame
 * subject is a subproperty of each parsed expression.
 */
class ObjectPropertySubPropertyOfListItemParser
        extends ObjectPropertyExpressionListItemParser<OWLObjectProperty> {
    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s,
            OWLObjectPropertyExpression o, Set<OWLAnnotation> anns) {
        return dataFactory.getOWLSubObjectPropertyOfAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return SUB_PROPERTY_OF;
    }
}
/**
 * {@code SuperPropertyOf:} section: arguments reversed relative to
 * SubPropertyOf — each parsed expression is a SUBproperty of the subject.
 */
class ObjectPropertySuperPropertyOfListItemParser
        extends ObjectPropertyExpressionListItemParser<OWLObjectProperty> {
    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s,
            OWLObjectPropertyExpression o, Set<OWLAnnotation> anns) {
        // Note reversed argument order: o is the subproperty.
        return dataFactory.getOWLSubObjectPropertyOfAxiom(o, s, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return SUPER_PROPERTY_OF;
    }
}
/**
 * {@code EquivalentTo:} section of an object property frame: each parsed
 * expression is asserted equivalent to the frame subject.
 */
class ObjectPropertyEquivalentToListItemParser
        extends ObjectPropertyExpressionListItemParser<OWLObjectProperty> {
    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s,
            OWLObjectPropertyExpression o, Set<OWLAnnotation> anns) {
        return dataFactory.getOWLEquivalentObjectPropertiesAxiom(s, o,
                anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return EQUIVALENT_TO;
    }
}
/**
 * {@code DisjointWith:} section of an object property frame: each parsed
 * expression is asserted pairwise disjoint with the frame subject.
 */
class ObjectPropertyDisjointWithListItemParser
        extends ObjectPropertyExpressionListItemParser<OWLObjectProperty> {

    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s,
            OWLObjectPropertyExpression o, Set<OWLAnnotation> anns) {
        // The axiom takes a set of operands; build the two-element pair.
        Set<OWLObjectPropertyExpression> operands = new HashSet<>();
        operands.add(o);
        operands.add(s);
        return dataFactory.getOWLDisjointObjectPropertiesAxiom(operands,
                anns);
    }

    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DISJOINT_WITH;
    }
}
/**
 * {@code Domain:} section of an object property frame: each parsed class
 * expression is a domain of the frame subject.
 */
class ObjectPropertyDomainListItemParser
        extends AnnotatedClassExpressionListItemParser<OWLObjectProperty> {
    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s, OWLClassExpression o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLObjectPropertyDomainAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DOMAIN;
    }
}
/**
 * {@code Range:} section of an object property frame: each parsed class
 * expression is a range of the frame subject.
 */
class ObjectPropertyRangeListItemParser
        extends AnnotatedClassExpressionListItemParser<OWLObjectProperty> {
    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s, OWLClassExpression o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLObjectPropertyRangeAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return RANGE;
    }
}
/**
 * {@code InverseOf:} section of an object property frame: each parsed
 * expression is asserted to be the inverse of the frame subject.
 */
class ObjectPropertyInverseOfListItemParser
        extends ObjectPropertyExpressionListItemParser<OWLObjectProperty> {
    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s,
            OWLObjectPropertyExpression o, Set<OWLAnnotation> anns) {
        return dataFactory.getOWLInverseObjectPropertiesAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return INVERSE_OF;
    }
}
/**
 * {@code SubPropertyChain:} section: each entry is a property chain that is
 * a sub-property of the frame subject.
 */
class ObjectPropertySubPropertyChainListItemParser implements
        AnnotatedListItemParser<OWLObjectProperty, List<OWLObjectPropertyExpression>> {
    /** Each entry is parsed as an ordered chain of property expressions. */
    @Override
    public List<OWLObjectPropertyExpression> parseItem(
            OWLObjectProperty s) {
        return parseObjectPropertyChain();
    }
    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s,
            List<OWLObjectPropertyExpression> o, Set<OWLAnnotation> anns) {
        // The chain o is the sub-side; the frame subject s is the super
        // property it entails.
        return dataFactory.getOWLSubPropertyChainOfAxiom(o, s, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return SUB_PROPERTY_CHAIN;
    }
}
/**
 * {@code Characteristics:} section of an object property frame: each entry
 * is a characteristic keyword (Functional, Symmetric, ...) that already
 * yields a complete axiom; annotations are attached afterwards.
 */
class ObjectPropertyCharacteristicsItemParser implements
        AnnotatedListItemParser<OWLObjectProperty, OWLObjectPropertyCharacteristicAxiom> {
    @Override
    public OWLObjectPropertyCharacteristicAxiom parseItem(
            @Nonnull OWLObjectProperty s) {
        return parseObjectPropertyCharacteristic(s);
    }
    @Override
    public OWLAxiom createAxiom(OWLObjectProperty s,
            OWLObjectPropertyCharacteristicAxiom o,
            Set<OWLAnnotation> anns) {
        // The parsed item is already an axiom; just annotate it.
        return o.getAnnotatedAxiom(anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return CHARACTERISTICS;
    }
}
/** Base for sections whose entries are data property expressions. */
abstract class DataPropertyExpressionListItemParser<F>
        implements AnnotatedListItemParser<F, OWLDataPropertyExpression> {
    /** Covariant override: entries are parsed as named data properties. */
    @Override
    public OWLDataProperty parseItem(F s) {
        return parseDataProperty();
    }
}
/**
 * {@code SubPropertyOf:} section of a data property frame: the frame
 * subject is a subproperty of each parsed expression.
 */
class DataPropertySubPropertyOfListItemParser
        extends DataPropertyExpressionListItemParser<OWLDataProperty> {
    @Override
    public OWLAxiom createAxiom(OWLDataProperty s,
            OWLDataPropertyExpression o, Set<OWLAnnotation> anns) {
        return dataFactory.getOWLSubDataPropertyOfAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return SUB_PROPERTY_OF;
    }
}
/**
 * {@code EquivalentTo:} section of a data property frame: each parsed
 * expression is asserted equivalent to the frame subject.
 */
class DataPropertyEquivalentToListItemParser
        extends DataPropertyExpressionListItemParser<OWLDataProperty> {
    @Override
    public OWLAxiom createAxiom(OWLDataProperty s,
            OWLDataPropertyExpression o, Set<OWLAnnotation> anns) {
        return dataFactory.getOWLEquivalentDataPropertiesAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return EQUIVALENT_TO;
    }
}
/**
 * {@code DisjointWith:} section of a data property frame: each parsed
 * expression is asserted pairwise disjoint with the frame subject.
 */
class DataPropertyDisjointWithListItemParser
        extends DataPropertyExpressionListItemParser<OWLDataProperty> {

    @Override
    public OWLAxiom createAxiom(OWLDataProperty s,
            OWLDataPropertyExpression o, Set<OWLAnnotation> anns) {
        // The axiom takes a set of operands; build the two-element pair.
        Set<OWLDataPropertyExpression> operands = new HashSet<>();
        operands.add(o);
        operands.add(s);
        return dataFactory.getOWLDisjointDataPropertiesAxiom(operands,
                anns);
    }

    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DISJOINT_WITH;
    }
}
/**
 * {@code Domain:} section of a data property frame: each parsed class
 * expression is a domain of the frame subject.
 */
class DataPropertyDomainListItemParser
        extends AnnotatedClassExpressionListItemParser<OWLDataProperty> {
    @Override
    public OWLAxiom createAxiom(OWLDataProperty s, OWLClassExpression o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLDataPropertyDomainAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DOMAIN;
    }
}
/** Base for sections whose entries are data ranges. */
abstract class AnnotatedDataRangeListItemParser<F>
        implements AnnotatedListItemParser<F, OWLDataRange> {
    /** Each entry is parsed as one data range. */
    @Override
    public OWLDataRange parseItem(F s) {
        return parseDataRange();
    }
}
/**
 * {@code Range:} section of a data property frame: each parsed data range
 * is a range of the frame subject.
 */
class DataPropertyRangeListItemParser
        extends AnnotatedDataRangeListItemParser<OWLDataProperty> {
    @Override
    public OWLAxiom createAxiom(OWLDataProperty s, OWLDataRange o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLDataPropertyRangeAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return RANGE;
    }
}
/**
 * {@code Characteristics:} section of a data property frame: each entry
 * already yields a complete characteristic axiom; annotations are attached
 * afterwards.
 */
class DataPropertyCharacteristicsItemParser implements
        AnnotatedListItemParser<OWLDataProperty, OWLDataPropertyCharacteristicAxiom> {
    @Override
    public OWLDataPropertyCharacteristicAxiom parseItem(OWLDataProperty s) {
        return parseDataPropertyCharacteristic(s);
    }
    @Override
    public OWLAxiom createAxiom(OWLDataProperty s,
            OWLDataPropertyCharacteristicAxiom o, Set<OWLAnnotation> anns) {
        // The parsed item is already an axiom; just annotate it.
        return o.getAnnotatedAxiom(anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return CHARACTERISTICS;
    }
}
/**
 * {@code Types:} section of an individual frame: the frame subject is an
 * instance of each parsed class expression.
 */
class IndividualTypesItemParser
        extends AnnotatedClassExpressionListItemParser<OWLIndividual> {
    @Override
    public OWLAxiom createAxiom(OWLIndividual s, OWLClassExpression o,
            Set<OWLAnnotation> anns) {
        // Class assertion: o is the type, s the individual.
        return dataFactory.getOWLClassAssertionAxiom(o, s, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return TYPES;
    }
}
/**
 * {@code Facts:} section of an individual frame: each entry is a property
 * assertion about the frame subject; annotations are attached afterwards.
 */
class IndividualFactsItemParser implements
        AnnotatedListItemParser<OWLIndividual, OWLPropertyAssertionAxiom<?, ?>> {
    @Override
    public OWLPropertyAssertionAxiom<?, ?> parseItem(OWLIndividual s) {
        return parseFact(s);
    }
    @Override
    public OWLAxiom createAxiom(@Nonnull OWLIndividual s,
            OWLPropertyAssertionAxiom<?, ?> o, Set<OWLAnnotation> anns) {
        // The parsed fact is already an axiom; just annotate it.
        return o.getAnnotatedAxiom(anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return FACTS;
    }
}
/**
 * {@code SameAs:} section of an individual frame: each parsed individual is
 * asserted identical to the frame subject.
 */
class IndividualSameAsItemParser
        extends AnnotatedIndividualsListItemParser<OWLIndividual> {

    @Override
    public OWLAxiom createAxiom(OWLIndividual s, OWLIndividual o,
            @Nonnull Set<OWLAnnotation> anns) {
        // The axiom takes a set of individuals; build the two-element pair.
        Set<OWLIndividual> pair = new HashSet<>();
        pair.add(o);
        pair.add(s);
        return dataFactory.getOWLSameIndividualAxiom(pair, anns);
    }

    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return SAME_AS;
    }
}
/**
 * {@code DifferentFrom:} section of an individual frame: each parsed
 * individual is asserted different from the frame subject.
 */
class IndividualDifferentFromItemParser
        extends AnnotatedIndividualsListItemParser<OWLIndividual> {

    @Override
    public OWLAxiom createAxiom(OWLIndividual s, OWLIndividual o,
            Set<OWLAnnotation> anns) {
        // The axiom takes a set of individuals; build the two-element pair.
        Set<OWLIndividual> pair = new HashSet<>();
        pair.add(o);
        pair.add(s);
        return dataFactory.getOWLDifferentIndividualsAxiom(pair, anns);
    }

    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DIFFERENT_FROM;
    }
}
/**
 * {@code DifferentIndividuals:} section: each entry is a list of
 * individuals; the frame subject plus the listed individuals are asserted
 * pairwise different.
 */
class IndividualDifferentIndividualsItemParser implements
        AnnotatedListItemParser<OWLIndividual, Set<OWLIndividual>> {

    @Override
    public Set<OWLIndividual> parseItem(OWLIndividual s) {
        return parseIndividualList();
    }

    @Override
    public OWLAxiom createAxiom(OWLIndividual s, Set<OWLIndividual> o,
            Set<OWLAnnotation> anns) {
        // Copy the parsed list and include the frame subject itself.
        Set<OWLIndividual> operands = new HashSet<>(o);
        operands.add(s);
        return dataFactory.getOWLDifferentIndividualsAxiom(operands, anns);
    }

    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DIFFERENT_INDIVIDUALS;
    }
}
/**
 * {@code Annotations:} section of an individual frame. The annotation
 * subject is the anonymous individual itself when the frame subject is
 * anonymous, otherwise the named individual's IRI.
 */
class IndividualAnnotationItemParser
        implements AnnotatedListItemParser<OWLIndividual, OWLAnnotation> {

    @Override
    public OWLAnnotation parseItem(OWLIndividual s) {
        return parseAnnotation();
    }

    @Override
    public OWLAxiom createAxiom(OWLIndividual s, OWLAnnotation o,
            Set<OWLAnnotation> anns) {
        // Anonymous individuals are annotated directly; named individuals
        // are annotated via their IRI.
        if (s.isAnonymous()) {
            return dataFactory.getOWLAnnotationAssertionAxiom(
                    s.asOWLAnonymousIndividual(), o, anns);
        }
        return dataFactory.getOWLAnnotationAssertionAxiom(
                s.asOWLNamedIndividual().getIRI(), o, anns);
    }

    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return ANNOTATIONS;
    }
}
/** Base for sections whose entries are IRIs. */
abstract class AnnotatedIRIListItemParser<F>
        implements AnnotatedListItemParser<F, IRI> {
    /** Each entry is parsed as one IRI. */
    @Override
    public IRI parseItem(F s) {
        return parseIRI();
    }
}
/**
 * {@code SubPropertyOf:} section of an annotation property frame: the frame
 * subject is a subproperty of each parsed annotation property.
 */
class AnnotationPropertySubPropertyOfListItemParser implements
        AnnotatedListItemParser<OWLAnnotationProperty, OWLAnnotationProperty> {
    @Override
    public OWLAnnotationProperty parseItem(OWLAnnotationProperty s) {
        return parseAnnotationProperty();
    }
    @Override
    public OWLAxiom createAxiom(OWLAnnotationProperty s,
            OWLAnnotationProperty o, Set<OWLAnnotation> anns) {
        return dataFactory.getOWLSubAnnotationPropertyOfAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return SUB_PROPERTY_OF;
    }
}
/**
 * {@code Domain:} section of an annotation property frame: each parsed IRI
 * is a domain of the frame subject.
 */
class AnnotationPropertyDomainListItemParser
        extends AnnotatedIRIListItemParser<OWLAnnotationProperty> {
    @Override
    public OWLAxiom createAxiom(OWLAnnotationProperty s, IRI o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLAnnotationPropertyDomainAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return DOMAIN;
    }
}
/**
 * {@code Range:} section of an annotation property frame: each parsed IRI
 * is a range of the frame subject.
 */
class AnnotationPropertyRangeListItemParser
        extends AnnotatedIRIListItemParser<OWLAnnotationProperty> {
    @Override
    public OWLAxiom createAxiom(OWLAnnotationProperty s, IRI o,
            Set<OWLAnnotation> anns) {
        return dataFactory.getOWLAnnotationPropertyRangeAxiom(s, o, anns);
    }
    @Override
    public ManchesterOWLSyntax getFrameSectionKeyword() {
        return RANGE;
    }
}
}
|
#!/usr/bin/env bash
# Launch a five-node local Flare network on 127.0.0.1.
# Node 1 bootstraps alone; nodes 2-5 bootstrap from node 1 (staking port 9651).
# Press enter afterwards to terminate every launched node.
#
# Fixes over the previous revision:
#   * stderr is now captured in console.log as well — the old `2>&1 > file`
#     redirection order sent stderr to the terminal, not the log file;
#   * PIDs are taken directly from $! instead of a needless `echo $!` subshell;
#   * the five copy-pasted launch stanzas are folded into one helper.

BOOTSTRAP_ID=NodeID-5dDZXn99LCkDoEi6t9gTitZuQmhokxQTc
NODE_PIDS=()

# launch_node INDEX HTTP_PORT STAKING_PORT NODE_ID BOOTSTRAP_IPS BOOTSTRAP_IDS
# Starts one node in the background, logging to ./logs/nodeINDEX/console.log,
# and records its PID in NODE_PIDS. NODE_ID doubles as the TLS cert basename.
launch_node() {
    local idx="$1" http_port="$2" staking_port="$3" node_id="$4"
    local boot_ips="$5" boot_ids="$6"
    printf "Launching node %s at 127.0.0.1:%s\n" "$idx" "$http_port"
    ./build/flare --network-id=local \
        --public-ip=127.0.0.1 \
        --http-port="$http_port" \
        --staking-port="$staking_port" \
        --log-dir="./logs/node${idx}" \
        --db-dir="./db/node${idx}" \
        --bootstrap-ips="$boot_ips" \
        --bootstrap-ids="$boot_ids" \
        --staking-tls-cert-file="./scripts/configs/local/${node_id}.crt" \
        --staking-tls-key-file="./scripts/configs/local/${node_id}.key" \
        --db-type=leveldb \
        --log-level=debug >"./logs/node${idx}/console.log" 2>&1 &
    NODE_PIDS+=("$!")
    sleep 3
}

mkdir -p ./db/node{1,2,3,4,5} ./logs/node{1,2,3,4,5}

# NOTE(review): variable name kept verbatim, trailing lowercase "s" included;
# confirm the binary really reads FBA_VALs and not FBA_VALS before renaming.
export FBA_VALs=./scripts/configs/local/fba_validators.json
export WEB3_API=enabled

launch_node 1 9650 9651 "$BOOTSTRAP_ID" "" ""
launch_node 2 9660 9661 NodeID-AQghDJTU3zuQj73itPtfTZz6CxsTQVD3R 127.0.0.1:9651 "$BOOTSTRAP_ID"
launch_node 3 9670 9671 NodeID-EkH8wyEshzEQBToAdR7Fexxcj9rrmEEHZ 127.0.0.1:9651 "$BOOTSTRAP_ID"
launch_node 4 9680 9681 NodeID-FPAwqHjs8Mw8Cuki5bkm3vSVisZr8t2Lu 127.0.0.1:9651 "$BOOTSTRAP_ID"
launch_node 5 9690 9691 NodeID-HaZ4HpanjndqSuN252chFsTysmdND5meA 127.0.0.1:9651 "$BOOTSTRAP_ID"

printf "\n"
read -p "Press enter to kill all nodes"
kill "${NODE_PIDS[@]}"
|
def permutation(s):
    """Return a list of all permutations of the string ``s``.

    Fixes over the original: ``permutation("")`` now returns ``[""]``
    (0! = 1 permutation) instead of ``[]``, and a single character returns
    ``[s]`` rather than the bare string, so the return type is consistently
    a list of strings. Order is preserved: permutations starting with
    ``s[0]`` come first, then those starting with ``s[1]``, and so on.

    :param s: string to permute (length n yields n! results)
    :return: list of the n! permutations of ``s``
    """
    # Base case: zero or one character has exactly one permutation, itself.
    if len(s) <= 1:
        return [s]
    result = []
    for i, head in enumerate(s):
        # Prefix each permutation of the remaining characters with s[i].
        for tail in permutation(s[:i] + s[i + 1:]):
            result.append(head + tail)
    return result
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.